Dataset schema (column, dtype, observed range or number of values):

| column | dtype | range / values |
| --- | --- | --- |
| repo_name | string | lengths 5-92 |
| path | string | lengths 4-232 |
| copies | string | 22 classes |
| size | string | lengths 4-7 |
| content | string | lengths 626-1.05M |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 5.21-99.9 |
| line_max | int64 | 12-999 |
| alpha_frac | float64 | 0.25-0.96 |
| autogenerated | bool | 1 class |

Each record below lists its repo_name, path, copies, and size, then the full file content, then the remaining columns (license, hash, line_mean, line_max, alpha_frac, autogenerated).
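A minimal sketch of how rows with this schema might be loaded and filtered with the Hugging Face `datasets` library; the dataset path "example/code-dump" is a hypothetical placeholder, not this dataset's actual name:

from datasets import load_dataset

ds = load_dataset("example/code-dump", split="train")  # hypothetical dataset path
# keep permissively licensed, human-written files only
ds = ds.filter(lambda row: row["license"] in ("mit", "apache-2.0")
               and not row["autogenerated"])
print(ds[0]["repo_name"], ds[0]["path"])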
repo_name: tensorflow/tpu | path: models/experimental/inference/load_test/targets/grpc_target.py | copies: 1 | size: 4163
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""GRPC targets."""
from typing import Any, Callable, Iterable, Mapping, Optional
from absl import logging
import grpc
import numpy as np
import tensorflow as tf
from load_test.targets import target
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc
class TfServingGrpcWorker:
"""A worker that sends a gRPC request."""
def __init__(self,
request: predict_pb2.PredictRequest,
request_timeout: float,
stub: prediction_service_pb2_grpc.PredictionServiceStub,
completion_callback: Optional[Callable[[], Any]] = None,
query_handle: target.QueryHandle = None,
metadata: Optional[Iterable[str]] = None):
self._request = request
self._request_timeout = request_timeout
self._stub = stub
self._completion_callback = completion_callback
self._query_handle = query_handle
if not metadata:
self._metadata = []
else:
self._metadata = metadata
def start(self):
"""Starts the gRPC request."""
def _callback(future_response):
exception = future_response.exception()
if exception:
logging.error(exception)
if self._completion_callback:
if self._query_handle:
callback_args = [self._query_handle]
else:
callback_args = []
self._completion_callback(*callback_args)
def _send_rpc():
future_response = self._stub.Predict.future(
self._request,
self._request_timeout,
self._metadata)
future_response.add_done_callback(_callback)
_send_rpc()
class TfServingGrpcTarget(target.Target):
"""A TF model serving target assuming gRPC communication."""
def __init__(self,
grpc_channel: str,
request_timeout: float = 300.0,
model_name: str = '',
batch_size: int = 1,
signature_key: str = 'serving_default',
input_name: str = 'input'):
self._grpc_channel = grpc_channel
self._request_timeout = request_timeout
self._model_name = model_name
self._batch_size = batch_size
self._input_name = input_name
self._signature_key = signature_key
grpc_channel = grpc.insecure_channel(grpc_channel[len('grpc://'):])
self._stub = prediction_service_pb2_grpc.PredictionServiceStub(
grpc_channel)
def prepare(self, sample: Mapping[str, Any]) -> predict_pb2.PredictRequest:
"""Converts a sample into gRPC `PredictRequest`."""
request = predict_pb2.PredictRequest()
request.model_spec.name = self._model_name
request.model_spec.signature_name = self._signature_key
for k, v in sample.items():
if hasattr(v, 'shape'):
tensor_shape = (self._batch_size,) + v.shape
else:
tensor_shape = (self._batch_size,)
request.inputs[k].CopyFrom(
tf.make_tensor_proto(
np.array([v] * self._batch_size), shape=tensor_shape))
return request
def send(
self,
query: predict_pb2.PredictRequest,
completion_callback: Optional[Callable[[int], Any]],
query_handle: target.QueryHandle = None):
"""Sends a request over gRPC."""
worker = TfServingGrpcWorker(
stub=self._stub,
completion_callback=completion_callback,
request=query,
request_timeout=self._request_timeout,
query_handle=query_handle)
worker.start()
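
# --- Illustrative usage sketch (not part of the original file) ---
# How this target might be driven end to end; the channel address, model name,
# and input tensor below are hypothetical placeholders.
if __name__ == '__main__':
  sketch_target = TfServingGrpcTarget(
      grpc_channel='grpc://localhost:8500',  # hypothetical serving address
      model_name='my_model',                 # hypothetical model name
      batch_size=2)
  sketch_request = sketch_target.prepare(
      {'input': np.zeros((224, 224, 3), np.float32)})
  sketch_target.send(sketch_request,
                     completion_callback=lambda *a: logging.info('done %s', a))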
license: apache-2.0 | hash: 7,416,422,030,999,926,000 | line_mean: 33.122951 | line_max: 80 | alpha_frac: 0.639683 | autogenerated: false

repo_name: Firefly-Automation/Firefly | path: Firefly/services/darksky.py | copies: 1 | size: 3196
from Firefly import logging
from Firefly.const import SERVICE_CONFIG_FILE
from Firefly.helpers.service import Service
import configparser
from forecastiopy import ForecastIO, FIOCurrently, FIOAlerts, FIODaily
from Firefly import scheduler
TITLE = 'Dark Sky Service for Firefly'
AUTHOR = 'Zachary Priddy [email protected]'
SERVICE_ID = 'service_darksky'
COMMANDS = ['refresh']
REQUESTS = ['current']
SECTION = 'DARKSKY'
# TODO: Setup function should get the config from the service config file. If the
# required params are not in the config file then it should log and error message
# and abort install
# TODO: push this data to location weather info.. this could be useful
def Setup(firefly, package, **kwargs):
config = configparser.ConfigParser()
config.read(SERVICE_CONFIG_FILE)
enable = config.getboolean(SECTION, 'enable', fallback=False)
if enable is False:
return False
api_key = config.get(SECTION,'api_key',fallback=None)
refresh = config.getint(SECTION,'refresh',fallback=30)
if api_key is None:
logging.error(code='FF.DAR.SET.001') # darksky api key missing
return False
darksky = Darksky(firefly, package, api_key=api_key, refresh=refresh)
firefly.install_component(darksky)
return True
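
# --- Illustrative config sketch (not part of the original file) ---
# Setup() above reads SERVICE_CONFIG_FILE with configparser; a matching
# [DARKSKY] section could look like this (the api_key value is a made-up
# placeholder):
#
#   [DARKSKY]
#   enable = true
#   api_key = 0123456789abcdef0123456789abcdef
#   refresh = 30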
class Darksky(Service):
def __init__(self, firefly, package, **kwargs):
super().__init__(firefly, SERVICE_ID, package, TITLE, AUTHOR, COMMANDS, REQUESTS)
self._api_key = kwargs.get('api_key')
self._long = firefly.location.longitude
self._lat = firefly.location.latitude
self._refresh_time = kwargs.get('refresh')
self._darksky = ForecastIO.ForecastIO(self._api_key,
units=ForecastIO.ForecastIO.UNITS_SI,
lang=ForecastIO.ForecastIO.LANG_ENGLISH,
latitude = self._lat,
longitude= self._long
)
self._currently = None
self._alerts = None
self.add_command('refresh', self.refresh)
self.add_request('current', self.current)
scheduler.runEveryM(self._refresh_time, self.refresh)
self.refresh()
def refresh(self):
if self._darksky.has_currently() is True:
currently = FIOCurrently.FIOCurrently(self._darksky)
alerts = FIOAlerts.FIOAlerts(self._darksky)
#TODO: Fix this in FIODaily.has_daily()
daily = FIODaily.FIODaily(self._darksky)
print('Currently')
for item in currently.get().keys():
print(item + ' : ' + str(currently.get()[item]))
# Or access attributes directly
#print(currently.temperature)
#print(currently.humidity)
if self._darksky.has_alerts() is True:
for a in alerts.alerts:
print(a)
self._currently = currently.currently
self._alerts = alerts.alerts
else:
print('No Alert data')
if self._darksky.has_daily():
for d in range(0,daily.days()):
print(daily.get(d))
else:
print('No Currently data')
def current(self, command, refresh=False, **kwargs):
if refresh:
self.refresh()
    return self._currently  # _currently already holds the conditions dict
license: apache-2.0 | hash: -675,673,617,958,015,900 | line_mean: 32.291667 | line_max: 85 | alpha_frac: 0.648623 | autogenerated: false

repo_name: kernsuite-debian/lofar | path: LCS/PyCommon/test/t_cache.py | copies: 1 | size: 2465
import unittest
from lofar.common.cache import cache
class TestCache(unittest.TestCase):
@cache
def cached_func(self, arg, kwarg=None):
self.invocations += 1
return arg, kwarg
def setUp(self):
self.invocations = 0
def test_simple_function(self):
""" Check whether the cache class works with simple functions. """
@cache
def cached_func(arg, kwarg=None):
return arg, kwarg
result = cached_func(1, 2)
self.assertEqual(result, (1, 2))
def test_class_member(self):
""" Check whether the cache class works with class members. """
class myclass:
@cache
def cached_func(self, arg, kwarg=None):
return arg, kwarg
obj = myclass()
result = obj.cached_func(1, 2)
self.assertEqual(result, (1, 2))
def test_class_static_member(self):
""" Check whether the cache class works with static class members. """
class myclass:
@staticmethod
@cache
def cached_func(arg, kwarg=None):
return arg, kwarg
obj = myclass()
result = obj.cached_func(1, 2)
self.assertEqual(result, (1, 2))
def test_class_property(self):
""" Check whether the cache class works with class properties. """
class myclass:
@property
@cache
def cached_func(self):
return True
obj = myclass()
result = obj.cached_func
self.assertEqual(result, True)
def test_initial_call(self):
""" Does the cache return the correct result? """
result = self.cached_func(1, 2)
self.assertEqual(result, (1, 2))
self.assertEqual(self.invocations, 1)
def test_cached_call(self):
""" Does the cache cache results? """
result = self.cached_func(1, 2)
result = self.cached_func(1, 2)
self.assertEqual(result, (1, 2))
self.assertEqual(self.invocations, 1)
def test_different_calls(self):
""" Does the cache NOT cache results if new parameters are provided? """
result = self.cached_func(1, 2)
result = self.cached_func(1, 3)
self.assertEqual(result, (1, 3))
self.assertEqual(self.invocations, 2)
def main(argv):
unittest.main()
if __name__ == "__main__":
# run all tests
import sys
main(sys.argv[1:])
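
# --- Illustrative sketch (not part of the original file) ---
# The `cache` decorator under test lives in lofar.common.cache and is not shown
# here. A minimal stand-in consistent with these tests would memoize results
# per argument tuple and bind instance methods via the descriptor protocol,
# roughly like this (kept commented out so it does not shadow the real import):
#
#   import functools
#
#   class cache(object):
#       def __init__(self, func):
#           self.func = func
#           self.memo = {}
#       def __get__(self, obj, objtype=None):
#           # bind to the instance so class members work
#           return functools.partial(self.__call__, obj)
#       def __call__(self, *args, **kwargs):
#           key = (args, tuple(sorted(kwargs.items())))
#           if key not in self.memo:
#               self.memo[key] = self.func(*args, **kwargs)
#           return self.memo[key]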
license: gpl-3.0 | hash: -6,182,551,038,842,737,000 | line_mean: 23.65 | line_max: 80 | alpha_frac: 0.56998 | autogenerated: false

repo_name: rusty1s/embedded_gcnn | path: lib/model/placeholder.py | copies: 1 | size: 1960
from six.moves import xrange
import numpy as np
import tensorflow as tf
from ..tf.convert import sparse_to_tensor
def generate_placeholders(batch_size, levels, num_features, num_labels):
placeholders = {
'features': [
tf.placeholder(tf.float32, [None, num_features],
'features_{}'.format(i + 1))
for i in xrange(batch_size)
],
'labels':
tf.placeholder(tf.uint8, [batch_size, num_labels], 'labels'),
'dropout':
tf.placeholder(tf.float32, [], 'dropout'),
}
for j in xrange(1, levels + 1):
placeholders.update({
'adj_dist_{}'.format(j): [
tf.sparse_placeholder(
tf.float32, name='adj_dist_{}_{}'.format(j, i + 1))
for i in xrange(batch_size)
],
})
placeholders.update({
'adj_rad_{}'.format(j): [
tf.sparse_placeholder(
tf.float32, name='adj_rad_{}_{}'.format(j, i + 1))
for i in xrange(batch_size)
],
})
return placeholders
def feed_dict_with_batch(placeholders, batch, dropout=0.0):
batch_size = len(batch)
levels = len(batch[0][1]) - 1
labels = np.array([batch[i][-1] for i in xrange(batch_size)], np.int32)
feed_dict = {
placeholders['labels']: labels,
placeholders['dropout']: dropout,
}
feed_dict.update(
{placeholders['features'][i]: batch[i][0]
for i in xrange(batch_size)})
for j in xrange(levels):
feed_dict.update({
placeholders['adj_dist_{}'.format(j + 1)][i]:
sparse_to_tensor(batch[i][1][j])
for i in xrange(batch_size)
})
feed_dict.update({
placeholders['adj_rad_{}'.format(j + 1)][i]:
sparse_to_tensor(batch[i][2][j])
for i in xrange(batch_size)
})
return feed_dict
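
# --- Illustrative usage sketch (not part of the original file) ---
# Each batch entry is assumed to be a tuple of
# (features, adj_dist_list, adj_rad_list, label), where both adjacency lists
# hold levels + 1 scipy sparse matrices; all shapes below are made up.
if __name__ == '__main__':
    import scipy.sparse as sp
    placeholders = generate_placeholders(
        batch_size=2, levels=1, num_features=4, num_labels=3)
    adjs = [sp.identity(5, format='coo'), sp.identity(3, format='coo')]
    batch = [(np.zeros((5, 4)), adjs, adjs, [1, 0, 0]) for _ in range(2)]
    feed = feed_dict_with_batch(placeholders, batch, dropout=0.5)
    print(len(feed))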
license: mit | hash: 4,823,953,470,051,804,000 | line_mean: 27.405797 | line_max: 75 | alpha_frac: 0.519388 | autogenerated: false

repo_name: cklb/PyMoskito | path: pymoskito/metaprocessing/eval_L1perA.py | copies: 1 | size: 2948
# -*- coding: utf-8 -*-
import os
import matplotlib as mpl
import settings as st
mpl.rcParams['text.usetex'] = True
mpl.rcParams['text.latex.unicode'] = True
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from processing_gui import MetaProcessingModule
class eval_L1perA(MetaProcessingModule):
'''
    create diagrams for the evaluation of the ITAE metric
'''
line_color = '#aaaaaa'
line_style = '-'
font_size = 20
epsPercent = 2.5
spacing = 0.01
counter = 0
def __init__(self):
MetaProcessingModule.__init__(self)
return
def sortLists(self, val):
val[1] = [x for (y, x) in sorted(zip(val[0], val[1]))]
val[0] = sorted(val[0])
return val
def run(self, postResults):
controller_dict = {"FController": [[], []],
"GController": [[], []],
"JController": [[], []],
"LSSController": [[], []],
"PIFeedbackController": [[], []]}
# TODO: fill levels per input
level1 = 'modules'
level2 = 'trajectory'
level3 = 'Amplitude'
x_label = 'A [m]'
y_label = 'E [m^2]'
for elem in postResults:
controller_dict[elem['modules']['controller']['type']][0].append(elem[level1][level2][level3])
controller_dict[elem['modules']['controller']['type']][1].append(elem['metrics']['L1NormAbs'])
fig = Figure()
axes = fig.add_subplot(1, 1, 1)
xMax = 0
leg = []
for elem in controller_dict:
controller_dict[elem] = self.sortLists(controller_dict[elem])
axes.plot(controller_dict[elem][0], controller_dict[elem][1], 'o-',
c=st.color_cycle[elem])
leg.append(elem)
if controller_dict[elem][0]:
if xMax < controller_dict[elem][0][-1]:
xMax = controller_dict[elem][0][-1]
axes.legend(leg, loc=0)
axes.set_xlim(left=0.1, right=xMax)
# axes.set_ylim(top=6.0, bottom=3)
axes.set_xlabel(r'$' + x_label + '$', size=st.label_size)
axes.set_ylabel(r'$' + y_label + '$', size=st.label_size)
axes.set_title(r'Fehlerintegral \"uber Amplitude', size=st.label_size)
axes.grid(color='#ababab', linestyle='--')
# write results
filePath = os.path.join(os.path.pardir, 'results', 'metaprocessing', 'A2')
if not os.path.isdir(filePath):
os.makedirs(filePath)
metaName = 'L1-plotA'
fileName = os.path.join(filePath, metaName)
canvas = FigureCanvas(fig)
fig.savefig(fileName + '.svg')
fig.savefig(fileName + '.png')
fig.savefig(fileName + '.pdf')
        results = [{'figure': canvas, 'name': metaName}]
return results
license: bsd-3-clause | hash: -2,960,670,445,152,885,000 | line_mean: 31.755556 | line_max: 106 | alpha_frac: 0.546811 | autogenerated: false

repo_name: kfieldho/SMQTK | path: bin/createFileIngest.py | copies: 1 | size: 2811
#!/usr/bin/env python
"""
Create an ingest of files in a specified directory.
"""
import argparse
import glob
import json
import logging
import os.path as osp
from smqtk.representation import get_data_set_impls
from smqtk.representation.data_element.file_element import DataFileElement
from smqtk.utils import bin_utils, plugin
def default_config():
return {
"data_set": plugin.make_config(get_data_set_impls)
}
def cli_parser():
description = "Add a set of local system files to a data set via " \
"explicit paths or shell-style glob strings."
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-v', '--verbose', action='store_true', default=False,
help='Add debug messaged to output logging.')
group_configuration = parser.add_argument_group("Configuration")
group_configuration.add_argument('-c', '--config',
help="Path to the JSON configuration file")
group_configuration.add_argument('--output-config',
help="Optional path to output a default "
"JSON configuration file to. "
"This output file should be modified "
"and used for this executable.")
parser.add_argument("input_files", metavar='GLOB', nargs='*')
return parser
def main():
parser = cli_parser()
args = parser.parse_args()
bin_utils.initialize_logging(logging.getLogger(),
logging.INFO - (10*args.verbose))
log = logging.getLogger("main")
# Merge loaded config with default
config_loaded = False
config = default_config()
if args.config:
if osp.isfile(args.config):
with open(args.config, 'r') as f:
config.update(json.load(f))
config_loaded = True
elif not osp.isfile(args.config):
log.error("Configuration file path not valid.")
exit(1)
# output configuration dictionary when asked for.
bin_utils.output_config(args.output_config, config, log, True)
if not config_loaded:
log.error("No configuration provided")
exit(1)
#: :type: smqtk.representation.DataSet
ds = plugin.from_plugin_config(config['data_set'], get_data_set_impls)
log.debug("Script arguments:\n%s" % args)
def ingest_file(fp):
ds.add_data(DataFileElement(fp))
for f in args.input_files:
f = osp.expanduser(f)
if osp.isfile(f):
ingest_file(f)
else:
log.debug("Expanding glob: %s" % f)
for g in glob.glob(f):
ingest_file(g)
if __name__ == '__main__':
main()
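
# --- Illustrative usage sketch (not part of the original file) ---
# Typical flow, with hypothetical paths: first write a default config, edit the
# "data_set" plugin section, then ingest explicit files and/or glob patterns:
#
#   python createFileIngest.py --output-config config.json
#   python createFileIngest.py -c config.json /data/imgs/*.png /data/one.jpg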
license: bsd-3-clause | hash: 6,114,654,768,378,331,000 | line_mean: 29.89011 | line_max: 80 | alpha_frac: 0.590537 | autogenerated: false

repo_name: TheAlgorithms/Python | path: data_structures/binary_tree/number_of_possible_binary_trees.py | copies: 1 | size: 2900
"""
Hey, we are going to find an exciting number called Catalan number which is use to find
the number of possible binary search trees from tree of a given number of nodes.
We will use the formula: t(n) = SUMMATION(i = 1 to n)t(i-1)t(n-i)
Further details at Wikipedia: https://en.wikipedia.org/wiki/Catalan_number
"""
"""
Our Contribution:
Basically we Create the 2 function:
1. catalan_number(node_count: int) -> int
Returns the number of possible binary search trees for n nodes.
2. binary_tree_count(node_count: int) -> int
Returns the number of possible binary trees for n nodes.
"""
def binomial_coefficient(n: int, k: int) -> int:
"""
Since Here we Find the Binomial Coefficient:
https://en.wikipedia.org/wiki/Binomial_coefficient
C(n,k) = n! / k!(n-k)!
:param n: 2 times of Number of nodes
:param k: Number of nodes
:return: Integer Value
>>> binomial_coefficient(4, 2)
6
"""
    result = 1  # holds the calculated value
# Since C(n, k) = C(n, n-k)
if k > (n - k):
k = n - k
# Calculate C(n,k)
for i in range(k):
result *= n - i
result //= i + 1
return result
def catalan_number(node_count: int) -> int:
"""
We can find Catalan number many ways but here we use Binomial Coefficient because it
does the job in O(n)
return the Catalan number of n using 2nCn/(n+1).
:param n: number of nodes
:return: Catalan number of n nodes
>>> catalan_number(5)
42
>>> catalan_number(6)
132
"""
return binomial_coefficient(2 * node_count, node_count) // (node_count + 1)
def factorial(n: int) -> int:
"""
Return the factorial of a number.
:param n: Number to find the Factorial of.
:return: Factorial of n.
>>> import math
>>> all(factorial(i) == math.factorial(i) for i in range(10))
True
>>> factorial(-5) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
ValueError: factorial() not defined for negative values
"""
if n < 0:
raise ValueError("factorial() not defined for negative values")
result = 1
for i in range(1, n + 1):
result *= i
return result
def binary_tree_count(node_count: int) -> int:
"""
Return the number of possible of binary trees.
:param n: number of nodes
:return: Number of possible binary trees
>>> binary_tree_count(5)
5040
>>> binary_tree_count(6)
95040
"""
return catalan_number(node_count) * factorial(node_count)
if __name__ == "__main__":
node_count = int(input("Enter the number of nodes: ").strip() or 0)
if node_count <= 0:
raise ValueError("We need some nodes to work with.")
print(
f"Given {node_count} nodes, there are {binary_tree_count(node_count)} "
f"binary trees and {catalan_number(node_count)} binary search trees."
)
license: mit | hash: 7,677,864,668,484,840,000 | line_mean: 27.431373 | line_max: 88 | alpha_frac: 0.621724 | autogenerated: false

repo_name: madflow/seahub | path: tests/api/endpoints/test_dir_shared_items.py | copies: 1 | size: 6670
import json
from seaserv import seafile_api
from seahub.test_utils import BaseTestCase
class DirSharedItemsTest(BaseTestCase):
def tearDown(self):
self.remove_repo()
def _add_shared_items(self):
sub_repo_id = seafile_api.create_virtual_repo(self.repo.id,
self.folder,
self.repo.name, '',
self.user.username)
# A user shares a folder to admin with permission 'rw'.
seafile_api.share_repo(sub_repo_id, self.user.username,
self.admin.username, 'rw')
# A user shares a folder to group with permission 'rw'.
seafile_api.set_group_repo(sub_repo_id, self.group.id,
self.user.username, 'rw')
def test_can_list_all(self):
self._add_shared_items()
self.login_as(self.user)
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s&share_type=user,group' % (
self.repo.id,
self.folder))
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert len(json_resp) == 2
def test_list_without_repo_permission(self):
self._add_shared_items()
self.login_as(self.admin)
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s&share_type=user,group' % (
self.repo.id,
self.folder))
self.assertEqual(403, resp.status_code)
def test_can_list_without_share_type_arg(self):
self._add_shared_items()
self.login_as(self.user)
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s' % (
self.repo.id,
self.folder))
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert len(json_resp) == 2
def test_can_share_folder_to_users(self):
self.login_as(self.user)
resp = self.client.put(
'/api2/repos/%s/dir/shared_items/?p=%s' % (self.repo.id,
self.folder),
"share_type=user&[email protected]&[email protected]",
'application/x-www-form-urlencoded',
)
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert len(json_resp['success']) == 2
assert json_resp['success'][0]['permission'] == 'r'
def test_can_share_root_to_groups(self):
self.login_as(self.user)
grp1 = self.group
grp2 = self.create_group(group_name="test-grp2",
username=self.user.username)
resp = self.client.put(
'/api2/repos/%s/dir/shared_items/?p=/' % (self.repo.id),
"share_type=group&group_id=%d&group_id=%d&permission=rw" % (grp1.id, grp2.id),
'application/x-www-form-urlencoded',
)
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert len(json_resp['success']) == 2
assert json_resp['success'][0]['permission'] == 'rw'
def test_can_share_folder_to_groups(self):
self.login_as(self.user)
grp1 = self.group
grp2 = self.create_group(group_name="test-grp2",
username=self.user.username)
resp = self.client.put(
'/api2/repos/%s/dir/shared_items/?p=%s' % (self.repo.id,
self.folder),
"share_type=group&group_id=%d&group_id=%d&permission=rw" % (grp1.id, grp2.id),
'application/x-www-form-urlencoded',
)
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert len(json_resp['success']) == 2
assert json_resp['success'][0]['permission'] == 'rw'
def test_can_modify_user_shared_repo(self):
self._add_shared_items()
self.login_as(self.user)
resp = self.client.post('/api2/repos/%s/dir/shared_items/?p=%s&share_type=user&username=%s' % (
self.repo.id,
self.folder,
self.admin.username), {
'permission': 'r'
}
)
json_resp = json.loads(resp.content)
assert json_resp['success'] is True
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s&share_type=user' % (
self.repo.id,
self.folder))
json_resp = json.loads(resp.content)
assert json_resp[0]['permission'] == 'r'
def test_can_modify_group_shared_repo(self):
self._add_shared_items()
self.login_as(self.user)
resp = self.client.post('/api2/repos/%s/dir/shared_items/?p=%s&share_type=group&group_id=%d' % (
self.repo.id,
self.folder,
self.group.id), {
'permission': 'r'
}
)
json_resp = json.loads(resp.content)
assert json_resp['success'] is True
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s&share_type=group' % (
self.repo.id,
self.folder))
json_resp = json.loads(resp.content)
assert json_resp[0]['permission'] == 'r'
def test_can_unshare_repo_to_user(self):
self._add_shared_items()
self.login_as(self.user)
resp = self.client.delete('/api2/repos/%s/dir/shared_items/?p=%s&share_type=user&username=%s' % (
self.repo.id,
self.folder,
self.admin.username
))
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp['success'] is True
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s&share_type=user' % (
self.repo.id,
self.folder))
json_resp = json.loads(resp.content)
assert len(json_resp) == 0
def test_can_unshare_repo_to_group(self):
self._add_shared_items()
self.login_as(self.user)
resp = self.client.delete('/api2/repos/%s/dir/shared_items/?p=%s&share_type=group&group_id=%d' % (
self.repo.id,
self.folder,
self.group.id
))
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp['success'] is True
resp = self.client.get('/api2/repos/%s/dir/shared_items/?p=%s&share_type=group' % (
self.repo.id,
self.folder))
json_resp = json.loads(resp.content)
assert len(json_resp) == 0
license: apache-2.0 | hash: -8,485,250,962,879,520,000 | line_mean: 35.25 | line_max: 106 | alpha_frac: 0.544228 | autogenerated: false

repo_name: kejbaly2/metrique | path: metrique/metrique.py | copies: 1 | size: 15455
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
# Author: "Chris Ward" <[email protected]>
# FIXME: add to *Container a 'sync' command which will export
# across the network all data, persist to some other container
# and enable future 'delta' syncs.
'''
metrique.metrique
~~~~~~~~~~~~~~~~~
**Python data warehouse and information platform**
metrique is used to bring data from any number of arbitrary
sources into unified data collections that supports
transparent historical version snapshotting, client
side querying and analysis with the support of an array
of scientific computing python libraries, such as ipython,
pandas, numpy, matplotlib, and more.
The main client interface is `metrique.pyclient`
A simple example of how one might interact with metrique is
demonstrated below. In short, we import one of the many
pre-defined metrique cubes -- `osinfo_rpm` -- in this case.
Then get all the objects which that cube is built to extract --
a full list of installed RPMs on the current host system. Followed
up by persisting those objects to an external `metriqued` host.
And finishing with some querying and simple charting of the data.
>>> from metrique import pyclient
>>> g = pyclient(cube="osinfo_rpm")
>>> g.get_objects() # get information about all installed RPMs
>>> 'Total RPMs: %s' % len(g.objects)
>>> 'Example Object:', g.objects[0]
{'_oid': 'dhcp129-66.brq.redhat.com__libreoffice-ure-4.1.4.2[...]',
'_start': 1390619596.0,
'arch': 'x86_64',
'host': 'bla.host.com',
'license': '(MPLv1.1 or LGPLv3+) and LGPLv3 and LGPLv2+ and[...]',
'name': 'libreoffice-ure',
'nvra': 'libreoffice-ure-4.1.4.2-2.fc20.x86_64',
'os': 'linux',
'packager': 'Fedora Project',
'platform': 'x86_64-redhat-linux-gnu',
'release': '2.fc20',
'sourcepackage': None,
'sourcerpm': 'libreoffice-4.1.4.2-2.fc20.src.rpm',
'summary': 'UNO Runtime Environment',
'version': '4.1.4.2'
}
>>> _ids = osinfo_rpm.get_objects(flush=True) # persist to storage
>>> df = osinfo_rpm.find(fields='license')
>>> threshold = 5
>>> license_k = df.groupby('license').apply(len)
>>> license_k.sort()
>>> sub = license_k[license_k >= threshold]
>>> # shorten the names a bit
>>> sub.index = [i[0:20] + '...' if len(i) > 20 else i for i in sub.index]
>>> sub.plot(kind='bar')
... <matplotlib.axes.AxesSubplot at 0x6f77ad0>
.. note::
example date ranges: 'd', '~d', 'd~', 'd~d'
.. note::
valid date format: '%Y-%m-%d %H:%M:%S,%f', '%Y-%m-%d %H:%M:%S', '%Y-%m-%d'
'''
from __future__ import unicode_literals, absolute_import
from getpass import getuser
import logging
logger = logging.getLogger('metrique')
from copy import copy, deepcopy
from inspect import isclass
import os
from time import time
from metrique.utils import get_cube, load_config, configure
from metrique.utils import debug_setup, is_true
from metrique.utils import filename_append
ETC_DIR = os.environ.get('METRIQUE_ETC')
CACHE_DIR = os.environ.get('METRIQUE_CACHE')
LOG_DIR = os.environ.get('METRIQUE_LOGS')
TMP_DIR = os.environ.get('METRIQUE_TMP')
DEFAULT_CONFIG = os.path.join(ETC_DIR, 'metrique.json')
HASH_EXCLUDE_KEYS = ('_hash', '_id', '_start', '_end', '__v__', 'id')
class MetriqueFactory(type):
def __call__(cls, cube=None, name=None, backends=None, *args, **kwargs):
name = name or cube
if cube:
cls = get_cube(cube=cube, name=name, init=False, backends=backends)
_type = type.__call__(cls, name=name, *args, **kwargs)
return _type
class Metrique(object):
'''
:param cube_pkgs: list of package names where to search for cubes
:param cube_paths: Additional paths to search for client cubes
:param debug: turn on debug mode logging
:param log_file: filename for logs
:param log2file: boolean - log output to file?
    :param log2stdout: boolean - log output to stdout?
:param workers: number of workers for threaded operations
Wrapper object that provides consistent access to defined
backend Proxy source (source data), backend MetriqueContainer (target
storage container) and other 'helper' functionality, including
methods for loading data from csv and json, loading metrique client
cubes, config file loading and logging setup.
:cvar name: name of the cube
:cvar config: local cube config object
If cube is specified as a kwarg upon initialization, the specific cube
class will be located and returned, assuming its available in sys.path.
If the cube fails to import, RuntimeError will be raised.
Example usage::
>>> import pyclient
>>> c = pyclient(cube='git_commit')
<type HTTPClient(...)>
# is the same as... except calling .get_cube() also tries
# to clone custom config changes made after pyclient
    # was originally initiated along to the newly initiated cube.
>>> z = pyclient()
>>> z.get_cube(cube='git_commit')
<type HTTPClient(...)>
'''
config = None
config_file = DEFAULT_CONFIG
config_key = 'metrique'
container_config_key = 'container'
proxy_config_key = 'proxy'
name = None
fields = None
_container = None
_container_cls = None
_proxy = None
_proxy_cls = None
_schema_valid_keys = ('type', 'container', 'convert', 'variants')
__metaclass__ = MetriqueFactory
def __init__(self, name=None, db=None, config_file=None,
config=None, config_key=None, cube_pkgs=None,
cube_paths=None, debug=None, log_file=None,
log2file=None, log2stdout=None, log_format=None,
workers=None, log_dir=None, cache_dir=None,
etc_dir=None, tmp_dir=None, container=None,
container_config=None, container_config_key=None,
proxy=None, proxy_config=None, proxy_config_key=None,
version=None, schema=None):
super(Metrique, self).__init__()
self.name = name or self.name or Metrique.name
# cube class defined name
# FIXME: this is ugly... and not obvious...
# only used currently in sqldata.Generic
self._cube = type(self).name
options = dict(cache_dir=cache_dir,
cube_pkgs=cube_pkgs,
cube_paths=cube_paths,
db=db,
debug=debug,
etc_dir=etc_dir,
log_dir=log_dir,
log_file=log_file,
log_format=log_format,
log2file=log2file,
log2stdout=log2stdout,
name=self.name,
schema=schema,
tmp_dir=tmp_dir,
version=version,
workers=workers)
defaults = dict(cache_dir=CACHE_DIR,
cube_pkgs=['cubes'],
cube_paths=[],
db=getuser(),
debug=None,
etc_dir=ETC_DIR,
log_file='metrique.log',
log_dir=LOG_DIR,
log_format=None,
log2file=True,
log2stdout=False,
name=None,
schema={},
tmp_dir=TMP_DIR,
version=0,
workers=2)
if not self.config:
self.config = {}
if not self.config.get(self.config_key):
self.config[self.config_key] = {}
# FIXME: update os.environ LOG_DIR, ETC_DIR, etc to config'd value
# if config is passed in, set it, otherwise start
# with class assigned default or empty dict
self.config.update(copy(config or Metrique.config or {}))
self.config_file = config_file or Metrique.config_file
self.config_key = config_key or Metrique.config_key
# load defaults + set args passed in
self.config = configure(options, defaults,
config_file=self.config_file,
section_key=self.config_key,
update=self.config)
level = self.lconfig.get('debug')
log2stdout = self.lconfig.get('log2stdout')
log_format = self.lconfig.get('log_format')
log2file = self.lconfig.get('log2file')
log_dir = self.lconfig.get('log_dir')
log_file = self.lconfig.get('log_file')
if self.name:
log_file = filename_append(log_file, '.%s' % self.name)
self.lconfig['log_file'] = log_file
debug_setup(logger='metrique', level=level, log2stdout=log2stdout,
log_format=log_format, log2file=log2file,
log_dir=log_dir, log_file=log_file)
if not schema:
# schema (and more) might be defined within self.fields attr
schema = getattr(self, 'fields') or {}
# filter out invalid schema keys (eg, if derived from .fields)
schema = self._schema_filter(schema)
self.container_config_key = (container_config_key or
Metrique.container_config_key)
container_config = dict(container_config or {})
container_config.setdefault('name', self.name)
container_config.setdefault('config_file', self.config_file)
container_config.setdefault('schema', schema)
self.config[self.container_config_key].update(container_config)
self.proxy_config_key = proxy_config_key or Metrique.proxy_config_key
proxy_config = dict(proxy_config or {})
proxy_config.setdefault('table', self.name)
proxy_config.setdefault('config_file', self.config_file)
self.config.setdefault(self.proxy_config_key, {}).update(proxy_config)
self._proxy = proxy
self._container = container
if self._container_cls is None:
from metrique.core_api import MetriqueContainer
self._container_cls = MetriqueContainer
if self._proxy_cls is None:
from metrique.sqlalchemy import SQLAlchemyProxy
self._proxy_cls = SQLAlchemyProxy
def __len__(self):
return len(self.container)
@property
def container(self):
if self._container is None or isclass(self._container):
self.container_init()
# in case we haven't assigned schema, but the calling cube
# does have non-null .fields attr, assign it as the schema...
if not self.container_config['schema'] and getattr(self, 'fields', 0):
self._container.config['schema'] = getattr(self, 'fields')
return self._container
@container.setter
def container(self, value):
self.container_init(value=value)
@container.deleter
def container(self):
# replacing existing container with a new, empty one
self._container = self.container_init()
@property
def container_config(self):
self.config.setdefault(self.container_config_key, {})
return copy(self.config[self.container_config_key])
def container_init(self, value=None, **kwargs):
config = self.container_config
# don't pass 'proxy' config section as kwarg, but rather as
# proxy_config kwarg
config['proxy_config'] = config.get(self.proxy_config_key)
config[self.proxy_config_key] = None
config.update(kwargs)
if self._container is None:
self._container = self._container_cls
msg = "Invalid container: %s" % self._container
if isclass(self._container):
self._container = self._container(objects=value, **config)
is_true(isinstance(self._container, self._container_cls), msg)
return self._container
def flush(self, objects=None, autosnap=None, **kwargs):
self.container.autotable()
return self.container.flush(objects=objects, autosnap=autosnap,
**kwargs)
def get_objects(self, flush=False, autosnap=True, **kwargs):
'''
Main API method for sub-classed cubes to override for the
generation of the objects which are to (potentially) be added
to the cube (assuming no duplicates)
'''
logger.debug('Running get_objects(flush=%s, autosnap=%s, %s)' % (
flush, autosnap, kwargs))
if flush:
s = time()
result = self.flush(autosnap=autosnap, **kwargs)
diff = time() - s
logger.debug("Flush complete (%ss)" % int(diff))
return result
else:
return self
def get_cube(self, cube, init=True, name=None, copy_config=True, **kwargs):
'''wrapper for :func:`metrique.utils.get_cube`
Locates and loads a metrique cube
:param cube: name of cube to load
:param init: (bool) initialize cube before returning?
:param name: override the name of the cube
:param copy_config: apply config of calling cube to new?
Implies init=True.
:param kwargs: additional :func:`metrique.utils.get_cube`
'''
name = name or cube
config = copy(self.config) if copy_config else {}
config_file = self.config_file
container = type(self.container)
container_config = copy(self.container_config)
proxy = str(type(self.proxy))
return get_cube(cube=cube, init=init, name=name, config=config,
config_file=config_file, container=container,
container_config=container_config,
proxy=proxy, proxy_config=self.proxy_config, **kwargs)
@property
def lconfig(self):
return self.config[self.config_key]
def load_config(self, path):
return load_config(path)
@property
def objects(self):
return self.container
@objects.setter
def objects(self, value):
self.container = value
@objects.deleter
def objects(self):
del self.container
@property
def proxy(self):
if self._proxy is None or isclass(self._proxy):
self.proxy_init()
return self._proxy
@property
def proxy_config(self):
self.config.setdefault(self.proxy_config_key, {})
return copy(self.config[self.proxy_config_key])
def proxy_init(self, **kwargs):
config = self.proxy_config
config.update(kwargs)
if self._proxy is None:
self._proxy = self._proxy_cls
msg = "Invalid proxy: %s" % self._proxy
if isclass(self._proxy):
self._proxy = self._proxy(**config)
is_true(isinstance(self._proxy, self._proxy_cls), msg)
self._proxy.initialize()
return self._proxy
def _schema_filter(self, schema):
# remove all schema illegal meta keys
schema = deepcopy(schema)
for field, meta in schema.iteritems():
for k in meta.keys():
if k not in self._schema_valid_keys:
del schema[field][k]
return dict(schema)
license: gpl-3.0 | hash: 7,268,603,715,232,907,000 | line_mean: 37.25495 | line_max: 79 | alpha_frac: 0.595212 | autogenerated: false

repo_name: shlomif/PySolFC | path: pysollib/app_stat_result.py | copies: 1 | size: 2248
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pysollib.mfxutil import Struct
from pysollib.settings import TOP_SIZE
class GameStatResult:
def __init__(self):
self.min = 0
self.max = 0
self.top = []
self.num = 0
self.total = 0 # sum of all values
self.average = 0
def update(self, gameid, value, game_number, game_start_time):
# update min & max
if not self.min or value < self.min:
self.min = value
if not self.max or value > self.max:
self.max = value
# calculate position & update top
position = None
n = 0
for i in self.top:
if value < i.value:
position = n+1
v = Struct(gameid=gameid,
value=value,
game_number=game_number,
game_start_time=game_start_time)
self.top.insert(n, v)
del self.top[TOP_SIZE:]
break
n += 1
if not position and len(self.top) < TOP_SIZE:
v = Struct(gameid=gameid,
value=value,
game_number=game_number,
game_start_time=game_start_time)
self.top.append(v)
position = len(self.top)
# update average
self.total += value
self.num += 1
self.average = float(self.total)/self.num
return position
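
# --- Illustrative usage sketch (not part of the original file) ---
# Recording one result updates min/max, the top list, and the running average;
# the ids and values below are made up.
if __name__ == '__main__':
    result = GameStatResult()
    position = result.update(gameid=2, value=95,
                             game_number=1, game_start_time=0)
    print(position, result.min, result.max, result.average)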
license: gpl-3.0 | hash: 5,478,710,273,865,740,000 | line_mean: 35.258065 | line_max: 72 | alpha_frac: 0.579626 | autogenerated: false

repo_name: gnudo/python-scripts | path: changeMagnification.py | copies: 1 | size: 5376
#! /usr/bin/env python
# Date: 2019-01-18
# Author: Goran Lovric (with additions from C. M. Schlepuetz)
# License: GPL 3 (see LICENSE file in root folder)
import time
import sys
from epicsPV import epicsPV
from epicsMotor import epicsMotor
def setNewFocus(foc):
print "Moving focus to: "+str(foc)
chFocus.move(foc)
chFocus.wait(poll=1)
def setNewXValue(xvalue):
print "Moving X Basestage to: "+str(xvalue)
chXBaseStage.move(xvalue)
chXBaseStage.wait(poll=1)
def setNewZValue(zvalue):
print "Moving Z Basestage to: "+str(zvalue)
chZBaseStage.move(zvalue)
chZBaseStage.wait(poll=1)
def setFESlitsValues(H_val,V_val):
print "Setting Frontend slits values to (H,V): ("+str(H_val)+",",str(V_val)+")"
chFESlits_H.putw(H_val)
chFESlits_V.putw(V_val)
def getMagnification():
return chMagnification.getw()
if __name__ == "__main__":
if not len(sys.argv) == 2:
print ">>>>Error!! Call the script as follows: 'python changeMagnification.py [4x/10x/20x]' "
exit(1)
desired_objective = sys.argv[1]
if desired_objective not in ('4x', '10x', '20x'):
print ">>>>Error!! Not supported objective: Chooser either 4x, 10x, or 20x"
exit(1)
if desired_objective == '4x':
desired_magnification = 4.0
if desired_objective == '10x':
desired_magnification = 10.0
if desired_objective == '20x':
desired_magnification = 20.0
'''
-------------------------------------------------------
(0) Define variables and Check state
-------------------------------------------------------
'''
chFocus=epicsMotor('X02DA-ES1-MS1:FOC')
chToggleLens=epicsPV("X02DA-ES1-MS1:LNS+")
chMagnification=epicsPV("X02DA-ES1-MS:MAGNF")
chLensSelector=epicsPV("X02DA-ES1-MS1:LNSSEL")
chXBaseStage=epicsMotor("X02DA-ES1-SMP1:TRX")
chZBaseStage=epicsMotor("X02DA-ES1-SMP1:TRZ")
chTomoPanelSampleInPos=epicsPV("X02DA-SCAN-SCN1:SMPIN")
chFESlits_H=epicsPV("X02DA-FE-SHsize")
chFESlits_V=epicsPV("X02DA-FE-SVsize")
foc_4x = 1820
foc_10x = 2692
foc_20x = 2685.0
X_4x = -227
X_10x = -114
X_20x = 0.0
FE4x_H = 1.484
FE4x_V = 1.067
FE10x_H = 0.8
FE10x_V = 0.5
FE20x_H = 0.45
FE20x_V = 0.30
Z_4x = -21500
Z_10x = -21500
Z_20x = 8500
magnificationState = str(getMagnification())
print magnificationState
if magnificationState == '4.0' and desired_objective == '4x':
print "Already at desired magnification - Nothing to do!!!"
exit(0)
if magnificationState == '10.0' and desired_objective == '10x':
print "Already at desired magnification - Nothing to do!!!"
exit(0)
if magnificationState == '20.0' and desired_objective == '20x':
print "Already at desired magnification - Nothing to do!!!"
exit(0)
'''
-------------------------------------------------------
(1) Move focus to zero
-------------------------------------------------------
'''
setNewFocus(0)
'''
-------------------------------------------------------
(2) Change objective
-------------------------------------------------------
'''
print "Changing objective ..."
retries = 0
while desired_magnification != getMagnification():
chToggleLens.putWait(1)
time.sleep(7)
retries += 1
if retries > 4:
print ">>>>Error in objective change! State not supported..."
exit(1)
'''
-------------------------------------------------------
(3) Set focus in new objective
-------------------------------------------------------
'''
print "Move focus to the correct value ..."
while True:
magnificationState = str(chMagnification.getw())
if magnificationState == '4.0' and desired_objective == '4x':
setNewFocus(foc_4x)
break
if magnificationState == '10.0' and desired_objective == '10x':
setNewFocus(foc_10x)
break
if magnificationState == '20.0' and desired_objective == '20x':
setNewFocus(foc_20x)
break
time.sleep(0.5)
'''
-------------------------------------------------------
(4) Set new X-value
-------------------------------------------------------
'''
magnificationState = str(chMagnification.getw())
if magnificationState == '4.0':
setNewXValue(X_4x)
chTomoPanelSampleInPos.putw(X_4x)
elif magnificationState == '10.0':
setNewXValue(X_10x)
chTomoPanelSampleInPos.putw(X_10x)
elif magnificationState == '20.0':
setNewXValue(X_20x)
chTomoPanelSampleInPos.putw(X_20x)
'''
-------------------------------------------------------
(5) Set new Z-value
-------------------------------------------------------
'''
if magnificationState == '4.0':
setNewZValue(Z_4x)
elif magnificationState == '10.0':
setNewZValue(Z_10x)
elif magnificationState == '20.0':
setNewZValue(Z_20x)
'''
-------------------------------------------------------
(6) Set new FE-Slits values
-------------------------------------------------------
'''
if magnificationState == '4.0':
setFESlitsValues(FE4x_H,FE4x_V)
elif magnificationState == '10.0':
setFESlitsValues(FE10x_H,FE10x_V)
elif magnificationState == '20.0':
setFESlitsValues(FE20x_H,FE20x_V)
print "DONE: Changed magnification successfully!"
license: gpl-2.0 | hash: 5,879,329,834,699,678,000 | line_mean: 29.039106 | line_max: 99 | alpha_frac: 0.538318 | autogenerated: false

repo_name: mattmcd/PySnippets | path: FizzBuzz/fizzbuzz.py | copies: 1 | size: 1283
# Fizz Buzz with and without decorators
# Algorithm: convert the input list of integers to (x, '') tuples, then map
# fizz buzz functions over the tuples, appending to the string part if the
# required condition is met (fizz: x % 3 == 0, buzz: x % 5 == 0). Finally,
# extract the string part and print it, or print the number if the string is empty
def tuplizer(x):
return (x, '')
def detuplizer(x):
if x[1]:
return x[1]
else:
return str(x[0])
def make_filter(k, msg):
def f(x):
if x[0] % k == 0:
out = (x[0], x[1] + msg)
else:
out = x
return out
return f
def make_decorator(f):
def f_decorator(func):
def inner(x):
return f(func(x))
return inner
return f_decorator
def fb(x):
"""FizzBuzz using composition"""
f = make_filter(3, 'fizz')
b = make_filter(5, 'buzz')
return detuplizer(b(f(tuplizer(x))))
@make_decorator(detuplizer)
@make_decorator(make_filter(5, 'buzz'))
@make_decorator(make_filter(3, 'fizz'))
@make_decorator(tuplizer)
def fb_dec(x):
"""FizzBuzz using decorators"""
return x
def fizzbuzz(n):
def aux(x):
print fb_dec(x),
map( aux, xrange(1,n+1));
print
if __name__ == '__main__':
fizzbuzz(20)
license: apache-2.0 | hash: -5,021,501,119,154,161,000 | line_mean: 20.383333 | line_max: 75 | alpha_frac: 0.583009 | autogenerated: false

repo_name: nmayorov/pyins | path: pyins/util.py | copies: 1 | size: 2057
"""Utility functions."""
import numpy as np
def mm_prod(a, b, at=False, bt=False):
"""Compute products of multiple matrices stored in a stack.
Parameters
----------
a, b : array_like with 2 or 3 dimensions
Single matrix or stack of matrices. Matrices are stored in the two
trailing dimensions. If one of the arrays is 2-D and another is
3-D then broadcasting along the 0-th axis is applied.
at, bt : bool, optional
Whether to use transpose of `a` and `b` respectively.
Returns
-------
ab : ndarray
Computed products.
"""
a = np.asarray(a)
b = np.asarray(b)
if a.ndim not in [2, 3]:
raise ValueError("Wrong number of dimensions in `a`.")
if b.ndim not in [2, 3]:
raise ValueError("Wrong number of dimensions in `b`.")
if at:
if a.ndim == 3:
a = np.transpose(a, (0, 2, 1))
else:
a = a.T
if bt:
        if b.ndim == 3:
b = np.transpose(b, (0, 2, 1))
else:
b = b.T
return np.einsum("...ij,...jk->...ik", a, b)
def mv_prod(a, b, at=False):
"""Compute products of multiple matrices and vectors stored in a stack.
Parameters
----------
a : array_like with 2 or 3 dimensions
Single matrix or stack of matrices. Matrices are stored in the two
trailing dimensions.
b : ndarray with 1 or 2 dimensions
Single vector or stack of vectors. Vectors are stored in the trailing
dimension.
at : bool, optional
Whether to use transpose of `a`.
Returns
-------
ab : ndarray
Computed products.
"""
a = np.asarray(a)
b = np.asarray(b)
if a.ndim not in [2, 3]:
raise ValueError("Wrong number of dimensions in `a`.")
if b.ndim not in [1, 2]:
raise ValueError("Wrong number of dimensions in `b`.")
if at:
if a.ndim == 3:
a = np.transpose(a, (0, 2, 1))
else:
a = a.T
return np.einsum("...ij,...j->...i", a, b)
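

if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original file): a stack of
    # two matrices multiplied against a single broadcast matrix and a stack
    # of vectors; all values are made up.
    A = np.stack([np.eye(3), 2 * np.eye(3)])   # shape (2, 3, 3)
    B = np.arange(9.0).reshape(3, 3)           # shape (3, 3), broadcast
    print(mm_prod(A, B).shape)                 # -> (2, 3, 3)
    print(mv_prod(A, np.ones((2, 3))).shape)   # -> (2, 3)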
license: mit | hash: -5,462,642,243,853,522,000 | line_mean: 25.714286 | line_max: 77 | alpha_frac: 0.551288 | autogenerated: false

repo_name: akaszynski/vtkInterface | path: examples/03-advanced/antarctica-compare.py | copies: 1 | size: 3639
"""
Compare Field Across Mesh Regions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is some velocity data from a glacier modelling simulation that is compared
across nodes in the simulation. We have simplified the mesh to have the
simulation node value already on the mesh.
This was originally posted to `pyvista/pyvista-support#83 <https://github.com/pyvista/pyvista-support/issues/83>`_.
The modeling results are courtesy of `Urruty Benoit <https://github.com/BenoitURRUTY>`_
and are from the `Elmer/Ice <http://elmerice.elmerfem.org>`_ simulation
software.
"""
# sphinx_gallery_thumbnail_number = 2
import pyvista as pv
from pyvista import examples
import numpy as np
# Load the sample data
mesh = examples.download_antarctica_velocity()
mesh["magnitude"] = np.linalg.norm(mesh["ssavelocity"], axis=1)
mesh
###############################################################################
# Here is a helper to extract regions of the mesh based on the simulation node.
def extract_node(node):
idx = mesh["node_value"] == node
return mesh.extract_points(idx)
###############################################################################
p = pv.Plotter()
p.add_mesh(mesh, scalars="node_value")
for node in np.unique(mesh["node_value"]):
loc = extract_node(node).center
p.add_point_labels(loc, ["Node {}".format(node)])
p.show(cpos="xy")
###############################################################################
vel_dargs = dict(scalars="magnitude", clim=[1e-3, 1e4], cmap='Blues', log_scale=True)
mesh.plot(cpos="xy", **vel_dargs)
###############################################################################
a = extract_node(12)
b = extract_node(20)
###############################################################################
pl = pv.Plotter()
pl.add_mesh(a, **vel_dargs)
pl.add_mesh(b, **vel_dargs)
pl.show(cpos='xy')
###############################################################################
# plot vectors without mesh
pl = pv.Plotter()
pl.add_mesh(a.glyph(orient="ssavelocity", factor=20), **vel_dargs)
pl.add_mesh(b.glyph(orient="ssavelocity", factor=20), **vel_dargs)
pl.camera_position = [(-1114684.6969340036, 293863.65389149904, 752186.603224546),
(-1114684.6969340036, 293863.65389149904, 0.0),
(0.0, 1.0, 0.0)]
pl.show()
###############################################################################
# Compare directions. Normalize them so we can get a reasonable direction
# comparison.
flow_a = a.point_arrays['ssavelocity'].copy()
flow_a /= np.linalg.norm(flow_a, axis=1).reshape(-1, 1)
flow_b = b.point_arrays['ssavelocity'].copy()
flow_b /= np.linalg.norm(flow_b, axis=1).reshape(-1, 1)
# plot normalized vectors
pl = pv.Plotter()
pl.add_arrows(a.points, flow_a, mag=10000, color='b', label='flow_a')
pl.add_arrows(b.points, flow_b, mag=10000, color='r', label='flow_b')
pl.add_legend()
pl.camera_position = [(-1044239.3240694795, 354805.0268606294, 484178.24825854995),
(-1044239.3240694795, 354805.0268606294, 0.0),
(0.0, 1.0, 0.0)]
pl.show()
###############################################################################
# flow_a that agrees with the mean flow path of flow_b
agree = flow_a.dot(flow_b.mean(0))
pl = pv.Plotter()
pl.add_mesh(a, scalars=agree, cmap='bwr', stitle='Flow agreement with block b')
pl.add_mesh(b, color='w')
pl.show(cpos='xy')
###############################################################################
agree = flow_b.dot(flow_a.mean(0))
pl = pv.Plotter()
pl.add_mesh(a, color='w')
pl.add_mesh(b, scalars=agree, cmap='bwr', stitle='Flow agreement with block a')
pl.show(cpos='xy')
license: mit | hash: 1,542,510,731,848,778,800 | line_mean: 31.783784 | line_max: 115 | alpha_frac: 0.552075 | autogenerated: false

repo_name: MarineLasbleis/GrowYourIC | path: setup.py | copies: 1 | size: 1640
from __future__ import absolute_import, print_function
import os
import re
import sys
import codecs
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open
return codecs.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
version = find_version('GrowYourIC', '__init__.py')
metadata = dict(name="GrowYourIC",
version=version,
description="a toolkit to propagate seismic rays through models of Earth's inner core",
url="https://github.com/MarineLasbleis/GrowYourIC",
license='GPL',
long_description="a toolkit to propagate seismic rays through models of Earth's inner core",
py_modules=['GrowYourIC'],
packages=['GrowYourIC'],
package_dir={'GrowYourIC': 'GrowYourIC'},
package_data={'GrowYourIC': ['data/*.dat', 'data/*.mat']},
classifiers=[
'Programming Language :: Python :: 3.6'],
author='Marine Lasbleis',
author_email='[email protected]',
)
try:
from setuptools import setup
metadata['install_requires'] = ['numpy', 'matplotlib', 'scipy']
except ImportError:
from distutils.core import setup
setup(**metadata)
license: mit | hash: -3,185,438,997,555,225,000 | line_mean: 33.893617 | line_max: 108 | alpha_frac: 0.590854 | autogenerated: false

repo_name: nkskalyan/ganesh-yrg | path: EcoKitchen_backend/EcoKitchen/views.py | copies: 1 | size: 13640
import base64
import hashlib
import logging

from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.db.models import Q
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from rest_framework import serializers
from rest_framework.decorators import api_view, parser_classes
from rest_framework.parsers import JSONParser

from .models import UserProfile, Location, FeedBack, ReferredPerson
# Create your views here.
logging.basicConfig()
logger = logging.getLogger(__name__)
UserProfile_NAME = "name"
UserProfile_PASSWORD = "password"
UserProfile_EMAIL = "email"
UserProfile_ADDRESS = "address"
UserProfile_MOBILE = "mobile"
HASH_SALT = b'SECRET_SALT'
NUM_ITERATIONS = 10000
def post_list(self, *args, **kwargs):
return HttpResponse('{"success":"true"}')
class LocationSerializer(serializers.Serializer):
lat = serializers.CharField(max_length=30)
long = serializers.CharField(max_length=30)
id = serializers.IntegerField()
address = serializers.CharField(max_length=200)
description = serializers.CharField(max_length=50)
status = serializers.BooleanField()
@api_view(['POST'])
@parser_classes((JSONParser,))
def signInUser(request):
logger.critical(request.method)
#logger.critical("DATA :: " + request.body)
result = True;
msg = None;
userProfile = None
response_data = {}
if request.method == 'POST' and request.content_type == 'application/json' :
mobile = request.data[UserProfile_MOBILE]
password = request.data[UserProfile_PASSWORD]
passwordHash = base64.b64encode(hashlib.pbkdf2_hmac('sha256', password, HASH_SALT, NUM_ITERATIONS))
try :
userProfile = UserProfile.objects.get(mobile=mobile, password=passwordHash)
except ObjectDoesNotExist:
logger.critical("User did not exist")
result = False
msg = "Wrong User Name password Combination"
except MultipleObjectsReturned:
logger.critical("Multiple objects returned")
msg = "Multiple objects returned"
result = False
else:
msg = "Unknown ContentType or Method Name"
result = False
response_data['success'] = result
response_data['message'] = msg
if result:
response_data['userId'] = userProfile.id
return JsonResponse(response_data)
@api_view(['POST'])
@parser_classes((JSONParser,))
def signUpUser(request):
#logger.critical("DATA :: " + request.body)
result = True
msg = None
userProfile = None
if request.method == 'POST' and request.content_type == 'application/json' :
name = request.data[UserProfile_NAME]
password = request.data[UserProfile_PASSWORD]
#logger.critical("PAssword:: " + password)
passwordHash = base64.b64encode(hashlib.pbkdf2_hmac('sha256', password, HASH_SALT, NUM_ITERATIONS))
#logger.critical("PAssword hash:: " + passwordHash)
email = request.data[UserProfile_EMAIL]
address = request.data[UserProfile_ADDRESS]
mobile = request.data[UserProfile_MOBILE]
try:
if len(email) > 0:
existingUser = UserProfile.objects.filter(Q(email=email)|Q(mobile=mobile))
else:
existingUser = UserProfile.objects.filter(Q(mobile=mobile))
if existingUser != None and existingUser.count() > 0:
result = False
msg = "A user is present with same email or mobile number"
else:
userProfile = UserProfile(name=name, password=passwordHash,
email=email, address=address, mobile=mobile)
userProfile.save()
except Exception as e:
logger.critical("Cannot insert succesfully")
logger.critical(e)
result = False
msg = "DB insertion error"
else:
result = False
msg = "Unknown ContentType or Method"
response_data = {}
response_data['success'] = result
response_data['message'] = msg
if result:
response_data['userId'] = userProfile.id
return JsonResponse(response_data)
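
# --- Illustrative request sketch (not part of the original file) ---
# signUpUser expects a JSON body with these keys (the URL route depends on
# urls.py and all values here are made up):
#
#   POST <signup-url>  Content-Type: application/json
#   {"name": "Asha", "password": "s3cret", "email": "[email protected]",
#    "address": "12 Main St", "mobile": "9999999999"}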
def getLocation(request, locationId):
logger.critical(request.method)
#logger.critical("DATA :: " + request.body)
result = True
msg = None
location = None
try :
location = Location.objects.get(id=locationId)
except ObjectDoesNotExist:
logger.critical("Location does not exist")
result = False
msg = "Location does not exist"
except MultipleObjectsReturned:
logger.critical("Multiple objects returned")
msg = "Multiple objects returned"
result = False
response_data = {}
if result:
response_data = {}
response_data['status'] = location.status
response_data['description'] = location.description
response_data['address'] = location.address
response_data['userId'] = location.user.id
response_data['lat'] = location.lat
response_data['long'] = location.long
response_data['id'] = location.id
return JsonResponse(response_data)
else:
response_data['success'] = False
response_data['message'] = msg
return JsonResponse(response_data)
@api_view(['POST'])
@parser_classes((JSONParser,))
def postLocation(request):
result = True
msg = None
location = None
if request.method == 'POST' and request.content_type == 'application/json' :
description = request.data['description']
address = request.data['address']
userId = request.data['userId']
lat = str(request.data['lat'])
long = str(request.data['long'])
status = False
try:
existingLocation = Location.objects.filter(Q(lat=lat) & Q(long=long))
userReferenced = None
if existingLocation != None and existingLocation.count() > 0:
result = False
msg = "A Location is present with same Latitude and Longitude"
else:
try :
userReferenced = UserProfile.objects.get(id=userId)
except ObjectDoesNotExist:
result = False
msg = "A User Id specified for location doesn't exist"
location = Location(lat=lat, long=long,
address=address, description=description, status=status, user=userReferenced)
location.save()
except Exception as ex:
#logger.critical("Cannot insert succesfully:" + str(ex))
result = False
msg = "DB insertion error"
else:
result = False
msg = "Unknown ContentType or Method"
response_data = {}
response_data['success'] = result
response_data['message'] = msg
if result:
response_data['locationId'] = location.id
return JsonResponse(response_data)
@api_view(['GET', 'POST'])
@parser_classes((JSONParser,))
def getAllLocations(request):
locationList = Location.objects.all()
    if locationList.exists():
locationJson = LocationSerializer(locationList, many=True)
return JsonResponse(locationJson.data, safe=False)
else:
return JsonResponse({})
@api_view(['POST'])
@parser_classes((JSONParser,))
def addFeedback(request):
result = True
msg = None
feedback = None
    if request.method == 'POST' and request.content_type == 'application/json':
content = request.data['content']
locationId = request.data['locationId']
userId = request.data['userId']
courtesy = request.data['courtesy']
cleanliness = request.data['cleanliness']
qualityOfFood = request.data['qualityOfFood']
quantityOfFood = request.data['quantityOfFood']
foodTaste = request.data['foodTaste']
try:
existingLocations = Location.objects.filter(id=locationId)
existingUsers = UserProfile.objects.filter(id=userId)
            if existingLocations.exists() and existingUsers.exists():
location = existingLocations.first()
user = existingUsers.first()
feedback = FeedBack(content=content, user=user, location=location, courtesy= courtesy, qualityOfFood = qualityOfFood,
quantityOfFood = quantityOfFood, foodTaste = foodTaste, cleanliness= cleanliness)
feedback.save()
else:
result = False
msg = "Invalid Location or User!"
except Exception as ex:
logger.critical("Cannot insert succesfully:" + str(ex))
result = False
msg = "DB insertion error"
else:
result = False
msg = "Unknown ContentType or Method"
response_data = {}
response_data['success'] = result
response_data['message'] = msg
if result:
response_data['feedbackId'] = feedback.id
return JsonResponse(response_data)
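# Illustrative request payload for addFeedback (keys match the reads above;
# the values are made up):
#   {"content": "Great food", "locationId": 3, "userId": 1, "courtesy": 4,
#    "cleanliness": 5, "qualityOfFood": 4, "quantityOfFood": 3, "foodTaste": 5}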
@api_view(['POST'])
@parser_classes((JSONParser,))
def createEntreprenuerReferral(request):
result = True
msg = None
referredPerson = None
    if request.method == 'POST' and request.content_type == 'application/json':
name = request.data['name']
phone = request.data['phone']
incomeRange = request.data['incomeRange']
maritalStatus = request.data['maritalStatus']
description = request.data['description']
qualification = request.data['qualification']
userId = request.data['userId']
try:
existingUsers = UserProfile.objects.filter(id=userId)
            if existingUsers.exists():
user = existingUsers.first()
referredPerson = ReferredPerson(name=name, phone=phone, incomeRange=incomeRange,
qualification=qualification, maritalStatus=maritalStatus, description=description, user=user)
referredPerson.save()
else:
result = False
msg = "Invalid User!"
except Exception as ex:
logger.critical("Cannot insert succesfully:" + str(ex))
result = False
msg = "DB insertion error"
else:
result = False
msg = "Unknown ContentType or Method"
response_data = {}
response_data['success'] = result
response_data['message'] = msg
if result:
response_data['referredPersonId'] = referredPerson.id
return JsonResponse(response_data)
@api_view(['GET'])
def askQuestion(request):
response_data = {}
    try:
        question = request.GET['question']
        # Placeholder: question answering is not implemented yet
        response_data['answer'] = "some answer"
    except Exception as ex:
        response_data["message"] = str(ex)
    return JsonResponse(response_data)
def locationspage(request):
locations_list = Location.objects.all()
context = {'locations_list': locations_list}
return render(request,'EcoKitchen/locations.html',context)
def userspage(request):
users_list = UserProfile.objects.all()
context = {'users_list': users_list}
return render(request,'EcoKitchen/users.html',context)
def feedbackpage(request):
feedback_list = FeedBack.objects.all()
context = {'feedback_list': feedback_list}
return render(request,'EcoKitchen/feedbackpage.html',context)
def entrepreneurs(request):
entre_list = ReferredPerson.objects.all()
context = {'entre_list': entre_list}
return render(request,'EcoKitchen/entrepreneurs.html',context)
def userdetail(request, user_id):
usr = get_object_or_404(UserProfile, pk=user_id)
if(request.POST.get('subbtn')):
inplist = request.POST.getlist('i_case')
outlist = request.POST.getlist('o_case')
return render(request, 'EcoKitchen/userdetail.html', {'usr': usr})
def locationdetail(request, location_id):
location = get_object_or_404(Location, pk=location_id)
if(request.POST.get('update')):
location.address = request.POST.get('field6')
location.status = request.POST.get('choice')
location.description = request.POST.get('field3')
location.lat = request.POST.get('field4')
location.long = request.POST.get('field5')
location.save()
return redirect('/EcoKitchen/locationspage')
return render(request, 'EcoKitchen/locationdetail.html', {'location': location})
def entredetail(request, entre_id):
entre = get_object_or_404(ReferredPerson, pk=entre_id)
locs = Location.objects.all()
logger.critical(request.body)
    if request.POST.get('update'):
        locationId = request.POST.get('locationId')
        loca = Location.objects.get(id=locationId)
        if request.POST.get('choice') == 'nil':
            logger.critical("+++++++ Trying to mark location as inactive")
            loca.status = False
            loca.save()
            entre.location = None
            entre.save()
        else:
            logger.critical("+++++++ Trying to mark location as active")
            loca.status = True
            loca.save()
            entre.location = loca
            entre.save()
        return redirect('/EcoKitchen/locationspage')
return render(request, 'EcoKitchen/entredetail.html', {'entre': entre,'locs':locs})
|
apache-2.0
| -3,771,626,611,849,599,000 | 36.26776 | 133 | 0.639956 | false |
JonathanSeguin/Mariana
|
examples/mnist_mlp.py
|
1
|
2841
|
import Mariana.activations as MA
import Mariana.initializations as MI
import Mariana.layers as ML
import Mariana.costs as MC
import Mariana.regularizations as MR
import Mariana.scenari as MS
import Mariana.training.trainers as MT
import Mariana.training.recorders as MREC
import Mariana.training.datasetmaps as MDM
import Mariana.training.stopcriteria as MSTOP
from useful import load_mnist
"""
This is the equivalent the theano MLP from here: http://deeplearning.net/tutorial/mlp.html
But Mariana style.
This version uses a trainer/dataset mapper setup:
* automatically saves the best model for each set (train, test, validation)
* automatically saves the model if the training halts because of an error or if the process is killed
* saves a log if the process dies unexpectedly
* training results and hyper parameters values are recorded to a file
* allows you to define custom stop criteria
* training info is printed at each epoch, including best scores and at which epoch they were achieved
"""
if __name__ == "__main__":
# Let's define the network
ls = MS.GradientDescent(lr=0.01)
cost = MC.NegativeLogLikelihood()
i = ML.Input(28 * 28, name='inp')
h = ML.Hidden(500, activation=MA.Tanh(), initializations=[MI.GlorotTanhInit()], regularizations=[MR.L1(0), MR.L2(0.0001)], name="hid")
o = ML.SoftmaxClassifier(10, learningScenario=ls, costObject=cost, name="out", regularizations=[MR.L1(0), MR.L2(0.0001)])
mlp = i > h > o
mlp.saveDOT("mnist_mlp")
mlp.saveHTML("mnist_mlp")
# And then map sets to the inputs and outputs of our network
train_set, validation_set, test_set = load_mnist()
trainData = MDM.Series(images=train_set[0], numbers=train_set[1])
trainMaps = MDM.DatasetMapper()
trainMaps.mapInput(i, trainData.images)
trainMaps.mapOutput(o, trainData.numbers)
testData = MDM.Series(images=test_set[0], numbers=test_set[1])
testMaps = MDM.DatasetMapper()
testMaps.mapInput(i, testData.images)
testMaps.mapOutput(o, testData.numbers)
validationData = MDM.Series(images=validation_set[0], numbers=validation_set[1])
validationMaps = MDM.DatasetMapper()
validationMaps.mapInput(i, validationData.images)
validationMaps.mapOutput(o, validationData.numbers)
earlyStop = MSTOP.GeometricEarlyStopping(testMaps, patience=100, patienceIncreaseFactor=1.1, significantImprovement=0.00001, outputFunction="score", outputLayer=o)
epochWall = MSTOP.EpochWall(1000)
trainer = MT.DefaultTrainer(
trainMaps=trainMaps,
testMaps=testMaps,
validationMaps=validationMaps,
stopCriteria=[earlyStop, epochWall],
testFunctionName="testAndAccuracy",
validationFunctionName="testAndAccuracy",
trainMiniBatchSize=20,
saveIfMurdered=False
)
recorder = MREC.GGPlot2("MLP", whenToSave = [MREC.SaveMin("test", o.name, "score")], printRate=1, writeRate=1)
trainer.start("MLP", mlp, recorder = recorder)
|
apache-2.0
| -4,072,527,809,350,081,500 | 36.381579 | 164 | 0.767687 | false |
leriomaggio/code-coherence-evaluation-tool
|
code_comments_coherence/source_code_analysis/code_analysis/utils.py
|
1
|
4146
|
"""
:Author: Valerio Maggio
:Organization: University of Naples Federico II
:Contact: [email protected]
"""
import os
from xml_parsers import MethodLevelParser
import sys
#===================
# Utility Functions
#===================
def get_sources_from_dir(root_path, extension):
"""
This function recursively traverse the input `root_path` and
returns a list containing the absolute paths of all the files whose
extension is equal to `extension`
"""
if not os.path.isdir(root_path):
raise ValueError('The input root_path is not a valid directory')
files_list = list()
for root, dirs, files in os.walk(root_path):
for filename in files:
if not filename.startswith('.'):
filepath = os.path.abspath(os.path.join(root, filename))
path, ext = os.path.splitext(filepath)
if ext == extension:
files_list.append(filepath)
return files_list
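# Illustrative usage (hypothetical paths):
#   java_files = get_sources_from_dir('/projects/myapp/src', extension='.java')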
def parse_xml_files_list(cls_parser, xml_files_list):
tree_list = list()
tree_map = dict()
for xml_filepath in xml_files_list:
parser = cls_parser()
tlist, tmap = parser.parse(xml_filepath)
if tlist:
tree_list.extend(tlist)
tree_map.update(tmap)
return tree_list, tree_map
def extract_trees(xml_folder_name, cls_parser=MethodLevelParser):
"""
This function parses each xml files found in the
input _xml_folder_name_ (xml files are identified by
the .xml file extension) and returns the list of all
methods found after parsing those files.
Input xml files should represent code syntax trees.
"""
xml_files_list = get_sources_from_dir(xml_folder_name, extension='.xml')
return parse_xml_files_list(cls_parser, xml_files_list)
def set_xml_filepath(xml_dest_folderpath, source_filepath, root_path):
"""
Returns the filepath of the xml file representing the AST.
This generated filepath will be composed by a folder path that will reproduce
the package directory structures of the original source file in
the input `xml_dest_folderpath`, while the name of the file will be
the same of the original source file (except for the file extension).
"""
source_filename = os.path.basename(source_filepath)
    source_file_extension = source_filename.split(os.path.extsep)[-1]
package_folder_path = source_filepath.replace(source_filename, '').replace(root_path, '')
if package_folder_path.startswith(os.path.sep):
if len(package_folder_path) > 1:
package_folder_path = package_folder_path[1:]
else:
package_folder_path = ''
xml_package_folder_path = os.path.join(xml_dest_folderpath, package_folder_path)
if not os.path.exists(xml_package_folder_path):
os.makedirs(xml_package_folder_path)
xml_filename = source_filename.replace(source_file_extension,'xml')
return os.path.join(xml_package_folder_path, xml_filename)
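# Worked example (hypothetical paths): with root_path='/src',
# source_filepath='/src/pkg/Foo.java' and xml_dest_folderpath='/xml',
# the package part 'pkg/' is mirrored under '/xml' and the function
# returns '/xml/pkg/Foo.xml'.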
def print_tree_to_xml(node, indent, outfile=sys.stdout):
"""
Recursively writes to the input `outfile` the `node`
in XML format
"""
node_representation = u'{0}<{1} name="{2}" line="{3}" instruction_class="{4}" ' \
u'instruction="{5}">\n'.format(indent, node.xml_node_name,
node.node_name, node.token.line,
node.instruction_class, node.instruction)
outfile.write(node_representation.encode('utf8'))
for child in node.children:
print_tree_to_xml(child, indent + " ", outfile)
outfile.write(u'{0}</{1}>\n'.format(indent, node.xml_node_name).encode('utf8'))
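# Illustrative usage (assuming `root` is a parsed syntax-tree node); the file
# is opened in binary mode because the function writes utf8-encoded bytes:
#   with open('tree.xml', 'wb') as outfile:
#       print_tree_to_xml(root, '', outfile)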
def ensure_xml_files_folder(src_folder_path):
# Preprocessing: Check and create the XML destination folder if it does not exist yet!
xml_folder_path = os.path.join(os.path.dirname(src_folder_path), 'xml_source_files')
folder_existing = True
if not os.path.exists(xml_folder_path):
os.makedirs(xml_folder_path)
folder_existing = False
return xml_folder_path, folder_existing
|
bsd-3-clause
| -7,368,947,614,906,367,000 | 36.026786 | 98 | 0.647612 | false |
ytsapras/robonet_site
|
events/models.py
|
1
|
50854
|
import datetime
from django.db import models
from django.utils import timezone
from decimal import Decimal
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
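# Illustrative effect of the receiver above (hypothetical username): creating
#   User.objects.create_user('alice', password='secret')
# fires post_save and automatically provisions a rest_framework Token for 'alice'.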
# Known Operator (survey/follow-up)
class Operator(models.Model):
"""
This can be the survey name or the name of the follow-up group.
All uppercase.
Attributes:
name -- The operator name
(string, required)
"""
def __str__(self):
return self.name
name = models.CharField("Operator Name", max_length=50, default='OTHER')
# Known Telescopes
class Telescope(models.Model):
"""
Known telescope names in the database.
Attributes:
operator -- The operator
(object, required) -- ForeignKey object
name -- The telescope name
(string, required)
aperture -- The telescope aperture
(float, optional)
latitude -- The telescope latitude (N) in decimal degrees
(float, optional)
longitude -- The telescope longitude (E) in decimal degrees
(float, optional)
altitude -- The telescope altitude in meters
(float, optional)
site -- The site name
(string, optional)
"""
def __str__(self):
return self.name
operator = models.ForeignKey(Operator, on_delete=models.PROTECT)
name = models.CharField("Telescope name", max_length=100)
aperture = models.DecimalField("Telescope Aperture (m)", max_digits=6,
decimal_places=2, null=True, blank=True)
latitude = models.DecimalField("Latitude (N) in decimal degrees",
max_digits=8, decimal_places=4, null=True,
blank=True)
longitude = models.DecimalField("Longitude (E) in decimal degrees",
max_digits=8, decimal_places=4, null=True,
blank=True)
altitude = models.DecimalField("Altitude (m)", max_digits=8,
decimal_places=4, null=True, blank=True)
site = models.CharField("Site name", max_length=100, blank=True, default="")
# Known Instruments
class Instrument(models.Model):
"""
Known instrument names in the database for a specific telescope.
A single instrument can appear multiple times as it can be moved to
different telescopes.
Attributes:
telescope -- The telescope
(object, required) -- ForeignKey object
name -- The instrument name
(string, required)
pixscale -- The pixel scale of the CCD (arcsec/pix)
(float, optional)
"""
def __str__(self):
return self.name
telescope = models.ForeignKey(Telescope, on_delete=models.PROTECT)
name = models.CharField("Instrument name", max_length=50)
pixscale = models.DecimalField("Pixel scale (arcsec/pix)", max_digits=12,
decimal_places=4, null=True, blank=True)
# Known Filters
class Filter(models.Model):
"""
Known filter names in the database for a specific instrument.
A single filter can appear multiple times as it can exist for different
instruments.
Attributes:
instrument -- The instrument
(object, required) -- ForeignKey object
name -- The filter name
(string, required)
"""
def __str__(self):
return self.name
instrument = models.ForeignKey(Instrument, on_delete=models.PROTECT)
name = models.CharField("Filter name", max_length=50, blank=True)
# Known Fields
class Field(models.Model):
"""
Known ROME/REA field name in the database.
Keyword arguments:
name -- The field name
(string, optional, default='Outside ROMEREA footprint')
field_ra -- Field RA.
(string, optional, default='')
field_dec -- Field DEC.
(string, optional, default='')
"""
def __str__(self):
return self.name
name = models.CharField("Field name", max_length=50, default='Outside ROMEREA footprint')
field_ra = models.CharField("RA", max_length=50, default='')
field_dec = models.CharField("Dec", max_length=50, default='')
field_ra_decimal = models.DecimalField("RA_deg", max_digits=12, decimal_places=9, null=True, blank=True)
field_dec_decimal = models.DecimalField("Dec_deg", max_digits=12, decimal_places=8, null=True, blank=True)
# Generic Events class
class Event(models.Model):
"""
Known events in the database.
Attributes:
field -- Field Object
(object, required) -- ForeignKey object
operator -- Operator object
(object, required) -- ForeignKey object
ev_ra -- Event RA. (string, required)
ev_dec -- Event DEC. (string, required)
ra -- Event RA (float, decimal, optional)
dec -- Event Dec (float, decimal, optional)
ibase -- Event baseline magnitude (float, decimal, optional)
status -- Events status (string, optional, default='NF')
Available choices:
'NF':'Not in footprint'
'AC':'active'
'MO':'monitor'
'AN':'anomaly'
'EX':'expired'
anomaly_rank -- The relative importance of the anomaly. -1 for no anomaly, or
a positive decimal number.
(float, optional, default=-1.0)
year -- Year of discovery. (string, optional, default='')
"""
def __str__(self):
return "RA: "+str(self.ev_ra)+" Dec: "+str(self.ev_dec)+" ID: "+str(self.pk)
# Which ROME field does this event belong to?
field = models.ForeignKey(Field, related_name="ev_field_id", on_delete=models.PROTECT)
operator = models.ForeignKey(Operator, related_name="ev_operator_id", on_delete=models.PROTECT)
ev_ra = models.CharField("RA", max_length=50)
ev_dec = models.CharField("Dec", max_length=50)
ra = models.DecimalField("RA_deg", max_digits=12, decimal_places=9, null=True, blank=True)
dec = models.DecimalField("Dec_deg", max_digits=12, decimal_places=8, null=True, blank=True)
ibase = models.DecimalField("i_base", max_digits=6, decimal_places=3, null=True, blank=True)
# Event status (not in ROME footprint, active (in ROME footprint), monitor (60m REA cadence),
# anomaly (20m REA cadence), expired)
possible_status = (
('NF', 'Not in footprint'),
('AC', 'active'),
('MO', 'monitor'),
('AN', 'anomaly'),
('EX', 'expired')
)
status = models.CharField("Event status", max_length=30, choices=possible_status,
default='NF')
# What is the relative importance of the anomaly?
anomaly_rank = models.DecimalField("Anomaly Rank", max_digits=12, decimal_places=4,
default=Decimal('-1.0'))
# Year the event was discovered
year = models.CharField("Year of discovery", max_length=10, default='', blank=True)
# Manual override flag
override = models.BooleanField(default=False,blank=True)
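# Illustrative ORM usage (hypothetical values), e.g. from a Django shell:
#   fld = Field.objects.get(name='ROME-FIELD-01')
#   op = Operator.objects.get(name='OGLE')
#   ev = Event.objects.create(field=fld, operator=op,
#                             ev_ra='17:51:40.2', ev_dec='-29:53:26.9')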
# Generic Events Name class
# EventName uses two foreign keys so related_name needs to be set
class EventName(models.Model):
"""
Known event names in the database. Multiple event names can refer to a
single event at specific coordinates.
Attributes:
event -- Event object
(object, required) -- ForeignKey object
operator -- Operator object
(object, required) -- ForeignKey object
name -- Event name as given by Operator
(string, required)
"""
def __str__(self):
return "Name:"+str(self.name)+" ID: "+str(self.event_id)
event = models.ForeignKey(Event, related_name="event_id", on_delete=models.PROTECT)
operator = models.ForeignKey(Operator, related_name="operator_id", on_delete=models.PROTECT)
name = models.CharField("Survey Event Name", max_length=50)
# Single lens parameters
class SingleModel(models.Model):
"""
Single Lens model parameters
in the database.
Attributes:
event -- The event object
(object, required) -- ForeignKey object
Tmax -- Time of maximum magnification.
(float, required)
e_Tmax -- Error in Tmax
(float, optional, default=None)
tau -- Event timescale (in days).
(float, required)
e_tau -- error in tau.
(float, optional, default=None)
umin -- Minimum impact parameter (in units of R_E).
(float, required)
e_umin -- Error in umin.
(float, optional, default=None)
rho -- Finite source size (in units of R_E).
(float, optional, default=None)
e_rho -- Error in rho.
(float, optional, default=None)
pi_e_n -- E,N component of the parallax vector.
(float, optional, default=None)
e_pi_e_n -- Error in pi_e_n.
(float, optional, default=None)
pi_e_e -- E,E component of the parallax vector.
(float, optional, default=None)
e_pi_e_e -- Error in pi_e_e.
(float, optional, default=None)
modeler -- Name of the modeler.
(string, optional, default='')
last_updated -- datetime of last update. (datetime, required)
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
tap_omega -- Omega value to be updated by TAP.
(float, optional, default=None)
chi_sq -- Chi square of the fit
(float, optional, default=None)
"""
def __str__(self):
return str(self.event)+' updated at '+str(self.last_updated)
event = models.ForeignKey(Event, on_delete=models.PROTECT)
Tmax = models.DecimalField("Tmax", max_digits=12,decimal_places=4)
e_Tmax = models.DecimalField("sig(Tmax)", max_digits=12,decimal_places=4,
null=True, blank=True)
tau = models.DecimalField("T_E", max_digits=12,decimal_places=4)
e_tau = models.DecimalField("sig(T_E)", max_digits=12,decimal_places=4,
null=True, blank=True)
umin = models.DecimalField("u_min", max_digits=12,decimal_places=4)
e_umin = models.DecimalField("sig(u_min)", max_digits=12,decimal_places=4,
null=True, blank=True)
rho = models.DecimalField("rho", max_digits=12,decimal_places=4,
null=True, blank=True)
e_rho = models.DecimalField("sig(rho)", max_digits=12,decimal_places=4,
null=True, blank=True)
pi_e_n = models.DecimalField("Parallax EN", max_digits=12,decimal_places=4,
null=True, blank=True)
e_pi_e_n = models.DecimalField("sig(Parallax EN)", max_digits=12,decimal_places=4,
null=True, blank=True)
pi_e_e = models.DecimalField("Parallax EE", max_digits=12,decimal_places=4,
null=True, blank=True)
e_pi_e_e = models.DecimalField("sig(Parallax EE)", max_digits=12,decimal_places=4,
null=True, blank=True)
modeler = models.CharField("Modeler", max_length=25, blank=True, default="")
last_updated = models.DateTimeField('date last updated')
tap_omega = models.DecimalField("TAP Omega", max_digits=12,decimal_places=4,
null=True, blank=True)
chi_sq = models.DecimalField("Chi sq", max_digits=12,decimal_places=4,
null=True, blank=True)
# Binary Lens parameters
class BinaryModel(models.Model):
"""
Known Binary Lens model parameters
in the database.
Attributes:
event -- The event object
(object, required) -- ForeignKey object
Tmax -- Time of maximum magnification.
(float, required)
        e_Tmax -- Error in Tmax
                   (float, optional, default=None)
        tau -- Event timescale (in days).
                   (float, required)
        e_tau -- Error in tau.
                   (float, optional, default=None)
umin -- Minimum impact parameter (in units of R_E).
(float, required)
e_umin -- Error in umin.
(float, optional, default=None)
mass_ratio -- Mass ratio q between the two lens components.
(float, required)
e_mass_ratio -- Error in q
(float, optional, default=None)
separation -- Separation between the two lens components
(in units of R_E)
(float, required)
e_separation -- Error in separation
(float, optional, default=None)
angle_a -- Trajectory angle with respect to the binary axis.
(float, required)
e_angle_a -- Error in trajectory angle.
(float, optional, default=None)
dsdt -- Orbital motion ds/dt
(float, optional, default=None)
e_dsdt -- Error in ds/dt
(float, optional, default=None)
dadt -- Orbital motion da/dt
(float, optional, default=None)
e_dadt -- Error in da/dt
(float, optional, default=None)
rho -- Finite source size (in units of R_E).
(float, optional, default=None)
e_rho -- error in rho.
(float, optional, default=None)
pi_e_n -- E,N component of the parallax vector.
(float, optional, default=None)
e_pi_e_n -- error in pi_e_n.
(float, optional, default=None)
pi_e_e -- E,E component of the parallax vector.
(float, optional, default=None)
e_pi_e_e -- error in pi_e_e.
(float, optional, default=None)
modeler -- Name of the modeler.
(string, optional, default='')
last_updated -- datetime of last update. (datetime, required)
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
chi_sq -- Chi square of the fit
(float, optional, default=None)
"""
def __str__(self):
return str(self.event)+' updated at '+str(self.last_updated)
event = models.ForeignKey(Event, on_delete=models.PROTECT)
Tmax = models.DecimalField("Tmax", max_digits=12,decimal_places=4)
e_Tmax = models.DecimalField("sig(Tmax)", max_digits=12,decimal_places=4,
null=True, blank=True)
tau = models.DecimalField("T_E", max_digits=12,decimal_places=4)
e_tau = models.DecimalField("sig(T_E)", max_digits=12,decimal_places=4,
null=True, blank=True)
umin = models.DecimalField("u_min", max_digits=12,decimal_places=4)
e_umin = models.DecimalField("u_min", max_digits=12,decimal_places=4,
null=True, blank=True)
mass_ratio = models.DecimalField("q", max_digits=12,decimal_places=4)
e_mass_ratio = models.DecimalField("q", max_digits=12,decimal_places=4,
null=True, blank=True)
separation = models.DecimalField("s", max_digits=12,decimal_places=4)
e_separation = models.DecimalField("s", max_digits=12,decimal_places=4,
null=True, blank=True)
angle_a = models.DecimalField("alpha", max_digits=12,decimal_places=4)
e_angle_a = models.DecimalField("sig(alpha)", max_digits=12,decimal_places=4,
null=True, blank=True)
rho = models.DecimalField("rho", max_digits=12,decimal_places=4,
null=True, blank=True)
e_rho = models.DecimalField("sig(rho)", max_digits=12,decimal_places=4,
null=True, blank=True)
pi_e_n = models.DecimalField("Parallax EN", max_digits=12,decimal_places=4,
null=True, blank=True)
e_pi_e_n = models.DecimalField("sig(Parallax EN)", max_digits=12,decimal_places=4,
null=True, blank=True)
pi_e_e = models.DecimalField("Parallax EE", max_digits=12,decimal_places=4,
null=True, blank=True)
e_pi_e_e = models.DecimalField("sig(Parallax EE)", max_digits=12,decimal_places=4,
null=True, blank=True)
dsdt = models.DecimalField("Orbital motion ds/dt", max_digits=12,decimal_places=4,
null=True, blank=True)
e_dsdt = models.DecimalField("sig(Orbital motion ds/dt)", max_digits=12, decimal_places=4,
null=True, blank=True)
dadt = models.DecimalField("Orbital motion da/dt", max_digits=12,decimal_places=4,
null=True, blank=True)
e_dadt = models.DecimalField("sig(Orbital motion da/dt)", max_digits=12,decimal_places=4,
null=True, blank=True)
modeler = models.CharField("Modeler", max_length=25, blank=True, default="")
last_updated = models.DateTimeField('date last updated')
chi_sq = models.DecimalField("Chi sq", max_digits=12,decimal_places=4,
null=True, blank=True)
# Reductions
class EventReduction(models.Model):
"""
Known light curve location for a specific event and pipeline event reduction parameters
in the database. Also stores the reference frame name and DanDIA parameters
used to generate the lightcurve.
Attributes:
event -- The event object.
(object, required) -- ForeignKey object.
lc_file -- The lightcurve file.
(string, required)
timestamp -- The date the lightcurve file was created.
(datetime, required)
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
target_found -- Has the target been identified by the pipeline?
(boolean, required, default=False)
ref_image -- Reference image used.
(string, required)
-+-+- DanDIA parameters -+-+-
ron -- CCD readout noise (in ADU)
(float, optional, default=0.0)
gain -- CCD gain. (e-/ADU)
(float, optional, default=1.0)
oscanx1 -- Overscan strip coordinate x1
(integer, optional, default=1)
oscanx2 -- Overscan strip coordinate x2
(integer, optional, default=50)
oscany1 -- Overscan strip coordinate y1
(integer, optional, default=1)
oscany2 -- Overscan strip coordinate y2
(integer, optional, default=500)
imagex1 -- Image region coordinate x1
(integer, optional, default=51)
imagex2 -- Image region coordinate x2
(integer, optional, default=1000)
imagey1 -- Image region coordinate y1
(integer, optional, default=1)
imagey2 -- Image region coordinate y2
(integer, optional, default=1000)
minval -- Minimum useful pixel value in a raw image (ADU).
(float, optional, default=1.0)
        maxval -- Maximum useful pixel value in a raw image (ADU).
(float, optional, default=55000.0)
growsatx -- Half box size in the x direction (pix) to be used for growing
saturated bad pixels in the bad pixel mask for each science image.
This parameter should be non-negative.
(integer, optional, default=0)
growsaty -- Half box size in the y direction (pix) to be used for growing
saturated bad pixels in the bad pixel mask for each science image.
This parameter should be non-negative.
(integer, optional, default=0)
coeff2 -- Coefficient a1 in the linearisation equation:
Xnew = X + a1*X^2 + a2*X^3
where X represents the image counts after bias level and bias pattern
correction.
(float, optional, default=1.0e-06)
        coeff3 -- Coefficient a2 in the linearisation equation:
Xnew = X + a1*X^2 + a2*X^3
where X represents the image counts after bias level and bias pattern
correction.
(float, optional, default=1.0e-12)
sigclip -- Threshold in units of sigma for cosmic ray detection on the Laplacian
image. This parameter should be positive.
(float, optional, default=4.5)
sigfrac -- Fraction of "sigclip" to be used as a threshold for cosmic ray growth.
This parameter should be positive.
(float, optional, default=0.5)
        flim -- Minimum contrast between the Laplacian image and the fine structure image.
This parameter should be positive.
(float, optional, default=2.0)
niter -- Maximum number of iterations to perform.
This parameter should be positive.
(integer, optional, default=4)
use_reflist -- Use images in reflist.<filt>.txt?
(integer, optional, default=0 (No))
max_nimages -- Maximum number of images to combine for reference.
(integer, optional, default=1)
max_sky -- Maximum acceptable value for sky background.
(float, optional, default=5000.0)
min_ell -- Minimum PSF ellipticity for image to be used in reference.
(float, optional, default=0.8)
trans_type -- Type of coordinate transformation to fit when fitting a coordinate
transformation between two images.
Options:["shift"=General pixel shift, "rot_shift"=Rotation and
general pixel shift, "rot_mag_shift"=Rotation magnification
and general pixel shift, "linear"=Linear, "polynomial"=Polynomial]
(string, optional, default='polynomial')
trans_auto -- Use automatic determination of the coordinate transformation type
when fitting a coordinate transformation between two images?
(integer, optional, default=0 (No))
replace_cr -- Replace cosmic ray pixels? (integer, optional, default=0 (No))
min_scale -- Minimum possible transformation scale factor (magnification) between
any two images.
(float, optional, default=0.99)
max_scale -- Maximum possible transformation scale factor (magnification) between
any two images.
(float, optional, default=1.01)
fov -- Field of view of the CCD camera (deg).
(float, optional, default=0.1)
star_space -- Average spacing (pix) between stars.
(integer, optional, default=30)
init_mthresh -- Initial distance threshold (pix) to reject false star matches.
(float, optional, default=1.0)
smooth_pro -- Smooth image? (integer, optional, default=2)
smooth_fwhm -- Amount of smoothing to perform (float, optional, default=3.0)
var_deg -- Polynomial degree of the spatial variation of the model used to
represent the image PSF.
(0=Constant, 1=Linear, 2=Quadratic, 3=Cubic)
(integer, optional, default=1)
det_thresh -- Detection threshold used to detect stars in units of image sky
sigma.
(float, optional, default=2.0)
psf_thresh -- Detection threshold used to detect candidate PSF stars in units
of image sky sigma.
(float, optional, default=8.0)
psf_size -- Size of the model PSF stamp in units of FWHM.
(float, optional, default=8.0)
psf_comp_dist -- Any star within a distance "0.5*psf_comp_dist*psf_size",
in units of FWHM, of another star is considered to be a companion
of that star for PSF star selection.
(float, optional, default=0.7)
psf_comp_flux -- Maximum flux ratio that any companion star may have for a star to
be considered a PSF star.
(float, optional, default=0.1)
        psf_corr_thresh -- Minimum correlation coefficient of a star with the image PSF
model in order to be considered a PSF star.
(float, optional, default=0.9)
ker_rad -- Radius of the kernel pixel array in units of image FWHM.
(float, optional, default=2.0)
lres_ker_rad -- Threshold radius of the kernel pixel array, in units of image FWHM,
beyond which kernel pixels are of lower resolution.
(float, optional, default=2.0)
subframes_x -- Number of subdivisions in the x direction used in defining the grid
of kernel solutions.
(integer, optional, default=1)
subframes_y -- Number of subdivisions in the y direction used in defining the grid
of kernel solutions.
(integer, optional, default=1)
grow -- Amount of overlap between the image regions used for the kernel solutions.
(float, optional, default = 0.0)
ps_var -- Use spatially variable photometric scale factor?
(integer, optional, default=0 (No))
back_var -- Use spatially variable differential background.
(integer, optional, default=1 (Yes))
diffpro -- Switch for the method of difference image creation.
(integer, optional, default=0 (No))
"""
def __str__(self):
return str(self.event)+' '+str(self.lc_file)
event = models.ForeignKey(Event, on_delete=models.PROTECT)
# location of lightcurve file
lc_file = models.CharField(max_length=1000)
timestamp = models.DateTimeField('date created')
ref_image = models.CharField(max_length=1000)
target_found = models.BooleanField(default=False)
# DanDIA parameters
trans_types = (
('S', 'shift'),
('R', 'rot_shift'),
('M', 'rot_mag_shift'),
('L', 'linear'),
('P', 'polynomial')
)
# CCD par
ron = models.DecimalField(max_digits=10,decimal_places=2,default=Decimal('0.0'))
gain = models.DecimalField(max_digits=10,decimal_places=2,default=Decimal('1.0'))
# S1 par
oscanx1 = models.IntegerField(default=1)
oscanx2 = models.IntegerField(default=50)
oscany1 = models.IntegerField(default=1)
oscany2 = models.IntegerField(default=500)
imagex1 = models.IntegerField(default=51)
imagex2 = models.IntegerField(default=1000)
imagey1 = models.IntegerField(default=1)
imagey2 = models.IntegerField(default=1000)
minval = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('1.0'))
maxval = models.DecimalField(max_digits=10,decimal_places=2,default=Decimal('55000.0'))
growsatx = models.IntegerField(default=0)
growsaty = models.IntegerField(default=0)
coeff2 = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('1.0e-06'))
coeff3 = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('1.0e-12'))
# S2 par
sigclip = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('4.5'))
sigfrac = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.5'))
flim = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('2.0'))
niter = models.IntegerField(default=4)
# S3 par
use_reflist = models.IntegerField(default=0)
max_nimages = models.IntegerField(default=1)
max_sky = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('5000.0'))
min_ell = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.8'))
trans_type = models.CharField(max_length=100,choices=trans_types,default='P')
trans_auto = models.IntegerField(default=0)
replace_cr = models.IntegerField(default=0)
min_scale = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.99'))
max_scale = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('1.01'))
fov = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.1'))
star_space = models.IntegerField(default=30)
init_mthresh = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('1.0'))
smooth_pro = models.IntegerField(default=2)
smooth_fwhm = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('3.0'))
# S4 par
var_deg = models.IntegerField(default=1)
det_thresh = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('2.0'))
psf_thresh = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('8.0'))
psf_size = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('8.0'))
psf_comp_dist = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.7'))
psf_comp_flux = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.1'))
psf_corr_thresh = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.9'))
# S5 par - same as S3
# S6 par
ker_rad = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('2.0'))
lres_ker_rad = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('2.0'))
subframes_x = models.IntegerField(default=1)
subframes_y = models.IntegerField(default=1)
grow = models.DecimalField(max_digits=6,decimal_places=2,default=Decimal('0.0'))
ps_var = models.IntegerField(default=0)
back_var = models.IntegerField(default=1)
# niter same as S2
diffpro = models.IntegerField(default=0)
# S7 par - same as S6
# Observing request parameters
class ObsRequest(models.Model):
"""
Known observing requests in the database.
Attributes:
field -- The field object.
(object, required) -- ForeignKey object
t_sample -- Sampling interval to use. (in minutes)
(float, required)
exptime -- Exposure time to use. (in seconds) (integer, required)
timestamp -- The request submission time.
(datetime, optional, default=timezone.now())
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
time_expire -- When the request expires.
(datetime, optional, default=timezone.now()+24 hours)
request_status -- Status of obs request (ACtive or EXpired)
(string, optional, default='AC')
pfrm_on -- Observe on 0.4m network?
(Boolean, optional, default=False)
onem_on -- Observe on 1m network?
(boolean, optional, default=False)
twom_on -- Observe on 2m network?
(boolean, optional, default=False)
request_type -- Observation request class
(string, optional, default='L')
'A':'REA High - 20 min cadence',
'M':'REA Low - 60 min cadence',
'L':'ROME Standard - every 7 hours'
which_site -- Site identifier string.
(string, optional, default='')
which_filter -- Filter identifier string.
(string, optional, default='')
which_inst -- Instrument identifier string.
(string, optional, default='')
grp_id -- Group ID
(string, optional, default='')
track_id -- Track ID
(string, optional, default='')
req_id -- Request ID
(string, optional, default='')
n_exp -- Number of exposures to obtain.
(integer, optional, default=1)
"""
def __str__(self):
#return str(self.field)+' updated at '+str(self.timestamp)
return str(self.grp_id)+' '+str(self.field)+' status='+str(self.request_status)
field = models.ForeignKey(Field, on_delete=models.PROTECT)
possible_types = (
('A', 'REA High - 20 min cadence'),
('M', 'REA Low - 60 min cadence'),
('L', 'ROME Standard - every 7 hours'),
('I', 'Manual request' )
)
status_choice = (
('AC', 'ACTIVE'),
('EX', 'EXPIRED'),
('CN', 'CANCELLED')
)
timestamp = models.DateTimeField('request submit date', blank=True)
# observe on 0.4m telescopes?
pfrm_on = models.BooleanField(default=False)
# observe on 1m telescopes?
onem_on = models.BooleanField(default=False)
# observe on 2m telescopes?
twom_on = models.BooleanField(default=False)
# t_sample = cadence to use (in minutes)
t_sample = models.DecimalField(max_digits=6,decimal_places=2)
# exposure time (in seconds)
exptime = models.IntegerField()
# ToO flag
request_type = models.CharField(max_length=40, choices=possible_types, default='L')
# Which site to use?
which_site = models.CharField(max_length=10, default='', blank=True)
# Which filter to use?
which_filter = models.CharField(max_length=20, default='', blank=True)
# Which instrument to use?
which_inst = models.CharField(max_length=20, default='', blank=True)
# Expiry date of request
time_expire = models.DateTimeField('request expiry date', blank=True)
# Group ID
grp_id = models.CharField(max_length=30, default='', blank=True)
# Track ID
track_id = models.CharField(max_length=30, default='', blank=True)
# Request ID
req_id = models.CharField(max_length=300, default='', blank=True)
# Number of exposures requested
n_exp = models.IntegerField(default=1)
request_status = models.CharField(max_length=40, choices=status_choice, default='AC')
class SubObsRequest(models.Model):
"""Individual subrequest blocks as generated by the LCO scheduler
in order to execute a cadence ObsRequest.
Attributes:
request -- The originating ObsRequest.
(object, required) -- ForeignKey object
window_start -- First timestamp when the subrequest can be executed
(object, DateTime)
window_end -- Last timestamp, after which the subrequest expires
(object, DateTime)
status -- The last known state of the request in the LCO scheduler
(string, choice field)
('PENDING', 'COMPLETED', 'CANCELED', 'WINDOW_EXPIRED')
time_executed -- The timestamp of when the subrequest was executed
(object, optional, DateTime)
"""
def __str__(self):
return str(self.sr_id)+' '+str(self.grp_id)+' '+\
str(self.track_id)+' '+str(self.status)
status_choice = (
('PENDING', 'PENDING'),
('COMPLETED', 'COMPLETED'),
('CANCELED', 'CANCELED'),
('WINDOW_EXPIRED', 'WINDOW_EXPIRED'),
)
sr_id = models.CharField(max_length=30,blank=True)
grp_id = models.CharField(max_length=30,blank=True)
track_id = models.CharField(max_length=30,blank=True)
window_start = models.DateTimeField('subrequest start time',blank=True)
window_end = models.DateTimeField('subrequest end time',blank=True)
status = models.CharField(max_length=40, choices=status_choice, default='PENDING')
time_executed = models.DateTimeField('subrequest executed time', null=True, blank=True)
# Event status parameters
class EventStatus(models.Model):
"""
Known event status in the database.
Attributes:
event -- The event object.
(object, required) -- ForeignKey object
timestamp -- The request submission time.
(datetime, optional, default=timezone.now())
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
status -- Event status.
(NF:not in footprint, AC:active, MO:monitor, AN:anomaly, EX:expired)
(string, optional, default='NF')
        comment -- Comment field.
                  (string, optional, default='--')
        updated_by -- Updated by which user?
                  (string, optional, default='--')
rec_cad -- Recommended cadence (in hours).
(float, optional, default=0)
rec_texp -- Recommended exposure time (in seconds).
(float, optional, default=0)
rec_nexp -- Recommended number of exposures.
(integer, optional, default=0)
rec_telclass -- Recommended telescope class.
(string, optional, default='1m')
"""
def __str__(self):
return str(self.event)+' updated at '+str(self.timestamp)
event = models.ForeignKey(Event, on_delete=models.PROTECT)
possible_status = (
('NF', 'Not in footprint'),
('AC', 'active'),
('MO', 'monitor'),
('AN', 'anomaly'),
('EX', 'expired')
)
timestamp = models.DateTimeField('date last updated')
# Event status (check, active, anomaly, rejected, expired)
status = models.CharField(max_length=12, choices=possible_status, default='NF')
# Comment (for internal RoboNet users)
comment = models.CharField(max_length=200, default='--')
# Updated by (<user>/automatic)?
updated_by = models.CharField(max_length=25, default='--')
# Recommended cadence (in hours)
rec_cad = models.DecimalField("Recommended cadence (hr)", max_digits=6,decimal_places=2,
null=True, blank=True)
# Recommended exposure time (in seconds)
rec_texp = models.IntegerField("Recommended t_exp (sec)", null=True, blank=True)
# Recommended number of exposures
rec_nexp = models.IntegerField("Recommended n_exp", null=True, blank=True)
# Recommended telescope aperture class
rec_telclass = models.CharField("Recommended telescope class", max_length=12, default='1m',
blank=True)
# ARTEMiS data files (.dat)
class DataFile(models.Model):
"""
All ARTEMiS data files known to the database.
Uses the .dat files rsynced from ARTEMiS.
Attributes:
event -- The event object.
(object, required) -- ForeignKey object
datafile -- Full path to the data file.
(string, required)
last_upd -- Datetime of last update. (datetime, required,
default=timezone.now())
last_hjd -- HJD of last observation. (float, required)
last_mag -- Last recorded magnitude.
(float, required)
tel -- Telescope identifier.
(string, required)
inst -- Instrument used for the observations.
(string, optional, default='')
filt -- Filter used for the observations.
(string, optional, default='')
baseline -- I0 blend parameter from ARTEMiS .align file.
(float, optional, default=22.0)
g -- g blend parameter from ARTEMiS .align file.
(float, optional, default=0.0)
ndata -- Number of data points.
(integer, required)
"""
def __str__(self):
return str(self.event)+' '+str((self.datafile).split('/')[-1])
event = models.ForeignKey(Event, on_delete=models.PROTECT)
datafile = models.CharField(max_length=1000)
# Date the file was last updated
last_upd = models.DateTimeField('date last updated')
# HJD of last observation in file
last_hjd = models.DecimalField(max_digits=20,decimal_places=8)
# Last known magnitude
last_mag = models.DecimalField(max_digits=10,decimal_places=2)
# Telescope where observations were taken
tel = models.CharField(max_length=50, blank=True, default='')
# instrument used for the observations
inst = models.CharField(max_length=50, blank=True, default='')
    # Filter used for the observations
filt = models.CharField(max_length=50, blank=True, default='')
# blend parameters from ARTEMiS (.align) for lightcurve calibration
baseline = models.DecimalField(max_digits=6, decimal_places=2, default=22.0, blank=True)
g = models.DecimalField(max_digits=8, decimal_places=2, default=0.0, blank=True)
# Number of data points in file
    ndata = models.IntegerField()
# TAP parameters
class Tap(models.Model):
"""
Known TAP entries in the database.
Keyword arguments:
event -- The event object.
(object, required) --ForeignKey object
timestamp -- The TAP submission time.
(datetime, optional, default=timezone.now())
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
priority -- Priority flag for human observers.
(string, optional, default='N')
'A':'REA High',
'L':'REA Low',
'B':'REA Post-High'
'N':'None'
tsamp -- Recommended cadence (in hours).
(float, optional, default=0)
texp -- Recommended exposure time (in seconds).
(integer, optional, default=0)
nexp -- Recommended number of exposures.
(integer, optional, default=1)
telclass -- Recommended telescope aperture class.
(string, optional, default='1m')
imag -- Current I magnitude.
(float, optional, default=22.0)
omega -- omega_s.
(float, optional, default=None)
err_omega -- sig(omega_s).
(float, optional, default=None)
peak_omega -- omega_s at peak
(float, optional, default=None)
blended -- target blended?
(boolean, optional, default=False)
visibility -- Current target visibility (in hours)
(float, optional, default=None)
cost1m -- Estimated observational cost per night for the 1m network (in minutes)
(float, optional, default=None)
passband -- Passband for which the priority function has been evaluated
(string, optional, default='SDSS-i')
ipp -- Inter Proposal Priority Value
(float, optional, default='1.0')
"""
def __str__(self):
return str(self.event)+' priority: '+str(self.priority)
event = models.ForeignKey(Event, on_delete=models.PROTECT)
possible_priority = (
('A','REA High'),
('L','REA Low'),
('B','REA Post-High'),
('N','None')
)
timestamp = models.DateTimeField('Date generated')
# Priority flag for human observers (rea high, rea low, rea post-high, none)
priority = models.CharField(max_length=12, choices=possible_priority, default='N')
    # Recommended cadence (in hours): can only take two values, corresponding to 60 or 20 minutes
tsamp = models.DecimalField(max_digits=6,decimal_places=2, default=0, blank=True)
# Recommended exposure time (in seconds)
texp = models.IntegerField(default=0, blank=True)
# Recommended number of exposures
nexp = models.IntegerField(default=1, blank=True)
# Recommended telescope aperture class
telclass = models.CharField(max_length=12, default='1m', blank=True)
# Current I magnitude
imag = models.DecimalField(max_digits=6,decimal_places=2, blank=True, default=22.0)
# omega_s
omega = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# sig(omega_s)
err_omega= models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# omega_s @ peak
peak_omega = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# target blended?
blended = models.BooleanField(default=False)
# visibility for the event now (in hours)
visibility = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# cost in minutes for the 1m network
cost1m = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# Passband for which the priority function has been evaluated
passband = models.CharField(max_length=12, default='SDSS-i', blank=True)
# Inter Proposal Priority value
ipp = models.DecimalField(max_digits=10,decimal_places=3, blank=True, default=1.0)
# TAPLIMA parameters
class TapLima(models.Model):
"""
Known TAPLima entries in the database.
Keyword arguments:
event -- The event object.
(object, required) --ForeignKey object
timestamp -- The TAP submission time.
(datetime, optional, default=timezone.now())
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
priority -- Priority flag for human observers.
(string, optional, default='N')
'A':'REA High',
'L':'REA Low',
'B':'REA Post-High'
'N':'None'
tsamp -- Recommended cadence (in hours).
(float, optional, default=0)
texp -- Recommended exposure time (in seconds).
(integer, optional, default=0)
nexp -- Recommended number of exposures.
(integer, optional, default=1)
telclass -- Recommended telescope aperture class.
(string, optional, default='1m')
imag -- Current I magnitude.
(float, optional, default=22.0)
omega -- omega_s.
(float, optional, default=None)
err_omega -- sig(omega_s).
(float, optional, default=None)
peak_omega -- omega_s at peak
(float, optional, default=None)
blended -- target blended?
(boolean, optional, default=False)
visibility -- Current target visibility (in hours)
(float, optional, default=None)
cost1m -- Estimated observational cost per night for the 1m network (in minutes)
(float, optional, default=None)
passband -- Passband for which the priority function has been evaluated
(string, optional, default='SDSS-i')
ipp -- Inter Proposal Priority Value
(float, optional, default='1.0')
"""
def __str__(self):
return str(self.event)+' priority: '+str(self.priority)
event = models.ForeignKey(Event, on_delete=models.PROTECT)
possible_priority = (
('A','REA High'),
('L','REA Low'),
('B','REA Post-High'),
('N','None')
)
timestamp = models.DateTimeField('Date generated')
# Priority flag for human observers (rea high, rea low, rea post-high, none)
priority = models.CharField(max_length=12, choices=possible_priority, default='N')
    # Recommended cadence (in hours): can only take two values, corresponding to 60 or 20 minutes
tsamp = models.DecimalField(max_digits=6,decimal_places=2, default=0, blank=True)
# Recommended exposure time (in seconds)
texp = models.IntegerField(default=0, blank=True)
# Recommended number of exposures
nexp = models.IntegerField(default=1, blank=True)
# Recommended telescope aperture class
telclass = models.CharField(max_length=12, default='1m', blank=True)
# Current I magnitude
imag = models.DecimalField(max_digits=6,decimal_places=2, blank=True, default=22.0)
# omega_s
omega = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# sig(omega_s)
err_omega= models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# omega_s @ peak
peak_omega = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# target blended?
blended = models.BooleanField(default=False)
# visibility for the event now (in hours)
visibility = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# cost in minutes for the 1m network
cost1m = models.DecimalField(max_digits=6,decimal_places=2, blank=True, null=True)
# Passband for which the priority function has been evaluated
passband = models.CharField(max_length=12, default='SDSS-i', blank=True)
# Inter Proposal Priority value
ipp = models.DecimalField(max_digits=10,decimal_places=3, blank=True, default=1.0)
# Image parameters
class Image(models.Model):
"""
Images known to the database.
Attributes:
field -- The field object.
(object, required) -- ForeignKey object
image_name -- The name of the image.
(string, required)
date_obs -- The date of observation.
(datetime, required)
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
timestamp -- The time the image was written/updated in the database.
(datetime, optional, default=timezone.now())
e.g. datetime(2016, 9, 23, 15, 26, 13, 104683, tzinfo=<UTC>)
tel -- Telescope where the image was taken.
(string, optional, default='')
inst -- Instrument used for the observation.
(string, optional, default='')
filt -- Filter used for the observation.
(string, optional, default='')
grp_id -- Group ID.
(string, optional, default='')
track_id -- Tracking ID.
(string, optional, default='')
req_id -- Observing Request ID.
(string, optional, default='')
airmass -- Airmass of observation.
(float, optional, default=None)
avg_fwhm -- Average FWHM of stars in image.
(float, optional, default=None)
avg_sky -- Average sky background counts in image.
(float, optional, default=None)
avg_sigsky -- Error in sky background counts.
(float, optional, default=None)
moon_sep -- Moon-target separation (in degrees).
(float, optional, default=None)
moon_phase -- Moon phase (% of Full).
(float, optional, default=None)
moon_up -- Was the moon above the horizon at the time of this observation?
(boolean, optional, default=False)
elongation -- Detected object elongation.
(float, optional, default=None)
nstars -- Number of stars detected.
(integer, optional, default=None)
ztemp -- ztemp parameter.
(float, optional, default=None)
shift_x -- x-axis WCS shift from template image in pixels
(integer, optional, default=None)
shift_y -- y-axis WCS shift from template image in pixels
(integer, optional, default=None)
quality -- Image quality description.
(string, optional, default='')
"""
def __str__(self):
return str(self.field)+' Image: '+str(self.image_name)
field = models.ForeignKey(Field, on_delete=models.PROTECT)
image_name = models.CharField(max_length=200)
timestamp = models.DateTimeField('Date received')
date_obs = models.DateTimeField('Date of observation')
# Telescope where image was taken
tel = models.CharField(max_length=50, blank=True, default='')
# instrument used for the observation
inst = models.CharField(max_length=50, blank=True, default='')
    # Filter used for the observation
filt = models.CharField(max_length=50, blank=True, default='')
# Group ID
grp_id = models.CharField(max_length=30, default='', blank=True)
# Track ID
track_id = models.CharField(max_length=30, default='', blank=True)
# Request ID
req_id = models.CharField(max_length=30, default='', blank=True)
airmass = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
avg_fwhm = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
avg_sky = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
# Error in sky background counts
avg_sigsky = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
# Moon-target separation (in degrees)
moon_sep = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
# Moon phase (% of Full)
moon_phase = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
# Was the moon above the horizon at the time of this observation?
moon_up = models.BooleanField(default=False)
elongation = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
nstars = models.IntegerField(blank=True, null=True)
ztemp = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
shift_x = models.IntegerField(blank=True, null=True)
shift_y = models.IntegerField(blank=True, null=True)
quality = models.CharField(max_length=400, blank=True, default='')
|
gpl-2.0
| -2,272,723,726,060,283,100 | 45.272975 | 109 | 0.637433 | false |
geggo/gpyfft
|
gpyfft/test/test_batched.py
|
1
|
2146
|
from __future__ import print_function
import unittest
from parameterized import parameterized
import numpy as np
import pyopencl as cl
import pyopencl.array as cla
from gpyfft import FFT
from gpyfft.test.util import get_contexts
contexts = [(ctx,) for ctx in get_contexts()]
class test_fft_batched(unittest.TestCase):
@parameterized.expand(contexts)
def test_2d_out_of_place(self, ctx):
queue = cl.CommandQueue(ctx)
L = 4
M = 64
N = 32
axes = (-1, -2)
nd_data = np.arange(L*M*N, dtype=np.complex64)
nd_data.shape = (L, M, N)
cl_data = cla.to_device(queue, nd_data)
cl_data_transformed = cla.zeros_like(cl_data)
transform = FFT(ctx, queue,
cl_data,
cl_data_transformed,
axes = axes,
)
transform.enqueue()
        print(cl_data_transformed.get())
print(np.fft.fft2(nd_data))
assert np.allclose(cl_data_transformed.get(),
np.fft.fft2(nd_data, axes=axes),
rtol=1e-3, atol=1e-3)
@parameterized.expand(contexts)
def test_2d_in_4d_out_of_place(self, ctx):
queue = cl.CommandQueue(ctx)
L1 = 4
L2 = 5
M = 64
N = 32
axes = (-1, -2) #ok
#axes = (0,1) #ok
#axes = (0,2) #cannot be collapsed
nd_data = np.arange(L1*L2*M*N, dtype=np.complex64)
nd_data.shape = (L1, L2, M, N)
cl_data = cla.to_device(queue, nd_data)
cl_data_transformed = cla.zeros_like(cl_data)
transform = FFT(ctx, queue,
cl_data,
cl_data_transformed,
axes=axes,
)
transform.enqueue()
print(cl_data_transformed.get())
print(np.fft.fft2(nd_data))
assert np.allclose(cl_data_transformed.get(),
np.fft.fft2(nd_data, axes=axes),
rtol=1e-3, atol=1e-3)
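# Hedged standalone sketch of the FFT API exercised above (added for
# illustration). Assumes an OpenCL platform is available and that an FFT
# built from a single array transforms it in place over the last axis.
def _example_inplace_1d():
    data = np.arange(64, dtype=np.complex64)
    ctx = cl.create_some_context()
    queue = cl.CommandQueue(ctx)
    cl_data = cla.to_device(queue, data)
    transform = FFT(ctx, queue, cl_data)
    for event in transform.enqueue():
        event.wait()
    assert np.allclose(cl_data.get(), np.fft.fft(data), rtol=1e-3, atol=1e-3)
if __name__ == '__main__':
    unittest.main()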
|
lgpl-3.0
| -4,428,060,298,961,073,700 | 26.87013 | 59 | 0.498602 | false |
johnnoone/aioconsul
|
tests/conftest.py
|
1
|
2597
|
import json
import os
import pytest
import subprocess
import time
from aioconsul import Consul
from tempfile import NamedTemporaryFile, TemporaryDirectory
from uuid import uuid4
def run(cmd, **kwargs):
kwargs.setdefault("stdout", subprocess.PIPE)
kwargs.setdefault("stderr", subprocess.PIPE)
return subprocess.Popen(cmd, **kwargs)
class Namespace:
def __init__(self, **attrs):
for k, v in attrs.items():
setattr(self, k, v)
@pytest.fixture(scope="session")
def master_token():
return str(uuid4())
@pytest.fixture(scope="session")
def server(master_token):
with NamedTemporaryFile(mode="w+") as file, TemporaryDirectory() as dir:
conf = {
"bootstrap_expect": 1,
"node_name": "server1",
"server": True,
"acl_datacenter": "dc1",
"acl_default_policy": "deny",
"acl_master_token": master_token,
"data_dir": dir,
"advertise_addr": "127.0.0.1"
}
json.dump(conf, file)
file.seek(0)
env = os.environ.copy()
env.setdefault('GOMAXPROCS', '4')
bin = env.get("CONSUL_BIN", "consul")
proc = run([bin, "agent", "-config-file", file.name], env=env)
buf = bytearray()
while b"cluster leadership acquired" not in buf:
buf = proc.stdout.readline()
time.sleep(.01)
if proc.poll() is not None:  # returncode is set only after poll()/wait()
raise Exception("Server failed to start")
time.sleep(.5)
yield Namespace(address="http://127.0.0.1:8500",
name="server1",
dc="dc1",
token=master_token, **conf)
proc.terminate()
@pytest.fixture(scope="function")
def client(server, event_loop):
consul = Consul(server.address, token=server.token, loop=event_loop)
yield consul
# handle some cleanup
async def cleanup(consul):
consul.token = server.token
keys, meta = await consul.kv.keys("")
for key in keys:
await consul.kv.delete(key)
await consul.catalog.deregister({
"Node": "foobar"
})
# remove created tokens
tokens, meta = await consul.acl.items()
for token in tokens:
if token["Name"].startswith("foo"):
await consul.acl.delete(token)
# remove prepared queries
queries = await consul.query.items()
for query in queries:
await consul.query.delete(query)
event_loop.run_until_complete(cleanup(consul))
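# Hedged sketch of a test consuming the `client` fixture (added for
# illustration). Assumes pytest-asyncio supplies the event loop, and that
# `kv.set`/`kv.get` mirror the kv calls already used in the cleanup above;
# the key/value pair is hypothetical.
@pytest.mark.asyncio
async def _example_kv_roundtrip(client):
    await client.kv.set("foo/example", "bar")
    value, meta = await client.kv.get("foo/example")
    assert value == "bar"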
|
bsd-3-clause
| -1,777,548,198,512,575,700 | 27.228261 | 76 | 0.577204 | false |
mrmacete/r2scripts
|
esilburner/esilburner.py
|
1
|
4718
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import r2pipe
import json
import re
import sys
from cStringIO import StringIO
xtract = re.compile('^(0x[0-9a-fA-F]*)[^;]*;(.*)$')
blacks = re.compile('[><|]')
xtflag = re.compile(r'([a-z]{3}\.[^ ;]+)')
def iter_lines(foo):
stri = StringIO(foo)
while True:
nl = stri.readline()
if nl == '':
break
yield nl.strip('\n')
class EsilBurner:
emulate_flags = ['sym.', 'fcn.']
reference_flags = ['str.', 'obj.', 'sym.', 'fcn.']
dump_commands = False
auto_reference = False
single_shot = None
def __init__(self, r, options={}):
self.r = r
if 'emulate_flags' in options:
self.emulate_flags = options['emulate_flags']
if 'auto_reference' in options:
self.auto_reference = options['auto_reference']
if 'single_shot' in options:
self.single_shot = options['single_shot']
self.initial_setup()
def cmd(self, command):
if self.dump_commands:
print "dump> " + command
return self.r.cmd(command)
def cmdj(self, command):
if self.dump_commands:
print "dump> " + command
return self.r.cmdj(command)
def is_prelude(self, addr):
pd = self.cmdj('pdj 1 @ ' + addr)
if pd is not None and len(pd) > 0 and 'opcode' in pd[0]:
return pd[0]['opcode'].startswith('lui gp')
return False
def maximize_coverage(self):
self.cmd("e anal.prelude=3c1c")
self.cmd("aap")
def initial_setup(self):
try:
self.arch = self.cmd('i~arch[1]').strip()
except Exception:
self.arch = ''
if self.arch == 'mips':
self.cmd("e anal.gp=`? (section..got+0x7ff0)~[1]`")
self.cmd("f loc._gp=`? (section..got+0x7ff0)~[1]`")
self.cmd("(pdfmips at,ar gp=loc._gp,ar t9=$0,pdf @$0)")
self.cmd("e anal.prelude=3c1c") # mips prelude "lui gp, *"
else:
self.cmd("(pdfmips at,pdf @$0)")
if self.single_shot is not None:
self.cmd("af @ " + self.single_shot)
else:
self.cmd("aa")
self.cmd("aap")
self.cmd("aa")
print "code coverage: " + self.cmd('aai~percent[1]')
def flags_to_emulate(self):
if self.single_shot is not None:
return [self.single_shot]
raw = self.cmdj("fj")
def flagtest(f):
for tf in self.emulate_flags:
if f.startswith(tf):
return True
return False
return [f['name'] for f in raw if flagtest(f['name'])]
def sanitize_command(self, command):
return re.sub(blacks, '_', command).replace(';', '')
def add_auto_reference(self, addr, comment):
m = re.findall(xtflag, comment)
if m is not None:
def flagtest(f):
for tf in self.reference_flags:
if f.startswith(tf):
return True
return False
flags = [f for f in m if flagtest(f)]
for f in flags:
if f.startswith('sym.') or f.startswith('fcn.'):
self.cmd('axC ' + f + ' @ ' + addr)
else:
self.cmd('axd ' + f + ' @ ' + addr)
def burn_emu_lines(self, lines):
for l in iter_lines(lines):
m = re.match(xtract, l)
if m is not None and len(m.groups()) == 2:
addr, comment = m.groups()
if self.auto_reference:
self.add_auto_reference(addr, comment)
command = self.sanitize_command('CCu ' + comment.strip() + ' @ ' + addr)
self.cmd(command)
# print command
def burn_emu_comments(self):
self.cmd("e scr.color=false")
self.cmd("e asm.fcnlines=false")
self.cmd("e asm.lines=false")
self.cmd("e asm.emu = true")
self.cmd("e asm.emuwrite=true")
self.cmd("aeim 0x100000 0x300000")
for f in self.flags_to_emulate():
print "emulating " + f + ' ...'
lines = self.cmd(".(pdfmips " + f + ")")
self.burn_emu_lines(lines)
self.cmd("e scr.color=true")
self.cmd("e asm.fcnlines=true")
self.cmd("e asm.lines=true")
self.cmd("e asm.emu = false")
print "code coverage: " + self.cmd('aai~percent[1]')
if __name__ == '__main__':
r = r2pipe.open("#!pipe")
options = {
"auto_reference": True
}
if len(sys.argv) > 1:
options['single_shot'] = sys.argv[1]
ms = EsilBurner(r, options)
ms.burn_emu_comments()
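# Hedged usage note (added): this script is meant to be launched from inside
# an r2 session through r2pipe's "#!pipe" backend, e.g.:
#   r2 ./target.bin
#   [0x00000000]> #!pipe python esilburner.py            # analyze all flags
#   [0x00000000]> #!pipe python esilburner.py sym.main   # single-shot mode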
|
mit
| -3,844,727,233,316,918,000 | 27.944785 | 88 | 0.504239 | false |
dwwkelly/note
|
tests/test_mongo_driver.py
|
2
|
11016
|
#!/usr/bin/env python
__author__ = "Devin Kelly"
import unittest
import note
import time
import os
import sys
class NoteDBTest(unittest.TestCase):
def setUp(self):
self.db = note.mongoDB('noteTest')
def tearDown(self):
self.db.client.drop_database("noteTest")
def test_mongodb_create_1(self):
assert self.db.noteDB['IDs'].find({"currentMax": 0}).count() == 1
assert self.db.noteDB['IDs'].find({"unusedIDs": []}).count() == 1
assert self.db.noteDB['IDs'].find().count() == 2
def test_mongodb_create_2(self):
self.db = note.mongoDB('noteTest', "mongodb://localhost:27017")
assert self.db.noteDB['IDs'].find({"currentMax": 0}).count() == 1
assert self.db.noteDB['IDs'].find({"unusedIDs": []}).count() == 1
assert self.db.noteDB['IDs'].find().count() == 2
def test_mongodb_addItem_1(self):
self.db.addItem("note", {"note": "this is a test note",
"tags": ["one", "two"]})
result = {"note": "this is a test note"}
assert self.db.noteDB['note'].find(result).count() == 1
assert self.db.noteDB['note'].find().count() == 1
assert self.db.noteDB['IDs'].find({"currentMax": 1}).count() == 1
assert self.db.noteDB['IDs'].find({"unusedIDs": []}).count() == 1
assert self.db.noteDB['IDs'].find().count() == 2
def test_mongodb_addItem_2(self):
self.db.addItem("note", {"note": "this is a test note",
"tags": ["one", "two"]})
result = {"note": "this is a test note"}
assert self.db.noteDB['note'].find(result).count() == 1
assert self.db.noteDB['note'].find().count() == 1
self.db.addItem("note", {"note": "this is a second test note",
"tags": ["three", "four"]}, 1)
result = {"note": "this is a second test note"}
assert self.db.noteDB['note'].find(result).count() == 1
result = {"tags": ["three", "four"]}
assert self.db.noteDB['note'].find(result).count() == 1
assert self.db.noteDB['note'].find().count() == 1
assert self.db.noteDB['IDs'].find({"currentMax": 1}).count() == 1
assert self.db.noteDB['IDs'].find({"unusedIDs": []}).count() == 1
assert self.db.noteDB['IDs'].find().count() == 2
def test_mongodb_getItem(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
itemContents = self.db.getItem(1)
assert itemContents['note'] == 'ONE'
assert itemContents['tags'][0] == 'one'
assert len(itemContents['tags']) == 1
itemContents = self.db.getItem(2)
assert itemContents['note'] == 'TWO'
assert itemContents['tags'][0] == 'two'
assert len(itemContents['tags']) == 1
def test_mongodb_getItemType(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addItem("todo", {"todoText": "get this done",
"done": "False",
"date": "03 24 14"})
itemType = self.db.getItemType(1)
assert itemType == "note"
itemType = self.db.getItemType(2)
assert itemType == "note"
itemType = self.db.getItemType(3)
assert itemType == "todo"
def test_mongodb_deleteItem_1(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
assert self.db.noteDB['note'].find({"note": "ONE"}).count() == 1
assert self.db.noteDB['note'].find({"note": "TWO"}).count() == 1
self.db.deleteItem(2)
assert self.db.noteDB['note'].find({"note": "ONE"}).count() == 1
assert self.db.noteDB['note'].find({"note": "TWO"}).count() == 0
assert self.db.noteDB['note'].find().count() == 1
assert self.db.noteDB['IDs'].find({"currentMax": 1}).count() == 1
assert self.db.noteDB['IDs'].find({"unusedIDs": []}).count() == 1
def test_mongodb_deleteItem_2(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
assert self.db.noteDB['note'].find({"note": "ONE"}).count() == 1
assert self.db.noteDB['note'].find({"note": "TWO"}).count() == 1
with self.assertRaises(ValueError):
self.db.deleteItem(100)
assert self.db.noteDB['note'].find({"note": "ONE"}).count() == 1
assert self.db.noteDB['note'].find({"note": "TWO"}).count() == 1
assert self.db.noteDB['note'].find().count() == 2
assert self.db.noteDB['IDs'].find({"currentMax": 2}).count() == 1
assert self.db.noteDB['IDs'].find({"unusedIDs": []}).count() == 1
def test_mongodb_getByTime(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
ids = self.db.getByTime(startTime=time.time() - 1,
endTime=time.time() + 1)
assert ids == [1, 2]
ids = self.db.getByTime(startTime=time.time() + 1,
endTime=time.time() + 4)
assert ids == []
def test_mongodb_getDone(self):
self.db.addItem("todo", {"todoText": "get this done",
"done": "False",
"date": "03 25 14"})
self.db.addItem("todo", {"todoText": "get this done!",
"done": "True",
"date": "03 26 14"})
self.db.addItem("todo", {"todoText": "get this done!!",
"done": "True",
"date": "03 27 14"})
self.db.addItem("todo", {"todoText": "get this done!!!",
"done": "False",
"date": "03 28 14"})
# Use sets so order doesn't matter
ids = self.db.getDone("True")
assert set(ids) == set([2, 3])
ids = self.db.getDone("False")
assert set(ids) == set([1, 4])
def test_mongodb_getNewID(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addItem("note", {"note": "THREE", "tags": ["three"]})
assert 4 == self.db.getNewID()
self.db.deleteItem(2)
assert 2 == self.db.getNewID()
def test_mongodb_makeBackupFile(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.makeBackupFile('/tmp', 'noteTestBackupFile.zip')
assert os.path.isfile('/tmp/noteTestBackupFile.zip')
os.remove('/tmp/noteTestBackupFile.zip')
def test_mongodb_searchForItem_1(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
results = self.db.searchForItem("one")
assert results[0]['obj']['note'] == "ONE"
assert len(results) == 1
def test_mongodb_searchForItem_2(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addItem("note", {"note": "ONE THREE",
"tags": ["one", "three"]})
results = self.db.searchForItem("one", sortBy="date")
assert results[0]['obj']['note'] == "ONE"
assert results[0]['obj']['tags'] == ["one"]
assert results[1]['obj']['note'] == "ONE THREE"
assert results[1]['obj']['tags'] == ["one", "three"]
assert len(results) == 2
def test_mongodb_searchForItem_3(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addItem("note", {"note": "ONE THREE",
"tags": ["one", "three"]})
results = self.db.searchForItem("one", sortBy="id")
assert results[0]['obj']['note'] == "ONE"
assert results[0]['obj']['tags'] == ["one"]
assert results[1]['obj']['note'] == "ONE THREE"
assert results[1]['obj']['tags'] == ["one", "three"]
assert len(results) == 2
def test_mongodb_verify(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addItem("note", {"note": "THREE", "tags": ["three"]})
from StringIO import StringIO
try:
saved_stdout = sys.stdout
out = StringIO()
sys.stdout = out
self.db.verify()
output = out.getvalue().strip()
finally:
sys.stdout = saved_stdout
self.assertEqual(output, 'Database is valid')
def test_mongodb_get_by_time_1(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
t1 = time.time()
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
results = self.db.getByTime(startTime=t1)
self.assertEqual(results, [2])
self.assertEqual(len(results), 1)
def test_mongodb_get_by_time_2(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
t1 = time.time()
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
results = self.db.getByTime(endTime=t1)
self.assertEqual(results, [1])
self.assertEqual(len(results), 1)
def test_mongodb_add_label_1(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addLabel("testLabel", 1)
self.db.addLabel("anothertestLabel", 2)
results = self.db.getIDByLabel("testLabel")
self.assertEqual(results, 1)
results = self.db.getIDByLabel("anothertestLabel")
self.assertEqual(results, 2)
def test_mongodb_add_label_2(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addLabel("testLabel", 1)
results = self.db.addLabel("testLabel", 1)
self.assertEqual(results, None)
results = self.db.addLabel("testLabel", 2)
self.assertEqual(results, None)
def test_mongodb_delete_label(self):
self.db.addItem("note", {"note": "ONE", "tags": ["one"]})
self.db.addItem("note", {"note": "TWO", "tags": ["two"]})
self.db.addLabel("testLabel", 1)
results = self.db.getIDByLabel("testLabel")
self.assertEqual(results, 1)
self.db.deleteLabel('testLabel')
results = self.db.getIDByLabel("testLabel")
self.assertEqual(results, None)
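if __name__ == '__main__':
    # Minimal runner (added) so the file can be executed directly, matching
    # its shebang; unittest discovers the NoteDBTest cases above.
    unittest.main()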
|
gpl-2.0
| 7,981,978,288,582,214,000 | 38.625899 | 73 | 0.523057 | false |
BreizhGeek/wavedrompy
|
wavedrom/waveskin.py
|
1
|
67433
|
from . import css
WaveSkin = {}
WaveSkin['default'] = ["svg", {"id": "svg", "xmlns": "http://www.w3.org/2000/svg", "xmlns:xlink": "http://www.w3.org/1999/xlink", "height": "0"},
["style", {"type": "text/css"}, css.css.default],
["defs",
["g", {"id": "socket"},
["rect", {"y": "15", "x": "6", "height": "20", "width": "20"}]
],
["g", {"id": "pclk"},
["path", {"d": "M0,20 0,0 20,0", "class": "s1"}]
],
["g", {"id": "nclk"},
["path", {"d": "m0,0 0,20 20,0", "class": "s1"}]
],
["g", {"id": "000"},
["path", {"d": "m0,20 20,0", "class": "s1"}]
],
["g", {"id": "0m0"},
["path", {"d": "m0,20 3,0 3,-10 3,10 11,0", "class": "s1"}]
],
["g", {"id": "0m1"},
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]
],
["g", {"id": "0mx"},
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 5,20", "class": "s2"}],
["path", {"d": "M20,0 4,16", "class": "s2"}],
["path", {"d": "M15,0 6,9", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]
],
["g", {"id": "0md"},
["path", {"d": "m8,20 10,0", "class": "s3"}],
["path", {"d": "m0,20 5,0", "class": "s1"}]
],
["g", {"id": "0mu"}, ["path", {"d": "m0,20 3,0 C 7,10 10.107603,0 20,0", "class": "s1"}]],
["g", {"id": "0mz"}, ["path", {"d": "m0,20 3,0 C 10,10 15,10 20,10", "class": "s1"}]],
["g", {"id": "111"}, ["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "1m0"}, ["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}]],
["g", {"id": "1m1"}, ["path", {"d": "M0,0 3,0 6,10 9,0 20,0", "class": "s1"}]],
["g", {"id": "1mx"}, ["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 6,9", "class": "s2"}],
["path", {"d": "M10,0 5,5", "class": "s2"}],
["path", {"d": "M3.5,1.5 5,0", "class": "s2"}]],
["g", {"id": "1md"}, ["path", {"d": "m0,0 3,0 c 4,10 7,20 17,20", "class": "s1"}]],
["g", {"id": "1mu"}, ["path", {"d": "M0,0 5,0", "class": "s1"}],
["path", {"d": "M8,0 18,0", "class": "s3"}]],
["g", {"id": "1mz"}, ["path", {"d": "m0,0 3,0 c 7,10 12,10 17,10", "class": "s1"}]],
["g", {"id": "xxx"}, ["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,5 5,0", "class": "s2"}],
["path", {"d": "M0,10 10,0", "class": "s2"}],
["path", {"d": "M0,15 15,0", "class": "s2"}],
["path", {"d": "M0,20 20,0", "class": "s2"}],
["path", {"d": "M5,20 20,5", "class": "s2"}],
["path", {"d": "M10,20 20,10", "class": "s2"}],
["path", {"d": "m15,20 5,-5", "class": "s2"}]],
["g", {"id": "xm0"}, ["path", {"d": "M0,0 4,0 9,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,5 4,1", "class": "s2"}],
["path", {"d": "M0,10 5,5", "class": "s2"}],
["path", {"d": "M0,15 6,9", "class": "s2"}],
["path", {"d": "M0,20 7,13", "class": "s2"}],
["path", {"d": "M5,20 8,17", "class": "s2"}]],
["g", {"id": "xm1"}, ["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,20 4,20 9,0", "class": "s1"}],
["path", {"d": "M0,5 5,0", "class": "s2"}],
["path", {"d": "M0,10 9,1", "class": "s2"}],
["path", {"d": "M0,15 7,8", "class": "s2"}],
["path", {"d": "M0,20 5,15", "class": "s2"}]],
["g", {"id": "xmx"}, ["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,5 5,0", "class": "s2"}],
["path", {"d": "M0,10 10,0", "class": "s2"}],
["path", {"d": "M0,15 15,0", "class": "s2"}],
["path", {"d": "M0,20 20,0", "class": "s2"}],
["path", {"d": "M5,20 20,5", "class": "s2"}],
["path", {"d": "M10,20 20,10", "class": "s2"}],
["path", {"d": "m15,20 5,-5", "class": "s2"}]],
["g", {"id": "xmd"}, ["path", {"d": "m0,0 4,0 c 3,10 6,20 16,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,5 4,1", "class": "s2"}],
["path", {"d": "M0,10 5.5,4.5", "class": "s2"}],
["path", {"d": "M0,15 6.5,8.5", "class": "s2"}],
["path", {"d": "M0,20 8,12", "class": "s2"}],
["path", {"d": "m5,20 5,-5", "class": "s2"}],
["path", {"d": "m10,20 2.5,-2.5", "class": "s2"}]],
["g", {"id": "xmu"}, ["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 4,0 C 7,10 10,0 20,0", "class": "s1"}],
["path", {"d": "M0,5 5,0", "class": "s2"}],
["path", {"d": "M0,10 10,0", "class": "s2"}],
["path", {"d": "M0,15 10,5", "class": "s2"}],
["path", {"d": "M0,20 6,14", "class": "s2"}]],
["g", {"id": "xmz"}, ["path", {"d": "m0,0 4,0 c 6,10 11,10 16,10", "class": "s1"}],
["path", {"d": "m0,20 4,0 C 10,10 15,10 20,10", "class": "s1"}],
["path", {"d": "M0,5 4.5,0.5", "class": "s2"}],
["path", {"d": "M0,10 6.5,3.5", "class": "s2"}],
["path", {"d": "M0,15 8.5,6.5", "class": "s2"}],
["path", {"d": "M0,20 11.5,8.5", "class": "s2"}]],
["g", {"id": "ddd"}, ["path", {"d": "m0,20 20,0", "class": "s3"}]],
["g", {"id": "dm0"}, ["path", {"d": "m0,20 10,0", "class": "s3"}],
["path", {"d": "m12,20 8,0", "class": "s1"}]],
["g", {"id": "dm1"}, ["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "dmx"}, ["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 5,20", "class": "s2"}],
["path", {"d": "M20,0 4,16", "class": "s2"}],
["path", {"d": "M15,0 6,9", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "dmd"}, ["path", {"d": "m0,20 20,0", "class": "s3"}]],
["g", {"id": "dmu"}, ["path", {"d": "m0,20 3,0 C 7,10 10.107603,0 20,0", "class": "s1"}]],
["g", {"id": "dmz"}, ["path", {"d": "m0,20 3,0 C 10,10 15,10 20,10", "class": "s1"}]],
["g", {"id": "uuu"}, ["path", {"d": "M0,0 20,0", "class": "s3"}]],
["g", {"id": "um0"}, ["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}]],
["g", {"id": "um1"}, ["path", {"d": "M0,0 10,0", "class": "s3"}],
["path", {"d": "m12,0 8,0", "class": "s1"}]],
["g", {"id": "umx"}, ["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 6,9", "class": "s2"}],
["path", {"d": "M10,0 5,5", "class": "s2"}],
["path", {"d": "M3.5,1.5 5,0", "class": "s2"}]],
["g", {"id": "umd"}, ["path", {"d": "m0,0 3,0 c 4,10 7,20 17,20", "class": "s1"}]],
["g", {"id": "umu"}, ["path", {"d": "M0,0 20,0", "class": "s3"}]],
["g", {"id": "umz"}, ["path", {"d": "m0,0 3,0 c 7,10 12,10 17,10", "class": "s4"}]],
["g", {"id": "zzz"}, ["path", {"d": "m0,10 20,0", "class": "s1"}]],
["g", {"id": "zm0"}, ["path", {"d": "m0,10 6,0 3,10 11,0", "class": "s1"}]],
["g", {"id": "zm1"}, ["path", {"d": "M0,10 6,10 9,0 20,0", "class": "s1"}]],
["g", {"id": "zmx"}, ["path", {"d": "m6,10 3,10 11,0", "class": "s1"}],
["path", {"d": "M0,10 6,10 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 6.5,8.5", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}]],
["g", {"id": "zmd"}, ["path", {"d": "m0,10 7,0 c 3,5 8,10 13,10", "class": "s1"}]],
["g", {"id": "zmu"}, ["path", {"d": "m0,10 7,0 C 10,5 15,0 20,0", "class": "s1"}]],
["g", {"id": "zmz"}, ["path", {"d": "m0,10 20,0", "class": "s1"}]],
["g", {"id": "gap"}, ["path", {"d": "m7,-2 -4,0 c -5,0 -5,24 -10,24 l 4,0 C 2,22 2,-2 7,-2 z", "class": "s5"}],
["path", {"d": "M-7,22 C -2,22 -2,-2 3,-2", "class": "s1"}],
["path", {"d": "M-3,22 C 2,22 2,-2 7,-2", "class": "s1"}]],
["g", {"id": "0mv-3"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s6"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "1mv-3"}, ["path", {"d": "M2.875,0 20,0 20,20 9,20 z", "class": "s6"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "xmv-3"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s6"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,5 3.5,1.5", "class": "s2"}],
["path", {"d": "M0,10 4.5,5.5", "class": "s2"}],
["path", {"d": "M0,15 6,9", "class": "s2"}],
["path", {"d": "M0,20 4,16", "class": "s2"}]],
["g", {"id": "dmv-3"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s6"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "umv-3"}, ["path", {"d": "M3,0 20,0 20,20 9,20 z", "class": "s6"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "zmv-3"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s6"}],
["path", {"d": "m6,10 3,10 11,0", "class": "s1"}],
["path", {"d": "M0,10 6,10 9,0 20,0", "class": "s1"}]],
["g", {"id": "vvv-3"}, ["path", {"d": "M20,20 0,20 0,0 20,0", "class": "s6"}],
["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vm0-3"}, ["path", {"d": "M0,20 0,0 3,0 9,20", "class": "s6"}],
["path", {"d": "M0,0 3,0 9,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vm1-3"}, ["path", {"d": "M0,0 0,20 3,20 9,0", "class": "s6"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0", "class": "s1"}]],
["g", {"id": "vmx-3"}, ["path", {"d": "M0,0 0,20 3,20 6,10 3,0", "class": "s6"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 7,8", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}]],
["g", {"id": "vmd-3"}, ["path", {"d": "m0,0 0,20 20,0 C 10,20 7,10 3,0", "class": "s6"}],
["path", {"d": "m0,0 3,0 c 4,10 7,20 17,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vmu-3"}, ["path", {"d": "m0,0 0,20 3,0 C 7,10 10,0 20,0", "class": "s6"}],
["path", {"d": "m0,20 3,0 C 7,10 10,0 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vmz-3"}, ["path", {"d": "M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20", "class": "s6"}],
["path", {"d": "m0,0 3,0 c 7,10 12,10 17,10", "class": "s1"}],
["path", {"d": "m0,20 3,0 C 10,10 15,10 20,10", "class": "s1"}]],
["g", {"id": "vmv-3-3"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s6"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s6"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-3-4"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s7"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s6"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-3-5"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s8"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s6"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-4-3"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s6"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s7"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-4-4"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s7"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s7"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-4-5"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s8"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s7"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-5-3"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s6"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s8"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-5-4"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s7"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s8"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-5-5"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s8"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s8"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "0mv-4"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s7"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "1mv-4"}, ["path", {"d": "M2.875,0 20,0 20,20 9,20 z", "class": "s7"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "xmv-4"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s7"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,5 3.5,1.5", "class": "s2"}],
["path", {"d": "M0,10 4.5,5.5", "class": "s2"}],
["path", {"d": "M0,15 6,9", "class": "s2"}],
["path", {"d": "M0,20 4,16", "class": "s2"}]],
["g", {"id": "dmv-4"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s7"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "umv-4"}, ["path", {"d": "M3,0 20,0 20,20 9,20 z", "class": "s7"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "zmv-4"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s7"}],
["path", {"d": "m6,10 3,10 11,0", "class": "s1"}],
["path", {"d": "M0,10 6,10 9,0 20,0", "class": "s1"}]],
["g", {"id": "0mv-5"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s8"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "1mv-5"}, ["path", {"d": "M2.875,0 20,0 20,20 9,20 z", "class": "s8"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "xmv-5"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s8"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,5 3.5,1.5", "class": "s2"}],
["path", {"d": "M0,10 4.5,5.5", "class": "s2"}],
["path", {"d": "M0,15 6,9", "class": "s2"}],
["path", {"d": "M0,20 4,16", "class": "s2"}]],
["g", {"id": "dmv-5"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s8"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "umv-5"}, ["path", {"d": "M3,0 20,0 20,20 9,20 z", "class": "s8"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "zmv-5"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s8"}],
["path", {"d": "m6,10 3,10 11,0", "class": "s1"}],
["path", {"d": "M0,10 6,10 9,0 20,0", "class": "s1"}]],
["g", {"id": "vvv-4"}, ["path", {"d": "M20,20 0,20 0,0 20,0", "class": "s7"}],
["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vm0-4"}, ["path", {"d": "M0,20 0,0 3,0 9,20", "class": "s7"}],
["path", {"d": "M0,0 3,0 9,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vm1-4"}, ["path", {"d": "M0,0 0,20 3,20 9,0", "class": "s7"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0", "class": "s1"}]],
["g", {"id": "vmx-4"}, ["path", {"d": "M0,0 0,20 3,20 6,10 3,0", "class": "s7"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 7,8", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}]],
["g", {"id": "vmd-4"}, ["path", {"d": "m0,0 0,20 20,0 C 10,20 7,10 3,0", "class": "s7"}],
["path", {"d": "m0,0 3,0 c 4,10 7,20 17,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vmu-4"}, ["path", {"d": "m0,0 0,20 3,0 C 7,10 10,0 20,0", "class": "s7"}],
["path", {"d": "m0,20 3,0 C 7,10 10,0 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vmz-4"}, ["path", {"d": "M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20", "class": "s7"}],
["path", {"d": "m0,0 3,0 c 7,10 12,10 17,10", "class": "s1"}],
["path", {"d": "m0,20 3,0 C 10,10 15,10 20,10", "class": "s1"}]],
["g", {"id": "vvv-5"}, ["path", {"d": "M20,20 0,20 0,0 20,0", "class": "s8"}],
["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vm0-5"}, ["path", {"d": "M0,20 0,0 3,0 9,20", "class": "s8"}],
["path", {"d": "M0,0 3,0 9,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vm1-5"}, ["path", {"d": "M0,0 0,20 3,20 9,0", "class": "s8"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0", "class": "s1"}]],
["g", {"id": "vmx-5"}, ["path", {"d": "M0,0 0,20 3,20 6,10 3,0", "class": "s8"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 7,8", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}]],
["g", {"id": "vmd-5"}, ["path", {"d": "m0,0 0,20 20,0 C 10,20 7,10 3,0", "class": "s8"}],
["path", {"d": "m0,0 3,0 c 4,10 7,20 17,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vmu-5"}, ["path", {"d": "m0,0 0,20 3,0 C 7,10 10,0 20,0", "class": "s8"}],
["path", {"d": "m0,20 3,0 C 7,10 10,0 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vmz-5"}, ["path", {"d": "M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20", "class": "s8"}],
["path", {"d": "m0,0 3,0 c 7,10 12,10 17,10", "class": "s1"}],
["path", {"d": "m0,20 3,0 C 10,10 15,10 20,10", "class": "s1"}]],
["g", {"id": "Pclk"}, ["path", {"d": "M-3,12 0,3 3,12 C 1,11 -1,11 -3,12 z", "class": "s9"}],
["path", {"d": "M0,20 0,0 20,0", "class": "s1"}]],
["g", {"id": "Nclk"}, ["path", {"d": "M-3,8 0,17 3,8 C 1,9 -1,9 -3,8 z", "class": "s9"}],
["path", {"d": "m0,0 0,20 20,0", "class": "s1"}]],
["g", {"id": "vvv-2"}, ["path", {"d": "M20,20 0,20 0,0 20,0", "class": "s10"}],
["path", {"d": "m0,20 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vm0-2"}, ["path", {"d": "M0,20 0,0 3,0 9,20", "class": "s10"}],
["path", {"d": "M0,0 3,0 9,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vm1-2"}, ["path", {"d": "M0,0 0,20 3,20 9,0", "class": "s10"}],
["path", {"d": "M0,0 20,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0", "class": "s1"}]],
["g", {"id": "vmx-2"}, ["path", {"d": "M0,0 0,20 3,20 6,10 3,0", "class": "s10"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m20,15 -5,5", "class": "s2"}],
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 8,17", "class": "s2"}],
["path", {"d": "M20,0 7,13", "class": "s2"}],
["path", {"d": "M15,0 7,8", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}]],
["g", {"id": "vmd-2"}, ["path", {"d": "m0,0 0,20 20,0 C 10,20 7,10 3,0", "class": "s10"}],
["path", {"d": "m0,0 3,0 c 4,10 7,20 17,20", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "vmu-2"}, ["path", {"d": "m0,0 0,20 3,0 C 7,10 10,0 20,0", "class": "s10"}],
["path", {"d": "m0,20 3,0 C 7,10 10,0 20,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "vmz-2"}, ["path", {"d": "M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20", "class": "s10"}],
["path", {"d": "m0,0 3,0 c 7,10 12,10 17,10", "class": "s1"}],
["path", {"d": "m0,20 3,0 C 10,10 15,10 20,10", "class": "s1"}]],
["g", {"id": "0mv-2"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s10"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "1mv-2"}, ["path", {"d": "M2.875,0 20,0 20,20 9,20 z", "class": "s10"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "xmv-2"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s10"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,5 3.5,1.5", "class": "s2"}],
["path", {"d": "M0,10 4.5,5.5", "class": "s2"}],
["path", {"d": "M0,15 6,9", "class": "s2"}],
["path", {"d": "M0,20 4,16", "class": "s2"}]],
["g", {"id": "dmv-2"}, ["path", {"d": "M9,0 20,0 20,20 3,20 z", "class": "s10"}],
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}],
["path", {"d": "m0,20 20,0", "class": "s1"}]],
["g", {"id": "umv-2"}, ["path", {"d": "M3,0 20,0 20,20 9,20 z", "class": "s10"}],
["path", {"d": "m3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,0 20,0", "class": "s1"}]],
["g", {"id": "zmv-2"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s10"}],
["path", {"d": "m6,10 3,10 11,0", "class": "s1"}],
["path", {"d": "M0,10 6,10 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-3-2"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s10"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s6"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-4-2"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s10"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s7"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-5-2"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s10"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s8"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-2-3"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s6"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s10"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-2-4"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s7"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s10"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-2-5"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s8"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s10"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "vmv-2-2"}, ["path", {"d": "M9,0 20,0 20,20 9,20 6,10 z", "class": "s10"}],
["path", {"d": "M3,0 0,0 0,20 3,20 6,10 z", "class": "s10"}],
["path", {"d": "m0,0 3,0 6,20 11,0", "class": "s1"}],
["path", {"d": "M0,20 3,20 9,0 20,0", "class": "s1"}]],
["g", {"id": "arrow0"},
["path", {"d": "m-12,-3 9,3 -9,3 c 1,-2 1,-4 0,-6 z", "class": "s11"}],
["path", {"d": "M0,0 -15,0", "class": "s12"}]
],
["marker", {"id": "arrowhead", "style": "fill:#0041c4", "markerHeight": "7", "markerWidth": "10", "markerUnits": "strokeWidth",
"viewBox": "0 -4 11 8", "refX": "15", "refY": "0", "orient": "auto"},
["path", {"d": "M0 -4 11 0 0 4z"}]
],
["marker", {"id": "arrowtail", "style": "fill:#0041c4", "markerHeight": "7", "markerWidth": "10", "markerUnits": "strokeWidth", "viewBox": "-11 -4 11 8", "refX": "-15", "refY": "0", "orient": "auto"},
["path", {"d": "M0 -4 -11 0 0 4z"}]
]
],
["g", {"id": "waves"},
["g", {"id": "lanes"}],
["g", {"id": "groups"}]
]
]
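# Hedged helper sketch (added): each skin is a JsonML-style tree of
# [tag, attrs, *children]; a renderer can fetch the <g> "brick" for a wave
# state by its id, e.g. _find_brick(WaveSkin['default'], 'pclk').
def _find_brick(skin, brick_id):
    """Depth-first search for the ["g", {"id": brick_id}, ...] node."""
    if isinstance(skin, list):
        if len(skin) > 1 and isinstance(skin[1], dict) and skin[1].get("id") == brick_id:
            return skin
        for child in skin[1:]:
            found = _find_brick(child, brick_id)
            if found is not None:
                return found
    return None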
WaveSkin['narrow'] = ["svg", {"id": "svg", "xmlns": "http://www.w3.org/2000/svg", "xmlns:xlink": "http://www.w3.org/1999/xlink", "height": "0"}, ["style", {"type": "text/css"}, css.css.narrow],
["defs",
["g", {"id": "socket"},
["rect", {"y": "15", "x": "4", "height": "20", "width": "10"}]
],
["g", {"id": "pclk"},
["path", {"d": "M 0,20 0,0 10,0", "class": "s1"}]
],
["g", {"id": "nclk"},
["path", {"d": "m 0,0 0,20 10,0", "class": "s1"}]
],
["g", {"id": "000"},
["path", {"d": "m 0,20 10,0", "class": "s1"}]
],
["g", {"id": "0m0"},
["path", {"d": "m 0,20 1,0 3,-10 3,10 3,0", "class": "s1"}]
],
["g", {"id": "0m1"},
["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]
],
["g", {"id": "0mx"},
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}],
["path", {"d": "M 10,15 5,20", "class": "s2"}],
["path", {"d": "M 10,10 2,18", "class": "s2"}],
["path", {"d": "M 10,5 4,11", "class": "s2"}],
["path", {"d": "M 10,0 6,4", "class": "s2"}],
["path", {"d": "m 0,20 10,0", "class": "s1"}]
],
["g", {"id": "0md"},
["path", {"d": "m 1,20 9,0", "class": "s3"}],
["path", {"d": "m 0,20 1,0", "class": "s1"}]
],
["g", {"id": "0mu"},
["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}]
],
["g", {"id": "0mz"},
["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}]
],
["g", {"id": "111"},
["path", {"d": "M 0,0 10,0", "class": "s1"}]
],
["g", {"id": "1m0"},
["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}]
],
["g", {"id": "1m1"},
["path", {"d": "M 0,0 1,0 4,10 7,0 10,0", "class": "s1"}]
],
["g", {"id": "1mx"},
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}],
["path", {"d": "M 0,0 10,0", "class": "s1"}],
["path", {"d": "M 10,15 6.5,18.5", "class": "s2"}],
["path", {"d": "M 10,10 5.5,14.5", "class": "s2"}],
["path", {"d": "M 10,5 4.5,10.5", "class": "s2"}],
["path", {"d": "M 10,0 3,7", "class": "s2"}],
["path", {"d": "M 2,3 5,0", "class": "s2"}]
],
["g", {"id": "1md"},
["path", {"d": "m 0,0 1,0 c 1,7 4,20 9,20", "class": "s1"}]
],
["g", {"id": "1mu"},
["path", {"d": "M 0,0 1,0", "class": "s1"}],
["path", {"d": "m 1,0 9,0", "class": "s3"}]
],
["g", {"id": "1mz"}, ["path", {"d": "m 0,0 1,0 c 2,4 6,10 9,10", "class": "s1"}]],
["g", {"id": "xxx"}, ["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,5 5,0", "class": "s2"}], ["path", {
"d": "M 0,10 10,0", "class": "s2"}], ["path", {"d": "M 0,15 10,5", "class": "s2"}], ["path", {"d": "M 0,20 10,10", "class": "s2"}], ["path", {"d": "m 5,20 5,-5", "class": "s2"}]],
["g", {"id": "xm0"}, ["path", {"d": "M 0,0 1,0 7,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,5 2,3", "class": "s2"}], ["path", {
"d": "M 0,10 3,7", "class": "s2"}], ["path", {"d": "M 0,15 4,11", "class": "s2"}], ["path", {"d": "M 0,20 5,15", "class": "s2"}], ["path", {"d": "M 5,20 6,19", "class": "s2"}]],
["g", {"id": "xm1"}, ["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0", "class": "s1"}], ["path", {"d": "M 0,5 5,0", "class": "s2"}], [
"path", {"d": "M 0,10 6,4", "class": "s2"}], ["path", {"d": "M 0,15 3,12", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "xmx"}, ["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,5 5,0", "class": "s2"}], ["path", {
"d": "M 0,10 10,0", "class": "s2"}], ["path", {"d": "M 0,15 10,5", "class": "s2"}], ["path", {"d": "M 0,20 10,10", "class": "s2"}], ["path", {"d": "m 5,20 5,-5", "class": "s2"}]],
["g", {"id": "xmd"}, ["path", {"d": "m 0,0 1,0 c 1,7 4,20 9,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,5 1.5,3.5", "class": "s2"}], ["path", {
"d": "M 0,10 2.5,7.5", "class": "s2"}], ["path", {"d": "M 0,15 3.5,11.5", "class": "s2"}], ["path", {"d": "M 0,20 5,15", "class": "s2"}], ["path", {"d": "M 5,20 7,18", "class": "s2"}]],
["g", {"id": "xmu"}, ["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}], ["path", {"d": "M 0,5 5,0",
"class": "s2"}], ["path", {"d": "M 0,10 5,5", "class": "s2"}], ["path", {"d": "M 0,15 2,13", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "xmz"}, ["path", {"d": "m 0,0 1,0 c 2,6 6,10 9,10", "class": "s1"}], ["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}], ["path", {
"d": "M 0,5 2,3", "class": "s2"}], ["path", {"d": "M 0,10 4,6", "class": "s2"}], ["path", {"d": "m 0,15.5 6,-7", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "ddd"}, ["path", {"d": "m 0,20 10,0", "class": "s3"}]],
["g", {"id": "dm0"}, ["path", {"d": "m 0,20 7,0", "class": "s3"}],
["path", {"d": "m 7,20 3,0", "class": "s1"}]],
["g", {"id": "dm1"}, ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "dmx"}, ["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "M 10,15 5,20", "class": "s2"}], ["path", {"d": "M 10,10 1.5,18.5",
"class": "s2"}], ["path", {"d": "M 10,5 4,11", "class": "s2"}], ["path", {"d": "M 10,0 6,4", "class": "s2"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "dmd"}, ["path", {"d": "m 0,20 10,0", "class": "s3"}]],
["g", {"id": "dmu"}, ["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}]],
["g", {"id": "dmz"}, ["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}]],
["g", {"id": "uuu"}, ["path", {"d": "M 0,0 10,0", "class": "s3"}]],
["g", {"id": "um0"}, ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}]],
["g", {"id": "um1"}, ["path", {"d": "M 0,0 7,0", "class": "s3"}],
["path", {"d": "m 7,0 3,0", "class": "s1"}]],
["g", {"id": "umx"}, ["path", {"d": "M 1.4771574,0 7,20 l 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 10,15 6.5,18.5", "class": "s2"}], ["path", {
"d": "M 10,10 5.5,14.5", "class": "s2"}], ["path", {"d": "M 10,5 4.5,10.5", "class": "s2"}], ["path", {"d": "M 10,0 3.5,6.5", "class": "s2"}], ["path", {"d": "M 2.463621,2.536379 5,0", "class": "s2"}]],
["g", {"id": "umd"}, ["path", {"d": "m 0,0 1,0 c 1,7 4,20 9,20", "class": "s1"}]],
["g", {"id": "umu"}, ["path", {"d": "M 0,0 10,0", "class": "s3"}]],
["g", {"id": "umz"}, ["path", {"d": "m 0,0 1,0 c 2,6 6,10 9,10", "class": "s4"}]],
["g", {"id": "zzz"}, ["path", {"d": "m 0,10 10,0", "class": "s1"}]],
["g", {"id": "zm0"}, ["path", {"d": "m 0,10 1,0 4,10 5,0", "class": "s1"}]],
["g", {"id": "zm1"}, ["path", {"d": "M 0,10 1,10 5,0 10,0", "class": "s1"}]],
["g", {"id": "zmx"}, ["path", {"d": "m 1,10 4,10 5,0", "class": "s1"}], ["path", {"d": "M 0,10 1,10 5,0 10,0", "class": "s1"}], ["path", {"d": "M 10,15 5,20", "class": "s2"}], [
"path", {"d": "M 10,10 4,16", "class": "s2"}], ["path", {"d": "M 10,5 2.5,12.5", "class": "s2"}], ["path", {"d": "M 10,0 2,8", "class": "s2"}]],
["g", {"id": "zmd"}, ["path", {"d": "m 0,10 1,0 c 2,6 6,10 9,10", "class": "s1"}]],
["g", {"id": "zmu"}, ["path", {"d": "m 0,10 1,0 C 3,4 7,0 10,0", "class": "s1"}]],
["g", {"id": "zmz"}, ["path", {"d": "m 0,10 10,0", "class": "s1"}]],
["g", {"id": "gap"}, ["path", {"d": "m 7,-2 -4,0 c -5,0 -5,24 -10,24 l 4,0 C 2,22 2,-2 7,-2 z", "class": "s5"}],
["path", {"d": "M -7,22 C -2,22 -2,-2 3,-2", "class": "s1"}], ["path", {"d": "M -3,22 C 2,22 2,-2 7,-2", "class": "s1"}]],
["g", {"id": "0mv-3"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s6"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "1mv-3"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s6"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "xmv-3"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s6"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}],
["path", {"d": "M 0,5 2,3", "class": "s2"}], ["path", {"d": "M 0,10 3,7", "class": "s2"}], ["path", {"d": "M 0,15 3,12", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "dmv-3"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s6"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "umv-3"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s6"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "zmv-3"}, ["path", {"d": "M 5,0 10,0 10,20 5,20 1,10 z", "class": "s6"}],
["path", {"d": "m 1,10 4,10 5,0", "class": "s1"}], ["path", {"d": "M 0,10 1,10 5,0 10,0", "class": "s1"}]],
["g", {"id": "vvv-3"}, ["path", {"d": "M 10,20 0,20 0,0 10,0", "class": "s6"}],
["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vm0-3"}, ["path", {"d": "m 0,20 0,-20 1.000687,-0.00391 6,20", "class": "s6"}], ["path", {
"d": "m 0,0 1.000687,-0.00391 6,20", "class": "s1"}], ["path", {"d": "m 0,20 10.000687,-0.0039", "class": "s1"}]],
["g", {"id": "vm1-3"}, ["path", {"d": "M 0,0 0,20 1,20 7,0", "class": "s6"}],
["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0", "class": "s1"}]],
["g", {"id": "vmx-3"}, ["path", {"d": "M 0,0 0,20 1,20 4,10 1,0", "class": "s6"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}],
["path", {"d": "M 10,15 6.5,18.5", "class": "s2"}], ["path", {"d": "M 10,10 5.5,14.5", "class": "s2"}], ["path", {"d": "M 10,5 4,11", "class": "s2"}], ["path", {"d": "M 10,0 6,4", "class": "s2"}]],
["g", {"id": "vmd-3"}, ["path", {"d": "m 0,0 0,20 10,0 C 5,20 2,7 1,0", "class": "s6"}],
["path", {"d": "m 0,0 1,0 c 1,7 4,20 9,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "vmu-3"}, ["path", {"d": "m 0,0 0,20 1,0 C 2,13 5,0 10,0", "class": "s6"}],
["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vmz-3"}, ["path", {"d": "M 0,0 1,0 C 3,6 7,10 10,10 7,10 3,14 1,20 L 0,20", "class": "s6"}], ["path",
{"d": "m 0,0 1,0 c 2,6 6,10 9,10", "class": "s1"}], ["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}]],
["g", {"id": "vmv-3-3"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s6"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s6"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-3-4"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s7"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s6"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-3-5"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s8"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s6"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-4-3"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s6"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s7"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-4-4"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s7"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s7"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-4-5"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s8"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s7"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-5-3"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s6"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s8"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-5-4"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s7"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s8"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-5-5"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s8"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s8"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "0mv-4"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s7"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "1mv-4"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s7"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "xmv-4"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s7"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}],
["path", {"d": "M 0,5 2,3", "class": "s2"}], ["path", {"d": "M 0,10 3,7", "class": "s2"}], ["path", {"d": "M 0,15 4,11", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "dmv-4"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s7"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "umv-4"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s7"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "zmv-4"}, ["path", {"d": "M 5,0 10,0 10,20 5,20 1,10 z", "class": "s7"}],
["path", {"d": "m 1,10 4,10 5,0", "class": "s1"}], ["path", {"d": "M 0,10 1,10 5,0 10,0", "class": "s1"}]],
["g", {"id": "0mv-5"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s8"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "1mv-5"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s8"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "xmv-5"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s8"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}],
["path", {"d": "M 0,5 2,3", "class": "s2"}], ["path", {"d": "M 0,10 3,7", "class": "s2"}], ["path", {"d": "M 0,15 4,11", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "dmv-5"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s8"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "umv-5"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s8"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "zmv-5"}, ["path", {"d": "M 5,0 10,0 10,20 5,20 1,10 z", "class": "s8"}],
["path", {"d": "m 1,10 4,10 5,0", "class": "s1"}], ["path", {"d": "M 0,10 1,10 5,0 10,0", "class": "s1"}]],
["g", {"id": "vvv-4"}, ["path", {"d": "M 10,20 0,20 0,0 10,0", "class": "s7"}],
["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vm0-4"}, ["path", {"d": "M 0,20 0,0 1,0 7,20", "class": "s7"}],
["path", {"d": "M 0,0 1,0 7,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "vm1-4"}, ["path", {"d": "M 0,0 0,20 1,20 7,0", "class": "s7"}],
["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0", "class": "s1"}]],
["g", {"id": "vmx-4"}, ["path", {"d": "M 0,0 0,20 1,20 4,10 1,0", "class": "s7"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}],
["path", {"d": "M 10,15 6.5,18.5", "class": "s2"}], ["path", {"d": "M 10,10 5.5,14.5", "class": "s2"}], ["path", {"d": "M 10,5 4,11", "class": "s2"}], ["path", {"d": "M 10,0 6,4", "class": "s2"}]],
["g", {"id": "vmd-4"}, ["path", {"d": "m 0,0 0,20 10,0 C 5,20 2,7 1,0", "class": "s7"}],
["path", {"d": "m 0,0 1,0 c 1,7 4,20 9,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "vmu-4"}, ["path", {"d": "m 0,0 0,20 1,0 C 2,13 5,0 10,0", "class": "s7"}],
["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vmz-4"}, ["path", {"d": "M 0,0 1,0 C 3,6 7,10 10,10 7,10 3,14 1,20 L 0,20", "class": "s7"}], ["path",
{"d": "m 0,0 1,0 c 2,6 6,10 9,10", "class": "s1"}], ["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}]],
["g", {"id": "vvv-5"}, ["path", {"d": "M 10,20 0,20 0,0 10,0", "class": "s8"}],
["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vm0-5"}, ["path", {"d": "M 0,20 0,0 1,0 7,20", "class": "s8"}],
["path", {"d": "M 0,0 1,0 7,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "vm1-5"}, ["path", {"d": "M 0,0 0,20 1,20 7,0", "class": "s8"}],
["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0", "class": "s1"}]],
["g", {"id": "vmx-5"}, ["path", {"d": "M 0,0 0,20 1,20 4,10 1,0", "class": "s8"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}],
["path", {"d": "M 10,15 6.5,18.5", "class": "s2"}], ["path", {"d": "M 10,10 5.5,14.5", "class": "s2"}], ["path", {"d": "M 10,5 4,11", "class": "s2"}], ["path", {"d": "M 10,0 6,4", "class": "s2"}]],
["g", {"id": "vmd-5"}, ["path", {"d": "m 0,0 0,20 10,0 C 5,20 2,7 1,0", "class": "s8"}],
["path", {"d": "m 0,0 1,0 c 1,7 4,20 9,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "vmu-5"}, ["path", {"d": "m 0,0 0,20 1,0 C 2,13 5,0 10,0", "class": "s8"}],
["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vmz-5"}, ["path", {"d": "M 0,0 1,0 C 3,6 7,10 10,10 7,10 3,14 1,20 L 0,20", "class": "s8"}], ["path",
{"d": "m 0,0 1,0 c 2,6 6,10 9,10", "class": "s1"}], ["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}]],
["g", {"id": "Pclk"}, ["path", {"d": "M -3,12 0,3 3,12 C 1,11 -1,11 -3,12 z", "class": "s9"}],
["path", {"d": "M 0,20 0,0 10,0", "class": "s1"}]],
["g", {"id": "Nclk"}, ["path", {"d": "M -3,8 0,17 3,8 C 1,9 -1,9 -3,8 z", "class": "s9"}],
["path", {"d": "m 0,0 0,20 10,0", "class": "s1"}]],
["g", {"id": "vvv-2"}, ["path", {"d": "M 10,20 0,20 0,0 10,0", "class": "s10"}],
["path", {"d": "m 0,20 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vm0-2"}, ["path", {"d": "m 0,20 0,-20 1.000687,-0.00391 5,20", "class": "s10"}], ["path",
{"d": "m 0,0 1.000687,-0.00391 6,20", "class": "s1"}], ["path", {"d": "m 0,20 10.000687,-0.0039", "class": "s1"}]],
["g", {"id": "vm1-2"}, ["path", {"d": "M 0,0 0,20 3,20 9,0", "class": "s10"}],
["path", {"d": "M 0,0 10,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0", "class": "s1"}]],
["g", {"id": "vmx-2"}, ["path", {"d": "M 0,0 0,20 1,20 4,10 1,0", "class": "s10"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}],
["path", {"d": "M 10,15 6.5,18.5", "class": "s2"}], ["path", {"d": "M 10,10 5.5,14.5", "class": "s2"}], ["path", {"d": "M 10,5 4,11", "class": "s2"}], ["path", {"d": "M 10,0 6,4", "class": "s2"}]],
["g", {"id": "vmd-2"}, ["path", {"d": "m 0,0 0,20 10,0 C 5,20 2,7 1,0", "class": "s10"}], ["path",
{"d": "m 0,0 1,0 c 1,7 4.0217106,19.565788 9,20", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "vmu-2"}, ["path", {"d": "m 0,0 0,20 1,0 C 2,13 5,0 10,0", "class": "s10"}],
["path", {"d": "m 0,20 1,0 C 2,13 5,0 10,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "vmz-2"}, ["path", {"d": "M 0,0 1,0 C 3,6 7,10 10,10 7,10 3,14 1,20 L 0,20", "class": "s10"}], ["path",
{"d": "m 0,0 1,0 c 2,6 6,10 9,10", "class": "s1"}], ["path", {"d": "m 0,20 1,0 C 3,14 7,10 10,10", "class": "s1"}]],
["g", {"id": "0mv-2"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s10"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "1mv-2"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s10"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "xmv-2"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s10"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}],
["path", {"d": "M 0,5 2,3", "class": "s2"}], ["path", {"d": "M 0,10 3,7", "class": "s2"}], ["path", {"d": "M 0,15 4,11", "class": "s2"}], ["path", {"d": "M 0,20 1,19", "class": "s2"}]],
["g", {"id": "dmv-2"}, ["path", {"d": "m 7,0 3,0 0,20 -9,0 z", "class": "s10"}],
["path", {"d": "M 1,20 7,0 10,0", "class": "s1"}], ["path", {"d": "m 0,20 10,0", "class": "s1"}]],
["g", {"id": "umv-2"}, ["path", {"d": "m 1,0 9,0 0,20 -3,0 z", "class": "s10"}],
["path", {"d": "m 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,0 10,0", "class": "s1"}]],
["g", {"id": "zmv-2"}, ["path", {"d": "M 5,0 10,0 10,20 5,20 1,10 z", "class": "s10"}],
["path", {"d": "m 1,10 4,10 5,0", "class": "s1"}], ["path", {"d": "M 0,10 1,10 5,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-3-2"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s10"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s6"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-4-2"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s10"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s7"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-5-2"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s10"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s8"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-2-3"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s6"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s10"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-2-4"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s7"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s10"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-2-5"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s8"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s10"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["g", {"id": "vmv-2-2"}, ["path", {"d": "M 7,0 10,0 10,20 7,20 4,10 z", "class": "s10"}], ["path", {"d": "M 1,0 0,0 0,20 1,20 4,10 z",
"class": "s10"}], ["path", {"d": "m 0,0 1,0 6,20 3,0", "class": "s1"}], ["path", {"d": "M 0,20 1,20 7,0 10,0", "class": "s1"}]],
["marker", {"id": "arrowhead", "style": "fill:#0041c4", "markerHeight": "7", "markerWidth": "10", "markerUnits": "strokeWidth",
"viewBox": "0 -4 11 8", "refX": "15", "refY": "0", "orient": "auto"}, ["path", {"d": "M0 -4 11 0 0 4z"}]],
["marker", {"id": "arrowtail", "style": "fill:#0041c4", "markerHeight": "7", "markerWidth": "10", "markerUnits": "strokeWidth", "viewBox": "-11 -4 11 8", "refX": "-15", "refY": "0", "orient": "auto"}, ["path", {"d": "M0 -4 -11 0 0 4z"}]]],
["g", {"id": "waves"}, ["g", {"id": "lanes"}], ["g", {"id": "groups"}]]]
|
mit
| -5,629,264,635,243,988,000 | 97.15575 | 319 | 0.296072 | false |
Justaphf/BitcoinUnlimited
|
qa/rpc-tests/electrum_transaction_get.py
|
1
|
3452
|
#!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Unlimited developers
"""
Tests the electrum call 'blockchain.transaction.get'
"""
from test_framework.util import assert_equal, p2p_port
from test_framework.test_framework import BitcoinTestFramework
from test_framework.loginit import logging
from test_framework.electrumutil import *
from test_framework.nodemessages import COIN, ToHex
from test_framework.blocktools import create_coinbase, create_block, \
create_transaction
from test_framework.txtools import pad_tx
from test_framework.mininode import (
P2PDataStore,
NodeConn,
NetworkThread,
)
from test_framework.script import CScript, OP_TRUE, OP_DROP, OP_NOP
import time
TX_GET = "blockchain.transaction.get"
class ElectrumTransactionGet(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [bitcoind_electrum_args()]
def bootstrap_p2p(self):
"""Add a P2P connection to the node.
Helper to connect and wait for version handshake."""
self.p2p = P2PDataStore()
self.connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.p2p)
self.p2p.add_connection(self.connection)
NetworkThread().start()
self.p2p.wait_for_verack()
assert(self.p2p.connection.state == "connected")
def mine_blocks(self, n, num_blocks, txns = None):
prev = n.getblockheader(n.getbestblockhash())
prev_height = prev['height']
prev_hash = prev['hash']
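# keep block times strictly increasing and no earlier than the present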
prev_time = max(prev['time'] + 1, int(time.time()))
blocks = [ ]
for i in range(num_blocks):
coinbase = create_coinbase(prev_height + 1)
b = create_block(
hashprev = prev_hash,
coinbase = coinbase,
txns = txns,
nTime = prev_time + 1)
txns = None
b.solve()
blocks.append(b)
prev_time = b.nTime
prev_height += 1
prev_hash = b.hash
self.p2p.send_blocks_and_test(blocks, n)
assert_equal(blocks[-1].hash, n.getbestblockhash())
# Return coinbases for spending later
return [b.vtx[0] for b in blocks]
def run_test(self):
n = self.nodes[0]
self.bootstrap_p2p()
coinbases = self.mine_blocks(n, 5)
self.client = create_electrum_connection()
# Test raw
for tx in coinbases:
assert_equal(ToHex(tx), self.client.call(TX_GET, tx.hash))
# Test verbose.
# The spec is unclear. It states:
#
# "whatever the coin daemon returns when asked for a
# verbose form of the raw transaction"
#
# Just check the basics.
for tx in coinbases:
electrum = self.client.call(TX_GET, tx.hash, True)
bitcoind = n.getrawtransaction(tx.hash, True)
assert_equal(bitcoind['txid'], electrum['txid'])
assert_equal(bitcoind['locktime'], electrum['locktime'])
assert_equal(bitcoind['size'], electrum['size'])
assert_equal(bitcoind['hex'], electrum['hex'])
assert_equal(len(bitcoind['vin']), len(electrum['vin']))
assert_equal(len(bitcoind['vout']), len(electrum['vout']))
if __name__ == '__main__':
ElectrumTransactionGet().main()
|
mit
| -8,309,527,667,063,251,000 | 32.514563 | 85 | 0.605736 | false |
jrkerns/pylinac
|
pylinac/ct.py
|
1
|
69530
|
"""The CatPhan module automatically analyzes DICOM images of a CatPhan 504, 503, or 600 acquired when doing CBCT or CT quality assurance.
It can load a folder or zip file that the images are in and automatically correct for translational and rotational errors.
It can analyze the HU regions and image scaling (CTP404), the high-contrast line pairs (CTP528) to calculate the modulation transfer function (MTF),
the HU uniformity (CTP486), and Low Contrast (CTP515) on the corresponding slices.
Features:
* **Automatic phantom registration** - Your phantom can be tilted, rotated, or translated--pylinac will automatically register the phantom.
* **Automatic testing of all major modules** - Major modules are automatically registered and analyzed.
* **Any scan protocol** - Scan your CatPhan with any protocol; even scan it in a regular CT scanner.
Any field size or field extent is allowed.
"""
import dataclasses
import io
import os
import webbrowser
import zipfile
from dataclasses import dataclass
from datetime import datetime
from os import path as osp
from typing import Optional, Union, Dict, Tuple, Sequence, List, BinaryIO, Type
import argue
import matplotlib.pyplot as plt
import numpy as np
from cached_property import cached_property
from py_linq import Enumerable
from scipy import ndimage
from skimage import filters, measure, segmentation
from .core import image, pdf
from .core.geometry import Point, Line
from .core.image import DicomImageStack, ArrayImage
from .core.io import TemporaryZipDirectory, get_url, retrieve_demo_file
from .core.mtf import MTF
from .core.profile import CollapsedCircleProfile, SingleProfile, Interpolation
from .core.roi import DiskROI, RectangleROI, LowContrastDiskROI, Contrast
from .core.utilities import ResultBase
from .settings import get_dicom_cmap
AIR = -1000
PMP = -196
LDPE = -104
POLY = -47
ACRYLIC = 115
DELRIN = 365
TEFLON = 1000
BONE_20 = 237
BONE_50 = 725
@dataclass
class ROIResult:
"""This class should not be called directly. It is returned by the ``results_data()`` method.
It is a dataclass under the hood and thus comes with all the dunder magic.
Use the following attributes as normal class attributes."""
name: str #:
value: float #:
difference: float #:
nominal_value: float #:
passed: bool #:
@dataclass
class CTP404Result:
"""This class should not be called directly. It is returned by the ``results_data()`` method.
It is a dataclass under the hood and thus comes with all the dunder magic.
Use the following attributes as normal class attributes."""
offset: int #:
low_contrast_visibility: float #:
thickness_passed: bool #:
measured_slice_thickness_mm: float #:
thickness_num_slices_combined: int #:
geometry_passed: bool #:
avg_line_distance_mm: float #:
line_distances_mm: List[float] #:
hu_linearity_passed: bool #:
hu_tolerance: float #:
hu_rois: Dict[str, ROIResult] #:
@dataclass
class CTP486Result:
"""This class should not be called directly. It is returned by the ``results_data()`` method.
It is a dataclass under the hood and thus comes with all the dunder magic.
Use the following attributes as normal class attributes."""
uniformity_index: float #:
integral_non_uniformity: float #:
passed: bool #:
@dataclass
class CTP515Result:
"""This class should not be called directly. It is returned by the ``results_data()`` method.
It is a dataclass under the hood and thus comes with all the dunder magic.
Use the following attributes as normal class attributes."""
cnr_threshold: float #:
num_rois_seen: int #:
roi_settings: dict #:
@dataclass
class CTP528Result:
"""This class should not be called directly. It is returned by the ``results_data()`` method.
It is a dataclass under the hood and thus comes with all the dunder magic.
Use the following attributes as normal class attributes."""
start_angle_radians: float #:
mtf_lp_mm: dict #:
roi_settings: dict #:
@dataclass
class CatphanResult(ResultBase):
"""This class should not be called directly. It is returned by the ``results_data()`` method.
It is a dataclass under the hood and thus comes with all the dunder magic.
Use the following attributes as normal class attributes."""
catphan_model: str #:
catphan_roll_deg: float #:
origin_slice: int #:
num_images: int #:
ctp404: CTP404Result #:
ctp486: Optional[CTP486Result] = None #:
ctp528: Optional[CTP528Result] = None #:
ctp515: Optional[CTP515Result] = None #:
class HUDiskROI(DiskROI):
"""An HU ROI object. Represents a circular area measuring either HU sample (Air, Poly, ...)
or HU uniformity (bottom, left, ...).
"""
def __init__(self, array: Union[np.ndarray, ArrayImage], angle: float, roi_radius: float, dist_from_center: float,
phantom_center: Union[tuple, Point], nominal_value: Optional[float] = None,
tolerance: Optional[float] = None,
background_mean: Optional[float]=None, background_std: Optional[float]=None):
"""
Parameters
----------
nominal_value
The nominal pixel value of the HU ROI.
tolerance
The roi pixel value tolerance.
"""
super().__init__(array, angle, roi_radius, dist_from_center, phantom_center)
self.nominal_val = nominal_value
self.tolerance = tolerance
@property
def value_diff(self) -> float:
"""The difference in HU between measured and nominal."""
return self.pixel_value - self.nominal_val
@property
def passed(self) -> bool:
"""Boolean specifying if ROI pixel value was within tolerance of the nominal value."""
return abs(self.value_diff) <= self.tolerance
@property
def plot_color(self) -> str:
"""Return one of two colors depending on if ROI passed."""
return 'green' if self.passed else 'red'
class ThicknessROI(RectangleROI):
"""A rectangular ROI that measures the angled wire rod in the HU linearity slice which determines slice thickness."""
@cached_property
def long_profile(self) -> SingleProfile:
"""The profile along the axis perpendicular to ramped wire."""
img = image.load(self.pixel_array)
img.filter(size=1, kind='gaussian')
prof = SingleProfile(img.array.max(axis=np.argmin(img.shape)), interpolation=Interpolation.NONE)
return prof
@cached_property
def wire_fwhm(self) -> float:
"""The FWHM of the wire in pixels."""
return self.long_profile.fwxm_data(x=50)['width (exact)']
@property
def plot_color(self) -> str:
"""The plot color."""
return 'blue'
class Slice:
"""Base class for analyzing specific slices of a CBCT dicom set."""
@argue.options(combine_method=('mean', 'max'))
def __init__(self, catphan, slice_num: Optional[int]=None, combine: bool=True, combine_method: str='mean', num_slices: int=0):
"""
Parameters
----------
catphan : `~pylinac.cbct.CatPhanBase` instance.
slice_num : int
The slice number of the DICOM array desired. If None, will use the ``slice_num`` property of subclass.
combine : bool
If True, combines the slices +/- ``num_slices`` around the slice of interest to improve signal/noise.
combine_method : {'mean', 'max'}
How to combine the slices if ``combine`` is True.
num_slices : int
The number of slices on either side of the nominal slice to combine to improve signal/noise; only
applicable if ``combine`` is True.
"""
if slice_num is not None:
self.slice_num = slice_num
if combine:
array = combine_surrounding_slices(catphan.dicom_stack, self.slice_num, mode=combine_method, slices_plusminus=num_slices)
else:
array = catphan.dicom_stack[self.slice_num].array
self.image = image.load(array)
self.catphan_size = catphan.catphan_size
self.mm_per_pixel = catphan.mm_per_pixel
def __getitem__(self, item):
return self.image.array[item]
@cached_property
def phan_center(self) -> Point:
"""Determine the location of the center of the phantom.
The image is analyzed to see if:
1) the CatPhan is even in the image (if there were any ROIs detected)
2) an ROI is within the size criteria of the catphan
3) the ROI area that is filled compared to the bounding box area is close to that of a circle
Raises
------
ValueError
If any of the above conditions are not met.
"""
# convert the slice to binary and label ROIs
edges = filters.scharr(self.image.as_type(float))
if np.max(edges) < 0.1:
raise ValueError("Unable to locate Catphan")
larr, regionprops, num_roi = get_regions(self, fill_holes=True, threshold='mean')
# check that there is at least 1 ROI
if num_roi is None or num_roi < 1:
raise ValueError("Unable to locate the CatPhan")
catphan_region = sorted(regionprops, key=lambda x: np.abs(x.filled_area - self.catphan_size))[0]
if (self.catphan_size * 1.2 < catphan_region.filled_area) or (catphan_region.filled_area < self.catphan_size / 1.2):
raise ValueError("Unable to locate Catphan")
center_pixel = catphan_region.centroid
return Point(center_pixel[1], center_pixel[0])
class CatPhanModule(Slice):
"""Base class for a CTP module.
"""
combine_method: str = 'mean'
num_slices: int = 0
roi_settings: dict = {}
background_roi_settings: dict = {}
roi_dist_mm = float
roi_radius_mm = float
rois: dict = {} # dicts of HUDiskROIs
background_rois: dict = {} # dict of HUDiskROIs; possibly empty
def __init__(self, catphan, tolerance: Optional[float], offset: int = 0):
self.model = ''
self._offset = offset
self.origin_slice = catphan.origin_slice
self.tolerance = tolerance
self.slice_thickness = catphan.dicom_stack.metadata.SliceThickness
self.catphan_roll = catphan.catphan_roll
self.mm_per_pixel = catphan.mm_per_pixel
self.rois: Dict[str, HUDiskROI] = {}
self.background_rois: Dict[str, HUDiskROI] = {}
Slice.__init__(self, catphan, combine_method=self.combine_method, num_slices=self.num_slices)
self._convert_units_in_settings()
self.preprocess(catphan)
self._setup_rois()
def _convert_units_in_settings(self) -> None:
for roi, settings in self.roi_settings.items():
self.roi_settings[roi]['distance_pixels'] = settings['distance'] / self.mm_per_pixel
self.roi_settings[roi]['angle_corrected'] = settings['angle'] + self.catphan_roll
self.roi_settings[roi]['radius_pixels'] = settings['radius'] / self.mm_per_pixel
for roi, settings in self.background_roi_settings.items():
self.background_roi_settings[roi]['distance_pixels'] = settings['distance'] / self.mm_per_pixel
self.background_roi_settings[roi]['angle_corrected'] = settings['angle'] + self.catphan_roll
self.background_roi_settings[roi]['radius_pixels'] = settings['radius'] / self.mm_per_pixel
def preprocess(self, catphan):
"""A preprocessing step before analyzing the CTP module.
Parameters
----------
catphan : `~pylinac.cbct.CatPhanBase` instance.
"""
pass
@property
def slice_num(self) -> int:
"""The slice number of the spatial resolution module.
Returns
-------
float
"""
return int(self.origin_slice+round(self._offset/self.slice_thickness))
def _setup_rois(self) -> None:
for name, setting in self.background_roi_settings.items():
self.background_rois[name] = HUDiskROI(self.image, setting['angle_corrected'], setting['radius_pixels'], setting['distance_pixels'],
self.phan_center)
if self.background_rois:
background_mean = np.mean([roi.pixel_value for roi in self.background_rois.values()])
background_std = np.std([roi.pixel_value for roi in self.background_rois.values()])
else:
background_mean = None
background_std = None
for name, setting in self.roi_settings.items():
nominal_value = setting.get('value', 0)
self.rois[name] = HUDiskROI(self.image, setting['angle_corrected'], setting['radius_pixels'], setting['distance_pixels'],
self.phan_center, nominal_value, self.tolerance,
background_mean=background_mean, background_std=background_std)
# TODO: better define threshold
def plot_rois(self, axis: plt.Axes, threshold=None) -> None:
"""Plot the ROIs to the axis."""
for roi in self.rois.values():
if not threshold:
roi.plot2axes(axis, edgecolor=roi.plot_color)
else:
roi.plot2axes(axis, edgecolor=roi.plot_color_cnr)
for roi in self.background_rois.values():
roi.plot2axes(axis, edgecolor='blue')
@property
def roi_vals_as_str(self) -> str:
return ', '.join(f'{name}: {roi.pixel_value}' for name, roi in self.rois.items())
class CTP404CP504(CatPhanModule):
"""Class for analysis of the HU linearity, geometry, and slice thickness regions of the CTP404.
"""
attr_name = 'ctp404'
common_name = 'HU Linearity'
roi_dist_mm = 58.7
roi_radius_mm = 5
roi_settings = {
'Air': {'value': AIR, 'angle': -90, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'PMP': {'value': PMP, 'angle': -120, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'LDPE': {'value': LDPE, 'angle': 180, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Poly': {'value': POLY, 'angle': 120, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Acrylic': {'value': ACRYLIC, 'angle': 60, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Delrin': {'value': DELRIN, 'angle': 0, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Teflon': {'value': TEFLON, 'angle': -60, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
}
background_roi_settings = {
'1': {'angle': -30, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'2': {'angle': -150, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'3': {'angle': -210, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'4': {'angle': 30, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
}
# thickness
thickness_roi_height = 40
thickness_roi_width = 10
thickness_roi_distance_mm = 38
thickness_roi_settings = {
'Left': {'angle': 180, 'width': thickness_roi_width, 'height': thickness_roi_height, 'distance': thickness_roi_distance_mm},
'Bottom': {'angle': 90, 'width': thickness_roi_height, 'height': thickness_roi_width, 'distance': thickness_roi_distance_mm},
'Right': {'angle': 0, 'width': thickness_roi_width, 'height': thickness_roi_height, 'distance': thickness_roi_distance_mm},
'Top': {'angle': -90, 'width': thickness_roi_height, 'height': thickness_roi_width, 'distance': thickness_roi_distance_mm},
}
# geometry
geometry_roi_size_mm = 35
geometry_roi_settings = {
'Top-Horizontal': (0, 1),
'Bottom-Horizontal': (2, 3),
'Left-Vertical': (0, 2),
'Right-Vertical': (1, 3),
}
def __init__(self, catphan, offset: int, hu_tolerance: float, thickness_tolerance: float, scaling_tolerance: float):
"""
Parameters
----------
catphan : `~pylinac.cbct.CatPhanBase` instance.
offset : int
hu_tolerance : float
thickness_tolerance : float
scaling_tolerance : float
"""
self.mm_per_pixel = catphan.mm_per_pixel
self.hu_tolerance = hu_tolerance
self.thickness_tolerance = thickness_tolerance
self.scaling_tolerance = scaling_tolerance
self.thickness_rois = {}
self.lines = {}
super().__init__(catphan, tolerance=hu_tolerance, offset=offset)
def _convert_units_in_settings(self):
super()._convert_units_in_settings()
for roi, settings in self.thickness_roi_settings.items():
self.thickness_roi_settings[roi]['width_pixels'] = settings['width'] / self.mm_per_pixel
self.thickness_roi_settings[roi]['height_pixels'] = settings['height'] / self.mm_per_pixel
self.thickness_roi_settings[roi]['angle_corrected'] = settings['angle'] + self.catphan_roll
self.thickness_roi_settings[roi]['distance_pixels'] = settings['distance'] / self.mm_per_pixel
def preprocess(self, catphan) -> None:
# for the thickness analysis image, combine thin slices or just use one slice if slices are thick
if float(catphan.dicom_stack.metadata.SliceThickness) < 3.5:
self.pad = 1
else:
self.pad = 0
self.thickness_image = Slice(catphan, combine_method='mean', num_slices=self.num_slices+self.pad, slice_num=self.slice_num).image
def _setup_rois(self) -> None:
super()._setup_rois()
self._setup_thickness_rois()
self._setup_geometry_rois()
def _setup_thickness_rois(self) -> None:
for name, setting in self.thickness_roi_settings.items():
self.thickness_rois[name] = ThicknessROI(self.thickness_image, setting['width_pixels'],
setting['height_pixels'], setting['angle_corrected'],
setting['distance_pixels'], self.phan_center)
def _setup_geometry_rois(self) -> None:
boxsize = self.geometry_roi_size_mm / self.mm_per_pixel
xbounds = (int(self.phan_center.x-boxsize), int(self.phan_center.x+boxsize))
ybounds = (int(self.phan_center.y-boxsize), int(self.phan_center.y+boxsize))
geo_img = self.image[ybounds[0]:ybounds[1], xbounds[0]:xbounds[1]]
larr, regionprops, num_roi = get_regions(geo_img, fill_holes=True, clear_borders=False)
# check that the 4 geometric nodes were found; if extras were detected, keep the 4 largest
if num_roi < 4:
raise ValueError("Unable to locate the Geometric nodes")
elif num_roi > 4:
regionprops = sorted(regionprops, key=lambda x: x.filled_area, reverse=True)[:4]
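# sort the four nodes roughly row-major (row index weighted double) so they map to: top-left, top-right, bottom-left, bottom-right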
sorted_regions = sorted(regionprops, key=lambda x: (2*x.centroid[0]+x.centroid[1]))
centers = [Point(r.weighted_centroid[1]+xbounds[0], r.weighted_centroid[0]+ybounds[0]) for r in sorted_regions]
# setup the geometric lines
for name, order in self.geometry_roi_settings.items():
self.lines[name] = GeometricLine(centers[order[0]], centers[order[1]], self.mm_per_pixel, self.scaling_tolerance)
@property
def lcv(self) -> float:
"""The low-contrast visibility"""
return 2 * abs(self.rois['LDPE'].pixel_value - self.rois['Poly'].pixel_value) / (self.rois['LDPE'].std + self.rois['Poly'].std)
def plot_linearity(self, axis: Optional[plt.Axes]=None, plot_delta: bool=True) -> tuple:
"""Plot the HU linearity values to an axis.
Parameters
----------
axis : None, matplotlib.Axes
The axis to plot the values on. If None, will create a new figure.
plot_delta : bool
Whether to plot the actual measured HU values (False), or the difference from nominal (True).
"""
nominal_x_values = [roi.nominal_val for roi in self.rois.values()]
if axis is None:
fig, axis = plt.subplots()
if plot_delta:
values = [roi.value_diff for roi in self.rois.values()]
nominal_measurements = [0]*len(values)
ylabel = 'HU Delta'
else:
values = [roi.pixel_value for roi in self.rois.values()]
nominal_measurements = nominal_x_values
ylabel = 'Measured Values'
points = axis.plot(nominal_x_values, values, 'g+', markersize=15, mew=2)
axis.plot(nominal_x_values, nominal_measurements)
axis.plot(nominal_x_values, np.array(nominal_measurements) + self.hu_tolerance, 'r--')
axis.plot(nominal_x_values, np.array(nominal_measurements) - self.hu_tolerance, 'r--')
axis.margins(0.05)
axis.grid(True)
axis.set_xlabel("Nominal Values")
axis.set_ylabel(ylabel)
axis.set_title("HU linearity")
return points
@property
def passed_hu(self) -> bool:
"""Boolean specifying whether all the ROIs passed within tolerance."""
return all(roi.passed for roi in self.rois.values())
def plot_rois(self, axis: plt.Axes) -> None:
"""Plot the ROIs onto the image, as well as the background ROIs"""
# plot HU linearity ROIs
super().plot_rois(axis)
# plot thickness ROIs
for roi in self.thickness_rois.values():
roi.plot2axes(axis, edgecolor='blue')
# plot geometry lines
for line in self.lines.values():
line.plot2axes(axis, color=line.pass_fail_color)
@property
def passed_thickness(self) -> bool:
"""Whether the slice thickness was within tolerance from nominal."""
return self.slice_thickness-self.thickness_tolerance<self.meas_slice_thickness<self.slice_thickness+self.thickness_tolerance
@property
def meas_slice_thickness(self) -> float:
"""The average slice thickness for the 4 wire measurements in mm."""
return np.mean(sorted(roi.wire_fwhm*self.mm_per_pixel*0.42 for roi in self.thickness_rois.values()))/(1+2*self.pad)
@property
def avg_line_length(self) -> float:
return float(np.mean([line.length_mm for line in self.lines.values()]))
@property
def passed_geometry(self) -> bool:
"""Returns whether all the line lengths were within tolerance."""
return all(line.passed for line in self.lines.values())
class CTP404CP503(CTP404CP504):
"""Alias for namespace consistency"""
pass
class CTP404CP600(CTP404CP504):
roi_dist_mm = 58.7
roi_radius_mm = 5
roi_settings = {
'Air': {'value': AIR, 'angle': 90, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'PMP': {'value': PMP, 'angle': 60, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'LDPE': {'value': LDPE, 'angle': 0, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Poly': {'value': POLY, 'angle': -60, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Acrylic': {'value': ACRYLIC, 'angle': -120, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Delrin': {'value': DELRIN, 'angle': -180, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Teflon': {'value': TEFLON, 'angle': 120, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
}
class CTP404CP604(CTP404CP504):
roi_dist_mm = 58.7
roi_radius_mm = 5
roi_settings = {
'Air': {'value': AIR, 'angle': -90, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'PMP': {'value': PMP, 'angle': -120, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'50% Bone': {'value': BONE_50, 'angle': -150, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'LDPE': {'value': LDPE, 'angle': 180, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Poly': {'value': POLY, 'angle': 120, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Acrylic': {'value': ACRYLIC, 'angle': 60, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'20% Bone': {'value': BONE_20, 'angle': 30, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Delrin': {'value': DELRIN, 'angle': 0, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Teflon': {'value': TEFLON, 'angle': -60, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
}
background_roi_settings = {
'1': {'angle': -30, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'2': {'angle': -210, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
}
class CTP486(CatPhanModule):
"""Class for analysis of the Uniformity slice of the CTP module. Measures 5 ROIs around the slice that
should all be close to the same value.
"""
attr_name = 'ctp486'
common_name = 'HU Uniformity'
roi_dist_mm = 53
roi_radius_mm = 10
nominal_value = 0
roi_settings = {
'Top': {'value': nominal_value, 'angle': -90, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Right': {'value': nominal_value, 'angle': 0, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Bottom': {'value': nominal_value, 'angle': 90, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Left': {'value': nominal_value, 'angle': 180, 'distance': roi_dist_mm, 'radius': roi_radius_mm},
'Center': {'value': nominal_value, 'angle': 0, 'distance': 0, 'radius': roi_radius_mm},
}
def plot_profiles(self, axis: plt.Axes=None) -> None:
"""Plot the horizontal and vertical profiles of the Uniformity slice.
Parameters
----------
axis : None, matplotlib.Axes
The axis to plot on; if None, will create a new figure.
"""
if axis is None:
fig, axis = plt.subplots()
horiz_data = self.image[int(self.phan_center.y), :]
vert_data = self.image[:, int(self.phan_center.x)]
axis.plot(horiz_data, 'g', label='Horizontal')
axis.plot(vert_data, 'b', label='Vertical')
axis.autoscale(tight=True)
axis.axhline(self.tolerance, color='r', linewidth=3)
axis.axhline(-self.tolerance, color='r', linewidth=3)
axis.grid(True)
axis.set_ylabel("HU")
axis.legend(loc=8, fontsize='small', title="")
axis.set_title("Uniformity Profiles")
@property
def overall_passed(self) -> bool:
"""Boolean specifying whether all the ROIs passed within tolerance."""
return all(roi.passed for roi in self.rois.values())
@property
def uniformity_index(self) -> float:
"""The Uniformity Index"""
center = self.rois['Center']
uis = [100*((roi.pixel_value-center.pixel_value)/(center.pixel_value+1000)) for roi in self.rois.values()]
abs_uis = np.abs(uis)
return uis[np.argmax(abs_uis)]
@property
def integral_non_uniformity(self) -> float:
"""The Integral Non-Uniformity"""
maxhu = max(roi.pixel_value for roi in self.rois.values())
minhu = min(roi.pixel_value for roi in self.rois.values())
return (maxhu - minhu)/(maxhu + minhu + 2000)
class CTP528CP504(CatPhanModule):
"""Class for analysis of the Spatial Resolution slice of the CBCT dicom data set.
A collapsed circle profile is taken of the line-pair region. This profile is searched for
peaks and valleys. The MTF is calculated from those peaks & valleys.
Attributes
----------
radius2linepairs_mm : float
The radius in mm to the line pairs.
"""
attr_name: str = 'ctp528'
common_name: str = 'Spatial Resolution'
radius2linepairs_mm = 47
combine_method: str = 'max'
num_slices: int = 3
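# fractional start/end positions of each line-pair region along the collapsed circle profile (used as the
# 'start'/'end' search regions in roi_settings below)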
boundaries: Tuple[float, ...] = (0, 0.107, 0.173, 0.236, 0.286, 0.335, 0.387, 0.434, 0.479)
start_angle: float = np.pi
ccw: bool = True
roi_settings = {
'region 1': {'start': boundaries[0], 'end': boundaries[1], 'num peaks': 2, 'num valleys': 1,
'peak spacing': 0.021, 'gap size (cm)': 0.5, 'lp/mm': 0.1},
'region 2': {'start': boundaries[1], 'end': boundaries[2], 'num peaks': 3, 'num valleys': 2,
'peak spacing': 0.01, 'gap size (cm)': 0.25, 'lp/mm': 0.2},
'region 3': {'start': boundaries[2], 'end': boundaries[3], 'num peaks': 4, 'num valleys': 3,
'peak spacing': 0.006, 'gap size (cm)': 0.167, 'lp/mm': 0.3},
'region 4': {'start': boundaries[3], 'end': boundaries[4], 'num peaks': 4, 'num valleys': 3,
'peak spacing': 0.00557, 'gap size (cm)': 0.125, 'lp/mm': 0.4},
'region 5': {'start': boundaries[4], 'end': boundaries[5], 'num peaks': 4, 'num valleys': 3,
'peak spacing': 0.004777, 'gap size (cm)': 0.1, 'lp/mm': 0.5},
'region 6': {'start': boundaries[5], 'end': boundaries[6], 'num peaks': 5, 'num valleys': 4,
'peak spacing': 0.00398, 'gap size (cm)': 0.083, 'lp/mm': 0.6},
'region 7': {'start': boundaries[6], 'end': boundaries[7], 'num peaks': 5, 'num valleys': 4,
'peak spacing': 0.00358, 'gap size (cm)': 0.071, 'lp/mm': 0.7},
'region 8': {'start': boundaries[7], 'end': boundaries[8], 'num peaks': 5, 'num valleys': 4,
'peak spacing': 0.0027866, 'gap size (cm)': 0.063, 'lp/mm': 0.8},
}
def _setup_rois(self):
pass
def _convert_units_in_settings(self):
pass
@cached_property
def mtf(self) -> MTF:
"""The Relative MTF of the line pairs, normalized to the first region.
Returns
-------
MTF
"""
maxs = list()
mins = list()
for key, value in self.roi_settings.items():
max_indices, max_values = self.circle_profile.find_peaks(min_distance=value['peak spacing'], max_number=value['num peaks'],
search_region=(value['start'], value['end']))
# check that the right number of peaks were found before continuing, otherwise stop searching for regions
if len(max_values) != value['num peaks']:
break
maxs.append(max_values.mean())
_, min_values = self.circle_profile.find_valleys(min_distance=value['peak spacing'], max_number=value['num valleys'],
search_region=(min(max_indices), max(max_indices)))
mins.append(min_values.mean())
if not maxs:
raise ValueError("Did not find any spatial resolution pairs to analyze. File an issue on github (https://github.com/jrkerns/pylinac/issues) if this is a valid dataset.")
spacings = [roi['lp/mm'] for roi in self.roi_settings.values()]
mtf = MTF(lp_spacings=spacings, lp_maximums=maxs, lp_minimums=mins)
return mtf
@property
def radius2linepairs(self) -> float:
"""Radius from the phantom center to the line-pair region, corrected for pixel spacing."""
return self.radius2linepairs_mm / self.mm_per_pixel
def plot_rois(self, axis: plt.Axes) -> None:
"""Plot the circles where the profile was taken within."""
self.circle_profile.plot2axes(axis, edgecolor='blue', plot_peaks=False)
@cached_property
def circle_profile(self) -> CollapsedCircleProfile:
"""Calculate the median profile of the Line Pair region.
Returns
-------
:class:`pylinac.core.profile.CollapsedCircleProfile` : A 1D profile of the Line Pair region.
"""
circle_profile = CollapsedCircleProfile(self.phan_center, self.radius2linepairs, image_array=self.image,
start_angle=self.start_angle + np.deg2rad(self.catphan_roll),
width_ratio=0.04, sampling_ratio=2, ccw=self.ccw)
circle_profile.filter(0.001, kind='gaussian')
circle_profile.ground()
return circle_profile
def plot_mtf(self, axis: Optional[plt.Axes] = None) -> Tuple:
"""Plot the Relative MTF.
Parameters
----------
axis : None, matplotlib.Axes
The axis to plot the MTF on. If None, will create a new figure.
"""
if axis is None:
fig, axis = plt.subplots()
points = axis.plot(list(self.mtf.norm_mtfs.keys()), list(self.mtf.norm_mtfs.values()), marker='o')
axis.margins(0.05)
axis.grid(True)
axis.set_xlabel('Line pairs / mm')
axis.set_ylabel("Relative MTF")
axis.set_title('RMTF')
return points
class CTP528CP604(CTP528CP504):
"""Alias for namespace consistency."""
pass
class CTP528CP600(CTP528CP504):
start_angle = np.pi - 0.1
ccw = False
boundaries = (0, 0.116, 0.182, 0.244, 0.294, 0.344, 0.396, 0.443, 0.488)
class CTP528CP503(CTP528CP504):
start_angle = 0
ccw = False
boundaries = (0, 0.111, 0.176, 0.240, 0.289, 0.339, 0.390, 0.436, 0.481)
class GeometricLine(Line):
"""Represents a line connecting two nodes/ROIs on the Geometry Slice.
Attributes
----------
nominal_length_mm : int, float
The nominal distance between the geometric nodes, in mm.
"""
nominal_length_mm: Union[float, int] = 50
def __init__(self, geo_roi1: Point, geo_roi2: Point, mm_per_pixel: float, tolerance: Union[int, float]):
"""
Parameters
----------
geo_roi1 : GEO_ROI
One of two ROIs representing one end of the line.
geo_roi2 : GEO_ROI
The other ROI which is the other end of the line.
mm_per_pixel : float
The mm/pixel value.
tolerance : int, float
The tolerance of the geometric line, in mm.
"""
super().__init__(geo_roi1, geo_roi2)
self.mm_per_pixel = mm_per_pixel
self.tolerance = tolerance
@property
def passed(self) -> bool:
"""Whether the line passed tolerance."""
return self.nominal_length_mm - self.tolerance < self.length_mm < self.nominal_length_mm + self.tolerance
@property
def pass_fail_color(self) -> str:
"""Plot color for the line, based on pass/fail status."""
return 'blue' if self.passed else 'red'
@property
def length_mm(self) -> float:
"""Return the length of the line in mm."""
return self.length*self.mm_per_pixel
class CTP515(CatPhanModule):
"""Class for analysis of the low contrast slice of the CTP module. Low contrast is measured by obtaining
the average pixel value of the contrast ROIs and comparing that value to the average background value. To obtain
a more "human" detection level, the contrast (which is largely the same across different-sized ROIs) is multiplied
by the diameter. This value is compared to the contrast threshold to decide if it can be "seen".
"""
attr_name = 'ctp515'
common_name = 'Low Contrast'
num_slices = 1
roi_dist_mm = 50
roi_radius_mm = [6, 3.5, 3, 2.5, 2, 1.5]
roi_angles = [-87.4, -69.1, -52.7, -38.5, -25.1, -12.9]
roi_settings = {
'15': {'angle': roi_angles[0], 'distance': roi_dist_mm, 'radius': roi_radius_mm[0]},
'9': {'angle': roi_angles[1], 'distance': roi_dist_mm, 'radius': roi_radius_mm[1]},
'8': {'angle': roi_angles[2], 'distance': roi_dist_mm, 'radius': roi_radius_mm[2]},
'7': {'angle': roi_angles[3], 'distance': roi_dist_mm, 'radius': roi_radius_mm[3]},
'6': {'angle': roi_angles[4], 'distance': roi_dist_mm, 'radius': roi_radius_mm[4]},
'5': {'angle': roi_angles[5], 'distance': roi_dist_mm, 'radius': roi_radius_mm[5]},
}
background_roi_dist_ratio = 0.75
background_roi_radius_mm = 4
WINDOW_SIZE = 50
def __init__(self, catphan, tolerance: float, cnr_threshold: float, offset: int, contrast_method: Contrast, visibility_threshold: float):
self.cnr_threshold = cnr_threshold
self.contrast_method = contrast_method
self.visibility_threshold = visibility_threshold
super().__init__(catphan, tolerance=tolerance, offset=offset)
def _setup_rois(self):
# create both background rois dynamically, then create the actual sample ROI as normal
for name, setting in self.roi_settings.items():
self.background_rois[name+'-outer'] = LowContrastDiskROI(self.image, setting['angle_corrected'],
self.background_roi_radius_mm / self.mm_per_pixel,
setting['distance_pixels'] * (2-self.background_roi_dist_ratio),
self.phan_center)
self.background_rois[name+'-inner'] = LowContrastDiskROI(self.image, setting['angle_corrected'],
self.background_roi_radius_mm / self.mm_per_pixel,
setting['distance_pixels'] * self.background_roi_dist_ratio,
self.phan_center)
background_val = float(np.mean([self.background_rois[name+'-outer'].pixel_value, self.background_rois[name+'-inner'].pixel_value]))
self.rois[name] = LowContrastDiskROI(self.image, setting['angle_corrected'], setting['radius_pixels'], setting['distance_pixels'],
self.phan_center, contrast_reference=background_val, cnr_threshold=self.cnr_threshold,
contrast_method=self.contrast_method, visibility_threshold=self.visibility_threshold)
@property
def rois_visible(self) -> int:
"""The number of ROIs "visible"."""
return sum(roi.passed_visibility for roi in self.rois.values())
@property
def lower_window(self) -> float:
"""Lower bound of CT window/leveling to show on the plotted image. Improves apparent contrast."""
return Enumerable(self.background_rois.values()).min(lambda r: r.pixel_value) - self.WINDOW_SIZE
@property
def upper_window(self) -> float:
"""Upper bound of CT window/leveling to show on the plotted image. Improves apparent contrast"""
return Enumerable(self.rois.values()).max(lambda r: r.pixel_value) + self.WINDOW_SIZE
class CTP515CP600(CTP515):
roi_angles = [-87.4+180, -69.1+180, -52.7+180, -38.5+180, -25.1+180, -12.9+180]
roi_dist_mm = 50
roi_radius_mm = [6, 3.5, 3, 2.5, 2, 1.5]
roi_settings = {
'15': {'angle': roi_angles[0], 'distance': roi_dist_mm, 'radius': roi_radius_mm[0]},
'9': {'angle': roi_angles[1], 'distance': roi_dist_mm, 'radius': roi_radius_mm[1]},
'8': {'angle': roi_angles[2], 'distance': roi_dist_mm, 'radius': roi_radius_mm[2]},
'7': {'angle': roi_angles[3], 'distance': roi_dist_mm, 'radius': roi_radius_mm[3]},
'6': {'angle': roi_angles[4], 'distance': roi_dist_mm, 'radius': roi_radius_mm[4]},
'5': {'angle': roi_angles[5], 'distance': roi_dist_mm, 'radius': roi_radius_mm[5]},
}
class CatPhanBase:
"""A class for loading and analyzing CT DICOM files of a CatPhan 504 & CatPhan 503. Can be from a CBCT or CT scanner
Analyzes: Uniformity (CTP486), High-Contrast Spatial Resolution (CTP528), Image Scaling & HU Linearity (CTP404).
"""
_demo_url: str = ''
_model: str = ''
air_bubble_radius_mm: Union[int, float] = 7
localization_radius: Union[int, float] = 59
was_from_zip: bool = False
def __init__(self, folderpath: str, check_uid: bool=True):
"""
Parameters
----------
folderpath : str
String that points to the CBCT image folder location.
check_uid : bool
Whether to enforce raising an error if more than one UID is found in the dataset.
Raises
------
NotADirectoryError
If folder str passed is not a valid directory.
FileNotFoundError
If no CT images are found in the folder
"""
self.origin_slice = 0
self.catphan_roll = 0
if not osp.isdir(folderpath):
raise NotADirectoryError("Path given was not a Directory/Folder")
self.dicom_stack = image.DicomImageStack(folderpath, check_uid=check_uid)
self.localize()
@classmethod
def from_demo_images(cls):
"""Construct a CBCT object from the demo images."""
demo_file = retrieve_demo_file(url=cls._demo_url)
return cls.from_zip(demo_file)
@classmethod
def from_url(cls, url: str, check_uid: bool=True):
"""Instantiate a CBCT object from a URL pointing to a .zip object.
Parameters
----------
url : str
URL pointing to a zip archive of CBCT images.
check_uid : bool
Whether to enforce raising an error if more than one UID is found in the dataset.
"""
filename = get_url(url)
return cls.from_zip(filename, check_uid=check_uid)
@classmethod
def from_zip(cls, zip_file: Union[str, zipfile.ZipFile, BinaryIO], check_uid: bool=True):
"""Construct a CBCT object and pass the zip file.
Parameters
----------
zip_file : str, ZipFile
Path to the zip file or a ZipFile object.
check_uid : bool
Whether to enforce raising an error if more than one UID is found in the dataset.
Raises
------
FileExistsError : If zip_file passed was not a legitimate zip file.
FileNotFoundError : If no CT images are found in the folder
"""
with TemporaryZipDirectory(zip_file) as temp_zip:
obj = cls(temp_zip, check_uid=check_uid)
obj.was_from_zip = True
return obj
def plot_analyzed_image(self, show: bool=True) -> None:
"""Plot the images used in the calculate and summary data.
Parameters
----------
show : bool
Whether to plot the image or not.
"""
def plot(ctp_module, axis, vmin=None, vmax=None):
axis.imshow(ctp_module.image.array, cmap=get_dicom_cmap(), vmin=vmin, vmax=vmax)
ctp_module.plot_rois(axis)
axis.autoscale(tight=True)
axis.set_title(ctp_module.common_name)
axis.axis('off')
# set up grid and axes
grid_size = (2, 4)
hu_ax = plt.subplot2grid(grid_size, (0, 1))
plot(self.ctp404, hu_ax)
hu_lin_ax = plt.subplot2grid(grid_size, (0, 2))
self.ctp404.plot_linearity(hu_lin_ax)
if self._has_module(CTP486):
unif_ax = plt.subplot2grid(grid_size, (0, 0))
plot(self.ctp486, unif_ax)
unif_prof_ax = plt.subplot2grid(grid_size, (1, 2), colspan=2)
self.ctp486.plot_profiles(unif_prof_ax)
if self._has_module(CTP528CP504):
sr_ax = plt.subplot2grid(grid_size, (1, 0))
plot(self.ctp528, sr_ax)
mtf_ax = plt.subplot2grid(grid_size, (0, 3))
self.ctp528.plot_mtf(mtf_ax)
if self._has_module(CTP515):
locon_ax = plt.subplot2grid(grid_size, (1, 1))
plot(self.ctp515, locon_ax, vmin=self.ctp515.lower_window, vmax=self.ctp515.upper_window)
# finish up
plt.tight_layout()
if show:
plt.show()
def save_analyzed_image(self, filename: str, **kwargs) -> None:
"""Save the analyzed summary plot.
Parameters
----------
filename : str, file object
The name of the file to save the image to.
kwargs :
Any valid matplotlib kwargs.
"""
self.plot_analyzed_image(show=False)
plt.savefig(filename, **kwargs)
def plot_analyzed_subimage(self, subimage: str='hu', delta: bool=True, show: bool=True) -> None:
"""Plot a specific component of the CBCT analysis.
Parameters
----------
subimage : {'hu', 'un', 'sp', 'lc', 'mtf', 'lin', 'prof'}
The subcomponent to plot. Values must contain one of the following letter combinations.
E.g. ``linearity``, ``linear``, and ``lin`` will all draw the HU linearity values.
* ``hu`` draws the HU linearity image.
* ``un`` draws the HU uniformity image.
* ``sp`` draws the Spatial Resolution image.
* ``mtf`` draws the RMTF plot.
* ``lin`` draws the HU linearity values. Used with ``delta``.
* ``prof`` draws the HU uniformity profiles.
delta : bool
Only for use with ``lin``. Whether to plot the HU delta or actual values.
show : bool
Whether to actually show the plot.
"""
subimage = subimage.lower()
plt.clf()
plt.axis('off')
if 'hu' in subimage: # HU, GEO & thickness objects
plt.imshow(self.ctp404.image.array, cmap=get_dicom_cmap())
self.ctp404.plot_rois(plt.gca())
plt.autoscale(tight=True)
elif 'un' in subimage: # uniformity
plt.imshow(self.ctp486.image.array, cmap=get_dicom_cmap())
self.ctp486.plot_rois(plt.gca())
plt.autoscale(tight=True)
elif 'sp' in subimage: # SR objects
plt.imshow(self.ctp528.image.array, cmap=get_dicom_cmap())
self.ctp528.plot_rois(plt.gca())
plt.autoscale(tight=True)
elif 'mtf' in subimage:
plt.axis('on')
self.ctp528.plot_mtf(plt.gca())
elif 'lc' in subimage:
plt.imshow(self.ctp515.image.array, cmap=get_dicom_cmap(), vmin=self.ctp515.lower_window, vmax=self.ctp515.upper_window)
self.ctp515.plot_rois(plt.gca())
plt.autoscale(tight=True)
elif 'lin' in subimage:
plt.axis('on')
self.ctp404.plot_linearity(plt.gca(), delta)
elif 'prof' in subimage:
plt.axis('on')
self.ctp486.plot_profiles(plt.gca())
else:
raise ValueError(f"Subimage parameter {subimage} not understood")
if show:
plt.show()
def save_analyzed_subimage(self, filename: Union[str, BinaryIO], subimage: str='hu', **kwargs):
"""Save a component image to file.
Parameters
----------
filename : str, file object
The file to write the image to.
subimage : str
See :meth:`~pylinac.cbct.CBCT.plot_analyzed_subimage` for parameter info.
"""
self.plot_analyzed_subimage(subimage, show=False)
plt.savefig(filename, **kwargs)
if isinstance(filename, str):
print(f"CatPhan subimage figure saved to {osp.abspath(filename)}")
def _results(self) -> None:
"""Helper function to spit out values that will be tested."""
print(self.results())
print(f"Phantom roll: {self.catphan_roll}")
print(f"Origin slice: {self.origin_slice}")
mtfs = {}
for mtf in (95, 90, 80, 50, 30):
mtfval = self.ctp528.mtf.relative_resolution(mtf)
mtfs[mtf] = mtfval
print(f'MTFs: {mtfs}')
def localize(self) -> None:
"""Find the slice number of the catphan's HU linearity module and roll angle"""
self.origin_slice = self.find_origin_slice()
self.catphan_roll = self.find_phantom_roll()
@property
def mm_per_pixel(self) -> float:
"""The millimeters per pixel of the DICOM images."""
return self.dicom_stack.metadata.PixelSpacing[0]
def find_origin_slice(self) -> int:
"""Using a brute force search of the images, find the median HU linearity slice.
This method walks through all the images and takes a collapsed circle profile where the HU
linearity ROIs are. If the profile contains both low (more than 400 HU below the median) and high (more than 400 HU
above the median) values while the bulk of the values cluster tightly (i.e. the slice is not an artifact), it is
assumed to be part of the HU linearity module. The median of all applicable slices is the
center of the HU module.
Returns
-------
int
The middle slice of the HU linearity module.
"""
hu_slices = []
for image_number in range(0, self.num_images, 2):
slice = Slice(self, image_number, combine=False)
try:
center = slice.phan_center
except ValueError: # a slice without the phantom in view
pass
else:
circle_prof = CollapsedCircleProfile(center, radius=self.localization_radius/self.mm_per_pixel, image_array=slice.image, width_ratio=0.05, num_profiles=5)
prof = circle_prof.values
# determine if the profile contains both low and high values and that most values are the same
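# a true HU-module slice is mostly uniform background plus discrete plugs, so the 20th-80th percentile spread
# should be small; thinner slices are noisier, hence the thickness-dependent limit below (floored at 100 HU)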
low_end, high_end = np.percentile(prof, [2, 98])
median = np.median(prof)
middle_variation = np.percentile(prof, 80) - np.percentile(prof, 20)
variation_limit = max(100, self.dicom_stack.metadata.SliceThickness*-100+300)
if (low_end < median - 400) and (high_end > median + 400) and (middle_variation < variation_limit):
hu_slices.append(image_number)
if not hu_slices:
raise ValueError("No slices were found that resembled the HU linearity module")
hu_slices = np.array(hu_slices)
c = int(round(float(np.median(hu_slices))))
ln = len(hu_slices)
# drop slices that are far from the median
hu_slices = hu_slices[((c + ln/2) >= hu_slices) & (hu_slices >= (c - ln/2))]
center_hu_slice = int(round(float(np.median(hu_slices))))
if self._is_within_image_extent(center_hu_slice):
return center_hu_slice
def find_phantom_roll(self) -> float:
"""Determine the "roll" of the phantom.
This algorithm uses the two air bubbles in the HU slice and the resulting angle between them.
Returns
-------
float : the angle of the phantom in **degrees**.
"""
def is_right_area(region):
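# accept regions whose filled area is within a factor of two of the nominal air-bubble area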
thresh = np.pi * ((self.air_bubble_radius_mm / self.mm_per_pixel) ** 2)
return thresh * 2 > region.filled_area > thresh / 2
def is_right_eccentricity(region):
return region.eccentricity < 0.5
# get edges and make ROIs from it
slice = Slice(self, self.origin_slice)
larr, regions, _ = get_regions(slice)
# find appropriate ROIs and grab the two most centrally positioned ones
hu_bubbles = [r for r in regions if (is_right_area(r) and is_right_eccentricity(r))]
central_bubbles = sorted(hu_bubbles, key=lambda x: abs(x.centroid[1] - slice.phan_center.x))[:2]
sorted_bubbles = sorted(central_bubbles, key=lambda x: x.centroid[0]) # top, bottom
y_dist = sorted_bubbles[1].centroid[0] - sorted_bubbles[0].centroid[0]
x_dist = sorted_bubbles[1].centroid[1] - sorted_bubbles[0].centroid[1]
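# angle of the bubble-to-bubble vector from the image x-axis; the bubbles are nominally vertical,
# so 90 degrees is subtracted to express the roll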
phan_roll = np.arctan2(y_dist, x_dist)
anglroll = np.rad2deg(phan_roll) - 90
return anglroll
@property
def num_images(self) -> int:
"""The number of images loaded."""
return len(self.dicom_stack)
def _is_within_image_extent(self, image_num: int) -> bool:
"""Determine if the image number is beyond the edges of the images (negative or past last image)."""
if self.num_images - 1 > image_num > 1:
return True
else:
raise ValueError("The determined image number is beyond the image extent. Either the entire dataset "
"wasn't loaded or the entire phantom wasn't scanned.")
@property
def catphan_size(self) -> float:
"""The expected size of the phantom in pixels, based on a 20cm wide phantom."""
phan_area = np.pi*(self.catphan_radius_mm**2)
return phan_area/(self.mm_per_pixel**2)
def publish_pdf(self, filename: str, notes: str = None, open_file: bool = False,
metadata: Optional[dict] = None) -> None:
"""Publish (print) a PDF containing the analysis and quantitative results.
Parameters
----------
filename : (str, file-like object}
The file to write the results to.
notes : str, list of strings
Text; if str, prints single line.
If list of strings, each list item is printed on its own line.
open_file : bool
Whether to open the file using the default program after creation.
metadata : dict
Extra data to be passed and shown in the PDF. The key and value will be shown with a colon.
E.g. passing {'Author': 'James', 'Unit': 'TrueBeam'} would result in text in the PDF like:
--------------
Author: James
Unit: TrueBeam
--------------
"""
analysis_title = f'CatPhan {self._model} Analysis'
module_texts = [
[' - CTP404 Results - ',
f'HU Linearity tolerance: {self.ctp404.hu_tolerance}',
f'HU Linearity ROIs: {self.ctp404.roi_vals_as_str}',
f'Geometric node spacing (mm): {self.ctp404.avg_line_length:2.2f}',
f'Slice thickness (mm): {self.ctp404.meas_slice_thickness:2.2f}',
f'Low contrast visibility: {self.ctp404.lcv:2.2f}',
],
]
module_images = [('hu', 'lin')]
if self._has_module(CTP528CP504):
add = [' - CTP528 Results - ',
f'MTF 80% (lp/mm): {self.ctp528.mtf.relative_resolution(80):2.2f}',
f'MTF 50% (lp/mm): {self.ctp528.mtf.relative_resolution(50):2.2f}',
f'MTF 30% (lp/mm): {self.ctp528.mtf.relative_resolution(30):2.2f}',
]
module_texts.append(add)
module_images.append(('sp', 'mtf'))
if self._has_module(CTP486):
add = [' - CTP486 Results - ',
f'Uniformity tolerance: {self.ctp486.tolerance}',
f'Uniformity ROIs: {self.ctp486.roi_vals_as_str}',
f'Uniformity Index: {self.ctp486.uniformity_index:2.2f}',
f'Integral non-uniformity: {self.ctp486.integral_non_uniformity:2.4f}',
]
module_texts.append(add)
module_images.append(('un', 'prof'))
if self._has_module(CTP515):
add = [' - CTP515 Results - ',
f'CNR threshold: {self.ctp515.cnr_threshold}',
f'Low contrast ROIs "seen": {self.ctp515.rois_visible}'
]
module_texts.append(add)
module_images.append(('lc', None))
self._publish_pdf(filename, metadata, notes, analysis_title,
module_texts, module_images)
if open_file:
webbrowser.open(filename)
def _publish_pdf(self, filename: str, metadata: Optional[dict], notes: str, analysis_title: str, texts: Sequence[str], imgs: Sequence[Tuple[str, str]]):
try:
date = datetime.strptime(self.dicom_stack[0].metadata.InstanceCreationDate, "%Y%m%d").strftime("%A, %B %d, %Y")
except (AttributeError, ValueError):
date = "Unknown"
canvas = pdf.PylinacCanvas(filename, page_title=analysis_title, metadata=metadata)
if notes is not None:
canvas.add_text(text="Notes:", location=(1, 4.5), font_size=14)
canvas.add_text(text=notes, location=(1, 4))
for page, ((img1, img2), text) in enumerate(zip(imgs, texts)):
for img, offset in zip((img1, img2), (12, 2)):
if img is not None:
data = io.BytesIO()
self.save_analyzed_subimage(data, img)
canvas.add_image(data, location=(4, offset), dimensions=(15, 10))
canvas.add_text(text=text, location=(1.5, 23))
canvas.add_new_page()
canvas.finish()
def _zip_images(self) -> None:
"""Compress the raw images into a ZIP archive and remove the uncompressed images."""
        zip_name = osp.join(osp.dirname(self.dicom_stack[0].path),
                            f'CBCT - {self.dicom_stack[0].date_created(format="%A, %I-%M-%S, %B %d, %Y")}.zip')
with zipfile.ZipFile(zip_name, 'w', compression=zipfile.ZIP_DEFLATED) as zfile:
for image in self.dicom_stack:
zfile.write(image.path, arcname=osp.basename(image.path))
for image in self.dicom_stack:
try:
os.remove(image.path)
            except OSError:  # file may already be gone or locked
                pass
def analyze(self, hu_tolerance: Union[int, float]=40, scaling_tolerance: Union[int, float]=1, thickness_tolerance: Union[int, float]=0.2,
low_contrast_tolerance: Union[int, float]=1, cnr_threshold: Union[int, float]=15, zip_after: bool=False,
contrast_method: Contrast = Contrast.MICHELSON, visibility_threshold: float = 0.1):
"""Single-method full analysis of CBCT DICOM files.
Parameters
----------
hu_tolerance : int
The HU tolerance value for both HU uniformity and linearity.
scaling_tolerance : float, int
The scaling tolerance in mm of the geometric nodes on the HU linearity slice (CTP404 module).
thickness_tolerance : float, int
The tolerance of the thickness calculation in mm, based on the wire ramps in the CTP404 module.
.. warning:: Thickness accuracy degrades with image noise; i.e. low mAs images are less accurate.
low_contrast_tolerance : int
The number of low-contrast bubbles needed to be "seen" to pass.
cnr_threshold : float, int
The threshold for "detecting" low-contrast image. See RTD for calculation info.
zip_after : bool
If the CT images were not compressed before analysis and this is set to true, pylinac will compress
the analyzed images into a ZIP archive.
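        contrast_method : Contrast
            The method (e.g. Michelson) used to compute contrast; passed to the CTP515 module.
        visibility_threshold : float
            The threshold used by the CTP515 module to decide whether a low-contrast ROI counts as "seen".
        Examples
        --------
        A typical full analysis (an illustrative sketch; assumes a ``from_zip``
        loader and a hypothetical archive path)::
            ct = CatPhan504.from_zip('head_scan.zip')
            ct.analyze(hu_tolerance=40, thickness_tolerance=0.2)
            print(ct.results())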
"""
ctp404, offset = self._get_module(CTP404CP504, raise_empty=True)
self.ctp404 = ctp404(self, offset=offset, hu_tolerance=hu_tolerance, thickness_tolerance=thickness_tolerance,
scaling_tolerance=scaling_tolerance)
if self._has_module(CTP486):
ctp486, offset = self._get_module(CTP486)
self.ctp486 = ctp486(self, offset=offset, tolerance=hu_tolerance)
if self._has_module(CTP528CP504):
ctp528, offset = self._get_module(CTP528CP504)
self.ctp528 = ctp528(self, offset=offset, tolerance=None)
if self._has_module(CTP515):
ctp515, offset = self._get_module(CTP515)
self.ctp515 = ctp515(self, tolerance=low_contrast_tolerance, cnr_threshold=cnr_threshold,
offset=offset, contrast_method=contrast_method, visibility_threshold=visibility_threshold)
if zip_after and not self.was_from_zip:
self._zip_images()
def _has_module(self, module_of_interest: Type[CatPhanModule]) -> bool:
return any(issubclass(module, module_of_interest) for module in self.modules.keys())
def _get_module(self, module_of_interest: Type[CatPhanModule], raise_empty: bool = False) -> Tuple[Type[CatPhanModule], int]:
"""Grab the module that is, or is a subclass of, the module of interest. This allows users to subclass a CTP module and pass that in."""
for module, values in self.modules.items():
if issubclass(module, module_of_interest):
return module, values['offset']
if raise_empty:
raise ValueError(f"Tried to find the {module_of_interest} or a subclass of it. Did you override `modules` and not pass this module in?")
def results(self) -> str:
"""Return the results of the analysis as a string. Use with print()."""
string = (f'\n - CatPhan {self._model} QA Test - \n'
f'HU Linearity ROIs: {self.ctp404.roi_vals_as_str}\n'
f'HU Passed?: {self.ctp404.passed_hu}\n'
f'Low contrast visibility: {self.ctp404.lcv:2.2f}\n'
f'Geometric Line Average (mm): {self.ctp404.avg_line_length:2.2f}\n'
f'Geometry Passed?: {self.ctp404.passed_geometry}\n'
f'Measured Slice Thickness (mm): {self.ctp404.meas_slice_thickness:2.3f}\n'
f'Slice Thickness Passed? {self.ctp404.passed_thickness}\n')
if self._has_module(CTP486):
add = (f'Uniformity ROIs: {self.ctp486.roi_vals_as_str}\n'
f'Uniformity index: {self.ctp486.uniformity_index:2.3f}\n'
f'Integral non-uniformity: {self.ctp486.integral_non_uniformity:2.4f}\n'
f'Uniformity Passed?: {self.ctp486.overall_passed}\n')
string += add
if self._has_module(CTP528CP504):
add = (f'MTF 50% (lp/mm): {self.ctp528.mtf.relative_resolution(50):2.2f}\n')
string += add
if self._has_module(CTP515):
add = (f'Low contrast ROIs "seen": {self.ctp515.rois_visible}\n')
string += add
return string
def results_data(self, as_dict=False) -> Union[CatphanResult, dict]:
"""Present the results data and metadata as a dataclass or dict.
The default return type is a dataclass."""
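        # e.g. ``data = ct.results_data(); data.ctp404.hu_tolerance`` or, as a
        # dict, ``ct.results_data(as_dict=True)['ctp404']['hu_tolerance']``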
hu_rois = {name: ROIResult(name=name,
value=roi.pixel_value,
difference=roi.value_diff,
nominal_value=roi.nominal_val,
passed=roi.passed
) for name, roi in self.ctp404.rois.items()}
ctp404_result = CTP404Result(
offset=self.ctp404._offset,
low_contrast_visibility=self.ctp404.lcv,
thickness_passed=self.ctp404.passed_thickness,
measured_slice_thickness_mm=self.ctp404.meas_slice_thickness,
thickness_num_slices_combined=self.ctp404.num_slices + self.ctp404.pad,
geometry_passed=self.ctp404.passed_geometry,
avg_line_distance_mm=self.ctp404.avg_line_length,
line_distances_mm=[l.length_mm for name, l in self.ctp404.lines.items()],
hu_linearity_passed=self.ctp404.passed_hu,
hu_tolerance=self.ctp404.hu_tolerance,
hu_rois=hu_rois
)
data = CatphanResult(
catphan_model=self._model,
catphan_roll_deg=self.catphan_roll,
origin_slice=self.origin_slice,
num_images=self.num_images,
ctp404=ctp404_result
)
# CTP 486 Uniformity stuff
if self._has_module(CTP486):
data.ctp486 = CTP486Result(
passed=self.ctp486.overall_passed,
uniformity_index=self.ctp486.uniformity_index,
integral_non_uniformity=self.ctp486.integral_non_uniformity,
)
# CTP 528 stuff
if self._has_module(CTP528CP504):
data.ctp528 = CTP528Result(
roi_settings=self.ctp528.roi_settings,
start_angle_radians=self.ctp528.start_angle,
mtf_lp_mm={p: self.ctp528.mtf.relative_resolution(p) for p in (80, 50, 30)}
)
# CTP 515 stuff
if self._has_module(CTP515):
data.ctp515 = CTP515Result(
cnr_threshold=self.ctp515.cnr_threshold,
num_rois_seen=self.ctp515.rois_visible,
roi_settings=self.ctp515.roi_settings
)
if as_dict:
return dataclasses.asdict(data)
return data
class CatPhan503(CatPhanBase):
"""A class for loading and analyzing CT DICOM files of a CatPhan 503.
Analyzes: Uniformity (CTP486), High-Contrast Spatial Resolution (CTP528), Image Scaling & HU Linearity (CTP404).
"""
_demo_url = 'CatPhan503.zip'
_model = '503'
catphan_radius_mm = 97
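    # module offsets below are distances (mm) from the CTP404 origin slice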
modules = {
CTP404CP503: {'offset': 0},
CTP486: {'offset': -110},
CTP528CP503: {'offset': -30},
}
@staticmethod
def run_demo(show: bool=True):
"""Run the CBCT demo using high-quality head protocol images."""
cbct = CatPhan503.from_demo_images()
cbct.analyze()
print(cbct.results())
cbct.plot_analyzed_image(show)
class CatPhan504(CatPhanBase):
"""A class for loading and analyzing CT DICOM files of a CatPhan 504. Can be from a CBCT or CT scanner
Analyzes: Uniformity (CTP486), High-Contrast Spatial Resolution (CTP528),
Image Scaling & HU Linearity (CTP404), and Low contrast (CTP515).
"""
_demo_url = 'CatPhan504.zip'
_model = '504'
catphan_radius_mm = 101
modules = {
CTP404CP504: {'offset': 0},
CTP486: {'offset': -65},
CTP528CP504: {'offset': 30},
CTP515: {'offset': -30}
}
@staticmethod
def run_demo(show: bool=True):
"""Run the CBCT demo using high-quality head protocol images."""
cbct = CatPhan504.from_demo_images()
cbct.analyze()
print(cbct.results())
cbct.plot_analyzed_image(show)
class CatPhan604(CatPhanBase):
"""A class for loading and analyzing CT DICOM files of a CatPhan 604. Can be from a CBCT or CT scanner
Analyzes: Uniformity (CTP486), High-Contrast Spatial Resolution (CTP528),
Image Scaling & HU Linearity (CTP404), and Low contrast (CTP515).
"""
_demo_url = 'CatPhan604.zip'
_model = '604'
catphan_radius_mm = 101
modules = {
CTP404CP604: {'offset': 0},
CTP486: {'offset': -80},
CTP528CP604: {'offset': 42},
CTP515: {'offset': -40}
}
@staticmethod
def run_demo(show: bool=True):
"""Run the CBCT demo using high-quality head protocol images."""
cbct = CatPhan604.from_demo_images()
cbct.analyze()
print(cbct.results())
cbct.plot_analyzed_image(show)
class CatPhan600(CatPhanBase):
"""A class for loading and analyzing CT DICOM files of a CatPhan 600.
Analyzes: Uniformity (CTP486), High-Contrast Spatial Resolution (CTP528),
Image Scaling & HU Linearity (CTP404), and Low contrast (CTP515).
"""
_demo_url = 'CatPhan600.zip'
_model = '600'
catphan_radius_mm = 101
modules = {
CTP404CP600: {'offset': 0},
CTP486: {'offset': -160},
CTP515CP600: {'offset': -110},
CTP528CP600: {'offset': -70},
}
@staticmethod
def run_demo(show: bool=True):
"""Run the CatPhan 600 demo."""
cbct = CatPhan600.from_demo_images()
cbct.analyze()
print(cbct.results())
cbct.plot_analyzed_image(show)
@argue.options(threshold=('otsu', 'mean'))
def get_regions(slice_or_arr: Union[Slice, np.ndarray], fill_holes: bool=False, clear_borders: bool=True, threshold: str='otsu') -> Tuple[np.ndarray, list, int]:
"""Get the skimage regions of a black & white image."""
if threshold == 'otsu':
thresmeth = filters.threshold_otsu
elif threshold == 'mean':
thresmeth = np.mean
if isinstance(slice_or_arr, Slice):
        edges = filters.scharr(slice_or_arr.image.array.astype(float))
center = slice_or_arr.image.center
elif isinstance(slice_or_arr, np.ndarray):
        edges = filters.scharr(slice_or_arr.astype(float))
center = (int(edges.shape[1]/2), int(edges.shape[0]/2))
edges = filters.gaussian(edges, sigma=1)
if isinstance(slice_or_arr, Slice):
box_size = 100/slice_or_arr.mm_per_pixel
thres_img = edges[int(center.y-box_size):int(center.y+box_size),
int(center.x-box_size):int(center.x+box_size)]
thres = thresmeth(thres_img)
else:
thres = thresmeth(edges)
bw = edges > thres
if clear_borders:
        bw = segmentation.clear_border(bw, buffer_size=int(max(bw.shape)/50))
if fill_holes:
bw = ndimage.binary_fill_holes(bw)
labeled_arr, num_roi = measure.label(bw, return_num=True)
regionprops = measure.regionprops(labeled_arr, edges)
return labeled_arr, regionprops, num_roi
@argue.options(mode=('mean', 'median', 'max'))
def combine_surrounding_slices(dicomstack: DicomImageStack, nominal_slice_num: int, slices_plusminus: int=1, mode: str='mean') -> np.ndarray:
"""Return an array that is the combination of a given slice and a number of slices surrounding it.
Parameters
----------
dicomstack : `~pylinac.core.image.DicomImageStack`
The CBCT DICOM stack.
nominal_slice_num : int
The slice of interest (along 3rd dim).
slices_plusminus: int
How many slices plus and minus to combine (also along 3rd dim).
    mode : {'mean', 'median', 'max'}
Specifies the method of combination.
Returns
-------
    combined_array : numpy.ndarray
The combined array of the DICOM stack slices.
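    Examples
    --------
    A sketch (assumes ``stack`` is a loaded DicomImageStack)::
        arr = combine_surrounding_slices(stack, nominal_slice_num=30,
                                         slices_plusminus=2, mode='median')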
"""
slices = range(nominal_slice_num - slices_plusminus, nominal_slice_num + slices_plusminus + 1)
arrays = tuple(dicomstack[s].array for s in slices)
array_stack = np.dstack(arrays)
if mode == 'mean':
combined_array = np.mean(array_stack, 2)
elif mode == 'median':
combined_array = np.median(array_stack, 2)
else:
combined_array = np.max(array_stack, 2)
return combined_array
|
mit
| -6,795,491,446,454,082,000 | 43.399745 | 181 | 0.603207 | false |
jlmadurga/listenclosely
|
listenclosely/tasks.py
|
1
|
1391
|
from __future__ import absolute_import
from celery import Task, shared_task
from functools import wraps
def listening_required(f):
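    """Task decorator: if the Celery app's facade is not yet listening, queue a
    ``listen`` task on the same routing key and retry the wrapped task after
    its ``default_retry_delay``."""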
@wraps(f)
def decorated_function(self, *args, **kwargs):
if not self.facade.listening:
listen.apply_async(queue=self.request.delivery_info['routing_key'])
return self.retry()
else:
return f(self, *args, **kwargs)
return decorated_function
class ListenCloselyTask(Task):
abstract = True
default_retry_delay = 0.5
@property
def facade(self):
return self.app.listenclosely_app
@shared_task(base=ListenCloselyTask, bind=True, ignore_result=True)
def listen(self):
if not self.facade.listening:
return self.facade.listen()
else:
return "Already listening"
@shared_task(base=ListenCloselyTask, bind=True)
@listening_required
def disconnect(self):
return self.facade.disconnect()
@shared_task(base=ListenCloselyTask, bind=True)
@listening_required
def send_message(self, number, content):
self.facade.send_message(number, content)
return True
@shared_task(base=ListenCloselyTask, bind=True)
@listening_required
def attend_pendings(self):
self.facade.attend_pendings()
@shared_task(base=ListenCloselyTask, bind=True)
@listening_required
def terminate_obsolete(self):
self.facade.terminate_obsolete()
|
bsd-3-clause
| 6,976,494,609,904,004,000 | 25.769231 | 79 | 0.698059 | false |
inabhi9/django-miniauth
|
miniauth/admin.py
|
1
|
3031
|
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
from miniauth.models import User
class UserCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('email', 'is_active', 'is_admin', 'is_staff')
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
"""A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField()
class Meta:
model = User
fields = ('email', 'password', 'is_active', 'is_staff', 'is_admin')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class UserAdmin(UserAdmin):
fieldsets = (
(None, {'fields': ('email', 'password',)}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions',)}),
(_('Important dates'), {'fields': ('date_joined', 'last_login',)}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')}
),
)
form = UserChangeForm
add_form = UserCreationForm
list_display = ('email', 'is_staff', 'is_active', 'get_group')
list_filter = ('is_staff', 'is_active', 'groups', )
search_fields = ('email',)
ordering = ('email',)
filter_horizontal = ('groups', 'user_permissions',)
def get_group(self, obj):
""" Converts group ManyToMany fields to comma separated string to show in list_display """
return ",".join([g.name for g in obj.groups.all()])
# Naming field
get_group.short_description = "Group"
# Now register the new UserAdmin...
admin.site.register(User, UserAdmin)
|
gpl-2.0
| 5,542,544,458,145,692,000 | 35.518072 | 98 | 0.638073 | false |
qedsoftware/commcare-hq
|
corehq/apps/sms/views.py
|
1
|
80444
|
#!/usr/bin/env python
from StringIO import StringIO
import base64
from datetime import datetime, timedelta, time
import re
import json
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseBadRequest, Http404
from django.shortcuts import render
from django.utils.decorators import method_decorator
from corehq import privileges
from corehq.apps.hqadmin.views import BaseAdminSectionView
from corehq.apps.hqwebapp.doc_info import get_doc_info_by_id
from corehq.apps.hqwebapp.utils import get_bulk_upload_form, sign
from corehq.apps.accounting.decorators import requires_privilege_with_fallback, requires_privilege_plaintext_response
from corehq.apps.api.models import require_api_user_permission, PERMISSION_POST_SMS, ApiUser
from corehq.apps.commtrack.models import AlertConfig
from corehq.apps.sms.api import (
send_sms,
incoming,
send_sms_with_backend_name,
send_sms_to_verified_number,
MessageMetadata,
)
from corehq.apps.sms.resources.v0_5 import SelfRegistrationUserInfo
from corehq.apps.domain.views import BaseDomainView, DomainViewMixin
from corehq.apps.hqwebapp.views import CRUDPaginatedViewMixin
from corehq.apps.sms.dbaccessors import get_forwarding_rules_for_domain
from corehq.apps.style.decorators import (
use_timepicker,
use_typeahead,
use_select2,
use_jquery_ui,
use_datatables,
)
from corehq.apps.users.decorators import require_permission
from corehq.apps.users.models import CouchUser, Permissions, CommCareUser
from corehq.apps.users import models as user_models
from corehq.apps.users.views.mobile.users import EditCommCareUserView
from corehq.apps.sms.models import (
SMS, INCOMING, OUTGOING, ForwardingRule,
MessagingEvent, SelfRegistrationInvitation,
SQLMobileBackend, SQLMobileBackendMapping, PhoneLoadBalancingMixin,
SQLLastReadMessage, PhoneNumber
)
from corehq.apps.sms.mixin import BadSMSConfigException
from corehq.apps.sms.forms import (ForwardingRuleForm, BackendMapForm,
InitiateAddSMSBackendForm, SubscribeSMSForm,
SettingsForm, SHOW_ALL, SHOW_INVALID, HIDE_ALL, ENABLED, DISABLED,
DEFAULT, CUSTOM, SendRegistrationInvitationsForm,
WELCOME_RECIPIENT_NONE, WELCOME_RECIPIENT_CASE,
WELCOME_RECIPIENT_MOBILE_WORKER, WELCOME_RECIPIENT_ALL, ComposeMessageForm)
from corehq.apps.sms.util import get_contact, get_sms_backend_classes, ContactNotFoundException
from corehq.apps.sms.messages import _MESSAGES
from corehq.apps.smsbillables.utils import country_name_from_isd_code_or_empty as country_name_from_code
from corehq.apps.groups.models import Group
from corehq.apps.domain.decorators import (
login_and_domain_required,
login_or_digest_ex,
domain_admin_required,
require_superuser,
)
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.form_processor.utils import is_commcarecase
from corehq.messaging.smsbackends.test.models import SQLTestSMSBackend
from corehq.messaging.smsbackends.telerivet.models import SQLTelerivetBackend
from corehq.apps.translations.models import StandaloneTranslationDoc
from corehq.util.dates import iso_string_to_datetime
from corehq.util.soft_assert import soft_assert
from corehq.util.spreadsheets.excel import WorkbookJSONReader
from corehq.util.timezones.conversions import ServerTime, UserTime
from corehq.util.quickcache import quickcache
from django.contrib import messages
from django.db.models import Q
from corehq.util.timezones.utils import get_timezone_for_user
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from corehq.apps.domain.models import Domain
from corehq.const import SERVER_DATETIME_FORMAT, SERVER_DATE_FORMAT
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
from dimagi.utils.parsing import json_format_datetime, string_to_boolean
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.decorators.view import get_file
from dimagi.utils.logging import notify_exception
from dimagi.utils.web import json_response
from dimagi.utils.couch import CriticalSection
from dimagi.utils.couch.database import iter_docs
from dimagi.utils.couch.cache import cache_core
from django.conf import settings
from couchdbkit.resource import ResourceNotFound
from couchexport.models import Format
from couchexport.export import export_raw
from couchexport.shortcuts import export_response
# Tuple of (description, days in the past)
SMS_CHAT_HISTORY_CHOICES = (
(ugettext_noop("Yesterday"), 1),
(ugettext_noop("1 Week"), 7),
(ugettext_noop("30 Days"), 30),
)
@login_and_domain_required
def default(request, domain):
return HttpResponseRedirect(reverse(ComposeMessageView.urlname, args=[domain]))
class BaseMessagingSectionView(BaseDomainView):
section_name = ugettext_noop("Messaging")
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
def dispatch(self, *args, **kwargs):
return super(BaseMessagingSectionView, self).dispatch(*args, **kwargs)
@property
def section_url(self):
return reverse("sms_default", args=[self.domain])
class BaseAdvancedMessagingSectionView(BaseMessagingSectionView):
"""
Just like BaseMessagingSectionView, only requires access to inbound SMS
as well.
"""
@method_decorator(requires_privilege_with_fallback(privileges.INBOUND_SMS))
def dispatch(self, *args, **kwargs):
return super(BaseAdvancedMessagingSectionView, self).dispatch(*args, **kwargs)
class ComposeMessageView(BaseMessagingSectionView):
template_name = 'sms/compose.html'
urlname = 'sms_compose_message'
page_title = _('Compose SMS Message')
@property
def page_context(self):
page_context = super(ComposeMessageView, self).page_context
tz = get_timezone_for_user(self.request.couch_user, self.domain)
page_context.update({
'now': datetime.utcnow(),
'timezone': tz,
'timezone_now': datetime.now(tz=tz),
'form': ComposeMessageForm(domain=self.domain)
})
page_context.update(get_sms_autocomplete_context(self.request, self.domain))
return page_context
@method_decorator(require_permission(Permissions.edit_data))
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
@use_typeahead
def dispatch(self, *args, **kwargs):
return super(BaseMessagingSectionView, self).dispatch(*args, **kwargs)
@require_api_user_permission(PERMISSION_POST_SMS)
def sms_in(request):
"""
CommCareHQ's generic inbound sms post api, requiring an ApiUser with permission to post sms.
The request must be a post, and must have the following post parameters:
username - ApiUser username
password - ApiUser password
phone_number - phone number message was sent from
message - text of message
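    Example (illustrative only; the URL and credentials are hypothetical):
        import requests
        requests.post('https://hq.example.com/sms/in/', data={
            'username': 'apiuser', 'password': 'secret',
            'phone_number': '+15555555555', 'message': 'hello',
        })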
"""
backend_api = "HQ_HTTP_INBOUND"
phone_number = request.POST.get("phone_number", None)
message = request.POST.get("message", None)
if phone_number is None or message is None:
return HttpResponse("Please specify 'phone_number' and 'message' parameters.", status=400)
else:
incoming(phone_number, message, backend_api)
return HttpResponse("OK")
def get_sms_autocomplete_context(request, domain):
"""A helper view for sms autocomplete"""
phone_users = CouchUser.view("users/phone_users_by_domain",
startkey=[domain], endkey=[domain, {}], include_docs=True
)
groups = Group.by_domain(domain)
contacts = ["[send to all]"]
contacts.extend(['%s [group]' % group.name for group in groups])
user_id = None
for user in phone_users:
if user._id == user_id:
continue
contacts.append(user.username)
user_id = user._id
return {"sms_contacts": contacts}
@login_and_domain_required
@requires_privilege_with_fallback(privileges.OUTBOUND_SMS)
def send_to_recipients(request, domain):
recipients = request.POST.get('recipients')
message = request.POST.get('message')
if not recipients:
messages.error(request, _("You didn't specify any recipients"))
elif not message:
messages.error(request, _("You can't send an empty message"))
else:
recipients = [x.strip() for x in recipients.split(',') if x.strip()]
phone_numbers = []
# formats: GroupName (group), "Username", +15555555555
group_names = []
usernames = []
phone_numbers = []
unknown_usernames = []
GROUP = "[group]"
send_to_all_checked = False
for recipient in recipients:
if recipient == "[send to all]":
send_to_all_checked = True
phone_users = CouchUser.view("users/phone_users_by_domain",
startkey=[domain], endkey=[domain, {}], include_docs=True
)
for user in phone_users:
usernames.append(user.username)
group_names = []
break
elif (not send_to_all_checked) and recipient.endswith(GROUP):
name = recipient[:-len(GROUP)].strip()
group_names.append(name)
elif re.match(r'^\+\d+$', recipient): # here we expect it to have a plus sign
def wrap_user_by_type(u):
return getattr(user_models, u['doc']['doc_type']).wrap(u['doc'])
phone_users = CouchUser.view("users/by_default_phone", # search both with and w/o the plus
keys=[recipient, recipient[1:]], include_docs=True,
wrapper=wrap_user_by_type).all()
phone_users = filter(lambda u: u.is_member_of(domain), phone_users)
if len(phone_users) > 0:
phone_numbers.append((phone_users[0], recipient))
else:
phone_numbers.append((None, recipient))
elif (not send_to_all_checked) and re.match(r'[\w\.]+', recipient):
usernames.append(recipient)
else:
unknown_usernames.append(recipient)
login_ids = {
r['key']: r['id'] for r in CommCareUser.get_db().view(
"users/by_username", keys=usernames, reduce=False).all()}
for username in usernames:
if username not in login_ids:
unknown_usernames.append(username)
login_ids = login_ids.values()
users = []
empty_groups = []
if len(group_names) > 0:
users.extend(CouchUser.view('users/by_group', keys=[[domain, gn] for gn in group_names],
include_docs=True).all())
if len(users) == 0:
empty_groups = group_names
users.extend(CouchUser.view('_all_docs', keys=login_ids, include_docs=True).all())
users = [user for user in users if user.is_active and not user.is_deleted()]
phone_numbers.extend([(user, user.phone_number) for user in users])
failed_numbers = []
no_numbers = []
sent = []
if len(phone_numbers) == 1:
recipient = phone_numbers[0][0]
else:
recipient = None
logged_event = MessagingEvent.create_event_for_adhoc_sms(domain, recipient=recipient)
for user, number in phone_numbers:
if not number:
no_numbers.append(user.raw_username)
else:
args = [user.doc_type, user.get_id] if user else []
logged_subevent = logged_event.create_subevent_for_single_sms(*args)
if send_sms(
domain, user, number, message,
metadata=MessageMetadata(messaging_subevent_id=logged_subevent.pk)
):
sent.append("%s" % (user.raw_username if user else number))
logged_subevent.completed()
else:
failed_numbers.append("%s (%s)" % (
number,
user.raw_username if user else "<no username>"
))
logged_subevent.error(MessagingEvent.ERROR_INTERNAL_SERVER_ERROR)
logged_event.completed()
def comma_reminder():
messages.error(request, _("Please remember to separate recipients"
" with a comma."))
if empty_groups or failed_numbers or unknown_usernames or no_numbers:
if empty_groups:
messages.error(request, _("The following groups don't exist: ") + (', '.join(empty_groups)))
comma_reminder()
if no_numbers:
messages.error(request, _("The following users don't have phone numbers: ") + (', '.join(no_numbers)))
if failed_numbers:
messages.error(request, _("Couldn't send to the following number(s): ") + (', '.join(failed_numbers)))
if unknown_usernames:
messages.error(request, _("Couldn't find the following user(s): ") + (', '.join(unknown_usernames)))
comma_reminder()
if sent:
messages.success(request, _("Successfully sent: ") + (', '.join(sent)))
else:
messages.info(request, _("No messages were sent."))
else:
messages.success(request, _("Message sent"))
return HttpResponseRedirect(
request.META.get('HTTP_REFERER') or
reverse(ComposeMessageView.urlname, args=[domain])
)
class TestSMSMessageView(BaseDomainView):
urlname = 'message_test'
template_name = 'sms/message_tester.html'
section_name = ugettext_lazy("Messaging")
page_title = ugettext_lazy("Test SMS Message")
@property
def section_url(self):
return reverse('sms_default', args=(self.domain,))
@property
def page_url(self):
return reverse(self.urlname, args=(self.domain, self.phone_number,))
@method_decorator(domain_admin_required)
@method_decorator(requires_privilege_with_fallback(privileges.INBOUND_SMS))
def dispatch(self, request, *args, **kwargs):
return super(TestSMSMessageView, self).dispatch(request, *args, **kwargs)
@property
def phone_number(self):
return self.kwargs['phone_number']
@property
def page_context(self):
return {
'phone_number': self.phone_number,
}
def post(self, request, *args, **kwargs):
message = request.POST.get("message", "")
phone_entry = PhoneNumber.by_phone(self.phone_number)
if phone_entry and phone_entry.domain != self.domain:
messages.error(
request,
_("Invalid phone number being simulated. Please choose a "
"two-way phone number belonging to a contact in your project.")
)
else:
incoming(self.phone_number, message, SQLTestSMSBackend.get_api_id(), domain_scope=self.domain)
messages.success(
request,
_("Test message received.")
)
return self.get(request, *args, **kwargs)
@csrf_exempt
@login_or_digest_ex(allow_cc_users=True)
@requires_privilege_plaintext_response(privileges.OUTBOUND_SMS)
def api_send_sms(request, domain):
"""
An API to send SMS.
Expected post parameters:
phone_number - the phone number to send to
contact_id - the _id of a contact to send to (overrides phone_number)
vn_id - the couch_id of a PhoneNumber to send to (overrides contact_id)
text - the text of the message
backend_id - the name of the MobileBackend to use while sending
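    Example (illustrative only; the URL, domain, and values are hypothetical):
        import requests
        requests.post('https://hq.example.com/a/demo/sms/api/send/', data={
            'phone_number': '+15555555555',
            'text': 'Your appointment is tomorrow',
        }, auth=('user@example.com', 'password'))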
"""
if request.method == "POST":
phone_number = request.POST.get("phone_number", None)
contact_id = request.POST.get("contact_id", None)
vn_id = request.POST.get("vn_id", None)
text = request.POST.get("text", None)
backend_id = request.POST.get("backend_id", None)
chat = request.POST.get("chat", None)
contact = None
if (phone_number is None and contact_id is None and not vn_id) or (text is None):
return HttpResponseBadRequest("Not enough arguments.")
vn = None
if vn_id:
vn = PhoneNumber.by_couch_id(vn_id)
if not vn:
return HttpResponseBadRequest("PhoneNumber not found.")
if vn.domain != domain:
return HttpResponseBadRequest("PhoneNumber not found.")
phone_number = vn.phone_number
contact = vn.owner
elif contact_id is not None:
try:
contact = get_contact(domain, contact_id)
assert contact is not None
assert contact.domain == domain
except Exception:
return HttpResponseBadRequest("Contact not found.")
try:
vn = contact.get_verified_number()
assert vn is not None
phone_number = vn.phone_number
except Exception:
return HttpResponseBadRequest("Contact has no phone number.")
try:
chat_workflow = string_to_boolean(chat)
except Exception:
chat_workflow = False
if chat_workflow:
chat_user_id = request.couch_user.get_id
else:
chat_user_id = None
logged_event = MessagingEvent.create_event_for_adhoc_sms(
domain, recipient=contact,
content_type=(MessagingEvent.CONTENT_CHAT_SMS if chat_workflow
else MessagingEvent.CONTENT_API_SMS))
args = [contact.doc_type, contact.get_id] if contact else []
logged_subevent = logged_event.create_subevent_for_single_sms(*args)
metadata = MessageMetadata(
chat_user_id=chat_user_id,
messaging_subevent_id=logged_subevent.pk,
)
if backend_id is not None:
success = send_sms_with_backend_name(domain, phone_number, text, backend_id, metadata)
elif vn is not None:
success = send_sms_to_verified_number(vn, text, metadata)
else:
success = send_sms(domain, None, phone_number, text, metadata)
if success:
logged_subevent.completed()
logged_event.completed()
return HttpResponse("OK")
else:
logged_subevent.error(MessagingEvent.ERROR_INTERNAL_SERVER_ERROR)
return HttpResponse("ERROR")
else:
return HttpResponseBadRequest("POST Expected.")
class BaseForwardingRuleView(BaseDomainView):
section_name = ugettext_noop("Messaging")
@method_decorator(login_and_domain_required)
@method_decorator(require_superuser)
def dispatch(self, request, *args, **kwargs):
return super(BaseForwardingRuleView, self).dispatch(request, *args, **kwargs)
@property
def section_url(self):
return reverse("sms_default", args=(self.domain,))
class ListForwardingRulesView(BaseForwardingRuleView):
urlname = 'list_forwarding_rules'
template_name = 'sms/list_forwarding_rules.html'
page_title = ugettext_lazy("Forwarding Rules")
@property
def page_context(self):
forwarding_rules = get_forwarding_rules_for_domain(self.domain)
return {
'forwarding_rules': forwarding_rules,
}
class BaseEditForwardingRuleView(BaseForwardingRuleView):
template_name = 'sms/add_forwarding_rule.html'
@property
def forwarding_rule_id(self):
return self.kwargs.get('forwarding_rule_id')
@property
def forwarding_rule(self):
raise NotImplementedError("must return ForwardingRule")
@property
@memoized
def rule_form(self):
if self.request.method == 'POST':
return ForwardingRuleForm(self.request.POST)
initial = {}
if self.forwarding_rule_id:
initial["forward_type"] = self.forwarding_rule.forward_type
initial["keyword"] = self.forwarding_rule.keyword
initial["backend_id"] = self.forwarding_rule.backend_id
return ForwardingRuleForm(initial=initial)
@property
def page_url(self):
if self.forwarding_rule_id:
return reverse(self.urlname, args=(self.domain, self.forwarding_rule_id,))
return super(BaseEditForwardingRuleView, self).page_url
def post(self, request, *args, **kwargs):
if self.rule_form.is_valid():
self.forwarding_rule.forward_type = self.rule_form.cleaned_data.get(
'forward_type'
)
self.forwarding_rule.keyword = self.rule_form.cleaned_data.get(
'keyword'
)
self.forwarding_rule.backend_id = self.rule_form.cleaned_data.get(
'backend_id'
)
self.forwarding_rule.save()
return HttpResponseRedirect(reverse(
ListForwardingRulesView.urlname, args=(self.domain,)))
return self.get(request, *args, **kwargs)
@property
def page_context(self):
return {
'form': self.rule_form,
'forwarding_rule_id': self.forwarding_rule_id,
}
@property
def parent_pages(self):
return [
{
'url': reverse(ListForwardingRulesView.urlname, args=(self.domain,)),
'title': ListForwardingRulesView.page_title,
}
]
class AddForwardingRuleView(BaseEditForwardingRuleView):
urlname = 'add_forwarding_rule'
page_title = ugettext_lazy("Add Forwarding Rule")
@property
@memoized
def forwarding_rule(self):
return ForwardingRule(domain=self.domain)
class EditForwardingRuleView(BaseEditForwardingRuleView):
urlname = 'edit_forwarding_rule'
page_title = ugettext_lazy("Edit Forwarding Rule")
@property
@memoized
def forwarding_rule(self):
forwarding_rule = ForwardingRule.get(self.forwarding_rule_id)
if forwarding_rule.domain != self.domain:
raise Http404()
return forwarding_rule
@login_and_domain_required
@require_superuser
def delete_forwarding_rule(request, domain, forwarding_rule_id):
forwarding_rule = ForwardingRule.get(forwarding_rule_id)
if forwarding_rule.domain != domain or forwarding_rule.doc_type != "ForwardingRule":
raise Http404
forwarding_rule.retire()
return HttpResponseRedirect(reverse("list_forwarding_rules", args=[domain]))
class GlobalBackendMap(BaseAdminSectionView):
urlname = 'global_backend_map'
template_name = 'sms/backend_map.html'
page_title = ugettext_lazy("Default Gateways")
@property
def page_url(self):
return reverse(self.urlname)
@property
@memoized
def backends(self):
return SQLMobileBackend.get_global_backends(SQLMobileBackend.SMS)
@property
@memoized
def backend_map_form(self):
if self.request.method == 'POST':
return BackendMapForm(self.request.POST, backends=self.backends)
backend_map = SQLMobileBackendMapping.get_prefix_to_backend_map(SQLMobileBackend.SMS)
initial = {
'catchall_backend_id': backend_map.catchall_backend_id,
'backend_map': json.dumps([
{'prefix': prefix, 'backend_id': backend_id}
for prefix, backend_id in backend_map.backend_map_tuples
]),
}
return BackendMapForm(initial=initial, backends=self.backends)
@method_decorator(require_superuser)
def dispatch(self, request, *args, **kwargs):
return super(GlobalBackendMap, self).dispatch(request, *args, **kwargs)
@property
def page_context(self):
return {
'form': self.backend_map_form,
'backends': self.backends,
}
def post(self, request, *args, **kwargs):
form = self.backend_map_form
if form.is_valid():
new_backend_map = form.cleaned_data.get('backend_map')
new_catchall_backend_id = form.cleaned_data.get('catchall_backend_id')
with transaction.atomic():
SQLMobileBackendMapping.get_prefix_to_backend_map.clear(
SQLMobileBackendMapping, SQLMobileBackend.SMS
)
SQLMobileBackendMapping.objects.filter(
is_global=True,
backend_type=SQLMobileBackend.SMS,
).delete()
for prefix, backend_id in new_backend_map.items():
SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix=prefix,
backend_id=backend_id
)
if new_catchall_backend_id:
SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='*',
backend_id=new_catchall_backend_id
)
messages.success(request, _("Changes Saved."))
return HttpResponseRedirect(reverse(self.urlname))
return self.get(request, *args, **kwargs)
class ChatOverSMSView(BaseMessagingSectionView):
urlname = 'chat_contacts'
template_name = 'sms/chat_contacts.html'
page_title = _("Chat over SMS")
@method_decorator(require_permission(Permissions.edit_data))
@use_datatables
def dispatch(self, *args, **kwargs):
return super(ChatOverSMSView, self).dispatch(*args, **kwargs)
def get_case_contact_info(domain_obj, case_ids):
data = {}
for case in CaseAccessors(domain_obj.name).iter_cases(case_ids):
if domain_obj.custom_case_username:
name = case.get_case_property(domain_obj.custom_case_username)
else:
name = case.name
data[case.case_id] = [name or _('(unknown)')]
return data
def get_mobile_worker_contact_info(domain_obj, user_ids):
data = {}
for doc in iter_docs(CommCareUser.get_db(), user_ids):
user = CommCareUser.wrap(doc)
data[user.get_id] = [user.raw_username]
return data
def get_contact_info(domain):
# If the data has been cached, just retrieve it from there
cache_key = 'sms-chat-contact-list-%s' % domain
cache_expiration = 30 * 60
try:
client = cache_core.get_redis_client()
cached_data = client.get(cache_key)
if cached_data:
return json.loads(cached_data)
    except Exception:
        pass
domain_obj = Domain.get_by_name(domain, strict=True)
case_ids = []
mobile_worker_ids = []
data = []
for p in PhoneNumber.by_domain(domain):
if p.owner_doc_type == 'CommCareCase':
case_ids.append(p.owner_id)
data.append([
None,
'case',
p.phone_number,
p.owner_id,
p.couch_id,
])
elif p.owner_doc_type == 'CommCareUser':
mobile_worker_ids.append(p.owner_id)
data.append([
None,
'mobile_worker',
p.phone_number,
p.owner_id,
p.couch_id,
])
contact_data = get_case_contact_info(domain_obj, case_ids)
contact_data.update(get_mobile_worker_contact_info(domain_obj, mobile_worker_ids))
for row in data:
contact_info = contact_data.get(row[3])
row[0] = contact_info[0] if contact_info else _('(unknown)')
# Save the data to the cache for faster lookup next time
try:
client.set(cache_key, json.dumps(data))
client.expire(cache_key, cache_expiration)
    except Exception:
        pass
return data
def format_contact_data(domain, data):
for row in data:
contact_id = row[3]
vn_id = row[4]
if row[1] == 'case':
row[1] = _('Case')
row[4] = reverse('case_details', args=[domain, contact_id])
elif row[1] == 'mobile_worker':
row[1] = _('Mobile Worker')
row[4] = reverse(EditCommCareUserView.urlname, args=[domain, contact_id])
else:
row[4] = '#'
row.append(reverse('sms_chat', args=[domain, contact_id, vn_id]))
@require_permission(Permissions.edit_data)
@requires_privilege_with_fallback(privileges.OUTBOUND_SMS)
def chat_contact_list(request, domain):
sEcho = request.GET.get('sEcho')
iDisplayStart = int(request.GET.get('iDisplayStart'))
iDisplayLength = int(request.GET.get('iDisplayLength'))
sSearch = request.GET.get('sSearch', '').strip()
data = get_contact_info(domain)
total_records = len(data)
if sSearch:
regex = re.compile('^.*%s.*$' % sSearch)
data = filter(lambda row: regex.match(row[0]) or regex.match(row[2]), data)
filtered_records = len(data)
data.sort(key=lambda row: row[0])
data = data[iDisplayStart:iDisplayStart + iDisplayLength]
format_contact_data(domain, data)
result = {
'sEcho': sEcho,
'aaData': data,
'iTotalRecords': total_records,
'iTotalDisplayRecords': filtered_records,
}
return HttpResponse(json.dumps(result))
def get_contact_name_for_chat(contact, domain_obj):
if is_commcarecase(contact):
if domain_obj.custom_case_username:
contact_name = contact.get_case_property(domain_obj.custom_case_username)
else:
contact_name = contact.name
else:
if contact.first_name:
contact_name = contact.first_name
else:
contact_name = contact.raw_username
return contact_name
@require_permission(Permissions.edit_data)
@requires_privilege_with_fallback(privileges.OUTBOUND_SMS)
def chat(request, domain, contact_id, vn_id=None):
domain_obj = Domain.get_by_name(domain, strict=True)
timezone = get_timezone_for_user(None, domain)
# floored_utc_timestamp is the datetime in UTC representing
# midnight today in local time. This is used to calculate
# all message history choices' timestamps, so that choosing
# "Yesterday", for example, gives you data from yesterday at
# midnight local time.
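    # e.g. for a user in UTC-5 on 2016-05-10, floored_utc_timestamp is
    # 2016-05-10 05:00:00 UTC, so "Yesterday" covers the 24h before that.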
local_date = datetime.now(timezone).date()
floored_utc_timestamp = UserTime(
datetime.combine(local_date, time(0, 0)),
timezone
).server_time().done()
def _fmt(d):
return json_format_datetime(floored_utc_timestamp - timedelta(days=d))
history_choices = [(_(x), _fmt(y)) for (x, y) in SMS_CHAT_HISTORY_CHOICES]
history_choices.append(
(_("All Time"), json_format_datetime(datetime(1970, 1, 1)))
)
contact = get_contact(domain, contact_id)
context = {
"domain": domain,
"contact_id": contact_id,
"contact": contact,
"contact_name": get_contact_name_for_chat(contact, domain_obj),
"use_message_counter": domain_obj.chat_message_count_threshold is not None,
"message_count_threshold": domain_obj.chat_message_count_threshold or 0,
"history_choices": history_choices,
"vn_id": vn_id,
}
template = settings.CUSTOM_CHAT_TEMPLATES.get(domain_obj.custom_chat_template) or "sms/chat.html"
return render(request, template, context)
class ChatMessageHistory(View, DomainViewMixin):
urlname = 'api_history'
@method_decorator(require_permission(Permissions.edit_data))
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
def dispatch(self, request, *args, **kwargs):
return super(ChatMessageHistory, self).dispatch(request, *args, **kwargs)
@property
@memoized
def contact_id(self):
return self.request.GET.get('contact_id')
@property
@memoized
def contact(self):
if not self.contact_id:
return None
try:
return get_contact(self.domain, self.contact_id)
except ContactNotFoundException:
return None
@property
@memoized
def contact_name(self):
return get_contact_name_for_chat(self.contact, self.domain_object)
@quickcache(['user_id'], timeout=60 * 60, memoize_timeout=5 * 60)
def get_chat_user_name(self, user_id):
if not user_id:
return _("System")
try:
user = CouchUser.get_by_user_id(user_id)
return user.first_name or user.raw_username
        except Exception:
return _("Unknown")
@property
@memoized
def start_date_str(self):
return self.request.GET.get('start_date')
@property
@memoized
def start_date(self):
if not self.start_date_str:
return None
try:
return iso_string_to_datetime(self.start_date_str)
except (TypeError, ValueError):
return None
def get_raw_data(self):
result = SMS.objects.filter(
domain=self.domain,
couch_recipient_doc_type=self.contact.doc_type,
couch_recipient=self.contact_id
).exclude(
direction=OUTGOING,
processed=False
)
if self.start_date:
result = result.filter(date__gt=self.start_date)
result = self.filter_survey_data(result)
return result.order_by('date')
def filter_survey_data(self, queryset):
if not self.domain_object.filter_surveys_from_chat:
return queryset
if self.domain_object.show_invalid_survey_responses_in_chat:
return queryset.exclude(
Q(xforms_session_couch_id__isnull=False) &
~Q(direction=INCOMING, invalid_survey_response=True)
)
else:
return queryset.exclude(
xforms_session_couch_id__isnull=False
)
def get_response_data(self, requesting_user_id):
timezone = get_timezone_for_user(None, self.domain)
result = []
last_sms = None
for sms in self.get_raw_data():
last_sms = sms
if sms.direction == INCOMING:
sender = self.contact_name
else:
sender = self.get_chat_user_name(sms.chat_user_id)
result.append({
'sender': sender,
'text': sms.text,
'timestamp': (
ServerTime(sms.date).user_time(timezone)
.ui_string("%I:%M%p %m/%d/%y").lower()
),
'utc_timestamp': json_format_datetime(sms.date),
'sent_by_requester': (sms.chat_user_id == requesting_user_id),
})
return result, last_sms
def update_last_read_message(self, requesting_user_id, sms):
domain = self.domain
contact_id = self.contact_id
key = 'update-last-read-message-%s-%s-%s' % (domain, requesting_user_id, contact_id)
with CriticalSection([key]):
try:
entry = SQLLastReadMessage.objects.get(
domain=domain,
read_by=requesting_user_id,
contact_id=contact_id
)
except SQLLastReadMessage.DoesNotExist:
entry = SQLLastReadMessage(
domain=domain,
read_by=requesting_user_id,
contact_id=contact_id
)
if not entry.message_timestamp or entry.message_timestamp < sms.date:
entry.message_id = sms.couch_id
entry.message_timestamp = sms.date
entry.save()
def get(self, request, *args, **kwargs):
if not self.contact:
return HttpResponse('[]')
data, last_sms = self.get_response_data(request.couch_user.get_id)
if last_sms:
try:
self.update_last_read_message(request.couch_user.get_id, last_sms)
            except Exception:
notify_exception(request, "Error updating last read message for %s" % last_sms.pk)
return HttpResponse(json.dumps(data))
class ChatLastReadMessage(View, DomainViewMixin):
urlname = 'api_last_read_message'
@method_decorator(require_permission(Permissions.edit_data))
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
def dispatch(self, request, *args, **kwargs):
return super(ChatLastReadMessage, self).dispatch(request, *args, **kwargs)
@property
@memoized
def contact_id(self):
return self.request.GET.get('contact_id')
def get(self, request, *args, **kwargs):
lrm_timestamp = None
if self.contact_id:
if self.domain_object.count_messages_as_read_by_anyone:
lrm = SQLLastReadMessage.by_anyone(self.domain, self.contact_id)
else:
lrm = SQLLastReadMessage.by_user(self.domain, request.couch_user.get_id, self.contact_id)
if lrm:
lrm_timestamp = json_format_datetime(lrm.message_timestamp)
return HttpResponse(json.dumps({
'message_timestamp': lrm_timestamp,
}))
class DomainSmsGatewayListView(CRUDPaginatedViewMixin, BaseMessagingSectionView):
template_name = "sms/gateway_list.html"
urlname = 'list_domain_backends'
page_title = ugettext_noop("SMS Connectivity")
strict_domain_fetching = True
@method_decorator(domain_admin_required)
def dispatch(self, request, *args, **kwargs):
return super(DomainSmsGatewayListView, self).dispatch(request, *args, **kwargs)
@property
def page_url(self):
return reverse(self.urlname, args=[self.domain])
@property
def parameters(self):
return self.request.POST if self.request.method == 'POST' else self.request.GET
@property
@memoized
def total(self):
return SQLMobileBackend.get_domain_backends(SQLMobileBackend.SMS, self.domain, count_only=True)
@property
def column_names(self):
return [
_("Gateway"),
_("Description"),
_("Supported Countries"),
_("Status"),
_("Actions"),
]
@property
def page_context(self):
context = self.pagination_context
context.update({
'initiate_new_form': InitiateAddSMSBackendForm(is_superuser=self.request.couch_user.is_superuser),
})
return context
@property
def paginated_list(self):
backends = SQLMobileBackend.get_domain_backends(
SQLMobileBackend.SMS,
self.domain,
offset=self.skip,
limit=self.limit
)
default_backend = SQLMobileBackend.get_domain_default_backend(
SQLMobileBackend.SMS,
self.domain
)
if len(backends) > 0 and not default_backend:
yield {
'itemData': {
'id': 'nodefault',
'name': "Automatic Choose",
'status': 'DEFAULT',
},
'template': 'gateway-automatic-template',
}
elif default_backend:
yield {
'itemData': self._fmt_backend_data(default_backend),
'template': 'gateway-default-template',
}
default_backend_id = default_backend.pk if default_backend else None
for backend in backends:
if backend.pk != default_backend_id:
yield {
'itemData': self._fmt_backend_data(backend),
'template': 'gateway-template',
}
def _fmt_backend_data(self, backend):
is_editable = not backend.is_global and backend.domain == self.domain
if len(backend.supported_countries) > 0:
if backend.supported_countries[0] == '*':
supported_country_names = _('Multiple%s') % '*'
else:
supported_country_names = ', '.join(
[_(country_name_from_code(int(c))) for c in backend.supported_countries])
else:
supported_country_names = ''
return {
'id': backend.pk,
'name': backend.name,
'description': backend.description,
'supported_countries': supported_country_names,
'editUrl': reverse(
EditDomainGatewayView.urlname,
args=[self.domain, backend.hq_api_id, backend.pk]
) if is_editable else "",
'canDelete': is_editable,
'isGlobal': backend.is_global,
'isShared': not backend.is_global and backend.domain != self.domain,
'deleteModalId': 'delete_%s' % backend.pk,
}
def _get_backend_from_item_id(self, item_id):
try:
item_id = int(item_id)
backend = SQLMobileBackend.load(item_id)
return item_id, backend
except (BadSMSConfigException, SQLMobileBackend.DoesNotExist, TypeError, ValueError):
raise Http404()
def get_deleted_item_data(self, item_id):
item_id, backend = self._get_backend_from_item_id(item_id)
if backend.is_global or backend.domain != self.domain:
raise Http404()
# Do not actually delete so that linkage always exists between SMS and
# MobileBackend for billable history
backend.soft_delete()
return {
'itemData': self._fmt_backend_data(backend),
'template': 'gateway-deleted-template',
}
def refresh_item(self, item_id):
item_id, backend = self._get_backend_from_item_id(item_id)
if not backend.domain_is_authorized(self.domain):
raise Http404()
domain_default_backend_id = SQLMobileBackend.get_domain_default_backend(
SQLMobileBackend.SMS,
self.domain,
id_only=True
)
if domain_default_backend_id == item_id:
SQLMobileBackendMapping.unset_default_domain_backend(self.domain)
else:
SQLMobileBackendMapping.set_default_domain_backend(self.domain, backend)
@property
def allowed_actions(self):
actions = super(DomainSmsGatewayListView, self).allowed_actions
return actions + ['new_backend']
def post(self, request, *args, **kwargs):
if self.action == 'new_backend':
hq_api_id = request.POST['hq_api_id']
if hq_api_id == SQLTelerivetBackend.get_api_id():
from corehq.messaging.smsbackends.telerivet.views import TelerivetSetupView
return HttpResponseRedirect(reverse(TelerivetSetupView.urlname, args=[self.domain]))
return HttpResponseRedirect(reverse(AddDomainGatewayView.urlname, args=[self.domain, hq_api_id]))
return self.paginate_crud_response
class AddGatewayViewMixin(object):
"""
A mixin to help extract the common functionality between adding/editing
domain-level backends and adding/editing global backends.
"""
@property
def is_superuser(self):
return self.request.couch_user.is_superuser
@property
@memoized
def hq_api_id(self):
return self.kwargs.get('hq_api_id')
@property
@memoized
def backend_class(self):
# Superusers can create/edit any backend
# Regular users can only create/edit Telerivet backends for now
if not self.is_superuser and self.hq_api_id != SQLTelerivetBackend.get_api_id():
raise Http404()
backend_classes = get_sms_backend_classes()
try:
return backend_classes[self.hq_api_id]
except KeyError:
raise Http404()
@property
def use_load_balancing(self):
return issubclass(self.backend_class, PhoneLoadBalancingMixin)
@property
def page_name(self):
return _("Add %s Gateway") % self.backend_class.get_generic_name()
@property
def button_text(self):
return _("Create %s Gateway") % self.backend_class.get_generic_name()
@property
def page_context(self):
return {
'form': self.backend_form,
'button_text': self.button_text,
'use_load_balancing': self.use_load_balancing,
}
def post(self, request, *args, **kwargs):
if self.backend_form.is_valid():
self.backend.name = self.backend_form.cleaned_data.get('name')
self.backend.description = self.backend_form.cleaned_data.get('description')
self.backend.reply_to_phone_number = self.backend_form.cleaned_data.get('reply_to_phone_number')
extra_fields = {}
for key, value in self.backend_form.cleaned_data.items():
if key in self.backend.get_available_extra_fields():
extra_fields[key] = value
self.backend.set_extra_fields(**extra_fields)
if self.use_load_balancing:
self.backend.load_balancing_numbers = self.backend_form.cleaned_data['phone_numbers']
self.backend.save()
if not self.backend.is_global:
self.backend.set_shared_domains(self.backend_form.cleaned_data.get('authorized_domains'))
return self.redirect_to_gateway_list()
return self.get(request, *args, **kwargs)
@property
def backend(self):
raise NotImplementedError()
@property
def page_url(self):
raise NotImplementedError()
@property
def backend_form(self):
raise NotImplementedError()
@property
def parent_pages(self):
raise NotImplementedError()
def redirect_to_gateway_list(self):
raise NotImplementedError()
class AddDomainGatewayView(AddGatewayViewMixin, BaseMessagingSectionView):
urlname = 'add_domain_gateway'
template_name = 'sms/add_gateway.html'
page_title = ugettext_lazy("Add SMS Gateway")
@property
@memoized
def backend(self):
return self.backend_class(
domain=self.domain,
is_global=False,
backend_type=SQLMobileBackend.SMS,
hq_api_id=self.backend_class.get_api_id()
)
@property
def page_url(self):
return reverse(self.urlname, args=[self.domain, self.hq_api_id])
@property
@memoized
def backend_form(self):
form_class = self.backend_class.get_form_class()
if self.request.method == 'POST':
return form_class(
self.request.POST,
button_text=self.button_text,
domain=self.domain,
backend_id=None
)
return form_class(
button_text=self.button_text,
domain=self.domain,
backend_id=None
)
@property
def parent_pages(self):
return [{
'title': DomainSmsGatewayListView.page_title,
'url': reverse(DomainSmsGatewayListView.urlname, args=[self.domain]),
}]
def redirect_to_gateway_list(self):
return HttpResponseRedirect(reverse(DomainSmsGatewayListView.urlname, args=[self.domain]))
@use_select2
@method_decorator(domain_admin_required)
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
def dispatch(self, request, *args, **kwargs):
return super(AddDomainGatewayView, self).dispatch(request, *args, **kwargs)
class EditDomainGatewayView(AddDomainGatewayView):
urlname = 'edit_domain_gateway'
page_title = ugettext_lazy("Edit SMS Gateway")
@property
def backend_id(self):
return self.kwargs['backend_id']
@property
@memoized
def backend(self):
try:
backend = self.backend_class.objects.get(pk=self.backend_id)
except ResourceNotFound:
raise Http404()
if (
backend.is_global or
backend.domain != self.domain or
backend.hq_api_id != self.backend_class.get_api_id() or
backend.deleted
):
raise Http404()
return backend
@property
@memoized
def backend_form(self):
form_class = self.backend_class.get_form_class()
authorized_domains = self.backend.get_authorized_domain_list()
initial = {
'name': self.backend.name,
'description': self.backend.description,
'give_other_domains_access': len(authorized_domains) > 0,
'authorized_domains': ','.join(authorized_domains),
'reply_to_phone_number': self.backend.reply_to_phone_number,
}
initial.update(self.backend.get_extra_fields())
if self.use_load_balancing:
initial['phone_numbers'] = json.dumps(
[{'phone_number': p} for p in self.backend.load_balancing_numbers]
)
if self.request.method == 'POST':
return form_class(
self.request.POST,
initial=initial,
button_text=self.button_text,
domain=self.domain,
backend_id=self.backend.pk
)
return form_class(
initial=initial,
button_text=self.button_text,
domain=self.domain,
backend_id=self.backend.pk
)
@property
def page_name(self):
return _("Edit %s Gateway") % self.backend_class.get_generic_name()
@property
def button_text(self):
return _("Update %s Gateway") % self.backend_class.get_generic_name()
@property
def page_url(self):
return reverse(self.urlname, kwargs=self.kwargs)
class GlobalSmsGatewayListView(CRUDPaginatedViewMixin, BaseAdminSectionView):
template_name = "sms/global_gateway_list.html"
urlname = 'list_global_backends'
page_title = ugettext_noop("SMS Connectivity")
@method_decorator(require_superuser)
def dispatch(self, request, *args, **kwargs):
return super(GlobalSmsGatewayListView, self).dispatch(request, *args, **kwargs)
@property
def page_url(self):
return reverse(self.urlname)
@property
def parameters(self):
return self.request.POST if self.request.method == 'POST' else self.request.GET
@property
@memoized
def total(self):
return SQLMobileBackend.get_global_backends(SQLMobileBackend.SMS, count_only=True)
@property
def column_names(self):
return [
_("Gateway"),
_("Description"),
_("Supported Countries"),
_("Actions"),
]
@property
def page_context(self):
context = self.pagination_context
context.update({
'initiate_new_form': InitiateAddSMSBackendForm(is_superuser=self.request.couch_user.is_superuser),
})
return context
@property
def paginated_list(self):
backends = SQLMobileBackend.get_global_backends(
SQLMobileBackend.SMS,
offset=self.skip,
limit=self.limit
)
for backend in backends:
yield {
'itemData': self._fmt_backend_data(backend),
'template': 'gateway-template',
}
def _fmt_backend_data(self, backend):
if len(backend.supported_countries) > 0:
if backend.supported_countries[0] == '*':
supported_country_names = _('Multiple%s') % '*'
else:
supported_country_names = ', '.join(
[_(country_name_from_code(int(c))) for c in backend.supported_countries])
else:
supported_country_names = ''
return {
'id': backend.pk,
'name': backend.name,
'description': backend.description,
'supported_countries': supported_country_names,
'editUrl': reverse(
EditGlobalGatewayView.urlname,
args=[backend.hq_api_id, backend.pk]
),
'deleteModalId': 'delete_%s' % backend.pk,
}
def _get_backend_from_item_id(self, item_id):
try:
item_id = int(item_id)
backend = SQLMobileBackend.load(item_id)
return item_id, backend
except (BadSMSConfigException, SQLMobileBackend.DoesNotExist, TypeError, ValueError):
raise Http404()
def get_deleted_item_data(self, item_id):
item_id, backend = self._get_backend_from_item_id(item_id)
if not backend.is_global:
raise Http404()
# Do not actually delete so that linkage always exists between SMS and
# MobileBackend for billable history
backend.soft_delete()
return {
'itemData': self._fmt_backend_data(backend),
'template': 'gateway-deleted-template',
}
@property
def allowed_actions(self):
actions = super(GlobalSmsGatewayListView, self).allowed_actions
return actions + ['new_backend']
def post(self, request, *args, **kwargs):
if self.action == 'new_backend':
hq_api_id = request.POST['hq_api_id']
return HttpResponseRedirect(reverse(AddGlobalGatewayView.urlname, args=[hq_api_id]))
return self.paginate_crud_response
class AddGlobalGatewayView(AddGatewayViewMixin, BaseAdminSectionView):
urlname = 'add_global_gateway'
template_name = 'sms/add_gateway.html'
page_title = ugettext_lazy("Add SMS Gateway")
@property
@memoized
def backend(self):
return self.backend_class(
is_global=True,
backend_type=SQLMobileBackend.SMS,
hq_api_id=self.backend_class.get_api_id()
)
@property
def page_url(self):
return reverse(self.urlname, args=[self.hq_api_id])
@property
@memoized
def backend_form(self):
form_class = self.backend_class.get_form_class()
if self.request.method == 'POST':
return form_class(
self.request.POST,
button_text=self.button_text,
domain=None,
backend_id=None
)
return form_class(
button_text=self.button_text,
domain=None,
backend_id=None
)
@property
def parent_pages(self):
return [{
'title': GlobalSmsGatewayListView.page_title,
'url': reverse(GlobalSmsGatewayListView.urlname),
}]
def redirect_to_gateway_list(self):
return HttpResponseRedirect(reverse(GlobalSmsGatewayListView.urlname))
@use_select2
@method_decorator(require_superuser)
def dispatch(self, request, *args, **kwargs):
return super(AddGlobalGatewayView, self).dispatch(request, *args, **kwargs)
class EditGlobalGatewayView(AddGlobalGatewayView):
urlname = 'edit_global_gateway'
page_title = ugettext_lazy("Edit SMS Gateway")
@property
def backend_id(self):
return self.kwargs['backend_id']
@property
@memoized
def backend(self):
try:
backend = self.backend_class.objects.get(pk=self.backend_id)
except ResourceNotFound:
raise Http404()
if (
not backend.is_global or
backend.deleted or
backend.hq_api_id != self.backend_class.get_api_id()
):
raise Http404()
return backend
@property
@memoized
def backend_form(self):
form_class = self.backend_class.get_form_class()
initial = {
'name': self.backend.name,
'description': self.backend.description,
'reply_to_phone_number': self.backend.reply_to_phone_number,
}
initial.update(self.backend.get_extra_fields())
if self.use_load_balancing:
initial['phone_numbers'] = json.dumps(
[{'phone_number': p} for p in self.backend.load_balancing_numbers]
)
if self.request.method == 'POST':
return form_class(
self.request.POST,
initial=initial,
button_text=self.button_text,
domain=None,
backend_id=self.backend.pk
)
return form_class(
initial=initial,
button_text=self.button_text,
domain=None,
backend_id=self.backend.pk
)
@property
def page_name(self):
return _("Edit %s Gateway") % self.backend_class.get_generic_name()
@property
def button_text(self):
return _("Update %s Gateway") % self.backend_class.get_generic_name()
@property
def page_url(self):
return reverse(self.urlname, kwargs=self.kwargs)
class SubscribeSMSView(BaseMessagingSectionView):
template_name = "sms/subscribe_sms.html"
urlname = 'subscribe_sms'
page_title = ugettext_noop("Subscribe SMS")
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
def dispatch(self, *args, **kwargs):
return super(SubscribeSMSView, self).dispatch(*args, **kwargs)
@property
def commtrack_settings(self):
return Domain.get_by_name(self.domain).commtrack_settings
@property
@memoized
def form(self):
if self.request.method == 'POST':
return SubscribeSMSForm(self.request.POST)
if self.commtrack_settings and self.commtrack_settings.alert_config:
alert_config = self.commtrack_settings.alert_config
else:
alert_config = AlertConfig()
initial = {
'stock_out_facilities': alert_config.stock_out_facilities,
'stock_out_commodities': alert_config.stock_out_commodities,
'stock_out_rates': alert_config.stock_out_rates,
'non_report': alert_config.non_report,
}
return SubscribeSMSForm(initial=initial)
@property
def page_context(self):
context = {
"form": self.form,
"domain": self.domain,
}
return context
def post(self, request, *args, **kwargs):
if self.form.is_valid():
self.form.save(self.commtrack_settings)
messages.success(request, _("Updated CommCare Supply settings."))
return HttpResponseRedirect(reverse(SubscribeSMSView.urlname, args=[self.domain]))
return self.get(request, *args, **kwargs)
class SMSLanguagesView(BaseMessagingSectionView):
urlname = 'sms_languages'
template_name = "sms/languages.html"
page_title = ugettext_noop("Languages")
@use_jquery_ui
@use_select2
@method_decorator(domain_admin_required)
def dispatch(self, *args, **kwargs):
return super(SMSLanguagesView, self).dispatch(*args, **kwargs)
@property
def page_context(self):
with StandaloneTranslationDoc.get_locked_obj(self.domain, "sms", create=True) as tdoc:
if len(tdoc.langs) == 0:
tdoc.langs = ["en"]
tdoc.translations["en"] = {}
tdoc.save()
context = {
"domain": self.domain,
"sms_langs": tdoc.langs,
"bulk_upload": {
"action": reverse("upload_sms_translations", args=(self.domain,)),
"download_url": reverse("download_sms_translations", args=(self.domain,)),
"adjective": _("messaging translation"),
"plural_noun": _("messaging translations"),
},
}
context.update({
"bulk_upload_form": get_bulk_upload_form(context),
})
return context
@domain_admin_required
@requires_privilege_with_fallback(privileges.OUTBOUND_SMS)
def edit_sms_languages(request, domain):
"""
Accepts same post body as corehq.apps.app_manager.views.edit_app_langs
"""
with StandaloneTranslationDoc.get_locked_obj(domain, "sms",
create=True) as tdoc:
try:
from corehq.apps.app_manager.views.utils import validate_langs
langs, rename = validate_langs(request, tdoc.langs)
except AssertionError:
return HttpResponse(status=400)
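        # Apply renames first so existing translations follow their language,
        # then add missing languages and drop removed ones.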
for old, new in rename.items():
if old != new:
tdoc.translations[new] = tdoc.translations[old]
del tdoc.translations[old]
for lang in langs:
if lang not in tdoc.translations:
tdoc.translations[lang] = {}
for lang in tdoc.translations.keys():
if lang not in langs:
del tdoc.translations[lang]
tdoc.langs = langs
tdoc.save()
return json_response(langs)
@domain_admin_required
@requires_privilege_with_fallback(privileges.OUTBOUND_SMS)
def download_sms_translations(request, domain):
tdoc = StandaloneTranslationDoc.get_obj(domain, "sms")
columns = ["property"] + tdoc.langs + ["default"]
msg_ids = sorted(_MESSAGES.keys())
rows = []
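    # Build one row per message id: [msg_id, <one column per language>, default text]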
for msg_id in msg_ids:
rows.append([msg_id])
for lang in tdoc.langs:
for row in rows:
row.append(tdoc.translations[lang].get(row[0], ""))
for row in rows:
row.append(_MESSAGES.get(row[0]))
temp = StringIO()
headers = (("translations", tuple(columns)),)
data = (("translations", tuple(rows)),)
export_raw(headers, data, temp)
return export_response(temp, Format.XLS_2007, "translations")
@domain_admin_required
@requires_privilege_with_fallback(privileges.OUTBOUND_SMS)
@get_file("bulk_upload_file")
def upload_sms_translations(request, domain):
try:
workbook = WorkbookJSONReader(request.file)
translations = workbook.get_worksheet(title='translations')
with StandaloneTranslationDoc.get_locked_obj(domain, "sms") as tdoc:
msg_ids = sorted(_MESSAGES.keys())
result = {}
for lang in tdoc.langs:
result[lang] = {}
for row in translations:
for lang in tdoc.langs:
if row.get(lang):
msg_id = row["property"]
if msg_id in msg_ids:
val = row[lang]
if not isinstance(val, basestring):
val = str(val)
val = val.strip()
result[lang][msg_id] = val
tdoc.translations = result
tdoc.save()
messages.success(request, _("SMS Translations Updated."))
except Exception:
notify_exception(request, 'SMS Upload Translations Error')
messages.error(request, _("Update failed. We're looking into it."))
return HttpResponseRedirect(reverse('sms_languages', args=[domain]))
class SMSSettingsView(BaseMessagingSectionView):
urlname = "sms_settings"
template_name = "sms/settings.html"
page_title = ugettext_noop("SMS Settings")
@property
def page_name(self):
return _("SMS Settings")
@property
def page_url(self):
return reverse(self.urlname, args=[self.domain])
@property
@memoized
def previewer(self):
return self.request.couch_user.is_previewer()
def get_welcome_message_recipient(self, domain_obj):
if (
domain_obj.enable_registration_welcome_sms_for_case and
domain_obj.enable_registration_welcome_sms_for_mobile_worker
):
return WELCOME_RECIPIENT_ALL
elif domain_obj.enable_registration_welcome_sms_for_case:
return WELCOME_RECIPIENT_CASE
elif domain_obj.enable_registration_welcome_sms_for_mobile_worker:
return WELCOME_RECIPIENT_MOBILE_WORKER
else:
return WELCOME_RECIPIENT_NONE
@property
@memoized
def form(self):
if self.request.method == "POST":
form = SettingsForm(self.request.POST, cchq_domain=self.domain,
cchq_is_previewer=self.previewer)
else:
domain_obj = Domain.get_by_name(self.domain, strict=True)
enabled_disabled = lambda b: (ENABLED if b else DISABLED)
default_custom = lambda b: (CUSTOM if b else DEFAULT)
initial = {
"use_default_sms_response":
enabled_disabled(domain_obj.use_default_sms_response),
"default_sms_response":
domain_obj.default_sms_response,
"use_restricted_sms_times":
enabled_disabled(len(domain_obj.restricted_sms_times) > 0),
"restricted_sms_times_json":
[w.to_json() for w in domain_obj.restricted_sms_times],
"send_to_duplicated_case_numbers":
enabled_disabled(domain_obj.send_to_duplicated_case_numbers),
"sms_survey_date_format":
domain_obj.sms_survey_date_format,
"use_custom_case_username":
default_custom(domain_obj.custom_case_username),
"custom_case_username":
domain_obj.custom_case_username,
"use_custom_message_count_threshold":
default_custom(
domain_obj.chat_message_count_threshold is not None),
"custom_message_count_threshold":
domain_obj.chat_message_count_threshold,
"use_sms_conversation_times":
enabled_disabled(len(domain_obj.sms_conversation_times) > 0),
"sms_conversation_times_json":
[w.to_json() for w in domain_obj.sms_conversation_times],
"sms_conversation_length":
domain_obj.sms_conversation_length,
"survey_traffic_option":
(SHOW_ALL
if not domain_obj.filter_surveys_from_chat else
SHOW_INVALID
if domain_obj.show_invalid_survey_responses_in_chat else
HIDE_ALL),
"count_messages_as_read_by_anyone":
enabled_disabled(domain_obj.count_messages_as_read_by_anyone),
"use_custom_chat_template":
default_custom(domain_obj.custom_chat_template),
"custom_chat_template":
domain_obj.custom_chat_template,
"sms_case_registration_enabled":
enabled_disabled(domain_obj.sms_case_registration_enabled),
"sms_case_registration_type":
domain_obj.sms_case_registration_type,
"sms_case_registration_owner_id":
domain_obj.sms_case_registration_owner_id,
"sms_case_registration_user_id":
domain_obj.sms_case_registration_user_id,
"sms_mobile_worker_registration_enabled":
enabled_disabled(domain_obj.sms_mobile_worker_registration_enabled),
"registration_welcome_message":
self.get_welcome_message_recipient(domain_obj),
}
form = SettingsForm(initial=initial, cchq_domain=self.domain,
cchq_is_previewer=self.previewer)
return form
@property
def page_context(self):
return {
"form": self.form,
}
def post(self, request, *args, **kwargs):
form = self.form
if form.is_valid():
domain_obj = Domain.get_by_name(self.domain, strict=True)
field_map = [
("use_default_sms_response",
"use_default_sms_response"),
("default_sms_response",
"default_sms_response"),
("custom_case_username",
"custom_case_username"),
("send_to_duplicated_case_numbers",
"send_to_duplicated_case_numbers"),
("sms_survey_date_format",
"sms_survey_date_format"),
("sms_conversation_length",
"sms_conversation_length"),
("count_messages_as_read_by_anyone",
"count_messages_as_read_by_anyone"),
("chat_message_count_threshold",
"custom_message_count_threshold"),
("restricted_sms_times",
"restricted_sms_times_json"),
("sms_conversation_times",
"sms_conversation_times_json"),
("sms_mobile_worker_registration_enabled",
"sms_mobile_worker_registration_enabled"),
]
if self.previewer:
field_map.append(
("custom_chat_template",
"custom_chat_template")
)
for (model_field_name, form_field_name) in field_map:
setattr(domain_obj, model_field_name,
form.cleaned_data[form_field_name])
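            # Map the single survey_traffic_option choice onto the two boolean
            # flags that control which survey messages appear in chat.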
survey_traffic_option = form.cleaned_data["survey_traffic_option"]
if survey_traffic_option == HIDE_ALL:
domain_obj.filter_surveys_from_chat = True
domain_obj.show_invalid_survey_responses_in_chat = False
elif survey_traffic_option == SHOW_INVALID:
domain_obj.filter_surveys_from_chat = True
domain_obj.show_invalid_survey_responses_in_chat = True
else:
domain_obj.filter_surveys_from_chat = False
domain_obj.show_invalid_survey_responses_in_chat = False
if form.cleaned_data["sms_case_registration_enabled"]:
domain_obj.sms_case_registration_enabled = True
domain_obj.sms_case_registration_type = form.cleaned_data[
"sms_case_registration_type"]
domain_obj.sms_case_registration_owner_id = form.cleaned_data[
"sms_case_registration_owner_id"]
domain_obj.sms_case_registration_user_id = form.cleaned_data[
"sms_case_registration_user_id"]
else:
domain_obj.sms_case_registration_enabled = False
domain_obj.enable_registration_welcome_sms_for_case = \
form.enable_registration_welcome_sms_for_case
domain_obj.enable_registration_welcome_sms_for_mobile_worker = \
form.enable_registration_welcome_sms_for_mobile_worker
domain_obj.save()
messages.success(request, _("Changes Saved."))
return self.get(request, *args, **kwargs)
@method_decorator(domain_admin_required)
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
@use_timepicker
def dispatch(self, request, *args, **kwargs):
return super(SMSSettingsView, self).dispatch(request, *args, **kwargs)
class ManageRegistrationInvitationsView(BaseAdvancedMessagingSectionView, CRUDPaginatedViewMixin):
template_name = 'sms/manage_registration_invitations.html'
urlname = 'sms_manage_registration_invitations'
page_title = ugettext_lazy('Manage Registration Invitations')
limit_text = ugettext_noop("invitations per page")
empty_notification = ugettext_noop("No registration invitations sent yet.")
loading_message = ugettext_noop("Loading invitations...")
strict_domain_fetching = True
@method_decorator(require_permission(Permissions.edit_data))
def dispatch(self, request, *args, **kwargs):
return super(ManageRegistrationInvitationsView, self).dispatch(request, *args, **kwargs)
@property
@memoized
def invitations_form(self):
if self.request.method == 'POST':
return SendRegistrationInvitationsForm(self.request.POST, domain=self.domain)
else:
return SendRegistrationInvitationsForm(domain=self.domain)
@property
@memoized
def project_timezone(self):
return get_timezone_for_user(None, self.domain)
@property
def parameters(self):
return self.request.POST if self.request.method == 'POST' else self.request.GET
@property
def page_context(self):
context = self.pagination_context
context.update({
'form': self.invitations_form,
'sms_mobile_worker_registration_enabled':
self.domain_object.sms_mobile_worker_registration_enabled,
})
return context
@property
def total(self):
return SelfRegistrationInvitation.objects.filter(domain=self.domain).count()
@property
def column_names(self):
return [
_('Created On'),
_('Phone Number'),
_('Status'),
_('Expiration Date'),
_('Application'),
_('Phone Type'),
]
def format_status(self, invitation):
if invitation.status == SelfRegistrationInvitation.STATUS_REGISTERED:
registered_date = (ServerTime(invitation.registered_date)
.user_time(self.project_timezone)
.done()
.strftime(SERVER_DATETIME_FORMAT))
return _("Registered on %(date)s") % {'date': registered_date}
else:
return {
SelfRegistrationInvitation.STATUS_PENDING: _("Pending"),
SelfRegistrationInvitation.STATUS_EXPIRED: _("Expired"),
}.get(invitation.status)
@property
def paginated_list(self):
invitations = SelfRegistrationInvitation.objects.filter(
domain=self.domain
).order_by('-created_date')
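        # Cache DocInfo lookups since many invitations share the same app.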
doc_info_cache = {}
for invitation in invitations[self.skip:self.skip + self.limit]:
if invitation.app_id in doc_info_cache:
doc_info = doc_info_cache[invitation.app_id]
else:
doc_info = get_doc_info_by_id(self.domain, invitation.app_id)
doc_info_cache[invitation.app_id] = doc_info
yield {
'itemData': {
'id': invitation.pk,
'created_date': (ServerTime(invitation.created_date)
.user_time(self.project_timezone)
.done()
.strftime(SERVER_DATETIME_FORMAT)),
'phone_number': '+%s' % invitation.phone_number,
'status': self.format_status(invitation),
'expiration_date': invitation.expiration_date.strftime(SERVER_DATE_FORMAT),
'app_name': doc_info.display,
'app_link': doc_info.link,
'phone_type': dict(SelfRegistrationInvitation.PHONE_TYPE_CHOICES).get(invitation.phone_type),
},
'template': 'invitations-template',
}
def post(self, *args, **kwargs):
if self.request.POST.get('action') == 'invite':
if not self.domain_object.sms_mobile_worker_registration_enabled:
return self.get(*args, **kwargs)
if self.invitations_form.is_valid():
phone_numbers = self.invitations_form.cleaned_data.get('phone_numbers')
app_id = self.invitations_form.cleaned_data.get('app_id')
custom_registration_message = self.invitations_form.cleaned_data.get('custom_registration_message')
result = SelfRegistrationInvitation.initiate_workflow(
self.domain,
[SelfRegistrationUserInfo(p) for p in phone_numbers],
app_id=app_id,
custom_first_message=custom_registration_message,
android_only=self.invitations_form.android_only,
require_email=self.invitations_form.require_email,
)
success_numbers, invalid_format_numbers, numbers_in_use = result
if success_numbers:
messages.success(
self.request,
_("Invitations sent to: %(phone_numbers)s") % {
'phone_numbers': ','.join(success_numbers),
}
)
if invalid_format_numbers:
messages.error(
self.request,
_("Invitations could not be sent to: %(phone_numbers)s. "
"These number(s) are in an invalid format.") % {
'phone_numbers': ','.join(invalid_format_numbers)
}
)
if numbers_in_use:
messages.error(
self.request,
_("Invitations could not be sent to: %(phone_numbers)s. "
"These number(s) are already in use.") % {
'phone_numbers': ','.join(numbers_in_use)
}
)
return self.get(*args, **kwargs)
else:
if not self.domain_object.sms_mobile_worker_registration_enabled:
raise Http404()
return self.paginate_crud_response
class InvitationAppInfoView(View, DomainViewMixin):
urlname = 'sms_registration_invitation_app_info'
@property
@memoized
def app_id(self):
app_id = self.kwargs.get('app_id')
if not app_id:
raise Http404()
return app_id
@property
def token(self):
return self.app_id
@property
@memoized
def invitation(self):
return SelfRegistrationInvitation.by_token(self.token)
@property
@memoized
def odk_url(self):
try:
odk_url = SelfRegistrationInvitation.get_app_odk_url(self.domain, self.app_id)
except Http404:
odk_url = None
if odk_url:
return odk_url
if self.invitation:
# There shouldn't be many instances of this. Once we stop getting these asserts,
# we can stop supporting the old way of looking up the SelfRegistrationInvitation
# by token, and only support the new way of looking up the app by app id.
_assert = soft_assert('@'.join(['gcapalbo', 'dimagi.com']), exponential_backoff=False)
_assert(False, "InvitationAppInfoView references invitation token")
if self.invitation.odk_url:
return self.invitation.odk_url
raise Http404()
def get(self, *args, **kwargs):
url = str(self.odk_url).strip()
response = 'ccapp: %s signature: %s' % (url, sign(url))
response = base64.b64encode(response)
return HttpResponse(response)
class IncomingBackendView(View):
def dispatch(self, request, api_key, *args, **kwargs):
try:
api_user = ApiUser.get('ApiUser-%s' % api_key)
except ResourceNotFound:
return HttpResponse(status=401)
if api_user.doc_type != 'ApiUser' or not api_user.has_permission(PERMISSION_POST_SMS):
return HttpResponse(status=401)
return super(IncomingBackendView, self).dispatch(request, api_key, *args, **kwargs)
class NewIncomingBackendView(View):
def __init__(self, *args, **kwargs):
super(NewIncomingBackendView, self).__init__(*args, **kwargs)
self.domain = None
self.backend_couch_id = None
@property
def backend_class(self):
"""
Should return the model class of the backend (a subclass of SQLMobileBackend).
"""
raise NotImplementedError("Please implement this method")
@method_decorator(csrf_exempt)
def dispatch(self, request, api_key, *args, **kwargs):
try:
self.domain, self.backend_couch_id = SQLMobileBackend.get_backend_info_by_api_key(
self.backend_class.get_api_id(),
api_key
)
except SQLMobileBackend.DoesNotExist:
return HttpResponse(status=401)
return super(NewIncomingBackendView, self).dispatch(request, api_key, *args, **kwargs)
|
bsd-3-clause
| 2,815,698,352,287,126,000 | 35.598726 | 118 | 0.60135 | false |
cfobel/sconspiracy
|
Python/racy/plugins/templating/wix_project.py
|
1
|
7560
|
import os
import uuid
import functools
from os.path import join as opjoin
import racy
import glob
import os.path
from racy.rproject import ConstructibleRacyProject, LibName
from racy.rutils import memoize, run_once
from global_dico import *
from templating import *
class WixProjectError(racy.RacyProjectError):
pass
class WixProject(ConstructibleRacyProject):
var_name = 'WIX_PRJ'
prj = ''
call_prj_deps ={}
qt = False
wx = False
def __init__(self, prj, config=None, **kwargs):
if not isinstance(prj,ConstructibleRacyProject):
msg = 'WIX take a ConstructibleRacyProject as first argument'
raise WixProjectError(self, msg)
opts = prj.opts_source
self.prj = prj
self.graphviz_buffer = ''
super(WixProject, self).__init__(
build_options = opts,
config = config,
**prj.projects_db.prj_args
)
@property
def name (self):
name = super(WixProject, self).name
return LibName.SEP.join( [self.var_name, name])
@run_once
def configure_env(self):
super(WixProject, self).configure_env()
@memoize
def result(self, deps_results=True):
result = []
self.configure_env()
return result
def split_project_path(self, path):
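        # Locate the path under one of the racy code roots and return its
        # relative components, keeping the root's own directory name first.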
res = []
for i in racy.renv.dirs.code:
if path.startswith(i):
temp = path.replace(i, '')
def_dir = i.split(os.path.sep)[-1]
res = temp.split(os.path.sep)
res[0] = def_dir
return res
def create_prj (self, prj):
        # Collect the name and type of each recursive dependency of the project
prj_deps = []
for i in prj.rec_deps:
prj_deps.append( { 'PRJ_NAME' : i.base_name ,
'PRJ_TYPE' : i.get_lower('TYPE'), })
self.call_prj_deps[prj.base_name] = {
'PRJ_NAME' : prj.base_name,
'PRJ_FULL_NAME' : prj.full_name,
'PRJ_VERSION_NAME' : prj.versioned_name,
'PRJ_TYPE' : prj.get_lower('TYPE'),
'PRJ_TARGET' : prj.target_path,
}
profile_without_rc = self.prj.get('WIX_PROFILE').replace("rc",'')
profile_without_rc = profile_without_rc[1:]
profile_path = os.path.join('Bundles', self.prj.versioned_name,
profile_without_rc)
icon_path = self.prj.get('WIX_ICON')
if icon_path:
icon_path = self.prj.get_path(icon_path)
        # This dictionary contains all variables for the templates
dico = {
'PRJ_INSTALL_DIR' : prj.install_path,
'PRJ_VERSION_NAME' : prj.versioned_name,
'PRJ_ROOT_DIR' : prj.root_path,
'PRJ_NAME' : prj.base_name,
'PRJ_FULL_NAME' : prj.full_name,
'HEADERS' : prj.get_includes(False),
'SOURCES' : prj.get_sources(False),
'OTHERS_FILE' : prj.get_others(),
'PRJ_TYPE' : prj.get_lower('TYPE'),
'RACY_CLEAN_CMD' : racy.get_racy_cmd() +' '+ prj.base_name,
'CALLING_PROJECT' : self.prj.base_name,
'CALLING_PROJECT_VERSION_NAME' : self.prj.versioned_name,
'CALLING_PROJECT_FULL_NAME' : self.prj.full_name,
'CALLING_PROJECT_DEPS' : self.call_prj_deps,
'CALLING_PROJECT_VERSION' : self.prj.version,
'CALLING_PROJECT_PROFILE' : profile_path,
'CALLING_PROJECT_ICON' : icon_path,
'DEPS_INCLUDES' : prj.deps_include_path,
'VERSION' : prj.version,
'ARCH' : self.prj.get_lower('ARCH'),
'DEPS' : prj_deps,
'PROJECT_SPLIT_PATH' : self.split_project_path(prj.root_path),
'uuid' : functools.partial(uuid.uuid5, uuid.NAMESPACE_OID),
}
dico.update(dico_g)
dico_vars = dico
dico_prj = dico_prj_template['dico_create_wix']['yes']
dico_vars = self.gen_file(dico_vars, dico_prj)
racy.print_msg("Create {0} wix file".format(prj.base_name))
def create_extra_dir(self, tuple_dir_targets):
folder,targets = tuple_dir_targets
if not targets == []:
self.call_prj_deps[folder] = {
'PRJ_NAME' : '',
'PRJ_FULL_NAME' : '',
'PRJ_VERSION_NAME' : '',
}
dico = {
'CALLING_PROJECT_VERSION_NAME' : self.prj.versioned_name,
'CALLING_PROJECT' : self.prj.base_name,
'TARGETS': targets,
'uuid' : functools.partial(uuid.uuid5, uuid.NAMESPACE_OID),
'EXTRA_NAME' : folder,
'ARCH' : self.prj.get_lower('ARCH'),
}
dico.update(dico_g)
dico_prj = {
'dirs':
[
('WIX_DIR' ,'${WIX_INSTALL_DIR}/${CALLING_PROJECT}/'),
('ROOT_TMP_DIR', '${TEMPLATING_PLUGIN_PATH}/rc/'),
('TPL_DIR' , '${ROOT_TMP_DIR}/wix/'),
],
'template_prj':
[
('${TPL_DIR}/extra.wxs', '${WIX_DIR}/${EXTRA_NAME}.wxs'),
]
}
self.gen_file(dico, dico_prj)
racy.print_msg("Create "+ folder+ " wix file")
def create_targets(self,path):
targets=[]
l = glob.glob(path+'\\*')
for i in l:
if os.path.isdir(i):
targets.extend(self.create_targets(i))
else:
if not i.endswith('.exe'):
targets.append(i)
return targets
def create_install_targets(self,list_dir):
# list targets = [(dir, list_targets),...]
list_targets = []
install_dir = racy.renv.dirs.install
for tdir in list_dir:
dir_path = opjoin(install_dir,tdir)
if os.path.exists(dir_path):
targets = self.create_targets(dir_path)
list_targets.append((tdir,targets))
return list_targets
def gen_file(self, dico_vars, dico_prj):
# Added vars
if dico_prj.has_key('vars'):
dico_vars = add_vars(dico_prj['vars'], dico_vars)
# Added dirs
if dico_prj.has_key('dirs'):
dico_vars = add_dirs_template(dico_prj['dirs'], dico_vars)
# Added template_prj
if dico_prj.has_key('template_prj'):
add_template_prj(dico_prj['template_prj'], dico_vars)
return dico_vars
def install (self, opts = ['rc', 'deps'] ):
result = self.result(deps_results = 'deps' in opts)
for i in self.prj.rec_deps:
if i.get_lower('TYPE') in ['exec', 'bundle', 'shared']:
self.create_prj(i)
extra_dirs = ['bin','Python','PythonHome','qtplugins']
for i in self.create_install_targets(extra_dirs):
self.create_extra_dir(i)
self.create_prj(self.prj)
return result
|
bsd-3-clause
| -4,741,032,357,835,840,000 | 29.857143 | 81 | 0.503439 | false |
compops/lic-thesis
|
examplesForThesis-python/ch3-example-hwsvllscoreinfo.py
|
1
|
3338
|
########################################################################
#
# Code to reproduce examples from the Licentiate's thesis:
# Sequential Monte Carlo for inference in nonlinear state space models
#
# written by
# Johan Dahlin ( johan.dahlin (at) liu.se )
#
# Available from:
# http://users.isy.liu.se/en/rt/johda87/
#
# Copyright (c) 2014 Johan Dahlin
#
# Run this code together with the corresponding R-file to reproduce:
#
# Estimation in the Hull-White SV model
# Example 3.7 in Section 3.4.2
#
########################################################################
from smc import *
from classes import *
from helpers import *
import pandas
import numpy as np
# Explicit imports for the names used below (arange/vstack come from numpy,
# subplot/plot from matplotlib); the star imports above may not provide them.
from matplotlib.pyplot import subplot, plot
########################################################################
# Arrange the data structures
########################################################################
data = stData();
smc = smcSampler();
par = stParameters();
########################################################################
# Setup the system
########################################################################
sys = stSystemHW()
sys.version = "standard"
sys.par = np.zeros((3,1))
sys.par[0] = 0.996;
sys.par[1] = 0.129;
sys.par[2] = 0.837;
sys.T = 3567;
########################################################################
# Setup the parameters for the algorithm
########################################################################
par.fileprefix = "hw"
par.nPars = 1;
smc.nPart = 5000;
smc.resamplingType = "systematic"; # multinomial or systematic
smc.filterType = "bootstrap"; # kalman or bootstrap or fullyadapted
smc.smootherType = "fixedlag"; # kalman or filtersmoother or fixedlag or ffbsm (not implemented)
smc.flVersion = "full"; # filtersmoother or neglectcross or full
smc.fixedLag = 12;
smc.onlydiagInfo = 0;
smc.makeInfoPSD = 1;
smc.resampFactor = 2;
########################################################################
# Generate data from the model
########################################################################
par.dataset = 0;
data.sample(sys,np.zeros(sys.T))
data.y = 100 * np.loadtxt("data/seOMXdata.csv",delimiter=",");
########################################################################
# Make grid and estimate ll, score and info
########################################################################
# Make the grid
xx = np.arange(0.70,1.00,0.005);
# Allocate vectors
ll = np.zeros(len(xx))
score = np.zeros(len(xx))
info = np.zeros(len(xx))
ng = np.zeros(len(xx))
# Run the FL smoother for each grid point
for ii in range(0,len(xx)):
sys.par[0] = xx[ii];
smc.flPS(data,sys,par)
ll[ii] = smc.ll;
score[ii] = smc.score;
info[ii] = smc.infom;
print((ii,len(xx)))
# Plot the score and observed information matrix
subplot(2,1,1); plot(xx,score)
subplot(2,1,2); plot(xx,info)
# Export data to R for plotting
out = np.vstack((xx,ll,score,info)).transpose()
pandas.DataFrame(out).to_csv("ch3-example-hwsvllscoreinfo.csv");
########################################################################
# End of file
########################################################################
|
gpl-3.0
| 7,931,684,923,885,638,000 | 31.105769 | 110 | 0.441881 | false |
tucanae47/Cosmos_streaming
|
Cosmos_Streaming.py
|
1
|
10966
|
#http://www.rasterbar.com/products/libtorrent/manual.html
import libtorrent as lt
import time
import types
import sys
from subprocess import *
import thread
import shutil
import tempfile
import os.path as pt
from time import sleep
from BeautifulSoup import BeautifulSoup
import requests
class Torrent_Cosmos:
def __init__(self):
self.cache={}
self.completed=False
self.ses=None
self.h=None
self.piecestart=None
self.pieceend=None
self.offset1=None
self.offset2=None
self.piecesperite=None
self.outputcmd=None
self.torrents=[]
#self.cur_selected=None
def searchForCosmos(self):
r= requests.get('https://thepiratebay.se/search/cosmos%20space%20time%20odyssey/0/7/200')
soup = BeautifulSoup(r.content)
table = soup.find(lambda tag: tag.name=='table' and tag.has_key('id') and tag['id']=="searchResult")
rows = table.findAll(lambda tag: tag.name=='tr' and not tag.has_key('class') )
self.torrents=[]
i=0
for tr in rows:
ithItem=tr.findChildren('td')
seeders=ithItem[2].getText()
leechers=ithItem[3].getText()
name=ithItem[1].findAll('a')[0].getText()
magnet=ithItem[1].findAll('a')[1]['href']
torrent=ithItem[1].findAll('a')[0]['href']
self.torrents.append({'s':seeders,'l':leechers, 'name':name,'magnet':magnet,'torrent':torrent})
print "%d seeders:%s, leechers:%s, name:%s "%(i,leechers, seeders,name)
i+=1
def selectItemAndPlay(self, ith):
selected=self.torrents[ith]
print 'loading magnet', selected['magnet']
self.start(selected['magnet'],0,'/tmp','mplayer -fs -')
#self.start(selected['magnet'],0,'/tmp','vlc -f -')
def magnet2torrent(self,magnet, output_name=None):
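        """Fetch the metadata for a magnet link via a temporary session and
        write it out as a .torrent file, returning the output path."""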
if output_name and \
not pt.isdir(output_name) and \
not pt.isdir(pt.dirname(pt.abspath(output_name))):
print("Invalid output folder: " + pt.dirname(pt.abspath(output_name)))
print("")
sys.exit(0)
tempdir = tempfile.mkdtemp()
ses = lt.session()
params = {
'save_path': tempdir,
'duplicate_is_error': True,
'storage_mode': lt.storage_mode_t(2),
'paused': False,
'auto_managed': True,
'duplicate_is_error': True
}
handle = lt.add_magnet_uri(ses, magnet, params)
print("Downloading Metadata (this may take a while)")
while (not handle.has_metadata()):
try:
sleep(1)
except KeyboardInterrupt:
print("Aborting...")
ses.pause()
print("Cleanup dir " + tempdir)
shutil.rmtree(tempdir)
sys.exit(0)
ses.pause()
print("Done")
torinfo = handle.get_torrent_info()
torfile = lt.create_torrent(torinfo)
output = pt.abspath(torinfo.name() + ".torrent")
if output_name:
if pt.isdir(output_name):
output = pt.abspath(pt.join(
output_name, torinfo.name() + ".torrent"))
elif pt.isdir(pt.dirname(pt.abspath(output_name))):
output = pt.abspath(output_name)
print("Saving torrent file here : " + output + " ...")
torcontent = lt.bencode(torfile.generate())
f = open(output, "wb")
f.write(lt.bencode(torfile.generate()))
f.close()
print("Saved! Cleaning up dir: " + tempdir)
ses.remove_torrent(handle)
shutil.rmtree(tempdir)
return output
def printstatus(self):
state_str = ['queued', 'checking', 'downloading metadata', 'downloading', 'finished', 'seeding', 'allocating', 'checking fastresume']
s = self.h.status()
print >> sys.stderr,'%.2f%% complete (down: %.1f kb/s up: %.1f kB/s peers: %d) %s\n' % (s.progress * 100, s.download_rate / 1000, s.upload_rate / 1000, s.num_peers, state_str[s.state]),
#if s.state == 4:
# break
sys.stdout.flush()
l = ''
i = 0
for p in s.pieces:
if i >= self.piecestart and i <= self.pieceend:
if p == True:
l = l + '1'
if p == False:
l = l + '0'
i = i+1
print >> sys.stderr,l
def addnewpieces(self):
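        # Keep at most piecesperite pieces of the selected file in flight and
        # bump the first still-missing piece to top priority (7) so pieces
        # arrive roughly in playback order.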
prio = self.h.piece_priorities()
s = self.h.status()
downloading = 0
if len(s.pieces) == 0:
return
for piece in range(self.piecestart,self.pieceend+1):
if prio[piece] != 0 and s.pieces[piece]==False:
downloading = downloading+1
for piece in range(self.piecestart,self.pieceend+1):
if prio[piece] == 0 and downloading < self.piecesperite:
print >> sys.stderr,'downloading piece ',piece
self.h.piece_priority(piece,1)
downloading = downloading+1
for piece in range(self.piecestart,self.pieceend+1):
if prio[piece] != 0 and s.pieces[piece]==False:
print >> sys.stderr,'high prio ',piece
self.h.piece_priority(piece,7)
break
def getpiece(self,i):
#global cache
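        # Serve pieces already read ahead from the cache, dropping each one
        # after it is handed out to bound memory use.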
if i in self.cache:
ret = self.cache[i]
self.cache[i] = 0
return ret
while True:
s = self.h.status()
if len(s.pieces)==0:
break
if s.pieces[i]==True:
break
time.sleep(.1)
self.h.read_piece(i)
while True:
#printstatus()
#addnewpieces()
piece = self.ses.pop_alert()
if isinstance(piece, lt.read_piece_alert):
if piece.piece == i:
#sys.stdout.write(piece.buffer)
return piece.buffer
else:
print >> sys.stderr,'store somewhere'
self.cache[piece.piece] = piece.buffer
break
time.sleep(.1)
def writethread(self):
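        # Stream the selected file: trim the first and last pieces to the
        # file's byte offsets and pipe the bytes to the output command.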
stream = 0
print "state"
for piece in range(self.piecestart,self.pieceend+1):
buf=self.getpiece(piece)
if piece==self.piecestart:
buf = buf[self.offset1:]
if piece==self.pieceend:
buf = buf[:self.offset2]
print >> sys.stderr, 'output',piece,len(buf)
if self.outputcmd=='-':
stream = sys.stdout
else:
if stream == 0:
stream = Popen(self.outputcmd.split(' '), stdin=PIPE).stdin
try:
stream.write(buf)
except Exception, err:
print err
self.ses.remove_torrent(self.h)
self.completed = True
exit(0)
time.sleep(.1)
self.ses.remove_torrent(self.h)
self.completed = True
def start(self,magnet,fileid,outdir,_outputcmd):
self.outputcmd=_outputcmd
torrent=self.magnet2torrent(magnet)
info = lt.torrent_info(torrent)
#self.ses = lt.session()
print torrent, outdir, self.ses,fileid,self.outputcmd
#cur_t_handle=self.ses.add_torrent({'url':torrent, 'save_path':outdir})
#print cur_t_handle
#info=cur_t_handle.get_torrent_info()
self.piecesperite = 40*1024*1024/info.piece_length() # 40 MB
print >> sys.stderr, 'piecesperite',self.piecesperite
print >> sys.stderr, 'info.piece_length()',info.piece_length()
sizes = []
i = 0
for f in info.files():
self.piecestart = f.offset/info.piece_length()
self.pieceend = (f.offset+f.size)/info.piece_length()
sizes.append(f.size)
print >> sys.stderr, i,f.path,f.size,f.offset,self.piecestart,self.pieceend
i=i+1
if fileid == 'list':
return
if fileid == 'max':
fileid = sizes.index(max(sizes))
else:
fileid = int(fileid)
f = info.files()[fileid]
print >> sys.stderr, f.path
self.piecestart = f.offset/info.piece_length()
self.pieceend = (f.offset+f.size)/info.piece_length()
self.offset1 = f.offset%info.piece_length() #how many bytes need to be removed from the 1st piece
self.offset2 = ((f.offset+f.size)%info.piece_length()) #how many bytes need we keep from the last piece
print >> sys.stderr,self.piecestart,self.pieceend,self.offset1,self.offset2,info.piece_length()
print >> sys.stderr,(self.pieceend-self.piecestart+1)*info.piece_length()-(self.offset1+self.offset2),f.size
self.ses = lt.session()
state = None
#state = lt.bdecode(open(state_file, "rb").read())
self.ses.start_dht(state)
self.ses.add_dht_router("router.bittorrent.com", 6881)
self.ses.add_dht_router("router.utorrent.com", 6881)
self.ses.add_dht_router("router.bitcomet.com", 6881)
self.ses.listen_on(6881, 6891)
self.ses.set_alert_mask(lt.alert.category_t.storage_notification)
self.h = self.ses.add_torrent({'ti': info, 'save_path': outdir})
for i in range(info.num_pieces()):
self.h.piece_priority(i,0)
print >> sys.stderr,'starting', self.h.name()
for i in range(self.piecestart,self.piecestart+self.piecesperite):
if i <= self.pieceend:
self.h.piece_priority(i,7)
print >> sys.stderr,'downloading piece '+str(i)
thread.start_new_thread(self.writethread,())
while not self.completed:
self.printstatus()
self.addnewpieces()
time.sleep(1)
|
mit
| 5,363,241,110,164,181,000 | 40.695817 | 197 | 0.49544 | false |
ttrifonov/EventBrain
|
src/eventbrain/contrib/rabbitmq/pika_wrapper.py
|
1
|
7592
|
import pika
import logging
import time
LOG = logging.getLogger(__name__)
class ChannelWrapper(object):
"""
    Wrapper around the pika AMQP library for publishing and consuming messages.
"""
reconnect_timeout = 3
def __init__(self, channel_id, exchange_type, callback=None,
publish=False, manual_ack=False, **kwargs):
(exch, rtg_key) = self._parse_id(channel_id)
self.routing_wildcard_match = "match_all" in kwargs and \
kwargs['match_all']
self.channel_id = exch
self.routing_key = rtg_key
self.exchange_type = exchange_type
self.callback = callback
self.publish = publish
self.manual_ack = manual_ack
self._create(channel_id=self.channel_id, exchange_type=exchange_type,
callback=callback, publish=publish,
manual_ack=manual_ack, **kwargs)
self.stopping = False
def _parse_id(self, id):
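        # A channel id has the form "exchange[:routing:key:parts]"; the parts
        # after the first ':' are re-joined with '.' as the routing key.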
if (":" in id):
# break to exchange and rtg key
tokens = id.split(":")
else:
tokens = [id]
return (tokens[0], ".".join(tokens[1:]))
def _create(self, channel_id, exchange_type, callback,
publish, manual_ack, **kwargs):
self.queue = None
def on_connected(connection):
LOG.info("Connected to %s:%s" % (self.host,
self.vhost))
connection.channel(on_channel_open)
def on_closed(frame):
if not self.stopping:
# unexpected close, try reconnect
LOG.warn("Invoked unexpected on_close, "
"trying to reconnect")
self.connection.add_timeout(self.reconnect_timeout,
self._reconnect)
def on_backpressure():
LOG.warn("Backpressure detected")
def push(sender, data):
LOG.info("Pushing data from [%s]:%s to exc: %s" % \
(sender,
"%s%s" % (self.routing_key + "." if
self.routing_key else "",
sender),
self.channel_id))
try:
self.channel.basic_publish(exchange=self.channel_id,
routing_key="%s%s" % (self.routing_key + "." if
self.routing_key else "",
sender),
body=data)
except Exception, ex:
LOG.exception(unicode(ex))
self._reconnect()
def on_channel_open(channel):
LOG.info("Channel open")
self.channel = channel
self.queue = channel
self.queue.push = push
self.queue.escalate = self.publish_once
channel.exchange_declare(exchange=self.channel_id,
type=exchange_type,
callback=on_exchange_declared)
def on_exchange_declared(frame):
LOG.info("Exchange_declared: %s" % self.channel_id)
if not publish:
# We have a decision, or listener, bind a queue
self.channel.queue_declare(durable=True,
exclusive=False,
auto_delete=False,
callback=on_queue_declared)
def on_queue_declared(frame):
LOG.info("Queue declared on exchange %s:%s [%s]" % (
self.channel_id,
self.routing_key,
exchange_type))
if (self.routing_wildcard_match):
if (self.routing_key):
routing_key = "%s.#" % self.routing_key
else:
routing_key = "#"
else:
routing_key = self.routing_key
self.channel.queue_bind(exchange=self.channel_id,
queue=frame.method.queue,
routing_key=routing_key)
self.channel.basic_consume(on_consume, no_ack=not manual_ack,
queue=frame.method.queue)
def on_consume(channel, method_frame, header_frame, body):
if manual_ack:
channel.basic_ack(delivery_tag=method_frame.delivery_tag)
self.on_receive(self.channel_id, method_frame, header_frame, body)
self.host = kwargs.get('host', "localhost")
self.vhost = kwargs.get("vhost", "/")
params = pika.ConnectionParameters(host=self.host,
virtual_host=self.vhost)
if "user" in kwargs.keys() and "password" in kwargs.keys():
self.user = kwargs['user']
self.password = kwargs['password']
credentials = pika.PlainCredentials(self.user, self.password)
params.credentials = credentials
self.params = params
reconnect = pika.reconnection_strategies.SimpleReconnectionStrategy()
self.connection = pika.SelectConnection(params,
on_open_callback=on_connected,
reconnection_strategy=reconnect)
#self.connection.add_on_close_callback(on_closed)
self.connection.add_backpressure_callback(on_backpressure)
def _reconnect(self):
LOG.info("Trying reconnect...")
self.queue = None
self.connection.ioloop.stop()
self.connection.close()
self._create(channel_id=self.channel_id,
exchange_type=self.exchange_type,
callback=self.callback, publish=self.publish,
manual_ack=self.manual_ack, **self.connection_kwargs)
self.connect(**self.connection_kwargs)
def on_receive(self, channel, method, properties, body):
if self.callback:
self.callback(method.routing_key,
body,
method=method,
properties=properties)
def connect(self, **kwargs):
try:
self.connection_kwargs = kwargs
self.connection.ioloop.start()
except Exception, ex:
LOG.exception("Channel error: %r" % str(ex))
# retry
if not self.connection.closing:
self._reconnect(**self.connection_kwargs)
def stop(self, **kwargs):
self.stopping = True
# connection.ioloop is blocking, this will stop and exit the app
if (self.connection):
LOG.info("Closing connection")
self.connection.ioloop.stop()
self.connection.close()
def publish_once(self, sender, receiver, data):
LOG.info("Escalating from %s to %s" % (sender, receiver))
try:
(exch, _) = self._parse_id(receiver)
def on_channel(channel):
LOG.info("New channel opened: %s" % receiver)
channel.exchange_declare(exchange=exch,
type='topic')
channel.basic_publish(exchange=exch,
routing_key=sender,
body=data)
self.connection.channel(on_channel)
except Exception, ex:
LOG.exception(ex)
|
apache-2.0
| -6,043,318,420,473,618,000 | 39.169312 | 78 | 0.498683 | false |
potassco/clingo
|
libpyclingo/clingo/theory_atoms.py
|
1
|
7053
|
'''
Functions and classes to work with theory atoms.
Examples
--------
>>> from clingo.control import Control
>>>
>>> ctl = Control()
>>> ctl.add('base', [], """\\
... #theory example {
... t { };
... &a/0 : t, head
... }.
... {c}.
... &a { t: c }.
... """)
>>> ctl.ground([('base', [])])
>>> atm = next(ctl.theory_atoms)
>>> print(atm)
&a{t: c}
>>> elm = atm.elements[0]
>>> print(elm)
t: c
'''
from typing import List, Optional, Tuple
from enum import Enum
from functools import total_ordering
from ._internal import _c_call, _c_call2, _lib, _str, _to_str
__all__ = [ 'TheoryAtom', 'TheoryElement', 'TheoryTerm', 'TheoryTermType' ]
class TheoryTermType(Enum):
'''
Enumeration of theory term types.
'''
Function = _lib.clingo_theory_term_type_function
'''
For a function theory terms.
'''
List = _lib.clingo_theory_term_type_list
'''
For list theory terms.
'''
Number = _lib.clingo_theory_term_type_number
'''
For numeric theory terms.
'''
Set = _lib.clingo_theory_term_type_set
'''
For set theory terms.
'''
Symbol = _lib.clingo_theory_term_type_symbol
'''
For symbolic theory terms (symbol here means the term is a string).
'''
Tuple = _lib.clingo_theory_term_type_tuple
'''
For tuple theory terms.
'''
@total_ordering
class TheoryTerm:
'''
`TheoryTerm` objects represent theory terms.
Theory terms have a readable string representation, implement Python's rich
comparison operators, and can be used as dictionary keys.
'''
def __init__(self, rep, idx):
self._rep = rep
self._idx = idx
def __hash__(self):
return self._idx
def __eq__(self, other):
return self._idx == other._idx
def __lt__(self, other):
return self._idx < other._idx
def __str__(self):
return _str(_lib.clingo_theory_atoms_term_to_string_size,
_lib.clingo_theory_atoms_term_to_string,
self._rep, self._idx)
def __repr__(self):
return f'TheoryTerm({self._rep!r})'
@property
def arguments(self) -> List['TheoryTerm']:
'''
The arguments of the term (for functions, tuples, list, and sets).
'''
args, size = _c_call2('clingo_id_t*', 'size_t', _lib.clingo_theory_atoms_term_arguments, self._rep, self._idx)
return [TheoryTerm(self._rep, args[i]) for i in range(size)]
@property
def name(self) -> str:
'''
The name of the term (for symbols and functions).
'''
return _to_str(_c_call('char*', _lib.clingo_theory_atoms_term_name, self._rep, self._idx))
@property
def number(self) -> int:
'''
The numeric representation of the term (for numbers).
'''
return _c_call('int', _lib.clingo_theory_atoms_term_number, self._rep, self._idx)
@property
def type(self) -> TheoryTermType:
'''
The type of the theory term.
'''
type_ = _c_call('clingo_theory_term_type_t', _lib.clingo_theory_atoms_term_type, self._rep, self._idx)
return TheoryTermType(type_)
@total_ordering
class TheoryElement:
'''
Class to represent theory elements.
Theory elements have a readable string representation, implement Python's rich
comparison operators, and can be used as dictionary keys.
'''
def __init__(self, rep, idx):
self._rep = rep
self._idx = idx
def __hash__(self):
return self._idx
def __eq__(self, other):
return self._idx == other._idx
def __lt__(self, other):
return self._idx < other._idx
def __str__(self):
return _str(_lib.clingo_theory_atoms_element_to_string_size,
_lib.clingo_theory_atoms_element_to_string,
self._rep, self._idx)
def __repr__(self):
return f'TheoryElement({self._rep!r})'
@property
def condition(self) -> List[int]:
'''
The condition of the element in form of a list of program literals.
'''
cond, size = _c_call2('clingo_literal_t*', 'size_t', _lib.clingo_theory_atoms_element_condition,
self._rep, self._idx)
return [cond[i] for i in range(size)]
@property
def condition_id(self) -> int:
'''
Each condition has an id, which is a temporary program literal. This id
can be passed to `clingo.propagator.PropagateInit.solver_literal` to
obtain a corresponding solver literal.
'''
return _c_call('clingo_literal_t', _lib.clingo_theory_atoms_element_condition_id, self._rep, self._idx)
@property
def terms(self) -> List[TheoryTerm]:
'''
The tuple of the element.
'''
terms, size = _c_call2('clingo_id_t*', 'size_t', _lib.clingo_theory_atoms_element_tuple, self._rep, self._idx)
return [TheoryTerm(self._rep, terms[i]) for i in range(size)]
@total_ordering
class TheoryAtom:
'''
Class to represent theory atoms.
Theory atoms have a readable string representation, implement Python's rich
comparison operators, and can be used as dictionary keys.
'''
def __init__(self, rep, idx):
self._rep = rep
self._idx = idx
def __hash__(self):
return self._idx
def __eq__(self, other):
return self._idx == other._idx
def __lt__(self, other):
return self._idx < other._idx
def __str__(self):
return _str(_lib.clingo_theory_atoms_atom_to_string_size,
_lib.clingo_theory_atoms_atom_to_string,
self._rep, self._idx)
def __repr__(self):
return f'TheoryAtom({self._rep!r})'
@property
def elements(self) -> List[TheoryElement]:
'''
The elements of the atom.
'''
elems, size = _c_call2('clingo_id_t*', 'size_t', _lib.clingo_theory_atoms_atom_elements, self._rep, self._idx)
return [TheoryElement(self._rep, elems[i]) for i in range(size)]
@property
def guard(self) -> Optional[Tuple[str, TheoryTerm]]:
'''
The guard of the atom or None if the atom has no guard.
'''
if not _c_call('bool', _lib.clingo_theory_atoms_atom_has_guard, self._rep, self._idx):
return None
conn, term = _c_call2('char*', 'clingo_id_t', _lib.clingo_theory_atoms_atom_guard, self._rep, self._idx)
return (_to_str(conn), TheoryTerm(self._rep, term))
@property
def literal(self) -> int:
'''
The program literal associated with the atom.
'''
return _c_call('clingo_literal_t', _lib.clingo_theory_atoms_atom_literal, self._rep, self._idx)
@property
def term(self) -> TheoryTerm:
'''
The term of the atom.
'''
term = _c_call('clingo_id_t', _lib.clingo_theory_atoms_atom_term, self._rep, self._idx)
return TheoryTerm(self._rep, term)
|
mit
| -6,718,412,580,768,552,000 | 28.51046 | 118 | 0.572097 | false |
astrosat/cOSMos
|
cosmos/utils.py
|
1
|
2453
|
import geojson
from geojson import Feature, FeatureCollection
from geopy.geocoders import Nominatim
from shapely.geometry import shape
from cosmos.types import BoundingBox
def compute_geojson(data_type, elements):
geojson_features = ()
features = data_type['features'](elements)
tags = (element.get('tags') for element in elements)
try:
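        # Resolve the geojson geometry constructor (e.g. geojson.Point) by name.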
geometry = getattr(geojson, data_type['geometry'])
geojson_features = (Feature(geometry=geometry(feature), properties=tag)
for feature, tag in zip(features, tags))
except AttributeError:
raise AttributeError('Invalid geometry type in data_type.')
return FeatureCollection(list(geojson_features))
def extract_elements_from_osm(data, element_type):
return (element for element in data[
'elements'] if element['type'] == element_type)
def process_osm_output(data, data_type, format):
features = ()
tags = ()
elements = []
if isinstance(data, list):
for d in data:
elements.extend(
list(extract_elements_from_osm(d, data_type['element'])))
else:
elements = list(extract_elements_from_osm(data, data_type['element']))
if format == 'geojson':
return compute_geojson(data_type, elements)
else:
features = data_type['features'](elements)
tags = (element.get('tags') for element in elements)
return features, tags
def coords_for(name):
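    """Geocode a place name and return its bounding box(es) in
    (south, west, north, east) order, as expected by overpass."""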
geocoder = Nominatim()
location = geocoder.geocode(name, geometry='geojson')
try:
geometry = shape(location.raw['geojson'])
# Coordinates have to be flipped in order to work in overpass
if geometry.geom_type == 'Polygon':
west, south, east, north = geometry.bounds
return BoundingBox(south, west, north, east)
elif geometry.geom_type == 'MultiPolygon':
bboxs = (BoundingBox(*(g.bounds[0:2][::-1] + g.bounds[2:][::-1]))
for g in geometry)
return bboxs
elif geometry.geom_type == 'Point':
south, north, west, east = (float(coordinate)
for coordinate in
location.raw['boundingbox'])
return BoundingBox(south, west, north, east)
except (KeyError, AttributeError):
raise AttributeError(
'No bounding box available for this location name.')
|
mit
| -3,079,552,596,203,956,700 | 32.60274 | 79 | 0.615573 | false |
4mba/practice
|
exif-geotag/exif_reader.py
|
1
|
1947
|
# -*- coding: utf-8 -*-
"""
exif_reader
~~~~~~~~~~~~~~
Uses EXIF (EXchangeable Image File format) metadata from JPEG photos to
obtain the location-based information that PhotoLog will use.
:copyright: (c) 2013 by liks79 [http://www.github.com/liks79]
:license: MIT LICENSE 2.0, see license for more details.
"""
# Library to extract Exif information from digital camera image files.
# https://github.com/ianare/exif-py
import EXIF;
class EXIFReader :
file_path = ""
def __init__(self, file_path) :
""" EXIFReader 클래스 생성자 """
self.file_path = file_path
def get_thumbnails(self, file_path) :
"""EXIF 정에서 썸네일을 읽는 것이 가능할 경우 썸네일을 읽은 후 리턴한다. """
print "ThumbNails"
def get_gps_data(self, file_path):
"""EXIF 정보에서 GPS 정보를 읽어서 리턴한다. """
print "GPS Data"
def print_all(self, file_path):
"""모든 EXIF 정보를 STDOUT에 출력한다. """
# Open image file for reading (binary mode)
f = open(file_path, 'rb')
# Return Exif tags
tags = EXIF.process_file(f)
print "All Information of EXIF in " + file_path
for tag in tags.keys():
if tag not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename', 'EXIF MakerNote'):
print "Key: %s, value %s" % (tag, tags[tag])
#All Information of EXIF in photos/2012-07-12 12.12.12.jpg
#================================================
#Key: GPS GPSLongitude, value [0, 843/100, 0]
#Key: GPS GPSLatitude, value [51, 3003/100, 0]
#Key: Image GPSInfo, value 594
#Key: GPS GPSLatitudeRef, value N
#Key: GPS GPSAltitudeRef, value 0
#Key: GPS GPSTimeStamp, value [11, 12, 1181/100]
#Key: GPS GPSAltitude, value 1820/317
#Key: GPS GPSLongitudeRef, value W
|
mit
| -3,204,913,604,858,570,000 | 22.223684 | 91 | 0.576771 | false |
glennmatthews/cot
|
COT/utilities.py
|
1
|
6117
|
#!/usr/bin/env python
#
# utilities.py - General utility functions
#
# February 2017, Glenn F. Matthews
# Copyright (c) 2015-2017, 2019 the COT project developers.
# See the COPYRIGHT.txt file at the top-level directory of this distribution
# and at https://github.com/glennmatthews/cot/blob/master/COPYRIGHT.txt.
#
# This file is part of the Common OVF Tool (COT) project.
# It is subject to the license terms in the LICENSE.txt file found in the
# top-level directory of this distribution and at
# https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part
# of COT, including this file, may be copied, modified, propagated, or
# distributed except according to the terms contained in the LICENSE.txt file.
"""General-purpose utility functions for COT.
**Functions**
.. autosummary::
:nosignatures:
available_bytes_at_path
directory_size
pretty_bytes
tar_entry_size
to_string
"""
import errno
import logging
import os
import sys
try:
# Python 3.x
from shutil import disk_usage
except ImportError:
# Python 2.7
from psutil import disk_usage
import xml.etree.ElementTree as ET # noqa: N814
logger = logging.getLogger(__name__)
def available_bytes_at_path(path):
"""Get the available disk space in a given directory.
Args:
path (str): Directory path to check.
Returns:
int: Available space, in bytes
Raises:
OSError: if the specified path does not exist or is not readable.
"""
if not os.path.exists(path):
raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), path)
if not os.path.isdir(path):
raise OSError(errno.ENOTDIR, os.strerror(errno.ENOTDIR), path)
available = disk_usage(path).free
logger.debug("There appears to be %s available at %s",
pretty_bytes(available), path)
return available
def directory_size(path):
"""Total bytes consumed by the contents of a directory.
Args:
path (str): Directory path
Returns:
int: Total bytes consumed by files in this directory.
Raises:
OSError: if the specified path does not exist or is not a directory.
"""
if not os.path.exists(path):
raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), path)
if not os.path.isdir(path):
raise OSError(errno.ENOTDIR, os.strerror(errno.ENOTDIR), path)
total_size = 0
for dirpath, _, filenames in os.walk(path):
for filename in filenames:
filepath = os.path.join(dirpath, filename)
try:
total_size += os.path.getsize(filepath)
except OSError as exc:
logger.debug("Unable to get size of %s (%s), continuing.",
filepath, exc.strerror)
logger.debug("Total disk space consumed by %s is %s",
path, pretty_bytes(total_size))
return total_size
def pretty_bytes(byte_value, base_shift=0):
"""Pretty-print the given bytes value.
Args:
byte_value (float): Value
base_shift (int): Base value of byte_value
(0 = bytes, 1 = KiB, 2 = MiB, etc.)
Returns:
str: Pretty-printed byte string such as "1.00 GiB"
Examples:
::
>>> pretty_bytes(512)
'512 B'
>>> pretty_bytes(512, 2)
'512 MiB'
>>> pretty_bytes(65536, 2)
'64 GiB'
>>> pretty_bytes(65547)
'64.01 KiB'
>>> pretty_bytes(65530, 3)
'63.99 TiB'
>>> pretty_bytes(1023850)
'999.9 KiB'
>>> pretty_bytes(1024000)
'1000 KiB'
>>> pretty_bytes(1048575)
'1024 KiB'
>>> pretty_bytes(1049200)
'1.001 MiB'
>>> pretty_bytes(2560)
'2.5 KiB'
>>> pretty_bytes(.0001, 3)
'104.9 KiB'
>>> pretty_bytes(.01, 1)
'10 B'
>>> pretty_bytes(.001, 1)
'1 B'
>>> pretty_bytes(.0001, 1)
'0 B'
>>> pretty_bytes(100, -1)
Traceback (most recent call last):
...
ValueError: base_shift must not be negative
"""
if base_shift < 0:
raise ValueError("base_shift must not be negative")
tags = ["B", "KiB", "MiB", "GiB", "TiB", 'PiB', 'EiB', 'ZiB', 'YiB']
byte_value = float(byte_value)
shift = base_shift
while byte_value >= 1024.0:
byte_value /= 1024.0
shift += 1
while byte_value < 1.0 and shift > 0:
byte_value *= 1024.0
shift -= 1
# Fractions of a byte should be considered a rounding error:
if shift == 0:
byte_value = round(byte_value)
return "{0:.4g} {1}".format(byte_value, tags[shift])
def tar_entry_size(filesize):
"""Get the space a file of the given size will actually require in a TAR.
The entry has a 512-byte header followd by the actual file data,
padded to a multiple of 512 bytes if necessary.
Args:
filesize (int): File size in bytes
Returns:
int: Bytes consumed in a TAR archive by this file.
Examples:
::
>>> tar_entry_size(1)
1024
>>> tar_entry_size(511)
1024
>>> tar_entry_size(512)
1024
>>> tar_entry_size(513)
1536
"""
# round up to next multiple of 512
return 512 + filesize + ((512 - filesize) % 512)
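# Illustrative cross-check added by the editor (not part of the original
# module): an equivalent formulation of tar_entry_size -- one 512-byte
# header plus the file data rounded up to whole 512-byte blocks.
def _tar_entry_size_alt(filesize):
    """Hypothetical helper; equivalent to tar_entry_size by construction.
    Examples:
      ::
        >>> all(_tar_entry_size_alt(n) == tar_entry_size(n)
        ...     for n in (0, 1, 511, 512, 513, 1024))
        True
    """
    import math
    # ceil(filesize / 512) data blocks, plus one header block
    return 512 * (1 + int(math.ceil(filesize / 512.0)))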
def to_string(obj):
"""Get string representation of an object, special-case for XML Element.
Args:
obj (object): Object to represent as a string.
Returns:
str: string representation
Examples:
::
>>> to_string("Hello")
'Hello'
>>> to_string(27.5)
'27.5'
>>> e = ET.Element('hello', attrib={'key': 'value'})
>>> print(e) # doctest: +ELLIPSIS
<Element ...hello... at ...>
>>> print(to_string(e))
<hello key="value" />
"""
if ET.iselement(obj):
if sys.version_info[0] >= 3:
return ET.tostring(obj, encoding='unicode')
else:
return ET.tostring(obj)
else:
return str(obj)
if __name__ == "__main__": # pragma: no cover
import doctest
doctest.testmod()
|
mit
| -3,087,795,395,802,680,300 | 26.554054 | 78 | 0.59163 | false |
dmccloskey/python_statistics
|
python_statistics/calculate_svd.py
|
1
|
5949
|
from .calculate_dependencies import *
from .calculate_base import calculate_base
class calculate_svd(calculate_base):
def calculate_svd(self):
'''calculate SVD'''
        pass
def extract_UAndVMatrices_2D(self,data_u,data_v,PCs):
'''Extract out the scores and loadings
INPUT:
data_u = listDict of pca/pls scores
data_v = listDict of pca/pls loadings
PCs = [[],[],...] of integers, describing the 2D PC plots
E.G. PCs = [[1,2],[1,3],[2,3]];
OUTPUT:
data_u_O = {'[1,2]':[],'[1,3]':[],'[2,3]':[],...} where each [] is a listDict of the data from PCs e.g. 1,2
data_v_O = {'[1,2]':[],'[1,3]':[],'[2,3]':[],...}
'''
data_u_O,data_v_O = {},{};
for PC_cnt,PC in enumerate(PCs):
#extract out the scores
data_u_O[str(PC)]=[];
            for cnt,d in enumerate(data_u[PC[0]][:]):
                # flag a misalignment if either the sample or the units differ
                if d['sample_name_short'] != data_u[PC[1]][cnt]['sample_name_short'] or d['calculated_concentration_units'] != data_u[PC[1]][cnt]['calculated_concentration_units']:
                    print('data is not in the correct order');
tmp = copy.copy(d);
tmp['score_' + str(PC[0])] = d['u_matrix'];
tmp['axislabel'+str(PC[0])] = 'PC' + str(PC[0]);
tmp['score_' + str(PC[1])] = data_u[PC[1]][cnt]['u_matrix'];
tmp['axislabel'+str(PC[1])] = 'PC' + str(PC[1]);
del tmp['u_matrix'];
del tmp['singular_value_index'];
data_u_O[str(PC)].append(tmp);
#extract out the loadings
data_v_O[str(PC)]=[];
            for cnt,d in enumerate(data_v[PC[0]][:]):
                # flag a misalignment if either the component or the units differ
                if d['component_name'] != data_v[PC[1]][cnt]['component_name'] or d['calculated_concentration_units'] != data_v[PC[1]][cnt]['calculated_concentration_units']:
                    print('data is not in the correct order');
tmp = copy.copy(d);
tmp['loadings_' + str(PC[0])] = d['v_matrix'];
tmp['axislabel'+str(PC[0])] = 'Loadings' + str(PC[0]);
tmp['loadings_' + str(PC[1])] = data_v[PC[1]][cnt]['v_matrix'];
tmp['axislabel'+str(PC[1])] = 'Loadings' + str(PC[1]);
del tmp['v_matrix'];
del tmp['singular_value_index'];
data_v_O[str(PC)].append(tmp);
return data_u_O,data_v_O;
def extract_UAndVMatrices_2D_byPCAndMethod(self,data_u,data_v,
PCs,methods,
score_column_I = 'u_matrix',
loadings_column_I = 'v_matrix',
method_column_I='svd_method'):
'''Extract out the scores and loadings
INPUT:
data_u = listDict of pca/pls scores
data_v = listDict of pca/pls loadings
PCs = [[],[],...] of integers, describing the 2D PC plots
E.G. PCs = [[1,1],[2,2],[3,3]];
methods = [[],[],...] of strings, describing the 2D plots
            E.G. methods = [['svd','robustSvd'],['svd','robustSvd'],['svd','robustSvd']];
OUTPUT:
        data_u_O = {"['1_svd', '1_robustSvd']":[],"['2_svd', '2_robustSvd']":[],...} where each [] is a listDict of the data for that PC/method pair
        data_v_O = {"['1_svd', '1_robustSvd']":[],"['2_svd', '2_robustSvd']":[],...}
PCs_O = [[],[],...]
E.G. [[1_svd,1_robustSvd],[2_svd,2_robustSvd],[3_svd,3_robustSvd],..]
'''
PCs_O = [];
data_u_O,data_v_O = {},{};
for PC_cnt,PC in enumerate(PCs):
#extract out the scores
pc0 = str(PC[0])+'_'+methods[PC_cnt][0];
pc1 = str(PC[1])+'_'+methods[PC_cnt][1];
pc_list = [pc0,pc1];
PCs_O.append(pc_list);
data_u_O[str(pc_list)]=[];
for cnt1,d1 in enumerate(data_u[PC[0]][:]):
for cnt2,d2 in enumerate(data_u[PC[1]][:]):
if d1['sample_name_short'] == d2['sample_name_short'] \
and d1['calculated_concentration_units'] == d2['calculated_concentration_units'] \
and d1[method_column_I] == methods[PC_cnt][0] \
and d2[method_column_I] == methods[PC_cnt][1] :
tmp = copy.copy(d2);
tmp['score_' + pc0] = d1[score_column_I];
tmp['axislabel'+pc0] = 'PC' + pc0;
tmp['score_' + pc1] = d2[score_column_I];
tmp['axislabel'+pc1] = 'PC' + pc1;
del tmp['u_matrix'];
del tmp[method_column_I];
del tmp['singular_value_index'];
data_u_O[str(pc_list)].append(tmp);
break;
#extract out the loadings
data_v_O[str(pc_list)]=[];
for cnt1,d1 in enumerate(data_v[PC[0]][:]):
for cnt2,d2 in enumerate(data_v[PC[1]][:]):
if d1['component_name'] == d2['component_name'] \
and d1['calculated_concentration_units'] == d2['calculated_concentration_units'] \
and d1[method_column_I] == methods[PC_cnt][0] \
and d2[method_column_I] == methods[PC_cnt][1] :
tmp = copy.copy(d2);
tmp['loadings_' + pc0] = d1[loadings_column_I];
tmp['axislabel'+pc0] = 'Loadings' + pc0;
tmp['loadings_' + pc1] = d2[loadings_column_I];
tmp['axislabel'+pc1] = 'Loadings' + pc1;
del tmp['v_matrix'];
del tmp[method_column_I];
del tmp['singular_value_index'];
data_v_O[str(pc_list)].append(tmp);
break;
return data_u_O,data_v_O,PCs_O;
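# --- Hedged usage sketch (editor's illustration; not in the original) ---
# The listDict inputs are indexed by singular-value index. A minimal
# hypothetical example, assuming calculate_base needs no constructor
# arguments (its signature is defined elsewhere):
#
#   data_u = {1: [{'sample_name_short': 's1',
#                  'calculated_concentration_units': 'mM',
#                  'u_matrix': 0.12, 'singular_value_index': 1}],
#             2: [{'sample_name_short': 's1',
#                  'calculated_concentration_units': 'mM',
#                  'u_matrix': -0.08, 'singular_value_index': 2}]}
#   data_v = {1: [{'component_name': 'glc',
#                  'calculated_concentration_units': 'mM',
#                  'v_matrix': 0.34, 'singular_value_index': 1}],
#             2: [{'component_name': 'glc',
#                  'calculated_concentration_units': 'mM',
#                  'v_matrix': 0.91, 'singular_value_index': 2}]}
#   svd = calculate_svd()
#   data_u_O, data_v_O = svd.extract_UAndVMatrices_2D(data_u, data_v, [[1, 2]])
#   # data_u_O['[1, 2]'] rows then carry 'score_1'/'score_2' keys.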
|
mit
| -5,974,848,579,057,604,000 | 52.116071 | 181 | 0.457892 | false |
LaboratoireMecaniqueLille/crappy
|
impact/video_extenso_full.py
|
1
|
1735
|
# coding: utf-8
"""
Base file for tests using videoextensometry and a marker following actuator.
"""
from time import ctime
import crappy
out_gain = 1 / 30.  # V/mm (written as a float so the gain is nonzero under Python 2 as well)
gains = [50, 1/out_gain] # N/V mm/V
timestamp = ctime()[:-5].replace(" ", "_").replace(":", "_")
save_path = "./" + timestamp + "/"
# VideoExtenso and Autodrive blocks
ve = crappy.blocks.Video_extenso(camera='Ximea_cv', show_image=True,
white_spots=False, max_fps=30)
ad = crappy.blocks.AutoDrive(
actuator={'name': 'CM_drive', 'port': '/dev/ttyUSB0'}, direction='X-')
graph_extenso = crappy.blocks.Grapher(('t(s)', 'Exx(%)'), ('t(s)', 'Eyy(%)'))
saver_extenso = crappy.blocks.Saver(save_path+"extenso.csv",
labels=['t(s)', 'Exx(%)', 'Eyy(%)'])
# Linking them
crappy.link(ve, graph_extenso)
crappy.link(ve, saver_extenso)
crappy.link(ve, ad)
# Labjack
lj = crappy.blocks.IOBlock("Labjack_t7", channels=[
{'name': 'AIN0', 'gain': gains[0], 'make_zero':True},
{'name': 'AIN1', 'gain': gains[1], 'make_zero':True},
{'name': 'TDAC0', 'gain': out_gain}],
labels=['t(s)', 'F(N)', 'x(mm)'],
cmd_labels=['cmd'])
# Graph
graph_sensors = crappy.blocks.Grapher(('t(s)', 'F(N)'), ('t(s)', 'x(mm)'))
crappy.link(lj, graph_sensors, modifier=crappy.modifier.Mean(10))
# Generator
g = crappy.blocks.Generator(path=[
{'type': 'cyclic_ramp', 'condition1': 'Exx(%)>20',
     'speed1': 20 / 60., 'condition2': 'F(N)<.1', 'speed2': -20 / 60.,
'cycles': 5}, ])
saver_sensors = crappy.blocks.Saver(save_path + "sensors.csv",
labels=['t(s)', 'F(N)', 'x(mm)'])
# Linking the generator to all the blocks
crappy.link(ve, g)
crappy.link(lj, g)
crappy.link(g, lj)
crappy.link(lj, saver_sensors)
crappy.start()
|
gpl-2.0
| 8,650,245,869,778,098,000 | 28.913793 | 77 | 0.604611 | false |
spreeker/democracygame
|
external_apps/docutils-snapshot/test/test_parsers/test_rst/test_directives/test_admonitions.py
|
1
|
4131
|
#! /usr/bin/env python
# $Id: test_admonitions.py 4667 2006-07-12 21:40:56Z wiemann $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Tests for admonitions.py directives.
"""
from __init__ import DocutilsTestSupport
def suite():
s = DocutilsTestSupport.ParserTestSuite()
s.generateTests(totest)
return s
totest = {}
totest['admonitions'] = [
["""\
.. Attention:: Directives at large.
.. Note:: This is a note.
.. Tip:: 15% if the
service is good.
.. Hint:: It's bigger than a bread box.
- .. WARNING:: Strong prose may provoke extreme mental exertion.
Reader discretion is strongly advised.
- .. Error:: Does not compute.
.. Caution::
Don't take any wooden nickels.
.. DANGER:: Mad scientist at work!
.. Important::
- Wash behind your ears.
- Clean up your room.
- Call your mother.
- Back up your data.
""",
"""\
<document source="test data">
<attention>
<paragraph>
Directives at large.
<note>
<paragraph>
This is a note.
<tip>
<paragraph>
15% if the
service is good.
<hint>
<paragraph>
It's bigger than a bread box.
<bullet_list bullet="-">
<list_item>
<warning>
<paragraph>
Strong prose may provoke extreme mental exertion.
Reader discretion is strongly advised.
<list_item>
<error>
<paragraph>
Does not compute.
<caution>
<paragraph>
Don't take any wooden nickels.
<danger>
<paragraph>
Mad scientist at work!
<important>
<bullet_list bullet="-">
<list_item>
<paragraph>
Wash behind your ears.
<list_item>
<paragraph>
Clean up your room.
<list_item>
<paragraph>
Call your mother.
<list_item>
<paragraph>
Back up your data.
"""],
["""\
.. note:: One-line notes.
.. note:: One after the other.
.. note:: No blank lines in-between.
""",
"""\
<document source="test data">
<note>
<paragraph>
One-line notes.
<note>
<paragraph>
One after the other.
<note>
<paragraph>
No blank lines in-between.
"""],
["""\
.. note::
""",
"""\
<document source="test data">
<system_message level="3" line="1" source="test data" type="ERROR">
<paragraph>
Content block expected for the "note" directive; none found.
<literal_block xml:space="preserve">
.. note::
"""],
["""\
.. admonition:: Admonition
This is a generic admonition.
""",
"""\
<document source="test data">
<admonition classes="admonition-admonition">
<title>
Admonition
<paragraph>
This is a generic admonition.
"""],
["""\
.. admonition:: And, by the way...
You can make up your own admonition too.
""",
"""\
<document source="test data">
<admonition classes="admonition-and-by-the-way">
<title>
And, by the way...
<paragraph>
You can make up your own admonition too.
"""],
["""\
.. admonition:: Admonition
:class: emergency
Test the "class" override.
""",
"""\
<document source="test data">
<admonition classes="emergency">
<title>
Admonition
<paragraph>
Test the "class" override.
"""],
["""\
.. admonition::
Generic admonitions require a title.
""",
"""\
<document source="test data">
<system_message level="3" line="1" source="test data" type="ERROR">
<paragraph>
Error in "admonition" directive:
1 argument(s) required, 0 supplied.
<literal_block xml:space="preserve">
.. admonition::
Generic admonitions require a title.
"""],
]
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
|
bsd-3-clause
| -7,850,461,171,203,881,000 | 21.697802 | 72 | 0.541031 | false |
bytedance/fedlearner
|
fedlearner/trainer/data/data_block_loader.py
|
1
|
2927
|
# Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import queue
import tensorflow.compat.v1 as tf
from fedlearner.common import fl_logging
class DataBlockLoader(object):
def __init__(self, batch_size, role, bridge, trainer_master):
self._batch_size = batch_size
self._role = role
self._bridge = bridge
self._trainer_master = trainer_master
assert self._trainer_master is not None
self._count = 0
if role == 'follower':
self._block_queue = queue.Queue()
self._bridge.register_data_block_handler(self._data_block_handler)
def _data_block_handler(self, msg):
if self._count > msg.count:
fl_logging.warn('DataBlock: ignore repeated datablock "%s" at %d',
msg.block_id, msg.count)
return True
fl_logging.info('DataBlock: recv "%s" at %d', msg.block_id, msg.count)
assert self._count == msg.count
if not msg.block_id:
block = None
else:
block = self._trainer_master.request_data_block(msg.block_id)
if block is None:
return False
self._count += 1
self._block_queue.put(block)
return True
def get_next_block(self):
if self._role == 'leader':
while True:
block = self._trainer_master.request_data_block(None)
if block is not None:
if not self._bridge.load_data_block(
self._count, block.block_id):
continue
else:
self._bridge.load_data_block(self._count, '')
break
self._count += 1
else:
block = self._block_queue.get()
return block
def make_dataset(self):
def gen():
while True:
block = self.get_next_block()
if not block:
break
yield block.data_path
dataset = tf.data.Dataset.from_generator(gen, tf.string)
dataset = tf.data.TFRecordDataset(dataset)
dataset = dataset.batch(self._batch_size, drop_remainder=True)
dataset = dataset.prefetch(1)
return dataset
def make_batch_iterator(self):
return self.make_dataset().make_one_shot_iterator()
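# --- Hedged usage sketch (editor's illustration; not in the original) ---
# Wiring the loader into a training loop, assuming `bridge` and
# `trainer_master` are already-constructed FedLearner objects (their
# setup depends on deployment configuration and is omitted here):
#
#   loader = DataBlockLoader(batch_size=64, role='leader',
#                            bridge=bridge, trainer_master=trainer_master)
#   iterator = loader.make_batch_iterator()
#   batch = iterator.get_next()  # tf.string tensor of serialized records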
|
apache-2.0
| 6,034,772,646,600,977,000 | 33.845238 | 78 | 0.591049 | false |
CroissanceCommune/autonomie
|
autonomie/views/invoices/lists.py
|
1
|
19734
|
# -*- coding: utf-8 -*-
# * Copyright (C) 2012-2013 Croissance Commune
# * Authors:
# * Arezki Feth <[email protected]>;
# * Miotte Julien <[email protected]>;
# * Pettier Gabriel;
# * TJEBBES Gaston <[email protected]>
#
# This file is part of Autonomie : Progiciel de gestion de CAE.
#
# Autonomie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Autonomie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Autonomie. If not, see <http://www.gnu.org/licenses/>.
#
"""
Company invoice list view
"""
import logging
import datetime
import colander
from deform import (
Form,
ValidationFailure,
)
from sqlalchemy import (
or_,
distinct,
asc,
desc,
)
from sqlalchemy.orm import (
contains_eager,
load_only,
)
from beaker.cache import cache_region
from pyramid.httpexceptions import HTTPFound
from autonomie_celery.tasks.export import export_to_file
from autonomie_celery.models import FileGenerationJob
from autonomie_celery.tasks.utils import check_alive
from autonomie_base.models.base import (
DBSESSION,
)
from autonomie.models.task import (
Task,
Invoice,
CancelInvoice,
Payment,
)
from autonomie.models.customer import Customer
from autonomie.models.company import Company
from autonomie.utils.renderer import set_close_popup_response
from autonomie.utils.widgets import (
PopUp,
ViewLink,
)
from autonomie.utils.pdf import write_pdf
from autonomie.views.task.views import html
from autonomie.forms.tasks.invoice import (
get_list_schema,
pdfexportSchema,
)
from autonomie.views import (
BaseListView,
submit_btn,
)
logger = log = logging.getLogger(__name__)
# We nest functions here so that Beaker caching works: Beaker caches by call
# signature, and since dbsession changes on every call, it must not be an
# argument of the cached function.
def get_taskdates(dbsession):
"""
Return all taskdates
"""
@cache_region("long_term", "taskdates")
def taskdates():
"""
Cached version
"""
return dbsession.query(distinct(Invoice.financial_year))
return taskdates()
def get_years(dbsession):
"""
We consider that all documents should be dated after 2000
"""
inv = get_taskdates(dbsession)
@cache_region("long_term", "taskyears")
def years():
"""
cached version
"""
return [invoice[0] for invoice in inv.all()]
return years()
def get_year_range(year):
"""
Return the first january of the current and the next year
"""
fday = datetime.date(year, 1, 1)
lday = datetime.date(year + 1, 1, 1)
return fday, lday
def filter_all_status(self, query, appstruct):
"""
Filter the invoice by status
"""
status = appstruct.get('status', 'all')
if status != 'all':
logger.info(" + Status filtering : %s" % status)
query = query.filter(Task.status == status)
return query
class InvoiceListTools(object):
title = u"Factures de la CAE"
schema = get_list_schema(is_global=True, excludes=('status',))
sort_columns = dict(
date=Task.date,
internal_number=Task.internal_number,
customer=Customer.name,
company=Company.name,
official_number=Task.official_number,
ht=Task.ht,
ttc=Task.ttc,
tva=Task.tva,
payment=Payment.date,
)
default_sort = "official_number"
default_direction = 'desc'
def query(self):
query = DBSESSION().query(Task)
query = query.with_polymorphic([Invoice, CancelInvoice])
query = query.outerjoin(Invoice.payments)
query = query.outerjoin(Task.customer)
query = query.outerjoin(Task.company)
query = query.options(
contains_eager(Invoice.payments).load_only(
Payment.id, Payment.date, Payment.mode
)
)
query = query.options(
contains_eager(Task.customer).load_only(
Customer.name, Customer.code, Customer.id,
Customer.firstname, Customer.lastname, Customer.civilite,
Customer.type_,
)
)
query = query.options(
contains_eager(Task.company).load_only(
Company.name,
Company.id,
)
)
query = query.options(
load_only(
"_acl",
"name",
"date",
"id",
"ht",
"tva",
"ttc",
"company_id",
"customer_id",
"official_number",
"internal_number",
"status",
Invoice.paid_status,
)
)
return query
def sort_by_official_number(self, query, appstruct):
"""
Sort the query by official number
:param obj query: The query to sort
:param dict appstruct: filtered datas provided by the search form
:returns: The sorted query
"""
sort_direction = self._get_sort_direction(appstruct)
if sort_direction == 'asc':
func = asc
else:
func = desc
return query.order_by(
func(Task.status_date),
func(Task.official_number),
)
def _get_company_id(self, appstruct):
"""
Return the company_id found in the appstruct
Should be overriden if we want a company specific list view
"""
res = appstruct.get('company_id')
logger.debug("Company id : %s" % res)
return res
def filter_company(self, query, appstruct):
company_id = self._get_company_id(appstruct)
if company_id not in (None, colander.null):
query = query.filter(Task.company_id == company_id)
return query
def filter_official_number(self, query, appstruct):
number = appstruct['search']
if number and number != -1:
logger.debug(u" Filtering by official_number : %s" % number)
prefix = self.request.config.get('invoiceprefix', '')
if prefix and number.startswith(prefix):
number = number[len(prefix):]
query = query.filter(Task.official_number.like("%" + number + "%"))
return query
def filter_ttc(self, query, appstruct):
ttc = appstruct.get('ttc', {})
if ttc.get('start') not in (None, colander.null):
log.info(u"Filtering by ttc amount : %s" % ttc)
start = ttc.get('start')
end = ttc.get('end')
if end in (None, colander.null):
query = query.filter(Task.ttc >= start)
else:
query = query.filter(Task.ttc.between(start, end))
return query
def filter_customer(self, query, appstruct):
customer_id = appstruct.get('customer_id')
if customer_id not in (None, colander.null):
logger.debug(u"Customer id : %s" % customer_id)
query = query.filter(Task.customer_id == customer_id)
return query
def filter_date(self, query, appstruct):
logger.debug(u" + Filtering date")
period = appstruct.get('period', {})
if period.get('start') not in (None, colander.null):
start = period.get('start')
end = period.get('end')
if end in (None, colander.null):
end = datetime.date.today()
query = query.filter(Task.date.between(start, end))
logger.debug(u" Between %s and %s" % (start, end))
year = appstruct.get('year', -1)
if year != -1:
query = query.filter(
or_(
Invoice.financial_year == year,
CancelInvoice.financial_year == year,
)
)
logger.debug(u" Year : %s" % year)
return query
def filter_status(self, query, appstruct):
"""
Filter the status a first time (to be overriden)
"""
logger.debug("Filtering status")
query = query.filter(Task.status == 'valid')
return query
def filter_paid_status(self, query, appstruct):
status = appstruct['paid_status']
if status == 'paid':
query = self._filter_paid(query)
elif status == 'notpaid':
query = self._filter_not_paid(query)
return query
def _filter_paid(self, query):
return query.filter(
or_(
Invoice.paid_status == 'resulted',
Task.type_ == 'cancelinvoice',
)
)
def _filter_not_paid(self, query):
return query.filter(
Invoice.paid_status.in_(('waiting', 'paid'))
)
def filter_doctype(self, query, appstruct):
"""
Filter invocies by type (invoice/cancelinvoice)
"""
type_ = appstruct.get('doctype')
if type_ in ('invoice', 'cancelinvoice'):
query = query.filter(Task.type_ == type_)
else:
query = query.filter(Task.type_.in_(('invoice', 'cancelinvoice')))
return query
class GlobalInvoicesListView(InvoiceListTools, BaseListView):
"""
Used as base for company invoices listing
"""
    add_template_vars = (u'title', u'pdf_export_btn', u'is_admin', u'legends')
is_admin = True
legends = (
('paid-status-resulted', u"Factures payées"),
('paid-status-paid', u"Factures payées partiellement"),
('shadow-sm', u"Factures non payées depuis moins de 45 jours"),
("tolate-True", u"Factures non payées depuis plus de 45 jours"),
)
@property
def pdf_export_btn(self):
"""
return a popup open button for the pdf export form and place the popup
in the request attribute
"""
form = get_invoice_pdf_export_form(self.request)
popup = PopUp("pdfexportform", u'Export massif', form.render())
self.request.popups = {popup.name: popup}
return popup.open_btn()
class CompanyInvoicesListView(GlobalInvoicesListView):
"""
Invoice list for one given company
"""
is_admin = False
schema = get_list_schema(is_global=False, excludes=("company_id",))
add_template_vars = (u'title', u"is_admin", u"legends")
legends = GlobalInvoicesListView.legends + (
('status-draft', u"Factures en brouillon"),
("status-wait", u"Factures en attente de validation"),
("status-invalid", u"Factures invalides"),
)
@property
def with_draft(self):
return True
def _get_company_id(self, appstruct):
return self.request.context.id
@property
def title(self):
return u"Factures de l'entreprise {0}".format(self.request.context.name)
filter_status = filter_all_status
class GlobalInvoicesCsvView(InvoiceListTools, BaseListView):
model = Invoice
file_format = "csv"
filename = "factures_"
def query(self):
query = self.request.dbsession.query(
Task
).with_polymorphic(
[Invoice, CancelInvoice]
)
query = query.options(load_only(Task.id))
return query
def _build_return_value(self, schema, appstruct, query):
"""
        Launch a background file-generation job and redirect to its status page
"""
all_ids = [elem.id for elem in query]
logger.debug(" + All_ids where collected : {0}".format(all_ids))
service_ok, msg = check_alive()
if not service_ok:
if "popup" in self.request.GET:
set_close_popup_response(self.request, error=msg)
return self.request.response
else:
self.request.session.flash(msg, 'error')
return HTTPFound(self.request.referrer)
logger.debug(" + In the GlobalInvoicesCsvView._build_return_value")
job = FileGenerationJob()
job.set_owner(self.request.user.login.login)
self.request.dbsession.add(job)
self.request.dbsession.flush()
logger.debug(" + The job {job.id} was initialized".format(job=job))
logger.debug(" + Delaying the export_to_file task")
celery_job = export_to_file.delay(
job.id,
'invoices',
all_ids,
self.filename,
self.file_format
)
logger.info(
u"The Celery Task {0} has been delayed, its result "
"sould be retrieved from the FileGenerationJob {1}".format(
celery_job.id, job.id
)
)
return HTTPFound(
self.request.route_path('job', id=job.id, _query={'popup': 1})
)
class GlobalInvoicesXlsView(GlobalInvoicesCsvView):
file_format = "xls"
class GlobalInvoicesOdsView(GlobalInvoicesCsvView):
file_format = "ods"
class CompanyInvoicesCsvView(GlobalInvoicesCsvView):
schema = get_list_schema(is_global=False, excludes=('company_id',))
def _get_company_id(self, appstruct):
return self.request.context.id
filter_status = filter_all_status
class CompanyInvoicesXlsView(GlobalInvoicesXlsView):
schema = get_list_schema(is_global=False, excludes=('company_id',))
def _get_company_id(self, appstruct):
return self.request.context.id
filter_status = filter_all_status
class CompanyInvoicesOdsView(GlobalInvoicesOdsView):
schema = get_list_schema(is_global=False, excludes=('company_id',))
def _get_company_id(self, appstruct):
return self.request.context.id
filter_status = filter_all_status
def get_invoice_pdf_export_form(request):
"""
Return the form used to search for invoices that will be exported
"""
schema = pdfexportSchema.bind(request=request)
action = request.route_path(
"invoices",
_query=dict(action="export_pdf"),
)
query_form = Form(schema, buttons=(submit_btn,), action=action)
return query_form
def query_documents_for_export(from_number, to_number, year):
"""
Query the database to retrieve the documents for the pdf export
"""
# querying the database
query = Task.query().with_polymorphic([Invoice, CancelInvoice])
query = query.filter(Task.official_number >= from_number)
# Default provided in the form schema is -1
if to_number > 0:
query = query.filter(Task.official_number <= to_number)
query = query.filter(
or_(
Invoice.financial_year == year,
CancelInvoice.financial_year == year,
)
)
records = query.order_by(Task.official_number).all()
return records
def invoices_pdf_view(request):
"""
Bulk pdf output : output a large amount of invoices/cancelinvoices
"""
# We retrieve the form
query_form = get_invoice_pdf_export_form(request)
if 'submit' in request.params:
try:
appstruct = query_form.validate(request.params.items())
except ValidationFailure as e:
# Form validation failed, the error contains the form with the error
# messages
query_form = e
appstruct = None
if appstruct is not None:
# The form has been validated, we can query for documents
start_number = appstruct["start"]
end_number = appstruct["end"]
year = appstruct['year']
documents = query_documents_for_export(
start_number,
end_number,
year
)
# We've got some documents to export
if documents:
# Getting the html output
html_string = html(request, documents, bulk=True)
filename = u"factures_{0}_{1}_{2}.pdf".format(
year,
start_number,
end_number,
)
try:
# Placing the pdf datas in the request
write_pdf(request, filename, html_string)
return request.response
except BaseException as e:
import traceback
traceback.print_exc()
request.session.flash(u"Erreur à l'export des factures, \
essayez de limiter le nombre de factures à exporter. Prévenez \
votre administrateur si le problème persiste.", queue="error")
else:
# There were no documents to export, we send a message to the
# end user
request.session.flash(
u"Aucune facture n'a pu être retrouvée",
queue="error"
)
gotolist_btn = ViewLink(
u"Liste des factures",
"admin_invoices",
path="invoices"
)
request.actionmenu.add(gotolist_btn)
return dict(
title=u"Export massif de factures au format PDF",
form=query_form.render(),
)
def add_routes(config):
"""
Add module's related route
"""
# Company invoices route
    config.add_route(
        'company_invoices',
        r'/company/{id:\d+}/invoices',
        traverse='/companies/{id}',
    )
# Global invoices route
config.add_route("invoices", "/invoices")
# invoice export routes
config.add_route(
"invoices_export",
"/invoices.{extension}"
)
    config.add_route(
        "company_invoices_export",
        r"/company/{id:\d+}/invoices.{extension}",
        traverse='/companies/{id}',
    )
def includeme(config):
add_routes(config)
config.add_view(
GlobalInvoicesListView,
route_name="invoices",
renderer="invoices.mako",
permission="admin_invoices"
)
config.add_view(
GlobalInvoicesCsvView,
route_name="invoices_export",
match_param="extension=csv",
permission="admin_invoices"
)
config.add_view(
GlobalInvoicesOdsView,
route_name="invoices_export",
match_param="extension=ods",
permission="admin_invoices"
)
config.add_view(
GlobalInvoicesXlsView,
route_name="invoices_export",
match_param="extension=xls",
permission="admin_invoices"
)
config.add_view(
CompanyInvoicesListView,
route_name='company_invoices',
renderer='invoices.mako',
permission='list_invoices'
)
config.add_view(
CompanyInvoicesCsvView,
route_name="company_invoices_export",
match_param="extension=csv",
permission="list_invoices"
)
config.add_view(
CompanyInvoicesOdsView,
route_name="company_invoices_export",
match_param="extension=ods",
permission="list_invoices"
)
config.add_view(
CompanyInvoicesXlsView,
route_name="company_invoices_export",
match_param="extension=xls",
permission="list_invoices"
)
config.add_view(
invoices_pdf_view,
route_name="invoices",
request_param='action=export_pdf',
renderer="/base/formpage.mako",
permission="list_invoices",
)
|
gpl-3.0
| 749,725,627,219,927,700 | 29.112977 | 80 | 0.587761 | false |
ARM-software/bart
|
tests/test_sched_functions.py
|
2
|
3649
|
# Copyright 2016-2016 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import trappy
import utils_tests
class TestSchedFunctions(utils_tests.SetupDirectory):
def __init__(self, *args, **kwargs):
super(TestSchedFunctions, self).__init__([], *args, **kwargs)
def test_get_pids_for_processes_no_sched_switch(self):
"""get_pids_for_processes() raises an exception if the trace doesn't have a sched_switch event"""
from bart.sched.functions import get_pids_for_process
trace_file = "trace.txt"
raw_trace_file = "trace.raw.txt"
with open(trace_file, "w") as fout:
fout.write("")
with open(raw_trace_file, "w") as fout:
fout.write("")
trace = trappy.FTrace(trace_file)
with self.assertRaises(ValueError):
get_pids_for_process(trace, "foo")
def test_get_pids_for_process_funny_process_names(self):
"""get_pids_for_process() works when a process name is a substring of another"""
from bart.sched.functions import get_pids_for_process
trace_file = "trace.txt"
raw_trace_file = "trace.raw.txt"
in_data = """ <idle>-0 [001] 10826.894644: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=0 next_comm=rt-app next_pid=3268 next_prio=120
wmig-3268 [001] 10826.894778: sched_switch: prev_comm=wmig prev_pid=3268 prev_prio=120 prev_state=1 next_comm=rt-app next_pid=3269 next_prio=120
wmig1-3269 [001] 10826.905152: sched_switch: prev_comm=wmig1 prev_pid=3269 prev_prio=120 prev_state=1 next_comm=wmig next_pid=3268 next_prio=120
wmig-3268 [001] 10826.915384: sched_switch: prev_comm=wmig prev_pid=3268 prev_prio=120 prev_state=1 next_comm=swapper/1 next_pid=0 next_prio=120
<idle>-0 [005] 10826.995169: sched_switch: prev_comm=swapper/5 prev_pid=0 prev_prio=120 prev_state=0 next_comm=wmig1 next_pid=3269 next_prio=120
wmig1-3269 [005] 10827.007064: sched_switch: prev_comm=wmig1 prev_pid=3269 prev_prio=120 prev_state=0 next_comm=wmig next_pid=3268 next_prio=120
wmig-3268 [005] 10827.019061: sched_switch: prev_comm=wmig prev_pid=3268 prev_prio=120 prev_state=0 next_comm=wmig1 next_pid=3269 next_prio=120
wmig1-3269 [005] 10827.031061: sched_switch: prev_comm=wmig1 prev_pid=3269 prev_prio=120 prev_state=0 next_comm=wmig next_pid=3268 next_prio=120
wmig-3268 [005] 10827.050645: sched_switch: prev_comm=wmig prev_pid=3268 prev_prio=120 prev_state=1 next_comm=swapper/5 next_pid=0 next_prio=120
"""
# We create an empty trace.txt to please trappy ...
with open(trace_file, "w") as fout:
fout.write("")
# ... but we only put the sched_switch events in the raw trace
# file because that's where trappy is going to look for
with open(raw_trace_file, "w") as fout:
fout.write(in_data)
trace = trappy.FTrace(trace_file)
        self.assertEqual(get_pids_for_process(trace, "wmig"), [3268])
|
apache-2.0
| -2,284,812,691,782,188,000 | 51.884058 | 189 | 0.659085 | false |
jinankjain/zamboni
|
sites/landfill/settings_base.py
|
1
|
5546
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import CACHE_PREFIX, ES_INDEXES, KNOWN_PROXIES, LOGGING
from .. import splitstrip
import private_base as private
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['sa_pool_key'] = 'master'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'BACKEND': 'memcachepool.cache.UMemcacheCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
},
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_ALWAYS_EAGER = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'z.task': { 'level': logging.DEBUG },
'z.hera': { 'level': logging.INFO },
'z.redis': { 'level': logging.DEBUG },
'z.pool': { 'level': logging.ERROR },
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt-landfill'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons-dev.allizom.org/repackage/sdk-versions/"
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_landfill' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons-dev.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
HEKA_CONF = {
'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {})},
'stream': {
'class': 'heka.streams.UdpStream',
'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
'port': private.HEKA_CONF_SENDER_PORT,
},
}
USE_HEKA_FOR_CEF = True
USE_HEKA_FOR_TASTYPIE = True
ALLOW_SELF_REVIEWS = True
AES_KEYS = private.AES_KEYS
|
bsd-3-clause
| 6,724,756,952,774,644,000 | 28.036649 | 94 | 0.668049 | false |
shub0/algorithm-data-structure
|
python/subsets.py
|
1
|
1312
|
#! /usr/bin/python
'''
Given a set of distinct integers, S, return all possible subsets.
Note:
Elements in a subset must be in non-descending order.
The solution set must not contain duplicate subsets.
For example,
If S = [1,2,3], a solution is:
[
[3], [1], [2], [1,2,3], [1,3], [2,3], [1,2], []
]
Extend:
Assume there are duplicates in the collection
'''
class Solution:
# @param S, a list of integer
# @return a list of lists of integer
def subsets(self, S):
size = len(S)
if size == 0:
return []
sorted_s = sorted(S)
result_list = [[]]
for index in range(size):
result_list.extend( [ prev_list+[sorted_s[index]] for prev_list in result_list ])
return result_list
    # @param S, a list of integer
# @return a list of lists of integer
def subsetsWithDup(self, S):
size = len(S)
if size == 0:
return []
result_list = [[]]
sorted_s = sorted(S)
start_index = 0
for index in range(size):
            # A new value extends every subset built so far; a repeated value
            # keeps the previous start_index, so only the subsets created in
            # the last step are extended, which avoids duplicate subsets.
            if index == 0 or sorted_s[index] != sorted_s[index - 1]:
                start_index = len(result_list)
            result_list.extend([ prev_list+[sorted_s[index]] for prev_list in result_list[-start_index:] ])
return result_list
if __name__ == '__main__':
solution = Solution()
print solution.subsets([1,2,3])
print solution.subsetsWithDup([5,5,5,5,5,5])
|
bsd-3-clause
| 3,785,617,152,183,180,000 | 23.296296 | 98 | 0.628049 | false |
speedlight/rblmonitor
|
src/rbls/views.py
|
1
|
1389
|
from django.shortcuts import render
from django.views import generic
from django.http import HttpResponseRedirect
from rbls.forms import AddrForm
from rbls.models import Rbllist
from rbls.bin.blcheck import _ipstatus
rbls = Rbllist.objects.values_list('name', flat=True)
bls_url = Rbllist.objects.values_list('url', flat=True)
class RBLView(generic.TemplateView):
template_name = 'rbl_list.html'
def get(self, request):
addrform = AddrForm()
data = { 'rbls': rbls, 'form': addrform }
return render(request, self.template_name, data)
class RBLCheck(generic.TemplateView):
template_name = 'rbl_check.html'
    def get(self, request):
        # Django only dispatches GET requests to get(), so no method
        # check is needed here.
        check = []
        addrform = AddrForm(request.GET)
        if addrform.is_valid():
            address = addrform.cleaned_data['address']
            for bl in bls_url:
                check.append(_ipstatus(address, bl))
            data = {
                'bls': bls_url,
                'address': address,
                'check': check,
            }
            return render(request, self.template_name, data)
        # Invalid or missing form data: fall back to the RBL list page.
        return HttpResponseRedirect('/rbls/list')
|
mit
| 7,949,800,061,574,404,000 | 29.195652 | 60 | 0.62635 | false |
gpodder/mygpo
|
mygpo/suggestions/views.py
|
1
|
1485
|
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.contrib.sites.requests import RequestSite
from django.views.decorators.vary import vary_on_cookie
from django.views.decorators.cache import never_cache, cache_control
from mygpo.podcasts.views.podcast import slug_decorator, id_decorator
from mygpo.suggestions.models import PodcastSuggestion
from mygpo.podcasts.models import Podcast
import logging
logger = logging.getLogger(__name__)
@never_cache
@login_required
def blacklist(request, blacklisted_podcast):
user = request.user
logger.info(
'Removing suggestion of "{podcast}" for "{user}"'.format(
podcast=blacklisted_podcast, user=user
)
)
    # QuerySet.update() returns the number of rows changed, not a model
    # instance, so the result is not kept.
    PodcastSuggestion.objects.filter(
        suggested_to=user, podcast=blacklisted_podcast
    ).update(deleted=True)
return HttpResponseRedirect(reverse("suggestions"))
@vary_on_cookie
@cache_control(private=True)
@login_required
def suggestions(request):
user = request.user
suggestions = Podcast.objects.filter(
podcastsuggestion__suggested_to=user, podcastsuggestion__deleted=False
)
current_site = RequestSite(request)
return render(
request, "suggestions.html", {"entries": suggestions, "url": current_site}
)
blacklist_slug = slug_decorator(blacklist)
blacklist_id = id_decorator(blacklist)
|
agpl-3.0
| -6,633,457,275,113,974,000 | 28.7 | 82 | 0.753535 | false |
UPDDI/mps-database-server
|
assays/migrations/0009.py
|
1
|
1554
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('microdevices', '0003'),
('assays', '0008'),
]
operations = [
migrations.AlterField(
model_name='assaychipsetup',
name='device',
field=models.ForeignKey(verbose_name=b'Organ Model Name', blank=True, to='microdevices.OrganModel', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.RenameField(
model_name='assaychipsetup',
old_name='device',
new_name='organ_model'
),
migrations.AddField(
model_name='assaychipsetup',
name='organ_model_protocol',
field=models.ForeignKey(verbose_name=b'Organ Model Protocol', blank=True, to='microdevices.OrganModelProtocol', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='assaychipsetup',
name='device',
field=models.ForeignKey(verbose_name=b'Device', to='microdevices.Microdevice', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AlterField(
model_name='assayrun',
name='file',
field=models.FileField(help_text=b'Protocol File for Study', upload_to=b'study_protocol', null=True, verbose_name=b'Protocol File', blank=True),
preserve_default=True,
),
]
|
mit
| 549,551,874,547,023,800 | 34.318182 | 161 | 0.593308 | false |
syhpoon/xyzcmd
|
libxyz/ui/tests.py
|
1
|
4547
|
# -*- coding: utf-8 -*-
#
# Max E. Kuznecov ~syhpoon <[email protected]> 2008-2009
#
# This file is part of XYZCommander.
# XYZCommander is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# XYZCommander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
# You should have received a copy of the GNU Lesser Public License
# along with XYZCommander. If not, see <http://www.gnu.org/licenses/>.
# UI tests
from nose.tools import raises
from libxyz.ui import colors
from libxyz.exceptions import XYZValueError
import __builtin__
import locale
def setup():
__builtin__._ = lambda x: x
__builtin__.xyzenc = locale.getpreferredencoding()
class TestColors(object):
@raises(XYZValueError)
def testForegroundColor1(self):
"""
Raise error on wrong color
"""
colors.Foreground("AAA")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def testForegroundColor2(self):
"""
Check correct color
"""
assert colors.Foreground("BLACK")
assert colors.Foreground("DARK_BLUE")
assert colors.Foreground("LIGHT_RED")
assert colors.Foreground("DEFAULT")
assert colors.Foreground("BLACK")
assert colors.Foreground("BROWN")
assert colors.Foreground("YELLOW")
assert colors.Foreground("WHITE")
assert colors.Foreground("DEFAULT")
assert colors.Foreground("DARK_BLUE")
assert colors.Foreground("DARK_MAGENTA")
assert colors.Foreground("DARK_CYAN")
assert colors.Foreground("DARK_RED")
assert colors.Foreground("DARK_GREEN")
assert colors.Foreground("DARK_GRAY")
assert colors.Foreground("LIGHT_GRAY")
assert colors.Foreground("LIGHT_RED")
assert colors.Foreground("LIGHT_GREEN")
assert colors.Foreground("LIGHT_BLUE")
assert colors.Foreground("LIGHT_MAGENTA")
assert colors.Foreground("LIGHT_CYAN")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@raises(XYZValueError)
def testBackgroundColor1(self):
"""
Raise error on wrong color
"""
colors.Background("AAA")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def testBackgroundColor2(self):
"""
Check correct color
"""
assert colors.Background("BLACK")
assert colors.Background("BROWN")
assert colors.Background("DEFAULT")
assert colors.Background("DARK_RED")
assert colors.Background("DARK_GREEN")
assert colors.Background("DARK_BLUE")
assert colors.Background("DARK_MAGENTA")
assert colors.Background("DARK_CYAN")
assert colors.Background("LIGHT_GRAY")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@raises(XYZValueError)
def testAttributeColor1(self):
"""
Raise error on wrong attribute
"""
colors.Attribute("AAA")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def testAttributeColor2(self):
"""
Check correct attribute
"""
assert colors.Attribute("BOLD")
assert colors.Attribute("UNDERLINE")
assert colors.Attribute("BLINK")
assert colors.Attribute("STANDOUT")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@raises(XYZValueError)
def testHighForegroundColorIncorrect(self):
colors.ForegroundHigh("WTF?")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@raises(XYZValueError)
def testHighBackgroundColorIncorrect(self):
colors.ForegroundHigh("WTF?")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _testHighColorCorrect(self, cl):
assert cl("#009")
assert cl("#fcc")
assert cl("g40")
assert cl("g#cc")
assert cl("h8")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def testForegroundHighColorCorrect(self):
self._testHighColorCorrect(colors.ForegroundHigh)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def testBackgroundHighColorCorrect(self):
self._testHighColorCorrect(colors.BackgroundHigh)
#++++++++++++++++++++++++++++++++++++++++++++++++
class TestPalette(object):
pass
|
gpl-3.0
| 2,684,836,863,749,151,000 | 27.961783 | 70 | 0.571366 | false |
ngageoint/six-library
|
six/modules/python/six/unittests/testWrapper.py
|
1
|
7359
|
import unittest
import numpy
from coda.math_poly import Poly1D, Poly2D
from pysix.wrapper import wrap, unwrap
from pysix.six_sicd import *
from pysix.six_base import *
class TestWrapping(unittest.TestCase):
def testCanWrapPrimitives(self):
originalObject = HalfPowerBeamwidths()
wrappedObject = wrap(originalObject)
self.assertEqual(wrappedObject.dcx, originalObject.dcx)
self.assertEqual(wrappedObject.dcy, originalObject.dcy)
def testWrappingEmptyPoly1D(self):
originalObject = ElectricalBoresight()
wrappedObject = wrap(originalObject)
self.assertIsInstance(wrappedObject.dcxPoly, type(numpy.zeros(0)))
self.assertEqual(len(wrappedObject.dcxPoly), 0)
def testWrappingFilledPoly1D(self):
originalObject = ElectricalBoresight()
originalObject.dcxPoly = Poly1D(1)
originalObject.dcxPoly[0] = 4
wrappedObject = wrap(originalObject)
self.assertEqual(len(wrappedObject.dcxPoly), 2)
self.assertEqual(wrappedObject.dcxPoly[0], 4)
def testWrappingEmptyPoly2D(self):
originalObject = GainAndPhasePolys()
wrappedObject = wrap(originalObject)
self.assertIsInstance(wrappedObject.gainPoly, type(numpy.zeros(0)))
self.assertEqual(len(wrappedObject.gainPoly), 0)
def testCanWrapEmptySmartPointer(self):
originalObject = AntennaParameters()
wrappedObject = wrap(originalObject)
self.assertEqual(wrappedObject.electricalBoresight, None)
def testWrappingNestedPoly(self):
originalObject = AntennaParameters()
originalObject.electricalBoresight = (
makeScopedCopyableElectricalBoresight())
originalObject.electricalBoresight.dcxPoly = Poly1D(1)
originalObject.electricalBoresight.dcxPoly[0] = 5
wrappedObject = wrap(originalObject)
self.assertEqual(wrappedObject.electricalBoresight.dcxPoly[0], 5)
self.assertIsInstance(wrappedObject.electricalBoresight.dcxPoly,
type(numpy.zeros(0)))
def testChangeVectorToList(self):
originalObject = CollectionInformation()
originalObject.countryCodes.push_back('element')
wrappedObject = wrap(originalObject)
self.assertIsInstance(wrappedObject.countryCodes, list)
self.assertEqual(len(wrappedObject.countryCodes), 1)
self.assertEqual(wrappedObject.countryCodes[0], 'element')
def testWrapVectorOfSmartPointers(self):
originalObject = GeoData()
originalObject.geoInfos.resize(3)
originalObject.geoInfos[0] = makeScopedCopyableGeoInfo()
originalObject.geoInfos[0].name = "first name"
originalObject.geoInfos[2] = makeScopedCopyableGeoInfo()
originalObject.geoInfos[2].name = "third name"
wrappedObject = wrap(originalObject)
self.assertEqual(len(wrappedObject.geoInfos), 3)
self.assertEqual(wrappedObject.geoInfos[0].name, "first name")
self.assertEqual(wrappedObject.geoInfos[1], None)
self.assertEqual(wrappedObject.geoInfos[2].name, "third name")
class TestUnwrapping(unittest.TestCase):
def testCanUnwrapPrimitives(self):
originalObject = HalfPowerBeamwidths()
wrappedObject = wrap(originalObject)
wrappedObject.dcx = 5.0
wrappedObject.dcy = 3.4
unwrapped = unwrap(wrappedObject)
self.assertEqual(unwrapped.dcx, 5)
self.assertEqual(unwrapped.dcy, 3.4)
def testUnwrappingPoly1D(self):
originalObject = ElectricalBoresight()
originalObject.dcxPoly = Poly1D(1)
originalObject.dcxPoly[0] = 3
wrappedObject = wrap(originalObject)
wrappedObject.dcxPoly = numpy.zeros(0)
wrappedObject.dcyPoly = numpy.ones(1)
unwrapped = unwrap(wrappedObject)
self.assertTrue(unwrapped.dcxPoly.empty())
self.assertEqual(unwrapped.dcyPoly[0], 1)
self.assertIsInstance(unwrapped.dcxPoly, Poly1D)
def testUnwrappingPoly2D(self):
originalObject = GainAndPhasePolys()
originalObject.gainPoly = Poly2D(1, 1)
wrappedObject = wrap(originalObject)
wrappedObject.gainPoly = numpy.zeros(0)
wrappedObject.phasePoly = numpy.ones((1, 1))
unwrapped = unwrap(wrappedObject)
self.assertIsInstance(unwrapped.gainPoly, Poly2D)
self.assertTrue(unwrapped.gainPoly.empty())
self.assertIsInstance(unwrapped.phasePoly, Poly2D)
self.assertEqual(unwrapped.phasePoly[(0,0)], 1)
def testCanUnwrapSmartPointer(self):
originalObject = AntennaParameters()
originalObject.electricalBoresight = (
makeScopedCopyableElectricalBoresight())
wrappedObject = wrap(originalObject)
wrappedObject.electricalBoresight = None
wrappedObject.halfPowerBeamwidths = (
makeScopedCopyableHalfPowerBeamwidths())
wrappedObject.halfPowerBeamwidths.dcx = 3
unwrapped = unwrap(wrappedObject)
self.assertIsNone(unwrapped.electricalBoresight.get())
self.assertIsNotNone(unwrapped.halfPowerBeamwidths.get())
self.assertEqual(unwrapped.halfPowerBeamwidths.dcx, 3)
def testUnwrapList(self):
originalObject = CollectionInformation()
originalObject.countryCodes.push_back('element')
wrappedObject = wrap(originalObject)
wrappedObject.countryCodes.append('secondElement')
unwrapped = unwrap(wrappedObject)
self.assertIn('Vector', type(unwrapped.countryCodes).__name__)
self.assertEqual(unwrapped.countryCodes[1], 'secondElement')
def testUnwrapVectorOfSmartPointers(self):
originalObject = GeoData()
originalObject.geoInfos.resize(3)
originalObject.geoInfos[0] = makeScopedCopyableGeoInfo()
originalObject.geoInfos[0].name = "first name"
originalObject.geoInfos[2] = makeScopedCopyableGeoInfo()
originalObject.geoInfos[2].name = "third name"
wrappedObject = wrap(originalObject)
wrappedObject.geoInfos[2] = None
wrappedObject.geoInfos[1] = makeScopedCopyableGeoInfo()
wrappedObject.geoInfos[1].name = "second name"
unwrapped = unwrap(wrappedObject)
self.assertEqual(unwrapped.geoInfos.size(), 3)
self.assertIsNotNone(unwrapped.geoInfos[1].get())
self.assertEqual(unwrapped.geoInfos[1].name, "second name")
self.assertIsNone(unwrapped.geoInfos[2].get())
class TestMethods(unittest.TestCase):
def testMethodsUseUpdatedValues(self):
originalObject = ComplexData()
originalObject.imageData.numRows = 3
wrappedObject = wrap(originalObject)
wrappedObject.imageData.numRows = 5
self.assertEqual(wrappedObject.getNumRows(), 5)
def testMethodsCanChangeWrapper(self):
originalObject = ComplexData()
wrappedObject = wrap(originalObject)
wrappedObject.setNumRows(5)
self.assertEqual(wrappedObject.getNumRows(), 5)
def testMethodsDeep(self):
originalObject = ComplexData()
wrappedObject = wrap(ComplexData())
wrappedObject.fillDefaultFields()
originalObject.fillDefaultFields()
unwrapped = unwrap(wrappedObject)
self.assertEqual(originalObject, unwrapped)
if __name__ == '__main__':
unittest.main()
|
lgpl-3.0
| -7,804,782,137,442,265,000 | 36.35533 | 75 | 0.699144 | false |
vlarson/class-scripts
|
mc_integration/simple_mc_2d.py
|
1
|
15482
|
# -*- coding: utf-8 -*-
""" Functions that perform simple Monte Carlo integration """
import pdb
# pdb.set_trace()
def autoconversionRate(TwoDSamplePoint,alpha,beta):
"""Return a quantity proportional to the Khairoutdinov-Kogan autoconversion rate."""
chi = TwoDSamplePoint[0]
Nc = TwoDSamplePoint[1]
if chi < 0:
fncValue = 0
else:
fncValue = chi**alpha * Nc**beta
# pdb.set_trace()
return fncValue
def evaporationRate(TwoDSamplePoint,alpha,beta):
"""A simple function that mimics an evaporation formula."""
chi = TwoDSamplePoint[0]
Nc = TwoDSamplePoint[1]
if chi > 0:
fncValue = 0
else:
fncValue = abs(chi)**alpha * Nc**beta
# pdb.set_trace()
return fncValue
def calcAutoconversionIntegral(muChi,sigmaChi,muNcn,sigmaNcn,rhoChiNcn,alpha,beta):
"""Calculate the Khairoutdinov-Kogan autoconversion rate,
upscaled over a single normal-lognormal PDF."""
from scipy.special import gamma, pbdv
from math import sqrt, exp, pi
sC = muChi/sigmaChi + rhoChiNcn*sigmaNcn*beta
# sC = muChi/sigmaChi - rhoChiNcn*sigmaNcn*beta
(parabCylFnc, parabCylDer) = pbdv(-alpha-1,-sC)
# (parabCylFnc, parabCylDer) = pbdv(-alpha-1,sC)
analyticIntegral = (1/sqrt(2*pi))*(sigmaChi**alpha) \
*exp(muNcn*beta + 0.5*(sigmaNcn*beta)**2 - 0.25*sC**2) \
*gamma(alpha+1)*parabCylFnc
# pdb.set_trace()
return analyticIntegral
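# Editor's note (illustrative, not in the original file): the expression
# above implements the closed-form upscaled KK integral
#
#     I = (1/sqrt(2*pi)) * sigmaChi**alpha
#         * exp(muNcn*beta + 0.5*(sigmaNcn*beta)**2 - 0.25*sC**2)
#         * Gamma(alpha+1) * D_{-alpha-1}(-sC),
#
# where sC = muChi/sigmaChi + rhoChiNcn*sigmaNcn*beta and D_nu is the
# parabolic cylinder function returned by scipy.special.pbdv.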
def drawNormalLognormalPoints(numSamples,muN,sigmaN,muLNn,sigmaLNn,rhon):
"""Return sample points from a non-standard normal-lognormal PDF."""
from mc_utilities import drawStdNormalPoints
from numpy import zeros, exp, dot, copy
from numpy.linalg import cholesky
stdNormalPoints = zeros((numSamples,2))
stdNormalPoints[:,0] = drawStdNormalPoints(numSamples)
stdNormalPoints[:,1] = drawStdNormalPoints(numSamples)
covarMatn = [ [sigmaN**2, rhon*sigmaN*sigmaLNn],
[rhon*sigmaN*sigmaLNn, sigmaLNn**2]
]
LCholesky = cholesky(covarMatn)
# normalPoints = dot(stdNormalPoints, LCholesky) + [muN, muLNn]
normalPoints = dot(stdNormalPoints, LCholesky.T) + [muN, muLNn]
# pdb.set_trace()
normalLognormalPoints = copy(normalPoints)
normalLognormalPoints[:,1] = exp(normalLognormalPoints[:,1])
return normalLognormalPoints
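# --- Hedged sanity check (editor's illustration; not in the original) ---
# With a large sample, the empirical moments should approach the inputs;
# for the lognormal column, mean(log(x)) -> muLNn and std(log(x)) -> sigmaLNn:
#
#   import numpy as np
#   pts = drawNormalLognormalPoints(100000, 0.0, 1.0, 0.0, 0.5, 0.5)
#   assert abs(np.mean(pts[:, 0]) - 0.0) < 0.05
#   assert abs(np.std(np.log(pts[:, 1])) - 0.5) < 0.05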
def calcNormalLognormalPDFValues(samplePoints,muN,sigmaN,muLNn,sigmaLNn,rhon):
"""Given a sample, return values of a normal-lognormal PDF.
Inputs:
samplePoints = a 2D array of sample points. Column 0 contains normally
distributed points. Column 1 contains lognormally
distributed points.
muN = mean of normal variate
sigmaN = standard deviation of normal variate
muLNn = mean of lognormal variate, transformed to normal space
sigmaLNn = standard deviation of lognormal variate, transformed to normal space
rhon = correlation between the 2 variates, transformed to normal space
"""
from numpy import exp, multiply, sqrt, log, pi
xN = samplePoints[:,0] # Column of normally distributed sample points
xLN = samplePoints[:,1] # Column of lognormally distributed sample points,
# left in lognormal space
prefactor = 1.0 / ( 2.0 * pi * sigmaN * sigmaLNn \
* sqrt( 1.0 - rhon**2 ) * xLN \
)
exponent = - ( 1.0 / ( 2.0 * ( 1.0 - rhon**2 ) ) ) \
* ( ( 1.0 / sigmaN**2 ) * ( xN - muN )**2 \
- ( 2.0 * rhon / ( sigmaN * sigmaLNn ) ) \
* ( xN - muN ) * ( log( xLN ) - muLNn ) \
+ ( 1.0 / sigmaLNn**2 ) * ( log( xLN ) - muLNn )**2 \
)
PDFValues = multiply( prefactor, exp( exponent ) )
return PDFValues
def calcWeightsArrayImp(samplePointsQ,alphaDefQ,
muN,sigmaN,muLNn,sigmaLNn,rhon,
muNQ,sigmaNQ,muLNnQ,sigmaLNnQ):
"""Given a sample, return importance weights P(x)/q(x).
Inputs:
samplePointsQ = a 2D array of sample points of size numSamples.
Column 0 contains normally distributed points.
Column 1 contains lognormally distributed points.
First numSamplesDefP points in array come from P(x).
alphaDefQ = Fraction of samples drawn from q(x) rather than P(x)
numSamplesDefP = Number of samples drawn from P(x) rather than q(x)
muN = mean of normal variate
sigmaN = standard deviation of normal variate
muLNn = mean of lognormal variate, transformed to normal space
sigmaLNn = standard deviation of lognormal variate, transformed to normal space
rhon = correlation between the 2 variates, transformed to normal space
muNDeltaImp = importance muN - muN
sigmaNDeltaImp = importance sigmaN - sigmaN
muLNnDeltaImp = importance muLNn - muLNn
sigmaLNnDeltaImp = importance sigmaLNn - sigmaLNn """
from numpy import divide
POfX = calcNormalLognormalPDFValues(samplePointsQ,
muN,sigmaN,muLNn,sigmaLNn,
rhon)
qOfX = calcNormalLognormalPDFValues(samplePointsQ,
muNQ,sigmaNQ,muLNnQ,sigmaLNnQ,
rhon)
qOfXAlpha = alphaDefQ * qOfX + (1-alphaDefQ) * POfX
weightsArrayImp = divide( POfX, qOfXAlpha )
#pdb.set_trace()
return weightsArrayImp
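# Note (sketch, not from the original source): these are defensive
# importance-sampling weights in the sense of Hesterberg's defensive
# mixtures: the effective proposal is
#   qAlpha(x) = alphaDefQ*q(x) + (1-alphaDefQ)*P(x),
# so w(x) = P(x)/qAlpha(x) is bounded above by 1/(1-alphaDefQ), which
# keeps the weighted estimator's variance finite even where q(x)
# severely underweights P(x).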
def computeFracRmseN(numSamples):
"""Return the fractional root-mean-square error
in a Monte-Carlo integration of Khairoutdinov-Kogan autoconversion.
As we go, optionally produce plots."""
from numpy import zeros, arange, copy, cov, corrcoef, any, nan, \
clip, finfo, amax, multiply, mean, divide, power, \
floor, concatenate, isnan as np_isnan
from mc_utilities import computeRmse, calcFncValues, integrateFncValues
from math import isnan
import matplotlib.pyplot as plt
import sys
# print("In computeRmseN")
fncDim = 2 # Dimension of uni- or multi-variate integrand function
muChi = 0
sigmaChi = 1
muNcn = 0
sigmaNcn = 0.5
rhoChiNcn = 0.5
alpha = 2.47 #2.47
beta = -1.79 #-1.79
# Control variate parameters
alphaDelta = -0.3 # Increment to alpha for control variates function, h
betaDelta = -0.3 # Increment to beta for control variates function, h
# Importance sampling parameters: Importance values - Basic MC values
muChiDeltaImp = 1.8 * sigmaChi # 1.4 * sigmaChi
sigmaChiDeltaImp = -0.00 * sigmaChi
muNcnDeltaImp = -1.0 * sigmaNcn
sigmaNcnDeltaImp = -0.00 * sigmaNcn
# Defensive sampling parameter
alphaDefQ = 0.5 # Fraction of points drawn from q(x) rather than P(x)
numExperiments = 100#1000
createCVScatterplots = False
mcIntegral = zeros(numExperiments)
mcIntegralImp = zeros(numExperiments)
mcIntegralCV = zeros(numExperiments)
analyticIntegral = calcAutoconversionIntegral( muChi,sigmaChi,
muNcn,sigmaNcn,
rhoChiNcn,
alpha,beta
)
#print "Analytic calculation of true integral = %s" % analyticIntegral
analyticIntegralCV = calcAutoconversionIntegral( muChi,sigmaChi,
muNcn,sigmaNcn,
rhoChiNcn,
alpha+alphaDelta,beta+betaDelta
)
#print "Analytic calculation of CV integral = %s" % analyticIntegralCV
muChiQ = muChi + muChiDeltaImp
sigmaChiQ = sigmaChi + sigmaChiDeltaImp
muNcnQ = muNcn + muNcnDeltaImp
sigmaNcnQ = sigmaNcn+sigmaNcnDeltaImp
# pdb.set_trace()
for idx in arange(numExperiments):
#pdb.set_trace()
samplePoints = drawNormalLognormalPoints( numSamples,
muChi,sigmaChi,
muNcn,sigmaNcn,
rhoChiNcn)
# pdb.set_trace()
fncValuesArray = calcFncValues(numSamples,fncDim,samplePoints,
autoconversionRate,alpha,beta)
# print"Function values = %s" % fncValuesArray
mcIntegral[idx] = integrateFncValues(fncValuesArray,numSamples)
#print "Monte Carlo estimate = %s" % mcIntegral[idx]
#########################################
#
# Calculate integral using control variates
#
############################################
fncValuesArrayCV = calcFncValues(numSamples,fncDim,samplePoints,
autoconversionRate,alpha+alphaDelta,beta+betaDelta)
if any(np_isnan(fncValuesArrayCV)): # 'fncValuesArrayCV==nan' would always be False
pdb.set_trace()
#pdb.set_trace()
# Compute optimal beta (pre-factor for control variate)
covCV = cov(fncValuesArray,fncValuesArrayCV)
#print "covCV = %s" % covCV
# Optimal beta
betaOpt = covCV[0,1]/amax([ covCV[1,1] , finfo(float).eps ])
#betaOpt = clip(betaOpt, 0.0, 1.0)
#print "betaOpt = %s" % betaOpt
corrCV = corrcoef(fncValuesArray,fncValuesArrayCV)
# pdb.set_trace()
mcIntegralCV[idx] = integrateFncValues(fncValuesArray-betaOpt*fncValuesArrayCV,numSamples) \
+ betaOpt*analyticIntegralCV
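# Note (sketch, not from the original source): this is the standard
# control-variate estimator
#   I_CV = mean( f(X) - betaOpt*h(X) ) + betaOpt*E[h],
# where h is the integrand with shifted exponents and E[h] =
# analyticIntegralCV is known exactly. With betaOpt = cov(f,h)/var(h),
# the variance satisfies var(I_CV) = (1 - corr(f,h)**2) * var(I_MC),
# which is the relation checked in the second plot of main().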
#print "CV Monte Carlo estimate = %s" % mcIntegralCV[idx]
#pdb.set_trace()
if isnan(mcIntegralCV[idx]):
pdb.set_trace()
#########################################
#
# Calculate integral using importance sampling (+ control variate)
#
############################################
# Number of samples drawn from q(x) ( and not P(x) ) in defensive importance sampling
numSamplesDefQ = int( floor( alphaDefQ * numSamples ) )
# Number of samples drawn from P(x) ( and not q(x) ) in defensive importance sampling
numSamplesDefP = numSamples-numSamplesDefQ
# Draw numSamplesDefQ samples from q(x), without including defensive points from P(x)
samplePointsQOnly = drawNormalLognormalPoints( numSamplesDefQ,
muChiQ,sigmaChiQ,muNcnQ,sigmaNcnQ,
rhoChiNcn)
# Concatenate sample points drawn from q(x) and P(x)
# P(x) points come first
samplePointsQ = concatenate( ( samplePoints[0:numSamplesDefP,:], samplePointsQOnly ),
axis=0 ) # Add rows to the bottom of the 2-column array
#pdb.set_trace()
# Assertion check:
if ( samplePointsQ.shape != samplePoints.shape ):
print "ERROR: Defensive importance sampling generates the wrong number of sample points!!!!"
sys.exit(1)
fncValuesArrayQ = calcFncValues(numSamples,fncDim,samplePointsQ,
autoconversionRate,alpha,beta)
fncValuesArrayCVQ = calcFncValues(numSamples,fncDim,samplePointsQ,
autoconversionRate,alpha+alphaDelta,beta+betaDelta)
weightsArrayImp = calcWeightsArrayImp(samplePointsQ,alphaDefQ,
muChi,sigmaChi,muNcn,sigmaNcn,rhoChiNcn,
muChiQ,sigmaChiQ,muNcnQ,sigmaNcnQ)
# Effective sample size
neOnN = divide( (mean(weightsArrayImp))**2,
mean(power(weightsArrayImp,2))
)
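# Note (sketch, not from the original source): neOnN is Kong's
# effective-sample-size ratio,
#   ne/N = (mean(w))**2 / mean(w**2),
# equal to 1 for uniform weights and approaching 1/numSamples when a
# single weight dominates; values far below 1 flag a poor proposal q(x).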
#print "Effective sample size = neOnN = %s" % neOnN
betaCVQ = 0.0
#betaCVQ = betaOpt # betaOpt is the optimal beta for non-importance sampling
integrandArrayImp = multiply( fncValuesArrayQ-betaCVQ*fncValuesArrayCVQ,
weightsArrayImp )
mcIntegralImp[idx] = integrateFncValues(integrandArrayImp,numSamples) \
+ betaCVQ*analyticIntegralCV
#pdb.set_trace()
fracRmse = computeRmse(analyticIntegral,mcIntegral)/analyticIntegral
print "Fractional RMSE of Monte Carlo estimate = %s" % fracRmse
# pdb.set_trace()
fracRmseImp = computeRmse(analyticIntegral,mcIntegralImp)/analyticIntegral
print "Fractional RMSE of Monte Carlo estimate = %s" % fracRmse
fracRmseCV = computeRmse(analyticIntegral,mcIntegralCV)/analyticIntegral
print "Fractional RMSE of CV Monte Carlo estimate = %s" % fracRmseCV
# if isnan(fracRmseCV):
# pdb.set_trace
if ( createCVScatterplots == True ):
plt.scatter(fncValuesArray,fncValuesArrayCV)
plt.plot([min(fncValuesArray), max(fncValuesArray)],
[min(fncValuesArray), max(fncValuesArray)])
plt.grid()
plt.xlabel('Original function values')
plt.ylabel('Control variate function values')
plt.show()
# pdb.set_trace()
return (fracRmse, fracRmseImp, fracRmseCV, corrCV[0,1])
def main():
from numpy import zeros, arange, sqrt, divide, abs
import matplotlib.pyplot as plt
numNValues = 10#20#10 # Number of trials with different sample size
fracRmseNValues = zeros(numNValues)
fracRmseNValuesImp = zeros(numNValues)
fracRmseNValuesCV = zeros(numNValues)
corrCVNValues = zeros(numNValues)
numSamplesN = zeros(numNValues)
for idx in arange(numNValues):
numSamplesN[idx] = 2**(idx+2)
print "numSamplesN = %s" % numSamplesN[idx]
fracRmseNValues[idx], fracRmseNValuesImp[idx], fracRmseNValuesCV[idx], \
corrCVNValues[idx] = \
computeFracRmseN(int(numSamplesN[idx])) # cast: numSamplesN holds numpy floats
theoryError = 10.0/sqrt(numSamplesN)
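# Note (sketch, not from the original source): the reference line encodes
# the usual Monte Carlo rate RMSE ~ C/sqrt(N); the constant 10.0 is only a
# plotting offset, not a derived prefactor. The second figure below
# compares the measured RMSE_CV/RMSE_MC ratio against the control-variate
# prediction sqrt(1 - corr(f,h)**2).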
# pdb.set_trace()
plt.ion() # Use interactive mode so that program continues when plot appears
plt.clf()
# plt.subplot(221)
plt.loglog(numSamplesN, fracRmseNValues, label='Fractional MC Error')
plt.loglog(numSamplesN, fracRmseNValuesImp, label='Fractional Imp MC Error')
plt.loglog(numSamplesN, fracRmseNValuesCV, label='Fractional CV MC Error')
plt.loglog(numSamplesN, theoryError, label='Theory (1/sqrt(N))')
plt.legend()
plt.xlabel('Number of sample points')
plt.ylabel('Root-mean-square error')
plt.figure()
plt.clf()
plt.semilogx( numSamplesN, divide( fracRmseNValuesCV, fracRmseNValues ), label="Sample CV Err" )
plt.semilogx( numSamplesN, sqrt( abs( 1 - corrCVNValues**2 ) ), label="sqrt(|1-rho**2|)" )
plt.xlabel('Number of sample points')
plt.ylabel('Data and theoretical estimate [-]')
plt.title('Control variate RMSE normalized by MC RMSE')
plt.legend()
plt.show()
# Standard boilerplate to call the main() function to begin
# the program.
if __name__ == '__main__':
main()
|
gpl-2.0
| -230,107,617,624,090,880 | 36.489104 | 130 | 0.591719 | false |
kangwonlee/ECA
|
lab_07_linear_algebra/linear_algebra.py
|
1
|
2220
|
# -*- coding: utf8 -*-
# The comment above indicates that Korean text is used in this .py file
def dot(a, b):
"""
크기가 같은 두 벡터 a, b의 내적 dot product
"""
# Size of vector a.
# Assume that vector b has the same size
# (in which cases could an error occur?)
n = len(a)
result = 0.0
for i in xrange(n):
result += a[i] * b[i]
return result
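# Example (sketch, not from the original source):
# dot([1.0, 2.0], [3.0, 4.0]) returns 1*3 + 2*4 = 11.0. If b is shorter
# than a, the loop raises an IndexError -- one answer to the question
# posed in the comments above.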
def multiply_matrix_vector(A, x):
n_row = len(A)
n_column = len(A[0])
result = [0.0] * n_row
for i in xrange(n_row):
result[i] = dot(A[i], x)
return result
def multiply_matrix_matrix(A, B):
n_row = len(A)
n_column = len(B[0])
n_dummy = len(A[0])
n_dummy2 = len(B)
# Check that the matrix sizes are compatible
if n_dummy != n_dummy2:
print "Incorrect Matrix Size"
return None
# Allocate space to store the result matrix
result = []
for i_row in xrange(n_row):
# Allocate space to store each row
result.append([0.0] * n_column)
# Loop over rows
for i in xrange(n_row):
# Loop over columns
for j in xrange(n_column):
result[i][j] = 0.0
# dummy index
for k in xrange(n_dummy):
result[i][j] += A[i][k] * B[k][j]
return result
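# Example (sketch, not from the original source): for
# A = [[0.0, 1.0], [1.0, 0.0]] and the 2x1 column B = [[3.0], [4.0]],
# multiply_matrix_matrix(A, B) returns [[4.0], [3.0]] -- the same row
# swap that main() below demonstrates via the zip(*x_vector2T) column
# vector.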
def main():
a_vector = [1.0, 0.0]
b_vector = [3.0, 4.0]
a_dot_b = dot(a_vector, b_vector)
print "a =", a_vector
print "b =", b_vector
print "a dot b =", a_dot_b
A_matrix = [[0.0, 1.0],
[1.0, 0.0]]
x_vector = [3.0, 4.0]
A_x = multiply_matrix_vector(A_matrix, x_vector)
print "A =", A_matrix
print "x =", x_vector
print "A*x =", A_x
A_matrix2 = [[0.0, 1.0],
[1.0, 0.0]]
x_vector2T = [[3.0, 4.0]]
x_vector2 = zip(*x_vector2T)
A_x2 = multiply_matrix_matrix(A_matrix2, x_vector2)
print "A2 =", A_matrix2
print "x2 =", x_vector2
print "A2*x2 =", A_x2
B_matrix = [[100, 101],
[110, 111]]
print "A =", A_matrix
print "B =", B_matrix
print "A*B =", multiply_matrix_matrix(A_matrix, B_matrix)
if "__main__" == __name__:
main()
|
apache-2.0
| 2,096,145,424,285,517,800 | 19.222222 | 61 | 0.484016 | false |
gpodder/mygpo
|
mygpo/directory/views.py
|
1
|
11742
|
from math import ceil
from collections import Counter
from django.http import HttpResponseNotFound, Http404, HttpResponseRedirect
from django.urls import reverse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render
from django.db.models import Count
from django.contrib.sites.requests import RequestSite
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from django.views.generic import ListView
from django.utils.decorators import method_decorator
from django.views import View
from django.views.generic import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from mygpo.podcasts.models import Podcast, Episode
from mygpo.directory.search import search_podcasts
from mygpo.web.utils import (
process_lang_params,
get_language_names,
get_page_list,
get_podcast_link_target,
sanitize_language_codes,
)
from mygpo.directory.tags import Topics
from mygpo.categories.models import Category
from mygpo.podcastlists.models import PodcastList
from mygpo.data.feeddownloader import (
verify_podcast_url,
NoEpisodesException,
UpdatePodcastException,
)
from mygpo.data.tasks import update_podcasts
class ToplistView(TemplateView):
""" Generic Top List view """
@method_decorator(vary_on_cookie)
@method_decorator(cache_control(private=True))
def dispatch(self, *args, **kwargs):
""" Only used for applying decorators """
return super(ToplistView, self).dispatch(*args, **kwargs)
def all_languages(self):
"""Returns all 2-letter language codes that are used by podcasts.
It filters obviously invalid strings, but does not check if any
of these codes is contained in ISO 639."""
query = Podcast.objects.exclude(language__isnull=True)
query = query.distinct("language").values("language")
langs = [o["language"] for o in query]
langs = sorted(sanitize_language_codes(langs))
return get_language_names(langs)
def language(self):
""" Currently selected language """
return process_lang_params(self.request)
def site(self):
""" Current site for constructing absolute links """
return RequestSite(self.request)
class PodcastToplistView(ToplistView):
""" Most subscribed podcasts """
template_name = "toplist.html"
def get_context_data(self, num=100):
context = super(PodcastToplistView, self).get_context_data()
entries = (
Podcast.objects.all()
.prefetch_related("slugs")
.toplist(self.language())[:num]
)
context["entries"] = entries
context["max_subscribers"] = max([0] + [p.subscriber_count() for p in entries])
return context
class EpisodeToplistView(ToplistView):
""" Most listened-to episodes """
template_name = "episode_toplist.html"
def get_context_data(self, num=100):
context = super(EpisodeToplistView, self).get_context_data()
entries = (
Episode.objects.all()
.select_related("podcast")
.prefetch_related("slugs", "podcast__slugs")
.toplist(self.language())[:num]
)
context["entries"] = entries
# Determine maximum listener amount (or 0 if no entries exist)
listeners = [e.listeners for e in entries if e.listeners is not None]
max_listeners = max(listeners, default=0)
context["max_listeners"] = max_listeners
return context
class Carousel(View):
""" A carousel demo """
@method_decorator(cache_control(private=True))
@method_decorator(vary_on_cookie)
def get(self, request):
return render(
request,
"carousel.html",
{
# evaluated lazily, cached by template
"topics": Topics()
},
)
class Directory(View):
""" The main directory page """
@method_decorator(cache_control(private=True))
@method_decorator(vary_on_cookie)
def get(self, request):
return render(
request,
"directory.html",
{
# evaluated lazily, cached by template
"topics": Topics(),
"podcastlists": self.get_random_list(),
"random_podcast": Podcast.objects.all().random().first(),
"podcast_ad": Podcast.objects.get_advertised_podcast(),
},
)
def get_random_list(self, podcasts_per_list=5):
random_list = PodcastList.objects.order_by("?").first()
yield random_list
@cache_control(private=True)
@vary_on_cookie
def category(request, category, page_size=20):
try:
category = Category.objects.get(tags__tag=category)
except Category.DoesNotExist:
return HttpResponseNotFound()
podcasts = category.entries.all().prefetch_related("podcast", "podcast__slugs")
paginator = Paginator(podcasts, page_size)
page = request.GET.get("page")
try:
podcasts = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
podcasts = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
podcasts = paginator.page(paginator.num_pages)
page_list = get_page_list(1, paginator.num_pages, podcasts.number, 15)
return render(
request,
"category.html",
{"entries": podcasts, "category": category.title, "page_list": page_list},
)
RESULTS_PER_PAGE = 20
@cache_control(private=True)
@vary_on_cookie
def search(request, template="search.html", args={}):
if "q" in request.GET:
q = request.GET.get("q", "")
try:
page = int(request.GET.get("page", 1))
except ValueError:
page = 1
start = RESULTS_PER_PAGE * (page - 1)
results = search_podcasts(q)
total = len(results)
num_pages = int(ceil(total / RESULTS_PER_PAGE))
results = results[start : start + RESULTS_PER_PAGE]
page_list = get_page_list(1, num_pages, page, 15)
else:
results = []
q = None
page_list = []
max_subscribers = max([p.subscribers for p in results] + [0])
current_site = RequestSite(request)
return render(
request,
template,
dict(
q=q,
results=results,
page_list=page_list,
max_subscribers=max_subscribers,
domain=current_site.domain,
**args,
),
)
@cache_control(private=True)
@vary_on_cookie
def podcast_lists(request, page_size=20):
lists = (
PodcastList.objects.all()
.annotate(num_votes=Count("votes"))
.order_by("-num_votes")
)
paginator = Paginator(lists, page_size)
page = request.GET.get("page")
try:
lists = paginator.page(page)
except PageNotAnInteger:
lists = paginator.page(1)
page = 1
except EmptyPage:
lists = paginator.page(paginator.num_pages)
page = paginator.num_pages
num_pages = int(ceil(PodcastList.objects.count() / float(page_size)))
page_list = get_page_list(1, num_pages, int(page), 15)
return render(
request, "podcast_lists.html", {"lists": lists, "page_list": page_list}
)
class MissingPodcast(View):
""" Check if a podcast is missing """
@method_decorator(login_required)
def get(self, request):
site = RequestSite(request)
# check if we're doing a query
url = request.GET.get("q", None)
if not url:
podcast = None
can_add = False
else:
try:
podcast = Podcast.objects.get(urls__url=url)
can_add = False
except Podcast.DoesNotExist:
# check if we could add a podcast for the given URL
podcast = False
try:
can_add = verify_podcast_url(url)
except (UpdatePodcastException, NoEpisodesException) as ex:
can_add = False
messages.error(request, str(ex))
return render(
request,
"missing.html",
{"site": site, "q": url, "podcast": podcast, "can_add": can_add},
)
class AddPodcast(View):
""" Add a missing podcast"""
@method_decorator(login_required)
@method_decorator(cache_control(private=True))
@method_decorator(vary_on_cookie)
def post(self, request):
url = request.POST.get("url", None)
if not url:
raise Http404
res = update_podcasts.delay([url])
return HttpResponseRedirect(reverse("add-podcast-status", args=[res.task_id]))
class AddPodcastStatus(TemplateView):
""" Status of adding a podcast """
template_name = "directory/add-podcast-status.html"
def get(self, request, task_id):
result = update_podcasts.AsyncResult(task_id)
if not result.ready():
return self.render_to_response({"ready": False})
try:
podcast_ids = result.get()
podcasts = Podcast.objects.filter(pk__in=podcast_ids)
messages.success(request, _("%d podcasts added" % len(podcasts)))
except (UpdatePodcastException, NoEpisodesException) as ex:
messages.error(request, str(ex))
podcasts = None
return self.render_to_response({"ready": True, "podcasts": podcasts})
class PodcastListView(ListView):
""" A generic podcast list view """
paginate_by = 15
context_object_name = "podcasts"
@method_decorator(vary_on_cookie)
@method_decorator(cache_control(private=True))
def dispatch(self, *args, **kwargs):
""" Only used for applying decorators """
return super(PodcastListView, self).dispatch(*args, **kwargs)
@property
def _page(self):
"""The current page
There seems to be no other pre-defined method for getting the current
page, see
https://docs.djangoproject.com/en/dev/ref/class-based-views/mixins-multiple-object/#multipleobjectmixin
"""
return self.get_context_data()["page_obj"]
def page_list(self, page_size=15):
""" Return a list of pages, eg [1, 2, 3, '...', 6, 7, 8] """
page = self._page
return get_page_list(
1, page.paginator.num_pages, page.number, page.paginator.per_page
)
def max_subscribers(self):
""" Maximum subscribers of the podcasts on this page """
page = self._page
podcasts = page.object_list
return max([p.subscriber_count() for p in podcasts] + [0])
class LicensePodcastList(PodcastListView):
""" Lists podcasts with a given license """
template_name = "directory/license-podcasts.html"
def get_queryset(self):
return Podcast.objects.all().license(self.license_url)
@property
def license_url(self):
return self.kwargs["license_url"]
class LicenseList(TemplateView):
""" Lists all podcast licenses """
template_name = "directory/licenses.html"
def licenses(self):
""" Returns all podcast licenses """
query = Podcast.objects.exclude(license__isnull=True)
values = query.values("license").annotate(Count("id")).order_by()
counter = Counter({l["license"]: l["id__count"] for l in values})
return counter.most_common()
|
agpl-3.0
| -3,174,001,669,703,650,000 | 28.136476 | 111 | 0.622977 | false |
gbrammer/pabeta
|
driz_tools/back_wcs.py
|
1
|
1194
|
from drizzlepac import astrodrizzle, tweakreg, tweakback
from stwcs import updatewcs
import glob, os
from astropy.io import fits
from multiprocessing import Pool
from stsci.tools import teal
import argparse
def parse_args():
"""Parse command line arguements.
Parameters:
Nothing
Returns:
arguments: argparse.Namespace object
An object containing all of the added arguments.
Outputs:
Nothing
"""
input_help = 'Images to tweak back to flts. Default is f*dr?.fits'
inp = 'f*dr?.fits'
parser = argparse.ArgumentParser()
parser.add_argument('-i', type=str, help=input_help, action='store',
required=False, default=inp)
arguments = parser.parse_args()
return arguments
def tback(drz):
"""Runs tweakback to align flt/flcs with the drz/drc astrometry"""
flts = tweakback.extract_input_filenames(drz)
print 'Tweaking back exposures for {} with input ims:'.format(drz)
for f in flts:
print f
try:
tweakback.tweakback(drz)
except:
return
if __name__ =='__main__':
options = parse_args()
drzs=glob.glob(options.i)
p = Pool(32)
p.map(tback, drzs)
|
mit
| 938,047,989,270,665,600 | 24.404255 | 72 | 0.658291 | false |
feinheit/django-cleverreach
|
cleverreach/tests/api_v51.py
|
1
|
7594
|
# coding: utf-8
from cleverreach import CleverreachAPIException
import datetime, time
from django.conf import settings
from django.test import TestCase
from ..api.v5_1 import Client
class User(object): # Bunch class
def __init__(self, **kwargs):
for k, v in kwargs.items():
if k not in self.__dict__:
self.__dict__[k] = v
class TestAPI51(TestCase):
def setUp(self):
self.list1 = settings.CLEVERREACH['list1']
self.list2 = settings.CLEVERREACH['list2']
self.form1 = settings.CLEVERREACH['form1']
self.form2 = settings.CLEVERREACH['form2'] # email only
self.form3 = settings.CLEVERREACH['form3']
self.client = Client()
self.email1 = '[email protected]'
self.email2 = '[email protected]'
def test_group_get_list(self):
groups = self.client.group_get_list()
group_names = [unicode(g.name) for g in groups]
self.assertTrue('test1' in group_names)
self.assertTrue('test2' in group_names)
def test_forms_get_list(self):
forms = self.client.forms_get_list(self.list1)
self.assertEqual(len(forms), 1)
obj = forms[0]
self.assertTrue(isinstance(obj, object))
self.assertEqual(obj.name, 'testform')
self.assertEqual(obj.id, self.form1)
forms = self.client.forms_get_list(self.list2)
self.assertEqual(len(forms), 2)
form_names = [f.name for f in forms]
form_ids = [f.id for f in forms]
self.assertTrue('testform2' in form_names)
self.assertTrue('testform3' in form_names)
self.assertTrue(self.form2 in form_ids)
self.assertTrue(self.form3 in form_ids)
def test_forms_get_code(self):
code = self.client.forms_get_code(self.form1)
self.assertTrue(isinstance(code, basestring))
self.assertTrue('<form' in code)
def test_forms_activation_mail(self):
try:
receiver1 = {
'email': self.email1,
'source':'API TEST ADD',
'deactivated': time.mktime(datetime.datetime.now().timetuple())
}
receiver2 = {
'email': self.email2,
'source':'API TEST ADD',
'activated': time.mktime(datetime.datetime.now().timetuple())
}
self.client.receiver_add(self.list2, receiver1)
self.client.receiver_add(self.list2, receiver2)
response = self.client.forms_activation_mail(form_id=self.form2,
email=self.email1)
self.assertEqual(response, self.email1)
self.assertRaisesMessage(CleverreachAPIException,
'subscriber allready active',
self.client.forms_activation_mail, form_id=self.form2,
email=self.email2)
finally:
self.client.group_clear(self.list2)
def test_receiver_add(self):
try:
# create two receivers. One activated, the other not.
receiver1 = {
'email': self.email1,
'registered': time.mktime(datetime.datetime.now().timetuple()),
'source':'API TEST ADD',
'activated': time.mktime(datetime.datetime.now().timetuple())
}
receiver2 = {
'email': self.email2,
'registered': time.mktime(datetime.datetime.now().timetuple()),
'source':'API TEST ADD',
'deactivated': time.mktime(datetime.datetime.now().timetuple())
}
data = self.client.receiver_add(self.list2, receiver1)
self.assertEqual(data.email, self.email1)
self.assertEqual(data.active, True)
self.assertEqual(data.source, 'API TEST ADD')
data = self.client.receiver_add(self.list2, receiver2)
self.assertEqual(data.email, self.email2)
self.assertEqual(data.active, False)
self.assertEqual(data.source, 'API TEST ADD')
# add an existing user
self.assertRaisesMessage(CleverreachAPIException,
'duplicate data',
self.client.receiver_add, list_id=self.list2,
receiver=receiver1)
finally:
self.client.group_clear(self.list2)
def test_receiver_get_by_email(self):
try:
# add a user with some attributes
receiver1 = {
'email': self.email1,
'registered': time.mktime(datetime.datetime.now().timetuple()),
'source':'API TEST GET',
'activated': time.mktime(datetime.datetime.now().timetuple()),
'attributes': [
{'key': 'first_name', 'value': u'Brüce'},
{'key': 'last_name', 'value': u'Wayne'},
{'key': 'salutation', 'value': 'male'},
]
}
data = self.client.receiver_add(self.list1, receiver1)
self.assertEqual(data.email, self.email1)
# test the actual method
data = self.client.receiver_get_by_email(self.list1, self.email1)
self.assertEqual(data.email, self.email1)
self.assertEqual(data.source, 'API TEST GET')
self.assertEqual(len(data.attributes), 3)
values = [unicode(a.value) for a in data.attributes]
self.assertTrue(u'Brüce' in values)
self.assertTrue(u'Wayne' in values)
self.assertTrue(u'male' in values)
finally:
self.client.group_clear(self.list1)
def test_receiver_set_active_and_receiver_set_inactive(self):
try:
# create an inactive receiver:
receiver1 = {
'email': self.email1,
'source': 'API TEST SET ACTIVE',
'deactivated': time.mktime(datetime.datetime.now().timetuple())
}
data = self.client.receiver_add(self.list2, receiver1)
self.assertEqual(data.email, self.email1)
self.assertEqual(data.active, False)
# set it active
data = self.client.receiver_set_active(self.list2, self.email1)
self.assertEqual(data.email, self.email1)
self.assertEqual(data.active, True)
# set it inactive
data = self.client.receiver_set_inactive(self.list2, self.email1)
self.assertEqual(data.email, self.email1)
self.assertEqual(data.active, False)
finally:
self.client.group_clear(self.list2)
def test_receiver_delete(self):
try:
receiver1 = {
'email': self.email1,
'source': 'API TEST DELETE',
}
data = self.client.receiver_add(self.list2, receiver1)
self.assertEqual(data.email, self.email1)
data = self.client.receiver_delete(self.list2, self.email1)
self.assertEqual(data, self.email1)
self.assertRaisesMessage(CleverreachAPIException,
'''Error for method receiverGetByEmail: data not found. Data: (receiverData){
email = "%s"
}''' % self.email1,
self.client.receiver_get_by_email, list_id=self.list2,
email=self.email1)
finally:
self.client.group_clear(self.list2)
|
bsd-2-clause
| -7,707,834,318,979,234,000 | 37.150754 | 93 | 0.556507 | false |
ultrabug/uhashring
|
tests/test_ketama_compatibility.py
|
1
|
1338
|
# -*- coding: utf-8 -*-
"""ketama is not released under pypi !
This test is only run on my dev machine and is not really part of the CI.
"""
try:
import ketama
except Exception:
ketama = None
from random import randint
from tempfile import NamedTemporaryFile
import pytest
from uhashring import HashRing
@pytest.fixture
def ketama_config_file(request):
valid_list = NamedTemporaryFile(prefix="py.test_")
valid_list.write(b"127.0.0.1:11211\t600\n")
valid_list.write(b"127.0.0.1:11212\t400\n")
valid_list.flush()
def fin():
valid_list.close()
print("closed valid_list")
request.addfinalizer(fin)
return valid_list.name
def test_ketama_hashi():
if not ketama:
return
ring = HashRing()
assert ring.hashi("test") == ketama.hashi("test")
def test_ketama_compatibility(ketama_config_file):
if not ketama:
return
ring = HashRing(
nodes={"127.0.0.1:11211": 600, "127.0.0.1:11212": 400},
replicas=4,
vnodes=40,
compat=True,
)
continuum = ketama.Continuum(ketama_config_file)
assert ring.get_points() == continuum.get_points()
numhits = 1000
numvalues = 10000
for i in range(numhits):
key = str(randint(1, numvalues))
assert ring.get_server(key) == continuum.get_server(key)
|
bsd-3-clause
| -2,153,386,487,287,292,000 | 22.068966 | 73 | 0.646487 | false |
snowcloud/django-sitedown
|
sitedown/middleware.py
|
1
|
2076
|
from django.conf import settings
from django.contrib.auth.views import login
from django.contrib.flatpages.models import FlatPage
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext, loader
import urlparse
class HttpResponseServiceUnavailable(HttpResponse):
status_code = 503
class SitedownMiddleware(object):
"""
Site down middleware. If enabled, each Django-powered URL will
redirect to a message, except for admin.
"""
def __init__(self):
self.template = getattr(settings, 'SITEDOWN_TEMPLATE', 'sitedown/default.html' )
self.disabled = getattr(settings, 'SITEDOWN_DISABLE', False)
self.static_url = getattr(settings, 'STATIC_URL', False)
self.redirect = getattr(settings, 'SITEDOWN_REDIRECT', '/sitedown/')
self.use_302 = getattr(settings, 'SITEDOWN_USE_302', False)
self.flatpage = getattr(settings, 'SITEDOWN_FLATPAGE', False)
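# Example (sketch; values are illustrative, not defaults of any project):
# the settings read above could be configured in settings.py as
#   SITEDOWN_DISABLE = False
#   SITEDOWN_TEMPLATE = 'sitedown/default.html'
#   SITEDOWN_REDIRECT = '/sitedown/'
#   SITEDOWN_USE_302 = False
#   SITEDOWN_FLATPAGE = '/downtime/'  # hypothetical FlatPage URL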
def process_request(self, request):
if self.disabled or \
request.path.startswith('/admin') or \
request.path.startswith(urlparse.urlparse(settings.MEDIA_URL).path) or \
(self.static_url and request.path.startswith(urlparse.urlparse(settings.STATIC_URL).path)):
return None
if request.path == self.redirect:
return render_to_response(self.template,
RequestContext( request, {}))
if self.use_302:
return HttpResponseRedirect(self.redirect)
else:
response = HttpResponseServiceUnavailable(mimetype='text/html')
t = loader.get_template(self.template)
if self.flatpage:
fp = FlatPage.objects.get(url=self.flatpage)
title = fp.title
message = fp.content
else:
title = message = ''
response.write(t.render(RequestContext( request, { 'title': title, 'message': message })))
return response
|
bsd-3-clause
| -4,390,148,274,306,541,600 | 41.367347 | 103 | 0.647399 | false |
drtoful/pyvault
|
tests/02_store_test.py
|
1
|
1342
|
import unittest
import os
from nose.tools import assert_true
from nose.tools import assert_false
from pyvault import PyVault
from pyvault.backends.file import PyVaultFileBackend
from pyvault.backends.ptree import PyVaultPairtreeBackend
class VaultStore(unittest.TestCase):
"""
testing storing data into the vault with different
backends and their resulting files.
"""
def test_store_file(self):
backend = PyVaultFileBackend("/tmp/_pyvault_file")
vault = PyVault(backend)
vault.unlock("passphrase", False)
assert_false(vault.is_locked())
vault.store("key", "secret")
assert_true(os.path.isfile("/tmp/_pyvault_file/8335fa56d487562de248f47befc72743334051ddffcc2c09275f665454990317594745ee17c08f798cd7dce0ba8155dcda14f6398c1d1545116520a133017c09"))
def test_store_ptree(self):
backend = PyVaultPairtreeBackend("/tmp/_pyvault_ptree")
vault = PyVault(backend)
vault.unlock("passphrase", False)
assert_false(vault.is_locked())
vault.store("key", "secret")
assert_true(os.path.isfile("/tmp/_pyvault_ptree/pairtree_root/83/35/fa/56/d4/87/56/2d/e2/48/f4/7b/ef/c7/27/43/33/40/51/dd/ff/cc/2c/09/27/5f/66/54/54/99/03/17/59/47/45/ee/17/c0/8f/79/8c/d7/dc/e0/ba/81/55/dc/da/14/f6/39/8c/1d/15/45/11/65/20/a1/33/01/7c/09/obj/data"))
|
bsd-3-clause
| -8,959,360,979,639,010,000 | 39.666667 | 273 | 0.71237 | false |
gajim/gajim
|
gajim/common/zeroconf/zeroconf_avahi_const.py
|
1
|
1365
|
# Copyright (C) 2018 Philipp Hörist <philipp AT hoerist.com>
#
# This file is part of Gajim.
#
# Gajim is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; version 3 only.
#
# Gajim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gajim. If not, see <http://www.gnu.org/licenses/>.
from enum import IntEnum
DBUS_NAME = "org.freedesktop.Avahi"
DBUS_INTERFACE_SERVER = DBUS_NAME + ".Server"
DBUS_INTERFACE_ENTRY_GROUP = DBUS_NAME + ".EntryGroup"
DBUS_INTERFACE_DOMAIN_BROWSER = DBUS_NAME + ".DomainBrowser"
class ServerState(IntEnum):
INVALID = 0
REGISTERING = 1
RUNNING = 2
COLLISION = 3
FAILURE = 4
class EntryGroup(IntEnum):
UNCOMMITTED = 0
REGISTERING = 1
ESTABLISHED = 2
COLLISION = 3
FAILURE = 4
class DomainBrowser(IntEnum):
BROWSE = 0
BROWSE_DEFAULT = 1
REGISTER = 2
REGISTER_DEFAULT = 3
BROWSE_LEGACY = 4
class Protocol(IntEnum):
UNSPEC = -1
INET = 0
INET6 = 1
class Interface(IntEnum):
UNSPEC = -1
|
gpl-3.0
| 2,970,460,507,138,177,000 | 22.517241 | 67 | 0.70088 | false |
bingopodcast/bingos
|
bingo_emulator/graphics/ballerina.py
|
1
|
36277
|
import pygame
import random
pygame.display.set_caption("Multi Bingo")
screen = pygame.display.set_mode((0,0))
screen.fill([0,0,0])
pygame.mouse.set_visible(False)
meter = pygame.image.load('graphics/assets/register_cover.png').convert()
magic_screen = pygame.image.load('ballerina/assets/magic_screen.png').convert()
number_card = pygame.image.load('ballerina/assets/number_card.png').convert()
odds = pygame.image.load('ballerina/assets/odds.png').convert_alpha()
eb = pygame.image.load('ballerina/assets/extra_ball.png').convert_alpha()
eb_number = pygame.image.load('ballerina/assets/eb_number.png').convert_alpha()
extra_balls = pygame.image.load('ballerina/assets/extra_balls.png').convert_alpha()
feature = pygame.image.load('ballerina/assets/selection.png').convert_alpha()
ms_indicator = pygame.image.load('ballerina/assets/ms_arrow.png').convert_alpha()
ms_teaser = pygame.image.load('ballerina/assets/ms_teaser.png').convert_alpha()
ti = pygame.image.load('ballerina/assets/selection_arrow.png').convert_alpha()
super_section = pygame.image.load('ballerina/assets/super_section.png').convert_alpha()
ms_letter = pygame.image.load('ballerina/assets/ms_letter.png').convert_alpha()
number = pygame.image.load('ballerina/assets/number.png').convert_alpha()
select_now = pygame.image.load('ballerina/assets/select_now.png').convert_alpha()
three_blue = pygame.image.load('ballerina/assets/blue_section.png').convert_alpha()
tilt = pygame.image.load('ballerina/assets/tilt.png').convert_alpha()
one_seven_feature = pygame.image.load('ballerina/assets/one_seven_feature.png').convert_alpha()
one_seven = pygame.image.load('ballerina/assets/one_seven.png').convert_alpha()
bg_menu = pygame.image.load('ballerina/assets/ballerina_menu.png').convert_alpha()
bg_gi = pygame.image.load('ballerina/assets/ballerina_gi.png').convert_alpha()
bg_off = pygame.image.load('ballerina/assets/ballerina_off.png').convert_alpha()
class scorereel():
""" Score Reels are used to count replays """
def __init__(self, pos, image):
self.position = pos
self.default_y = self.position[1]
self.image = pygame.image.load(image).convert()
reel1 = scorereel([97,785], "graphics/assets/white_reel.png")
reel10 = scorereel([78,785], "graphics/assets/white_reel.png")
reel100 = scorereel([59,785], "graphics/assets/white_reel.png")
def display(s, replays=0, menu=False):
meter.set_colorkey((255,0,252))
meter_position = [51,785]
screen.blit(reel1.image, reel1.position)
screen.blit(reel10.image, reel10.position)
screen.blit(reel100.image, reel100.position)
screen.blit(meter, meter_position)
number_card_position = [235,363]
screen.blit(number_card, number_card_position)
magic_screen.set_colorkey((255,0,252))
#default position 230x 359y
#subtract 47 per position
if s.game.magic_screen.position == 0:
magic_screen_position = [240,358]
elif s.game.magic_screen.position == 1:
magic_screen_position = [192,358]
elif s.game.magic_screen.position == 2:
magic_screen_position = [145,358]
elif s.game.magic_screen.position == 3:
magic_screen_position = [96,358]
elif s.game.magic_screen.position == 4:
magic_screen_position = [49,358]
elif s.game.magic_screen.position == 5:
magic_screen_position = [3,358]
elif s.game.magic_screen.position == 6:
magic_screen_position = [-42,358]
elif s.game.magic_screen.position == 7:
magic_screen_position = [-89,358]
elif s.game.magic_screen.position == 8:
magic_screen_position = [-136,358]
elif s.game.magic_screen.position == 9:
magic_screen_position = [-183,358]
screen.blit(magic_screen, magic_screen_position)
backglass_position = [0, 0]
backglass = pygame.Surface(screen.get_size(), flags=pygame.SRCALPHA)
backglass.fill((0, 0, 0))
if menu == True:
screen.blit(bg_menu, backglass_position)
else:
if (s.game.anti_cheat.status == True):
screen.blit(bg_gi, backglass_position)
else:
screen.blit(bg_off, backglass_position)
if s.game.eb_play.status == True:
eb_position = [41,1014]
screen.blit(extra_balls, eb_position)
if s.game.extra_ball.position >= 1:
eb_position = [151,1014]
screen.blit(eb_number, eb_position)
if s.game.extra_ball.position >= 2:
eb_position = [198,1014]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 3:
eb_position = [262,1014]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 4:
eb_position = [330,1014]
screen.blit(eb_number, eb_position)
if s.game.extra_ball.position >= 5:
eb_position = [376,1014]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 6:
eb_position = [436,1014]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 7:
eb_position = [506,1014]
screen.blit(eb_number, eb_position)
if s.game.extra_ball.position >= 8:
eb_position = [554,1014]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 9:
eb_position = [617,1014]
screen.blit(eb, eb_position)
if s.game.magic_screen_feature.position >= 4:
if s.game.before_fourth.status == True:
p = [551,572]
screen.blit(feature, p)
if s.game.rollovers.status == True:
p = [549,519]
screen.blit(feature, p)
if s.game.before_fifth.status == True:
p = [549,465]
screen.blit(feature, p)
if s.game.after_fifth.status == True:
p = [550,409]
screen.blit(feature, p)
if s.game.one_seven_feature.status == True:
p = [527,231]
screen.blit(one_seven_feature, p)
if s.game.one_seven.status == True:
p = [527,295]
screen.blit(one_seven, p)
if s.game.seven_one.status == True:
p = [527,329]
screen.blit(one_seven, p)
if s.game.magic_screen_feature.position >= 8:
if s.game.three_blue.status == True:
bp = [51,359]
screen.blit(three_blue, bp)
elif s.game.two_blue.status == True:
bp = [112,357]
screen.blit(three_blue, bp)
if s.game.yellow_super_section.status == True:
yss = [50,452]
screen.blit(super_section, yss)
if s.game.red_super_section.status == True:
rss = [50,539]
screen.blit(super_section, rss)
if s.game.magic_screen_feature.position >= 1:
ms = [31,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position >= 2:
ms = [68,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position >= 3:
ms = [105,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position >= 4:
ms = [142,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position == 5:
ms = [183,680]
screen.blit(ms_teaser, ms)
if s.game.magic_screen_feature.position >= 6:
ms = [211,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position == 7:
ms = [249,681]
screen.blit(ms_teaser, ms)
if s.game.magic_screen_feature.position >= 8:
ms = [277,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position == 9:
ms = [315,681]
screen.blit(ms_teaser, ms)
if s.game.magic_screen_feature.position >= 10:
ms = [343,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen_feature.position == 11:
ms = [381,680]
screen.blit(ms_teaser, ms)
if s.game.magic_screen_feature.position >= 12:
ms = [408,669]
screen.blit(ms_letter, ms)
if s.game.magic_screen.position == 1:
ms = [30,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 2:
ms = [67,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 3:
ms = [103,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 4:
ms = [141,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 5:
ms = [211,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 6:
ms = [275,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 7:
ms = [342,724]
screen.blit(ms_indicator, ms)
if s.game.magic_screen.position == 8:
ms = [409,724]
screen.blit(ms_indicator, ms)
if s.game.tilt.status == False:
if s.holes:
if 1 in s.holes:
number_position = [287,373]
screen.blit(number, number_position)
if 2 in s.holes:
number_position = [333,373]
screen.blit(number, number_position)
if 3 in s.holes:
number_position = [382,568]
screen.blit(number, number_position)
if 4 in s.holes:
number_position = [238,423]
screen.blit(number, number_position)
if 5 in s.holes:
number_position = [335,520]
screen.blit(number, number_position)
if 6 in s.holes:
number_position = [238,520]
screen.blit(number, number_position)
if 7 in s.holes:
number_position = [333,423]
screen.blit(number, number_position)
if 8 in s.holes:
number_position = [286,568]
screen.blit(number, number_position)
if 9 in s.holes:
number_position = [240,374]
screen.blit(number, number_position)
if 10 in s.holes:
number_position = [430,565]
screen.blit(number, number_position)
if 11 in s.holes:
number_position = [382,373]
screen.blit(number, number_position)
if 12 in s.holes:
number_position = [238,568]
screen.blit(number, number_position)
if 13 in s.holes:
number_position = [382,470]
screen.blit(number, number_position)
if 14 in s.holes:
number_position = [334,569]
screen.blit(number, number_position)
if 15 in s.holes:
number_position = [428,373]
screen.blit(number, number_position)
if 16 in s.holes:
number_position = [334,470]
screen.blit(number, number_position)
if 17 in s.holes:
number_position = [428,470]
screen.blit(number, number_position)
if 18 in s.holes:
number_position = [428,423]
screen.blit(number, number_position)
if 19 in s.holes:
number_position = [285,421]
screen.blit(number, number_position)
if 20 in s.holes:
number_position = [430,520]
screen.blit(number, number_position)
if 21 in s.holes:
number_position = [382,520]
screen.blit(number, number_position)
if 22 in s.holes:
number_position = [382,421]
screen.blit(number, number_position)
if 23 in s.holes:
number_position = [286,520]
screen.blit(number, number_position)
if 24 in s.holes:
number_position = [286,470]
screen.blit(number, number_position)
if 25 in s.holes:
number_position = [240,470]
screen.blit(number, number_position)
if s.game.red_odds.position == 1:
o = [190,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 2:
o = [250,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 3:
o = [312,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 4:
o = [373,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 5:
o = [425,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 6:
o = [474,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 7:
o = [522,765]
screen.blit(odds, o)
elif s.game.red_odds.position == 8:
o = [568,765]
screen.blit(odds, o)
if s.game.yellow_odds.position == 1:
o = [190,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 2:
o = [250,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 3:
o = [312,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 4:
o = [373,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 5:
o = [425,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 6:
o = [474,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 7:
o = [522,831]
screen.blit(odds, o)
elif s.game.yellow_odds.position == 8:
o = [568,831]
screen.blit(odds, o)
if s.game.green_odds.position == 1:
o = [190,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 2:
o = [250,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 3:
o = [312,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 4:
o = [373,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 5:
o = [425,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 6:
o = [474,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 7:
o = [522,903]
screen.blit(odds, o)
elif s.game.green_odds.position == 8:
o = [572,903]
screen.blit(odds, o)
if s.game.magic_screen_feature.position >= 1:
if s.game.before_fourth.status == True and s.game.ball_count.position == 3:
s.cancel_delayed(name="blink")
blink([s,1,1])
elif s.game.before_fifth.status == True and s.game.ball_count.position == 4:
s.cancel_delayed(name="blink")
blink([s,1,1])
elif s.game.after_fifth.status == True and s.game.ball_count.position == 5:
s.cancel_delayed(name="blink")
blink([s,1,1])
else:
s.cancel_delayed(name="blink")
if s.game.tilt.status == True:
tilt_position = [572,632]
screen.blit(tilt, tilt_position)
pygame.display.update()
def blink(args):
dirty_rects = []
s = args[0]
b = args[1]
sn = args[2]
if b == 0:
if sn == 1:
p = [575,677]
dirty_rects.append(screen.blit(select_now, p))
pygame.display.update(dirty_rects)
else:
dirty_rects.append(screen.blit(bg_gi, (575,677), pygame.Rect(575,677,123,32)))
pygame.display.update(dirty_rects)
b = not b
args = [s,b,sn]
s.delay(name="blink", delay=0.1, handler=blink, param=args)
def screen_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
direction = args[2]
number_card_position = [235,363]
dirty_rects.append(screen.blit(number_card, number_card_position))
magic_screen.set_colorkey((255,0,252))
if s.game.magic_screen.position == 0:
p = [240,358]
elif s.game.magic_screen.position == 1:
p = [192,358]
elif s.game.magic_screen.position == 2:
p = [145,358]
elif s.game.magic_screen.position == 3:
p = [96,358]
elif s.game.magic_screen.position == 4:
p = [49,358]
elif s.game.magic_screen.position == 5:
p = [3,358]
elif s.game.magic_screen.position == 6:
p = [-45,358]
elif s.game.magic_screen.position == 7:
p = [-92,358]
elif s.game.magic_screen.position == 8:
p = [-139,358]
elif s.game.magic_screen.position == 9:
p = [-187,358]
if direction == "left":
p[0] = p[0] + num
else:
p[0] = p[0] - num
dirty_rects.append(screen.blit(magic_screen, p))
backglass_position = [0, 0]
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],612,280)))
else:
dirty_rects.append(screen.blit(bg_off, p, pygame.Rect(p[0],p[1],612,280)))
if s.game.magic_screen_feature.position >= 9:
if s.game.three_blue.status == True:
bp = [51,359]
dirty_rects.append(screen.blit(bg_gi, bp, pygame.Rect(bp[0],bp[1],46,77)))
dirty_rects.append(screen.blit(three_blue, bp))
elif s.game.two_blue.status == True:
bp = [112,357]
dirty_rects.append(screen.blit(bg_gi, bp, pygame.Rect(bp[0],bp[1],46,77)))
dirty_rects.append(screen.blit(three_blue, bp))
if s.game.magic_screen_feature.position >= 4:
if s.game.before_fourth.status == True:
p = [551,572]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],124,54)))
dirty_rects.append(screen.blit(feature, p))
if s.game.rollovers.status == True:
p = [549,519]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],124,54)))
dirty_rects.append(screen.blit(feature, p))
if s.game.before_fifth.status == True:
p = [549,465]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],124,54)))
dirty_rects.append(screen.blit(feature, p))
if s.game.after_fifth.status == True:
p = [550,409]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],124,54)))
dirty_rects.append(screen.blit(feature, p))
if s.game.red_super_section.status == True:
rss = [50,539]
dirty_rects.append(screen.blit(bg_gi, (50,539), pygame.Rect(50,539,121,81)))
dirty_rects.append(screen.blit(super_section, rss))
if s.game.yellow_super_section.status == True:
yss = [50,452]
dirty_rects.append(screen.blit(bg_gi, (50,452), pygame.Rect(50,452,121,81)))
dirty_rects.append(screen.blit(super_section, yss))
pygame.display.update(dirty_rects)
def eb_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
if s.game.extra_ball.position < 1:
dirty_rects.append(screen.blit(bg_gi, (151,1014), pygame.Rect(151,1014,47,33)))
if s.game.extra_ball.position < 2:
dirty_rects.append(screen.blit(bg_gi, (198,1014), pygame.Rect(198,1014,63,36)))
if s.game.extra_ball.position < 3:
dirty_rects.append(screen.blit(bg_gi, (262,1014), pygame.Rect(262,1014,63,36)))
if s.game.extra_ball.position < 4:
dirty_rects.append(screen.blit(bg_gi, (330,1014), pygame.Rect(330,1014,47,33)))
if s.game.extra_ball.position < 5:
dirty_rects.append(screen.blit(bg_gi, (376,1014), pygame.Rect(376,1014,63,36)))
if s.game.extra_ball.position < 6:
dirty_rects.append(screen.blit(bg_gi, (436,1014), pygame.Rect(436,1014,63,36)))
if s.game.extra_ball.position < 7:
dirty_rects.append(screen.blit(bg_gi, (506,1014), pygame.Rect(506,1014,47,33)))
if s.game.extra_ball.position < 8:
dirty_rects.append(screen.blit(bg_gi, (554,1014), pygame.Rect(554,1014,63,36)))
if s.game.extra_ball.position < 9:
dirty_rects.append(screen.blit(bg_gi, (617,1014), pygame.Rect(617,1014,63,36)))
pygame.display.update(dirty_rects)
if num in [0,25,14,49]:
if s.game.extra_ball.position < 1:
p = [151,1014]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [39,1,26,15]:
if s.game.extra_ball.position < 2:
p = [198,1014]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [3,4,17,28,29,40]:
if s.game.extra_ball.position < 3:
p = [262,1014]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [5,18,30,43]:
if s.game.extra_ball.position < 4:
p = [330,1014]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [7,8,19,32,33,44]:
if s.game.extra_ball.position < 5:
p = [376,1014]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [9,10,20,34,35,45]:
if s.game.extra_ball.position < 6:
p = [436,1014]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [11,21,36,46]:
if s.game.extra_ball.position < 7:
p = [506,1014]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [12,22,37,47]:
if s.game.extra_ball.position < 8:
p = [554,1014]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [2,6,13,16,23,27,31,38,41,48]:
if s.game.extra_ball.position < 9:
p = [617,1014]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
def clear_odds(s, num):
global screen
dirty_rects = []
if s.game.yellow_odds.position != 1:
dirty_rects.append(screen.blit(bg_gi, (190,831), pygame.Rect(190,831,46,68)))
if s.game.yellow_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (250,831), pygame.Rect(250,831,46,68)))
if s.game.yellow_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (312,831), pygame.Rect(312,831,46,68)))
if s.game.yellow_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (373,831), pygame.Rect(373,831,46,68)))
if s.game.yellow_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (425,831), pygame.Rect(425,831,46,68)))
if s.game.yellow_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (474,831), pygame.Rect(474,831,46,68)))
if s.game.yellow_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (522,831), pygame.Rect(522,831,46,68)))
if s.game.yellow_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (568,831), pygame.Rect(568,831,46,68)))
if s.game.red_odds.position != 1:
dirty_rects.append(screen.blit(bg_gi, (190,765), pygame.Rect(190,765,46,68)))
if s.game.red_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (250,765), pygame.Rect(250,765,46,68)))
if s.game.red_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (312,765), pygame.Rect(312,765,46,68)))
if s.game.red_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (373,765), pygame.Rect(373,765,46,68)))
if s.game.red_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (425,765), pygame.Rect(425,765,46,68)))
if s.game.red_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (474,765), pygame.Rect(474,765,46,68)))
if s.game.red_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (522,765), pygame.Rect(522,765,46,68)))
if s.game.red_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (568,765), pygame.Rect(568,765,46,68)))
if s.game.green_odds.position != 1:
dirty_rects.append(screen.blit(bg_gi, (190,903), pygame.Rect(190,903,46,68)))
if s.game.green_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (250,903), pygame.Rect(250,903,46,68)))
if s.game.green_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (312,903), pygame.Rect(312,903,46,68)))
if s.game.green_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (373,903), pygame.Rect(373,903,46,68)))
if s.game.green_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (425,903), pygame.Rect(425,903,46,68)))
if s.game.green_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (474,903), pygame.Rect(474,903,46,68)))
if s.game.green_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (522,903), pygame.Rect(522,903,46,68)))
if s.game.green_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (568,903), pygame.Rect(568,903,46,68)))
pygame.display.update(dirty_rects)
def draw_odds_animation(s, num):
global screen
dirty_rects = []
if num in [5,30]:
if s.game.yellow_odds.position != 1:
p = [190,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [4,29]:
if s.game.yellow_odds.position != 2:
p = [250,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [7,32]:
if s.game.yellow_odds.position != 3:
p = [312,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [13,38]:
if s.game.yellow_odds.position != 4:
p = [373,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [23,48]:
if s.game.yellow_odds.position != 5:
p = [425,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [17,42]:
if s.game.yellow_odds.position != 6:
p = [474,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [19,44]:
if s.game.yellow_odds.position != 7:
p = [522,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [11,36]:
if s.game.yellow_odds.position != 8:
p = [568,831]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [2,27]:
if s.game.red_odds.position != 1:
p = [190,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [15,40]:
if s.game.red_odds.position != 2:
p = [250,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [0,25]:
if s.game.red_odds.position != 3:
p = [312,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [9,34]:
if s.game.red_odds.position != 4:
p = [373,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [20,45]:
if s.game.red_odds.position != 5:
p = [425,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [12,37]:
if s.game.red_odds.position != 6:
p = [474,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [1,26]:
if s.game.red_odds.position != 7:
p = [522,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [6,31]:
if s.game.red_odds.position != 8:
p = [568,765]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [17,42]:
if s.game.green_odds.position != 1:
p = [190,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [19,44]:
if s.game.green_odds.position != 2:
p = [250,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [14,39]:
if s.game.green_odds.position != 3:
p = [312,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [3,28]:
if s.game.green_odds.position != 4:
p = [373,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [23,48]:
if s.game.green_odds.position != 5:
p = [425,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [8,33]:
if s.game.green_odds.position != 6:
p = [474,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [21,46]:
if s.game.green_odds.position != 7:
p = [522,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [10,35]:
if s.game.green_odds.position != 8:
p = [568,903]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
def odds_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_odds(s, num)
draw_odds_animation(s, num)
def clear_features(s, num):
global screen
dirty_rects = []
if s.game.one_seven_feature.status == False:
dirty_rects.append(screen.blit(bg_gi, (527,231), pygame.Rect(527,231,169,63)))
if s.game.before_fourth.status == False:
dirty_rects.append(screen.blit(bg_gi, (551,572), pygame.Rect(551,572,124,54)))
if s.game.rollovers.status == False:
dirty_rects.append(screen.blit(bg_gi, (549,519), pygame.Rect(549,519,124,54)))
if s.game.before_fifth.status == False:
dirty_rects.append(screen.blit(bg_gi, (549,465), pygame.Rect(549,465,124,54)))
if s.game.after_fifth.status == False:
dirty_rects.append(screen.blit(bg_gi, (550,409), pygame.Rect(550,409,124,54)))
if s.game.yellow_super_section.status == False:
dirty_rects.append(screen.blit(bg_gi, (50,452), pygame.Rect(50,452,122,88)))
if s.game.red_super_section.status == False:
dirty_rects.append(screen.blit(bg_gi, (50,539), pygame.Rect(50,539,122,88)))
if s.game.magic_screen_feature.position < 4:
dirty_rects.append(screen.blit(bg_gi, (31,669), pygame.Rect(31,669,36,49)))
dirty_rects.append(screen.blit(bg_gi, (68,669), pygame.Rect(68,669,36,49)))
dirty_rects.append(screen.blit(bg_gi, (105,669), pygame.Rect(105,669,36,49)))
dirty_rects.append(screen.blit(bg_gi, (142,669), pygame.Rect(142,669,36,49)))
if s.game.magic_screen_feature.position < 6:
dirty_rects.append(screen.blit(bg_gi, (211,669), pygame.Rect(211,669,36,49)))
if s.game.magic_screen_feature.position < 8:
dirty_rects.append(screen.blit(bg_gi, (277,669), pygame.Rect(277,669,36,49)))
dirty_rects.append(screen.blit(bg_gi, (51,359), pygame.Rect(51,359,57,92)))
dirty_rects.append(screen.blit(bg_gi, (112,357), pygame.Rect(112,357,57,92)))
if s.game.magic_screen_feature.position < 10:
dirty_rects.append(screen.blit(bg_gi, (343,669), pygame.Rect(343,669,36,49)))
if s.game.magic_screen_feature.position < 12:
dirty_rects.append(screen.blit(bg_gi, (408,669), pygame.Rect(408,669,36,49)))
pygame.display.update(dirty_rects)
def draw_feature_animation(s, num):
global screen
dirty_rects = []
if num in [4,13,29,38]:
if s.game.one_seven_feature.status == False:
p = [527,231]
dirty_rects.append(screen.blit(one_seven_feature, p))
pygame.display.update(dirty_rects)
return
if num in [10,19,35,44]:
if s.game.before_fourth.status == False:
p = [551,572]
dirty_rects.append(screen.blit(feature, p))
pygame.display.update(dirty_rects)
return
if num in [9,24,34,49]:
if s.game.before_fifth.status == False:
p = [549,465]
dirty_rects.append(screen.blit(feature, p))
pygame.display.update(dirty_rects)
return
if num in [6,17,21,31,42,46]:
if s.game.after_fifth.status == False:
p = [550,409]
dirty_rects.append(screen.blit(feature, p))
pygame.display.update(dirty_rects)
return
if num in [11,36]:
if s.game.rollovers.status == False:
p = [549,519]
dirty_rects.append(screen.blit(feature, p))
s.game.coils.redROLamp.pulse()
s.game.coils.yellowROLamp.pulse()
pygame.display.update(dirty_rects)
return
if num in [7,0,25,32]:
if s.game.yellow_super_section.status == False:
if s.game.red_super_section.status == False:
p = [50,452]
dirty_rects.append(screen.blit(super_section, p))
pygame.display.update(dirty_rects)
return
if num in [18,24,49,43]:
if s.game.red_super_section.status == False:
if s.game.yellow_super_section.status == False:
p = [50,539]
dirty_rects.append(screen.blit(super_section, p))
pygame.display.update(dirty_rects)
return
if num in [2,12,27,37]:
if s.game.magic_screen_feature.position < 4:
p = [31,669]
dirty_rects.append(screen.blit(ms_letter, p))
p = [68,669]
dirty_rects.append(screen.blit(ms_letter, p))
p = [105,669]
dirty_rects.append(screen.blit(ms_letter, p))
p = [142,669]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [22,47]:
if s.game.magic_screen_feature.position < 6:
p = [211,669]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [14,25,39,50]:
if s.game.magic_screen_feature.position < 8:
p = [277,669]
dirty_rects.append(screen.blit(ms_letter, p))
p = [51,359]
dirty_rects.append(screen.blit(three_blue, p))
p = [112,357]
dirty_rects.append(screen.blit(three_blue, p))
pygame.display.update(dirty_rects)
return
if num in [5,15,30,40]:
if s.game.magic_screen_feature.position < 10:
p = [343,669]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
if num in [16,23,41,48]:
if s.game.magic_screen_feature.position < 12:
p = [408,669]
dirty_rects.append(screen.blit(ms_letter, p))
pygame.display.update(dirty_rects)
return
def feature_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_features(s, num)
draw_feature_animation(s, num)
def both_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_features(s, num)
clear_odds(s, num)
draw_odds_animation(s, num)
draw_feature_animation(s, num)
|
gpl-3.0
| 8,196,188,574,081,520,000 | 36.788542 | 95 | 0.57378 | false |
albugrimenko/Python_Pieces
|
samples/temp_rickandmortyapi.py
|
1
|
1306
|
import urllib.request
import json
def characters_get():
""" Gets character list from the rickandmorty API """
url = 'https://rickandmortyapi.com/api/character/'
resp = urllib.request.urlopen(url)
data = json.loads(resp.read().decode())
#print(data)
return data
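# Note (editor's addition): the endpoint responds with JSON of the form
# {"info": {...}, "results": [...]}; characters_save() below consumes the
# "results" list, one dict per character.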
def characters_save(data, file_name):
with open(file_name, 'w', encoding='utf-8') as fout:
fout.write("id\tname\tstatus\tspecies\tgender\timage\turl\n")
for item in data["results"]:
fout.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
item["id"],
item["name"],
item["status"],
item["species"],
item["gender"],
item["image"],
item["url"]
))
return
def solution(A):
""" Finds a value that occurs in more than half of the elements of an array """
n = len(A)
L = [-1] + A
count = 0
pos = (n + 1) // 2
candidate = L[pos]
for i in range(1, n + 1):
if L[i] == candidate:
count = count + 1
print(count, n/2)
if count > n/2:
return candidate
return -1
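# Illustrative checks (editor's addition; input values are hypothetical):
#   solution([3, 4, 3, 2, 3, -1, 3, 3])  ->  3   (3 occurs 5 times out of 8)
#   solution([1, 2, 3])                  -> -1   (no value dominates)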
if __name__ == '__main__':
#characters_get()
#characters_save(characters_get(), 'e:/temp/rick_and_morty.tsv')
print(solution([2,2]))
|
mit
| -1,933,995,682,509,857,500 | 25.653061 | 83 | 0.524502 | false |
fbessho/python-sybase
|
examples/cursor_sel.py
|
1
|
6134
|
#!/usr/bin/python
#
# From cursor_sel.c - sybase example program
#
# Description:
# This program uses a cursor to retrieve data from a table. It
# also accepts an input parameter for the "where" clause.
#
# Inputs:
# Value for the input parameter (state column from the publishers
# table).
#
# References:
# Open Client Reference Manual pages for ct_cursor and ct_param.
#
import sys
from sybasect import *
from example import *
MAX_COLSIZE = 255
def init_db():
# allocate a context
status, ctx = cs_ctx_alloc(EX_CTLIB_VERSION)
if status != CS_SUCCEED:
raise Error('cs_ctx_alloc failed')
set_global_ctx(ctx)
if ctx.cs_diag(CS_INIT) != CS_SUCCEED:
raise CSError(ctx, 'cs_diag failed')
# initialize the library
if ctx.ct_init(EX_CTLIB_VERSION) != CS_SUCCEED:
raise CSError(ctx, 'ct_init failed')
return ctx
def connect_db(ctx, user_name, password):
# Allocate a connection pointer
status, conn = ctx.ct_con_alloc()
if status != CS_SUCCEED:
raise CSError(ctx, 'ct_con_alloc failed')
if conn.ct_diag(CS_INIT) != CS_SUCCEED:
raise CTError(conn, 'ct_diag failed')
# Set the username and password properties
if conn.ct_con_props(CS_SET, CS_USERNAME, user_name) != CS_SUCCEED:
raise CTError(conn, 'ct_con_props CS_USERNAME failed')
if conn.ct_con_props(CS_SET, CS_PASSWORD, password) != CS_SUCCEED:
raise CTError(conn, 'ct_con_props CS_PASSWORD failed')
# connect to the server
if conn.ct_connect() != CS_SUCCEED:
raise CTError(conn, 'ct_connect failed')
return conn
def bind_columns(cmd):
status, num_cols = cmd.ct_res_info(CS_NUMDATA)
if status != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_res_info failed')
bufs = [None] * num_cols
for i in range(num_cols):
fmt = CS_DATAFMT()
fmt.datatype = CS_CHAR_TYPE
fmt.maxlength = MAX_COLSIZE
fmt.count = 1
fmt.format = CS_FMT_NULLTERM
# Bind returned data to host variables
status, buf = cmd.ct_bind(i + 1, fmt)
if status != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_bind failed')
bufs[i] = buf
return bufs
def fetch_n_print(cmd, bufs):
status, num_cols = cmd.ct_res_info(CS_NUMDATA)
if status != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_res_info failed')
# Fetch the bound data into host variables
while 1:
status, rows_read = cmd.ct_fetch()
if status not in (CS_SUCCEED, CS_ROW_FAIL):
break
if status == CS_ROW_FAIL:
print 'ct_fetch returned row fail'
continue
for i in range(num_cols):
print ' %s \t' % bufs[i][0],
print
if status != CS_END_DATA:
raise CTError(cmd.conn, 'ct_fetch failed')
def handle_returns(cmd):
# Process all returned result types
while 1:
status, result = cmd.ct_results()
if status != CS_SUCCEED:
break
if result == CS_ROW_RESULT:
print 'TYPE: ROW RESULT'
bufs = bind_columns(cmd)
fetch_n_print(cmd, bufs)
elif result == CS_CMD_SUCCEED:
print 'TYPE: CMD SUCCEEDED'
elif result == CS_CMD_DONE:
            print 'TYPE: CMD DONE'
elif result == CS_CMD_FAIL:
raise CTError(cmd.conn, 'ct_results: CS_CMD_FAIL')
elif result == CS_CURSOR_RESULT:
print 'TYPE: CURSOR RESULT'
bufs = bind_columns(cmd)
fetch_n_print(cmd, bufs)
else:
sys.stderr.write('unknown results\n')
return
if status != CS_END_RESULTS:
raise CTError(cmd.conn, 'ct_results failed')
def open_cursor(cmd):
sql = 'select * from pubs2.dbo.publishers where state = @state'
# This cursor will retrieve the records of all publishers for a
# given state. This value is to be input by the user.
prompt = 'Retrieve records of publishers from which state: [CA/MA] ?'
inp_state = raw_input(prompt)
# Declare and open the cursor.
if cmd.ct_cursor(CS_CURSOR_DECLARE,
'browse_cursor', sql, CS_READ_ONLY) != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_cursor failed')
# Declare the input parameter for the cursor.
fmt = CS_DATAFMT()
fmt.name = '@state'
fmt.datatype = CS_CHAR_TYPE
fmt.status = CS_INPUTVALUE
fmt.maxlength = CS_UNUSED
if cmd.ct_param(fmt) != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_param failed')
if cmd.ct_cursor(CS_CURSOR_OPEN) != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_cursor failed')
# Define the input parameter.
buf2 = DataBuf(inp_state)
buf2.name = '@state'
if cmd.ct_param(buf2) != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_param failed')
if cmd.ct_send() != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_send failed')
handle_returns(cmd)
if cmd.ct_cursor(CS_CURSOR_CLOSE, CS_DEALLOC) != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_cursor failed')
if cmd.ct_send() != CS_SUCCEED:
raise CTError(cmd.conn, 'ct_send failed')
handle_returns(cmd)
def cleanup_db(ctx, status):
if status != CS_SUCCEED:
exit_type = CS_FORCE_EXIT
else:
exit_type = CS_UNUSED
# close and cleanup connection to the server
if ctx.ct_exit(exit_type) != CS_SUCCEED:
raise CSError(ctx, 'ct_exit failed')
# drop the context
if ctx.cs_ctx_drop() != CS_SUCCEED:
raise CSError(ctx, 'cs_ctx_drop failed')
# Allocate a context and initialize client-library
ctx = init_db()
# Establish a connection to the server
conn = connect_db(ctx, EX_USERNAME, EX_PASSWORD)
# Allocate a command structure
status, cmd = conn.ct_cmd_alloc()
if status != CS_SUCCEED:
raise CTError(conn, 'ct_cmd_alloc failed')
# Perform cursor operations
open_cursor(cmd)
# Drop the command structure
if cmd.ct_cmd_drop() != CS_SUCCEED:
raise CTError(conn, 'ct_cmd_drop failed')
# Close connection to the server
status = conn.ct_close()
# Drop the context and do general cleanup
cleanup_db(ctx, status)
print '\n End of program run!'
|
bsd-3-clause
| -8,926,185,777,761,423,000 | 33.077778 | 73 | 0.623574 | false |
d120/kifplan
|
neuigkeiten/migrations/0001_initial.py
|
1
|
1030
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-03 12:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Beitrag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250, verbose_name='Titel')),
('author', models.CharField(max_length=250, verbose_name='Autor')),
('content_text', models.TextField(verbose_name='Inhalt')),
('published_date', models.DateTimeField(auto_now_add=True, verbose_name='Veröffentlichungsdatum')),
],
options={
'ordering': ['published_date'],
'verbose_name': 'Beitrag',
'verbose_name_plural': 'Beiträge',
},
),
]
|
agpl-3.0
| -8,535,844,243,372,026,000 | 32.16129 | 115 | 0.560311 | false |
nickaigi/django-pesapal
|
pesapal/pesapal.py
|
1
|
5477
|
#!/usr/bin/env python
import xml.etree.cElementTree as etree
from cgi import escape
import oauth
from oauth import OAuthConsumer, OAuthRequest
SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1()
class InvalidOption(Exception):
pass
class PesaPal(object):
def __init__(self, consumer_key, consumer_secret, testing=True):
self.oauth_consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
self.base_url = 'https://www.pesapal.com/api/'
if testing:
self.base_url = 'http://demo2.pesapal.com/api/'
# method to validate passed options
def validateOptions(self, options, default_options):
for k, v in options.iteritems():
if k not in default_options:
msg = 'Option %s not found in %s' % (k, default_options.keys())
raise InvalidOption(msg)
# method to build and return oauth request
def getOauthRequest(self, http_url, params, default_params):
self.validateOptions(params, default_params)
default_params.update(params)
params = default_params
http_method='GET'
token = params.pop('token', None)
url = self.base_url + http_url
request = OAuthRequest.from_consumer_and_token(
self.oauth_consumer,
http_url= url,
http_method=http_method,
parameters=params
)
request.sign_request(SIGNATURE_METHOD, self.oauth_consumer, token)
return request
def postDirectOrder(self, params, request_data):
"""
PostPesapalDirectOrderV4
---
Use this to post a transaction to PesaPal. PesaPal will present the user with a page which contains the available payment options and will redirect to your site once the user has completed the payment process.
"""
default_request_data = {
'Amount': '',
'Description': '',
'Type': 'MERCHANT',
'Reference': '',
'Email': '',
'PhoneNumber': '',
# optional
'Currency': '',
'FirstName': '',
'LastName': '',
'LineItems': [
# {
# 'UniqueId': '',
# 'Particulars': '',
# 'Quantity': '',
# 'UnitCost': '',
# 'SubTotal': ''
# }
]
}
# validate xml data
self.validateOptions(request_data, default_request_data)
default_request_data.update(request_data)
request_data = default_request_data
root_xml = etree.Element('PesapalDirectOrderInfo')
root_xml.attrib['xmlns:xsi'] = 'http://www.w3.org/2001/XMLSchema-instance'
root_xml.attrib['xmlns:xsd'] = 'http://www.w3.org/2001/XMLSchema'
root_xml.attrib['xmlns'] = 'http://www.pesapal.com'
# populate line items
line_items = request_data.pop('LineItems')
if len(line_items) > 0:
line_items_xml = etree.SubElement(root_xml, 'LineItems')
for item in line_items:
item_xml = etree.SubElement(line_items_xml, 'LineItem')
item_xml.attrib.update(item)
# populate info
root_xml.attrib.update(request_data)
# pesapal_request_data
pesapal_request_data = escape(etree.tostring(root_xml))
default_params = {
'oauth_callback': '',
#'oauth_consumer_key': '',
#'oauth_nonce': '',
#'oauth_signature': '',
#'oauth_signature_method': '',
#'oauth_timestamp': '',
#'oauth_version': '1.0',
'pesapal_request_data': pesapal_request_data
}
http_url = 'PostPesapalDirectOrderV4'
request = self.getOauthRequest(http_url, params, default_params)
return request
def queryPaymentStatus(self, params):
"""
Use this to query the status of the transaction. When a transaction is posted to PesaPal, it may be in a PENDING, COMPLETED or FAILED state. If the transaction is PENDING, the payment may complete or fail at a later stage. Both the unique order id generated by your system and the pesapal tracking id are required as input parameters.
"""
http_url = 'QueryPaymentStatus'
default_params = {
'pesapal_merchant_reference': '',
'pesapal_transaction_tracking_id': ''
}
request = self.getOauthRequest(http_url, params, default_params)
return request
def queryPaymentStatusByMerchantRef(self, params):
"""
Same as QueryPaymentStatus, but only the unique order id genereated by your system is required as the input parameter.
"""
http_url = 'QueryPaymentStatusByMerchantRef'
default_params = {
'pesapal_merchant_reference': ''
}
request = self.getOauthRequest(http_url, params, default_params)
return request
def queryPaymentDetails(self, params):
"""
Same as QueryPaymentStatus, but additional information is returned.
"""
http_url = 'QueryPaymentDetails'
default_params = {
'pesapal_merchant_reference': '',
'pesapal_transaction_tracking_id': ''
}
request = self.getOauthRequest(http_url, params, default_params)
return request
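# Illustrative usage sketch (editor's addition; the key, secret and reference
# values are hypothetical, not part of this module):
#   pp = PesaPal('consumer-key', 'consumer-secret', testing=True)
#   signed = pp.queryPaymentStatus({
#       'pesapal_merchant_reference': 'ORDER-0001',
#       'pesapal_transaction_tracking_id': 'abc-123',
#   })
#   print signed.to_url()  # OAuthRequest objects can emit the signed URL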
|
mit
| 6,676,665,389,157,907,000 | 31.60119 | 342 | 0.580975 | false |
BROADSoftware/hadeploy
|
lib/hadeploy/plugins/supervisor/code.py
|
1
|
17894
|
# Copyright (C) 2017 BROADSoftware
#
# This file is part of HADeploy
#
# HADeploy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HADeploy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HADeploy. If not, see <http://www.gnu.org/licenses/>.
import logging
import hadeploy.core.misc as misc
import os
from hadeploy.core.plugin import Plugin
from hadeploy.core.templator import Templator
from hadeploy.core.const import SRC,DATA,ACTION_DEPLOY,ACTION_REMOVE,ACTION_START,ACTION_STOP,SCOPE_SUPERVISOR,ACTION_STATUS
from hadeploy.plugins.files.code import lookupSrc
from sets import Set
logger = logging.getLogger("hadeploy.plugins.supervisor")
ENABLED="enabled"
STATE="state"
ST_STARTED="started"
ST_STOPPED="stopped"
ST_CURRENT="current"
#ACTION_ON_NOTIFY="action_on_notify"
#AON_RESTART="restart"
#AON_RELOAD="reload"
#AON_NONE="none"
#validAon = Set([AON_NONE, AON_RELOAD, AON_RESTART])
validState= Set([ST_STARTED, ST_STOPPED, ST_CURRENT])
NAME="name"
NO_REMOVE="no_remove"
SCOPE="scope"
SCOPE_BY_NAME="scopeByName"
_SCOPE_="_scope_"
SUPERVISORS="supervisors"
USER="user"
GROUP="group"
MANAGED="managed"
CONF_FILE_SRC="conf_file_src"
CONF_FILE_DST="conf_file_dst"
LOGS_DIR="logs_dir"
PID_DIR="pid_dir"
SOCKS_DIR="socks_dir"
INCLUDE_DIR="include_dir"
SUPERVISORCTL="supervisorctl"
HTTP_SERVER="http_server"
ENDPOINT="endpoint"
USERNAME="username"
PASSWORD="password"
AUTOSTART="autostart"
# The supervisor.conf file is a double template:
# it is rendered a first time at build time (in buildAuxTemplates()), and the
# result is itself a runtime template that is then rendered by Ansible.
# All paths are absolute.
CONF_FILE_SRC_JJ2="conf_file_src_jj2"
CONF_FILE_SRC_J2="conf_file_src_j2"
SERVICE_UNIT_JJ2="service_unit_jj2"
SERVICE_UNIT_J2="service_unit_j2"
SUPERVISORCTL_JJ2="supervisorctl_jj2"
SUPERVISORCTL_J2="supervisorctl_j2"
SUPERVISORS_TO_REMOVE="supervisorsToRemove"
SUPERVISORS_TO_MANAGE="supervisorsToManage"
SUPERVISOR_BY_NAME="supervisorByName"
SUPERVISOR="supervisor"
SUPERVISOR_PROGRAMS="supervisor_programs"
COMMAND="command"
SUPERVISOR_OWNER="supervisorOwner"
SUPERVISOR_GROUP="supervisorGroup"
SUPERVISOR_CONF="supervisorConf"
PROGRAMS_TO_REMOVE="programsToRemove"
PROGRAMS_TO_MANAGE="programsToManage"
_NAME_="_name_"
NUMPROCS="numprocs"
SUPERVISOR_GROUPS="supervisor_groups"
PROGRAMS="programs"
GROUPS_TO_REMOVE="groupsToRemove"
GROUPS_TO_MANAGE="groupsToManage"
PROGRAM_BY_NAME="programByName"
#PROGRAMS_JJ2="programs_jj2"
#PROGRAMS_J2="programs_j2"
#PROGRAMS_DEST="programs_dest"
#UNPROGRAMS_JJ2="unprograms_jj2"
#UNPROGRAMS_J2="unprograms_j2"
GROUP_BY_NAME="groupByName"
class SupervisorPlugin(Plugin):
def __init__(self, name, path, context):
Plugin.__init__(self, name, path, context)
def getGroomingPriority(self):
return 2510
def getSupportedScopes(self):
return [SCOPE_SUPERVISOR]
def getSupportedActions(self):
return [ACTION_DEPLOY, ACTION_REMOVE,ACTION_START,ACTION_STOP,ACTION_STATUS]
def getPriority(self, action):
if action == ACTION_DEPLOY:
return 7000
elif action == ACTION_REMOVE:
return 1600
elif action == ACTION_START:
return 6000
elif action == ACTION_STOP:
return 4000
elif action == ACTION_STATUS:
return 5000
else:
misc.ERROR("Plugin 'supervisor' called with invalid action: '{0}'".format(action))
def onGrooming(self):
misc.ensureObjectInMaps(self.context.model[DATA], [SUPERVISORS, SCOPE_BY_NAME], {})
misc.applyWhenOnList(self.context.model[SRC], SUPERVISORS)
misc.applyWhenOnList(self.context.model[SRC], SUPERVISOR_PROGRAMS)
misc.applyWhenOnList(self.context.model[SRC], SUPERVISOR_GROUPS)
self.groomSupervisors()
self.groomPrograms()
self.groomGroups()
def groomSupervisors(self):
if self.context.toExclude(SCOPE_SUPERVISOR):
return
model = self.context.model
if SUPERVISORS in model[SRC]:
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS], [SUPERVISOR_BY_NAME], {})
for supervisord in model[SRC][SUPERVISORS]:
if supervisord[NAME] in self.context.model[DATA][SUPERVISORS][SUPERVISOR_BY_NAME]:
misc.ERROR("supervisor '{0}' is defined twice!".format(supervisord[NAME]))
self.context.model[DATA][SUPERVISORS][SUPERVISOR_BY_NAME][supervisord[NAME]] = supervisord
misc.setDefaultInMap(supervisord, MANAGED, True)
self.groomOneSupervisord(model, supervisord)
# ---------------------- Insert in scope
if not self.context.checkScope(supervisord[SCOPE]):
misc.ERROR("Supervisor {0}: scope attribute '{1}' does not match any host or host_group!".format(supervisord[NAME], supervisord[SCOPE]))
else:
#misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [supervisord[SCOPE], SUPERVISORS], [])
#misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [supervisord[SCOPE], PROGRAMS], [])
#model[DATA][SUPERVISORS][SCOPE_BY_NAME][supervisord[SCOPE]][SUPERVISORS].append(supervisord)
if supervisord[MANAGED]:
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [supervisord[SCOPE], SUPERVISORS_TO_MANAGE], [])
model[DATA][SUPERVISORS][SCOPE_BY_NAME][supervisord[SCOPE]][SUPERVISORS_TO_MANAGE].append(supervisord)
if not supervisord[NO_REMOVE]:
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [supervisord[SCOPE], SUPERVISORS_TO_REMOVE], [])
model[DATA][SUPERVISORS][SCOPE_BY_NAME][supervisord[SCOPE]][SUPERVISORS_TO_REMOVE].append(supervisord)
def groomOneSupervisord(self, model, supervisord):
misc.setDefaultInMap(supervisord, CONF_FILE_DST, "/etc/supervisord_{0}.conf".format(supervisord[NAME]))
misc.setDefaultInMap(supervisord, LOGS_DIR, "/var/log/supervisor_{0}".format(supervisord[NAME]))
misc.setDefaultInMap(supervisord, PID_DIR, "/var/run/supervisor_{0}".format(supervisord[NAME]))
misc.setDefaultInMap(supervisord, SOCKS_DIR, "/var/run/supervisor_{0}".format(supervisord[NAME]))
misc.setDefaultInMap(supervisord, INCLUDE_DIR, "/etc/supervisord_{0}.d".format(supervisord[NAME]))
misc.setDefaultInMap(supervisord, SUPERVISORCTL, "/usr/bin/supervisorctl_{0}".format(supervisord[NAME]))
misc.setDefaultInMap(supervisord, NO_REMOVE, False)
misc.setDefaultInMap(supervisord, ENABLED, True)
misc.setDefaultInMap(supervisord, STATE, ST_STARTED)
misc.setDefaultInMap(supervisord, MANAGED, True)
supervisord[PROGRAM_BY_NAME] = {}
supervisord[GROUP_BY_NAME] = {}
if supervisord[STATE] not in validState:
misc.ERROR("Supervisor {0}: state value '{1}' is not valid. Must be one of {2}".format(supervisord[NAME], supervisord[STATE], validState))
#supervisord[PROGRAMS_JJ2] = os.path.join(self.path, "templates/programs.ini.jj2")
#supervisord[PROGRAMS_J2] = "supervisor_{}_programs.ini.j2".format(supervisord[NAME])
#supervisord[UNPROGRAMS_JJ2] = os.path.join(self.path, "templates/unprograms.ini.jj2")
#supervisord[UNPROGRAMS_J2] = "supervisor_{}_unprograms.ini.j2".format(supervisord[NAME])
#supervisord[PROGRAMS_DEST] = os.path.join(supervisord[INCLUDE_DIR], "_hadeploy_.ini")
if supervisord[MANAGED]:
if HTTP_SERVER in supervisord:
misc.setDefaultInMap(supervisord[HTTP_SERVER], ENDPOINT, "127.0.0.1:9001")
if PASSWORD in supervisord[HTTP_SERVER]:
misc.setDefaultInMap(supervisord[HTTP_SERVER], USERNAME, supervisord[USER])
if CONF_FILE_SRC in supervisord:
path, _, errMsg = lookupSrc(model, supervisord[CONF_FILE_SRC])
if path != None:
supervisord[CONF_FILE_SRC_JJ2] = path
else:
misc.ERROR("Supervisor '{0}': {1}".format(supervisord[NAME], errMsg))
else:
supervisord[CONF_FILE_SRC_JJ2] = os.path.join(self.path, "templates/supervisord.conf.jj2")
supervisord[CONF_FILE_SRC_J2] = "supervisord_{0}.conf.j2".format(supervisord[NAME])
supervisord[SERVICE_UNIT_JJ2] = os.path.join(self.path, "templates/supervisord.service.jj2")
supervisord[SERVICE_UNIT_J2] = "supervisord_{0}.service.j2".format(supervisord[NAME])
supervisord[SUPERVISORCTL_JJ2] = os.path.join(self.path, "templates/supervisorctl.jj2")
supervisord[SUPERVISORCTL_J2] = "supervisorctl_{0}".format(supervisord[NAME])
def groomPrograms(self):
if self.context.toExclude(SCOPE_SUPERVISOR):
return
model = self.context.model
if SUPERVISOR_PROGRAMS in model[SRC]:
for prg in model[SRC][SUPERVISOR_PROGRAMS]:
if not SUPERVISOR_BY_NAME in model[DATA][SUPERVISORS] or not prg[SUPERVISOR] in model[DATA][SUPERVISORS][SUPERVISOR_BY_NAME]:
misc.ERROR("supervisor_program '{}' refer to an undefined supervisor '{}'".format(prg[NAME], prg[SUPERVISOR]))
else:
supervisord = model[DATA][SUPERVISORS][SUPERVISOR_BY_NAME][prg[SUPERVISOR]]
if prg[NAME] in supervisord[PROGRAM_BY_NAME]:
misc.ERROR("supervisor_program '{}' is defined twice in supervisor '{}'".format(prg[NAME], supervisord[NAME]))
# Register in model.data
supervisord[PROGRAM_BY_NAME][prg[NAME]] = prg
#model[DATA][SUPERVISORS][SCOPE_BY_NAME][supervisord[SCOPE]][PROGRAMS].append(prg)
# Adjust attributes
misc.setDefaultInMap(prg, NO_REMOVE, False)
if prg[NO_REMOVE] and not supervisord[NO_REMOVE]:
misc.ERROR("Supervisor_program '{}' has no remove flag set while its supervisor ({}) has not!".format(prg[NAME], supervisord[NAME]))
if CONF_FILE_SRC in prg:
path, _, errMsg = lookupSrc(model, prg[CONF_FILE_SRC])
if path != None:
prg[CONF_FILE_SRC_JJ2] = path
else:
misc.ERROR("Supervisor_program '{0}': {1}".format(prg[NAME], errMsg))
else:
prg[CONF_FILE_SRC_JJ2] = os.path.join(self.path, "templates/program.conf.jj2")
if COMMAND not in prg:
misc.ERROR("Supervisor_program '{}': A 'command' parameter must be provided if using the default configuration file (No 'conf_file_src' parameter".format(prg[NAME]))
prg[CONF_FILE_SRC_J2] = "supervisor_{}_program_{}.conf".format(supervisord[NAME], prg[NAME])
prg[CONF_FILE_DST] = os.path.join(supervisord[INCLUDE_DIR], "{}_prg.ini".format(prg[NAME]))
prg[SUPERVISOR_OWNER] = supervisord[USER]
prg[SUPERVISOR_GROUP] = supervisord[GROUP]
prg[SUPERVISOR_CONF] = supervisord[CONF_FILE_DST]
misc.setDefaultInMap(prg, STATE, ST_STARTED)
if NUMPROCS in prg and prg[NUMPROCS] > 1:
prg[_NAME_] = prg[NAME] + ":" # This is in fact a group of process
else:
prg[_NAME_] = prg[NAME]
if prg[STATE] not in validState:
misc.ERROR("Supervisor_program {0}: state value '{1}' is not valid. Must be one of {2}".format(prg[NAME], prg[STATE], validState))
misc.setDefaultInMap(prg, AUTOSTART, prg[STATE] == ST_STARTED)
if SCOPE not in prg:
prg[SCOPE] = supervisord[SCOPE]
# Note we don't set prg[USER], as we want to be unset in config file if not set
# ---------------------- Insert in scope
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [prg[SCOPE], PROGRAMS_TO_MANAGE], [])
model[DATA][SUPERVISORS][SCOPE_BY_NAME][prg[SCOPE]][PROGRAMS_TO_MANAGE].append(prg)
if not prg[NO_REMOVE]:
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [prg[SCOPE], PROGRAMS_TO_REMOVE], [])
model[DATA][SUPERVISORS][SCOPE_BY_NAME][prg[SCOPE]][PROGRAMS_TO_REMOVE].append(prg)
def groomGroups(self):
if self.context.toExclude(SCOPE_SUPERVISOR):
return
model = self.context.model
if SUPERVISOR_GROUPS in model[SRC]:
for grp in model[SRC][SUPERVISOR_GROUPS]:
if not SUPERVISOR_BY_NAME in model[DATA][SUPERVISORS] or not grp[SUPERVISOR] in model[DATA][SUPERVISORS][SUPERVISOR_BY_NAME]:
misc.ERROR("supervisor_group '{}' refer to an undefined supervisor '{}'".format(grp[NAME], grp[SUPERVISOR]))
else:
supervisord = model[DATA][SUPERVISORS][SUPERVISOR_BY_NAME][grp[SUPERVISOR]]
if grp[NAME] in supervisord[GROUP_BY_NAME]:
misc.ERROR("supervisor_group '{}' is defined twice in supervisor '{}'".format(grp[NAME], supervisord[NAME]))
supervisord[GROUP_BY_NAME][grp[NAME]] = grp
for prgName in grp[PROGRAMS]:
if prgName not in supervisord[PROGRAM_BY_NAME]:
misc.ERROR("supervisor_group '{}' refer to an undefined program '{}'".format(grp[NAME], prgName))
else:
prg = supervisord[PROGRAM_BY_NAME][prgName]
# The program name must be patched:
prg[_NAME_] = grp[NAME] + ":" + supervisord[PROGRAM_BY_NAME][prgName][_NAME_]
if _SCOPE_ in grp:
if grp[_SCOPE_] != prg[SCOPE]:
misc.ERROR("supervisor_group '{}' host programs with different scope ({} != {}). Must be same".format(grp[NAME], grp[_SCOPE_], prg[SCOPE]))
else:
grp[_SCOPE_] = prg[SCOPE]
misc.setDefaultInMap(grp, NO_REMOVE, False)
if grp[NO_REMOVE] and not supervisord[NO_REMOVE]:
misc.ERROR("Supervisor_group '{}' has no remove flag set while its supervisor ({}) has not!".format(grp[NAME], supervisord[NAME]))
grp[CONF_FILE_SRC_JJ2] = os.path.join(self.path, "templates/group.conf.jj2")
grp[CONF_FILE_SRC_J2] = "supervisor_{}_group_{}.conf".format(supervisord[NAME], grp[NAME])
grp[CONF_FILE_DST] = os.path.join(supervisord[INCLUDE_DIR], "{}_grp.ini".format(grp[NAME]))
grp[SUPERVISOR_OWNER] = supervisord[USER]
grp[SUPERVISOR_GROUP] = supervisord[GROUP]
grp[SUPERVISOR_CONF] = supervisord[CONF_FILE_DST]
grp[_NAME_] = grp[NAME] + ":"
# ---------------------- Insert in scope
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [grp[_SCOPE_], GROUPS_TO_MANAGE], [])
model[DATA][SUPERVISORS][SCOPE_BY_NAME][grp[_SCOPE_]][GROUPS_TO_MANAGE].append(grp)
if not grp[NO_REMOVE]:
misc.ensureObjectInMaps(self.context.model[DATA][SUPERVISORS][SCOPE_BY_NAME], [grp[_SCOPE_], GROUPS_TO_REMOVE], [])
model[DATA][SUPERVISORS][SCOPE_BY_NAME][grp[_SCOPE_]][GROUPS_TO_REMOVE].append(grp)
def buildAuxTemplates(self, action, priority):
if self.context.toExclude(SCOPE_SUPERVISOR):
return
if action != ACTION_DEPLOY:
return
model = self.context.model
if SUPERVISORS in model[SRC]:
for supervisord in model[SRC][SUPERVISORS]:
if supervisord[MANAGED]:
templator = Templator(["/"], supervisord) # All template path are absolute
templator.generate(supervisord[CONF_FILE_SRC_JJ2], os.path.join(self.context.workingFolder, supervisord[CONF_FILE_SRC_J2]))
templator.generate(supervisord[SERVICE_UNIT_JJ2], os.path.join(self.context.workingFolder, supervisord[SERVICE_UNIT_J2]))
templator.generate(supervisord[SUPERVISORCTL_JJ2], os.path.join(self.context.workingFolder, supervisord[SUPERVISORCTL_J2]))
if SUPERVISOR_PROGRAMS in model[SRC]:
for prg in model[SRC][SUPERVISOR_PROGRAMS]:
templator = Templator(["/"], prg) # All template path are absolute
templator.generate(prg[CONF_FILE_SRC_JJ2], os.path.join(self.context.workingFolder, prg[CONF_FILE_SRC_J2]))
if SUPERVISOR_GROUPS in model[SRC]:
for grp in model[SRC][SUPERVISOR_GROUPS]:
templator = Templator(["/"], grp) # All template path are absolute
templator.generate(grp[CONF_FILE_SRC_JJ2], os.path.join(self.context.workingFolder, grp[CONF_FILE_SRC_J2]))
|
gpl-3.0
| -4,927,535,448,563,638,000 | 52.097923 | 189 | 0.625405 | false |
OCA/management-system
|
mgmtsystem_action/models/mgmtsystem_action.py
|
1
|
6187
|
# Copyright (C) 2010 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from datetime import datetime, timedelta
from odoo import _, api, exceptions, fields, models
class MgmtsystemAction(models.Model):
_name = "mgmtsystem.action"
_inherit = ["mail.thread", "mail.activity.mixin"]
_description = "Action"
_order = "priority desc, sequence, id desc"
name = fields.Char("Subject", required=True)
system_id = fields.Many2one("mgmtsystem.system", "System")
company_id = fields.Many2one(
"res.company", "Company", default=lambda self: self.env.company
)
active = fields.Boolean("Active", default=True)
priority = fields.Selection(
[("0", "Low"), ("1", "Normal")], default="0", index=True, string="Priority"
)
sequence = fields.Integer(
"Sequence",
index=True,
default=10,
help="Gives the sequence order when displaying a list of actions.",
)
date_deadline = fields.Date("Deadline")
date_open = fields.Datetime("Opening Date", readonly=True)
date_closed = fields.Datetime("Closed Date", readonly=True)
number_of_days_to_open = fields.Integer(
"# of days to open", compute="_compute_number_of_days_to_open", store=True
)
number_of_days_to_close = fields.Integer(
"# of days to close", compute="_compute_number_of_days_to_close", store=True
)
reference = fields.Char(
"Reference", required=True, readonly=True, default=lambda self: _("New")
)
user_id = fields.Many2one(
"res.users",
"Responsible",
default=lambda self: self._default_owner(),
required=True,
)
description = fields.Html("Description")
type_action = fields.Selection(
[
("immediate", "Immediate Action"),
("correction", "Corrective Action"),
("prevention", "Preventive Action"),
("improvement", "Improvement Opportunity"),
],
"Response Type",
required=True,
)
stage_id = fields.Many2one(
"mgmtsystem.action.stage",
"Stage",
track_visibility="onchange",
index=True,
copy=False,
default=lambda self: self._default_stage(),
group_expand="_stage_groups",
)
tag_ids = fields.Many2many("mgmtsystem.action.tag", string="Tags")
def _default_owner(self):
return self.env.user
def _default_stage(self):
return self.env["mgmtsystem.action.stage"].search(
[("is_starting", "=", True)], limit=1
)
@api.model
def _elapsed_days(self, dt1_text, dt2_text):
res = 0
if dt1_text and dt2_text:
res = (dt1_text - dt2_text).days
return res
@api.depends("date_open", "create_date")
    def _compute_number_of_days_to_open(self):
        for action in self:
            action.number_of_days_to_open = action._elapsed_days(
                action.date_open, action.create_date
            )
@api.depends("date_closed", "create_date")
    def _compute_number_of_days_to_close(self):
        for action in self:
            action.number_of_days_to_close = action._elapsed_days(
                action.date_closed, action.create_date
            )
@api.model
def _stage_groups(self, stages=None, domain=None, order=None):
return self.env["mgmtsystem.action.stage"].search([], order=order)
@api.model_create_multi
def create(self, vals_list):
for one_vals in vals_list:
if one_vals.get("reference", _("New")) == _("New"):
Sequence = self.env["ir.sequence"]
one_vals["reference"] = Sequence.next_by_code("mgmtsystem.action")
actions = super().create(vals_list)
actions.send_mail_for_action()
return actions
@api.constrains("stage_id")
def _check_stage_id(self):
for rec in self:
# Do not allow to bring back actions to draft
if rec.date_open and rec.stage_id.is_starting:
raise exceptions.ValidationError(
_("We cannot bring back the action to draft stage")
)
# If stage is changed, the action is opened
if not rec.date_open and not rec.stage_id.is_starting:
rec.date_open = fields.Datetime.now()
# If stage is ending, set closed date
if not rec.date_closed and rec.stage_id.is_ending:
rec.date_closed = fields.Datetime.now()
def send_mail_for_action(self, force_send=True):
template = self.env.ref("mgmtsystem_action.email_template_new_action_reminder")
for action in self:
template.send_mail(action.id, force_send=force_send)
return True
def get_action_url(self):
"""Return action url to be used in email templates."""
base_url = (
self.env["ir.config_parameter"]
.sudo()
.get_param("web.base.url", default="http://localhost:8069")
)
url = ("{}/web#db={}&id={}&model={}").format(
base_url, self.env.cr.dbname, self.id, self._name
)
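        # e.g. (illustrative): http://localhost:8069/web#db=mydb&id=42&model=mgmtsystem.action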
return url
@api.model
def process_reminder_queue(self, reminder_days=10):
"""Notify user when we are 10 days close to a deadline."""
cur_date = datetime.now().date() + timedelta(days=reminder_days)
stage_close = self.env.ref("mgmtsystem_action.stage_close")
actions = self.search(
[("stage_id", "!=", stage_close.id), ("date_deadline", "=", cur_date)]
)
if actions:
template = self.env.ref(
"mgmtsystem_action.action_email_template_reminder_action"
)
for action in actions:
template.send_mail(action.id)
return True
return False
@api.model
def _get_stage_open(self):
return self.env.ref("mgmtsystem_action.stage_open")
def case_open(self):
"""Opens case."""
# TODO smk: is this used?
return self.write({"active": True, "stage_id": self._get_stage_open().id})
|
agpl-3.0
| -6,366,316,024,966,657,000 | 35.394118 | 87 | 0.586552 | false |
iwm911/plaso
|
plaso/parsers/selinux.py
|
1
|
6640
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains SELinux log file parser in plaso.
Information updated 16 January 2013.
The parser applies to SELinux 'audit.log' file.
An entry log file example is the following:
type=AVC msg=audit(1105758604.519:420): avc: denied { getattr } for pid=5962
comm="httpd" path="/home/auser/public_html" dev=sdb2 ino=921135
The parser will extract the 'type' value, the timestamp and the 'pid'.
In the previous example, the timestamp is '1105758604.519': it represents
the epoch time (seconds since Jan 1, 1970) plus the milliseconds
(epoch: 1105758604, milliseconds: 519).
The number after the timestamp (420 in the example) is a 'serial number'
that can be used to correlate multiple logs generated from the same event.
References
http://selinuxproject.org/page/NB_AL
http://blog.commandlinekungfu.com/2010/08/episode-106-epoch-fail.html
http://www.redhat.com/promo/summit/2010/presentations/
taste_of_training/Summit_2010_SELinux.pdf
"""
import logging
import re
from plaso.lib import errors
from plaso.lib import event
from plaso.lib import lexer
from plaso.lib import text_parser
from plaso.lib import timelib
__author__ = 'Francesco Picasso ([email protected])'
class SELinuxLineEvent(event.TextEvent):
"""Convenience class for a SELinux log line event."""
DATA_TYPE = 'selinux:line'
def __init__(self, timestamp, offset, attributes):
"""Initializes the event object.
Args:
timestamp: The timestamp time value. The timestamp contains the
number of microseconds since Jan 1, 1970 00:00:00 UTC.
offset: The offset of the event.
attributes: A dict that contains the events attributes
"""
super(SELinuxLineEvent, self).__init__(timestamp, attributes)
self.offset = offset
class SELinuxParser(text_parser.SlowLexicalTextParser):
"""Parse SELinux audit log files."""
NAME = 'selinux'
PID_RE = re.compile(r'pid=([0-9]+)[\s]+', re.DOTALL)
tokens = [
# Skipping empty lines, both EOLs are considered here and in other states.
lexer.Token('INITIAL', r'^\r?\n', '', ''),
# FSM entry point ('type=anything msg=audit'), critical to recognize a
# SELinux audit file and used to retrieve the audit type. From there two
# next states are possible: TIME or failure, since TIME state is required.
# An empty type is not accepted and it will cause a failure.
# Examples:
# type=SYSCALL msg=audit(...): ...
# type=UNKNOWN[1323] msg=audit(...): ...
lexer.Token('INITIAL', r'^type=([\w]+(\[[0-9]+\])?)[ \t]+msg=audit',
'ParseType', 'TIMESTAMP'),
lexer.Token('TIMESTAMP', r'\(([0-9]+)\.([0-9]+):([0-9]*)\):',
'ParseTime', 'STRING'),
# Get the log entry description and stay in the same state.
lexer.Token('STRING', r'[ \t]*([^\r\n]+)', 'ParseString', ''),
# Entry parsed. Note that an empty description is managed and it will not
# raise a parsing failure.
lexer.Token('STRING', r'[ \t]*\r?\n', 'ParseMessage', 'INITIAL'),
# The entry is not formatted as expected, so the parsing failed.
lexer.Token('.', '([^\r\n]+)\r?\n', 'ParseFailed', 'INITIAL')
]
def __init__(self, pre_obj, config):
"""Initializes the parser.
Args:
pre_obj: pre-parsing object.
config: configuration object.
"""
# Set local_zone to false, since timestamps are UTC.
super(SELinuxParser, self).__init__(pre_obj, config, False)
self.attributes = {'audit_type': '', 'pid': '', 'body': ''}
self.timestamp = 0
def ParseType(self, match, **_):
"""Parse the audit event type."""
self.attributes['audit_type'] = match.group(1)
def ParseTime(self, match, **_):
"""Parse the log timestamp."""
# TODO: do something with match.group(3) ?
try:
number_of_seconds = int(match.group(1), 10)
timestamp = timelib.Timestamp.FromPosixTime(number_of_seconds)
timestamp += int(match.group(2), 10) * 1000
self.timestamp = timestamp
except ValueError as exception:
logging.error(
u'Unable to retrieve timestamp with error: {0:s}'.format(exception))
self.timestamp = 0
raise lexer.ParseError(u'Not a valid timestamp.')
def ParseString(self, match, **unused_kwargs):
"""Add a string to the body attribute.
This method extends the one from TextParser slightly,
searching for the 'pid=[0-9]+' value inside the message body.
"""
try:
self.attributes['body'] += match.group(1)
# TODO: fix it using lexer or remove pid parsing.
# Indeed this is something that lexer is able to manage, but 'pid' field
# is non positional: so, by doing the following step, the FSM is kept
# simpler. Left the 'to do' as a reminder of possible refactoring.
pid_search = self.PID_RE.search(self.attributes['body'])
if pid_search:
self.attributes['pid'] = pid_search.group(1)
except IndexError:
self.attributes['body'] += match.group(0).strip('\n')
def ParseFailed(self, **unused_kwargs):
"""Entry parsing failed callback."""
raise lexer.ParseError(u'Unable to parse SELinux log line.')
def ParseLine(self, zone):
"""Parse a single line from the SELinux audit file.
This method extends the one from TextParser slightly, creating a
SELinux event with the timestamp (UTC) taken from log entries.
Args:
zone: The timezone of the host computer, not used since the
timestamp are UTC.
Returns:
An event object (instance of EventObject) that is constructed
from the selinux entry.
"""
if not self.timestamp:
raise errors.TimestampNotCorrectlyFormed(
u'Unable to parse entry, timestamp not defined.')
offset = getattr(self, 'entry_offset', 0)
event_object = SELinuxLineEvent(self.timestamp, offset, self.attributes)
self.timestamp = 0
return event_object
|
apache-2.0
| 6,007,773,048,467,449,000 | 36.727273 | 79 | 0.674548 | false |
ColumbiaCMB/kid_readout
|
kid_readout/roach/r2_stream_data.py
|
1
|
9823
|
"""
Processing pipeline:
* Capture UDP packets
* (Buffer)
* Decode int16 -> float32, recover packet sequence number
* Demodulate
* (Buffer)
* Filter
* Write to disk
Usage:
* Do sweeps of resonators
* Set tones to resonant frequencies
* Start streaming processing pipeline for as long as desired
"""
import numpy as np
import socket
from contextlib import closing
import multiprocessing as mp
import time
import ctypes
from Queue import Empty as EmptyException
from kid_readout.roach import demodulator
pkt_size = 4100
data_ctype = ctypes.c_uint8
data_dtype = np.uint8
sequence_num_ctype = ctypes.c_uint32
counter_dtype = np.uint32
chns_per_pkt = 1024
samples_per_packet = 1024
class ReadoutPipeline:
def __init__(self, nchans, num_data_buffers=4, num_packets_per_buffer=2 ** 12, output_size=2 ** 20,
host_address=('10.0.0.1',55555)):
packet_buffer_size = pkt_size * num_packets_per_buffer
self.num_data_buffers = num_data_buffers
self.packet_data_buffers = [mp.Array(data_ctype, packet_buffer_size) for b in range(num_data_buffers)]
demodulated_buffer_size = num_packets_per_buffer*samples_per_packet*np.dtype(np.complex64).size
self.demodulated_data_buffers = [mp.Array(ctypes.c_uint8, demodulated_buffer_size) for b in range(num_data_buffers)]
self.real_time_data_buffer = mp.Array(ctypes.c_uint8, output_size*np.dtype(np.complex64).size)
# im not sure we are using this correctly. should there be a lock somewhere?
# or does it not matter because only one process gets it
self._sequence_num_buffer = mp.Array(sequence_num_ctype, output_size)
self.sequence_num = np.frombuffer(self._sequence_num_buffer.get_obj(), dtype=counter_dtype)
self.sequence_num[:] = 0
self.capture_status = mp.Array(ctypes.c_char, 32)
self.demodulate_status = mp.Array(ctypes.c_char, 32)
self._num_bad_packets = mp.Value(ctypes.c_uint)
self.num_bad_packets = self._num_bad_packets.get_obj()
self.num_bad_packets.value = 0
self.packet_input_queue = mp.Queue()
self.packet_output_queue = mp.Queue()
self.demodulated_input_queue = mp.Queue()
self.demodulated_output_queue = mp.Queue()
for i in range(num_data_buffers):
self.packet_input_queue.put(i)
self.demodulated_input_queue.put(i)
self.process_data = DecodePacketsAndDemodulateProcess(packet_data_buffers=self.packet_data_buffers,
num_packets_per_buffer=num_packets_per_buffer,
packet_output_queue=self.packet_output_queue,
packet_input_queue=self.packet_input_queue,
demodulated_data_buffers=self.demodulated_data_buffers,
                                                               demodulated_input_queue=self.demodulated_input_queue,
                                                               demodulated_output_queue=self.demodulated_output_queue,
sequence_num_buffer=self._sequence_num_buffer,
output_size=output_size,
nchans=nchans, status = self.demodulate_status)
self.read_data = CapturePacketsProcess(packet_data_buffers=self.packet_data_buffers,
num_packets_per_buffer=num_packets_per_buffer,
packet_input_queue=self.packet_input_queue,
packet_output_queue=self.packet_output_queue,
bad_packets_counter=self._num_bad_packets,
host_address=host_address, status = self.capture_status)
def close(self):
self.packet_input_queue.put(None)
self.packet_output_queue.put(None)
self.read_data.child.join()
self.process_data.child.join()
class DecodePacketsAndDemodulateProcess:
def __init__(self, packet_data_buffers, demodulated_data_buffers, num_packets_per_buffer,
packet_input_queue, packet_output_queue,
                 demodulated_input_queue, demodulated_output_queue,
                 sequence_num_buffer, output_size,
nchans, status):
self.output_index = 0
self.packet_data_buffers = packet_data_buffers
self.demodulated_data_buffers = demodulated_data_buffers
self.num_packets_per_buffer = num_packets_per_buffer
self.packet_input_queue = packet_input_queue
self.packet_output_queue = packet_output_queue
        self.demodulated_input_queue = demodulated_input_queue
        self.demodulated_output_queue = demodulated_output_queue
self.sequence_num_buffer = sequence_num_buffer
self.output_size = output_size
self.nchans = nchans
self.status = status
self.status.value = "not started"
self.child = mp.Process(target=self.run)
self.child.start()
def run(self):
self.demodulator = demodulator.StreamDemodulator(tone_bins=self.tone_bins,phases=self.phases,
tone_nsamp=self.tone_nsamp, fft_bins=self.fft_bins,
nfft=self.nfft, )
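        # NOTE: tone_bins, phases, tone_nsamp, fft_bins and nfft are never set in
        # __init__ above; they must be attached to this object before the child
        # process reaches this line, otherwise it raises AttributeError.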
while True:
try:
process_me = self.packet_output_queue.get_nowait()
except EmptyException:
self.status.value = "waiting"
time.sleep(0.01)
continue
if process_me is None:
break
else:
self.status.value = "blocked"
output_to = self.demodulated_input_queue.get()
with self.packet_data_buffers[process_me].get_lock(), self.demodulated_data_buffers[output_to].get_lock():
self.status.value = "processing"
packets = np.frombuffer(self.packet_data_buffers[process_me].get_obj(), dtype=data_dtype)
packets.shape=(self.num_packets_per_buffer, pkt_size)
pkt_counter = packets.view(counter_dtype)[:,-1]
# contiguous = np.all(np.diff(pkt_counter)==1)
demod_data = np.frombuffer(self.demodulated_data_buffers[output_to].get_obj(), dtype=np.complex64)
raw_data = packets[:,:-4].view('<i2').astype(np.float32).view(np.complex64)
raw_data = raw_data.reshape((-1,self.nchans))
demod_data = np.transpose(demod_data.reshape((-1,self.nchans,2)),axes=(2,0,1))
# Decode packets
for k in range(self.num_packets_per_buffer):
si = k * chns_per_pkt
sf = (k + 1) * chns_per_pkt
demod_data[si:sf] = packets[k,:-4].view('<i2').astype('float32').view('complex64')
self.sequence_num_buffer[self.output_index] = pkt_counter[k]
self.output_index += 1
self.output_index %= self.output_size
demod_data = demod_data.reshape((-1,self.nchans))
# data = r2.demodulate_stream(data, pkt_counter)
#print "decode ", timeit.default_timer() - t0
self.demodulated_output_queue.put(output_to)
self.packet_input_queue.put(process_me)
self.status.value = "exiting"
return None
class CapturePacketsProcess:
def __init__(self, packet_data_buffers, num_packets_per_buffer, packet_input_queue, packet_output_queue,
bad_packets_counter, host_address,status):
self.packet_data_buffers = packet_data_buffers
self.num_packets_per_buffer = num_packets_per_buffer
self.packet_input_queue = packet_input_queue
self.packet_output_queue = packet_output_queue
self.bad_packets_counter = bad_packets_counter
self.host_address = host_address
self.status = status
self.status.value = "starting"
self.child = mp.Process(target=self.run)
self.child.start()
def run(self):
with closing(socket.socket(socket.AF_INET,socket.SOCK_DGRAM)) as s:
s.bind(self.host_address)
s.settimeout(1)
while True:
try:
process_me = self.packet_input_queue.get_nowait()
except EmptyException:
self.status.value = "blocked"
time.sleep(0.005)
continue
if process_me is None:
break
else:
with self.packet_data_buffers[process_me].get_lock():
self.status.value = "processing"
#t0 = timeit.default_timer()
packet_buffer = np.frombuffer(self.packet_data_buffers[process_me].get_obj(), dtype=data_dtype)
packet_buffer.shape=(self.num_packets_per_buffer, pkt_size)
i = 0
while i < self.num_packets_per_buffer:
pkt = s.recv(5000)
if len(pkt) == pkt_size:
packet_buffer[i,:] = np.frombuffer(pkt, dtype=data_dtype)
i += 1
else:
print "got a bad packet"
self.bad_packets_counter.value += 1
#print "read: ", timeit.default_timer() - t0
self.packet_output_queue.put(process_me)
self.status.value = "exiting"
return None
|
bsd-2-clause
| -6,831,429,427,017,959,000 | 46.225962 | 124 | 0.552275 | false |
qeedquan/misc_utilities
|
math/controls/response_example.py
|
1
|
4147
|
# https://www.wescottdesign.com/articles/pid/pidWithoutAPhd.pdf
from math import *
import cmath
import numpy as np
import matplotlib.pyplot as plt
"""
The motor response is represented by the differential equation
y''(t) = 1/tau(kV - y'(t))
y(t) represents the angle position at time t
We can classify this differential equation as a
2nd order linear non-homogeneous ode
y''(t) + y'(t)/tau = kV/tau
Rewrite it into a standard schema
y''(t) + ay'(t) = c where a and c are constants
a = 1/tau
c = kV/tau
The general solution to this ODE is
y(t) = k1*exp(-a*t)/a + (c/a)*t + k2
where k1 and k2 are constants determined by
the initial conditions.
With this equation, we can predict how the motor angle will
change under a step input by plugging in
y(t0) and y(t1) and solving for the constants k1 and k2
V is the voltage induced
k represents the gain in volts/degrees, it says how fast the motor will turn when voltage changes
tau is in seconds, and represent how fast the motor speed will settle to a constant speed when voltage changes
"""
def solve_motor_ivp(tau, k, V, t0, t1):
    a = 1/tau
    c = k*V/tau
    A = np.array([[exp(-a*t0)/a, 1],
                  [exp(-a*t1)/a, 1]])
    b = np.array([t0 - c*t0/a,
                  t1 - c*t1/a])
    return np.linalg.solve(A, b)
def make_motor_function(tau, k, V, t0, t1):
    r = solve_motor_ivp(tau, k, V, t0, t1)
    a = 1/tau
    c = k*V/tau
    return lambda t: r[0]*exp(-a*t)/a + c*t/a + r[1]
def plot_motor_position():
t0 = 0
t1 = 1
xs = np.linspace(t0, t1, 100)
plt.clf()
fig, axes = plt.subplots(3, 1)
for V in range(1, 11):
k = 1
tau = 0.2
f = make_motor_function(tau, k, V, t0, t1)
ys = np.array([f(x) for x in xs])
axes[0].plot(xs, ys, label='V={} k={} tau={}'.format(V, k, tau))
axes[0].legend()
for k in range(1, 11):
V = 9
f = make_motor_function(tau, k, V, t0, t1)
ys = np.array([f(x) for x in xs])
axes[1].plot(xs, ys, label='V={} k={} tau={}'.format(V, k, tau))
axes[1].legend()
for tau in range(0, 100, 10):
if tau == 0:
tau = 1
V = 9
k = 1
        f = make_motor_function(tau, k, V, t0, t1)
        ys = np.array([f(x) for x in xs])
        axes[2].plot(xs, ys, label='V={} k={} tau={}'.format(V, k, tau))
axes[2].legend()
fig.set_size_inches(18.5, 10.5)
fig.savefig('motor_position.png', dpi=100)
"""
The non-contact precision actuator has the response of
y''(t) = k_i/m * i
V(t) = k_t * y(t)
where V(t) is the output transducer
The standard schema for this differential equation is
y''(t) = c where c is a constant
c = k_i/m * i
The general solution is then
y(t) = c*t^2/2 + k1*t + k2
where k1 and k2 are constants determined by initial conditions
k_i is the coil force constant in N/A
k_t is the gain in V/m
m is the total mass of the stage, magnet and the moving portion
i is the current in A
"""
def solve_actuator_ivp(k_i, m, i, t0, t1):
c = k_i/m * i
A = np.array([[t0, 1],
[t1, 1]])
b = np.array([t0 - c*t0*t0/2,
t1 - c*t1*t1/2])
return np.linalg.solve(A, b)
def make_actuator_function(k_t, k_i, m, i, t0, t1):
c = k_i/m * i
r = solve_actuator_ivp(k_i, m, i, t0, t1)
return lambda t: k_t*(c*t*t/2 + r[0]*t + r[1])
def plot_actuator_position():
t0 = 0
t1 = 1
k_g = 1
k_i = 1
m = 1
plt.clf()
fig, axes = plt.subplots(2, 1)
xs = np.linspace(t0, t1, 100)
for i in range(0, 10):
f = make_actuator_function(k_g, k_i, m, i, t0, t1)
ys = np.array([f(x) for x in xs])
axes[0].plot(xs, ys, label='k_g={} k_i={} m={} i={}'.format(k_g, k_i, m, i))
axes[0].legend()
for m in range(1, 10):
i = 0.5
f = make_actuator_function(k_g, k_i, m, i, t0, t1)
ys = np.array([f(x) for x in xs])
axes[1].plot(xs, ys, label='k_g={} k_i={} m={} i={}'.format(k_g, k_i, m, i))
axes[1].legend()
fig.set_size_inches(18.5, 10.5)
fig.savefig('actuator_position.png', dpi=100)
plot_motor_position()
plot_actuator_position()
|
mit
| -4,235,555,004,952,766,000 | 25.935065 | 110 | 0.570292 | false |
slackhq/python-slackclient
|
integration_tests/samples/socket_mode/bolt_adapter/websocket_client.py
|
1
|
1064
|
import os
from time import time
from typing import Optional
from slack_sdk.socket_mode.request import SocketModeRequest
from slack_sdk.socket_mode.websocket_client import SocketModeClient
from slack_bolt import App
from .base_handler import BaseSocketModeHandler
from .internals import run_bolt_app, send_response
from slack_bolt.response import BoltResponse
class SocketModeHandler(BaseSocketModeHandler):
app: App # type: ignore
app_token: str
client: SocketModeClient
def __init__( # type: ignore
self,
app: App, # type: ignore
app_token: Optional[str] = None,
):
self.app = app
self.app_token = app_token or os.environ["SLACK_APP_TOKEN"]
self.client = SocketModeClient(app_token=self.app_token)
self.client.socket_mode_request_listeners.append(self.handle)
def handle(self, client: SocketModeClient, req: SocketModeRequest) -> None:
start = time()
bolt_resp: BoltResponse = run_bolt_app(self.app, req)
send_response(client, req, bolt_resp, start)
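# Illustrative wiring (editor's sketch; assumes the usual connect/start helpers
# provided elsewhere in this package):
#   app = App(token=os.environ["SLACK_BOT_TOKEN"])
#   handler = SocketModeHandler(app, os.environ["SLACK_APP_TOKEN"])
#   handler.client.connect()  # SocketModeClient.connect() opens the socket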
|
mit
| -4,845,273,409,917,624,000 | 32.25 | 79 | 0.705827 | false |
Aidan-Bharath/code_and_stuffs
|
polar_3d.py
|
1
|
2209
|
from __future__ import division
from panelCut import *
from matplotlib import cm
from PySide.QtCore import *
from PySide.QtGui import *
import sys
import os
def magnitude(a):
mag = np.sqrt(a['u']**2+a['v']**2+a['w']**2)
return mag
def polar3d(probe,adcp,timestart,timestop):
fig = plt.figure()
ax = fig.gca(projection='3d')
#probe = probe.mean(axis=2)
#probe = probe[::20]
#adcp = adcp.mean(axis=2)
#adcp = adcp[::20]
theta_probe = np.arctan2(probe['v'],probe['u'])
mag_probe = magnitude(probe)
theta_adcp = np.arctan2(adcp['v'],adcp['u'])
mag_adcp = magnitude(adcp)
X = mag_probe*np.cos(theta_probe)
Y = mag_probe*np.sin(theta_probe)
A = mag_adcp*np.cos(theta_adcp)
B = mag_adcp*np.sin(theta_adcp)
Z = np.linspace(0,probe['u'].index[-1].hour - probe['u'].index[0].hour,len(X))
print Z
C = np.linspace(0,probe['u'].index[-1].hour - probe['u'].index[0].hour,len(A))
ax.plot(X,Y,Z,color='blue',label='GP-BPb-FVCOM')
ax.plot(A,B,C,color='red',label='GP-BPb-ADCP')
ax.set_xlabel('Eastern Velocity (m/s)')
ax.set_ylabel('Northern Velocity (m/s)')
ax.set_zlabel('Time (hr)')
ax.set_xlim((-0.25,0.65))
ax.set_ylim((0.5,2))
plt.legend()
plt.title('Depth Averaged Velocity Magnitude between \n '+timestart+' and '+timestop)
plt.show()
if __name__ == "__main__":
"""
app = QApplication(sys.argv)
caption = 'Open Files'
directory = './'
adcp = QFileDialog.getOpenFileNames(None, caption, directory)[0]
adcp = adcp[0]
p1 = QFileDialog.getOpenFileNames(None, caption, directory)[0]
p1 = p1[0]
"""
bpb = '/home/aidan/thesis/probe_data/panels/2013/june_july/GP-130621-24-BPb_sigmas'
ta = '/home/aidan/thesis/probe_data/panels/2013/june_july/GP-130621-BPb-hr'
bpb = pd.read_pickle(bpb)
ta = pd.read_pickle(ta)
bpb = bpb.mean(axis=2)
ta = ta.mean(axis=2)
bpb = bpb.reindex(index=bpb.index.to_datetime())
ta = ta.reindex(index=ta.index.to_datetime())
ta = ta.resample('2T')
time = ['2013-06-21 08:00:00','2013-06-21 12:00:00']
bpb = bpb[time[0]:time[1]]
ta = ta[time[0]:time[1]]
polar3d(bpb,ta,time[0],time[1])
|
mit
| 7,691,573,705,744,837,000 | 29.260274 | 89 | 0.6134 | false |
alephdata/aleph
|
aleph/migrate/versions/5b681871c1ab_introduce_export_model.py
|
1
|
1909
|
"""Introduce Export model
Revision ID: 5b681871c1ab
Revises: 18f53aae83ae
Create Date: 2020-07-28 11:26:26.392701
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "5b681871c1ab"
down_revision = "18f53aae83ae"
def upgrade():
op.create_table(
"export",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("updated_at", sa.DateTime(), nullable=True),
sa.Column("label", sa.Unicode(), nullable=True),
sa.Column("operation", sa.Unicode(), nullable=True),
sa.Column("creator_id", sa.Integer(), nullable=True),
sa.Column("collection_id", sa.Integer(), nullable=True),
sa.Column("expires_at", sa.DateTime(), nullable=True),
sa.Column("deleted", sa.Boolean(), nullable=True),
sa.Column("export_status", sa.Unicode(), nullable=True),
sa.Column("content_hash", sa.Unicode(length=65), nullable=True),
sa.Column("file_size", sa.BigInteger(), nullable=True),
sa.Column("file_name", sa.Unicode(), nullable=True),
sa.Column("mime_type", sa.Unicode(), nullable=True),
sa.Column("meta", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(["collection_id"], ["collection.id"],),
sa.ForeignKeyConstraint(["creator_id"], ["role.id"],),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_export_collection_id"), "export", ["collection_id"], unique=False
)
op.create_index(
op.f("ix_export_content_hash"), "export", ["content_hash"], unique=False
)
def downgrade():
op.drop_index(op.f("ix_export_content_hash"), table_name="export")
op.drop_index(op.f("ix_export_collection_id"), table_name="export")
op.drop_table("export")
|
mit
| 7,512,912,441,888,649,000 | 36.431373 | 82 | 0.643269 | false |
walkover/auto-tracking-cctv-gateway
|
gateway/camera/recognizor.py
|
1
|
1064
|
import cv2
import face_recognition
def recognize_face(frame, faces, tolerance=0.50):
    # detect and encode faces on a quarter-size frame to speed things up
    small_frame = cv2.resize(frame, (0, 0), fx=0.25, fy=0.25)
    locations = face_recognition.face_locations(small_frame)
    encodings = face_recognition.face_encodings(small_frame, locations)
    names = []
    for encoding in encodings:
        # compare against the known encodings; index 0 corresponds to the
        # first (and here only expected) known face
        matches = face_recognition.compare_faces(faces, encoding, tolerance)
        if matches[0]:
            names.append('So eun')
        else:
            names.append('Unknown')
    __display_recognized_faces(frame, locations, names)
    return names
def __display_recognized_faces(frame, locations, names):
for (top, right, bottom, left), name in zip(locations, names):
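        # scale coordinates back up: detection ran on a quarter-size frame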
top *= 4
right *= 4
bottom *= 4
left *= 4
cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
font = cv2.FONT_HERSHEY_DUPLEX
cv2.putText(frame, name, (left + 6, bottom - 6),
font, 1.0, (255, 255, 255), 1)
cv2.imshow('display', frame)
cv2.waitKey(1)
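# Minimal usage sketch (illustrative; the image path and camera index are
# assumptions, not part of the gateway wiring):
#
#   known = face_recognition.face_encodings(
#       face_recognition.load_image_file('known_face.jpg'))
#   capture = cv2.VideoCapture(0)
#   while True:
#       ok, frame = capture.read()
#       if not ok:
#           break
#       recognize_face(frame, known)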
|
mit
| -4,840,927,029,344,294,000 | 27.756757 | 75 | 0.603383 | false |
kmatzen/nyc3dcars-toolkit
|
query_utils.py
|
1
|
8694
|
"""A set of utility functions for querying the database."""
from sqlalchemy import func, desc
from nyc3dcars import Photo, Model, Vehicle, Detection
import math
import numpy
def overlap(geom_a, geom_b):
"""Computes the overlap between two bounding boxes."""
intersection_score = intersection(geom_a, geom_b)
area1 = (geom_a.x2 - geom_a.x1) * (geom_a.y2 - geom_a.y1)
area2 = (geom_b.y2 - geom_b.y1) * (geom_b.x2 - geom_b.x1)
union_score = area1 + area2 - intersection_score
overlap_score = intersection_score / union_score
return overlap_score
def overlap_asym(geom_a, geom_b):
"""Computes an asymmetric overlap between two bounding boxes."""
intersection_score = intersection(geom_a, geom_b)
area2 = (geom_b.y2 - geom_b.y1) * (geom_b.x2 - geom_b.x1)
overlap_score = intersection_score / area2
return overlap_score
def intersection(geom_a, geom_b):
"""Computes the interesction of two bounding boxes."""
intersection_score = func.greatest(0,
(func.least(geom_a.x2, geom_b.x2) -
func.greatest(geom_a.x1, geom_b.x1))) * \
func.greatest(0,
(func.least(geom_a.y2, geom_b.y2) -
func.greatest(geom_a.y1, geom_b.y1)))
return intersection_score
def union(geom_a, geom_b):
"""Computes the union of two bounding boxes."""
intersection_score = intersection(geom_a, geom_b)
area1 = (geom_a.x2 - geom_a.x1) * (geom_a.y2 - geom_a.y1)
area2 = (geom_b.y2 - geom_b.y1) * (geom_b.x2 - geom_b.x1)
union_score = area1 + area2 - intersection_score
return union_score
def match(labels):
"""Computes matching between detections and annotations."""
true_positive = []
covered = []
selected = []
for label in labels:
if label.did in true_positive:
continue
if label.vid in covered:
continue
true_positive += [label.did]
covered += [label.vid]
selected += [label]
return selected
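# Illustrative example (with stand-in label objects, not real query rows):
# given labels sorted by descending overlap with attributes .did/.vid, e.g.
# (did=1, vid=7), (did=1, vid=8), (did=2, vid=7), (did=2, vid=8),
# match() greedily keeps (did=1, vid=7) and (did=2, vid=8), so each
# detection and each vehicle is matched at most once.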
def get_detections(session, score, query_filters, model):
"""Selects all detections that satisfy query filters."""
# pylint: disable-msg=E1101
detections = session.query(
score.label('score')) \
.join(Photo) \
.join(Model) \
.filter(Model.filename == model) \
        .filter(Photo.test == True)
# pylint: enable-msg=E1101
for query_filter in query_filters:
detections = detections.filter(query_filter)
return detections.all()
def precision_recall_threshold(labels, detections, threshold):
"""Computes precision and recall for particular threshold."""
thresholded_labels = [
label for label in labels if label.score >= threshold]
thresholded_detections = [
detection for detection in detections if detection.score >= threshold]
num_detections = len(thresholded_detections)
selected = match(thresholded_labels)
return len(selected), num_detections, threshold
def get_num_vehicles(session, query_filters):
"""Gets the total number of annotations."""
# pylint: disable-msg=E1101
num_vehicles_query = session.query(
func.count(Vehicle.id)) \
.join(Photo) \
        .filter(Photo.test == True)
# pylint: enable-msg=E1101
for query_filter in query_filters:
num_vehicles_query = num_vehicles_query.filter(query_filter)
num_vehicles, = num_vehicles_query.one()
return num_vehicles
def orientation_sim_threshold(labels, detections, threshold):
"""Computes orientation similarity and recall for particular threshold."""
thresholded_labels = [
label for label in labels if label.score >= threshold]
thresholded_detections = [
detection for detection in detections if detection.score >= threshold]
num_detections = len(thresholded_detections)
selected = match(thresholded_labels)
return sum(s.orientation_similarity for s in selected), len(selected), num_detections
def precision_recall(session, score, detection_filters, vehicle_filters, model):
"""Computes precision-recall curve."""
num_vehicles = get_num_vehicles(session, vehicle_filters)
overlap_score = overlap(Detection, Vehicle)
# pylint: disable-msg=E1101
labels = session.query(
overlap_score.label('overlap'),
Vehicle.id.label('vid'),
Detection.id.label('did'),
score.label('score')) \
.select_from(Detection) \
.join(Photo) \
.join(Vehicle) \
.join(Model) \
.filter(Model.filename == model) \
.filter(Photo.test == True) \
.filter(overlap_score > 0.5)
# pylint: enable-msg=E1101
for query_filter in detection_filters:
labels = labels.filter(query_filter)
for query_filter in vehicle_filters:
labels = labels.filter(query_filter)
labels = labels.order_by(desc(overlap_score)).all()
detections = get_detections(session, score, detection_filters, model)
# pylint: disable-msg=E1101
range_query = session.query(
func.min(Detection.score),
func.max(Detection.score)) \
.join(Photo) \
.join(Model) \
.filter(Model.filename == model) \
.filter(Photo.test == True)
# pylint: enable-msg=E1101
for query_filter in detection_filters:
range_query = range_query.filter(query_filter)
low, high = range_query.one()
model = session.query(Model) \
.filter_by(filename=model) \
.one()
    thresholds_linear = [1 - i / 499.0 for i in range(500)]
    step = (high - low) / 500.0
    thresholds_sigmoid = [
        1.0 / (1.0 + math.exp(model.a * (step * i + low) + model.b))
        for i in range(500)
    ]
thresholds = thresholds_linear + thresholds_sigmoid
thresholds.sort(key=lambda k: -k)
thresholded = [precision_recall_threshold(labels, detections, threshold)
for threshold in thresholds]
return numpy.array([(
float(tp) / num_detections if num_detections > 0 else 1,
float(tp) / num_vehicles if num_vehicles > 0 else 1,
threshold,
) for tp, num_detections, threshold in thresholded])
def orientation_similarity(session, score, detection_filters,
vehicle_filters, model):
"""Computes orientation similarity-recall curve."""
num_vehicles = get_num_vehicles(session, vehicle_filters)
overlap_score = overlap(Detection, Vehicle)
# pylint: disable-msg=E1101
labels = session.query(
overlap_score.label('overlap'),
Vehicle.id.label('vid'),
Detection.id.label('did'),
(-Vehicle.theta / 180 * math.pi + math.pi).label('gt'),
(Detection.world_angle).label('d'),
((1 + func.cos(-Vehicle.theta / 180 * math.pi + math.pi - Detection.world_angle))
/ 2).label('orientation_similarity'),
score.label('score')) \
.select_from(Detection) \
.join(Photo) \
.join(Vehicle) \
.join(Model) \
.filter(Model.filename == model) \
.filter(Photo.test == True) \
.filter(overlap_score > 0.5)
# pylint: enable-msg=E1101
for query_filter in detection_filters:
labels = labels.filter(query_filter)
for query_filter in vehicle_filters:
labels = labels.filter(query_filter)
labels = labels.order_by(desc(overlap_score)).all()
detections = get_detections(session, score, detection_filters, model)
# pylint: disable-msg=E1101
range_query = session.query(
func.min(Detection.score),
func.max(Detection.score)) \
.join(Photo) \
.join(Model) \
.filter(Model.filename == model) \
.filter(Photo.test == True)
# pylint: enable-msg=E1101
for query_filter in detection_filters:
range_query = range_query.filter(query_filter)
low, high = range_query.one()
model = session.query(Model) \
.filter_by(filename=model) \
.one()
    thresholds_linear = [1 - i / 499.0 for i in range(500)]
    step = (high - low) / 500.0
    thresholds_sigmoid = [
        1.0 / (1.0 + math.exp(model.a * (step * i + low) + model.b))
        for i in range(500)
    ]
thresholds = thresholds_linear + thresholds_sigmoid
thresholds.sort(key=lambda k: -k)
thresholded = [orientation_sim_threshold(
labels, detections, threshold) for threshold in thresholds]
return numpy.array([(
aos / num_detections if num_detections > 0 else 1,
float(tp) / num_vehicles if num_vehicles > 0 else 1
) for aos, tp, num_detections in thresholded])
|
bsd-3-clause
| -4,930,403,148,584,195,000 | 29.939502 | 89 | 0.624914 | false |
kalliope-project/kalliope
|
kalliope/neurons/mqtt_publisher/mqtt_publisher.py
|
1
|
6010
|
import logging
import socket
import paho
import paho.mqtt.client as mqtt
from kalliope.core.NeuronModule import NeuronModule
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Mqtt_publisher(NeuronModule):
def __init__(self, **kwargs):
super(Mqtt_publisher, self).__init__(**kwargs)
logger.debug("[mqtt_publisher] neuron called with parameters: %s" % kwargs)
# get parameters
self.broker_ip = kwargs.get('broker_ip', None)
self.port = kwargs.get('port', 1883)
self.topic = kwargs.get('topic', None)
self.payload = kwargs.get('payload', None)
self.qos = kwargs.get('qos', 0)
self.retain = kwargs.get('retain', False)
self.client_id = kwargs.get('client_id', 'kalliope')
self.keepalive = kwargs.get('keepalive', 60)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.ca_cert = kwargs.get('ca_cert', None)
self.certfile = kwargs.get('certfile', None)
self.keyfile = kwargs.get('keyfile', None)
self.protocol = kwargs.get('protocol', 'MQTTv311')
self.tls_insecure = kwargs.get('tls_insecure', False)
if not self._is_parameters_ok():
logger.debug("[mqtt_publisher] One or more invalid parameters, neuron will not be launched")
else:
            # the protocol name string must be converted to its paho constant
            self.protocol = self._get_protocol(self.protocol)
            self.client = mqtt.Client(client_id=self.client_id, protocol=self.protocol)
if self.username is not None and self.password is not None:
logger.debug("[mqtt_publisher] Username and password are set")
self.client.username_pw_set(self.username, self.password)
if self.ca_cert is not None and self.certfile is not None and self.keyfile is not None:
logger.debug("[mqtt_publisher] Active TLS with client certificate authentication")
self.client.tls_set(ca_certs=self.ca_cert,
certfile=self.certfile,
keyfile=self.keyfile)
self.client.tls_insecure_set(self.tls_insecure)
elif self.ca_cert is not None:
logger.debug("[mqtt_publisher] Active TLS with server CA certificate only")
self.client.tls_set(ca_certs=self.ca_cert)
self.client.tls_insecure_set(self.tls_insecure)
try:
self.client.connect(self.broker_ip, port=self.port, keepalive=self.keepalive)
self.client.publish(topic=self.topic, payload=self.payload, qos=int(self.qos), retain=self.retain)
logger.debug("[mqtt_publisher] Message published to topic %s: %s" % (self.topic, self.payload))
self.client.disconnect()
except socket.error:
logger.debug("[mqtt_publisher] Unable to connect to broker %s" % self.broker_ip)
def _is_parameters_ok(self):
if self.broker_ip is None:
print("[mqtt_publisher] ERROR: broker_ip is not set")
return False
if self.port is not None:
if not isinstance(self.port, int):
try:
self.port = int(self.port)
except ValueError:
print("[mqtt_publisher] ERROR: port must be an integer")
return False
if self.topic is None:
print("[mqtt_publisher] ERROR: topic is not set")
return False
if self.payload is None:
print("[mqtt_publisher] ERROR: payload is not set")
return False
if self.qos:
if not isinstance(self.qos, int):
try:
self.qos = int(self.qos)
except ValueError:
print("[mqtt_publisher] ERROR: qos must be an integer")
return False
if self.qos not in [0, 1, 2]:
print("[mqtt_publisher] ERROR: qos must be 0,1 or 2")
return False
if self.keepalive:
if not isinstance(self.keepalive, int):
try:
self.keepalive = int(self.keepalive)
except ValueError:
print("[mqtt_publisher] ERROR: keepalive must be an integer")
return False
if self.username is not None and self.password is None:
print("[mqtt_publisher] ERROR: password must be set when using username")
return False
if self.username is None and self.password is not None:
print("[mqtt_publisher] ERROR: username must be set when using password")
return False
if self.protocol:
if self.protocol not in ["MQTTv31", "MQTTv311"]:
print("[mqtt_publisher] Invalid protocol value, fallback to MQTTv311")
self.protocol = "MQTTv311"
# if the user set a certfile, the key and ca cert must be set to
if self.certfile is not None and self.keyfile is None:
print("[mqtt_publisher] ERROR: keyfile must be set when using certfile")
return False
if self.certfile is None and self.keyfile is not None:
print("[mqtt_publisher] ERROR: certfile must be set when using keyfile")
return False
if self.certfile is not None and self.keyfile is not None:
if self.ca_cert is None:
print("[mqtt_publisher] ERROR: ca_cert must be set when using keyfile and certfile")
return False
return True
def _get_protocol(self, protocol):
"""
Return the right code depending on the given string protocol name
:param protocol: string name of the protocol to use.
:return: integer
"""
if protocol == "MQTTv31":
return paho.mqtt.client.MQTTv31
return paho.mqtt.client.MQTTv311
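# Minimal usage sketch (assumed broker address and topic; within Kalliope the
# neuron is normally declared in a brain file rather than instantiated
# directly):
#
#   Mqtt_publisher(broker_ip='127.0.0.1',
#                  topic='kalliope/test',
#                  payload='hello')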
|
gpl-3.0
| 8,015,126,353,587,675,000 | 41.027972 | 114 | 0.585691 | false |
magopian/tcp
|
tcp/core/migrations/0001_initial.py
|
1
|
2409
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
depends_on = [('provider', '0001_initial')]
def forwards(self, orm):
# Adding model 'Request'
db.create_table('core_request', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('initial_code', self.gf('django.db.models.fields.TextField')()),
('video_link', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
('is_valid', self.gf('django.db.models.fields.BooleanField')(default=False)),
('clean_code', self.gf('django.db.models.fields.TextField')(blank=True)),
('message', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('provider', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['provider.Provider'], null=True, blank=True)),
))
db.send_create_signal('core', ['Request'])
def backwards(self, orm):
# Deleting model 'Request'
db.delete_table('core_request')
models = {
'core.request': {
'Meta': {'object_name': 'Request'},
'clean_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initial_code': ('django.db.models.fields.TextField', [], {}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['provider.Provider']", 'null': 'True', 'blank': 'True'}),
'video_link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'provider.provider': {
'Meta': {'object_name': 'Provider'},
'embed_template': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_template': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['core']
|
bsd-3-clause
| -6,178,174,402,755,163,000 | 47.18 | 144 | 0.571191 | false |
django-danceschool/django-danceschool
|
danceschool/payments/payatdoor/views.py
|
1
|
3095
|
from django.http import HttpResponseRedirect, HttpResponseBadRequest
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.views.generic import FormView
from danceschool.core.models import CashPaymentRecord, Invoice
from danceschool.core.helpers import getReturnPage
from .forms import WillPayAtDoorForm, DoorPaymentForm
import logging
# Define logger for this file
logger = logging.getLogger(__name__)
class WillPayAtDoorView(FormView):
    form_class = WillPayAtDoorForm
def post(self, request, *args, **kwargs):
logger.info('Received request for at-the-door payment.')
self.request = request
return super().post(request, *args, **kwargs)
def form_valid(self, form):
invoice = form.cleaned_data.get('invoice')
instance = form.cleaned_data.get('instance')
invoice.status = Invoice.PaymentStatus.unpaid
invoice.save()
if getattr(invoice, 'registration', None):
invoice.registration.finalize()
if instance:
return HttpResponseRedirect(instance.successPage.get_absolute_url())
def form_invalid(self, form):
return HttpResponseBadRequest(str(form.errors))
class PayAtDoorView(FormView):
form_class = DoorPaymentForm
def post(self, request, *args, **kwargs):
logger.info('Received request for at-the-door payment.')
self.request = request
return super().post(request, *args, **kwargs)
def form_valid(self, form):
invoice = form.cleaned_data.get('invoice')
amountPaid = form.cleaned_data.get('amountPaid')
subUser = form.cleaned_data.get('submissionUser')
paymentMethod = form.cleaned_data.get('paymentMethod')
payerEmail = form.cleaned_data.get('payerEmail')
receivedBy = form.cleaned_data.get('receivedBy')
if not invoice:
return HttpResponseBadRequest("No invoice")
this_cash_payment = CashPaymentRecord.objects.create(
invoice=invoice, amount=amountPaid,
status=CashPaymentRecord.PaymentStatus.collected,
paymentMethod=paymentMethod,
payerEmail=payerEmail,
submissionUser=subUser, collectedByUser=receivedBy,
)
invoice.processPayment(
amount=amountPaid, fees=0, paidOnline=False, methodName=paymentMethod,
submissionUser=subUser, collectedByUser=receivedBy,
methodTxn='CASHPAYMENT_%s' % this_cash_payment.recordId,
forceFinalize=True,
)
# Send users back to the invoice to confirm the successful payment.
# If none is specified, then return to the registration page.
returnPage = getReturnPage(self.request.session.get('SITE_HISTORY', {}))
if returnPage.get('url'):
return HttpResponseRedirect(returnPage['url'])
return HttpResponseRedirect(reverse('registration'))
def form_invalid(self, form):
logger.error('Invalid request for at-the-door payment.')
return HttpResponseBadRequest(str(form.errors))
|
bsd-3-clause
| -1,268,400,641,313,756,000 | 35.845238 | 82 | 0.684653 | false |
jmwright/cadquery-freecad-module
|
ThirdParty/cqparts_fasteners/utils/selector.py
|
1
|
1920
|
from .evaluator import Evaluator
class Selector(object):
"""
Facilitates the selection and placement of a *fastener's* components
based on an *evaluation*.
Each *selector* instance has an :class:`Evaluator` for reference,
and must have both the methods :meth:`get_components` and
:meth:`get_constraints` overridden.
"""
def __init__(self, evaluator, parent=None):
"""
:param evaluator: evaluator of fastener parts
:type evaluator: :class:`Evaluator`
:param parent: parent object
:type parent: :class:`Fastener <cqparts_fasteners.fasteners.base.Fastener>`
"""
self.evaluator = evaluator
self.parent = parent
self._components = None
self._constraints = None
# ---- Components
def get_components(self):
"""
Return fastener's components
.. note::
Must be overridden by inheriting class.
Read :ref:`cqparts_fasteners.build_cycle` to learn more.
:return: components for the *fastener*
:rtype: :class:`dict` of :class:`Component <cqparts.Component>` instances
"""
return {}
@property
def components(self):
if self._components is None:
self._components = self.get_components()
return self._components
# ---- Constraints
def get_constraints(self):
"""
Return fastener's constraints
.. note::
Must be overridden by inheriting class.
Read :ref:`cqparts_fasteners.build_cycle` to learn more.
:return: list of *constraints*
:rtype: :class:`list` of :class:`Constraint <cqparts.constraint.Constraint>` instances
"""
return []
@property
def constraints(self):
if self._constraints is None:
self._constraints = self.get_constraints()
return self._constraints
|
lgpl-3.0
| -2,852,328,245,811,442,000 | 26.826087 | 94 | 0.605729 | false |
mawimawi/datadjables
|
datadjables/views.py
|
1
|
6674
|
# coding: utf-8
from json import dumps
from django.http import HttpResponse
from django.utils.cache import add_never_cache_headers
from django.utils.safestring import mark_safe
from django.views.generic import TemplateView
from .internals import MetaDataDjable
class DataDjable(TemplateView):
__metaclass__ = MetaDataDjable
def __init__(self, max_rows_per_batch=500):
self.max_rows_per_batch = max_rows_per_batch
def thead(self):
"""Renders the table header as HTML"""
return mark_safe('<th>' + '</th><th>'.join(
[obj.coltitle for obj in self._meta.columns]) + '</th>')
def tfoot(self):
"""Renders the table footer as HTML with filter_labels"""
return mark_safe('<th>' + '</th><th>'.join(
[obj.filter_label for obj in self._meta.columns]) + '</th>')
def js_initial_ordering(self):
"""Returns the initial ordering of the DataDjable in a format
that jQuery-DataTables understands."""
        orderarray = []
        colnames = [col.colname for col in self._meta.columns]
        for strg in self.ordering:
            if strg.startswith('-'):
                order = 'desc'
            else:
                order = 'asc'
            # strip a leading '-' or '+' to get the bare column name
            strg = strg.split('-')[-1]
            strg = strg.split('+')[-1]
            orderarray.append((colnames.index(strg), order))
        return dumps(orderarray)
def js_data_columns(self):
"""Creates the javascript list of objects for initialization of
the jQuery-DataTable"""
return mark_safe(
'[' +
','.join([obj.js_data_column()
for obj in self._meta.columns]) +
']')
def result_data(self, queryset):
"""Returns a list of rows to be shown in the jQuery-DataTable tbody"""
return [self.get_row(obj) for obj in queryset]
def get_row(self, queryset_row):
"""Returns one row to be shown in the jQuery-DataTable tbody"""
return [x.dt_cell_content(queryset_row, view=self)
for x in self._meta.columns]
def js_columnfilter_init(self):
"""Returns the initialization JavaScript list for the
jQuery-DataTable.columnfilter plugin"""
result = [x.js_columnfilter_init() for x in self._meta.columns]
return mark_safe(dumps(result))
def order_queryset(self, queryset, request):
ordering = []
for sortcol in range(len(self._meta.columns)):
colnum = int(request.GET.get('iSortCol_%s' % sortcol, -1))
if colnum > -1 and self._meta.columns[colnum].sortable:
prefix = ''
if request.GET.get('sSortDir_%s' % sortcol) == 'desc':
prefix = '-'
ordering.append(
'%s%s' % (
prefix,
self._meta.columns[colnum].colname
))
return queryset.extra(order_by=ordering)
def filter_by_columns(self, queryset, request):
for colnum in range(len(self._meta.columns)):
expr = request.GET.get('sSearch_%s' % colnum, '').encode('utf-8')
column = self._meta.columns[colnum]
if expr and column.searchable:
queryset = column.filter(expr, queryset=queryset)
return queryset
def filter_fulltext(self, queryset, request):
"""global search over all "fulltext_search_columns" """
expr = request.GET.get('sSearch', '').encode('utf-8')
subqueries = None
if expr:
for column in self._meta.fulltext_search_columns:
q = column.filter(expr, self.base_query())
if q is not None:
if subqueries:
subqueries.append(q)
else:
subqueries = [q]
if subqueries:
subqueryset = subqueries[0]
for q in subqueries[1:]:
subqueryset = subqueryset | q
queryset = queryset & subqueryset
return queryset
def base_query(self, *args, **kwargs):
raise NotImplementedError(
"You need to define a base_query method which returns a QuerySet")
def get(self, request, *args, **kwargs):
# Should we send table data?
if request.GET.get('sEcho') is not None:
return self.ajax_response(request, *args, **kwargs)
else:
return super(DataDjable, self).get(
request, dtobj=self, *args, **kwargs)
def ajax_response(self, request, *args, **kwargs):
# check for weird input
queryset = self.base_query(*args, **kwargs)
echo = request.GET.get('sEcho', '0') # needed by datatables.js
# return max. 'self.max_rows_per_batch' rows
length = min(
int(request.GET.get('iDisplayLength', 50)),
self.max_rows_per_batch)
start = int(request.GET.get('iDisplayStart', 0))
end = start + length
# add additional columns which are not part of the ordinary model
for column in self._meta.columns:
if column.selector:
queryset = queryset.extra(
select={column.colname: column.selector})
# filter by all fulltext_search_columns
queryset = self.filter_fulltext(queryset, request)
# filter by individual columns
queryset = self.filter_by_columns(queryset, request)
# do the sorting
queryset = self.order_queryset(queryset, request)
# how many records are there?
total_records = total_display_records = queryset.count()
# slice it
queryset = queryset[start:end]
# return the finished queryset
jsonString = dumps({'sEcho': echo,
'iTotalRecords': total_records,
'iTotalDisplayRecords': total_display_records,
'aaData': self.result_data(queryset)}
)
response = HttpResponse(jsonString, mimetype="application/javascript")
add_never_cache_headers(response)
return response
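# Sketch of the (legacy, DataTables 1.9-style) server-side exchange handled by
# ajax_response above -- parameter and key names are the ones consumed in the
# code, values are illustrative:
#
#   request:  ?sEcho=1&iDisplayStart=0&iDisplayLength=50&sSearch=foo
#             &iSortCol_0=2&sSortDir_0=desc&sSearch_2=bar
#   response: {"sEcho": "1", "iTotalRecords": 7,
#              "iTotalDisplayRecords": 7, "aaData": [[...], ...]}
#
# (this implementation sets both totals to the filtered record count)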
class ModelDataDjable(DataDjable):
model = None
def base_query(self, *args, **kwargs):
return self.model.objects.all()
def get(self, request, *args, **kwargs):
if not self.model:
raise AttributeError(
"You need to define a ``model`` class property.")
return super(ModelDataDjable, self).get(request, *args, **kwargs)
|
mit
| -5,034,103,780,631,419,000 | 35.469945 | 78 | 0.568924 | false |
muxiaobai/CourseExercises
|
python/tensorflow/mnist/mnist-softmax.py
|
1
|
1963
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import tensorflow as tf
import input_data
mnist = input_data.read_data_sets("MNIST_data/" , one_hot = True)
tf.set_random_seed(777) # reproducibility
X = tf.placeholder(tf.float32, shape= [None , 784])
Y = tf.placeholder(tf.float32, shape= [None , 10])
W = tf.Variable(tf.random_normal([784 , 10]))
b = tf.Variable(tf.random_normal([10]))
#hypothesis = tf.nn.softmax(tf.matmul(X,W) + b)  # first cost-function variant
hypothesis = tf.matmul(X,W) + b  # second cost-function variant (raw logits)
# cost function
#loss = tf.reduce_mean(- tf.reduce_sum(Y * tf.log(hypothesis),1))
# optimization --- gradient descent algorithm
#train = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
# cost function: cross-entropy is used as the loss; define cost/loss & optimizer
# two different optimization choices
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hypothesis, labels=Y))
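# (softmax_cross_entropy_with_logits applies softmax internally, i.e. it
# computes -sum(Y * log(softmax(hypothesis))) per example, which is why
# `hypothesis` is kept as raw logits above)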
optimizer = tf.train.AdamOptimizer(learning_rate=0.001)
train =optimizer.minimize(loss)
# initialize the variables
init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)
# fit the model
# parameters
training_epochs = 15
batch_size = 100
for epoch in range(training_epochs):
avg_cost = 0
total_batch = int(mnist.train.num_examples / batch_size)
for i in range(total_batch):
batch_xs, batch_ys = mnist.train.next_batch(batch_size)
c, _ = sess.run([loss, train], feed_dict={
X: batch_xs, Y: batch_ys})
avg_cost += c / total_batch
print('Epoch:', '%04d' % (epoch + 1),
'loss =', '{:.9f}'.format(avg_cost))
# evaluate the model
correct_prediction = tf.equal(tf.argmax(hypothesis,1), tf.argmax(Y,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
print ("Accuracy: ", accuracy.eval(session=sess, feed_dict={X: mnist.test.images, Y: mnist.test.labels}))
# accuracy with softmax cross-entropy: GradientDescentOptimizer(0.1) -> 0.888, AdamOptimizer(0.001) -> 0.9028
|
gpl-2.0
| 1,106,916,554,553,806,300 | 32.962963 | 105 | 0.677032 | false |
Gaurang033/Selenium2Library
|
src/Selenium2Library/keywords/_logging.py
|
1
|
1484
|
import os
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
try:
from robot.libraries.BuiltIn import RobotNotRunningError
except ImportError: # Support RF < 2.8.5
RobotNotRunningError = AttributeError
from .keywordgroup import KeywordGroup
class _LoggingKeywords(KeywordGroup):
# Private
def _debug(self, message):
logger.debug(message)
def _get_log_dir(self):
try:
variables = BuiltIn().get_variables()
logfile = variables['${LOG FILE}']
if logfile != 'NONE':
return os.path.dirname(logfile)
return variables['${OUTPUTDIR}']
except RobotNotRunningError:
return os.getcwd()
def _html(self, message):
logger.info(message, True, False)
def _info(self, message):
logger.info(message)
    def _log(self, message, level='INFO'):
        level = level.upper()
        if level == 'INFO':
            self._info(message)
        elif level == 'DEBUG':
            self._debug(message)
        elif level == 'WARN':
            self._warn(message)
        elif level == 'HTML':
            self._html(message)
def _log_list(self, items, what='item'):
msg = ['Altogether %d %s%s.' % (len(items), what, ['s',''][len(items)==1])]
for index, item in enumerate(items):
msg.append('%d: %s' % (index+1, item))
self._info('\n'.join(msg))
return items
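    # Example (illustrative): _log_list(['a', 'b']) logs
    #   "Altogether 2 items." followed by "1: a" and "2: b",
    # and returns the list unchanged.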
def _warn(self, message):
logger.warn(message)
|
apache-2.0
| -4,243,327,034,716,164,600 | 27 | 83 | 0.591644 | false |
vpp-dev/vpp
|
test/test_ip4_vrf_multi_instance.py
|
1
|
19307
|
#!/usr/bin/env python3
"""IP4 VRF Multi-instance Test Case HLD:
**NOTES:**
- higher number of pg-ip4 interfaces causes problems => only 15 pg-ip4 \
interfaces in 5 VRFs are tested
- jumbo packets in configuration with 15 pg-ip4 interfaces leads to \
problems too
**config 1**
- add 15 pg-ip4 interfaces
- configure 5 hosts per pg-ip4 interface
- configure 4 VRFs
- add 3 pg-ip4 interfaces per VRF
**test 1**
- send IP4 packets between all pg-ip4 interfaces in all VRF groups
**verify 1**
- check VRF data by parsing output of ip_fib_dump API command
- all packets received correctly in case of pg-ip4 interfaces in the same
VRF
- no packet received in case of pg-ip4 interfaces not in VRF
- no packet received in case of pg-ip4 interfaces in different VRFs
**config 2**
- reset 2 VRFs
**test 2**
- send IP4 packets between all pg-ip4 interfaces in all VRF groups
**verify 2**
- all packets received correctly in case of pg-ip4 interfaces in the same
VRF
- no packet received in case of pg-ip4 interfaces not in VRF
- no packet received in case of pg-ip4 interfaces in different VRFs
**config 3**
- add 1 of reset VRFs and 1 new VRF
**test 3**
- send IP4 packets between all pg-ip4 interfaces in all VRF groups
**verify 3**
- check VRF data by parsing output of ip_fib_dump API command
- all packets received correctly in case of pg-ip4 interfaces in the same
VRF
- no packet received in case of pg-ip4 interfaces not in VRF
- no packet received in case of pg-ip4 interfaces in different VRFs
**config 4**
- reset all created VRFs
**test 4**
- send IP4 packets between all pg-ip4 interfaces in all VRF groups
**verify 4**
- check VRF data by parsing output of ip_fib_dump API command
- all packets received correctly in case of pg-ip4 interfaces in the same
VRF
- no packet received in case of pg-ip4 interfaces not in VRF
- no packet received in case of pg-ip4 interfaces in different VRFs
"""
import unittest
import random
import socket
import scapy.compat
from scapy.packet import Raw
from scapy.layers.l2 import Ether, ARP
from scapy.layers.inet import IP, UDP
from framework import VppTestCase, VppTestRunner
from util import ppp
from vrf import VRFState
def is_ipv4_misc(p):
""" Is packet one of uninteresting IPv4 broadcasts? """
if p.haslayer(ARP):
return True
return False
class TestIp4VrfMultiInst(VppTestCase):
""" IP4 VRF Multi-instance Test Case """
@classmethod
def setUpClass(cls):
"""
Perform standard class setup (defined by class method setUpClass in
class VppTestCase) before running the test case, set test case related
variables and configure VPP.
"""
super(TestIp4VrfMultiInst, cls).setUpClass()
# Test variables
cls.hosts_per_pg = 5
cls.nr_of_vrfs = 5
cls.pg_ifs_per_vrf = 3
try:
# Create pg interfaces
cls.create_pg_interfaces(
range(cls.nr_of_vrfs * cls.pg_ifs_per_vrf))
# Packet flows mapping pg0 -> pg1, pg2 etc.
cls.flows = dict()
for i in range(len(cls.pg_interfaces)):
multiplicand = i // cls.pg_ifs_per_vrf
pg_list = [
cls.pg_interfaces[multiplicand * cls.pg_ifs_per_vrf + j]
for j in range(cls.pg_ifs_per_vrf)
if (multiplicand * cls.pg_ifs_per_vrf + j) != i]
cls.flows[cls.pg_interfaces[i]] = pg_list
# Packet sizes - jumbo packet (9018 bytes) skipped
cls.pg_if_packet_sizes = [64, 512, 1518]
# Set up all interfaces
for pg_if in cls.pg_interfaces:
pg_if.admin_up()
pg_if.generate_remote_hosts(cls.hosts_per_pg)
# Create list of VRFs
cls.vrf_list = list()
# Create list of reset VRFs
cls.vrf_reset_list = list()
# Create list of pg_interfaces in VRFs
cls.pg_in_vrf = list()
# Create list of pg_interfaces not in VRFs
cls.pg_not_in_vrf = [pg_if for pg_if in cls.pg_interfaces]
# Create mapping of pg_interfaces to VRF IDs
cls.pg_if_by_vrf_id = dict()
for i in range(cls.nr_of_vrfs):
vrf_id = i + 1
pg_list = [
cls.pg_interfaces[i * cls.pg_ifs_per_vrf + j]
for j in range(cls.pg_ifs_per_vrf)]
cls.pg_if_by_vrf_id[vrf_id] = pg_list
except Exception:
super(TestIp4VrfMultiInst, cls).tearDownClass()
raise
@classmethod
def tearDownClass(cls):
super(TestIp4VrfMultiInst, cls).tearDownClass()
def setUp(self):
"""
Clear trace and packet infos before running each test.
"""
super(TestIp4VrfMultiInst, self).setUp()
self.reset_packet_infos()
def tearDown(self):
"""
Show various debug prints after each test.
"""
super(TestIp4VrfMultiInst, self).tearDown()
def show_commands_at_teardown(self):
self.logger.info(self.vapi.ppcli("show ip fib"))
self.logger.info(self.vapi.ppcli("show ip4 neighbors"))
def create_vrf_and_assign_interfaces(self, count, start=1):
"""
Create required number of FIB tables / VRFs, put 3 pg-ip4 interfaces
to every FIB table / VRF.
:param int count: Number of FIB tables / VRFs to be created.
:param int start: Starting number of the FIB table / VRF ID. \
(Default value = 1)
"""
for i in range(count):
vrf_id = i + start
pg_if = self.pg_if_by_vrf_id[vrf_id][0]
dest_addr = pg_if.local_ip4n
dest_addr_len = 24
self.vapi.ip_table_add_del(is_add=1, table={'table_id': vrf_id})
self.logger.info("IPv4 VRF ID %d created" % vrf_id)
if vrf_id not in self.vrf_list:
self.vrf_list.append(vrf_id)
if vrf_id in self.vrf_reset_list:
self.vrf_reset_list.remove(vrf_id)
for j in range(self.pg_ifs_per_vrf):
pg_if = self.pg_if_by_vrf_id[vrf_id][j]
pg_if.set_table_ip4(vrf_id)
self.logger.info("pg-interface %s added to IPv4 VRF ID %d"
% (pg_if.name, vrf_id))
if pg_if not in self.pg_in_vrf:
self.pg_in_vrf.append(pg_if)
if pg_if in self.pg_not_in_vrf:
self.pg_not_in_vrf.remove(pg_if)
pg_if.config_ip4()
pg_if.configure_ipv4_neighbors()
self.logger.debug(self.vapi.ppcli("show ip fib"))
self.logger.debug(self.vapi.ppcli("show ip4 neighbors"))
def reset_vrf_and_remove_from_vrf_list(self, vrf_id):
"""
Reset required FIB table / VRF and remove it from VRF list.
:param int vrf_id: The FIB table / VRF ID to be reset.
"""
self.vapi.ip_table_flush(table={'table_id': vrf_id})
if vrf_id in self.vrf_list:
self.vrf_list.remove(vrf_id)
if vrf_id not in self.vrf_reset_list:
self.vrf_reset_list.append(vrf_id)
for j in range(self.pg_ifs_per_vrf):
pg_if = self.pg_if_by_vrf_id[vrf_id][j]
pg_if.unconfig_ip4()
if pg_if in self.pg_in_vrf:
self.pg_in_vrf.remove(pg_if)
if pg_if not in self.pg_not_in_vrf:
self.pg_not_in_vrf.append(pg_if)
self.logger.info("IPv4 VRF ID %d reset finished" % vrf_id)
self.logger.debug(self.vapi.ppcli("show ip fib"))
self.logger.debug(self.vapi.ppcli("show ip neighbors"))
self.vapi.ip_table_add_del(is_add=0, table={'table_id': vrf_id})
def create_stream(self, src_if, packet_sizes):
"""
Create input packet stream for defined interface using hosts list.
:param object src_if: Interface to create packet stream for.
:param list packet_sizes: List of required packet sizes.
:return: Stream of packets.
"""
pkts = []
src_hosts = src_if.remote_hosts
for dst_if in self.flows[src_if]:
for dst_host in dst_if.remote_hosts:
src_host = random.choice(src_hosts)
pkt_info = self.create_packet_info(src_if, dst_if)
payload = self.info_to_payload(pkt_info)
p = (Ether(dst=src_if.local_mac, src=src_host.mac) /
IP(src=src_host.ip4, dst=dst_host.ip4) /
UDP(sport=1234, dport=1234) /
Raw(payload))
pkt_info.data = p.copy()
size = random.choice(packet_sizes)
self.extend_packet(p, size)
pkts.append(p)
self.logger.debug("Input stream created for port %s. Length: %u pkt(s)"
% (src_if.name, len(pkts)))
return pkts
def create_stream_crosswise_vrf(self, src_if, vrf_id, packet_sizes):
"""
Create input packet stream for negative test for leaking across
different VRFs for defined interface using hosts list.
:param object src_if: Interface to create packet stream for.
:param int vrf_id: The FIB table / VRF ID where src_if is assigned.
:param list packet_sizes: List of required packet sizes.
:return: Stream of packets.
"""
pkts = []
src_hosts = src_if.remote_hosts
vrf_lst = list(self.vrf_list)
vrf_lst.remove(vrf_id)
for vrf in vrf_lst:
for dst_if in self.pg_if_by_vrf_id[vrf]:
for dst_host in dst_if.remote_hosts:
src_host = random.choice(src_hosts)
pkt_info = self.create_packet_info(src_if, dst_if)
payload = self.info_to_payload(pkt_info)
p = (Ether(dst=src_if.local_mac, src=src_host.mac) /
IP(src=src_host.ip4, dst=dst_host.ip4) /
UDP(sport=1234, dport=1234) /
Raw(payload))
pkt_info.data = p.copy()
size = random.choice(packet_sizes)
self.extend_packet(p, size)
pkts.append(p)
self.logger.debug("Input stream created for port %s. Length: %u pkt(s)"
% (src_if.name, len(pkts)))
return pkts
def verify_capture(self, pg_if, capture):
"""
Verify captured input packet stream for defined interface.
:param object pg_if: Interface to verify captured packet stream for.
:param list capture: Captured packet stream.
"""
last_info = dict()
for i in self.pg_interfaces:
last_info[i.sw_if_index] = None
dst_sw_if_index = pg_if.sw_if_index
for packet in capture:
try:
ip = packet[IP]
udp = packet[UDP]
payload_info = self.payload_to_info(packet[Raw])
packet_index = payload_info.index
self.assertEqual(payload_info.dst, dst_sw_if_index)
self.logger.debug("Got packet on port %s: src=%u (id=%u)" %
(pg_if.name, payload_info.src, packet_index))
next_info = self.get_next_packet_info_for_interface2(
payload_info.src, dst_sw_if_index,
last_info[payload_info.src])
last_info[payload_info.src] = next_info
self.assertIsNotNone(next_info)
self.assertEqual(packet_index, next_info.index)
saved_packet = next_info.data
# Check standard fields
self.assertEqual(ip.src, saved_packet[IP].src)
self.assertEqual(ip.dst, saved_packet[IP].dst)
self.assertEqual(udp.sport, saved_packet[UDP].sport)
self.assertEqual(udp.dport, saved_packet[UDP].dport)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
for i in self.pg_interfaces:
remaining_packet = self.get_next_packet_info_for_interface2(
i, dst_sw_if_index, last_info[i.sw_if_index])
self.assertIsNone(
remaining_packet,
"Port %u: Packet expected from source %u didn't arrive" %
(dst_sw_if_index, i.sw_if_index))
def verify_vrf(self, vrf_id):
"""
Check if the FIB table / VRF ID is configured.
:param int vrf_id: The FIB table / VRF ID to be verified.
:return: 1 if the FIB table / VRF ID is configured, otherwise return 0.
"""
ip_fib_dump = self.vapi.ip_route_dump(vrf_id)
vrf_exist = len(ip_fib_dump)
vrf_count = 0
for ip_fib_details in ip_fib_dump:
addr = ip_fib_details.route.prefix.network_address
found = False
for pg_if in self.pg_if_by_vrf_id[vrf_id]:
if found:
break
for host in pg_if.remote_hosts:
if str(addr) == host.ip4:
vrf_count += 1
found = True
break
if not vrf_exist and vrf_count == 0:
self.logger.info("IPv4 VRF ID %d is not configured" % vrf_id)
return VRFState.not_configured
elif vrf_exist and vrf_count == 0:
self.logger.info("IPv4 VRF ID %d has been reset" % vrf_id)
return VRFState.reset
else:
self.logger.info("IPv4 VRF ID %d is configured" % vrf_id)
return VRFState.configured
def run_verify_test(self):
"""
Create packet streams for all configured pg interfaces, send all \
prepared packet streams and verify that:
- all packets received correctly on all pg-ip4 interfaces assigned
to VRFs
- no packet received on all pg-ip4 interfaces not assigned to VRFs
:raise RuntimeError: If no packet captured on pg-ip4 interface assigned
to VRF or if any packet is captured on pg-ip4 interface not
assigned to VRF.
"""
# Test
# Create incoming packet streams for packet-generator interfaces
for pg_if in self.pg_interfaces:
pkts = self.create_stream(pg_if, self.pg_if_packet_sizes)
pg_if.add_stream(pkts)
# Enable packet capture and start packet sending
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# Verify
# Verify outgoing packet streams per packet-generator interface
for pg_if in self.pg_interfaces:
if pg_if in self.pg_in_vrf:
capture = pg_if.get_capture(remark="interface is in VRF")
self.verify_capture(pg_if, capture)
elif pg_if in self.pg_not_in_vrf:
pg_if.assert_nothing_captured(remark="interface is not in VRF",
filter_out_fn=is_ipv4_misc)
self.logger.debug("No capture for interface %s" % pg_if.name)
else:
raise Exception("Unknown interface: %s" % pg_if.name)
def run_crosswise_vrf_test(self):
"""
Create packet streams for every pg-ip4 interface in VRF towards all
pg-ip4 interfaces in other VRFs, send all prepared packet streams and \
verify that:
- no packet received on all configured pg-ip4 interfaces
:raise RuntimeError: If any packet is captured on any pg-ip4 interface.
"""
# Test
# Create incoming packet streams for packet-generator interfaces
for vrf_id in self.vrf_list:
for pg_if in self.pg_if_by_vrf_id[vrf_id]:
pkts = self.create_stream_crosswise_vrf(
pg_if, vrf_id, self.pg_if_packet_sizes)
pg_if.add_stream(pkts)
# Enable packet capture and start packet sending
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# Verify
# Verify outgoing packet streams per packet-generator interface
for pg_if in self.pg_interfaces:
pg_if.assert_nothing_captured(remark="interface is in other VRF",
filter_out_fn=is_ipv4_misc)
self.logger.debug("No capture for interface %s" % pg_if.name)
def test_ip4_vrf_01(self):
""" IP4 VRF Multi-instance test 1 - create 4 VRFs
"""
# Config 1
# Create 4 VRFs
self.create_vrf_and_assign_interfaces(4)
# Verify 1
for vrf_id in self.vrf_list:
self.assert_equal(self.verify_vrf(vrf_id),
VRFState.configured, VRFState)
# Test 1
self.run_verify_test()
self.run_crosswise_vrf_test()
def test_ip4_vrf_02(self):
""" IP4 VRF Multi-instance test 2 - reset 2 VRFs
"""
# Config 2
# Reset 2 VRFs
self.reset_vrf_and_remove_from_vrf_list(1)
self.reset_vrf_and_remove_from_vrf_list(2)
# Verify 2
for vrf_id in self.vrf_reset_list:
self.assert_equal(self.verify_vrf(vrf_id),
VRFState.reset, VRFState)
for vrf_id in self.vrf_list:
self.assert_equal(self.verify_vrf(vrf_id),
VRFState.configured, VRFState)
# Test 2
self.run_verify_test()
self.run_crosswise_vrf_test()
def test_ip4_vrf_03(self):
""" IP4 VRF Multi-instance 3 - add 2 VRFs
"""
# Config 3
# Add 1 of reset VRFs and 1 new VRF
self.create_vrf_and_assign_interfaces(1)
self.create_vrf_and_assign_interfaces(1, start=5)
# Verify 3
for vrf_id in self.vrf_reset_list:
self.assert_equal(self.verify_vrf(vrf_id),
VRFState.reset, VRFState)
for vrf_id in self.vrf_list:
self.assert_equal(self.verify_vrf(vrf_id),
VRFState.configured, VRFState)
# Test 3
self.run_verify_test()
self.run_crosswise_vrf_test()
def test_ip4_vrf_04(self):
""" IP4 VRF Multi-instance test 4 - reset 4 VRFs
"""
# Config 4
# Reset all VRFs (i.e. no VRF except VRF=0 configured)
for i in range(len(self.vrf_list)):
self.reset_vrf_and_remove_from_vrf_list(self.vrf_list[0])
# Verify 4
for vrf_id in self.vrf_reset_list:
self.assert_equal(self.verify_vrf(vrf_id),
VRFState.reset, VRFState)
vrf_list_length = len(self.vrf_list)
self.assertEqual(
vrf_list_length, 0,
"List of configured VRFs is not empty: %s != 0" % vrf_list_length)
# Test 4
self.run_verify_test()
self.run_crosswise_vrf_test()
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
|
apache-2.0
| -5,742,475,308,046,317,000 | 37.231683 | 79 | 0.565909 | false |
igogorek/allure-python
|
allure-pytest/test/fixtures/function_scope/fixtures_parametrization_test.py
|
1
|
3104
|
"""
>>> allure_report = getfixture('allure_report')
>>> assert_that(allure_report,
... all_of(
... has_property('test_cases', has_length(8)),
... has_property('test_groups', has_length(0))
... )) # doctest: +SKIP
"""
import pytest
@pytest.fixture(params=[True, False])
def parametrized_fixture(request):
assert request.param
def test_function_scope_parametrized_fixture(parametrized_fixture):
"""
>>> allure_report = getfixture('allure_report')
>>> for passed in [True, False]:
... assert_that(allure_report,
... has_test_case('test_function_scope_parametrized_fixture[{param}]'.format(param=passed),
... has_parameter('parametrized_fixture', str(passed)),
... has_container(allure_report,
... has_before('parametrized_fixture',
... with_status('passed' if passed else 'failed'),
... has_status_details(
... with_status_message('AssertionError')
... ) if not passed else anything()
... )
... )
... )
... )
"""
pass
@pytest.fixture(params=[True, False], ids=['param_true', 'param_false'])
def parametrized_fixture_with_ids(request):
return request.param
def test_function_scope_parametrized_fixture_with_ids(parametrized_fixture_with_ids):
"""
>>> allure_report = getfixture('allure_report')
>>> for param, ids in zip([True, False], ['param_true', 'param_false']):
... assert_that(allure_report,
... has_test_case('test_function_scope_parametrized_fixture_with_ids[{ids}]'.format(ids=ids),
... has_parameter('parametrized_fixture_with_ids', str(param))
... )
... )
"""
assert parametrized_fixture_with_ids
def test_two_function_scope_parametrized_fixture(parametrized_fixture, parametrized_fixture_with_ids):
"""
>>> allure_report = getfixture('allure_report')
>>> for param1 in [True, False]:
... for param2, ids2 in zip([True, False], ['param_true', 'param_false']):
... assert_that(allure_report,
... has_test_case('test_two_function_scope_parametrized_fixture[{param1}-{ids2}]'.format(
... param1=param1, ids2=ids2),
... all_of(has_parameter('parametrized_fixture', str(param1)),
... has_parameter('parametrized_fixture_with_ids', str(param2))
... )
... )
... )
"""
assert parametrized_fixture_with_ids and parametrized_fixture
|
apache-2.0
| -6,150,953,334,713,741,000 | 42.732394 | 122 | 0.479059 | false |
lmazuel/azure-sdk-for-python
|
azure-batch/azure/batch/models/pool_get_options.py
|
1
|
3326
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PoolGetOptions(Model):
"""Additional parameters for get operation.
:param select: An OData $select clause.
:type select: str
:param expand: An OData $expand clause.
:type expand: str
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
:param if_match: An ETag value associated with the version of the resource
known to the client. The operation will be performed only if the
resource's current ETag on the service exactly matches the value specified
by the client.
:type if_match: str
:param if_none_match: An ETag value associated with the version of the
resource known to the client. The operation will be performed only if the
resource's current ETag on the service does not match the value specified
by the client.
:type if_none_match: str
:param if_modified_since: A timestamp indicating the last modified time of
the resource known to the client. The operation will be performed only if
the resource on the service has been modified since the specified time.
:type if_modified_since: datetime
:param if_unmodified_since: A timestamp indicating the last modified time
of the resource known to the client. The operation will be performed only
if the resource on the service has not been modified since the specified
time.
:type if_unmodified_since: datetime
"""
def __init__(self, select=None, expand=None, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None):
super(PoolGetOptions, self).__init__()
self.select = select
self.expand = expand
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
self.if_match = if_match
self.if_none_match = if_none_match
self.if_modified_since = if_modified_since
self.if_unmodified_since = if_unmodified_since
|
mit
| 2,216,408,968,951,889,700 | 47.911765 | 217 | 0.686109 | false |
mr-karan/Udacity-FullStack-ND004
|
Project3/udacityblog-159515/handlers/deletepost.py
|
1
|
1048
|
from google.appengine.ext import db
from handlers.blog import BlogHandler
from helpers import *
class DeletePostHandler(BlogHandler):
def get(self, post_id, post_user_id):
key = db.Key.from_path('Post', int(post_id), parent=blog_key())
post = db.get(key)
if not post:
return self.redirect('/login')
# To Check if user owns the post
if self.user and self.user.key().id() == post.user_id:
post.delete()
self.redirect('/')
elif not self.user:
self.redirect('/login')
        else:
            # the post exists but belongs to another user: show the permalink
            # page with an error message instead of deleting
            comments = db.GqlQuery(
                "select * from Comment where ancestor is :1 order by created desc limit 10", key)
            error = "You don't have permission to delete this post"
            self.render("permalink.html", post=post, comments=comments, error=error)
|
mit
| 8,692,391,542,979,496,000 | 32.806452 | 97 | 0.576336 | false |
isaac-s/cloudify-manager
|
tests/integration_tests/framework/suites_runner.py
|
1
|
5143
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import logging
import tempfile
import datetime
from multiprocessing import Process
from os import path
from os.path import dirname, abspath, join
from shutil import copyfile
import sh
import yaml
from utils import sh_bake
from integration_tests import resources
nosetests = sh_bake(sh.nosetests)
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger('suite_runner')
logging.getLogger('sh').setLevel(logging.INFO)
class SuiteRunner(object):
def __init__(self, descriptor, reports_dir):
self.groups = descriptor.split('#')
resources_path = path.dirname(resources.__file__)
self.integration_tests_dir = dirname(abspath(resources_path))
self.reports_dir = reports_dir
logger.info('SuiteRunner config: '
'[groups: {0}, tests_dir: {1}, reports_dir: {2}]'.
format(self.groups,
self.integration_tests_dir,
self.reports_dir))
with open(join(
self.integration_tests_dir, 'suites', 'suites.yaml')) as f:
logger.debug('Loading suites_yaml..')
            self.suites_yaml = yaml.safe_load(f)
if not os.path.isdir(reports_dir):
logger.debug('Creating logs dir {0}..'.format(reports_dir))
os.makedirs(reports_dir)
else:
for report in os.listdir(self.reports_dir):
old_report = join(self.reports_dir, report)
logger.debug('Deleting old report {0}..'.format(old_report))
os.remove(old_report)
def run_all_groups(self):
proc = []
for group in self.groups:
logger.debug('Creating process for suite {0}..'.format(group))
p = Process(target=self.prepare_and_run_tests, args=(group, ))
p.start()
proc.append(p)
for p in proc:
p.join()
def prepare_and_run_tests(self, group):
logger.debug('Running suite {0}..'.format(group))
tests = []
testing_elements = group.split(',')
for testing_element in testing_elements:
if testing_element in self.suites_yaml:
# The element is a suite. extracting tests from it.
tests_in_suite = self.suites_yaml[testing_element]
tests += tests_in_suite
else:
tests.append(testing_element)
report_file = join(self.reports_dir,
'{0}-report.xml'.format(os.getpid()))
logger.debug('Running tests: {0}, report: {1}'
.format(tests, report_file))
tmp_dir = tempfile.mkdtemp()
logger.debug('Copying tests files into tmp dir: {0}'.format(tmp_dir))
os.chdir(join(self.integration_tests_dir, 'tests'))
for test_file in tests:
copyfile(test_file, os.path.join(tmp_dir,
os.path.basename(test_file)))
logger.debug('Copying __init__.py to tmp dir: {0}'.format(tmp_dir))
copyfile(os.path.join(os.path.dirname(test_file), '__init__.py'),
os.path.join(tmp_dir, '__init__.py'))
nosetests(tmp_dir,
verbose=True,
nocapture=True,
with_xunit=bool(os.environ.get('JENKINS_JOB')),
xunit_file=report_file).wait()
def main():
descriptor = sys.argv[1]
if os.environ.get('CFY_LOGS_PATH') and not os.environ.get('JENKINS_JOB'):
time = str(datetime.datetime.now()).split('.')[0]
time = time.replace(' ', '_').replace('-', '').replace(':', '')
os.environ['CFY_LOGS_PATH'] = os.path.join(
os.path.expanduser(os.environ['CFY_LOGS_PATH']),
'{0}_{1}'.format(time, descriptor))
resources_path = path.dirname(resources.__file__)
reports_dir = join(dirname(abspath(resources_path)), 'xunit-reports')
if len(sys.argv) > 2:
reports_dir = sys.argv[2]
suites_runner = SuiteRunner(descriptor, reports_dir)
if os.environ.get('CFY_LOGS_PATH'):
logger.info('manager logs will be saved at {0}'.format(
os.environ['CFY_LOGS_PATH']))
else:
logger.info('Saving manager logs is disabled by configuration, '
'to enable logs keeping, define "CFY_LOGS_PATH"')
suites_runner.run_all_groups()
if __name__ == '__main__':
main()
|
apache-2.0
| -9,000,244,277,455,029,000 | 36.268116 | 79 | 0.593039 | false |
Pappulab/localCIDER
|
localcider/backend/data/aminoacids.py
|
1
|
17145
|
"""
!--------------------------------------------------------------------------!
! LICENSE INFO: !
!--------------------------------------------------------------------------!
! This file is part of localCIDER. !
! !
! Copyright (C) 2014 - 2021 !
! The localCIDER development team (current and former contributors) !
! Alex Holehouse, James Ahad, Rahul K. Das. !
! !
! localCIDER was developed in the lab of Rohit Pappu at Washington !
! University in St. Louis. Please see the website for citation !
! information: !
! !
! http://pappulab.github.io/localCIDER/ !
! !
! For more information please see the Pappu lab website: !
! !
! http://pappulab.wustl.edu/ !
! !
! localCIDER is free software: you can redistribute it and/or modify !
! it under the terms of the GNU General Public License as published by !
! the Free Software Foundation, either version 3 of the License, or !
! (at your option) any later version. !
! !
! localCIDER is distributed in the hope that it will be useful, !
! but WITHOUT ANY WARRANTY; without even the implied warranty of !
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the !
! GNU General Public License for more details. !
! !
! You should have received a copy of the GNU General Public License !
! along with localCIDER. If not, see <http://www.gnu.org/licenses/>. !
!--------------------------------------------------------------------------!
! AUTHORSHIP INFO: !
!--------------------------------------------------------------------------!
! !
! MAIN AUTHOR: Alex Holehouse !
! !
!--------------------------------------------------------------------------!
Amino acids data
"""
# <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
# <>
# <> THREE-TO-ONE LETTER CODE TRANSLATION
# <>
# <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
THREE_TO_ONE = {'ALA':'A',
'CYS':'C',
'ASP':'D',
'GLU':'E',
'PHE':'F',
'GLY':'G',
'HIS':'H',
'ILE':'I',
'LYS':'K',
'LEU':'L',
'MET':'M',
'ASN':'N',
'PRO':'P',
'GLN':'Q',
'ARG':'R',
'SER':'S',
'THR':'T',
'VAL':'V',
'TRP':'W',
'TYR':'Y'}
ONE_TO_THREE = {'A':'ALA',
'C':'CYS',
'D':'ASP',
'E':'GLU',
'F':'PHE',
'G':'GLY',
'H':'HIS',
'I':'ILE',
'K':'LYS',
'L':'LEU',
'M':'MET',
'N':'ASN',
'P':'PRO',
'Q':'GLN',
'R':'ARG',
'S':'SER',
'T':'THR',
'V':'VAL',
'W':'TRP',
'Y':'TYR'}
TWENTY_AAs = ['R','H','K','D','E','S','T','N','Q','C','G','P','A','I','L','M','F','W','Y','V']
# Update on 0.1.11 gly went from black to green
# Aromatics became orange
#
#
DEFAULT_COLOR_PALETTE = {'A':'black',
'C':'black',
'D':'red',
'E':'red',
'F':'orange',
'G':'green',
'H':'green',
'I':'black',
'K':'blue',
'L':'black',
'M':'black',
'N':'green',
'P':'fuchsia',
'Q':'green',
'R':'blue',
'S':'green',
'T':'green',
'V':'black',
'W':'orange',
'Y':'orange'}
# <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
# <>
# <> HYDROPHOBICITY DEFINING FUNCTIONS
# <>
# <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
#
# KYTE-DOOLITTLE SCALES
#
# References
#
# Main hydrophobicity scale
# ...............................
#
# A simple method for displaying the hydropathic character of a protein.
# Kyte J, Doolittle RF. J Mol Biol. 1982 May 5;157(1):105-32.
#
#
#
# 0 to 1 normalized hydrophobicity scale
# ...............................................
#
# Why are "natively unfolded" proteins unstructured under physiological conditions?
# Vladimir N. Uversky, Joel R. Gillespie, and Anthony L. Fink
# Proteins: Structure, Function, and Genetics 41:415-427 (2000)
#
def get_KD_original():
""" Function which returns the original KD hydropathy lookup table
"""
return {'ILE': 4.5,
'VAL': 4.2,
'LEU': 3.8,
'PHE': 2.8,
'CYS': 2.5,
'MET': 1.9,
'ALA': 1.8,
'GLY': -0.4,
'THR': -0.7,
'SER': -0.8,
'TRP': -0.9,
'TYR': -1.3,
'PRO': -1.6,
'HIS': -3.2,
'GLU': -3.5,
'GLN': -3.5,
'ASP': -3.5,
'ASN': -3.5,
'LYS': -3.9,
'ARG': -4.5}
def get_residue_charge():
""" Function which returns the original KD hydropathy lookup table
"""
return {'ILE': 0,
'VAL': 0,
'LEU': 0,
'PHE': 0,
'CYS': 0,
'MET': 0,
'ALA': 0,
'GLY': 0,
'THR': 0,
'SER': 0,
'TRP': 0,
'TYR': 0,
'PRO': 0,
'HIS': 0,
'GLU': -1,
'GLN': 0,
'ASP': -1,
'ASN': 0,
'LYS': 1,
'ARG': 1}
def get_KD_shifted():
"""
Function which returns the shifted KD hydropathy lookup table (such that
it runs from 0 to 9 instead of -4.5 to 4.5)
"""
original = get_KD_original()
shifted = {}
for i in original:
shifted[i] = original[i]+4.5
return shifted
""" Should look like this!
{'ALA': 6.3,
'ARG': 0.0,
'ASN': 1.0,
'ASP': 1.0,
'CYS': 7.0,
'GLN': 1.0,
'GLU': 1.0,
'GLY': 4.1,
'HIS': 1.3,
'ILE': 9.0,
'LEU': 8.3,
'LYS': 0.6,
'MET': 6.4,
'PHE': 7.3,
'PRO': 2.9,
'SER': 3.7,
'THR': 3.8,
'TRP': 3.6,
'TYR': 3.2,
'VAL': 8.7
}
"""
def get_KD_uversky():
"""
Returns a 0-to-1 normalized KD scale
"""
shifted = get_KD_shifted()
uversky = {}
for i in shifted:
uversky[i] = shifted[i]/9.0
return uversky
def get_WW_original():
""" Function which returns the original Wimley and White hydropathy lookup table
"""
return {'ILE': 0.31,
'VAL': -0.07,
'LEU': 0.56,
'PHE': 1.13,
'CYS': 0.24,
'MET': 0.23,
'ALA': -0.17,
'GLY': -0.01,
'THR': -0.14,
'SER': -0.13,
'TRP': 1.85,
'TYR': 0.94,
'PRO': -0.45,
'HIS': -0.96,
'GLU': -2.02,
'GLN': -0.58,
'ASP': -1.23,
'ASN': -0.42,
'LYS': -0.99,
'ARG': -0.81}
def get_PPII_Hilser():
"""
Returns an amino acid dictionary with the PPII propensity of
each residue as calculated by Elam et al [1] (Taken from [2]).
[1] - Elam WA, Schrank TP, Campagnolo AJ, Hilser VJ. Evolutionary
conservation of the polyproline II conformation surrounding intrinsically
disordered phosphorylation sites.
Protein Sci. 2013; 22: 405 - 417. doi: 10.1002/pro.2217 PMID: 23341186
[2] - Tomasso, M. E., Tarver, M. J., Devarajan, D. & Whitten, S. T.
Hydrodynamic Radii of Intrinsically Disordered Proteins Determined
from Experimental Polyproline II Propensities.
PLoS Comput. Biol. 12, e1004686 (2016).
"""
return {'ILE': 0.39,
'VAL': 0.39,
'LEU': 0.24,
'PHE': 0.17,
'CYS': 0.25,
'MET': 0.36,
'ALA': 0.37,
'GLY': 0.13,
'THR': 0.32,
'SER': 0.24,
'TRP': 0.25,
'TYR': 0.25,
'PRO': 1.00,
'HIS': 0.20,
'GLU': 0.42,
'GLN': 0.53,
'ASP': 0.30,
'ASN': 0.27,
'LYS': 0.56,
'ARG': 0.38}
def get_PPII_Creamer():
"""
Returns an amino acid dictionary with the PPII propensity of
each residue as calculated by Rucker et al [1] (Taken from [2]).
    Note that Trp and Tyr do not have values reported by Rucker
    et al., so we followed the convention used by Tomasso et al.
and set both to the mean value (0.58)
[1] - Rucker, A.L., Pager, C.T., Campbell, M.N., Qualls, J.E.,
and Creamer, T.P. (2003). Host-guest scale of left-handed
polyproline II helix formation. Proteins 53, 68-75.
[2] - Tomasso, M. E., Tarver, M. J., Devarajan, D. & Whitten, S. T.
Hydrodynamic Radii of Intrinsically Disordered Proteins Determined
from Experimental Polyproline II Propensities.
PLoS Comput. Biol. 12, e1004686 (2016).
"""
return {'ILE': 0.50,
'VAL': 0.49,
'LEU': 0.58,
'PHE': 0.58,
'CYS': 0.55,
'MET': 0.55,
'ALA': 0.61,
'GLY': 0.58,
'THR': 0.53,
'SER': 0.58,
'TRP': 0.58,
'TYR': 0.58,
'PRO': 0.67,
'HIS': 0.55,
'GLU': 0.61,
'GLN': 0.66,
'ASP': 0.63,
'ASN': 0.55,
'LYS': 0.59,
'ARG': 0.61}
def get_PPII_Kallenbach():
"""
Returns an amino acid dictionary with the PPII propensity of
each residue as calculated by Shi et al [1] (Taken from [2]).
Note that Gly and Pro do not have values reported by Shi et al.,
    so we followed the convention used by Tomasso et al. and
set Gly = 0.5 and Pro = 1.0.
[1] - Shi, Z., Chen, K., Liu, Z., Ng, A., Bracken, W.C., and
Kallenbach, N.R. (2005). Polyproline II propensities from
GGXGG peptides reveal an anticorrelation with beta-sheet scales.
Proc. Natl. Acad. Sci. U. S. A. 102, 17964-17968.
[2] - Tomasso, M. E., Tarver, M. J., Devarajan, D. & Whitten, S. T.
Hydrodynamic Radii of Intrinsically Disordered Proteins Determined
from Experimental Polyproline II Propensities.
PLoS Comput. Biol. 12, e1004686 (2016).
"""
return {'ILE': 0.519,
'VAL': 0.743,
'LEU': 0.574,
'PHE': 0.639,
'CYS': 0.557,
'MET': 0.498,
'ALA': 0.818,
'GLY': 0.500,
'THR': 0.553,
'SER': 0.774,
'TRP': 0.764,
'TYR': 0.630,
'PRO': 1.000,
'HIS': 0.428,
'GLU': 0.684,
'GLN': 0.654,
'ASP': 0.552,
'ASN': 0.667,
'LYS': 0.581,
'ARG': 0.638}
def get_pKa():
"""
    Return pKa values for titratable residues. The values used here correspond
    to those from EMBOSS. In reality, pKa values are highly context-dependent,
    so these are 'model' values; the true value in the context of a given
    protein is likely to differ substantially.
"""
return {'C': 8.5,
'Y': 10.1,
'H': 6.5,
'E': 4.1,
'D': 3.9,
'K': 10.0,
'R': 12.5}
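

# A minimal illustrative sketch (not part of the original localCIDER data
# module; the function name is our own): estimate the fractional charge of a
# titratable residue at a given pH from the model pKa values above, via the
# Henderson-Hasselbalch relation.
def get_fractional_charge(one_letter_code, pH):
    """
    Returns the approximate fractional charge of a titratable residue
    at the given pH, using the model pKa values from get_pKa().
    """
    pKa = get_pKa()[one_letter_code]
    if one_letter_code in ('K', 'R', 'H'):
        # bases carry +1 when protonated
        return 1.0 / (1.0 + 10.0 ** (pH - pKa))
    # acids (C, Y, E, D) carry -1 when deprotonated
    return -1.0 / (1.0 + 10.0 ** (pKa - pH))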
def get_molecular_weight_Da():
"""
Returns a lookup table with the molecular weights of each
amino acid in Da
"""
return {'I': 131.2,
'V': 117.1,
'L': 131.2,
'F': 165.2,
'C': 121.2,
'M': 149.2,
'A': 89.1,
'G': 75.1,
'T': 119.1,
'S': 105.1,
'W': 204.2,
'Y': 181.2,
'P': 115.1,
'H': 155.2,
'E': 147.1,
'Q': 146.2,
'D': 133.1,
'N': 132.1,
'K': 146.2,
'R': 174.2,
}
""""
EMPTY = {'ILE':
'VAL':
'LEU':
'PHE':
'CYS':
'MET':
'ALA':
'GLY':
'THR':
'SER':
'TRP':
'TYR':
'PRO':
'HIS':
'GLU':
'GLN':
'ASP':
'ASN':
'LYS':
'ARG': }
"""
def buildTable():
return build_amino_acids_skeleton()
def build_amino_acids_skeleton():
"""
    The default hydrophobicity is an augmented Kyte-Doolittle scale where
    0 = most hydrophilic and 9 = most hydrophobic.

    This construction is slightly indirect, but it means we define the
    charge and hydrophobicity in one place (in separate functions) and
    then build the skeleton before appending those values to it.

    The advantage of this is that we can add further information to
    individual amino acids.
"""
# get a dictionary of 3 letter to charge
charge_Dict = get_residue_charge()
# get a dictionary of 3 letter to KD hydrophobicity
KD_Dict = get_KD_shifted()
PPII_Dict_Hilser = get_PPII_Hilser()
PPII_Dict_Creamer = get_PPII_Creamer()
PPII_Dict_Kallenbach = get_PPII_Kallenbach()
    # build the initial skeleton of amino acids
skeleton=[["Alanine", "ALA", "A"],
["Cysteine", "CYS", "C"],
["Aspartic_Acid", "ASP", "D"],
["Glutamic_Acid", "GLU", "E"],
["Phenylalanine", "PHE", "F"],
["Glycine", "GLY", "G"],
["Histidine", "HIS", "H"],
["Isoleucine", "ILE", "I"],
["Lysine", "LYS", "K"],
["Leucine", "LEU", "L"],
["Methionine", "MET", "M"],
["Asparagine", "ASN", "N"],
["Proline", "PRO", "P"],
["Glutamine", "GLN", "Q"],
["Arginine", "ARG", "R"],
["Serine", "SER", "S"],
["Threonine", "THR", "T"],
["Valine", "VAL", "V"],
["Tryptophan", "TRP", "W"],
["Tyrosine", "TYR", "Y"]]
# for each residue update with further information
for res in skeleton:
        # update this residue with the KD hydrophobicity
        res.append(KD_Dict[res[1]])

        # update this residue with the charge value
        res.append(charge_Dict[res[1]])
# update the residue with the PPII content
res.append(PPII_Dict_Hilser[res[1]])
res.append(PPII_Dict_Creamer[res[1]])
res.append(PPII_Dict_Kallenbach[res[1]])
# each residue is now defined by a list of length 8 which are
# [full name, 3 letter, one letter, Hydrophobicity, charge, PPII_Hilser, PPII_Creamer, PPII_Kallenbach]
return skeleton
def update_hydrophobicity(aalist, scale):
    """
    Function which lets you update the hydrophobicity values of the
    amino acid table with your own scale (keyed by three-letter code).
    """
    for i in aalist:
        i[3] = scale[i[1]]
    return aalist
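

# A minimal usage sketch (not part of the original module): build the default
# table, then replace the hydrophobicity column (index 3) with the
# Wimley-White scale defined above.
if __name__ == '__main__':
    table = buildTable()
    # each row: [name, three-letter code, one-letter code, hydrophobicity,
    #            charge, PPII_Hilser, PPII_Creamer, PPII_Kallenbach]
    print(table[0])
    ww_table = update_hydrophobicity(buildTable(), get_WW_original())
    print(ww_table[0])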
|
gpl-2.0
| 439,729,644,570,166,000 | 29.670841 | 107 | 0.384835 | false |
conda/kapsel
|
conda_kapsel/internal/http_client.py
|
1
|
4769
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
from tornado import httpclient
from tornado import gen
import conda_kapsel.internal.makedirs as makedirs
import conda_kapsel.internal.rename as rename
import os
import hashlib
class FileDownloader(object):
def __init__(self, url, filename, hash_algorithm=None):
"""Downloader for the given url to the given filename, computing the given hash.
hash_algorithm is the name of a hash function in hashlib
"""
self._url = url
self._filename = filename
self._hash_algorithm = hash_algorithm
self._hash = None
self._client = None
self._errors = []
@gen.coroutine
def run(self, io_loop):
"""Run the download on the given io_loop."""
assert self._client is None
dirname = os.path.dirname(self._filename)
try:
makedirs.makedirs_ok_if_exists(dirname)
except Exception as e:
self._errors.append("Could not create directory '%s': %s" % (dirname, e))
raise gen.Return(None)
if self._hash_algorithm is not None:
hasher = getattr(hashlib, self._hash_algorithm)()
self._client = httpclient.AsyncHTTPClient(
io_loop=io_loop,
max_clients=1,
# without this we buffer a huge amount
# of stuff and then call the streaming_callback
# once.
max_buffer_size=1024 * 1024,
# without this we 599 on large downloads
max_body_size=100 * 1024 * 1024 * 1024,
force_instance=True)
tmp_filename = self._filename + ".part"
try:
_file = open(tmp_filename, 'wb')
except EnvironmentError as e:
self._errors.append("Failed to open %s: %s" % (tmp_filename, e))
raise gen.Return(None)
def cleanup_tmp():
try:
_file.close()
# future: we could save it in order to try
# resuming a failed download midstream, but
# pointless until the download code above
# knows how to resume.
os.remove(tmp_filename)
except EnvironmentError:
pass
def writer(chunk):
if len(self._errors) > 0:
return
if self._hash_algorithm is not None:
hasher.update(chunk)
try:
_file.write(chunk)
except EnvironmentError as e:
# we can't actually throw this error or Tornado freaks out, so instead
# we ignore all future chunks once we have an error, which does mean
# we continue to download bytes that we don't use. yuck.
self._errors.append("Failed to write to %s: %s" % (tmp_filename, e))
try:
timeout_in_seconds = 60 * 10 # pretty long because we could be dealing with huge files
request = httpclient.HTTPRequest(url=self._url,
streaming_callback=writer,
request_timeout=timeout_in_seconds)
try:
response = yield self._client.fetch(request)
except Exception as e:
self._errors.append("Failed download to %s: %s" % (self._filename, str(e)))
raise gen.Return(None)
# assert fetch() was supposed to throw the error, not leave it here unthrown
assert response.error is None
if len(self._errors) == 0:
try:
_file.close() # be sure tmp_filename is flushed
rename.rename_over_existing(tmp_filename, self._filename)
except EnvironmentError as e:
self._errors.append("Failed to rename %s to %s: %s" % (tmp_filename, self._filename, str(e)))
if len(self._errors) == 0 and self._hash_algorithm is not None:
self._hash = hasher.hexdigest()
raise gen.Return(response)
finally:
cleanup_tmp()
@property
def hash(self):
"""Hash of the downloaded file if we succeeded in downloading it, None if we failed."""
return self._hash
@property
def errors(self):
"""List of errors if we failed to download, empty list if we succeeded."""
return self._errors
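

# A minimal usage sketch (not part of the original module; the URL and
# destination path below are hypothetical):
#
#   from tornado.ioloop import IOLoop
#
#   loop = IOLoop.current()
#   downloader = FileDownloader("https://example.com/data.tar.gz",
#                               "/tmp/data.tar.gz", hash_algorithm="sha256")
#   loop.run_sync(lambda: downloader.run(loop))
#   if downloader.errors:
#       print(downloader.errors)
#   else:
#       print("sha256:", downloader.hash)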
|
bsd-3-clause
| 3,186,103,736,906,866,000 | 36.84127 | 113 | 0.545512 | false |
chipmuenk/audio_control
|
SPDIF.py
|
1
|
25110
|
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 14 16:12:08 2015
@author: tobia
"""
from __future__ import print_function, division, absolute_import
from PyQt4 import QtGui, QtCore, uic
#from PyQt4.QtGui import qApp
import PyQt4.Qwt5 as Qwt
import I2C_Daten
#==============================================================================
# PYTHON IMPORTS
#==============================================================================
import sys, pyaudio, numpy, threading, serial
#==============================================================================
# DEFINES
#==============================================================================
class SPDIF:
def __init__(self):
        # Initialize variables:
        self.RATE = 44100
        self.BUFFERSIZE = 1024
        self.secToRecord = .001
        self.threadDieNow = False  # terminates the thread when True
        self.newAudio = False  # indicates whether a new audio time interval is available
        self.bThreadStarted = False  # indicates whether the thread was started (see method countClick)
        self.channels = 0  # number of channels
        self.anzeige = 1  # selected display mode (single channel, difference, XY)
        self.stream_open = False  # indicates whether the audio stream could be opened

        # Initialize the QWT objects:
uiplot.comboBox_channels.addItem(str(1))
uiplot.comboBox_channels.addItem(str(2))
uiplot.comboBox_channels.setCurrentIndex(1)
uiplot.spinBox_Ymin.setRange(-10000,10000)
uiplot.spinBox_Ymax.setRange(-10000,10000)
uiplot.spinBox_Ymin.setValue(-10000)
uiplot.spinBox_Ymax.setValue(10000)
uiplot.radioButton_Einkanal.setChecked(1)
        # Program calls:
        self.setupAudio()  # builds a list of available audio sources
        self.continuousStart()  # prepares the thread that renders the audio signal
uiplot.connect(uiplot.timer, QtCore.SIGNAL('timeout()'), self.plotSignal)
uiplot.show()
def setup(self): #SPDIF
"""
        Initialize the sound card; source: http://www.swharden.com/blog/ (link from 27.08.2015)
"""
self.channels = int(uiplot.comboBox_channels.currentText())
self.buffersToRecord = int(self.RATE*self.secToRecord/self.BUFFERSIZE)
if 0 == self.buffersToRecord:
self.buffersToRecord = 1
self.samplesToRecord = int(self.BUFFERSIZE*self.buffersToRecord)
self.chunksToRecord = int(self.samplesToRecord/self.BUFFERSIZE)
self.secPerPoint = 1.0/self.RATE
self.p = pyaudio.PyAudio()
try:
self.inStream = self.p.open(format=pyaudio.paInt16,channels=self.channels,rate=self.RATE,input=True,input_device_index=uiplot.comboBox_Audio_In.currentIndex(),frames_per_buffer=self.BUFFERSIZE)
self.stream_open = True
uiplot.statusBar().clear()
except:
print("Kein Audiodevice vorhanden")
uiplot.statusMessage("Kein Audiodevice vorhanden.")
self.stream_open = False
self.xsBuffer = numpy.arange(self.BUFFERSIZE)*self.secPerPoint
self.xs = numpy.arange(self.chunksToRecord*self.BUFFERSIZE)*self.secPerPoint
self.audio = numpy.empty((self.chunksToRecord*self.BUFFERSIZE*self.channels),dtype=numpy.int16)
def close(self): #SPDIF
"""cleanly back out and release sound card."""
if self.stream_open:
self.p.close(self.inStream)
self.threadDieNow = True
self.tR._Thread__stop()
def readSignal(self): #SPDIF
"""
        Read the audio signal from the audio device
"""
AudioString = self.inStream.read(self.BUFFERSIZE)
return numpy.fromstring(AudioString, dtype=numpy.int16)
def record(self): #SPDIF
"""
        Thread: records a time interval of the audio signal.
"""
while True:
            self.lock.acquire()  # interrupted by the "Stop" event
if self.threadDieNow: break
try:
for i in range(self.chunksToRecord):
self.audio[i*self.BUFFERSIZE:(i+1)*self.BUFFERSIZE*self.channels] = self.readSignal()
uiplot.statusBar().clear()
except:
print("Kananlanzahl falsch")
uiplot.statusMessage("Kananlanzahl falsch.")
self.newAudio = True
            self.audio_l = self.audio[0::2]  # split into left and right channels
            self.audio_r = self.audio[1::2]
            self.audio_diff = self.audio_l - self.audio_r  # difference for the difference display
            self.lock.release()  # resumed by the "Start" event
def continuousStart(self): #SPDIF
"""
        Prepare the thread that renders the audio signal.
"""
self.tR = threading.Thread(target=self.record)
self.lock = threading.Lock()
self.tR.daemon = True
        # Click events:
        QtGui.QDialog.connect(uiplot.pushButton_Start, QtCore.SIGNAL("clicked()"), self.countClick)  # starts the real-time display of the audio signal
        QtGui.QDialog.connect(uiplot.pushButton_Stop, QtCore.SIGNAL("clicked()"), self.suspend)  # pauses the real-time display of the audio signal
uiplot.comboBox_channels.currentIndexChanged.connect(self.setup)
QtGui.QDialog.connect(uiplot.radioButton_Einkanal, QtCore.SIGNAL("clicked()"), lambda: self.auswahlAnzeige(1))
QtGui.QDialog.connect(uiplot.radioButton_Diff, QtCore.SIGNAL("clicked()"), lambda: self.auswahlAnzeige(2))
QtGui.QDialog.connect(uiplot.radioButton_XY, QtCore.SIGNAL("clicked()"), lambda: self.auswahlAnzeige(3))
def suspend(self): #SPDIF
'''
        The thread is paused, the audio source selection is enabled, and the Stop button is disabled
'''
self.lock.acquire()
uiplot.comboBox_Audio_In.setEnabled(True)
uiplot.comboBox_channels.setEnabled(True)
uiplot.pushButton_Stop.setDisabled(True)
uiplot.pushButton_Start.setEnabled(True)
print(self.audio_l)
print(self.audio_r)
print(self.audio_diff)
def countClick(self): #SPDIF
"""
        Starts the "record" thread on the first click on "Start"
        Resumes the "record" thread on every further click on "Start"
"""
if False == self.bThreadStarted:
self.bThreadStarted = True
self.setup()
if self.stream_open:
self.tR.start()
uiplot.statusBar().clear()
else:
print("Audio Thread konnte nicht gestartet werden")
uiplot.statusMessage("Audio Thread konnte nicht gestartet werden.")
uiplot.comboBox_Audio_In.setDisabled(True)
uiplot.comboBox_channels.setDisabled(True)
uiplot.pushButton_Start.setDisabled(True)
uiplot.pushButton_Stop.setEnabled(True)
#print(uiplot.comboBox_Audio_In.currentIndex())
else:
self.setup()
self.lock.release()
uiplot.comboBox_Audio_In.setDisabled(True)
uiplot.comboBox_channels.setDisabled(True)
uiplot.pushButton_Start.setDisabled(True)
uiplot.pushButton_Stop.setEnabled(True)
#print(uiplot.comboBox_Audio_In.currentIndex())
def setupAudio(self): #SPDIF
"""
        Builds a list of available audio sources and offers them for selection in a combo box
"""
deviceList = []
uiplot.comboBox_Audio_In.clear()
self.p = pyaudio.PyAudio() # instantiate PyAudio, start PortAudio system + list devices
try:
defaultInIdx = self.p.get_default_input_device_info()['index']
#print("Defaultin", defaultInIdx)
for i in range(self.p.get_device_count()):
deviceList.append(self.p.get_device_info_by_index(i))
#print (deviceList[i])
if deviceList[i]['maxInputChannels'] > 0:
if i == defaultInIdx:
uiplot.comboBox_Audio_In.addItem('* '+deviceList[i]['name'], str(i))
defaultInBoxIdx = uiplot.comboBox_Audio_In.currentIndex()
else:
uiplot.comboBox_Audio_In.addItem(deviceList[i]['name'], str(i))
uiplot.comboBox_Audio_In.setCurrentIndex(defaultInBoxIdx)
except:
print("Kein Audio Eingang verfuegbar")
def auswahlAnzeige(self, anz):
"""
        Indicates which display mode was selected (single channel, difference, XY)
"""
if 1 == anz:
self.anzeige = 1 #Einkanal
elif 2 == anz:
self.anzeige = 2 #Differenz
elif 3 == anz:
self.anzeige = 3 #XY
else:
print("Anzeige fehlerhaft")
def plotSignal(self):
"""
        Plots the signal read from the audio device in the GUI
"""
if False == s.newAudio:
return
if 1 == self.anzeige:
uiplot.qwtPlot_Zeitsignal.setAxisScale(uiplot.qwtPlot_Zeitsignal.xBottom,0,0.025)
c.setData(self.xs,self.audio_l)
elif 2 == self.anzeige:
uiplot.qwtPlot_Zeitsignal.setAxisScale(uiplot.qwtPlot_Zeitsignal.xBottom,0,0.025)
c.setData(self.xs,self.audio_diff)
elif 3 == self.anzeige:
uiplot.qwtPlot_Zeitsignal.setAxisScale(uiplot.qwtPlot_Zeitsignal.xBottom,uiplot.spinBox_Ymin.value(),uiplot.spinBox_Ymax.value())
c.setData(self.audio_l,self.audio_r)
else:
print("Anzeigeauswahl fehlerhaft")
uiplot.qwtPlot_Zeitsignal.setAxisScale(uiplot.qwtPlot_Zeitsignal.yLeft,uiplot.spinBox_Ymin.value(),uiplot.spinBox_Ymax.value())
uiplot.qwtPlot_Zeitsignal.replot()
s.newAudio = False
class I2C:
def __init__(self): #I2C
"""
        Initialize I2C for the ELV USB/I2C interface
        Scan for reachable COM ports
"""
        # Initialize variables:
        self.BAUDRATE_I2C = 115200  # settings of the ELV USB/I2C interface according to the manual
        self.BAUDRATE_UART = 9600
        self.OpenPort = False  # indicates whether the COM port is open
        self.threadDieNow = False  # terminates the thread when True
        self.Kanal = 1  # channel selection
        self.Uebertragung = 1  # transfer mode (I2C, UART)

        # Initialize the QWT objects:
uiplot.comboBox_Abfragerate.addItem(str(0.01))
uiplot.comboBox_Abfragerate.addItem(str(0.1))
uiplot.comboBox_Abfragerate.addItem(str(1.0))
uiplot.comboBox_Abfragerate.addItem(str(2.0))
uiplot.comboBox_Abfragerate.setCurrentIndex(3)
uiplot.radioButton_I2C.setChecked(1)
uiplot.radioButton_Kanal1.setChecked(1)
uiplot.radioButton_MUX1.setChecked(1)
        # Search for COM ports:
ValidComPorts = self.serialScan()
if 0 == (len(ValidComPorts)):
uiplot.comboBox_COM.addItem("KEIN COM!")
else:
for i in range(len(ValidComPorts)):
uiplot.comboBox_COM.addItem(ValidComPorts[i][1])
com = uiplot.comboBox_COM.currentText()
if ("KEIN COM!" == com):
#print("Kein COM")
uiplot.statusMessage("Bitte COM Device anschliessen")
else:
self.serialPort()
uiplot.statusBar().clear()
if self.OpenPort:
            self.ser.write('Y41')  # configuration commands for the ELV interface, per the manual p. 11 tab. 3
self.ser.write('Y01')
self.continuousStart()
def serialScan(self):
"""
        Returns all reachable COM ports; mostly taken over from "Verstaerker_v3_3_3.py"
"""
ports = []
for i in range(256):
try:
self.ser = serial.Serial(i)
ports.append([i, self.ser.portstr])
self.ser.close()
except serial.SerialException:
pass
if(len(ports) == 0):
print("Es wurde kein COM-Port gefunden.")
uiplot.statusMessage("Es wurde kein COM-Port gefunden.")
else:
uiplot.statusBar().clear()
return ports
def serialPort(self):
"""
        serialPort opens the selected COM port; mostly taken over from "Verstaerker_v3_3_3.py"
"""
com = uiplot.comboBox_COM.currentText()
if False == self.OpenPort:
try:
if 1 == self.Uebertragung: #I2C
# test = self.ser.isOpen()
# print(test)
self.ser = serial.Serial(
port=str(com),
baudrate=self.BAUDRATE_I2C,
parity=serial.PARITY_EVEN,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS
)
self.OpenPort = True
# test = self.ser.isOpen()
# print(test)
# if False == test:
# try:
# self.ser.open()
# self.OpenPort = True
# except serial.SerialException:
# self.OpenPort = False
#print(self.OpenPort)
elif 2 == self.Uebertragung: #UART
self.ser = serial.Serial(
port=str(com),
baudrate=self.BAUDRATE_UART,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS
)
self.OpenPort = True
# if False == self.ser.isOpen():
# try:
# self.ser.open()
# self.OpenPort = True
# except serial.SerialException:
# self.OpenPort = False
uiplot.statusBar().clear()
except serial.SerialException:
print("COM kann nicht geoeffnet werden")
uiplot.statusMessage("COM kann nicht geoeffnet werden.")
else:
uiplot.statusMessage("COM ist bereits offen")
def requireData(self): #I2C
"""
        Thread for cyclic polling of data over I2C
"""
while True:
self.cond.acquire()
self.cond.wait(float(uiplot.comboBox_Abfragerate.currentText()))
if self.threadDieNow: break
if self.OpenPort:
#print("Anforderung Daten")
#print(uiplot.horizontalSlider_Volume.value())
self.readI2C()
else:
#print("COM Port nicht geoeffnet")
self.testGUI()
self.cond.release()
def continuousStart(self): #I2C
"""
        Prepare the thread for cyclic polling
        Handle click events for the MUX and volume settings
"""
self.tI2C = threading.Thread(target=self.requireData)
self.cond = threading.Condition()
self.tI2C.daemon = True
self.sendMUX1()
self.sendVolume()
self.tI2C.start()
QtGui.QDialog.connect(uiplot.pushButton_Open, QtCore.SIGNAL("clicked()"), self.serialPort)
#QtGui.QDialog.connect(uiplot.radioButton_I2C, QtCore.SIGNAL("clicked()"), lambda: self.auswahl_uebertragung(1))
#QtGui.QDialog.connect(uiplot.radioButton_seriell, QtCore.SIGNAL("clicked()"), lambda: self.auswahl_uebertragung(2))
QtGui.QDialog.connect(uiplot.radioButton_MUX1, QtCore.SIGNAL("clicked()"), self.sendMUX1)
QtGui.QDialog.connect(uiplot.radioButton_MUX2, QtCore.SIGNAL("clicked()"), self.sendMUX2)
QtGui.QDialog.connect(uiplot.radioButton_Kanal1, QtCore.SIGNAL("clicked()"), lambda: self.auswahl_kanal(1))
QtGui.QDialog.connect(uiplot.radioButton_Kanal2, QtCore.SIGNAL("clicked()"), lambda: self.auswahl_kanal(2))
QtGui.QDialog.connect(uiplot.horizontalSlider_Volume, QtCore.SIGNAL('valueChanged(int)'), self.sendVolume)
QtGui.QDialog.connect(uiplot.comboBox_COM, QtCore.SIGNAL('currentIndexChanged(int)'), self.serClose)
def serClose(self):
self.OpenPort=False
self.ser.close()
def sendMUX1(self): #I2C
"""
        Writes the MUX1 selection to I2C
"""
if self.OpenPort:
if 1 == self.Kanal:
                self.ser.write(I2C_Daten.MUX1_1)  # TODO: factor this out
elif 2 == self.Kanal:
self.ser.write(I2C_Daten.MUX1_2)
else:
print("Kanal fehlerhaft")
# else:
# print("COM nicht offen")
#print(self.Kanal)
def sendMUX2(self): #I2C
"""
        Writes the MUX2 selection to I2C
"""
if self.OpenPort:
if 1 == self.Kanal:
                self.ser.write(I2C_Daten.MUX2_1)  # TODO: factor this out
elif 2 == self.Kanal:
self.ser.write(I2C_Daten.MUX2_2)
else:
print("Kanal fehlerhaft")
# else:
# print("COM nicht offen")
def auswahl_kanal(self, kanal):
"""
        Indicates which channel was selected
"""
if 1 == kanal:
self.Kanal = 1
elif 2 == kanal:
self.Kanal = 2
else:
print("Kanal fehlerhaft")
def auswahl_uebertragung(self, uebertragung):
"""
        Indicates which transfer mode was selected
"""
if self.OpenPort:
self.ser.close()
self.OpenPort=False
if 1 == uebertragung:
self.Uebertragung = 1
elif 2 == uebertragung:
self.Uebertragung = 2
else:
print("Uebertragung fehlerhaft")
def sendVolume(self): #I2C
"""
        Writes the volume selection to I2C
"""
if self.OpenPort:
try:
ivalue = uiplot.horizontalSlider_Volume.value()*5
str_value_hex = str(hex(ivalue))
if ivalue > 250:
print("Ungueltiger Wert")
elif ivalue >= 20:
data = str_value_hex[2] + str_value_hex[3]
else:
data = '0' + str_value_hex[2]
if 1 == self.Kanal:
self.ser.write(I2C_Daten.VOLUME_1 + data + 'p')
elif 2 == self.Kanal:
self.ser.write(I2C_Daten.VOLUME_2 + data + 'p')
else:
print("Kanal fehlerhaft")
uiplot.statusBar().clear()
except:
print("Schreiben auf COM fehlgeschlagen")
uiplot.statusMessage("Schreiben auf COM fehlgeschlagen.")
# else:
# print("COM nicht offen")
def readI2C(self): #I2C
"""
        Reads the received data from I2C
"""
i = 0;
myList=[0 for j in range(9)]
if 1 == self.Kanal:
self.ser.write(I2C_Daten.CYCLIC_1)
elif 2 == self.Kanal:
self.ser.write(I2C_Daten.CYCLIC_2)
while self.ser.inWaiting() > 0:
try:
myList[i] = self.ser.read(2)
i = i + 1
except:
print("Index out of range")
self.writeGUI(myList)
def writeGUI(self, data): #I2C
'''
        Read the I2C bus and output the values to the GUI
'''
strStatus = str(data[0])
try:
iStatus = int(strStatus, 16)
except:
iStatus = "Err"
uiplot.lcdNumber_Status.display(iStatus)
strV_in = str(data[1] + data[2])
#print(strV_in)
try:
iV_in = int(strV_in, 16)
except:
iV_in = "Err"
#print(iV_in)
uiplot.lcdNumber_Vcc.display(iV_in)
strV_out = str(data[3] + data[4])
try:
iV_out = int(strV_out, 16)
except:
iV_out = "Err"
uiplot.lcdNumber_2_Ausgang.display(iV_out)
strI = str(data[5])
try:
iI = int(strI, 16)
except:
iI = "Err"
uiplot.lcdNumber_Strom.display(iI)
strTemp = str(data[6])
try:
iTemp = int(strTemp, 16)
except:
iTemp = "Err"
uiplot.lcdNumber_Temperatur.display(iTemp)
strVol = str(data[7])
try:
iVol = int(strVol, 16)
except:
iVol = "Err"
uiplot.lcdNumber_Volume.display(iVol)
strMUX = str(data[8])
try:
iMUX = int(strMUX, 16)
except:
iMUX = "Err"
uiplot.lcdNumber_MUX.display(iMUX)
def testGUI(self):
"""
        Test function in case no COM port is available
"""
uiplot.lcdNumber_2_Ausgang.display(1860)
uiplot.lcdNumber_Strom.display(1860)
uiplot.lcdNumber_Temperatur.display(1860)
uiplot.lcdNumber_Vcc.display(1860)
def close(self):
"""
        Close the serial port and terminate the thread
"""
if self.OpenPort:
self.ser.close()
self.threadDieNow = True
self.tI2C._Thread__stop()
class MyMainWindow(QtGui.QMainWindow):
def __init__(self, parent=None):
QtGui.QMainWindow.__init__(self, parent)
self.ui = uic.loadUi('GUI_AudioControl.ui',self)
self.qwtPlot_Zeitsignal.setAxisScale(self.qwtPlot_Zeitsignal.yLeft,-10000,10000)
self.Thermo_Ausgang.setRange(-100.0,100.0)
self.Thermo_Vcc.setRange(-100.0,100.0)
self.pushButton_Stop.setDisabled(True)
self.timer = QtCore.QTimer()
self.timer.start(1.0)
def statusMessage(self, message):
"""
Display a message in the statusbar.
"""
self.statusBar().showMessage(message)
#==============================================================================
# MAIN
#==============================================================================
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
uiplot = MyMainWindow()
    s = SPDIF()  # create the SPDIF instance
    bus = I2C()  # create the I2C instance
c = Qwt.QwtPlotCurve()
c.attach(uiplot.qwtPlot_Zeitsignal)
    code = app.exec_()  # run the Qt event loop until the user quits
s.close()
bus.close()
sys.exit(code)
|
gpl-2.0
| 2,810,089,096,417,922,000 | 39.758117 | 205 | 0.508046 | false |
mircealungu/Zeeguu-Core
|
tools/vis/generate-matrix-vis.py
|
1
|
1278
|
import numpy
import matplotlib.pylab as plt
def generate_matrix_visualization(known_words, bookmark_words):
m = []
for i in range(0,100):
m.append([])
for j in range (0, 100):
if (i*100+j) in bookmark_words:
m[i].append(0.65)
elif (i*100+j) in known_words:
m[i].append(0.4)
else:
m[i].append(0.2)
m.reverse()
# we need this next line to scale the color scheme
m[0][0]=0
matrix = numpy.matrix(m)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_aspect('equal')
    plt.imshow(matrix, interpolation='none')
    plt.set_cmap('hot')
    ax.axes.get_xaxis().set_ticks([])
    #plt.axis([0,10000,0,10000])
    plt.ylabel('Rank')
    plt.title('Encountered Ranked Words by User')
    plt.colorbar()
    plt.show()
# assume known words are in a words.txt file in the current folder
known_words = [ ]
bookmark_words = [ ]
learning_words = [ ]
with open ('words.txt') as f:
for line in f:
known_words.append(int (line))
with open ('bookmarkwords.txt') as f:
for line in f:
bookmark_words.append(int (line))
with open ('learning.txt') as f:
for line in f:
learning_words.append(int (line))
generate_matrix_visualization(known_words, learning_words)
|
mit
| -163,580,975,101,220,900 | 23.576923 | 63 | 0.613459 | false |
ers-devs/contact-app
|
SugarActivity.py
|
1
|
1561
|
'''
@author: cgueret
'''
from gi.repository import Gtk
from sugar3.activity import activity
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.activity.widgets import ActivityButton
from sugar3.activity.widgets import TitleEntry
from sugar3.activity.widgets import StopButton
from sugar3.activity.widgets import DescriptionItem
from View import View
from Controller import Controller
from Model import Model
class ContactActivity(activity.Activity):
def __init__(self, handle):
'''
Initialise the activity
'''
activity.Activity.__init__(self, handle)
# Create the model
model = Model()
# Create the view
view = View(model)
# Create the controller
controller = Controller(view, model)
# Sugar toolbar with the new toolbar redesign
toolbar_box = ToolbarBox()
activity_button = ActivityButton(self)
toolbar_box.toolbar.insert(activity_button, 0)
activity_button.show()
title_entry = TitleEntry(self)
toolbar_box.toolbar.insert(title_entry, -1)
title_entry.show()
description_item = DescriptionItem(self)
toolbar_box.toolbar.insert(description_item, -1)
description_item.show()
separator = Gtk.SeparatorToolItem()
separator.props.draw = False
separator.set_expand(True)
toolbar_box.toolbar.insert(separator, -1)
separator.show()
stop_button = StopButton(self)
toolbar_box.toolbar.insert(stop_button, -1)
stop_button.show()
self.set_toolbar_box(toolbar_box)
toolbar_box.show()
# Set the canvas
self.set_canvas(view.get_widget())
self.canvas.show()
|
apache-2.0
| -1,839,907,573,162,901,500 | 23.777778 | 51 | 0.737348 | false |
edarin/ENSAE_projects
|
ParallelizedPCA/OpVectorNorm.py
|
1
|
2580
|
# coding: utf8
#############################################
# This code takes three vectors as input: a_gpu, b_gpu and c_gpu.
# It computes the dot product of a_gpu and b_gpu and stores the
# result in c_gpu, which has size 1.
# The function is mainly used to compute the square of the
# 2-norm of a vector.
#
#############################################
import pycuda.driver as cuda
import pycuda.autoinit
from pycuda.compiler import SourceModule
from pycuda import driver, compiler
import pycuda.gpuarray as gpuarray
import numpy as np
import time
N_COL=100000
N_ROW=1
device = cuda.Device(0)
THREADS_PER_BLOCK = device.get_attributes()[1]
print('MAX_THREADS_PER_BLOCK:', THREADS_PER_BLOCK)
def Norm_squared(a_gpu, b_gpu, c_gpu,
N_COL = N_COL, N_ROW = N_ROW
):
kernel_code_template = """
        __global__ void dot(float *a, float *b, float *c) {
            __shared__ float temp[1024];
            int index = threadIdx.x + blockIdx.x * blockDim.x;
            // guard: the last block may launch threads past the end of the input
            if (index < %(N_COL)s)
                temp[threadIdx.x] = a[index] * b[index];
__shared__ int blockElt;
if (threadIdx.x == 0) {
if ((blockIdx.x + 1) * 1024<= %(N_COL)s)
blockElt = 1024;
else blockElt = fmodf(%(N_COL)s, 1024);
}
__syncthreads();
if (threadIdx.x == 0) {
                float sum = 0;  // accumulate in float to match the output type
                for (int i = 0; i < blockElt; i++)
                    sum += temp[i];
                atomicAdd(c, sum);
}
}
"""
kernel_code = kernel_code_template % {
'N_COL' : N_COL,
'N_ROW' : N_ROW
}
mod = compiler.SourceModule(kernel_code)
func = mod.get_function("dot")
dimGridx = int((N_COL+ 1024-1)/1024)
dimGridy= 1
blocksPerGrid = (dimGridx,dimGridy)
func(a_gpu, a_gpu, c_gpu, block=(1024,1,1), grid=blocksPerGrid)
a = np.random.randint(low = -10, high =10, size=N_COL, dtype= np.int32)
a = a.astype(np.float32)
#print(a)
a_gpu = cuda.mem_alloc(a.nbytes)
cuda.memcpy_htod(a_gpu, a)
c = np.zeros(1, dtype= np.float32)
#print(c)
c_gpu = cuda.mem_alloc(c.nbytes)
cuda.memcpy_htod(c_gpu, c)
start_time = time.time()
Norm_squared(a_gpu, a_gpu, c_gpu)
print(a)
# copy the results back to the host
cuda.memcpy_dtoh(c, c_gpu)
print("CUDA" , (time.time() - start_time))
print(c)
# print the results
start_time = time.time()
c_cpu = np.dot(a,a)
print("Pandas" , (time.time() - start_time))
print(c_cpu)
print(np.allclose(c_cpu,c))
|
gpl-3.0
| 6,778,039,217,496,820,000 | 23.093458 | 71 | 0.537447 | false |
frankyrumple/ope
|
client_tools/svc/OPEService.py
|
1
|
21514
|
import pythoncom
import win32serviceutil
import win32service
import win32event
import servicemanager
import socket
import time
import datetime
import sys
import os
import logging
import random
from win32com.shell import shell, shellcon
import ntsecuritycon
import win32security
import win32api
import win32gui
import win32ui
import win32con
import win32gui_struct
import win32ts
import win32process
import PIL
import pyscreenshot as ImageGrab
import ctypes
# TODO - Set recovery options for service so it restarts on failure
# Disable ALL nics if this is set
DISABLE_ALL_NICS = True
# Disable sshot if this is set
DISABLE_SSHOT = False
# Most event notification support lives around win32gui
GUID_DEVINTERFACE_USB_DEVICE = "{A5DCBF10-6530-11D2-901F-00C04FB951ED}"
LOG_FOLDER = os.path.join(shell.SHGetFolderPath(0, shellcon.CSIDL_COMMON_APPDATA, None, 0), "ope")
SCREEN_SHOTS_FOLDER = os.path.join(LOG_FOLDER, "screen_shots")
EVERYONE, domain, type = win32security.LookupAccountName("", "Everyone")
ADMINISTRATORS, domain, type = win32security.LookupAccountName("", "Administrators")
CURRENT_USER, domain, type = win32security.LookupAccountName("", win32api.GetUserName())
SYSTEM_USER, domain, type = win32security.LookupAccountName("", "System")
def show_cacls(filename):
print
print
for line in os.popen("cacls %s" % filename).read().splitlines():
print line
def make_data_folder(folder, allow_add_file=False):
try:
os.makedirs(folder)
except:
pass
# Set inheritance flags
flags = win32security.OBJECT_INHERIT_ACE | win32security.CONTAINER_INHERIT_ACE
# Set permissions on this folder so that it isn't viewable by students
sd = win32security.GetFileSecurity(folder, win32security.DACL_SECURITY_INFORMATION)
# Create the blank DACL and add our ACE's
dacl = win32security.ACL()
#dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, EVERYONE)
    # Setup a folder where sshot running as the student can add pics, but can't delete/modify anything
if allow_add_file == True:
dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_ADD_FILE, EVERYONE)
#dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ | ntsecuritycon.FILE_GENERIC_WRITE, CURRENT_USER)
dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, flags, ntsecuritycon.FILE_ALL_ACCESS, ADMINISTRATORS)
#dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_ALL_ACCESS, ADMINISTRATORS)
dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION_DS, flags, ntsecuritycon.FILE_ALL_ACCESS, SYSTEM_USER)
#dacl.AddAccessAllowedAce(win32security.ACL_REVISION, ntsecuritycon.FILE_ALL_ACCESS, SYSTEM_USER)
# Set our ACL
sd.SetSecurityDescriptorDacl(1, dacl, 0)
win32security.SetFileSecurity(folder, win32security.DACL_SECURITY_INFORMATION | win32security.UNPROTECTED_DACL_SECURITY_INFORMATION, sd)
#win32security.TreeSetNamedSecurityInfo(folder, win32security.SE_FILE_OBJECT, win32security.DACL_SECURITY_INFORMATION | win32security.UNPROTECTED_DACL_SECURITY_INFORMATION, None, None, sd, None)
def scanNics():
# May need to call this before calling this function so that COM works
#pythoncom.CoInitialize()
global DISABLE_ALL_NICS
system_nics = ["WAN Miniport (IP)", "WAN Miniport (IPv6)", "WAN Miniport (Network Monitor)",
"WAN Miniport (PPPOE)", "WAN Miniport (PPTP)", "WAN Miniport (L2TP)", "WAN Miniport (IKEv2)",
"WAN Miniport (SSTP)", "Microsoft Wi-Fi Direct Virtual Adapter", "Teredo Tunneling Pseudo-Interface",
"Microsoft Kernel Debug Network Adapter",
]
approved_nics = ["Realtek USB GbE Family Controller",]
if DISABLE_ALL_NICS == True:
approved_nics = []
logging.info("scanning for unauthorized nics...")
import win32com.client
strComputer = "."
objWMIService = win32com.client.Dispatch("WbemScripting.SWbemLocator")
objSWbemServices = objWMIService.ConnectServer(strComputer,"root\cimv2")
colItems = objSWbemServices.ExecQuery("Select * from Win32_NetworkAdapter")
for objItem in colItems:
if objItem.Name in approved_nics:
#logging.info("***Device found - on approved list: " + str(objItem.Name) + str(objItem.NetConnectionID))
dev_id = objItem.NetConnectionID
if dev_id:
logging.info(" ---> !!! Approved device !!!, enabling..." + str(dev_id))
cmd = "netsh interface set interface \"" + dev_id + "\" admin=ENABLED"
#print cmd
os.system(cmd)
else:
#print " ---> unauthorized, not plugged in..."
pass
continue
elif objItem.Name in system_nics:
#logging.info("***Device found - system nic - ignoring: " + str(objItem.Name))
continue
else:
#logging.info("***Device found :" + str(objItem.Name))
dev_id = objItem.NetConnectionID
if dev_id:
logging.info(" ---> !!! unauthorized !!!, disabling..." + str(dev_id))
cmd = "netsh interface set interface \"" + dev_id + "\" admin=DISABLED"
#print cmd
os.system(cmd)
else:
#print " ---> unauthorized, not plugged in..."
pass
continue
#print "========================================================"
#print "Adapter Type: ", objItem.AdapterType
#print "Adapter Type Id: ", objItem.AdapterTypeId
#print "AutoSense: ", objItem.AutoSense
#print "Availability: ", objItem.Availability
#print "Caption: ", objItem.Caption
#print "Config Manager Error Code: ", objItem.ConfigManagerErrorCode
#print "Config Manager User Config: ", objItem.ConfigManagerUserConfig
#print "Creation Class Name: ", objItem.CreationClassName
#print "Description: ", objItem.Description
#print "Device ID: ", objItem.DeviceID
#print "Error Cleared: ", objItem.ErrorCleared
#print "Error Description: ", objItem.ErrorDescription
#print "Index: ", objItem.Index
#print "Install Date: ", objItem.InstallDate
#print "Installed: ", objItem.Installed
#print "Last Error Code: ", objItem.LastErrorCode
#print "MAC Address: ", objItem.MACAddress
#print "Manufacturer: ", objItem.Manufacturer
#print "Max Number Controlled: ", objItem.MaxNumberControlled
#print "Max Speed: ", objItem.MaxSpeed
#print "Name: ", objItem.Name
#print "Net Connection ID: ", objItem.NetConnectionID
#print "Net Connection Status: ", objItem.NetConnectionStatus
#z = objItem.NetworkAddresses
#if z is None:
# a = 1
#else:
# for x in z:
# print "Network Addresses: ", x
#print "Permanent Address: ", objItem.PermanentAddress
#print "PNP Device ID: ", objItem.PNPDeviceID
#z = objItem.PowerManagementCapabilities
#if z is None:
# a = 1
#else:
# for x in z:
# print "Power Management Capabilities: ", x
#print "Power Management Supported: ", objItem.PowerManagementSupported
#print "Product Name: ", objItem.ProductName
#print "Service Name: ", objItem.ServiceName
#print "Speed: ", objItem.Speed
#print "Status: ", objItem.Status
#print "Status Info: ", objItem.StatusInfo
#print "System Creation Class Name: ", objItem.SystemCreationClassName
#print "System Name: ", objItem.SystemName
#print "Time Of Last Reset: ", objItem.TimeOfLastReset
class OPEService(win32serviceutil.ServiceFramework):
_svc_name_ = 'OPEService'
_svc_desplay_name_ = 'OPEService'
_svc_description_ = "Open Prison Education Service"
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
socket.setdefaulttimeout(60)
self.isAlive = True
# Setup data folders
make_data_folder(LOG_FOLDER)
make_data_folder(SCREEN_SHOTS_FOLDER, True)
# Setup logging
logging.basicConfig(
filename=os.path.join(LOG_FOLDER, 'ope-service.log'),
level=logging.DEBUG,
format='[ope-service] %(levelname)-7.7s %(message)s'
)
logging.info("service init")
# register for a device notification - we pass our service handle
# instead of a window handle.
filter = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE(
GUID_DEVINTERFACE_USB_DEVICE)
self.hdn = win32gui.RegisterDeviceNotification(self.ssh, filter,
win32con.DEVICE_NOTIFY_SERVICE_HANDLE)
# Override the base class so we can accept additional events.
def GetAcceptedControls(self):
# say we accept them all.
rc = win32serviceutil.ServiceFramework.GetAcceptedControls(self)
rc |= win32service.SERVICE_ACCEPT_PARAMCHANGE \
| win32service.SERVICE_ACCEPT_NETBINDCHANGE \
| win32service.SERVICE_CONTROL_DEVICEEVENT \
| win32service.SERVICE_ACCEPT_HARDWAREPROFILECHANGE \
| win32service.SERVICE_ACCEPT_POWEREVENT \
| win32service.SERVICE_ACCEPT_SESSIONCHANGE
return rc
# All extra events are sent via SvcOtherEx (SvcOther remains as a
# function taking only the first args for backwards compat)
def SvcOtherEx(self, control, event_type, data):
# This is only showing a few of the extra events - see the MSDN
# docs for "HandlerEx callback" for more info.
if control == win32service.SERVICE_CONTROL_DEVICEEVENT:
info = win32gui_struct.UnpackDEV_BROADCAST(data)
msg = "A device event occurred: %x - %s" % (event_type, info)
scanNics()
elif control == win32service.SERVICE_CONTROL_HARDWAREPROFILECHANGE:
msg = "A hardware profile changed: type=%s, data=%s" % (event_type, data)
elif control == win32service.SERVICE_CONTROL_POWEREVENT:
msg = "A power event: setting %s" % data
elif control == win32service.SERVICE_CONTROL_SESSIONCHANGE:
# data is a single elt tuple, but this could potentially grow
# in the future if the win32 struct does
msg = "Session event: type=%s, data=%s" % (event_type, data)
else:
msg = "Other event: code=%d, type=%s, data=%s" \
% (control, event_type, data)
logging.info("Event " + msg)
servicemanager.LogMsg(
servicemanager.EVENTLOG_INFORMATION_TYPE,
0xF000, # generic message
(msg, '')
)
def SvcStop(self):
self.isAlive = False
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
self.isAlive = True
logging.info("Service running")
servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE,
servicemanager.PYS_SERVICE_STARTED, (self._svc_name_, ''))
self.main()
# win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
# Write a stop message.
logging.info("Service Stopped")
servicemanager.LogMsg(
servicemanager.EVENTLOG_INFORMATION_TYPE,
servicemanager.PYS_SERVICE_STOPPED,
(self._svc_name_, '')
)
def runScreenShotApp3(self):
# Get the current security token
token = win32security.OpenProcessToken(win32process.GetCurrentProcess(),
win32security.TOKEN_ALL_ACCESS)
# Make a copy
#token2 = win32security.DuplicateToken(token)
token2 = win32security.DuplicateTokenEx(token,
win32security.SecurityImpersonation,
win32security.TOKEN_ALL_ACCESS,
win32security.TokenPrimary)
# Find the session id - we will grab the console/keyboard
#proc_id = win32process.GetCurrentProcessId()
#session_id = win32ts.ProcessIdToSessionId(proc_id)
session_id = win32ts.WTSGetActiveConsoleSessionId()
# Make this token target our session
win32security.SetTokenInformation(token2, win32security.TokenSessionId, session_id)
def runScreenShotApp(self):
global DISABLE_SSHOT
if DISABLE_SSHOT == True:
return
# Get the session id for the console
session_id = win32ts.WTSGetActiveConsoleSessionId()
if session_id == 0xffffffff:
# User not logged in right now?
logging.info("No console user")
return None
logging.info("Got Console: " + str(session_id))
# Login to the terminal service to get the user token for the console id
svr = win32ts.WTSOpenServer(".")
user_token = win32ts.WTSQueryUserToken(session_id)
logging.info("User Token " + str(user_token))
# Copy the token
user_token_copy = win32security.DuplicateTokenEx(user_token,
win32security.SecurityImpersonation,
win32security.TOKEN_ALL_ACCESS,
win32security.TokenPrimary)
# Put this token in the logged in session
win32security.SetTokenInformation(user_token_copy, win32security.TokenSessionId, session_id)
# Switch to the user
#win32security.ImpersonateLoggedOnUser(user_token)
#logging.info("Impersonating " + win32api.GetUserName())
# Run the screen shot app
app_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
#cmd = os.path.join(app_path, "sshot\\dist\\sshot.exe")
cmd = "c:\\programdata\\ope\\bin\\sshot.exe"
#cmd = "cmd.exe"
logging.info("Running sshot app " + cmd)
# Use win create process function
si = win32process.STARTUPINFO()
si.dwFlags = win32process.STARTF_USESHOWWINDOW
si.wShowWindow = win32con.SW_NORMAL
#si.lpDesktop = "WinSta0\Default"
si.lpDesktop = ""
try:
(hProcess, hThread, dwProcessId, dwThreadId) = win32process.CreateProcessAsUser(user_token_copy,
None, # AppName (really command line, blank if cmd line supplied)
"\"" + cmd + "\"", # Command Line (blank if app supplied)
None, # Process Attributes
None, # Thread Attributes
0, # Inherits Handles
win32con.NORMAL_PRIORITY_CLASS, # or win32con.CREATE_NEW_CONSOLE,
None, # Environment
os.path.dirname(cmd), # Curr directory
si) # Startup info
logging.info("Process Started: " + str(dwProcessId))
logging.info(hProcess)
except Exception as e:
logging.info("Error launching process: " + str(e))
#logging.info(os.system(cmd))
# Return us to normal security
#win32security.RevertToSelf()
# Cleanup
win32ts.WTSCloseServer(svr)
user_token.close()
user_token_copy.close()
return
def runScreenShotApp2(self):
console_id = win32ts.WTSGetActiveConsoleSessionId()
if console_id == 0xffffffff:
# User not logged in right now?
logging.info("No console user")
return None
dc = None
logging.info("Got console: " + str(console_id))
# Get processes running on this console
svr = win32ts.WTSOpenServer(".")
user_token = win32ts.WTSQueryUserToken(console_id)
logging.info("User Token " + str(user_token))
#hwnd = win32gui.GetDC(win32con.HWND_DESKTOP) # win32gui.GetDesktopWindow()
#dc = ctypes.windll.user32.GetDC(win32con.HWND_DESKTOP)
#logging.info("DC before impersonation " + str(dc))
#win32gui.ReleaseDC(win32con.HWND_DESKTOP, dc)
# Switch to the user
win32security.ImpersonateLoggedOnUser(user_token)
logging.info("Impersonating " + win32api.GetUserName())
app_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
cmd = os.path.join(app_path, "sshot\\dist\\sshot.exe")
logging.info("Running sshot app " + cmd)
logging.info(os.system(cmd))
#hwnd = ctypes.windll.user32.GetDC(win32con.HWND_DESKTOP)
#logging.info("HWND after impersonation " + str(hwnd))
# ps_list = win32ts.WTSEnumerateProcesses(svr, 1, 0)
# for ps in ps_list:
# logging.info("PS " + str(ps))
win32ts.WTSCloseServer(svr)
# Revert back to normal user
win32security.RevertToSelf()
user_token.close()
return
def grabScreenShot(self):
# Grab the screen shot and save it to the logs folder
# Get the hwnd for the current desktop window
try:
hwnd = win32gui.GetDesktopWindow()
#hwnd = self.getDesktopHWND()
l, t, r, b = win32gui.GetWindowRect(hwnd)
w = r - l
h = b - t
logging.info("SC - HWND " + str(hwnd) + " " + str(w) + "/" + str(h))
dc = win32gui.GetDC(win32con.HWND_DESKTOP)
logging.info("DC " + str(dc))
dcObj = win32ui.CreateDCFromHandle(dc)
drawDC = dcObj.CreateCompatibleDC()
logging.info("drawDC " + str(drawDC))
#cDC = dcObj.CreateCompatibleDC() # Do we need this since it is the desktop dc?
bm = win32ui.CreateBitmap()
bm.CreateCompatibleBitmap(dcObj, w, h)
drawDC.SelectObject(bm)
drawDC.BitBlt((0, 0), (w, h), dcObj, (0, 0), win32con.SRCCOPY)
bm.SaveBitmapFile(drawDC, os.path.join(SCREEN_SHOTS_FOLDER, "test.jpeg"))
win32gui.DeleteObject(bm.GetHandle())
drawDC.DeleteDC()
dcObj.DeleteDC()
win32gui.ReleaseDC(win32con.HWND_DESKTOP, dc)
# dc = win32gui.GetWindowDC(hwnd)
# logging.info("DC " + str(dc))
# dcObj = win32ui.CreateDCFromHandle(dc)
# logging.info("dcObj " + str(dcObj))
# cDC = dcObj.CreateCompatibleDC()
# logging.info("cDC " + str(cDC))
# bm = win32ui.CreateBitmap()
# logging.info("bm " + str(bm))
# bm.CreateCompatibleBitmap(dcObj, w, h)
# cDC.SelectObject(bm)
# r = cDC.BitBlt((0,0), (w,h), dcObj, (0,0), win32con.SRCCOPY)
# logging.info("bitblt " + str(r))
# bm.SaveBitmapFile(cDC, os.path.join(SCREEN_SHOTS_FOLDER, "test.jpeg"))
# dcObj.DeleteDC()
# cDC.DeleteDC()
# win32gui.ReleaseDC(hwnd, dc)
#win32gui.DeleteObject(bm.GetHandle())
except Exception as ex:
logging.info("Error grabbing screenshot: " + str(ex))
#m = ImageGrab.grab()
# Save the file
#p = os.path.join(SCREEN_SHOTS_FOLDER, str(datetime.datetime.now()) + ".png")
#im.save(p, optimize=True)
def main(self):
rc = None
nic_scan_time = 0
# Need this so scanNics doesn't fail
pythoncom.CoInitialize()
while rc != win32event.WAIT_OBJECT_0:
# TODO
#logging.info("Loop...")
# Grab screen shots
i = random.randint(0, 2)
if i == 1:
#self.grabScreenShot()
self.runScreenShotApp()
# Scan for inserted NICS
if time.time() - nic_scan_time > 60:
scanNics()
nic_scan_time = time.time()
# Grab event logs
# Grab firewall logs
# Run virus scanner
# Security checks - is current user the correct user?
# Is online?
# block for 24*60*60 seconds and wait for a stop event
# it is used for a one-day loop
rest = 5 # * 1000 # 24*60*60*1000
rc = win32event.WaitForSingleObject(self.hWaitStop, rest)
time.sleep(5)
# Cleanup
pythoncom.CoUninitialize()
if __name__ == '__main__':
if len(sys.argv) == 1:
servicemanager.Initialize()
servicemanager.PrepareToHostSingle(OPEService)
servicemanager.StartServiceCtrlDispatcher()
else:
win32serviceutil.HandleCommandLine(OPEService)
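
# Usage sketch (standard pywin32 service command line; run from an elevated
# prompt; the service registers under the _svc_name_ defined above):
#   python OPEService.py install   # register the service
#   python OPEService.py start     # start it
#   python OPEService.py stop      # stop it
#   python OPEService.py remove    # uninstall it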
|
mit
| -3,039,249,831,958,229,500 | 40.452794 | 198 | 0.599795 | false |
koraysaritas/slack-db-tracker
|
workers/slack_worker.py
|
1
|
9125
|
import datetime
import time
from slackclient import SlackClient
from helpers import config_helper
from helpers import slack_helper
from helpers import utils
from helpers import voltdb_helper
from helpers import timesten_helper
from helpers import altibase_helper
from helpers.store import SlackStore
from helpers.store import WorkerStore
def run(config, worker_name, verbose, queue_resource):
slack_store = SlackStore(config)
slack_store.verbose = verbose
maybe_workers = config_helper.get_supported_databases(config)
dict_workers = {worker_name: WorkerStore(config, worker_name)
for worker_name in maybe_workers
if config_helper.is_worker_active(config, worker_name)}
slack_store.slack_client = SlackClient(slack_store.slack_token)
slack_helper.join_channel(slack_store, slack_store.slack_channel_name)
while True:
try:
if slack_store.slack_client.rtm_connect():
print("{} connected to the {}. Polling messages. . .".format(worker_name,
slack_store.slack_channel_name))
while True:
try:
# read slack
userid, channel_name, has_my_hostname, target_hostname, maybe_command = parse_slack_output(
slack_store,
slack_store.slack_client.rtm_read())
# if there is a message to bot or bot's name mentioned
if channel_name and userid:
if slack_store.verbose:
print("Channel: {}, HasMyHostname: {} Maybe Command: {}".format(channel_name,
has_my_hostname,
maybe_command))
# message contains a db command
yes_handle, worker_store = should_handle_command(slack_store, dict_workers, userid,
channel_name, has_my_hostname,
maybe_command)
if yes_handle:
slack_helper.send_wait_message(slack_store, "slack", userid)
handle_command(slack_store, worker_store, userid, maybe_command, channel_name)
# the message is a command but some other worker needs to process it
yes_request = False
if not yes_handle:
yes_request, queue_message = should_request_command(config, slack_store,
has_my_hostname, maybe_command)
if yes_request:
slack_helper.send_wait_message(slack_store, "slack", userid)
queue_put_request(slack_store, queue_resource, queue_message, userid)
# send user a friendly message about available commands
if not any([yes_handle, yes_request]) \
and not is_valid_command(slack_store, maybe_command) \
and slack_store.slack_send_help_msg:
message = "{mention_text} not sure what you mean.\nCommands:\n" + \
slack_helper.get_avail_command_str(slack_store)
slack_helper.send_message_to_user(slack_store, message, "slack", userid)
except Exception as e:
ex = "Exception: {} @slack_client.rtm_read: {}".format(worker_name, e)
print(ex)
slack_store.slack_client.rtm_connect()
finally:
time.sleep(slack_store.seconds_sleep_after_slack_poll)
else:
print("Connection failed. Invalid Slack token or bot ID?")
except Exception as e:
ex = "Exception: {} @run: {}".format(worker_name, e)
print(ex)
finally:
time.sleep(slack_store.seconds_sleep_after_slack_poll)
def parse_slack_output(slack_store, slack_rtm_output):
output_list = slack_rtm_output
has_my_hostname = False
target_hostname = None
if output_list:
for output in output_list:
if output and 'text' in output:
if slack_store.verbose:
print("text: " + output['text'])
# return text after the @ mention, whitespace removed
as_mention = ""
has_bot = slack_store.slack_mention_bot in output['text']
has_my_hostname = slack_store.hostname in output['text']
target_hostname = parse_target_hostname(output['text'])
if has_bot:
as_mention = slack_store.slack_mention_bot
if not has_bot and slack_store.bot_cmd_start in output['text']:
has_bot = True
as_mention = slack_store.bot_cmd_start
if not has_bot and slack_store.slack_bot_name in output['text']:
has_bot = True
as_mention = slack_store.slack_bot_name
if has_bot and "bot_id" not in output:
userid = ""
try:
userid = output["user"]
except Exception as e:
ex = "Exception @parse_slack_output: %s" % e
print(ex)
return userid, output['channel'], has_my_hostname, target_hostname, \
output['text'].split(as_mention)[1].strip().lower()
return None, None, None, None, None
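# Illustrative output (hypothetical Slack IDs, not from a live workspace): a
# message "@bot omsdev1-voltdb-status" from user U123 in channel C456, seen by
# a worker whose hostname is "omsdev1", parses to roughly:
#   ('U123', 'C456', True, 'omsdev1', 'omsdev1-voltdb-status')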
def parse_target_hostname(output_text):
try:
# !kerata omsdev1-altibase-status
hostname_command = (output_text.split())[1] # omsdev1-altibase-status
maybe_hostname = hostname_command.split('-')[0] # omsdev1
return maybe_hostname
except Exception:
return None
def is_valid_command(slack_store, maybe_command):
return maybe_command and maybe_command in slack_store.dict_commands
def should_handle_command(slack_store, dict_workers, userid, channel_name, has_my_hostname, maybe_command):
if maybe_command in slack_store.dict_commands:
command_worker_name = slack_store.dict_commands[maybe_command]
if has_my_hostname and command_worker_name in dict_workers:
if dict_workers[command_worker_name].is_active:
return True, dict_workers[command_worker_name]
else:
pass # worker config does not exist in the current instance
return False, None
def handle_command(slack_store, worker_store, userid, command, channel_name):
cmd_parsed = utils.command_without_hostname(slack_store.hostname, command)
helper_func = None
if cmd_parsed == "voltdb-status":
helper_func = voltdb_helper.get_voltdb_status
elif cmd_parsed == "timesten-status":
helper_func = timesten_helper.get_timesten_status
elif cmd_parsed == "altibase-status":
helper_func = altibase_helper.get_altibase_status
if helper_func:
do_func(helper_func, slack_store, worker_store, userid, command, channel_name)
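# Example flow (hypothetical hostname): on host "omsdev1" the command
# "omsdev1-voltdb-status" is stripped to "voltdb-status" and dispatched to
# voltdb_helper.get_voltdb_status via do_func.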
def do_func(func, slack_store, worker_store, userid, command, channel_name):
error, result = func(worker_store)
if error:
msg = slack_helper.format_error_message(slack_store.hostname,
worker_store.worker_name,
datetime.datetime.now(),
result)
if slack_store.verbose:
print(msg)
slack_helper.send_message(slack_store, msg, worker_store.worker_name)
else:
msg = ""
for m in result:
msg += m + "\n"
if slack_store.verbose:
print("\n" + msg)
slack_helper.send_message(slack_store, msg, worker_store.worker_name)
def should_request_command(config, slack_store, has_my_hostname, queue_message):
if has_my_hostname and queue_message in slack_store.dict_commands:
maybe_resource = slack_store.dict_commands[queue_message]
queue_message = utils.command_without_hostname(slack_store.hostname, queue_message)
if maybe_resource == "resource" and config_helper.is_resource_notification_active(config, queue_message):
return True, queue_message
return False, None
def queue_put_request(slack_store, queue_resource, queue_message, userid):
queue_resource.put((queue_message, userid))
|
mit
| 5,229,936,567,187,545,000 | 47.026316 | 115 | 0.539836 | false |
crackwitz/videozeug
|
quaxlist.py
|
1
|
1081
|
#!/usr/bin/env python2
from __future__ import division
import os
import sys
import re
linerex = re.compile(r'([0-9]+)\.([0-9]+): (\d+)')
def parse_line(line):
m = linerex.match(line)
assert m
(secs, msecs, slide) = m.groups()
slide = int(slide)
# peculiarity in Walter Unger's script
# "0.90" is 0.090, not 0.900
time = int(secs) + int(msecs) * 0.001
return (time, slide)
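# Example of the millisecond quirk noted above:
#   parse_line("12.90: 3") -> (12.09, 3), not (12.9, 3)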
def pairs(s):
s = list(s)
return zip(s[:-1], s[1:])
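# Example: pairs([1, 2, 3]) -> [(1, 2), (2, 3)]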
def read_file(fileobj):
lines = map(parse_line, fileobj)
times = [u for u,v in lines]
assert all(t1 < t2 for t1,t2 in pairs(times))
return lines
if __name__ == '__main__':
import json
inf = sys.stdin
outf = sys.stdout
if len(sys.argv) >= 2 and sys.argv[1] != '-':
inf = open(sys.argv[1])
if len(sys.argv) >= 3 and sys.argv[2] != '-':
outf = open(sys.argv[2], 'w')
markers = read_file(inf)
data = {
'duration': max(u for u,v in markers) + 300,
'chapters': [
{
'name': "Slide {0}".format(slideno),
'start': t,
}
for t,slideno in markers
]
}
json.dump(data, outf, sort_keys=True, indent=1)
|
mit
| -7,336,864,783,408,740,000 | 17.016667 | 50 | 0.59482 | false |
0racul/pythonize
|
test/test_info.py
|
1
|
1473
|
import re
from random import randrange
def test_all_info_on_home_page(app):
index = randrange(1, len(app.contact.get_contact_list()))
contact_from_home_page = app.contact.get_contact_info_from_homepage()[index]
contact_from_edit_page = app.contact.get_contact_info_from_edit_page(index)
assert contact_from_home_page.lastname == contact_from_edit_page.lastname
assert contact_from_home_page.firstname == contact_from_edit_page.firstname
assert contact_from_home_page.address == contact_from_edit_page.address
assert contact_from_home_page.all_phones_from_homepage == merge_phones_like_on_homepage(contact_from_edit_page)
assert contact_from_home_page.all_emails_from_homepage == merge_emails_like_on_homepage(contact_from_edit_page)
def clear(s):
return re.sub("[ ()-]", "", s)
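# Example (hypothetical phone value):
#   clear("+7 (926) 555-55-55") -> "+79265555555"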
def merge_phones_like_on_homepage(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.hometele, contact.mobiletele, contact.worktele, contact.phone2]))))
def merge_emails_like_on_homepage(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.email, contact.email2, contact.email3]))))
|
apache-2.0
| 1,393,689,568,122,579,200 | 42.323529 | 119 | 0.610319 | false |
vertexproject/synapse
|
synapse/tests/test_model_infotech.py
|
1
|
44099
|
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.models.crypto as s_m_crypto
import synapse.tests.utils as s_t_utils
from synapse.tests.utils import alist
class InfotechModelTest(s_t_utils.SynTest):
async def test_infotech_basics(self):
async with self.getTestCore() as core:
nodes = await core.nodes('''[
it:sec:cwe=CWE-120
:name=omg
:desc=omgwtfbbq
:url=https://cwe.mitre.org/data/definitions/120.html
:parents=(CWE-119,)
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:sec:cwe', 'CWE-120'))
self.eq(nodes[0].get('name'), 'omg')
self.eq(nodes[0].get('desc'), 'omgwtfbbq')
self.eq(nodes[0].get('url'), 'https://cwe.mitre.org/data/definitions/120.html')
self.eq(nodes[0].get('parents'), ('CWE-119',))
self.eq(r'foo\:bar', core.model.type('it:sec:cpe').norm(r'cpe:2.3:a:foo\:bar:*:*:*:*:*:*:*:*:*')[1]['subs']['vendor'])
with self.raises(s_exc.BadTypeValu):
nodes = await core.nodes('[it:sec:cpe=asdf]')
with self.raises(s_exc.BadTypeValu):
nodes = await core.nodes('[it:sec:cpe=cpe:2.3:a:asdf]')
nodes = await core.nodes('''[
it:sec:cpe=cpe:2.3:a:microsoft:internet_explorer:8.0.6001:beta:*:*:*:*:*:*
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:sec:cpe', 'cpe:2.3:a:microsoft:internet_explorer:8.0.6001:beta:*:*:*:*:*:*'))
self.eq(nodes[0].get('part'), 'a')
self.eq(nodes[0].get('vendor'), 'microsoft')
self.eq(nodes[0].get('product'), 'internet_explorer')
self.eq(nodes[0].get('version'), '8.0.6001')
self.eq(nodes[0].get('update'), 'beta')
nodes = await core.nodes('''[
it:mitre:attack:group=G0100
:org={[ ou:org=* :name=visicorp ]}
:name=aptvisi
:names=(visigroup, nerdsrus, visigroup)
:desc=worlddom
:url=https://vertex.link
:tag=cno.mitre.g0100
:references=(https://foo.com,https://bar.com)
:software=(S0200,S0100,S0100)
:techniques=(T0200,T0100,T0100)
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:mitre:attack:group', 'G0100'))
self.nn(nodes[0].get('org'))
self.eq(nodes[0].get('name'), 'aptvisi')
self.eq(nodes[0].get('names'), ('nerdsrus', 'visigroup'))
self.eq(nodes[0].get('desc'), 'worlddom')
self.eq(nodes[0].get('tag'), 'cno.mitre.g0100')
self.eq(nodes[0].get('url'), 'https://vertex.link')
self.eq(nodes[0].get('references'), ('https://foo.com', 'https://bar.com'))
self.eq(nodes[0].get('software'), ('S0100', 'S0200'))
self.eq(nodes[0].get('techniques'), ('T0100', 'T0200'))
nodes = await core.nodes('''[
it:mitre:attack:tactic=TA0100
:name=tactilneck
:desc=darkerblack
:url=https://archer.link
:tag=cno.mitre.ta0100
:references=(https://foo.com,https://bar.com)
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:mitre:attack:tactic', 'TA0100'))
self.eq(nodes[0].get('name'), 'tactilneck')
self.eq(nodes[0].get('desc'), 'darkerblack')
self.eq(nodes[0].get('tag'), 'cno.mitre.ta0100')
self.eq(nodes[0].get('url'), 'https://archer.link')
self.eq(nodes[0].get('references'), ('https://foo.com', 'https://bar.com'))
nodes = await core.nodes('''[
it:mitre:attack:technique=T0100
:name=lockpicking
:desc=speedhackers
:url=https://locksrus.link
:tag=cno.mitre.t0100
:references=(https://foo.com,https://bar.com)
:parent=T9999
:status=deprecated
:isnow=T1110
:tactics=(TA0200,TA0100,TA0100)
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:mitre:attack:technique', 'T0100'))
self.eq(nodes[0].get('name'), 'lockpicking')
self.eq(nodes[0].get('desc'), 'speedhackers')
self.eq(nodes[0].get('tag'), 'cno.mitre.t0100')
self.eq(nodes[0].get('url'), 'https://locksrus.link')
self.eq(nodes[0].get('references'), ('https://foo.com', 'https://bar.com'))
self.eq(nodes[0].get('parent'), 'T9999')
self.eq(nodes[0].get('tactics'), ('TA0100', 'TA0200'))
self.eq(nodes[0].get('status'), 'deprecated')
self.eq(nodes[0].get('isnow'), 'T1110')
nodes = await core.nodes('''[
it:mitre:attack:software=S0100
:software=*
:name=redtree
:desc=redtreestuff
:url=https://redtree.link
:tag=cno.mitre.s0100
:references=(https://foo.com,https://bar.com)
:techniques=(T0200,T0100,T0100)
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:mitre:attack:software', 'S0100'))
self.nn(nodes[0].get('software'))
self.eq(nodes[0].get('name'), 'redtree')
self.eq(nodes[0].get('desc'), 'redtreestuff')
self.eq(nodes[0].get('tag'), 'cno.mitre.s0100')
self.eq(nodes[0].get('url'), 'https://redtree.link')
self.eq(nodes[0].get('references'), ('https://foo.com', 'https://bar.com'))
self.eq(nodes[0].get('techniques'), ('T0100', 'T0200'))
nodes = await core.nodes('''[
it:mitre:attack:mitigation=M0100
:name=patchstuff
:desc=patchyourstuff
:url=https://wsus.com
:tag=cno.mitre.m0100
:references=(https://foo.com,https://bar.com)
:addresses=(T0200,T0100,T0100)
]''')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:mitre:attack:mitigation', 'M0100'))
self.eq(nodes[0].get('name'), 'patchstuff')
self.eq(nodes[0].get('desc'), 'patchyourstuff')
self.eq(nodes[0].get('tag'), 'cno.mitre.m0100')
self.eq(nodes[0].get('url'), 'https://wsus.com')
self.eq(nodes[0].get('references'), ('https://foo.com', 'https://bar.com'))
self.eq(nodes[0].get('addresses'), ('T0100', 'T0200'))
async def test_infotech_ios(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
valu = '00000000-0000-0000-0000-00000000000A'
idfa = await snap.addNode('it:os:ios:idfa', valu)
self.eq(idfa.ndef[1], '00000000-0000-0000-0000-00000000000a')
async def test_infotech_android(self):
softver = s_common.guid()
async with self.getTestCore() as core:
async with await core.snap() as snap:
perm = await snap.addNode('it:os:android:perm', 'Foo Perm')
self.eq(perm.ndef[1], 'Foo Perm')
intent = await snap.addNode('it:os:android:intent', 'Foo Intent')
self.eq(intent.ndef[1], 'Foo Intent')
ilisn = await snap.addNode('it:os:android:ilisten', (softver, 'Listen Test'))
self.eq(ilisn.get('app'), softver)
self.eq(ilisn.get('intent'), 'Listen Test')
ibcast = await snap.addNode('it:os:android:ibroadcast', (softver, 'Broadcast Test'))
self.eq(ibcast.get('app'), softver)
self.eq(ibcast.get('intent'), 'Broadcast Test')
reqperm = await snap.addNode('it:os:android:reqperm', (softver, 'Test Perm'))
self.eq(reqperm.get('app'), softver)
self.eq(reqperm.get('perm'), 'Test Perm')
valu = 'someIdentifier'
aaid = await snap.addNode('it:os:android:aaid', valu)
self.eq(aaid.ndef[1], 'someidentifier')
async def test_it_forms_simple(self):
async with self.getTestCore() as core:
place = s_common.guid()
async with await core.snap() as snap:
node = await snap.addNode('it:hostname', 'Bobs Computer')
self.eq(node.ndef[1], 'bobs computer')
org0 = s_common.guid()
host0 = s_common.guid()
sver0 = s_common.guid()
cont0 = s_common.guid()
hprops = {
'name': 'Bobs laptop',
'desc': 'Bobs paperweight',
'ipv4': '1.2.3.4',
'latlong': '0.0, 0.0',
'place': place,
'os': sver0,
'manu': 'Dull',
'model': 'Lutitude 8249',
'serial': '111-222',
'loc': 'us.hehe.haha',
'operator': cont0,
'org': org0,
}
node = await snap.addNode('it:host', host0, hprops)
self.eq(node.ndef[1], host0)
self.eq(node.get('name'), 'bobs laptop')
self.eq(node.get('desc'), 'Bobs paperweight')
self.eq(node.get('ipv4'), 0x01020304)
self.eq(node.get('latlong'), (0.0, 0.0))
self.eq(node.get('place'), place)
self.eq(node.get('os'), sver0)
self.eq(node.get('loc'), 'us.hehe.haha')
self.eq(node.get('org'), org0)
self.eq(node.get('operator'), cont0)
node = await snap.addNode('it:hosturl', (host0, 'http://vertex.ninja/cool.php'))
self.eq(node.ndef[1], (host0, 'http://vertex.ninja/cool.php'))
self.eq(node.get('host'), host0)
self.eq(node.get('url'), 'http://vertex.ninja/cool.php')
node = await snap.addNode('it:dev:int', 0x61C88648)
self.eq(node.ndef[1], 1640531528)
cprops = {
'desc': 'Some words.',
}
node = await snap.addNode('it:sec:cve', 'CVE-2013-9999', cprops)
self.eq(node.ndef[1], 'cve-2013-9999')
self.eq(node.get('desc'), 'Some words.')
hash0 = s_common.guid()
hprops = {
'salt': 'B33F',
'hash:md5': s_m_crypto.ex_md5,
'hash:sha1': s_m_crypto.ex_sha1,
'hash:sha256': s_m_crypto.ex_sha256,
'hash:sha512': s_m_crypto.ex_sha512,
'hash:lm': s_m_crypto.ex_md5,
'hash:ntlm': s_m_crypto.ex_md5,
'passwd': "I've got the same combination on my luggage!",
}
node = await snap.addNode('it:auth:passwdhash', hash0, hprops)
self.eq(node.ndef[1], hash0)
self.eq(node.get('salt'), 'b33f')
self.eq(node.get('hash:md5'), s_m_crypto.ex_md5)
self.eq(node.get('hash:sha1'), s_m_crypto.ex_sha1)
self.eq(node.get('hash:sha256'), s_m_crypto.ex_sha256)
self.eq(node.get('hash:sha512'), s_m_crypto.ex_sha512)
self.eq(node.get('hash:lm'), s_m_crypto.ex_md5)
self.eq(node.get('hash:ntlm'), s_m_crypto.ex_md5)
self.eq(node.get('passwd'), "I've got the same combination on my luggage!")
nodes = await core.nodes('[ it:adid=visi ]')
self.eq(('it:adid', 'visi'), nodes[0].ndef)
nodes = await core.nodes('''
init {
$org = $lib.guid()
$host = $lib.guid()
$acct = $lib.guid()
}
[
it:account=$acct
:host=$host
:user=visi
:contact={[ ps:contact=* :[email protected] ]}
:domain={[ it:domain=* :org=$org :name=vertex :desc="the vertex project domain" ]}
(it:logon=* :time=20210314 :logoff:time=202103140201 :account=$acct :host=$host :duration=(:logoff:time - :time))
]
''')
self.len(2, nodes)
self.eq('visi', nodes[0].get('user'))
self.nn(nodes[0].get('host'))
self.nn(nodes[0].get('domain'))
self.nn(nodes[0].get('contact'))
self.nn(nodes[1].get('host'))
self.nn(nodes[1].get('account'))
self.eq(1615680000000, nodes[1].get('time'))
self.eq(1615687260000, nodes[1].get('logoff:time'))
self.eq(7260000, nodes[1].get('duration'))
self.eq('02:01:00.000', nodes[1].repr('duration'))
nodes = await core.nodes('inet:[email protected] -> ps:contact -> it:account -> it:logon +:time>=2021 -> it:host')
self.len(1, nodes)
self.eq('it:host', nodes[0].ndef[0])
nodes = await core.nodes('it:account -> it:domain')
self.len(1, nodes)
self.nn(nodes[0].get('org'))
self.eq('vertex', nodes[0].get('name'))
self.eq('the vertex project domain', nodes[0].get('desc'))
nodes = await core.nodes('''[
it:log:event=*
:mesg=foobar
:data=(foo, bar, baz)
:severity=debug
:host={it:host | limit 1}
]''')
self.len(1, nodes)
self.eq(10, nodes[0].get('severity'))
self.eq('foobar', nodes[0].get('mesg'))
self.eq(('foo', 'bar', 'baz'), nodes[0].get('data'))
# check that the host activity model was inherited
self.nn(nodes[0].get('host'))
async def test_it_forms_prodsoft(self):
# Test all prodsoft and prodsoft associated linked forms
async with self.getTestCore() as core:
async with await core.snap() as snap:
# it:prod:soft
prod0 = s_common.guid()
org0 = s_common.guid()
person0 = s_common.guid()
file0 = 'a' * 64
acct0 = ('vertex.link', 'pennywise')
url0 = 'https://vertex.link/products/balloonmaker'
sprops = {
'name': 'Balloon Maker',
'desc': "Pennywise's patented balloon blower upper",
'desc:short': 'Balloon blower',
'author:org': org0,
'author:email': '[email protected]',
'author:acct': acct0,
'author:person': person0,
'url': url0,
}
node = await snap.addNode('it:prod:soft', prod0, sprops)
self.eq(node.ndef[1], prod0)
self.eq(node.get('name'), 'balloon maker')
self.eq(node.get('desc'), "Pennywise's patented balloon blower upper")
self.eq(node.get('desc:short'), 'balloon blower')
self.eq(node.get('author:org'), org0)
self.eq(node.get('author:acct'), acct0)
self.eq(node.get('author:email'), '[email protected]')
self.eq(node.get('author:person'), person0)
self.false(node.get('isos'))
self.false(node.get('islib'))
await node.set('isos', True)
await node.set('islib', True)
self.true(node.get('isos'))
self.true(node.get('islib'))
self.eq(node.get('url'), url0)
# it:prod:softver - this does test a bunch of property related callbacks
url1 = 'https://vertex.link/products/balloonmaker/release_101-beta.exe'
vprops = {
'vers': 'V1.0.1-beta+exp.sha.5114f85',
'url': url1,
'software': prod0,
'arch': 'amd64'
}
ver0 = s_common.guid()
node = await snap.addNode('it:prod:softver', ver0, vprops)
self.eq(node.ndef[1], ver0)
self.eq(node.get('arch'), 'amd64')
self.eq(node.get('software'), prod0)
self.eq(node.get('software:name'), 'balloon maker')
self.eq(node.get('vers'), 'V1.0.1-beta+exp.sha.5114f85')
self.eq(node.get('vers:norm'), 'v1.0.1-beta+exp.sha.5114f85')
self.eq(node.get('semver'), 0x000010000000001)
self.eq(node.get('semver:major'), 1)
self.eq(node.get('semver:minor'), 0)
self.eq(node.get('semver:patch'), 1)
self.eq(node.get('semver:pre'), 'beta')
self.eq(node.get('semver:build'), 'exp.sha.5114f85')
self.eq(node.get('url'), url1)
# callback node creation checks
nodes = await snap.nodes('it:dev:str=V1.0.1-beta+exp.sha.5114f85')
self.len(1, nodes)
nodes = await snap.nodes('it:dev:str=amd64')
self.len(1, nodes)
host0 = s_common.guid()
node = await snap.addNode('it:hostsoft', (host0, ver0))
self.eq(node.ndef[1], (host0, ver0))
self.eq(node.get('host'), host0)
self.eq(node.get('softver'), ver0)
softfile = await snap.addNode('it:prod:softfile', (ver0, file0))
self.eq(softfile.get('soft'), ver0)
self.eq(softfile.get('file'), f'sha256:{file0}')
ver1 = s_common.guid()
softlib = await snap.addNode('it:prod:softlib', (ver0, ver1))
self.eq(softlib.get('soft'), ver0)
self.eq(softlib.get('lib'), ver1)
os0 = s_common.guid()
softos = await snap.addNode('it:prod:softos', (ver0, os0))
self.eq(softos.get('soft'), ver0)
self.eq(softos.get('os'), os0)
prod1 = s_common.guid()
sigprops = {
'desc': 'The evil balloon virus!',
'url': url1,
}
sig0 = (prod1, 'Bar.BAZ.faZ')
node = await snap.addNode('it:av:sig', sig0, sigprops)
self.eq(node.ndef[1], (prod1, 'Bar.BAZ.faZ'.lower()))
self.eq(node.get('soft'), prod1)
self.eq(node.get('name'), 'bar.baz.faz')
self.eq(node.get('desc'), 'The evil balloon virus!')
self.eq(node.get('url'), url1)
node = await snap.addNode('it:av:filehit', (file0, sig0))
self.eq(node.ndef[1], (f'sha256:{file0}', (prod1, 'Bar.BAZ.faZ'.lower())))
self.eq(node.get('file'), f'sha256:{file0}')
self.eq(node.get('sig'), (prod1, 'Bar.BAZ.faZ'.lower()))
self.eq(node.get('sig:name'), 'bar.baz.faz')
self.eq(node.get('sig:soft'), prod1)
# Test 'vers' semver brute forcing
testvectors = [
('1', 0x000010000000000, {'major': 1, 'minor': 0, 'patch': 0}),
('2.0A1', 0x000020000000000, {'major': 2, 'minor': 0, 'patch': 0}),
('2016-03-01', 0x007e00000300001, {'major': 2016, 'minor': 3, 'patch': 1}),
('1.2.windows-RC1', 0x000010000200000, {'major': 1, 'minor': 2, 'patch': 0}),
('3.4', 0x000030000400000, {'major': 3, 'minor': 4, 'patch': 0}),
('1.3a2.dev12', 0x000010000000000, {'major': 1, 'minor': 0, 'patch': 0}),
('v2.4.0.0-1', 0x000020000400000, {'major': 2, 'minor': 4, 'patch': 0}),
('v2.4.1.0-0.3.rc1', 0x000020000400001, {'major': 2, 'minor': 4, 'patch': 1}),
('0.18rc2', 0, {'major': 0, 'minor': 0, 'patch': 0}),
('OpenSSL_1_0_2l', 0x000010000000000, {'major': 1, 'minor': 0, 'patch': 0}),
]
for tv, te, subs in testvectors:
props = {
'vers': tv
}
node = await snap.addNode('it:prod:softver', '*', props)
self.eq(node.get('semver'), te)
self.eq(node.get('semver:major'), subs.get('major'))
self.eq(node.get('semver:minor'), subs.get('minor'))
self.eq(node.get('semver:patch'), subs.get('patch'))
node = await snap.addNode('it:prod:softver', '*', {'vers': ''})
self.eq(node.get('vers'), '')
self.none(node.get('vers:norm'))
self.none(node.get('semver'))
with self.getLoggerStream('synapse.models.infotech',
'Unable to brute force version parts out of the string') as stream:
node = await snap.addNode('it:prod:softver', '*', {'vers': 'Alpha'})
self.none(node.get('semver'))
self.true(stream.is_set())
async def test_it_form_callbacks(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
# it:dev:str kicks out the :norm property on him when he is made
node = await snap.addNode('it:dev:str', 'evil RAT')
self.eq(node.ndef[1], 'evil RAT')
self.eq(node.get('norm'), 'evil rat')
node = await snap.addNode('it:dev:pipe', 'MyPipe')
self.eq(node.ndef[1], 'MyPipe')
nodes = await snap.nodes('it:dev:str=MyPipe')
self.len(1, nodes)
# The callback created node also has norm set on it
self.eq(nodes[0].get('norm'), 'mypipe')
node = await snap.addNode('it:dev:mutex', 'MyMutex')
self.eq(node.ndef[1], 'MyMutex')
nodes = await snap.nodes('it:dev:str=MyMutex')
self.len(1, nodes)
key = 'HKEY_LOCAL_MACHINE\\Foo\\Bar'
node = await snap.addNode('it:dev:regkey', key)
self.eq(node.ndef[1], key)
opts = {'vars': {'key': key}}
nodes = await snap.nodes('it:dev:str=$key', opts=opts)
self.len(1, nodes)
fbyts = 'sha256:' + 64 * 'f'
key = 'HKEY_LOCAL_MACHINE\\DUCK\\QUACK'
valus = [
('str', 'knight'),
('int', 20),
('bytes', fbyts),
]
for prop, valu in valus:
iden = s_common.guid((key, valu))
props = {
'key': key,
prop: valu,
}
node = await snap.addNode('it:dev:regval', iden, props)
self.eq(node.ndef[1], iden)
self.eq(node.get('key'), key)
self.eq(node.get(prop), valu)
nodes = await snap.nodes('it:dev:str=HKEY_LOCAL_MACHINE\\Foo\\Bar')
self.len(1, nodes)
async def test_it_semvertype(self):
async with self.getTestCore() as core:
t = core.model.type('it:semver')
testvectors = (
# Strings
('1.2.3', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3, })),
('0.0.1', (0x000000000000001,
{'major': 0, 'minor': 0, 'patch': 1, })),
('1.2.3-alpha', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': 'alpha', })),
('1.2.3-alpha.1', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': 'alpha.1', })),
('1.2.3-0.3.7', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': '0.3.7', })),
('1.2.3-x.7.z.92', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': 'x.7.z.92', })),
('1.2.3-alpha+001', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': 'alpha', 'build': '001'})),
('1.2.3+20130313144700', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'build': '20130313144700'})),
('1.2.3-beta+exp.sha.5114f85', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': 'beta',
'build': 'exp.sha.5114f85'})),
# Real world examples
('1.2.3-B5CD5743F', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3,
'pre': 'B5CD5743F', })),
('V1.2.3', (0x000010000200003,
{'major': 1, 'minor': 2, 'patch': 3, })),
('V1.4.0-RC0', (0x000010000400000,
{'major': 1, 'minor': 4, 'patch': 0,
'pre': 'RC0', })),
('v2.4.1-0.3.rc1', (0x000020000400001,
{'major': 2, 'minor': 4, 'patch': 1,
'pre': '0.3.rc1'})),
('0.18.1', (0x000000001200001,
{'major': 0, 'minor': 18, 'patch': 1, })),
# Integer values
(0, (0, {'major': 0, 'minor': 0, 'patch': 0})),
(1, (1, {'major': 0, 'minor': 0, 'patch': 1})),
(2, (2, {'major': 0, 'minor': 0, 'patch': 2})),
(0xFFFFF, (0xFFFFF, {'major': 0, 'minor': 0, 'patch': 0xFFFFF})),
(0xFFFFF + 1, (0xFFFFF + 1, {'major': 0, 'minor': 1, 'patch': 0})),
(0xdeadb33f1337133, (0xdeadb33f1337133, {'major': 0xdeadb, 'minor': 0x33f13, 'patch': 0x37133})),
(0xFFFFFFFFFFFFFFF, (0xFFFFFFFFFFFFFFF, {'major': 0xFFFFF, 'minor': 0xFFFFF, 'patch': 0xFFFFF}))
)
for v, e in testvectors:
ev, es = e
valu, rdict = t.norm(v)
subs = rdict.get('subs')
self.eq(valu, ev)
self.eq(subs, es)
testvectors_bad = (
# Invalid strings
'1',
'1.2',
'2.0A1',
'0.18rc2',
'0.0.00001',
'2016-03-01',
'v2.4.0.0-1',
'1.3a2.dev12',
'OpenSSL_1_0_2l',
'1.2.windows-RC1',
'v2.4.1.0-0.3.rc1',
# invalid ints
-1,
0xFFFFFFFFFFFFFFFFFFFFFFFF + 1,
# Invalid build and prerelease values
'1.2.3-alpha.foo..+001',
'1.2.3-alpha.foo.001+001',
'1.2.3-alpha+001.blahblahblah...',
'1.2.3-alpha+001.blahblahblah.*iggy',
# Just bad input
' ',
' alpha ',
)
for v in testvectors_bad:
self.raises(s_exc.BadTypeValu, t.norm, v)
testvectors_repr = (
(0, '0.0.0'),
(1, '0.0.1'),
(0x000010000200003, '1.2.3'),
)
for v, e in testvectors_repr:
self.eq(t.repr(v), e)
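# Note on the packed layout exercised above: the 0xFFFFF boundary vectors
# imply 20 bits per field, i.e. semver = (major << 40) | (minor << 20) | patch,
# which is why 1.2.3 packs to 0x000010000200003.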
async def test_it_forms_hostexec(self):
# forms related to the host execution model
async with self.getTestCore() as core:
async with await core.snap() as snap:
exe = 'sha256:' + 'a' * 64
port = 80
tick = s_common.now()
host = s_common.guid()
proc = s_common.guid()
mutex = 'giggleXX_X0'
pipe = 'pipe\\mynamedpipe'
user = 'serviceadmin'
pid = 20
key = 'HKEY_LOCAL_MACHINE\\Foo\\Bar'
ipv4 = 0x01020304
ipv6 = '::1'
addr4 = f'tcp://1.2.3.4:{port}'
addr6 = f'udp://[::1]:{port}'
url = 'http://www.google.com/sekrit.html'
raw_path = r'c:\Windows\System32\rar.exe'
norm_path = r'c:/windows/system32/rar.exe'
src_proc = s_common.guid()
src_path = r'c:/temp/ping.exe'
cmd0 = 'rar a -r yourfiles.rar *.txt'
fpath = 'c:/temp/yourfiles.rar'
fbyts = 'sha256:' + 'b' * 64
pprops = {
'exe': exe,
'pid': pid,
'cmd': cmd0,
'host': host,
'time': tick,
'user': user,
'path': raw_path,
'src:exe': src_path,
'src:proc': src_proc,
}
node = await snap.addNode('it:exec:proc', proc, pprops)
self.eq(node.ndef[1], proc)
self.eq(node.get('exe'), exe)
self.eq(node.get('pid'), pid)
self.eq(node.get('cmd'), cmd0)
self.eq(node.get('host'), host)
self.eq(node.get('time'), tick)
self.eq(node.get('user'), user)
self.eq(node.get('path'), norm_path)
self.eq(node.get('src:exe'), src_path)
self.eq(node.get('src:proc'), src_proc)
nodes = await core.nodes('it:cmd')
self.len(1, nodes)
self.eq(nodes[0].ndef, ('it:cmd', 'rar a -r yourfiles.rar *.txt'))
m0 = s_common.guid()
mprops = {
'exe': exe,
'proc': proc,
'name': mutex,
'host': host,
'time': tick,
}
node = await snap.addNode('it:exec:mutex', m0, mprops)
self.eq(node.ndef[1], m0)
self.eq(node.get('exe'), exe)
self.eq(node.get('proc'), proc)
self.eq(node.get('host'), host)
self.eq(node.get('time'), tick)
self.eq(node.get('name'), mutex)
p0 = s_common.guid()
pipeprops = {
'exe': exe,
'proc': proc,
'name': pipe,
'host': host,
'time': tick,
}
node = await snap.addNode('it:exec:pipe', p0, pipeprops)
self.eq(node.ndef[1], p0)
self.eq(node.get('exe'), exe)
self.eq(node.get('proc'), proc)
self.eq(node.get('host'), host)
self.eq(node.get('time'), tick)
self.eq(node.get('name'), pipe)
u0 = s_common.guid()
uprops = {
'proc': proc,
'host': host,
'exe': exe,
'time': tick,
'url': url,
'client': addr4,
}
node = await snap.addNode('it:exec:url', u0, uprops)
self.eq(node.ndef[1], u0)
self.eq(node.get('exe'), exe)
self.eq(node.get('proc'), proc)
self.eq(node.get('host'), host)
self.eq(node.get('time'), tick)
self.eq(node.get('url'), url)
self.eq(node.get('client'), addr4)
self.eq(node.get('client:ipv4'), ipv4)
self.eq(node.get('client:port'), port)
u1 = s_common.guid()
uprops['client'] = addr6
node = await snap.addNode('it:exec:url', u1, uprops)
self.eq(node.ndef[1], u1)
self.eq(node.get('client'), addr6)
self.eq(node.get('client:ipv6'), ipv6)
self.eq(node.get('client:port'), port)
b0 = s_common.guid()
bprops = {
'proc': proc,
'host': host,
'exe': exe,
'time': tick,
'server': addr4
}
node = await snap.addNode('it:exec:bind', b0, bprops)
self.eq(node.ndef[1], b0)
self.eq(node.get('exe'), exe)
self.eq(node.get('proc'), proc)
self.eq(node.get('host'), host)
self.eq(node.get('time'), tick)
self.eq(node.get('server'), addr4)
self.eq(node.get('server:ipv4'), ipv4)
self.eq(node.get('server:port'), port)
b1 = s_common.guid()
bprops['server'] = addr6
node = await snap.addNode('it:exec:bind', b1, bprops)
self.eq(node.ndef[1], b1)
self.eq(node.get('server'), addr6)
self.eq(node.get('server:ipv6'), ipv6)
self.eq(node.get('server:port'), port)
faprops = {
'exe': exe,
'host': host,
'proc': proc,
'file': fbyts,
'time': tick,
'path': fpath,
}
fa0 = s_common.guid()
node = await snap.addNode('it:exec:file:add', fa0, faprops)
self.eq(node.ndef[1], fa0)
self.eq(node.get('exe'), exe)
self.eq(node.get('host'), host)
self.eq(node.get('proc'), proc)
self.eq(node.get('time'), tick)
self.eq(node.get('file'), fbyts)
self.eq(node.get('path'), fpath)
self.eq(node.get('path:dir'), 'c:/temp')
self.eq(node.get('path:base'), 'yourfiles.rar')
self.eq(node.get('path:ext'), 'rar')
fr0 = s_common.guid()
node = await snap.addNode('it:exec:file:read', fr0, faprops)
self.eq(node.ndef[1], fr0)
self.eq(node.get('exe'), exe)
self.eq(node.get('host'), host)
self.eq(node.get('proc'), proc)
self.eq(node.get('time'), tick)
self.eq(node.get('file'), fbyts)
self.eq(node.get('path'), fpath)
self.eq(node.get('path:dir'), 'c:/temp')
self.eq(node.get('path:base'), 'yourfiles.rar')
self.eq(node.get('path:ext'), 'rar')
fw0 = s_common.guid()
node = await snap.addNode('it:exec:file:write', fw0, faprops)
self.eq(node.ndef[1], fw0)
self.eq(node.get('exe'), exe)
self.eq(node.get('host'), host)
self.eq(node.get('proc'), proc)
self.eq(node.get('time'), tick)
self.eq(node.get('file'), fbyts)
self.eq(node.get('path'), fpath)
self.eq(node.get('path:dir'), 'c:/temp')
self.eq(node.get('path:base'), 'yourfiles.rar')
self.eq(node.get('path:ext'), 'rar')
fd0 = s_common.guid()
node = await snap.addNode('it:exec:file:del', fd0, faprops)
self.eq(node.ndef[1], fd0)
self.eq(node.get('exe'), exe)
self.eq(node.get('host'), host)
self.eq(node.get('proc'), proc)
self.eq(node.get('time'), tick)
self.eq(node.get('file'), fbyts)
self.eq(node.get('path'), fpath)
self.eq(node.get('path:dir'), 'c:/temp')
self.eq(node.get('path:base'), 'yourfiles.rar')
self.eq(node.get('path:ext'), 'rar')
file0 = s_common.guid()
fsprops = {
'host': host,
'path': fpath,
'file': fbyts,
'ctime': tick,
'mtime': tick + 1,
'atime': tick + 2,
'user': user,
'group': 'domainadmin'
}
node = await snap.addNode('it:fs:file', file0, fsprops)
self.eq(node.ndef[1], file0)
self.eq(node.get('host'), host)
self.eq(node.get('user'), user)
self.eq(node.get('group'), 'domainadmin')
self.eq(node.get('file'), fbyts)
self.eq(node.get('ctime'), tick)
self.eq(node.get('mtime'), tick + 1)
self.eq(node.get('atime'), tick + 2)
self.eq(node.get('path'), fpath)
self.eq(node.get('path:dir'), 'c:/temp')
self.eq(node.get('path:base'), 'yourfiles.rar')
self.eq(node.get('path:ext'), 'rar')
rprops = {
'host': host,
'proc': proc,
'exe': exe,
'time': tick,
'reg': '*',
}
forms = ('it:exec:reg:get',
'it:exec:reg:set',
'it:exec:reg:del',
)
for form in forms:
rk0 = s_common.guid()
nprops = rprops.copy()
node = await snap.addNode(form, rk0, nprops)
self.eq(node.ndef[1], rk0)
self.eq(node.get('host'), host)
self.eq(node.get('proc'), proc)
self.eq(node.get('exe'), exe)
self.eq(node.get('time'), tick)
self.nn(node.get('reg'))
async def test_it_app_yara(self):
async with self.getTestCore() as core:
rule = s_common.guid()
opts = {'vars': {'rule': rule}}
nodes = await core.nodes('[ it:app:yara:rule=$rule :enabled=true :text=gronk :author=* :name=foo :version=1.2.3 ]', opts=opts)
self.len(1, nodes)
self.eq('foo', nodes[0].get('name'))
self.eq(True, nodes[0].get('enabled'))
self.eq('gronk', nodes[0].get('text'))
self.eq(0x10000200003, nodes[0].get('version'))
self.len(1, await core.nodes('it:app:yara:rule=$rule -> ps:contact', opts=opts))
nodes = await core.nodes('[ it:app:yara:match=($rule, "*") :version=1.2.3 ]', opts=opts)
self.len(1, nodes)
self.nn(nodes[0].get('file'))
self.eq(rule, nodes[0].get('rule'))
self.eq(0x10000200003, nodes[0].get('version'))
async def test_it_app_snort(self):
async with self.getTestCore() as core:
hit = s_common.guid()
rule = s_common.guid()
flow = s_common.guid()
host = s_common.guid()
opts = {'vars': {'rule': rule, 'flow': flow, 'host': host, 'hit': hit}}
nodes = await core.nodes('[ it:app:snort:rule=$rule :text=gronk :name=foo :version=1.2.3 ]', opts=opts)
self.len(1, nodes)
self.eq('foo', nodes[0].get('name'))
self.eq('gronk', nodes[0].get('text'))
self.eq(0x10000200003, nodes[0].get('version'))
nodes = await core.nodes('[ it:app:snort:hit=$hit :rule=$rule :flow=$flow :src="tcp://[::ffff:0102:0304]:0" :dst="tcp://[::ffff:0505:0505]:80" :time=2015 :sensor=$host :version=1.2.3 ]', opts=opts)
self.len(1, nodes)
self.eq(rule, nodes[0].get('rule'))
self.eq(flow, nodes[0].get('flow'))
self.eq(host, nodes[0].get('sensor'))
self.eq(1420070400000, nodes[0].get('time'))
self.eq('tcp://[::ffff:1.2.3.4]:0', nodes[0].get('src'))
self.eq(0, nodes[0].get('src:port'))
self.eq(0x01020304, nodes[0].get('src:ipv4'))
self.eq('::ffff:1.2.3.4', nodes[0].get('src:ipv6'))
self.eq('tcp://[::ffff:5.5.5.5]:80', nodes[0].get('dst'))
self.eq(80, nodes[0].get('dst:port'))
self.eq(0x05050505, nodes[0].get('dst:ipv4'))
self.eq('::ffff:5.5.5.5', nodes[0].get('dst:ipv6'))
self.eq(0x10000200003, nodes[0].get('version'))
async def test_it_reveng(self):
async with self.getTestCore() as core:
baseFile = s_common.ehex(s_common.buid())
func = s_common.guid()
fva = 0x404438
rank = 33
complexity = 60
funccalls = ((baseFile, func), )
fopt = {'vars': {'file': baseFile,
'func': func,
'fva': fva,
'rank': rank,
'cmplx': complexity,
'funccalls': funccalls}}
vstr = 'VertexBrandArtisanalBinaries'
sopt = {'vars': {'func': func,
'string': vstr}}
name = "FunkyFunction"
descrp = "Test Function"
impcalls = ("libr.foo", "libr.foo2", "libr.foo3")
funcopt = {'vars': {'name': name,
'descrp': descrp,
'impcalls': impcalls}}
fnode = await core.nodes('[it:reveng:filefunc=($file, $func) :va=$fva :rank=$rank :complexity=$cmplx :funccalls=$funccalls]', opts=fopt)
snode = await core.nodes('[it:reveng:funcstr=($func, $string)]', opts=sopt)
self.len(1, fnode)
self.eq(f'sha256:{baseFile}', fnode[0].get('file'))
self.eq(fva, fnode[0].get('va'))
self.eq(rank, fnode[0].get('rank'))
self.eq(complexity, fnode[0].get('complexity'))
self.eq((f'sha256:{baseFile}', func), fnode[0].get('funccalls')[0])
self.len(1, snode)
self.eq(fnode[0].get('function'), snode[0].get('function'))
self.eq(vstr, snode[0].get('string'))
funcnode = await core.nodes('''
it:reveng:function [
:name=$name
:description=$descrp
:impcalls=$impcalls
:strings=(bar,foo,foo)
]''', opts=funcopt)
self.len(1, funcnode)
self.eq(name, funcnode[0].get('name'))
self.eq(descrp, funcnode[0].get('description'))
self.len(len(impcalls), funcnode[0].get('impcalls'))
self.eq(impcalls[0], funcnode[0].get('impcalls')[0])
self.sorteq(('bar', 'foo'), funcnode[0].get('strings'))
nodes = await core.nodes('it:reveng:function -> it:dev:str')
self.len(2, nodes)
nodes = await core.nodes(f'file:bytes={baseFile} -> it:reveng:filefunc :function -> it:reveng:funcstr:function')
self.len(1, nodes)
self.eq(vstr, nodes[0].get('string'))
nodes = await core.nodes(f'file:bytes={baseFile} -> it:reveng:filefunc -> it:reveng:function -> it:reveng:impfunc')
self.len(len(impcalls), nodes)
|
apache-2.0
| 7,804,782,150,777,394,000 | 44.091002 | 209 | 0.446133 | false |
dmwyatt/rose_trellis
|
rosetrellis/util.py
|
1
|
3091
|
import abc
from urllib.parse import urljoin
import asyncio
from typing import Any, Callable, Sequence, List
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
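# Example: join_url('/boards/') -> 'https://api.trello.com/1/boards'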
def make_sequence_attrgetter(attr_name: str) -> Callable[[Sequence[object]], List[object]]:
"""
Get a callable which takes a sequence of objects and returns a list of
``attr_name`` from each object in the sequence.
:param attr_name: The name of the attribute to get from objects provided the the
returned callable.
:return: A callable which returns a list of attributes specified by ``attr_name``.
"""
def sequence_attrgetter(seq: Sequence[object]) -> List[object]:
"""
Returns a list of attributes from the provided sequence of objects.
:param seq: The sequence to query.
:return: List of attribute values.
"""
return [getattr(obj, attr_name, None) for obj in seq]
return sequence_attrgetter
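# Usage sketch (hypothetical objects exposing a ``name`` attribute):
#   get_names = make_sequence_attrgetter('name')
#   get_names([obj_a, obj_b]) -> [obj_a.name, obj_b.name]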
class _Synchronizer(abc.ABCMeta):
"""
This metaclass functions as a replacement for abc.ABCMeta that adds a synchronous
version of every asynchronous method in the class.
Finds every coroutine method by checking every method with
:func:`asyncio.iscoroutinefunction`. It then creates a synchronous version with
'_s' appended to the method name.
"""
def __new__(cls, clsname, bases, dct):
new_dct = {}
for name, val in dct.items():
# Make a sync version of all coroutine functions
if asyncio.iscoroutinefunction(val):
meth = cls.sync_maker(name)
syncname = '{}_s'.format(name)
meth.__name__ = syncname
meth.__qualname__ = '{}.{}'.format(clsname, syncname)
new_dct[syncname] = meth
elif isinstance(val, classmethod) and asyncio.iscoroutinefunction(val.__func__):
meth = cls.sync_maker(val.__func__.__name__)
syncname = '{}_s'.format(name)
meth.__name__ = syncname
meth.__qualname__ = '{}.{}'.format(clsname, syncname)
new_dct[syncname] = classmethod(meth)
dct.update(new_dct)
return super().__new__(cls, clsname, bases, dct)
@staticmethod
def sync_maker(func):
def sync_func(self, *args, **kwargs):
meth = getattr(self, func)
return asyncio.get_event_loop().run_until_complete(meth(*args, **kwargs))
return sync_func
class Synchronizer(metaclass=_Synchronizer):
pass
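# Usage sketch (hypothetical subclass): a coroutine method defined on a
# Synchronizer subclass, e.g. ``async def fetch(self)``, is mirrored as a
# blocking ``fetch_s()`` that runs the coroutine to completion on the
# current event loop.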
def is_valid_website(website: str) -> bool:
return website.startswith('http://') or website.startswith('https://')
def get_child_obj_id(obj: object, obj_attr_name: str, obj_id_attr_name: str) -> str:
"""
:raises AttributeError: If no child object with a usable id can be found and ``obj`` does not have the ``obj_id_attr_name`` attribute.
"""
if hasattr(obj, obj_attr_name):
child_obj = getattr(obj, obj_attr_name, None)
if hasattr(child_obj, 'id'):
child_obj_id = getattr(child_obj, 'id', None)
if child_obj_id:
return child_obj_id
return getattr(obj, obj_id_attr_name)
|
mit
| 34,526,889,038,846,704 | 28.438095 | 130 | 0.694274 | false |
fevral13/django-history
|
django_history/helpers.py
|
1
|
1738
|
# -*- coding:utf-8 -*-
from decimal import Decimal
from datetime import datetime, date, time
from types import NoneType
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from .settings import DATETIME_FORMAT, TIME_FORMAT, DATE_FORMAT
def serialize_field(obj, field_name, fail_silently=False):
"""Must serialize different fields differently
- integers: integer
- floats and decimals: float
- string: string
- foreign keys: integer
- date: string "2012-01-03"
- time: string "03:52 AM"
- boolean: bool
- datetime: string "2012-01-03 03:52 AM"
- None value: null
must be implemented per individual model class object
- many-to-many: list of integers
"""
if isinstance(field_name, (tuple, list)):
return field_name
try:
value = getattr(obj, field_name)
except ObjectDoesNotExist:
if fail_silently:
return field_name, None
else:
raise
if isinstance(value, (int, bool, unicode, NoneType, float, str)):
result = value
elif isinstance(value, long):
result = int(value)
elif isinstance(value, Decimal):
result = float(value)
elif isinstance(value, datetime):
result = unicode(value.strftime(DATETIME_FORMAT))
elif isinstance(value, time):
result = unicode(value.strftime(TIME_FORMAT))
elif isinstance(value, date):
result = unicode(value.strftime(DATE_FORMAT))
elif isinstance(value, models.Model):
result = value.id
else:
if fail_silently:
result = str(value)
else:
raise Exception('Unknown type "%s" to convert' % type(value))
return field_name, result
|
bsd-3-clause
| 8,840,314,101,324,615,000 | 29.491228 | 73 | 0.648446 | false |
mkobos/tree_crawler
|
concurrent_tree_crawler/abstract_tree_accessor.py
|
1
|
2032
|
from concurrent_tree_crawler.abstract_node import AbstractNode, NodeState
class NodeAction:
"""Enumeration describing actions that can be taken by a crawler when
entering a new node"""
TO_PROCESS = 0
TO_VISIT = 1
@staticmethod
def to_str(action):
"""@type action: L{NodeAction} enum"""
if action == NodeAction.TO_PROCESS:
return "TO_PROCESS"
elif action == NodeAction.TO_VISIT:
return "TO_VISIT"
class AbstractTreeAccessor:
"""
An interface for the tree made of L{AbstractNode}s.
"""
def get_sentinel(self):
"""
@return: sentinel node
@rtype: L{AbstractNode}
"""
raise NotImplementedError
def get_root(self):
"""
@return: root node
@rtype: L{AbstractNode}
"""
raise NotImplementedError
def get_path(self, node):
"""
A convenience method. Returns tree path to the given node.
@type node: L{AbstractNode}
@return: subsequent node names from the tree root to the current node
@rtype: list of strings
"""
path = []
while node != self.get_sentinel():
path.append(node.get_name())
node = node.get_parent()
path.reverse()
return path
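# Example (hypothetical tree): with sentinel -> root -> "a" -> "b",
# get_path(node_b) returns ['root', 'a', 'b'].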
def update_and_get_child(self, node, possible_children_names):
"""
Append new children to the node and return a child that can be
entered by the crawler.
@param node: node considered
@type node: L{AbstractNode}
@param possible_children_names: list of children names
@type possible_children_names: list of strings
@return: children node along with information what the crawler should
do with it. C{None} is returned instead if all children have
state C{NodeState.CLOSED} or C{NodeState.ERROR}, or if the node
has no children.
@rtype: (L{AbstractNode}, L{NodeAction}) pair or C{None}
"""
raise NotImplementedError
def set_node_type(self, node, is_leaf):
"""
Set the leaf state of the node
@param is_leaf: C{True} iff the node is a leaf
"""
raise NotImplementedError
def set_error(self, node):
"""Set the node state as C{NodeState.ERROR}"""
raise NotImplementedError
|
mit
| 5,506,532,906,030,808,000 | 24.734177 | 73 | 0.699311 | false |