id (int64, 0–6k) | code (string, lengths 4k–8k) | code_compressed (null) |
---|---|---|
1,200 |
"""WARNING! THIS SCRIPTS ARE USED IN A SANDBOX SO ALL DEPENDENCIES SHOULD BE HANDELED HERE!
THIS USES PYODIDE V0.17!
"""
from js import eval as jseval, self as web_self, Blob, URL
import os, sys, json, string, random, csv
from io import StringIO
def request(url,params=None,jsonResult=True, whitelist=("/list","/view")):
"""A very simple version of the NetworkService to request synchronous data"""
if not url.startswith("/"):
return None
valid = False
for i in whitelist:
if i in url:
valid=True
break
if not valid:
return None
url = "/json"+url
request = SimpleNetwork().request(url, params)
if request.status=="failed":
return request.status
if not jsonResult:
return request.result
return json.loads(request.result)
class requestList():
def __init__(self, url, params=None, maxRequests=999):
self.url = url
self.params = params or {}
self.maxRequests = maxRequests
self.currentList = []
self.requests = 0
self.cursor = None
self.started=False
def requestData(self):
self.requests += 1
if self.cursor:
self.params["cursor"] = self.cursor
res = request(self.url, self.params)
if not res:
return
self.cursor = None if res["cursor"] == self.cursor else res["cursor"]
self.currentList = res["skellist"]
def next(self):
if self.requests > self.maxRequests:
return False
if self.currentList:
return self.currentList.pop(0)
if not self.cursor:
return False
def running(self):
if not self.currentList:
self.requestData()
if self.requests > self.maxRequests:
return False
if not self.started and self.requests == 0: #min 1 request
self.started = True
return True
if self.currentList:
return True
if self.cursor: #list is empty but we have a valid cursor
return True
return False
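# Example (a minimal sketch): drain a paginated "/list" endpoint with requestList.
# Assumes a ViUR-style "/json/..." backend that answers with the "skellist"/"cursor"
# format consumed above; the endpoint and params are placeholders.
#
#   exporter = requestList("/user/list", params={"amount": 99})
#   while exporter.running():
#       entry = exporter.next()
#       if entry:
#           log.info(entry["name"])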
class csvWriter():
delimiter = ";"
def __init__(self, delimiter=";"):
self.delimiter = delimiter
self.file = StringIO()
self.writer = csv.writer(self.file, delimiter=self.delimiter, dialect='excel', quoting=csv.QUOTE_ALL)
        self.file.write('\ufeff')  # Excel needs this BOM for correct UTF-8 decoding
def writeRow(self, row):
for i, s in enumerate(row):
if isinstance(s, str):
row[i] = s.replace('"', "").replace("'","") # .replace(" "," ").replace( ";", "" ).replace( "<br />", "\n" ).replace( "<div>", "\n" ).replace( "</div>", "" )
self.writer.writerow(row)
def writeRows(self, rows):
for r in rows:
self.writeRow(r)
        # self.writer.writerows(rows)  # the native version does not support unicode, and we do some cleaning to avoid broken CSV files
def download(self,name="export.csv"):
blob = Blob.new([self.file.getvalue()], **{
"type":"application/csv;charset=utf-8;"
})
        # send the blob to the app:
        # in a web worker we can't manipulate the DOM directly
web_self.postMessage(type="download", blob=blob, filename=name)
class weblog():
@staticmethod
def info(text):
if not isinstance(text,str):
text = str(text)
web_self.postMessage(type="info",text=text)
@staticmethod
def warn(text):
if not isinstance(text,str):
text = str(text)
web_self.postMessage(type="warn", text=text)
@staticmethod
def METHOD_NAME(text):
if not isinstance(text,str):
text = str(text)
web_self.postMessage(type="error",text=text)
log = weblog() #shortcut to use log.info ...
# HELPERS
class HTTPRequest(object):
"""
Wrapper around XMLHttpRequest
"""
def __init__(self, method, url, callbackSuccess=None, callbackFailure=None, payload=None, content_type=None,
asynchronous=True):
super(HTTPRequest, self).__init__()
method = method.upper()
assert method in ["GET", "POST"]
self.method = method
self.callbackSuccess = callbackSuccess
self.callbackFailure = callbackFailure
self.hasBeenSent = False
self.payload = payload
self.content_type = content_type
self.req = jseval("new XMLHttpRequest()")
self.req.onreadystatechange = self.onReadyStateChange
self.req.open(method, url, asynchronous)
def onReadyStateChange(self, *args, **kwargs):
"""
Internal callback.
"""
if self.req.readyState == 1 and not self.hasBeenSent:
self.hasBeenSent = True # Internet Explorer calls this function twice!
if self.method == "POST" and self.content_type is not None:
self.req.setRequestHeader("Content-Type", self.content_type)
self.req.send(self.payload)
if self.req.readyState == 4:
if 200 <= self.req.status < 300:
if self.callbackSuccess:
self.callbackSuccess(self.req.responseText)
else:
if self.callbackFailure:
self.callbackFailure(self.req.responseText, self.req.status)
class SimpleNetwork(object):
def genReqStr(self, params):
boundary_str = "---" + ''.join(
[random.choice(string.ascii_lowercase + string.ascii_uppercase + string.digits) for x in range(13)])
boundary = boundary_str
res = f"Content-Type: multipart/mixed; boundary=\"{boundary}\"\r\nMIME-Version: 1.0\r\n"
res += "\r\n--" + boundary
def expand(key, value):
ret = ""
if all([x in dir(value) for x in ["name", "read"]]): # File
type = "application/octet-stream"
filename = os.path.basename(value.name).decode(sys.getfilesystemencoding())
ret += \
f"\r\nContent-Type: {type}" \
f"\r\nMIME-Version: 1.0" \
f"\r\nContent-Disposition: form-data; name=\"{key}\"; filename=\"{filename}\"\r\n\r\n"
ret += str(value.read())
ret += '\r\n--' + boundary
elif isinstance(value, list):
if any([isinstance(entry, dict) for entry in value]):
for idx, entry in enumerate(value):
ret += expand(key + "." + str(idx), entry)
else:
for entry in value:
ret += expand(key, entry)
elif isinstance(value, dict):
for key_, entry in value.items():
ret += expand(((key + ".") if key else "") + key_, entry)
else:
ret += \
"\r\nContent-Type: application/octet-stream" \
"\r\nMIME-Version: 1.0" \
f"\r\nContent-Disposition: form-data; name=\"{key}\"\r\n\r\n"
ret += str(value) if value is not None else ""
ret += '\r\n--' + boundary
return ret
for key, value in params.items():
res += expand(key, value)
res += "--\r\n"
return res, boundary
def __init__(self):
self.result = None
self.status = None
def request(self, url, params):
if params:
method = "POST"
contentType = None
if isinstance(params, dict):
multipart, boundary = self.genReqStr(params)
contentType = "multipart/form-data; boundary=" + boundary + "; charset=utf-8"
elif isinstance(params, bytes):
contentType = "application/x-www-form-urlencoded"
multipart = params
else:
multipart = params
HTTPRequest(method, url, self.onCompletion, self.onError, payload=multipart, content_type=contentType,
asynchronous=False)
else:
method = "GET"
HTTPRequest(method, url, self.onCompletion, self.onError, asynchronous=False)
return self
def onCompletion(self, text):
self.result = text
self.status = "succeeded"
def onError(self, text, code):
self.status = "failed"
self.result = text
self.code = code
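# How genReqStr flattens nested params into multipart/form-data fields (a sketch):
#
#   {"user": {"name": "Alice", "tags": ["a", "b"]}}
#   -> field "user.name" = "Alice", plus two fields "user.tags" = "a" and "b"
#   Lists of dicts get numeric suffixes: {"rel": [{"key": 1}]} -> "rel.0.key" = "1"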
| null |
1,201 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreatePhysicalConnectionRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreatePhysicalConnection','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AccessPointId(self): # String
return self.get_query_params().get('AccessPointId')
def set_AccessPointId(self, AccessPointId): # String
self.add_query_param('AccessPointId', AccessPointId)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_PortType(self): # String
return self.get_query_params().get('PortType')
def set_PortType(self, PortType): # String
self.add_query_param('PortType', PortType)
def get_CircuitCode(self): # String
return self.get_query_params().get('CircuitCode')
def set_CircuitCode(self, CircuitCode): # String
self.add_query_param('CircuitCode', CircuitCode)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_Type(self): # String
return self.get_query_params().get('Type')
def set_Type(self, Type): # String
self.add_query_param('Type', Type)
def METHOD_NAME(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_RedundantPhysicalConnectionId(self): # String
return self.get_query_params().get('RedundantPhysicalConnectionId')
def set_RedundantPhysicalConnectionId(self, RedundantPhysicalConnectionId): # String
self.add_query_param('RedundantPhysicalConnectionId', RedundantPhysicalConnectionId)
def get_PeerLocation(self): # String
return self.get_query_params().get('PeerLocation')
def set_PeerLocation(self, PeerLocation): # String
self.add_query_param('PeerLocation', PeerLocation)
def get_bandwidth(self): # Integer
return self.get_query_params().get('bandwidth')
def set_bandwidth(self, bandwidth): # Integer
self.add_query_param('bandwidth', bandwidth)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LineOperator(self): # String
return self.get_query_params().get('LineOperator')
def set_LineOperator(self, LineOperator): # String
self.add_query_param('LineOperator', LineOperator)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
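# Example (a minimal sketch, not part of the generated SDK): sending this request
# with an AcsClient; region, credentials and parameter values are placeholders.
#
#   from aliyunsdkcore.client import AcsClient
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = CreatePhysicalConnectionRequest()
#   request.set_AccessPointId('ap-cn-hangzhou-xx')
#   request.set_LineOperator('CT')
#   response = client.do_action_with_exception(request)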
| null |
1,202 |
import argparse
import numpy as np
import tarfile
import os
import tqdm
import csv
from imageio import imwrite
from nnabla.logger import logger
from nnabla.utils.data_iterator import data_iterator
from nnabla.utils.data_source import DataSource
from nnabla.utils.data_source_loader import download
class Cifar10DataSource(DataSource):
'''
    Get data directly from the CIFAR-10 dataset on the Internet (www.cs.toronto.edu).
'''
def _get_data(self, position):
image = self._images[self._indexes[position]]
label = self._labels[self._indexes[position]]
return (image, label)
def __init__(self, train=True, shuffle=False, rng=None):
super(Cifar10DataSource, self).__init__(shuffle=shuffle)
self._train = train
data_uri = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"
logger.info('Getting labeled data from {}.'.format(data_uri))
r = download(data_uri) # file object returned
with tarfile.open(fileobj=r, mode="r:gz") as fpin:
# Training data
if train:
images = []
labels = []
for member in fpin.getmembers():
if "data_batch" not in member.name:
continue
fp = fpin.extractfile(member)
data = np.load(fp, allow_pickle=True, encoding="bytes")
images.append(data[b"data"])
labels.append(data[b"labels"])
self._size = 50000
self._images = np.concatenate(
images).reshape(self._size, 3, 32, 32)
self._labels = np.concatenate(labels).reshape(-1, 1)
# Validation data
else:
for member in fpin.getmembers():
if "test_batch" not in member.name:
continue
fp = fpin.extractfile(member)
data = np.load(fp, allow_pickle=True, encoding="bytes")
images = data[b"data"]
labels = data[b"labels"]
self._size = 10000
self._images = images.reshape(self._size, 3, 32, 32)
self._labels = np.array(labels).reshape(-1, 1)
r.close()
        logger.info('Done getting labeled data from {}.'.format(data_uri))
self._size = self._labels.size
self._variables = ('x', 'y')
if rng is None:
rng = np.random.RandomState(313)
self.rng = rng
self.METHOD_NAME()
def METHOD_NAME(self):
if self._shuffle:
self._indexes = self.rng.permutation(self._size)
else:
self._indexes = np.arange(self._size)
super(Cifar10DataSource, self).METHOD_NAME()
@property
def images(self):
"""Get copy of whole data with a shape of (N, 1, H, W)."""
return self._images.copy()
@property
def labels(self):
"""Get copy of whole label with a shape of (N, 1)."""
return self._labels.copy()
def data_iterator_cifar10(batch_size,
train=True,
rng=None,
shuffle=True,
with_memory_cache=False,
with_file_cache=False):
    '''
    Provide DataIterator with :py:class:`Cifar10DataSource`
    The default value of the with_memory_cache and with_file_cache options is
    False, because :py:class:`Cifar10DataSource` is able to store all data in
    memory.
    '''
ds = Cifar10DataSource(train=train, shuffle=shuffle, rng=rng)
di = data_iterator(ds, batch_size, rng=rng,
with_memory_cache=with_memory_cache, with_file_cache=with_file_cache)
return di
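# Example (a minimal sketch): iterate over training mini-batches.
#
#   di = data_iterator_cifar10(batch_size=64, train=True)
#   images, labels = di.next()  # images: (64, 3, 32, 32), labels: (64, 1)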
def data_iterator_to_csv(csv_path, csv_file_name, data_path, data_iterator):
index = 0
csv_data = []
with data_iterator as data:
line = ['x:image', 'y:label']
csv_data.append(line)
pbar = tqdm.tqdm(total=data.size, unit='images')
initial_epoch = data.epoch
while data.epoch == initial_epoch:
d = data.next()
for i in range(len(d[0])):
label = d[1][i][0]
file_name = data_path + \
'/{}'.format(label) + '/{}.png'.format(index)
full_path = os.path.join(
csv_path, file_name.replace('/', os.path.sep))
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
os.makedirs(directory)
imwrite(full_path, d[0][i].reshape(
3, 32, 32).transpose(1, 2, 0))
csv_data.append([file_name, label])
index += 1
pbar.update(1)
pbar.close()
with open(os.path.join(csv_path, csv_file_name), 'w') as f:
writer = csv.writer(f, lineterminator='\n')
writer.writerows(csv_data)
return csv_data
def func(args):
path = args.output_dir
# Create original training set
logger.log(99, 'Downloading CIFAR-10 dataset...')
train_di = data_iterator_cifar10(50000, True, None, False)
logger.log(99, 'Creating "cifar10_training.csv"... ')
data_iterator_to_csv(
path, 'cifar10_training.csv', './training', train_di)
# Create original test set
validation_di = data_iterator_cifar10(10000, False, None, False)
logger.log(99, 'Creating "cifar10_test.csv"... ')
data_iterator_to_csv(
path, 'cifar10_test.csv', './validation', validation_di)
logger.log(99, 'Dataset creation completed successfully.')
def main():
parser = argparse.ArgumentParser(
description='CIFAR10\n\n' +
'Download CIFAR-10 dataset from dl.sony.com (original file is from https://www.cs.toronto.edu/~kriz/cifar.html).\n\n',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
'-o',
'--output-dir',
help='path to write NNC dataset CSV format (dir) default=CIFAR10',
required=True)
parser.set_defaults(func=func)
args = parser.parse_args()
args.func(args)
if __name__ == '__main__':
main()
| null |
1,203 |
#!/usr/bin/python3
#
# moOde audio player (C) 2014 Tim Curtis
# http://moodeaudio.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# Inspired by the script posted in moOde Forum by @Cyanoazimin
#
from __future__ import print_function, absolute_import
import RPi.GPIO as GPIO
import sys
import time
import datetime
import os
import subprocess
import sqlite3
# Use SoC pin numbering
GPIO.setmode(GPIO.BCM)
# Get sleep time arg
if len(sys.argv) > 1:
sleep_time = int(sys.argv[1])
else:
sleep_time = 1
# Get the configuration
db = sqlite3.connect('/var/local/www/db/moode-sqlite3.db')
db.row_factory = sqlite3.Row
db.text_factory = str
cursor = db.cursor()
# Get bounce_time
cursor.execute("SELECT value FROM cfg_gpio WHERE param='bounce_time'")
row = cursor.fetchone()
bounce_time = int(row['value'])
#print(str(datetime.datetime.now())[:19] + ' bounce_time=' + str(bounce_time))
# Configure the pins
cursor.execute("SELECT * FROM cfg_gpio")
for row in cursor:
#print(str(datetime.datetime.now())[:19] + ' row id=' + str(row['id']) + ', enabled=' + row['enabled'] + ', command=' + row['command'])
if str(row['id']) == '1' and row['enabled'] == '1':
sw_1_pin = int(row['pin'])
sw_1_cmd = row['command'].split(',')
sw_1_cmd = [x.strip() for x in sw_1_cmd]
GPIO.setup(sw_1_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def METHOD_NAME(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_1_cmd)
GPIO.add_event_detect(sw_1_pin, GPIO.RISING, callback=METHOD_NAME, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_1: pin=' +
str(sw_1_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '2' and row['enabled'] == '1':
sw_2_pin = int(row['pin'])
sw_2_cmd = row['command'].split(',')
sw_2_cmd = [x.strip() for x in sw_2_cmd]
GPIO.setup(sw_2_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_2_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_2_cmd)
GPIO.add_event_detect(sw_2_pin, GPIO.RISING, callback=sw_2_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_2: pin=' +
str(sw_2_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '3' and row['enabled'] == '1':
sw_3_pin = int(row['pin'])
sw_3_cmd = row['command'].split(',')
sw_3_cmd = [x.strip() for x in sw_3_cmd]
GPIO.setup(sw_3_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_3_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_3_cmd)
GPIO.add_event_detect(sw_3_pin, GPIO.RISING, callback=sw_3_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_3: pin=' +
str(sw_3_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '4' and row['enabled'] == '1':
sw_4_pin = int(row['pin'])
sw_4_cmd = row['command'].split(',')
sw_4_cmd = [x.strip() for x in sw_4_cmd]
GPIO.setup(sw_4_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_4_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_4_cmd)
GPIO.add_event_detect(sw_4_pin, GPIO.RISING, callback=sw_4_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_4: pin=' +
str(sw_4_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '5' and row['enabled'] == '1':
sw_5_pin = int(row['pin'])
sw_5_cmd = row['command'].split(',')
sw_5_cmd = [x.strip() for x in sw_5_cmd]
GPIO.setup(sw_5_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_5_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_5_cmd)
GPIO.add_event_detect(sw_5_pin, GPIO.RISING, callback=sw_5_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_5: pin=' +
str(sw_5_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '6' and row['enabled'] == '1':
sw_6_pin = int(row['pin'])
sw_6_cmd = row['command'].split(',')
sw_6_cmd = [x.strip() for x in sw_6_cmd]
GPIO.setup(sw_6_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_6_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_6_cmd)
GPIO.add_event_detect(sw_6_pin, GPIO.RISING, callback=sw_6_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_6: pin=' +
str(sw_6_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '7' and row['enabled'] == '1':
sw_7_pin = int(row['pin'])
sw_7_cmd = row['command'].split(',')
sw_7_cmd = [x.strip() for x in sw_7_cmd]
GPIO.setup(sw_7_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_7_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_7_cmd)
GPIO.add_event_detect(sw_7_pin, GPIO.RISING, callback=sw_7_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_7: pin=' +
str(sw_7_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
elif str(row['id']) == '8' and row['enabled'] == '1':
sw_8_pin = int(row['pin'])
sw_8_cmd = row['command'].split(',')
sw_8_cmd = [x.strip() for x in sw_8_cmd]
GPIO.setup(sw_8_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def sw_8_event(channel):
time.sleep(0.005) # edge debounce of 5 ms
# only deal with valid edges
if GPIO.input(channel) == 1:
subprocess.call(sw_8_cmd)
GPIO.add_event_detect(sw_8_pin, GPIO.RISING, callback=sw_8_event, bouncetime=bounce_time)
print(str(datetime.datetime.now())[:19] + ' sw_8: pin=' +
str(sw_8_pin) + ', enabled=' + row['enabled'] +
', bounce_time=' + str(bounce_time) + ', cmd=' + row['command'])
# Main
while True:
time.sleep(sleep_time)
| null |
1,204 |
#
# Run external commands from netlab CLI
#
import typing
import os
import subprocess
from box import Box
from . import is_dry_run
from ..utils import strings,log
def print_step(n: int, txt: str, spacing: typing.Optional[bool] = False) -> None:
if spacing:
print()
print("Step %d: %s" % (n,txt))
print("=" * 72)
def stringify(cmd : typing.Union[str,list]) -> str:
if isinstance(cmd,list):
return " ".join(cmd)
return str(cmd)
"""
run_command: Execute an external command specified as a string or a list of CLI parameters
Flags:
* check_result -- return False if the command does not produce any output
* ignore_errors -- do not print errors to the console
* return_stdout -- return the command output instead of True/False
"""
def run_command(
cmd : typing.Union[str,list],
check_result : bool = False,
ignore_errors: bool = False,
return_stdout: bool = False) -> typing.Union[bool,str]:
if log.debug_active('cli'):
print(f"Not running: {cmd}")
return True
if is_dry_run():
print(f"DRY RUN: {cmd}")
return True
if log.VERBOSE or log.debug_active('external'):
print(f"run_command executing: {cmd}")
if isinstance(cmd,str):
cmd = [ arg for arg in cmd.split(" ") if arg not in (""," ") ]
if not cmd: # Skip empty commands
return True
try:
result = subprocess.run(cmd,capture_output=check_result,check=True,text=True)
if log.debug_active('external'):
print(f'... run result: {result}')
if not check_result:
return True
if return_stdout:
return result.stdout
return result.stdout != ""
except Exception as ex:
if not log.QUIET and not ignore_errors:
print( f"Error executing {stringify(cmd)}:\n {ex}" )
return False
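# Example (a minimal sketch): capture command output instead of a True/False flag;
# the command itself is a placeholder.
#
#   version = run_command("vagrant --version", check_result=True, return_stdout=True)
#   if version:
#       print(f"found {version.strip()}")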
def test_probe(p : typing.Union[str,list,Box],quiet : bool = False) -> bool:
if isinstance(p,str):
return bool(run_command(p,check_result=True,ignore_errors=quiet))
elif isinstance(p,list):
for p_item in p:
if not test_probe(p_item,quiet):
return False
return True
elif isinstance(p,Box):
OK = bool(run_command(p.cmd,check_result=True,ignore_errors=True))
if not OK and not quiet:
log.fatal(p.err)
return OK
else:
log.fatal(f"Internal error: invalid probe specification: {p}")
return False
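# A probe can take three forms (a sketch; command strings are placeholders):
#
#   test_probe("kvm-ok")                                  # single command
#   test_probe(["kvm-ok", "virsh --version"])             # all must succeed
#   test_probe(Box({"cmd": "virsh --version",
#                   "err": "libvirt is not installed"}))  # fatal message on failure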
def set_ansible_flags(cmd : list) -> list:
if log.VERBOSE:
cmd.append("-" + "v" * log.VERBOSE)
if log.QUIET:
os.environ["ANSIBLE_STDOUT_CALLBACK"] = "selective"
return cmd
def run_probes(settings: Box, provider: str, step: int = 0) -> None:
if step:
print_step(step,f"Checking virtualization provider installation: {provider}",spacing = True)
elif log.VERBOSE:
print("Checking virtualization provider installation")
for p in settings.providers[provider].probe:
if not test_probe(p):
log.fatal("%s failed, aborting" % p)
if log.VERBOSE or step and not is_dry_run():
print(".. all tests succeeded, moving on\n")
def start_lab(settings: Box, provider: str, step: int = 2, cli_command: str = "test", exec_command: typing.Optional[str] = None) -> None:
if exec_command is None:
exec_command = settings.providers[provider].start
print_step(step,f"starting the lab -- {provider}: {exec_command}")
if not run_command(exec_command):
log.fatal(f"{exec_command} failed, aborting...",cli_command)
def deploy_configs(step : int = 3, command: str = "test", fast: typing.Optional[bool] = False) -> None:
print_step(step,"deploying initial device configurations",spacing = True)
cmd = ["netlab","initial"]
if log.VERBOSE:
cmd.append("-" + "v" * log.VERBOSE)
if os.environ.get('NETLAB_FAST_CONFIG',None) or fast:
cmd.append("--fast")
if not run_command(set_ansible_flags(cmd)):
log.fatal("netlab initial failed, aborting...",command)
def custom_configs(config : str, group: str, step : int = 4, command: str = "test") -> None:
print_step(step,"deploying custom configuration template %s for group %s" % (config,group))
cmd = ["netlab","config",config,"--limit",group]
if not run_command(set_ansible_flags(cmd)):
log.fatal("netlab config failed, aborting...",command)
def stop_lab(settings: Box, provider: str, step: int = 4, command: str = "test", exec_command: typing.Optional[str] = None) -> None:
print_step(step,f"stopping the lab: {provider}",True)
if exec_command is None:
exec_command = settings.providers[provider].stop
if not run_command(exec_command):
log.fatal(f"{exec_command} failed, aborting...",command)
"""
Get a runtime-related parameter for a tool
"""
def get_tool_runtime_param(tool: str, param: str, verbose: bool, topology: Box) -> typing.Optional[typing.Any]:
tdata = topology.defaults.tools[tool] + topology.tools[tool]
runtime = tdata.runtime or 'docker'
if not runtime in tdata:
if verbose:
print(f"... skipping {tool} tool, no {runtime} runtime configuration")
return None
tdata = tdata[runtime] + tdata
topology[tool] = tdata # Enable 'tool.param' syntax in tool commands
if not tdata[param]:
if verbose:
print(f"... skipping {tool} tool, no {runtime} {param} command")
return None
return tdata[param]
"""
Get a list of external tool commands to execute
"""
def METHOD_NAME(tool: str, cmd: str, topology: Box,verbose: bool = True) -> typing.Optional[list]:
cmds = get_tool_runtime_param(tool,cmd,verbose,topology)
if cmds is None:
return None
return cmds if isinstance(cmds,list) else [ cmds ]
"""
Check if the current topology uses docker in any way: does it have clab as primary or secondary provider?
"""
def docker_is_used(topology: Box) -> bool:
if topology.provider == 'clab':
return True
return 'clab' in topology[topology.provider].providers
#
# Execute external tool commands
#
def execute_tool_commands(cmds: list, topology: Box) -> None:
topology.sys.docker_net = ""
if docker_is_used(topology):
topology.sys.docker_net = f"--network={topology.addressing.mgmt.get('_network',None) or 'netlab_mgmt'}"
for cmd in cmds:
cmd = strings.eval_format(cmd,topology)
run_command(cmd = [ 'bash', '-c', cmd ],check_result=True)
#
# Get the "how to connect to the tool" message
#
def get_tool_message(tool: str, topology: Box) -> typing.Optional[str]:
msg = get_tool_runtime_param(tool,'message',False,topology)
if msg is None:
return None
return strings.eval_format(msg,topology)
| null |
1,205 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksas.endpoint import endpoint_data
class ModifyClientUserDefineRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Sas', '2018-12-03', 'ModifyClientUserDefineRule')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ActionType(self): # Integer
return self.get_query_params().get('ActionType')
def set_ActionType(self, ActionType): # Integer
self.add_query_param('ActionType', ActionType)
def get_NewFilePath(self): # String
return self.get_query_params().get('NewFilePath')
def set_NewFilePath(self, NewFilePath): # String
self.add_query_param('NewFilePath', NewFilePath)
def get_Type(self): # Integer
return self.get_query_params().get('Type')
def set_Type(self, Type): # Integer
self.add_query_param('Type', Type)
def get_Platform(self): # String
return self.get_query_params().get('Platform')
def set_Platform(self, Platform): # String
self.add_query_param('Platform', Platform)
def get_RegistryKey(self): # String
return self.get_query_params().get('RegistryKey')
def set_RegistryKey(self, RegistryKey): # String
self.add_query_param('RegistryKey', RegistryKey)
def get_Cmdline(self): # String
return self.get_query_params().get('Cmdline')
def set_Cmdline(self, Cmdline): # String
self.add_query_param('Cmdline', Cmdline)
def get_FilePath(self): # String
return self.get_query_params().get('FilePath')
def set_FilePath(self, FilePath): # String
self.add_query_param('FilePath', FilePath)
def get_Md5List(self): # String
return self.get_query_params().get('Md5List')
def METHOD_NAME(self, Md5List): # String
self.add_query_param('Md5List', Md5List)
def get_ParentProcPath(self): # String
return self.get_query_params().get('ParentProcPath')
def set_ParentProcPath(self, ParentProcPath): # String
self.add_query_param('ParentProcPath', ParentProcPath)
def get_Id(self): # Long
return self.get_query_params().get('Id')
def set_Id(self, Id): # Long
self.add_query_param('Id', Id)
def get_ProcPath(self): # String
return self.get_query_params().get('ProcPath')
def set_ProcPath(self, ProcPath): # String
self.add_query_param('ProcPath', ProcPath)
def get_ParentCmdline(self): # String
return self.get_query_params().get('ParentCmdline')
def set_ParentCmdline(self, ParentCmdline): # String
self.add_query_param('ParentCmdline', ParentCmdline)
def get_IP(self): # String
return self.get_query_params().get('IP')
def set_IP(self, IP): # String
self.add_query_param('IP', IP)
def get_RegistryContent(self): # String
return self.get_query_params().get('RegistryContent')
def set_RegistryContent(self, RegistryContent): # String
self.add_query_param('RegistryContent', RegistryContent)
def get_PortStr(self): # String
return self.get_query_params().get('PortStr')
def set_PortStr(self, PortStr): # String
self.add_query_param('PortStr', PortStr)
def get_Port(self): # Integer
return self.get_query_params().get('Port')
def set_Port(self, Port): # Integer
self.add_query_param('Port', Port)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
| null |
1,206 |
import base64
from collections import namedtuple
from typing import (
Any,
Dict,
)
from pcs.common import reports
from pcs.common.reports.item import ReportItem
from pcs.lib.errors import LibraryError
def create_pcmk_remote_actions(action_list):
return {
"pacemaker_remote {0}".format(action): service_cmd_format(
"pacemaker_remote", action
)
for action in action_list
}
def pcmk_authkey_format(authkey_content):
"""
Return a dict usable in the communication with a remote/put_file
authkey_content is raw authkey content
"""
return {
"data": base64.b64encode(authkey_content).decode("utf-8"),
"type": "pcmk_remote_authkey",
"rewrite_existing": True,
}
def corosync_authkey_format(authkey_content):
"""
Return a dict usable in the communication with a remote/put_file
authkey_content is raw authkey content
"""
return {
"data": base64.b64encode(authkey_content).decode("utf-8"),
"type": "corosync_authkey",
"rewrite_existing": True,
}
def pcmk_authkey_file(authkey_content):
return {"pacemaker_remote authkey": pcmk_authkey_format(authkey_content)}
def corosync_authkey_file(authkey_content):
return {"corosync authkey": corosync_authkey_format(authkey_content)}
def corosync_conf_format(corosync_conf_content):
return {
"type": "corosync_conf",
"data": corosync_conf_content,
}
def corosync_conf_file(corosync_conf_content):
return {"corosync.conf": corosync_conf_format(corosync_conf_content)}
def pcs_dr_config_format(dr_conf_content: bytes) -> Dict[str, Any]:
return {
"type": "pcs_disaster_recovery_conf",
"data": base64.b64encode(dr_conf_content).decode("utf-8"),
"rewrite_existing": True,
}
def pcs_dr_config_file(dr_conf_content: bytes) -> Dict[str, Any]:
return {"disaster-recovery config": pcs_dr_config_format(dr_conf_content)}
def pcs_settings_conf_format(content):
return {
"data": content,
"type": "pcs_settings_conf",
"rewrite_existing": True,
}
def pcs_settings_conf_file(content):
return {"pcs_settings.conf": pcs_settings_conf_format(content)}
def service_cmd_format(service, command):
"""
Return a dict usable in the communication with a remote/run_action
string service is name of requested service (eg. pacemaker_remote)
string command specifies an action on service (eg. start)
"""
return {
"type": "service_command",
"service": service,
"command": command,
}
class Result(namedtuple("Result", "code message")):
"""Wrapper over some call results"""
def unpack_items_from_response(main_response, main_key, node_label):
"""
    Check the format of main_response and return main_response[main_key].
    dict main_response has under the key 'main_key' a dict with item names as
    keys and result dicts as values. E.g.
{
"files": {
"file1": {"code": "success", "message": ""}
}
}
    string main_key is the name of the key under which the results dict is stored
string node_label is a node label for reporting an invalid format
"""
is_in_expected_format = (
isinstance(main_response, dict)
and main_key in main_response
and isinstance(main_response[main_key], dict)
)
if not is_in_expected_format:
raise LibraryError(
ReportItem.error(reports.messages.InvalidResponseFormat(node_label))
)
return main_response[main_key]
def response_items_to_result(response_items, expected_keys, node_label):
"""
    Check the format of response_items and return a dict whose values are
    transformed to Result. E.g.
{"file1": {"code": "success", "message": ""}}
->
{"file1": Result("success", "")}}
dict response_items has item name as key and dict with result as value.
    list expected_keys contains the keys expected in response_items
string node_label is a node label for reporting an invalid format
"""
if set(expected_keys) != set(response_items.keys()):
raise LibraryError(
ReportItem.error(reports.messages.InvalidResponseFormat(node_label))
)
for result in response_items.values():
if (
not isinstance(result, dict)
or "code" not in result
or "message" not in result
):
raise LibraryError(
ReportItem.error(
reports.messages.InvalidResponseFormat(node_label)
)
)
return {
file_key: Result(raw_result["code"], raw_result["message"])
for file_key, raw_result in response_items.items()
}
def METHOD_NAME(main_response, main_key, expected_keys, node_label):
"""
Validate response (from remote/put_file or remote/run_action) and transform
results from dict to Result.
    dict main_response has under the key 'main_key' a dict with item names as
    keys and result dicts as values. E.g.
{
"files": {
"file1": {"code": "success", "message": ""}
}
}
    string main_key is the name of the key under which the results dict is stored
    list expected_keys contains the keys expected in main_response[main_key]
string node_label is a node label for reporting an invalid format
"""
return response_items_to_result(
unpack_items_from_response(main_response, main_key, node_label),
expected_keys,
node_label,
)
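# Example (a minimal sketch): validating a remote/put_file response for one file.
#
#   raw = {"files": {"file1": {"code": "success", "message": ""}}}
#   results = METHOD_NAME(raw, "files", ["file1"], "node1")
#   # -> {"file1": Result(code="success", message="")}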
def get_format_result(code_message_map):
def format_result(result):
if result.code in code_message_map:
return code_message_map[result.code]
return result.message
return format_result
| null |
1,207 |
import logging
import os
import re
from opencensus.ext.azure.log_exporter import AzureLogHandler
from opencensus.trace import config_integration
from opencensus.trace.samplers import AlwaysOnSampler
from opencensus.trace.tracer import Tracer
from shared.config import VERSION
UNWANTED_LOGGERS = [
"azure.core.pipeline.policies.http_logging_policy",
"azure.eventhub._eventprocessor.event_processor",
"azure.identity.aio._credentials.managed_identity",
"azure.identity.aio._credentials.environment",
"azure.identity.aio._internal.get_token_mixin",
"azure.identity.aio._internal.decorators",
"azure.identity.aio._credentials.chained",
"azure.identity",
"msal.token_cache"
]
LOGGERS_FOR_ERRORS_ONLY = [
"uamqp",
"uamqp.authentication.cbs_auth_async",
"uamqp.async_ops.client_async",
"uamqp.async_ops.connection_async",
"uamqp.async_ops",
"uamqp.authentication",
"uamqp.c_uamqp",
"uamqp.connection",
"uamqp.receiver",
"uamqp.async_ops.session_async",
"uamqp.sender",
"uamqp.client",
"azure.servicebus.aio._base_handler_async"
]
debug = os.environ.get('DEBUG', 'False').lower() in ('true', '1')
def disable_unwanted_loggers():
"""
Disables the unwanted loggers.
"""
for logger_name in UNWANTED_LOGGERS:
logging.getLogger(logger_name).disabled = True
def telemetry_processor_callback_function(envelope):
envelope.tags['ai.cloud.role'] = 'resource_processor'
envelope.tags['ai.application.ver'] = VERSION
def initialize_logging(logging_level: int, correlation_id: str, add_console_handler: bool = False) -> logging.LoggerAdapter:
"""
Adds the Application Insights handler for the root logger and sets the given logging level.
    Creates and returns a logger adapter that integrates the correlation ID, if given, into the log messages.
Note: This should be called only once, otherwise duplicate log entries could be produced.
:param logging_level: The logging level to set e.g., logging.WARNING.
:param correlation_id: Optional. The correlation ID that is passed on to the operation_Id in App Insights.
:returns: A newly created logger adapter.
"""
logger = logging.getLogger()
# When using sessions and NEXT_AVAILABLE_SESSION we see regular exceptions which are actually expected
# See https://github.com/Azure/azure-sdk-for-python/issues/9402
# Other log entries such as 'link detach' also confuse the logs, and are expected.
# We don't want these making the logs any noisier so we raise the logging level for that logger here
# To inspect all the loggers, use -> loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
for logger_name in LOGGERS_FOR_ERRORS_ONLY:
logging.getLogger(logger_name).setLevel(logging.ERROR)
if add_console_handler:
console_formatter = logging.Formatter(fmt='%(module)-7s %(name)-7s %(process)-7s %(asctime)s %(levelname)-7s %(message)s')
console_handler = logging.StreamHandler()
console_handler.setFormatter(console_formatter)
logger.addHandler(console_handler)
try:
# picks up APPLICATIONINSIGHTS_CONNECTION_STRING automatically
azurelog_handler = AzureLogHandler()
azurelog_handler.add_telemetry_processor(telemetry_processor_callback_function)
azurelog_formatter = AzureLogFormatter()
azurelog_handler.setFormatter(azurelog_formatter)
logger.addHandler(azurelog_handler)
except ValueError as e:
logger.error(f"Failed to set Application Insights logger handler: {e}")
config_integration.trace_integrations(['logging'])
logging.basicConfig(level=logging_level, METHOD_NAME='%(asctime)s traceId=%(traceId)s spanId=%(spanId)s %(message)s')
Tracer(sampler=AlwaysOnSampler())
logger.setLevel(logging_level)
extra = None
if correlation_id:
extra = {'traceId': correlation_id}
adapter = logging.LoggerAdapter(logger, extra)
adapter.debug(f"Logger adapter initialized with extra: {extra}")
return adapter
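# Example (a minimal sketch): set up logging once at process start-up; assumes
# APPLICATIONINSIGHTS_CONNECTION_STRING is set in the environment.
#
#   logger_adapter = initialize_logging(logging.INFO, correlation_id="abc-123")
#   logger_adapter.info("resource processor started")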
def get_message_id_logger(correlation_id: str) -> logging.LoggerAdapter:
"""
Gets a logger that includes message id for easy correlation between log entries.
:param correlation_id: Optional. The correlation ID that is passed on to the operation_Id in App Insights.
:returns: A modified logger adapter (from the original initiated one).
"""
logger = logging.getLogger()
extra = None
if correlation_id:
extra = {'traceId': correlation_id}
adapter = logging.LoggerAdapter(logger, extra)
adapter.debug(f"Logger adapter now includes extra: {extra}")
return adapter
def shell_output_logger(console_output: str, prefix_item: str, logger: logging.LoggerAdapter, logging_level: int):
"""
    Logs the shell output (stdout/err); ANSI control chars are stripped by AzureLogFormatter when the entry is sent to App Insights.
"""
if not console_output:
logging.debug("shell console output is empty.")
return
console_output = console_output.strip()
if (logging_level != logging.INFO
and len(console_output) < 200
and console_output.startswith("Unable to find image '")
and console_output.endswith("' locally")):
logging.debug("Image not present locally, setting log to INFO.")
logging_level = logging.INFO
logger.log(logging_level, f"{prefix_item} {console_output}")
class AzureLogFormatter(logging.Formatter):
# 7-bit C1 ANSI sequences
ansi_escape = re.compile(r'''
\x1B # ESC
(?: # 7-bit C1 Fe (except CSI)
[@-Z\\-_]
| # or [ for CSI, followed by a control sequence
\[
[0-?]* # Parameter bytes
[ -/]* # Intermediate bytes
[@-~] # Final byte
)
''', re.VERBOSE)
MAX_MESSAGE_LENGTH = 32000
TRUNCATION_TEXT = "MESSAGE TOO LONG, TAILING..."
def METHOD_NAME(self, record):
s = super().METHOD_NAME(record)
s = AzureLogFormatter.ansi_escape.sub('', s)
# not doing this here might produce errors if we try to log empty strings.
if (s == ''):
s = "EMPTY MESSAGE!"
# azure monitor is limiting the message size.
if (len(s) > AzureLogFormatter.MAX_MESSAGE_LENGTH):
s = f"{AzureLogFormatter.TRUNCATION_TEXT}\n{s[-1 * AzureLogFormatter.MAX_MESSAGE_LENGTH:]}"
return s
| null |
1,208 |
# -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Helper classes and methods for the REST API """
import json
import flask
from flask import Response
import inginious.common.custom_yaml as yaml
from inginious.frontend.pages.utils import INGIniousPage
class APIPage(INGIniousPage):
""" Generic handler for all API pages """
def GET(self, *args, **kwargs):
""" GET request """
return self._handle_api(self.API_GET, args, kwargs)
def METHOD_NAME(self, *args, **kwargs):
""" PUT request """
return self._handle_api(self.API_PUT, args, kwargs)
def POST(self, *args, **kwargs):
""" POST request """
return self._handle_api(self.API_POST, args, kwargs)
def DELETE(self, *args, **kwargs):
""" DELETE request """
return self._handle_api(self.API_DELETE, args, kwargs)
def PATCH(self, *args, **kwargs):
""" PATCH request """
return self._handle_api(self.API_PATCH, args, kwargs)
def HEAD(self, *args, **kwargs):
""" HEAD request """
return self._handle_api(self.API_HEAD, args, kwargs)
def OPTIONS(self, *args, **kwargs):
""" OPTIONS request """
return self._handle_api(self.API_OPTIONS, args, kwargs)
def _handle_api(self, handler, handler_args, handler_kwargs):
""" Handle call to subclasses and convert the output to an appropriate value """
try:
status_code, return_value = handler(*handler_args, **handler_kwargs)
except APIError as error:
return error.send()
return _api_convert_output(status_code, return_value)
def _guess_available_methods(self):
""" Guess the method implemented by the subclass"""
available_methods = []
for m in ["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]:
self_method = getattr(type(self), "API_{}".format(m))
super_method = getattr(APIPage, "API_{}".format(m))
if self_method != super_method:
available_methods.append(m)
return available_methods
def invalid_method(self):
""" Returns 405 Invalid Method to the client """
raise APIInvalidMethod(self._guess_available_methods())
def API_GET(self, *args, **kwargs): # pylint: disable=unused-argument
""" API GET request. Should be overridden by subclasses """
self.invalid_method()
def API_PUT(self, *args, **kwargs): # pylint: disable=unused-argument
""" API PUT request. Should be overridden by subclasses """
self.invalid_method()
def API_POST(self, *args, **kwargs): # pylint: disable=unused-argument
""" API POST request. Should be overridden by subclasses """
self.invalid_method()
def API_DELETE(self, *args, **kwargs): # pylint: disable=unused-argument
""" API DELETE request. Should be overridden by subclasses """
self.invalid_method()
def API_PATCH(self, *args, **kwargs): # pylint: disable=unused-argument
""" API PATCH request. Should be overridden by subclasses """
self.invalid_method()
def API_HEAD(self, *args, **kwargs): # pylint: disable=unused-argument
""" API HEAD request. Should be overridden by subclasses """
self.invalid_method()
def API_OPTIONS(self, *args, **kwargs): # pylint: disable=unused-argument
""" API OPTIONS request. Should be overridden by subclasses """
self.invalid_method()
class APIAuthenticatedPage(APIPage):
"""
A wrapper for pages that needs authentication. Automatically checks that the client is authenticated and returns "403 Forbidden" if it's
not the case.
"""
def _handle_api(self, handler, handler_args, handler_kwargs):
return APIPage._handle_api(self, (lambda *args, **kwargs: self._verify_authentication(handler, args, kwargs)), handler_args, handler_kwargs)
def _verify_authentication(self, handler, args, kwargs):
""" Verify that the user is authenticated """
if not self.user_manager.session_logged_in():
raise APIForbidden()
return handler(*args, **kwargs)
class APIError(Exception):
""" Standard API Error """
def __init__(self, status_code, return_value):
super(APIError, self).__init__()
self.status_code = status_code
self.return_value = return_value
def send(self, response=None):
""" Send the API Exception to the client """
return _api_convert_output(self.status_code, self.return_value, response)
class APIInvalidMethod(APIError):
""" Invalid method error """
def __init__(self, methods):
APIError.__init__(self, 405, {"error": "This endpoint has no such method"})
self.methods = methods
def send(self):
response = Response()
response.headers['Allow'] = ",".join(self.methods)
return APIError.send(self, response)
class APIInvalidArguments(APIError):
""" Invalid arguments error """
def __init__(self):
APIError.__init__(self, 400, {"error": "Invalid arguments for this method"})
class APIForbidden(APIError):
""" Forbidden error """
def __init__(self, message="You are not authenticated"):
APIError.__init__(self, 403, {"error": message})
class APINotFound(APIError):
""" Not found error """
def __init__(self, message="Not found"):
APIError.__init__(self, 404, {"error": message})
def _api_convert_output(status_code, return_value, response=None):
    """ Convert the output to what the client asks for """
    if not response:
        response = Response()
    response.status_code = status_code
content_type = flask.request.environ.get('CONTENT_TYPE', 'text/json')
if "text/json" in content_type:
response.content_type = 'text/json; charset=utf-8'
response.response = [json.dumps(return_value)]
return response
if "text/html" in content_type:
response.content_type = 'text/html; charset=utf-8'
dump = yaml.dump(return_value)
response.response = ["<pre>" + dump + "</pre>"]
return response
if "text/yaml" in content_type or \
"text/x-yaml" in content_type or \
"application/yaml" in content_type or \
"application/x-yaml" in content_type:
response.content_type = 'text/yaml; charset=utf-8'
response.response = [yaml.dump(return_value)]
return response
response.content_type = 'text/json; charset=utf-8'
response.response = [json.dumps(return_value)]
return response
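# Example (a minimal sketch, not part of INGInious itself): a page implementing
# only GET; all other verbs fall through to invalid_method().
#
#   class APIHello(APIAuthenticatedPage):
#       def API_GET(self):
#           return 200, {"message": "hello"}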
| null |
1,209 |
import pytest, py
import re
def exvalue():
import sys
return sys.exc_info()[1]
def f():
return 2
def test_assert():
try:
assert f() == 3
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def METHOD_NAME():
excinfo = py.test.raises(ZeroDivisionError, """
try:
1/0
finally:
i = 42
""")
s = excinfo.exconly()
assert re.search("ZeroDivisionError:.*division", s) is not None
def test_assert_multiline_1():
try:
assert (f() ==
3)
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_multiline_2():
try:
assert (f() == (4,
3)[-1])
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 ==')
def test_in():
try:
assert "hi" in [1, 2]
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 'hi' in")
def test_is():
try:
assert 1 is 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 is 2")
def test_attrib():
class Foo(object):
b = 1
i = Foo()
try:
assert i.b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
def test_attrib_inst():
class Foo(object):
b = 1
try:
assert Foo().b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
def test_len():
l = list(range(42))
try:
assert len(l) == 100
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 42 == 100")
assert "where 42 = len([" in s
def test_assert_keyword_arg():
def f(x=3):
return False
try:
assert f(x=5)
except AssertionError:
e = exvalue()
assert "x=5" in str(e)
# These tests should both fail, but should fail nicely...
class WeirdRepr:
def __repr__(self):
return '<WeirdRepr\nsecond line>'
def bug_test_assert_repr():
v = WeirdRepr()
try:
assert v == 1
except AssertionError:
e = exvalue()
assert str(e).find('WeirdRepr') != -1
assert str(e).find('second line') != -1
assert 0
def test_assert_non_string():
try:
assert 0, ['list']
except AssertionError:
e = exvalue()
assert str(e).find("list") != -1
def test_assert_implicit_multiline():
try:
x = [1,2,3]
assert x != [1,
2, 3]
except AssertionError:
e = exvalue()
assert str(e).find('assert [1, 2, 3] !=') != -1
@py.test.mark.xfail(py.test.__version__[0] != "2",
reason="broken on modern pytest",
run=False
)
def test_assert_with_brokenrepr_arg():
class BrokenRepr:
def __repr__(self): 0 / 0
e = AssertionError(BrokenRepr())
if e.msg.find("broken __repr__") == -1:
py.test.fail("broken __repr__ not handle correctly")
def test_multiple_statements_per_line():
try:
a = 1; assert a == 2
except AssertionError:
e = exvalue()
assert "assert 1 == 2" in str(e)
def test_power():
try:
assert 2**3 == 7
except AssertionError:
e = exvalue()
assert "assert (2 ** 3) == 7" in str(e)
class TestView:
def setup_class(cls):
cls.View = py.test.importorskip("py._code._assertionold").View
def test_class_dispatch(self):
### Use a custom class hierarchy with existing instances
class Picklable(self.View):
pass
class Simple(Picklable):
__view__ = object
def pickle(self):
return repr(self.__obj__)
class Seq(Picklable):
__view__ = list, tuple, dict
def pickle(self):
return ';'.join(
[Picklable(item).pickle() for item in self.__obj__])
class Dict(Seq):
__view__ = dict
def pickle(self):
return Seq.pickle(self) + '!' + Seq(self.values()).pickle()
assert Picklable(123).pickle() == '123'
assert Picklable([1,[2,3],4]).pickle() == '1;2;3;4'
assert Picklable({1:2}).pickle() == '1!2'
def test_viewtype_class_hierarchy(self):
# Use a custom class hierarchy based on attributes of existing instances
class Operation:
"Existing class that I don't want to change."
def __init__(self, opname, *args):
self.opname = opname
self.args = args
existing = [Operation('+', 4, 5),
Operation('getitem', '', 'join'),
Operation('setattr', 'x', 'y', 3),
Operation('-', 12, 1)]
class PyOp(self.View):
def __viewkey__(self):
return self.opname
def generate(self):
return '%s(%s)' % (self.opname, ', '.join(map(repr, self.args)))
class PyBinaryOp(PyOp):
__view__ = ('+', '-', '*', '/')
def generate(self):
return '%s %s %s' % (self.args[0], self.opname, self.args[1])
codelines = [PyOp(op).generate() for op in existing]
assert codelines == ["4 + 5", "getitem('', 'join')",
"setattr('x', 'y', 3)", "12 - 1"]
def test_underscore_api():
py.code._AssertionError
py.code._reinterpret_old # used by pypy
py.code._reinterpret
def test_assert_customizable_reprcompare(monkeypatch):
util = pytest.importorskip("_pytest.assertion.util")
monkeypatch.setattr(util, '_reprcompare', lambda *args: 'hello')
try:
assert 3 == 4
except AssertionError:
e = exvalue()
s = str(e)
assert "hello" in s
def test_assert_long_source_1():
try:
assert len == [
(None, ['somet text', 'more text']),
]
except AssertionError:
e = exvalue()
s = str(e)
assert 're-run' not in s
assert 'somet text' in s
def test_assert_long_source_2():
try:
assert(len == [
(None, ['somet text', 'more text']),
])
except AssertionError:
e = exvalue()
s = str(e)
assert 're-run' not in s
assert 'somet text' in s
def test_assert_raise_alias(testdir):
testdir.makepyfile("""
import sys
EX = AssertionError
def test_hello():
raise EX("hello"
"multi"
"line")
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello*",
"*raise EX*",
"*1 failed*",
])
@py.test.mark.xfail(py.test.__version__[0] != "2",
reason="broken on modern pytest",
run=False)
def test_assert_raise_subclass():
class SomeEx(AssertionError):
def __init__(self, *args):
super(SomeEx, self).__init__()
try:
raise SomeEx("hello")
except AssertionError as e:
s = str(e)
assert 're-run' not in s
assert 'could not determine' in s
def test_assert_raises_in_nonzero_of_object_pytest_issue10():
class A(object):
def __nonzero__(self):
raise ValueError(42)
def __lt__(self, other):
return A()
def __repr__(self):
return "<MY42 object>"
def myany(x):
return True
try:
assert not(myany(A() < 0))
except AssertionError:
e = exvalue()
s = str(e)
assert "<MY42 object> < 0" in s
| null |
1,210 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class CreateReadOnlyDBInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'CreateReadOnlyDBInstance')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBInstanceStorage(self): # Integer
return self.get_query_params().get('DBInstanceStorage')
def set_DBInstanceStorage(self, DBInstanceStorage): # Integer
self.add_query_param('DBInstanceStorage', DBInstanceStorage)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_DeletionProtection(self): # Boolean
return self.get_query_params().get('DeletionProtection')
def set_DeletionProtection(self, DeletionProtection): # Boolean
self.add_query_param('DeletionProtection', DeletionProtection)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def METHOD_NAME(self): # String
return self.get_query_params().get('TargetDedicatedHostIdForMaster')
def set_TargetDedicatedHostIdForMaster(self, TargetDedicatedHostIdForMaster): # String
self.add_query_param('TargetDedicatedHostIdForMaster', TargetDedicatedHostIdForMaster)
def get_DBInstanceDescription(self): # String
return self.get_query_params().get('DBInstanceDescription')
def set_DBInstanceDescription(self, DBInstanceDescription): # String
self.add_query_param('DBInstanceDescription', DBInstanceDescription)
def get_GdnInstanceName(self): # String
return self.get_query_params().get('GdnInstanceName')
def set_GdnInstanceName(self, GdnInstanceName): # String
self.add_query_param('GdnInstanceName', GdnInstanceName)
def get_TddlBizType(self): # String
return self.get_query_params().get('TddlBizType')
def set_TddlBizType(self, TddlBizType): # String
self.add_query_param('TddlBizType', TddlBizType)
def get_Period(self): # String
return self.get_query_params().get('Period')
def set_Period(self, Period): # String
self.add_query_param('Period', Period)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_PrivateIpAddress(self): # String
return self.get_query_params().get('PrivateIpAddress')
def set_PrivateIpAddress(self, PrivateIpAddress): # String
self.add_query_param('PrivateIpAddress', PrivateIpAddress)
def get_AutoRenew(self): # String
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # String
self.add_query_param('AutoRenew', AutoRenew)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_InstanceNetworkType(self): # String
return self.get_query_params().get('InstanceNetworkType')
def set_InstanceNetworkType(self, InstanceNetworkType): # String
self.add_query_param('InstanceNetworkType', InstanceNetworkType)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_InstructionSetArch(self): # String
return self.get_query_params().get('InstructionSetArch')
def set_InstructionSetArch(self, InstructionSetArch): # String
self.add_query_param('InstructionSetArch', InstructionSetArch)
def get_TddlRegionConfig(self): # String
return self.get_query_params().get('TddlRegionConfig')
def set_TddlRegionConfig(self, TddlRegionConfig): # String
self.add_query_param('TddlRegionConfig', TddlRegionConfig)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_DBInstanceStorageType(self): # String
return self.get_query_params().get('DBInstanceStorageType')
def set_DBInstanceStorageType(self, DBInstanceStorageType): # String
self.add_query_param('DBInstanceStorageType', DBInstanceStorageType)
def get_DedicatedHostGroupId(self): # String
return self.get_query_params().get('DedicatedHostGroupId')
def set_DedicatedHostGroupId(self, DedicatedHostGroupId): # String
self.add_query_param('DedicatedHostGroupId', DedicatedHostGroupId)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_UsedTime(self): # String
return self.get_query_params().get('UsedTime')
def set_UsedTime(self, UsedTime): # String
self.add_query_param('UsedTime', UsedTime)
def get_BurstingEnabled(self): # Boolean
return self.get_query_params().get('BurstingEnabled')
def set_BurstingEnabled(self, BurstingEnabled): # Boolean
self.add_query_param('BurstingEnabled', BurstingEnabled)
def get_VPCId(self): # String
return self.get_query_params().get('VPCId')
def set_VPCId(self, VPCId): # String
self.add_query_param('VPCId', VPCId)
def get_Category(self): # String
return self.get_query_params().get('Category')
def set_Category(self, Category): # String
self.add_query_param('Category', Category)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
def get_BpeEnabled(self): # String
return self.get_query_params().get('BpeEnabled')
def set_BpeEnabled(self, BpeEnabled): # String
self.add_query_param('BpeEnabled', BpeEnabled)
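# A hedged usage sketch, not part of the generated SDK file: sending this
# request with the core client. AcsClient and do_action_with_exception() are
# standard aliyunsdkcore APIs; the credentials, region, and all instance
# values below are placeholders to replace with your own.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = CreateReadOnlyDBInstanceRequest()
    request.set_DBInstanceId("<primary-instance-id>")  # primary instance to attach to
    request.set_ZoneId("cn-hangzhou-b")
    request.set_DBInstanceClass("rds.mysql.s2.large")
    request.set_DBInstanceStorage(100)
    request.set_EngineVersion("8.0")
    request.set_PayType("Postpaid")
    response = client.do_action_with_exception(request)  # raw JSON bytes
    print(response)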
| null |
1,211 |
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
# Description:
# This tool creates the configuration file containing
# the secondary schedds
#
import copy
import os
import string
import subprocess
import sys
def usage():
print("This tool creates the 11_gwms_secondary_schedds.config file")
print("in the HTCondor's config.d directory")
print()
print("Usage:")
print(" glidecondor_createSecSched [-commonlog] [-nocreatedirs] <scheddlist>")
print("where:")
print(" -commonlog - If present, all the schedds will share a common log (default: one log per process)")
print(
" -nocreatedirs - If present, do not (re-)create the needed directories (default: do create the directories)"
)
print(" scheddlist - List of secondary schedds to put in the config file (required)")
print("Example:")
print(" glidecondor_createSecSched schedd_glideins1,schedd_glideins2")
return
def get_config_val(attr, fail_if_missing=True):
try:
p = subprocess.Popen(["condor_config_val", attr], stdout=subprocess.PIPE)
except OSError as e:
print("Count not find condor_config_val!")
print("%s\n" % e)
sys.exit(2)
rc = p.wait()
if rc != 0:
if fail_if_missing:
print("Attribute '%s' not found" % attr)
sys.exit(2)
else:
return None
res = p.communicate()
    return res[0].strip("\n")  # drop the surrounding newlines (condor_config_val prints one line)
def extract_condor_info():
global config_dir
config_dir = get_config_val("LOCAL_CONFIG_DIR")
global local_dir
local_dir = get_config_val("LOCAL_DIR")
global log_dir
log_dir = get_config_val("LOG")
global schedlog
schedlog = get_config_val("SCHEDD_LOG")
global shadowlog
shadowlog = get_config_val("SHADOW_LOG")
global shport_ad
shport_ad = get_config_val("SHARED_PORT_DAEMON_AD_FILE")
global shport_sock
shport_sock = get_config_val("DAEMON_SOCKET_DIR")
def METHOD_NAME(schedd, config_fd):
# first populate the config file
attrname = ""
for c in schedd:
if c in (string.ascii_uppercase + string.digits):
attrname += c
elif c in string.ascii_lowercase:
attrname += c.upper()
# drop all others
env_arr = []
env_arr.append(("_CONDOR_SCHEDD_NAME", "%(schedd)s"))
env_arr.append(("_CONDOR_LOCAL_DIR", "%(localdir)s/%(schedd)s"))
env_arr.append(("_CONDOR_LOCK", "%(localdir)s/%(schedd)s/lock"))
    global common_log
    if not common_log:
        # default: give every secondary schedd its own log files
        # (with -commonlog they all share the main SCHEDD_LOG/SHADOW_LOG)
        env_arr.append(("_CONDOR_SCHEDD_LOG", "%(schedlog)s.%(schedd)s"))
        env_arr.append(("_CONDOR_SHADOW_LOG", "%(shadowlog)s.%(schedd)s"))
condor_env_arr = []
for a in env_arr:
# convert in key=value pair
        # use Condor variables in the value part
condor_env_arr.append(
"%s=%s"
% (
a[0],
a[1]
% {
"schedd": schedd,
"localdir": "$(LOCAL_DIR)",
"logdir": "$(LOG)",
"schedlog": "$(SCHEDD_LOG)",
"shadowlog": "$(SHADOW_LOG)",
},
)
)
expenv_str = f"{attrname}_EXPENV = "
condor_env_str = " \\\n" + (" " * len(expenv_str)).join(condor_env_arr)
config_fd.write(
(
"# Secondary schedd %(schedd)s\n"
+ "%(attrname)s = $(SCHEDD)\n"
+ "%(attrname)s_EXPENV = %(envstr)s\n"
+ '%(attrname)s_ENVIRONMENT = "$(%(attrname)s_EXPENV) $(PRESERVE_SHPORT_EXPENV)"\n'
+ "SEC_SCHEDD_LIST = $(SEC_SCHEDD_LIST) %(attrname)s\n\n"
)
% {"attrname": attrname, "schedd": schedd, "envstr": condor_env_str}
)
global create_dirs
if create_dirs:
# then run condor_init with the modified environment
        config_fd.flush()  # let's just make sure we don't have a partial -> invalid file
global local_dir, log_dir, schedlog, shadowlog
        # we must start with the old environment
sp_env_dict = copy.deepcopy(os.environ)
# and just add the new attributes to it
for a in env_arr:
# environment is a dictionary
# use actual paths in the val part
sp_env_dict[a[0]] = a[1] % {
"schedd": schedd,
"localdir": local_dir,
"logdir": log_dir,
"schedlog": schedlog,
"shadowlog": shadowlog,
}
# then add the shport part
global shport_ad, shport_sock
sp_env_dict["_CONDOR_SHARED_PORT_DAEMON_AD_FILE"] = shport_ad
sp_env_dict["_CONDOR_DAEMON_SOCKET_DIR"] = shport_sock
try:
p = subprocess.Popen(["condor_init"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=sp_env_dict)
except OSError as e:
print("Count not find condor_init!")
print("%s" % e)
sys.exit(2)
rc = p.wait()
res = p.communicate()
if rc != 0:
print(f"condor_init failed for {schedd}: {res}")
sys.exit(2)
print("Schedd %s configured and initialized" % schedd)
else:
print("Schedd %s put in config, but not initialized" % schedd)
def create_config(fname, schedds):
try:
fd = open(fname, "w")
except OSError as e:
print("%s" % e)
sys.exit(2)
with fd:
fd.write(
"###########################################\n"
+ "# This file contains the secondary schedds\n"
+ "# Generated by glidecondor_createSecSched\n"
+ "# DO NOT modify by hand\n"
+ "###########################################\n\n"
+ "PRESERVE_SHPORT_EXPENV= _CONDOR_SHARED_PORT_DAEMON_AD_FILE=$(SHARED_PORT_DAEMON_AD_FILE) \\\n"
+ " _CONDOR_DAEMON_SOCKET_DIR=$(DAEMON_SOCKET_DIR)\n\n"
)
for s in schedds:
METHOD_NAME(s, fd)
fd.write(
"DAEMON_LIST = $(DAEMON_LIST) $(SEC_SCHEDD_LIST)\n"
+ "# we assume we are the only ones re-defining DC_DAEMON_LIST\n"
+ "DC_DAEMON_LIST = + $(SEC_SCHEDD_LIST)\n"
)
def parse_args(args):
global common_log, create_dirs, schedds
common_log = False
create_dirs = True
if len(args) < 1:
usage()
sys.exit(1)
if args[0] == "-h":
usage()
sys.exit(0)
while len(args) > 1:
if args[0] == "-commonlog":
common_log = True
elif args[0] == "-nocreatedirs":
create_dirs = False
else:
print("Unknown option %s" % args[0])
usage()
sys.exit(1)
args = args[1:]
schedd_list_str = args[0]
schedds = schedd_list_str.split(",")
def main(args):
global common_log, create_dirs, schedds
parse_args(args)
extract_condor_info()
global config_dir
conf_fname = os.path.join(config_dir, "11_gwms_secondary_schedds.config")
create_config(conf_fname, schedds)
if __name__ == "__main__":
main(sys.argv[1:])
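# For reference, a hedged sketch of the stanza the config writer above
# produces for a schedd named "schedd_glideins1" with the default per-schedd
# logs (the attribute name keeps only alphanumerics, upper-cased, so it
# becomes SCHEDDGLIDEINS1); the exact line wrapping differs slightly:
#
#     # Secondary schedd schedd_glideins1
#     SCHEDDGLIDEINS1 = $(SCHEDD)
#     SCHEDDGLIDEINS1_EXPENV = _CONDOR_SCHEDD_NAME=schedd_glideins1 \
#                              _CONDOR_LOCAL_DIR=$(LOCAL_DIR)/schedd_glideins1 \
#                              _CONDOR_LOCK=$(LOCAL_DIR)/schedd_glideins1/lock \
#                              _CONDOR_SCHEDD_LOG=$(SCHEDD_LOG).schedd_glideins1 \
#                              _CONDOR_SHADOW_LOG=$(SHADOW_LOG).schedd_glideins1
#     SCHEDDGLIDEINS1_ENVIRONMENT = "$(SCHEDDGLIDEINS1_EXPENV) $(PRESERVE_SHPORT_EXPENV)"
#     SEC_SCHEDD_LIST = $(SEC_SCHEDD_LIST) SCHEDDGLIDEINS1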
| null |
1,212 |
# Copyright 2018 Akretion (http://www.akretion.com).
# Copyright 2018 ACSONE SA/NV (<http://acsone.eu>)
# Copyright 2021 Camptocamp SA (https://www.camptocamp.com)
# @author Sébastien BEAU <[email protected]>
# @author Iván Todorovich <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, exceptions, fields, models
from odoo.fields import first
class ShopinvaderPartner(models.Model):
_inherit = "shopinvader.partner"
sale_profile_id = fields.Many2one(
"shopinvader.sale.profile",
"Sale profile",
compute="_compute_sale_profile_id",
store=True,
help="Sale profile computed, depending every fields who make the "
"fiscal position (country, zip, vat, account position,...)",
)
def _compute_role_depends(self):
return super()._compute_role_depends() + (
"backend_id.use_sale_profile",
"sale_profile_id",
)
def _get_role(self):
# Override to use the sale profile role/code when required
if self.backend_id.use_sale_profile and self.sale_profile_id:
return self.sale_profile_id.code
return super()._get_role()
@api.model
def _compute_sale_profile_id_depends(self):
backend_fields = [
"backend_id.use_sale_profile",
"backend_id.sale_profile_ids",
"backend_id.company_id",
]
# Using `_address_fields` gives us a lazy compatibility
# with modules like `base_address_city` or `base_location`
partner_address_fields = [
f"record_id.{fname}"
for fname in self.env["res.partner"]._address_fields()
if not fname.startswith("street")
]
partner_fields = [
"record_id.vat",
"record_id.property_product_pricelist",
"record_id.property_account_position_id",
]
return backend_fields + partner_address_fields + partner_fields
@api.depends(lambda self: self._compute_sale_profile_id_depends())
def _compute_sale_profile_id(self):
"""Compute sale_profile_id"""
records = self.filtered("backend_id.use_sale_profile")
for company in records.company_id:
company_records = records.filtered(lambda rec: rec.company_id == company)
company_records = company_records.with_company(company)
for rec in company_records:
rec.sale_profile_id = rec.METHOD_NAME()
# Records related to backends without use_sale_profile
(self - records).sale_profile_id = False
def _get_fiscal_position(self):
"""Get the partner's fiscal position"""
self.ensure_one()
return (
self.env["account.fiscal.position"]
.with_company(self.company_id)
.get_fiscal_position(self.record_id.id)
)
def METHOD_NAME(self):
"""Get the sale profile that matches this partner
For better performance, set the company on the recordset before
calling this method, to avoid setting it record by record here.
The best match is selected according to the following preference:
1) profiles matching both fiscal_position_ids and pricelist_id
2) profiles without pricelist_id matching fiscal_position_ids
3) profiles without fiscal_position_ids matching pricelist_id
4) profiles without fiscal_position_ids nor pricelist_id
5) fallback to the backend's default sale profile
"""
self.ensure_one()
if self.env.company != self.company_id:
self = self.with_company(self.company_id)
fposition = self._get_fiscal_position()
pricelist = self.property_product_pricelist
profiles = self.backend_id.sale_profile_ids
pricelist_empty = profiles.filtered(lambda p: not p.pricelist_id)
pricelist_match = profiles.filtered(lambda p: pricelist == p.pricelist_id)
fposition_empty = profiles.filtered(lambda p: not p.fiscal_position_ids)
fposition_match = (
profiles.filtered(lambda p: fposition in p.fiscal_position_ids)
if fposition
else profiles.browse()
)
matches = False
# Case 1)
if fposition_match and pricelist_match:
matches = fposition_match & pricelist_match
# Case 2)
if not matches and fposition_match:
matches = fposition_match & pricelist_empty
# Case 3)
if not matches and pricelist_match:
matches = pricelist_match & fposition_empty
# Case 4)
if not matches:
matches = pricelist_empty & fposition_empty
# Case 5)
if not matches:
matches = self.backend_id._get_default_profile()
if not matches:
raise exceptions.UserError(
_(
"No default sale profile found for the backend %s",
self.backend_id.name,
)
)
return first(matches.sorted())
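    # A hedged worked example of the preference order above, with hypothetical
    # profiles: given P1 (fiscal position FR + pricelist EUR), P2 (fiscal
    # position FR, no pricelist) and P3 (no fiscal position, no pricelist),
    # a partner resolving to fiscal position FR with pricelist EUR matches P1
    # (case 1); the same partner with a USD pricelist falls back to P2
    # (case 2); a partner matching neither gets P3 (case 4); and with no
    # matching profile at all, the backend default is used (case 5) or a
    # UserError is raised.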
| null |
1,213 |
from methods.regular.regular_api import *
from default.tests.test_utils import testing_setup
from shared.tests.test_utils import common_actions, data_mocking
from base64 import b64encode
from methods.discussions import discussion_list
from shared.database.discussion.discussion_relation import DiscussionRelation
from unittest.mock import patch
import flask
class TestIssueList(testing_setup.DiffgramBaseTestCase):
"""
"""
def setUp(self):
        # TODO: this test is assuming the 'my-sandbox-project' exists and some objects have been previously created.
# For future tests a mechanism of setting up and tearing down the database should be created.
        super(TestIssueList, self).setUp()
project_data = data_mocking.create_project_with_context(
{
'users': [
{'username': 'Test',
'email': '[email protected]',
'password': 'diffgram123',
}
]
},
self.session
)
self.project = project_data['project']
def test_issue_list_web(self):
# Create mock tasks
# Create mock job.
discussion = data_mocking.create_discussion(
{
'project_id': self.project.id,
'name': 'test',
'title': 'test',
},
self.session,
)
issue2 = data_mocking.create_discussion(
{
'project_id': self.project.id,
'name': 'test',
'title': 'test',
},
self.session,
)
issue3 = data_mocking.create_discussion(
{
'project_id': self.project.id,
'name': 'test',
'title': 'test',
},
self.session,
)
job = data_mocking.create_job({
'name': 'my-test-job',
'project': self.project
}, self.session)
discussion.attach_element(
session = self.session,
element = {'type': 'job', 'id': job.id}
)
issue2.attach_element(
session = self.session,
element = {'type': 'job', 'id': job.id}
)
file = data_mocking.create_file({'project_id': job.project.id, 'job_id': job.id}, self.session)
discussion.attach_element(
session = self.session,
element = {'type': 'file', 'id': file.id}
)
task = data_mocking.create_task({
'name': f'task{1}',
'job': job,
'file': file,
}, self.session)
discussion.attach_element(
session = self.session,
element = {'type': 'task', 'id': task.id}
)
regular_methods.commit_with_rollback(self.session)
rels = self.session.query(DiscussionRelation).all()
request_data = {
'project_id': self.project.id,
'task_id': task.id,
}
endpoint = f"/api/v1/project/{self.project.project_string_id}/discussions/list"
auth_api = common_actions.create_project_auth(project=job.project, session=self.session)
credentials = b64encode(f"{auth_api.client_id}:{auth_api.client_secret}".encode()).decode('utf-8')
response_with_task_id = self.client.post(
endpoint,
data=json.dumps(request_data),
headers={
'directory_id': str(job.project.directory_default_id),
'Authorization': f"Basic {credentials}"
}
)
data = response_with_task_id.json
self.assertEqual(response_with_task_id.status_code, 200)
self.assertEqual(len(data['issues']), 1)
request_data = {
'project_id': self.project.id,
'job_id': job.id,
}
response_with_job_id = self.client.post(
endpoint,
data=json.dumps(request_data),
headers={
'directory_id': str(job.project.directory_default_id),
'Authorization': f"Basic {credentials}"
}
)
data = response_with_job_id.json
        self.assertEqual(response_with_job_id.status_code, 200)
self.assertEqual(len(data['issues']), 2)
request_data = {
'project_id': self.project.id,
}
response_project = self.client.post(
endpoint,
data=json.dumps(request_data),
headers={
'directory_id': str(job.project.directory_default_id),
'Authorization': f"Basic {credentials}"
}
)
data = response_project.json
        self.assertEqual(response_project.status_code, 200)
self.assertEqual(len(data['issues']), 3)
def METHOD_NAME(self):
        return
| null |
1,214 |
from builtins import zip
__docformat__ = "restructuredtext en"
import mdp
from mdp import numx
from .svm_classifiers import _SVMClassifier, _LabelNormalizer
try:
# this is the namespace used by upstream libsvm and the Debian package
import svmutil as libsvmutil
except ImportError:
# this is the namespace if libsvm is installed via PyPI
import libsvm.svmutil as libsvmutil
class LibSVMClassifier(_SVMClassifier):
"""
The ``LibSVMClassifier`` class acts as a wrapper around the LibSVM library
for support vector machines.
    Information about the parameters can be found at
    http://www.csie.ntu.edu.tw/~cjlin/libsvm/
    The class allows the kernel and SVM type to be selected with a text string.
    :ivar parameter: Allows all other SVM parameters to be changed directly.
:ivar kernels:
Kernels which LibSVM allows:
- 'RBF' - Radial basis function kernel
- 'LINEAR' - Linear kernel
- 'POLY' - Polynomial kernel
- 'SIGMOID' - Sigmoid kernel
:ivar classifiers:
Classifiers which LibSVM allows:
- 'C_SVC'
- 'NU_SVC'
- 'ONE_CLASS'
- 'EPSILON_SVR'
- 'NU_SVR'
This node depends on ``libsvm``.
"""
# The kernels and classifiers which LibSVM allows.
kernels = ["RBF", "LINEAR", "POLY", "SIGMOID"]
classifiers = ["C_SVC", "NU_SVC", "ONE_CLASS", "EPSILON_SVR", "NU_SVR"]
def __init__(self, kernel=None, classifier=None, probability=True, params=None,
input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'LibSVMClassifier'.
:param kernel: The kernel to use. See self.kernel or
class' description for more info.
:type kernel: str
:param classifier: The type of the SVM to use. See self.classifiers or
class' description for more info.
:type classifier: str
        :param probability: Must be set to True if probabilistic
            algorithms are to be used.
:type probability: bool
:param params: A dict of parameters to be passed to the svm_parameter.
:type params: dict
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
if not params:
params = {}
# initialise the parameter and be quiet
self.parameter = libsvmutil.svm_parameter("-q")
if probability:
# allow for probability estimates
self.parameter.probability = 1
super(LibSVMClassifier, self).__init__(input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
if kernel:
self.set_kernel(kernel)
if classifier:
self.set_classifier(classifier)
# set all other parameters
for k, v in params.items():
            if k not in self.parameter._names:
# check that the name is a valid parameter
msg = "'{}' is not a valid parameter for libsvm".format(k)
raise mdp.NodeException(msg)
if hasattr(self.parameter, k):
setattr(self.parameter, k, v)
else:
msg = "'svm_parameter' has no attribute {}".format(k)
raise AttributeError(msg)
def _get_supported_dtypes(self):
"""Return the list of dtypes supported by this node."""
# Support only float64 because of external library
return ('float64',)
def set_classifier(self, classifier):
"""
Sets the classifier.
:param classifier: A string with the name of the classifier which
should be used. Possible values are in self.classifiers and
in the class' description.
:type classifier: str
:raises TypeError: If the classifier type is unknown or not supported.
"""
if classifier.upper() in self.classifiers:
self.parameter.svm_type = getattr(libsvmutil, classifier.upper())
else:
msg = "Classifier Type %s is unknown or not supported." % classifier
raise TypeError(msg)
def set_kernel(self, kernel):
"""
        Sets the kernel.
        :param kernel: A string with the name of the kernel which
            should be used. Possible values are in self.kernels and
in the class' description.
:type kernel: str
:raises TypeError: If the kernel type is unknown or not supported.
"""
if kernel.upper() in self.kernels:
self.parameter.kernel_type = getattr(libsvmutil, kernel.upper())
else:
msg = "Kernel Type %s is unknown or not supported." % kernel
raise TypeError(msg)
def METHOD_NAME(self):
super(LibSVMClassifier, self).METHOD_NAME()
self.normalizer = _LabelNormalizer(self.labels)
labels = self.normalizer.normalize(self.labels.tolist())
features = self.data
# Call svm training method.
prob = libsvmutil.svm_problem(labels, features.tolist())
# Train
self.model = libsvmutil.svm_train(prob, self.parameter)
def _label(self, x):
if isinstance(x, (list, tuple, numx.ndarray)):
y = [0] * len(x)
p_labs, p_acc, p_vals = libsvmutil.svm_predict(y, x.tolist(), self.model)
return numx.array(p_labs)
else:
msg = "Data must be a sequence of vectors"
raise mdp.NodeException(msg)
def predict_probability(self, x):
self._pre_execution_checks(x)
if isinstance(x, (list, tuple, numx.ndarray)):
return self._prob(x)
else:
return self._prob([x])
def _prob(self, x):
y = [0] * len(x)
p_labs, p_acc, p_vals = libsvmutil.svm_predict(y, x.tolist(), self.model, "-b 1")
labels = self.model.get_labels()
return [dict(list(zip(labels, ps))) for ps in p_vals]
def _train(self, x, labels):
super(LibSVMClassifier, self)._train(x, labels)
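# A hedged usage sketch, not part of the original module: it assumes MDP's
# standard ClassifierNode interface (train / stop_training / label) and uses
# made-up toy data; libsvm must be installed for it to actually run.
if __name__ == "__main__":
    node = LibSVMClassifier(kernel="RBF", classifier="C_SVC")
    x = numx.array([[0.0, 0.0], [0.1, 0.1], [1.0, 1.0], [0.9, 1.1]])
    node.train(x, [-1, -1, 1, 1])
    node.stop_training()
    # points near the two training clusters should be labeled -1 and 1
    print(node.label(numx.array([[0.05, 0.0], [1.05, 0.95]])))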
| null |
1,215 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class DescribeVSwitchesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'DescribeVSwitches','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_IsDefault(self): # Boolean
return self.get_query_params().get('IsDefault')
def set_IsDefault(self, IsDefault): # Boolean
self.add_query_param('IsDefault', IsDefault)
def get_RouteTableId(self): # String
return self.get_query_params().get('RouteTableId')
def set_RouteTableId(self, RouteTableId): # String
self.add_query_param('RouteTableId', RouteTableId)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def METHOD_NAME(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_VSwitchOwnerId(self): # Long
return self.get_query_params().get('VSwitchOwnerId')
def set_VSwitchOwnerId(self, VSwitchOwnerId): # Long
self.add_query_param('VSwitchOwnerId', VSwitchOwnerId)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_VSwitchName(self): # String
return self.get_query_params().get('VSwitchName')
def set_VSwitchName(self, VSwitchName): # String
self.add_query_param('VSwitchName', VSwitchName)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
| null |
1,216 |
#!/usr/bin/env python
""" Test Mapping langs utility functions and their use in g2p convert --check """
from unittest import TestCase, main
from g2p import make_g2p
from g2p.log import LOGGER
from g2p.mappings.langs import utils
class CheckIpaArpabetTest(TestCase):
def test_is_IPA(self):
self.assertTrue(utils.is_panphon("ijŋeːʒoːɡd͡ʒ")) # All panphon chars
self.assertTrue(utils.is_panphon("ij ij")) # tokenizes on spaces
# ASCII g is not ipa/panphon use ɡ (\u0261)
# self.assertFalse(utils.is_panphon("ga")) - tolerated because of panphon preprocessor!
# ASCII : is not ipa/panphon, use ː (\u02D0)
with self.assertLogs(LOGGER, level="WARNING"):
self.assertFalse(utils.is_panphon("ge:", display_warnings=True))
def test_is_arpabet(self):
arpabet_string = "S AH S IY EH AO N T EH"
non_arpabet_string = "sometext"
self.assertTrue(utils.is_arpabet(arpabet_string))
self.assertFalse(utils.is_arpabet(non_arpabet_string))
def test_check_arpabet(self):
transducer = make_g2p("eng-ipa", "eng-arpabet")
self.assertTrue(transducer.check(transducer("jŋeːi")))
self.assertFalse(transducer.check(transducer("gaŋi")))
self.assertTrue(transducer.check(transducer("ɡɑŋi")))
self.assertFalse(transducer.check(transducer("ñ")))
def test_check_ipa(self):
transducer = make_g2p("fra", "fra-ipa", tokenize=False)
self.assertTrue(transducer.check(transducer("ceci")))
self.assertFalse(transducer.check(transducer("ñ")))
with self.assertLogs(LOGGER, level="WARNING"):
self.assertFalse(transducer.check(transducer("ñ"), display_warnings=True))
self.assertTrue(transducer.check(transducer("ceci est un test été à")))
transducer = make_g2p("fra-ipa", "eng-ipa")
self.assertFalse(transducer.check(transducer("ñ")))
def METHOD_NAME(self):
# panphon doesn't like these directly, but our panphon proprocessor "patches" them
# because they are valid IPA phonetic constructs that panphon is a bit too picky about.
self.assertTrue(utils.is_panphon("ɻ̊j̊ oⁿk oᵐp"))
def test_check_composite_transducer(self):
transducer = make_g2p("fra", "eng-arpabet", tokenize=False)
self.assertTrue(transducer.check(transducer("ceci est un test été à")))
self.assertFalse(transducer.check(transducer("ñ")))
def test_check_tokenizing_transducer(self):
transducer = make_g2p("fra", "fra-ipa")
self.assertTrue(transducer.check(transducer("ceci est un test été à")))
self.assertFalse(transducer.check(transducer("ñ oǹ")))
self.assertTrue(
transducer.check(transducer("ceci, cela; c'est tokenizé: alors c'est bon!"))
)
self.assertFalse(
transducer.check(transducer("mais... c'est ñoñ, si du texte ne passe pas!"))
)
def test_check_tokenizing_composite_transducer(self):
transducer = make_g2p("fra", "eng-arpabet")
self.assertTrue(transducer.check(transducer("ceci est un test été à")))
self.assertFalse(transducer.check(transducer("ñ oǹ")))
self.assertTrue(
transducer.check(transducer("ceci, cela; c'est tokenizé: alors c'est bon!"))
)
self.assertFalse(
transducer.check(transducer("mais... c'est ñoñ, si du texte ne passe pas!"))
)
with self.assertLogs(LOGGER, level="WARNING"):
self.assertFalse(
transducer.check(
transducer("mais... c'est ñoñ, si du texte ne passe pas!"),
display_warnings=True,
)
)
def test_shallow_check(self):
transducer = make_g2p("win", "eng-arpabet")
# This is False, but should be True! It's False because the mapping outputs :
# instead of ː
# EJJ 2022-06-16 With #100 fixed, this check is no longer failing.
# self.assertFalse(transducer.check(transducer("uu")))
self.assertTrue(transducer.check(transducer("uu")))
self.assertTrue(transducer.check(transducer("uu"), shallow=True))
def test_check_with_equiv(self):
transducer = make_g2p("tau", "eng-arpabet")
tau_ipa = make_g2p("tau", "tau-ipa")(
"sh'oo Jign maasee' do'eent'aa shyyyh"
).output_string
self.assertTrue(utils.is_panphon(tau_ipa))
eng_ipa = make_g2p("tau", "eng-ipa")(
"sh'oo Jign maasee' do'eent'aa shyyyh"
).output_string
self.assertTrue(utils.is_panphon(eng_ipa))
eng_arpabet = make_g2p("tau", "eng-arpabet")(
"sh'oo Jign maasee' do'eent'aa shyyyh"
).output_string
self.assertTrue(utils.is_arpabet(eng_arpabet))
# LOGGER.warning(
# f"tau-ipa {tau_ipa}\neng-ipa {eng_ipa}\n eng-arpabet {eng_arpabet}"
# )
self.assertTrue(
transducer.check(transducer("sh'oo Jign maasee' do'eent'aa shyyyh"))
)
if __name__ == "__main__":
main()
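# A hedged usage sketch outside the unittest harness; make_g2p and
# .output_string are the same g2p APIs exercised in the tests above:
#
#     transducer = make_g2p("fra", "eng-arpabet")
#     result = transducer("bonjour")
#     print(result.output_string)      # the ARPABET transcription
#     print(transducer.check(result))  # True if every step produced valid output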
| null |
1,217 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbase.endpoint import endpoint_data
class CreateMultiZoneClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'HBase', '2019-01-01', 'CreateMultiZoneCluster','hbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ArchVersion(self):
return self.get_query_params().get('ArchVersion')
def set_ArchVersion(self,ArchVersion):
self.add_query_param('ArchVersion',ArchVersion)
def get_ClusterName(self):
return self.get_query_params().get('ClusterName')
def set_ClusterName(self,ClusterName):
self.add_query_param('ClusterName',ClusterName)
def get_EngineVersion(self):
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self,EngineVersion):
self.add_query_param('EngineVersion',EngineVersion)
def get_LogDiskType(self):
return self.get_query_params().get('LogDiskType')
def set_LogDiskType(self,LogDiskType):
self.add_query_param('LogDiskType',LogDiskType)
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_PrimaryVSwitchId(self):
return self.get_query_params().get('PrimaryVSwitchId')
def METHOD_NAME(self,PrimaryVSwitchId):
self.add_query_param('PrimaryVSwitchId',PrimaryVSwitchId)
def get_LogInstanceType(self):
return self.get_query_params().get('LogInstanceType')
def set_LogInstanceType(self,LogInstanceType):
self.add_query_param('LogInstanceType',LogInstanceType)
def get_AutoRenewPeriod(self):
return self.get_query_params().get('AutoRenewPeriod')
def set_AutoRenewPeriod(self,AutoRenewPeriod):
self.add_query_param('AutoRenewPeriod',AutoRenewPeriod)
def get_Period(self):
return self.get_query_params().get('Period')
def set_Period(self,Period):
self.add_query_param('Period',Period)
def get_LogNodeCount(self):
return self.get_query_params().get('LogNodeCount')
def set_LogNodeCount(self,LogNodeCount):
self.add_query_param('LogNodeCount',LogNodeCount)
def get_SecurityIPList(self):
return self.get_query_params().get('SecurityIPList')
def set_SecurityIPList(self,SecurityIPList):
self.add_query_param('SecurityIPList',SecurityIPList)
def get_PeriodUnit(self):
return self.get_query_params().get('PeriodUnit')
def set_PeriodUnit(self,PeriodUnit):
self.add_query_param('PeriodUnit',PeriodUnit)
def get_CoreDiskType(self):
return self.get_query_params().get('CoreDiskType')
def set_CoreDiskType(self,CoreDiskType):
self.add_query_param('CoreDiskType',CoreDiskType)
def get_ArbiterZoneId(self):
return self.get_query_params().get('ArbiterZoneId')
def set_ArbiterZoneId(self,ArbiterZoneId):
self.add_query_param('ArbiterZoneId',ArbiterZoneId)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_MultiZoneCombination(self):
return self.get_query_params().get('MultiZoneCombination')
def set_MultiZoneCombination(self,MultiZoneCombination):
self.add_query_param('MultiZoneCombination',MultiZoneCombination)
def get_PrimaryZoneId(self):
return self.get_query_params().get('PrimaryZoneId')
def set_PrimaryZoneId(self,PrimaryZoneId):
self.add_query_param('PrimaryZoneId',PrimaryZoneId)
def get_Engine(self):
return self.get_query_params().get('Engine')
def set_Engine(self,Engine):
self.add_query_param('Engine',Engine)
def get_StandbyVSwitchId(self):
return self.get_query_params().get('StandbyVSwitchId')
def set_StandbyVSwitchId(self,StandbyVSwitchId):
self.add_query_param('StandbyVSwitchId',StandbyVSwitchId)
def get_StandbyZoneId(self):
return self.get_query_params().get('StandbyZoneId')
def set_StandbyZoneId(self,StandbyZoneId):
self.add_query_param('StandbyZoneId',StandbyZoneId)
def get_MasterInstanceType(self):
return self.get_query_params().get('MasterInstanceType')
def set_MasterInstanceType(self,MasterInstanceType):
self.add_query_param('MasterInstanceType',MasterInstanceType)
def get_CoreNodeCount(self):
return self.get_query_params().get('CoreNodeCount')
def set_CoreNodeCount(self,CoreNodeCount):
self.add_query_param('CoreNodeCount',CoreNodeCount)
def get_LogDiskSize(self):
return self.get_query_params().get('LogDiskSize')
def set_LogDiskSize(self,LogDiskSize):
self.add_query_param('LogDiskSize',LogDiskSize)
def get_CoreInstanceType(self):
return self.get_query_params().get('CoreInstanceType')
def set_CoreInstanceType(self,CoreInstanceType):
self.add_query_param('CoreInstanceType',CoreInstanceType)
def get_CoreDiskSize(self):
return self.get_query_params().get('CoreDiskSize')
def set_CoreDiskSize(self,CoreDiskSize):
self.add_query_param('CoreDiskSize',CoreDiskSize)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_PayType(self):
return self.get_query_params().get('PayType')
def set_PayType(self,PayType):
self.add_query_param('PayType',PayType)
def get_ArbiterVSwitchId(self):
return self.get_query_params().get('ArbiterVSwitchId')
def set_ArbiterVSwitchId(self,ArbiterVSwitchId):
        self.add_query_param('ArbiterVSwitchId',ArbiterVSwitchId)
| null |
1,218 |
################################################################################
# Creme is a free/open-source Customer Relationship Management software
# Copyright (C) 2015-2023 Hybird
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from django.utils.translation import gettext_lazy as _
from creme.creme_core.apps import CremeAppConfig
class DocumentsConfig(CremeAppConfig):
default = True
name = 'creme.documents'
verbose_name = _('Documents')
dependencies = ['creme.creme_core']
def all_apps_ready(self):
from . import get_document_model, get_folder_model
self.Document = get_document_model()
self.Folder = get_folder_model()
super().all_apps_ready()
def register_entity_models(self, creme_registry):
creme_registry.register_entity_models(self.Document, self.Folder)
def register_actions(self, actions_registry):
from . import actions
actions_registry.register_instance_actions(
actions.ExploreFolderAction,
actions.DownloadAction,
)
def register_bricks(self, brick_registry):
from . import bricks
Document = self.Document
brick_registry.register_4_model(
Document, bricks.DocumentBrick,
).register(
bricks.FolderDocsBrick,
bricks.ChildFoldersBrick,
bricks.LinkedDocsBrick,
).register_hat(
Document, main_brick_cls=bricks.DocumentBarHatBrick,
)
def register_bulk_update(self, bulk_update_registry):
from .forms.folder import ParentFolderOverrider
register = bulk_update_registry.register
register(self.Folder).add_overriders(ParentFolderOverrider)
# NB: <filedata> is currently not (inner)-editable to avoid the
# overriding of the previous file without rollback possibility.
# Should we implement a file versioning system?
register(self.Document).exclude('filedata')
def register_creme_config(self, config_registry):
from . import models
register_model = config_registry.register_model
register_model(models.FolderCategory, 'category')
register_model(models.DocumentCategory, 'doc_category')
def register_custom_forms(self, cform_registry):
from . import custom_forms
cform_registry.register(
custom_forms.FOLDER_CREATION_CFORM,
custom_forms.FOLDER_EDITION_CFORM,
custom_forms.DOCUMENT_CREATION_CFORM,
custom_forms.DOCUMENT_EDITION_CFORM,
)
def register_fields_config(self, fields_config_registry):
fields_config_registry.register_models(self.Document, self.Folder)
def register_field_printers(self, field_printers_registry):
from django.db import models
from . import gui
Document = self.Document
printers = field_printers_registry.printers_for_field_type
for field in (models.ForeignKey, models.OneToOneField):
for printer in printers(type=field, tags='html*'):
printer.register(model=Document, printer=gui.print_fk_image_html)
for printer in printers(type=models.ManyToManyField, tags='html*'):
printer.register(
model=Document,
printer=gui.print_doc_summary_html,
enumerator=printer.enumerator_entity,
)
def METHOD_NAME(self, filefield_download_registry):
filefield_download_registry.register(
model=self.Document, field_name='filedata',
)
def register_icons(self, icon_registry):
icon_registry.register(
self.Document, 'images/document_%(size)s.png',
).register(
self.Folder, 'images/document_%(size)s.png',
)
def register_menu_entries(self, menu_registry):
from . import menu
menu_registry.register(
menu.DocumentsEntry, menu.DocumentCreationEntry,
menu.FoldersEntry, menu.FolderCreationEntry,
)
def register_creation_menu(self, creation_menu_registry):
creation_menu_registry.get_or_create_group(
'tools', _('Tools'), priority=100
).add_link(
'documents-create_document', self.Document, priority=10,
).add_link(
'documents-create_folder', self.Folder, priority=20,
)
def register_merge_forms(self, merge_form_registry):
from .forms import folder
merge_form_registry.register(self.Folder, folder.get_merge_form_builder)
def register_quickforms(self, quickforms_registry):
from .forms import quick
quickforms_registry.register(self.Document, quick.DocumentQuickForm)
| null |
1,219 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class DescribeDBInstancesForCloneRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'DescribeDBInstancesForClone')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ConnectionMode(self): # String
return self.get_query_params().get('ConnectionMode')
def set_ConnectionMode(self, ConnectionMode): # String
self.add_query_param('ConnectionMode', ConnectionMode)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_NodeType(self): # String
return self.get_query_params().get('NodeType')
def set_NodeType(self, NodeType): # String
self.add_query_param('NodeType', NodeType)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_SearchKey(self): # String
return self.get_query_params().get('SearchKey')
def set_SearchKey(self, SearchKey): # String
self.add_query_param('SearchKey', SearchKey)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Expired(self): # String
return self.get_query_params().get('Expired')
def set_Expired(self, Expired): # String
self.add_query_param('Expired', Expired)
def get_Engine(self): # String
return self.get_query_params().get('Engine')
def set_Engine(self, Engine): # String
self.add_query_param('Engine', Engine)
def get_CurrentInstanceId(self): # String
return self.get_query_params().get('CurrentInstanceId')
def set_CurrentInstanceId(self, CurrentInstanceId): # String
self.add_query_param('CurrentInstanceId', CurrentInstanceId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_DBInstanceStatus(self): # String
return self.get_query_params().get('DBInstanceStatus')
def set_DBInstanceStatus(self, DBInstanceStatus): # String
self.add_query_param('DBInstanceStatus', DBInstanceStatus)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_proxyId(self): # String
return self.get_query_params().get('proxyId')
def set_proxyId(self, proxyId): # String
self.add_query_param('proxyId', proxyId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBInstanceType(self): # String
return self.get_query_params().get('DBInstanceType')
def set_DBInstanceType(self, DBInstanceType): # String
self.add_query_param('DBInstanceType', DBInstanceType)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def METHOD_NAME(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
def get_InstanceNetworkType(self): # String
return self.get_query_params().get('InstanceNetworkType')
def set_InstanceNetworkType(self, InstanceNetworkType): # String
self.add_query_param('InstanceNetworkType', InstanceNetworkType)
| null |
1,220 |
import subprocess
from collections import defaultdict
import numpy as np
from chardet.universaldetector import UniversalDetector
from Orange.data import (
is_discrete_values, MISSING_VALUES, Variable,
DiscreteVariable, StringVariable, ContinuousVariable, TimeVariable,
)
from Orange.misc.collections import natural_sorted
__all__ = ["Compression", "open_compressed", "detect_encoding", "isnastr",
"guess_data_type", "sanitize_variable"]
class Compression:
"""Supported compression extensions"""
GZIP = '.gz'
BZIP2 = '.bz2'
XZ = '.xz'
all = (GZIP, BZIP2, XZ)
def open_compressed(filename, *args, _open=open, **kwargs):
"""Return seamlessly decompressed open file handle for `filename`"""
if isinstance(filename, str):
if filename.endswith(Compression.GZIP):
from gzip import open as _open
elif filename.endswith(Compression.BZIP2):
from bz2 import open as _open
elif filename.endswith(Compression.XZ):
from lzma import open as _open
return _open(filename, *args, **kwargs)
# Else already a file, just pass it through
return filename
def METHOD_NAME(filename):
"""
Detect encoding of `filename`, which can be a ``str`` filename, a
``file``-like object, or ``bytes``.
"""
# Try with Unix file utility first because it's faster (~10ms vs 100ms)
if isinstance(filename, str) and not filename.endswith(Compression.all):
try:
with subprocess.Popen(('file', '--brief', '--mime-encoding',
filename), stdout=subprocess.PIPE) as proc:
proc.wait()
if proc.returncode == 0:
encoding = proc.stdout.read().strip()
# file only supports these encodings; for others it says
# unknown-8bit or binary. So we give chardet a chance to do
# better
if encoding in (b'utf-8', b'us-ascii', b'iso-8859-1',
b'utf-7', b'utf-16le', b'utf-16be',
b'ebcdic'):
return encoding.decode('us-ascii')
except OSError:
pass # windoze
# file not available or unable to guess the encoding, have chardet do it
detector = UniversalDetector()
# We examine only first N 4kB blocks of file because chardet is really slow
MAX_BYTES = 4 * 1024 * 12
def _from_file(f):
detector.feed(f.read(MAX_BYTES))
detector.close()
return (detector.result.get('encoding')
if detector.result.get('confidence', 0) >= .85 else
'utf-8')
if isinstance(filename, str):
with open_compressed(filename, 'rb') as f:
return _from_file(f)
elif isinstance(filename, bytes):
detector.feed(filename[:MAX_BYTES])
detector.close()
return detector.result.get('encoding')
elif hasattr(filename, 'encoding'):
return filename.encoding
else: # assume file-like object that you can iter through
return _from_file(filename)
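# A hedged usage sketch for the detector above (exported as detect_encoding
# in __all__); the file names and the byte string are made-up examples:
#
#     METHOD_NAME("iris.csv")            # tries `file` first, falls back to chardet
#     METHOD_NAME("data.csv.gz")         # transparently decompressed first
#     METHOD_NAME(b"caf\xc3\xa9" * 100)  # bytes go straight to chardet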
__isnastr = np.frompyfunc(
{v for v in MISSING_VALUES if isinstance(v, str)}.__contains__, 1, 1)
# wrapper for __isnastr with proper default out dtype
def isnastr(arr, out=None):
"""
Given an (object) array of string values, return a boolean mask array
that is True where the `arr` contains one of the string constants
considered as N/A.
Parameters
----------
arr : np.ndarray
Input array of strings.
out : Optional[np.ndarray]
Optional output array of the same shape as arr
Returns
-------
mask : np.ndarray
"""
arr = np.asarray(arr)
if out is None and arr.shape != ():
out = np.empty_like(arr, dtype=bool)
return __isnastr(arr, out=out, casting="unsafe")
def guess_data_type(orig_values, namask=None):
"""
Use heuristics to guess data type.
"""
valuemap, values = None, orig_values
is_discrete = is_discrete_values(orig_values)
orig_values = np.asarray(orig_values, dtype=str)
if namask is None:
namask = isnastr(orig_values)
if is_discrete:
valuemap = natural_sorted(is_discrete)
coltype = DiscreteVariable
else:
# try to parse as float
values = np.empty_like(orig_values, dtype=float)
values[namask] = np.nan
try:
np.copyto(values, orig_values, where=~namask, casting="unsafe")
except ValueError:
values = orig_values
coltype = StringVariable
else:
coltype = ContinuousVariable
if coltype is not ContinuousVariable:
# when not continuous variable it can still be time variable even it
# was before recognized as a discrete
tvar = TimeVariable('_')
# introducing new variable prevent overwriting orig_values and values
temp_values = np.empty_like(orig_values, dtype=float)
try:
temp_values[~namask] = [
tvar.parse_exact_iso(i) for i in orig_values[~namask]]
except ValueError:
pass
else:
valuemap = None
coltype = TimeVariable
values = temp_values
return valuemap, values, coltype
def sanitize_variable(valuemap, values, orig_values, coltype, coltype_kwargs,
name=None):
assert issubclass(coltype, Variable)
def get_number_of_decimals(values):
len_ = len
ndecimals = max((len_(value) - value.find(".")
for value in values if "." in value),
default=1)
return ndecimals - 1
if issubclass(coltype, DiscreteVariable) and valuemap is not None:
coltype_kwargs.update(values=valuemap)
var = coltype.make(name, **coltype_kwargs)
if isinstance(var, DiscreteVariable):
# Map discrete data to 'ints' (or at least what passes as int around
# here)
mapping = defaultdict(
lambda: np.nan,
{val: i for i, val in enumerate(var.values)},
)
mapping[""] = np.nan
mapvalues_ = np.frompyfunc(mapping.__getitem__, 1, 1)
def mapvalues(arr):
arr = np.asarray(arr, dtype=object)
return mapvalues_(arr, out=np.empty_like(arr, dtype=float), casting="unsafe")
values = mapvalues(orig_values)
if coltype is StringVariable:
values = orig_values
# ContinuousVariable.number_of_decimals is supposed to be handled by
# ContinuousVariable.to_val. In the interest of speed, the reader bypasses
# it, so we set the number of decimals here.
# The number of decimals is increased if not set manually (in which case
# var.adjust_decimals would be 0).
if isinstance(var, ContinuousVariable) and var.adjust_decimals:
ndecimals = get_number_of_decimals(orig_values)
if var.adjust_decimals == 2 or ndecimals > var.number_of_decimals:
var.number_of_decimals = ndecimals
var.adjust_decimals = 1
if isinstance(var, TimeVariable) or coltype is TimeVariable:
# Re-parse the values because only now after coltype.make call
# above, variable var is the correct one
_var = var if isinstance(var, TimeVariable) else TimeVariable('_')
values = [_var.parse(i) for i in orig_values]
return values, var
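# A hedged usage sketch with plain-Python values; DiscreteVariable and
# ContinuousVariable are the Orange classes imported above:
#
#     valuemap, values, coltype = guess_data_type(["a", "b", "a", "b"])
#     # -> valuemap == ["a", "b"], coltype is DiscreteVariable
#
#     valuemap, values, coltype = guess_data_type(["1.5", "2.5", "?"])
#     # -> "?" counts as missing, coltype is ContinuousVariable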
| null |
1,221 |
# **************************************************************************
# *
# * Authors: Carlos Oscar S. Sorzano ([email protected])
# *
# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia , CSIC
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address '[email protected]'
# *
# **************************************************************************
from pwem.emlib.image import ImageHandler
from pyworkflow import VERSION_1_1
from pyworkflow.protocol import PointerParam, StringParam, FloatParam
from pyworkflow.protocol.constants import LEVEL_ADVANCED
from pwem.protocols import ProtAnalysis3D
from pwem.emlib import MetaData, MDL_ANGLE_ROT, MDL_ANGLE_TILT
from xmipp3.convert import readSetOfParticles
class XmippProtCreateGallery(ProtAnalysis3D):
"""
Create a gallery of projections from a volume.
This gallery of projections may help to understand the images
observed in the microscope.
"""
_label = 'create gallery'
_version = VERSION_1_1
#--------------------------- DEFINE param functions ------------------------
def METHOD_NAME(self, form):
form.addSection(label='General parameters')
form.addParam('inputVolume', PointerParam, pointerClass="Volume",
label='Input volume')
form.addParam('symmetryGroup', StringParam, default="c1",
label='Symmetry group',
                      help='See '
'https://github.com/I2PC/xmipp-portal/wiki/Symmetry '
'for a description of the symmetry groups format. '
'If no symmetry is present, give c1')
rot = form.addLine('Rotational angle',
help='Minimum, maximum and step values for '
'rotational angle range, all in degrees.')
rot.addParam('rot0', FloatParam, default=0, label='Min')
rot.addParam('rotF', FloatParam, default=360, label='Max')
rot.addParam('rotStep', FloatParam, default=5, label='Step')
tilt = form.addLine('Tilt angle',
help='In degrees. tilt=0 is a top view, '
                                'while tilt=90 is a side view')
tilt.addParam('tilt0', FloatParam, default=0, label='Min')
tilt.addParam('tiltF', FloatParam, default=180, label='Max')
tilt.addParam('tiltStep', FloatParam, default=5, label='Step')
form.addParam('maxFreq',FloatParam, default=0.25,
expertLevel=LEVEL_ADVANCED,
label='Maximum frequency', help="Normalized to 0.5")
form.addParam('shiftSigma',FloatParam, default=0.0,
expertLevel=LEVEL_ADVANCED,
label='Shift sigma', help="In pixels")
#--------------------------- INSERT steps functions ------------------------
def _insertAllSteps(self):
self._insertFunctionStep('copyInput')
self._insertFunctionStep('createGallery')
self._insertFunctionStep('createOutput')
#--------------------------- STEPS functions -------------------------------
def copyInput(self):
ImageHandler().convert(self.inputVolume.get(),
self._getTmpPath("volume.vol"))
def createGallery(self):
xdim = self.inputVolume.get().getXDim()
rotN = round((self.rotF.get()-self.rot0.get())/self.rotStep.get())
tiltN = round((self.tiltF.get()-self.tilt0.get())/self.tiltStep.get())
paramContent ="""# XMIPP_STAR_1 *
data_block1
_dimensions2D '%d %d'
_projRotRange '%f %f %d'
_projRotRandomness even
_projTiltRange '%f %f %d'
_projTiltRandomness even
_projPsiRange '0 0 1'
_projPsiRandomness even
_noiseCoord '%f 0'
""" % (xdim, xdim, self.rot0, self.rotF,rotN, self.tilt0, self.tiltF, tiltN, self.shiftSigma)
fhParam = open(self._getExtraPath("projectionParameters.xmd"), 'w')
fhParam.write(paramContent)
fhParam.close()
self.runJob("xmipp_phantom_project",
"-i %s -o %s --params %s --method fourier 2 %f --sym %s" %
(self._getTmpPath("volume.vol"),
self._getPath("images.stk"),
self._getExtraPath("projectionParameters.xmd"),
self.maxFreq, self.symmetryGroup))
def createOutput(self):
imgSetOut = self._createSetOfAverages()
imgSetOut.setSamplingRate(self.inputVolume.get().getSamplingRate())
imgSetOut.setAlignmentProj()
readSetOfParticles(self._getPath("images.xmd"), imgSetOut)
self._defineOutputs(outputReprojections=imgSetOut)
self._defineSourceRelation(self.inputVolume, imgSetOut)
#--------------------------- INFO functions --------------------------------
def _summary(self):
messages = []
messages.append("Rot.angle from %0.2f to %0.2f in steps of %0.2f" %
(self.rot0, self.rotF, self.rotStep))
messages.append("Tilt.angle from %0.2f to %0.2f in steps of %0.2f" %
(self.tilt0, self.tiltF, self.tiltStep))
return messages
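    # Illustrative note (not part of the original protocol): with the default
    # angular ranges, rot in [0, 360] step 5 and tilt in [0, 180] step 5,
    # createGallery computes rotN = round((360 - 0) / 5) = 72 and
    # tiltN = round((180 - 0) / 5) = 36, so the generated
    # projectionParameters.xmd requests an evenly spaced 72 x 36 grid of
    # projection directions.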
| null |
1,222 |
## @file
# This file is used to define a class object to describe a package
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
PackageObject
'''
##
# Import Modules
#
from Object.POM.CommonObject import CommonPropertiesObject
from Object.POM.CommonObject import IdentificationObject
from Object.POM.CommonObject import CommonHeaderObject
from Object.POM.CommonObject import BinaryHeaderObject
from Library.Misc import Sdict
## StandardIncludeFileObject
#
class StandardIncludeFileObject(CommonPropertiesObject):
def __init__(self):
CommonPropertiesObject.__init__(self)
self.IncludeFile = ''
def SetIncludeFile(self, IncludeFile):
self.IncludeFile = IncludeFile
def GetIncludeFile(self):
return self.IncludeFile
## PackageIncludeFileObject
#
class PackageIncludeFileObject(StandardIncludeFileObject):
pass
##
# PackageObject
#
class PackageObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject):
def __init__(self):
IdentificationObject.__init__(self)
CommonHeaderObject.__init__(self)
BinaryHeaderObject.__init__(self)
#
# LibraryClassObject
#
self.LibraryClassList = []
#
# FileObject
#
self.IncludePathList = []
#
# StandardIncludeFileObject
#
self.StandardIncludeFileList = []
#
# PackageIncludeFileObject
#
self.PackageIncludeFileList = []
#
# Include and Arch List, item is (IncludePath, SupArchList-List of Arch), used during install package
#
self.IncludeArchList = []
#
# ProtocolObject
#
self.ProtocolList = []
#
# PpiObject
#
self.PpiList = []
#
# GuidObject
#
self.GuidList = []
#
# (PcdObject, PcdErrorObject)
#
self.PcdList = []
#
        # {(PcdTokenSpaceGuidCName, PcdErrorNumber): PcdErrorMessageList}
#
self.PcdErrorCommentDict = {}
#
# UserExtensionObject
#
self.UserExtensionList = []
#
# MiscFileObject
#
self.MiscFileList = []
self.ModuleDict = Sdict()
#
# ClonedRecordObject
#
self.ClonedFromList = []
#
# string object
#
self.ModuleFileList = []
self.PcdChecks = []
self.UNIFlag = False
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
def GetLibraryClassList(self):
return self.LibraryClassList
def SetIncludePathList(self, IncludePathList):
self.IncludePathList = IncludePathList
def GetIncludePathList(self):
return self.IncludePathList
def SetIncludeArchList(self, IncludeArchList):
self.IncludeArchList = IncludeArchList
def GetIncludeArchList(self):
return self.IncludeArchList
def SetStandardIncludeFileList(self, StandardIncludeFileList):
self.StandardIncludeFileList = StandardIncludeFileList
def GetStandardIncludeFileList(self):
return self.StandardIncludeFileList
def SetPackageIncludeFileList(self, PackageIncludeFileList):
self.PackageIncludeFileList = PackageIncludeFileList
def GetPackageIncludeFileList(self):
return self.PackageIncludeFileList
def SetProtocolList(self, ProtocolList):
self.ProtocolList = ProtocolList
def GetProtocolList(self):
return self.ProtocolList
def SetPpiList(self, PpiList):
self.PpiList = PpiList
def GetPpiList(self):
return self.PpiList
def SetGuidList(self, GuidList):
self.GuidList = GuidList
def GetGuidList(self):
return self.GuidList
def SetPcdList(self, PcdList):
self.PcdList = PcdList
def GetPcdList(self):
return self.PcdList
def SetUserExtensionList(self, UserExtensionList):
self.UserExtensionList = UserExtensionList
def GetUserExtensionList(self):
return self.UserExtensionList
def SetMiscFileList(self, MiscFileList):
self.MiscFileList = MiscFileList
def GetMiscFileList(self):
return self.MiscFileList
def SetModuleDict(self, ModuleDict):
self.ModuleDict = ModuleDict
def GetModuleDict(self):
return self.ModuleDict
def SetClonedFromList(self, ClonedFromList):
self.ClonedFromList = ClonedFromList
def GetClonedFromList(self):
return self.ClonedFromList
def METHOD_NAME(self, ModuleFileList):
self.ModuleFileList = ModuleFileList
def GetModuleFileList(self):
return self.ModuleFileList
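# Illustrative sketch (not part of the original module): minimal use of the
# accessor API defined above.
if __name__ == '__main__':
    _pkg = PackageObject()
    _pkg.SetIncludePathList(['Include'])
    print(_pkg.GetIncludePathList())  # -> ['Include']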
| null |
1,223 |
# coding: utf-8
"""
openapi 3.0.3 sample spec
sample spec for testing openapi functionality, built from json schema tests for draft6 # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""
import unittest
import unit_test_api
from unit_test_api.components.schema.uniqueitems_false_validation import UniqueitemsFalseValidation
from unit_test_api.configurations import schema_configuration
class TestUniqueitemsFalseValidation(unittest.TestCase):
"""UniqueitemsFalseValidation unit test stubs"""
configuration = schema_configuration.SchemaConfiguration()
def test_non_unique_array_of_integers_is_valid_passes(self):
# non-unique array of integers is valid
UniqueitemsFalseValidation.validate(
[
1,
1,
],
configuration=self.configuration
)
def test_unique_array_of_objects_is_valid_passes(self):
# unique array of objects is valid
UniqueitemsFalseValidation.validate(
[
{
"foo":
"bar",
},
{
"foo":
"baz",
},
],
configuration=self.configuration
)
def test_non_unique_array_of_nested_objects_is_valid_passes(self):
# non-unique array of nested objects is valid
UniqueitemsFalseValidation.validate(
[
{
"foo":
{
"bar":
{
"baz":
True,
},
},
},
{
"foo":
{
"bar":
{
"baz":
True,
},
},
},
],
configuration=self.configuration
)
def test_non_unique_array_of_objects_is_valid_passes(self):
# non-unique array of objects is valid
UniqueitemsFalseValidation.validate(
[
{
"foo":
"bar",
},
{
"foo":
"bar",
},
],
configuration=self.configuration
)
def test_1_and_true_are_unique_passes(self):
# 1 and true are unique
UniqueitemsFalseValidation.validate(
[
1,
True,
],
configuration=self.configuration
)
def test_unique_array_of_integers_is_valid_passes(self):
# unique array of integers is valid
UniqueitemsFalseValidation.validate(
[
1,
2,
],
configuration=self.configuration
)
def test_non_unique_array_of_arrays_is_valid_passes(self):
# non-unique array of arrays is valid
UniqueitemsFalseValidation.validate(
[
[
"foo",
],
[
"foo",
],
],
configuration=self.configuration
)
def test_numbers_are_unique_if_mathematically_unequal_passes(self):
# numbers are unique if mathematically unequal
UniqueitemsFalseValidation.validate(
[
1.0,
1.0,
1,
],
configuration=self.configuration
)
def test_false_is_not_equal_to_zero_passes(self):
# false is not equal to zero
UniqueitemsFalseValidation.validate(
[
0,
False,
],
configuration=self.configuration
)
def test_unique_array_of_nested_objects_is_valid_passes(self):
# unique array of nested objects is valid
UniqueitemsFalseValidation.validate(
[
{
"foo":
{
"bar":
{
"baz":
True,
},
},
},
{
"foo":
{
"bar":
{
"baz":
False,
},
},
},
],
configuration=self.configuration
)
def test_0_and_false_are_unique_passes(self):
# 0 and false are unique
UniqueitemsFalseValidation.validate(
[
0,
False,
],
configuration=self.configuration
)
def test_unique_array_of_arrays_is_valid_passes(self):
# unique array of arrays is valid
UniqueitemsFalseValidation.validate(
[
[
"foo",
],
[
"bar",
],
],
configuration=self.configuration
)
def test_true_is_not_equal_to_one_passes(self):
# true is not equal to one
UniqueitemsFalseValidation.validate(
[
1,
True,
],
configuration=self.configuration
)
def test_non_unique_heterogeneous_types_are_valid_passes(self):
# non-unique heterogeneous types are valid
UniqueitemsFalseValidation.validate(
[
{
},
[
1,
],
True,
None,
{
},
1,
],
configuration=self.configuration
)
def METHOD_NAME(self):
# unique heterogeneous types are valid
UniqueitemsFalseValidation.validate(
[
{
},
[
1,
],
True,
None,
1,
],
configuration=self.configuration
)
if __name__ == '__main__':
unittest.main()
| null |
1,224 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkwaf_openapi.endpoint import endpoint_data
class CreateDomainRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'waf-openapi', '2019-09-10', 'CreateDomain','waf')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_IpFollowStatus(self): # Integer
return self.get_query_params().get('IpFollowStatus')
def set_IpFollowStatus(self, IpFollowStatus): # Integer
self.add_query_param('IpFollowStatus', IpFollowStatus)
def get_Keepalive(self): # Boolean
return self.get_query_params().get('Keepalive')
def set_Keepalive(self, Keepalive): # Boolean
self.add_query_param('Keepalive', Keepalive)
def get_SniHost(self): # String
return self.get_query_params().get('SniHost')
def set_SniHost(self, SniHost): # String
self.add_query_param('SniHost', SniHost)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_HttpPort(self): # String
return self.get_query_params().get('HttpPort')
def set_HttpPort(self, HttpPort): # String
self.add_query_param('HttpPort', HttpPort)
def get_Http2Port(self): # String
return self.get_query_params().get('Http2Port')
def set_Http2Port(self, Http2Port): # String
self.add_query_param('Http2Port', Http2Port)
def get_WriteTime(self): # Integer
return self.get_query_params().get('WriteTime')
def set_WriteTime(self, WriteTime): # Integer
self.add_query_param('WriteTime', WriteTime)
def get_AccessHeaderMode(self): # Integer
return self.get_query_params().get('AccessHeaderMode')
def set_AccessHeaderMode(self, AccessHeaderMode): # Integer
self.add_query_param('AccessHeaderMode', AccessHeaderMode)
def get_AccessHeaders(self): # String
return self.get_query_params().get('AccessHeaders')
def set_AccessHeaders(self, AccessHeaders): # String
self.add_query_param('AccessHeaders', AccessHeaders)
def get_KeepaliveTimeout(self): # Integer
return self.get_query_params().get('KeepaliveTimeout')
def set_KeepaliveTimeout(self, KeepaliveTimeout): # Integer
self.add_query_param('KeepaliveTimeout', KeepaliveTimeout)
def get_ClusterType(self): # Integer
return self.get_query_params().get('ClusterType')
def set_ClusterType(self, ClusterType): # Integer
self.add_query_param('ClusterType', ClusterType)
def get_HttpsRedirect(self): # Integer
return self.get_query_params().get('HttpsRedirect')
def set_HttpsRedirect(self, HttpsRedirect): # Integer
self.add_query_param('HttpsRedirect', HttpsRedirect)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_Domain(self): # String
return self.get_query_params().get('Domain')
def set_Domain(self, Domain): # String
self.add_query_param('Domain', Domain)
def get_ReadTime(self): # Integer
return self.get_query_params().get('ReadTime')
def set_ReadTime(self, ReadTime): # Integer
self.add_query_param('ReadTime', ReadTime)
def get_HttpsPort(self): # String
return self.get_query_params().get('HttpsPort')
def set_HttpsPort(self, HttpsPort): # String
self.add_query_param('HttpsPort', HttpsPort)
def get_SniStatus(self): # Integer
return self.get_query_params().get('SniStatus')
def set_SniStatus(self, SniStatus): # Integer
self.add_query_param('SniStatus', SniStatus)
def get_Retry(self): # Boolean
return self.get_query_params().get('Retry')
def set_Retry(self, Retry): # Boolean
self.add_query_param('Retry', Retry)
def get_KeepaliveRequests(self): # Integer
return self.get_query_params().get('KeepaliveRequests')
def set_KeepaliveRequests(self, KeepaliveRequests): # Integer
self.add_query_param('KeepaliveRequests', KeepaliveRequests)
def get_AccessType(self): # String
return self.get_query_params().get('AccessType')
def set_AccessType(self, AccessType): # String
self.add_query_param('AccessType', AccessType)
def get_LogHeaders(self): # String
return self.get_query_params().get('LogHeaders')
def set_LogHeaders(self, LogHeaders): # String
self.add_query_param('LogHeaders', LogHeaders)
def get_ConnectionTime(self): # Integer
return self.get_query_params().get('ConnectionTime')
def set_ConnectionTime(self, ConnectionTime): # Integer
self.add_query_param('ConnectionTime', ConnectionTime)
def get_CloudNativeInstances(self): # String
return self.get_query_params().get('CloudNativeInstances')
def set_CloudNativeInstances(self, CloudNativeInstances): # String
self.add_query_param('CloudNativeInstances', CloudNativeInstances)
def get_SourceIps(self): # String
return self.get_query_params().get('SourceIps')
def set_SourceIps(self, SourceIps): # String
self.add_query_param('SourceIps', SourceIps)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('IsAccessProduct')
def set_IsAccessProduct(self, IsAccessProduct): # Integer
self.add_query_param('IsAccessProduct', IsAccessProduct)
def get_LoadBalancing(self): # Integer
return self.get_query_params().get('LoadBalancing')
def set_LoadBalancing(self, LoadBalancing): # Integer
self.add_query_param('LoadBalancing', LoadBalancing)
def get_HttpToUserIp(self): # Integer
return self.get_query_params().get('HttpToUserIp')
def set_HttpToUserIp(self, HttpToUserIp): # Integer
self.add_query_param('HttpToUserIp', HttpToUserIp)
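# Illustrative sketch (not part of the original module): how a caller would
# typically populate this request; the instance id and domain below are
# hypothetical placeholders.
if __name__ == '__main__':
    _req = CreateDomainRequest()
    _req.set_InstanceId('waf-instance-id')
    _req.set_Domain('example.com')
    _req.set_SourceIps('["1.2.3.4"]')
    print(_req.get_query_params())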
| null |
1,225 |
# @file BmpCheck.py
# Plugin to support checking BMP's included in the FDF for proper usage
#
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
###
import logging
import os
import time
try:
from edk2toollib.uefi.edk2.path_utilities import Edk2Path
from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
from edk2toollib.uefi.edk2.parsers.fdf_parser import FdfParser
from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
from edk2toollib.uefi import bmp_object
except Exception:
pass
def timing(f):
def wrap(*args):
time1 = time.time()
ret = f(*args)
time2 = time.time()
logging.debug('{:s} function took {:.3f} ms'.format(f.__name__, (time2-time1)*1000.0))
return ret
return wrap
# the tests that we run on the BMP object
class UefiBmpSupportTests(object):
def __init__(self, BmpObject, max_width=0, max_height=0):
self.Bmp = BmpObject
self.logger = logging.getLogger(__name__)
self.max_width = max_width
self.max_height = max_height
def Test1(self):
self.logger.info("Test1: Pixel Data Size in file matches computed Size of Pixel data")
#test1
DataSizePerLine = ((self.Bmp.PixelWidth * self.Bmp.BitPerPixel + 31) >> 3) & (~0x3)
        DataSize2 = ((self.Bmp.PixelWidth * self.Bmp.BitPerPixel + 31) // 32) * 4
self.logger.debug("DataSize2 = {}".format(DataSize2))
self.logger.debug(" DataSizePerLine: {}".format(DataSizePerLine))
RawDataSize = self.Bmp.PixelHeight * DataSizePerLine
self.logger.debug(" RawDataSize: 0x%X" % RawDataSize)
ComputeSize = (self.Bmp.Size - self.Bmp.ImageOffset)
self.logger.debug(" File Calculated Data Size: 0x%X" % ComputeSize)
if(ComputeSize != RawDataSize):
self.logger.error(" BMP Test1 - Fail")
return 1
else:
self.logger.info(" BMP Test1 - Pass")
return 0
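    # Illustrative note (not part of the original plugin): for a 40 x 30 image
    # at 24 bits per pixel, DataSizePerLine = ((40 * 24 + 31) >> 3) & (~0x3)
    # = (991 >> 3) & (~0x3) = 123 & (~0x3) = 120 bytes per 4-byte-aligned row,
    # so RawDataSize = 30 * 120 = 3600 bytes.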
def Test2(self):
self.logger.info(" BMP Test2: File Header and Img Header as expected")
#test2
        if self.Bmp.CharB != b'B' and self.Bmp.CharB != 'B':
self.logger.error("Logo check - B header failed {}".format(self.Bmp.CharB))
return 1
if self.Bmp.CharM != b'M' and self.Bmp.CharM != 'M':
self.logger.error("Logo check - M header failed {}".format(self.Bmp.CharM))
return 1
self.logger.info(" Test2 - Pass")
return 0
def Test3(self):
        if self.max_width > 0 and self.Bmp.PixelWidth > self.max_width:
            self.logger.critical("Image is too wide: {} > max {}".format(self.Bmp.PixelWidth, self.max_width))
            return 1
        if self.max_height > 0 and self.Bmp.PixelHeight > self.max_height:
            self.logger.critical("Image is too tall: {} > max {}".format(self.Bmp.PixelHeight, self.max_height))
            return 1
return 0
class BmpCheckPlugin(IUefiBuildPlugin):
def __init__(self):
self.logger = logging.getLogger(__name__)
@staticmethod
def METHOD_NAME(BmpFilePath, max_width=0, max_height=0):
if not os.path.isfile(BmpFilePath):
return 1
bmp = open(BmpFilePath, "rb")
BmpObj = bmp_object.BmpObject(bmp)
bmp.close()
#run tests
        Tests = UefiBmpSupportTests(BmpObj, max_width=max_width, max_height=max_height)
ret = Tests.Test1()
ret += Tests.Test2()
ret += Tests.Test3()
return ret
@timing
def do_pre_build(self, thebuilder):
try:
error_count = 0
'''
# this scans the whole build directory for bmp's
bmp_search_path = os.path.join(thebuilder.ws,"**","*.bmp");
for found_item in glob.iglob(bmp_search_path, recursive=True):
if CheckBmp(found_item):
logging.error("{} failed image check".format(found_item))
error_count += 1
return error_count
'''
fp = FdfParser()
dp = DscParser()
edk2 = thebuilder.edk2path
ActiveDsc = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
thebuilder.env.GetValue("ACTIVE_PLATFORM"))
ActiveFdf = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
thebuilder.env.GetValue("FLASH_DEFINITION"))
if ActiveFdf is None:
self.logger.info("No FDF found- BMP check skipped")
return 0
# parse the DSC and the FDF
dp.SetEdk2Path(edk2)
dp.SetInputVars(thebuilder.env.GetAllBuildKeyValues()).ParseFile(ActiveDsc) # parse the DSC for build vars
fp.SetEdk2Path(edk2)
fp.SetInputVars(dp.LocalVars).ParseFile(ActiveFdf) # give FDF parser the vars from DSC
# for each FV section in the DSC
for FV_name in fp.FVs:
FV_files = fp.FVs[FV_name]["Files"]
# now look for images in each file of this FV
for fv_file_name in FV_files:
fv_file = FV_files[fv_file_name]
if fv_file["type"].upper() != 'FREEFORM':
continue
fv_file_raw = fv_file['RAW']
fv_file_raw_list = []
if isinstance(fv_file_raw, list):
fv_file_raw_list = fv_file_raw
else:
fv_file_raw_list.append(fv_file_raw)
# for each file that is RAW type
for fv_file_raw_item in fv_file_raw_list:
# check if it ends with a bmp
if fv_file_raw_item.lower().endswith(".bmp"):
logging.debug(fv_file_raw_item)
BmpFilePath = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(fv_file_raw_item)
logging.debug(BmpFilePath)
if BmpCheckPlugin.METHOD_NAME(BmpFilePath): # do the check
self.logger.error("{} failed image check".format(fv_file_raw_item))
error_count += 1
return error_count
        except Exception:
self.logger.warning(
"Unable to read the FDF. Please update your Edk2-Pytools-* Packages")
return 0
| null |
1,226 |
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""QuALITY dataset."""
import dataclasses
import json
import numpy as np
import tensorflow_datasets.public_api as tfds
_DOWNLOAD_URL = (
'https://github.com/nyu-mll/quality/raw/main/data/QuALITY.v0.9.zip'
)
# Fields that are straight text copies from raw example to processed example.
_ONE2ONE_FIELDS = (
'article',
'article_id',
'set_unique_id',
'writer_id',
'source',
'title',
'topic',
'url',
)
@dataclasses.dataclass
class QualityConfig(tfds.core.BuilderConfig):
stripped: bool = False
class Builder(tfds.core.GeneratorBasedBuilder):
"""DatasetBuilder for quality dataset."""
VERSION = tfds.core.Version('1.0.0')
RELEASE_NOTES = {
'1.0.0': 'Initial release.',
}
BUILDER_CONFIGS = [
QualityConfig(
name='raw', description='Raw with HTML.', stripped=False
), # default
QualityConfig(
name='stripped', description='Stripped of HTML.', stripped=True
),
]
def METHOD_NAME(self) -> tfds.core.DatasetInfo:
"""Returns the dataset metadata."""
# Mirror format of RACE dataset as much as possible.
features_dict = {
'article': tfds.features.Text(),
# The sequence lengths of Sequence fields should match.
'questions': tfds.features.Sequence(tfds.features.Text()),
'question_ids': tfds.features.Sequence(tfds.features.Text()),
# 4 options per question, similar to RACE
'options': tfds.features.Sequence(
tfds.features.Sequence(tfds.features.Text())
),
'gold_labels': tfds.features.Sequence(np.int32), # 0, 1, 2, 3
'writer_labels': tfds.features.Sequence(np.int32), # 0, 1, 2, 3
'difficults': tfds.features.Sequence(np.bool_),
}
features_dict.update({k: tfds.features.Text() for k in _ONE2ONE_FIELDS})
return self.dataset_info_from_configs(
# Note: some fields are left out.
features=tfds.features.FeaturesDict(features_dict),
supervised_keys=None,
homepage='https://github.com/nyu-mll/quality',
)
def _split_generators(self, dl_manager: tfds.download.DownloadManager):
"""Returns SplitGenerators."""
path = dl_manager.download_and_extract(_DOWNLOAD_URL)
if self.builder_config.stripped:
return {
'train': self._generate_examples(
path / 'QuALITY.v0.9.htmlstripped.train', 'train'
),
'test': self._generate_examples(
path / 'QuALITY.v0.9.htmlstripped.test', 'test'
),
'dev': self._generate_examples(
path / 'QuALITY.v0.9.htmlstripped.dev', 'dev'
),
}
else:
return {
'train': self._generate_examples(
path / 'QuALITY.v0.9.train', 'train'
),
'test': self._generate_examples(path / 'QuALITY.v0.9.test', 'test'),
'dev': self._generate_examples(path / 'QuALITY.v0.9.dev', 'dev'),
}
def _generate_examples(self, path, split: str):
"""Yields examples."""
for line in path.open():
j = json.loads(line)
fields = {k: j[k] for k in _ONE2ONE_FIELDS}
fields.update({
'questions': [q['question'] for q in j['questions']],
'question_ids': [q['question_unique_id'] for q in j['questions']],
'difficults': [q['difficult'] for q in j['questions']],
'options': [q['options'] for q in j['questions']],
})
if split in ('train', 'dev'):
fields.update({
'gold_labels': [q['gold_label'] for q in j['questions']],
'writer_labels': [q['writer_label'] for q in j['questions']],
})
else:
fields.update({
'gold_labels': [],
'writer_labels': [],
})
yield j['set_unique_id'], fields
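# Illustrative sketch (not part of the original module): one way to load this
# dataset once it is registered with TFDS; the registered name 'quality' is an
# assumption derived from this module's location, not confirmed by the source.
if __name__ == '__main__':
    ds = tfds.load('quality/raw', split='train')
    for ex in ds.take(1):
        print(ex['title'])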
| null |
1,227 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class SetLoadBalancerHTTPSListenerAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ens', '2017-11-10', 'SetLoadBalancerHTTPSListenerAttribute','ens')
self.set_method('POST')
def get_HealthCheckTimeout(self): # Integer
return self.get_query_params().get('HealthCheckTimeout')
def set_HealthCheckTimeout(self, HealthCheckTimeout): # Integer
self.add_query_param('HealthCheckTimeout', HealthCheckTimeout)
def get_HealthCheckURI(self): # String
return self.get_query_params().get('HealthCheckURI')
def set_HealthCheckURI(self, HealthCheckURI): # String
self.add_query_param('HealthCheckURI', HealthCheckURI)
def get_HealthCheck(self): # String
return self.get_query_params().get('HealthCheck')
def set_HealthCheck(self, HealthCheck): # String
self.add_query_param('HealthCheck', HealthCheck)
def get_HealthCheckMethod(self): # String
return self.get_query_params().get('HealthCheckMethod')
def set_HealthCheckMethod(self, HealthCheckMethod): # String
self.add_query_param('HealthCheckMethod', HealthCheckMethod)
def get_HealthCheckDomain(self): # String
return self.get_query_params().get('HealthCheckDomain')
def set_HealthCheckDomain(self, HealthCheckDomain): # String
self.add_query_param('HealthCheckDomain', HealthCheckDomain)
def get_RequestTimeout(self): # Integer
return self.get_query_params().get('RequestTimeout')
def set_RequestTimeout(self, RequestTimeout): # Integer
self.add_query_param('RequestTimeout', RequestTimeout)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def METHOD_NAME(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ServerCertificateId(self): # String
return self.get_query_params().get('ServerCertificateId')
def set_ServerCertificateId(self, ServerCertificateId): # String
self.add_query_param('ServerCertificateId', ServerCertificateId)
def get_IdleTimeout(self): # Integer
return self.get_query_params().get('IdleTimeout')
def set_IdleTimeout(self, IdleTimeout): # Integer
self.add_query_param('IdleTimeout', IdleTimeout)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)
def get_HealthCheckHttpCode(self): # String
return self.get_query_params().get('HealthCheckHttpCode')
def set_HealthCheckHttpCode(self, HealthCheckHttpCode): # String
self.add_query_param('HealthCheckHttpCode', HealthCheckHttpCode)
| null |
1,228 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkoos.endpoint import endpoint_data
class ListTaskExecutionsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'oos', '2019-06-01', 'ListTaskExecutions','oos')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_TaskName(self): # String
return self.get_query_params().get('TaskName')
def set_TaskName(self, TaskName): # String
self.add_query_param('TaskName', TaskName)
def get_IncludeChildTaskExecution(self): # Boolean
return self.get_query_params().get('IncludeChildTaskExecution')
def set_IncludeChildTaskExecution(self, IncludeChildTaskExecution): # Boolean
self.add_query_param('IncludeChildTaskExecution', IncludeChildTaskExecution)
def get_ExecutionId(self): # String
return self.get_query_params().get('ExecutionId')
def set_ExecutionId(self, ExecutionId): # String
self.add_query_param('ExecutionId', ExecutionId)
def get_ParentTaskExecutionId(self): # String
return self.get_query_params().get('ParentTaskExecutionId')
def set_ParentTaskExecutionId(self, ParentTaskExecutionId): # String
self.add_query_param('ParentTaskExecutionId', ParentTaskExecutionId)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_EndDateBefore(self): # String
return self.get_query_params().get('EndDateBefore')
def set_EndDateBefore(self, EndDateBefore): # String
self.add_query_param('EndDateBefore', EndDateBefore)
def get_SortOrder(self): # String
return self.get_query_params().get('SortOrder')
def set_SortOrder(self, SortOrder): # String
self.add_query_param('SortOrder', SortOrder)
def get_StartDateAfter(self): # String
return self.get_query_params().get('StartDateAfter')
def set_StartDateAfter(self, StartDateAfter): # String
self.add_query_param('StartDateAfter', StartDateAfter)
def get_StartDateBefore(self): # String
return self.get_query_params().get('StartDateBefore')
def set_StartDateBefore(self, StartDateBefore): # String
self.add_query_param('StartDateBefore', StartDateBefore)
def get_EndDateAfter(self): # String
return self.get_query_params().get('EndDateAfter')
def set_EndDateAfter(self, EndDateAfter): # String
self.add_query_param('EndDateAfter', EndDateAfter)
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
def get_TaskExecutionId(self): # String
return self.get_query_params().get('TaskExecutionId')
def set_TaskExecutionId(self, TaskExecutionId): # String
self.add_query_param('TaskExecutionId', TaskExecutionId)
def METHOD_NAME(self): # String
return self.get_query_params().get('SortField')
def set_SortField(self, SortField): # String
self.add_query_param('SortField', SortField)
def get_TaskAction(self): # String
return self.get_query_params().get('TaskAction')
def set_TaskAction(self, TaskAction): # String
self.add_query_param('TaskAction', TaskAction)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status)
| null |
1,229 |
# coding: utf8
"""
This file contains a set of functional tests designed to check the correct execution of the pipeline and the
different functions available in Clinica
"""
import warnings
from os import PathLike
from pathlib import Path
from test.nonregression.testing_tools import compare_folders
import pytest
warnings.filterwarnings("ignore")
@pytest.fixture(
params=[
# TODO: Update NIFD reference dataset.
"Nifd2Bids",
"Oasis2Bids",
"Oasis3ToBids",
"Adni2Bids",
"Aibl2Bids",
"HabsToBids",
"UkbToBids",
"GenfiToBids",
]
)
def test_name(request):
return request.param
def run_nifd2bids(input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike) -> None:
from pathlib import PurePath
from tempfile import TemporaryDirectory
from clinica.iotools.converters.nifd_to_bids.nifd_to_bids import convert_images
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Act
_ = convert_images(
path_to_clinical=input_dir / "clinical_data",
path_to_dataset=input_dir / "unorganized",
bids_dir=output_dir,
)
# Assert
with TemporaryDirectory() as td:
compare_folders(output_dir, ref_dir, td)
def run_oasis2bids(
input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike
) -> None:
from pathlib import PurePath
from clinica.iotools.converters.oasis_to_bids.oasis_to_bids import OasisToBids
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Arrange
clinical_data_directory = input_dir / "clinical_data"
# Act
oasis_to_bids = OasisToBids()
oasis_to_bids.convert_images(input_dir / "unorganized", output_dir / "bids")
oasis_to_bids.convert_clinical_data(clinical_data_directory, output_dir / "bids")
# Assert
compare_folders(output_dir / "bids", ref_dir / "bids", output_dir)
def run_oasis3tobids(
input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike
) -> None:
from pathlib import PurePath
from clinica.iotools.converters.oasis3_to_bids.oasis3_to_bids import convert_images
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Arrange
clinical_data_directory = input_dir / "clinical_data"
# Act
convert_images(
input_dir / "unorganized", output_dir / "bids", clinical_data_directory
)
# Assert
compare_folders(output_dir / "bids", ref_dir / "bids", output_dir)
def run_adni2bids(input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike) -> None:
from pathlib import PurePath
from clinica.iotools.converters.adni_to_bids.adni_to_bids import AdniToBids
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Arrange
clinical_data_directory = input_dir / "clinical_data"
xml_directory = input_dir / "xml_metadata"
dataset_directory = input_dir / "unorganized_data"
subjects_list = input_dir / "subjects.txt"
modalities = ["T1", "PET_FDG", "PET_AMYLOID", "PET_TAU", "DWI", "FLAIR", "fMRI"]
# Act
adni_to_bids = AdniToBids()
adni_to_bids.check_adni_dependencies()
adni_to_bids.convert_images(
dataset_directory,
clinical_data_directory,
output_dir / "bids",
subjects_list,
modalities,
)
adni_to_bids.convert_clinical_data(
clinical_data_directory,
output_dir / "bids",
xml_path=xml_directory,
)
# Assert
compare_folders(output_dir / "bids", ref_dir / "bids", output_dir)
def run_aibl2bids(input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike) -> None:
from pathlib import PurePath
from clinica.iotools.converters.aibl_to_bids.aibl_to_bids import (
convert_clinical_data,
convert_images,
)
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Arrange
clinical_data_directory = input_dir / "Data_extract_3.2.5"
dataset_directory = input_dir / "unorganized_data"
# Act
convert_images(
dataset_directory,
clinical_data_directory,
output_dir / "bids",
)
convert_clinical_data(output_dir / "bids", clinical_data_directory)
# Assert
compare_folders(output_dir / "bids", ref_dir / "bids", output_dir)
def run_habs_to_bids(
input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike
) -> None:
from click.testing import CliRunner
from clinica.iotools.converters.habs_to_bids.habs_to_bids_cli import cli
runner = CliRunner()
result = runner.invoke(cli, [str(input_dir), str(output_dir)])
assert result.exit_code == 0
compare_folders(output_dir, ref_dir, output_dir)
def METHOD_NAME(input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike) -> None:
from pathlib import PurePath
from clinica.iotools.converters.ukb_to_bids.ukb_to_bids import convert_images
from clinica.utils.check_dependency import check_dcm2niix
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Arrange
clinical_data_directory = input_dir / "clinical_data"
# Act
check_dcm2niix()
convert_images(
input_dir / "unorganized", output_dir / "bids", clinical_data_directory
)
# Assert
compare_folders(output_dir / "bids", ref_dir / "bids", output_dir / "bids")
def run_genfitobids(
input_dir: PathLike, output_dir: PathLike, ref_dir: PathLike
) -> None:
from pathlib import PurePath
from clinica.iotools.converters.genfi_to_bids.genfi_to_bids import convert_images
from clinica.utils.check_dependency import check_dcm2niix
# Convert
input_dir = PurePath(input_dir)
output_dir = PurePath(output_dir)
ref_dir = PurePath(ref_dir)
# Act
check_dcm2niix()
convert_images(
input_dir / "unorganized",
output_dir / "bids",
path_to_clinical=None,
gif=False,
)
# Assert
compare_folders(output_dir / "bids", ref_dir / "bids", output_dir / "bids")
def test_run_convertors(cmdopt, tmp_path, test_name):
base_dir = Path(cmdopt["input"])
input_dir = base_dir / test_name / "in"
ref_dir = base_dir / test_name / "ref"
tmp_out_dir = tmp_path / test_name / "out"
tmp_out_dir.mkdir(parents=True)
if test_name == "Nifd2Bids":
run_nifd2bids(input_dir, tmp_out_dir, ref_dir)
elif test_name == "Oasis2Bids":
run_oasis2bids(input_dir, tmp_out_dir, ref_dir)
elif test_name == "Oasis3ToBids":
run_oasis3tobids(input_dir, tmp_out_dir, ref_dir)
elif test_name == "Adni2Bids":
run_adni2bids(input_dir, tmp_out_dir, ref_dir)
elif test_name == "Aibl2Bids":
run_aibl2bids(input_dir, tmp_out_dir, ref_dir)
elif test_name == "HabsToBids":
run_habs_to_bids(input_dir, tmp_out_dir, ref_dir)
elif test_name == "UkbToBids":
METHOD_NAME(input_dir, tmp_out_dir, ref_dir)
elif test_name == "GenfiToBids":
run_genfitobids(input_dir, tmp_out_dir, ref_dir)
else:
print(f"Test {test_name} not available.")
assert 0
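# Illustrative note (not part of the original file): test_run_convertors relies
# on a cmdopt fixture (defined elsewhere, typically in conftest.py) returning a
# mapping whose "input" key points at the directory that holds the
# <TestName>/in and <TestName>/ref reference datasets.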
| null |
1,230 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdatePrivateAccessPolicyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'csas', '2023-01-20', 'UpdatePrivateAccessPolicy')
self.set_method('POST')
def get_Description(self): # String
return self.get_body_params().get('Description')
def set_Description(self, Description): # String
self.add_body_params('Description', Description)
def get_PolicyId(self): # String
return self.get_body_params().get('PolicyId')
def set_PolicyId(self, PolicyId): # String
self.add_body_params('PolicyId', PolicyId)
def get_CustomUserAttributes(self): # Array
return self.get_body_params().get('CustomUserAttributes')
def set_CustomUserAttributes(self, CustomUserAttributes): # Array
for index1, value1 in enumerate(CustomUserAttributes):
if value1.get('UserGroupType') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.UserGroupType', value1.get('UserGroupType'))
if value1.get('IdpId') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.IdpId', value1.get('IdpId'))
if value1.get('Value') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.Value', value1.get('Value'))
if value1.get('Relation') is not None:
self.add_body_params('CustomUserAttributes.' + str(index1 + 1) + '.Relation', value1.get('Relation'))
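    # Illustrative note (not part of the original request class): the setter
    # above flattens the array into indexed body parameters, e.g. one entry
    # {'IdpId': 'idp-1', 'Value': 'dev'} becomes
    # CustomUserAttributes.1.IdpId=idp-1 and CustomUserAttributes.1.Value=dev.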
def get_TagIds(self): # Array
return self.get_body_params().get('TagIds')
def set_TagIds(self, TagIds): # Array
for index1, value1 in enumerate(TagIds):
self.add_body_params('TagIds.' + str(index1 + 1), value1)
def get_UserGroupIds(self): # Array
return self.get_body_params().get('UserGroupIds')
def set_UserGroupIds(self, UserGroupIds): # Array
for index1, value1 in enumerate(UserGroupIds):
self.add_body_params('UserGroupIds.' + str(index1 + 1), value1)
def get_PolicyAction(self): # String
return self.get_body_params().get('PolicyAction')
def set_PolicyAction(self, PolicyAction): # String
self.add_body_params('PolicyAction', PolicyAction)
def get_Priority(self): # Integer
return self.get_body_params().get('Priority')
def set_Priority(self, Priority): # Integer
self.add_body_params('Priority', Priority)
def METHOD_NAME(self): # Array
return self.get_body_params().get('ApplicationIds')
def set_ApplicationIds(self, ApplicationIds): # Array
for index1, value1 in enumerate(ApplicationIds):
self.add_body_params('ApplicationIds.' + str(index1 + 1), value1)
def get_UserGroupMode(self): # String
return self.get_body_params().get('UserGroupMode')
def set_UserGroupMode(self, UserGroupMode): # String
self.add_body_params('UserGroupMode', UserGroupMode)
def get_ModifyType(self): # String
return self.get_body_params().get('ModifyType')
def set_ModifyType(self, ModifyType): # String
self.add_body_params('ModifyType', ModifyType)
def get_ApplicationType(self): # String
return self.get_body_params().get('ApplicationType')
def set_ApplicationType(self, ApplicationType): # String
self.add_body_params('ApplicationType', ApplicationType)
def get_Status(self): # String
return self.get_body_params().get('Status')
def set_Status(self, Status): # String
self.add_body_params('Status', Status)
| null |
1,231 |
#/*##########################################################################
# Copyright (C) 2004-2021 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#############################################################################*/
__author__ = "V.A. Sole"
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
from PyMca5.PyMcaGui import PyMcaQt as qt
safe_str = qt.safe_str
class HDF5Selection(qt.QWidget):
def __init__(self, parent=None):
qt.QWidget.__init__(self, parent)
self.mainLayout = qt.QGridLayout(self)
self.mainLayout.setContentsMargins(0, 0, 0, 0)
self.mainLayout.setSpacing(2)
self.selectionWidgetsDict = {}
row = 0
for key in ['entry', 'x', 'y', 'm']:
label = qt.QLabel(self)
label.setText(key+":")
line = qt.QLineEdit(self)
line.setReadOnly(True)
self.mainLayout.addWidget(label, row, 0)
self.mainLayout.addWidget(line, row, 1)
self.selectionWidgetsDict[key] = line
row += 1
def METHOD_NAME(self, selection):
if 'cntlist' in selection:
# "Raw" selection
cntlist = selection['cntlist']
for key in ['entry', 'x', 'y', 'm']:
if key not in selection:
self.selectionWidgetsDict[key].setText("")
continue
n = len(selection[key])
if not n:
self.selectionWidgetsDict[key].setText("")
continue
idx = selection[key][0]
text = "%s" % cntlist[idx]
if n > 1:
for idx in range(1, n):
text += ", %s" % cntlist[selection[key][idx]]
self.selectionWidgetsDict[key].setText(text)
else:
# "Digested" selection
for key in ['entry', 'x', 'y', 'm']:
if key not in selection:
self.selectionWidgetsDict[key].setText("")
continue
n = len(selection[key])
if not n:
self.selectionWidgetsDict[key].setText("")
continue
text = "%s" % selection[key][0]
if n > 1:
for idx in range(1, n):
text += ", %s" % selection[key][idx]
self.selectionWidgetsDict[key].setText(text)
def getSelection(self):
selection = {}
for key in ['entry', 'x', 'y', 'm']:
selection[key] = []
text = safe_str(self.selectionWidgetsDict[key].text())
text = text.replace(" ","")
if len(text):
selection[key] = text.split(',')
return selection
def main():
app = qt.QApplication([])
tab = HDF5Selection()
tab.METHOD_NAME({'x':[1, 2], 'y':[4], 'cntlist':["dummy", "Cnt0", "Cnt1", "Cnt2", "Cnt3"]})
tab.show()
app.exec()
if __name__ == "__main__":
main()
| null |
1,232 |
from threading import Thread
from Background import Background
from PIL.Image import open as openImage
from PIL.ImageTk import PhotoImage
class Bird(Thread):
"""
    Class to create a bird
"""
__tag = "Bird"
__isAlive = None
__going_up = False
__going_down = 0
__times_skipped = 0
__running = False
decends = 0.00390625
climbsUp = 0.0911458333
def __init__(
self,
background,
gameover_function,
*screen_geometry,
fp="bird.png",
event="<Up>",
descend_speed=5
):
# Verifica se "background" é uma instância de Background e se o "gamerover_method" é chamável
if not isinstance(background, Background):
raise TypeError(
"The background argument must be an instance of Background."
)
if not callable(gameover_function):
raise TypeError("The gameover_method argument must be a callable object.")
        # Store the parameters
self.__canvas = background
self.image_path = fp
self.__descend_speed = descend_speed
self.gameover_method = gameover_function
        # Get the width and height of the background
self.__width = screen_geometry[0]
self.__height = screen_geometry[1]
        # Scale the bird's descent and climb rates based on the background height
self.decends *= self.__height
self.decends = int(self.decends + 0.5)
self.climbsUp *= self.__height
self.climbsUp = int(self.climbsUp + 0.5)
        # Invoke the Thread constructor
Thread.__init__(self)
        # Compute the bird size based on the window width and height
self.width = (self.__width // 100) * 6
self.height = (self.__height // 100) * 11
        # Load the bird image and create it on the background
self.__canvas.bird_image = self.getPhotoImage(
image_path=self.image_path,
width=self.width,
height=self.height,
closeAfter=True,
)[0]
self.__birdID = self.__canvas.create_image(
self.__width // 2,
self.__height // 2,
image=self.__canvas.bird_image,
tag=self.__tag,
)
        # Bind the event that makes the bird climb
self.__canvas.focus_force()
self.__canvas.bind(event, self.jumps)
self.__isAlive = True
def birdIsAlive(self):
"""
        Check whether the bird is alive
"""
return self.__isAlive
def METHOD_NAME(self):
"""
        Check whether the bird has left the window bounds or collided with something
"""
        # Get the bird's position on the background
position = list(self.__canvas.bbox(self.__tag))
        # If the bird has crossed the bottom border of the background, it is declared dead
if position[3] >= self.__height + 20:
self.__isAlive = False
        # If the bird has crossed the top border of the background, it is declared dead
if position[1] <= -20:
self.__isAlive = False
        # Give the bird a margin of error of a few pixels
position[0] += int(25 / 78 * self.width)
position[1] += int(25 / 77 * self.height)
position[2] -= int(20 / 78 * self.width)
position[3] -= int(10 / 77 * self.width)
        # Define the objects to be ignored in collision checks
ignored_collisions = self.__canvas.getBackgroundID()
ignored_collisions.append(self.__birdID)
        # Look for possible collisions with the bird
possible_collisions = list(self.__canvas.find_overlapping(*position))
        # Remove the ignored objects from the possible collisions
for _id in ignored_collisions:
try:
possible_collisions.remove(_id)
except BaseException:
continue
        # If there is any collision, the bird dies
if len(possible_collisions) >= 1:
self.__isAlive = False
return not self.__isAlive
def getTag(self):
"""
        Return the bird's tag
"""
return self.__tag
@staticmethod
def getPhotoImage(
image=None, image_path=None, width=None, height=None, closeAfter=False
):
"""
        Returns a PIL.ImageTk.PhotoImage object for an image, along with the PIL.Image objects created
        (photoImage, new, original)
        @param image: Instance of PIL.Image.open
        @param image_path: Path to the image file
        @param width: Width of the image
        @param height: Height of the image
        @param closeAfter: If True, the images are closed after the PhotoImage is created
"""
if not image:
if not image_path:
return
            # Open the image using its path
image = openImage(image_path)
        # The image is resized only if a width or height is given
if not width:
width = image.width
if not height:
height = image.height
        # Create a new, already-resized image
newImage = image.resize([width, height])
        # Create a PhotoImage
photoImage = PhotoImage(newImage)
        # If closeAfter is True, close the images
if closeAfter:
            # Close the new image
newImage.close()
newImage = None
            # Close the original image
image.close()
image = None
        # Return the PhotoImage, the new resized image that was used, and the original image
return photoImage, newImage, image
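    # Illustrative note (not part of the original class): typical standalone
    # use of the helper above, assuming a Tk root window already exists so a
    # PhotoImage can be created:
    #     photo, resized, original = Bird.getPhotoImage(
    #         image_path="bird.png", width=50, height=50, closeAfter=True)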
def jumps(self, event=None):
"""
        Make the bird jump
"""
        # Check whether the bird has left the background area
self.METHOD_NAME()
        # If the bird is dead, this method cannot run
if not self.__isAlive or not self.__running:
self.__going_up = False
return
        # Declare that the bird is going up
self.__going_up = True
self.__going_down = 0
        # Move the bird while the per-jump climb limit has not been exceeded
if self.__times_skipped < self.climbsUp:
            # Move the bird upwards
self.__canvas.move(self.__tag, 0, -1)
self.__times_skipped += 1
            # Run this method again
self.__canvas.after(3, self.jumps)
else:
            # Declare that the bird is no longer going up
self.__going_up = False
self.__times_skipped = 0
def kill(self):
"""
        Kill the bird
"""
self.__isAlive = False
def run(self):
"""
        Start the animation of the bird falling
"""
self.__running = True
        # Check whether the bird has left the background area
self.METHOD_NAME()
        # Until the bird reaches its maximum speed, the speed increases by 0.05
if self.__going_down < self.decends:
self.__going_down += 0.05
        # Run the descent animation only if the bird is alive
if self.__isAlive:
            # Run the descent animation only if the bird is not going up
if not self.__going_up:
                # Move the bird downwards
self.__canvas.move(self.__tag, 0, self.__going_down)
            # Run this method again
self.__canvas.after(self.__descend_speed, self.run)
        # If the bird is dead, run the game-over method
else:
self.__running = False
self.gameover_method()
| null |
1,233 |
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import functools
import os
import pykube
from azure.common.client_factory import get_client_from_auth_file, get_client_from_cli_profile
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.compute import ComputeManagementClient
RUN_ID_LABEL = 'runid'
CLOUD_REGION_LABEL = 'cloud_region'
auth_file = os.environ.get('AZURE_AUTH_LOCATION', None)
if auth_file:
res_client = get_client_from_auth_file(ResourceManagementClient, auth_path=auth_file)
network_client = get_client_from_auth_file(NetworkManagementClient, auth_path=auth_file)
compute_client = get_client_from_auth_file(ComputeManagementClient, auth_path=auth_file)
else:
res_client = get_client_from_cli_profile(ResourceManagementClient)
network_client = get_client_from_cli_profile(NetworkManagementClient)
compute_client = get_client_from_cli_profile(ComputeManagementClient)
resource_group_name = os.environ["AZURE_RESOURCE_GROUP"]
def METHOD_NAME(run_id):
for resource in res_client.resources.list(filter="tagName eq 'Name' and tagValue eq '" + run_id + "'"):
if str(resource.type).split('/')[-1].lower() == "virtualmachines":
return resource.name
elif str(resource.type).split('/')[-1].lower() == "virtualmachinescalesets":
instance_name, _ = get_instance_name_and_private_ip_from_vmss(resource.name)
return instance_name
return None
def run_id_filter(run_id):
return {
'Name': 'tag:Name',
'Values': [run_id]
}
def get_instance_name_and_private_ip_from_vmss(scale_set_name):
vm_vmss_id = None
for vm in compute_client.virtual_machine_scale_set_vms.list(resource_group_name, scale_set_name):
vm_vmss_id = vm.instance_id
break
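    # NOTE: only the first VM in the scale set is inspected; if the scale set
    # is empty, vm_vmss_id stays None and the lookups below will fail.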
instance_name = compute_client.virtual_machine_scale_set_vms \
.get_instance_view(resource_group_name, scale_set_name, vm_vmss_id) \
.additional_properties["computerName"]
private_ip = network_client.network_interfaces. \
get_virtual_machine_scale_set_ip_configuration(resource_group_name, scale_set_name, vm_vmss_id,
scale_set_name + "-nic", scale_set_name + "-ip") \
.private_ip_address
return instance_name, private_ip
def verify_regnode(ins_id, api):
if find_node(api, ins_id):
return ins_id
raise RuntimeError("Failed to find Node {}".format(ins_id))
def find_node(api, node_name):
node = pykube.Node.objects(api).filter(field_selector={'metadata.name': node_name})
if len(node.response['items']) > 0:
return node_name
else:
return ''
def delete_kube_node(nodename, run_id, api):
if nodename is None:
nodes = pykube.Node.objects(api).filter(selector={RUN_ID_LABEL: run_id})
if len(nodes.response['items']) > 0:
node = nodes.response['items'][0]
nodename = node['metadata']['name']
if nodename is not None:
obj = {
"apiVersion": "v1",
"kind": "Node",
"metadata": {
"name": nodename,
"labels": {
"runid": run_id
}
}
}
pykube.Node(api, obj).delete()
def get_cloud_region(api, run_id):
nodes = pykube.Node.objects(api).filter(selector={RUN_ID_LABEL: run_id})
if len(nodes.response['items']) == 0:
raise RuntimeError('Cannot find node matching RUN ID %s' % run_id)
node = nodes.response['items'][0]
labels = node['metadata']['labels']
if CLOUD_REGION_LABEL not in labels:
raise RuntimeError('Node %s is not labeled with Azure Region' % node['metadata']['name'])
return labels[CLOUD_REGION_LABEL]
def get_kube_api():
    try:
        api = pykube.HTTPClient(pykube.KubeConfig.from_service_account())
    except Exception:
        # Fall back to a local kubeconfig when not running inside the cluster
        api = pykube.HTTPClient(pykube.KubeConfig.from_file("~/.kube/config"))
api.session.verify = False
return api
def resolve_azure_api(resource):
""" This method retrieves the latest non-preview api version for
the given resource (unless the preview version is the only available
api version) """
provider = res_client.providers.get(resource.id.split('/')[6])
rt = next((t for t in provider.resource_types
if t.resource_type.lower() == '/'.join(resource.type.split('/')[1:]).lower()), None)
if rt and 'api_versions' in rt.__dict__:
api_version = [v for v in rt.__dict__['api_versions'] if 'preview' not in v.lower()]
return api_version[0] if api_version else rt.__dict__['api_versions'][0]
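# Illustrative example (assumed version list): for a resource type exposing
# api_versions = ['2019-07-01-preview', '2019-03-01', '2018-10-01'], the
# function above returns '2019-03-01', the first non-preview entry; when only
# preview versions exist, the first preview version is returned instead.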
def azure_resource_type_cmp(r1, r2):
if str(r1.type).split('/')[-1].lower().startswith("virtualmachine"):
return -1
elif str(r1.type).split('/')[-1].lower() == "networkinterfaces" and not str(r2.type).split('/')[-1].lower().startswith("virtualmachine"):
return -1
return 0
def delete_resources_by_tag(run_id):
resources = []
for resource in res_client.resources.list(filter="tagName eq 'Name' and tagValue eq '" + run_id + "'"):
resources.append(resource)
    # sort resources so that the VM and NIC are deleted first, because they have attached resources (disks and IPs)
resources.sort(key=functools.cmp_to_key(azure_resource_type_cmp))
for resource in resources:
res_client.resources.delete(
resource_group_name=resource.id.split('/')[4],
resource_provider_namespace=resource.id.split('/')[6],
parent_resource_path='',
resource_type=str(resource.type).split('/')[-1],
resource_name=resource.name,
api_version=resolve_azure_api(resource),
parameters=resource
).wait()
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--run_id", "-kid", type=str, required=True)
parser.add_argument("--ins_id", "-id", type=str, required=False) # do we need?
args, unknown = parser.parse_known_args()
run_id = args.run_id
api = get_kube_api()
try:
ins_id = METHOD_NAME(run_id)
except Exception:
ins_id = None
if ins_id is None:
delete_kube_node(None, run_id, api)
delete_resources_by_tag(run_id)
else:
try:
nodename = verify_regnode(ins_id, api)
except Exception:
nodename = None
delete_kube_node(nodename, run_id, api)
delete_resources_by_tag(run_id)
if __name__ == '__main__':
main()
| null |
1,234 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecd.endpoint import endpoint_data
class DescribeBundlesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ecd', '2020-09-30', 'DescribeBundles')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_GpuCount(self): # Float
return self.get_query_params().get('GpuCount')
def set_GpuCount(self, GpuCount): # Float
self.add_query_param('GpuCount', GpuCount)
def get_BundleIds(self): # RepeatList
return self.get_query_params().get('BundleId')
def set_BundleIds(self, BundleId): # RepeatList
for depth1 in range(len(BundleId)):
self.add_query_param('BundleId.' + str(depth1 + 1), BundleId[depth1])
def get_DesktopTypeFamily(self): # String
return self.get_query_params().get('DesktopTypeFamily')
def set_DesktopTypeFamily(self, DesktopTypeFamily): # String
self.add_query_param('DesktopTypeFamily', DesktopTypeFamily)
def get_SelectedBundle(self): # Boolean
return self.get_query_params().get('SelectedBundle')
def METHOD_NAME(self, SelectedBundle): # Boolean
self.add_query_param('SelectedBundle', SelectedBundle)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_FromDesktopGroup(self): # Boolean
return self.get_query_params().get('FromDesktopGroup')
def set_FromDesktopGroup(self, FromDesktopGroup): # Boolean
self.add_query_param('FromDesktopGroup', FromDesktopGroup)
def get_BundleType(self): # String
return self.get_query_params().get('BundleType')
def set_BundleType(self, BundleType): # String
self.add_query_param('BundleType', BundleType)
def get_FotaChannel(self): # String
return self.get_query_params().get('FotaChannel')
def set_FotaChannel(self, FotaChannel): # String
self.add_query_param('FotaChannel', FotaChannel)
def get_VolumeEncryptionEnabled(self): # Boolean
return self.get_query_params().get('VolumeEncryptionEnabled')
def set_VolumeEncryptionEnabled(self, VolumeEncryptionEnabled): # Boolean
self.add_query_param('VolumeEncryptionEnabled', VolumeEncryptionEnabled)
def get_MemorySize(self): # Integer
return self.get_query_params().get('MemorySize')
def set_MemorySize(self, MemorySize): # Integer
self.add_query_param('MemorySize', MemorySize)
def get_SessionType(self): # String
return self.get_query_params().get('SessionType')
def set_SessionType(self, SessionType): # String
self.add_query_param('SessionType', SessionType)
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
def get_CheckStock(self): # Boolean
return self.get_query_params().get('CheckStock')
def set_CheckStock(self, CheckStock): # Boolean
self.add_query_param('CheckStock', CheckStock)
def get_ProtocolType(self): # String
return self.get_query_params().get('ProtocolType')
def set_ProtocolType(self, ProtocolType): # String
self.add_query_param('ProtocolType', ProtocolType)
def get_CpuCount(self): # Integer
return self.get_query_params().get('CpuCount')
def set_CpuCount(self, CpuCount): # Integer
self.add_query_param('CpuCount', CpuCount)
def get_SupportMultiSession(self): # Boolean
return self.get_query_params().get('SupportMultiSession')
def set_SupportMultiSession(self, SupportMultiSession): # Boolean
self.add_query_param('SupportMultiSession', SupportMultiSession)
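# Illustrative usage sketch (assumed; the access key, secret and region below
# are placeholders, the call pattern is the standard aliyunsdkcore one):
# from aliyunsdkcore.client import AcsClient
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = DescribeBundlesRequest()
# request.set_MaxResults(10)
# response = client.do_action_with_exception(request)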
| null |
1,235 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .enums import CharacterCategory, ProbingState, SequenceLikelihood
class SingleByteCharSetProber(CharSetProber):
SAMPLE_SIZE = 64
SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2
POSITIVE_SHORTCUT_THRESHOLD = 0.95
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
def __init__(self, model, reversed=False, name_prober=None):
super(SingleByteCharSetProber, self).__init__()
self._model = model
# TRUE if we need to reverse every pair in the model lookup
self._reversed = reversed
# Optional auxiliary prober for name decision
self._name_prober = name_prober
self._last_order = None
self._seq_counters = None
self._total_seqs = None
self._total_char = None
self._freq_char = None
self.reset()
def reset(self):
super(SingleByteCharSetProber, self).reset()
# char order of last character
self._last_order = 255
self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
self._total_seqs = 0
self._total_char = 0
# characters that fall in our sampling range
self._freq_char = 0
@property
def charset_name(self):
if self._name_prober:
return self._name_prober.charset_name
else:
return self._model['charset_name']
@property
def language(self):
if self._name_prober:
return self._name_prober.language
else:
return self._model.get('language')
def feed(self, byte_str):
if not self._model['keep_english_letter']:
byte_str = self.filter_international_words(byte_str)
if not byte_str:
return self.state
char_to_order_map = self._model['char_to_order_map']
for i, c in enumerate(byte_str):
# XXX: Order is in range 1-64, so one would think we want 0-63 here,
# but that leads to 27 more test failures than before.
order = char_to_order_map[c]
# XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
# CharacterCategory.SYMBOL is actually 253, so we use CONTROL
# to make it closer to the original intent. The only difference
# is whether or not we count digits and control characters for
# _total_char purposes.
if order < CharacterCategory.CONTROL:
self._total_char += 1
if order < self.SAMPLE_SIZE:
self._freq_char += 1
if self._last_order < self.SAMPLE_SIZE:
self._total_seqs += 1
if not self._reversed:
i = (self._last_order * self.SAMPLE_SIZE) + order
model = self._model['precedence_matrix'][i]
else: # reverse the order of the letters in the lookup
i = (order * self.SAMPLE_SIZE) + self._last_order
model = self._model['precedence_matrix'][i]
self._seq_counters[model] += 1
self._last_order = order
charset_name = self._model['charset_name']
if self.state == ProbingState.DETECTING:
if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
confidence = self.METHOD_NAME()
if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
self.logger.debug('%s confidence = %s, we have a winner',
charset_name, confidence)
self._state = ProbingState.FOUND_IT
elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
self.logger.debug('%s confidence = %s, below negative '
                                      'shortcut threshold %s', charset_name,
confidence,
self.NEGATIVE_SHORTCUT_THRESHOLD)
self._state = ProbingState.NOT_ME
return self.state
def METHOD_NAME(self):
r = 0.01
if self._total_seqs > 0:
r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
self._total_seqs / self._model['typical_positive_ratio'])
r = r * self._freq_char / self._total_char
if r >= 1.0:
r = 0.99
return r
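# Illustrative calculation (assumed counts): with _total_seqs = 2000, of which
# 1500 landed in the POSITIVE bucket, typical_positive_ratio = 0.75, and 900
# of 1000 characters inside the sampling range, the confidence comes out as
#   r = (1500 / 2000) / 0.75 * (900 / 1000) = 0.9
# values reaching 1.0 are clamped to 0.99.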
| null |
1,236 |
#!/usr/bin/python3
#
# Script to merge two mpd confs into one.
# (C) 2020 @bitlab (@bitkeeper Git)
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# If setting exists in both the second conf is leading
# Scripts:
# - First script is /etc/mpd.moode.conf
# - Second script is /etc/mpd.custom.conf
# Output:
# - Output is written to /etc/mpd.conf
#
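# Illustrative merge (assumed file contents): if mpd.moode.conf contains
#   audio_buffer_size "4096"
# and mpd.custom.conf contains
#   audio_buffer_size "8192"
# the merged /etc/mpd.conf keeps the custom value and records the override:
#   # setting 'audio_buffer_size "4096"' is replaced by:
#   audio_buffer_size "8192"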
import datetime
from os import path
import argparse
COMMENT = "#"
SECTION_BEGIN = "{"
SECTION_END = "}"
class InvalidLine(BaseException):
def __init__(self, msg):
BaseException.__init__(self, msg)
def get_section_type(section):
return list(section.keys())[0]
def get_section_data(section):
return section[get_section_type(section)]
def entry_to_string(entry):
return "%s \"%s\"" %(entry[0],entry[1])
def get_setting(setting, source):
for entry in source:
if type(entry)==tuple and entry[0]==setting:
return entry
return None
def get_section(section, source):
section_type = get_section_type(section)
section_data = get_section_data(section)
section_name = get_setting("name", section_data)
for entry in source:
if type(entry)==dict and \
get_section_type(entry)==section_type \
and (section_name==None or section_name==get_setting("name", get_section_data(entry)) ):
return entry
return None
def METHOD_NAME(filename):
lines = []
fsource = open(filename, 'r')
current_scope = None
scope_data = None
dest = None
for linenr, linetxt in enumerate(fsource):
if current_scope:
dest = scope_data
else:
dest = lines
if linetxt.strip().find(COMMENT) == 0:
dest.append(linetxt)
elif linetxt.strip() == '':
dest.append(linetxt)
elif linetxt.strip() == SECTION_END:
lines.append({current_scope: scope_data})
current_scope = None
scope_data = []
elif linetxt.strip()[-1] == SECTION_BEGIN:
scope = linetxt.strip()[:-1].strip()
current_scope = scope
scope_data = []
elif linetxt.strip()[-1] == '"':
idx = linetxt.strip().find(' "')
if idx==-1:
raise InvalidLine("Invalid line nr %d: '%s'" %(linenr, linetxt))
setting = linetxt.strip()[ : idx].strip()
value = linetxt.strip()[idx+2:-1]
dest.append( (setting, value))
else:
InvalidLine("Invalid line nr %d: '%s'" %(linenr, linetxt))
fsource.close()
return lines
def write_mpd_conf(filename, conf):
foutput = open(filename, 'w')
foutput.write(to_text(conf))
foutput.close()
def merge(sourcea, sourceb):
output=[]
for entry in sourcea:
if type(entry)==str:
output.append(entry)
elif type(entry)==tuple:
entryb = get_setting(entry[0], sourceb)
if entryb and entry_to_string(entry) != entry_to_string(entryb):
output.append("# setting '%s' is replaced by:\n" %entry_to_string(entry))
output.append(entryb)
sourceb.remove(entryb)
elif entryb and entry_to_string(entry) == entry_to_string(entryb):
sourceb.remove(entryb)
output.append(entry)
else:
output.append(entry)
elif type(entry)==dict:
sectiona = entry
sectiona_type = get_section_type(sectiona)
section_data = get_section_data(sectiona)
sectionb = get_section(sectiona, sourceb)
if sectionb:
sectionb_data = get_section_data(sectionb)
output.append( {sectiona_type: merge(section_data, sectionb_data)})
sourceb.remove(sectionb)
else:
output.append(sectiona)
else:
InvalidLine("Hum unexpected")
for entry in sourceb:
output.append(entry)
return output
def to_text(source, depth=0):
prefix = " "*(depth*3)
output = ''
for entry in source:
if type(entry)==str:
output+=prefix+entry
elif type(entry)==tuple:
output+=prefix+entry_to_string(entry)+"\n"
else:
section = entry
section_type = get_section_type(section)
output += prefix+section_type+ " "+SECTION_BEGIN+'\n'
output += prefix+to_text(get_section_data(section), depth=1)
output += prefix+SECTION_END+'\n'
return output
def get_cmdline_arguments():
parser = argparse.ArgumentParser(description='Merge MPD configuration files.')
parser.add_argument('nameconf1',
help='The name of the first configuration file. For example /etc/mpd.moode.conf.')
parser.add_argument('nameconf2',
                        help='Name of the configuration file to merge into the first one. For example /etc/mpd.custom.conf.')
    parser.add_argument('--dry-run', dest='dryrun', action='store_true',
                        help='Perform a test run without writing the files.')
    parser.add_argument('--to-screen', dest='toscreen', action='store_true',
                        help='Show merged output on screen.')
parser.add_argument('--destination', default = "/etc/mpd.conf",
help='Name of the merged configuration file.')
args = parser.parse_args()
return args
if __name__ == "__main__":
args = get_cmdline_arguments()
file1_name = args.nameconf1 #'/etc/mpd.moode.conf'
file2_name = args.nameconf2 #'/etc/mpd.custom.conf'
output_file = args.destination #'/etc/mpd.conf'
confa = METHOD_NAME(file1_name)
    # if the custom conf does not exist, just use the first conf unmodified as output
if path.exists(file2_name):
confb = METHOD_NAME(file2_name)
output = merge(confa, confb)
output.insert(0, "##################################################\n")
output.insert(1, "# automatic mpd conf merge %s\n" %str(datetime.datetime.now()))
output.insert(2, "# file 1: '%s'\n" %file1_name)
output.insert(3, "# file 2: '%s'\n" %file2_name)
output.insert(4, "##################################################\n")
output.insert(5, "\n")
else:
output = confa
if args.toscreen:
print(to_text(output))
if not args.dryrun:
write_mpd_conf(output_file, output)
| null |
1,237 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhitsdb.endpoint import endpoint_data
class UpgradeLindormInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hitsdb', '2020-06-15', 'UpgradeLindormInstance','hitsdb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_PhoenixCoreNum(self): # Integer
return self.get_query_params().get('PhoenixCoreNum')
def set_PhoenixCoreNum(self, PhoenixCoreNum): # Integer
self.add_query_param('PhoenixCoreNum', PhoenixCoreNum)
def get_PhoenixCoreSpec(self): # String
return self.get_query_params().get('PhoenixCoreSpec')
def set_PhoenixCoreSpec(self, PhoenixCoreSpec): # String
self.add_query_param('PhoenixCoreSpec', PhoenixCoreSpec)
def get_UpgradeType(self): # String
return self.get_query_params().get('UpgradeType')
def set_UpgradeType(self, UpgradeType): # String
self.add_query_param('UpgradeType', UpgradeType)
def get_TsdbSpec(self): # String
return self.get_query_params().get('TsdbSpec')
def set_TsdbSpec(self, TsdbSpec): # String
self.add_query_param('TsdbSpec', TsdbSpec)
def get_FilestoreSpec(self): # String
return self.get_query_params().get('FilestoreSpec')
def set_FilestoreSpec(self, FilestoreSpec): # String
self.add_query_param('FilestoreSpec', FilestoreSpec)
def get_LogSpec(self): # String
return self.get_query_params().get('LogSpec')
def set_LogSpec(self, LogSpec): # String
self.add_query_param('LogSpec', LogSpec)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_TsdbNum(self): # Integer
return self.get_query_params().get('TsdbNum')
def set_TsdbNum(self, TsdbNum): # Integer
self.add_query_param('TsdbNum', TsdbNum)
def get_LindormSpec(self): # String
return self.get_query_params().get('LindormSpec')
def set_LindormSpec(self, LindormSpec): # String
self.add_query_param('LindormSpec', LindormSpec)
def get_SolrNum(self): # Integer
return self.get_query_params().get('SolrNum')
def set_SolrNum(self, SolrNum): # Integer
self.add_query_param('SolrNum', SolrNum)
def get_ColdStorage(self): # Integer
return self.get_query_params().get('ColdStorage')
def set_ColdStorage(self, ColdStorage): # Integer
self.add_query_param('ColdStorage', ColdStorage)
def get_LogNum(self): # Integer
return self.get_query_params().get('LogNum')
def set_LogNum(self, LogNum): # Integer
self.add_query_param('LogNum', LogNum)
def get_SolrSpec(self): # String
return self.get_query_params().get('SolrSpec')
def set_SolrSpec(self, SolrSpec): # String
self.add_query_param('SolrSpec', SolrSpec)
def get_CoreSingleStorage(self): # Integer
return self.get_query_params().get('CoreSingleStorage')
def set_CoreSingleStorage(self, CoreSingleStorage): # Integer
self.add_query_param('CoreSingleStorage', CoreSingleStorage)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('FilestoreNum')
def set_FilestoreNum(self, FilestoreNum): # Integer
self.add_query_param('FilestoreNum', FilestoreNum)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LindormNum(self): # Integer
return self.get_query_params().get('LindormNum')
def set_LindormNum(self, LindormNum): # Integer
self.add_query_param('LindormNum', LindormNum)
def get_LtsCoreNum(self): # Integer
return self.get_query_params().get('LtsCoreNum')
def set_LtsCoreNum(self, LtsCoreNum): # Integer
self.add_query_param('LtsCoreNum', LtsCoreNum)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_LtsCoreSpec(self): # String
return self.get_query_params().get('LtsCoreSpec')
def set_LtsCoreSpec(self, LtsCoreSpec): # String
self.add_query_param('LtsCoreSpec', LtsCoreSpec)
def get_ClusterStorage(self): # Integer
return self.get_query_params().get('ClusterStorage')
def set_ClusterStorage(self, ClusterStorage): # Integer
self.add_query_param('ClusterStorage', ClusterStorage)
def get_LogSingleStorage(self): # Integer
return self.get_query_params().get('LogSingleStorage')
def set_LogSingleStorage(self, LogSingleStorage): # Integer
self.add_query_param('LogSingleStorage', LogSingleStorage)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
| null |
1,238 |
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
"""
Project:
glideinWMS
Description:
unit test for glideinwms/creation/factoryXmlConfig.py
Author:
Dennis Box [email protected]
"""
import unittest
import xmlrunner
from glideinwms.creation.lib.factoryXmlConfig import (
CondTarElement,
Config,
EntryElement,
EntrySetElement,
FactAttrElement,
FactFileElement,
FrontendElement,
parse,
)
XML = "fixtures/factory/glideinWMS.xml"
XML_ENTRY = "fixtures/factory/config.d/Dev_Sites.xml"
XML_ENTRY2 = "fixtures/factory/config.d/Dev_Sites2.xml"
class TestFactAttrElement(unittest.TestCase):
def setUp(self):
self.conf = parse(XML)
self.attr_el_list = self.conf.get_child_list("attrs")
def test_validate(self):
for fact_attr_element in self.attr_el_list:
fact_attr_element.validate()
self.assertTrue(isinstance(fact_attr_element, FactAttrElement))
class TestFactFileElement(unittest.TestCase):
def setUp(self):
self.conf = parse(XML)
self.files = self.conf.get_child_list("files")
def test_validate(self):
for fact_file_element in self.files:
fact_file_element.validate()
self.assertTrue(isinstance(fact_file_element, FactFileElement))
class TestCondTarElement(unittest.TestCase):
def setUp(self):
self.conf = parse(XML)
self.ctl = self.conf.get_child_list("condor_tarballs")
def test_validate(self):
for cte in self.ctl:
cte.validate()
self.assertTrue("arch" in cte)
self.assertTrue("version" in cte)
self.assertTrue("os" in cte)
self.assertTrue("base_dir" in cte or "tar_file" in cte)
self.assertTrue(isinstance(cte, CondTarElement))
del cte["base_dir"]
            try:
                cte.validate()
            except RuntimeError:
                # validate() is expected to fail once "base_dir" is removed
                pass
class TestFrontendElement(unittest.TestCase):
def setUp(self):
self.conf = parse(XML)
self.sec = self.conf.get_child("security")
self.frontends = self.sec.get_child_list("frontends")
def test_validate(self):
for frontend_element in self.frontends:
frontend_element.validate()
self.assertTrue("name" in frontend_element)
self.assertTrue("identity" in frontend_element)
self.assertTrue(isinstance(frontend_element, FrontendElement))
class TestEntryElement(unittest.TestCase):
def setUp(self):
self.conf = parse(XML)
self.eel = self.conf.get_child_list("entries")
def test_getName(self):
for entry_element in self.eel:
self.assertNotEqual("", entry_element.getName())
self.assertNotEqual(None, entry_element.getName())
def test_validate(self):
for entry_element in self.eel:
entry_element.validate()
self.assertTrue("gridtype" in entry_element)
self.assertTrue("gatekeeper" in entry_element)
self.assertTrue("auth_method" in entry_element)
self.assertTrue("enabled" in entry_element)
self.assertTrue(isinstance(entry_element, EntryElement))
def test_validate_sub_elements(self):
for entry_element in self.eel:
entry_element.validate_sub_elements()
class TestEntrySetElement(unittest.TestCase):
def setUp(self):
self.conf = parse(XML)
self.esl = self.conf.get_child_list("entry_sets")
self.el = self.conf.get_child_list("entries")
self.assertTrue(len(self.esl) > 0)
def test_validate_entry_sets(self):
for entry_set_element in self.esl:
entry_set_element.validate()
# self.assertTrue(isinstance(entry_set_element, EntrySetElement))
def test_validate_entries(self):
for entry_set_element in self.el:
entry_set_element.validate()
# self.assertTrue(isinstance(entry_set_element, EntrySetElement))
# pylint: disable=maybe-no-member
class TestConfig(unittest.TestCase):
def setUp(self):
self.config = parse(XML)
def test___init__(self):
self.assertTrue(isinstance(self.config, Config))
def test_get_client_log_dirs(self):
dirs = self.config.get_client_log_dirs()
self.assertTrue(isinstance(dirs, dict))
def test_get_client_proxy_dirs(self):
dirs = self.config.get_client_proxy_dirs()
self.assertTrue(isinstance(dirs, dict))
def test_get_entries(self):
entries = self.config.get_entries()
self.assertTrue(isinstance(entries, list))
def test_get_log_dir(self):
log_dir = self.config.get_log_dir()
self.assertEqual("fixtures/factory/log/server", log_dir)
def METHOD_NAME(self):
monitor_dir = self.config.get_monitor_dir()
self.assertEqual("fixtures/factory/web-area/monitor", monitor_dir)
def test_get_stage_dir(self):
stage_dir = self.config.get_stage_dir()
self.assertEqual("fixtures/factory/web-area/stage", stage_dir)
def test_get_submit_dir(self):
submit_dir = self.config.get_submit_dir()
self.assertEqual("fixtures/factory/work-dir", submit_dir)
def test_get_web_url(self):
url = self.config.get_web_url()
self.assertEqual("http://fermicloud380.fnal.gov/factory/stage", url)
def test_set_client_log_dirs(self):
self.config.set_client_log_dirs()
def test_set_client_proxy_dirs(self):
self.config.set_client_proxy_dirs()
def test_set_log_dir(self):
self.config.set_log_dir()
def test_set_monitor_dir(self):
self.config.set_monitor_dir()
def test_set_stage_dir(self):
self.config.set_stage_dir()
def test_set_submit_dir(self):
self.config.set_submit_dir()
def test_set_web_url(self):
self.config.set_web_url()
def test_validate(self):
self.config.validate()
# pylint: enable=maybe-no-member
class TestParse(unittest.TestCase):
def test_parse(self):
parse(XML)
try:
parse(XML_ENTRY)
except RuntimeError:
pass
if __name__ == "__main__":
unittest.main(testRunner=xmlrunner.XMLTestRunner(output="unittests-reports"))
| null |
1,239 |
from typing import Any, AnyStr, Callable, ContextManager, Generic, IO, Iterable, Iterator, List, Optional, Text, Type, Union
from typing_extensions import Final, Literal
import os
import sys
class _FNMatcher(Generic[AnyStr]):
pattern: AnyStr = ...
def __init__(self, pattern: AnyStr) -> None: ...
def __call__(self, path: local) -> bool: ...
class _Stat:
path: Final[local] = ...
mode: Final[int]
ino: Final[int]
dev: Final[int]
nlink: Final[int]
uid: Final[int]
gid: Final[int]
size: Final[int]
atime: Final[float]
mtime: Final[float]
ctime: Final[float]
atime_ns: Final[int]
mtime_ns: Final[int]
ctime_ns: Final[int]
if sys.version_info >= (3, 8) and sys.platform == "win32":
reparse_tag: Final[int]
blocks: Final[int]
blksize: Final[int]
rdev: Final[int]
flags: Final[int]
gen: Final[int]
birthtime: Final[int]
rsize: Final[int]
creator: Final[int]
type: Final[int]
if sys.platform != 'win32':
@property
def owner(self) -> str: ...
@property
def group(self) -> str: ...
def isdir(self) -> bool: ...
def isfile(self) -> bool: ...
def islink(self) -> bool: ...
if sys.version_info >= (3, 6):
_PathLike = os.PathLike
else:
class _PathLike(Generic[AnyStr]):
def __fspath__(self) -> AnyStr: ...
_PathType = Union[bytes, Text, _PathLike[str], _PathLike[bytes], local]
class local(_PathLike[str]):
class ImportMismatchError(ImportError): ...
sep: Final[str]
strpath: Final[str]
def __init__(self, path: _PathType = ..., expanduser: bool = ...) -> None: ...
def __hash__(self) -> int: ...
def __eq__(self, other: object) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __lt__(self, other: object) -> bool: ...
def __gt__(self, other: object) -> bool: ...
def __add__(self, other: object) -> local: ...
def __cmp__(self, other: object) -> int: ...
def __div__(self, other: _PathType) -> local: ...
def __truediv__(self, other: _PathType) -> local: ...
def __fspath__(self) -> str: ...
@classmethod
def get_temproot(cls) -> local: ...
@classmethod
def make_numbered_dir(
cls,
prefix: str = ...,
rootdir: Optional[local] = ...,
keep: Optional[int] = ...,
lock_timeout: int = ...,
) -> local: ...
@classmethod
def mkdtemp(cls, rootdir: Optional[local] = ...) -> local: ...
@classmethod
def sysfind(
cls,
name: _PathType,
checker: Optional[Callable[[local], bool]] = ...,
paths: Optional[Iterable[_PathType]] = ...,
) -> Optional[local]: ...
@property
def basename(self) -> str: ...
@property
def dirname(self) -> str: ...
@property
def purebasename(self) -> str: ...
@property
def ext(self) -> str: ...
def as_cwd(self) -> ContextManager[Optional[local]]: ...
def atime(self) -> float: ...
def bestrelpath(self, dest: local) -> str: ...
def chdir(self) -> local: ...
def check(
self,
*,
basename: int = ..., notbasename: int = ...,
basestarts: int = ..., notbasestarts: int = ...,
dir: int = ..., notdir: int = ...,
dotfile: int = ..., notdotfile: int = ...,
endswith: int = ..., notendswith: int = ...,
exists: int = ..., notexists: int = ...,
ext: int = ..., notext: int = ...,
file: int = ..., notfile: int = ...,
fnmatch: int = ..., notfnmatch: int = ...,
link: int = ..., notlink: int = ...,
relto: int = ..., notrelto: int = ...,
) -> bool: ...
def chmod(self, mode: int, rec: Union[int, str, Text, Callable[[local], bool]] = ...) -> None: ...
if sys.platform != 'win32':
def METHOD_NAME(self, user: Union[int, str], group: Union[int, str], rec: int = ...) -> None: ...
def common(self, other: local) -> Optional[local]: ...
def computehash(self, hashtype: str = ..., chunksize: int = ...) -> str: ...
def copy(self, target: local, mode: bool = ..., stat: bool = ...) -> None: ...
def dirpath(self, *args: _PathType, abs: int = ...) -> local: ...
def dump(self, obj: Any, bin: Optional[int] = ...) -> None: ...
def ensure(self, *args: _PathType, dir: int = ...) -> local: ...
def ensure_dir(self, *args: _PathType) -> local: ...
def exists(self) -> bool: ...
    def fnmatch(self, pattern: str) -> _FNMatcher: ...
def isdir(self) -> bool: ...
def isfile(self) -> bool: ...
def islink(self) -> bool: ...
def join(self, *args: _PathType, abs: int = ...) -> local: ...
def listdir(
self,
fil: Optional[Union[str, Text, Callable[[local], bool]]] = ...,
sort: Optional[bool] = ...,
) -> List[local]: ...
def load(self) -> Any: ...
def lstat(self) -> _Stat: ...
def mkdir(self, *args: _PathType) -> local: ...
if sys.platform != 'win32':
def mklinkto(self, oldname: Union[str, local]) -> None: ...
def mksymlinkto(self, value: local, absolute: int = ...) -> None: ...
def move(self, target: local) -> None: ...
def mtime(self) -> float: ...
def new(
self,
*,
drive: str = ...,
dirname: str = ...,
basename: str = ...,
purebasename: str = ...,
ext: str = ...,
) -> local: ...
def open(self, mode: str = ..., ensure: bool = ..., encoding: Optional[str] = ...) -> IO[Any]: ...
def parts(self, reverse: bool = ...) -> List[local]: ...
def pyimport(
self,
modname: Optional[str] = ...,
ensuresyspath: Union[bool, Literal["append", "importlib"]] = ...,
) -> Any: ...
def pypkgpath(self) -> Optional[local]: ...
def read(self, mode: str = ...) -> Union[Text, bytes]: ...
def read_binary(self) -> bytes: ...
def read_text(self, encoding: str) -> Text: ...
def readlines(self, cr: int = ...) -> List[str]: ...
if sys.platform != 'win32':
def readlink(self) -> str: ...
def realpath(self) -> local: ...
def relto(self, relpath: Union[str, local]) -> str: ...
def remove(self, rec: int = ..., ignore_errors: bool = ...) -> None: ...
def rename(self, target: _PathType) -> None: ...
def samefile(self, other: _PathType) -> bool: ...
def setmtime(self, mtime: Optional[float] = ...) -> None: ...
def size(self) -> int: ...
def stat(self, raising: bool = ...) -> _Stat: ...
def sysexec(self, *argv: Any, **popen_opts: Any) -> Text: ...
def visit(
self,
fil: Optional[Union[str, Text, Callable[[local], bool]]] = ...,
rec: Optional[Union[Literal[1, True], str, Text, Callable[[local], bool]]] = ...,
ignore: Type[Exception] = ...,
bf: bool = ...,
sort: bool = ...,
) -> Iterator[local]: ...
def write(self, data: Any, mode: str = ..., ensure: bool = ...) -> None: ...
def write_binary(self, data: bytes, ensure: bool = ...) -> None: ...
def write_text(self, data: Union[str, Text], encoding: str, ensure: bool = ...) -> None: ...
# Untyped types below here.
svnwc: Any
svnurl: Any
SvnAuth: Any
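# Illustrative usage of the `local` API stubbed above (assumed; py.path itself
# provides the runtime implementation):
# import py
# p = py.path.local('/tmp').join('example.txt')
# p.write_text(u'hello', encoding='utf-8', ensure=True)
# assert p.read_text(encoding='utf-8') == u'hello'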
| null |
1,240 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class CopySnapshotRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'CopySnapshot','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_SnapshotId(self): # String
return self.get_query_params().get('SnapshotId')
def set_SnapshotId(self, SnapshotId): # String
self.add_query_param('SnapshotId', SnapshotId)
def get_DestinationRegionId(self): # String
return self.get_query_params().get('DestinationRegionId')
def set_DestinationRegionId(self, DestinationRegionId): # String
self.add_query_param('DestinationRegionId', DestinationRegionId)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_Arns(self): # RepeatList
return self.get_query_params().get('Arn')
def set_Arns(self, Arn): # RepeatList
for depth1 in range(len(Arn)):
if Arn[depth1].get('RoleType') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.RoleType', Arn[depth1].get('RoleType'))
if Arn[depth1].get('Rolearn') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.Rolearn', Arn[depth1].get('Rolearn'))
if Arn[depth1].get('AssumeRoleFor') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.AssumeRoleFor', Arn[depth1].get('AssumeRoleFor'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def METHOD_NAME(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DestinationSnapshotName(self): # String
return self.get_query_params().get('DestinationSnapshotName')
def set_DestinationSnapshotName(self, DestinationSnapshotName): # String
self.add_query_param('DestinationSnapshotName', DestinationSnapshotName)
def get_DestinationSnapshotDescription(self): # String
return self.get_query_params().get('DestinationSnapshotDescription')
def set_DestinationSnapshotDescription(self, DestinationSnapshotDescription): # String
self.add_query_param('DestinationSnapshotDescription', DestinationSnapshotDescription)
def get_Encrypted(self): # Boolean
return self.get_query_params().get('Encrypted')
def set_Encrypted(self, Encrypted): # Boolean
self.add_query_param('Encrypted', Encrypted)
def get_RetentionDays(self): # Integer
return self.get_query_params().get('RetentionDays')
def set_RetentionDays(self, RetentionDays): # Integer
self.add_query_param('RetentionDays', RetentionDays)
def get_KMSKeyId(self): # String
return self.get_query_params().get('KMSKeyId')
def set_KMSKeyId(self, KMSKeyId): # String
self.add_query_param('KMSKeyId', KMSKeyId)
def get_DestinationStorageLocationArn(self): # String
return self.get_query_params().get('DestinationStorageLocationArn')
def set_DestinationStorageLocationArn(self, DestinationStorageLocationArn): # String
self.add_query_param('DestinationStorageLocationArn', DestinationStorageLocationArn)
| null |
1,241 |
from .framework import (
selenium_test,
SeleniumTestCase,
)
# Remove hack when submit_login works more consistently.
VALID_LOGIN_RETRIES = 3
class TestHistorySharing(SeleniumTestCase):
@selenium_test
def test_sharing_valid(self):
user1_email, user2_email, history_id = self.setup_two_users_with_one_shared_history()
self.submit_login(user2_email, retries=VALID_LOGIN_RETRIES)
response = self.api_get(f"histories/{history_id}", raw=True)
assert response.status_code == 200, response.text
@selenium_test
def test_sharing_valid_by_id(self):
user1_email, user2_email, history_id = self.setup_two_users_with_one_shared_history(share_by_id=True)
self.submit_login(user2_email, retries=VALID_LOGIN_RETRIES)
response = self.api_get(f"histories/{history_id}", raw=True)
assert response.status_code == 200, response.text
@selenium_test
def test_unsharing(self):
user1_email, user2_email, history_id = self.setup_two_users_with_one_shared_history()
self.submit_login(user1_email, retries=VALID_LOGIN_RETRIES)
self.home()
self.click_history_option_sharing()
sharing = self.components.histories.sharing
self.share_unshare_with_user(sharing, user2_email)
self.home()
self.click_history_option_sharing()
self.share_ensure_by_user_available(sharing)
unshare_user_button = sharing.unshare_with_user_button(email=user2_email)
unshare_user_button.assert_absent()
self.logout_if_needed()
self.submit_login(user2_email, retries=VALID_LOGIN_RETRIES)
response = self.api_get(f"histories/{history_id}", raw=True)
assert response.status_code == 403
@selenium_test
def test_unshared_history_inaccessible(self):
        # Here for completeness for now - but probably should have an explicit API test case.
user1_email = self._get_random_email()
user2_email = self._get_random_email()
self.register(user1_email)
history_id = self.current_history_id()
self.logout_if_needed()
self.register(user2_email)
response = self.api_get(f"histories/{history_id}", raw=True)
assert response.status_code == 403
@selenium_test
def test_sharing_with_invalid_user(self):
user1_email = self._get_random_email()
self.register(user1_email)
self.share_history_with_user(user_email="[email protected]")
self.assert_error_message(contains="is not a valid Galaxy user")
self.screenshot("history_sharing_invalid_user")
@selenium_test
def METHOD_NAME(self):
user1_email = self._get_random_email()
self.register(user1_email)
self.share_history_with_user(user_email=user1_email)
self.assert_error_message(contains="You cannot share resources with yourself")
self.screenshot("history_sharing_invalid_with_self")
@selenium_test
def test_shared_with_me(self):
user1_email, user2_email, history_id = self.setup_two_users_with_one_shared_history()
self.submit_login(user2_email, retries=VALID_LOGIN_RETRIES)
self.navigate_to_histories_shared_with_me_page()
self.components.shared_histories.selector.wait_for_present()
rows = self.components.shared_histories.histories.all()
assert len(rows) > 0
assert any(user1_email in row.text for row in rows)
def setup_two_users_with_one_shared_history(self, share_by_id=False):
user1_email = self._get_random_email()
user2_email = self._get_random_email()
self.register(user1_email)
self.logout_if_needed()
self.register(user2_email)
user2_id = None
if share_by_id:
user2_id = self.api_get("users")[0]["id"]
self.logout_if_needed()
self.submit_login(user1_email, retries=VALID_LOGIN_RETRIES)
# Can't share an empty history...
self.perform_upload(self.get_filename("1.txt"))
self.wait_for_history()
history_id = self.current_history_id()
self.share_history_with_user(user_id=user2_id, user_email=user2_email, assert_valid=True)
self.logout_if_needed()
return user1_email, user2_email, history_id
def share_history_with_user(self, user_id=None, user_email=None, assert_valid=False, screenshot=False):
"""Share the current history with a target user by ID or email.
``user_email`` will be used to enter in the share form unless ``user_id``
is also specified. The ``user_email`` however is always used to check
the result if ``assert_valid`` is True.
"""
self.home()
self.click_history_option_sharing()
share_kwd = {}
if screenshot:
share_kwd["screenshot_before_submit"] = "history_sharing_user_before_submit"
share_kwd["screenshot_after_submit"] = "history_sharing_user_after_submit"
self.share_with_user(
self.components.histories.sharing,
user_id=user_id,
user_email=user_email,
assert_valid=assert_valid,
**share_kwd,
)
class TestHistoryRequiresLoginSelenium(SeleniumTestCase):
ensure_registered = True
@selenium_test
def test_share_history_login_redirect(self):
user_email = self.get_user_email()
history_id = self.current_history_id()
self.logout()
self.go_to_history_sharing(history_id)
self.assert_error_message(contains="Must be logged in to manage Galaxy items")
self.components._.messages.require_login.wait_for_and_click()
self.fill_login_and_submit(user_email)
self.wait_for_logged_in()
self.wait_for_selector(".make-accessible")
| null |
1,242 |
"""
Views related to OAuth2 platform applications. Intended for OSF internal use only
"""
from django.db.models import Q
from rest_framework.exceptions import APIException
from rest_framework import generics
from rest_framework import permissions as drf_permissions
from api.base.renderers import JSONAPIRenderer, JSONRendererWithESISupport
from framework.auth import cas
from framework.auth.oauth_scopes import CoreScopes
from osf.models import ApiOAuth2Application
from api.base.filters import ListFilterMixin
from api.base.utils import get_object_or_error
from api.base.views import JSONAPIBaseView, DeprecatedView
from api.base import permissions as base_permissions
from api.applications.serializers import ApiOAuth2ApplicationSerializer, ApiOAuth2ApplicationDetailSerializer, ApiOAuth2ApplicationResetSerializer
class ApplicationMixin(object):
"""Mixin with convenience methods for retrieving the current application based on the
current URL. By default, fetches the current application based on the client_id kwarg.
"""
def get_app(self):
app = get_object_or_error(ApiOAuth2Application, Q(client_id=self.kwargs['client_id'], is_active=True), self.request)
self.check_object_permissions(self.request, app)
return app
class ApplicationList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin):
"""
Get a list of API applications (eg OAuth2) that the user has registered
"""
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.OwnerOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.APPLICATIONS_READ]
required_write_scopes = [CoreScopes.APPLICATIONS_WRITE]
serializer_class = ApiOAuth2ApplicationSerializer
view_category = 'applications'
view_name = 'application-list'
renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ] # Hide from web-browsable API tool
ordering = ('-created',)
def get_default_queryset(self):
return ApiOAuth2Application.objects.filter(owner=self.request.user, is_active=True)
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request()
def perform_create(self, serializer):
"""Add user to the created object"""
serializer.validated_data['owner'] = self.request.user
serializer.save()
class ApplicationDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, ApplicationMixin):
"""
Get information about a specific API application (eg OAuth2) that the user has registered
Should not return information if the application belongs to a different user
"""
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.OwnerOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.APPLICATIONS_READ]
required_write_scopes = [CoreScopes.APPLICATIONS_WRITE]
serializer_class = ApiOAuth2ApplicationDetailSerializer
view_category = 'applications'
view_name = 'application-detail'
renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ] # Hide from web-browsable API tool
def get_object(self):
return self.get_app()
# overrides DestroyAPIView
def METHOD_NAME(self, instance):
"""Instance is not actually deleted from DB- just flagged as inactive, which hides it from list views"""
obj = self.get_object()
try:
obj.deactivate(save=True)
except cas.CasHTTPError:
raise APIException('Could not revoke application auth tokens; please try again later')
def perform_update(self, serializer):
"""Necessary to prevent owner field from being blanked on updates"""
serializer.validated_data['owner'] = self.request.user
# TODO: Write code to transfer ownership
serializer.save(owner=self.request.user)
class ApplicationReset(DeprecatedView, generics.CreateAPIView, ApplicationMixin):
"""
Resets client secret of a specific API application (eg OAuth2) that the user has registered
Should not perform update or return information if the application belongs to a different user
"""
max_version = '2.14'
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.OwnerOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.APPLICATIONS_READ]
required_write_scopes = [CoreScopes.APPLICATIONS_WRITE]
serializer_class = ApiOAuth2ApplicationResetSerializer
renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ] # Hide from web-browsable API tool
view_category = 'applications'
view_name = 'application-reset'
def get_object(self):
return self.get_app()
def perform_create(self, serializer):
"""Resets the application client secret, revokes all tokens"""
app = self.get_object()
app.reset_secret(save=True)
app.reload()
serializer.validated_data['client_secret'] = app.client_secret
| null |
1,243 |
#
# Very basic demangling script
#
from __future__ import print_function
# Exception thrown when a `$` is seen in the input
class Truncated(Exception):
pass
# Peekable stream
class Peekable(object):
def __init__(self, inner):
self.inner = inner
self.cache = None
def next(self):
if self.cache is not None:
rv = self.cache
self.cache = None
else:
rv = next(self.inner)
if rv == '$':
raise Truncated()
return rv
def peek(self):
if self.cache is None:
self.cache = next(self.inner)
return self.cache
def __next__(self):
return self.next()
def __iter__(self):
return self
# Mutable string class
# - Used so the current state is known when `Truncated` is thrown
class Str(object):
def __init__(self):
self.inner = ""
def push(self, s):
self.inner += s
# Top-level demangling function
def demangle_string(s):
c_iter = Peekable( iter(s) )
if next(c_iter) != 'Z':
return s
if next(c_iter) != 'R':
return s
# Path type
rv = Str()
try:
demangle_int_path(c_iter, rv)
try:
c = next(c_iter)
except StopIteration:
return rv.inner
except Truncated:
rv.push('$')
for c in c_iter:
rv.push(c)
pass
except Exception:
tail = (c_iter.cache or '') + ''.join(c_iter.inner)
print(s)
print(' '*(len(s) - len(tail)-1), '^', sep='')
print("rv =",rv.inner)
raise
rv.push("{")
for c in c_iter:
rv.push(c)
rv.push("}")
return rv.inner
def demangle_int_path(c_iter, rv):
return {
'G': METHOD_NAME,
'I': demangle_int_pathinherent,
'Q': demangle_int_pathtrait,
}[next(c_iter)](c_iter, rv)
def demangle_int_pathsimple(c_iter, rv):
n = demangle_int_getcount(c_iter)
assert next(c_iter) == 'c'
rv.push("::\"")
rv.push(demangle_int_ident(c_iter))
rv.push("\"")
for _ in range(n):
#print("demangle_int_pathsimple", _, rv)
rv.push("::")
rv.push(demangle_int_ident(c_iter))
def METHOD_NAME(c_iter, rv):
demangle_int_pathsimple(c_iter, rv)
demangle_int_params(c_iter, rv)
# UfcsInherent
def demangle_int_pathinherent(c_iter, rv):
rv.push("<")
demangle_int_type(c_iter, rv)
rv.push(">::")
rv.push(demangle_int_ident(c_iter))
demangle_int_params(c_iter, rv)
# UfcsKnown
def demangle_int_pathtrait(c_iter, rv):
rv.push("<")
demangle_int_type(c_iter, rv)
rv.push(" as ")
METHOD_NAME(c_iter, rv)
rv.push(">::")
rv.push(demangle_int_ident(c_iter))
demangle_int_params(c_iter, rv)
def demangle_int_params(c_iter, rv):
n = demangle_int_getcount(c_iter)
if next(c_iter) != 'g':
raise "error"
if n == 0:
return
rv.push("<")
for _ in range(n):
demangle_int_type(c_iter, rv)
rv.push(",")
rv.push(">")
def demangle_int_type(c_iter, rv):
try:
c = next(c_iter)
cb = {
'C': demangle_int_type_primitive,
'N': demangle_int_path,
'G': METHOD_NAME,
'T': demangle_int_type_tuple,
'B': demangle_int_type_borrow,
'P': demangle_int_type_pointer,
'F': demangle_int_type_function,
}[c]
except StopIteration:
rv.push('?EOS')
raise
except KeyError:
rv.push('?UnkT:'+c)
raise
cb(c_iter, rv)
def demangle_int_type_primitive(c_iter, rv):
try:
c = next(c_iter)
except StopIteration:
rv.push('?EOS')
return
try:
rv.push({
'a': 'u8',
'b': 'i8',
'j': 'i128',
}[c])
    except KeyError:
rv.push('?UnkPrim:'+c)
return
def demangle_int_type_tuple(c_iter, rv):
n = demangle_int_getcount(c_iter)
rv.push("(")
for _ in range(n):
demangle_int_type(c_iter, rv)
rv.push(", ")
rv.push(")")
def demangle_int_type_borrow(c_iter, rv):
rv.push("&")
rv.push({ 's': "", 'u': "mut ", 'o': "move "}[next(c_iter)])
demangle_int_type(c_iter, rv)
def demangle_int_type_pointer(c_iter, rv):
rv.push("*")
rv.push({ 's': "const ", 'u': "mut ", 'o': "move "}[next(c_iter)])
demangle_int_type(c_iter, rv)
def demangle_int_type_function(c_iter, rv):
if c_iter.peek() == 'u':
rv.push("unsafe ")
next(c_iter)
if c_iter.peek() == 'e':
next(c_iter)
abi = demangle_int_ident(c_iter)
rv.push("extern {:?}".format(abi))
rv.push("fn(")
nargs = demangle_int_getcount(c_iter)
for _ in range(nargs):
demangle_int_type(c_iter, rv)
rv.push(", ")
rv.push(")->")
demangle_int_type(c_iter, rv)
# ----
# Identifiers: Semi-complex mangling rules (to handle interior `#` and `-`)
# ----
# Top-level identifier demangling
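# Illustrative identifier encodings (assumed examples, derived from the rules
# implemented below):
#   '4main'  -> "main"    plain length-prefixed string
#   'C_ab'   -> "#ab"     base-26 length 2, '_' marks a hash-prefixed string
#   'B4abcd' -> "a#bcd"   base-26 offset 1 into a length-4 suffixed string
#   '_A'     -> CACHE[0]  back-reference to the first identifier demangled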
CACHE = []
def demangle_int_ident(c_iter):
# `'_' <idx>`: back-reference
if c_iter.peek() == '_':
c_iter.next()
idx = demangle_int_getbase26(c_iter)
return CACHE[idx]
# `<len:int> <data>` - Non-special string
if '0' <= c_iter.peek() and c_iter.peek() <= '9':
rv = demangle_int_suffixed(c_iter)
else:
len1 = demangle_int_getbase26(c_iter)
# `<len:int26> '_' <data>` - Hash prefixed string
if c_iter.peek() == '_':
next(c_iter)
rv = '#' + demangle_int_fixedlen(c_iter, len1);
pass
# `<ofs:int26> <len:idx> <data>` - String with a hash
else:
raw = demangle_int_suffixed(c_iter)
rv = raw[:len1] + "#" + raw[len1:]
## `'b' <idx>`: back-reference
#if c_iter.peek() == 'b':
# c_iter.next()
# idx = demangle_int_getcount(c_iter)
# assert c_iter.next() == '_'
# return CACHE[idx]
#if c_iter.peek() == 'h':
# c_iter.next()
# rv = demangle_int_suffixed(c_iter)
# rv += "#"
# rv += demangle_int_suffixed(c_iter)
#else:
# rv = demangle_int_suffixed(c_iter)
CACHE.append(rv)
return rv
# Read a base-26 count
def demangle_int_getbase26(c_iter):
rv = 0
mul = 1
while True:
c = next(c_iter)
if 'A' <= c and c <= 'Z':
rv += mul * (ord(c) - ord('A'))
return rv
if 'a' <= c and c <= 'z':
rv += mul * (ord(c) - ord('a'))
mul *= 26
continue
raise Exception("Unexpected character `{}` in base26 value".format(c))
# Read a decimal count
def demangle_int_getcount(c_iter):
c = next(c_iter)
if c == '0':
return 0
v = str(c)
while '0' <= c_iter.peek() and c_iter.peek() <= '9':
v += c_iter.next()
return int(v)
# Read a length-prefixed string fragment
def demangle_int_suffixed(c_iter):
l = demangle_int_getcount(c_iter)
#print("demangle_int_suffixed", l)
rv = ""
if l == 80:
l = 8-1
rv += '0'
elif l == 50:
l = 5-1
rv += '0'
return rv + demangle_int_fixedlen(c_iter, l)
def demangle_int_fixedlen(c_iter, l):
rv = ""
for _ in range(l):
rv += next(c_iter)
return rv
if __name__ == "__main__":
import sys
for a in sys.argv[1:]:
print("{} = {}".format(a, demangle_string(a)))
| null |
1,244 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdts.endpoint import endpoint_data
class ConfigureSubscriptionInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dts', '2020-01-01', 'ConfigureSubscriptionInstance','dts')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SourceEndpointInstanceID(self): # String
return self.get_query_params().get('SourceEndpoint.InstanceID')
def set_SourceEndpointInstanceID(self, SourceEndpointInstanceID): # String
self.add_query_param('SourceEndpoint.InstanceID', SourceEndpointInstanceID)
def get_SourceEndpointOracleSID(self): # String
return self.get_query_params().get('SourceEndpoint.OracleSID')
def set_SourceEndpointOracleSID(self, SourceEndpointOracleSID): # String
self.add_query_param('SourceEndpoint.OracleSID', SourceEndpointOracleSID)
def get_SourceEndpointIP(self): # String
return self.get_query_params().get('SourceEndpoint.IP')
def set_SourceEndpointIP(self, SourceEndpointIP): # String
self.add_query_param('SourceEndpoint.IP', SourceEndpointIP)
def get_SubscriptionDataTypeDML(self): # Boolean
return self.get_query_params().get('SubscriptionDataType.DML')
def set_SubscriptionDataTypeDML(self, SubscriptionDataTypeDML): # Boolean
self.add_query_param('SubscriptionDataType.DML', SubscriptionDataTypeDML)
def get_SourceEndpointInstanceType(self): # String
return self.get_query_params().get('SourceEndpoint.InstanceType')
def set_SourceEndpointInstanceType(self, SourceEndpointInstanceType): # String
self.add_query_param('SourceEndpoint.InstanceType', SourceEndpointInstanceType)
def get_AccountId(self): # String
return self.get_query_params().get('AccountId')
def set_AccountId(self, AccountId): # String
self.add_query_param('AccountId', AccountId)
def get_SubscriptionObject(self): # String
return self.get_body_params().get('SubscriptionObject')
def set_SubscriptionObject(self, SubscriptionObject): # String
self.add_body_params('SubscriptionObject', SubscriptionObject)
def get_SubscriptionInstanceVSwitchId(self): # String
return self.get_query_params().get('SubscriptionInstance.VSwitchId')
def set_SubscriptionInstanceVSwitchId(self, SubscriptionInstanceVSwitchId): # String
self.add_query_param('SubscriptionInstance.VSwitchId', SubscriptionInstanceVSwitchId)
def get_SourceEndpointUserName(self): # String
return self.get_query_params().get('SourceEndpoint.UserName')
def set_SourceEndpointUserName(self, SourceEndpointUserName): # String
self.add_query_param('SourceEndpoint.UserName', SourceEndpointUserName)
def get_SourceEndpointDatabaseName(self): # String
return self.get_query_params().get('SourceEndpoint.DatabaseName')
def METHOD_NAME(self, SourceEndpointDatabaseName): # String
self.add_query_param('SourceEndpoint.DatabaseName', SourceEndpointDatabaseName)
def get_SourceEndpointPort(self): # String
return self.get_query_params().get('SourceEndpoint.Port')
def set_SourceEndpointPort(self, SourceEndpointPort): # String
self.add_query_param('SourceEndpoint.Port', SourceEndpointPort)
def get_SourceEndpointOwnerID(self): # String
return self.get_query_params().get('SourceEndpoint.OwnerID')
def set_SourceEndpointOwnerID(self, SourceEndpointOwnerID): # String
self.add_query_param('SourceEndpoint.OwnerID', SourceEndpointOwnerID)
def get_SubscriptionInstanceVPCId(self): # String
return self.get_query_params().get('SubscriptionInstance.VPCId')
def set_SubscriptionInstanceVPCId(self, SubscriptionInstanceVPCId): # String
self.add_query_param('SubscriptionInstance.VPCId', SubscriptionInstanceVPCId)
def get_SubscriptionInstanceNetworkType(self): # String
return self.get_query_params().get('SubscriptionInstanceNetworkType')
def set_SubscriptionInstanceNetworkType(self, SubscriptionInstanceNetworkType): # String
self.add_query_param('SubscriptionInstanceNetworkType', SubscriptionInstanceNetworkType)
def get_SubscriptionInstanceId(self): # String
return self.get_query_params().get('SubscriptionInstanceId')
def set_SubscriptionInstanceId(self, SubscriptionInstanceId): # String
self.add_query_param('SubscriptionInstanceId', SubscriptionInstanceId)
def get_SourceEndpointRole(self): # String
return self.get_query_params().get('SourceEndpoint.Role')
def set_SourceEndpointRole(self, SourceEndpointRole): # String
self.add_query_param('SourceEndpoint.Role', SourceEndpointRole)
def get_OwnerId(self): # String
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # String
self.add_query_param('OwnerId', OwnerId)
def get_SubscriptionDataTypeDDL(self): # Boolean
return self.get_query_params().get('SubscriptionDataType.DDL')
def set_SubscriptionDataTypeDDL(self, SubscriptionDataTypeDDL): # Boolean
self.add_query_param('SubscriptionDataType.DDL', SubscriptionDataTypeDDL)
def get_SourceEndpointPassword(self): # String
return self.get_query_params().get('SourceEndpoint.Password')
def set_SourceEndpointPassword(self, SourceEndpointPassword): # String
self.add_query_param('SourceEndpoint.Password', SourceEndpointPassword)
def get_SubscriptionInstanceName(self): # String
return self.get_query_params().get('SubscriptionInstanceName')
def set_SubscriptionInstanceName(self, SubscriptionInstanceName): # String
self.add_query_param('SubscriptionInstanceName', SubscriptionInstanceName)
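# Usage sketch (illustrative, not part of the generated SDK file; credentials,
# region and ids below are placeholders):
# from aliyunsdkcore.client import AcsClient
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = ConfigureSubscriptionInstanceRequest()
# request.set_SubscriptionInstanceId('dts-example-id')
# request.set_SourceEndpointInstanceType('RDS')
# response = client.do_action_with_exception(request)
# print(response)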
| null |
1,245 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdyplsapi.endpoint import endpoint_data
import json
class CreatePickUpWaybillRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dyplsapi', '2017-05-25', 'CreatePickUpWaybill')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ConsigneeName(self): # String
return self.get_query_params().get('ConsigneeName')
def set_ConsigneeName(self, ConsigneeName): # String
self.add_query_param('ConsigneeName', ConsigneeName)
def get_OrderChannels(self): # String
return self.get_query_params().get('OrderChannels')
def set_OrderChannels(self, OrderChannels): # String
self.add_query_param('OrderChannels', OrderChannels)
def get_SendAddress(self): # Struct
return self.get_query_params().get('SendAddress')
def set_SendAddress(self, SendAddress): # Struct
self.add_query_param("SendAddress", json.dumps(SendAddress))
def get_OuterOrderCode(self): # String
return self.get_query_params().get('OuterOrderCode')
def set_OuterOrderCode(self, OuterOrderCode): # String
self.add_query_param('OuterOrderCode', OuterOrderCode)
def get_Remark(self): # String
return self.get_query_params().get('Remark')
def set_Remark(self, Remark): # String
self.add_query_param('Remark', Remark)
def get_AppointGotStartTime(self): # String
return self.get_query_params().get('AppointGotStartTime')
def set_AppointGotStartTime(self, AppointGotStartTime): # String
self.add_query_param('AppointGotStartTime', AppointGotStartTime)
def get_AppointGotEndTime(self): # String
return self.get_query_params().get('AppointGotEndTime')
def set_AppointGotEndTime(self, AppointGotEndTime): # String
self.add_query_param('AppointGotEndTime', AppointGotEndTime)
def get_CpCode(self): # String
return self.get_query_params().get('CpCode')
def set_CpCode(self, CpCode): # String
self.add_query_param('CpCode', CpCode)
def get_SendMobile(self): # String
return self.get_query_params().get('SendMobile')
def set_SendMobile(self, SendMobile): # String
self.add_query_param('SendMobile', SendMobile)
def get_ConsigneeMobile(self): # String
return self.get_query_params().get('ConsigneeMobile')
def set_ConsigneeMobile(self, ConsigneeMobile): # String
self.add_query_param('ConsigneeMobile', ConsigneeMobile)
def get_ContentType(self): # String
return self.get_headers().get('Content-Type')
def set_ContentType(self, ContentType): # String
self.add_header('Content-Type', ContentType)
def get_ConsigneeAddress(self): # Struct
return self.get_query_params().get('ConsigneeAddress')
def set_ConsigneeAddress(self, ConsigneeAddress): # Struct
self.add_query_param("ConsigneeAddress", json.dumps(ConsigneeAddress))
def METHOD_NAME(self): # String
return self.get_query_params().get('SendPhone')
def set_SendPhone(self, SendPhone): # String
self.add_query_param('SendPhone', SendPhone)
def get_GoodsInfos(self): # Array
return self.get_query_params().get('GoodsInfos')
def set_GoodsInfos(self, GoodsInfos): # Array
self.add_query_param("GoodsInfos", json.dumps(GoodsInfos))
def get_BizType(self): # Integer
return self.get_query_params().get('BizType')
def set_BizType(self, BizType): # Integer
self.add_query_param('BizType', BizType)
def get_SendName(self): # String
return self.get_query_params().get('SendName')
def set_SendName(self, SendName): # String
self.add_query_param('SendName', SendName)
def get_ConsigneePhone(self): # String
return self.get_query_params().get('ConsigneePhone')
def set_ConsigneePhone(self, ConsigneePhone): # String
self.add_query_param('ConsigneePhone', ConsigneePhone)
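# Usage sketch (illustrative; the address and goods fields are hypothetical).
# Struct and Array parameters such as SendAddress and GoodsInfos are
# serialized with json.dumps() before being added, so plain dicts/lists work:
# request = CreatePickUpWaybillRequest()
# request.set_SendAddress({"ProvinceName": "Zhejiang", "CityName": "Hangzhou"})
# request.set_GoodsInfos([{"Name": "book", "Quantity": "1"}])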
| null |
1,246 |
from collections import defaultdict, namedtuple
from enum import Enum
from typing import Dict, List
from .avatar_state import create_avatar_state
from .location import Location
from .pathfinding import astar
from .utils import NearbyArtefactsList
# how many nearby artefacts to return
SCAN_LIMIT = 3
SCAN_RADIUS = 12
ARTEFACT_TYPES = ["chest", "key", "yellow_orb", "phone", "keyboard", "coins"]
PICKUP_TYPES = ["damage_boost", "invulnerability", "health"] + ARTEFACT_TYPES
class ArtefactType(Enum):
CHEST = "chest"
KEY = "key"
YELLOW_ORB = "yellow_orb"
PHONE = "phone"
KEYBOARD = "keyboard"
COINS = "coins"
def __eq__(self, other):
return self.value == other
def __str__(self):
return self.value
Artefact = namedtuple("Artefact", ["type", "location", "path"])
class Cell(object):
"""
Any position on the world grid.
"""
def __init__(self, location, avatar=None, **kwargs):
self.location = Location(**location)
self.avatar = None
self.interactable = None
self.obstacle = None
if avatar:
self.avatar = create_avatar_state(avatar)
for (key, value) in kwargs.items():
if not key == "habitable":
setattr(self, key, value)
@property
def habitable(self):
return not (self.avatar or self.obstacle)
def has_artefact(self):
return self.interactable is not None and self.interactable["type"] in ARTEFACT_TYPES
def __repr__(self):
return "Cell({} a={} i={})".format(self.location, self.avatar, self.interactable)
def __eq__(self, other):
return self.location == other.location
def __ne__(self, other):
return not self == other
class WorldMapCreator:
@staticmethod
def generate_world_map_from_cells_data(cells: List[Cell]) -> "WorldMap":
world_map_cells: Dict[Location, Cell] = {}
for cell_data in cells:
cell = Cell(**cell_data)
world_map_cells[cell.location] = cell
return WorldMap(world_map_cells)
@staticmethod
def generate_world_map_from_game_state(game_state) -> "WorldMap":
cells: Dict[Location, Cell] = {}
for x in range(game_state["southWestCorner"]["x"], game_state["northEastCorner"]["x"] + 1):
for y in range(
game_state["southWestCorner"]["y"],
game_state["northEastCorner"]["y"] + 1,
):
cell = Cell({"x": x, "y": y})
cells[Location(x, y)] = cell
for interactable in game_state["interactables"]:
location = Location(interactable["location"]["x"], interactable["location"]["y"])
cells[location].interactable = interactable
for obstacle in game_state["obstacles"]:
location = Location(obstacle["location"]["x"], obstacle["location"]["y"])
cells[location].obstacle = obstacle
for player in game_state["players"]:
location = Location(player["location"]["x"], player["location"]["y"])
cells[location].avatar = create_avatar_state(player)  # stored on .avatar so habitable and can_move_to see the occupant
return WorldMap(cells)
class WorldMap(object):
"""
The non-player world state.
"""
artefact_types = ArtefactType
def __init__(self, cells: Dict[Location, Cell]):
self.cells = cells
def all_cells(self):
return self.cells.values()
def interactable_cells(self):
return [cell for cell in self.all_cells() if cell.interactable]
def pickup_cells(self):
return [cell for cell in self.interactable_cells() if cell.interactable["type"] in PICKUP_TYPES]
def score_cells(self):
return [cell for cell in self.interactable_cells() if "score" == cell.interactable["type"]]
def partially_fogged_cells(self):
return [cell for cell in self.all_cells() if cell.partially_fogged]
def is_visible(self, location):
return location in self.cells
def METHOD_NAME(self, location):
cell = self.cells[location]
assert cell.location == location, "location lookup mismatch: arg={}, found={}".format(location, cell.location)
return cell
def can_move_to(self, target_location):
try:
cell = self.METHOD_NAME(target_location)
except KeyError:
return False
return getattr(cell, "habitable", False) and not getattr(cell, "avatar", False)
def _scan_artefacts(self, start_location, radius):
# get artefacts from starting location within the radius
artefacts = []
for x in range(start_location.x - radius, start_location.x + radius + 1):
for y in range(start_location.y - radius, start_location.y + radius + 1):
try:
cell = self.METHOD_NAME(Location(x, y))
except KeyError:
continue
if cell.has_artefact():
artefacts.append(cell)
return artefacts
def scan_nearby(self, avatar_location, radius=SCAN_RADIUS) -> NearbyArtefactsList[dict]:
"""
From the given location point search the given radius for artefacts.
Returns list of nearest artefacts (artefact/interactable represented as dict).
"""
artefact_cells = self._scan_artefacts(avatar_location, radius)
# get the best path to each artefact
nearby = defaultdict(list)
for art_cell in artefact_cells:
path = astar(self, self.cells.get(avatar_location), art_cell)
# only add to the list when there's a path
if path:
nearby[len(path)].append((art_cell, path))
# sort them by distance (the length of path) and take the nearest first
nearest = []
for distance in sorted(nearby.keys()):
for art_cell, path in nearby[distance]:
# use namedtuple so fields accessible by attribute lookup
artefact = Artefact(
type=art_cell.interactable["type"],
location=art_cell.location,
path=path,
)
nearest.append(artefact)
if len(nearest) >= SCAN_LIMIT:
break
return NearbyArtefactsList(nearest[:SCAN_LIMIT])
def __repr__(self):
return repr(self.cells)
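# Usage sketch (hypothetical minimal game_state containing only the keys read
# by generate_world_map_from_game_state):
# game_state = {
#     "southWestCorner": {"x": 0, "y": 0},
#     "northEastCorner": {"x": 2, "y": 2},
#     "interactables": [{"location": {"x": 1, "y": 1}, "type": "key"}],
#     "obstacles": [],
#     "players": [],
# }
# world_map = WorldMapCreator.generate_world_map_from_game_state(game_state)
# print(world_map.scan_nearby(Location(0, 0)))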
| null |
1,247 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class DescribeFlowlogsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'DescribeFlowlogs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_CenId(self): # String
return self.get_query_params().get('CenId')
def set_CenId(self, CenId): # String
self.add_query_param('CenId', CenId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def METHOD_NAME(self, Description): # String
self.add_query_param('Description', Description)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_LogStoreName(self): # String
return self.get_query_params().get('LogStoreName')
def set_LogStoreName(self, LogStoreName): # String
self.add_query_param('LogStoreName', LogStoreName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_TransitRouterAttachmentId(self): # String
return self.get_query_params().get('TransitRouterAttachmentId')
def set_TransitRouterAttachmentId(self, TransitRouterAttachmentId): # String
self.add_query_param('TransitRouterAttachmentId', TransitRouterAttachmentId)
def get_FlowLogId(self): # String
return self.get_query_params().get('FlowLogId')
def set_FlowLogId(self, FlowLogId): # String
self.add_query_param('FlowLogId', FlowLogId)
def get_FlowLogName(self): # String
return self.get_query_params().get('FlowLogName')
def set_FlowLogName(self, FlowLogName): # String
self.add_query_param('FlowLogName', FlowLogName)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status)
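# Usage sketch (illustrative; tag values are placeholders). RepeatList
# parameters are flattened into indexed query keys, so the call below yields
# Tag.1.Key=env and Tag.1.Value=prod:
# request = DescribeFlowlogsRequest()
# request.set_Tags([{"Key": "env", "Value": "prod"}])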
| null |
1,248 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeInvocationsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeInvocations','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_InvokeStatus(self): # String
return self.get_query_params().get('InvokeStatus')
def METHOD_NAME(self, InvokeStatus): # String
self.add_query_param('InvokeStatus', InvokeStatus)
def get_IncludeOutput(self): # Boolean
return self.get_query_params().get('IncludeOutput')
def set_IncludeOutput(self, IncludeOutput): # Boolean
self.add_query_param('IncludeOutput', IncludeOutput)
def get_CommandId(self): # String
return self.get_query_params().get('CommandId')
def set_CommandId(self, CommandId): # String
self.add_query_param('CommandId', CommandId)
def get_PageNumber(self): # Long
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Long
self.add_query_param('PageNumber', PageNumber)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_ContentEncoding(self): # String
return self.get_query_params().get('ContentEncoding')
def set_ContentEncoding(self, ContentEncoding): # String
self.add_query_param('ContentEncoding', ContentEncoding)
def get_RepeatMode(self): # String
return self.get_query_params().get('RepeatMode')
def set_RepeatMode(self, RepeatMode): # String
self.add_query_param('RepeatMode', RepeatMode)
def get_PageSize(self): # Long
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Long
self.add_query_param('PageSize', PageSize)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_InvokeId(self): # String
return self.get_query_params().get('InvokeId')
def set_InvokeId(self, InvokeId): # String
self.add_query_param('InvokeId', InvokeId)
def get_Timed(self): # Boolean
return self.get_query_params().get('Timed')
def set_Timed(self, Timed): # Boolean
self.add_query_param('Timed', Timed)
def get_CommandName(self): # String
return self.get_query_params().get('CommandName')
def set_CommandName(self, CommandName): # String
self.add_query_param('CommandName', CommandName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_CommandType(self): # String
return self.get_query_params().get('CommandType')
def set_CommandType(self, CommandType): # String
self.add_query_param('CommandType', CommandType)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
| null |
1,249 |
# Code copied from Python 3.6 - Python Software Foundation - GNU General Public License v3.0
#
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20140917
_MAXREPEAT = 4294967295
MAXGROUPS = 2147483647
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
"""Exception raised for invalid regular expressions.
Attributes:
msg: The unformatted error message
pattern: The regular expression pattern
pos: The index in the pattern where compilation failed (may be None)
lineno: The line corresponding to pos (may be None)
colno: The column corresponding to pos (may be None)
"""
def __init__(self, msg, pattern=None, pos=None):
self.msg = msg
self.pattern = pattern
self.pos = pos
if pattern is not None and pos is not None:
msg = '%s at position %d' % (msg, pos)
if isinstance(pattern, str):
newline = '\n'
else:
newline = b'\n'
self.lineno = pattern.count(newline, 0, pos) + 1
self.colno = pos - pattern.rfind(newline, 0, pos)
if newline in pattern:
msg = '%s (line %d, column %d)' % (msg, self.lineno, self.colno)
else:
self.lineno = self.colno = None
super().__init__(msg)
class _NamedIntConstant(int):
def __new__(cls, value, name):
self = super(_NamedIntConstant, cls).__new__(cls, value)
self.name = name
return self
def __str__(self):
return self.name
__repr__ = __str__
MAXREPEAT = _NamedIntConstant(_MAXREPEAT, 'MAXREPEAT')
def _makecodes(names):
names = names.strip().split()
items = [_NamedIntConstant(i, name) for i, name in enumerate(names)]
globals().update({item.name: item for item in items})
return items
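# Illustration (follows from the class above): every name becomes a
# module-level _NamedIntConstant whose integer value is its position, so after
# OPCODES is built, FAILURE == 0, SUCCESS == 1, and str(SUCCESS) == 'SUCCESS'.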
# operators
# failure=0 success=1 (just because it looks better that way :-)
OPCODES = _makecodes("""
FAILURE SUCCESS
ANY ANY_ALL
ASSERT ASSERT_NOT
AT
BRANCH
CALL
CATEGORY
CHARSET BIGCHARSET
GROUPREF GROUPREF_EXISTS GROUPREF_IGNORE
IN IN_IGNORE
INFO
JUMP
LITERAL LITERAL_IGNORE
MARK
MAX_UNTIL
MIN_UNTIL
NOT_LITERAL NOT_LITERAL_IGNORE
NEGATE
RANGE
REPEAT
REPEAT_ONE
SUBPATTERN
MIN_REPEAT_ONE
RANGE_IGNORE
MIN_REPEAT MAX_REPEAT
""")
del OPCODES[-2:] # remove MIN_REPEAT and MAX_REPEAT
# positions
ATCODES = _makecodes("""
AT_BEGINNING AT_BEGINNING_LINE AT_BEGINNING_STRING
AT_BOUNDARY AT_NON_BOUNDARY
AT_END AT_END_LINE AT_END_STRING
AT_LOC_BOUNDARY AT_LOC_NON_BOUNDARY
AT_UNI_BOUNDARY AT_UNI_NON_BOUNDARY
""")
# categories
CHCODES = _makecodes("""
CATEGORY_DIGIT CATEGORY_NOT_DIGIT
CATEGORY_SPACE CATEGORY_NOT_SPACE
CATEGORY_WORD CATEGORY_NOT_WORD
CATEGORY_LINEBREAK CATEGORY_NOT_LINEBREAK
CATEGORY_LOC_WORD CATEGORY_LOC_NOT_WORD
CATEGORY_UNI_DIGIT CATEGORY_UNI_NOT_DIGIT
CATEGORY_UNI_SPACE CATEGORY_UNI_NOT_SPACE
CATEGORY_UNI_WORD CATEGORY_UNI_NOT_WORD
CATEGORY_UNI_LINEBREAK CATEGORY_UNI_NOT_LINEBREAK
""")
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE,
RANGE: RANGE_IGNORE,
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
def METHOD_NAME(f, d, prefix):
items = sorted(d)
for item in items:
f.write("#define %s_%s %d\n" % (prefix, item, item))
with open("sre_constants.h", "w") as f:
f.write("""\
/*
* Secret Labs' Regular Expression Engine
*
* regular expression matching engine
*
* NOTE: This file is generated by sre_constants.py. If you need
* to change anything in here, edit sre_constants.py and run it.
*
* Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
*
* See the _sre.c file for information on usage and redistribution.
*/
""")
f.write("#define SRE_MAGIC %d\n" % MAGIC)
METHOD_NAME(f, OPCODES, "SRE_OP")
METHOD_NAME(f, ATCODES, "SRE")
METHOD_NAME(f, CHCODES, "SRE")
f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
f.write("#define SRE_FLAG_DEBUG %d\n" % SRE_FLAG_DEBUG)
f.write("#define SRE_FLAG_ASCII %d\n" % SRE_FLAG_ASCII)
f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
print("done")
| null |
1,250 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class CreateOTADynamicUpgradeJobRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'CreateOTADynamicUpgradeJob')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DynamicMode(self):
return self.get_query_params().get('DynamicMode')
def set_DynamicMode(self,DynamicMode):
self.add_query_param('DynamicMode',DynamicMode)
def get_MultiModuleMode(self):
return self.get_query_params().get('MultiModuleMode')
def set_MultiModuleMode(self,MultiModuleMode):
self.add_query_param('MultiModuleMode',MultiModuleMode)
def get_RetryCount(self):
return self.get_query_params().get('RetryCount')
def set_RetryCount(self,RetryCount):
self.add_query_param('RetryCount',RetryCount)
def get_TimeoutInMinutes(self):
return self.get_query_params().get('TimeoutInMinutes')
def set_TimeoutInMinutes(self,TimeoutInMinutes):
self.add_query_param('TimeoutInMinutes',TimeoutInMinutes)
def get_NeedConfirm(self):
return self.get_query_params().get('NeedConfirm')
def set_NeedConfirm(self,NeedConfirm):
self.add_query_param('NeedConfirm',NeedConfirm)
def get_GroupType(self):
return self.get_query_params().get('GroupType')
def set_GroupType(self,GroupType):
self.add_query_param('GroupType',GroupType)
def get_NeedPush(self):
return self.get_query_params().get('NeedPush')
def set_NeedPush(self,NeedPush):
self.add_query_param('NeedPush',NeedPush)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_DownloadProtocol(self):
return self.get_query_params().get('DownloadProtocol')
def set_DownloadProtocol(self,DownloadProtocol):
self.add_query_param('DownloadProtocol',DownloadProtocol)
def get_Tags(self):
return self.get_query_params().get('Tag')
def set_Tags(self, Tags):
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def get_GroupId(self):
return self.get_query_params().get('GroupId')
def set_GroupId(self,GroupId):
self.add_query_param('GroupId',GroupId)
def get_FirmwareId(self):
return self.get_query_params().get('FirmwareId')
def set_FirmwareId(self,FirmwareId):
self.add_query_param('FirmwareId',FirmwareId)
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey)
def get_RetryInterval(self):
return self.get_query_params().get('RetryInterval')
def set_RetryInterval(self,RetryInterval):
self.add_query_param('RetryInterval',RetryInterval)
def get_SrcVersions(self):
return self.get_query_params().get('SrcVersion')
def METHOD_NAME(self, SrcVersions):
for depth1 in range(len(SrcVersions)):
if SrcVersions[depth1] is not None:
self.add_query_param('SrcVersion.' + str(depth1 + 1) , SrcVersions[depth1])
def get_OverwriteMode(self):
return self.get_query_params().get('OverwriteMode')
def set_OverwriteMode(self,OverwriteMode):
self.add_query_param('OverwriteMode',OverwriteMode)
def get_MaximumPerMinute(self):
return self.get_query_params().get('MaximumPerMinute')
def set_MaximumPerMinute(self,MaximumPerMinute):
self.add_query_param('MaximumPerMinute', MaximumPerMinute)
| null |
1,251 |
import os
import sys
import ast
import signal
import subprocess
def spawnNAR():
return subprocess.Popen(["./../../NAR", "shell"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True)
NARproc = spawnNAR()
def getNAR():
return NARproc
def setNAR(proc):
global NARproc
NARproc = proc
def terminateNAR(usedNAR=NARproc):
os.killpg(os.getpgid(usedNAR.pid), signal.SIGTERM)
def parseTruth(T):
return {"frequency": T.split("frequency=")[1].split(" confidence")[0].replace(",",""), "confidence": T.split(" confidence=")[1].split(" dt=")[0].split(" occurrenceTime=")[0]}
def parseTask(s):
M = {"occurrenceTime" : "eternal"}
if " :|:" in s:
M["occurrenceTime"] = "now"
s = s.replace(" :|:","")
if "occurrenceTime" in s:
M["occurrenceTime"] = s.split("occurrenceTime=")[1].split(" ")[0]
if "Stamp" in s:
M["Stamp"] = ast.literal_eval(s.split("Stamp=")[1].split("]")[0]+"]")
sentence = s.split(" occurrenceTime=")[0] if " occurrenceTime=" in s else s.split(" Stamp=")[0].split(" Priority=")[0].split(" creationTime=")[0]
M["punctuation"] = sentence[-4] if ":|:" in sentence else sentence[-1]
M["term"] = sentence.split(" creationTime")[0].split(" occurrenceTime")[0].split(" Truth")[0].split(" Stamp=")[0][:-1]
if "Truth" in s:
M["truth"] = parseTruth(s.split("Truth: ")[1])
if "Priority" in s:
M["Priority"] = s.split("Priority=")[1].split(" ")[0]
return M
def parseReason(sraw):
if "implication: " not in sraw:
return None
Implication = parseTask(sraw.split("implication: ")[-1].split("precondition: ")[0]) #last reason only (others couldn't be associated currently)
Precondition = parseTask(sraw.split("precondition: ")[-1].split("\n")[0])
Implication["occurrenceTime"] = "eternal"
Precondition["punctuation"] = Implication["punctuation"] = "."
Reason = {}
Reason["desire"] = sraw.split("decision expectation=")[-1].split(" ")[0]
Reason["hypothesis"] = Implication
Reason["precondition"] = Precondition
return Reason
def parseExecution(e):
if "args " not in e:
return {"operator" : e.split(" ")[0], "arguments" : []}
return {"operator" : e.split(" ")[0], "arguments" : e.split("args ")[1].split("{SELF} * ")[1][:-1]}
def GetRawOutput(usedNAR):
usedNAR.stdin.write("0\n")
usedNAR.stdin.flush()
ret = ""
before = []
requestOutputArgs = False
while "done with 0 additional inference steps." != ret.strip():
if ret != "":
before.append(ret.strip())
if ret.strip() == "//Operation result product expected:":
requestOutputArgs = True
break
ret = usedNAR.stdout.readline()
return before[:-1], requestOutputArgs
def GetOutput(usedNAR):
lines, requestOutputArgs = GetRawOutput(usedNAR)
executions = [parseExecution(l) for l in lines if l.startswith('^')]
inputs = [parseTask(l.split("Input: ")[1]) for l in lines if l.startswith('Input:')]
derivations = [parseTask(l.split("Derived: " if l.startswith('Derived:') else "Revised: ")[1]) for l in lines if l.startswith('Derived:') or l.startswith('Revised:')]
answers = [parseTask(l.split("Answer: ")[1]) for l in lines if l.startswith('Answer:')]
selections = [parseTask(l.split("Selected: ")[1]) for l in lines if l.startswith('Selected:')]
reason = parseReason("\n".join(lines))
return {"input": inputs, "derivations": derivations, "answers": answers, "executions": executions, "reason": reason, "selections": selections, "raw": "\n".join(lines), "requestOutputArgs" : requestOutputArgs}
def GetStats(usedNAR):
Stats = {}
lines, _ = GetRawOutput(usedNAR)
for l in lines:
if ":" in l:
leftside = l.split(":")[0].replace(" ", "_").strip()
rightside = float(l.split(":")[1].strip())
Stats[leftside] = rightside
return Stats
def AddInput(narsese, Print=True, usedNAR=NARproc):
usedNAR.stdin.write(narsese + '\n')
usedNAR.stdin.flush()
ReturnStats = narsese == "*stats"
if ReturnStats:
if Print:
print("\n".join(GetRawOutput(usedNAR)[0]))
return GetStats(usedNAR)
ret = GetOutput(usedNAR)
if Print:
print(ret["raw"])
sys.stdout.flush()
return ret
def Exit(usedNAR=NARproc):
usedNAR.sendline("quit")
def METHOD_NAME(usedNAR=NARproc):
AddInput("*reset", usedNAR=usedNAR)
AddInput("*volume=100")
def PrintedTask(task):
st = task["term"] + task["punctuation"]
st += (" :|: occurrenceTime="+task["occurrenceTime"] if task["occurrenceTime"].isdigit() else "")
if "Priority" in task: st += " Priority=" + str(task["Priority"])
if "truth" in task: st += " Truth: frequency="+task["truth"]["frequency"] + " confidence="+task["truth"]["confidence"]
return st
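# Usage sketch (assumes the NAR binary spawned above is available):
# ret = AddInput("<(a * b) --> rel>.", Print=False)
# for task in ret["derivations"]:
#     print(PrintedTask(task))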
def Shell():
while True:
try:
inp = input().rstrip("\n")
except:
exit(0)
AddInput(inp)
if __name__ == "__main__":
Shell()
| null |
1,252 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkess.endpoint import endpoint_data
class CreateScheduledTaskRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ess', '2014-08-28', 'CreateScheduledTask','ess')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ScheduledAction(self): # String
return self.get_query_params().get('ScheduledAction')
def set_ScheduledAction(self, ScheduledAction): # String
self.add_query_param('ScheduledAction', ScheduledAction)
def get_MaxValue(self): # Integer
return self.get_query_params().get('MaxValue')
def set_MaxValue(self, MaxValue): # Integer
self.add_query_param('MaxValue', MaxValue)
def get_ScalingGroupId(self): # String
return self.get_query_params().get('ScalingGroupId')
def set_ScalingGroupId(self, ScalingGroupId): # String
self.add_query_param('ScalingGroupId', ScalingGroupId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_RecurrenceEndTime(self): # String
return self.get_query_params().get('RecurrenceEndTime')
def set_RecurrenceEndTime(self, RecurrenceEndTime): # String
self.add_query_param('RecurrenceEndTime', RecurrenceEndTime)
def get_LaunchTime(self): # String
return self.get_query_params().get('LaunchTime')
def set_LaunchTime(self, LaunchTime): # String
self.add_query_param('LaunchTime', LaunchTime)
def get_DesiredCapacity(self): # Integer
return self.get_query_params().get('DesiredCapacity')
def set_DesiredCapacity(self, DesiredCapacity): # Integer
self.add_query_param('DesiredCapacity', DesiredCapacity)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_RecurrenceValue(self): # String
return self.get_query_params().get('RecurrenceValue')
def set_RecurrenceValue(self, RecurrenceValue): # String
self.add_query_param('RecurrenceValue', RecurrenceValue)
def get_LaunchExpirationTime(self): # Integer
return self.get_query_params().get('LaunchExpirationTime')
def set_LaunchExpirationTime(self, LaunchExpirationTime): # Integer
self.add_query_param('LaunchExpirationTime', LaunchExpirationTime)
def get_MinValue(self): # Integer
return self.get_query_params().get('MinValue')
def METHOD_NAME(self, MinValue): # Integer
self.add_query_param('MinValue', MinValue)
def get_ScheduledTaskName(self): # String
return self.get_query_params().get('ScheduledTaskName')
def set_ScheduledTaskName(self, ScheduledTaskName): # String
self.add_query_param('ScheduledTaskName', ScheduledTaskName)
def get_TaskEnabled(self): # Boolean
return self.get_query_params().get('TaskEnabled')
def set_TaskEnabled(self, TaskEnabled): # Boolean
self.add_query_param('TaskEnabled', TaskEnabled)
def get_RecurrenceType(self): # String
return self.get_query_params().get('RecurrenceType')
def set_RecurrenceType(self, RecurrenceType): # String
self.add_query_param('RecurrenceType', RecurrenceType)
| null |
1,253 |
# SPDX-FileCopyrightText: 2021 Melissa LeBlanc-Williams for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""I2C Classes for RP2040s with u2if firmware"""
from .rp2040_u2if import rp2040_u2if
class I2C:
"""I2C Base Class for RP2040 u2if"""
def __init__(self, index, *, frequency=100000):
self._index = index
rp2040_u2if.i2c_set_port(self._index)
rp2040_u2if.i2c_configure(frequency)
def scan(self):
"""Perform an I2C Device Scan"""
rp2040_u2if.i2c_set_port(self._index)
return rp2040_u2if.i2c_scan()
# pylint: disable=unused-argument
def METHOD_NAME(self, address, buffer, *, start=0, end=None, stop=True):
"""Write data from the buffer to an address"""
rp2040_u2if.i2c_set_port(self._index)
rp2040_u2if.i2c_writeto(address, buffer, start=start, end=end)
def readfrom_into(self, address, buffer, *, start=0, end=None, stop=True):
"""Read data from an address and into the buffer"""
rp2040_u2if.i2c_set_port(self._index)
rp2040_u2if.i2c_readfrom_into(address, buffer, start=start, end=end)
def writeto_then_readfrom(
self,
address,
buffer_out,
buffer_in,
*,
out_start=0,
out_end=None,
in_start=0,
in_end=None,
stop=False,
):
"""Write data from buffer_out to an address and then
read data from an address and into buffer_in
"""
rp2040_u2if.i2c_set_port(self._index)
rp2040_u2if.i2c_writeto_then_readfrom(
address,
buffer_out,
buffer_in,
out_start=out_start,
out_end=out_end,
in_start=in_start,
in_end=in_end,
)
# pylint: enable=unused-argument
class I2C_Pico(I2C):
"""I2C Class for Pico u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 5 and sda.id == 4:
index = 0
if scl.id == 15 and sda.id == 14:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_Feather(I2C):
"""I2C Class for Feather u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 3 and sda.id == 2:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_Feather_CAN(I2C):
"""I2C Class for Feather EPD u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 3 and sda.id == 2:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_Feather_EPD(I2C):
"""I2C Class for Feather EPD u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 3 and sda.id == 2:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_Feather_RFM(I2C):
"""I2C Class for Feather EPD u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 3 and sda.id == 2:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_QTPY(I2C):
"""I2C Class for QT Py 2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 25 and sda.id == 24:
index = 0
if scl.id == 23 and sda.id == 22:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_ItsyBitsy(I2C):
"""I2C Class for ItsyBitsy u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 3 and sda.id == 2:
index = 1
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_MacroPad(I2C):
"""I2C Class for MacroPad u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 21 and sda.id == 20:
index = 0
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_QT2040_Trinkey(I2C):
"""I2C Class for QT2040 Trinkey u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 17 and sda.id == 16:
index = 0
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
class I2C_KB2040(I2C):
"""I2C Class for KB2040 u2if"""
def __init__(self, scl, sda, *, frequency=100000):
index = None
if scl.id == 13 and sda.id == 12:
index = 0
if index is None:
raise ValueError("I2C not found on specified pins.")
self._index = index
super().__init__(index, frequency=frequency)
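# Usage sketch (hypothetical pin objects; any objects whose .id attributes
# match the pairs checked above will work, e.g. a board module's pins):
# i2c = I2C_KB2040(board.SCL, board.SDA, frequency=400000)
# print([hex(addr) for addr in i2c.scan()])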
| null |
1,254 |
import argparse
import re
import log_get_last_function
RE_equate_lifetimes = re.compile(r".*equate_lifetimes: ('[^ ]+) := ('[^$]+)")
assert RE_equate_lifetimes.match("Expand HIR Lifetimes- `anonymous-namespace'::ExprVisitor_Enumerate::equate_lifetimes: 'static := '#ivar14"), RE_equate_lifetimes.pattern
# Expand HIR Lifetimes- HIR_Expand_LifetimeInfer_ExprInner: >> (=== Iter 0 ===)
# Expand HIR Lifetimes- `anonymous-namespace'::LifetimeInferState::dump: '#ivar0 -- to=['#ivar69, '#ivar83], from=['#ivar13]
# Expand HIR Lifetimes- HIR_Expand_LifetimeInfer_ExprInner: << (=== Iter 0 ===)
# Expand HIR Lifetimes- HIR_Expand_LifetimeInfer_ExprInner: >> (COMPACT)
# Expand HIR Lifetimes- `anonymous-namespace'::LifetimeInferState::dump: '#ivar0 = 'static to=['static, 'static]
# Expand HIR Lifetimes- HIR_Expand_LifetimeInfer_ExprInner: << (COMPACT)
def main():
argp = argparse.ArgumentParser()
argp.add_argument("logfile", help="Single-function log file")
argp.add_argument("--source", choices=['raw', 'initial', 'final'], default='raw')
argp.add_argument("--only-ivar")
args = argp.parse_args()
class State(object):
def __init__(self):
self.links_raw = []
self.links_initial = set() # from the initial ivar dump
self.links_final = {}
self.in_iter_0 = False
self.in_compact = False
fcn_name = ""
state = State()
for line in open(args.logfile):
line = line.strip()
if log_get_last_function.is_function_header(line):
fcn_name = line
state = State()
continue
if ' >> (=== Iter 0 ===)' in line:
state.in_iter_0 = True
continue
if ' << (=== Iter 0 ===)' in line:
state.in_iter_0 = False
continue
if ' >> (COMPACT)' in line:
state.in_compact = True
continue
if ' << (COMPACT)' in line:
state.in_compact = False
continue
m = RE_equate_lifetimes.match(line)
if m is not None:
state.links_raw.append( (m[2], m[1], "") )
continue
if state.in_iter_0 and 'dump: ' in line and ' -- to=' in line:
_,line = line.split('dump: ')
if ' = ' in line:
continue
varname,extra = line.split(' -- ')
extra = extra[4:-1]
to,fr = extra.split('], from=[')
if to != "":
for v in to.split(', '):
state.links_initial.add((varname, v, ""))
if fr != "":
for v in fr.split(', '):
state.links_initial.add((v, varname, ""))
continue
if state.in_compact and "dump: '" in line and " = '" in line:
_,line = line.split('dump: ')
if not ' = ' in line:
continue
varname,extra = line.split(' = ')
fr,to = extra.split(' to=[')
to = to[:-1]
if to != "":
for v in to.split(', '):
state.links_final.setdefault((fr, v, ), set()).add(varname)
continue
state.links_final = set( (a,b,make_list(vals)) for (a,b),vals in state.links_final.items() )
links = {
'raw': state.links_raw,
'initial': state.links_initial,
'final': state.links_final,
}[args.source]
if args.only_ivar is not None:
links = METHOD_NAME(links, "'#ivar"+args.only_ivar)
if True:
print()
print("digraph borrowck {")
for a,b,label in links:
print("\"{a}\" -> \"{b}\" {{ label = \"{label}\" }};".format(a=a,b=b, label=label))
print("}")
if True:
import networkx as nx
import matplotlib.pyplot as plt
g = nx.DiGraph()
for a,b,label in links:
if a.startswith("'#ivar"):
a = a[6:]
else:
a = a + '-T'
if b.startswith("'#ivar"):
b = b[6:]
else:
b = b + '-B'
g.add_edge(a, b)
pos = None
#pos = nx.planar_layout(g)
pos = nx.kamada_kawai_layout(g) # Gives a decent layout without needing graphviz/dot
#pos = nx.nx_agraph.graphviz_layout(g)
#pos = nx.nx_pydot.graphviz_layout(g)
nx.draw(g, with_labels=True, arrows=True, pos=pos)
plt.show()
elif True:
import graphviz
g = graphviz.Digraph('borrowck')
for a,b,label in links:
g.edge(a, b, label=label)
g.view()
else:
print()
print("digraph borrowck {")
for a,b in links:
print("\"{a}\" -> \"{b}\";".format(a=a,b=b))
print("}")
def make_list(vals):
vals = list(vals)
rv = ""
for i in range(0, len(vals), 5):
rv += ", ".join(vals[i:][:5])
rv += "\n"
return rv
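# e.g. make_list(['a', 'b', 'c', 'd', 'e', 'f']) -> "a, b, c, d, e\nf\n"
# (five items per line; used as a multi-line graphviz edge label above)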
def METHOD_NAME(links, name):
assert name.startswith("'#ivar")
return get_tree(links, name, 0) + get_tree(links, name, 1)
def get_tree(links, root_name, dir=0):
rv = []
stack = [root_name]
visited = set()
while len(stack) > 0:
n = stack.pop()
if n in visited:
continue
visited.add(n)
if not n.startswith("'#ivar"):
continue
for l in links:
if l[dir] == n:
rv.append(l)
stack.append( l[1-dir] )
return rv
if __name__ == "__main__":
main()
| null |
1,255 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeAvailableResourceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeAvailableResource','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Memory(self): # Float
return self.get_query_params().get('Memory')
def METHOD_NAME(self, Memory): # Float
self.add_query_param('Memory', Memory)
def get_IoOptimized(self): # String
return self.get_query_params().get('IoOptimized')
def set_IoOptimized(self, IoOptimized): # String
self.add_query_param('IoOptimized', IoOptimized)
def get_DataDiskCategory(self): # String
return self.get_query_params().get('DataDiskCategory')
def set_DataDiskCategory(self, DataDiskCategory): # String
self.add_query_param('DataDiskCategory', DataDiskCategory)
def get_Cores(self): # Integer
return self.get_query_params().get('Cores')
def set_Cores(self, Cores): # Integer
self.add_query_param('Cores', Cores)
def get_SystemDiskCategory(self): # String
return self.get_query_params().get('SystemDiskCategory')
def set_SystemDiskCategory(self, SystemDiskCategory): # String
self.add_query_param('SystemDiskCategory', SystemDiskCategory)
def get_Scope(self): # String
return self.get_query_params().get('Scope')
def set_Scope(self, Scope): # String
self.add_query_param('Scope', Scope)
def get_InstanceType(self): # String
return self.get_query_params().get('InstanceType')
def set_InstanceType(self, InstanceType): # String
self.add_query_param('InstanceType', InstanceType)
def get_NetworkCategory(self): # String
return self.get_query_params().get('NetworkCategory')
def set_NetworkCategory(self, NetworkCategory): # String
self.add_query_param('NetworkCategory', NetworkCategory)
def get_InstanceChargeType(self): # String
return self.get_query_params().get('InstanceChargeType')
def set_InstanceChargeType(self, InstanceChargeType): # String
self.add_query_param('InstanceChargeType', InstanceChargeType)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_DedicatedHostId(self): # String
return self.get_query_params().get('DedicatedHostId')
def set_DedicatedHostId(self, DedicatedHostId): # String
self.add_query_param('DedicatedHostId', DedicatedHostId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_SpotDuration(self): # Integer
return self.get_query_params().get('SpotDuration')
def set_SpotDuration(self, SpotDuration): # Integer
self.add_query_param('SpotDuration', SpotDuration)
def get_ResourceType(self): # String
return self.get_query_params().get('ResourceType')
def set_ResourceType(self, ResourceType): # String
self.add_query_param('ResourceType', ResourceType)
def get_SpotStrategy(self): # String
return self.get_query_params().get('SpotStrategy')
def set_SpotStrategy(self, SpotStrategy): # String
self.add_query_param('SpotStrategy', SpotStrategy)
def get_DestinationResource(self): # String
return self.get_query_params().get('DestinationResource')
def set_DestinationResource(self, DestinationResource): # String
self.add_query_param('DestinationResource', DestinationResource)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
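# Hedged usage sketch (not part of the generated SDK file): a caller typically
# fills query parameters and sends the request through an AcsClient. The
# credentials and region below are placeholders.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient
    client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    request = DescribeAvailableResourceRequest()
    request.set_DestinationResource('InstanceType')
    request.set_ZoneId('cn-hangzhou-b')
    print(client.do_action_with_exception(request))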
| null |
1,256 |
# -*- coding: utf-8 -*-
import json
from . import check_input_attribute, standard_error_message
from assemblyline_client import Client, ClientError
from collections import defaultdict
from pymisp import MISPAttribute, MISPEvent, MISPObject
misperrors = {'error': 'Error'}
mispattributes = {'input': ['link'], 'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Christian Studer',
'description': 'Query AssemblyLine with a report URL to get the parsed data.',
'module-type': ['expansion']}
moduleconfig = ["apiurl", "user_id", "apikey", "password", "verifyssl"]
class AssemblyLineParser():
def __init__(self):
self.misp_event = MISPEvent()
self.results = {}
self.attribute = {'to_ids': True}
self._results_mapping = {'NET_DOMAIN_NAME': 'domain', 'NET_FULL_URI': 'url',
'NET_IP': 'ip-dst'}
self._file_mapping = {'entropy': {'type': 'float', 'object_relation': 'entropy'},
'md5': {'type': 'md5', 'object_relation': 'md5'},
'mime': {'type': 'mime-type', 'object_relation': 'mimetype'},
'sha1': {'type': 'sha1', 'object_relation': 'sha1'},
'sha256': {'type': 'sha256', 'object_relation': 'sha256'},
'size': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
'ssdeep': {'type': 'ssdeep', 'object_relation': 'ssdeep'}}
def get_submission(self, attribute, client):
sid = attribute['value'].split('=')[-1]
try:
if not client.submission.is_completed(sid):
self.results['error'] = 'Submission not completed, please try again later.'
return
except Exception as e:
self.results['error'] = f'Something went wrong while trying to check if the submission in AssemblyLine is completed: {e.__str__()}'
return
try:
submission = client.submission.full(sid)
except Exception as e:
self.results['error'] = f"Something went wrong while getting the submission from AssemblyLine: {e.__str__()}"
return
self._parse_report(submission)
def METHOD_NAME(self):
if 'error' in self.results:
return self.results
event = json.loads(self.misp_event.to_json())
results = {key: event[key] for key in ('Attribute', 'Object', 'Tag') if (key in event and event[key])}
return {'results': results}
def _create_attribute(self, result, attribute_type):
attribute = MISPAttribute()
attribute.from_dict(type=attribute_type, value=result['value'], **self.attribute)
if result['classification'] != 'UNCLASSIFIED':
attribute.add_tag(result['classification'].lower())
self.misp_event.add_attribute(**attribute)
return {'referenced_uuid': attribute.uuid, 'relationship_type': '-'.join(result['context'].lower().split(' '))}
def _create_file_object(self, file_info):
file_object = MISPObject('file')
filename_attribute = {'type': 'filename'}
filename_attribute.update(self.attribute)
if file_info['classification'] != "UNCLASSIFIED":
tag = {'Tag': [{'name': file_info['classification'].lower()}]}
filename_attribute.update(tag)
for feature, attribute in self._file_mapping.items():
attribute.update(tag)
file_object.add_attribute(value=file_info[feature], **attribute)
return filename_attribute, file_object
for feature, attribute in self._file_mapping.items():
file_object.add_attribute(value=file_info[feature], **attribute)
return filename_attribute, file_object
@staticmethod
def _get_results(submission_results):
results = defaultdict(list)
for k, values in submission_results.items():
h = k.split('.')[0]
for t in values['result']['tags']:
if t['context'] is not None:
results[h].append(t)
return results
def _get_scores(self, file_tree):
scores = {}
for h, f in file_tree.items():
score = f['score']
if score > 0:
scores[h] = {'name': f['name'], 'score': score}
if f['children']:
scores.update(self._get_scores(f['children']))
return scores
def _parse_report(self, submission):
if submission['classification'] != 'UNCLASSIFIED':
self.misp_event.add_tag(submission['classification'].lower())
filtered_results = self._get_results(submission['results'])
scores = self._get_scores(submission['file_tree'])
for h, results in filtered_results.items():
if h in scores:
attribute, file_object = self._create_file_object(submission['file_infos'][h])
for filename in scores[h]['name']:
file_object.add_attribute('filename', value=filename, **attribute)
for reference in self._parse_results(results):
file_object.add_reference(**reference)
self.misp_event.add_object(**file_object)
def _parse_results(self, results):
references = []
for result in results:
try:
attribute_type = self._results_mapping[result['type']]
except KeyError:
continue
references.append(self._create_attribute(result, attribute_type))
return references
def parse_config(apiurl, user_id, config):
error = {"error": "Please provide your AssemblyLine API key or Password."}
if config.get('apikey'):
try:
return Client(apiurl, apikey=(user_id, config['apikey']), verify=config['verifyssl'])
except ClientError as e:
error['error'] = f'Error while initiating a connection with AssemblyLine: {e.__str__()}'
if config.get('password'):
try:
return Client(apiurl, auth=(user_id, config['password']))
except ClientError as e:
error['error'] = f'Error while initiating a connection with AssemblyLine: {e.__str__()}'
return error
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and a uuid.'}
if request['attribute']['type'] not in mispattributes['input']:
return {'error': 'Unsupported attribute type.'}
if not request.get('config'):
return {"error": "Missing configuration."}
if not request['config'].get('apiurl'):
return {"error": "No AssemblyLine server address provided."}
apiurl = request['config']['apiurl']
if not request['config'].get('user_id'):
return {"error": "Please provide your AssemblyLine User ID."}
user_id = request['config']['user_id']
client = parse_config(apiurl, user_id, request['config'])
if isinstance(client, dict):
return client
assemblyline_parser = AssemblyLineParser()
assemblyline_parser.get_submission(request['attribute'], client)
return assemblyline_parser.METHOD_NAME()
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
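# Hedged sketch of the query payload `handler` expects (all values below are
# placeholders; actually calling `handler` would contact the configured
# AssemblyLine server, and the relative imports mean this file is not normally
# run as a script):
if __name__ == "__main__":
    example_query = json.dumps({
        'attribute': {'type': 'link', 'uuid': '00000000-0000-0000-0000-000000000000',
                      'value': 'https://assemblyline.example/submission?sid=abc123'},
        'config': {'apiurl': 'https://assemblyline.example', 'user_id': 'admin',
                   'apikey': '<api-key>', 'verifyssl': True}})
    print(handler(example_query))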
| null |
1,257 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdomain.endpoint import endpoint_data
class QueryDomainListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Domain', '2018-01-29', 'QueryDomainList')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProductDomainType(self): # String
return self.get_query_params().get('ProductDomainType')
def set_ProductDomainType(self, ProductDomainType): # String
self.add_query_param('ProductDomainType', ProductDomainType)
def get_OrderKeyType(self): # String
return self.get_query_params().get('OrderKeyType')
def set_OrderKeyType(self, OrderKeyType): # String
self.add_query_param('OrderKeyType', OrderKeyType)
def get_PageNum(self): # Integer
return self.get_query_params().get('PageNum')
def METHOD_NAME(self, PageNum): # Integer
self.add_query_param('PageNum', PageNum)
def get_OrderByType(self): # String
return self.get_query_params().get('OrderByType')
def set_OrderByType(self, OrderByType): # String
self.add_query_param('OrderByType', OrderByType)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Lang(self): # String
return self.get_query_params().get('Lang')
def set_Lang(self, Lang): # String
self.add_query_param('Lang', Lang)
def get_QueryType(self): # String
return self.get_query_params().get('QueryType')
def set_QueryType(self, QueryType): # String
self.add_query_param('QueryType', QueryType)
def get_EndExpirationDate(self): # Long
return self.get_query_params().get('EndExpirationDate')
def set_EndExpirationDate(self, EndExpirationDate): # Long
self.add_query_param('EndExpirationDate', EndExpirationDate)
def get_DomainName(self): # String
return self.get_query_params().get('DomainName')
def set_DomainName(self, DomainName): # String
self.add_query_param('DomainName', DomainName)
def get_StartExpirationDate(self): # Long
return self.get_query_params().get('StartExpirationDate')
def set_StartExpirationDate(self, StartExpirationDate): # Long
self.add_query_param('StartExpirationDate', StartExpirationDate)
def get_DomainGroupId(self): # String
return self.get_query_params().get('DomainGroupId')
def set_DomainGroupId(self, DomainGroupId): # String
self.add_query_param('DomainGroupId', DomainGroupId)
def get_EndRegistrationDate(self): # Long
return self.get_query_params().get('EndRegistrationDate')
def set_EndRegistrationDate(self, EndRegistrationDate): # Long
self.add_query_param('EndRegistrationDate', EndRegistrationDate)
def get_UserClientIp(self): # String
return self.get_query_params().get('UserClientIp')
def set_UserClientIp(self, UserClientIp): # String
self.add_query_param('UserClientIp', UserClientIp)
def get_StartRegistrationDate(self): # Long
return self.get_query_params().get('StartRegistrationDate')
def set_StartRegistrationDate(self, StartRegistrationDate): # Long
self.add_query_param('StartRegistrationDate', StartRegistrationDate)
| null |
1,258 |
from typing import (
Any,
Dict,
)
from eth_typing import (
URI,
)
from web3._utils.request import (
async_get_response_from_get_request,
async_json_make_get_request,
)
from web3.beacon.api_endpoints import (
GET_ATTESTATIONS,
GET_ATTESTER_SLASHINGS,
GET_BEACON_HEADS,
GET_BEACON_STATE,
GET_BLOCK,
GET_BLOCK_ATTESTATIONS,
GET_BLOCK_HEADER,
GET_BLOCK_HEADERS,
GET_BLOCK_ROOT,
GET_DEPOSIT_CONTRACT,
GET_EPOCH_COMMITTEES,
GET_FINALITY_CHECKPOINT,
GET_FORK_DATA,
GET_FORK_SCHEDULE,
GET_GENESIS,
GET_HASH_ROOT,
GET_HEALTH,
GET_NODE_IDENTITY,
GET_PEER,
GET_PEERS,
GET_PROPOSER_SLASHINGS,
GET_SPEC,
GET_SYNCING,
GET_VALIDATOR,
GET_VALIDATOR_BALANCES,
GET_VALIDATORS,
GET_VERSION,
GET_VOLUNTARY_EXITS,
)
class AsyncBeacon:
is_async = True
def __init__(
self,
base_url: str,
) -> None:
self.base_url = base_url
async def _async_make_get_request(self, endpoint_uri: str) -> Dict[str, Any]:
uri = URI(self.base_url + endpoint_uri)
return await async_json_make_get_request(uri)
# [ BEACON endpoints ]
async def get_genesis(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_GENESIS)
async def get_hash_root(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(GET_HASH_ROOT.format(state_id))
async def get_fork_data(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(GET_FORK_DATA.format(state_id))
async def get_finality_checkpoint(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(
GET_FINALITY_CHECKPOINT.format(state_id)
)
async def get_validators(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(GET_VALIDATORS.format(state_id))
async def get_validator(
self, validator_id: str, state_id: str = "head"
) -> Dict[str, Any]:
return await self._async_make_get_request(
GET_VALIDATOR.format(state_id, validator_id)
)
async def get_validator_balances(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(
GET_VALIDATOR_BALANCES.format(state_id)
)
async def METHOD_NAME(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(GET_EPOCH_COMMITTEES.format(state_id))
async def get_block_headers(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_BLOCK_HEADERS)
async def get_block_header(self, block_id: str) -> Dict[str, Any]:
return await self._async_make_get_request(GET_BLOCK_HEADER.format(block_id))
async def get_block(self, block_id: str) -> Dict[str, Any]:
return await self._async_make_get_request(GET_BLOCK.format(block_id))
async def get_block_root(self, block_id: str) -> Dict[str, Any]:
return await self._async_make_get_request(GET_BLOCK_ROOT.format(block_id))
async def get_block_attestations(self, block_id: str) -> Dict[str, Any]:
return await self._async_make_get_request(
GET_BLOCK_ATTESTATIONS.format(block_id)
)
async def get_attestations(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_ATTESTATIONS)
async def get_attester_slashings(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_ATTESTER_SLASHINGS)
async def get_proposer_slashings(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_PROPOSER_SLASHINGS)
async def get_voluntary_exits(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_VOLUNTARY_EXITS)
# [ CONFIG endpoints ]
async def get_fork_schedule(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_FORK_SCHEDULE)
async def get_spec(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_SPEC)
async def get_deposit_contract(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_DEPOSIT_CONTRACT)
# [ DEBUG endpoints ]
async def get_beacon_state(self, state_id: str = "head") -> Dict[str, Any]:
return await self._async_make_get_request(GET_BEACON_STATE.format(state_id))
async def get_beacon_heads(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_BEACON_HEADS)
# [ NODE endpoints ]
async def get_node_identity(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_NODE_IDENTITY)
async def get_peers(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_PEERS)
async def get_peer(self, peer_id: str) -> Dict[str, Any]:
return await self._async_make_get_request(GET_PEER.format(peer_id))
async def get_health(self) -> int:
url = URI(self.base_url + GET_HEALTH)
response = await async_get_response_from_get_request(url)
return response.status
async def get_version(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_VERSION)
async def get_syncing(self) -> Dict[str, Any]:
return await self._async_make_get_request(GET_SYNCING)
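# Hedged usage sketch (the node URL is a placeholder for any beacon node that
# exposes the standard HTTP API):
if __name__ == "__main__":
    import asyncio
    async def main() -> None:
        beacon = AsyncBeacon("http://localhost:5052")
        print(await beacon.get_genesis())
        print(await beacon.get_syncing())
    asyncio.run(main())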
| null |
1,259 |
from typing import Callable, Sequence, Tuple, Union
import numpy as np
from ..C import FVAL, GRAD, HESS, MODE_FUN, MODE_RES, RES, SRES, ModeType
from .base import ObjectiveBase, ResultDict
class Objective(ObjectiveBase):
"""
Objective class.
    The objective class allows the user to explicitly specify functions that
    compute the function value and/or residuals, as well as the respective
    derivatives.
Denote dimensions `n` = parameters, `m` = residuals.
Parameters
----------
fun:
The objective function to be minimized. If it only computes the
objective function value, it should be of the form
``fun(x) -> float``
        where x is a 1-D array with shape (n,), and n is the parameter space
dimension.
grad:
Method for computing the gradient vector. If it is a callable,
it should be of the form
``grad(x) -> array_like, shape (n,).``
If its value is True, then fun should return the gradient as a second
output.
hess:
Method for computing the Hessian matrix. If it is a callable,
it should be of the form
``hess(x) -> array, shape (n, n).``
If its value is True, then fun should return the gradient as a
second, and the Hessian as a third output, and grad should be True as
well.
hessp:
Method for computing the Hessian vector product, i.e.
``hessp(x, v) -> array_like, shape (n,)``
computes the product H*v of the Hessian of fun at x with v.
res:
Method for computing residuals, i.e.
``res(x) -> array_like, shape(m,).``
sres:
Method for computing residual sensitivities. If it is a callable,
it should be of the form
``sres(x) -> array, shape (m, n).``
If its value is True, then res should return the residual
sensitivities as a second output.
x_names:
Parameter names. None if no names provided, otherwise a list of str,
length dim_full (as in the Problem class). Can be read by the
problem.
"""
def __init__(
self,
fun: Callable = None,
grad: Union[Callable, bool] = None,
hess: Callable = None,
hessp: Callable = None,
res: Callable = None,
sres: Union[Callable, bool] = None,
x_names: Sequence[str] = None,
):
self.fun = fun
self.grad = grad
self.hess = hess
self.hessp = hessp
self.res = res
self.sres = sres
super().__init__(x_names)
@property
def has_fun(self) -> bool:
"""Check whether function is defined."""
return callable(self.fun)
@property
def has_grad(self) -> bool:
"""Check whether gradient is defined."""
return callable(self.grad) or self.grad is True
@property
def has_hess(self) -> bool:
"""Check whether Hessian is defined."""
return callable(self.hess) or self.hess is True
@property
def has_hessp(self) -> bool:
"""Check whether Hessian vector product is defined."""
# Not supported yet
return False
@property
def has_res(self) -> bool:
"""Check whether residuals are defined."""
return callable(self.res)
@property
def METHOD_NAME(self) -> bool:
"""Check whether residual sensitivities are defined."""
return callable(self.sres) or self.sres is True
def get_config(self) -> dict:
"""Return basic information of the objective configuration."""
info = super().get_config()
info['x_names'] = self.x_names
sensi_order = 0
while self.check_sensi_orders(
sensi_orders=(sensi_order,), mode=MODE_FUN
):
sensi_order += 1
info['sensi_order'] = sensi_order - 1
return info
def call_unprocessed(
self,
x: np.ndarray,
sensi_orders: Tuple[int, ...],
mode: ModeType,
**kwargs,
) -> ResultDict:
"""
Call objective function without pre- or post-processing and formatting.
Returns
-------
result:
A dict containing the results.
"""
if mode == MODE_FUN:
result = self._call_mode_fun(x=x, sensi_orders=sensi_orders)
elif mode == MODE_RES:
result = self._call_mode_res(x=x, sensi_orders=sensi_orders)
else:
raise ValueError("This mode is not supported.")
return result
def _call_mode_fun(
self,
x: np.ndarray,
sensi_orders: Tuple[int, ...],
) -> ResultDict:
if not sensi_orders:
result = {}
elif sensi_orders == (0,):
if self.grad is True:
fval = self.fun(x)[0]
else:
fval = self.fun(x)
result = {FVAL: fval}
elif sensi_orders == (1,):
if self.grad is True:
grad = self.fun(x)[1]
else:
grad = self.grad(x)
result = {GRAD: grad}
elif sensi_orders == (2,):
if self.hess is True:
hess = self.fun(x)[2]
else:
hess = self.hess(x)
result = {HESS: hess}
elif sensi_orders == (0, 1):
if self.grad is True:
fval, grad = self.fun(x)[:2]
else:
fval = self.fun(x)
grad = self.grad(x)
result = {FVAL: fval, GRAD: grad}
elif sensi_orders == (0, 2):
if self.hess is True:
fval, _, hess = self.fun(x)[:3]
else:
if self.grad is True:
fval = self.fun(x)[0]
else:
fval = self.fun(x)
hess = self.hess(x)
result = {FVAL: fval, HESS: hess}
elif sensi_orders == (1, 2):
if self.hess is True:
grad, hess = self.fun(x)[1:3]
else:
hess = self.hess(x)
if self.grad is True:
grad = self.fun(x)[1]
else:
grad = self.grad(x)
result = {GRAD: grad, HESS: hess}
elif sensi_orders == (0, 1, 2):
if self.hess is True:
fval, grad, hess = self.fun(x)[0:3]
else:
hess = self.hess(x)
if self.grad is True:
fval, grad = self.fun(x)[0:2]
else:
fval = self.fun(x)
grad = self.grad(x)
result = {FVAL: fval, GRAD: grad, HESS: hess}
else:
raise ValueError("These sensitivity orders are not supported.")
return result
def _call_mode_res(
self,
x: np.ndarray,
sensi_orders: Tuple[int, ...],
) -> ResultDict:
if not sensi_orders:
result = {}
elif sensi_orders == (0,):
if self.sres is True:
res = self.res(x)[0]
else:
res = self.res(x)
result = {RES: res}
elif sensi_orders == (1,):
if self.sres is True:
sres = self.res(x)[1]
else:
sres = self.sres(x)
result = {SRES: sres}
elif sensi_orders == (0, 1):
if self.sres is True:
res, sres = self.res(x)
else:
res = self.res(x)
sres = self.sres(x)
result = {RES: res, SRES: sres}
else:
raise ValueError("These sensitivity orders are not supported.")
return result
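# Hedged usage sketch (illustration only: the relative imports mean this
# module is not normally run as a script). Here `fun` returns (fval, grad),
# so `grad=True` is passed instead of a separate gradient callable.
if __name__ == "__main__":
    def rosen(x):
        fval = 100 * (x[1] - x[0] ** 2) ** 2 + (1 - x[0]) ** 2
        grad = np.array([
            -400 * x[0] * (x[1] - x[0] ** 2) - 2 * (1 - x[0]),
            200 * (x[1] - x[0] ** 2),
        ])
        return fval, grad
    objective = Objective(fun=rosen, grad=True)
    print(objective.call_unprocessed(
        np.array([0.5, 0.5]), sensi_orders=(0, 1), mode=MODE_FUN))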
| null |
1,260 |
"""
The Winograd Schema Challenge
http://commonsensereasoning.org/2011/papers/Levesque.pdf
A Winograd schema is a pair of sentences that differ in only one or two words
and that contain an ambiguity that is resolved in opposite ways in the two
sentences and requires the use of world knowledge and reasoning for its resolution.
The Winograd Schema Challenge 273 is a collection of 273 such Winograd schemas.
NOTE: This evaluation of Winograd Schema Challenge is based on `partial evaluation`
as described by Trinh & Le in "A Simple Method for Commonsense Reasoning" (2018).
See: https://arxiv.org/abs/1806.02847
Homepage: https://cs.nyu.edu/~davise/papers/WinogradSchemas/WS.html
"""
import numpy as np
from lm_eval.base import rf, Task
from lm_eval.metrics import mean
_CITATION = """
@inproceedings{ea01b9c0db064caca6986b925d75f2bb,
title = "The winograd schema challenge",
abstract = "In this paper, we present an alternative to the Turing Test that has some conceptual and practical advantages. A Wino-grad schema is a pair of sentences that differ only in one or two words and that contain a referential ambiguity that is resolved in opposite directions in the two sentences. We have compiled a collection of Winograd schemas, designed so that the correct answer is obvious to the human reader, but cannot easily be found using selectional restrictions or statistical techniques over text corpora. A contestant in the Winograd Schema Challenge is presented with a collection of one sentence from each pair, and required to achieve human-level accuracy in choosing the correct disambiguation.",
author = "Levesque, {Hector J.} and Ernest Davis and Leora Morgenstern",
year = "2012",
language = "English (US)",
isbn = "9781577355601",
series = "Proceedings of the International Conference on Knowledge Representation and Reasoning",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "552--561",
booktitle = "13th International Conference on the Principles of Knowledge Representation and Reasoning, KR 2012",
note = "13th International Conference on the Principles of Knowledge Representation and Reasoning, KR 2012 ; Conference date: 10-06-2012 Through 14-06-2012",
}
"""
class WinogradSchemaChallenge273(Task):
VERSION = 0
DATASET_PATH = "winograd_wsc"
DATASET_NAME = "wsc273"
upper_pronouns = [
"A",
"An",
"The",
"She",
"He",
"It",
"They",
"My",
"His",
"Her",
"Their",
]
def has_training_docs(self):
return False
def has_validation_docs(self):
return False
def has_test_docs(self):
return True
def test_docs(self):
return map(self._process_doc, self.dataset["test"])
def _process_doc(self, doc):
# The HF implementation of `wsc273` is not `partial evaluation` friendly.
doc["text"] = doc["text"].replace(" ", " ")
doc["options"][0] = self.__normalize_option(doc, doc["options"][0])
doc["options"][1] = self.__normalize_option(doc, doc["options"][1])
return doc
def __normalize_option(self, doc, option):
# Append `'s` to possessive determiner based options.
if doc["pronoun"].lower() in ["my", "his", "her", "our", "their"]:
option += "'s"
# Appropriately lowercase the pronoun in the option.
pronoun = option.split()[0]
start_of_sentence = doc["text"][doc["pronoun_loc"] - 2] == "."
if not start_of_sentence and pronoun in self.upper_pronouns:
return option.replace(pronoun, pronoun.lower())
return option
def fewshot_examples(self, k, rnd):
# NOTE: `super().fewshot_examples` samples from training docs which are
# not available for this test-set-only dataset.
if self._fewshot_docs is None:
self._fewshot_docs = list(self.test_docs())
return rnd.sample(list(self._fewshot_docs), k)
def doc_to_text(self, doc):
return self.partial_context(doc, doc["options"][doc["label"]])
def should_decontaminate(self):
return True
def doc_to_decontamination_query(self, doc):
return doc["text"]
@classmethod
def partial_context(cls, doc, option):
# Substitute the pronoun in the original text with the specified
# option and ignore everything after.
return doc["text"][: doc["pronoun_loc"]] + option
def doc_to_target(self, doc):
return self.partial_target(doc)
@classmethod
def partial_target(cls, doc):
# The target is everything after the document specified pronoun.
start_index = doc["pronoun_loc"] + len(doc["pronoun"])
return " " + doc["text"][start_index:].strip()
def construct_requests(self, doc, ctx):
"""Uses RequestFactory to construct Requests and returns an iterable of
Requests which will be sent to the LM.
:param doc:
The document as returned from training_docs, validation_docs, or test_docs.
:param ctx: str
The context string, generated by fewshot_context. This includes the natural
language description, as well as the few shot examples, and the question
part of the document for `doc`.
"""
target = self.partial_target(doc)
lls = []
for option in doc["options"]:
partial_ctx = self.partial_context(doc, option)
full_ctx = self.METHOD_NAME(ctx, partial_ctx)
lls.append(rf.loglikelihood(full_ctx, target)[0])
return lls
@classmethod
def METHOD_NAME(cls, ctx, partial_ctx):
ctx = ctx.split("\n\n") # Each fewshot context is on its own new line.
ctx.pop() # Remove the correct context put in by `doc_to_text`.
return "\n\n".join([*ctx, partial_ctx]) if ctx else partial_ctx
def process_results(self, doc, results):
"""Take a single document and the LM results and evaluates, returning a
dict where keys are the names of submetrics and values are the values of
the metric for that one document
:param doc:
The document as returned from training_docs, validation_docs, or test_docs.
:param results:
The results of the requests created in construct_requests.
"""
return {"acc": np.argmax(results) == doc["label"]}
def aggregation(self):
"""
:returns: {str: [float] -> float}
A dictionary where keys are the names of submetrics and values are
functions that aggregate a list of metrics
"""
return {"acc": mean}
def higher_is_better(self):
"""
:returns: {str: bool}
A dictionary where keys are the names of submetrics and values are
whether a higher value of the submetric is better
"""
return {"acc": True}
| null |
1,261 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdateCdrsMonitorRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CDRS', '2020-11-01', 'UpdateCdrsMonitor')
self.set_method('POST')
def get_CorpId(self):
return self.get_body_params().get('CorpId')
def set_CorpId(self,CorpId):
self.add_body_params('CorpId', CorpId)
def get_Description(self):
return self.get_body_params().get('Description')
def set_Description(self,Description):
self.add_body_params('Description', Description)
def get_RuleName(self):
return self.get_body_params().get('RuleName')
def set_RuleName(self,RuleName):
self.add_body_params('RuleName', RuleName)
def get_PicOperateType(self):
return self.get_body_params().get('PicOperateType')
def set_PicOperateType(self,PicOperateType):
self.add_body_params('PicOperateType', PicOperateType)
def get_AttributeName(self):
return self.get_body_params().get('AttributeName')
def METHOD_NAME(self,AttributeName):
self.add_body_params('AttributeName', AttributeName)
def get_AttributeOperateType(self):
return self.get_body_params().get('AttributeOperateType')
def set_AttributeOperateType(self,AttributeOperateType):
self.add_body_params('AttributeOperateType', AttributeOperateType)
def get_RuleExpression(self):
return self.get_body_params().get('RuleExpression')
def set_RuleExpression(self,RuleExpression):
self.add_body_params('RuleExpression', RuleExpression)
def get_NotifierTimeOut(self):
return self.get_body_params().get('NotifierTimeOut')
def set_NotifierTimeOut(self,NotifierTimeOut):
self.add_body_params('NotifierTimeOut', NotifierTimeOut)
def get_TaskId(self):
return self.get_body_params().get('TaskId')
def set_TaskId(self,TaskId):
self.add_body_params('TaskId', TaskId)
def get_DeviceOperateType(self):
return self.get_body_params().get('DeviceOperateType')
def set_DeviceOperateType(self,DeviceOperateType):
self.add_body_params('DeviceOperateType', DeviceOperateType)
def get_PicList(self):
return self.get_body_params().get('PicList')
def set_PicList(self,PicList):
self.add_body_params('PicList', PicList)
def get_AttributeValueList(self):
return self.get_body_params().get('AttributeValueList')
def set_AttributeValueList(self,AttributeValueList):
self.add_body_params('AttributeValueList', AttributeValueList)
def get_NotifierAppSecret(self):
return self.get_body_params().get('NotifierAppSecret')
def set_NotifierAppSecret(self,NotifierAppSecret):
self.add_body_params('NotifierAppSecret', NotifierAppSecret)
def get_NotifierExtendValues(self):
return self.get_body_params().get('NotifierExtendValues')
def set_NotifierExtendValues(self,NotifierExtendValues):
self.add_body_params('NotifierExtendValues', NotifierExtendValues)
def get_DeviceList(self):
return self.get_body_params().get('DeviceList')
def set_DeviceList(self,DeviceList):
self.add_body_params('DeviceList', DeviceList)
def get_NotifierUrl(self):
return self.get_body_params().get('NotifierUrl')
def set_NotifierUrl(self,NotifierUrl):
self.add_body_params('NotifierUrl', NotifierUrl)
def get_NotifierType(self):
return self.get_body_params().get('NotifierType')
def set_NotifierType(self,NotifierType):
self.add_body_params('NotifierType', NotifierType)
def get_AlgorithmVendor(self):
return self.get_body_params().get('AlgorithmVendor')
def set_AlgorithmVendor(self,AlgorithmVendor):
		self.add_body_params('AlgorithmVendor', AlgorithmVendor)
| null |
1,262 |
"""
This type stub file was generated by pyright.
"""
import threading
from contextlib import contextmanager
"""Threading primitives and utilities."""
__all__ = (
"bgThread",
"Local",
"LocalStack",
"LocalManager",
"get_ident",
"default_socket_timeout",
)
USE_FAST_LOCALS = ...
@contextmanager
def default_socket_timeout(timeout): # -> Generator[None, None, None]:
"""Context temporarily setting the default socket timeout."""
...
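# Hedged usage sketch (the timeout value is illustrative):
#     with default_socket_timeout(10.0):
#         ...  # socket operations here see a 10 s default timeout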
class bgThread(threading.Thread):
"""Background service thread."""
def __init__(self, name=..., **kwargs) -> None: ...
def body(self): ...
def on_crash(self, msg, *fmt, **kwargs): ...
def run(self): ...
def stop(self): # -> None:
"""Graceful shutdown."""
...
def release_local(local): # -> None:
"""Release the contents of the local for the current context.
This makes it possible to use locals without a manager.
With this function one can release :class:`Local` objects as well as
    :class:`LocalStack` objects. However, it's not possible to
release data held by proxies that way, one always has to retain
a reference to the underlying local object in order to be able
to release it.
Example:
>>> loc = Local()
>>> loc.foo = 42
>>> release_local(loc)
>>> hasattr(loc, 'foo')
False
"""
...
class Local:
"""Local object."""
__slots__ = ...
def __init__(self) -> None: ...
def __iter__(self): ...
def __call__(self, proxy): # -> Proxy:
"""Create a proxy for a name."""
...
def __release_local__(self): ...
def __getattr__(self, name): ...
def __setattr__(self, name, value): ...
def __delattr__(self, name): ...
class _LocalStack:
"""Local stack.
This class works similar to a :class:`Local` but keeps a stack
of objects instead. This is best explained with an example::
>>> ls = LocalStack()
>>> ls.push(42)
>>> ls.top
42
>>> ls.push(23)
>>> ls.top
23
>>> ls.pop()
23
>>> ls.top
42
They can be force released by using a :class:`LocalManager` or with
the :func:`release_local` function but the correct way is to pop the
    item from the stack after use. When the stack is empty it will
no longer be bound to the current context (and as such released).
By calling the stack without arguments it will return a proxy that
resolves to the topmost item on the stack.
"""
def __init__(self) -> None: ...
def __release_local__(self): ...
__ident_func__ = ...
def __call__(self): ...
def push(self, obj): # -> Any | list[Unknown]:
"""Push a new item to the stack."""
...
def pop(self): # -> Any | None:
"""Remove the topmost item from the stack.
Note:
Will return the old value or `None` if the stack was already empty.
"""
...
def __len__(self): ...
@property
def METHOD_NAME(self): ...
@property
def top(self): # -> None:
"""The topmost item on the stack.
Note:
If the stack is empty, :const:`None` is returned.
"""
...
class LocalManager:
"""Local objects cannot manage themselves.
For that you need a local manager.
You can pass a local manager multiple locals or add them
later by appending them to ``manager.locals``. Every time the manager
cleans up, it will clean up all the data left in the locals for this
context.
The ``ident_func`` parameter can be added to override the default ident
function for the wrapped locals.
"""
def __init__(self, locals=..., ident_func=...) -> None: ...
def get_ident(self): # -> int:
"""Return context identifier.
This is the identifier the local objects use internally
for this context. You cannot override this method to change the
behavior but use it to link other context local objects (such as
SQLAlchemy's scoped sessions) to the Werkzeug locals.
"""
...
def cleanup(self): # -> None:
"""Manually clean up the data in the locals for this context.
Call this at the end of the request or use ``make_middleware()``.
"""
...
def __repr__(self): ...
class _FastLocalStack(threading.local):
def __init__(self) -> None: ...
@property
def top(self): ...
def __len__(self): ...
if USE_FAST_LOCALS:
LocalStack = ...
else:
LocalStack = ...
| null |
1,263 |
import asyncio
from collections import deque
from typing import Optional
import pandas as pd
from bidict import bidict
from hummingbot.core.api_throttler.async_throttler import AsyncThrottler
from hummingbot.core.network_base import NetworkBase
from hummingbot.core.network_iterator import NetworkStatus
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory
from hummingbot.core.web_assistant.ws_assistant import WSAssistant
class CandlesBase(NetworkBase):
"""
This class serves as a base class for fetching and storing candle data from a cryptocurrency exchange.
The class uses the Rest and WS Assistants for all the IO operations, and a double-ended queue to store candles.
    It also implements the Throttler module for API rate limiting, although this is rarely needed
    because real-time data is mainly delivered via websockets.
"""
interval_to_seconds = bidict({
"1s": 1,
"1m": 60,
"3m": 180,
"5m": 300,
"15m": 900,
"30m": 1800,
"1h": 3600,
"2h": 7200,
"4h": 14400,
"6h": 21600,
"8h": 28800,
"12h": 43200,
"1d": 86400,
"3d": 259200,
"1w": 604800,
"1M": 2592000
})
columns = ["timestamp", "open", "high", "low", "close", "volume", "quote_asset_volume",
"n_trades", "taker_buy_base_volume", "taker_buy_quote_volume"]
def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150):
super().__init__()
async_throttler = AsyncThrottler(rate_limits=self.rate_limits)
self._api_factory = WebAssistantsFactory(throttler=async_throttler)
self._candles = deque(maxlen=max_records)
self._listen_candles_task: Optional[asyncio.Task] = None
self._trading_pair = trading_pair
self._ex_trading_pair = self.get_exchange_trading_pair(trading_pair)
if interval in self.intervals.keys():
self.interval = interval
else:
self.logger().exception(
f"Interval {interval} is not supported. Available Intervals: {self.intervals.keys()}")
            raise ValueError(f"Interval {interval} is not supported.")
async def start_network(self):
"""
This method starts the network and starts a task for listen_for_subscriptions.
"""
await self.stop_network()
self._listen_candles_task = safe_ensure_future(self.listen_for_subscriptions())
async def stop_network(self):
"""
This method stops the network by canceling the _listen_candles_task task.
"""
if self._listen_candles_task is not None:
self._listen_candles_task.cancel()
self._listen_candles_task = None
@property
def is_ready(self):
"""
This property returns a boolean indicating whether the _candles deque has reached its maximum length.
"""
return len(self._candles) == self._candles.maxlen
@property
def name(self):
raise NotImplementedError
@property
def rest_url(self):
raise NotImplementedError
@property
def health_check_url(self):
raise NotImplementedError
@property
def candles_url(self):
raise NotImplementedError
@property
def wss_url(self):
raise NotImplementedError
@property
def rate_limits(self):
raise NotImplementedError
@property
def intervals(self):
raise NotImplementedError
async def check_network(self) -> NetworkStatus:
raise NotImplementedError
@property
def candles_df(self) -> pd.DataFrame:
"""
This property returns the candles stored in the _candles deque as a Pandas DataFrame.
"""
return pd.DataFrame(self._candles, columns=self.columns, dtype=float)
def get_exchange_trading_pair(self, trading_pair):
raise NotImplementedError
async def fetch_candles(self,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
limit: Optional[int] = 500):
"""
This is an abstract method that must be implemented by a subclass to fetch candles from the exchange API.
:param start_time: start time to fetch candles
:param end_time: end time to fetch candles
:param limit: quantity of candles
:return: numpy array with the candlesticks
"""
raise NotImplementedError
async def fill_historical_candles(self):
"""
This is an abstract method that must be implemented by a subclass to fill the _candles deque with historical candles.
"""
raise NotImplementedError
async def listen_for_subscriptions(self):
"""
Connects to the candlestick websocket endpoint and listens to the messages sent by the
exchange.
"""
ws: Optional[WSAssistant] = None
while True:
try:
ws: WSAssistant = await self._connected_websocket_assistant()
await self._subscribe_channels(ws)
await self._process_websocket_messages(websocket_assistant=ws)
except asyncio.CancelledError:
raise
except ConnectionError as connection_exception:
self.logger().warning(f"The websocket connection was closed ({connection_exception})")
except Exception:
self.logger().exception(
"Unexpected error occurred when listening to public klines. Retrying in 1 seconds...",
)
await self.METHOD_NAME(1.0)
finally:
await self._on_order_stream_interruption(websocket_assistant=ws)
async def _connected_websocket_assistant(self) -> WSAssistant:
ws: WSAssistant = await self._api_factory.get_ws_assistant()
await ws.connect(ws_url=self.wss_url,
ping_timeout=30)
return ws
async def _subscribe_channels(self, ws: WSAssistant):
"""
Subscribes to the candles events through the provided websocket connection.
:param ws: the websocket assistant used to connect to the exchange
"""
raise NotImplementedError
async def _process_websocket_messages(self, websocket_assistant: WSAssistant):
raise NotImplementedError
async def METHOD_NAME(self, delay):
"""
Function added only to facilitate patching the sleep in unit tests without affecting the asyncio module
"""
await asyncio.sleep(delay)
async def _on_order_stream_interruption(self, websocket_assistant: Optional[WSAssistant] = None):
        if websocket_assistant is not None:
            await websocket_assistant.disconnect()
self._candles.clear()
def get_seconds_from_interval(self, interval: str) -> int:
"""
This method returns the number of seconds from the interval string.
:param interval: interval string
:return: number of seconds
"""
return self.interval_to_seconds[interval]
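# Hedged sketch of the minimal surface a concrete feed must implement before
# the base class can be instantiated (names and URLs are illustrative, not a
# real exchange integration):
class ExampleCandles(CandlesBase):
    @property
    def name(self):
        return f"example_{self._trading_pair}"
    @property
    def rest_url(self):
        return "https://api.example-exchange.com"
    @property
    def wss_url(self):
        return "wss://stream.example-exchange.com/ws"
    @property
    def rate_limits(self):
        return []
    @property
    def intervals(self):
        return dict(self.interval_to_seconds)
    def get_exchange_trading_pair(self, trading_pair):
        return trading_pair.replace("-", "")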
| null |
1,264 |
#
# formatter.py
#
# Convert parsed content blocks to a structured document (library file).
#
# Copyright 2002-2018 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
#
# This is the base Formatter class. Its purpose is to convert a content
# processor's data into specific documents (i.e., table of contents, global
# index, and individual API reference indices).
#
# You need to sub-class it to output anything sensible. For example, the
# file `tohtml.py' contains the definition of the `HtmlFormatter' sub-class
# to output HTML.
#
from sources import *
from content import *
from utils import *
################################################################
##
## FORMATTER CLASS
##
class Formatter:
def __init__( self, processor ):
self.processor = processor
self.identifiers = {}
self.chapters = processor.chapters
self.sections = processor.sections.values()
self.block_index = []
# store all blocks in a dictionary
self.blocks = []
for section in self.sections:
for block in section.blocks.values():
self.add_identifier( block.name, block )
# add enumeration values to the index, since this is useful
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
self.add_identifier( field.name, block )
        self.block_index = sorted( self.identifiers.keys(), key = index_key )
# also add section names to dictionary (without making them appear
# in the index)
for section in self.sections:
self.add_identifier( section.name, section )
def add_identifier( self, name, block ):
if name in self.identifiers:
# duplicate name!
sys.stderr.write( "WARNING: duplicate definition for"
+ " '" + name + "' "
+ "in " + block.location() + ", "
+ "previous definition in "
+ self.identifiers[name].location()
+ "\n" )
else:
self.identifiers[name] = block
#
# formatting the table of contents
#
def toc_enter( self ):
pass
def toc_chapter_enter( self, chapter ):
pass
def toc_section_enter( self, section ):
pass
def toc_section_exit( self, section ):
pass
def toc_chapter_exit( self, chapter ):
pass
def toc_index( self, index_filename ):
pass
def toc_exit( self ):
pass
def toc_dump( self, toc_filename = None, index_filename = None ):
output = None
if toc_filename:
output = open_output( toc_filename )
self.toc_enter()
for chap in self.processor.chapters:
self.toc_chapter_enter( chap )
for section in chap.sections:
self.toc_section_enter( section )
self.toc_section_exit( section )
self.toc_chapter_exit( chap )
self.toc_index( index_filename )
self.toc_exit()
if output:
close_output( output )
#
# formatting the index
#
def index_enter( self ):
pass
def index_name_enter( self, name ):
pass
def index_name_exit( self, name ):
pass
def index_exit( self ):
pass
def index_dump( self, index_filename = None ):
output = None
if index_filename:
output = open_output( index_filename )
self.index_enter()
for name in self.block_index:
self.index_name_enter( name )
self.index_name_exit( name )
self.index_exit()
if output:
close_output( output )
#
# formatting a section
#
def section_enter( self, section ):
pass
def block_enter( self, block ):
pass
def markup_enter( self, markup, block = None ):
pass
def field_enter( self, field, markup = None, block = None ):
pass
def field_exit( self, field, markup = None, block = None ):
pass
def markup_exit( self, markup, block = None ):
pass
def block_exit( self, block ):
pass
def section_exit( self, section ):
pass
def METHOD_NAME( self, section, section_filename = None ):
output = None
if section_filename:
output = open_output( section_filename )
self.section_enter( section )
for name in section.block_names:
skip_entry = 0
try:
block = self.identifiers[name]
# `block_names' can contain field names also,
# which we filter out
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
if field.name == name:
skip_entry = 1
except:
skip_entry = 1 # this happens e.g. for `/empty/' entries
if skip_entry:
continue
self.block_enter( block )
for markup in block.markups[1:]: # always ignore first markup!
self.markup_enter( markup, block )
for field in markup.fields:
self.field_enter( field, markup, block )
self.field_exit( field, markup, block )
self.markup_exit( markup, block )
self.block_exit( block )
self.section_exit( section )
if output:
close_output( output )
def section_dump_all( self ):
for section in self.sections:
self.METHOD_NAME( section )
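#
# hedged example: a minimal sub-class (the shipped example is `HtmlFormatter'
# in `tohtml.py'; the plain-text renderer below is illustrative only)
#
class TextFormatter( Formatter ):
    def section_enter( self, section ):
        print( "=== " + section.name + " ===" )
    def block_enter( self, block ):
        print( "  " + block.name )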
# eof
| null |
1,265 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class CreateReadOnlyDBInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'CreateReadOnlyDBInstance')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBInstanceStorage(self): # Integer
return self.get_query_params().get('DBInstanceStorage')
def set_DBInstanceStorage(self, DBInstanceStorage): # Integer
self.add_query_param('DBInstanceStorage', DBInstanceStorage)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_DeletionProtection(self): # Boolean
return self.get_query_params().get('DeletionProtection')
def set_DeletionProtection(self, DeletionProtection): # Boolean
self.add_query_param('DeletionProtection', DeletionProtection)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_TargetDedicatedHostIdForMaster(self): # String
return self.get_query_params().get('TargetDedicatedHostIdForMaster')
def set_TargetDedicatedHostIdForMaster(self, TargetDedicatedHostIdForMaster): # String
self.add_query_param('TargetDedicatedHostIdForMaster', TargetDedicatedHostIdForMaster)
def get_DBInstanceDescription(self): # String
return self.get_query_params().get('DBInstanceDescription')
def set_DBInstanceDescription(self, DBInstanceDescription): # String
self.add_query_param('DBInstanceDescription', DBInstanceDescription)
def get_GdnInstanceName(self): # String
return self.get_query_params().get('GdnInstanceName')
def set_GdnInstanceName(self, GdnInstanceName): # String
self.add_query_param('GdnInstanceName', GdnInstanceName)
def get_TddlBizType(self): # String
return self.get_query_params().get('TddlBizType')
def set_TddlBizType(self, TddlBizType): # String
self.add_query_param('TddlBizType', TddlBizType)
def get_Period(self): # String
return self.get_query_params().get('Period')
def set_Period(self, Period): # String
self.add_query_param('Period', Period)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_PrivateIpAddress(self): # String
return self.get_query_params().get('PrivateIpAddress')
def set_PrivateIpAddress(self, PrivateIpAddress): # String
self.add_query_param('PrivateIpAddress', PrivateIpAddress)
def get_AutoRenew(self): # String
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # String
self.add_query_param('AutoRenew', AutoRenew)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_InstanceNetworkType(self): # String
return self.get_query_params().get('InstanceNetworkType')
def set_InstanceNetworkType(self, InstanceNetworkType): # String
self.add_query_param('InstanceNetworkType', InstanceNetworkType)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_InstructionSetArch(self): # String
return self.get_query_params().get('InstructionSetArch')
def set_InstructionSetArch(self, InstructionSetArch): # String
self.add_query_param('InstructionSetArch', InstructionSetArch)
def get_TddlRegionConfig(self): # String
return self.get_query_params().get('TddlRegionConfig')
def set_TddlRegionConfig(self, TddlRegionConfig): # String
self.add_query_param('TddlRegionConfig', TddlRegionConfig)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_DBInstanceStorageType(self): # String
return self.get_query_params().get('DBInstanceStorageType')
def set_DBInstanceStorageType(self, DBInstanceStorageType): # String
self.add_query_param('DBInstanceStorageType', DBInstanceStorageType)
def get_DedicatedHostGroupId(self): # String
return self.get_query_params().get('DedicatedHostGroupId')
def set_DedicatedHostGroupId(self, DedicatedHostGroupId): # String
self.add_query_param('DedicatedHostGroupId', DedicatedHostGroupId)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_UsedTime(self): # String
return self.get_query_params().get('UsedTime')
def set_UsedTime(self, UsedTime): # String
self.add_query_param('UsedTime', UsedTime)
def get_BurstingEnabled(self): # Boolean
return self.get_query_params().get('BurstingEnabled')
def set_BurstingEnabled(self, BurstingEnabled): # Boolean
self.add_query_param('BurstingEnabled', BurstingEnabled)
def get_VPCId(self): # String
return self.get_query_params().get('VPCId')
def set_VPCId(self, VPCId): # String
self.add_query_param('VPCId', VPCId)
def get_Category(self): # String
return self.get_query_params().get('Category')
def METHOD_NAME(self, Category): # String
self.add_query_param('Category', Category)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
def get_BpeEnabled(self): # String
return self.get_query_params().get('BpeEnabled')
def set_BpeEnabled(self, BpeEnabled): # String
self.add_query_param('BpeEnabled', BpeEnabled)
| null |
1,266 |
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause
"""
Tests of cocotb.test functionality
* expect_error
* expect_fail
* timeout
"""
from collections.abc import Coroutine
import pytest
from common import MyBaseException, MyException
import cocotb
from cocotb.triggers import NullTrigger, Timer
@cocotb.test(expect_error=NameError)
async def test_error(dut):
"""Error in the test"""
await Timer(100, "ns")
fail # noqa
@cocotb.test()
async def test_tests_are_tests(dut):
"""
Test that things annotated with cocotb.test are tests
"""
assert isinstance(test_tests_are_tests, cocotb.test)
# just to be sure...
@cocotb.test(expect_fail=True)
async def test_async_test_can_fail(dut):
assert False
@cocotb.test()
async def test_immediate_test(dut):
"""Test that tests can return immediately"""
return
@cocotb.test(expect_fail=True)
async def test_assertion_is_failure(dut):
assert False
@cocotb.test(expect_error=MyException)
async def test_expect_particular_exception(dut):
raise MyException()
@cocotb.test(expect_error=(MyException, ValueError))
async def test_expect_exception_list(dut):
raise MyException()
@cocotb.test(
expect_error=cocotb.result.SimTimeoutError, timeout_time=1, timeout_unit="ns"
)
async def test_timeout_testdec_fail(dut):
await Timer(10, "ns")
@cocotb.test(timeout_time=100, timeout_unit="ns")
async def test_timeout_testdec_pass(dut):
await Timer(10, "ns")
@cocotb.test(timeout_time=10, timeout_unit="ns")
async def test_timeout_testdec_simultaneous(dut):
try:
await cocotb.triggers.with_timeout(
Timer(1, "ns"), timeout_time=1, timeout_unit="ns"
)
except cocotb.result.SimTimeoutError:
pass
else:
assert False, "Expected a Timeout"
# Whether this test fails or passes depends on the behavior of the
# scheduler, simulator, and the implementation of the timeout function.
# CAUTION: THIS MAY CHANGE
# these tests should run in definition order, not lexicographic order
last_ordered_test = None
@cocotb.test()
async def test_ordering_3(dut):
global last_ordered_test
val, last_ordered_test = last_ordered_test, 3
assert val is None
@cocotb.test()
async def test_ordering_2(dut):
global last_ordered_test
val, last_ordered_test = last_ordered_test, 2
assert val == 3
@cocotb.test()
async def test_ordering_1(dut):
global last_ordered_test
val, last_ordered_test = last_ordered_test, 1
assert val == 2
@cocotb.test()
class TestClass(Coroutine):
def __init__(self, dut):
self._coro = self.run(dut)
async def run(self, dut):
pass
def send(self, value):
self._coro.send(value)
def throw(self, exception):
self._coro.throw(exception)
def __await__(self):
yield from self._coro.__await__()
@cocotb.test()
async def test_empty_docstring(dut) -> None:
""""""
@cocotb.test(expect_fail=True)
async def test_pytest_raises_fail(dut):
with pytest.raises(AssertionError):
assert True
@cocotb.test(expect_fail=True)
async def test_pytest_warns_fail(dut):
def test_func():
pass
with pytest.warns(RuntimeWarning):
test_func()
@cocotb.test(expect_fail=True)
async def test_pytest_deprecated_call_fail(dut):
def test_func():
pass
with pytest.deprecated_call():
test_func()
@cocotb.test(expect_fail=True)
async def test_pytest_raises_fail_in_task(dut):
async def test_func():
with pytest.raises(AssertionError):
assert True
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test(expect_fail=True)
async def test_pytest_warns_fail_in_task(dut):
def inner_func():
pass
async def test_func():
with pytest.warns(RuntimeWarning):
inner_func()
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test(expect_fail=True)
async def test_pytest_deprecated_call_fail_in_task(dut):
def inner_func():
pass
async def test_func():
with pytest.deprecated_call():
inner_func()
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test(expect_error=MyBaseException)
async def test_base_exception_expect_fail(dut):
raise MyBaseException
@cocotb.test(expect_error=MyBaseException)
async def test_base_exception_in_task_expect_fail(dut):
async def test_func():
raise MyBaseException
cocotb.start_soon(test_func())
await NullTrigger()
@cocotb.test
async def test_without_parenthesis(dut):
pass
| null |
1,267 |
import abc
import http.server
import shutil
import socketserver
import time
from multiprocessing import Process
from pathlib import Path
from typing import Generator, Tuple
from urllib.parse import urlparse
import pytest
import requests
from pygitguardian.config import DEFAULT_BASE_URI
from tests.repository import Repository
FUNCTESTS_DATA_PATH = Path(__file__).parent / "data"
# Path to the root of ggshield repository
REPO_PATH = Path(__file__).parent.parent.parent
HAS_DOCKER = shutil.which("docker") is not None
HOOK_CONTENT = """#!/usr/bin/env sh
ggshield {} scan pre-receive
"""
HOOK_CONTENT_ALL = """#!/usr/bin/env sh
ggshield {} scan pre-receive --all
"""
# Use this as a decorator for tests which call the `docker` binary
requires_docker = pytest.mark.skipif(not HAS_DOCKER, reason="This test requires Docker")
class AbstractGGAPIHandler(http.server.BaseHTTPRequestHandler, metaclass=abc.ABCMeta):
def do_HEAD(self):
self.send_response(200)
def do_GET(self):
# Forward all GET calls to the real server
url = DEFAULT_BASE_URI + self.path.replace("/exposed", "")
headers = {
**self.headers,
"Host": urlparse(url).netloc,
}
response = requests.get(url, headers=headers)
self.send_response(response.status_code)
for name, value in response.headers.items():
self.send_header(name, value)
self.end_headers()
self.wfile.write(response.content)
@abc.abstractmethod
def do_POST(self):
raise NotImplementedError()
class SlowGGAPIHandler(AbstractGGAPIHandler):
def do_POST(self):
if "multiscan" in self.path:
content = b'{"detail":"Sorry, I overslept!"}'
self.send_response(200)
self.send_header("content-type", "application/json")
self.send_header("Content-Length", str(len(content)))
self.end_headers()
time.sleep(60)
self.wfile.write(content)
else:
self.send_response(418)
class NoQuotaGGAPIHandler(AbstractGGAPIHandler):
def do_POST(self):
content = b'{"detail":"Quota limit reached."}'
self.send_response(403)
self.send_header("content-type", "application/json")
self.send_header("Content-Length", str(len(content)))
self.end_headers()
self.wfile.write(content)
class ReuseAddressServer(socketserver.TCPServer):
allow_reuse_address = True
def _start_slow_gitguardian_api(host: str, port: int):
with ReuseAddressServer((host, port), SlowGGAPIHandler) as httpd:
httpd.serve_forever()
def _start_no_quota_gitguardian_api(host: str, port: int):
with ReuseAddressServer((host, port), NoQuotaGGAPIHandler) as httpd:
httpd.serve_forever()
@pytest.fixture
@pytest.mark.allow_hosts(["localhost"])
def slow_gitguardian_api() -> Generator[str, None, None]:
host, port = "localhost", 8123
server_process = Process(target=_start_slow_gitguardian_api, args=(host, port))
server_process.start()
try:
yield f"http://{host}:{port}"
finally:
server_process.kill()
server_process.join()
@pytest.fixture
@pytest.mark.allow_hosts(["localhost"])
def no_quota_gitguardian_api() -> Generator[str, None, None]:
host, port = "localhost", 8124
server_process = Process(target=_start_no_quota_gitguardian_api, args=(host, port))
server_process.start()
try:
yield f"http://{host}:{port}"
finally:
server_process.kill()
server_process.join()
def repo_with_hook_content(tmp_path: Path, hook_content: str) -> Repository:
"""
Helper function that initializes a repo with a remote.
The remote contains a pre-receive hook with the given content.
:param tmp_path: the root path
:param hook_content: the pre-receive hook content
:return: the local Repository object
"""
remote_repo = Repository.create(tmp_path / "remote", bare=True)
local_repo = Repository.clone(remote_repo.path, tmp_path / "local")
hook_path = remote_repo.path / "hooks" / "pre-receive"
hook_path.write_text(hook_content)
hook_path.chmod(0o700)
return local_repo
@pytest.fixture
def iac_repo_with_hook(tmp_path: Path) -> Tuple[Repository, Repository]:
return repo_with_hook_content(
tmp_path=tmp_path, hook_content=HOOK_CONTENT.format("iac")
)
@pytest.fixture
def iac_repo_with_hook_all(tmp_path: Path) -> Tuple[Repository, Repository]:
return repo_with_hook_content(
tmp_path=tmp_path, hook_content=HOOK_CONTENT_ALL.format("iac")
)
@pytest.fixture
def sca_repo_with_hook(tmp_path: Path) -> Tuple[Repository, Repository]:
return repo_with_hook_content(
tmp_path=tmp_path, hook_content=HOOK_CONTENT.format("sca")
)
@pytest.fixture
def sca_repo_with_hook_all(tmp_path: Path) -> Tuple[Repository, Repository]:
return repo_with_hook_content(
tmp_path=tmp_path, hook_content=HOOK_CONTENT_ALL.format("sca")
)
| null |
1,268 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class EnableCenVbrHealthCheckRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'EnableCenVbrHealthCheck')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_CenId(self): # String
return self.get_query_params().get('CenId')
def set_CenId(self, CenId): # String
self.add_query_param('CenId', CenId)
def get_HealthCheckTargetIp(self): # String
return self.get_query_params().get('HealthCheckTargetIp')
def set_HealthCheckTargetIp(self, HealthCheckTargetIp): # String
self.add_query_param('HealthCheckTargetIp', HealthCheckTargetIp)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_VbrInstanceOwnerId(self): # Long
return self.get_query_params().get('VbrInstanceOwnerId')
def set_VbrInstanceOwnerId(self, VbrInstanceOwnerId): # Long
self.add_query_param('VbrInstanceOwnerId', VbrInstanceOwnerId)
def get_HealthCheckOnly(self): # Boolean
return self.get_query_params().get('HealthCheckOnly')
def set_HealthCheckOnly(self, HealthCheckOnly): # Boolean
self.add_query_param('HealthCheckOnly', HealthCheckOnly)
def get_VbrInstanceRegionId(self): # String
return self.get_query_params().get('VbrInstanceRegionId')
def set_VbrInstanceRegionId(self, VbrInstanceRegionId): # String
self.add_query_param('VbrInstanceRegionId', VbrInstanceRegionId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_HealthCheckSourceIp(self): # String
return self.get_query_params().get('HealthCheckSourceIp')
def set_HealthCheckSourceIp(self, HealthCheckSourceIp): # String
self.add_query_param('HealthCheckSourceIp', HealthCheckSourceIp)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_VbrInstanceId(self): # String
return self.get_query_params().get('VbrInstanceId')
def set_VbrInstanceId(self, VbrInstanceId): # String
self.add_query_param('VbrInstanceId', VbrInstanceId)
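# Example usage (a minimal sketch; the credentials, region, and IDs below are
# placeholders, not values from this module):
#
#   from aliyunsdkcore.client import AcsClient
#
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = EnableCenVbrHealthCheckRequest()
#   request.set_CenId('cen-xxxxxxxxxxxx')
#   request.set_VbrInstanceId('vbr-xxxxxxxxxxxx')
#   request.set_VbrInstanceRegionId('cn-hangzhou')
#   request.set_HealthCheckTargetIp('192.168.0.1')
#   response = client.do_action_with_exception(request)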
| null |
1,269 |
import logging
import subprocess
import sys
from concurrent.futures import ThreadPoolExecutor
from os import listdir, path
from pathlib import Path
from typing import TYPE_CHECKING, List, Optional
from hummingbot.logger.struct_logger import StructLogger, StructLogRecord
if TYPE_CHECKING:
from hummingbot.client.config.config_helpers import ClientConfigAdapter as _ClientConfigAdapter
STRUCT_LOGGER_SET = False
DEV_STRATEGY_PREFIX = "dev"
_prefix_path = None
# Do not raise exceptions during log handling
logging.setLogRecordFactory(StructLogRecord)
logging.setLoggerClass(StructLogger)
_shared_executor = None
_data_path = None
_cert_path = None
def root_path() -> Path:
from os.path import join, realpath
return Path(realpath(join(__file__, "../../")))
def get_executor() -> ThreadPoolExecutor:
global _shared_executor
if _shared_executor is None:
_shared_executor = ThreadPoolExecutor()
return _shared_executor
def prefix_path() -> str:
global _prefix_path
if _prefix_path is None:
from os.path import join, realpath
_prefix_path = realpath(join(__file__, "../../"))
return _prefix_path
def set_prefix_path(p: str):
global _prefix_path
_prefix_path = p
def data_path() -> str:
global _data_path
if _data_path is None:
from os.path import join, realpath
_data_path = realpath(join(prefix_path(), "data"))
import os
if not os.path.exists(_data_path):
os.makedirs(_data_path)
return _data_path
def set_data_path(path: str):
global _data_path
_data_path = path
_independent_package: Optional[bool] = None
def is_independent_package() -> bool:
global _independent_package
import os
if _independent_package is None:
_independent_package = not os.path.basename(sys.executable).startswith("python")
return _independent_package
def check_dev_mode():
try:
if is_independent_package():
return False
if not path.isdir(".git"):
return False
current_branch = subprocess.check_output(["git", "symbolic-ref", "--short", "HEAD"]).decode("utf8").rstrip()
if current_branch != "master":
return True
except Exception:
return False
def chdir_to_data_directory():
if not is_independent_package():
# Do nothing.
return
import os
import appdirs
app_data_dir: str = appdirs.user_data_dir("Hummingbot", "hummingbot.io")
os.makedirs(os.path.join(app_data_dir, "logs"), 0o711, exist_ok=True)
os.makedirs(os.path.join(app_data_dir, "conf"), 0o711, exist_ok=True)
os.makedirs(os.path.join(app_data_dir, "pmm_scripts"), 0o711, exist_ok=True)
os.makedirs(os.path.join(app_data_dir, "certs"), 0o711, exist_ok=True)
os.makedirs(os.path.join(app_data_dir, "scripts"), 0o711, exist_ok=True)
os.chdir(app_data_dir)
set_prefix_path(app_data_dir)
def get_logging_conf(conf_filename: str = 'hummingbot_logs.yml'):
import io
from os.path import join
from typing import Dict
from ruamel.yaml import YAML
file_path: str = join(prefix_path(), "conf", conf_filename)
yaml_parser: YAML = YAML()
if not path.exists(file_path):
return {}
with open(file_path) as fd:
yml_source: str = fd.read()
io_stream: io.StringIO = io.StringIO(yml_source)
config_dict: Dict = yaml_parser.load(io_stream)
return config_dict
def init_logging(conf_filename: str,
client_config_map: "_ClientConfigAdapter",
override_log_level: Optional[str] = None,
strategy_file_path: str = "hummingbot"):
import io
import logging.config
from os.path import join
from typing import Dict
import pandas as pd
from ruamel.yaml import YAML
from hummingbot.logger.struct_logger import StructLogger, StructLogRecord
global STRUCT_LOGGER_SET
if not STRUCT_LOGGER_SET:
logging.setLogRecordFactory(StructLogRecord)
logging.setLoggerClass(StructLogger)
STRUCT_LOGGER_SET = True
# Do not raise exceptions during log handling
logging.raiseExceptions = False
file_path: str = join(prefix_path(), "conf", conf_filename)
yaml_parser: YAML = YAML()
with open(file_path) as fd:
yml_source: str = fd.read()
yml_source = yml_source.replace("$PROJECT_DIR", prefix_path())
yml_source = yml_source.replace("$DATETIME", pd.Timestamp.now().strftime("%Y-%m-%d-%H-%M-%S"))
yml_source = yml_source.replace("$STRATEGY_FILE_PATH", strategy_file_path.replace(".yml", ""))
io_stream: io.StringIO = io.StringIO(yml_source)
config_dict: Dict = yaml_parser.load(io_stream)
if override_log_level is not None and "loggers" in config_dict:
for logger in config_dict["loggers"]:
if logger in client_config_map.logger_override_whitelist:
config_dict["loggers"][logger]["level"] = override_log_level
logging.config.dictConfig(config_dict)
def get_strategy_list() -> List[str]:
"""
Search `hummingbot.strategy` folder for all available strategies
Automatically hide all strategies that start with "dev" if on master branch
"""
try:
folder = path.realpath(path.join(__file__, "../strategy"))
# Only include valid directories
strategies = [d for d in listdir(folder) if path.isdir(path.join(folder, d)) and not d.startswith("__")]
on_dev_mode = check_dev_mode()
if not on_dev_mode:
strategies = [s for s in strategies if not s.startswith(DEV_STRATEGY_PREFIX)]
return sorted(strategies)
except Exception as e:
logging.getLogger().warning(f"Error getting strategy set: {str(e)}")
return []
| null |
1,270 |
"""
Code taken and adapted from https://github.com/FranxYao/chain-of-thought-hub
"""
import json
import os
import time
import pandas as pd
from onmt.utils.logging import init_logger
from onmt.translate.translator import build_translator
from onmt.inputters.dynamic_iterator import build_dynamic_dataset_iter
from onmt.inputters.inputter import IterOnDevice
from onmt.transforms import get_transforms_cls
from onmt.constants import CorpusTask
import onmt.opts as opts
from onmt.utils.parse import ArgumentParser
from onmt.utils.misc import use_gpu, set_random_seed
TASKS = [
"abstract_algebra",
"anatomy",
"astronomy",
"business_ethics",
"clinical_knowledge",
"college_biology",
"college_chemistry",
"college_computer_science",
"college_mathematics",
"college_medicine",
"college_physics",
"computer_security",
"conceptual_physics",
"econometrics",
"electrical_engineering",
"elementary_mathematics",
"formal_logic",
"global_facts",
"high_school_biology",
"high_school_chemistry",
"high_school_computer_science",
"high_school_european_history",
"high_school_geography",
"high_school_government_and_politics",
"high_school_macroeconomics",
"high_school_mathematics",
"high_school_microeconomics",
"high_school_physics",
"high_school_psychology",
"high_school_statistics",
"high_school_us_history",
"high_school_world_history",
"human_aging",
"human_sexuality",
"international_law",
"jurisprudence",
"logical_fallacies",
"machine_learning",
"management",
"marketing",
"medical_genetics",
"miscellaneous",
"moral_disputes",
"moral_scenarios",
"nutrition",
"philosophy",
"prehistory",
"professional_accounting",
"professional_law",
"professional_medicine",
"professional_psychology",
"public_relations",
"security_studies",
"sociology",
"us_foreign_policy",
"virology",
"world_religions",
]
choices = ["A", "B", "C", "D"]
def compute_metric(output_filename):
with open(output_filename, "r") as f:
run_results = json.load(f)
total_acc = 0
total_num = 0
for task in run_results:
acc = 0
pred_answers = run_results[task]["pred_answers"]
gold_answers = run_results[task]["gold_answers"]
for pred, gold in zip(pred_answers, gold_answers):
if pred == gold:
acc += 1
print("ACC-%s: %.4f" % (task, acc / len(gold_answers)))
total_acc += acc
total_num += len(gold_answers)
print("ACC-all: %.4f" % (total_acc / total_num))
def format_subject(subject):
subl = subject.split("_")
s = ""
for entry in subl:
s += " " + entry
return s
def format_example(df, idx, include_answer=True):
prompt = df.iloc[idx, 0]
k = df.shape[1] - 2
for j in range(k):
prompt += "\n{}. {}".format(choices[j], df.iloc[idx, j + 1])
prompt += "\nAnswer:"
if include_answer:
prompt += " {}\n\n".format(df.iloc[idx, k + 1])
return prompt
def gen_prompt(train_df, subject, k=-1):
prompt = "The following are multiple choice questions (with answers) about {}.\n\n".format(
format_subject(subject)
)
if k == -1:
k = train_df.shape[0]
for i in range(k):
prompt += format_example(train_df, i)
return prompt
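# With k=1, the prompt assembled from gen_prompt + format_example looks
# roughly like this (illustrative placeholders, not actual dataset content):
#
#   The following are multiple choice questions (with answers) about anatomy.
#
#   <dev question>
#   A. <choice>
#   B. <choice>
#   C. <choice>
#   D. <choice>
#   Answer: B
#
#   <test question>
#   A. <choice>
#   ...
#   Answer: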
# def custom_stopping_criteria(input_ids, score, **kwargs):
# stop_ids = [29871, 13, 13] # \n\n
# return input_ids[-len(stop_ids)]
def evaluate(opt):
ArgumentParser.validate_translate_opts(opt)
ArgumentParser._get_all_transform_translate(opt)
ArgumentParser._validate_transforms_opts(opt)
ArgumentParser.validate_translate_opts_dynamic(opt)
logger = init_logger(opt.log_file)
set_random_seed(opt.seed, use_gpu(opt))
run_results = {}
dir_name = os.path.dirname(opt.models[0])
base_name = os.path.basename(opt.models[0])
output_filename = os.path.join(dir_name, "mmlu_results_%s.fr.json" % base_name[:-3])
# Build the translator (along with the model)
translator = build_translator(opt, logger=logger, report_score=True)
# Build the transforms (along with the tokenizer)
transforms_cls = get_transforms_cls(opt._all_transform)
data_dir = "eval_llm/MMLU-FR/data/"
ntrain = 5 # nshots from dev
start_time = time.time()
for task in TASKS:
print("Testing %s ..." % task)
records = []
src = []
dev_df = pd.read_csv(
os.path.join(data_dir, "dev", task + "_dev.fr.csv"), header=None
)[:ntrain]
test_df = pd.read_csv(
os.path.join(data_dir, "test", task + "_test.fr.csv"), header=None
)
for i in range(test_df.shape[0]):
# get prompt and make sure it fits
k = ntrain
prompt_end = format_example(test_df, i, include_answer=False)
train_prompt = gen_prompt(dev_df, task, k)
prompt = train_prompt + prompt_end
while len(prompt.split()) > 768:
prompt_split = prompt.split("\n\n")
prompt_split.pop(1)
prompt = "\n\n".join(prompt_split)
label = test_df.iloc[i, test_df.shape[1] - 1]
records.append({"prompt": prompt, "answer": label})
src.append(prompt.replace("\n", "⦅newline⦆"))
infer_iter = build_dynamic_dataset_iter(
opt, transforms_cls, translator.vocabs, task=CorpusTask.INFER, src=src
)
infer_iter = IterOnDevice(infer_iter, opt.gpu)
scores, preds = translator._translate(
infer_iter,
transform=infer_iter.transform,
attn_debug=opt.attn_debug,
align_debug=opt.align_debug,
)
pred_answers = [
x.lstrip() for sublist in preds for x in sublist
] # flatten the list of list
gold_answers = [record["answer"] for record in records]
run_results[task] = {"pred_answers": pred_answers, "gold_answers": gold_answers}
with open(output_filename, "w") as f:
json.dump(run_results, f, ensure_ascii=False, indent=2)
compute_metric(output_filename)
end_time = time.time()
print("total run time %.2f" % (end_time - start_time))
def _get_parser():
parser = ArgumentParser(description="run_mmlu_opennmt_fr.py")
opts.config_opts(parser)
opts.translate_opts(parser, dynamic=True)
return parser
def main():
parser = _get_parser()
opt = parser.parse_args()
evaluate(opt)
if __name__ == "__main__":
main()
| null |
1,271 |
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom Translate Datasets Template."""
from __future__ import annotations
import collections
import os
from typing import Dict, List, Tuple
from etils import epath
from tensorflow_datasets.core import dataset_builder
from tensorflow_datasets.core import dataset_info
from tensorflow_datasets.core import features as features_lib
from tensorflow_datasets.core import splits as split_lib
from tensorflow_datasets.core.utils import version
from tensorflow_datasets.core.utils.lazy_imports_utils import tensorflow as tf
# Dict of 'split_name'-> 'language' -> `List[text_data]`
SplitExampleDict = Dict[str, Dict[str, List[str]]]
class TranslateFolder(dataset_builder.DatasetBuilder):
"""Generic text translation dataset created from manual directory.
The directory content should be as followed:
```
path/to/my_data/
lang1.train.txt
lang2.train.txt
lang1.test.txt
lang2.test.txt
...
```
Each file should have one example per line. Line order should match between
files.
To use it:
```
builder = tfds.TranslateFolder(root_dir='path/to/my_data/')
print(builder.info) # Splits, num examples,... are automatically calculated
ds = builder.as_dataset(split='train', shuffle_files=True)
```
Note: All examples from all splits are loaded in memory in `__init__`.
"""
VERSION = version.Version('1.0.0')
def __init__(self, root_dir: str):
# Extract the splits, examples
root_dir = os.path.expanduser(root_dir)
self._split_examples, self._languages = _get_split_language_examples(
root_dir
)
super(TranslateFolder, self).__init__()
# Reset `_data_dir` as it should not change to DATA_DIR/Version
self._data_dir = root_dir
# Update DatasetInfo splits
split_infos = [
split_lib.SplitInfo( # pylint: disable=g-complex-comprehension
name=split_name,
shard_lengths=[len(next(iter(examples.values())))],
num_bytes=0,
)
for split_name, examples in self._split_examples.items()
]
split_dict = split_lib.SplitDict(split_infos)
self.info.set_splits(split_dict)
def _info(self) -> dataset_info.DatasetInfo:
return dataset_info.DatasetInfo(
builder=self,
description='Generic text translation dataset.',
features=features_lib.FeaturesDict(
{lang: features_lib.Text() for lang in self._languages}
),
)
def _download_and_prepare(self, **kwargs): # pytype: disable=signature-mismatch # overriding-parameter-count-checks
raise NotImplementedError(
'No need to call download_and_prepare function for {}.'.format(
type(self).__name__
)
)
def download_and_prepare(self, **kwargs):
return self._download_and_prepare()
def _as_dataset(
self, split, shuffle_files=False, decoders=None, read_config=None
) -> tf.data.Dataset:
"""Generate dataset for given split."""
del read_config # Unused (automatically created in `DatasetBuilder`)
if decoders:
raise NotImplementedError(
'`decoders` is not supported with {}'.format(type(self).__name__)
)
if split not in self.info.splits.keys():
raise ValueError(
'Unrecognized split {}. Subsplit API not yet supported for {}. '
'Split name should be one of {}.'.format(
split, type(self).__name__, list(self.info.splits.keys())
)
)
# Build the tf.data.Dataset object
lang_example_dict = self._split_examples[split]
ds = tf.data.Dataset.from_tensor_slices(lang_example_dict)
if shuffle_files:
ds = ds.shuffle(len(lang_example_dict))
return ds
def _get_split_language_examples(
root_dir: str,
) -> Tuple[SplitExampleDict, List[str]]:
"""Extract all split names and associated text data.
Args:
root_dir: The folder where the `lang.split.txt` are located
Returns:
split_examples: Mapping split_names -> language -> List[text_data]
languages: The list of languages
"""
split_examples = collections.defaultdict(dict)
languages = set()
files = tf.io.gfile.listdir(root_dir)
for file in files:
lang, split_name, _ = file.split('.')
split_examples[split_name][lang] = _list_examples(
os.path.join(root_dir, file)
)
languages.add(lang)
# One-to-One translation
for split, examples in split_examples.items():
num_examples = {lang: len(ex) for lang, ex in examples.items()}
if len(set(num_examples.values())) != 1:
raise ValueError(
'Num examples for split {} do not match: {}'.format(
split, num_examples
)
)
return split_examples, sorted(languages)
def _list_examples(file: str) -> List[str]:
with epath.Path(file).open() as f:
sentences = f.read().splitlines()
return sentences
| null |
1,272 |
import random
import re
from datetime import datetime, timedelta
from typing import Set
import pytz
import requests
from django.conf import settings
from requests import RequestException
from simplejson import JSONDecodeError
from cl.alerts.models import DocketAlert
from cl.favorites.models import DocketTag, Note
from cl.lib.celery_utils import CeleryThrottle
from cl.lib.command_utils import VerboseCommand, logger
from cl.scrapers.tasks import update_docket_info_iquery
from cl.search.models import Court, Docket
def get_docket_ids_missing_info(num_to_get: int) -> Set[int]:
return set(
Docket.objects.filter(
date_filed__isnull=True,
source__in=Docket.RECAP_SOURCES,
court__jurisdiction__in=[
Court.FEDERAL_DISTRICT,
Court.FEDERAL_BANKRUPTCY,
],
)
.exclude(pacer_case_id=None)
.order_by("-view_count")[:num_to_get]
.values_list("pk", flat=True)
)
def get_docket_ids() -> Set[int]:
"""Get docket IDs to update via iquery
:return: docket IDs for which we should crawl iquery
"""
docket_ids = set()
if hasattr(settings, "PLAUSIBLE_API_TOKEN"):
try:
# Get the top 250 entry pages from the day
#
# curl 'https://plausible.io/api/v1/stats/breakdown?\
# site_id=courtlistener.com&\
# period=day&\
# date=2022-03-14&\
# property=visit:entry_page&\
# metrics=visitors&\
# limit=250' \
# -H "Authorization: Bearer XXX" | jq
#
# This is meant to be run early in the morning. Each site in
# Plausible has a timezone setting. For CL, it's US/Pacific, so
# take today's date (early in the morning Pacific time), subtract
# one day, and that's your day for this.
yesterday = (
(datetime.now(pytz.timezone("US/Pacific")) - timedelta(days=1))
.date()
.isoformat()
)
r = requests.get(
settings.PLAUSIBLE_API_URL,
timeout=10,
params={
"site_id": "courtlistener.com",
"period": "day",
"date": yesterday,
"property": "visit:entry_page",
"metrics": "visitors",
"limit": "250",
},
headers={
"Authorization": f"Bearer {settings.PLAUSIBLE_API_TOKEN}",
},
)
r.raise_for_status()
j = r.json()
except (
ConnectionRefusedError,
JSONDecodeError,
RequestException,
) as e:
logger.warning(
"iQuery scraper was unable to get results from Plausible. Got "
"exception: %s" % e
)
else:
# Filter to docket pages with some amount of traffic
for item in j["results"]:
# j["results"] is a list of dicts that look like:
# {"entry_page": "/recap", "visitors": 68}
# Note that Plausible's visitor count is divided by ten on
# CourtListener to save money. The value below is thus 10× what
# it appears to be.
if item["visitors"] < 3:
continue
url = item["entry_page"]
if match := re.search(r"^/docket/([0-9]+)/", url):
docket_ids.add(match.group(1))
# Add in docket IDs that have docket alerts, tags, or notes
docket_ids.update(DocketAlert.objects.values_list("docket", flat=True))
docket_ids.update(
Note.objects.exclude(docket_id=None)
.distinct("docket_id")
.values_list("docket_id", flat=True)
)
docket_ids.update(
DocketTag.objects.distinct("docket_id").values_list(
"docket_id", flat=True
)
)
docket_ids.update(
Docket.objects.filter(
case_name__isnull=True,
source__in=Docket.RECAP_SOURCES,
court__jurisdiction__in=[
Court.FEDERAL_DISTRICT,
Court.FEDERAL_BANKRUPTCY,
],
)
.exclude(pacer_case_id=None)
.values_list("pk", flat=True)
)
return docket_ids
class Command(VerboseCommand):
help = "Scrape PACER iquery report"
def add_arguments(self, parser):
parser.add_argument(
"--queue",
default="batch1",
help="The celery queue where the tasks should be processed.",
)
parser.add_argument(
"--include-old-terminated",
action="store_true",
default=False,
help="Whether to scrape dockets terminated and with no new "
"filings in 90 days",
)
parser.add_argument(
"--do-missing-date-filed",
default=0,
help="Whether to scrape dockets with missing date_filed field."
"if set, should be the number of dockets to scrape",
type=int,
)
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
do_missing_date_filed = options["do_missing_date_filed"]
if do_missing_date_filed:
docket_ids = get_docket_ids_missing_info(do_missing_date_filed)
else:
docket_ids = get_docket_ids()
# docket_ids = get_docket_ids().union(get_docket_ids_missing_info(100000)) #once initial scrape filling in date_filed is done, uncomment this to do these nightly
logger.info(
"iQuery crawling starting up. Will crawl %s dockets",
len(docket_ids),
)
# Shuffle the dockets to make sure we don't hit one district all at
# once.
docket_ids = list(docket_ids)
random.shuffle(docket_ids)
queue = options["queue"]
throttle = CeleryThrottle(queue_name=queue)
now = datetime.now().date()
include_old_terminated = options["include_old_terminated"]
for i, docket_id in enumerate(docket_ids):
throttle.maybe_wait()
if i % 500 == 0:
logger.info("Sent %s items to celery for crawling so far.", i)
d = Docket.objects.filter(pk=docket_id).select_related("court")[0]
too_many_days_old = 90
terminated_too_long_ago = (
d.date_terminated
and (now - d.date_terminated).days > too_many_days_old
)
last_filing_too_long_ago = (
d.date_last_filing
and (now - d.date_last_filing).days > too_many_days_old
)
if all(
[
not include_old_terminated,
terminated_too_long_ago,
last_filing_too_long_ago,
d.date_filed,
d.case_name,
]
):
# Skip old terminated cases, but do them if we're missing
# date_filed or case_name
continue
if not d.pacer_case_id:
# No case ID, can't crawl it. Skip.
continue
if d.court.jurisdiction not in [
Court.FEDERAL_DISTRICT,
Court.FEDERAL_BANKRUPTCY,
]:
# Appeals or other kind of court that got swept up. Punt.
continue
update_docket_info_iquery.apply_async(
args=(d.pk, d.court_id), queue=queue
)
logger.info("Done!")
| null |
1,273 |
#!/usr/bin/env python3
"""
A script to sync ECI campaign and it's pages with default API
Configure:
- AUTH_USER
- AUTH_PASSWORD
- ORG_NAME - for the org holding campaign
- talks to local api
"""
from gql import gql, Client
from gql.transport.aiohttp import AIOHTTPTransport
from gql.transport.phoenix_channel_websockets import PhoenixChannelWebsocketsTransport
from aiohttp.helpers import BasicAuth
from logging import getLogger, DEBUG, INFO, StreamHandler
from os import environ
from subprocess import call
from sys import argv
import json
import sentry_sdk
if 'SENTRY_DSN' in environ:
sentry_sdk.init(environ['SENTRY_DSN'], traces_sample_rate=1.0)
log = getLogger("proca")
log.addHandler(StreamHandler())
log.setLevel(DEBUG)
#DEST_API='http://localhost:4000/api'
DEST_API="http://localhost:4001/private/api"
AUTH_USER=environ['AUTH_USER']
AUTH_PASSWORD=environ['AUTH_PASSWORD']
# so we do not propagate it when executing subcommands
del environ['AUTH_USER']
del environ['AUTH_PASSWORD']
def add_page(org_name, campaign_name, ap_attrs, campaign_attrs):
auth=BasicAuth(AUTH_USER, AUTH_PASSWORD)
transport = AIOHTTPTransport(url=DEST_API, auth=auth)
client = Client(transport=transport)
query = gql(
"""
mutation AddPage($org:String!, $campaignName:String!, $config: Json!, $page:ActionPageInput!) {
upsertCampaign(orgName:$org, input:{
title: "Stop Settlements",
name:$campaignName,
config: $config,
actionPages:[$page]
}) {
... on PrivateCampaign{
actionPages {name id}
}
}
}
"""
)
d = client.execute(query, variable_values={
'org': org_name,
'campaignName': campaign_name,
'page': ap_attrs,
'config': campaign_attrs['config']
})
print(d)
if 'errors' in d:
log.error(f"Cannot add page: {d['errors']}")
return None
if 'upsertCampaign' in d:
pages = d['upsertCampaign']['actionPages']
return [p for p in pages if p['name'] == ap_attrs['name']][0]
def sync(campaign_name):
#transport = AIOHTTPTransport(url="https://api.proca.app/api")
transport = PhoenixChannelWebsocketsTransport(url="wss://api.proca.app/socket/websocket")
client = Client(transport=transport)
query = gql(
"""
subscription Pages {
actionPageUpserted {
id name locale
campaign { name config }
config
}
}
"""
)
x = client.subscribe(query)
for d in x:
log.debug("upsert AP data %s", d)
if 'actionPageUpserted' in d:
ap = d['actionPageUpserted']
if ap['campaign']['name'] == campaign_name:
with sentry_sdk.start_transaction() as t:
t.name = ap['name']
sync_ap(ap, campaign_name)
else:
log.debug(f'Ignoring page of campaign {ap["campaign"]["name"]}')
else:
log.info(f"data with no actionPageUpserted {d}")
def sync_ap(ap, campaign_name):
ap_id = ap['id']
ap_name = ap['name']
log.info(f"Sync action page {ap_name}")
page = {
'name': ap_name,
'locale': ap['locale']
}
campaign = {
"config": ap['campaign']['config']
}
page2 = add_page(environ['ORG_NAME'], campaign_name, page, campaign)
log.info(f'Synced {page2["name"]} id {page2["id"]}')
with sentry_sdk.start_span() as s:
s.description=f'fetch {ap["name"]}'
fetch(ap_id)
patch(ap_id, page2['id'])
with sentry_sdk.start_span() as s:
s.description=f'build {ap["name"]}'
build(ap_id)
def fetch(ap_id):
call(f"yarn pull {ap_id}".split(' '))
def build(ap_id):
call(f"yarn build {ap_id}".split(' '))
def patch(ap_id, eci_ap_id):
fn = f'config/{ap_id}.json'
conf = json.load(open(fn))
c = conf
for p in ['component', 'eci']:
if p not in c:
c[p] = {}
c = c[p]
c['actionpage'] = eci_ap_id
json.dump(conf, open(fn, 'w'), indent=2)
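# After patch(), config/<ap_id>.json contains (existing keys are preserved):
#
#   {
#     "component": {
#       "eci": {"actionpage": <eci_ap_id>}
#     },
#     ...
#   }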
if __name__ == "__main__":
try:
sync(argv[1])
except IndexError:
print(f'Usage: {argv[0]} campaign_name')
| null |
1,274 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class CreateHealthCheckRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'CreateHealthCheck','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProbeInterval(self): # Integer
return self.get_query_params().get('ProbeInterval')
def set_ProbeInterval(self, ProbeInterval): # Integer
self.add_query_param('ProbeInterval', ProbeInterval)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DstPort(self): # Integer
return self.get_query_params().get('DstPort')
def set_DstPort(self, DstPort): # Integer
self.add_query_param('DstPort', DstPort)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_Type(self): # String
return self.get_query_params().get('Type')
def set_Type(self, Type): # String
self.add_query_param('Type', Type)
def get_FailCountThreshold(self): # Integer
return self.get_query_params().get('FailCountThreshold')
def set_FailCountThreshold(self, FailCountThreshold): # Integer
self.add_query_param('FailCountThreshold', FailCountThreshold)
def get_ProbeTimeout(self): # Integer
return self.get_query_params().get('ProbeTimeout')
def set_ProbeTimeout(self, ProbeTimeout): # Integer
self.add_query_param('ProbeTimeout', ProbeTimeout)
def get_RttFailThreshold(self): # Integer
return self.get_query_params().get('RttFailThreshold')
def set_RttFailThreshold(self, RttFailThreshold): # Integer
self.add_query_param('RttFailThreshold', RttFailThreshold)
def get_RttThreshold(self): # Integer
return self.get_query_params().get('RttThreshold')
def set_RttThreshold(self, RttThreshold): # Integer
self.add_query_param('RttThreshold', RttThreshold)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_DstIpAddr(self): # String
return self.get_query_params().get('DstIpAddr')
def set_DstIpAddr(self, DstIpAddr): # String
self.add_query_param('DstIpAddr', DstIpAddr)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_SrcIpAddr(self): # String
return self.get_query_params().get('SrcIpAddr')
def set_SrcIpAddr(self, SrcIpAddr): # String
self.add_query_param('SrcIpAddr', SrcIpAddr)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_SmartAGId(self): # String
return self.get_query_params().get('SmartAGId')
def set_SmartAGId(self, SmartAGId): # String
self.add_query_param('SmartAGId', SmartAGId)
def get_SrcPort(self): # Integer
return self.get_query_params().get('SrcPort')
def set_SrcPort(self, SrcPort): # Integer
self.add_query_param('SrcPort', SrcPort)
def get_ProbeCount(self): # Integer
return self.get_query_params().get('ProbeCount')
def set_ProbeCount(self, ProbeCount): # Integer
self.add_query_param('ProbeCount', ProbeCount)
| null |
1,275 |
"""
Fencepost-simple graph structure implementation.
"""
# Currently (2013.7.12) only used in easing the parsing of graph datatype data.
class SimpleGraphNode:
"""
Node representation.
"""
def __init__(self, index, **data):
"""
:param index: index of this node in some parent list
:type index: int
:param data: any extra data that needs to be saved
:type data: (variadic dictionary)
"""
# a bit application specific (could be 'id')
self.index = index
self.data = data
class SimpleGraphEdge:
"""
Edge representation.
"""
def __init__(self, source_index, target_index, **data):
"""
:param source_index: index of the edge's source node in some parent list
:type source_index: int
:param target_index: index of the edge's target node in some parent list
:type target_index: int
:param data: any extra data that needs to be saved
:type data: (variadic dictionary)
"""
self.source_index = source_index
self.target_index = target_index
self.data = data
class SimpleGraph:
"""
Each node is unique (by id) and stores its own index in the node list/odict.
Each edge is represented as two indices into the node list/odict.
Both nodes and edges allow storing extra information if needed.
Allows:
multiple edges between two nodes
self referential edges (an edge from a node to itself)
These graphs are not specifically directed, but since sources and targets
are listed on the edges, they could easily be used that way.
"""
def __init__(self, nodes=None, edges=None):
# use an odict so that edge indices actually match the final node list indices
self.nodes = nodes or {}
self.edges = edges or []
def add_node(self, node_id, **data):
"""
Adds a new node only if it doesn't already exist.
:param node_id: some unique identifier
:type node_id: (hashable)
:param data: any extra data that needs to be saved
:type data: (variadic dictionary)
:returns: the new node
"""
if node_id in self.nodes:
return self.nodes[node_id]
node_index = len(self.nodes)
new_node = SimpleGraphNode(node_index, **data)
self.nodes[node_id] = new_node
return new_node
def add_edge(self, source_id, target_id, **data):
"""
Adds a new edge between two nodes, creating the nodes if they don't already exist.
:param source_id: the id of the source node
:type source_id: (hashable)
:param target_id: the id of the target node
:type target_id: (hashable)
:param data: any extra data that needs to be saved for the edge
:type data: (variadic dictionary)
:returns: the new edge
..note: although this will create new nodes if necessary, there's
no way to pass `data` to them - so if you need to associate more data with
the nodes, use `add_node` first.
"""
# adds target_id to source_id's edge list
# adding source_id and/or target_id to nodes if not there already
if source_id not in self.nodes:
self.add_node(source_id)
if target_id not in self.nodes:
self.add_node(target_id)
new_edge = SimpleGraphEdge(self.nodes[source_id].index, self.nodes[target_id].index, **data)
self.edges.append(new_edge)
return new_edge
def gen_node_dicts(self):
"""
Returns a generator that yields node dictionaries in the form:
{ 'id': <the nodes unique id>, 'data': <any additional node data> }
"""
for node_id, node in self.nodes.items():
yield {"id": node_id, "data": node.data}
def gen_edge_dicts(self):
"""
Returns a generator that yields node dictionaries in the form::
{
'source': <the index of the source node in the graph's node list>,
'target': <the index of the target node in the graph's node list>,
'data' : <any additional edge data>
}
"""
for edge in self.edges:
yield {"source": edge.source_index, "target": edge.target_index, "data": edge.data}
def as_dict(self):
"""
Returns a dictionary of the form::
{ 'nodes': <a list of node dictionaries>, 'edges': <a list of node dictionaries> }
"""
return {"nodes": list(self.gen_node_dicts()), "edges": list(self.gen_edge_dicts())}
| null |
1,276 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksas.endpoint import endpoint_data
class AddClientUserDefineRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Sas', '2018-12-03', 'AddClientUserDefineRule')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ActionType(self): # Integer
return self.get_query_params().get('ActionType')
def set_ActionType(self, ActionType): # Integer
self.add_query_param('ActionType', ActionType)
def get_NewFilePath(self): # String
return self.get_query_params().get('NewFilePath')
def set_NewFilePath(self, NewFilePath): # String
self.add_query_param('NewFilePath', NewFilePath)
def get_Type(self): # Integer
return self.get_query_params().get('Type')
def set_Type(self, Type): # Integer
self.add_query_param('Type', Type)
def get_Platform(self): # String
return self.get_query_params().get('Platform')
def set_Platform(self, Platform): # String
self.add_query_param('Platform', Platform)
def get_RegistryKey(self): # String
return self.get_query_params().get('RegistryKey')
def set_RegistryKey(self, RegistryKey): # String
self.add_query_param('RegistryKey', RegistryKey)
def get_Cmdline(self): # String
return self.get_query_params().get('Cmdline')
def set_Cmdline(self, Cmdline): # String
self.add_query_param('Cmdline', Cmdline)
def get_FilePath(self): # String
return self.get_query_params().get('FilePath')
def set_FilePath(self, FilePath): # String
self.add_query_param('FilePath', FilePath)
def get_Md5List(self): # String
return self.get_query_params().get('Md5List')
def set_Md5List(self, Md5List): # String
self.add_query_param('Md5List', Md5List)
def get_ParentProcPath(self): # String
return self.get_query_params().get('ParentProcPath')
def set_ParentProcPath(self, ParentProcPath): # String
self.add_query_param('ParentProcPath', ParentProcPath)
def get_ProcPath(self): # String
return self.get_query_params().get('ProcPath')
def set_ProcPath(self, ProcPath): # String
self.add_query_param('ProcPath', ProcPath)
def get_ParentCmdline(self): # String
return self.get_query_params().get('ParentCmdline')
def set_ParentCmdline(self, ParentCmdline): # String
self.add_query_param('ParentCmdline', ParentCmdline)
def get_IP(self): # String
return self.get_query_params().get('IP')
def set_IP(self, IP): # String
self.add_query_param('IP', IP)
def get_RegistryContent(self): # String
return self.get_query_params().get('RegistryContent')
def set_RegistryContent(self, RegistryContent): # String
self.add_query_param('RegistryContent', RegistryContent)
def get_PortStr(self): # String
return self.get_query_params().get('PortStr')
def set_PortStr(self, PortStr): # String
self.add_query_param('PortStr', PortStr)
def get_Port(self): # Integer
return self.get_query_params().get('Port')
def set_Port(self, Port): # Integer
self.add_query_param('Port', Port)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
| null |
1,277 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadb.endpoint import endpoint_data
class DescribeAuditLogRecordsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adb', '2019-03-15', 'DescribeAuditLogRecords','ads')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_StartTime(self): # String
return self.get_query_params().get('StartTime')
def set_StartTime(self, StartTime): # String
self.add_query_param('StartTime', StartTime)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_HostAddress(self): # String
return self.get_query_params().get('HostAddress')
def set_HostAddress(self, HostAddress): # String
self.add_query_param('HostAddress', HostAddress)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Order(self): # String
return self.get_query_params().get('Order')
def set_Order(self, Order): # String
self.add_query_param('Order', Order)
def get_SqlType(self): # String
return self.get_query_params().get('SqlType')
def set_SqlType(self, SqlType): # String
self.add_query_param('SqlType', SqlType)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_DBClusterId(self): # String
return self.get_query_params().get('DBClusterId')
def set_DBClusterId(self, DBClusterId): # String
self.add_query_param('DBClusterId', DBClusterId)
def get_QueryKeyword(self): # String
return self.get_query_params().get('QueryKeyword')
def set_QueryKeyword(self, QueryKeyword): # String
self.add_query_param('QueryKeyword', QueryKeyword)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_EndTime(self): # String
return self.get_query_params().get('EndTime')
def set_EndTime(self, EndTime): # String
self.add_query_param('EndTime', EndTime)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBName(self): # String
return self.get_query_params().get('DBName')
def set_DBName(self, DBName): # String
self.add_query_param('DBName', DBName)
def get_Succeed(self): # String
return self.get_query_params().get('Succeed')
def set_Succeed(self, Succeed): # String
self.add_query_param('Succeed', Succeed)
def get_User(self): # String
return self.get_query_params().get('User')
def set_User(self, User): # String
self.add_query_param('User', User)
def get_OrderType(self): # String
return self.get_query_params().get('OrderType')
def set_OrderType(self, OrderType): # String
self.add_query_param('OrderType', OrderType)
| null |
1,278 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class CreateOTADynamicUpgradeJobRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'CreateOTADynamicUpgradeJob')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DynamicMode(self):
return self.get_query_params().get('DynamicMode')
def set_DynamicMode(self,DynamicMode):
self.add_query_param('DynamicMode',DynamicMode)
def get_MultiModuleMode(self):
return self.get_query_params().get('MultiModuleMode')
def set_MultiModuleMode(self,MultiModuleMode):
self.add_query_param('MultiModuleMode',MultiModuleMode)
def get_RetryCount(self):
return self.get_query_params().get('RetryCount')
def set_RetryCount(self,RetryCount):
self.add_query_param('RetryCount',RetryCount)
def get_TimeoutInMinutes(self):
return self.get_query_params().get('TimeoutInMinutes')
def set_TimeoutInMinutes(self,TimeoutInMinutes):
self.add_query_param('TimeoutInMinutes',TimeoutInMinutes)
def get_NeedConfirm(self):
return self.get_query_params().get('NeedConfirm')
def set_NeedConfirm(self,NeedConfirm):
self.add_query_param('NeedConfirm',NeedConfirm)
def get_GroupType(self):
return self.get_query_params().get('GroupType')
def set_GroupType(self,GroupType):
self.add_query_param('GroupType',GroupType)
def get_NeedPush(self):
return self.get_query_params().get('NeedPush')
def set_NeedPush(self,NeedPush):
self.add_query_param('NeedPush',NeedPush)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_DownloadProtocol(self):
return self.get_query_params().get('DownloadProtocol')
def set_DownloadProtocol(self,DownloadProtocol):
self.add_query_param('DownloadProtocol',DownloadProtocol)
def get_Tags(self):
return self.get_query_params().get('Tag')
def set_Tags(self, Tags):
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def get_GroupId(self):
return self.get_query_params().get('GroupId')
def set_GroupId(self,GroupId):
self.add_query_param('GroupId',GroupId)
def get_FirmwareId(self):
return self.get_query_params().get('FirmwareId')
def set_FirmwareId(self,FirmwareId):
self.add_query_param('FirmwareId',FirmwareId)
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey)
def get_RetryInterval(self):
return self.get_query_params().get('RetryInterval')
def set_RetryInterval(self,RetryInterval):
self.add_query_param('RetryInterval',RetryInterval)
def get_SrcVersions(self):
return self.get_query_params().get('SrcVersion')
def set_SrcVersions(self, SrcVersions):
for depth1 in range(len(SrcVersions)):
if SrcVersions[depth1] is not None:
self.add_query_param('SrcVersion.' + str(depth1 + 1) , SrcVersions[depth1])
def get_OverwriteMode(self):
return self.get_query_params().get('OverwriteMode')
def set_OverwriteMode(self,OverwriteMode):
self.add_query_param('OverwriteMode',OverwriteMode)
def get_MaximumPerMinute(self):
return self.get_query_params().get('MaximumPerMinute')
def set_MaximumPerMinute(self,MaximumPerMinute):
self.add_query_param('MaximumPerMinute',MaximumPerMinute)
| null |
1,279 |
#
# This file is part of LiteDRAM.
#
# Copyright (c) 2021 Antmicro <www.antmicro.com>
# SPDX-License-Identifier: BSD-2-Clause
from migen import *
class S7Common(Module):
def idelaye2(self, *, din, dout, init=0, rst=None, inc=None, clk="sys2x"):
assert not ((rst is None) ^ (inc is None))
fixed = rst is None
params = dict(
p_SIGNAL_PATTERN = "DATA",
p_DELAY_SRC = "IDATAIN",
p_CINVCTRL_SEL = "FALSE",
p_HIGH_PERFORMANCE_MODE = "TRUE",
p_REFCLK_FREQUENCY = self.iodelay_clk_freq/1e6,
p_PIPE_SEL = "FALSE",
p_IDELAY_VALUE = init,
p_IDELAY_TYPE = "FIXED",
i_IDATAIN = din,
o_DATAOUT = dout,
)
if not fixed:
params.update(dict(
p_IDELAY_TYPE = "VARIABLE",
i_C = ClockSignal(clk), # must be same as in ODELAYE2
i_LD = rst,
i_CE = inc,
i_LDPIPEEN = 0,
i_INC = 1,
))
self.specials += Instance("IDELAYE2", **params)
def odelaye2(self, *, din, dout, init=0, rst=None, inc=None, clk="sys2x"): # Not available for Artix7
assert not ((rst is None) ^ (inc is None))
fixed = rst is None
params = dict(
p_SIGNAL_PATTERN = "DATA",
p_DELAY_SRC = "ODATAIN",
p_CINVCTRL_SEL = "FALSE",
p_HIGH_PERFORMANCE_MODE = "TRUE",
p_REFCLK_FREQUENCY = self.iodelay_clk_freq/1e6,
p_PIPE_SEL = "FALSE",
p_ODELAY_VALUE = init,
p_ODELAY_TYPE = "FIXED",
i_ODATAIN = din,
o_DATAOUT = dout,
)
if not fixed:
params.update(dict(
p_ODELAY_TYPE = "VARIABLE",
i_C = ClockSignal(clk), # must be same as CLKDIV in OSERDESE2
i_LD = rst,
i_CE = inc,
i_LDPIPEEN = 0,
i_INC = 1,
))
self.specials += Instance("ODELAYE2", **params)
def oserdese2_ddr(self, *, din, clk, dout=None, dout_fb=None, tin=None, tout=None, clkdiv="sys2x"):
data_width = len(din)
assert data_width == 8, (data_width, din)
assert not ((tin is None) ^ (tout is None)), "When using tristate specify both `tin` and `tout`"
assert not ((dout is None) and (dout_fb is None)), "Output to OQ (-> IOB) and/or to OFB (-> ISERDESE2/ODELAYE2)"
dout = Signal() if dout is None else dout
dout_fb = Signal() if dout_fb is None else dout_fb
params = dict(
p_SERDES_MODE = "MASTER",
p_DATA_WIDTH = data_width,
p_TRISTATE_WIDTH = 1,
p_DATA_RATE_OQ = "DDR",
p_DATA_RATE_TQ = "BUF",
i_RST = ResetSignal() | self._rst.storage,
i_CLK = ClockSignal(clk),
i_CLKDIV = ClockSignal(clkdiv),
o_OQ = dout,
o_OFB = dout_fb,
i_OCE = 1,
)
for i in range(data_width):
params[f"i_D{i+1}"] = din[i]
if tin is not None:
# with DATA_RATE_TQ=BUF tristate is asynchronous, so it should be delayed by OSERDESE2 latency
params.update(dict(i_TCE=1, i_T1=tin, o_TQ=tout))
self.specials += Instance("OSERDESE2", **params)
def oserdese2_sdr(self, **kwargs):
# Use 8:1 OSERDESE2 DDR instead of 4:1 OSERDESE2 SDR to have the same latency
din = kwargs["din"]
data_width = len(din)
assert data_width in [1, 2, 4]
ratio = 8 // data_width
din_ddr = Signal(8)
kwargs["din"] = din_ddr
self.comb += din_ddr.eq(Cat(*[Replicate(bit, ratio) for bit in din]))
self.oserdese2_ddr(**kwargs)
def METHOD_NAME(self, *, din, dout, clk, clkdiv="sys2x"):
data_width = len(dout)
assert data_width == 8, (data_width, dout)
params = dict(
p_SERDES_MODE = "MASTER",
p_INTERFACE_TYPE = "NETWORKING",
p_DATA_WIDTH = data_width,
p_DATA_RATE = "DDR",
p_NUM_CE = 1,
p_IOBDELAY = "IFD",
i_RST = ResetSignal() | self._rst.storage,
i_CLK = ClockSignal(clk),
i_CLKB = ~ClockSignal(clk),
i_CLKDIV = ClockSignal(clkdiv),
i_BITSLIP = 0,
i_CE1 = 1,
i_DDLY = din,
)
for i in range(data_width):
# invert order
params[f"o_Q{i+1}"] = dout[(data_width - 1) - i]
self.specials += Instance("ISERDESE2", **params)
def iserdese2_sdr(self, **kwargs):
dout = kwargs["dout"]
data_width = len(dout)
assert data_width in [1, 2, 4]
ratio = 8 // data_width
dout_ddr = Signal(8)
kwargs["dout"] = dout_ddr
self.comb += dout.eq(Cat(*[dout_ddr[bit] for bit in range(0, 8, ratio)]))
self.METHOD_NAME(**kwargs)
def obufds(self, *, din, dout, dout_b):
self.specials += Instance("OBUFDS",
i_I = din,
o_O = dout,
o_OB = dout_b,
)
def iobufds(self, *, din, dout, dinout, dinout_b, tin):
self.specials += Instance("IOBUFDS",
i_T = tin,
i_I = din,
o_O = dout,
io_IO = dinout,
io_IOB = dinout_b,
)
def iobuf(self, *, din, dout, dinout, tin):
self.specials += Instance("IOBUF",
i_T = tin,
i_I = din,
o_O = dout,
io_IO = dinout,
)
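# A minimal sketch (assumption: a LiteDRAM-style PHY subclass provides the
# `iodelay_clk_freq` attribute read by the delay helpers above; the signal
# names below are made up for illustration). It routes one DQ output
# through ODELAYE2 in VARIABLE mode, i.e. with `rst`/`inc` supplied.
class ExamplePhy(S7Common):
    def __init__(self):
        self.iodelay_clk_freq = 200e6  # converted to MHz for REFCLK_FREQUENCY above
        dq_o = Signal()
        dq_delayed = Signal()
        delay_rst = Signal()
        delay_inc = Signal()
        # Passing rst/inc (instead of leaving both None) selects VARIABLE mode
        self.odelaye2(din=dq_o, dout=dq_delayed, rst=delay_rst, inc=delay_inc)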
| null |
1,280 |
from typing import Any, Dict, List, Sequence, Union
PY3: Any
str_type = str
GRAPH_ATTRIBUTES: Any
EDGE_ATTRIBUTES: Any
NODE_ATTRIBUTES: Any
CLUSTER_ATTRIBUTES: Any
DEFAULT_PROGRAMS: Any
def is_windows() -> bool: ...
def is_anaconda() -> bool: ...
def get_executable_extension() -> str: ...
def graph_from_dot_data(s: str) -> List["Dot"]: ...
class Common:
def set_parent_graph(self, parent_graph: "Graph") -> None: ...
def get_parent_graph(self) -> "Graph": ...
def set(self, name: str, value: str) -> None: ...
def get(self, name: str) -> str: ...
def get_attributes(self) -> Dict[str, str]: ...
def set_sequence(self, seq: str) -> None: ...
def get_sequence(self) -> str: ...
def create_attribute_methods(self, obj_attributes: List[str]) -> None: ...
class Error(Exception):
value: Any
def __init__(self, value: str) -> None: ...
class InvocationException(Exception):
value: Any
def __init__(self, value: str) -> None: ...
class Node(Common):
obj_dict: Any
def __init__(self, name: str = ..., obj_dict: Any | None = ..., **attrs: str) -> None: ...
def set_name(self, node_name: str) -> None: ...
def get_name(self) -> str: ...
def get_port(self) -> str: ...
def add_style(self, style: str) -> None: ...
def to_string(self) -> str: ...
class Edge(Common):
obj_dict: Any
def __init__(
self,
src: str = ...,
dst: str = ...,
obj_dict: Any | None = ...,
**attrs: Dict[str, str],
) -> None: ...
def get_source(self) -> str: ...
def get_destination(self) -> str: ...
def __hash__(self) -> int: ...
def __eq__(self, edge: Any) -> bool: ...
def parse_node_ref(self, node_str: str) -> str: ...
def to_string(self) -> str: ...
class Graph(Common):
obj_dict: Any
def __init__(
self,
graph_name: str = ...,
obj_dict: Any | None = ...,
graph_type: str = ...,
strict: bool = ...,
suppress_disconnected: bool = ...,
simplify: bool = ...,
**attrs: Dict[str, str],
) -> None: ...
def get_graph_type(self) -> str: ...
def get_top_graph_type(self) -> str: ...
def set_graph_defaults(self, **attrs: Dict[str, str]) -> None: ...
def get_graph_defaults(self, **attrs: Dict[str, str]) -> Dict[str, str]: ...
def set_node_defaults(self, **attrs: Dict[str, str]) -> None: ...
def get_node_defaults(self, **attrs: Dict[str, str]) -> Dict[str, str]: ...
def set_edge_defaults(self, **attrs: Dict[str, str]) -> None: ...
def get_edge_defaults(self, **attrs: Dict[str, str]) -> Dict[str, str]: ...
def set_simplify(self, simplify: bool) -> None: ...
def get_simplify(self) -> bool: ...
def set_type(self, graph_type: str) -> None: ...
def get_type(self) -> str: ...
def set_name(self, graph_name: str) -> None: ...
def get_name(self) -> str: ...
def set_strict(self, val: bool) -> None: ...
def get_strict(self, val: Any) -> bool: ...
def set_suppress_disconnected(self, val: bool) -> None: ...
def METHOD_NAME(self, val: Any) -> None: ...
def get_next_sequence_number(self) -> int: ...
def add_node(self, graph_node: Node) -> None: ...
def del_node(self, name: Union[str, Node], index: int | None = ...) -> bool: ...
def get_node(self, name: str) -> Node: ...
def get_nodes(self) -> List[Node]: ...
def get_node_list(self) -> List[Node]: ...
def add_edge(self, graph_edge: Edge) -> None: ...
def del_edge(
self,
src_or_list: Union[Sequence[Node], Node],
dst: str | int | None = ...,
index: int | None = ...,
) -> bool: ...
def get_edge(
self, src_or_list: Union[Sequence[Node], Node], dst: Any | None = ...
) -> List[Edge]: ...
def get_edges(self) -> List[Edge]: ...
def get_edge_list(self) -> List[Edge]: ...
def add_subgraph(self, sgraph: Union["Subgraph", "Cluster"]) -> None: ...
def get_subgraph(self, name: str) -> List["Subgraph"]: ...
def get_subgraphs(self) -> List["Subgraph"]: ...
def get_subgraph_list(self) -> List["Subgraph"]: ...
def set_parent_graph(self, parent_graph: "Graph") -> None: ...
def to_string(self) -> str: ...
class Subgraph(Graph):
def __init__(
self,
graph_name: str = ...,
obj_dict: Any | Dict[str, str] = ...,
suppress_disconnected: bool = ...,
simplify: bool = ...,
**attrs: Dict[str, str],
) -> None: ...
class Cluster(Graph):
def __init__(
self,
graph_name: str = ...,
obj_dict: Any | Dict[str, str] = ...,
suppress_disconnected: bool = ...,
simplify: bool = ...,
**attrs: Dict[str, str],
) -> None: ...
class Dot(Graph):
shape_files: Any
formats: Any
prog: str
def __init__(self, *argsl: Any, **argsd: Dict[str, str]): ...
def set_shape_files(self, file_paths: Union[str, Sequence[str]]) -> None: ...
def set_prog(self, prog: str) -> None: ...
def write(
self,
path: str,
prog: Any | str = ...,
format: str = ...,
encoding: Any | str = ...,
) -> bool: ...
def create(
self, prog: Any | str = ..., format: str = ..., encoding: Any | str = ...
) -> bytes: ...
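# A short usage sketch of the API these stubs describe, using the real
# `pydot` package (the graph content is made up for illustration).
import pydot
_graph = pydot.Dot("example", graph_type="digraph")
_graph.add_node(pydot.Node("a", shape="box"))
_graph.add_node(pydot.Node("b"))
_graph.add_edge(pydot.Edge("a", "b", label="a to b"))
_dot_source = _graph.to_string()  # DOT source text, no Graphviz needed
# _graph.write("example.png", prog="dot", format="png")  # needs Graphviz installed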
| null |
1,281 |
import asyncio
import json
import unittest
from typing import Awaitable, Optional
from unittest.mock import AsyncMock, patch
from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant
from hummingbot.connector.utilities.oms_connector.oms_connector_web_utils import build_api_factory
from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest, WSResponse
class OMSConnectorWebUtilsTest(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.ev_loop = asyncio.get_event_loop()
cls.ws_url = "ws://someUrl"
def setUp(self) -> None:
super().setUp()
self.api_factory = build_api_factory()
self.ws_assistant = self.async_run_with_timeout(self.api_factory.get_ws_assistant())
self.rest_assistant = self.async_run_with_timeout(self.api_factory.get_rest_assistant())
self.mocking_assistant = NetworkMockingAssistant()
def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1):
ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout))
return ret
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def METHOD_NAME(self, ws_connect_mock: AsyncMock):
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
endpoint = "someEndpoint"
msg_data = {"someAttribute": "someValue"}
msg_payload = {
"m": 0,
"n": endpoint,
"o": msg_data,
}
msg = WSJSONRequest(payload=msg_payload)
self.async_run_with_timeout(self.ws_assistant.connect(ws_url=self.ws_url))
self.async_run_with_timeout(self.ws_assistant.send(msg))
sent_messages = self.mocking_assistant.json_messages_sent_through_websocket(
websocket_mock=ws_connect_mock.return_value
)
self.assertEqual(1, len(sent_messages))
sent_msg = sent_messages[0]
expected_payload = {
"m": 0,
"i": 2,
"n": endpoint,
"o": json.dumps(msg_data),
}
self.assertEqual(expected_payload, sent_msg)
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_ws_post_processor(self, ws_connect_mock: AsyncMock):
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
msg_mock = {
"m": 1,
"i": 2,
"n": "someEndpoint",
"o": json.dumps({"someAttribute": "someValue"}),
}
self.mocking_assistant.add_websocket_aiohttp_message(
websocket_mock=ws_connect_mock.return_value,
message=json.dumps(msg_mock),
)
self.async_run_with_timeout(self.ws_assistant.connect(ws_url=self.ws_url))
resp: Optional[WSResponse] = self.async_run_with_timeout(self.ws_assistant.receive())
self.assertIsNotNone(resp)
data = resp.data
expected_data = {
"m": 1,
"i": 2,
"n": "someEndpoint",
"o": {"someAttribute": "someValue"},
}
self.assertEqual(expected_data, data)
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_ws_increments_msg_counter(self, ws_connect_mock: AsyncMock):
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
endpoint = "someEndpoint"
msg_data = {"someAttribute": "someValue"}
msg_payload = {
"m": 0,
"n": endpoint,
"o": msg_data,
}
msg = WSJSONRequest(payload=msg_payload)
self.async_run_with_timeout(self.ws_assistant.connect(ws_url=self.ws_url))
self.async_run_with_timeout(self.ws_assistant.send(msg))
self.async_run_with_timeout(self.ws_assistant.send(msg))
sent_messages = self.mocking_assistant.json_messages_sent_through_websocket(
websocket_mock=ws_connect_mock.return_value
)
self.assertEqual(2, len(sent_messages))
first_sent_msg = sent_messages[0]
first_expected_payload = {
"m": 0,
"i": 2,
"n": endpoint,
"o": json.dumps(msg_data),
}
self.assertEqual(first_expected_payload, first_sent_msg)
second_sent_msg = sent_messages[1]
second_expected_payload = {
"m": 0,
"i": 4,
"n": endpoint,
"o": json.dumps(msg_data),
}
self.assertEqual(second_expected_payload, second_sent_msg)
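# A standalone sketch (not part of the connector) of the frame shape these
# tests assert the pre-processing layer produces: message type "m", an even
# sequence number "i" advancing by 2 per sent message, endpoint name "n",
# and a JSON-string-encoded payload "o".
import itertools
def _build_oms_frames(endpoint, payloads):
    seq = itertools.count(start=2, step=2)  # 2, 4, 6, ... matching the tests
    for payload in payloads:
        yield {"m": 0, "i": next(seq), "n": endpoint, "o": json.dumps(payload)}
_frames = list(_build_oms_frames("someEndpoint", [{"someAttribute": "someValue"}] * 2))
assert _frames[0]["i"] == 2 and _frames[1]["i"] == 4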
| null |
1,282 |
# Copyright 2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.functions as F
def conv_block(x, in_planes, out_planes, test=True):
residual = x
out1 = PF.batch_normalization(x, batch_stat=not test, name='bn1')
out1 = F.relu(out1, True)
out1 = PF.convolution(out1, int(out_planes / 2), kernel=(3, 3),
stride=(1, 1), pad=(1, 1), name='conv1', with_bias=False)
out2 = PF.batch_normalization(out1, batch_stat=not test, name='bn2')
out2 = F.relu(out2, True)
out2 = PF.convolution(out2, int(out_planes / 4), kernel=(3, 3),
stride=(1, 1), pad=(1, 1), name='conv2', with_bias=False)
out3 = PF.batch_normalization(out2, batch_stat=not test, name='bn3')
out3 = F.relu(out3, True)
out3 = PF.convolution(out3, int(out_planes / 4), kernel=(3, 3),
stride=(1, 1), pad=(1, 1), name='conv3', with_bias=False)
out3 = F.concatenate(out1, out2, out3, axis=1)
if in_planes != out_planes:
residual = PF.batch_normalization(
residual, batch_stat=not test, name='downsample/0')
residual = F.relu(residual, True)
residual = PF.convolution(residual, out_planes, kernel=(
1, 1), stride=(1, 1), name='downsample/2', with_bias=False)
out3 += residual
return out3
def METHOD_NAME(inp, depth, num_features):
# Upper branch
up1 = inp
with nn.parameter_scope('b1_' + str(depth)):
up1 = conv_block(up1, num_features, num_features)
# Lower branch
low1 = F.average_pooling(inp, (2, 2), stride=(2, 2))
with nn.parameter_scope('b2_' + str(depth)):
low1 = conv_block(low1, num_features, num_features)
if depth > 1:
low2 = METHOD_NAME(low1, depth - 1, num_features)
else:
low2 = low1
with nn.parameter_scope('b2_plus_' + str(depth)):
low2 = conv_block(low2, num_features, num_features)
low3 = low2
with nn.parameter_scope('b3_' + str(depth)):
low3 = conv_block(low3, num_features, num_features)
up2 = F.interpolate(low3, scale=(2, 2), mode='nearest')
return up1 + up2
def fan(x, num_modules=1, test=True):
x = PF.convolution(x, 64, kernel=(7, 7), stride=(2, 2),
pad=(3, 3), name='conv1')
x = PF.batch_normalization(x, batch_stat=not test, name='bn1')
x = F.relu(x, True)
with nn.parameter_scope('conv2'):
x = conv_block(x, 64, 128)
x = F.average_pooling(x, (2, 2), stride=(2, 2))
with nn.parameter_scope('conv3'):
x = conv_block(x, 128, 128)
with nn.parameter_scope('conv4'):
x = conv_block(x, 128, 256)
previous = x
outputs = []
for i in range(num_modules):
with nn.parameter_scope('m' + str(i)):
hg = METHOD_NAME(previous, 4, 256)
ll = hg
with nn.parameter_scope('top_m_' + str(i)):
ll = conv_block(ll, 256, 256)
ll = PF.convolution(ll, 256, kernel=(1, 1), stride=(
1, 1), pad=(0, 0), name='conv_last' + str(i))
ll = PF.batch_normalization(
ll, batch_stat=not test, name='bn_end' + str(i))
ll = F.relu(ll, True)
# Predict heatmaps
tmp_out = PF.convolution(ll, 68, kernel=(
1, 1), stride=(1, 1), pad=(0, 0), name='l' + str(i))
outputs.append(tmp_out)
if i < num_modules - 1:
ll = PF.convolution(ll, 256, kernel=(1, 1), stride=(
1, 1), pad=(0, 0), name='bl' + str(i))
tmp_out_ = PF.convolution(tmp_out, 256, kernel=(
1, 1), stride=(1, 1), pad=(0, 0), name='al' + str(i))
previous = previous + ll + tmp_out_
return outputs
def bottleneck(x, planes, stride=1, downsample=None, test=True):
residual = x
out = PF.convolution(x, planes, kernel=(
1, 1), name='conv1', with_bias=False)
out = PF.batch_normalization(out, batch_stat=not test, name='bn1')
out = F.relu(out, True)
out = PF.convolution(out, planes, kernel=(3, 3), stride=(
stride, stride), pad=(1, 1), name='conv2', with_bias=False)
out = PF.batch_normalization(out, batch_stat=not test, name='bn2')
out = F.relu(out, True)
out = PF.convolution(out, planes * 4, kernel=(1, 1),
name='conv3', with_bias=False)
out = PF.batch_normalization(out, batch_stat=not test, name='bn3')
if downsample is not None:
residual = downsample
out += residual
out = F.relu(out, True)
return out
def create_layer(x, inplanes, planes, blocks, stride=1, test=True):
downsample = None
    layer_scope = {64: 'layer1', 128: 'layer2', 256: 'layer3', 512: 'layer4'}
    with nn.parameter_scope(layer_scope[planes]):
with nn.parameter_scope('0'):
if stride != 1 or inplanes != planes * 4:
downsample = PF.convolution(x, planes * 4, kernel=(1, 1), stride=(stride, stride),
name='downsample/0',
with_bias=False)
downsample = PF.batch_normalization(
downsample, batch_stat=not test, name='downsample/1')
layers = bottleneck(x, planes, stride, downsample)
for i in range(1, blocks):
with nn.parameter_scope(str(i)):
layers = bottleneck(layers, planes)
return layers
def resnet_depth(x, layers=[3, 8, 36, 3], num_classes=68, test=True):
inplanes = 64
x = PF.convolution(x, 64, kernel=(7, 7), stride=(2, 2),
pad=(3, 3), name='conv1', with_bias=False)
x = PF.batch_normalization(x, batch_stat=not test, name='bn1')
x = F.relu(x, True)
x = F.max_pooling(x, kernel=(3, 3), stride=(2, 2), pad=(1, 1))
x = create_layer(x, inplanes, 64, layers[0])
x = create_layer(x, inplanes, 128, layers[1], stride=2)
x = create_layer(x, inplanes, 256, layers[2], stride=2)
x = create_layer(x, inplanes, 512, layers[3], stride=2)
x = F.average_pooling(x, kernel=(7, 7))
x = F.reshape(x, (x.shape[0], -1))
x = PF.affine(x, num_classes, name='fc')
return x
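# A minimal inference sketch for `fan` (assumptions: parameters are freshly
# initialized at graph construction unless `nn.load_parameters` is called
# first, and the usual 256x256 FAN input resolution is used).
if __name__ == "__main__":
    import numpy as np
    x = nn.Variable((1, 3, 256, 256))
    heatmaps = fan(x, num_modules=1, test=True)  # one output per stacked module
    x.d = np.random.rand(*x.shape).astype(np.float32)
    heatmaps[-1].forward()
    print(heatmaps[-1].d.shape)  # (1, 68, 64, 64): one 64x64 heatmap per landmark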
| null |
1,283 |
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from typing import TYPE_CHECKING, Any, Mapping, Optional
import aiohttp
import ujson
if TYPE_CHECKING:
from hummingbot.core.web_assistant.connections.ws_connection import WSConnection
class RESTMethod(Enum):
GET = "GET"
POST = "POST"
PUT = "PUT"
DELETE = "DELETE"
def __str__(self):
obj_str = repr(self)
return obj_str
def __repr__(self):
return self.value
@dataclass
class RESTRequest:
method: RESTMethod
url: Optional[str] = None
endpoint_url: Optional[str] = None
params: Optional[Mapping[str, str]] = None
data: Any = None
headers: Optional[Mapping[str, str]] = None
is_auth_required: bool = False
throttler_limit_id: Optional[str] = None
@dataclass
class EndpointRESTRequest(RESTRequest, ABC):
"""This request class enable the user to provide either a complete URL or simply an endpoint.
The endpoint is concatenated with the return value of `base_url`. It can handle endpoints supplied both as
`"endpoint"` and `"/endpoint"`. It also provides the necessary checks to ensure a valid URL can be constructed.
"""
endpoint: Optional[str] = None
def __post_init__(self):
self.METHOD_NAME()
self._ensure_params()
self._ensure_data()
@property
@abstractmethod
def base_url(self) -> str:
...
def METHOD_NAME(self):
if self.url is None and self.endpoint is None:
raise ValueError("Either the full url or the endpoint must be specified.")
if self.url is None:
if self.endpoint.startswith("/"):
self.url = f"{self.base_url}{self.endpoint}"
else:
self.url = f"{self.base_url}/{self.endpoint}"
def _ensure_params(self):
if self.method == RESTMethod.POST:
if self.params is not None:
raise ValueError("POST requests should not use `params`. Use `data` instead.")
def _ensure_data(self):
if self.method == RESTMethod.POST:
if self.data is not None:
self.data = ujson.dumps(self.data)
elif self.data is not None:
raise ValueError(
"The `data` field should be used only for POST requests. Use `params` instead."
)
@dataclass(init=False)
class RESTResponse:
url: str
method: RESTMethod
status: int
headers: Optional[Mapping[str, str]]
def __init__(self, aiohttp_response: aiohttp.ClientResponse):
self._aiohttp_response = aiohttp_response
@property
def url(self) -> str:
url_str = str(self._aiohttp_response.url)
return url_str
@property
def method(self) -> RESTMethod:
method_ = RESTMethod[self._aiohttp_response.method.upper()]
return method_
@property
def status(self) -> int:
status_ = int(self._aiohttp_response.status)
return status_
@property
def headers(self) -> Optional[Mapping[str, str]]:
headers_ = self._aiohttp_response.headers
return headers_
async def json(self) -> Any:
json_ = await self._aiohttp_response.json()
return json_
async def text(self) -> str:
text_ = await self._aiohttp_response.text()
return text_
class WSRequest(ABC):
@abstractmethod
async def send_with_connection(self, connection: 'WSConnection'):
return NotImplemented
@dataclass
class WSJSONRequest(WSRequest):
payload: Mapping[str, Any]
throttler_limit_id: Optional[str] = None
is_auth_required: bool = False
async def send_with_connection(self, connection: 'WSConnection'):
await connection._send_json(payload=self.payload)
@dataclass
class WSPlainTextRequest(WSRequest):
payload: str
throttler_limit_id: Optional[str] = None
is_auth_required: bool = False
async def send_with_connection(self, connection: 'WSConnection'):
await connection._send_plain_text(payload=self.payload)
@dataclass
class WSResponse:
data: Any
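# A minimal sketch of how `EndpointRESTRequest` is meant to be subclassed:
# the concrete class pins down `base_url`, and `__post_init__` then
# assembles `url` from `endpoint`. The base URL below is made up.
@dataclass
class _ExampleRESTRequest(EndpointRESTRequest):
    @property
    def base_url(self) -> str:
        return "https://api.example.com"
_example = _ExampleRESTRequest(method=RESTMethod.GET, endpoint="/time")
assert _example.url == "https://api.example.com/time"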
| null |
1,284 |
# Copyright (c) 2022 The Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from typing import Optional, List
from ...utils.requires import requires
from .abstract_core import AbstractCore
from ...isas import ISA
from ...runtime import get_runtime_isa
from ...utils.override import overrides
from m5.objects import (
BaseMMU,
Port,
BaseCPU,
Process,
PcCountTracker,
PcCountTrackerManager,
)
from m5.params import PcCountPair
class BaseCPUCore(AbstractCore):
"""
    A stdlib AbstractCore subclass that wraps a BaseCPU SimObject type.
"""
def __init__(self, core: BaseCPU, isa: Optional[ISA] = None):
super().__init__()
# There is some annoying redundancy here. The BaseCPU type already
# defines the ISA, so here we are defining it twice. However, there
# currently isn't a good way to get the ISA from the BaseCPU Type.
if isa:
requires(isa_required=isa)
self._isa = isa
else:
self._isa = get_runtime_isa()
self.core = core
self.core.createThreads()
def METHOD_NAME(self) -> BaseCPU:
return self.core
@overrides(AbstractCore)
def requires_send_evicts(self) -> bool:
if self.get_isa() in (ISA.ARM, ISA.X86):
# * The x86 `mwait`` instruction is built on top of coherence,
# therefore evictions must be sent from cache to the CPU Core.
#
# * The local exclusive monitor in ARM systems requires the sending
# of evictions from cache to the CPU Core.
return True
# The O3 model must keep the LSQ coherent with the caches.
# The code below will check to see if the current base CPU is of the O3
# type for the current ISA target (a bit ugly but it works).
try:
from m5.objects import BaseO3CPU
return isinstance(self.METHOD_NAME(), BaseO3CPU)
except ImportError:
# If, for whatever reason, the BaseO3CPU is not importable, then
            # the current core cannot be an O3 CPU. We therefore return
# False.
return False
@overrides(AbstractCore)
def is_kvm_core(self) -> bool:
try:
from m5.objects import BaseKvmCPU
return isinstance(self.core, BaseKvmCPU)
except ImportError:
# If importing BaseKvmCPU throws an exception then it's because
# it's not compiled into the binary. If this is the case then this
# can't be a KVM core.
return False
def get_isa(self) -> ISA:
return self._isa
@overrides(AbstractCore)
def connect_icache(self, port: Port) -> None:
self.core.icache_port = port
@overrides(AbstractCore)
def connect_dcache(self, port: Port) -> None:
self.core.dcache_port = port
@overrides(AbstractCore)
def connect_walker_ports(self, port1: Port, port2: Port) -> None:
if self.get_isa() == ISA.ARM:
# Unlike X86 and RISCV MMU, the ARM MMU has two L1 TLB walker ports
# named `walker` and `stage2_walker` for both data and instruction.
# The gem5 standard library currently supports one TLB walker port
# per cache level. Therefore, we are explicitly setting the walker
# ports and not setting the stage2_walker ports for ARM systems.
self.core.mmu.itb_walker.port = port1
self.core.mmu.dtb_walker.port = port2
else:
self.core.mmu.connectWalkerPorts(port1, port2)
@overrides(AbstractCore)
def set_workload(self, process: Process) -> None:
self.core.workload = process
@overrides(AbstractCore)
def set_switched_out(self, value: bool) -> None:
self.core.switched_out = value
@overrides(AbstractCore)
def connect_interrupt(
self,
interrupt_requestor: Optional[Port] = None,
        interrupt_response: Optional[Port] = None,
) -> None:
# TODO: This model assumes that we will only create an interrupt
# controller as we require it. Not sure how true this is in all cases.
self.core.createInterruptController()
if self.get_isa().value == ISA.X86.value:
            if interrupt_requestor is not None:
                self.core.interrupts[0].pio = interrupt_requestor
                self.core.interrupts[0].int_responder = interrupt_requestor
            if interrupt_response is not None:
                self.core.interrupts[0].int_requestor = interrupt_response
@overrides(AbstractCore)
def get_mmu(self) -> BaseMMU:
return self.core.mmu
@overrides(AbstractCore)
def _set_simpoint(
self, inst_starts: List[int], board_initialized: bool
) -> None:
if board_initialized:
self.core.scheduleSimpointsInstStop(sorted(set(inst_starts)))
else:
self.core.simpoint_start_insts = sorted(set(inst_starts))
@overrides(AbstractCore)
def _set_inst_stop_any_thread(
self, inst: int, board_initialized: bool
) -> None:
if board_initialized:
self.core.scheduleInstStopAnyThread(inst)
else:
self.core.max_insts_any_thread = inst
@overrides(AbstractCore)
def add_pc_tracker_probe(
self, target_pair: List[PcCountPair], manager: PcCountTrackerManager
) -> None:
pair_tracker = PcCountTracker()
pair_tracker.targets = target_pair
pair_tracker.core = self.core
pair_tracker.ptmanager = manager
self.core.probeListener = pair_tracker
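# An illustrative, self-contained re-implementation of the idea behind the
# `overrides` decorator used throughout this class (the real helper lives in
# gem5's `utils.override` module; this sketch only mirrors the contract:
# fail fast if the decorated method does not override a base-class member).
def overrides_sketch(base_class):
    def decorator(method):
        assert method.__name__ in dir(base_class), (
            f"{method.__name__} does not override a member of {base_class.__name__}"
        )
        return method
    return decorator
class _Base:
    def is_kvm_core(self):
        return False
class _Derived(_Base):
    @overrides_sketch(_Base)
    def is_kvm_core(self):
        return True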
| null |
1,285 |
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
#
# Project:
# glideinWMS
#
# Description:
# unit test for glideinwms/lib/xmlParse.py
#
# Author:
# Dennis Box [email protected]
#
import unittest
import xml
import xmlrunner
# TODO: should OrderedDict be removed? It is the one from the stdlib, but the tests are testing the XML conversion as well
# should be directly: from collections import OrderedDict
from glideinwms.lib.xmlParse import (
domel2dict,
getXMLAttributes,
getXMLElements,
is_singular_of,
OrderedDict,
xmlfile2dict,
xmlstring2dict,
)
xmlstr = """
<test date="1/2/07">
<params what="xx">
<param name="x" value="12"/>
<param name="y" value="88"/>
</params>
<files>
<file absname="/tmp/abc.txt"/>
<file absname="/tmp/w.log" mod="-rw-r--r--"/>
</files>
<temperature F="100" C="40"/>
</test>
"""
xmlstr_dict_repr = """{'date': '1/2/07', 'params': {'what': 'xx', 'x': {'value': '12'}, 'y': {'value': '88'}}, 'files': [{'absname': '/tmp/abc.txt'}, {'absname': '/tmp/w.log', 'mod': '-rw-r--r--'}], 'temperature': {'F': '100', 'C': '40'}}"""
ordered_dict_values_repr = """['1/2/07', {'what': 'xx', 'x': {'value': '12'}, 'y': {'value': '88'}}, [{'absname': '/tmp/abc.txt'}, {'absname': '/tmp/w.log', 'mod': '-rw-r--r--'}], {'F': '100', 'C': '40'}]"""
ordered_dict_items_repr = """[('date', '1/2/07'), ('params', {'what': 'xx', 'x': {'value': '12'}, 'y': {'value': '88'}}), ('files', [{'absname': '/tmp/abc.txt'}, {'absname': '/tmp/w.log', 'mod': '-rw-r--r--'}]), ('temperature', {'F': '100', 'C': '40'})]"""
expected = ""
class TestOrderedDict(unittest.TestCase):
def test___delitem__(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
od2 = ordered_dict.copy()
ordered_dict.__delitem__("temperature")
self.assertTrue("temperature" in od2)
self.assertFalse("temperature" in ordered_dict)
def test___init__(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
self.assertNotEqual(ordered_dict, None)
def test___setitem__(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
ordered_dict.__setitem__("foo", "bar")
self.assertTrue("foo" in ordered_dict)
def test_clear(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
ordered_dict.clear()
self.assertEqual("{}", ordered_dict.__repr__())
def test_copy(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
od2 = ordered_dict.copy()
self.assertEqual(od2.__repr__(), ordered_dict.__repr__())
def test_items(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
self.assertEqual(ordered_dict_items_repr, list(ordered_dict.items()).__repr__())
def test_keys(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
self.assertEqual("['date', 'params', 'files', 'temperature']", list(ordered_dict.keys()).__repr__())
def test_popitem(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
self.assertEqual("('temperature', {'F': '100', 'C': '40'})", ordered_dict.popitem().__repr__())
def test_setdefault(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
failobj = "not here"
ordered_dict.setdefault("Dave", failobj)
self.assertEqual(ordered_dict.get("Dave"), failobj)
ordered_dict["Dave"] = "here"
self.assertNotEqual(ordered_dict.get("Dave"), failobj)
self.assertEqual(ordered_dict.get("Dave"), "here")
def test_update(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
upd = {"foo": "bar"}
ordered_dict.update(upd)
self.assertTrue("foo" in ordered_dict)
def test_values(self):
dict1 = xmlstring2dict(xmlstr, use_ord_dict=False, always_singular_list=[])
ordered_dict = OrderedDict(dict1)
self.assertEqual(ordered_dict_values_repr, list(ordered_dict.values()).__repr__())
class TestXmlfile2dict(unittest.TestCase):
def test_xmlfile2dict(self):
infile = "fixtures/test_lib_parse.xml"
dict1 = xmlfile2dict(infile, use_ord_dict=True, always_singular_list=[])
self.assertEqual(xmlstr_dict_repr, dict1.__repr__())
class TestXmlstring2dict(unittest.TestCase):
def test_xmlstring2dict(self):
self.assertEqual(
xmlstr_dict_repr, xmlstring2dict(xmlstr, use_ord_dict=True, always_singular_list=[]).__repr__()
)
#
# These are all private
#
class TestGetXMLElements(unittest.TestCase):
def test_get_xml_elements(self):
doc = xml.dom.minidom.parseString("<xml><foo></foo></xml>")
self.assertTrue("DOM Element: foo" in getXMLElements(doc.documentElement).__repr__())
class TestGetXMLAttributes(unittest.TestCase):
def test_get_xml_attributes(self):
doc = xml.dom.minidom.parseString("""<xml><foo><param name="x" value="12"/></foo></xml>""")
self.assertEqual("{}", getXMLAttributes(doc.documentElement, use_ord_dict=True).__repr__())
class TestIsSingularOf(unittest.TestCase):
def test_is_singular_of(self):
self.assertEqual(True, is_singular_of(mysin="dog", myplu="dogs", always_singular_list=[]))
self.assertEqual(True, is_singular_of(mysin="goose", myplu="geese", always_singular_list=["goose", "dog"]))
self.assertEqual(False, is_singular_of(mysin="moose", myplu="meese", always_singular_list=["goose", "dog"]))
self.assertEqual(True, is_singular_of(mysin="miss", myplu="misses", always_singular_list=["goose", "dog"]))
self.assertEqual(True, is_singular_of(mysin="army", myplu="armies", always_singular_list=["goose", "dog"]))
class TestDomel2dict(unittest.TestCase):
def METHOD_NAME(self):
doc = xml.dom.minidom.parseString(xmlstr)
self.assertTrue(isinstance(domel2dict(doc.documentElement), dict))
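# A self-contained illustration (stdlib only, independent of glideinwms) of
# the mapping the tests above assert: element attributes become dict
# entries, and repeated child elements such as <file> collapse into a list,
# as seen in `xmlstr_dict_repr`.
import xml.dom.minidom
_doc = xml.dom.minidom.parseString("<files><file absname='/tmp/a'/><file absname='/tmp/b'/></files>")
_files = [dict(e.attributes.items()) for e in _doc.getElementsByTagName("file")]
assert _files == [{"absname": "/tmp/a"}, {"absname": "/tmp/b"}]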
if __name__ == "__main__":
unittest.main(testRunner=xmlrunner.XMLTestRunner(output="unittests-reports"))
| null |
1,286 |
import random
from AnyQt.QtCore import Qt
from AnyQt.QtWidgets import QSizePolicy
from Orange.data import Table
from Orange.preprocess import Randomize
from Orange.widgets.settings import Setting
from Orange.widgets.utils.widgetpreview import WidgetPreview
from Orange.widgets.widget import OWWidget, Input, Output
from Orange.widgets import gui
class OWRandomize(OWWidget):
name = "Randomize"
description = "Randomize features, class and/or metas in data table."
category = "Transform"
icon = "icons/Random.svg"
priority = 2200
keywords = "randomize, random"
class Inputs:
data = Input("Data", Table)
class Outputs:
data = Output("Data", Table)
resizing_enabled = False
want_main_area = False
shuffle_class = Setting(True)
shuffle_attrs = Setting(False)
shuffle_metas = Setting(False)
scope_prop = Setting(80)
random_seed = Setting(False)
auto_apply = Setting(True)
def __init__(self):
super().__init__()
self.data = None
# GUI
box = gui.hBox(self.controlArea, "Shuffled columns")
box.layout().setSpacing(20)
self.class_check = gui.checkBox(
box, self, "shuffle_class", "Classes",
callback=self._shuffle_check_changed)
self.attrs_check = gui.checkBox(
box, self, "shuffle_attrs", "Features",
callback=self._shuffle_check_changed)
self.metas_check = gui.checkBox(
box, self, "shuffle_metas", "Metas",
callback=self._shuffle_check_changed)
box = gui.vBox(self.controlArea, "Shuffled rows")
hbox = gui.hBox(box)
gui.widgetLabel(hbox, "None")
self.scope_slider = gui.hSlider(
hbox, self, "scope_prop", minValue=0, maxValue=100, width=140,
createLabel=False, callback=self._scope_slider_changed)
gui.widgetLabel(hbox, "All")
self.scope_label = gui.widgetLabel(
box, "", alignment=Qt.AlignCenter,
sizePolicy=(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed))
self._set_scope_label()
self.replicable_check = gui.checkBox(
box, self, "random_seed", "Replicable shuffling",
callback=self._shuffle_check_changed)
gui.auto_apply(self.buttonsArea, self)
@property
def parts(self):
return [self.shuffle_class, self.shuffle_attrs, self.shuffle_metas]
def _shuffle_check_changed(self):
self.commit.deferred()
def _scope_slider_changed(self):
self._set_scope_label()
self.commit.deferred()
def _set_scope_label(self):
self.scope_label.setText("{}%".format(self.scope_prop))
@Inputs.data
def set_data(self, data):
self.data = data
self.commit.now()
@gui.deferred
def commit(self):
data = None
if self.data:
rand_seed = self.random_seed or None
size = int(len(self.data) * self.scope_prop / 100)
random.seed(rand_seed)
indices = sorted(random.sample(range(len(self.data)), size))
type_ = sum(t for t, p in zip(Randomize.Type, self.parts) if p)
randomized = Randomize(type_, rand_seed)(self.data[indices])
data = self.data.copy()
with data.unlocked():
for i, instance in zip(indices, randomized):
data[i] = instance
self.Outputs.data.send(data)
def METHOD_NAME(self):
labels = ["classes", "features", "metas"]
include = [label for label, i in zip(labels, self.parts) if i]
text = "none" if not include else \
" and ".join(filter(None, (", ".join(include[:-1]), include[-1])))
self.report_items(
"Settings",
[("Shuffled columns", text),
("Proportion of shuffled rows", "{}%".format(self.scope_prop)),
("Replicable", "yes" if self.random_seed else "no")])
if __name__ == "__main__": # pragma: no cover
WidgetPreview(OWRandomize).run(Table("iris"))
| null |
1,287 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkmse.endpoint import endpoint_data
import json
class CreateOrUpdateSwimmingLaneRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'mse', '2019-05-31', 'CreateOrUpdateSwimmingLane','mse')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_EntryRule(self): # String
return self.get_query_params().get('EntryRule')
def set_EntryRule(self, EntryRule): # String
self.add_query_param('EntryRule', EntryRule)
def get_Enable(self): # Boolean
return self.get_query_params().get('Enable')
def set_Enable(self, Enable): # Boolean
self.add_query_param('Enable', Enable)
def get_Id(self): # Long
return self.get_query_params().get('Id')
def set_Id(self, Id): # Long
self.add_query_param('Id', Id)
def get_Tag(self): # String
return self.get_query_params().get('Tag')
def set_Tag(self, Tag): # String
self.add_query_param('Tag', Tag)
def get_EntryRuless(self): # RepeatList
return self.get_body_params().get('EntryRules')
def set_EntryRuless(self, EntryRules): # RepeatList
for depth1 in range(len(EntryRules)):
if EntryRules[depth1].get('RestItems') is not None:
for depth2 in range(len(EntryRules[depth1].get('RestItems'))):
if EntryRules[depth1].get('RestItems')[depth2].get('Datum') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Datum', EntryRules[depth1].get('RestItems')[depth2].get('Datum'))
if EntryRules[depth1].get('RestItems')[depth2].get('Divisor') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Divisor', EntryRules[depth1].get('RestItems')[depth2].get('Divisor'))
if EntryRules[depth1].get('RestItems')[depth2].get('Rate') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Rate', EntryRules[depth1].get('RestItems')[depth2].get('Rate'))
if EntryRules[depth1].get('RestItems')[depth2].get('NameList') is not None:
for depth3 in range(len(EntryRules[depth1].get('RestItems')[depth2].get('NameList'))):
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.NameList.' + str(depth3 + 1), EntryRules[depth1].get('RestItems')[depth2].get('NameList')[depth3])
if EntryRules[depth1].get('RestItems')[depth2].get('Name') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Name', EntryRules[depth1].get('RestItems')[depth2].get('Name'))
if EntryRules[depth1].get('RestItems')[depth2].get('Type') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Type', EntryRules[depth1].get('RestItems')[depth2].get('Type'))
if EntryRules[depth1].get('RestItems')[depth2].get('Cond') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Cond', EntryRules[depth1].get('RestItems')[depth2].get('Cond'))
if EntryRules[depth1].get('RestItems')[depth2].get('Remainder') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Remainder', EntryRules[depth1].get('RestItems')[depth2].get('Remainder'))
if EntryRules[depth1].get('RestItems')[depth2].get('Value') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Value', EntryRules[depth1].get('RestItems')[depth2].get('Value'))
if EntryRules[depth1].get('RestItems')[depth2].get('Operator') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.RestItems.' + str(depth2 + 1) + '.Operator', EntryRules[depth1].get('RestItems')[depth2].get('Operator'))
if EntryRules[depth1].get('Condition') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.Condition', EntryRules[depth1].get('Condition'))
if EntryRules[depth1].get('Paths') is not None:
for depth2 in range(len(EntryRules[depth1].get('Paths'))):
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.Paths.' + str(depth2 + 1), EntryRules[depth1].get('Paths')[depth2])
if EntryRules[depth1].get('Priority') is not None:
self.add_body_params('EntryRules.' + str(depth1 + 1) + '.Priority', EntryRules[depth1].get('Priority'))
def get_GroupId(self): # Long
return self.get_query_params().get('GroupId')
def set_GroupId(self, GroupId): # Long
self.add_query_param('GroupId', GroupId)
def get_EnableRules(self): # Boolean
return self.get_query_params().get('EnableRules')
def set_EnableRules(self, EnableRules): # Boolean
self.add_query_param('EnableRules', EnableRules)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def METHOD_NAME(self): # Struct
return self.get_query_params().get('GatewaySwimmingLaneRouteJson')
def set_GatewaySwimmingLaneRouteJson(self, GatewaySwimmingLaneRouteJson): # Struct
self.add_query_param("GatewaySwimmingLaneRouteJson", json.dumps(GatewaySwimmingLaneRouteJson))
def get_Namespace(self): # String
return self.get_query_params().get('Namespace')
def set_Namespace(self, Namespace): # String
self.add_query_param('Namespace', Namespace)
def get_AcceptLanguage(self): # String
return self.get_query_params().get('AcceptLanguage')
def set_AcceptLanguage(self, AcceptLanguage): # String
self.add_query_param('AcceptLanguage', AcceptLanguage)
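# A compact, generic sketch (not part of the SDK) of the parameter
# flattening hand-rolled in `set_EntryRuless` above: nested dicts and lists
# become dotted, 1-indexed keys such as 'EntryRules.1.RestItems.2.Name'.
def _flatten_sketch(prefix, value, out):
    if isinstance(value, list):
        for i, item in enumerate(value):
            _flatten_sketch(prefix + '.' + str(i + 1), item, out)
    elif isinstance(value, dict):
        for key, item in value.items():
            if item is not None:
                _flatten_sketch(prefix + '.' + key, item, out)
    else:
        out[prefix] = value
_params = {}
_flatten_sketch('EntryRules', [{'RestItems': [{'Name': 'x', 'Value': '1'}]}], _params)
assert _params == {'EntryRules.1.RestItems.1.Name': 'x', 'EntryRules.1.RestItems.1.Value': '1'}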
| null |
1,288 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class DescribeVSwitchesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'DescribeVSwitches','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_IsDefault(self): # Boolean
return self.get_query_params().get('IsDefault')
def set_IsDefault(self, IsDefault): # Boolean
self.add_query_param('IsDefault', IsDefault)
def get_RouteTableId(self): # String
return self.get_query_params().get('RouteTableId')
def set_RouteTableId(self, RouteTableId): # String
self.add_query_param('RouteTableId', RouteTableId)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def METHOD_NAME(self): # Long
return self.get_query_params().get('VSwitchOwnerId')
def set_VSwitchOwnerId(self, VSwitchOwnerId): # Long
self.add_query_param('VSwitchOwnerId', VSwitchOwnerId)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_VSwitchName(self): # String
return self.get_query_params().get('VSwitchName')
def set_VSwitchName(self, VSwitchName): # String
self.add_query_param('VSwitchName', VSwitchName)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
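# A hedged usage sketch: sending this request through the core SDK client.
# The region and credentials are placeholders to fill in; `AcsClient` and
# `do_action_with_exception` are the standard aliyunsdkcore entry points.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient
    client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    request = DescribeVSwitchesRequest()
    request.set_VpcId('<vpc-id>')
    request.set_PageSize(50)
    response = client.do_action_with_exception(request)
    print(response)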
| null |
1,289 |
#!/usr/bin/env python
#
# Azure Linux extension
#
# Linux Azure Diagnostic Extension (Current version is specified in manifest.xml)
# Copyright (c) Microsoft Corporation
# All rights reserved.
# MIT License
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import datetime
import urllib2
import time
import traceback
def get_imds_data(node, json=True):
"""
    Query the IMDS endpoint for instance metadata and return the response as a JSON string.
    :param str node: Instance metadata node we are querying about
    :param bool json: Indicates whether to query for JSON output or not
    :return: Queried IMDS result as a string
:rtype: str
"""
if not node:
return None
separator = '' if node[0] == '/' else '/'
imds_url = 'http://169.254.169.254{0}{1}{2}'.format(
separator, node, '?format=json&api-version=latest_internal' if json else '')
imds_headers = {'Metadata': 'True'}
req = urllib2.Request(url=imds_url, headers=imds_headers)
resp = urllib2.urlopen(req)
data = resp.read()
data_str = data.decode('utf-8')
return data_str
class ImdsLogger:
"""
Periodically probes IMDS endpoint and log the result as WALA events.
"""
def __init__(self, ext_name, ext_ver, ext_op_type, ext_event_logger, ext_logger=None,
imds_data_getter=get_imds_data, logging_interval_in_minutes=60):
"""
Constructor
:param str ext_name: Extension name (e.g., hutil.get_name())
:param str ext_ver: Extension version (e.g., hutil.get_version())
:param str ext_op_type: Extension operation type (e.g., HeartBeat)
:param ext_event_logger: Extension event logger (e.g., waagent.AddExtensionEvent)
:param ext_logger: Extension message logger (e.g., hutil.log)
:param imds_data_getter: IMDS data getter function (e.g., get_imds_data)
:param int logging_interval_in_minutes: Logging interval in minutes
"""
self._ext_name = ext_name
self._ext_ver = ext_ver
self._ext_op_type = ext_op_type
self._ext_logger = ext_logger # E.g., hutil.log
self._ext_event_logger = ext_event_logger # E.g., waagent.AddExtensionEvent
self._last_log_time = datetime.datetime.fromordinal(1)
self._imds_data_getter = imds_data_getter
self._logging_interval = datetime.timedelta(minutes=logging_interval_in_minutes)
def _ext_log_if_enabled(self, msg):
"""
Log an extension message if logger is specified.
:param str msg: Message to log
:return: None
"""
if self._ext_logger:
self._ext_logger(msg)
def METHOD_NAME(self, log_as_ext_event=False):
"""
        Query and log IMDS data if it's the right time to do so.
:param bool log_as_ext_event: Indicates whether to log IMDS data as a waagent/extension event.
:return: None
"""
now = datetime.datetime.now()
if now < self._last_log_time + self._logging_interval:
return
try:
imds_data = self._imds_data_getter('/metadata/instance/')
except Exception as e:
self._ext_log_if_enabled('Exception occurred while getting IMDS data: {0}\n'
'stacktrace: {1}'.format(e, traceback.format_exc()))
imds_data = '{0}'.format(e)
msg = 'IMDS instance data = {0}'.format(imds_data)
if log_as_ext_event:
self._ext_event_logger(name=self._ext_name,
op=self._ext_op_type,
isSuccess=True,
version=self._ext_ver,
message=msg)
self._ext_log_if_enabled(msg)
self._last_log_time = now
if __name__ == '__main__':
def fake_get_imds_data(node, json=True):
result = 'fake_get_imds_data(node="{0}", json="{1}")'.format(node, json)
print result
return result
def default_ext_logger(msg):
print 'default_ext_logger(msg="{0}")'.format(msg)
def default_ext_event_logger(*args, **kwargs):
print 'default_ext_event_logger(*args, **kwargs)'
print 'args:'
for arg in args:
print arg
print 'kwargs:'
for k in kwargs:
print('"{0}"="{1}"'.format(k, kwargs[k]))
imds_logger = ImdsLogger('Microsoft.OSTCExtensions.LinuxDiagnostic', '2.3.9021', 'Heartbeat',
ext_logger=default_ext_logger, ext_event_logger=default_ext_event_logger,
imds_data_getter=fake_get_imds_data, logging_interval_in_minutes=1)
start_time = datetime.datetime.now()
done = False
while not done:
now = datetime.datetime.now()
print 'Test loop iteration starting at {0}'.format(now)
imds_logger.METHOD_NAME()
if now >= start_time + datetime.timedelta(minutes=2):
done = True
else:
print 'Sleeping 10 seconds'
time.sleep(10)
| null |
1,290 |
# Copyright (c) ZenML GmbH 2023. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""ZenML logging handler."""
import os
import re
import sys
import time
from contextvars import ContextVar
from types import TracebackType
from typing import Any, Callable, List, Optional, Type
from uuid import uuid4
from zenml.artifact_stores import BaseArtifactStore
from zenml.io import fileio
from zenml.logger import get_logger
from zenml.logging import (
STEP_LOGS_STORAGE_INTERVAL_SECONDS,
STEP_LOGS_STORAGE_MAX_MESSAGES,
)
# Get the logger
logger = get_logger(__name__)
redirected: ContextVar[bool] = ContextVar("redirected", default=False)
def remove_ansi_escape_codes(text: str) -> str:
"""Auxiliary function to remove ANSI escape codes from a given string.
Args:
text: the input string
Returns:
the version of the input string where the escape codes are removed.
"""
ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
return ansi_escape.sub("", text)
def METHOD_NAME(
artifact_store: "BaseArtifactStore",
step_name: str,
log_key: Optional[str] = None,
) -> str:
"""Generates and prepares a URI for the log file for a step.
Args:
artifact_store: The artifact store on which the artifact will be stored.
step_name: Name of the step.
log_key: The unique identification key of the log file.
Returns:
The URI of the logs file.
"""
if log_key is None:
log_key = str(uuid4())
logs_base_uri = os.path.join(
artifact_store.path,
step_name,
"logs",
)
# Create the dir
if not fileio.exists(logs_base_uri):
fileio.makedirs(logs_base_uri)
# Delete the file if it already exists
logs_uri = os.path.join(logs_base_uri, f"{log_key}.log")
if fileio.exists(logs_uri):
logger.warning(
f"Logs file {logs_uri} already exists! Removing old log file..."
)
fileio.remove(logs_uri)
return logs_uri
class StepLogsStorage:
"""Helper class which buffers and stores logs to a given URI."""
def __init__(
self,
logs_uri: str,
max_messages: int = STEP_LOGS_STORAGE_MAX_MESSAGES,
time_interval: int = STEP_LOGS_STORAGE_INTERVAL_SECONDS,
) -> None:
"""Initialization.
Args:
logs_uri: the target URI to store the logs.
max_messages: the maximum number of messages to save in the buffer.
time_interval: the amount of seconds before the buffer gets saved
automatically.
"""
# Parameters
self.logs_uri = logs_uri
self.max_messages = max_messages
self.time_interval = time_interval
# State
self.buffer: List[str] = []
self.disabled_buffer: List[str] = []
self.last_save_time = time.time()
self.disabled = False
def write(self, text: str) -> None:
"""Main write method.
Args:
text: the incoming string.
"""
if text == "\n":
return
if not self.disabled:
self.buffer.append(text)
if (
len(self.buffer) >= self.max_messages
or time.time() - self.last_save_time >= self.time_interval
):
self.save_to_file()
def save_to_file(self) -> None:
"""Method to save the buffer to the given URI."""
if not self.disabled:
try:
self.disabled = True
if self.buffer:
with fileio.open(self.logs_uri, "a") as file:
for message in self.buffer:
file.write(
remove_ansi_escape_codes(message) + "\n"
)
except (OSError, IOError) as e:
# This exception can be raised if there are issues with the
# underlying system calls, such as reaching the maximum number
# of open files, permission issues, file corruption, or other
# I/O errors.
logger.error(f"Error while trying to write logs: {e}")
finally:
self.buffer = []
self.last_save_time = time.time()
self.disabled = False
class StepLogsStorageContext:
"""Context manager which patches stdout and stderr during step execution."""
def __init__(self, logs_uri: str) -> None:
"""Initializes and prepares a storage object.
Args:
logs_uri: the URI of the logs file.
"""
self.storage = StepLogsStorage(logs_uri=logs_uri)
def __enter__(self) -> "StepLogsStorageContext":
"""Enter condition of the context manager.
Wraps the `write` method of both stderr and stdout, so each incoming
message gets stored in the step logs storage.
Returns:
self
"""
self.stdout_write = getattr(sys.stdout, "write")
self.stdout_flush = getattr(sys.stdout, "flush")
self.stderr_write = getattr(sys.stderr, "write")
self.stderr_flush = getattr(sys.stderr, "flush")
setattr(sys.stdout, "write", self._wrap_write(self.stdout_write))
setattr(sys.stdout, "flush", self._wrap_flush(self.stdout_flush))
setattr(sys.stderr, "write", self._wrap_write(self.stdout_write))
setattr(sys.stderr, "flush", self._wrap_flush(self.stdout_flush))
redirected.set(True)
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
"""Exit condition of the context manager.
Args:
exc_type: The class of the exception
exc_val: The instance of the exception
exc_tb: The traceback of the exception
Restores the `write` method of both stderr and stdout.
"""
self.storage.save_to_file()
setattr(sys.stdout, "write", self.stdout_write)
setattr(sys.stdout, "flush", self.stdout_flush)
setattr(sys.stderr, "write", self.stderr_write)
setattr(sys.stderr, "flush", self.stderr_flush)
redirected.set(False)
def _wrap_write(self, method: Callable[..., Any]) -> Callable[..., Any]:
"""Wrapper function that utilizes the storage object to store logs.
Args:
method: the original write method
Returns:
the wrapped write method.
"""
def wrapped_write(*args: Any, **kwargs: Any) -> Any:
output = method(*args, **kwargs)
if args:
self.storage.write(args[0])
return output
return wrapped_write
def _wrap_flush(self, method: Callable[..., Any]) -> Callable[..., Any]:
"""Wrapper function that flushes the buffer of the storage object.
Args:
method: the original flush method
Returns:
the wrapped flush method.
"""
def wrapped_flush(*args: Any, **kwargs: Any) -> Any:
output = method(*args, **kwargs)
self.storage.save_to_file()
return output
return wrapped_flush
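# Hedged usage sketch (editorial addition, not part of the original module):
# wraps a block so everything printed to stdout/stderr is buffered by
# StepLogsStorage and flushed to the log file, with ANSI codes stripped.
# The local path below is a placeholder; in practice the URI comes from the
# log-URI helper above and points into the artifact store.
if __name__ == "__main__":
    demo_logs_uri = os.path.join(os.getcwd(), "demo_step.log")  # placeholder
    with StepLogsStorageContext(logs_uri=demo_logs_uri):
        print("captured into the step log file")
        sys.stderr.write("stderr output is captured as well\n")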
| null |
1,291 |
# Copyright 2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
This module contains some common routines used by other samples.
'''
# Python 2/3 compatibility
from __future__ import print_function
import sys
PY3 = sys.version_info[0] == 3
if PY3:
from functools import reduce
import numpy as np
import cv2 as cv
# built-in modules
import os
import itertools as it
from contextlib import contextmanager
image_extensions = ['.bmp', '.jpg', '.jpeg',
'.png', '.tif', '.tiff', '.pbm', '.pgm', '.ppm']
class Bunch(object):
def __init__(self, **kw):
self.__dict__.update(kw)
def __str__(self):
return str(self.__dict__)
def splitfn(fn):
path, fn = os.path.split(fn)
name, ext = os.path.splitext(fn)
return path, name, ext
def anorm2(a):
return (a*a).sum(-1)
def anorm(a):
return np.sqrt(anorm2(a))
def homotrans(H, x, y):
xs = H[0, 0]*x + H[0, 1]*y + H[0, 2]
ys = H[1, 0]*x + H[1, 1]*y + H[1, 2]
s = H[2, 0]*x + H[2, 1]*y + H[2, 2]
return xs/s, ys/s
def to_rect(a):
a = np.ravel(a)
if len(a) == 2:
a = (0, 0, a[0], a[1])
return np.array(a, np.float64).reshape(2, 2)
def METHOD_NAME(src, dst):
src, dst = to_rect(src), to_rect(dst)
cx, cy = (dst[1] - dst[0]) / (src[1] - src[0])
tx, ty = dst[0] - src[0] * (cx, cy)
M = np.float64([[cx, 0, tx],
[0, cy, ty],
[0, 0, 1]])
return M
def lookat(eye, target, up=(0, 0, 1)):
fwd = np.asarray(target, np.float64) - eye
fwd /= anorm(fwd)
right = np.cross(fwd, up)
right /= anorm(right)
down = np.cross(fwd, right)
R = np.float64([right, down, fwd])
tvec = -np.dot(R, eye)
return R, tvec
def mtx2rvec(R):
w, u, vt = cv.SVDecomp(R - np.eye(3))
p = vt[0] + u[:, 0]*w[0] # same as np.dot(R, vt[0])
c = np.dot(vt[0], p)
s = np.dot(vt[1], p)
axis = np.cross(vt[0], vt[1])
return axis * np.arctan2(s, c)
def draw_str(dst, target, s):
x, y = target
cv.putText(dst, s, (x+1, y+1), cv.FONT_HERSHEY_PLAIN, 1.0,
(0, 0, 0), thickness=2, lineType=cv.LINE_AA)
cv.putText(dst, s, (x, y), cv.FONT_HERSHEY_PLAIN,
1.0, (255, 255, 255), lineType=cv.LINE_AA)
class Sketcher:
def __init__(self, windowname, dests, colors_func):
self.prev_pt = None
self.windowname = windowname
self.dests = dests
self.colors_func = colors_func
self.dirty = False
self.show()
cv.setMouseCallback(self.windowname, self.on_mouse)
def show(self):
cv.imshow(self.windowname, self.dests[0])
def on_mouse(self, event, x, y, flags, param):
pt = (x, y)
if event == cv.EVENT_LBUTTONDOWN:
self.prev_pt = pt
elif event == cv.EVENT_LBUTTONUP:
self.prev_pt = None
if self.prev_pt and flags & cv.EVENT_FLAG_LBUTTON:
for dst, color in zip(self.dests, self.colors_func()):
cv.line(dst, self.prev_pt, pt, color, 5)
self.dirty = True
self.prev_pt = pt
self.show()
# palette data from matplotlib/_cm.py
_jet_data = {'red': ((0., 0, 0), (0.35, 0, 0), (0.66, 1, 1), (0.89, 1, 1),
(1, 0.5, 0.5)),
'green': ((0., 0, 0), (0.125, 0, 0), (0.375, 1, 1), (0.64, 1, 1),
(0.91, 0, 0), (1, 0, 0)),
'blue': ((0., 0.5, 0.5), (0.11, 1, 1), (0.34, 1, 1), (0.65, 0, 0),
(1, 0, 0))}
cmap_data = {'jet': _jet_data}
def make_cmap(name, n=256):
data = cmap_data[name]
xs = np.linspace(0.0, 1.0, n)
channels = []
eps = 1e-6
for ch_name in ['blue', 'green', 'red']:
ch_data = data[ch_name]
xp, yp = [], []
for x, y1, y2 in ch_data:
xp += [x, x+eps]
yp += [y1, y2]
ch = np.interp(xs, xp, yp)
channels.append(ch)
return np.uint8(np.array(channels).T*255)
def nothing(*arg, **kw):
pass
def clock():
return cv.getTickCount() / cv.getTickFrequency()
@contextmanager
def Timer(msg):
print(msg, '...',)
start = clock()
try:
yield
finally:
print("%.2f ms" % ((clock()-start)*1000))
class StatValue:
def __init__(self, smooth_coef=0.5):
self.value = None
self.smooth_coef = smooth_coef
def update(self, v):
if self.value is None:
self.value = v
else:
c = self.smooth_coef
self.value = c * self.value + (1.0-c) * v
class RectSelector:
def __init__(self, win, callback):
self.win = win
self.callback = callback
cv.setMouseCallback(win, self.onmouse)
self.drag_start = None
self.drag_rect = None
def onmouse(self, event, x, y, flags, param):
x, y = np.int16([x, y]) # BUG
if event == cv.EVENT_LBUTTONDOWN:
self.drag_start = (x, y)
return
if self.drag_start:
if flags & cv.EVENT_FLAG_LBUTTON:
xo, yo = self.drag_start
x0, y0 = np.minimum([xo, yo], [x, y])
x1, y1 = np.maximum([xo, yo], [x, y])
self.drag_rect = None
if x1-x0 > 0 and y1-y0 > 0:
self.drag_rect = (x0, y0, x1, y1)
else:
rect = self.drag_rect
self.drag_start = None
self.drag_rect = None
if rect:
self.callback(rect)
def draw(self, vis):
if not self.drag_rect:
return False
x0, y0, x1, y1 = self.drag_rect
cv.rectangle(vis, (x0, y0), (x1, y1), (0, 255, 0), 2)
return True
@property
def dragging(self):
return self.drag_rect is not None
def grouper(n, iterable, fillvalue=None):
'''grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx'''
args = [iter(iterable)] * n
if PY3:
output = it.zip_longest(fillvalue=fillvalue, *args)
else:
output = it.izip_longest(fillvalue=fillvalue, *args)
return output
def mosaic(w, imgs):
'''Make a grid from images.
w -- number of grid columns
imgs -- images (must have same size and format)
'''
imgs = iter(imgs)
if PY3:
img0 = next(imgs)
else:
img0 = imgs.next()
pad = np.zeros_like(img0)
imgs = it.chain([img0], imgs)
rows = grouper(w, imgs, pad)
return np.vstack(map(np.hstack, rows))
def getsize(img):
h, w = img.shape[:2]
return w, h
def mdot(*args):
return reduce(np.dot, args)
def draw_keypoints(vis, keypoints, color=(0, 255, 255)):
for kp in keypoints:
x, y = kp.pt
cv.circle(vis, (int(x), int(y)), 2, color)
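# Hedged usage sketch (editorial addition, not part of the OpenCV sample):
# composes four synthetic tiles into a 2x2 grid with mosaic() and stamps a
# label with draw_str(); kept headless (no imshow) so it runs anywhere.
if __name__ == "__main__":
    tiles = [np.full((60, 80, 3), shade, np.uint8) for shade in (40, 90, 140, 190)]
    grid = mosaic(2, tiles)            # w=2 grid columns -> 2x2 layout
    draw_str(grid, (5, 15), 'demo')    # shadowed text, as the samples do
    print(getsize(grid))               # (160, 120) -> width, height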
| null |
1,292 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreateTrafficMirrorFilterRulesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreateTrafficMirrorFilterRules','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_IngressRuless(self): # RepeatList
return self.get_query_params().get('IngressRules')
def set_IngressRuless(self, IngressRules): # RepeatList
for depth1 in range(len(IngressRules)):
if IngressRules[depth1].get('Action') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.Action', IngressRules[depth1].get('Action'))
if IngressRules[depth1].get('SourceCidrBlock') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.SourceCidrBlock', IngressRules[depth1].get('SourceCidrBlock'))
if IngressRules[depth1].get('Protocol') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.Protocol', IngressRules[depth1].get('Protocol'))
if IngressRules[depth1].get('DestinationPortRange') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.DestinationPortRange', IngressRules[depth1].get('DestinationPortRange'))
if IngressRules[depth1].get('Priority') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.Priority', IngressRules[depth1].get('Priority'))
if IngressRules[depth1].get('DestinationCidrBlock') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.DestinationCidrBlock', IngressRules[depth1].get('DestinationCidrBlock'))
if IngressRules[depth1].get('SourcePortRange') is not None:
self.add_query_param('IngressRules.' + str(depth1 + 1) + '.SourcePortRange', IngressRules[depth1].get('SourcePortRange'))
def get_EgressRuless(self): # RepeatList
return self.get_query_params().get('EgressRules')
def set_EgressRuless(self, EgressRules): # RepeatList
for depth1 in range(len(EgressRules)):
if EgressRules[depth1].get('Action') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.Action', EgressRules[depth1].get('Action'))
if EgressRules[depth1].get('SourceCidrBlock') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.SourceCidrBlock', EgressRules[depth1].get('SourceCidrBlock'))
if EgressRules[depth1].get('Protocol') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.Protocol', EgressRules[depth1].get('Protocol'))
if EgressRules[depth1].get('DestinationPortRange') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.DestinationPortRange', EgressRules[depth1].get('DestinationPortRange'))
if EgressRules[depth1].get('Priority') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.Priority', EgressRules[depth1].get('Priority'))
if EgressRules[depth1].get('DestinationCidrBlock') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.DestinationCidrBlock', EgressRules[depth1].get('DestinationCidrBlock'))
if EgressRules[depth1].get('SourcePortRange') is not None:
self.add_query_param('EgressRules.' + str(depth1 + 1) + '.SourcePortRange', EgressRules[depth1].get('SourcePortRange'))
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def METHOD_NAME(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_TrafficMirrorFilterId(self): # String
return self.get_query_params().get('TrafficMirrorFilterId')
def set_TrafficMirrorFilterId(self, TrafficMirrorFilterId): # String
self.add_query_param('TrafficMirrorFilterId', TrafficMirrorFilterId)
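# Hedged usage sketch (editorial addition): shows how the RepeatList setters
# above flatten nested dicts into indexed query parameters such as
# 'IngressRules.1.Action'. The filter id is a placeholder; actually sending
# the request needs a configured aliyunsdkcore AcsClient.
if __name__ == "__main__":
    req = CreateTrafficMirrorFilterRulesRequest()
    req.set_TrafficMirrorFilterId('tmf-hypothetical-id')  # placeholder id
    req.set_IngressRuless([{
        'Action': 'accept',
        'Protocol': 'TCP',
        'Priority': 1,
        'SourceCidrBlock': '10.0.0.0/24',
        'DestinationCidrBlock': '10.0.1.0/24',
    }])
    print(req.get_query_params())  # flattened 'IngressRules.1.*' keys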
| null |
1,293 |
from functools import partial
from django.contrib.contenttypes.models import ContentType
from django.forms import IntegerField
from django.urls import reverse
from creme.creme_core.models import (
CustomField,
FakeContact,
FakeInvoice,
FakeOrganisation,
FieldsConfig,
)
from ..fake_forms import FakeContactQuickForm
from .base import CremeTestCase
class QuickFormTestCase(CremeTestCase):
@staticmethod
def quickform_data(count):
return {
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-TOTAL_FORMS': str(count),
'csrfmiddlewaretoken': '08b8b225c536b4fd25d16f5ed8be3839',
}
def quickform_data_append_contact(self, data, id, first_name='', last_name='',
email='', organisation='', phone=''):
return data.update({
f'form-{id}-email': email,
f'form-{id}-last_name': last_name,
f'form-{id}-first_name': first_name,
f'form-{id}-organisation': organisation,
f'form-{id}-phone': phone,
f'form-{id}-user': self.user.id,
})
@staticmethod
def _build_quickform_url(model):
return reverse(
'creme_core__quick_form',
args=(ContentType.objects.get_for_model(model).pk,),
)
def test_create_contact(self):
user = self.login_as_root_and_get()
count = FakeContact.objects.count()
url = self._build_quickform_url(FakeContact)
response = self.assertGET200(url)
self.assertTemplateUsed(response, 'creme_core/generics/form/add-popup.html')
self.assertEqual('SAMEORIGIN', response.get('X-Frame-Options')) # allows iframe
context = response.context
self.assertEqual(FakeContact.creation_label, context.get('title'))
self.assertEqual(FakeContact.save_label, context.get('submit_label'))
# ---
last_name = 'Kirika'
email = '[email protected]'
response = self.assertPOST200(
url,
data={
'last_name': last_name,
'email': email,
'user': user.id,
},
)
self.assertEqual('SAMEORIGIN', response.get('X-Frame-Options')) # allows iframe
self.assertEqual(count + 1, FakeContact.objects.count())
contact = self.get_object_or_fail(FakeContact, last_name=last_name, email=email)
self.assertDictEqual(
{
'added': [[contact.id, str(contact)]],
'value': contact.id,
},
response.json(),
)
def test_get_not_superuser(self):
"Not super-user."
self.login_as_standard(creatable_models=[FakeOrganisation])
self.assertGET200(self._build_quickform_url(FakeOrganisation))
def test_get_missing_permission(self):
"Creation permission needed."
self.login_as_standard(creatable_models=[FakeContact])
self.assertGET403(self._build_quickform_url(FakeOrganisation))
def test_get_model_without_quickform(self):
"Model without form."
self.login_as_root()
self.assertGET404(self._build_quickform_url(FakeInvoice))
def test_customfields(self):
user = self.login_as_root_and_get()
create_cf = partial(
CustomField.objects.create,
content_type=ContentType.objects.get_for_model(FakeContact),
)
cf1 = create_cf(field_type=CustomField.STR, name='Dogtag')
cf2 = create_cf(field_type=CustomField.INT, name='Eva number', is_required=True)
url = self._build_quickform_url(FakeContact)
response = self.assertGET200(url)
with self.assertNoException():
fields = response.context['form'].fields
self.assertNotIn(f'custom_field-{cf1.id}', fields)
cf2_f = fields.get(f'custom_field-{cf2.id}')
self.assertIsInstance(cf2_f, IntegerField)
self.assertTrue(cf2_f.required)
# ---
first_name = 'Rei'
last_name = 'Ayanami'
response = self.client.post(
url,
data={
'last_name': last_name,
'first_name': first_name,
'user': user.id,
f'custom_field-{cf2.id}': 3,
},
)
self.assertNoFormError(response)
contact = self.get_object_or_fail(
FakeContact, last_name=last_name, first_name=first_name,
)
with self.assertNoException():
cf_value = cf2.value_class.objects.get(
custom_field=cf2, entity=contact,
).value
self.assertEqual(3, cf_value)
def METHOD_NAME(self):
user = self.login_as_root_and_get()
not_required = 'url_site'
required = 'mobile'
vanilla_fields = FakeContactQuickForm(user=user).fields
self.assertNotIn(not_required, vanilla_fields)
self.assertNotIn(required, vanilla_fields)
FieldsConfig.objects.create(
content_type=FakeContact,
descriptions=[(required, {FieldsConfig.REQUIRED: True})],
)
url = self._build_quickform_url(FakeContact)
response = self.assertGET200(url)
with self.assertNoException():
fields = response.context['form'].fields
self.assertNotIn(not_required, fields)
self.assertIn(required, fields)
# TODO: test_quickform_with_custom_sync_data
| null |
1,294 |
# **************************************************************************
# *
# * Authors: Amaya Jimenez Moreno ([email protected])
# *
# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia , CSIC
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address '[email protected]'
# *
# **************************************************************************
from pyworkflow.tests import BaseTest, DataSet, setupTestProject
from pwem.protocols import (ProtImportVolumes, ProtImportParticles, exists)
from xmipp3.protocols import XmippMetaProtGoldenHighRes
class TestGoldenHighres(BaseTest):
@classmethod
def runImportVolume(cls, pattern, samplingRate):
""" Run an Import volumes protocol. """
cls.protImport = cls.newProtocol(ProtImportVolumes,
filesPath=pattern,
samplingRate=samplingRate
)
cls.launchProtocol(cls.protImport)
return cls.protImport
@classmethod
def METHOD_NAME(cls):
""" Import Particles.
"""
args = {'importFrom': ProtImportParticles.IMPORT_FROM_SCIPION,
'sqliteFile': cls.particles,
'amplitudConstrast': 0.1,
'sphericalAberration': 2.0,
'voltage': 200,
'samplingRate': 0.99,
'haveDataBeenPhaseFlipped': True
}
        # IDs should be set increasing from 1 if '###' is not in the pattern
protImport = cls.newProtocol(ProtImportParticles, **args)
protImport.setObjLabel('import particles')
cls.launchProtocol(protImport)
return protImport
@classmethod
def setUpClass(cls):
setupTestProject(cls)
# Data
cls.dataset = DataSet.getDataSet('10010')
cls.initialVolume = cls.dataset.getFile('initialVolume')
cls.particles = cls.dataset.getFile('particles')
cls.protImportVol = cls.runImportVolume(cls.initialVolume, 4.95)
cls.protImportParts = cls.METHOD_NAME()
def test(self):
goldenHighres = self.newProtocol(XmippMetaProtGoldenHighRes,
inputParticles=self.protImportParts.outputParticles,
inputVolumes=self.protImportVol.outputVolume,
particleRadius=180,
symmetryGroup="i1",
discardParticles=True,
numberOfMpi=8)
self.launchProtocol(goldenHighres)
self.assertIsNotNone(goldenHighres.outputParticlesLocal1,
"There was a problem with Golden Highres")
fnResolution = goldenHighres._getExtraPath('fnFSCs.txt')
if not exists(fnResolution):
self.assertTrue(False, fnResolution + " does not exist")
else:
count = len(open(fnResolution).readlines())
count = count - 10
result = 'outputParticlesLocal%d' % (count)
o = getattr(goldenHighres, result, None)
locals()[result] = o
            self.assertIsNotNone(o, "Output: %s is None" % result)
| null |
1,295 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhitsdb.endpoint import endpoint_data
class UpgradeLindormInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hitsdb', '2020-06-15', 'UpgradeLindormInstance','hitsdb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_PhoenixCoreNum(self): # Integer
return self.get_query_params().get('PhoenixCoreNum')
def set_PhoenixCoreNum(self, PhoenixCoreNum): # Integer
self.add_query_param('PhoenixCoreNum', PhoenixCoreNum)
def get_PhoenixCoreSpec(self): # String
return self.get_query_params().get('PhoenixCoreSpec')
def set_PhoenixCoreSpec(self, PhoenixCoreSpec): # String
self.add_query_param('PhoenixCoreSpec', PhoenixCoreSpec)
def get_UpgradeType(self): # String
return self.get_query_params().get('UpgradeType')
def set_UpgradeType(self, UpgradeType): # String
self.add_query_param('UpgradeType', UpgradeType)
def get_TsdbSpec(self): # String
return self.get_query_params().get('TsdbSpec')
def set_TsdbSpec(self, TsdbSpec): # String
self.add_query_param('TsdbSpec', TsdbSpec)
def get_FilestoreSpec(self): # String
return self.get_query_params().get('FilestoreSpec')
def set_FilestoreSpec(self, FilestoreSpec): # String
self.add_query_param('FilestoreSpec', FilestoreSpec)
def get_LogSpec(self): # String
return self.get_query_params().get('LogSpec')
def set_LogSpec(self, LogSpec): # String
self.add_query_param('LogSpec', LogSpec)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_TsdbNum(self): # Integer
return self.get_query_params().get('TsdbNum')
def set_TsdbNum(self, TsdbNum): # Integer
self.add_query_param('TsdbNum', TsdbNum)
def get_LindormSpec(self): # String
return self.get_query_params().get('LindormSpec')
def set_LindormSpec(self, LindormSpec): # String
self.add_query_param('LindormSpec', LindormSpec)
def get_SolrNum(self): # Integer
return self.get_query_params().get('SolrNum')
def set_SolrNum(self, SolrNum): # Integer
self.add_query_param('SolrNum', SolrNum)
def get_ColdStorage(self): # Integer
return self.get_query_params().get('ColdStorage')
def set_ColdStorage(self, ColdStorage): # Integer
self.add_query_param('ColdStorage', ColdStorage)
def get_LogNum(self): # Integer
return self.get_query_params().get('LogNum')
def set_LogNum(self, LogNum): # Integer
self.add_query_param('LogNum', LogNum)
def METHOD_NAME(self): # String
return self.get_query_params().get('SolrSpec')
def set_SolrSpec(self, SolrSpec): # String
self.add_query_param('SolrSpec', SolrSpec)
def get_CoreSingleStorage(self): # Integer
return self.get_query_params().get('CoreSingleStorage')
def set_CoreSingleStorage(self, CoreSingleStorage): # Integer
self.add_query_param('CoreSingleStorage', CoreSingleStorage)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_FilestoreNum(self): # Integer
return self.get_query_params().get('FilestoreNum')
def set_FilestoreNum(self, FilestoreNum): # Integer
self.add_query_param('FilestoreNum', FilestoreNum)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LindormNum(self): # Integer
return self.get_query_params().get('LindormNum')
def set_LindormNum(self, LindormNum): # Integer
self.add_query_param('LindormNum', LindormNum)
def get_LtsCoreNum(self): # Integer
return self.get_query_params().get('LtsCoreNum')
def set_LtsCoreNum(self, LtsCoreNum): # Integer
self.add_query_param('LtsCoreNum', LtsCoreNum)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_LtsCoreSpec(self): # String
return self.get_query_params().get('LtsCoreSpec')
def set_LtsCoreSpec(self, LtsCoreSpec): # String
self.add_query_param('LtsCoreSpec', LtsCoreSpec)
def get_ClusterStorage(self): # Integer
return self.get_query_params().get('ClusterStorage')
def set_ClusterStorage(self, ClusterStorage): # Integer
self.add_query_param('ClusterStorage', ClusterStorage)
def get_LogSingleStorage(self): # Integer
return self.get_query_params().get('LogSingleStorage')
def set_LogSingleStorage(self, LogSingleStorage): # Integer
self.add_query_param('LogSingleStorage', LogSingleStorage)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
| null |
1,296 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class UpdateListenerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'UpdateListener','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_BackendPortss(self): # RepeatList
return self.get_query_params().get('BackendPorts')
def set_BackendPortss(self, BackendPorts): # RepeatList
for depth1 in range(len(BackendPorts)):
if BackendPorts[depth1].get('FromPort') is not None:
self.add_query_param('BackendPorts.' + str(depth1 + 1) + '.FromPort', BackendPorts[depth1].get('FromPort'))
if BackendPorts[depth1].get('ToPort') is not None:
self.add_query_param('BackendPorts.' + str(depth1 + 1) + '.ToPort', BackendPorts[depth1].get('ToPort'))
def get_ListenerId(self): # String
return self.get_query_params().get('ListenerId')
def set_ListenerId(self, ListenerId): # String
self.add_query_param('ListenerId', ListenerId)
def get_Protocol(self): # String
return self.get_query_params().get('Protocol')
def set_Protocol(self, Protocol): # String
self.add_query_param('Protocol', Protocol)
def get_XForwardedForConfig(self): # Struct
return self.get_query_params().get('XForwardedForConfig')
def set_XForwardedForConfig(self, XForwardedForConfig): # Struct
if XForwardedForConfig.get('XForwardedForGaIdEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForGaIdEnabled', XForwardedForConfig.get('XForwardedForGaIdEnabled'))
if XForwardedForConfig.get('XForwardedForProtoEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForProtoEnabled', XForwardedForConfig.get('XForwardedForProtoEnabled'))
if XForwardedForConfig.get('XForwardedForPortEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForPortEnabled', XForwardedForConfig.get('XForwardedForPortEnabled'))
if XForwardedForConfig.get('XRealIpEnabled') is not None:
self.add_query_param('XForwardedForConfig.XRealIpEnabled', XForwardedForConfig.get('XRealIpEnabled'))
if XForwardedForConfig.get('XForwardedForGaApEnabled') is not None:
self.add_query_param('XForwardedForConfig.XForwardedForGaApEnabled', XForwardedForConfig.get('XForwardedForGaApEnabled'))
def get_SecurityPolicyId(self): # String
return self.get_query_params().get('SecurityPolicyId')
def set_SecurityPolicyId(self, SecurityPolicyId): # String
self.add_query_param('SecurityPolicyId', SecurityPolicyId)
def get_ProxyProtocol(self): # String
return self.get_query_params().get('ProxyProtocol')
def set_ProxyProtocol(self, ProxyProtocol): # String
self.add_query_param('ProxyProtocol', ProxyProtocol)
def get_PortRangess(self): # RepeatList
return self.get_query_params().get('PortRanges')
def set_PortRangess(self, PortRanges): # RepeatList
for depth1 in range(len(PortRanges)):
if PortRanges[depth1].get('FromPort') is not None:
self.add_query_param('PortRanges.' + str(depth1 + 1) + '.FromPort', PortRanges[depth1].get('FromPort'))
if PortRanges[depth1].get('ToPort') is not None:
self.add_query_param('PortRanges.' + str(depth1 + 1) + '.ToPort', PortRanges[depth1].get('ToPort'))
def METHOD_NAME(self): # RepeatList
return self.get_query_params().get('Certificates')
def set_Certificatess(self, Certificates): # RepeatList
for depth1 in range(len(Certificates)):
if Certificates[depth1].get('Id') is not None:
self.add_query_param('Certificates.' + str(depth1 + 1) + '.Id', Certificates[depth1].get('Id'))
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_ClientAffinity(self): # String
return self.get_query_params().get('ClientAffinity')
def set_ClientAffinity(self, ClientAffinity): # String
self.add_query_param('ClientAffinity', ClientAffinity)
| null |
1,297 |
from typing import (
Any,
Optional,
Union,
)
from urllib.parse import urlparse
from pcs import (
settings,
utils,
)
from pcs.cli.common.errors import CmdLineInputError
from pcs.cli.common.parse_args import (
Argv,
InputModifiers,
KeyValueParser,
split_list_by_any_keywords,
)
def _parse_host_options(
host: str, options: Argv
) -> dict[str, Union[str, list[dict[str, Union[None, str, int]]]]]:
# pylint: disable=invalid-name
ADDR_OPT_KEYWORD = "addr"
supported_options = set([ADDR_OPT_KEYWORD])
parsed_options = KeyValueParser(options).get_unique()
unknown_options = set(parsed_options.keys()) - supported_options
if unknown_options:
raise CmdLineInputError(
"Unknown options {} for host '{}'".format(
", ".join(unknown_options), host
)
)
addr, port = _parse_addr(parsed_options.get(ADDR_OPT_KEYWORD, host))
return {"dest_list": [dict(addr=addr, port=port)]}
def _parse_addr(addr: str) -> tuple[Optional[str], int]:
if addr.count(":") > 1 and not addr.startswith("["):
    # if IPv6 without port, wrap it in square brackets
addr = "[{0}]".format(addr)
# adding protocol so urlparse will parse hostname/ip and port correctly
url = urlparse("http://{0}".format(addr))
common_exception = CmdLineInputError(
"Invalid port number in address '{0}', use 1..65535".format(addr)
)
# Reading the port attribute will raise a ValueError if an invalid port is
# specified in the URL.
try:
port = url.port
except ValueError:
raise common_exception from None
    # urlparse allows 0 as a valid port number, pcs does not
if port == 0:
raise common_exception
return url.hostname, (port if port else settings.pcsd_default_port)
def METHOD_NAME(lib: Any, argv: Argv, modifiers: InputModifiers) -> None:
# pylint: disable=unused-argument
"""
Options:
* -u - username
* -p - password
* --token - auth token
* --request-timeout - timeout for HTTP requests
"""
modifiers.ensure_only_supported("-u", "-p", "--request-timeout", "--token")
if not argv:
raise CmdLineInputError("No host specified")
host_dict = {
host: _parse_host_options(host, opts)
for host, opts in split_list_by_any_keywords(argv, "host name").items()
}
token = modifiers.get("--token")
if token:
token_value = utils.get_token_from_file(str(token))
for host_info in host_dict.values():
host_info.update(dict(token=token_value))
utils.auth_hosts_token(host_dict)
return
username, password = utils.get_user_and_pass()
for host_info in host_dict.values():
host_info.update(dict(username=username, password=password))
utils.auth_hosts(host_dict)
def deauth_cmd(lib: Any, argv: Argv, modifiers: InputModifiers) -> None:
# pylint: disable=unused-argument
"""
Options:
* --request-timeout - timeout for HTTP requests
"""
modifiers.ensure_only_supported("--request-timeout")
if not argv:
# Object of type 'dict_keys' is not JSON serializable, make it a list
remove_hosts = list(utils.read_known_hosts_file().keys())
else:
remove_hosts = argv
output, retval = utils.run_pcsdcli(
"remove_known_hosts", {"host_names": remove_hosts}
)
if retval == 0 and output["status"] == "access_denied":
utils.err("Access denied")
if retval == 0 and output["status"] == "ok" and output["data"]:
try:
if output["data"]["hosts_not_found"]:
utils.err(
"Following hosts were not found: '{hosts}'".format(
hosts="', '".join(output["data"]["hosts_not_found"])
)
)
if not output["data"]["sync_successful"]:
utils.err(
"Some nodes had a newer known-hosts than the local node. "
+ "Local node's known-hosts were updated. "
+ "Please repeat the action if needed."
)
if output["data"]["sync_nodes_err"]:
utils.err(
(
"Unable to synchronize and save known-hosts on nodes: "
+ "{0}. Run 'pcs host auth {1}' to make sure the nodes "
+ "are authorized."
).format(
", ".join(output["data"]["sync_nodes_err"]),
" ".join(output["data"]["sync_nodes_err"]),
)
)
except (ValueError, KeyError):
utils.err("Unable to communicate with pcsd")
return
utils.err("Unable to communicate with pcsd")
| null |
1,298 |
"""
This type stub file was generated by pyright.
"""
from collections import UserList
from kombu.utils.functional import lazy
"""Functional-style utilities."""
__all__ = (
"LRUCache",
"is_list",
"maybe_list",
"memoize",
"mlazy",
"noop",
"first",
"firstmethod",
"chunks",
"padlist",
"mattrgetter",
"uniq",
"regen",
"dictfilter",
"lazy",
"maybe_evaluate",
"head_from_fun",
"maybe",
"fun_accepts_kwargs",
)
FUNHEAD_TEMPLATE = ...
class DummyContext:
def __enter__(self): ...
def __exit__(self, *exc_info): ...
class mlazy(lazy):
"""Memoized lazy evaluation.
The function is only evaluated once, every subsequent access
will return the same value.
"""
evaluated = ...
_value = ...
def evaluate(self): ...
def noop(*args, **kwargs): # -> None:
"""No operation.
Takes any arguments/keyword arguments and does nothing.
"""
...
def pass1(arg, *args, **kwargs):
"""Return the first positional argument."""
...
def evaluate_promises(it): ...
def first(predicate, it): # -> Any | MethodType | None:
"""Return the first element in ``it`` that ``predicate`` accepts.
If ``predicate`` is None it will return the first item that's not
:const:`None`.
"""
...
def firstmethod(
method, on_call=...
): # -> (it: Unknown, *args: Unknown, **kwargs: Unknown) -> (Unknown | Any | None):
"""Multiple dispatch.
Return a function that with a list of instances,
finds the first instance that gives a value for the given method.
The list can also contain lazy instances
(:class:`~kombu.utils.functional.lazy`.)
"""
...
def chunks(it, n): # -> Generator[list[Unknown], None, None]:
"""Split an iterator into chunks with `n` elements each.
Warning:
        ``it`` must be an actual iterator; passing a concrete
        sequence will get you repeating elements.
So ``chunks(iter(range(1000)), 10)`` is fine, but
``chunks(range(1000), 10)`` is not.
Example:
# n == 2
>>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2)
>>> list(x)
[[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]
# n == 3
>>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3)
>>> list(x)
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]
"""
...
def padlist(container, size, default=...):
"""Pad list with default elements.
Example:
>>> first, last, city = padlist(['George', 'Costanza', 'NYC'], 3)
('George', 'Costanza', 'NYC')
>>> first, last, city = padlist(['George', 'Costanza'], 3)
('George', 'Costanza', None)
>>> first, last, city, planet = padlist(
... ['George', 'Costanza', 'NYC'], 4, default='Earth',
... )
('George', 'Costanza', 'NYC', 'Earth')
"""
...
def mattrgetter(*attrs): # -> (obj: Unknown) -> dict[Unknown, Any | None]:
"""Get attributes, ignoring attribute errors.
Like :func:`operator.itemgetter` but return :const:`None` on missing
attributes instead of raising :exc:`AttributeError`.
"""
...
def uniq(it): # -> Generator[Unknown, None, None]:
"""Return all unique elements in ``it``, preserving order."""
...
def lookahead(it): # -> zip_longest[tuple[Any, Any]]:
"""Yield pairs of (current, next) items in `it`.
`next` is None if `current` is the last item.
Example:
>>> list(lookahead(x for x in range(6)))
[(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, None)]
"""
...
def regen(it): # -> list[Unknown] | tuple[Unknown, ...] | _regen:
"""Convert iterator to an object that can be consumed multiple times.
``Regen`` takes any iterable, and if the object is an
generator it will cache the evaluated list on first access,
so that the generator can be "consumed" multiple times.
"""
...
class _regen(UserList, list):
def __init__(self, it) -> None: ...
def __reduce__(self): ...
def __length_hint__(self): ...
def __iter__(self): ...
def __getitem__(self, index): ...
def __bool__(self): ...
@property
def data(self): ...
def __repr__(self): ...
def head_from_fun(fun, bound=..., debug=...): # -> partial[Unknown] | str:
"""Generate signature function from actual function."""
...
def arity_greater(fun, n): ...
def fun_takes_argument(name, fun, position=...): ...
def METHOD_NAME(fun): # -> bool:
"""Return true if function accepts arbitrary keyword arguments."""
...
def maybe(typ, val):
"""Call typ on value if val is defined."""
...
def seq_concat_item(seq, item): # -> tuple[Unknown, ...]:
"""Return copy of sequence seq with item added.
Returns:
Sequence: if seq is a tuple, the result will be a tuple,
otherwise it depends on the implementation of ``__add__``.
"""
...
def seq_concat_seq(a, b): # -> Any:
"""Concatenate two sequences: ``a + b``.
Returns:
Sequence: The return value will depend on the largest sequence
- if b is larger and is a tuple, the return value will be a tuple.
- if a is larger and is a list, the return value will be a list,
"""
...
| null |
1,299 |
from django.urls import reverse
from creme.creme_core.auth.entity_credentials import EntityCredentials
from creme.creme_core.models import (
FakeDocument,
FakeFolder,
FileRef,
SetCredentials,
)
from .base import ViewsTestCase
class DownloadViewTestCase(ViewsTestCase):
def test_download_filefield01(self):
"Errors."
user = self.login_as_root_and_get()
folder = FakeFolder.objects.create(user=user, title="Faye's pix")
doc = FakeDocument.objects.create(
user=user,
title='Selfie with RedTail',
linked_folder=folder,
)
ct_id = doc.entity_type_id
self.assertGET404(reverse('creme_core__download', args=(ct_id, doc.id, 'unknown')))
# Empty file
self.assertGET404(reverse('creme_core__download', args=(ct_id, doc.id, 'filedata')))
def test_download_filefield02(self):
"OK."
user = self.login_as_root_and_get()
file_content = 'I am the content'
path = self.create_uploaded_file(
file_name='DownloadViewTestCase_test_download_filefield02.txt',
dir_name='views',
content=file_content,
)
folder = FakeFolder.objects.create(user=user, title="Faye's pix")
doc = FakeDocument.objects.create(
user=user,
title='Selfie with RedTail',
linked_folder=folder,
filedata=path,
)
url = reverse('creme_core__download', args=(doc.entity_type_id, doc.id, 'filedata'))
response = self.assertGET200(url, follow=True)
# self.assertEqual('text/plain; charset=utf-8', response['Content-Type']) TODO ??
self.assertEqual('text/plain', response['Content-Type'])
cdisp = response['Content-Disposition']
self.assertStartsWith(
cdisp,
'attachment; filename="DownloadViewTestCase_test_download_filefield',
)
self.assertEndsWith(cdisp, '.txt"')
self.assertEqual(
file_content.encode(),
b''.join(response.streaming_content)
)
self.assertPOST405(url)
def test_download_filefield03(self):
"Basename."
user = self.login_as_root_and_get()
path = self.create_uploaded_file(
file_name='DownloadViewTestCase_test_download_filefield03.txt',
dir_name='views',
)
temp_file = FileRef.objects.create(user=user, filedata=path, basename='test.txt')
response = self.assertGET200(temp_file.get_download_absolute_url(), follow=True)
# self.assertEqual('text/plain; charset=utf-8', response['Content-Type']) TODO ?
self.assertEqual('text/plain', response['Content-Type'])
self.assertEqual(
f'attachment; filename="{temp_file.basename}"',
response['Content-Disposition'],
)
# Consume stream to avoid error message "ResourceWarning: unclosed file..."
_ = [*response.streaming_content]
def test_download_filefield04(self):
"Not super-user."
user = self.login_as_standard()
SetCredentials.objects.create(
role=user.role,
value=EntityCredentials.VIEW,
set_type=SetCredentials.ESET_ALL,
ctype=FakeDocument,
)
path = self.create_uploaded_file(
file_name='DownloadViewTestCase_test_download_filefield04.txt',
dir_name='views',
)
folder = FakeFolder.objects.create(user=user, title="Faye's pix")
doc = FakeDocument.objects.create(
user=self.get_root_user(),
title='Selfie with RedTail',
linked_folder=folder,
filedata=path,
)
self.assertTrue(user.has_perm_to_view(doc))
response = self.assertGET200(
reverse('creme_core__download', args=(doc.entity_type_id, doc.id, 'filedata')),
follow=True,
)
# Consume stream to avoid error message "ResourceWarning: unclosed file..."
_ = [*response.streaming_content]
def METHOD_NAME(self):
"Not super-user."
user = self.login_as_standard()
path = self.create_uploaded_file(
file_name='DownloadViewTestCase_test_download_filefield05.txt',
dir_name='views',
)
folder = FakeFolder.objects.create(user=user, title="Faye's pix")
doc = FakeDocument.objects.create(
user=self.get_root_user(),
title='Selfie with RedTail',
linked_folder=folder,
filedata=path,
)
self.assertFalse(user.has_perm_to_view(doc))
self.assertGET403(
reverse('creme_core__download', args=(doc.entity_type_id, doc.id, 'filedata')),
follow=True,
)
| null |