id (int64, 0–6k) | code (stringlengths, 4k–8k) | code_compressed (null)
---|---|---|
1,500 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class SendTaokeInfoRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'SendTaokeInfo')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProductId(self): # String
return self.get_body_params().get('ProductId')
def set_ProductId(self, ProductId): # String
self.add_body_params('ProductId', ProductId)
def get_Gender(self): # String
return self.get_body_params().get('Gender')
def set_Gender(self, Gender): # String
self.add_body_params('Gender', Gender)
def get_City(self): # String
return self.get_body_params().get('City')
def set_City(self, City): # String
self.add_body_params('City', City)
def get_UserId(self): # String
return self.get_body_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_body_params('UserId', UserId)
def get_Mac(self): # String
return self.get_body_params().get('Mac')
def set_Mac(self, Mac): # String
self.add_body_params('Mac', Mac)
def get_Province(self): # String
return self.get_body_params().get('Province')
def set_Province(self, Province): # String
self.add_body_params('Province', Province)
def get_ProductTitle(self): # String
return self.get_body_params().get('ProductTitle')
def set_ProductTitle(self, ProductTitle): # String
self.add_body_params('ProductTitle', ProductTitle)
def get_BrandId(self): # String
return self.get_body_params().get('BrandId')
def set_BrandId(self, BrandId): # String
self.add_body_params('BrandId', BrandId)
def get_SellPrice(self): # String
return self.get_body_params().get('SellPrice')
def set_SellPrice(self, SellPrice): # String
self.add_body_params('SellPrice', SellPrice)
def get_Plat(self): # String
return self.get_body_params().get('Plat')
def set_Plat(self, Plat): # String
self.add_body_params('Plat', Plat)
def get_ComponentId(self): # String
return self.get_body_params().get('ComponentId')
def set_ComponentId(self, ComponentId): # String
self.add_body_params('ComponentId', ComponentId)
def get_Address(self): # String
return self.get_body_params().get('Address')
def set_Address(self, Address): # String
self.add_body_params('Address', Address)
def get_Ip(self): # String
return self.get_body_params().get('Ip')
def set_Ip(self, Ip): # String
self.add_body_params('Ip', Ip)
def get_MediaId(self): # String
return self.get_body_params().get('MediaId')
def set_MediaId(self, MediaId): # String
self.add_body_params('MediaId', MediaId)
def get_Phone(self): # String
return self.get_body_params().get('Phone')
def set_Phone(self, Phone): # String
self.add_body_params('Phone', Phone)
def get_V(self): # String
return self.get_body_params().get('V')
def set_V(self, V): # String
self.add_body_params('V', V)
def get_EnvironmentType(self): # String
return self.get_body_params().get('EnvironmentType')
def set_EnvironmentType(self, EnvironmentType): # String
self.add_body_params('EnvironmentType', EnvironmentType)
def get_District(self): # String
return self.get_body_params().get('District')
def set_District(self, District): # String
self.add_body_params('District', District)
def get_Imei(self): # String
return self.get_body_params().get('Imei')
def set_Imei(self, Imei): # String
self.add_body_params('Imei', Imei)
def get_PayPrice(self): # String
return self.get_body_params().get('PayPrice')
def set_PayPrice(self, PayPrice): # String
self.add_body_params('PayPrice', PayPrice)
def get_ChannelId(self): # String
return self.get_body_params().get('ChannelId')
def set_ChannelId(self, ChannelId): # String
self.add_body_params('ChannelId', ChannelId)
def get_Age(self): # String
return self.get_body_params().get('Age')
def set_Age(self, Age): # String
self.add_body_params('Age', Age)
def get_Status(self): # String
return self.get_body_params().get('Status')
def set_Status(self, Status): # String
self.add_body_params('Status', Status)
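# Usage sketch (not part of the generated SDK module; credentials, region,
# and parameter values below are placeholders):
#   from aliyunsdkcore.client import AcsClient
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = SendTaokeInfoRequest()
#   request.set_ProductId('<product-id>')
#   response = client.do_action_with_exception(request)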
| null |
1,501 |
import asyncio
import time
import unittest
from collections import deque
from typing import Deque, Optional, Union
from hummingbot.connector.derivative.bitmex_perpetual.bitmex_perpetual_order_book import BitmexPerpetualOrderBook
from hummingbot.connector.derivative.bitmex_perpetual.bitmex_perpetual_order_book_tracker import (
BitmexPerpetualOrderBookTracker,
)
from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType
class BitmexPerpetualOrderBookTrackerUnitTests(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.base_asset = "COINALPHA"
cls.quote_asset = "USD"
cls.domain = "bitmex_perpetual_testnet"
cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
cls.ev_loop = asyncio.get_event_loop()
def setUp(self) -> None:
super().setUp()
self.tracker: BitmexPerpetualOrderBookTracker = BitmexPerpetualOrderBookTracker(trading_pairs=[self.trading_pair])
self.tracking_task: Optional[asyncio.Task] = None
# Simulate start()
self.tracker._order_books[self.trading_pair] = BitmexPerpetualOrderBook()
self.tracker._tracking_message_queues[self.trading_pair] = asyncio.Queue()
self.tracker._past_diffs_windows[self.trading_pair] = deque()
self.tracker._order_books_initialized.set()
def tearDown(self) -> None:
self.tracking_task and self.tracking_task.cancel()
super().tearDown()
def _simulate_message_enqueue(self, message_queue: Union[asyncio.Queue, Deque], msg: OrderBookMessage):
if isinstance(message_queue, asyncio.Queue):
self.ev_loop.run_until_complete(message_queue.put(msg))
elif isinstance(message_queue, Deque):
message_queue.append(msg)
else:
raise NotImplementedError
def test_exchange_name(self):
self.assertEqual("bitmex_perpetual", self.tracker.exchange_name)
def test_order_book_diff_router_trading_pair_not_found_append_to_saved_message_queue(self):
expected_msg: OrderBookMessage = OrderBookMessage(
message_type=OrderBookMessageType.DIFF,
content={
"update_id": 1,
"trading_pair": self.trading_pair,
}
)
self._simulate_message_enqueue(self.tracker._order_book_diff_stream, expected_msg)
self.tracker._tracking_message_queues.clear()
task = self.ev_loop.create_task(
self.tracker._track_single_book("COINALPHA-USD")
)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
self.assertEqual(0, len(self.tracker._tracking_message_queues))
task.cancel()
def test_order_book_diff_router_snapshot_uid_above_diff_message_update_id(self):
expected_msg: OrderBookMessage = OrderBookMessage(
message_type=OrderBookMessageType.DIFF,
content={
"update_id": 1,
"trading_pair": self.trading_pair,
}
)
self._simulate_message_enqueue(self.tracker._order_book_diff_stream, expected_msg)
task = self.ev_loop.create_task(
self.tracker._track_single_book("COINALPHA-USD")
)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
task.cancel()
def test_order_book_diff_router_snapshot_uid_below_diff_message_update_id(self):
# Updates the snapshot_uid
self.tracker.order_books[self.trading_pair].apply_snapshot([], [], 2)
expected_msg: OrderBookMessage = OrderBookMessage(
message_type=OrderBookMessageType.DIFF,
content={
"update_id": 1,
"trading_pair": self.trading_pair,
}
)
self._simulate_message_enqueue(self.tracker._order_book_diff_stream, expected_msg)
task = self.ev_loop.create_task(
self.tracker._order_book_diff_router()
)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
self.assertEqual(0, self.tracker._tracking_message_queues[self.trading_pair].qsize())
task.cancel()
def test_track_single_book_snapshot_message_no_past_diffs(self):
snapshot_msg: OrderBookMessage = BitmexPerpetualOrderBook.snapshot_message_from_exchange(
msg={
"trading_pair": "COINALPHA-USD",
"update_id": 2,
"bids": [
["4.00000000", "431.00000000"]
],
"asks": [
["4.00000200", "12.00000000"]
]
},
timestamp=time.time()
)
self._simulate_message_enqueue(self.tracker._tracking_message_queues[self.trading_pair], snapshot_msg)
self.tracking_task = self.ev_loop.create_task(
self.tracker._track_single_book(self.trading_pair)
)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
self.assertTrue(1 < self.tracker.order_books[self.trading_pair].snapshot_uid)
def test_track_single_book_snapshot_message_with_past_diffs(self):
past_diff_msg: OrderBookMessage = BitmexPerpetualOrderBook.diff_message_from_exchange(
msg={
"lastUpdateId": 1,
"data_dict": {
"symbol": "COINALPHA-USD",
"bids": [
["4.00000100", "431.00000000"]
],
"asks": [
["4.00000300", "12.00000000"]
]
}
},
timestamp=time.time()
)
snapshot_msg: OrderBookMessage = BitmexPerpetualOrderBook.snapshot_message_from_exchange(
msg={
"trading_pair": "COINALPHA-USD",
"update_id": 2,
"bids": [
["4.00000000", "431.00000000"]
],
"asks": [
["4.00000200", "12.00000000"]
]
},
timestamp=time.time()
)
self.tracking_task = self.ev_loop.create_task(
self.tracker._track_single_book(self.trading_pair)
)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
self._simulate_message_enqueue(self.tracker._past_diffs_windows[self.trading_pair], past_diff_msg)
self._simulate_message_enqueue(self.tracker._tracking_message_queues[self.trading_pair], snapshot_msg)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
self.assertTrue(1 < self.tracker.order_books[self.trading_pair].snapshot_uid)
def test_track_single_book_diff_message(self):
diff_msg: OrderBookMessage = BitmexPerpetualOrderBook.diff_message_from_exchange(
msg={
"lastUpdateId": 1,
"data_dict": {
"symbol": "COINALPHA-USD",
"bids": [
["4.00000100", "431.00000000"]
],
"asks": [
["4.00000300", "12.00000000"]
]
}
},
timestamp=time.time()
)
self._simulate_message_enqueue(self.tracker._tracking_message_queues[self.trading_pair], diff_msg)
self.tracking_task = self.ev_loop.create_task(
self.tracker._track_single_book(self.trading_pair)
)
self.ev_loop.run_until_complete(asyncio.sleep(0.5))
| null |
1,502 |
#!/usr/bin/env python
import optparse
import sqlite3
import sys
import tempfile
try:
maketrans = str.maketrans
except AttributeError:
from string import maketrans # type: ignore[attr-defined,no-redef]
def stop_err(msg):
sys.stderr.write(msg)
sys.exit()
def solid2sanger(quality_string, min_qual=0):  # name assumed for the masked METHOD_NAME
sanger = ""
quality_string = quality_string.rstrip(" ")
for qv in quality_string.split(" "):
try:
if int(qv) < 0:
qv = "0"
if int(qv) < min_qual:
return False
sanger += chr(int(qv) + 33)
except Exception:
pass
return sanger
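# For example, the quality string "20 20 10" converts to "55+"
# because chr(20 + 33) == '5' and chr(10 + 33) == '+'.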
def Translator(frm="", to="", delete=""):
if len(to) == 1:
to = to * len(frm)
trans = maketrans(frm, to)
def callable(s):
return s.translate(trans, delete)
return callable
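# Usage sketch, mirroring the double-encoding call made in merge_reads_qual() below:
#   de = Translator(frm="0123.", to="ACGTN")
#   de("0123.")  # -> "ACGTN"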
def merge_reads_qual(
f_reads,
f_qual,
f_out,
trim_name=False,
out="fastq",
double_encode=False,
trim_first_base=False,
pair_end_flag="",
min_qual=0,
table_name=None,
):
# Reads from two files f_reads (reads) and f_qual (quality values) and produces output in three formats depending on the out parameter,
# which can have three values: fastq, txt, and db
# fastq = fastq format
# txt = space delimited format with defline, reads, and qvs
# db = dump data into a sqlite3 db.
# IMPORTANT! If out = db, two options must be provided:
# 1. f_out must be a db connection object initialized with sqlite3.connect()
# 2. table_name must be provided
if out == "db":
cursor = f_out.cursor()
sql = "create table %s (name varchar(50) not null, read blob, qv blob)" % table_name
cursor.execute(sql)
lines = []
line = " "
while line:
for f in [f_reads, f_qual]:
line = f.readline().rstrip("\n\r")
while line.startswith("#"):
line = f.readline().rstrip("\n\r")
lines.append(line)
if lines[0].startswith(">") and lines[1].startswith(">"):
if lines[0] != lines[1]:
stop_err(
"Files reads and quality score files are out of sync and likely corrupted. Please, check your input data"
)
defline = lines[0][1:]
if trim_name and (defline[len(defline) - 3 :] == "_F3" or defline[len(defline) - 3 :] == "_R3"):
defline = defline[: len(defline) - 3]
elif not lines[0].startswith(">") and not lines[1].startswith(">") and len(lines[0]) > 0 and len(lines[1]) > 0:
if trim_first_base:
lines[0] = lines[0][1:]
if double_encode:
de = Translator(frm="0123.", to="ACGTN")
lines[0] = de(lines[0])
qual = solid2sanger(lines[1], int(min_qual))
if qual:
if out == "fastq":
f_out.write("@%s%s\n%s\n+\n%s\n" % (defline, pair_end_flag, lines[0], qual))
if out == "txt":
f_out.write("%s %s %s\n" % (defline, lines[0], qual))
if out == "db":
cursor.execute('insert into %s values("%s","%s","%s")' % (table_name, defline, lines[0], qual))
lines = []
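# Usage sketch (hypothetical file names) for the plain FASTQ path:
#   with open("reads.csfasta") as fr, open("reads.qual") as fq, open("out.fastq", "w") as fo:
#       merge_reads_qual(fr, fq, fo, out="fastq")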
def main():
usage = "%prog --fr F3.csfasta --fq R3.csfasta --fout fastq_output_file [option]"
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"--fr", "--f_reads", metavar="F3_CSFASTA_FILE", dest="fr", help="Name of F3 file with color space reads"
)
parser.add_option(
"--fq", "--f_qual", metavar="F3_QUAL_FILE", dest="fq", help="Name of F3 file with color quality values"
)
parser.add_option("--fout", "--f3_fastq_output", metavar="F3_OUTPUT", dest="fout", help="Name for F3 output file")
parser.add_option(
"--rr",
"--r_reads",
metavar="R3_CSFASTA_FILE",
dest="rr",
default=False,
help="Name of R3 file with color space reads",
)
parser.add_option(
"--rq",
"--r_qual",
metavar="R3_QUAL_FILE",
dest="rq",
default=False,
help="Name of R3 file with color quality values",
)
parser.add_option("--rout", metavar="R3_OUTPUT", dest="rout", help="Name for F3 output file")
parser.add_option(
"-q",
"--min_qual",
dest="min_qual",
default="-1000",
help="Minimum quality threshold for printing reads. If a read contains a single call with QV lower than this value, it will not be reported. Default is -1000",
)
parser.add_option(
"-t",
"--trim_name",
dest="trim_name",
action="store_true",
default=False,
help="Trim _R3 and _F3 off read names. Default is False",
)
parser.add_option(
"-f",
"--trim_first_base",
dest="trim_first_base",
action="store_true",
default=False,
help="Remove the first base of reads in color-space. Default is False",
)
parser.add_option(
"-d",
"--double_encode",
dest="de",
action="store_true",
default=False,
help="Double encode color calls as nucleotides: 0123. becomes ACGTN. Default is False",
)
options, args = parser.parse_args()
if not (options.fout and options.fr and options.fq):
parser.error(
"""
One or more of the three required parameters is missing:
(1) --fr F3.csfasta file
(2) --fq F3.qual file
(3) --fout name of output file
Use --help for more info
"""
)
fr = open(options.fr)
fq = open(options.fq)
f_out = open(options.fout, "w")
if options.rr and options.rq:
rr = open(options.rr)
rq = open(options.rq)
if not options.rout:
parser.error("Provide the name for f3 output using --rout option. Use --help for more info")
r_out = open(options.rout, "w")
db = tempfile.NamedTemporaryFile()
try:
con = sqlite3.connect(db.name)
cur = con.cursor()
except Exception:
stop_err("Cannot connect to %s\n") % db.name
merge_reads_qual(
fr,
fq,
con,
trim_name=options.trim_name,
out="db",
double_encode=options.de,
trim_first_base=options.trim_first_base,
min_qual=options.min_qual,
table_name="f3",
)
merge_reads_qual(
rr,
rq,
con,
trim_name=options.trim_name,
out="db",
double_encode=options.de,
trim_first_base=options.trim_first_base,
min_qual=options.min_qual,
table_name="r3",
)
cur.execute("create index f3_name on f3( name )")
cur.execute("create index r3_name on r3( name )")
cur.execute("select * from f3,r3 where f3.name = r3.name")
for item in cur:
f_out.write("@%s%s\n%s\n+\n%s\n" % (item[0], "/1", item[1], item[2]))
r_out.write("@%s%s\n%s\n+\n%s\n" % (item[3], "/2", item[4], item[5]))
else:
merge_reads_qual(
fr,
fq,
f_out,
trim_name=options.trim_name,
out="fastq",
double_encode=options.de,
trim_first_base=options.trim_first_base,
min_qual=options.min_qual,
)
f_out.close()
if __name__ == "__main__":
main()
| null |
1,503 |
#!/usr/bin/env python
from __future__ import print_function
import math
import IMP.multifit
import IMP.atom
import IMP.em
from IMP import ArgumentParser
import os
import sys
__doc__ = "Refine fitting subunits into a density map with FFT."
class Fitter(object):
def __init__(
self,
em_map,
spacing,
resolution,
origin,
density_threshold,
pdb,
fits_fn,
angle,
num_fits,
angles_per_voxel,
max_trans,
max_angle,
ref_pdb=''):
self.em_map = em_map
self.spacing = spacing
self.resolution = resolution
self.threshold = density_threshold
self.originx = origin[0]
self.originy = origin[1]
self.originz = origin[2]
self.pdb = pdb
self.fits_fn = fits_fn
self.angle = angle
self.num_fits = num_fits
self.angles_per_voxel = angles_per_voxel
self.max_trans = max_trans
self.max_angle = max_angle
self.ref_pdb = ref_pdb
# TODO - update function
def run_local_fitting(self, mol2fit, rb, initial_transformation):  # name assumed for the masked METHOD_NAME
print("resolution is:", self.resolution)
dmap = IMP.em.read_map(self.em_map)
dmap.get_header().set_resolution(self.resolution)
dmap.update_voxel_size(self.spacing)
dmap.set_origin(IMP.algebra.Vector3D(self.originx,
self.originy,
self.originz))
dmap.set_was_used(True)
dmap.get_header().show()
mh_xyz = IMP.core.XYZs(IMP.core.get_leaves(mol2fit))
ff = IMP.multifit.FFTFitting()
ff.set_was_used(True)
#
do_cluster_fits = True
max_clustering_translation = 3
max_clustering_rotation = 5
num_fits_to_report = 100
#
fits = ff.do_local_fitting(dmap, self.threshold, mol2fit,
self.angle / 180.0 * math.pi,
self.max_angle / 180.0 *
math.pi, self.max_trans, num_fits_to_report,
do_cluster_fits, self.angles_per_voxel,
max_clustering_translation, max_clustering_rotation)
fits.set_was_used(True)
final_fits = fits.best_fits_
if self.ref_pdb != '':
mdl = IMP.Model()  # model to hold the reference structure (was previously undefined)
ref_mh = IMP.atom.read_pdb(self.ref_pdb, mdl)
ref_mh_xyz = IMP.core.XYZs(IMP.core.get_leaves(ref_mh))
cur_low = [1e4, 0]
for i, fit in enumerate(final_fits):
fit.set_index(i)
if self.ref_pdb != '':
trans = fit.get_fit_transformation()
IMP.atom.transform(mol2fit, trans)
rmsd = IMP.atom.get_rmsd(mh_xyz, ref_mh_xyz)
if rmsd < cur_low[0]:
cur_low[0] = rmsd
cur_low[1] = i
fit.set_rmsd_to_reference(rmsd)
IMP.atom.transform(mol2fit, trans.get_inverse())
fit.set_fit_transformation(trans * initial_transformation)
if self.ref_pdb != '':
print('from all fits, lowest rmsd to ref:', cur_low)
IMP.multifit.write_fitting_solutions(self.fits_fn, final_fits)
def do_work(f):
f.run()
def parse_args():
desc = """
Fit subunits locally around a combination solution with FFT."""
p = ArgumentParser(description=desc)
p.add_argument("-a", "--angle", dest="angle", type=float, default=5,
help="angle delta (degrees) for FFT rotational "
"search (default 5)")
p.add_argument("-n", "--num", dest="num", type=int, default=100,
help="Number of fits to report (default 100)")
p.add_argument("-v", "--angle_voxel", dest="angle_voxel", type=int,
default=10,
help="Number of angles to keep per voxel (default 10)")
p.add_argument("-t", "--max_trans", dest="max_trans", type=float,
default=10.,
help="maximum translational search in A (default 10)")
p.add_argument("-m", "--max_angle", dest="max_angle", type=float,
default=30.,
help="maximum angular search in degrees (default 50)")
p.add_argument("assembly_file", help="assembly file name")
p.add_argument("ref_assembly_file", help="refined assembly file name")
p.add_argument("proteomics_file", help="proteomics file name")
p.add_argument("mapping_file", help="mapping file name")
p.add_argument("combinations_file", help="combinations file name")
p.add_argument("combination_index", type=int,
help="number of the combination to read from the "
"combinations file")
return p.parse_args()
def run(
asmb_fn,
asmb_refined_fn,
proteomics_fn,
mapping_fn,
combs_fn,
comb_ind,
options):
# get rmsd for subunits
mdl1 = IMP.Model()
mdl2 = IMP.Model()
combs = IMP.multifit.read_paths(combs_fn)
asmb_input = IMP.multifit.read_settings(asmb_fn)
asmb_input.set_was_used(True)
asmb_refined_input = IMP.multifit.read_settings(asmb_refined_fn)
asmb_refined_input.set_was_used(True)
prot_data = IMP.multifit.read_proteomics_data(proteomics_fn)
mapping_data = IMP.multifit.read_protein_anchors_mapping(prot_data,
mapping_fn)
ensmb = IMP.multifit.load_ensemble(asmb_input, mdl1, mapping_data)
ensmb.set_was_used(True)
mhs = ensmb.get_molecules()
ensmb_ref = IMP.multifit.load_ensemble(asmb_input, mdl2, mapping_data)
ensmb_ref.set_was_used(True)
mhs_ref = ensmb_ref.get_molecules()
ensmb.load_combination(combs[comb_ind])
em_map = asmb_input.get_assembly_header().get_dens_fn()
resolution = asmb_input.get_assembly_header().get_resolution()
spacing = asmb_input.get_assembly_header().get_spacing()
origin = asmb_input.get_assembly_header().get_origin()
rbs_ref = ensmb_ref.get_rigid_bodies()
rbs = ensmb.get_rigid_bodies()
for i, mh in enumerate(mhs):
fits_fn = asmb_refined_input.get_component_header(
i).get_transformations_fn()
# todo - get the initial transformation
rb_ref = rbs_ref[i]
rb = rbs[i]
initial_transformation = IMP.algebra.get_transformation_from_first_to_second(
rb_ref.get_reference_frame(),
rb.get_reference_frame())
pdb_fn = asmb_input.get_component_header(i).get_filename()
f = Fitter(
em_map, spacing, resolution, origin, asmb_input.get_assembly_header(
).get_threshold(
), pdb_fn, fits_fn, options.angle, options.num, options.angle_voxel,
options.max_trans, options.max_angle)
f.run_local_fitting(mh, rb, initial_transformation)
def main():
args = parse_args()
run(args.assembly_file, args.ref_assembly_file, args.proteomics_file,
args.mapping_file, args.combinations_file, args.combination_index,
args)
if __name__ == "__main__":
main()
| null |
1,504 |
from __future__ import annotations
import glob
import os
import shutil
import tempfile
import numpy as np
import pandas as pd
import pytest
import dask.dataframe as dd
from dask.dataframe.optimize import optimize_dataframe_getitem
from dask.dataframe.utils import assert_eq
pytest.importorskip("pyarrow.orc")
pa = pytest.importorskip("pyarrow")
url = (
"https://www.googleapis.com/download/storage/v1/b/anaconda-public-data/o"
"/orc%2FTestOrcFile.testDate1900.orc?generation=1522611448751555&alt="
"media"
)
columns = ["time", "date"]
@pytest.mark.network
def test_orc_with_backend():
pytest.importorskip("requests")
d = dd.read_orc(url)
assert set(d.columns) == {"time", "date"} # order is not guaranteed
assert len(d) == 70000
@pytest.fixture(scope="module")
def orc_files():
requests = pytest.importorskip("requests")
data = requests.get(url).content
d = tempfile.mkdtemp()
files = [os.path.join(d, fn) for fn in ["test1.orc", "test2.orc"]]
for fn in files:
with open(fn, "wb") as f:
f.write(data)
try:
yield files
finally:
shutil.rmtree(d, ignore_errors=True)
@pytest.mark.parametrize("split_stripes", [1, 2])
@pytest.mark.network
def test_orc_single(orc_files, split_stripes):
fn = orc_files[0]
d = dd.read_orc(fn, split_stripes=split_stripes)
assert len(d) == 70000
assert d.npartitions == 8 / split_stripes
d2 = dd.read_orc(fn, columns=["time", "date"])
assert_eq(d[columns], d2[columns], check_index=False)
with pytest.raises(ValueError, match="nonexist"):
dd.read_orc(fn, columns=["time", "nonexist"])
# Check that `optimize_dataframe_getitem` changes the
# `columns` attribute of the "read-orc" layer
d3 = d[columns]
keys = [(d3._name, i) for i in range(d3.npartitions)]
graph = optimize_dataframe_getitem(d3.__dask_graph__(), keys)
key = [k for k in graph.layers.keys() if k.startswith("read-orc-")][0]
assert set(graph.layers[key].columns) == set(columns)
@pytest.mark.network
def test_orc_multiple(orc_files):
d = dd.read_orc(orc_files[0])
d2 = dd.read_orc(orc_files)
assert_eq(d2[columns], dd.concat([d, d])[columns], check_index=False)
d2 = dd.read_orc(os.path.dirname(orc_files[0]) + "/*.orc")
assert_eq(d2[columns], dd.concat([d, d])[columns], check_index=False)
@pytest.mark.parametrize("index", [None, "i32"])
@pytest.mark.parametrize("columns", [None, ["i32", "i64", "f"]])
def test_orc_roundtrip(tmpdir, index, columns):  # name assumed for the masked METHOD_NAME
tmp = str(tmpdir)
data = pd.DataFrame(
{
"i32": np.arange(1000, dtype=np.int32),
"i64": np.arange(1000, dtype=np.int64),
"f": np.arange(1000, dtype=np.float64),
"bhello": np.random.choice(["hello", "yo", "people"], size=1000).astype(
"O"
),
}
)
if index:
data = data.set_index(index)
df = dd.from_pandas(data, chunksize=500)
if columns:
data = data[[c for c in columns if c != index]]
# Write
df.to_orc(tmp, write_index=bool(index))
# Read
df2 = dd.read_orc(tmp, index=index, columns=columns)
assert_eq(data, df2, check_index=bool(index))
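# A minimal round-trip sketch of the API exercised above (path is a placeholder):
#   ddf = dd.from_pandas(pd.DataFrame({"x": range(10)}), npartitions=2)
#   ddf.to_orc("/tmp/orc_dataset", write_index=False)
#   dd.read_orc("/tmp/orc_dataset").compute()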
@pytest.mark.parametrize("split_stripes", [True, False, 2, 4])
def test_orc_roundtrip_aggregate_files(tmpdir, split_stripes):
tmp = str(tmpdir)
data = pd.DataFrame(
{
"a": np.arange(100, dtype=np.float64),
"b": np.random.choice(["cat", "dog", "mouse"], size=100),
}
)
df = dd.from_pandas(data, npartitions=8)
df.to_orc(tmp, write_index=False)
df2 = dd.read_orc(tmp, split_stripes=split_stripes, aggregate_files=True)
# Check that we have the expected partition count
# and that the data is correct
if split_stripes:
assert df2.npartitions == df.npartitions / int(split_stripes)
else:
assert df2.npartitions == df.npartitions
assert_eq(data, df2, check_index=False)
@pytest.mark.network
def test_orc_aggregate_files_offset(orc_files):
# Default read should give back 16 partitions. Therefore,
# specifying split_stripes=11 & aggregate_files=True should
# produce 2 partitions (with the first being larger than
# the second)
df2 = dd.read_orc(orc_files[:2], split_stripes=11, aggregate_files=True)
assert df2.npartitions == 2
assert len(df2.partitions[0].index) > len(df2.index) // 2
@pytest.mark.network
def test_orc_names(orc_files, tmp_path):
df = dd.read_orc(orc_files)
assert df._name.startswith("read-orc")
out = df.to_orc(tmp_path, compute=False)
assert out._name.startswith("to-orc")
def test_to_orc_delayed(tmp_path):
# See: https://github.com/dask/dask/issues/8022
df = pd.DataFrame(np.random.randn(100, 4), columns=["a", "b", "c", "d"])
ddf = dd.from_pandas(df, npartitions=4)
eager_path = os.path.join(tmp_path, "eager_orc_dataset")
ddf.to_orc(eager_path)
assert len(glob.glob(os.path.join(eager_path, "*"))) == 4
delayed_path = os.path.join(tmp_path, "delayed_orc_dataset")
dataset = ddf.to_orc(delayed_path, compute=False)
dataset.compute()
assert len(glob.glob(os.path.join(delayed_path, "*"))) == 4
| null |
1,505 |
# Copyright 2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import nnabla as nn
from nnabla.ext_utils import get_extension_context
from nnabla.monitor import Monitor
from argparse import ArgumentParser
import time
from execution import *
common_utils_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', 'utils'))
sys.path.append(common_utils_path)
from neu.yaml_wrapper import read_yaml, write_yaml
from neu.comm import CommunicatorWrapper
import shutil
def make_parser():  # name assumed for the masked METHOD_NAME
parser = ArgumentParser(description='StyleGAN2: Nnabla implementation')
parser.add_argument('--data', type=str, default='ffhq', choices=['ffhq'],
help='Model dataset')
parser.add_argument('--dataset_path', type=str, default='',
help='Path to dataset')
parser.add_argument('--few_shot', type=str, default='few_shot', choices=['few_shot', 'None'],
help='Few-shot learning config name ("None" disables few-shot learning)')
parser.add_argument('--weights_path', type=str, default='../results/weights',
help='Path to trained model weights')
parser.add_argument('--results_dir', type=str, default='../results/images',
help='Path to save results')
parser.add_argument('--monitor_path', '-mp', type=str, default='../results/monitor',
help='Path to save results')
# # [few-shot learning]
parser.add_argument('--pre_trained_model', type=str, default='path to pre trained model',
help='Path to trained model weights')
parser.add_argument('--extension_module', type=str, default='cudnn',
help='Device context')
parser.add_argument('--device_id', type=str, default='0',
help='Device Id')
parser.add_argument('--img_size', type=int, default=256,
help='Image size to generate')
parser.add_argument('--batch_size', type=int, default=2,
help='Batch size')
parser.add_argument('--train', action='store_true', default=False,
help='Set this flag to start training')
parser.add_argument('--auto_forward', action='store_true', default=False,
help='Set this flag to execute in dynamic computation mode')
parser.add_argument('--dali', action='store_true', default=False,
help='Set this flag to use DALI data iterator')
# type=list would split a string argument into characters; parse ints instead
parser.add_argument('--seed_1', type=int, nargs='+', default=[100, 101],
help='Seed values 1')
parser.add_argument('--seed_2', type=int, nargs='+', default=[102, 103],
help='Seed values 2')
parser.add_argument('--test', type=str, choices=['generate', 'latent_space_interpolation', 'style_mixing', 'latent_space_projection', 'ppl'], nargs='*',
help='Set this flag for testing')
parser.add_argument('--batch_size_A', type=int, default=3,
help='Only for style mixing: Batch size for style A')
parser.add_argument('--batch_size_B', type=int, default=3,
help='Only for style mixing: Batch size for style B')
parser.add_argument('--use_tf_weights', action='store_true', default=False,
help='Use TF trained weights converted to NNabla')
parser.add_argument('--img_path', type=str,
default='',
help='Image path for latent space projection')
return parser
if __name__ == '__main__':
parser = make_parser()
args = parser.parse_args()
config = read_yaml(os.path.join('configs', f'{args.data}.yaml'))
ctx = get_extension_context(args.extension_module)
nn.set_auto_forward(args.auto_forward or args.test)
comm = CommunicatorWrapper(ctx)
nn.set_default_context(ctx)
monitor = None
if comm is not None:
if comm.rank == 0:
monitor = Monitor(args.monitor_path)
start_time = time.time()
few_shot_config = None
if args.few_shot and args.few_shot != 'None':  # the 'None' choice arrives as a string
few_shot_config = read_yaml(os.path.join(
'configs', args.few_shot + '.yaml'))
if args.train:
style_gan = Train(monitor, config, args, comm, few_shot_config)
if args.test:
style_gan = Evaluate(monitor, config, args, comm, few_shot_config)
if comm is not None:
if comm.rank == 0:
end_time = time.time()
training_time = (end_time-start_time)/3600
print('Total running time: {} hours'.format(training_time))
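# Example invocation (entry-point script name hypothetical):
#   python main.py --data ffhq --dataset_path <path> --train --extension_module cudnn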
| null |
1,506 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadb.endpoint import endpoint_data
class DescribeDiagnosisRecordsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adb', '2019-03-15', 'DescribeDiagnosisRecords','ads')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_QueryCondition(self): # String
return self.get_query_params().get('QueryCondition')
def set_QueryCondition(self, QueryCondition): # String
self.add_query_param('QueryCondition', QueryCondition)
def get_StartTime(self): # String
return self.get_query_params().get('StartTime')
def set_StartTime(self, StartTime): # String
self.add_query_param('StartTime', StartTime)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_Database(self): # String
return self.get_query_params().get('Database')
def set_Database(self, Database): # String
self.add_query_param('Database', Database)
def get_ClientIp(self): # String
return self.get_query_params().get('ClientIp')
def set_ClientIp(self, ClientIp): # String
self.add_query_param('ClientIp', ClientIp)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Keyword(self): # String
return self.get_query_params().get('Keyword')
def set_Keyword(self, Keyword): # String
self.add_query_param('Keyword', Keyword)
def get_Lang(self): # String
return self.get_query_params().get('Lang')
def set_Lang(self, Lang): # String
self.add_query_param('Lang', Lang)
def get_Order(self): # String
return self.get_query_params().get('Order')
def set_Order(self, Order): # String
self.add_query_param('Order', Order)
def get_MaxScanSize(self): # Long
return self.get_query_params().get('MaxScanSize')
def set_MaxScanSize(self, MaxScanSize): # Long
self.add_query_param('MaxScanSize', MaxScanSize)
def get_ResourceGroup(self): # String
return self.get_query_params().get('ResourceGroup')
def set_ResourceGroup(self, ResourceGroup): # String
self.add_query_param('ResourceGroup', ResourceGroup)
def get_DBClusterId(self): # String
return self.get_query_params().get('DBClusterId')
def set_DBClusterId(self, DBClusterId): # String
self.add_query_param('DBClusterId', DBClusterId)
def get_PatternId(self): # String
return self.get_query_params().get('PatternId')
def set_PatternId(self, PatternId): # String
self.add_query_param('PatternId', PatternId)
def get_EndTime(self): # String
return self.get_query_params().get('EndTime')
def set_EndTime(self, EndTime): # String
self.add_query_param('EndTime', EndTime)
def get_MinPeakMemory(self): # Long
return self.get_query_params().get('MinPeakMemory')
def set_MinPeakMemory(self, MinPeakMemory): # Long
self.add_query_param('MinPeakMemory', MinPeakMemory)
def get_MinScanSize(self): # Long
return self.get_query_params().get('MinScanSize')
def set_MinScanSize(self, MinScanSize): # Long
self.add_query_param('MinScanSize', MinScanSize)
def get_MaxPeakMemory(self): # Long
return self.get_query_params().get('MaxPeakMemory')
def set_MaxPeakMemory(self, MaxPeakMemory): # Long
self.add_query_param('MaxPeakMemory', MaxPeakMemory)
def get_UserName(self): # String
return self.get_query_params().get('UserName')
def set_UserName(self, UserName): # String
self.add_query_param('UserName', UserName)
| null |
1,507 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbase.endpoint import endpoint_data
class CreateClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'HBase', '2019-01-01', 'CreateCluster','hbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClusterName(self):
return self.get_query_params().get('ClusterName')
def set_ClusterName(self,ClusterName):
self.add_query_param('ClusterName',ClusterName)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_EngineVersion(self):
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self,EngineVersion):
self.add_query_param('EngineVersion',EngineVersion)
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_Engine(self):
return self.get_query_params().get('Engine')
def set_Engine(self,Engine):
self.add_query_param('Engine',Engine)
def get_AutoRenewPeriod(self):
return self.get_query_params().get('AutoRenewPeriod')
def set_AutoRenewPeriod(self,AutoRenewPeriod):
self.add_query_param('AutoRenewPeriod',AutoRenewPeriod)
def get_Period(self):
return self.get_query_params().get('Period')
def set_Period(self,Period):
self.add_query_param('Period',Period)
def get_DiskSize(self):
return self.get_query_params().get('DiskSize')
def set_DiskSize(self,DiskSize):
self.add_query_param('DiskSize',DiskSize)
def get_EncryptionKey(self):
return self.get_query_params().get('EncryptionKey')
def set_EncryptionKey(self,EncryptionKey):
self.add_query_param('EncryptionKey',EncryptionKey)
def get_MasterInstanceType(self):
return self.get_query_params().get('MasterInstanceType')
def set_MasterInstanceType(self,MasterInstanceType):
self.add_query_param('MasterInstanceType',MasterInstanceType)
def get_DiskType(self):
return self.get_query_params().get('DiskType')
def set_DiskType(self,DiskType):
self.add_query_param('DiskType',DiskType)
def get_VSwitchId(self):
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self,VSwitchId):
self.add_query_param('VSwitchId',VSwitchId)
def get_SecurityIPList(self):
return self.get_query_params().get('SecurityIPList')
def set_SecurityIPList(self,SecurityIPList):
self.add_query_param('SecurityIPList',SecurityIPList)
def get_ColdStorageSize(self):
return self.get_query_params().get('ColdStorageSize')
def set_ColdStorageSize(self,ColdStorageSize):
self.add_query_param('ColdStorageSize',ColdStorageSize)
def get_PeriodUnit(self):
return self.get_query_params().get('PeriodUnit')
def set_PeriodUnit(self,PeriodUnit):
self.add_query_param('PeriodUnit',PeriodUnit)
def get_CoreInstanceType(self):
return self.get_query_params().get('CoreInstanceType')
def set_CoreInstanceType(self,CoreInstanceType):
self.add_query_param('CoreInstanceType',CoreInstanceType)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_NodeCount(self):
return self.get_query_params().get('NodeCount')
def set_NodeCount(self,NodeCount):
self.add_query_param('NodeCount',NodeCount)
def get_ZoneId(self):
return self.get_query_params().get('ZoneId')
def set_ZoneId(self,ZoneId):
self.add_query_param('ZoneId',ZoneId)
def get_PayType(self):
return self.get_query_params().get('PayType')
def set_PayType(self,PayType):
self.add_query_param('PayType',PayType)
| null |
1,508 |
from methods.regular.regular_api import *
from shared.utils.task.task_update_manager import Task_Update
@routes.route('/api/v1/project/<string:project_string_id>' +
'/task/next',
methods = ['POST'])
@limiter.limit("1 per second, 50 per minute, 1000 per day")
@Project_permissions.user_has_project(
["admin", "Editor", "annotator"])
def api_get_next_task_annotator(project_string_id):
log = regular_input.regular_log.default_api_log()
with sessionMaker.session_scope() as session:
project = Project.get(session, project_string_id)
user = User.get(session)
if not user:
log['error']['usage'] = "Designed for human users."
return jsonify( log = log), 200
task = get_next_task_by_project(
session = session,
user = user,
project = project)
if task is None:
log['info']['task'] = "No tasks available."
return jsonify( log = log), 200
task_serialized = task.serialize_trainer_annotate(session)
log['success'] = True
return jsonify( log = log,
task = task_serialized), 200
def get_next_task_by_project(
session,
user,
project):
task = Task.get_last_task(
session = session,
user = user,
status_allow_list = ["available", "in_progress"])
if task:
return task
task = Task.request_next_task_by_project(
session = session,
project = project,
user = user)
if task:
task.add_assignee(session, user)
task_update_manager = Task_Update(
session = session,
task = task,
status = 'in_progress'
)
task_update_manager.main() # This updates the task status
session.add(task)
session.add(user)
return task
def get_next_task_by_job(
session,
user,
job):
task = Task.get_last_task(
session = session,
user = user,
status_allow_list = ["available", "in_progress"],
job=job)
if task:
return task
task = recursively_get_next_available(session = session,
job = job,
user = user)
if task:
task.add_assignee(session, user)
task_update_manager = Task_Update(
session = session,
task = task,
status = 'in_progress'
)
# set status
task_update_manager.main() # This updates the task status
session.add(task)
session.add(user)
return task
@routes.route('/api/v1/job/<int:job_id>/task/next',
methods = ['POST'])
@limiter.limit("1 per second, 1000 per day")
@Job_permissions.by_job_id(
mode = "builder",
apis_user_list = ['builder_or_trainer', 'security_email_verified'])
def task_next_by_job_api(job_id):
log = regular_input.regular_log.default_api_log()
with sessionMaker.session_scope() as session:
job = Job.get_by_id(session, job_id)
user = User.get(session)
task = get_next_task_by_job(
session = session,
user = user,
job = job)
if task is None:
log['info']['task'] = "No tasks available."
return jsonify( log = log), 200
task_serialized = task.serialize_trainer_annotate(session)
log['success'] = True
return jsonify( log = log,
task = task_serialized), 200
def recursively_get_next_available(session,
job,
user):
"""
Goal, give consideration to task types,
and not expect that first result from shared.database
matches "business?" logic
Example of saying a person can't review their own task
"""
ignore_task_IDS_list = []
while True:
task = Task.get_next_available_task_by_job_id(
session = session,
job_id = job.id,
ignore_task_IDS_list = ignore_task_IDS_list)
if task is None:
return None
if task.task_type == 'draw':
return task
if task.task_type == 'review':
result = user_can_review(session = session,
task = task,
user = user)
if result is True:
return task
else:
ignore_task_IDS_list.append(task.id)
def user_can_review(session,  # name assumed for the masked METHOD_NAME
task,
user):
parent = Task.get_by_id(session, task.parent_id)
# task.parent not working for some reason
if parent:
if user == parent.assignee_user:
return False
return True
| null |
1,509 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeDedicatedHostClustersRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeDedicatedHostClusters','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DedicatedHostClusterName(self): # String
return self.get_query_params().get('DedicatedHostClusterName')
def set_DedicatedHostClusterName(self, DedicatedHostClusterName): # String
self.add_query_param('DedicatedHostClusterName', DedicatedHostClusterName)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DedicatedHostClusterIds(self): # String
return self.get_query_params().get('DedicatedHostClusterIds')
def set_DedicatedHostClusterIds(self, DedicatedHostClusterIds): # String
self.add_query_param('DedicatedHostClusterIds', DedicatedHostClusterIds)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_LockReason(self): # String
return self.get_query_params().get('LockReason')
def set_LockReason(self, LockReason): # String
self.add_query_param('LockReason', LockReason)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status)
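# Usage sketch for the repeat-list Tag encoding above (values are placeholders):
#   request = DescribeDedicatedHostClustersRequest()
#   request.set_Tags([{'Key': 'env', 'Value': 'prod'}])
#   # emits query params 'Tag.1.Key' = 'env' and 'Tag.1.Value' = 'prod'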
| null |
1,510 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class DescribeSmartAccessGatewaysRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'DescribeSmartAccessGateways','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AclIds(self): # String
return self.get_query_params().get('AclIds')
def set_AclIds(self, AclIds): # String
self.add_query_param('AclIds', AclIds)
def get_CanAssociateQos(self): # Boolean
return self.get_query_params().get('CanAssociateQos')
def set_CanAssociateQos(self, CanAssociateQos): # Boolean
self.add_query_param('CanAssociateQos', CanAssociateQos)
def get_SoftwareVersion(self): # String
return self.get_query_params().get('SoftwareVersion')
def set_SoftwareVersion(self, SoftwareVersion): # String
self.add_query_param('SoftwareVersion', SoftwareVersion)
def get_UnboundAclIds(self): # String
return self.get_query_params().get('UnboundAclIds')
def set_UnboundAclIds(self, UnboundAclIds): # String
self.add_query_param('UnboundAclIds', UnboundAclIds)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_VersionComparator(self): # String
return self.get_query_params().get('VersionComparator')
def set_VersionComparator(self, VersionComparator): # String
self.add_query_param('VersionComparator', VersionComparator)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_InstanceType(self): # String
return self.get_query_params().get('InstanceType')
def set_InstanceType(self, InstanceType): # String
self.add_query_param('InstanceType', InstanceType)
def get_HardwareType(self): # String
return self.get_query_params().get('HardwareType')
def set_HardwareType(self, HardwareType): # String
self.add_query_param('HardwareType', HardwareType)
def get_SmartAGIdss(self): # RepeatList
return self.get_query_params().get('SmartAGIds')
def set_SmartAGIdss(self, SmartAGIds): # RepeatList
# Body was missing; the standard repeat-list encoding used elsewhere in this SDK is assumed:
for depth1 in range(len(SmartAGIds)):
self.add_query_param('SmartAGIds.' + str(depth1 + 1), SmartAGIds[depth1])
def get_SerialNumber(self): # String
return self.get_query_params().get('SerialNumber')
def set_SerialNumber(self, SerialNumber): # String
self.add_query_param('SerialNumber', SerialNumber)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_AssociatedCcnId(self): # String
return self.get_query_params().get('AssociatedCcnId')
def set_AssociatedCcnId(self, AssociatedCcnId): # String
self.add_query_param('AssociatedCcnId', AssociatedCcnId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_BusinessState(self): # String
return self.get_query_params().get('BusinessState')
def set_BusinessState(self, BusinessState): # String
self.add_query_param('BusinessState', BusinessState)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_SmartAGId(self): # String
return self.get_query_params().get('SmartAGId')
def set_SmartAGId(self, SmartAGId): # String
self.add_query_param('SmartAGId', SmartAGId)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status)
| null |
1,511 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
from typing import Dict, Optional, Tuple
from overrides import EnforceOverrides
from archai.common.config import Config
from archai.common.ordered_dict_logger import get_global_logger
from archai.supergraph.datasets import data
from archai.supergraph.nas.arch_trainer import TArchTrainer
from archai.supergraph.nas.finalizers import Finalizers
from archai.supergraph.nas.model import Model
from archai.supergraph.nas.model_desc import ModelDesc
from archai.supergraph.nas.model_desc_builder import ModelDescBuilder
from archai.supergraph.utils.metrics import Metrics
from archai.supergraph.utils.trainer import Trainer
logger = get_global_logger()
class ModelMetrics:
def __init__(self, model:Model, metrics:Metrics) -> None:
self.model = model
self.metrics = metrics
class SearchResult:
def __init__(self, model_desc:Optional[ModelDesc],
search_metrics:Optional[Metrics],
train_metrics:Optional[Metrics]) -> None:
self.model_desc = model_desc
self.search_metrics = search_metrics
self.train_metrics = train_metrics
class Searcher(EnforceOverrides):
def search(self, conf_search:Config, model_desc_builder:Optional[ModelDescBuilder],
trainer_class:TArchTrainer, finalizers:Finalizers)->SearchResult:
# region config vars
conf_model_desc = conf_search['model_desc']
conf_post_train = conf_search['post_train']
cells = conf_model_desc['n_cells']
reductions = conf_model_desc['n_reductions']
nodes = conf_model_desc['cell']['n_nodes']
# endregion
assert model_desc_builder is not None, 'Default search implementation requires model_desc_builder'
# build model description that we will search on
model_desc = self.build_model_desc(model_desc_builder, conf_model_desc,
reductions, cells, nodes)
# perform search on model description
model_desc, search_metrics = self.search_model_desc(conf_search, model_desc,
trainer_class, finalizers)
# train searched model for few epochs to get some perf metrics
model_metrics = self.train_model_desc(model_desc, conf_post_train)
search_result = SearchResult(model_desc, search_metrics,
model_metrics.metrics if model_metrics is not None else None)
self.clean_log_result(conf_search, search_result)
return search_result
def clean_log_result(self, conf_search:Config, search_result:SearchResult)->None:
final_desc_filename = conf_search['final_desc_filename']
# remove weights info from model_desc so it's more readable
search_result.model_desc.clear_trainables()
# if file name was specified then save the model desc
if final_desc_filename:
search_result.model_desc.save(final_desc_filename)
if search_result.search_metrics is not None:
logger.info({'search_top1_val':
search_result.search_metrics.best_val_top1()})
if search_result.train_metrics is not None:
logger.info({'train_top1_val':
search_result.train_metrics.best_val_top1()})
def build_model_desc(self, model_desc_builder:ModelDescBuilder,
conf_model_desc:Config,
reductions:int, cells:int, nodes:int)->ModelDesc:
# reset macro params in copy of config
conf_model_desc = copy.deepcopy(conf_model_desc)
conf_model_desc['n_reductions'] = reductions
conf_model_desc['n_cells'] = cells
# create model desc for search using model config
# we will build model without call to model_desc_builder for pre-training
model_desc = model_desc_builder.build(conf_model_desc, template=None)
return model_desc
def get_data(self, conf_loader:Config)->data.DataLoaders:
# this dict caches the dataset objects per dataset config so we don't have to reload
# the reason we do dynamic attribute is so that any dependent methods
# can do ray.remote
if not hasattr(self, '_data_cache'):
self._data_cache:Dict[int, data.DataLoaders] = {}
# first get from cache
if id(conf_loader) in self._data_cache:
data_loaders = self._data_cache[id(conf_loader)]
else:
data_loaders = data.get_data(conf_loader)
self._data_cache[id(conf_loader)] = data_loaders
return data_loaders
def METHOD_NAME(self, model:Model, finalizers:Finalizers)->ModelDesc:
return finalizers.METHOD_NAME(model, restore_device=False)
def search_model_desc(self, conf_search:Config, model_desc:ModelDesc,
trainer_class:TArchTrainer, finalizers:Finalizers)\
->Tuple[ModelDesc, Optional[Metrics]]:
        # if no trainer is specified (e.g. for algos like random search) we return the same desc
if trainer_class is None:
return model_desc, None
logger.pushd('arch_search')
conf_trainer = conf_search['trainer']
conf_loader = conf_search['loader']
model = Model(model_desc, droppath=False, affine=False)
# get data
data_loaders = self.get_data(conf_loader)
# search arch
arch_trainer = trainer_class(conf_trainer, model, checkpoint=None)
search_metrics = arch_trainer.fit(data_loaders)
# finalize
found_desc = self.METHOD_NAME(model, finalizers)
logger.popd()
return found_desc, search_metrics
def train_model_desc(self, model_desc:ModelDesc, conf_train:Config)\
->Optional[ModelMetrics]:
"""Train given description"""
# region conf vars
conf_trainer = conf_train['trainer']
conf_loader = conf_train['loader']
trainer_title = conf_trainer['title']
epochs = conf_trainer['epochs']
drop_path_prob = conf_trainer['drop_path_prob']
# endregion
        # if epochs == 0 then there is nothing to train, so save time
if epochs <= 0:
return None
logger.pushd(trainer_title)
model = Model(model_desc, droppath=drop_path_prob>0.0, affine=True)
# get data
        data_loaders = self.get_data(conf_loader)
trainer = Trainer(conf_trainer, model, checkpoint=None)
train_metrics = trainer.fit(data_loaders)
logger.popd()
return ModelMetrics(model, train_metrics)
| null |
1,512 |
import pytest
from xonsh.completers.bash import complete_from_bash
from xonsh.completers.tools import RichCompletion
from xonsh.parsers.completion_context import (
CommandArg,
CommandContext,
CompletionContext,
)
from xonsh.pytest.tools import skip_if_on_darwin, skip_if_on_windows
@pytest.fixture(autouse=True)
def setup(monkeypatch, tmp_path, xession):
if not xession.env.get("BASH_COMPLETIONS"):
monkeypatch.setitem(
xession.env,
"BASH_COMPLETIONS",
["/usr/share/bash-completion/bash_completion"],
)
(tmp_path / "testdir").mkdir()
(tmp_path / "spaced dir").mkdir()
monkeypatch.chdir(str(tmp_path))
@skip_if_on_darwin
@skip_if_on_windows
@pytest.mark.parametrize(
"command_context, completions, lprefix",
(
(
CommandContext(args=(CommandArg("bash"),), arg_index=1, prefix="--deb"),
{"--debug", "--debugger"},
5,
),
(
CommandContext(args=(CommandArg("ls"),), arg_index=1, prefix=""),
{"'testdir/'", "'spaced dir/'"},
0,
),
        # tar replaces "~/" with "/home/user/"; the change should be rolled back by us.
(
CommandContext(args=(CommandArg("tar"),), arg_index=1, prefix="~/"),
{"~/c", "~/u", "~/t", "~/d", "~/A", "~/r", "~/x"},
2,
),
(
CommandContext(
args=(CommandArg("ls"),), arg_index=1, prefix="", opening_quote="'"
),
{"'testdir/'", "'spaced dir/'"},
1,
),
),
)
def test_bash_completer(command_context, completions, lprefix):
bash_completions, bash_lprefix = complete_from_bash(
CompletionContext(command_context)
)
assert bash_completions == completions and bash_lprefix == lprefix
@skip_if_on_darwin
@skip_if_on_windows
@pytest.mark.parametrize(
"command_context, completions, lprefix",
(
# ls /pro<TAB> -> ls /proc/
(
CommandContext(args=(CommandArg("ls"),), arg_index=1, prefix="/pro"),
{"/proc/"},
4,
),
# ls '/pro<TAB> -> ls '/proc/'
(
CommandContext(
args=(CommandArg("ls"),), arg_index=1, prefix="/pro", opening_quote="'"
),
{"'/proc/'"},
5,
),
# ls '/pro<TAB>' -> ls '/proc/'
(
CommandContext(
args=(CommandArg("ls"),),
arg_index=1,
prefix="/pro",
opening_quote="'",
closing_quote="'",
),
{"'/proc/"},
5,
),
# ls '/pro'<TAB> -> ls '/proc/'
(
CommandContext(
args=(CommandArg("ls"),),
arg_index=1,
prefix="/pro",
opening_quote="'",
closing_quote="'",
is_after_closing_quote=True,
),
{"'/proc/'"},
6,
),
# ls """/pro"""<TAB> -> ls """/proc/"""
(
CommandContext(
args=(CommandArg("ls"),),
arg_index=1,
prefix="/pro",
opening_quote='"""',
closing_quote='"""',
is_after_closing_quote=True,
),
{'"""/proc/"""'},
10,
),
# Completions that have to be quoted:
# ls ./sp -> ls './spaced dir/'
(
CommandContext(args=(CommandArg("ls"),), arg_index=1, prefix="./sp"),
{"'./spaced dir/'"},
4,
),
# ls './sp<TAB> -> ls './spaced dir/'
(
CommandContext(
args=(CommandArg("ls"),), arg_index=1, prefix="./sp", opening_quote="'"
),
{"'./spaced dir/'"},
5,
),
# ls './sp<TAB>' -> ls './spaced dir/'
(
CommandContext(
args=(CommandArg("ls"),),
arg_index=1,
prefix="./sp",
opening_quote="'",
closing_quote="'",
),
{"'./spaced dir/"},
5,
),
# ls './sp'<TAB> -> ls './spaced dir/'
(
CommandContext(
args=(CommandArg("ls"),),
arg_index=1,
prefix="./sp",
opening_quote="'",
closing_quote="'",
is_after_closing_quote=True,
),
{"'./spaced dir/'"},
6,
),
),
)
def test_quote_handling(command_context, completions, lprefix):
bash_completions, bash_lprefix = complete_from_bash(
CompletionContext(command_context)
)
assert bash_completions == completions and bash_lprefix == lprefix
assert all(
isinstance(comp, RichCompletion) and not comp.append_closing_quote
for comp in bash_completions
) # make sure the completer handles the closing quote by itself
@skip_if_on_darwin
@skip_if_on_windows
def METHOD_NAME():
context = CompletionContext(
CommandContext(args=(CommandArg("git"),), arg_index=1, prefix="")
)
bash_completions, bash_lprefix = complete_from_bash(context)
assert {"clean", "show"}.issubset(bash_completions)
@skip_if_on_darwin
@skip_if_on_windows
@pytest.mark.parametrize(
"command_context, completions, lprefix, exp_append_space",
(
# dd sta -> dd status=
(
CommandContext(args=(CommandArg("dd"),), arg_index=1, prefix="sta"),
{"status="},
3,
False,
),
# date --da -> date --date=
(
CommandContext(args=(CommandArg("date"),), arg_index=1, prefix="--da"),
{"--date="},
4,
False,
),
# dd status=pr -> dd status=progress
(
CommandContext(args=(CommandArg("dd"),), arg_index=1, prefix="status=pr"),
{"progress"},
2,
True,
),
# dd if=/et -> dd if=/etc/
(
CommandContext(args=(CommandArg("dd"),), arg_index=1, prefix="if=/et"),
{"/etc/"},
3,
False,
),
# dd of=/dev/nul -> dd of=/dev/null
(
CommandContext(args=(CommandArg("dd"),), arg_index=1, prefix="of=/dev/nul"),
{"/dev/null"},
8,
True,
),
),
)
def test_equal_sign_arg(command_context, completions, lprefix, exp_append_space):
bash_completions, bash_lprefix = complete_from_bash(
CompletionContext(command_context)
)
assert bash_completions == completions and bash_lprefix == lprefix
assert all(
isinstance(comp, RichCompletion) and comp.append_space == exp_append_space
for comp in bash_completions
)
@pytest.fixture
def bash_completer(fake_process):
fake_process.register_subprocess(
command=["bash", fake_process.any()],
# completion for "git push origin :dev-b"
stdout=b"""\
complete -o bashdefault -o default -o nospace -F __git_wrap__git_main git
dev-branch
""",
)
return fake_process
# git push origin :dev-b<TAB> -> git push origin :dev-branch
def test_git_delete_remote_branch(bash_completer):
command_context = CommandContext(
args=(
CommandArg("git"),
CommandArg("push"),
CommandArg("origin"),
),
arg_index=3,
prefix=":dev-b",
)
bash_completions, bash_lprefix = complete_from_bash(
CompletionContext(command_context)
)
assert bash_completions == {"dev-branch"} and bash_lprefix == 5
assert all(
isinstance(comp, RichCompletion) and comp.append_space is False
for comp in bash_completions
)
| null |
1,513 |
import asyncio
import unittest
from typing import Awaitable, Optional
from unittest.mock import AsyncMock, patch
import hummingbot.connector.exchange.mexc.mexc_constants as CONSTANTS
from hummingbot.connector.exchange.mexc.mexc_auth import MexcAuth
from hummingbot.connector.exchange.mexc.mexc_websocket_adaptor import MexcWebSocketAdaptor
from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant
from hummingbot.core.api_throttler.async_throttler import AsyncThrottler
class MexcWebSocketUnitTests(unittest.TestCase):
# the level is required to receive logs from the data source logger
level = 0
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.ev_loop = asyncio.get_event_loop()
cls.trading_pairs = ["COINALPHA-HBOT"]
cls.api_key = "someKey"
cls.secret_key = "someSecretKey"
cls.auth = MexcAuth(api_key=cls.api_key, secret_key=cls.secret_key)
def METHOD_NAME(self) -> None:
super().METHOD_NAME()
self.log_records = []
throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS)
self.websocket = MexcWebSocketAdaptor(throttler)
self.websocket.logger().setLevel(1)
self.websocket.logger().addHandler(self)
self.mocking_assistant = NetworkMockingAssistant()
self.async_task: Optional[asyncio.Task] = None
self.resume_test_event = asyncio.Event()
def tearDown(self) -> None:
self.async_run_with_timeout(self.websocket.disconnect())
self.async_task and self.async_task.cancel()
super().tearDown()
def handle(self, record):
self.log_records.append(record)
def _is_logged(self, log_level: str, message: str) -> bool:
return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records)
def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1):
ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout))
return ret
def resume_test_callback(self):
self.resume_test_event.set()
async def _iter_message(self):
async for _ in self.websocket.iter_messages():
self.resume_test_callback()
self.async_task.cancel()
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_connect_raises_exception(self, ws_connect_mock):
throttler = AsyncThrottler(CONSTANTS.RATE_LIMITS)
ws_connect_mock.side_effect = Exception("TEST ERROR")
self.websocket = MexcWebSocketAdaptor(throttler)
with self.assertRaisesRegex(Exception, "TEST ERROR"):
self.async_run_with_timeout(self.websocket.connect())
self.assertTrue(self._is_logged("ERROR", "Websocket error: 'TEST ERROR'"))
def test_disconnect(self):
ws = AsyncMock()
self.websocket._websocket = ws
self.async_run_with_timeout(self.websocket.disconnect())
self.assertEqual(1, ws.close.await_count)
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_subscribe_to_order_book_streams_raises_cancelled_exception(self, ws_connect_mock):
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
self.async_run_with_timeout(self.websocket.connect())
ws_connect_mock.return_value.send_str.side_effect = asyncio.CancelledError
with self.assertRaises(asyncio.CancelledError):
self.async_run_with_timeout(self.websocket.subscribe_to_order_book_streams(self.trading_pairs))
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_subscribe_to_order_book_streams_logs_exception(self, ws_connect_mock):
ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
self.async_run_with_timeout(self.websocket.connect())
ws_connect_mock.return_value.send_str.side_effect = Exception("TEST ERROR")
with self.assertRaisesRegex(Exception, "TEST ERROR"):
self.async_run_with_timeout(self.websocket.subscribe_to_order_book_streams(self.trading_pairs))
self.assertTrue(self._is_logged(
"ERROR", "Unexpected error occurred subscribing to order book trading and delta streams..."
))
| null |
1,514 |
from typing import Optional

from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError
from django.http import (
Http404,
HttpRequest,
HttpResponse,
HttpResponseNotAllowed,
HttpResponseServerError,
)
from django.shortcuts import get_object_or_404, render
from django.utils.datastructures import MultiValueDictKeyError
from cl.favorites.forms import NoteForm
from cl.favorites.models import DocketTag, Note, UserTag
from cl.lib.http import is_ajax
from cl.lib.view_utils import increment_view_count
def get_note(request: HttpRequest) -> Optional[Note]:
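    """Resolve the Note, if any, that the user has for the object referenced
    in the POST data.

    Returns an unsaved Note() when no note exists yet for that object, or
    None when no valid object id was provided.
    """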
audio_pk = request.POST.get("audio_id")
cluster_pk = request.POST.get("cluster_id")
docket_pk = request.POST.get("docket_id")
recap_doc_pk = request.POST.get("recap_doc_id")
if audio_pk and audio_pk != "undefined":
try:
note = Note.objects.get(audio_id=audio_pk, user=request.user)
except ObjectDoesNotExist:
note = Note()
elif cluster_pk and cluster_pk != "undefined":
try:
note = Note.objects.get(cluster_id=cluster_pk, user=request.user)
except ObjectDoesNotExist:
note = Note()
elif docket_pk and docket_pk != "undefined":
try:
note = Note.objects.get(docket_id=docket_pk, user=request.user)
except ObjectDoesNotExist:
note = Note()
elif recap_doc_pk and recap_doc_pk != "undefined":
try:
note = Note.objects.get(
recap_doc_id=recap_doc_pk, user=request.user
)
except ObjectDoesNotExist:
note = Note()
else:
note = None
return note
@login_required
def METHOD_NAME(request: HttpRequest) -> HttpResponse:
"""Uses ajax to save or update a note.
Receives a request as an argument, and then uses that plus POST data to
create or update a note in the database for a specific user. If the
user already has a note for the document, it updates the note with the
new information. If not, it creates a new note.
"""
if is_ajax(request):
note = get_note(request)
if note is None:
return HttpResponseServerError(
"Unknown document, audio, docket or recap document id."
)
f = NoteForm(request.POST, instance=note)
if f.is_valid():
new_note = f.save(commit=False)
new_note.user = request.user
try:
new_note.save()
except IntegrityError:
# User already has this note.
return HttpResponse("It worked")
else:
# Validation errors fail silently. Probably could be better.
return HttpResponseServerError("Failure. Form invalid")
return HttpResponse("It worked")
else:
return HttpResponseNotAllowed(
permitted_methods={"POST"}, content="Not an ajax request."
)
@login_required
def delete_note(request: HttpRequest) -> HttpResponse:
"""Delete a user's note
Deletes a note for a user using an ajax call and post data.
"""
if is_ajax(request):
note = get_note(request)
if note is None:
return HttpResponseServerError(
"Unknown document, audio, docket, or recap document id."
)
note.delete()
try:
if request.POST["message"] == "True":
# used on the profile page. True is a string, not a bool.
messages.add_message(
request,
messages.SUCCESS,
"Your note was deleted successfully.",
)
except MultiValueDictKeyError:
# This happens if message isn't set.
pass
return HttpResponse("It worked.")
else:
return HttpResponseNotAllowed(
permitted_methods=["POST"], content="Not an ajax request."
)
def view_tag(request, username, tag_name):
tag = get_object_or_404(UserTag, name=tag_name, user__username=username)
increment_view_count(tag, request)
if tag.published is False and tag.user != request.user:
# They don't even get to see if it exists.
raise Http404("This tag does not exist")
# Calculate the total tag count (as we add more types of taggables, add
# them here).
enhanced_dockets = tag.dockets.all().order_by("date_filed")
total_tag_count = len(enhanced_dockets)
for docket in enhanced_dockets:
docket.association_id = DocketTag.objects.get(
docket=docket, tag=tag
).pk
requested_user = get_object_or_404(User, username=username)
is_page_owner = request.user == requested_user
return render(
request,
"tag.html",
{
"tag": tag,
"dockets": enhanced_dockets,
"total_tag_count": total_tag_count,
"private": False,
"is_page_owner": is_page_owner,
},
)
def view_tags(request, username):
"""Show the user their tags if they're looking at their own, or show the
public tags of somebody else.
"""
requested_user = get_object_or_404(User, username=username)
is_page_owner = request.user == requested_user
return render(
request,
"tag_list.html",
{
"requested_user": requested_user,
"is_page_owner": is_page_owner,
"private": False,
},
)
| null |
1,515 |
# coding=utf-8
# Copyright 2018-2023 EvaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any
import pandas as pd
from evadb.catalog.models.function_catalog import FunctionCatalogEntry
from evadb.functions.abstract.abstract_function import AbstractFunction
from evadb.functions.gpu_compatible import GPUCompatible
from evadb.utils.generic_utils import try_to_import_transformers
class AbstractHFFunction(AbstractFunction, GPUCompatible):
"""
An abstract class for all HuggingFace models.
    This is implemented using the pipeline API from HuggingFace. pipeline is an
    easy way to use a HuggingFace model for inference. In EvaDB, for simplicity,
    we require users to mention the task they want to perform. A HuggingFace
    task is different from a (PyTorch) model: there are a large number of models
    on the HuggingFace hub that can be used for a particular task. The user can
    specify the model, or a default model will be used.
Refer to https://huggingface.co/transformers/main_classes/pipelines.html for more details
on pipelines.
"""
@property
def METHOD_NAME(self) -> str:
return "GenericHuggingfaceModel"
def __init__(
self, function_obj: FunctionCatalogEntry, device: int = -1, *args, **kwargs
):
super().__init__(*args, **kwargs)
pipeline_args = self.default_pipeline_args
for entry in function_obj.metadata:
if entry.value.isnumeric():
pipeline_args[entry.key] = int(entry.value)
else:
pipeline_args[entry.key] = entry.value
self.pipeline_args = pipeline_args
try_to_import_transformers()
from transformers import pipeline
self.hf_function_obj = pipeline(**pipeline_args, device=device)
def setup(self, *args, **kwargs) -> None:
super().setup(*args, **kwargs)
@property
def default_pipeline_args(self) -> dict:
"""
Arguments that will be passed to the pipeline by default.
User provided arguments override the default arguments
"""
return {}
def input_formatter(self, inputs: Any):
"""
Function that formats input from EvaDB format to HuggingFace format for that particular HF model
"""
return inputs
def output_formatter(self, outputs: Any):
"""
Function that formats output from HuggingFace format to EvaDB format (pandas dataframe)
The output can be in various formats, depending on the model. For example:
{'text' : 'transcript from video'}
[[{'score': 0.25, 'label': 'bridge'}, {'score': 0.50, 'label': 'car'}]]
"""
if isinstance(outputs, dict):
return pd.DataFrame(outputs, index=[0])
# PERF: Can improve performance by avoiding redundant list creation
result_list = []
if outputs != [[]]:
for row_output in outputs:
# account for the case where we have more than one prediction for an input
if isinstance(row_output, list):
row_output = {
k: [dic[k] for dic in row_output] for k in row_output[0]
}
result_list.append(row_output)
result_df = pd.DataFrame(result_list)
return result_df
def forward(self, inputs, *args, **kwargs) -> pd.DataFrame:
hf_input = self.input_formatter(inputs)
hf_output = self.hf_function_obj(hf_input, *args, **kwargs)
evadb_output = self.output_formatter(hf_output)
return evadb_output
def to_device(self, device: str) -> GPUCompatible:
try_to_import_transformers()
from transformers import pipeline
self.hf_function_obj = pipeline(**self.pipeline_args, device=device)
return self
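

# Hedged illustration (not part of the original module): a minimal sketch of a
# concrete subclass. The "text-classification" task name is an assumption; any
# valid HuggingFace pipeline task would be wired up the same way, and
# user-provided metadata entries still override these defaults.
class ExampleTextClassifier(AbstractHFFunction):
    @property
    def default_pipeline_args(self) -> dict:
        # Passed to transformers.pipeline() unless overridden by metadata.
        return {"task": "text-classification"}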
| null |
1,516 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdklive.endpoint import endpoint_data
class AddShowIntoShowListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'live', '2016-11-01', 'AddShowIntoShowList','live')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_showLists(self): # RepeatList
return self.get_query_params().get('showList')
def set_showLists(self, showList): # RepeatList
for depth1 in range(len(showList)):
if showList[depth1].get('showName') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.showName', showList[depth1].get('showName'))
if showList[depth1].get('repeatTimes') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.repeatTimes', showList[depth1].get('repeatTimes'))
if showList[depth1].get('resourceType') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.resourceType', showList[depth1].get('resourceType'))
if showList[depth1].get('resourceUrl') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.resourceUrl', showList[depth1].get('resourceUrl'))
if showList[depth1].get('liveInputType') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.liveInputType', showList[depth1].get('liveInputType'))
if showList[depth1].get('duration') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.duration', showList[depth1].get('duration'))
if showList[depth1].get('resourceId') is not None:
self.add_query_param('showList.' + str(depth1 + 1) + '.resourceId', showList[depth1].get('resourceId'))
def get_LiveInputType(self): # Integer
return self.get_query_params().get('LiveInputType')
def set_LiveInputType(self, LiveInputType): # Integer
self.add_query_param('LiveInputType', LiveInputType)
def get_isBatchMode(self): # Boolean
return self.get_query_params().get('isBatchMode')
def set_isBatchMode(self, isBatchMode): # Boolean
self.add_query_param('isBatchMode', isBatchMode)
def get_Duration(self): # Long
return self.get_query_params().get('Duration')
def set_Duration(self, Duration): # Long
self.add_query_param('Duration', Duration)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('RepeatTimes')
def set_RepeatTimes(self, RepeatTimes): # Integer
self.add_query_param('RepeatTimes', RepeatTimes)
def get_ShowName(self): # String
return self.get_query_params().get('ShowName')
def set_ShowName(self, ShowName): # String
self.add_query_param('ShowName', ShowName)
def get_ResourceId(self): # String
return self.get_query_params().get('ResourceId')
def set_ResourceId(self, ResourceId): # String
self.add_query_param('ResourceId', ResourceId)
def get_CasterId(self): # String
return self.get_query_params().get('CasterId')
def set_CasterId(self, CasterId): # String
self.add_query_param('CasterId', CasterId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ResourceType(self): # String
return self.get_query_params().get('ResourceType')
def set_ResourceType(self, ResourceType): # String
self.add_query_param('ResourceType', ResourceType)
def get_ResourceUrl(self): # String
return self.get_query_params().get('ResourceUrl')
def set_ResourceUrl(self, ResourceUrl): # String
self.add_query_param('ResourceUrl', ResourceUrl)
def get_Spot(self): # Integer
return self.get_query_params().get('Spot')
def set_Spot(self, Spot): # Integer
self.add_query_param('Spot', Spot)
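

# Hedged usage sketch (an illustrative addition, not part of the original SDK
# file); the ids and show fields below are placeholder values. Building the
# request needs no network access, so this demonstrates how set_showLists
# flattens the list into indexed query params such as 'showList.1.showName'.
if __name__ == '__main__':
    request = AddShowIntoShowListRequest()
    request.set_CasterId('example-caster-id')
    request.set_showLists([
        {'showName': 'opening', 'resourceType': 'vod', 'repeatTimes': 1},
    ])
    print(request.get_query_params())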
| null |
1,517 |
# Copyright (c) 2022 The Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This configuration script shows an example of how to restore a checkpoint that
was taken for SimPoints in the
configs/example/gem5_library/checkpoints/simpoints-se-checkpoint.py.
The SimPoints, SimPoints interval length, and the warmup instruction length
are passed into the SimPoint module, so the SimPoint object will store and
calculate the warmup instruction length for each SimPoint based on the
available instructions before reaching the start of the SimPoint. With the
Simulator module, an exit event will be generated to stop the simulation when
the warmup session ends and when the SimPoints interval ends.
This script builds a more complex board than the board used for taking the
checkpoint.
Usage
-----
```
scons build/X86/gem5.opt
./build/X86/gem5.opt \
configs/example/gem5_library/checkpoints/simpoints-se-checkpoint.py
./build/X86/gem5.opt \
configs/example/gem5_library/checkpoints/simpoints-se-restore.py
```
"""
from gem5.simulate.exit_event import ExitEvent
from gem5.simulate.simulator import Simulator
from gem5.utils.requires import requires
from gem5.components.cachehierarchies.classic.private_l1_private_l2_cache_hierarchy import (
PrivateL1PrivateL2CacheHierarchy,
)
from gem5.components.boards.simple_board import SimpleBoard
from gem5.components.memory import DualChannelDDR4_2400
from gem5.components.processors.simple_processor import SimpleProcessor
from gem5.components.processors.cpu_types import CPUTypes
from gem5.isas import ISA
from gem5.resources.resource import SimpointResource, obtain_resource
from gem5.resources.workload import Workload
from pathlib import Path
from m5.stats import reset, dump
requires(isa_required=ISA.X86)
# The cache hierarchy can be different from the cache hierarchy used in taking
# the checkpoints
cache_hierarchy = PrivateL1PrivateL2CacheHierarchy(
l1d_size="32kB",
l1i_size="32kB",
l2_size="256kB",
)
# The memory structure can be different from the memory structure used in
# taking the checkpoints, but the size of the memory must be maintained
memory = DualChannelDDR4_2400(size="2GB")
processor = SimpleProcessor(
cpu_type=CPUTypes.TIMING,
isa=ISA.X86,
num_cores=1,
)
board = SimpleBoard(
clk_freq="3GHz",
processor=processor,
memory=memory,
cache_hierarchy=cache_hierarchy,
)
# Here we obtain the workload from gem5 resources, the checkpoint in this
# workload was generated from
# `configs/example/gem5_library/checkpoints/simpoints-se-checkpoint.py`.
# board.set_workload(
# Workload("x86-print-this-15000-with-simpoints-and-checkpoint")
# )
# **Note: This has been removed until we update the resources.json file to
# encapsulate the new Simpoint format.
# Below we set the simpoint manually.
#
# This loads a single checkpoint as an example of using simpoints to simulate
# the function of a single simpoint region.
board.set_se_simpoint_workload(
binary=obtain_resource("x86-print-this"),
arguments=["print this", 15000],
simpoint=SimpointResource(
simpoint_interval=1000000,
simpoint_list=[2, 3, 4, 15],
weight_list=[0.1, 0.2, 0.4, 0.3],
warmup_interval=1000000,
),
checkpoint=obtain_resource("simpoints-se-checkpoints-v23-0-v1"),
)
def METHOD_NAME():
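    """Exit-event handler: on the first MAX_INSTS exit (end of warmup) schedule
    the SimPoint interval, dump and reset stats, and continue simulating; on
    the second exit (end of the SimPoint interval) stop the simulation."""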
warmed_up = False
while True:
if warmed_up:
print("end of SimPoint interval")
yield True
else:
print("end of warmup, starting to simulate SimPoint")
warmed_up = True
# Schedule a MAX_INSTS exit event during the simulation
simulator.schedule_max_insts(
board.get_simpoint().get_simpoint_interval()
)
dump()
reset()
yield False
simulator = Simulator(
board=board,
on_exit_event={ExitEvent.MAX_INSTS: METHOD_NAME()},
)
# Schedule a MAX_INSTS exit event before the simulation begins. The
# schedule_max_insts function only schedules an event when the instruction
# length is greater than 0.
# Here, it schedules an exit event for the first SimPoint's warmup
# instructions.
simulator.schedule_max_insts(board.get_simpoint().get_warmup_list()[0])
simulator.run()
| null |
1,518 |
#!/usr/bin/env python3
#
# Copyright (c) 2015 - 2023, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
import unittest
from unittest import mock
mock_libgeopm = mock.Mock()
with mock.patch('cffi.FFI.dlopen', return_value=mock_libgeopm):
from geopmpy.endpoint import Endpoint
class TestEndpoint(unittest.TestCase):
def setUp(self):
        mock_libgeopm.reset_mock()
mock_libgeopm.geopm_endpoint_create.return_value = 0
mock_libgeopm.geopm_endpoint_destroy.return_value = 0
mock_libgeopm.geopm_endpoint_open.return_value = 0
mock_libgeopm.geopm_endpoint_close.return_value = 0
self._endpoint = Endpoint('test_endpoint')
self.test_agent_name = 'my_agent'
def mock_agent(endpoint, name_max, name_cstr):
for idx, char in enumerate(self.test_agent_name):
name_cstr[idx] = char.encode()
name_cstr[len(self.test_agent_name)] = b'\x00'
return 0
mock_libgeopm.geopm_endpoint_agent.side_effect = mock_agent
def test_endpoint_creation_destruction(self):
self.assertEqual("Endpoint(name='test_endpoint')", repr(self._endpoint))
initial_destroy_count = mock_libgeopm.geopm_endpoint_destroy.call_count
del self._endpoint
self.assertEqual(initial_destroy_count + 1, mock_libgeopm.geopm_endpoint_destroy.call_count)
mock_libgeopm.geopm_endpoint_create.return_value = 1
self.assertRaises(RuntimeError, Endpoint, 'test_endpoint')
def test_endpoint_entry_exit(self):
initial_open_count = mock_libgeopm.geopm_endpoint_open.call_count
initial_close_count = mock_libgeopm.geopm_endpoint_close.call_count
with self._endpoint:
self.assertEqual(initial_open_count + 1, mock_libgeopm.geopm_endpoint_open.call_count)
self.assertEqual(initial_close_count, mock_libgeopm.geopm_endpoint_close.call_count)
self.assertEqual(initial_close_count + 1, mock_libgeopm.geopm_endpoint_close.call_count)
def test_endpoint_agent_name(self):
self.assertEqual(self.test_agent_name, self._endpoint.agent())
def test_wait_for_agent_attach(self):
mock_libgeopm.geopm_endpoint_wait_for_agent_attach.return_value = 1
self.assertRaises(RuntimeError, self._endpoint.wait_for_agent_attach, 123.4)
mock_libgeopm.geopm_endpoint_wait_for_agent_attach.return_value = 0
self._endpoint.wait_for_agent_attach(123.4)
def test_stop_wait_loop(self):
mock_libgeopm.geopm_endpoint_wait_for_agent_stop_wait_loop.return_value = 1
self.assertRaises(RuntimeError, self._endpoint.stop_wait_loop)
mock_libgeopm.geopm_endpoint_wait_for_agent_stop_wait_loop.return_value = 0
self._endpoint.stop_wait_loop()
def test_reset_wait_loop(self):
mock_libgeopm.geopm_endpoint_wait_for_agent_reset_wait_loop.return_value = 1
self.assertRaises(RuntimeError, self._endpoint.reset_wait_loop)
mock_libgeopm.geopm_endpoint_wait_for_agent_reset_wait_loop.return_value = 0
self._endpoint.reset_wait_loop()
def test_endpoint_profile_name(self):
test_profile_name = 'my agent'
def mock_profile_name(endpoint, name_max, name_cstr):
for idx, char in enumerate(test_profile_name):
name_cstr[idx] = char.encode()
name_cstr[len(test_profile_name)] = b'\x00'
return 0
mock_libgeopm.geopm_endpoint_profile_name.side_effect = mock_profile_name
self.assertEqual(test_profile_name, self._endpoint.profile_name())
def test_endpoint_nodes(self):
test_node_names = ['node 1', 'node 2']
def mock_num_node(endpoint, num_node_p):
num_node_p[0] = len(test_node_names)
return 0
mock_libgeopm.geopm_endpoint_num_node.side_effect = mock_num_node
def mock_node_name(endpoint, node_idx, name_max, name_cstr):
for idx, char in enumerate(test_node_names[node_idx]):
name_cstr[idx] = char.encode()
name_cstr[len(test_node_names[node_idx])] = b'\x00'
return 0
mock_libgeopm.geopm_endpoint_node_name.side_effect = mock_node_name
self.assertEqual(test_node_names, self._endpoint.nodes())
def test_write_policy(self):
test_policy = {'p0': 0, 'p1': 1}
mock_libgeopm.geopm_endpoint_write_policy.return_value = 0
with mock.patch('geopmpy.agent.policy_names') as policy_mock:
policy_mock.return_value = list(test_policy)
self._endpoint.write_policy(test_policy)
args = mock_libgeopm.geopm_endpoint_write_policy.call_args[0]
_, num_policy, policy_array = args
self.assertEqual(num_policy, len(test_policy))
self.assertEqual(policy_array[0], 0)
self.assertEqual(policy_array[1], 1)
def METHOD_NAME(self):
test_sample = {'s0': 0, 's1': 1}
test_age = 1.1
def mock_read_sample(endpoint, num_sample, sample_array, sample_age_p):
sample_array[0] = test_sample['s0']
sample_array[1] = test_sample['s1']
sample_age_p[0] = test_age
return 0
mock_libgeopm.geopm_endpoint_read_sample.side_effect = mock_read_sample
with mock.patch('geopmpy.agent.sample_names') as sample_mock:
sample_mock.return_value = list(test_sample)
self.assertEqual((test_age, test_sample),
self._endpoint.read_sample())
if __name__ == '__main__':
unittest.main()
| null |
1,519 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Dict, List
import torch
from overrides import overrides
from torch import nn
from archai.common.common import get_conf
from archai.common.ordered_dict_logger import get_global_logger
from archai.supergraph.algos.divnas.analyse_activations import compute_brute_force_sol
from archai.supergraph.algos.divnas.divnas_cell import Divnas_Cell
from archai.supergraph.algos.divnas.divop import DivOp
from archai.supergraph.datasets.data import get_data
from archai.supergraph.nas.cell import Cell
from archai.supergraph.nas.finalizers import Finalizers
from archai.supergraph.nas.model import Model
from archai.supergraph.nas.model_desc import CellDesc, EdgeDesc, ModelDesc, NodeDesc
logger = get_global_logger()
class DivnasFinalizers(Finalizers):
@overrides
def METHOD_NAME(self, model: Model, to_cpu=True, restore_device=True) -> ModelDesc:
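        """Collect per-node activation covariances with a single evaluation
        pass over the search data, then delegate to the base finalizer."""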
logger.pushd('finalize')
# get config and train data loader
# TODO: confirm this is correct in case you get silent bugs
conf = get_conf()
conf_loader = conf['nas']['search']['loader']
data_loaders = get_data(conf_loader)
assert data_loaders.train_dl is not None
# wrap all cells in the model
self._divnas_cells:Dict[int, Divnas_Cell] = {}
for _, cell in enumerate(model.cells):
divnas_cell = Divnas_Cell(cell)
self._divnas_cells[id(cell)] = divnas_cell
# go through all edges in the DAG and if they are of divop
# type then set them to collect activations
sigma = conf['nas']['search']['divnas']['sigma']
for _, dcell in enumerate(self._divnas_cells.values()):
dcell.collect_activations(DivOp, sigma)
# now we need to run one evaluation epoch to collect activations
# we do it on cpu otherwise we might run into memory issues
# later we can redo the whole logic in pytorch itself
# at the end of this each node in a cell will have the covariance
# matrix of all incoming edges' ops
model = model.cpu()
model.eval()
with torch.no_grad():
for _ in range(1):
for _, (x, _) in enumerate(data_loaders.train_dl):
_, _ = model(x), None
# now you can go through and update the
# node covariances in every cell
for dcell in self._divnas_cells.values():
dcell.update_covs()
logger.popd()
return super().METHOD_NAME(model, to_cpu, restore_device)
@overrides
def finalize_cell(self, cell:Cell, cell_index:int,
model_desc:ModelDesc, *args, **kwargs)->CellDesc:
# first finalize each node, we will need to recreate node desc with final version
max_final_edges = model_desc.max_final_edges
node_descs:List[NodeDesc] = []
dcell = self._divnas_cells[id(cell)]
assert len(cell.dag) == len(list(dcell.node_covs.values()))
for i,node in enumerate(cell.dag):
node_cov = dcell.node_covs[id(node)]
node_desc = self.finalize_node(node, i, cell.desc.nodes()[i],
max_final_edges, node_cov)
node_descs.append(node_desc)
# (optional) clear out all activation collection information
dcell.clear_collect_activations()
desc = cell.desc
finalized = CellDesc(
id = desc.id, cell_type=desc.cell_type, conf_cell=desc.conf_cell,
stems=[cell.s0_op.finalize()[0], cell.s1_op.finalize()[0]],
stem_shapes=desc.stem_shapes,
nodes = node_descs, node_shapes=desc.node_shapes,
post_op=cell.post_op.finalize()[0],
out_shape=desc.out_shape,
trainables_from = desc.trainables_from
)
return finalized
@overrides
def finalize_node(self, node:nn.ModuleList, node_index:int,
node_desc:NodeDesc, max_final_edges:int,
cov, *args, **kwargs)->NodeDesc:
# node is a list of edges
assert len(node) >= max_final_edges
# covariance matrix shape must be square 2-D
assert len(cov.shape) == 2
assert cov.shape[0] == cov.shape[1]
        # the number of primitive operators has to be greater
        # than or equal to the maximum number of final edges
        # allowed
assert cov.shape[0] >= max_final_edges
# get total number of ops incoming to this node
num_ops = sum([edge._op.num_valid_div_ops for edge in node])
# and collect some bookkeeping indices
edge_num_and_op_ind = []
for j, edge in enumerate(node):
if type(edge._op) == DivOp:
for k in range(edge._op.num_valid_div_ops):
edge_num_and_op_ind.append((j, k))
assert len(edge_num_and_op_ind) == num_ops
# run brute force set selection algorithm
max_subset, max_mi = compute_brute_force_sol(cov, max_final_edges)
# convert the cov indices to edge descs
selected_edges = []
for ind in max_subset:
edge_ind, op_ind = edge_num_and_op_ind[ind]
op_desc = node[edge_ind]._op.get_valid_op_desc(op_ind)
new_edge = EdgeDesc(op_desc, node[edge_ind].input_ids)
selected_edges.append(new_edge)
# for edge in selected_edges:
# self.finalize_edge(edge)
return NodeDesc(selected_edges, node_desc.conv_params)
| null |
1,520 |
# Copyright 2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Code for the MAML algorithm and network definitions. """
from __future__ import print_function
import numpy as np
import sys
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF
import nnabla.initializer as I
def net(input, label, bn_batch_stat, args, init_params=None):
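    """Build the forward graph and return (loss, accuracy) variables.

    bn_batch_stat selects whether batch normalization uses the current batch
    statistics (training/adaptation) or the recorded running statistics
    (evaluation).
    """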
output = forward_conv(input, bn_batch_stat, args, init_params)
loss = loss_func(output, label)
output2 = output.get_unlinked_variable(need_grad=False)
accuracy = 1.0 - F.mean(F.top_n_error(output2, label, n=1))
return (loss, accuracy)
# Network construction functions
def forward_conv(inp, bn_batch_stat, args, init_params, activation=F.relu):
hidden1 = conv_block(inp, 'layer1', bn_batch_stat,
activation, args, init_params)
hidden2 = conv_block(hidden1, 'layer2', bn_batch_stat,
activation, args, init_params)
hidden3 = conv_block(hidden2, 'layer3', bn_batch_stat,
activation, args, init_params)
hidden4 = conv_block(hidden3, 'layer4', bn_batch_stat,
activation, args, init_params)
if args.datasource != 'omniglot' or args.method != 'maml':
# hidden4 = F.reshape(hidden4, (hidden4.d.shape[0], -1), inplace=False)
pass
else:
hidden4 = F.mean(hidden4, (2, 3))
if init_params is None or 'layer5/affine/W' not in init_params:
output = PF.affine(hidden4, args.num_classes, name='layer5')
else:
output = F.affine(
hidden4, init_params['layer5/affine/W'], init_params['layer5/affine/b'])
return output
def conv_block(inp, layer_name, bn_batch_stat, activation, args, init_params):
""" Perform, conv, batch norm, nonlinearity, and max pool """
k = 3
stride, no_stride = (2, 2), (1, 1)
pad = (1, 1)
if init_params is None or layer_name + '/conv/W' not in init_params:
if args.max_pool:
conv_output = PF.convolution(
inp, args.num_filters, (k, k), pad=pad, stride=no_stride, name=layer_name)
else:
conv_output = PF.convolution(
inp, args.num_filters, (k, k), pad=pad, stride=stride, name=layer_name)
normed = METHOD_NAME(conv_output, layer_name,
bn_batch_stat, activation, args, init_params)
else:
if args.max_pool:
conv_output = F.convolution(
inp, init_params[layer_name + '/conv/W'], init_params[layer_name + '/conv/b'], pad=pad, stride=no_stride)
else:
conv_output = F.convolution(
inp, init_params[layer_name + '/conv/W'], init_params[layer_name + '/conv/b'], pad=pad, stride=stride)
normed = METHOD_NAME(conv_output, layer_name,
bn_batch_stat, activation, args, init_params)
if args.max_pool:
normed = F.max_pooling(normed, stride, stride=stride)
return normed
def METHOD_NAME(inp, layer_name, bn_batch_stat, activation, args, init_params):
if args.norm == 'batch_norm':
if init_params is None:
inp = PF.batch_normalization(
inp, batch_stat=bn_batch_stat, name=layer_name)
else:
inp = F.batch_normalization(inp, init_params[layer_name + '/bn/beta'], init_params[layer_name + '/bn/gamma'],
mean=None, variance=None, batch_stat=bn_batch_stat)
if activation is not None:
return activation(inp)
else:
return inp
def loss_func(pred, label):
return F.mean(F.softmax_cross_entropy(pred, label))
| null |
1,521 |
from __future__ import print_function
from acq4.devices.Device import Device, DeviceTask, TaskGui
from acq4.util import Qt
from six.moves import range
class Screen(Device):
"""
Device used for screen output.
Currently, this is only used to blank the screen temporarily to avoid
contaminating sensitive imaging operations. In the future, this device may
be extended to provide visual stimulation (perhaps via psychopy)
"""
sigBlankScreen = Qt.Signal(object, object) # bool blank/unblank, QWaitCondition
def __init__(self, dm, config, name):
Device.__init__(self, dm, config, name)
dm.declareInterface(name, ['screen'], self)
self.blanker = ScreenBlanker()
self.sigBlankScreen.connect(self.blankRequested, Qt.Qt.QueuedConnection)
def taskInterface(self, taskRunner):
return ScreenTaskGui(self, taskRunner)
def createTask(self, cmd, parentTask):
return ScreenTask(self, cmd, parentTask)
def blankScreen(self, blank=True, timeout=10.):
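        """Blank (or unblank) all screens.

        Safe to call from any thread: calls from the GUI thread run directly,
        while calls from other threads are marshalled to the GUI thread and
        block until handled or until `timeout` seconds elapse.
        """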
isGuiThread = Qt.QThread.currentThread() == Qt.QCoreApplication.instance().thread()
if isGuiThread:
if blank:
self.blanker.blank()
else:
self.blanker.unBlank()
else:
mutex = Qt.QMutex()
mutex.lock()
waitCond = Qt.QWaitCondition()
self.sigBlankScreen.emit(blank, waitCond)
if not waitCond.wait(mutex, int(timeout*1000)):
raise Exception("Screen blanker threaded request timed out.")
def METHOD_NAME(self):
self.blankScreen(False)
def blankRequested(self, blank, waitCond):
try:
if blank:
self.blankScreen()
else:
self.METHOD_NAME()
finally:
waitCond.wakeAll()
class Black(Qt.QWidget):
""" make a black rectangle to fill screen when "blanking" """
def paintEvent(self, event):
p = Qt.QPainter(self)
brush = Qt.QBrush(Qt.QColor(0,0,0))
p.fillRect(self.rect(), brush)
p.end()
class ScreenBlanker:
"""
Perform the blanking on ALL screens that we can detect.
This is so that extraneous light does not leak into the
detector during acquisition.
"""
def __init__(self):
self.widgets = []
def blank(self):
d = Qt.QApplication.desktop()
for i in range(d.screenCount()): # look for all screens
w = Black()
self.widgets.append(w) # make a black widget
sg = d.screenGeometry(i) # get the screen size
w.move(sg.x(), sg.y()) # put the widget there
            w.showFullScreen()                  # cover the entire screen
            Qt.QApplication.processEvents()     # force the blanking widget to paint now
def __exit__(self, *args):
pass
#for w in self.widgets:
#w.hide() # just take them away
#self.widgets = []
def unBlank(self):
for w in self.widgets:
w.hide() # just take them away
self.widgets = []
class ScreenTask(DeviceTask):
def __init__(self, dev, cmd, parentTask):
DeviceTask.__init__(self, dev, cmd, parentTask)
self.cmd = cmd
def configure(self):
pass
def start(self):
## possibly nothing required here, DAQ will start recording.
if self.cmd['blank']:
self.dev.blankScreen()
def stop(self, abort=False):
self.dev.METHOD_NAME()
class ScreenTaskGui(TaskGui):
def __init__(self, dev, taskRunner):
TaskGui.__init__(self, dev, taskRunner)
self.layout = Qt.QGridLayout()
self.setLayout(self.layout)
self.blankCheck = Qt.QCheckBox("Blank Screen")
self.layout.addWidget(self.blankCheck)
def saveState(self):
return {'blank': self.blankCheck.isChecked()}
def restoreState(self, state):
self.blankCheck.setChecked(state['blank'])
def listSequence(self):
return []
def generateTask(self, params=None):
return self.saveState()
| null |
1,522 |
# Copyright 2021 Akretion (http://www.akretion.com).
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.base_rest import restapi
from odoo.addons.component.core import Component
class ExportSettingsService(Component):
"""Shopinvader service to expose allowed settings"""
_inherit = [
"base.shopinvader.service",
]
_name = "shopinvader.settings.service"
_usage = "settings"
_description = __doc__
def _get_all_schema(self):
return {
"countries": {
"type": "list",
"required": True,
"nullable": False,
"schema": {
"type": "dict",
"schema": self._get_countries_schema(),
},
},
"titles": {
"type": "list",
"required": True,
"nullable": False,
"schema": {
"type": "dict",
"schema": self._get_titles_schema(),
},
},
"industries": {
"type": "list",
"required": True,
"nullable": False,
"schema": {
"type": "dict",
"schema": self._get_industries_schema(),
},
},
"currencies": {
"type": "list",
"required": True,
"nullable": False,
"schema": {
"type": "dict",
"schema": self._get_currencies_schema(),
},
},
"languages": {
"type": "list",
"required": True,
"nullable": False,
"schema": {
"type": "dict",
"schema": self.METHOD_NAME(),
},
},
}
@restapi.method(
[(["/", "/all"], "GET")],
output_param=restapi.CerberusValidator("_get_all_schema"),
auth="public_or_default",
)
def get_all(self):
return self._get_all()
def _get_all(self):
return {
"countries": self._get_countries(),
"titles": self._get_titles(),
"industries": self._get_industries(),
"currencies": self._get_currencies(),
"languages": self._get_languages(),
}
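    # Hedged illustration (not from the original module; the values are made
    # up): a GET /settings/all response has the shape
    # {"countries": [{"name": "France", "code": "FR", "id": 1}, ...],
    #  "titles": [...], "industries": [...], "currencies": [...],
    #  "languages": [{"id": 1, "name": "English", "iso_code": "en_US"}, ...]}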
def _jsonify_fields_country(self):
return [
"name",
"code",
"id",
]
def _get_countries(self):
return self.shopinvader_backend.allowed_country_ids.jsonify(
self._jsonify_fields_country()
)
def _get_countries_schema(self):
return {
"name": {
"type": "string",
"required": True,
"nullable": False,
},
"code": {
"type": "string",
"required": True,
"nullable": False,
},
"id": {
"type": "integer",
"required": True,
"nullable": False,
},
}
@restapi.method(
[(["/countries"], "GET")],
output_param=restapi.CerberusListValidator(
"_get_countries_schema", unique_items=True
),
auth="public_or_default",
)
def countries(self):
return self._get_countries()
def _jsonify_fields_title(self):
return [
"id",
"name",
]
def _get_titles(self):
return self.shopinvader_backend.partner_title_ids.jsonify(
self._jsonify_fields_title()
)
def _get_titles_schema(self):
return {
"name": {
"type": "string",
"required": True,
"nullable": False,
},
"id": {
"type": "integer",
"required": True,
"nullable": False,
},
}
@restapi.method(
[(["/titles"], "GET")],
output_param=restapi.CerberusListValidator(
"_get_titles_schema", unique_items=True
),
auth="public_or_default",
)
def titles(self):
return self._get_titles()
def _jsonify_fields_industry(self):
return [
"id",
"name",
]
def _get_industries(self):
return self.shopinvader_backend.partner_industry_ids.jsonify(
self._jsonify_fields_industry()
)
def _get_industries_schema(self):
return {
"name": {
"type": "string",
"required": True,
"nullable": False,
},
"id": {
"type": "integer",
"required": True,
"nullable": False,
},
}
@restapi.method(
[(["/industries"], "GET")],
output_param=restapi.CerberusListValidator(
"_get_industries_schema", unique_items=True
),
auth="public_or_default",
)
def industries(self):
return self._get_industries()
def _jsonify_fields_currency(self):
return [
"id",
"name",
]
def _get_currencies(self):
return self.shopinvader_backend.currency_ids.jsonify(
self._jsonify_fields_currency()
)
def _get_currencies_schema(self):
return {
"name": {
"type": "string",
"required": True,
"nullable": False,
},
"id": {
"type": "integer",
"required": True,
"nullable": False,
},
}
@restapi.method(
[(["/currencies"], "GET")],
output_param=restapi.CerberusListValidator(
"_get_currencies_schema", unique_items=True
),
auth="public_or_default",
)
def currencies(self):
return self._get_currencies()
def _jsonify_fields_lang(self):
return [
"id",
"name",
"iso_code",
]
def _get_languages(self):
return self.shopinvader_backend.lang_ids.jsonify(self._jsonify_fields_lang())
def METHOD_NAME(self):
return {
"name": {
"type": "string",
"required": True,
"nullable": False,
},
"id": {
"type": "integer",
"required": True,
"nullable": False,
},
"iso_code": {
"type": "string",
"required": True,
"nullable": False,
},
}
@restapi.method(
[(["/languages"], "GET")],
output_param=restapi.CerberusListValidator(
"_get_languages_schema", unique_items=True
),
auth="public_or_default",
)
def languages(self):
return self._get_languages()
| null |
1,523 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkemr.endpoint import endpoint_data
class CreateFlowJobRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Emr', '2016-04-08', 'CreateFlowJob')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_RetryPolicy(self):
return self.get_query_params().get('RetryPolicy')
def set_RetryPolicy(self,RetryPolicy):
self.add_query_param('RetryPolicy',RetryPolicy)
def get_RunConf(self):
return self.get_body_params().get('RunConf')
def set_RunConf(self,RunConf):
self.add_body_params('RunConf', RunConf)
def get_Description(self):
return self.get_body_params().get('Description')
def set_Description(self,Description):
self.add_body_params('Description', Description)
def get_Type(self):
return self.get_query_params().get('Type')
def set_Type(self,Type):
self.add_query_param('Type',Type)
def get_ParamConf(self):
return self.get_body_params().get('ParamConf')
def set_ParamConf(self,ParamConf):
self.add_body_params('ParamConf', ParamConf)
def get_ResourceLists(self):
return self.get_body_params().get('ResourceList')
def set_ResourceLists(self, ResourceLists):
for depth1 in range(len(ResourceLists)):
if ResourceLists[depth1].get('Path') is not None:
self.add_body_params('ResourceList.' + str(depth1 + 1) + '.Path', ResourceLists[depth1].get('Path'))
if ResourceLists[depth1].get('Alias') is not None:
self.add_body_params('ResourceList.' + str(depth1 + 1) + '.Alias', ResourceLists[depth1].get('Alias'))
def get_FailAct(self):
return self.get_query_params().get('FailAct')
def set_FailAct(self,FailAct):
self.add_query_param('FailAct',FailAct)
def get_Mode(self):
return self.get_query_params().get('Mode')
def set_Mode(self,Mode):
self.add_query_param('Mode',Mode)
def get_MonitorConf(self):
return self.get_body_params().get('MonitorConf')
def set_MonitorConf(self,MonitorConf):
self.add_body_params('MonitorConf', MonitorConf)
def get_MaxRetry(self):
return self.get_query_params().get('MaxRetry')
def set_MaxRetry(self,MaxRetry):
self.add_query_param('MaxRetry',MaxRetry)
def get_AlertConf(self):
return self.get_query_params().get('AlertConf')
def set_AlertConf(self,AlertConf):
self.add_query_param('AlertConf',AlertConf)
def get_ProjectId(self):
return self.get_query_params().get('ProjectId')
def set_ProjectId(self,ProjectId):
self.add_query_param('ProjectId',ProjectId)
def get_EnvConf(self):
return self.get_body_params().get('EnvConf')
def set_EnvConf(self,EnvConf):
self.add_body_params('EnvConf', EnvConf)
def get_MaxRunningTimeSec(self):
return self.get_query_params().get('MaxRunningTimeSec')
def set_MaxRunningTimeSec(self,MaxRunningTimeSec):
self.add_query_param('MaxRunningTimeSec',MaxRunningTimeSec)
def get_ClusterId(self):
return self.get_query_params().get('ClusterId')
def set_ClusterId(self,ClusterId):
self.add_query_param('ClusterId',ClusterId)
def get_Params(self):
return self.get_body_params().get('Params')
def set_Params(self,Params):
self.add_body_params('Params', Params)
def get_CustomVariables(self):
return self.get_body_params().get('CustomVariables')
def set_CustomVariables(self,CustomVariables):
self.add_body_params('CustomVariables', CustomVariables)
def get_RetryInterval(self):
return self.get_query_params().get('RetryInterval')
def set_RetryInterval(self,RetryInterval):
self.add_query_param('RetryInterval',RetryInterval)
def METHOD_NAME(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_Adhoc(self):
return self.get_query_params().get('Adhoc')
def set_Adhoc(self,Adhoc):
self.add_query_param('Adhoc',Adhoc)
def get_ParentCategory(self):
return self.get_query_params().get('ParentCategory')
def set_ParentCategory(self,ParentCategory):
		self.add_query_param('ParentCategory',ParentCategory)
| null |
1,524 |
# /*#########################################################################
#
# The PyMca X-Ray Fluorescence Toolkit
#
# Copyright (c) 2004-2015 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
__author__ = "T. Vincent - ESRF Data Analysis"
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__doc__ = """
OpenGL/Qt QGLWidget backend.
"""
# import ######################################################################
from PyMca5.PyMcaGui.PyMcaQt import pyqtSignal, QSize, Qt
from PyMca5.PyMcaGui.PyMcaQt import QGLWidget, QGLContext
from PyMca5.PyMcaGui.PyMcaQt import QCursor
from ._OpenGLPlotCanvas import OpenGLPlotCanvas
from ._OpenGLPlotCanvas import CURSOR_DEFAULT, CURSOR_POINTING, \
CURSOR_SIZE_HOR, CURSOR_SIZE_VER, CURSOR_SIZE_ALL
from .GLSupport import setGLContextGetter
# OpenGLBackend ###############################################################
# Init GL context getter
setGLContextGetter(QGLContext.currentContext)
class OpenGLBackend(QGLWidget, OpenGLPlotCanvas):
_signalRedisplay = pyqtSignal() # PyQt binds it to instances
def __init__(self, parent=None, **kw):
QGLWidget.__init__(self, parent)
self._signalRedisplay.connect(self.update)
self.setAutoFillBackground(False)
self.setMouseTracking(True)
OpenGLPlotCanvas.__init__(self, parent, **kw)
def postRedisplay(self):
"""Thread-safe call to QWidget.update."""
self._signalRedisplay.emit()
# Mouse events #
_MOUSE_BTNS = {1: 'left', 2: 'right', 4: 'middle'}
def sizeHint(self):
return QSize(8 * 80, 6 * 80) # Mimic MatplotlibBackend
def mousePressEvent(self, event):
xPixel, yPixel = event.x(), event.y()
btn = self._MOUSE_BTNS[event.button()]
self.onMousePress(xPixel, yPixel, btn)
event.accept()
def mouseMoveEvent(self, event):
xPixel, yPixel = event.x(), event.y()
self.onMouseMove(xPixel, yPixel)
event.accept()
def mouseReleaseEvent(self, event):
xPixel, yPixel = event.x(), event.y()
btn = self._MOUSE_BTNS[event.button()]
self.onMouseRelease(xPixel, yPixel, btn)
event.accept()
def wheelEvent(self, event):
xPixel, yPixel = event.x(), event.y()
if hasattr(event, 'angleDelta'): # Qt 5
delta = event.angleDelta().y()
else: # Qt 4 support
delta = event.delta()
angleInDegrees = delta / 8.
self.onMouseWheel(xPixel, yPixel, angleInDegrees)
event.accept()
_CURSORS = {
CURSOR_DEFAULT: Qt.ArrowCursor,
CURSOR_POINTING: Qt.PointingHandCursor,
CURSOR_SIZE_HOR: Qt.SizeHorCursor,
CURSOR_SIZE_VER: Qt.SizeVerCursor,
CURSOR_SIZE_ALL: Qt.SizeAllCursor,
}
def setCursor(self, cursor=CURSOR_DEFAULT):
cursor = self._CURSORS[cursor]
super(OpenGLBackend, self).setCursor(QCursor(cursor))
# Widget
def METHOD_NAME(self):
return self
# PySide seems to need proxy methods
def initializeGL(self):
return OpenGLPlotCanvas.initializeGL(self)
def paintGL(self):
return OpenGLPlotCanvas.paintGL(self)
def resizeGL(self, width, height):
return OpenGLPlotCanvas.resizeGL(self, width, height)
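# Hedged usage sketch (the worker function below is illustrative, not part of
# PyMca's API): Qt widgets may only be touched from the GUI thread, so a
# background thread requests a repaint through postRedisplay(); the queued
# signal emission lets Qt deliver QWidget.update() on the GUI thread.
#
#     import threading
#     def refresh_from_worker(backend):
#         ...  # update plot data here
#         backend.postRedisplay()  # safe to call from any thread
#     threading.Thread(target=refresh_from_worker, args=(backend,)).start()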
# demo ########################################################################
if __name__ == "__main__":
import numpy as np
import sys
from ..Plot import Plot
try:
from PyQt4.QtGui import QApplication
except ImportError:
try:
from PyQt5.QtWidgets import QApplication
except ImportError:
from PySide.QtGui import QApplication
app = QApplication([])
w = Plot(None, backend=OpenGLBackend)
size = 4096
data = np.arange(float(size)*size, dtype=np.dtype(np.float32))
data.shape = size, size
colormap = {'name': 'gray', 'normalization': 'linear',
'autoscale': True, 'vmin': 0.0, 'vmax': 1.0,
'colors': 256}
w.addImage(data, legend="image 1",
xScale=(25, 1.0), yScale=(-1000, 1.0),
replot=False, colormap=colormap)
w.METHOD_NAME().show()
    sys.exit(app.exec_())
| null |
1,525 |
from typing import List, Union
import meerkat as mk
def make_test_df(
by: Union[str, List[str]],
ascending: Union[bool, List[bool]] = True,
):
"""Helper function, returns test df."""
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 1, 2]),
"pandas": mk.ScalarColumn([9, 8, 7]),
"numpy": mk.TorchTensorColumn([5, 4, 6]),
}
)
test = df.sort(by=by, ascending=ascending)
return test
def make_tiebreaker_test_df(
by: Union[str, List[str]],
ascending: Union[bool, List[bool]] = True,
):
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 2, 1]),
"pandas": mk.ScalarColumn([9, 7, 9]),
"numpy": mk.TorchTensorColumn([4, 4, 6]),
}
)
test = df.sort(by=by, ascending=ascending)
return test
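# Hedged illustration of the tiebreaker semantics exercised below (assuming
# meerkat's sort mirrors pandas' multi-column behavior): rows are ordered by
# the first column in `by`, later columns only break ties, and a list passed
# to `ascending` supplies one flag per column, e.g.:
#
#     df.sort(by=["numpy", "pandas"], ascending=[True, False])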
# flake8: noqa
######## SINGLE COLUMN TESTS ########
def test_sort_by_ascending_tensor_column():
"""Testing all columns after sorting by an ascending tensor column."""
test = make_test_df(by=["tensor"])
assert (
(test["tensor"] == mk.TorchTensorColumn([1, 2, 3])).all()
and (test["pandas"] == mk.ScalarColumn([8, 7, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 6, 5])).all()
)
def test_sort_by_ascending_pandas_on_pandas_column():
"""Testing all columns after sorting by an ascending pandas column."""
test = make_test_df(by=["pandas"])
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 1, 3])).all()
and (test["pandas"] == mk.ScalarColumn([7, 8, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([6, 4, 5])).all()
)
def test_sort_single_numpy_column_ascending():
"""Testing all columns after sorting by an ascending numpy column."""
test = make_test_df(by=["numpy"])
assert (
(test["tensor"] == mk.TorchTensorColumn([1, 3, 2])).all()
and (test["pandas"] == mk.ScalarColumn([8, 9, 7])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 5, 6])).all()
)
# flake8: noqa
######## SINGLE COLUMN TESTS DESCENDING ########
def test_sort_single_tensor_column_descending():
"""Testing all columns after sorting by a descending tensor column."""
test = make_test_df(by=["tensor"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 2, 1])).all()
and (test["pandas"] == mk.ScalarColumn([9, 7, 8])).all()
and (test["numpy"] == mk.TorchTensorColumn([5, 6, 4])).all()
)
def test_sort_single_pandas_column_descending():
"""Testing all columns after sorting by a descending pandas column."""
test = make_test_df(by=["pandas"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 1, 2])).all()
and (test["pandas"] == mk.ScalarColumn([9, 8, 7])).all()
and (test["numpy"] == mk.TorchTensorColumn([5, 4, 6])).all()
)
def test_sort_single_numpy_column_descending():
"""Testing all columns after sorting by a descending numpy column."""
test = make_test_df(by=["numpy"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 3, 1])).all()
and (test["pandas"] == mk.ScalarColumn([7, 9, 8])).all()
and (test["numpy"] == mk.TorchTensorColumn([6, 5, 4])).all()
)
######## MULTIPLE COLUMN TESTS ########
def test_sort_numpy_and_tensor_ascending():
"""# Testing all columns after sorting with multiple ascending columns
(numpy and tensor)"""
test = make_tiebreaker_test_df(by=["numpy", "tensor"], ascending=True)
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 3, 1])).all()
and (test["pandas"] == mk.ScalarColumn([7, 9, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def test_sort_numpy_and_pandas_ascending():
"""Testing all columns after sorting with multiple ascending columns (numpy
and tensor)"""
test = make_tiebreaker_test_df(by=["numpy", "pandas"], ascending=True)
assert (
(test["tensor"] == mk.TorchTensorColumn([2, 3, 1])).all()
and (test["pandas"] == mk.ScalarColumn([7, 9, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def test_sort_numpy_and_pandas_ascending_variable():
"""Testing all columns after sorting with multiple ascending columns (numpy
and tensor)"""
test = make_tiebreaker_test_df(by=["numpy", "pandas"], ascending=[True, False])
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 2, 1])).all()
and (test["pandas"] == mk.ScalarColumn([9, 7, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def METHOD_NAME():
"""Testing all columns after sorting with multiple ascending columns (numpy
and pandas and tensor)"""
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 2, 1]),
"pandas": mk.ScalarColumn([9, 7, 7]),
"numpy": mk.TorchTensorColumn([6, 4, 4]),
}
)
test = df.sort(by=["numpy", "pandas", "tensor"], ascending=True)
assert (
(test["tensor"] == mk.TorchTensorColumn([1, 2, 3])).all()
and (test["pandas"] == mk.ScalarColumn([7, 7, 9])).all()
and (test["numpy"] == mk.TorchTensorColumn([4, 4, 6])).all()
)
def test_sort_tensor_and_pandas_descending():
"""Testing all columns after sorting with multiple ascending columns
(tensor and pandas)."""
df = mk.DataFrame(
{
"tensor": mk.TorchTensorColumn([3, 2, 2]),
"pandas": mk.ScalarColumn([9, 8, 7]),
"numpy": mk.TorchTensorColumn([6, 4, 4]),
}
)
test = df.sort(by=["tensor", "pandas"], ascending=False)
assert (
(test["tensor"] == mk.TorchTensorColumn([3, 2, 2])).all()
and (test["pandas"] == mk.ScalarColumn([9, 8, 7])).all()
and (test["numpy"] == mk.TorchTensorColumn([6, 4, 4])).all()
)
def test_sort_with_store():
df = mk.DataFrame({"tensor": mk.TorchTensorColumn([3, 2, 4])})
test = df.sort(by=mk.Store("tensor"), ascending=True)
assert (test["tensor"] == mk.TorchTensorColumn([2, 3, 4])).all()
| null |
1,526 |
__author__ = "Aleksandr Slepchenkov"
__email__ = "[email protected]"
from typing import (
Any,
Dict,
Iterable,
List,
Match,
Optional,
Pattern,
Sequence,
Tuple,
Type,
)
Tokens = List[Dict[str, Any]]
# There are too many levels of optional unions of lists of text in cell and align; see lines 385 and 396 in mistune.
def escape(text: str, quote: bool = ..., smart_amp: bool = ...) -> str: ...
class BlockGrammar:
def_links: Pattern[str]
def_footnotes: Pattern[str]
newline: Pattern[str]
block_code: Pattern[str]
fences: Pattern[str]
hrule: Pattern[str]
heading: Pattern[str]
lheading: Pattern[str]
block_quote: Pattern[str]
list_block: Pattern[str]
list_item: Pattern[str]
list_bullet: Pattern[str]
paragraph: Pattern[str]
block_html: Pattern[str]
METHOD_NAME: Pattern[str]
nptable: Pattern[str]
text: Pattern[str]
class BlockLexer:
grammar_class: Type[BlockGrammar]
default_rules: List[str]
list_rules: Tuple[str]
footnote_rules: Tuple[str]
tokens: Tokens
def_links: Dict[str, Dict[str, str]]
def_footnotes: Dict[str, int]
rules = ... # type: BlockGrammar
def __init__(self, rules: Optional[BlockGrammar] = ..., **kwargs: Any) -> None: ...
def __call__(self, text: str, rules: Optional[Sequence[str]] = ...) -> Tokens: ...
def parse(self, text: str, rules: Optional[Sequence[str]] = ...) -> Tokens: ...
def parse_newline(self, m: Match[str]) -> None: ...
def parse_block_code(self, m: Match[str]) -> None: ...
def parse_fences(self, m: Match[str]) -> None: ...
def parse_heading(self, m: Match[str]) -> None: ...
def parse_lheading(self, m: Match[str]) -> None: ...
def parse_hrule(self, m: Match[str]) -> None: ...
def parse_list_block(self, m: Match[str]) -> None: ...
def parse_block_quote(self, m: Match[str]) -> None: ...
def parse_def_links(self, m: Match[str]) -> None: ...
def parse_def_footnotes(self, m: Match[str]) -> None: ...
def parse_table(self, m: Match[str]) -> None: ...
def parse_nptable(self, m: Match[str]) -> None: ...
def parse_block_html(self, m: Match[str]) -> None: ...
def parse_paragraph(self, m: Match[str]) -> None: ...
def parse_text(self, m: Match[str]) -> None: ...
class InlineGrammar:
escape: Pattern[str]
inline_html: Pattern[str]
autolink: Pattern[str]
link: Pattern[str]
reflink: Pattern[str]
nolink: Pattern[str]
url: Pattern[str]
double_emphasis: Pattern[str]
emphasis: Pattern[str]
code: Pattern[str]
linebreak: Pattern[str]
strikethrough: Pattern[str]
footnote: Pattern[str]
text: Pattern[str]
def hard_wrap(self) -> None: ...
class InlineLexer:
grammar_class: Type[InlineGrammar]
default_rules: List[str]
inline_html_rules: List[str]
renderer: Renderer
links: Dict[str, Dict[str, str]]
footnotes: Dict[str, int]
footnote_index: int
_in_link: bool
_in_footnote: bool
_parse_inline_html: bool
rules: InlineGrammar
def __init__(
self, renderer: Renderer, rules: Optional[InlineGrammar] = ..., **kwargs: Any
) -> None: ...
def __call__(self, text: str, rules: Optional[Sequence[str]] = ...) -> str: ...
def setup(
self,
links: Optional[Dict[str, Dict[str, str]]],
footnotes: Optional[Dict[str, int]],
) -> None: ...
line_match: Match[str]
line_started: bool
def output(self, text: str, rules: Optional[Sequence[str]] = ...) -> str: ...
def output_escape(self, m: Match[str]) -> str: ...
def output_autolink(self, m: Match[str]) -> str: ...
def output_url(self, m: Match[str]) -> str: ...
def output_inline_html(self, m: Match[str]) -> str: ...
def output_footnote(self, m: Match[str]) -> Optional[str]: ...
def output_link(self, m: Match[str]) -> str: ...
def output_reflink(self, m: Match[str]) -> Optional[str]: ...
def output_nolink(self, m: Match[str]) -> Optional[str]: ...
def output_double_emphasis(self, m: Match[str]) -> str: ...
def output_emphasis(self, m: Match[str]) -> str: ...
def output_code(self, m: Match[str]) -> str: ...
def output_linebreak(self, m: Match[str]) -> str: ...
def output_strikethrough(self, m: Match[str]) -> str: ...
def output_text(self, m: Match[str]) -> str: ...
class Renderer:
options: Dict[str, str]
def __init__(self, **kwargs: Any) -> None: ...
def placeholder(self) -> str: ...
def block_code(
self, code: str, lang: Any = ...
    ) -> str: ...  # It seems that lang should be a string, but other types are valid as well
def block_quote(self, text: str) -> str: ...
def block_html(self, html: str) -> str: ...
def header(self, text: str, level: int, raw: Optional[str] = ...) -> str: ...
def hrule(self) -> str: ...
def list(
self, body: Any, ordered: bool = ...
) -> str: ... # body - same reason as for lang above, and for other Any in this class
def list_item(self, text: Any) -> str: ...
def paragraph(self, text: str) -> str: ...
def METHOD_NAME(self, header: Any, body: Any) -> str: ...
def table_row(self, content: Any) -> str: ...
def table_cell(self, content: Any, **flags: Dict[str, Any]) -> str: ...
def double_emphasis(self, text: Any) -> str: ...
def emphasis(self, text: Any) -> str: ...
def codespan(self, text: str) -> str: ...
def linebreak(self) -> str: ...
def strikethrough(self, text: Any) -> str: ...
def text(self, text: Any) -> str: ...
def escape(self, text: Any) -> str: ...
def autolink(self, link: Any, is_email: bool = ...) -> str: ...
def link(self, link: Any, title: Any, text: Any) -> str: ...
def image(self, src: Any, title: Any, text: Any) -> str: ...
def inline_html(self, html: Any) -> str: ...
def newline(self) -> str: ...
def footnote_ref(self, key: Any, index: int) -> str: ...
def footnote_item(self, key: Any, text: str) -> str: ...
def footnotes(self, text: Any) -> str: ...
class Markdown:
renderer = ... # type: Renderer
inline = ... # type: InlineLexer
block = ... # type: BlockLexer
footnotes = ... # type: List[Dict[str, Any]]
tokens = ... # type: Tokens
def __init__(
self,
renderer: Optional[Renderer] = ...,
inline: Optional[InlineLexer] = ...,
block: Optional[BlockLexer] = ...,
**kwargs: Any,
) -> None: ...
def __call__(self, text: str) -> str: ...
def render(self, text: str) -> str: ...
def parse(self, text: str) -> str: ...
token = ... # type: Dict[str, Any]
def pop(self) -> Optional[Dict[str, Any]]: ...
def peek(self) -> Optional[Dict[str, Any]]: ...
def output(self, text: str, rules: Optional[Sequence[str]] = ...) -> str: ...
def tok(self) -> str: ...
def tok_text(self) -> str: ...
def output_newline(self) -> str: ...
def output_hrule(self) -> str: ...
def output_heading(self) -> str: ...
def output_code(self) -> str: ...
def output_table(self) -> str: ...
def output_block_quote(self) -> str: ...
def output_list(self) -> str: ...
def output_list_item(self) -> str: ...
def output_loose_item(self) -> str: ...
def output_footnote(self) -> str: ...
def output_close_html(self) -> str: ...
def output_open_html(self) -> str: ...
def output_paragraph(self) -> str: ...
def output_text(self) -> str: ...
def markdown(text: str, escape: bool = ..., **kwargs: Any) -> str: ...
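# Hedged usage sketch (based on the mistune 0.x API described by these stubs):
#
#     renderer = Renderer()
#     md = Markdown(renderer=renderer)
#     html = md("**bold** text")  # __call__ delegates to parse()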
| null |
1,527 |
""" test for app action functionality """
from unittest.mock import patch
from django.contrib.auth.models import AnonymousUser
from django.template.response import TemplateResponse
from django.test import TestCase
from django.test.client import RequestFactory
from bookwyrm import forms, models
from bookwyrm import views
from bookwyrm.tests.validate_html import validate_html
class InviteViews(TestCase):
"""every response to a get request, html or json"""
# pylint: disable=invalid-name
def setUp(self):
"""we need basic test data and mocks"""
self.factory = RequestFactory()
with patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"), patch(
"bookwyrm.activitystreams.populate_stream_task.delay"
), patch("bookwyrm.lists_stream.populate_lists_task.delay"):
self.local_user = models.User.objects.create_user(
"[email protected]",
"[email protected]",
"password",
local=True,
localname="mouse",
)
models.SiteSettings.objects.create()
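        # Hedged note: the three ``.delay`` calls patched above are assumed to
        # fire from post-save signals during user creation; stubbing them keeps
        # setUp synchronous and avoids requiring a running task broker.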
def test_invite_page(self):
"""there are so many views, this just makes sure it LOADS"""
view = views.Invite.as_view()
models.SiteInvite.objects.create(code="hi", user=self.local_user)
request = self.factory.get("")
request.user = AnonymousUser
# why?? this is annoying.
request.user.is_authenticated = False
with patch("bookwyrm.models.site.SiteInvite.valid") as invite:
invite.return_value = True
result = view(request, "hi")
self.assertIsInstance(result, TemplateResponse)
validate_html(result.render())
self.assertEqual(result.status_code, 200)
def METHOD_NAME(self):
"""there are so many views, this just makes sure it LOADS"""
view = views.ManageInvites.as_view()
request = self.factory.get("")
request.user = self.local_user
request.user.is_superuser = True
result = view(request)
self.assertIsInstance(result, TemplateResponse)
validate_html(result.render())
self.assertEqual(result.status_code, 200)
def test_manage_invites_post(self):
"""there are so many views, this just makes sure it LOADS"""
view = views.ManageInvites.as_view()
form = forms.CreateInviteForm()
form.data["use_limit"] = 3
form.data["expiry"] = ""
request = self.factory.post("", form.data)
request.user = self.local_user
request.user.is_superuser = True
result = view(request)
self.assertIsInstance(result, TemplateResponse)
validate_html(result.render())
self.assertEqual(result.status_code, 200)
invite = models.SiteInvite.objects.get()
self.assertEqual(invite.use_limit, 3)
self.assertIsNone(invite.expiry)
def test_invite_request(self):
"""request to join a server"""
form = forms.InviteRequestForm()
form.data["email"] = "[email protected]"
view = views.InviteRequest.as_view()
request = self.factory.post("", form.data)
request.user = self.local_user
result = view(request)
validate_html(result.render())
req = models.InviteRequest.objects.get()
self.assertEqual(req.email, "[email protected]")
def test_invite_request_email_taken(self):
"""request to join a server with an existing user email"""
form = forms.InviteRequestForm()
form.data["email"] = "[email protected]"
view = views.InviteRequest.as_view()
request = self.factory.post("", form.data)
request.user = self.local_user
result = view(request)
validate_html(result.render())
# no request created
self.assertFalse(models.InviteRequest.objects.exists())
def test_manage_invite_requests(self):
"""there are so many views, this just makes sure it LOADS"""
view = views.ManageInviteRequests.as_view()
request = self.factory.get("")
request.user = self.local_user
request.user.is_superuser = True
result = view(request)
self.assertIsInstance(result, TemplateResponse)
validate_html(result.render())
self.assertEqual(result.status_code, 200)
# now with data
models.InviteRequest.objects.create(email="[email protected]")
result = view(request)
self.assertIsInstance(result, TemplateResponse)
validate_html(result.render())
self.assertEqual(result.status_code, 200)
def test_manage_invite_requests_send(self):
"""send an invite"""
req = models.InviteRequest.objects.create(email="[email protected]")
view = views.ManageInviteRequests.as_view()
request = self.factory.post("", {"invite-request": req.id})
request.user = self.local_user
request.user.is_superuser = True
with patch("bookwyrm.emailing.send_email.delay") as mock:
view(request)
self.assertEqual(mock.call_count, 1)
req.refresh_from_db()
self.assertIsNotNone(req.invite)
def test_ignore_invite_request(self):
"""don't invite that jerk"""
req = models.InviteRequest.objects.create(email="[email protected]")
view = views.ignore_invite_request
request = self.factory.post("", {"invite-request": req.id})
request.user = self.local_user
request.user.is_superuser = True
view(request)
req.refresh_from_db()
self.assertTrue(req.ignored)
view(request)
req.refresh_from_db()
self.assertFalse(req.ignored)
| null |
1,528 |
# -*- coding: utf-8 -*-
from PySide2.QtCore import Qt
from PySide2.QtWidgets import QDialogButtonBox, QMessageBox, QWidget
from activity_browser.ui.widgets import (
BiosphereUpdater, SwitchComboBox, CutoffMenu, ForceInputDialog,
parameter_save_errorbox, simple_warning_box
)
# NOTE: No way of testing the BiosphereUpdater class without causing the
# ab_app fixture to flip its lid and fail to clean itself up.
def METHOD_NAME(qtbot):
parent = QWidget()
parent.has_scenarios = False
qtbot.addWidget(parent)
box = SwitchComboBox(parent)
box.configure(False, False)
size = box.count()
assert size == 0
assert not box.isVisible()
def test_comparison_switch_no_scenarios(qtbot):
parent = QWidget()
parent.has_scenarios = False
qtbot.addWidget(parent)
box = SwitchComboBox(parent)
box.configure()
size = box.count()
assert size == 2
# assert box.isVisible() # Box fails to be visible, except it definitely is?
def test_comparison_switch_all(qtbot):
parent = QWidget()
parent.has_scenarios = True
qtbot.addWidget(parent)
box = SwitchComboBox(parent)
box.configure()
size = box.count()
assert size == 3
# assert box.isVisible() # Box fails to be visible, except it definitely is?
# Outdated: doesn't work with the new update
# def test_cutoff_menu_relative(qtbot):
# """ Simple check of all the slots on the CutoffMenu class
# """
# slider = CutoffMenu()
# qtbot.addWidget(slider)
# assert slider.cutoff_value == 0.01
# assert slider.is_relative
#
# assert slider.sliders.relative.value() == 20
# assert slider.sliders.relative.log_value == 1.8
# qtbot.mouseClick(slider.cutoff_slider_lft_btn, Qt.LeftButton)
# assert slider.sliders.relative.value() == 21
# assert slider.sliders.relative.log_value == 2.0
# qtbot.mouseClick(slider.cutoff_slider_rght_btn, Qt.LeftButton)
# assert slider.sliders.relative.value() == 20
# assert slider.sliders.relative.log_value == 1.8
#
# with qtbot.waitSignal(slider.slider_change, timeout=1600):
# slider.cutoff_slider_line.setText("0.1")
# assert slider.sliders.relative.value() == 40
# assert slider.sliders.relative.log_value == 10
def test_cutoff_slider_toggle(qtbot):
slider = CutoffMenu()
qtbot.addWidget(slider)
with qtbot.waitSignal(slider.buttons.topx.toggled, timeout=800):
slider.buttons.topx.click()
assert not slider.is_relative
assert slider.limit_type == "number"
# def test_cutoff_slider_top(qtbot):
# slider = CutoffMenu()
# qtbot.addWidget(slider)
# slider.buttons.topx.click()
#
# assert slider.sliders.topx.value() == 1
# qtbot.mouseClick(slider.cutoff_slider_rght_btn, Qt.LeftButton)
# assert slider.sliders.topx.value() == 2
# qtbot.mouseClick(slider.cutoff_slider_lft_btn, Qt.LeftButton)
# assert slider.sliders.topx.value() == 1
#
# with qtbot.waitSignal(slider.slider_change, timeout=1600):
# slider.cutoff_slider_line.setText("15")
# assert slider.sliders.topx.value() == 15
def test_input_dialog(qtbot):
""" Test the various thing about the dialog widget.
"""
parent = QWidget()
qtbot.addWidget(parent)
dialog = ForceInputDialog.get_text(
parent, "Early in the morning", "What should we do with a drunken sailor"
)
assert dialog.output == ""
assert not dialog.buttons.button(QDialogButtonBox.Ok).isEnabled()
existing = ForceInputDialog.get_text(
parent, "Existence", "is a nightmare", "and here is why"
)
assert existing.output == "and here is why"
# Text in dialog MUST be changed before Ok button is enabled.
assert not dialog.buttons.button(QDialogButtonBox.Ok).isEnabled()
with qtbot.waitSignal(dialog.input.textChanged, timeout=100):
dialog.input.setText("Now it works.")
assert dialog.buttons.button(QDialogButtonBox.Ok).isEnabled()
def test_parameter_errorbox(qtbot, monkeypatch):
""" Not truly used anymore in favour of not saving invalid values.
"""
parent = QWidget()
qtbot.addWidget(parent)
monkeypatch.setattr(QMessageBox, "exec_", lambda *args: QMessageBox.Cancel)
result = parameter_save_errorbox(parent, "got an error")
assert result == QMessageBox.Cancel
def test_simple_warning_box(qtbot, monkeypatch):
parent = QWidget()
qtbot.addWidget(parent)
monkeypatch.setattr(QMessageBox, "warning", lambda *args: QMessageBox.Ok)
result = simple_warning_box(parent, "Warning title", "This is a warning")
assert result == QMessageBox.Ok
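# Hedged pattern note: modal Qt dialogs block the event loop, so the two tests
# above monkeypatch the QMessageBox entry points to return a button constant
# instead of opening a window. The same trick fits any modal call, e.g.:
#
#     monkeypatch.setattr(QMessageBox, "question", lambda *args: QMessageBox.Yes)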
| null |
1,529 |
from typing import Any, Protocol, Type
from rest_framework.mixins import CreateModelMixin
from rest_framework.mixins import DestroyModelMixin
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import Serializer
from rest_framework.viewsets import GenericViewSet
from rest_framework.viewsets import ModelViewSet
from rest_framework.viewsets import ReadOnlyModelViewSet as _ReadOnlyModelViewSet
from django.core.exceptions import ImproperlyConfigured
from app.validators import Validator
__all__ = [
"AppViewSet",
"ReadOnlyAppViewSet",
"CreateDeleteAppViewSet",
]
class ViewsetWithValidationProtocol(Protocol):
validator_class: Type[Validator] | None
class ValidationMixin(ViewsetWithValidationProtocol):
def METHOD_NAME(self) -> Type[Validator]:
if self.validator_class is None:
raise ImproperlyConfigured("Please set validator_class class variable")
return self.validator_class
def _validate(self, data: dict, context: dict | None = None) -> None:
Validator = self.METHOD_NAME()
        Validator.do(data, context=context if context is not None else self.get_validator_context())
def get_validator_context(self) -> dict[str, Any]:
return {
"request": self.request, # type: ignore
}
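# Hedged usage sketch (``PurchaseValidator`` and the viewset below are
# illustrative names, not part of this module): a concrete viewset mixes in
# ValidationMixin, points ``validator_class`` at a Validator subclass, and
# calls ``self._validate()`` before performing a write.
#
#     class PurchaseViewSet(ValidationMixin, AppViewSet):
#         validator_class = PurchaseValidator
#
#         def create(self, request, *args, **kwargs):
#             self._validate(request.data)
#             return super().create(request, *args, **kwargs)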
class MultiSerializerMixin:
def get_serializer_class(self, action: str | None = None) -> Type[Serializer]:
"""
Look for serializer class in self.serializer_action_classes, which
should be a dict mapping action name (key) to serializer class (value),
i.e.:
class MyViewSet(MultiSerializerViewSetMixin, ViewSet):
serializer_class = MyDefaultSerializer
serializer_action_classes = {
'list': MyListSerializer,
'my_action': MyActionSerializer,
}
@action
def my_action:
...
        If there's no entry for that action then just fall back to the regular
get_serializer_class lookup: self.serializer_class, DefaultSerializer.
Thanks gonz: http://stackoverflow.com/a/22922156/11440
"""
if action is None:
action = self.action # type: ignore
try:
return self.serializer_action_classes[action] # type: ignore
except (KeyError, AttributeError):
return super().get_serializer_class() # type: ignore
class ReadOnlyAppViewSet(MultiSerializerMixin, _ReadOnlyModelViewSet):
pass
class AppViewSet(MultiSerializerMixin, ModelViewSet):
def update(self, request: Request, *args: Any, **kwargs: dict[str, Any]) -> Response:
"""
Always serialize response with the default serializer.
        CAUTION: we are losing the serializer context here!
If you need it, feel free to rewrite this method with http://www.cdrf.co/3.6/rest_framework.mixins/UpdateModelMixin.html
"""
response = super().update(request, *args, **kwargs)
Serializer = self.get_serializer_class(action="retrieve")
response.data = Serializer(self.get_object()).data
return response
def create(self, request: Request, *args: Any, **kwargs: dict[str, Any]) -> Response:
"""
Always serialize response with the default serializer.
        CAUTION: we are losing the serializer context here!
If you need it, feel free to rewrite this method with http://www.cdrf.co/3.6/rest_framework.mixins/CreateModelMixin.html
"""
response = super().create(request, *args, **kwargs)
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
try:
self.kwargs[lookup_url_kwarg] = response.data.get(self.lookup_field) or response.data["id"]
except KeyError:
return response # if you want to mangle with response serializing, please provide ID or lookup_url_kwarg in your serializer
Serializer = self.get_serializer_class(action="retrieve")
response.data = Serializer(self.get_object()).data
return response
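# Hedged note on the update()/create() overrides above: re-serializing the
# response with the "retrieve" serializer only works when the write serializer
# exposes either the lookup field or an "id", since get_object() needs one of
# them to fetch the instance that was just written.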
class CreateDeleteAppViewSet(MultiSerializerMixin, CreateModelMixin, DestroyModelMixin, GenericViewSet):
...
| null |
1,530 |
# Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
import logging
import traceback
from sublime import windows as _windows
from NeoVintageous.nv.polyfill import status_message as _status_message
_log = logging.getLogger(__name__)
# NeoVintageous always runs actions based on selections. Some Vim commands,
# however, behave differently depending on whether the current mode is NORMAL or
# VISUAL. To differentiate NORMAL mode operations (involving only an action, or
# a motion plus an action) from VISUAL mode, we need to add an additional mode
# for handling selections that won't interfere with the actual VISUAL mode. This
# is INTERNAL_NORMAL's job. INTERNAL_NORMAL is a pseudomode, because global
# state's .mode property should never be set to it, yet it's often set in
# vi_cmd_data. Note that for pure motions we still use plain NORMAL mode.
INSERT = 'mode_insert'
INTERNAL_NORMAL = 'mode_internal_normal'
NORMAL = 'mode_normal'
OPERATOR_PENDING = 'mode_operator_pending'
REPLACE = 'mode_replace'
SELECT = 'mode_select'
UNKNOWN = 'mode_unknown'
VISUAL = 'mode_visual'
VISUAL_BLOCK = 'mode_visual_block'
VISUAL_LINE = 'mode_visual_line'
ACTION_MODES = (NORMAL, VISUAL, VISUAL_LINE, VISUAL_BLOCK)
MOTION_MODES = (NORMAL, OPERATOR_PENDING, VISUAL, VISUAL_LINE, VISUAL_BLOCK)
DIRECTION_UP = 1
DIRECTION_DOWN = 2
EOF = '\x00'
NL = '\n'
_MODES = {
INSERT: 'INSERT',
INTERNAL_NORMAL: '',
NORMAL: '',
OPERATOR_PENDING: '',
VISUAL: 'VISUAL',
VISUAL_BLOCK: 'VISUAL BLOCK',
VISUAL_LINE: 'VISUAL LINE',
UNKNOWN: 'UNKNOWN',
REPLACE: 'REPLACE',
SELECT: 'SELECT',
}
_MODE2CHAR = {
INSERT: 'i',
NORMAL: 'n',
SELECT: 's',
VISUAL: 'v',
VISUAL_LINE: 'V', # Sometimes "l" in code e.g. case-insensitive situations.
VISUAL_BLOCK: 'b',
}
def mode_to_name(mode: str) -> str:
try:
return _MODES[mode]
except KeyError:
return '*UNKNOWN'
def METHOD_NAME(mode: str) -> str:
try:
return _MODE2CHAR[mode]
except KeyError:
return ''
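# Hedged illustration of the two lookups above:
#     mode_to_name(VISUAL_LINE)  -> 'VISUAL LINE'  (status-line display name)
#     METHOD_NAME(VISUAL_LINE)   -> 'V'            (single-char code, as in Vim's mode())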
def reset_status_line(view, mode: str) -> None:
view.erase_status('vim-seq')
if mode == NORMAL:
view.erase_status('vim-mode')
def is_visual_mode(mode: str) -> bool:
return mode in (VISUAL, VISUAL_LINE, VISUAL_BLOCK)
def is_ex_mode(view) -> bool:
return view.settings().get('_nv_ex_mode')
def message(msg: str, *args: str) -> None:
_status_message('NeoVintageous: ' + msg, *args)
def status_message(msg: str, *args: str) -> None:
_status_message(msg, *args)
def run_motion(instance, motion: dict) -> None:
instance.run_command(motion['motion'], motion['motion_args'])
def run_action(instance, action: dict) -> None:
instance.run_command(action['action'], action['action_args'])
def enter_normal_mode(view_or_window, mode: str = None) -> None:
view_or_window.run_command('nv_enter_normal_mode', {'mode': mode})
def enter_insert_mode(view_or_window, mode: str) -> None:
view_or_window.run_command('nv_enter_insert_mode', {'mode': mode})
def enter_visual_mode(view_or_window, mode: str, force: bool = False) -> None:
view_or_window.run_command('nv_enter_visual_mode', {'mode': mode})
def enter_visual_line_mode(view_or_window, mode: str, force: bool = False) -> None:
view_or_window.run_command('nv_enter_visual_line_mode', {'mode': mode})
def enter_visual_block_mode(view_or_window, mode: str, force: bool = False) -> None:
view_or_window.run_command('nv_enter_visual_block_mode', {'mode': mode})
def clean_views() -> None:
for window in _windows():
for view in window.views():
clean_view(view)
def clean_view(view) -> None:
# Reset mode, caret, state, etc. In the case of plugin errors this clean
# routine prevents the normal functioning of editor becoming unusable e.g.
# the cursor getting stuck in a block shape or the mode getting stuck.
try:
settings = view.settings()
if settings.has('command_mode'):
settings.erase('command_mode')
if settings.has('inverse_caret_state'):
settings.erase('inverse_caret_state')
if settings.has('vintage'):
settings.erase('vintage')
except Exception: # pragma: no cover
traceback.print_exc()
| null |
1,531 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class DeleteTransitRouterRouteEntryRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'DeleteTransitRouterRouteEntry')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_TransitRouterRouteEntryId(self): # String
return self.get_query_params().get('TransitRouterRouteEntryId')
def set_TransitRouterRouteEntryId(self, TransitRouterRouteEntryId): # String
self.add_query_param('TransitRouterRouteEntryId', TransitRouterRouteEntryId)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_TransitRouterRouteEntryNextHopType(self): # String
return self.get_query_params().get('TransitRouterRouteEntryNextHopType')
def set_TransitRouterRouteEntryNextHopType(self, TransitRouterRouteEntryNextHopType): # String
self.add_query_param('TransitRouterRouteEntryNextHopType', TransitRouterRouteEntryNextHopType)
def get_TransitRouterRouteEntryDestinationCidrBlock(self): # String
return self.get_query_params().get('TransitRouterRouteEntryDestinationCidrBlock')
def set_TransitRouterRouteEntryDestinationCidrBlock(self, TransitRouterRouteEntryDestinationCidrBlock): # String
self.add_query_param('TransitRouterRouteEntryDestinationCidrBlock', TransitRouterRouteEntryDestinationCidrBlock)
def get_TransitRouterRouteTableId(self): # String
return self.get_query_params().get('TransitRouterRouteTableId')
def set_TransitRouterRouteTableId(self, TransitRouterRouteTableId): # String
self.add_query_param('TransitRouterRouteTableId', TransitRouterRouteTableId)
def get_TransitRouterRouteEntryNextHopId(self): # String
return self.get_query_params().get('TransitRouterRouteEntryNextHopId')
def set_TransitRouterRouteEntryNextHopId(self, TransitRouterRouteEntryNextHopId): # String
self.add_query_param('TransitRouterRouteEntryNextHopId', TransitRouterRouteEntryNextHopId)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def METHOD_NAME(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
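# Hedged usage sketch (region and credentials are placeholders): a request
# built from the class above is sent through aliyunsdkcore's AcsClient, which
# serializes the query parameters and returns the raw response body.
#
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = DeleteTransitRouterRouteEntryRequest()
#     request.set_TransitRouterRouteEntryId('rte-xxxxxxxx')
#     response = client.do_action_with_exception(request)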
| null |
1,532 |
import pwmio
from math import e, exp, pi, sin
from kmk.extensions import Extension, InvalidExtensionEnvironment
from kmk.keys import make_argumented_key, make_key
from kmk.utils import clamp
class LEDKeyMeta:
def __init__(self, *leds):
self.leds = leds
self.brightness = None
class AnimationModes:
OFF = 0
STATIC = 1
STATIC_STANDBY = 2
BREATHING = 3
USER = 4
class LED(Extension):
def __init__(
self,
led_pin,
brightness=50,
brightness_step=5,
brightness_limit=100,
breathe_center=1.5,
animation_mode=AnimationModes.STATIC,
animation_speed=1,
user_animation=None,
val=100,
):
try:
pins_iter = iter(led_pin)
except TypeError:
pins_iter = [led_pin]
try:
self._leds = [pwmio.PWMOut(pin) for pin in pins_iter]
except Exception as e:
print(e)
raise InvalidExtensionEnvironment(
'Unable to create pwmio.PWMOut() instance with provided led_pin'
)
self._brightness = brightness
self._pos = 0
self._effect_init = False
self._enabled = True
self.brightness_step = brightness_step
self.brightness_limit = brightness_limit
self.animation_mode = animation_mode
self.animation_speed = animation_speed
self.breathe_center = breathe_center
self.val = val
if user_animation is not None:
self.user_animation = user_animation
make_argumented_key(
names=('LED_TOG',),
validator=self._led_key_validator,
on_press=self._key_led_tog,
)
make_argumented_key(
names=('LED_INC',),
validator=self._led_key_validator,
on_press=self._key_led_inc,
)
make_argumented_key(
names=('LED_DEC',),
validator=self._led_key_validator,
on_press=self._key_led_dec,
)
make_argumented_key(
names=('LED_SET',),
validator=self._led_set_key_validator,
on_press=self._key_led_set,
)
make_key(names=('LED_ANI',), on_press=self._key_led_ani)
make_key(names=('LED_AND',), on_press=self._key_led_and)
make_key(
names=('LED_MODE_PLAIN', 'LED_M_P'), on_press=self._key_led_mode_static
)
make_key(
names=('LED_MODE_BREATHE', 'LED_M_B'), on_press=self._key_led_mode_breathe
)
def __repr__(self):
return f'LED({self._to_dict()})'
def _to_dict(self):
return {
'_brightness': self._brightness,
'_pos': self._pos,
'brightness_step': self.brightness_step,
'brightness_limit': self.brightness_limit,
'animation_mode': self.animation_mode,
'animation_speed': self.animation_speed,
'breathe_center': self.breathe_center,
'val': self.val,
}
def on_runtime_enable(self, sandbox):
return
def on_runtime_disable(self, sandbox):
return
def during_bootup(self, sandbox):
return
def before_matrix_scan(self, sandbox):
return
def after_matrix_scan(self, sandbox):
return
def before_hid_send(self, sandbox):
return
def after_hid_send(self, sandbox):
self.animate()
def on_powersave_enable(self, sandbox):
return
def on_powersave_disable(self, sandbox):
return
def _init_effect(self):
self._pos = 0
self._effect_init = False
return self
def METHOD_NAME(self, percent, leds=None):
leds = leds or range(0, len(self._leds))
for i in leds:
self._leds[i].duty_cycle = int(percent / 100 * 65535)
def step_brightness(self, step, leds=None):
leds = leds or range(0, len(self._leds))
for i in leds:
brightness = int(self._leds[i].duty_cycle / 65535 * 100) + step
self.METHOD_NAME(clamp(brightness), [i])
def increase_brightness(self, step=None, leds=None):
if step is None:
step = self.brightness_step
self.step_brightness(step, leds)
def decrease_brightness(self, step=None, leds=None):
if step is None:
step = self.brightness_step
self.step_brightness(-step, leds)
def off(self):
self.METHOD_NAME(0)
    def increase_ani(self):
        '''
        Increases animation speed by 1, stopping at 10.
        '''
        if (self.animation_speed + 1) >= 10:
            self.animation_speed = 10
        else:
            self.animation_speed += 1
    def decrease_ani(self):
        '''
        Decreases animation speed by 1, stopping at 0.
        '''
        if (self.animation_speed - 1) <= 0:
            self.animation_speed = 0
        else:
            self.animation_speed -= 1
def effect_breathing(self):
# http://sean.voisen.org/blog/2011/10/breathing-led-with-arduino/
# https://github.com/qmk/qmk_firmware/blob/9f1d781fcb7129a07e671a46461e501e3f1ae59d/quantum/rgblight.c#L806
sined = sin((self._pos / 255.0) * pi)
multip_1 = exp(sined) - self.breathe_center / e
multip_2 = self.brightness_limit / (e - 1 / e)
self._brightness = int(multip_1 * multip_2)
self._pos = (self._pos + self.animation_speed) % 256
self.METHOD_NAME(self._brightness)
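    # Hedged note on the breathing curve above: with breathe_center c,
    # brightness_limit L and animation position p in [0, 255], brightness is
    # (e**sin(pi * p / 255) - c / e) * L / (e - 1 / e), rising smoothly toward
    # roughly L and falling back, which approximates a breath.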
def effect_static(self):
self.METHOD_NAME(self._brightness)
# Set animation mode to standby to prevent cycles from being wasted
self.animation_mode = AnimationModes.STATIC_STANDBY
def animate(self):
'''
Activates a "step" in the animation based on the active mode
:return: Returns the new state in animation
'''
if self._effect_init:
self._init_effect()
if self._enabled:
if self.animation_mode == AnimationModes.BREATHING:
return self.effect_breathing()
elif self.animation_mode == AnimationModes.STATIC:
return self.effect_static()
elif self.animation_mode == AnimationModes.STATIC_STANDBY:
pass
elif self.animation_mode == AnimationModes.USER:
return self.user_animation(self)
else:
self.off()
def _led_key_validator(self, *leds):
if leds:
for led in leds:
assert self._leds[led]
return LEDKeyMeta(*leds)
def _led_set_key_validator(self, brightness, *leds):
meta = self._led_key_validator(*leds)
meta.brightness = brightness
return meta
def _key_led_tog(self, *args, **kwargs):
if self.animation_mode == AnimationModes.STATIC_STANDBY:
self.animation_mode = AnimationModes.STATIC
if self._enabled:
self.off()
self._enabled = not self._enabled
def _key_led_inc(self, key, *args, **kwargs):
self.increase_brightness(leds=key.meta.leds)
def _key_led_dec(self, key, *args, **kwargs):
self.decrease_brightness(leds=key.meta.leds)
def _key_led_set(self, key, *args, **kwargs):
self.METHOD_NAME(percent=key.meta.brightness, leds=key.meta.leds)
def _key_led_ani(self, *args, **kwargs):
self.increase_ani()
def _key_led_and(self, *args, **kwargs):
self.decrease_ani()
def _key_led_mode_static(self, *args, **kwargs):
self._effect_init = True
self.animation_mode = AnimationModes.STATIC
def _key_led_mode_breathe(self, *args, **kwargs):
self._effect_init = True
self.animation_mode = AnimationModes.BREATHING
| null |
1,533 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class ModifyBackupPolicyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'ModifyBackupPolicy')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_LocalLogRetentionHours(self): # String
return self.get_query_params().get('LocalLogRetentionHours')
def set_LocalLogRetentionHours(self, LocalLogRetentionHours): # String
self.add_query_param('LocalLogRetentionHours', LocalLogRetentionHours)
def get_BackupPriority(self): # Integer
return self.get_query_params().get('BackupPriority')
def set_BackupPriority(self, BackupPriority): # Integer
self.add_query_param('BackupPriority', BackupPriority)
def get_LogBackupFrequency(self): # String
return self.get_query_params().get('LogBackupFrequency')
def set_LogBackupFrequency(self, LogBackupFrequency): # String
self.add_query_param('LogBackupFrequency', LogBackupFrequency)
def get_ArchiveBackupKeepCount(self): # Integer
return self.get_query_params().get('ArchiveBackupKeepCount')
def set_ArchiveBackupKeepCount(self, ArchiveBackupKeepCount): # Integer
self.add_query_param('ArchiveBackupKeepCount', ArchiveBackupKeepCount)
def get_BackupLog(self): # String
return self.get_query_params().get('BackupLog')
def set_BackupLog(self, BackupLog): # String
self.add_query_param('BackupLog', BackupLog)
def get_BackupInterval(self): # String
return self.get_query_params().get('BackupInterval')
def METHOD_NAME(self, BackupInterval): # String
self.add_query_param('BackupInterval', BackupInterval)
def get_HighSpaceUsageProtection(self): # String
return self.get_query_params().get('HighSpaceUsageProtection')
def set_HighSpaceUsageProtection(self, HighSpaceUsageProtection): # String
self.add_query_param('HighSpaceUsageProtection', HighSpaceUsageProtection)
def get_LogBackupLocalRetentionNumber(self): # Integer
return self.get_query_params().get('LogBackupLocalRetentionNumber')
def set_LogBackupLocalRetentionNumber(self, LogBackupLocalRetentionNumber): # Integer
self.add_query_param('LogBackupLocalRetentionNumber', LogBackupLocalRetentionNumber)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_EnableBackupLog(self): # String
return self.get_query_params().get('EnableBackupLog')
def set_EnableBackupLog(self, EnableBackupLog): # String
self.add_query_param('EnableBackupLog', EnableBackupLog)
def get_BackupPolicyMode(self): # String
return self.get_query_params().get('BackupPolicyMode')
def set_BackupPolicyMode(self, BackupPolicyMode): # String
self.add_query_param('BackupPolicyMode', BackupPolicyMode)
def get_PreferredBackupPeriod(self): # String
return self.get_query_params().get('PreferredBackupPeriod')
def set_PreferredBackupPeriod(self, PreferredBackupPeriod): # String
self.add_query_param('PreferredBackupPeriod', PreferredBackupPeriod)
def get_EnableIncrementDataBackup(self): # Boolean
return self.get_query_params().get('EnableIncrementDataBackup')
def set_EnableIncrementDataBackup(self, EnableIncrementDataBackup): # Boolean
self.add_query_param('EnableIncrementDataBackup', EnableIncrementDataBackup)
def get_ReleasedKeepPolicy(self): # String
return self.get_query_params().get('ReleasedKeepPolicy')
def set_ReleasedKeepPolicy(self, ReleasedKeepPolicy): # String
self.add_query_param('ReleasedKeepPolicy', ReleasedKeepPolicy)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_CompressType(self): # String
return self.get_query_params().get('CompressType')
def set_CompressType(self, CompressType): # String
self.add_query_param('CompressType', CompressType)
def get_LocalLogRetentionSpace(self): # String
return self.get_query_params().get('LocalLogRetentionSpace')
def set_LocalLogRetentionSpace(self, LocalLogRetentionSpace): # String
self.add_query_param('LocalLogRetentionSpace', LocalLogRetentionSpace)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ArchiveBackupKeepPolicy(self): # String
return self.get_query_params().get('ArchiveBackupKeepPolicy')
def set_ArchiveBackupKeepPolicy(self, ArchiveBackupKeepPolicy): # String
self.add_query_param('ArchiveBackupKeepPolicy', ArchiveBackupKeepPolicy)
def get_PreferredBackupTime(self): # String
return self.get_query_params().get('PreferredBackupTime')
def set_PreferredBackupTime(self, PreferredBackupTime): # String
self.add_query_param('PreferredBackupTime', PreferredBackupTime)
def get_BackupRetentionPeriod(self): # String
return self.get_query_params().get('BackupRetentionPeriod')
def set_BackupRetentionPeriod(self, BackupRetentionPeriod): # String
self.add_query_param('BackupRetentionPeriod', BackupRetentionPeriod)
def get_BackupMethod(self): # String
return self.get_query_params().get('BackupMethod')
def set_BackupMethod(self, BackupMethod): # String
self.add_query_param('BackupMethod', BackupMethod)
def get_ArchiveBackupRetentionPeriod(self): # String
return self.get_query_params().get('ArchiveBackupRetentionPeriod')
def set_ArchiveBackupRetentionPeriod(self, ArchiveBackupRetentionPeriod): # String
self.add_query_param('ArchiveBackupRetentionPeriod', ArchiveBackupRetentionPeriod)
def get_Category(self): # String
return self.get_query_params().get('Category')
def set_Category(self, Category): # String
self.add_query_param('Category', Category)
def get_LogBackupRetentionPeriod(self): # String
return self.get_query_params().get('LogBackupRetentionPeriod')
def set_LogBackupRetentionPeriod(self, LogBackupRetentionPeriod): # String
self.add_query_param('LogBackupRetentionPeriod', LogBackupRetentionPeriod)
| null |
1,534 |
"""Tests for docker engine."""
import json
import re
from pathlib import Path
from shutil import which
import pytest
from cwltool.main import main
from .util import (
get_data,
get_main_output,
needs_docker,
needs_podman,
needs_singularity,
)
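# Hedged note: get_main_output (imported from .util) is assumed to run
# cwltool's CLI entry point in-process and return a (return_code, stdout,
# stderr) tuple, which is the shape every assertion below relies on.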
@needs_docker
def test_docker_workflow(tmp_path: Path) -> None:
"""Basic test for docker with a CWL Workflow."""
result_code, _, stderr = get_main_output(
[
"--default-container",
"docker.io/debian:stable-slim",
"--outdir",
str(tmp_path),
get_data("tests/wf/hello-workflow.cwl"),
"--usermessage",
"hello",
]
)
assert "completed success" in stderr
assert (tmp_path / "response.txt").read_text("utf-8") == "hello"
assert result_code == 0
def test_docker_iwdr() -> None:
result_code = main(
[
"--default-container",
"docker.io/debian:stable-slim",
get_data("tests/wf/iwdr-entry.cwl"),
"--message",
"hello",
]
)
docker_installed = bool(which("docker"))
if docker_installed:
assert result_code == 0
else:
assert result_code != 0
@needs_docker
def METHOD_NAME() -> None:
result_code = main(
[
"--default-container",
"non-existant-weird-image",
get_data("tests/wf/hello-workflow.cwl"),
"--usermessage",
"hello",
]
)
assert result_code != 0
@needs_docker
def test_docker_file_mount() -> None:
# test for bug in
# ContainerCommandLineJob.create_file_and_add_volume()
#
# the bug was that it would use the file literal contents as the
# temporary file name, which can easily result in a file name that
# is too long or otherwise invalid. This test case uses ".."
result_code = main(
[get_data("tests/wf/literalfile.cwl"), get_data("tests/wf/literalfile-job.yml")]
)
assert result_code == 0
@needs_docker
def test_docker_strict_cpu_limit(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--strict-cpu-limit",
"--default-container",
"docker.io/debian:stable-slim",
"--outdir",
str(tmp_path),
get_data("tests/wf/cores_float.cwl"),
]
)
stderr = re.sub(r"\s\s+", " ", stderr)
assert result_code == 0
assert "--cpus=2" in stderr
@needs_docker
def test_docker_strict_memory_limit(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--strict-memory-limit",
"--default-container",
"docker.io/debian:stable-slim",
"--outdir",
str(tmp_path),
get_data("tests/wf/storage_float.cwl"),
]
)
stderr = re.sub(r"\s\s+", " ", stderr)
assert result_code == 0
assert "--memory=255m" in stderr
@needs_docker
def test_docker_strict_cpu_limit_warning(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--default-container",
"docker.io/debian:stable-slim",
"--outdir",
str(tmp_path),
get_data("tests/wf/cores_float.cwl"),
]
)
stderr = re.sub(r"\s\s+", " ", stderr)
assert result_code == 0
assert "Skipping Docker software container '--cpus' limit" in stderr
@needs_docker
def test_docker_strict_memory_limit_warning(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--default-container",
"docker.io/debian:stable-slim",
"--outdir",
str(tmp_path),
get_data("tests/wf/storage_float.cwl"),
]
)
stderr = re.sub(r"\s\s+", " ", stderr)
assert result_code == 0
assert "Skipping Docker software container '--memory' limit" in stderr
@needs_docker
def test_docker_required_secfile(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--outdir",
str(tmp_path),
get_data("tests/secondary-files-required-container.cwl"),
]
)
assert result_code == 0, stderr
assert (
json.loads(stdout)["output"]["secondaryFiles"][0]["checksum"]
== "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
)
@needs_podman
def test_podman_required_secfile(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--podman",
"--outdir",
str(tmp_path),
get_data("tests/secondary-files-required-container.cwl"),
]
)
assert result_code == 0, stderr
assert (
json.loads(stdout)["output"]["secondaryFiles"][0]["checksum"]
== "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
)
@needs_singularity
def test_singularity_required_secfile(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
singularity_dir = tmp_path / "singularity"
singularity_dir.mkdir()
monkeypatch.setenv("CWL_SINGULARITY_CACHE", str(singularity_dir))
result_code, stdout, stderr = get_main_output(
[
"--singularity",
"--outdir",
str(tmp_path / "out"),
get_data("tests/secondary-files-required-container.cwl"),
]
)
assert result_code == 0, stderr
assert (
json.loads(stdout)["output"]["secondaryFiles"][0]["checksum"]
== "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
)
@needs_docker
def test_docker_required_missing_secfile(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--outdir",
str(tmp_path),
get_data("tests/secondary-files-required-missing-container.cwl"),
]
)
assert result_code == 1, stderr
stderr = re.sub(r"\s\s+", " ", stderr)
assert "Job error:" in stderr
assert "Error collecting output for parameter 'output'" in stderr
    assert (
        "tests/secondary-files-required-missing-container.cwl:16:5: Missing required secondary file"
        in stderr
    )
assert "file.ext3" in stderr
@needs_podman
def test_podman_required_missing_secfile(tmp_path: Path) -> None:
result_code, stdout, stderr = get_main_output(
[
"--podman",
"--outdir",
str(tmp_path),
get_data("tests/secondary-files-required-missing-container.cwl"),
]
)
assert result_code == 1, stderr
stderr = re.sub(r"\s\s+", " ", stderr)
assert "Job error:" in stderr
assert "Error collecting output for parameter 'output'" in stderr
    assert (
        "tests/secondary-files-required-missing-container.cwl:16:5: Missing required secondary file"
        in stderr
    )
assert "file.ext3" in stderr
@needs_singularity
def test_singularity_required_missing_secfile(
tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
singularity_dir = tmp_path / "singularity"
singularity_dir.mkdir()
monkeypatch.setenv("CWL_SINGULARITY_CACHE", str(singularity_dir))
result_code, stdout, stderr = get_main_output(
[
"--singularity",
"--outdir",
str(tmp_path),
get_data("tests/secondary-files-required-missing-container.cwl"),
]
)
assert result_code == 1, stderr
stderr = re.sub(r"\s\s+", " ", stderr)
assert "Job error:" in stderr
assert "Error collecting output for parameter 'output'" in stderr
    assert (
        "tests/secondary-files-required-missing-container.cwl:16:5: Missing required secondary file"
        in stderr
    )
assert "file.ext3" in stderr
| null |
1,535 |
#
# Copyright The pyparted Project Authors
# SPDX-License-Identifier: GPL-2.0-or-later
#
import _ped
import parted
import os
import tempfile
import unittest
# Base class for any test case that requires a temp device node
class RequiresDeviceNode(unittest.TestCase):
def setUp(self):
super().setUp()
self.temp_prefix = "temp-device-"
(self.fd, self.path) = tempfile.mkstemp(prefix=self.temp_prefix)
self.f = os.fdopen(self.fd)
self.f.seek(140000)
os.write(self.fd, b"0")
def tearDown(self):
os.close(self.fd)
if self.path and os.path.exists(self.path):
os.unlink(self.path)
self.fd = None
self.path = None
self.temp_prefix = None
# Base class for any test case that requires a _ped.Device or parted.Device
# object first.
class RequiresDevice(RequiresDeviceNode):
def setUp(self):
super().setUp()
self.addCleanup(self.removeDevice)
self._device = _ped.device_get(self.path)
self.device = parted.getDevice(self.path)
def removeDevice(self):
self.device = None
self._device = None
# Base class for any test case that requires a filesystem on a device.
class RequiresFileSystem(unittest.TestCase):
def setUp(self):
super().setUp()
self._fileSystemType = {}
ty = _ped.file_system_type_get_next()
self._fileSystemType[ty.name] = ty
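        # Fetch the first type above, then iterate until _ped signals the end
        # of the filesystem type list.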
while True:
try:
ty = _ped.file_system_type_get_next(ty)
self._fileSystemType[ty.name] = ty
except (IndexError, TypeError, _ped.UnknownTypeException):
break
self.temp_prefix = "temp-device-"
(
self.fd,
self.path,
) = tempfile.mkstemp(prefix=self.temp_prefix)
self.f = os.fdopen(self.fd)
self.f.seek(140000)
os.write(self.fd, b"0")
self.f.close()
os.system("mke2fs -F -q %s" % (self.path,))
self._device = _ped.device_get(self.path)
self._geometry = _ped.Geometry(self._device, 0, self._device.length - 1)
def tearDown(self):
if self.path and os.path.exists(self.path):
os.unlink(self.path)
self.mountpoint = None
# Base class for certain alignment tests that require a _ped.Device
class RequiresDeviceAlignment(RequiresDevice):
def METHOD_NAME(self, sector, grain_size):
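        # Round sector down to a multiple of grain_size; the negative branch
        # shifts an extra grain, mirroring C-style rounding toward -infinity.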
if sector < 0:
shift = sector % grain_size + grain_size
else:
shift = sector % grain_size
return sector - shift
def roundUpTo(self, sector, grain_size):
if sector % grain_size:
return self.METHOD_NAME(sector, grain_size) + grain_size
else:
return sector
def closestInsideGeometry(self, alignment, geometry, sector):
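        # Find an aligned sector inside the geometry near the given sector,
        # or -1 when alignment and geometry cannot both be satisfied.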
if alignment.grain_size == 0:
if alignment.is_aligned(geometry, sector) and (
(geometry is None) or geometry.test_sector_inside(sector)
):
return sector
else:
return -1
if sector < geometry.start:
sector += self.roundUpTo(geometry.start - sector, alignment.grain_size)
if sector > geometry.end:
sector -= self.roundUpTo(sector - geometry.end, alignment.grain_size)
if not geometry.test_sector_inside(sector):
return -1
return sector
def closest(self, sector, a, b):
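        # Of the two candidate sectors, return the one nearer to sector;
        # -1 marks an invalid candidate.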
if a == -1:
return b
if b == -1:
return a
if abs(sector - a) < abs(sector - b):
return a
else:
return b
# Base class for any test case that requires a labeled device
class RequiresLabeledDevice(RequiresDevice):
def setUp(self):
super().setUp()
os.system("parted -s %s mklabel msdos" % (self.path,))
# Base class for any test case that requires a _ped.Disk or parted.Disk.
class RequiresDisk(RequiresDevice):
def setUp(self):
super().setUp()
self._disk = _ped.disk_new_fresh(self._device, _ped.disk_type_get("msdos"))
self.disk = parted.Disk(PedDisk=self._disk)
def reopen(self):
self._disk = _ped.disk_new(self._device)
self.disk = parted.Disk(PedDisk=self._disk)
# Base class for any test case that requires a GPT-labeled _ped.Disk or parted.Disk.
class RequiresGPTDisk(RequiresDevice):
def setUp(self):
super().setUp()
self._disk = _ped.disk_new_fresh(self._device, _ped.disk_type_get("gpt"))
self.disk = parted.Disk(PedDisk=self._disk)
def reopen(self):
self._disk = _ped.disk_new(self._device)
self.disk = parted.Disk(PedDisk=self._disk)
# Base class for any test case that requires a filesystem made and mounted.
class RequiresMount(RequiresDevice):
def setUp(self):
super().setUp()
self.addCleanup(self.removeMountpoint)
self.mountpoint = None
def mkfs(self):
os.system("mkfs.ext2 -F -q %s" % self.path)
def doMount(self):
self.mountpoint = tempfile.mkdtemp()
os.system("mount -o loop %s %s" % (self.path, self.mountpoint))
def removeMountpoint(self):
if self.mountpoint and os.path.exists(self.mountpoint):
os.system("umount %s" % self.mountpoint)
os.rmdir(self.mountpoint)
# Base class for any test case that requires a _ped.Partition.
class RequiresPartition(RequiresDisk):
def setUp(self):
super().setUp()
self._part = _ped.Partition(
disk=self._disk,
type=_ped.PARTITION_NORMAL,
start=1,
end=100,
fs_type=_ped.file_system_type_get("ext2"),
)
def reopen(self):
super().reopen()
self._part = self._disk.next_partition(self._disk.next_partition())
# Base class for any test case that requires a _ped.Partition on GPT disk.
class RequiresGPTPartition(RequiresGPTDisk):
def setUp(self):
super().setUp()
self._part = _ped.Partition(
disk=self._disk,
type=_ped.PARTITION_NORMAL,
start=0,
end=100,
fs_type=_ped.file_system_type_get("ext2"),
)
def reopen(self):
super().reopen()
self._part = self._disk.next_partition()
# Base class for any test case that requires a hash table of all
# _ped.DiskType objects available
class RequiresDiskTypes(unittest.TestCase):
def setUp(self):
super().setUp()
self.disktype = {}
ty = _ped.disk_type_get_next()
self.disktype[ty.name] = ty
while True:
try:
ty = _ped.disk_type_get_next(ty)
self.disktype[ty.name] = ty
except (IndexError, TypeError, _ped.UnknownTypeException):
break
# Base class for any test case that requires a list being built via successive
# calls of some function. The function must raise IndexError when there's no
# more output to add to the return list. This class is most useful for all
# those _get_next methods.
class BuildList:
def getDeviceList(self, func):
lst = []
prev = None
while True:
try:
if not prev:
prev = func()
else:
prev = func(prev)
lst.append(prev)
except IndexError:
break
return lst
| null |
1,536 |
#!/usr/bin/env python3
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
This program publishes drive event messages.
"""
from cyber.python.cyber_py3 import cyber
from cyber.python.cyber_py3 import cyber_time
import argparse
import datetime
import shutil
import time
import os
import sys
from modules.tools.common.message_manager import PbMessageManager
from modules.tools.common import proto_utils
g_message_manager = PbMessageManager()
g_args = None
g_localization = None
def OnReceiveLocalization(localization_msg):
global g_localization
g_localization = localization_msg
def METHOD_NAME(args):
drive_event_meta_msg = g_message_manager.get_msg_meta_by_topic(
args.drive_event_topic)
if not drive_event_meta_msg:
print('Unknown drive_event topic name: %s' % args.drive_event_topic)
sys.exit(1)
localization_meta_msg = g_message_manager.get_msg_meta_by_topic(
args.localization_topic)
if not localization_meta_msg:
print('Unknown localization topic name: %s' % args.localization_topic)
sys.exit(1)
cyber.init()
node = cyber.Node("derive_event_node")
node.create_reader(localization_meta_msg.topic,
localization_meta_msg.msg_type, OnReceiveLocalization)
writer = node.create_writer(drive_event_meta_msg.topic,
drive_event_meta_msg.msg_type)
seq_num = 0
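    # Prompt for events until cyber shuts down; each confirmed event is
    # published and also saved to disk.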
while not cyber.is_shutdown():
event_type = input(
"Type in Event Type('d') and press Enter (current time: " +
str(datetime.datetime.now()) + ")\n>")
event_type = event_type.strip()
if len(event_type) != 1 or event_type[0].lower() != 'd':
continue
current_time = cyber_time.Time.now().to_sec()
event_str = None
while not event_str:
event_str = input("Type Event:>")
event_str = event_str.strip()
event_msg = drive_event_meta_msg.msg_type()
event_msg.header.timestamp_sec = current_time
event_msg.header.module_name = 'drive_event'
seq_num += 1
event_msg.header.sequence_num = seq_num
event_msg.header.version = 1
event_msg.event = event_str
if g_localization:
event_msg.location.CopyFrom(g_localization.pose)
writer.write(event_msg)
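        # Also dump the event to a timestamped prototext file in the export directory.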
time_str = datetime.datetime.fromtimestamp(current_time).strftime(
"%Y%m%d%H%M%S")
filename = os.path.join(args.dir, "%s_drive_event.pb.txt" % time_str)
proto_utils.write_pb_to_text_file(event_msg, filename)
print('Logged to rosbag and written to file %s' % filename)
time.sleep(0.1)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="A tool to write events when recording rosbag")
parser.add_argument(
"--drive_event_topic",
action="store",
default="/apollo/drive_event",
help="""the drive event topic""")
parser.add_argument(
"--localization_topic",
action="store",
default="/apollo/localization/pose",
help="""the drive event topic""")
parser.add_argument(
"--dir",
action="store",
default="data/bag",
help="""The log export directory.""")
g_args = parser.parse_args()
METHOD_NAME(g_args)
| null |
1,537 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkadcp.endpoint import endpoint_data
import json
class UpdateHubClusterFeatureRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'adcp', '2022-01-01', 'UpdateHubClusterFeature','adcp')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AccessControlList(self): # Array
return self.get_query_params().get('AccessControlList')
def set_AccessControlList(self, AccessControlList): # Array
self.add_query_param("AccessControlList", json.dumps(AccessControlList))
def get_MonitorEnabled(self): # Boolean
return self.get_query_params().get('MonitorEnabled')
def set_MonitorEnabled(self, MonitorEnabled): # Boolean
self.add_query_param('MonitorEnabled', MonitorEnabled)
def get_DeletionProtection(self): # Boolean
return self.get_query_params().get('DeletionProtection')
def set_DeletionProtection(self, DeletionProtection): # Boolean
self.add_query_param('DeletionProtection', DeletionProtection)
def get_EnableMesh(self): # Boolean
return self.get_query_params().get('EnableMesh')
def set_EnableMesh(self, EnableMesh): # Boolean
self.add_query_param('EnableMesh', EnableMesh)
def get_ArgoCDHAEnabled(self): # Boolean
return self.get_query_params().get('ArgoCDHAEnabled')
def set_ArgoCDHAEnabled(self, ArgoCDHAEnabled): # Boolean
self.add_query_param('ArgoCDHAEnabled', ArgoCDHAEnabled)
def get_ArgoCDEnabled(self): # Boolean
return self.get_query_params().get('ArgoCDEnabled')
def set_ArgoCDEnabled(self, ArgoCDEnabled): # Boolean
self.add_query_param('ArgoCDEnabled', ArgoCDEnabled)
def get_VSwitches(self): # Array
return self.get_query_params().get('VSwitches')
def METHOD_NAME(self, VSwitches): # Array
self.add_query_param("VSwitches", json.dumps(VSwitches))
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_PublicAccessEnabled(self): # Boolean
return self.get_query_params().get('PublicAccessEnabled')
def set_PublicAccessEnabled(self, PublicAccessEnabled): # Boolean
self.add_query_param('PublicAccessEnabled', PublicAccessEnabled)
def get_PublicApiServerEnabled(self): # Boolean
return self.get_query_params().get('PublicApiServerEnabled')
def set_PublicApiServerEnabled(self, PublicApiServerEnabled): # Boolean
self.add_query_param('PublicApiServerEnabled', PublicApiServerEnabled)
def get_ArgoServerEnabled(self): # Boolean
return self.get_query_params().get('ArgoServerEnabled')
def set_ArgoServerEnabled(self, ArgoServerEnabled): # Boolean
self.add_query_param('ArgoServerEnabled', ArgoServerEnabled)
def get_WorkflowScheduleMode(self): # String
return self.get_query_params().get('WorkflowScheduleMode')
def set_WorkflowScheduleMode(self, WorkflowScheduleMode): # String
self.add_query_param('WorkflowScheduleMode', WorkflowScheduleMode)
def get_AuditLogEnabled(self): # Boolean
return self.get_query_params().get('AuditLogEnabled')
def set_AuditLogEnabled(self, AuditLogEnabled): # Boolean
self.add_query_param('AuditLogEnabled', AuditLogEnabled)
def get_ClusterId(self): # String
return self.get_query_params().get('ClusterId')
def set_ClusterId(self, ClusterId): # String
self.add_query_param('ClusterId', ClusterId)
def get_PriceLimit(self): # String
return self.get_query_params().get('PriceLimit')
def set_PriceLimit(self, PriceLimit): # String
self.add_query_param('PriceLimit', PriceLimit)
def get_ApiServerEipId(self): # String
return self.get_query_params().get('ApiServerEipId')
def set_ApiServerEipId(self, ApiServerEipId): # String
self.add_query_param('ApiServerEipId', ApiServerEipId)
| null |
1,538 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CheckImportDataAddressRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hcs-mgw', '2017-10-24', 'CheckImportDataAddress')
self.set_method('POST')
def get_InvPath(self):
return self.get_query_params().get('InvPath')
def set_InvPath(self,InvPath):
self.add_query_param('InvPath',InvPath)
def get_AccessMethod(self):
return self.get_query_params().get('AccessMethod')
def set_AccessMethod(self,AccessMethod):
self.add_query_param('AccessMethod',AccessMethod)
def get_InvAccessKeyId(self):
return self.get_query_params().get('InvAccessKeyId')
def set_InvAccessKeyId(self,InvAccessKeyId):
self.add_query_param('InvAccessKeyId',InvAccessKeyId)
def get_AccessKeySecret(self):
return self.get_query_params().get('AccessKeySecret')
def set_AccessKeySecret(self,AccessKeySecret):
self.add_query_param('AccessKeySecret',AccessKeySecret)
def get_ListFilePath(self):
return self.get_query_params().get('ListFilePath')
def set_ListFilePath(self,ListFilePath):
self.add_query_param('ListFilePath',ListFilePath)
def get_InvDomain(self):
return self.get_query_params().get('InvDomain')
def set_InvDomain(self,InvDomain):
self.add_query_param('InvDomain',InvDomain)
def get_AccessKey(self):
return self.get_query_params().get('AccessKey')
def set_AccessKey(self,AccessKey):
self.add_query_param('AccessKey',AccessKey)
def get_AddressType(self):
return self.get_query_params().get('AddressType')
def set_AddressType(self,AddressType):
self.add_query_param('AddressType',AddressType)
def get_Direction(self):
return self.get_query_params().get('Direction')
def set_Direction(self,Direction):
self.add_query_param('Direction',Direction)
def get_Address(self):
return self.get_query_params().get('Address')
def set_Address(self,Address):
self.add_query_param('Address',Address)
def get_AccessProxy(self):
return self.get_query_params().get('AccessProxy')
def set_AccessProxy(self,AccessProxy):
self.add_query_param('AccessProxy',AccessProxy)
def get_VSwitchId(self):
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self,VSwitchId):
self.add_query_param('VSwitchId',VSwitchId)
def get_AliasName(self):
return self.get_query_params().get('AliasName')
def set_AliasName(self,AliasName):
self.add_query_param('AliasName',AliasName)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_Domain(self):
return self.get_query_params().get('Domain')
def METHOD_NAME(self,Domain):
self.add_query_param('Domain',Domain)
def get_Appid(self):
return self.get_query_params().get('Appid')
def set_Appid(self,Appid):
self.add_query_param('Appid',Appid)
def get_InvSecretKey(self):
return self.get_query_params().get('InvSecretKey')
def set_InvSecretKey(self,InvSecretKey):
self.add_query_param('InvSecretKey',InvSecretKey)
def get_MgwRegionId(self):
return self.get_query_params().get('MgwRegionId')
def set_MgwRegionId(self,MgwRegionId):
self.add_query_param('MgwRegionId',MgwRegionId)
def get_SubAddress(self):
return self.get_query_params().get('SubAddress')
def set_SubAddress(self,SubAddress):
        self.add_query_param('SubAddress',SubAddress)
| null |
1,539 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdms_enterprise.endpoint import endpoint_data
class RegisterInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'dms-enterprise', '2018-11-01', 'RegisterInstance','dms-enterprise')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_EcsRegion(self): # String
return self.get_query_params().get('EcsRegion')
def set_EcsRegion(self, EcsRegion): # String
self.add_query_param('EcsRegion', EcsRegion)
def get_DdlOnline(self): # Integer
return self.get_query_params().get('DdlOnline')
def set_DdlOnline(self, DdlOnline): # Integer
self.add_query_param('DdlOnline', DdlOnline)
def get_UseDsql(self): # Integer
return self.get_query_params().get('UseDsql')
def set_UseDsql(self, UseDsql): # Integer
self.add_query_param('UseDsql', UseDsql)
def METHOD_NAME(self): # String
return self.get_query_params().get('NetworkType')
def set_NetworkType(self, NetworkType): # String
self.add_query_param('NetworkType', NetworkType)
def get_Tid(self): # Long
return self.get_query_params().get('Tid')
def set_Tid(self, Tid): # Long
self.add_query_param('Tid', Tid)
def get_Sid(self): # String
return self.get_query_params().get('Sid')
def set_Sid(self, Sid): # String
self.add_query_param('Sid', Sid)
def get_EnableSellSitd(self): # String
return self.get_query_params().get('EnableSellSitd')
def set_EnableSellSitd(self, EnableSellSitd): # String
self.add_query_param('EnableSellSitd', EnableSellSitd)
def get_DataLinkName(self): # String
return self.get_query_params().get('DataLinkName')
def set_DataLinkName(self, DataLinkName): # String
self.add_query_param('DataLinkName', DataLinkName)
def get_TemplateType(self): # String
return self.get_query_params().get('TemplateType')
def set_TemplateType(self, TemplateType): # String
self.add_query_param('TemplateType', TemplateType)
def get_InstanceSource(self): # String
return self.get_query_params().get('InstanceSource')
def set_InstanceSource(self, InstanceSource): # String
self.add_query_param('InstanceSource', InstanceSource)
def get_EnvType(self): # String
return self.get_query_params().get('EnvType')
def set_EnvType(self, EnvType): # String
self.add_query_param('EnvType', EnvType)
def get_Host(self): # String
return self.get_query_params().get('Host')
def set_Host(self, Host): # String
self.add_query_param('Host', Host)
def get_InstanceType(self): # String
return self.get_query_params().get('InstanceType')
def set_InstanceType(self, InstanceType): # String
self.add_query_param('InstanceType', InstanceType)
def get_QueryTimeout(self): # Integer
return self.get_query_params().get('QueryTimeout')
def set_QueryTimeout(self, QueryTimeout): # Integer
self.add_query_param('QueryTimeout', QueryTimeout)
def get_EcsInstanceId(self): # String
return self.get_query_params().get('EcsInstanceId')
def set_EcsInstanceId(self, EcsInstanceId): # String
self.add_query_param('EcsInstanceId', EcsInstanceId)
def get_ExportTimeout(self): # Integer
return self.get_query_params().get('ExportTimeout')
def set_ExportTimeout(self, ExportTimeout): # Integer
self.add_query_param('ExportTimeout', ExportTimeout)
def get_DatabasePassword(self): # String
return self.get_query_params().get('DatabasePassword')
def set_DatabasePassword(self, DatabasePassword): # String
self.add_query_param('DatabasePassword', DatabasePassword)
def get_InstanceAlias(self): # String
return self.get_query_params().get('InstanceAlias')
def set_InstanceAlias(self, InstanceAlias): # String
self.add_query_param('InstanceAlias', InstanceAlias)
def get_TemplateId(self): # Long
return self.get_query_params().get('TemplateId')
def set_TemplateId(self, TemplateId): # Long
self.add_query_param('TemplateId', TemplateId)
def get_DatabaseUser(self): # String
return self.get_query_params().get('DatabaseUser')
def set_DatabaseUser(self, DatabaseUser): # String
self.add_query_param('DatabaseUser', DatabaseUser)
def get_Port(self): # Integer
return self.get_query_params().get('Port')
def set_Port(self, Port): # Integer
self.add_query_param('Port', Port)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_DbaUid(self): # Long
return self.get_query_params().get('DbaUid')
def set_DbaUid(self, DbaUid): # Long
self.add_query_param('DbaUid', DbaUid)
def get_SkipTest(self): # Boolean
return self.get_query_params().get('SkipTest')
def set_SkipTest(self, SkipTest): # Boolean
self.add_query_param('SkipTest', SkipTest)
def get_SafeRule(self): # String
return self.get_query_params().get('SafeRule')
def set_SafeRule(self, SafeRule): # String
self.add_query_param('SafeRule', SafeRule)
| null |
1,540 |
# Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from toil.job import Job
from toil.exceptions import FailedJobsException
from toil.test import ToilTest
class CleanWorkDirTest(ToilTest):
"""
Tests testing :class:toil.fileStores.abstractFileStore.AbstractFileStore
"""
def setUp(self):
super().setUp()
self.testDir = self._createTempDir()
def tearDown(self):
super().tearDown()
shutil.rmtree(self.testDir)
def testNever(self):
retainedTempData = self._runAndReturnWorkDir("never", job=tempFileTestJob)
self.assertNotEqual(retainedTempData, [], "The worker's temporary workspace was deleted despite "
"cleanWorkDir being set to 'never'")
def testAlways(self):
retainedTempData = self._runAndReturnWorkDir("always", job=tempFileTestJob)
self.assertEqual(retainedTempData, [], "The worker's temporary workspace was not deleted despite "
"cleanWorkDir being set to 'always'")
def testOnErrorWithError(self):
retainedTempData = self._runAndReturnWorkDir("onError", job=tempFileTestErrorJob, expectError=True)
self.assertEqual(retainedTempData, [], "The worker's temporary workspace was not deleted despite "
"an error occurring and cleanWorkDir being set to 'onError'")
def METHOD_NAME(self):
retainedTempData = self._runAndReturnWorkDir("onError", job=tempFileTestJob)
self.assertNotEqual(retainedTempData, [], "The worker's temporary workspace was deleted despite "
"no error occurring and cleanWorkDir being set to 'onError'")
def testOnSuccessWithError(self):
retainedTempData = self._runAndReturnWorkDir("onSuccess", job=tempFileTestErrorJob, expectError=True)
self.assertNotEqual(retainedTempData, [], "The worker's temporary workspace was deleted despite "
"an error occurring and cleanWorkDir being set to 'onSuccesss'")
def testOnSuccessWithSuccess(self):
retainedTempData = self._runAndReturnWorkDir("onSuccess", job=tempFileTestJob)
self.assertEqual(retainedTempData, [], "The worker's temporary workspace was not deleted despite "
"a successful job execution and cleanWorkDir being set to 'onSuccesss'")
def _runAndReturnWorkDir(self, cleanWorkDir, job, expectError=False):
"""
        Runs toil with the specified job and cleanWorkDir setting. expectError determines whether the
        toil run is expected to fail; the test fails if that expectation is not met. Returns the
        contents of the workDir after completion of the run.
"""
options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
options.workDir = self.testDir
options.clean = "always"
options.cleanWorkDir = cleanWorkDir
A = Job.wrapJobFn(job)
if expectError:
self._launchError(A, options)
else:
self._launchRegular(A, options)
return os.listdir(self.testDir)
def _launchRegular(self, A, options):
Job.Runner.startToil(A, options)
def _launchError(self, A, options):
try:
Job.Runner.startToil(A, options)
except FailedJobsException:
pass # we expect a job to fail here
else:
self.fail("Toil run succeeded unexpectedly")
def tempFileTestJob(job):
with open(job.fileStore.getLocalTempFile(), "w") as f:
f.write("test file retention")
def tempFileTestErrorJob(job):
with open(job.fileStore.getLocalTempFile(), "w") as f:
f.write("test file retention")
raise RuntimeError() # test failure
| null |
1,541 |
from itertools import chain
import unittest
from unittest.mock import Mock
from queue import Queue
from AnyQt.QtGui import QStandardItem
from Orange.data import Table
from Orange.widgets.visualize.utils import (
VizRankDialog, Result, run_vizrank, QueuedScore
)
from Orange.widgets.tests.base import WidgetTest
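# Toy scoring function for the tests: a state (i, j) scores (i + 1) / (j + 1).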
def compute_score(x):
return (x[0] + 1) / (x[1] + 1)
class TestRunner(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.data = Table("iris")
def test_Result(self):
res = Result(queue=Queue(), scores=[])
self.assertIsInstance(res.queue, Queue)
self.assertIsInstance(res.scores, list)
def test_run_vizrank(self):
scores, task = [], Mock()
# run through all states
task.is_interruption_requested.return_value = False
states = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
res = run_vizrank(compute_score, lambda initial: chain(states),
None, scores, 0, 6, task)
next_state = self.assertQueueEqual(
res.queue, [0, 0, 0, 3, 2, 5], compute_score,
states, states[1:] + [None])
self.assertIsNone(next_state)
res_scores = sorted([compute_score(x) for x in states])
self.assertListEqual(res.scores, res_scores)
self.assertIsNot(scores, res.scores)
self.assertEqual(task.set_partial_result.call_count, 2)
self.assertEqual(task.set_progress_value.call_count, 7)
def test_run_vizrank_interrupt(self):
scores, task = [], Mock()
# interrupt calculation in third iteration
        task.is_interruption_requested.side_effect = lambda: \
            task.is_interruption_requested.call_count > 2
states = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
res = run_vizrank(compute_score, lambda initial: chain(states),
None, scores, 0, 6, task)
next_state = self.assertQueueEqual(
res.queue, [0, 0], compute_score, states[:2], states[1:3])
self.assertEqual(next_state, (0, 3))
res_scores = sorted([compute_score(x) for x in states[:2]])
self.assertListEqual(res.scores, res_scores)
self.assertIsNot(scores, res.scores)
self.assertEqual(task.set_partial_result.call_count, 1)
self.assertEqual(task.set_progress_value.call_count, 3)
task.set_progress_value.assert_called_with(int(1 / 6 * 100))
# continue calculation through all states
task.is_interruption_requested.side_effect = lambda: False
i = states.index(next_state)
res = run_vizrank(compute_score, lambda initial: chain(states[i:]),
None, res_scores, 2, 6, task)
next_state = self.assertQueueEqual(
res.queue, [0, 3, 2, 5], compute_score, states[2:],
states[3:] + [None])
self.assertIsNone(next_state)
res_scores = sorted([compute_score(x) for x in states])
self.assertListEqual(res.scores, res_scores)
self.assertIsNot(scores, res.scores)
self.assertEqual(task.set_partial_result.call_count, 3)
self.assertEqual(task.set_progress_value.call_count, 8)
task.set_progress_value.assert_called_with(int(5 / 6 * 100))
def assertQueueEqual(self, queue, positions, f, states, next_states):
self.assertIsInstance(queue, Queue)
for qs in (QueuedScore(position=p, score=f(s), state=s, next_state=ns)
for p, s, ns in zip(positions, states, next_states)):
result = queue.get_nowait()
self.assertEqual(result.position, qs.position)
self.assertEqual(result.state, qs.state)
self.assertEqual(result.next_state, qs.next_state)
self.assertEqual(result.score, qs.score)
next_state = result.next_state
return next_state
class TestVizRankDialog(WidgetTest):
def METHOD_NAME(self):
def iterate_states(initial_state):
if initial_state is not None:
return chain(states[states.index(initial_state):])
return chain(states)
def invoke_on_partial_result():
widget.on_partial_result(run_vizrank(
widget.compute_score,
widget.iterate_states,
widget.saved_state,
widget.scores,
widget.saved_progress,
widget.state_count(),
task
))
task = Mock()
states = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
widget = VizRankDialog(None)
widget.progressBarInit()
widget.compute_score = compute_score
widget.iterate_states = iterate_states
widget.row_for_state = lambda sc, _: [QStandardItem(str(sc))]
widget.state_count = lambda: len(states)
# interrupt calculation in third iteration
        task.is_interruption_requested.side_effect = lambda: \
            task.is_interruption_requested.call_count > 2
invoke_on_partial_result()
self.assertEqual(widget.rank_model.rowCount(), 2)
for row, score in enumerate(
sorted([compute_score(x) for x in states[:2]])):
self.assertEqual(widget.rank_model.item(row, 0).text(), str(score))
self.assertEqual(widget.saved_progress, 2)
task.set_progress_value.assert_called_with(int(1 / 6 * 100))
# continue calculation through all states
task.is_interruption_requested.side_effect = lambda: False
invoke_on_partial_result()
self.assertEqual(widget.rank_model.rowCount(), 6)
for row, score in enumerate(
sorted([compute_score(x) for x in states])):
self.assertEqual(widget.rank_model.item(row, 0).text(), str(score))
self.assertEqual(widget.saved_progress, 6)
task.set_progress_value.assert_called_with(int(5 / 6 * 100))
if __name__ == "__main__":
unittest.main()
| null |
1,542 |
import os.path
from unittest import TestCase
from pcs import settings
from pcs.common.reports import codes as report_codes
from pcs.lib.commands import cluster
from pcs_test.tools import fixture
from pcs_test.tools.command_env import get_env_tools
class SuccessMinimal(TestCase):
def setUp(self):
self.env_assist, self.config = get_env_tools(self)
self.nodes = [f"node{i}" for i in range(3)]
def test_live_cib_required(self):
self.config.env.set_cib_data("<cib />")
self.env_assist.assert_raise_library_error(
lambda: cluster.remove_nodes_from_cib(
self.env_assist.get_env(),
self.nodes,
),
[
fixture.error(
report_codes.LIVE_ENVIRONMENT_REQUIRED,
forbidden_options=["CIB"],
)
],
expected_in_processor=False,
)
def test_success_pcmk_running(self):
self.config.services.is_running("pacemaker")
for node in self.nodes:
self.config.runner.pcmk.remove_node(
node,
name=f"remove_node.{node}",
)
cluster.remove_nodes_from_cib(self.env_assist.get_env(), self.nodes)
def METHOD_NAME(self):
err_msg = "an error"
self.config.services.is_running("pacemaker")
self.config.runner.pcmk.remove_node(
self.nodes[0],
)
self.config.runner.pcmk.remove_node(
self.nodes[1],
returncode=1,
stderr=err_msg,
name="remove_node_failure",
)
self.env_assist.assert_raise_library_error(
lambda: cluster.remove_nodes_from_cib(
self.env_assist.get_env(),
self.nodes,
),
[
fixture.error(
report_codes.NODE_REMOVE_IN_PACEMAKER_FAILED,
node_list_to_remove=[self.nodes[1]],
node="",
reason=err_msg,
)
],
expected_in_processor=False,
)
def test_success_pcmk_not_running(self):
cmd_env = dict(CIB_file=os.path.join(settings.cib_dir, "cib.xml"))
self.config.services.is_running("pacemaker", return_value=False)
for node in self.nodes:
self.config.runner.place(
[
settings.cibadmin_exec,
"--delete-all",
"--force",
f"--xpath=/cib/configuration/nodes/node[@uname='{node}']",
],
name=f"remove_node.{node}",
env=cmd_env,
)
cluster.remove_nodes_from_cib(self.env_assist.get_env(), self.nodes)
def test_failure_pcmk_not_running(self):
err_msg = "an error"
cmd_env = dict(CIB_file=os.path.join(settings.cib_dir, "cib.xml"))
cmd = [settings.cibadmin_exec, "--delete-all", "--force"]
cmd_xpath = "--xpath=/cib/configuration/nodes/node[@uname='{}']"
self.config.services.is_running("pacemaker", return_value=False)
self.config.runner.place(
cmd + [cmd_xpath.format(self.nodes[0])],
name="remove_node_success",
env=cmd_env,
)
self.config.runner.place(
cmd + [cmd_xpath.format(self.nodes[1])],
returncode=1,
stderr=err_msg,
name="remove_node_failure",
env=cmd_env,
)
self.env_assist.assert_raise_library_error(
lambda: cluster.remove_nodes_from_cib(
self.env_assist.get_env(),
self.nodes,
),
[
fixture.error(
report_codes.NODE_REMOVE_IN_PACEMAKER_FAILED,
node_list_to_remove=[self.nodes[1]],
node="",
reason=err_msg,
)
],
expected_in_processor=False,
)
| null |
1,543 |
from datetime import timedelta
import pytest
from django.utils import timezone
from notion.block import NotionBlock
from notion.block import NotionBlockList
from notion.cache import get_cached_page
from notion.cache import NotionCache
from notion.cache import TIMEOUT
from notion.models import NotionCacheEntry
from notion.page import NotionPage
pytestmark = [
pytest.mark.django_db,
]
@pytest.fixture
def current_user_staff(mocker, staff_user):
return mocker.patch("notion.cache.get_current_user", return_value=staff_user)
@pytest.fixture
def current_user_casual(mocker, user):
return mocker.patch("notion.cache.get_current_user", return_value=user)
@pytest.fixture
def mock_cache_set(mocker):
return mocker.patch("notion.cache.NotionCache.set")
@pytest.fixture
def mock_fetch_page(mocker):
return mocker.patch("notion.client.NotionClient.fetch_page_recursively")
@pytest.fixture
def cache():
return NotionCache()
@pytest.fixture
def another_page() -> NotionPage:
return NotionPage(
blocks=NotionBlockList(
[
NotionBlock(id="block-2", data={"role": "reader-6"}),
]
)
)
@pytest.fixture
def page_from_callable(page, mocker):
return mocker.MagicMock(return_value=page)
@pytest.fixture
def not_expired_datetime():
return timezone.now() + timedelta(seconds=TIMEOUT)
@pytest.fixture
def METHOD_NAME():
return timezone.now()
@pytest.fixture
def expired_cache_entry(cache_entry, METHOD_NAME):
cache_entry.setattr_and_save("expires", METHOD_NAME)
return cache_entry
def test_set(cache, page, page_as_dict):
cache.set("some_key", page)
cache_entry = NotionCacheEntry.objects.get()
assert cache_entry.content == page_as_dict
def test_set_callable(cache, page_from_callable, page_as_dict):
cache.set("some_key", page_from_callable)
cache_entry = NotionCacheEntry.objects.get()
assert cache_entry.content == page_as_dict
page_from_callable.assert_called_once()
def test_get(cache, page, cache_entry):
got = cache.get(cache_entry.cache_key)
assert got == page
def test_get_nothing_if_cache_expired(cache, expired_cache_entry):
got = cache.get(expired_cache_entry.cache_key)
assert not got
def test_set_and_get(cache, page):
cache.set("some_key", page)
got = cache.get("some_key")
assert got == page
def test_get_or_set_get_if_exists_and_not_expired(cache, page, cache_entry, page_from_callable):
got = cache.get_or_set(cache_entry.cache_key, content=page_from_callable)
page_from_callable.assert_not_called()
assert got == page
assert got != page_from_callable
def test_get_or_set_set_if_expired(cache, another_page, expired_cache_entry):
got = cache.get_or_set(expired_cache_entry.cache_key, content=another_page)
new_cache_entry = NotionCacheEntry.objects.get(cache_key=expired_cache_entry.cache_key)
assert got == another_page
assert got == NotionPage.from_json(new_cache_entry.content)
def test_get_or_set_set_if_doesnt_exist(cache, another_page):
got = cache.get_or_set("some random cache key", content=another_page)
new_cache_entry = NotionCacheEntry.objects.get(cache_key="some random cache key")
assert got == another_page
assert got == NotionPage.from_json(new_cache_entry.content)
@pytest.mark.parametrize("env_value", ["On", ""])
@pytest.mark.usefixtures("current_user_casual")
def test_user_always_gets_page_from_existing_cache(settings, cache_entry, env_value, mock_cache_set, mock_fetch_page):
settings.NOTION_CACHE_ONLY = bool(env_value)
get_cached_page(cache_entry.cache_key)
mock_cache_set.assert_not_called()
mock_fetch_page.assert_not_called()
@pytest.mark.usefixtures("current_user_staff")
def test_staff_user_get_page_from_cache_if_env_cache(settings, cache_entry, mock_cache_set, mock_fetch_page):
settings.NOTION_CACHE_ONLY = bool("On")
get_cached_page(cache_entry.cache_key)
mock_cache_set.assert_not_called()
mock_fetch_page.assert_not_called()
@pytest.mark.usefixtures("current_user_staff")
def test_staff_user_get_page_from_notion_if_not_env_cache(settings, cache_entry, mock_cache_set, mock_fetch_page):
settings.NOTION_CACHE_ONLY = bool("")
got = get_cached_page(cache_entry.cache_key)
mock_page = mock_fetch_page.return_value
assert got == mock_page
mock_cache_set.assert_called_once_with(cache_entry.cache_key, mock_page)
mock_fetch_page.assert_called_once_with(cache_entry.cache_key)
| null |
1,544 |
# Copyright 2023 Memgraph Ltd.
#
# Use of this software is governed by the Business Source License
# included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
# License, and you may not use this file except in compliance with the Business Source License.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0, included in the file
# licenses/APL.txt.
import mgp
# isort: off
from common.shared import BaseClass, InitializationGraphMutable, InitializationUnderlyingGraphMutable
initialization_underlying_graph_mutable = InitializationUnderlyingGraphMutable()
def cleanup_underlying():
initialization_underlying_graph_mutable.reset()
def init_underlying_graph_is_mutable(ctx: mgp.ProcCtx, object: mgp.Any):
initialization_underlying_graph_mutable.set()
def underlying_graph_is_mutable(ctx: mgp.ProcCtx, object: mgp.Any) -> mgp.Record(mutable=bool, init_called=bool):
if initialization_underlying_graph_mutable.get_to_return() > 0:
initialization_underlying_graph_mutable.increment_returned(1)
return mgp.Record(
mutable=object.underlying_graph_is_mutable(), init_called=initialization_underlying_graph_mutable.get()
)
return []
# Register batched
mgp.add_batch_read_proc(underlying_graph_is_mutable, init_underlying_graph_is_mutable, cleanup_underlying)
initialization_graph_mutable = InitializationGraphMutable()
def init_graph_is_mutable(ctx: mgp.ProcCtx):
initialization_graph_mutable.set()
def graph_is_mutable(ctx: mgp.ProcCtx) -> mgp.Record(mutable=bool, init_called=bool):
if initialization_graph_mutable.get_to_return() > 0:
initialization_graph_mutable.increment_returned(1)
return mgp.Record(mutable=ctx.graph.is_mutable(), init_called=initialization_graph_mutable.get())
return []
def cleanup_graph():
initialization_graph_mutable.reset()
# Register batched
mgp.add_batch_read_proc(graph_is_mutable, init_graph_is_mutable, cleanup_graph)
class BatchingNums(BaseClass):
def __init__(self, nums_to_return):
super().__init__(nums_to_return)
self._nums = []
self._i = 0
batching_nums = BatchingNums(10)
def init_batching_nums(ctx: mgp.ProcCtx):
batching_nums.set()
batching_nums._nums = [i for i in range(1, 11)]
batching_nums._i = 0
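# Return one number per call; once all ten are consumed the procedure signals
# exhaustion with an empty batch.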
def METHOD_NAME(ctx: mgp.ProcCtx) -> mgp.Record(num=int, init_called=bool, is_valid=bool):
if batching_nums.get_to_return() > 0:
batching_nums.increment_returned(1)
batching_nums._i += 1
return mgp.Record(
num=batching_nums._nums[batching_nums._i - 1],
init_called=batching_nums.get(),
is_valid=ctx.graph.is_valid(),
)
return []
def cleanup_batching_nums():
batching_nums.reset()
batching_nums._i = 0
# Register batched
mgp.add_batch_read_proc(METHOD_NAME, init_batching_nums, cleanup_batching_nums)
class BatchingVertices(BaseClass):
def __init__(self):
super().__init__()
self._vertices = []
self._i = 0
batching_vertices = BatchingVertices()
def init_batching_vertices(ctx: mgp.ProcCtx):
print("init called")
print("graph is mutable", ctx.graph.is_mutable())
batching_vertices.set()
batching_vertices._vertices = list(ctx.graph.vertices)
batching_vertices._i = 0
batching_vertices._num_to_return = len(batching_vertices._vertices)
def batch_vertices(ctx: mgp.ProcCtx) -> mgp.Record(vertex_id=int, init_called=bool):
    if batching_vertices.get_to_return() == 0:
        return []
    batching_vertices.increment_returned(1)
    batching_vertices._i += 1
    return mgp.Record(
        vertex_id=batching_vertices._vertices[batching_vertices._i - 1].id,
        init_called=batching_vertices.get(),
    )
def cleanup_batching_vertices():
batching_vertices.reset()
batching_vertices._vertices = []
batching_vertices._i = 0
batching_vertices._num_to_return = 0
# Register batched
mgp.add_batch_read_proc(batch_vertices, init_batching_vertices, cleanup_batching_vertices)
| null |
1,545 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkoceanbasepro.endpoint import endpoint_data
import json
class CreateProjectRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'OceanBasePro', '2019-09-01', 'CreateProject','oceanbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SinkEndpointId(self): # String
return self.get_body_params().get('SinkEndpointId')
def set_SinkEndpointId(self, SinkEndpointId): # String
self.add_body_params('SinkEndpointId', SinkEndpointId)
def get_UseOss(self): # Boolean
return self.get_body_params().get('UseOss')
def set_UseOss(self, UseOss): # Boolean
self.add_body_params('UseOss', UseOss)
def get_OssKey(self): # String
return self.get_body_params().get('OssKey')
def set_OssKey(self, OssKey): # String
self.add_body_params('OssKey', OssKey)
def get_SourceEndpointId(self): # String
return self.get_body_params().get('SourceEndpointId')
def set_SourceEndpointId(self, SourceEndpointId): # String
self.add_body_params('SourceEndpointId', SourceEndpointId)
def get_Type(self): # String
return self.get_body_params().get('Type')
def set_Type(self, Type): # String
self.add_body_params('Type', Type)
def get_FullTransferConfig(self): # Struct
return self.get_body_params().get('FullTransferConfig')
def set_FullTransferConfig(self, FullTransferConfig): # Struct
self.add_body_params("FullTransferConfig", json.dumps(FullTransferConfig))
def get_EnableStructTransfer(self): # Boolean
return self.get_body_params().get('EnableStructTransfer')
def set_EnableStructTransfer(self, EnableStructTransfer): # Boolean
self.add_body_params('EnableStructTransfer', EnableStructTransfer)
def get_TransferMapping(self): # Struct
return self.get_body_params().get('TransferMapping')
def set_TransferMapping(self, TransferMapping): # Struct
self.add_body_params("TransferMapping", json.dumps(TransferMapping))
def get_WorkerGradeId(self): # String
return self.get_body_params().get('WorkerGradeId')
def set_WorkerGradeId(self, WorkerGradeId): # String
self.add_body_params('WorkerGradeId', WorkerGradeId)
def get_CommonTransferConfig(self): # Struct
return self.get_body_params().get('CommonTransferConfig')
def METHOD_NAME(self, CommonTransferConfig): # Struct
self.add_body_params("CommonTransferConfig", json.dumps(CommonTransferConfig))
def get_StructTransferConfig(self): # Struct
return self.get_body_params().get('StructTransferConfig')
def set_StructTransferConfig(self, StructTransferConfig): # Struct
self.add_body_params("StructTransferConfig", json.dumps(StructTransferConfig))
def get_EnableIncrTransfer(self): # Boolean
return self.get_body_params().get('EnableIncrTransfer')
def set_EnableIncrTransfer(self, EnableIncrTransfer): # Boolean
self.add_body_params('EnableIncrTransfer', EnableIncrTransfer)
def get_EnableFullTransfer(self): # Boolean
return self.get_body_params().get('EnableFullTransfer')
def set_EnableFullTransfer(self, EnableFullTransfer): # Boolean
self.add_body_params('EnableFullTransfer', EnableFullTransfer)
def get_EnableFullVerify(self): # Boolean
return self.get_body_params().get('EnableFullVerify')
def set_EnableFullVerify(self, EnableFullVerify): # Boolean
self.add_body_params('EnableFullVerify', EnableFullVerify)
def get_Name(self): # String
return self.get_body_params().get('Name')
def set_Name(self, Name): # String
self.add_body_params('Name', Name)
def get_LabelIds(self): # Array
return self.get_body_params().get('LabelIds')
def set_LabelIds(self, LabelIds): # Array
self.add_body_params("LabelIds", json.dumps(LabelIds))
def get_IncrTransferConfig(self): # Struct
return self.get_body_params().get('IncrTransferConfig')
def set_IncrTransferConfig(self, IncrTransferConfig): # Struct
self.add_body_params("IncrTransferConfig", json.dumps(IncrTransferConfig))
def get_EnableReverseIncrTransfer(self): # Boolean
return self.get_body_params().get('EnableReverseIncrTransfer')
def set_EnableReverseIncrTransfer(self, EnableReverseIncrTransfer): # Boolean
self.add_body_params('EnableReverseIncrTransfer', EnableReverseIncrTransfer)
| null |
1,546 |
import unittest
import subprocess
import os
import utils
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
cleanup_py = os.path.join(TOPDIR, 'cleanup_code.py')
class Tests(unittest.TestCase):
def make_dummy_format(self, name, d, exitval=0):
"""Make a dummy formmater binary in the directory d.
Return the modified environment needed to put this binary
in the PATH."""
fname = os.path.join(d, name)
utils.write_file(fname, "#!/bin/sh\necho foo\nexit %d\n" % exitval)
os.chmod(fname, 493) # 493 = 0755
env = os.environ.copy()
env['PATH'] = d + os.pathsep + env['PATH']
return env
def test_python_reindent(self):
"""Test cleanup_code script on Python files with reindent."""
for args in ['--all', 'test.py']:
with utils.TempDir() as tmpdir:
pyfile = os.path.join(tmpdir, 'test.py')
utils.write_file(pyfile, 'def foo():\n bar\n')
p = subprocess.Popen([cleanup_py, args], cwd=tmpdir)
stdout, stderr = p.communicate()
self.assertEqual(p.returncode, 0)
# 2-space indentation should have been corrected to 4-space
self.assertEqual(utils.read_file(pyfile),
'def foo():\n bar\n')
def METHOD_NAME(self):
"""Test cleanup_code script on Python files with autopep8."""
for args in ['--all', 'test.py']:
with utils.TempDir() as tmpdir:
env = self.make_dummy_format('autopep8', tmpdir)
pyfile = os.path.join(tmpdir, 'test.py')
utils.write_file(pyfile, 'def foo():\n bar\n')
p = subprocess.Popen([cleanup_py, '-a', '-v', args],
cwd=tmpdir, env=env)
stdout, stderr = p.communicate()
self.assertEqual(p.returncode, 0)
# dummy autopep8 should have written out 'foo'
self.assertEqual(utils.read_file(pyfile), 'foo\n')
def test_cpp_clang(self):
"""Test cleanup_code script on C++ files with clang-format."""
# directories that should be ignored
igdirs = ['dependency', 'eigen3', 'igdir', 'git-repo']
for args in ['--all', 'test.cpp']:
with utils.TempDir() as tmpdir:
env = self.make_dummy_format('clang-format', tmpdir)
for d in igdirs:
os.mkdir(os.path.join(tmpdir, d))
utils.write_file(os.path.join(tmpdir, d, 'test.h'), 'bar')
# git-repo is a git submodule so shouldn't be descended into
os.mkdir(os.path.join(tmpdir, 'git-repo', '.git'))
cppfile = os.path.join(tmpdir, 'test.cpp')
utils.write_file(cppfile, 'bar')
p = subprocess.Popen([cleanup_py, '-v', '-e', 'igdir', args],
cwd=tmpdir, env=env)
stdout, stderr = p.communicate()
self.assertEqual(p.returncode, 0)
# dummy clang-format should have written out 'foo'
self.assertEqual(utils.read_file(cppfile), 'foo\n')
# ignored directories should be unchanged
for d in igdirs:
con = utils.read_file(os.path.join(tmpdir, d, 'test.h'))
self.assertEqual(con, 'bar')
def test_no_files(self):
"""Test cleanup_code script with no files selected."""
with utils.TempDir() as tmpdir:
p = subprocess.Popen([cleanup_py], cwd=tmpdir)
stdout, stderr = p.communicate()
self.assertEqual(p.returncode, 2)
def test_run_error(self):
"""Test cleanup_code handling of subprocess error"""
with utils.TempDir() as tmpdir:
env = self.make_dummy_format('autopep8', tmpdir, exitval=1)
pyfile = os.path.join(tmpdir, 'test.py')
utils.write_file(pyfile, 'bar')
p = subprocess.Popen([cleanup_py, '-a', '--all'], cwd=tmpdir,
env=env)
stdout, stderr = p.communicate()
# error should be caught and not fail entire job
self.assertEqual(p.returncode, 0)
# file should be unchanged
self.assertEqual(utils.read_file(pyfile), 'bar')
if __name__ == '__main__':
unittest.main()
| null |
1,547 |
# **************************************************************************
# *
# * Authors: Roberto Marabini ([email protected]), May 2013
# * Marta Martinez ([email protected])
# *
# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia , CSIC
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address '[email protected]'
# *
# **************************************************************************
import os
from distutils.spawn import find_executable
from os.path import exists
import pyworkflow.protocol.params as params
from pwem.constants import SYM_I222
from pwem.emlib.image import ImageHandler
from pwem.objects import (SetOfVolumes)
from pyworkflow.viewer import DESKTOP_TKINTER, WEB_DJANGO
from pwem.viewers import Chimera, ChimeraView, EmProtocolViewer
from xmipp3.protocols.protocol_extract_asymmetric_unit import XmippProtExtractUnit
from xmipp3.constants import (XMIPP_TO_SCIPION, XMIPP_I222)
VOLUME_SLICES = 1
VOLUME_CHIMERA = 0
class viewerXmippProtExtractUnit(EmProtocolViewer):
""" Visualize the input and output volumes of protocol XmippProtExtractUnit
by choosing Chimera (3D) or Xmipp visualizer (2D).
The axes of coordinates x, y, z will be shown by choosing Chimera"""
_label = 'viewer extract asymmetric unit'
_targets = [XmippProtExtractUnit]
_environments = [DESKTOP_TKINTER, WEB_DJANGO]
# ROB: I know that there is a nice chimera interface but it does not work
# in this case since I am interested in reading the MRC header. So I will
# use chimera as an external program
def _defineParams(self, form):
form.addSection(label='Visualization of input volume and extracted '
'asymmetric unit')
form.addParam('displayVol', params.EnumParam,
choices=['chimerax', 'slices'], default=VOLUME_CHIMERA,
display=params.EnumParam.DISPLAY_HLIST,
label='Display volume with',
help='*chimerax*: display volumes as surface with '
'ChimeraX.\n*slices*: display volumes as 2D slices '
'along z axis.\n')
def _getVisualizeDict(self):
return{
'displayVol': self._showVolumes,
}
def _validate(self):
if find_executable(Chimera.getProgram()) is None:
return ["chimerax is not available. Either install it or choose"
" option 'slices'. "]
return []
# =========================================================================
# Show Volumes
# =========================================================================
def _showVolumes(self, paramName=None):
if self.displayVol == VOLUME_CHIMERA:
return self.METHOD_NAME()
elif self.displayVol == VOLUME_SLICES:
return self._showVolumesXmipp()
def _createSetOfVolumes(self):
if not exists(self.protocol._getExtraPath('tmpVolumes.sqlite')):
tmpFileName = self.protocol._getExtraPath("tmpVolumes.sqlite")
_inputVol = self.protocol.inputVolumes.get()
_outputVol = self.protocol.outputVolume
setOfVolumes = SetOfVolumes(filename=tmpFileName)
setOfVolumes.append(_inputVol)
setOfVolumes.append(_outputVol)
setOfVolumes.write()
else:
tmpFileName = self.protocol._getExtraPath('tmpVolumes.sqlite')
setOfVolumes = SetOfVolumes(filename=tmpFileName)
return setOfVolumes
def METHOD_NAME(self):
tmpFileNameCMD = self.protocol._getExtraPath("chimera.cxc")
f = open(tmpFileNameCMD, "w")
dim = self.protocol.inputVolumes.get().getDim()[0]
sampling = self.protocol.inputVolumes.get().getSamplingRate()
tmpFileName = os.path.abspath(self.protocol._getExtraPath("axis.bild"))
Chimera.createCoordinateAxisFile(dim,
bildFileName=tmpFileName,
sampling=sampling)
f.write("open %s\n" % tmpFileName)
f.write("cofr 0,0,0\n") # set center of coordinates
_inputVol = self.protocol.inputVolumes.get()
_outputVol = self.protocol.outputVolume
inputVolFileName = os.path.abspath(ImageHandler.removeFileType(
_inputVol.getFileName()))
# input vol origin coordinates
x_input, y_input, z_input = _inputVol.getShiftsFromOrigin()
f.write("open %s\n" % inputVolFileName)
f.write("volume #2 style mesh level 0.001 voxelSize %f origin "
"%0.2f,%0.2f,%0.2f\n"
% (_inputVol.getSamplingRate(), x_input, y_input, z_input))
outputVolFileName = os.path.abspath(ImageHandler.removeFileType(
_outputVol.getFileName()))
# output vol origin coordinates
x_output, y_output, z_output = _outputVol.getShiftsFromOrigin()
f.write("open %s\n" % outputVolFileName)
f.write("volume #3 style surface level 0.001 voxelSize %f origin "
"%0.2f,%0.2f,%0.2f\n"
% (_outputVol.getSamplingRate(), x_output, y_output, z_output))
cMap = ['red', 'yellow', 'green', 'cyan', 'blue']
d = {}
innerRadius = self.protocol.innerRadius.get()
        d['outerRadius'] = self.protocol.outerRadius.get() * sampling
        if innerRadius < 0:
            innerRadius = 0
        d['innerRadius'] = innerRadius * sampling
d['symmetry'] = Chimera.getSymmetry(XMIPP_TO_SCIPION[self.protocol.symmetryGroup.get()])
if self.protocol.symmetryGroup >= XMIPP_I222:
f.write("shape icosahedron mesh true radius %(outerRadius)d "
"orientation %(symmetry)s\n" % d)
step = (d['outerRadius'] - d['innerRadius']) / float(len(cMap) - 1)
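        # Lay out the palette stops at equal radial steps between innerRadius
        # and outerRadius.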
f.write("color radial #3 center 0,0,0 palette -")
counter = 0
s = ""
for color in cMap:
s += "%d,%s:" % (d['innerRadius'] + counter * step, color)
counter += 1
f.write(s[:-1] + '\n')
f.close()
return [ChimeraView(tmpFileNameCMD)]
def _showVolumesXmipp(self):
setOfVolumes = self._createSetOfVolumes()
return [self.objectView(setOfVolumes)]
| null |
1,548 |
# Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
from NeoVintageous.tests import unittest
class Test_ex_set(unittest.FunctionalTestCase):
@unittest.mock_status_message()
def test_set_belloff(self):
self.feed(':set belloff=')
self.assertOption('belloff', '')
self.feed(':set belloff=all')
self.assertOption('belloff', 'all')
self.feed(':set belloff=')
self.assertOption('belloff', '')
self.feed(':set belloff=all')
self.assertOption('belloff', 'all')
self.feed(':set belloff?')
self.assertStatusMessage('belloff=all')
self.feed(':set belloff=')
self.feed(':set belloff?')
self.assertStatusMessage('belloff=')
@unittest.mock_status_message()
def METHOD_NAME(self):
self.assertOption('hlsearch', True)
self.feed(':set nohlsearch')
self.assertOption('hlsearch', False)
self.feed(':set hlsearch')
self.assertOption('hlsearch', True)
self.feed(':set nohls')
self.assertOption('hlsearch', False)
self.feed(':set hls')
self.assertOption('hlsearch', True)
self.feed(':set hlsearch!')
self.assertOption('hlsearch', False)
self.feed(':set hlsearch!')
self.assertOption('hlsearch', True)
self.feed(':set invhlsearch')
self.assertOption('hlsearch', False)
self.feed(':set invhlsearch')
self.assertOption('hlsearch', True)
self.feed(':set hlsearch?')
self.assertStatusMessage('hlsearch')
@unittest.mock_status_message()
def test_set_list(self):
self.feed(':set nolist')
self.assertOption('list', False)
self.feed(':set list')
self.assertOption('list', True)
self.feed(':set nolist')
self.assertOption('list', False)
self.feed(':set list!')
self.assertOption('list', True)
self.feed(':set list!')
self.assertOption('list', False)
self.feed(':set invlist')
self.assertOption('list', True)
self.feed(':set invlist')
self.assertOption('list', False)
self.feed(':set list?')
self.assertStatusMessage('nolist')
@unittest.mock_status_message()
def test_set_modelines(self):
self.assertOption('modelines', 5)
self.feed(':set modelines=8')
self.assertOption('modelines', 8)
self.feed(':set modelines=5')
self.assertOption('modelines', 5)
self.feed(':set modelines?')
self.assertStatusMessage('modelines=5')
@unittest.mock_status_message()
def test_set_scrolloff(self):
self.feed(':set scrolloff=8')
self.assertOption('scrolloff', 8)
self.feed(':set scrolloff=5')
self.assertOption('scrolloff', 5)
self.feed(':set scrolloff?')
self.assertStatusMessage('scrolloff=5')
@unittest.mock_status_message()
def test_set_spell(self):
self.assertOption('spell', False)
self.feed(':set spell')
self.assertOption('spell', True)
self.feed(':set nospell')
self.assertOption('spell', False)
self.feed(':set spell!')
self.assertOption('spell', True)
self.feed(':set spell!')
self.assertOption('spell', False)
self.feed(':set invspell')
self.assertOption('spell', True)
self.feed(':set invspell')
self.assertOption('spell', False)
self.feed(':set spell?')
self.assertStatusMessage('nospell')
@unittest.mock_status_message()
def test_set_winaltkeys(self):
self.feed(':set winaltkeys=no')
self.assertOption('winaltkeys', 'no')
self.feed(':set winaltkeys=yes')
self.assertOption('winaltkeys', 'yes')
self.feed(':set winaltkeys=menu')
self.assertOption('winaltkeys', 'menu')
self.feed(':set winaltkeys?')
self.assertStatusMessage('winaltkeys=menu')
self.feed(':set wak=yes')
self.assertOption('winaltkeys', 'yes')
@unittest.mock_status_message()
def test_set_unknown_option(self):
self.feed(':set foobar')
self.assertStatusMessage('E518: Unknown option: foobar')
@unittest.mock_status_message()
def test_set_unknown_nooption(self):
self.feed(':set nofoobar')
self.assertStatusMessage('E518: Unknown option: nofoobar')
@unittest.mock_status_message()
def test_set_invalid_option_value(self):
self.feed(':set modelines=foobar')
self.assertStatusMessage('invalid literal for int() with base 10: \'foobar\'')
| null |
1,549 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class StoreMaterialTemporarilyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Trademark', '2018-07-24', 'StoreMaterialTemporarily','trademark')
def get_ContactEmail(self):
return self.get_query_params().get('ContactEmail')
def set_ContactEmail(self,ContactEmail):
self.add_query_param('ContactEmail',ContactEmail)
def get_ContactAddress(self):
return self.get_query_params().get('ContactAddress')
def set_ContactAddress(self,ContactAddress):
self.add_query_param('ContactAddress',ContactAddress)
def get_EAddress(self):
return self.get_query_params().get('EAddress')
def set_EAddress(self,EAddress):
self.add_query_param('EAddress',EAddress)
def get_Country(self):
return self.get_query_params().get('Country')
def set_Country(self,Country):
self.add_query_param('Country',Country)
def get_LegalNoticeOssKey(self):
return self.get_query_params().get('LegalNoticeOssKey')
def set_LegalNoticeOssKey(self,LegalNoticeOssKey):
self.add_query_param('LegalNoticeOssKey',LegalNoticeOssKey)
def get_Address(self):
return self.get_query_params().get('Address')
def set_Address(self,Address):
self.add_query_param('Address',Address)
def get_Town(self):
return self.get_query_params().get('Town')
def set_Town(self,Town):
self.add_query_param('Town',Town)
def get_ContactNumber(self):
return self.get_query_params().get('ContactNumber')
def set_ContactNumber(self,ContactNumber):
self.add_query_param('ContactNumber',ContactNumber)
def get_City(self):
return self.get_query_params().get('City')
def set_City(self,City):
self.add_query_param('City',City)
def get_IdCardOssKey(self):
return self.get_query_params().get('IdCardOssKey')
def set_IdCardOssKey(self,IdCardOssKey):
self.add_query_param('IdCardOssKey',IdCardOssKey)
def get_Type(self):
return self.get_query_params().get('Type')
def set_Type(self,Type):
self.add_query_param('Type',Type)
def get_ContactName(self):
return self.get_query_params().get('ContactName')
def set_ContactName(self,ContactName):
self.add_query_param('ContactName',ContactName)
def get_PassportOssKey(self):
return self.get_query_params().get('PassportOssKey')
def set_PassportOssKey(self,PassportOssKey):
self.add_query_param('PassportOssKey',PassportOssKey)
def get_ContactZipcode(self):
return self.get_query_params().get('ContactZipcode')
def set_ContactZipcode(self,ContactZipcode):
self.add_query_param('ContactZipcode',ContactZipcode)
def get_EName(self):
return self.get_query_params().get('EName')
def set_EName(self,EName):
self.add_query_param('EName',EName)
def get_Province(self):
return self.get_query_params().get('Province')
def set_Province(self,Province):
self.add_query_param('Province',Province)
def get_BusinessLicenceOssKey(self):
return self.get_query_params().get('BusinessLicenceOssKey')
def set_BusinessLicenceOssKey(self,BusinessLicenceOssKey):
self.add_query_param('BusinessLicenceOssKey',BusinessLicenceOssKey)
def get_Name(self):
return self.get_query_params().get('Name')
def METHOD_NAME(self,Name):
self.add_query_param('Name',Name)
def get_CardNumber(self):
return self.get_query_params().get('CardNumber')
def set_CardNumber(self,CardNumber):
self.add_query_param('CardNumber',CardNumber)
def get_Region(self):
return self.get_query_params().get('Region')
def set_Region(self,Region):
self.add_query_param('Region',Region)
def get_LoaOssKey(self):
return self.get_query_params().get('LoaOssKey')
def set_LoaOssKey(self,LoaOssKey):
		self.add_query_param('LoaOssKey',LoaOssKey)
| null |
1,550 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreateFullNatEntryRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreateFullNatEntry','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_FullNatEntryDescription(self): # String
return self.get_query_params().get('FullNatEntryDescription')
def set_FullNatEntryDescription(self, FullNatEntryDescription): # String
self.add_query_param('FullNatEntryDescription', FullNatEntryDescription)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AccessIp(self): # String
return self.get_query_params().get('AccessIp')
def set_AccessIp(self, AccessIp): # String
self.add_query_param('AccessIp', AccessIp)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_NatIpPort(self): # String
return self.get_query_params().get('NatIpPort')
def set_NatIpPort(self, NatIpPort): # String
self.add_query_param('NatIpPort', NatIpPort)
def get_FullNatTableId(self): # String
return self.get_query_params().get('FullNatTableId')
def set_FullNatTableId(self, FullNatTableId): # String
self.add_query_param('FullNatTableId', FullNatTableId)
def get_AccessPort(self): # String
return self.get_query_params().get('AccessPort')
def set_AccessPort(self, AccessPort): # String
self.add_query_param('AccessPort', AccessPort)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def METHOD_NAME(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_IpProtocol(self): # String
return self.get_query_params().get('IpProtocol')
def set_IpProtocol(self, IpProtocol): # String
self.add_query_param('IpProtocol', IpProtocol)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_FullNatEntryName(self): # String
return self.get_query_params().get('FullNatEntryName')
def set_FullNatEntryName(self, FullNatEntryName): # String
self.add_query_param('FullNatEntryName', FullNatEntryName)
def get_NatIp(self): # String
return self.get_query_params().get('NatIp')
def set_NatIp(self, NatIp): # String
self.add_query_param('NatIp', NatIp)
def get_NetworkInterfaceId(self): # String
return self.get_query_params().get('NetworkInterfaceId')
def set_NetworkInterfaceId(self, NetworkInterfaceId): # String
self.add_query_param('NetworkInterfaceId', NetworkInterfaceId)
| null |
1,551 |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import logging
from typing import Dict, Optional
import torch
from reagent.core.dataclasses import dataclass, field
from reagent.core.parameters import NormalizationData, NormalizationKey, param_hash
from reagent.evaluation.evaluator import get_metrics_to_score
from reagent.model_managers.discrete_dqn_base import DiscreteDQNBase
from reagent.net_builder.discrete_dqn.fully_connected import FullyConnected
from reagent.net_builder.quantile_dqn.dueling_quantile import DuelingQuantile
from reagent.net_builder.unions import (
DiscreteDQNNetBuilder__Union,
QRDQNNetBuilder__Union,
)
from reagent.training import (
QRDQNTrainer,
QRDQNTrainerParameters,
ReAgentLightningModule,
)
from reagent.workflow.types import RewardOptions
logger = logging.getLogger(__name__)
@dataclass
class DiscreteQRDQN(DiscreteDQNBase):
__hash__ = param_hash
trainer_param: QRDQNTrainerParameters = field(
default_factory=QRDQNTrainerParameters
)
net_builder: QRDQNNetBuilder__Union = field(
# pyre-fixme[28]: Unexpected keyword argument `DuelingQuantile`.
default_factory=lambda: QRDQNNetBuilder__Union(
DuelingQuantile=DuelingQuantile()
)
)
cpe_net_builder: DiscreteDQNNetBuilder__Union = field(
# pyre-fixme[28]: Unexpected keyword argument `FullyConnected`
default_factory=lambda: DiscreteDQNNetBuilder__Union(
FullyConnected=FullyConnected()
)
)
def __post_init_post_parse__(self):
super().__post_init_post_parse__()
assert len(self.action_names) > 1, "DiscreteQRDQNModel needs at least 2 actions"
assert (
self.trainer_param.minibatch_size % 8 == 0
), "The minibatch size must be divisible by 8 for performance reasons."
@property
def action_names(self):
return self.trainer_param.actions
@property
def METHOD_NAME(self):
return self.trainer_param.rl
def build_trainer(
self,
normalization_data_map: Dict[str, NormalizationData],
use_gpu: bool,
reward_options: Optional[RewardOptions] = None,
) -> QRDQNTrainer:
net_builder = self.net_builder.value
q_network = net_builder.build_q_network(
normalization_data_map[NormalizationKey.STATE],
len(self.action_names),
# pyre-fixme[16]: `QRDQNTrainerParameters` has no attribute `num_atoms`.
num_atoms=self.trainer_param.num_atoms,
)
q_network_target = q_network.get_target_network()
reward_options = reward_options or RewardOptions()
metrics_to_score = get_metrics_to_score(reward_options.metric_reward_values)
reward_network, q_network_cpe, q_network_cpe_target = None, None, None
if self.eval_parameters.calc_cpe_in_training:
# Metrics + reward
num_output_nodes = (len(metrics_to_score) + 1) * len(
# pyre-fixme[16]: `QRDQNTrainerParameters` has no attribute `actions`.
self.trainer_param.actions
)
cpe_net_builder = self.cpe_net_builder.value
reward_network = cpe_net_builder.build_q_network(
self.state_feature_config,
normalization_data_map[NormalizationKey.STATE],
num_output_nodes,
)
q_network_cpe = cpe_net_builder.build_q_network(
self.state_feature_config,
normalization_data_map[NormalizationKey.STATE],
num_output_nodes,
)
q_network_cpe_target = q_network_cpe.get_target_network()
trainer = QRDQNTrainer(
q_network=q_network,
q_network_target=q_network_target,
reward_network=reward_network,
q_network_cpe=q_network_cpe,
q_network_cpe_target=q_network_cpe_target,
metrics_to_score=metrics_to_score,
evaluation=self.eval_parameters,
# pyre-fixme[16]: `QRDQNTrainerParameters` has no attribute `asdict`.
**self.trainer_param.asdict(),
)
return trainer
def build_serving_module(
self,
trainer_module: ReAgentLightningModule,
normalization_data_map: Dict[str, NormalizationData],
) -> torch.nn.Module:
"""
Returns a TorchScript predictor module
"""
assert isinstance(trainer_module, QRDQNTrainer)
net_builder = self.net_builder.value
return net_builder.build_serving_module(
trainer_module.q_network,
normalization_data_map[NormalizationKey.STATE],
action_names=self.action_names,
state_feature_config=self.state_feature_config,
)
| null |
1,552 |
import arkouda as ak
import pytest
from server_util.test.server_test_util import start_arkouda_server
class TestClient:
def test_client_connected(self):
"""
Tests the following methods:
        ak.client.connected
ak.client.disconnect()
ak.client.connect()
:return: None
:raise: AssertionError if an assert* method returns incorrect value or
if there is a error in connecting or disconnecting from the
Arkouda server
"""
assert ak.client.connected
try:
ak.disconnect()
except Exception as e:
raise AssertionError(e)
assert not ak.client.connected
try:
ak.connect(server=pytest.server, port=pytest.port)
except Exception as e:
raise AssertionError(e)
assert ak.client.connected
def test_disconnect_on_disconnected_client(self):
"""
        Tests the ak.disconnect() method invoked on a client that is already
        disconnected, to ensure no error is raised
"""
ak.disconnect()
assert not ak.client.connected
ak.disconnect()
ak.connect(server=pytest.server, port=pytest.port)
def test_shutdown(self):
"""
Tests the ak.shutdown() method
"""
ak.shutdown()
start_arkouda_server(numlocales=pytest.nl)
# reconnect to server so subsequent tests will pass
ak.connect(server=pytest.server, port=pytest.port, timeout=pytest.timeout)
def test_client_get_config(self):
"""
Tests the ak.client.get_config() method
:return: None
:raise: AssertionError if one or more Config values are not as expected
or the call to ak.client.get_config() fails
"""
try:
config = ak.client.get_config()
except Exception as e:
raise AssertionError(e)
assert pytest.port == config["ServerPort"]
assert "arkoudaVersion" in config
assert "INFO" == config["logLevel"]
def test_get_mem_used(self):
"""
Tests the ak.get_mem_used and ak.get_mem_avail methods
:return: None
:raise: AssertionError if one or more ak.get_mem_used values are not as
expected or the call to ak.client.get_mem_used() fails
"""
try:
config = ak.client.get_config()
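            # allocating one array per locale (1M float64 values, ~8 MiB each)
            # guarantees the server reports nonzero memory usage below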
a = ak.ones(1024 * 1024 * config["numLocales"])
mem_used = ak.client.get_mem_used()
except Exception as e:
raise AssertionError(e)
assert mem_used > 0
# test units
mem_used = ak.get_mem_used()
mem_avail = ak.get_mem_avail()
for u, f in ak.client._memunit2factor.items():
assert round(mem_used / f) == ak.get_mem_used(u)
assert round(mem_avail / f) == ak.get_mem_avail(u)
# test as_percent
tot_mem = ak.get_mem_used() + ak.get_mem_avail()
assert ak.get_mem_used(as_percent=True) == round((ak.get_mem_used() / tot_mem) * 100)
        assert ak.get_mem_avail(as_percent=True) == round((ak.get_mem_avail() / tot_mem) * 100)
assert 100 == ak.get_mem_used(as_percent=True) + ak.get_mem_avail(as_percent=True)
def test_no_op(self):
"""
Tests the ak.client._no_op method
:return: None
:raise: AssertionError if return message is not 'noop'
"""
assert "noop" == ak.client._no_op()
def METHOD_NAME(self):
"""
Tests the ak.client.ruok method
:return: None
:raise: AssertionError if return message is not 'imok'
"""
assert "imok" == ak.client.ruok()
def test_client_configuration(self):
"""
Tests the ak.client.set_defaults() method as well as set/get
        pdarrayIterThresh, maxTransferBytes, and verbose config params.
"""
ak.client.pdarrayIterThresh = 50
ak.client.maxTransferBytes = 1048576000
ak.client.verbose = True
assert 50 == ak.client.pdarrayIterThresh
assert 1048576000 == ak.client.maxTransferBytes
assert ak.client.verbose
ak.client.set_defaults()
assert 100 == ak.client.pdarrayIterThresh
assert 1073741824 == ak.client.maxTransferBytes
assert not ak.client.verbose
def test_client_get_server_commands(self):
"""
Tests the ak.client.get_server_commands() method contains an expected
sample of commands.
"""
cmds = ak.client.get_server_commands()
for cmd in ["connect", "array", "create", "tondarray", "info", "str"]:
assert cmd in cmds
| null |
1,553 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeInvocationsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeInvocations','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_InvokeStatus(self): # String
return self.get_query_params().get('InvokeStatus')
def set_InvokeStatus(self, InvokeStatus): # String
self.add_query_param('InvokeStatus', InvokeStatus)
def get_IncludeOutput(self): # Boolean
return self.get_query_params().get('IncludeOutput')
def set_IncludeOutput(self, IncludeOutput): # Boolean
self.add_query_param('IncludeOutput', IncludeOutput)
def get_CommandId(self): # String
return self.get_query_params().get('CommandId')
def set_CommandId(self, CommandId): # String
self.add_query_param('CommandId', CommandId)
def get_PageNumber(self): # Long
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Long
self.add_query_param('PageNumber', PageNumber)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_ContentEncoding(self): # String
return self.get_query_params().get('ContentEncoding')
def set_ContentEncoding(self, ContentEncoding): # String
self.add_query_param('ContentEncoding', ContentEncoding)
def get_RepeatMode(self): # String
return self.get_query_params().get('RepeatMode')
def set_RepeatMode(self, RepeatMode): # String
self.add_query_param('RepeatMode', RepeatMode)
def get_PageSize(self): # Long
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Long
self.add_query_param('PageSize', PageSize)
def METHOD_NAME(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
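		# repeat-list parameters are flattened into 1-based indexed query keys,
		# e.g. 'Tag.1.Key', 'Tag.1.Value', 'Tag.2.Key', ...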
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_InvokeId(self): # String
return self.get_query_params().get('InvokeId')
def set_InvokeId(self, InvokeId): # String
self.add_query_param('InvokeId', InvokeId)
def get_Timed(self): # Boolean
return self.get_query_params().get('Timed')
def set_Timed(self, Timed): # Boolean
self.add_query_param('Timed', Timed)
def get_CommandName(self): # String
return self.get_query_params().get('CommandName')
def set_CommandName(self, CommandName): # String
self.add_query_param('CommandName', CommandName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_CommandType(self): # String
return self.get_query_params().get('CommandType')
def set_CommandType(self, CommandType): # String
self.add_query_param('CommandType', CommandType)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
| null |
1,554 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class ModifyDBInstanceSpecRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dds', '2015-12-01', 'ModifyDBInstanceSpec','dds')
self.set_method('POST')
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_DBInstanceStorage(self): # String
return self.get_query_params().get('DBInstanceStorage')
def set_DBInstanceStorage(self, DBInstanceStorage): # String
self.add_query_param('DBInstanceStorage', DBInstanceStorage)
def get_ReadonlyReplicas(self): # String
return self.get_query_params().get('ReadonlyReplicas')
def set_ReadonlyReplicas(self, ReadonlyReplicas): # String
self.add_query_param('ReadonlyReplicas', ReadonlyReplicas)
def get_ExtraParam(self): # String
return self.get_query_params().get('ExtraParam')
def set_ExtraParam(self, ExtraParam): # String
self.add_query_param('ExtraParam', ExtraParam)
def get_CouponNo(self): # String
return self.get_query_params().get('CouponNo')
def set_CouponNo(self, CouponNo): # String
self.add_query_param('CouponNo', CouponNo)
def get_ReplicationFactor(self): # String
return self.get_query_params().get('ReplicationFactor')
def set_ReplicationFactor(self, ReplicationFactor): # String
self.add_query_param('ReplicationFactor', ReplicationFactor)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_EffectiveTime(self): # String
return self.get_query_params().get('EffectiveTime')
def set_EffectiveTime(self, EffectiveTime): # String
self.add_query_param('EffectiveTime', EffectiveTime)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_BusinessInfo(self): # String
return self.get_query_params().get('BusinessInfo')
def set_BusinessInfo(self, BusinessInfo): # String
self.add_query_param('BusinessInfo', BusinessInfo)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def METHOD_NAME(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_OrderType(self): # String
return self.get_query_params().get('OrderType')
def set_OrderType(self, OrderType): # String
self.add_query_param('OrderType', OrderType)
| null |
1,555 |
# Copyright 2017-2021 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from pipeline import PipelineAPI
def find_region(api, region_id):
region = api.get_region(region_id)
if not region:
raise RuntimeError("Failed to find region by ID %s" % str(region_id))
return region
def METHOD_NAME(api, user_id):
return api.load_profiles_for_user(user_id)
def filter_profiles_by_region(profiles, region):
if region.get('mountCredentialsRule') == 'NONE':
return []
if region.get('mountCredentialsRule') == 'ALL':
return profiles
    if region.get('mountCredentialsRule') == 'CLOUD':
        return [profile for profile in profiles if profile.get('cloudProvider') == region.get('provider')]
    # any other mount rule value yields no profiles (avoids an implicit None)
    return []
def build_command(path_to_script, profile_id, python_path, log_dir):
return "%s %s --profile-id=%s --log-dir=%s" % (python_path, path_to_script, profile_id, log_dir)
def write_content(f, credentials_process, profile_name, region_field):
f.write(profile_name)
f.write("\n")
f.write(region_field)
f.write("\n")
f.write(credentials_process)
f.write("\n")
def create_config_dir(path_to_config):
path_to_config_dir = os.path.dirname(path_to_config)
if path_to_config_dir and not os.path.exists(path_to_config_dir):
os.makedirs(path_to_config_dir)
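# The file written below follows the AWS CLI shared-config layout; the values
# in this sketch are illustrative only:
#   [profile my-profile]
#   region = us-east-1
#   credential_process = /usr/bin/python /path/to/script.py --profile-id=1 --log-dir=logs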
def write_to_config_file(profiles, region, path_to_script, path_to_config, python_path, log_dir,
default_profile_id=None):
if not profiles:
return
create_config_dir(path_to_config)
with open(path_to_config, 'w+') as f:
for profile in profiles:
default_profile_name = '[default]' if default_profile_id \
and int(profile.get('id')) == int(default_profile_id) else None
profile_name = '[profile %s]' % profile.get('profileName')
credentials_process = 'credential_process = %s' % build_command(path_to_script, profile.get('id'),
python_path, log_dir)
region_field = "region = %s" % region.get('regionId')
if default_profile_name:
write_content(f, credentials_process, default_profile_name, region_field)
if profile.get('profileName') == 'default':
continue
write_content(f, credentials_process, profile_name, region_field)
def find_user(api):
user = api.load_current_user()
if not user:
raise RuntimeError("Failed to load current user")
return user
def find_default_profile_id(user):
user_profile_id = user.get('defaultProfileId', None)
if user_profile_id:
return user_profile_id
roles = user.get('roles')
if not roles:
return None
for role in roles:
role_profile_id = role.get('defaultProfileId', None)
if role_profile_id:
return role_profile_id
return None
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--script-path', required=True)
parser.add_argument('--python-path', required=True) # $CP_PYTHON2_PATH
parser.add_argument('--config-file', default="~/.aws/config")
parser.add_argument('--log-dir', default='logs')
args = parser.parse_args()
script_path = args.script_path
python_path = args.python_path
config_file = os.path.expanduser(args.config_file)
log_dir = args.log_dir
api = PipelineAPI(os.environ['API'], log_dir)
region_id = int(os.environ['CLOUD_REGION_ID'])
region = find_region(api, region_id)
user = find_user(api)
user_id = int(user.get('id'))
default_profile_id = find_default_profile_id(user)
profiles = filter_profiles_by_region(METHOD_NAME(api, user_id), region)
write_to_config_file(profiles, region, script_path, config_file, python_path, log_dir, default_profile_id)
if __name__ == '__main__':
main()
| null |
1,556 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksas.endpoint import endpoint_data
class CreateHoneypotProbeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Sas', '2018-12-03', 'CreateHoneypotProbe')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ControlNodeId(self): # String
return self.get_query_params().get('ControlNodeId')
def set_ControlNodeId(self, ControlNodeId): # String
self.add_query_param('ControlNodeId', ControlNodeId)
def get_ProxyIp(self): # String
return self.get_query_params().get('ProxyIp')
def set_ProxyIp(self, ProxyIp): # String
self.add_query_param('ProxyIp', ProxyIp)
def get_Ping(self): # Boolean
return self.get_query_params().get('Ping')
def set_Ping(self, Ping): # Boolean
self.add_query_param('Ping', Ping)
def get_Uuid(self): # String
return self.get_query_params().get('Uuid')
def set_Uuid(self, Uuid): # String
self.add_query_param('Uuid', Uuid)
def get_ProbeVersion(self): # String
return self.get_query_params().get('ProbeVersion')
def set_ProbeVersion(self, ProbeVersion): # String
self.add_query_param('ProbeVersion', ProbeVersion)
def get_HoneypotBindLists(self): # RepeatList
return self.get_query_params().get('HoneypotBindList')
def set_HoneypotBindLists(self, HoneypotBindList): # RepeatList
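		# nested repeat lists flatten to 1-based indexed keys such as
		# 'HoneypotBindList.1.BindPortList.2.StartPort'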
for depth1 in range(len(HoneypotBindList)):
if HoneypotBindList[depth1].get('BindPortList') is not None:
for depth2 in range(len(HoneypotBindList[depth1].get('BindPortList'))):
if HoneypotBindList[depth1].get('BindPortList')[depth2].get('StartPort') is not None:
self.add_query_param('HoneypotBindList.' + str(depth1 + 1) + '.BindPortList.' + str(depth2 + 1) + '.StartPort', HoneypotBindList[depth1].get('BindPortList')[depth2].get('StartPort'))
if HoneypotBindList[depth1].get('BindPortList')[depth2].get('BindPort') is not None:
self.add_query_param('HoneypotBindList.' + str(depth1 + 1) + '.BindPortList.' + str(depth2 + 1) + '.BindPort', HoneypotBindList[depth1].get('BindPortList')[depth2].get('BindPort'))
if HoneypotBindList[depth1].get('BindPortList')[depth2].get('Fixed') is not None:
self.add_query_param('HoneypotBindList.' + str(depth1 + 1) + '.BindPortList.' + str(depth2 + 1) + '.Fixed', HoneypotBindList[depth1].get('BindPortList')[depth2].get('Fixed'))
if HoneypotBindList[depth1].get('BindPortList')[depth2].get('EndPort') is not None:
self.add_query_param('HoneypotBindList.' + str(depth1 + 1) + '.BindPortList.' + str(depth2 + 1) + '.EndPort', HoneypotBindList[depth1].get('BindPortList')[depth2].get('EndPort'))
if HoneypotBindList[depth1].get('BindPortList')[depth2].get('TargetPort') is not None:
self.add_query_param('HoneypotBindList.' + str(depth1 + 1) + '.BindPortList.' + str(depth2 + 1) + '.TargetPort', HoneypotBindList[depth1].get('BindPortList')[depth2].get('TargetPort'))
if HoneypotBindList[depth1].get('HoneypotId') is not None:
self.add_query_param('HoneypotBindList.' + str(depth1 + 1) + '.HoneypotId', HoneypotBindList[depth1].get('HoneypotId'))
def get_Arp(self): # Boolean
return self.get_query_params().get('Arp')
def set_Arp(self, Arp): # Boolean
self.add_query_param('Arp', Arp)
def get_ProbeType(self): # String
return self.get_query_params().get('ProbeType')
def set_ProbeType(self, ProbeType): # String
self.add_query_param('ProbeType', ProbeType)
def METHOD_NAME(self): # String
return self.get_query_params().get('BusinessGroupId')
def set_BusinessGroupId(self, BusinessGroupId): # String
self.add_query_param('BusinessGroupId', BusinessGroupId)
def get_DisplayName(self): # String
return self.get_query_params().get('DisplayName')
def set_DisplayName(self, DisplayName): # String
self.add_query_param('DisplayName', DisplayName)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
| null |
1,557 |
# Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
from NeoVintageous.tests import unittest
class TestLeftSquareBracketTarget(unittest.FunctionalTestCase):
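    # Fixture notation: '|' marks the caret, the 'n_'/'v_' prefixes run the
    # motion in normal or visual mode, and 'r_' denotes a reversed selection.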
def test_n_paren(self):
self.eq('x(\n_|)_', 'n_[(', 'x|(\n_)_')
def test_n_brace(self):
self.eq('x{\n_|}_', 'n_[{', 'x|{\n_}_')
def METHOD_NAME(self):
self.eq('{ a { b { x |} c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a { b { x } c |} d }', 'n_]}', '{ a { b { x } c } d |}')
self.eq('{ a { b { x } c } d |}', 'n_]}', '{ a { b { x } c } d |}')
self.eq('{ a { b { x } c }| d }', 'n_]}', '{ a { b { x } c } d |}')
self.eq('{ a { b { x } c| } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a { b { x } |c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a { b { x }| c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a { b { x| } c } d }', 'n_]}', '{ a { b { x |} c } d }')
self.eq('{ a { b { |x } c } d }', 'n_]}', '{ a { b { x |} c } d }')
self.eq('{ a { b {| x } c } d }', 'n_]}', '{ a { b { x |} c } d }')
self.eq('{ a { b |{ x } c } d }', 'n_]}', '{ a { b { x |} c } d }')
self.eq('{ a { b| { x } c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a { |b { x } c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a {| b { x } c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a |{ b { x } c } d }', 'n_]}', '{ a { b { x } c |} d }')
self.eq('{ a| { b { x } c } d }', 'n_]}', '{ a { b { x } c } d |}')
self.eq('{ |a { b { x } c } d }', 'n_]}', '{ a { b { x } c } d |}')
self.eq('{| a { b { x } c } d }', 'n_]}', '{ a { b { x } c } d |}')
self.eq('|{ a { b { x } c } d }', 'n_]}', '{ a { b { x } c } d |}')
def test_n_brace_unbalanced(self):
self.eq('{ x } x |{ y }', 'n_[{', '{ x } x |{ y }')
self.eq('{ x } x| { y }', 'n_[{', '{ x } x| { y }')
self.eq('{ x } |x { y }', 'n_[{', '{ x } |x { y }')
self.eq('{ x }| x { y }', 'n_[{', '{ x }| x { y }')
def test_n_brace_multiline(self):
self.eq('{\n {\nfi|zz\n }\n}', 'n_[{', '{\n |{\nfizz\n }\n}')
self.eq('{\n |{\nfizz\n }\n}', 'n_[{', '|{\n {\nfizz\n }\n}')
def test_v(self):
self.eq('{ a { b { |x } c } d }|', 'v_[{', 'r_|{ a { b { x| } c } d }')
self.eq('{ a { b { |x } c }| d }', 'v_[{', 'r_{ a |{ b { x| } c } d }')
self.eq('{ a { b { |x }| c } d }', 'v_[{', 'r_{ a { b |{ x| } c } d }')
self.eq('{ a { b { |x| } c } d }', 'v_[{', 'r_{ a { b |{ x| } c } d }')
self.eq('{ a { b {| x }| c } d }', 'v_[{', 'r_{ a { b |{ |x } c } d }')
self.eq('{ a { b {| x| } c } d }', 'v_[{', 'r_{ a { b |{ |x } c } d }')
self.eq('{ a { b |{ x }| c } d }', 'v_[{', '{ a { b |{| x } c } d }')
self.eq('{ a { b |{ x| } c } d }', 'v_[{', '{ a { b |{| x } c } d }')
self.eq('{ a { b| { x| } c } d }', 'v_[{', '{ a { b| {| x } c } d }')
self.eq('{ a |{ b { x } c }| d }', 'v_[{', '{ a |{| b { x } c } d }')
self.eq('{ a |{ b { x| } c } d }', 'v_[{', '{ a |{ b {| x } c } d }')
self.eq('{ |a { b { x }| c } d }', 'v_[{', '{ |a { b {| x } c } d }')
self.eq('r_{ a { b { |x| } c } d }', 'v_[{', 'r_{ a { b |{ x| } c } d }')
self.eq('r_{ a { b {| x| } c } d }', 'v_[{', 'r_{ a { b |{ x| } c } d }')
self.eq('r_{ a { b |{ x| } c } d }', 'v_[{', 'r_{ a |{ b { x| } c } d }')
| null |
1,558 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcloudauth.endpoint import endpoint_data
class DescribeVerifyTokenRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cloudauth', '2019-03-07', 'DescribeVerifyToken')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_FaceRetainedImageUrl(self): # String
return self.get_query_params().get('FaceRetainedImageUrl')
def set_FaceRetainedImageUrl(self, FaceRetainedImageUrl): # String
self.add_query_param('FaceRetainedImageUrl', FaceRetainedImageUrl)
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_CallbackSeed(self): # String
return self.get_query_params().get('CallbackSeed')
def set_CallbackSeed(self, CallbackSeed): # String
self.add_query_param('CallbackSeed', CallbackSeed)
def get_UserIp(self): # String
return self.get_query_params().get('UserIp')
def set_UserIp(self, UserIp): # String
self.add_query_param('UserIp', UserIp)
def get_IdCardBackImageUrl(self): # String
return self.get_query_params().get('IdCardBackImageUrl')
def set_IdCardBackImageUrl(self, IdCardBackImageUrl): # String
self.add_query_param('IdCardBackImageUrl', IdCardBackImageUrl)
def get_IdCardNumber(self): # String
return self.get_query_params().get('IdCardNumber')
def set_IdCardNumber(self, IdCardNumber): # String
self.add_query_param('IdCardNumber', IdCardNumber)
def get_IdCardFrontImageUrl(self): # String
return self.get_query_params().get('IdCardFrontImageUrl')
def set_IdCardFrontImageUrl(self, IdCardFrontImageUrl): # String
self.add_query_param('IdCardFrontImageUrl', IdCardFrontImageUrl)
def get_BizType(self): # String
return self.get_query_params().get('BizType')
def set_BizType(self, BizType): # String
self.add_query_param('BizType', BizType)
def get_PassedRedirectUrl(self): # String
return self.get_query_params().get('PassedRedirectUrl')
def set_PassedRedirectUrl(self, PassedRedirectUrl): # String
self.add_query_param('PassedRedirectUrl', PassedRedirectUrl)
def METHOD_NAME(self): # Long
return self.get_query_params().get('UserRegistTime')
def set_UserRegistTime(self, UserRegistTime): # Long
self.add_query_param('UserRegistTime', UserRegistTime)
def get_BizId(self): # String
return self.get_query_params().get('BizId')
def set_BizId(self, BizId): # String
self.add_query_param('BizId', BizId)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_UserPhoneNumber(self): # String
return self.get_query_params().get('UserPhoneNumber')
def set_UserPhoneNumber(self, UserPhoneNumber): # String
self.add_query_param('UserPhoneNumber', UserPhoneNumber)
def get_CallbackUrl(self): # String
return self.get_query_params().get('CallbackUrl')
def set_CallbackUrl(self, CallbackUrl): # String
self.add_query_param('CallbackUrl', CallbackUrl)
def get_FailedRedirectUrl(self): # String
return self.get_query_params().get('FailedRedirectUrl')
def set_FailedRedirectUrl(self, FailedRedirectUrl): # String
self.add_query_param('FailedRedirectUrl', FailedRedirectUrl)
| null |
1,559 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class ListSlotRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'ListSlot')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AdSlotType(self): # String
return self.get_query_params().get('AdSlotType')
def set_AdSlotType(self, AdSlotType): # String
self.add_query_param('AdSlotType', AdSlotType)
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_OriginSiteUserId(self): # String
return self.get_query_params().get('OriginSiteUserId')
def set_OriginSiteUserId(self, OriginSiteUserId): # String
self.add_query_param('OriginSiteUserId', OriginSiteUserId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MediaName(self): # String
return self.get_query_params().get('MediaName')
def set_MediaName(self, MediaName): # String
self.add_query_param('MediaName', MediaName)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_AdSlotStatus(self): # String
return self.get_query_params().get('AdSlotStatus')
def set_AdSlotStatus(self, AdSlotStatus): # String
self.add_query_param('AdSlotStatus', AdSlotStatus)
def METHOD_NAME(self): # String
return self.get_query_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_query_param('TenantId', TenantId)
def get_AdSlotId(self): # String
return self.get_query_params().get('AdSlotId')
def set_AdSlotId(self, AdSlotId): # String
self.add_query_param('AdSlotId', AdSlotId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AdSlotCorporateStatus(self): # String
return self.get_query_params().get('AdSlotCorporateStatus')
def set_AdSlotCorporateStatus(self, AdSlotCorporateStatus): # String
self.add_query_param('AdSlotCorporateStatus', AdSlotCorporateStatus)
def get_EndCreateTime(self): # Long
return self.get_query_params().get('EndCreateTime')
def set_EndCreateTime(self, EndCreateTime): # Long
self.add_query_param('EndCreateTime', EndCreateTime)
def get_Business(self): # String
return self.get_query_params().get('Business')
def set_Business(self, Business): # String
self.add_query_param('Business', Business)
def get_MediaId(self): # String
return self.get_query_params().get('MediaId')
def set_MediaId(self, MediaId): # String
self.add_query_param('MediaId', MediaId)
def get_Environment(self): # String
return self.get_query_params().get('Environment')
def set_Environment(self, Environment): # String
self.add_query_param('Environment', Environment)
def get_StartCreateTime(self): # Long
return self.get_query_params().get('StartCreateTime')
def set_StartCreateTime(self, StartCreateTime): # Long
self.add_query_param('StartCreateTime', StartCreateTime)
def get_UserSite(self): # String
return self.get_query_params().get('UserSite')
def set_UserSite(self, UserSite): # String
self.add_query_param('UserSite', UserSite)
def get_AdSlotName(self): # String
return self.get_query_params().get('AdSlotName')
def set_AdSlotName(self, AdSlotName): # String
self.add_query_param('AdSlotName', AdSlotName)
| null |
1,560 |
"""Module for defining custom logger."""
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# ruff: noqa: PLW0603
import functools
import logging
import os
import sys
from typing import Callable
import torch.distributed as dist
# __all__ = ['config_logger', 'get_log_dir', 'get_logger']
__all__ = ["config_logger", "get_log_dir"]
_LOGGING_FORMAT = "%(asctime)s | %(levelname)s : %(message)s"
_LOG_DIR = None
_FILE_HANDLER = None
_CUSTOM_LOG_LEVEL = 31
LEVEL = logging.INFO
logging.addLevelName(_CUSTOM_LOG_LEVEL, "LOG")
def _get_logger():
logger = logging.getLogger("mpa")
logger.propagate = False
def logger_print(message, *args, **kws):
if logger.isEnabledFor(_CUSTOM_LOG_LEVEL):
logger.log(_CUSTOM_LOG_LEVEL, message, *args, **kws)
logger.print = logger_print
logger.setLevel(LEVEL)
console = logging.StreamHandler(sys.stdout)
console.setFormatter(logging.Formatter(_LOGGING_FORMAT))
logger.addHandler(console)
return logger
_logger = _get_logger()
# # to expose supported APIs
# _override_methods = ['setLevel', 'addHandler', 'addFilter', 'info',
# 'warning', 'error', 'critical', 'print']
# for fn in _override_methods:
# locals()[fn] = getattr(_logger, fn)
# __all__.append(fn)
def config_logger(log_file, level="WARNING"):
"""A function that configures the logging system.
:param log_file: str, a string representing the path to the log file.
:param level: str, a string representing the log level. Default is "WARNING".
:return: None
"""
global _LOG_DIR, _FILE_HANDLER # pylint: disable=global-statement
if _FILE_HANDLER is not None:
_logger.removeHandler(_FILE_HANDLER)
del _FILE_HANDLER
_LOG_DIR = os.path.dirname(log_file)
os.makedirs(_LOG_DIR, exist_ok=True)
file = logging.FileHandler(log_file, mode="w", encoding="utf-8")
file.setFormatter(logging.Formatter(_LOGGING_FORMAT))
_FILE_HANDLER = file
_logger.addHandler(file)
_logger.setLevel(METHOD_NAME(level))
def METHOD_NAME(level):
# sanity checks
if level is None:
return None
# get level number
level_number = logging.getLevelName(level.upper())
if level_number not in [0, 10, 20, 30, 40, 50, _CUSTOM_LOG_LEVEL]:
msg = f"Log level must be one of DEBUG/INFO/WARN/ERROR/CRITICAL/LOG, but {level} is given."
raise ValueError(msg)
return level_number
def get_log_dir():
"""A function that retrieves the directory path of the log file.
:return: str, a string representing the directory path of the log file.
"""
return _LOG_DIR
class _DummyLogger(logging.Logger):
def debug(self, message, *args, **kws):
pass
def info(self, message, *args, **kws):
pass
def warning(self, message, *args, **kws):
pass
def critical(self, message, *args, **kws):
pass
def error(self, message, *args, **kws):
pass
def local_master_only(func: Callable) -> Callable:
"""A decorator that allows a function to be executed only by the local master process in distributed training setup.
Args:
func: the function to be decorated.
Returns:
A wrapped function that can only be executed by the local master process.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs): # pylint: disable=inconsistent-return-statements
local_rank = 0
if dist.is_available() and dist.is_initialized():
local_rank = int(os.environ["LOCAL_RANK"])
if local_rank == 0:
return func(*args, **kwargs)
return wrapper
# apply decorator @local_master_only to the lower severity logging functions
_logging_methods = ["print", "debug", "info", "warning"]
for fn in _logging_methods:
setattr(_logger, fn, local_master_only(getattr(_logger, fn)))
def get_logger():
"""Return logger."""
# if dist.is_available() and dist.is_initialized():
# rank = dist.get_rank()
# else:
# rank = 0
# if rank == 0:
# return _logger
# return _DummyLogger('dummy')
return _logger
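# Minimal usage sketch (path and level are illustrative):
#   config_logger('/tmp/otx/train.log', level='INFO')
#   get_logger().info('training started')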
| null |
1,561 |
# SPDX-FileCopyrightText: 2021 Melissa LeBlanc-Williams for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""Custom PulseIn Class to read PWM signals"""
import time
import subprocess
import os
import atexit
import random
import struct
import sysv_ipc
DEBUG = False
queues = []
procs = []
# The message queues live outside of python space, and must be formally cleaned!
def final():
"""In case the program is cancelled or quit, we need to clean up the PulseIn
helper process and also the message queue, this is called at exit to do so"""
if DEBUG:
print("Cleaning up message queues", queues)
print("Cleaning up processes", procs)
for q in queues:
q.remove()
for proc in procs:
proc.terminate()
atexit.register(final)
# pylint: disable=c-extension-no-member
class PulseIn:
"""PulseIn Class to read PWM signals"""
def __init__(self, pin, maxlen=2, idle_state=False):
"""Create a PulseIn object associated with the given pin.
The object acts as a read-only sequence of pulse lengths with
a given max length. When it is active, new pulse lengths are
added to the end of the list. When there is no more room
(len() == maxlen) the oldest pulse length is removed to make room."""
self._pin = pin
self._maxlen = maxlen
self._idle_state = idle_state
self._queue_key = random.randint(1, 9999)
try:
self._mq = sysv_ipc.MessageQueue(None, flags=sysv_ipc.IPC_CREX)
if DEBUG:
print("Message Queue Key: ", self._mq.key)
queues.append(self._mq)
except sysv_ipc.ExistentialError:
raise RuntimeError(
"Message queue creation failed"
) from sysv_ipc.ExistentialError
# Check if OS is 64-bit
if struct.calcsize("P") * 8 == 64: # pylint: disable=no-member
libgpiod_filename = "libgpiod_pulsein64"
else:
libgpiod_filename = "libgpiod_pulsein"
dir_path = os.path.dirname(os.path.realpath(__file__))
cmd = [
dir_path + "/" + libgpiod_filename,
"--pulses",
str(maxlen),
"--queue",
str(self._mq.key),
]
if idle_state:
cmd.append("-i")
cmd.append("gpiochip0")
cmd.append(str(pin))
if DEBUG:
print(cmd)
self._process = subprocess.Popen(cmd) # pylint: disable=consider-using-with
procs.append(self._process)
# wait for it to start up
if DEBUG:
print("Waiting for startup success message from subprocess")
message = self._wait_receive_msg(timeout=0.25)
if message[0] != b"!":
raise RuntimeError("Could not establish message queue with subprocess")
self._paused = False
# pylint: disable=redefined-builtin
def _wait_receive_msg(self, timeout=0, type=2):
"""Internal helper that will wait for new messages of a given type,
and throw an exception on timeout"""
if timeout > 0:
stamp = time.monotonic()
while (time.monotonic() - stamp) < timeout:
try:
message = self._mq.receive(block=False, type=type)
return message
except sysv_ipc.BusyError:
time.sleep(0.001) # wait a bit then retry!
# uh-oh timed out
raise RuntimeError(
"Timed out waiting for PulseIn message. Make sure libgpiod is installed."
)
message = self._mq.receive(block=True, type=type)
return message
# pylint: enable=redefined-builtin
def deinit(self):
"""Deinitialises the PulseIn and releases any hardware and software
resources for reuse."""
# Clean up after ourselves
self._process.terminate()
procs.remove(self._process)
self._mq.remove()
queues.remove(self._mq)
def __enter__(self):
"""No-op used by Context Managers."""
return self
def __exit__(self, exc_type, exc_value, tb):
"""Automatically deinitializes the hardware when exiting a context."""
self.deinit()
def resume(self, trigger_duration=0):
"""Resumes pulse capture after an optional trigger pulse."""
if trigger_duration != 0:
self._mq.send("t%d" % trigger_duration, True, type=1)
else:
self._mq.send("r", True, type=1)
self._paused = False
def pause(self):
"""Pause pulse capture"""
self._mq.send("p", True, type=1)
self._paused = True
@property
def paused(self):
"""True when pulse capture is paused as a result of pause() or
an error during capture such as a signal that is too fast."""
return self._paused
@property
def maxlen(self):
"""The maximum length of the PulseIn. When len() is equal to maxlen,
it is unclear which pulses are active and which are idle."""
return self._maxlen
def clear(self):
"""Clears all captured pulses"""
self._mq.send("c", True, type=1)
def METHOD_NAME(self):
"""Removes and returns the oldest read pulse."""
self._mq.send("^", True, type=1)
message = self._wait_receive_msg()
reply = int(message[0].decode("utf-8"))
# print(reply)
if reply == -1:
raise IndexError("pop from empty list")
return reply
def __len__(self):
"""Returns the current pulse length"""
self._mq.send("l", True, type=1)
message = self._wait_receive_msg()
return int(message[0].decode("utf-8"))
# pylint: disable=redefined-builtin
def __getitem__(self, index, type=None):
"""Returns the value at the given index or values in slice."""
self._mq.send("i%d" % index, True, type=1)
message = self._wait_receive_msg()
ret = int(message[0].decode("utf-8"))
if ret == -1:
raise IndexError("list index out of range")
return ret
# pylint: enable=redefined-builtin
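# Usage sketch (illustrative; pin 4 is an arbitrary placeholder and assumes
# the libgpiod_pulsein helper binary is present next to this module):
#   with PulseIn(4, maxlen=64) as pulses:
#       time.sleep(1.0)       # let the helper capture some pulses
#       pulses.pause()
#       for i in range(len(pulses)):
#           print(pulses[i])  # pulse lengths, oldest first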
| null |
1,562 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeDBInstancesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dds', '2015-12-01', 'DescribeDBInstances','dds')
self.set_method('POST')
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_NetworkType(self): # String
return self.get_query_params().get('NetworkType')
def set_NetworkType(self, NetworkType): # String
self.add_query_param('NetworkType', NetworkType)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_ReplicationFactor(self): # String
return self.get_query_params().get('ReplicationFactor')
def set_ReplicationFactor(self, ReplicationFactor): # String
self.add_query_param('ReplicationFactor', ReplicationFactor)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Expired(self): # String
return self.get_query_params().get('Expired')
def set_Expired(self, Expired): # String
self.add_query_param('Expired', Expired)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_Engine(self): # String
return self.get_query_params().get('Engine')
def set_Engine(self, Engine): # String
self.add_query_param('Engine', Engine)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_DBNodeType(self): # String
return self.get_query_params().get('DBNodeType')
def set_DBNodeType(self, DBNodeType): # String
self.add_query_param('DBNodeType', DBNodeType)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_DBInstanceDescription(self): # String
return self.get_query_params().get('DBInstanceDescription')
def set_DBInstanceDescription(self, DBInstanceDescription): # String
self.add_query_param('DBInstanceDescription', DBInstanceDescription)
def METHOD_NAME(self): # String
return self.get_query_params().get('DBInstanceStatus')
def set_DBInstanceStatus(self, DBInstanceStatus): # String
self.add_query_param('DBInstanceStatus', DBInstanceStatus)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_ExpireTime(self): # String
return self.get_query_params().get('ExpireTime')
def set_ExpireTime(self, ExpireTime): # String
self.add_query_param('ExpireTime', ExpireTime)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_ConnectionDomain(self): # String
return self.get_query_params().get('ConnectionDomain')
def set_ConnectionDomain(self, ConnectionDomain): # String
self.add_query_param('ConnectionDomain', ConnectionDomain)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DBInstanceType(self): # String
return self.get_query_params().get('DBInstanceType')
def set_DBInstanceType(self, DBInstanceType): # String
self.add_query_param('DBInstanceType', DBInstanceType)
def get_DBInstanceClass(self): # String
return self.get_query_params().get('DBInstanceClass')
def set_DBInstanceClass(self, DBInstanceClass): # String
self.add_query_param('DBInstanceClass', DBInstanceClass)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_ChargeType(self): # String
return self.get_query_params().get('ChargeType')
def set_ChargeType(self, ChargeType): # String
self.add_query_param('ChargeType', ChargeType)
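# Usage sketch (hedged: AcsClient comes from aliyunsdkcore.client; the
# credentials, region and tag values below are placeholders):
#   from aliyunsdkcore.client import AcsClient
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = DescribeDBInstancesRequest()
#   request.set_PageNumber(1)
#   request.set_PageSize(30)
#   request.set_Tags([{'Key': 'env', 'Value': 'prod'}])  # becomes Tag.1.Key / Tag.1.Value
#   response = client.do_action_with_exception(request)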
| null |
1,563 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from abc import abstractmethod
from typing import List
import numpy as np
from overrides import EnforceOverrides
from archai.discrete_search.api.archai_model import ArchaiModel
class DiscreteSearchSpace(EnforceOverrides):
"""Abstract class for discrete search spaces.
This class serves as a base for implementing search spaces. The class enforces
implementation of five methods: `save_arch`, `load_arch`, `save_model_weights`,
`load_model_weights` and `random_sample`.
Note:
This class is inherited from `EnforceOverrides` and any overridden methods in the
subclass should be decorated with `@overrides` to ensure they are properly overridden.
Examples:
>>> class MyDiscreteSearchSpace(DiscreteSearchSpace):
>>> def __init__(self) -> None:
>>> super().__init__()
>>>
>>> @overrides
>>> def save_arch(self, arch, file_path) -> None:
>>> torch.save(arch, file_path)
>>>
>>> @overrides
>>> def load_arch(self, file_path) -> ArchaiModel:
>>> return torch.load(file_path)
>>>
>>> @overrides
>>> def save_model_weights(self, model, file_path) -> None:
>>> torch.save(model.state_dict(), file_path)
>>>
>>> @overrides
>>> def load_model_weights(self, model, file_path) -> None:
>>> model.load_state_dict(torch.load(file_path))
>>>
>>> @overrides
>>> def random_sample(self, config) -> ArchaiModel:
>>> return ArchaiModel(config)
"""
@abstractmethod
def METHOD_NAME(self, model: ArchaiModel, file_path: str) -> None:
"""Save an architecture to a file without saving the weights.
Args:
model: Model's architecture to save.
file_path: File path to save the architecture.
"""
pass
@abstractmethod
def load_arch(self, file_path: str) -> ArchaiModel:
"""Load from a file an architecture that was saved using `SearchSpace.save_arch()`.
Args:
file_path: File path to load the architecture.
Returns:
Loaded model.
"""
pass
@abstractmethod
def save_model_weights(self, model: ArchaiModel, file_path: str) -> None:
"""Save the weights of a model.
Args:
model: Model to save the weights.
file_path: File path to save the weights.
"""
pass
@abstractmethod
def load_model_weights(self, model: ArchaiModel, file_path: str) -> None:
"""Load the weights (created with `SearchSpace.save_model_weights()`) into a model
of the same architecture.
Args:
model: Model to load the weights.
file_path: File path to load the weights.
"""
pass
@abstractmethod
def random_sample(self) -> ArchaiModel:
"""Randomly sample an architecture from the search spaces.
Returns:
Sampled architecture.
"""
pass
class EvolutionarySearchSpace(DiscreteSearchSpace, EnforceOverrides):
"""Abstract class for discrete search spaces compatible with evolutionary algorithms.
The class enforces implementation of two methods: `mutate` and `crossover`.
Note:
This class is inherited from `EnforceOverrides` and any overridden methods in the
subclass should be decorated with `@overrides` to ensure they are properly overridden.
"""
@abstractmethod
def mutate(self, arch: ArchaiModel) -> ArchaiModel:
"""Mutate an architecture from the search space.
This method should not alter the base model architecture directly,
only generate a new one.
Args:
arch: Base model.
Returns:
Mutated model.
"""
pass
@abstractmethod
def crossover(self, arch_list: List[ArchaiModel]) -> ArchaiModel:
"""Combine a list of architectures into a new one.
Args:
arch_list: List of architectures.
Returns:
Resulting model.
"""
pass
class BayesOptSearchSpace(DiscreteSearchSpace, EnforceOverrides):
"""Abstract class for discrete search spaces compatible with Bayesian Optimization algorithms.
The class enforces implementation of a single method: `encode`.
Note:
This class is inherited from `EnforceOverrides` and any overridden methods in the
subclass should be decorated with `@overrides` to ensure they are properly overridden.
"""
@abstractmethod
def encode(self, arch: ArchaiModel) -> np.ndarray:
"""Encode an architecture into a fixed-length vector representation.
Args:
arch: Model from the search space.
Returns:
Fixed-length vector representation of `arch`.
"""
pass
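# A minimal sketch of an `encode` override (hypothetical; `to_hparam_list`
# is an assumed helper that flattens an architecture into numbers):
#   class MySearchSpace(BayesOptSearchSpace):
#       @overrides
#       def encode(self, arch: ArchaiModel) -> np.ndarray:
#           return np.asarray(self.to_hparam_list(arch), dtype=np.float32)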
| null |
1,564 |
import unittest
from copy import deepcopy
from random import randint, random, seed
from lightly.api.bitmask import BitMask
N = 10
class TestBitMask(unittest.TestCase):
    def setup(self, p_success=1.0):
pass
def test_get_and_set(self):
mask = BitMask.from_bin("0b11110000")
self.assertFalse(mask.get_kth_bit(2))
mask.set_kth_bit(2)
self.assertTrue(mask.get_kth_bit(2))
self.assertTrue(mask.get_kth_bit(4))
mask.unset_kth_bit(4)
self.assertFalse(mask.get_kth_bit(4))
def test_large_bitmasks(self):
bitstring = "0b" + "1" * 5678
mask = BitMask.from_bin(bitstring)
mask_as_bitstring = mask.to_bin()
self.assertEqual(mask_as_bitstring, bitstring)
def test_bitmask_from_length(self):
length = 4
mask = BitMask.from_length(length)
self.assertEqual(mask.to_bin(), "0b1111")
def test_get_and_set_outside_of_range(self):
mask = BitMask.from_bin("0b11110000")
self.assertFalse(mask.get_kth_bit(100))
mask.set_kth_bit(100)
self.assertTrue(mask.get_kth_bit(100))
def test_inverse(self):
# TODO: proper implementation
return
x = int("0b11110000", 2)
y = int("0b00001111", 2)
mask = BitMask(x)
mask.invert()
self.assertEqual(mask.x, y)
x = int("0b010101010101010101", 2)
y = int("0b101010101010101010", 2)
mask = BitMask(x)
mask.invert()
self.assertEqual(mask.x, y)
def test_store_and_retrieve(self):
x = int("0b01010100100100100100100010010100100100101001001010101010", 2)
mask = BitMask(x)
mask.set_kth_bit(11)
mask.set_kth_bit(22)
mask.set_kth_bit(33)
mask.set_kth_bit(44)
mask.set_kth_bit(55)
mask.set_kth_bit(66)
mask.set_kth_bit(77)
mask.set_kth_bit(88)
mask.set_kth_bit(99)
somewhere = mask.to_hex()
somewhere_else = mask.to_bin()
mask_somewhere = BitMask.from_hex(somewhere)
mask_somewhere_else = BitMask.from_bin(somewhere_else)
self.assertEqual(mask.x, mask_somewhere.x)
self.assertEqual(mask.x, mask_somewhere_else.x)
def test_union(self):
mask_a = BitMask.from_bin("0b001")
mask_b = BitMask.from_bin("0b100")
mask_a.union(mask_b)
self.assertEqual(mask_a.x, int("0b101", 2))
def test_intersection(self):
mask_a = BitMask.from_bin("0b101")
mask_b = BitMask.from_bin("0b100")
mask_a.intersection(mask_b)
self.assertEqual(mask_a.x, int("0b100", 2))
    def assert_difference(self, bitstring_1: str, bitstring_2: str, target: str):
        mask_a = BitMask.from_bin(bitstring_1)
        mask_b = BitMask.from_bin(bitstring_2)
        mask_a.difference(mask_b)
        self.assertEqual(mask_a.x, int(target, 2))
def test_differences(self):
self.assert_difference("0b101", "0b001", "0b100")
self.assert_difference("0b0111", "0b1100", "0b0011")
self.assert_difference("0b10111", "0b01100", "0b10011")
    def METHOD_NAME(self, length: int):
        bitstring = "0b"
        for _ in range(length):
            bitstring += str(randint(0, 1))
        return bitstring
def test_difference_random(self):
seed(42)
for rep in range(10):
for string_length in range(1, 100, 10):
bitstring_1 = self.METHOD_NAME(string_length)
bitstring_2 = self.METHOD_NAME(string_length)
target = "0b"
for bit_1, bit_2 in zip(bitstring_1[2:], bitstring_2[2:]):
if bit_1 == "1" and bit_2 == "0":
target += "1"
else:
target += "0"
self.assert_difference(bitstring_1, bitstring_2, target)
def test_operator_minus(self):
mask_a = BitMask.from_bin("0b10111")
mask_a_old = deepcopy(mask_a)
mask_b = BitMask.from_bin("0b01100")
mask_target = BitMask.from_bin("0b10011")
diff = mask_a - mask_b
self.assertEqual(diff, mask_target)
self.assertEqual(
mask_a_old, mask_a
) # make sure the original mask is unchanged.
def test_equal(self):
mask_a = BitMask.from_bin("0b101")
mask_b = BitMask.from_bin("0b101")
self.assertEqual(mask_a, mask_b)
def test_masked_select_from_list(self):
n = 1000
list_ = [randint(0, 1) for _ in range(n - 2)] + [0, 1]
mask = BitMask.from_length(n)
for index, item_ in enumerate(list_):
if item_ == 0:
mask.unset_kth_bit(index)
else:
mask.set_kth_bit(index)
all_ones = mask.masked_select_from_list(list_)
mask.invert(n)
all_zeros = mask.masked_select_from_list(list_)
self.assertGreater(len(all_ones), 0)
self.assertGreater(len(all_zeros), 0)
self.assertTrue(all([item_ > 0 for item_ in all_ones]))
self.assertTrue(all([item_ == 0 for item_ in all_zeros]))
def test_masked_select_from_list_example(self):
list_ = [1, 2, 3, 4, 5, 6]
mask = BitMask.from_bin("0b001101") # expected result is [1, 3, 4]
selected = mask.masked_select_from_list(list_)
self.assertListEqual(selected, [1, 3, 4])
def test_invert(self):
# get random bitstring
length = 10
bitstring = self.METHOD_NAME(10)
# get inverse
mask = BitMask.from_bin(bitstring)
mask.invert(length)
inverted = mask.to_bin()
# remove 0b
inverted = inverted[2:]
bitstring = bitstring[2:]
for i in range(min(len(bitstring), len(inverted))):
if bitstring[-i - 1] == "0":
self.assertEqual(inverted[-i - 1], "1")
else:
self.assertEqual(inverted[-i - 1], "0")
def test_nonzero_bits(self):
mask = BitMask.from_bin("0b0")
indices = [100, 1000, 10_000, 100_000]
self.assertEqual(mask.x, 0)
for index in indices:
mask.set_kth_bit(index)
self.assertGreaterEqual(mask.x, 0)
also_indices = mask.to_indices()
for i, j in zip(indices, also_indices):
self.assertEqual(i, j)
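# Reference sketch of the integer semantics these tests assume (a BitMask
# wraps an arbitrary-precision int `x`; plain ints shown for clarity):
#   a, b = 0b10111, 0b01100
#   difference = a & ~b      # 0b10011, matching test_differences
#   union = a | b            # 0b11111
#   intersection = a & b     # 0b00100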
| null |
1,565 |
# Copyright (c) 2021 by Apex.AI Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
import os
import unittest
import launch
from launch_ros.substitutions import ExecutableInPackage
import launch_testing
import launch_testing.actions
from launch_testing.asserts import assertSequentialStdout
import pytest
# @brief Test goal: "Integrationtest for the user-header example of iceoryx"
# @pre setup ROS2 launch executables for RouDi (debug mode) and the example processes
# @post check if all applications return exitcode 0 (success) after test run
@pytest.mark.launch_test
def generate_test_description():
proc_env = os.environ.copy()
colcon_prefix_path = os.environ.get('COLCON_PREFIX_PATH', '')
executable_list = ['iox-cpp-user-header-subscriber',
'iox-cpp-user-header-untyped-subscriber',
'iox-c-user-header-subscriber',
'iox-cpp-user-header-publisher',
'iox-cpp-user-header-untyped-publisher',
'iox-c-user-header-publisher']
process_list = []
roudi_executable = os.path.join(
colcon_prefix_path,
'iceoryx_posh/bin/',
'iox-roudi'
)
roudi_process = launch.actions.ExecuteProcess(
cmd=[roudi_executable, '-l', 'debug'],
env=proc_env, output='screen',
sigterm_timeout='20')
    for executable in executable_list:
        tmp_exec = os.path.join(
            colcon_prefix_path,
            'example_user_header/bin/',
            executable)
tmp_process = launch.actions.ExecuteProcess(
cmd=[tmp_exec],
env=proc_env, output='screen')
process_list.append(tmp_process)
print("Process list:", process_list)
return launch.LaunchDescription([
roudi_process,
process_list[0],
process_list[1],
process_list[2],
process_list[3],
process_list[4],
process_list[5],
launch_testing.actions.ReadyToTest()
]), {'roudi_process': roudi_process,
'user_header_cpp_subscriber_process': process_list[0],
'user_header_cpp_untyped_subscriber_process': process_list[1],
'user_header_c_subscriber_process': process_list[2],
'user_header_cpp_publisher_process': process_list[3],
'user_header_cpp_untyped_publisher_process': process_list[4],
'user_header_c_publisher_process': process_list[5]}
# These tests will run concurrently with the dut process. After this test is done,
# the launch system will shut down RouDi
class TestUserHeaderExample(unittest.TestCase):
def METHOD_NAME(self, proc_output):
proc_output.assertWaitFor(
'RouDi is ready for clients', timeout=45, stream='stdout')
def test_user_header_typed_cpp_publisher_to_all_subscriber(self, proc_output):
proc_output.assertWaitFor(
'iox-cpp-user-header-publisher sent data: 5 with timestamp 3042ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-cpp-user-header-subscriber got value: 5 with timestamp 3042ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-cpp-user-header-untyped-subscriber got value: 5 with timestamp 3042ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-c-user-header-subscriber got value: 5 with timestamp 3042ms', timeout=45, stream='stdout')
def test_user_header_untyped_cpp_publisher_to_all_subscriber(self, proc_output):
proc_output.assertWaitFor(
'iox-cpp-user-header-untyped-publisher sent data: 5 with timestamp 3073ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-cpp-user-header-subscriber got value: 5 with timestamp 3073ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-cpp-user-header-untyped-subscriber got value: 5 with timestamp 3073ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-c-user-header-subscriber got value: 5 with timestamp 3073ms', timeout=45, stream='stdout')
def test_user_header_c_publisher_to_all_subscriber(self, proc_output):
proc_output.assertWaitFor(
'iox-c-user-header-publisher sent data: 5 with timestamp 3037ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-cpp-user-header-subscriber got value: 5 with timestamp 3037ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-cpp-user-header-untyped-subscriber got value: 5 with timestamp 3037ms', timeout=45, stream='stdout')
proc_output.assertWaitFor(
'iox-c-user-header-subscriber got value: 5 with timestamp 3037ms', timeout=45, stream='stdout')
# These tests run after shutdown and examine the stdout log
@launch_testing.post_shutdown_test()
class TestUserHeaderExampleExitCodes(unittest.TestCase):
def test_exit_code(self, proc_info):
launch_testing.asserts.assertExitCodes(proc_info)
| null |
1,566 |
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
import os
import platform
import random
import string
import sys
import tempfile
import unittest
# We assume that this module is in the unittest directory
module_globals = globals()
unittest_dir = os.path.dirname(os.path.realpath(module_globals["__file__"]))
# Check to see if $GLIDEINWMS_LOCATION is defined. If it is, use that as the
# base directory for glideinWMS source code. If not, then assume the source is
# one level above the current directory. The reason for this is that a
# developer can write and execute unit tests without having to set up a special
# environment. However, on nmi, the location of the tests may or may not be
# tied to the location of glideinWMS source. On nmi, the $GLIDEINWMS_LOCATION
# will be defined instead.
if "GLIDEINWMS_LOCATION" in os.environ:
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "lib"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "factory"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "frontend"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "factory/tools"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "install"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "poolwatcher"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "tools"))
sys.path.append(os.path.join(os.environ["GLIDEINWMS_LOCATION"], "tools/lib"))
else:
sys.path.append(os.path.join(unittest_dir, "../lib"))
sys.path.append(os.path.join(unittest_dir, "../factory"))
sys.path.append(os.path.join(unittest_dir, "../frontend"))
sys.path.append(os.path.join(unittest_dir, "../factory/tools"))
sys.path.append(os.path.join(unittest_dir, "../install"))
sys.path.append(os.path.join(unittest_dir, "../poolwatcher"))
sys.path.append(os.path.join(unittest_dir, "../tools"))
sys.path.append(os.path.join(unittest_dir, "../tools/lib"))
def runTest(cls):
"""
Given a test class, generate and run a test suite
    @param cls: Test class used to generate the test suite. It is assumed
        that cls is a standard unittest.TestCase subclass whose tests can be
        loaded with unittest.TestLoader().loadTestsFromTestCase().
    @type cls: class
"""
testSuite = unittest.TestLoader().loadTestsFromTestCase(cls)
testRunner = unittest.TextTestRunner(verbosity=2)
result = testRunner.run(testSuite)
return not result.wasSuccessful()
def runAllTests():
"""
We assume that this particular module is in the unittest directory
Search the unittest directory for all files matching test_*.py.
Attempt to import main()
execute main()
What kinds of safety checks do we need here?
"""
def is_test(filename):
if (
os.path.isfile(os.path.join(unittest_dir, filename))
and filename.startswith("test_")
and filename.endswith(".py")
):
return True
return False
test_modules = [f[:-3] for f in os.listdir(unittest_dir) if is_test(f)]
modules = list(map(__import__, test_modules))
for test in modules:
test.main()
class FakeLogger:
"""
Super simple logger for the unittests
"""
def __init__(self, afile=sys.stderr):
self.file = afile
pass
def debug(self, msg, *args):
"""
Pass a debug message to stderr.
Prints out msg % args.
@param msg: A message string.
@param args: Arguments which should be evaluated into the message.
"""
msg = "DEBUG: %s" % msg
print(str(msg) % args, file=self.file, flush=True)
def info(self, msg, *args):
"""
Pass an info-level message to stderr.
@see: debug
"""
msg = "INFO: %s" % msg
print(str(msg) % args, file=self.file, flush=True)
def warning(self, msg, *args):
"""
Pass a warning-level message to stderr.
@see: debug
"""
msg = "WARNING: %s" % msg
print(str(msg) % args, file=self.file, flush=True)
def error(self, msg, *args):
"""
Pass an error message to stderr.
@see: debug
"""
msg = "ERROR: %s" % msg
print(str(msg) % args, file=self.file, flush=True)
def METHOD_NAME(self, msg, *args):
"""
Pass an exception message to stderr.
@see: debug
"""
msg = "EXCEPTION: %s" % msg
print(str(msg) % args, file=self.file, flush=True)
class TestImportError(Exception):
"""
Error handler for import errors in this test suite
If import of package listed in handled_import_errors fails, print
out hopefully informative message and exit 0
"""
def __init__(self, err_msg="Error"):
handled_import_errors = ["M2Crypto"]
sys_ = platform.system()
if sys_ != "Linux":
err_msg += """. Platform %s is not well tested/supported """ % sys_
for imp_lib in handled_import_errors:
if imp_lib in err_msg:
if sys_ == "Darwin":
err_msg += """. Hint: try brew install or conda install %s first.""" % imp_lib
elif sys_ == "Linux":
err_msg += """. Hint: try yum install or apt-get install %s first.""" % imp_lib
else:
err_msg += """. %s python package must be present.""" % imp_lib
print("%s" % err_msg)
sys.exit(0)
raise Exception(err_msg)
def create_temp_file(file_suffix="", file_prefix="tmp", file_dir="/tmp", text_access=True, write_path_to_file=True):
fd, path = tempfile.mkstemp(suffix=file_suffix, prefix=file_prefix, dir=file_dir, text=text_access)
if write_path_to_file:
os.write(fd, path.encode("UTF-8"))
os.close(fd)
return path
def create_random_string(length=8):
char_set = string.ascii_uppercase + string.digits
return "".join(random.choice(char_set) for x in range(length))
def balanced_text(myText):
"""
    Checks line by line that parentheses, brackets, braces and quotation
    marks are balanced
    Args:
        myText: lines of a text file (an iterable of line strings)
Returns:
string, "Balanced" if no problems found
"Unbalanced, line x" if a problem was found
on line x of text
"""
open_list = ["[", "{", "("]
close_list = ["]", "}", ")"]
quote_list = [
"'",
'"',
"`",
]
lnum = 1
for line in myText:
stack = []
for i in line:
if i in open_list:
stack.append(i)
elif i in close_list:
pos = close_list.index(i)
rm = open_list[pos]
if (len(stack) > 0) and rm in stack:
stack.remove(rm)
if i in quote_list:
if (len(stack) > 0) and i in stack:
stack.remove(i)
else:
stack.append(i)
if len(stack) != 0:
return "Unbalanced line %s" % lnum
lnum += 1
return "Balanced"
if __name__ == "__main__":
runAllTests()
| null |
1,567 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class CreateForwardingRulesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'CreateForwardingRules','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_ListenerId(self): # String
return self.get_query_params().get('ListenerId')
def METHOD_NAME(self, ListenerId): # String
self.add_query_param('ListenerId', ListenerId)
def get_AcceleratorId(self): # String
return self.get_query_params().get('AcceleratorId')
def set_AcceleratorId(self, AcceleratorId): # String
self.add_query_param('AcceleratorId', AcceleratorId)
def get_ForwardingRules(self): # Array
return self.get_query_params().get('ForwardingRules')
def set_ForwardingRules(self, ForwardingRules): # Array
for index1, value1 in enumerate(ForwardingRules):
if value1.get('Priority') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.Priority', value1.get('Priority'))
if value1.get('RuleConditions') is not None:
for index2, value2 in enumerate(value1.get('RuleConditions')):
if value2.get('RuleConditionType') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleConditions.' + str(index2 + 1) + '.RuleConditionType', value2.get('RuleConditionType'))
if value2.get('RuleConditionValue') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleConditions.' + str(index2 + 1) + '.RuleConditionValue', value2.get('RuleConditionValue'))
if value2.get('PathConfig') is not None:
if value2.get('PathConfig').get('Values') is not None:
for index3, value3 in enumerate(value2.get('PathConfig').get('Values')):
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleConditions.' + str(index2 + 1) + '.PathConfig.Values.' + str(index3 + 1), value3)
if value2.get('HostConfig') is not None:
if value2.get('HostConfig').get('Values') is not None:
for index3, value3 in enumerate(value2.get('HostConfig').get('Values')):
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleConditions.' + str(index2 + 1) + '.HostConfig.Values.' + str(index3 + 1), value3)
if value1.get('RuleActions') is not None:
for index2, value2 in enumerate(value1.get('RuleActions')):
if value2.get('Order') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleActions.' + str(index2 + 1) + '.Order', value2.get('Order'))
if value2.get('RuleActionType') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleActions.' + str(index2 + 1) + '.RuleActionType', value2.get('RuleActionType'))
if value2.get('RuleActionValue') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleActions.' + str(index2 + 1) + '.RuleActionValue', value2.get('RuleActionValue'))
if value2.get('ForwardGroupConfig') is not None:
if value2.get('ForwardGroupConfig').get('ServerGroupTuples') is not None:
for index3, value3 in enumerate(value2.get('ForwardGroupConfig').get('ServerGroupTuples')):
if value3.get('EndpointGroupId') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleActions.' + str(index2 + 1) + '.ForwardGroupConfig.ServerGroupTuples.' + str(index3 + 1) + '.EndpointGroupId', value3.get('EndpointGroupId'))
if value1.get('ForwardingRuleName') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.ForwardingRuleName', value1.get('ForwardingRuleName'))
if value1.get('RuleDirection') is not None:
self.add_query_param('ForwardingRules.' + str(index1 + 1) + '.RuleDirection', value1.get('RuleDirection'))
| null |
1,568 |
# Drakkar-Software OctoBot-Tentacles
# Copyright (c) Drakkar-Software, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import pytest
import octobot_commons.constants as commons_constants
import octobot_commons.logging as logging
import octobot_services.constants as services_constants
import tentacles.Evaluator.Social as Social
import tests.test_utils.config as test_utils_config
# All test coroutines will be treated as marked.
pytestmark = pytest.mark.asyncio
async def _trigger_callback_with_data_and_assert_note(evaluator: Social.TelegramChannelSignalEvaluator,
data=None,
note=commons_constants.START_PENDING_EVAL_NOTE):
await evaluator._feed_callback(data)
assert evaluator.eval_note == note
evaluator.eval_note = commons_constants.START_PENDING_EVAL_NOTE
def _create_evaluator_with_supported_channel_signals():
evaluator = Social.TelegramChannelSignalEvaluator(test_utils_config.load_test_tentacles_config())
evaluator.logger = logging.get_logger(evaluator.get_name())
evaluator.specific_config = {
"telegram-channels": [
{
"channel_name": "TEST-CHAN-1",
"signal_pattern": {
"MARKET_BUY": "Side: (BUY)",
"MARKET_SELL": "Side: (SELL)"
},
"signal_pair": "Pair: (.*)"
},
{
"channel_name": "TEST-CHAN-2",
"signal_pattern": {
"MARKET_BUY": ".* : (-1)$",
"MARKET_SELL": ".* : (1)$"
},
"signal_pair": "(.*):"
}
]
}
evaluator.init_user_inputs({})
evaluator.eval_note = commons_constants.START_PENDING_EVAL_NOTE
return evaluator
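# Sketch of the matching the channel configs above imply (regexes taken from
# specific_config; the evaluator's real internals may differ):
#   import re
#   msg = "Pair: QTUMUSDT\nSide: BUY"
#   pair = re.search(r"Pair: (.*)", msg).group(1)        # 'QTUMUSDT'
#   is_buy = re.search(r"Side: (BUY)", msg) is not None  # -> eval_note -1 (buy)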
async def test_without_data():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator)
async def test_with_empty_data():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={})
async def test_incorrect_signal_without_sender_without_channel_message():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: False,
services_constants.CONFIG_MESSAGE_SENDER: "",
services_constants.CONFIG_MESSAGE_CONTENT: "",
})
async def test_incorrect_signal_without_sender_with_channel_message():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "",
services_constants.CONFIG_MESSAGE_CONTENT: "",
})
async def test_incorrect_signal_chan1_without_content():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-1",
services_constants.CONFIG_MESSAGE_CONTENT: "",
})
async def test_incorrect_signal_chan1_without_coin():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-1",
services_constants.CONFIG_MESSAGE_CONTENT: """
Order Id: 1631033831358699
Pair:
Side:
Price: 12.909
""",
})
async def METHOD_NAME():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-1",
services_constants.CONFIG_MESSAGE_CONTENT: """
Order Id: 1631033831358699
Pair QTUMUSDT
Side: BUY
Price: 12.909
""",
})
async def test_correct_signal_chan1_with_not_channel_message():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: False,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-1",
services_constants.CONFIG_MESSAGE_CONTENT: """
Order Id: 1631033831358699
Pair: QTUMUSDT
Side: BUY
Price: 12.909
""",
})
async def test_correct_signal_chan1_with_chan2():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-2",
services_constants.CONFIG_MESSAGE_CONTENT: """
Order Id: 1631033831358699
Pair: QTUMUSDT
Side: BUY
Price: 12.909
""",
})
async def test_correct_signal_chan1():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-1",
services_constants.CONFIG_MESSAGE_CONTENT: """
Order Id: 1631033831358699
Pair: QTUMUSDT
Side: BUY
Price: 12.909
""",
}, note=-1)
async def test_correct_signal_chan2_but_with_chan1():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-1",
services_constants.CONFIG_MESSAGE_CONTENT: "BTC/USDT : 1",
})
async def test_correct_signal_chan2():
evaluator = _create_evaluator_with_supported_channel_signals()
await _trigger_callback_with_data_and_assert_note(evaluator, data={
services_constants.CONFIG_IS_CHANNEL_MESSAGE: True,
services_constants.CONFIG_MESSAGE_SENDER: "TEST-CHAN-2",
services_constants.CONFIG_MESSAGE_CONTENT: "BTC/USDT : -1",
}, note=-1)
| null |
1,569 |
#! /usr/bin/env python
'''Tests of joinmarket bots end-to-end (including IRC and bitcoin) '''
import time
from twisted.trial import unittest
from twisted.internet import reactor, task
from jmdaemon import IRCMessageChannel, MessageChannelCollection
#needed for test framework
from jmclient import (load_test_config, get_mchannels, jm_single)
import pytest
pytestmark = pytest.mark.usefixtures("setup_miniircd", "setup_regtest_bitcoind")
si = 1
class DummyDaemon(object):
def request_signature_verify(self, a, b, c, d, e,
f, g, h):
return True
class DummyMC(IRCMessageChannel):
def __init__(self, configdata, nick, daemon):
super().__init__(configdata, daemon=daemon)
self.daemon = daemon
self.set_nick(nick)
def on_connect(x):
print('simulated on-connect')
def on_welcome(mc):
print('simulated on-welcome')
mc.tx_irc_client.lineRate = 0.2
if mc.nick == "irc_publisher":
d = task.deferLater(reactor, 3.0, junk_pubmsgs, mc)
d.addCallback(junk_longmsgs)
d.addCallback(junk_announce)
d.addCallback(junk_fill)
def on_disconnect(x):
print('simulated on-disconnect')
def METHOD_NAME(dummy, counterparty, oid, ordertype, minsize,
maxsize, txfee, cjfee):
global yg_name
yg_name = counterparty
def on_pubkey(pubkey):
print("received pubkey: " + pubkey)
def junk_pubmsgs(mc):
#start a raw IRCMessageChannel instance in a thread;
#then call send_* on it with various errant messages
time.sleep(si)
mc.request_orderbook()
time.sleep(si)
#now try directly
mc.pubmsg("!orderbook")
time.sleep(si)
#should be ignored; can we check?
mc.pubmsg("!orderbook!orderbook")
return mc
def junk_longmsgs(mc):
#assuming MAX_PRIVMSG_LEN is not something crazy
#big like 550, this should fail
#with pytest.raises(AssertionError) as e_info:
mc.pubmsg("junk and crap"*40)
time.sleep(si)
#assuming MAX_PRIVMSG_LEN is not something crazy
#small like 180, this should succeed
mc.pubmsg("junk and crap"*15)
time.sleep(si)
return mc
def junk_announce(mc):
#try a long order announcement in public
#because we don't want to build a real orderbook,
#call the underlying IRC announce function.
#TODO: how to test that the sent format was correct?
print('got here')
mc._announce_orders(["!abc def gh 0001"]*30)
time.sleep(si)
return mc
def junk_fill(mc):
cpname = "irc_receiver"
#send a fill with an invalid pubkey to the existing yg;
#this should trigger a NaclError but should NOT kill it.
mc._privmsg(cpname, "fill", "0 10000000 abcdef")
#Try with ob flag
mc._pubmsg("!reloffer stuff")
time.sleep(si)
#Trigger throttling with large messages
mc._privmsg(cpname, "tx", "aa"*5000)
time.sleep(si)
#with pytest.raises(CJPeerError) as e_info:
mc.send_error(cpname, "fly you fools!")
time.sleep(si)
return mc
def getmc(nick):
dm = DummyDaemon()
mc = DummyMC(get_mchannels()[0], nick, dm)
mc.register_orderbookwatch_callbacks(METHOD_NAME=METHOD_NAME)
mc.register_taker_callbacks(on_pubkey=on_pubkey)
mc.on_connect = on_connect
mc.on_disconnect = on_disconnect
mc.on_welcome = on_welcome
mcc = MessageChannelCollection([mc])
return dm, mc, mcc
class TrialIRC(unittest.TestCase):
def setUp(self):
load_test_config()
print(get_mchannels()[0])
jm_single().maker_timeout_sec = 1
dm, mc, mcc = getmc("irc_publisher")
dm2, mc2, mcc2 = getmc("irc_receiver")
mcc.run()
mcc2.run()
def cb(m):
#don't try to reconnect
m.give_up = True
m.tcp_connector.disconnect()
self.addCleanup(cb, mc)
self.addCleanup(cb, mc2)
#test_junk_messages()
print("Got here")
def test_waiter(self):
print("test_main()")
#reactor.callLater(1.0, junk_messages, self.mcc)
        return task.deferLater(reactor, 30, self._called_by_deferred)
    def _called_by_deferred(self):
pass
| null |
1,570 |
## @file
# This file is used to define class objects of INF file [Packages] section.
# It will consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfPackageObject
'''
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import GlobalData
from Library.Misc import Sdict
from Library.ParserValidate import IsValidPath
from Library.ExpressionValidate import IsValidFeatureFlagExp
class InfPackageItem():
def __init__(self,
PackageName = '',
FeatureFlagExp = '',
HelpString = ''):
self.PackageName = PackageName
self.FeatureFlagExp = FeatureFlagExp
self.HelpString = HelpString
self.SupArchList = []
def SetPackageName(self, PackageName):
self.PackageName = PackageName
def GetPackageName(self):
return self.PackageName
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
## INF package section
#
#
#
class InfPackageObject():
def __init__(self):
self.Packages = Sdict()
#
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
def METHOD_NAME(self, PackageData, Arch = None):
IsValidFileFlag = False
SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
SupArchList.append(ArchItem)
for PackageItem in PackageData:
PackageItemObj = InfPackageItem()
HelpStringObj = PackageItem[1]
CurrentLineOfPackItem = PackageItem[2]
PackageItem = PackageItem[0]
if HelpStringObj is not None:
HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
PackageItemObj.SetHelpString(HelpString)
if len(PackageItem) >= 1:
#
# Validate file exist/format.
#
if IsValidPath(PackageItem[0], ''):
IsValidFileFlag = True
elif IsValidPath(PackageItem[0], GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
elif IsValidPath(PackageItem[0], GlobalData.gWORKSPACE):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(PackageItem[0]),
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
return False
if IsValidFileFlag:
PackageItemObj.SetPackageName(PackageItem[0])
if len(PackageItem) == 2:
#
# Validate Feature Flag Express
#
if PackageItem[1].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(PackageItem[1].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
PackageItemObj.SetFeatureFlagExp(PackageItem[1].strip())
if len(PackageItem) > 2:
#
# Invalid format of Package statement
#
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR,
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
PackageItemObj.SetSupArchList(SupArchList)
#
            # Check for duplicate package file names, following the rules below:
#
# A package filename must not be duplicated within a [Packages]
# section. Package filenames may appear in multiple architectural
# [Packages] sections. A package filename listed in an
# architectural [Packages] section must not be listed in the common
# architectural [Packages] section.
#
# NOTE: This check will not report error now.
#
for Item in self.Packages:
if Item.GetPackageName() == PackageItemObj.GetPackageName():
ItemSupArchList = Item.GetSupArchList()
for ItemArch in ItemSupArchList:
for PackageItemObjArch in SupArchList:
if ItemArch == PackageItemObjArch:
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE
#
pass
if ItemArch.upper() == 'COMMON' or PackageItemObjArch.upper() == 'COMMON':
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
if (PackageItemObj) in self.Packages:
PackageList = self.Packages[PackageItemObj]
PackageList.append(PackageItemObj)
self.Packages[PackageItemObj] = PackageList
else:
PackageList = []
PackageList.append(PackageItemObj)
self.Packages[PackageItemObj] = PackageList
return True
def GetPackages(self, Arch = None):
if Arch is None:
return self.Packages
| null |
1,571 |
# License: MIT
# Copyright © 2022 Frequenz Energy-as-a-Service GmbH
"""Frequenz Python SDK resampling example."""
import asyncio
import dataclasses
from collections.abc import Iterator
from datetime import datetime, timedelta, timezone
import async_solipsism
import pytest
import time_machine
from frequenz.channels import Broadcast
from frequenz.sdk.actor import (
ChannelRegistry,
ComponentMetricRequest,
ComponentMetricsResamplingActor,
ResamplerConfig,
)
from frequenz.sdk.microgrid.component import ComponentMetricId
from frequenz.sdk.timeseries import Sample
from frequenz.sdk.timeseries._quantities import Quantity
# pylint: disable=too-many-locals,redefined-outer-name
#
# Setting 'autouse' has no effect as this method replaces the event loop for all tests in the file.
@pytest.fixture()
def METHOD_NAME() -> Iterator[async_solipsism.EventLoop]:
"""Replace the loop with one that doesn't interact with the outside world."""
loop = async_solipsism.EventLoop()
yield loop
loop.close()
def _now() -> datetime:
return datetime.now(timezone.utc)
async def _assert_resampling_works(
channel_registry: ChannelRegistry,
fake_time: time_machine.Coordinates,
*,
resampling_chan_name: str,
data_source_chan_name: str,
) -> None:
timeseries_receiver = channel_registry.new_receiver(resampling_chan_name)
timeseries_sender = channel_registry.new_sender(data_source_chan_name)
fake_time.shift(0.2)
new_sample = await timeseries_receiver.receive() # At 0.2s (timer)
assert new_sample == Sample(_now(), None)
fake_time.shift(0.1)
sample = Sample(_now(), Quantity(3)) # ts = 0.3s
await timeseries_sender.send(sample)
fake_time.shift(0.1)
new_sample = await timeseries_receiver.receive() # At 0.4s (timer)
assert new_sample is not None and new_sample.value is not None
assert new_sample.value.base_value == 3
assert new_sample.timestamp >= sample.timestamp
assert new_sample.timestamp == _now()
fake_time.shift(0.05)
sample = Sample(_now(), Quantity(4)) # ts = 0.45s
await timeseries_sender.send(sample)
fake_time.shift(0.15)
new_sample = await timeseries_receiver.receive() # At 0.6s (timer)
assert new_sample is not None and new_sample.value is not None
assert new_sample.value.base_value == 3.5 # avg(3, 4)
assert new_sample.timestamp >= sample.timestamp
assert new_sample.timestamp == _now()
fake_time.shift(0.05)
await timeseries_sender.send(Sample(_now(), Quantity(8))) # ts = 0.65s
fake_time.shift(0.05)
await timeseries_sender.send(Sample(_now(), Quantity(1))) # ts = 0.7s
fake_time.shift(0.05)
sample = Sample(_now(), Quantity(9)) # ts = 0.75s
await timeseries_sender.send(sample)
fake_time.shift(0.05)
new_sample = await timeseries_receiver.receive() # At 0.8s (timer)
assert new_sample is not None and new_sample.value is not None
assert new_sample.value.base_value == 5.5 # avg(4, 8, 1, 9)
assert new_sample.timestamp >= sample.timestamp
assert new_sample.timestamp == _now()
# No more samples sent
fake_time.shift(0.2)
new_sample = await timeseries_receiver.receive() # At 1.0s (timer)
assert new_sample is not None and new_sample.value is not None
assert new_sample.value.base_value == 6 # avg(8, 1, 9)
assert new_sample.timestamp >= sample.timestamp
assert new_sample.timestamp == _now()
# No more samples sent
fake_time.shift(0.2)
new_sample = await timeseries_receiver.receive() # At 1.2s (timer)
assert new_sample is not None
assert new_sample.value is None
assert new_sample.timestamp == _now()
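# The expected values above follow plain windowed averaging: with a 0.2 s
# resampling period and max_data_age_in_periods=2, each timer tick averages
# the samples received over the last 0.4 s. A standalone sketch of that rule:
#   def windowed_average(samples, now, period=0.2, max_age_periods=2):
#       window = [v for ts, v in samples if now - ts <= period * max_age_periods]
#       return sum(window) / len(window) if window else None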
async def test_single_request(
fake_time: time_machine.Coordinates,
) -> None:
"""Run main functions that initializes and creates everything."""
channel_registry = ChannelRegistry(name="test")
data_source_req_chan = Broadcast[ComponentMetricRequest]("data-source-req")
data_source_req_recv = data_source_req_chan.new_receiver()
resampling_req_chan = Broadcast[ComponentMetricRequest]("resample-req")
resampling_req_sender = resampling_req_chan.new_sender()
async with ComponentMetricsResamplingActor(
channel_registry=channel_registry,
data_sourcing_request_sender=data_source_req_chan.new_sender(),
resampling_request_receiver=resampling_req_chan.new_receiver(),
config=ResamplerConfig(
resampling_period=timedelta(seconds=0.2),
max_data_age_in_periods=2,
),
) as resampling_actor:
subs_req = ComponentMetricRequest(
namespace="Resampling",
component_id=9,
metric_id=ComponentMetricId.SOC,
start_time=None,
)
await resampling_req_sender.send(subs_req)
data_source_req = await data_source_req_recv.receive()
assert data_source_req is not None
assert data_source_req == dataclasses.replace(
subs_req, namespace="Resampling:Source"
)
await _assert_resampling_works(
channel_registry,
fake_time,
resampling_chan_name=subs_req.get_channel_name(),
data_source_chan_name=data_source_req.get_channel_name(),
)
await resampling_actor._resampler.stop() # pylint: disable=protected-access
async def test_duplicate_request(
fake_time: time_machine.Coordinates,
) -> None:
"""Run main functions that initializes and creates everything."""
channel_registry = ChannelRegistry(name="test")
data_source_req_chan = Broadcast[ComponentMetricRequest]("data-source-req")
data_source_req_recv = data_source_req_chan.new_receiver()
resampling_req_chan = Broadcast[ComponentMetricRequest]("resample-req")
resampling_req_sender = resampling_req_chan.new_sender()
async with ComponentMetricsResamplingActor(
channel_registry=channel_registry,
data_sourcing_request_sender=data_source_req_chan.new_sender(),
resampling_request_receiver=resampling_req_chan.new_receiver(),
config=ResamplerConfig(
resampling_period=timedelta(seconds=0.2),
max_data_age_in_periods=2,
),
) as resampling_actor:
subs_req = ComponentMetricRequest(
namespace="Resampling",
component_id=9,
metric_id=ComponentMetricId.SOC,
start_time=None,
)
await resampling_req_sender.send(subs_req)
data_source_req = await data_source_req_recv.receive()
# Send duplicate request
await resampling_req_sender.send(subs_req)
with pytest.raises(asyncio.TimeoutError):
await asyncio.wait_for(data_source_req_recv.receive(), timeout=0.1)
await _assert_resampling_works(
channel_registry,
fake_time,
resampling_chan_name=subs_req.get_channel_name(),
data_source_chan_name=data_source_req.get_channel_name(),
)
await resampling_actor._resampler.stop() # pylint: disable=protected-access
| null |
1,572 |
# Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
import json
import os
import tempfile
from NeoVintageous.nv import session
from NeoVintageous.tests import unittest
class TestSession(unittest.ViewTestCase):
@unittest.mock_session()
def test_session_on_close_view(self):
session.session_on_close(self.view)
@unittest.mock_session()
def test_session_on_close_silently_catches_key_errors(self):
view = unittest.mock.Mock()
view.id.return_value = -1
session.session_on_close(view)
view.id.assert_called_once()
@unittest.mock_session()
def test_get_set_session_value(self):
session.set_session_value('fizz', 'buzz')
self.assertEqual('buzz', session.get_session_value('fizz'))
@unittest.mock_session()
def test_get__session_value_returns_default_when_does_not_exist(self):
self.assertEqual(None, session.get_session_value('fizz'))
@unittest.mock_session()
def test_get_session_value_returns_default_param_value_when_does_not_exist(self):
self.assertEqual('default', session.get_session_value('fizz', 'default'))
@unittest.mock_session()
def METHOD_NAME(self):
session.set_session_value('fizz', 'buzz', persist=True)
if unittest.ST_VERSION >= 4081:
self.assertSessionNotSaved()
else:
self.assertSessionSaved()
@unittest.mock_session()
@unittest.mock.patch('NeoVintageous.nv.session._get_session_file')
def test_load_empty_object_session(self, get_session_file):
get_session_file.return_value = self.fixturePath('session_is_empty_object.json')
session.load_session()
self.assertSession({})
@unittest.mock_session()
@unittest.mock.patch('NeoVintageous.nv.session._get_session_file')
def test_load_empty_session(self, get_session_file):
get_session_file.return_value = self.fixturePath('session_is_empty.json')
session.load_session()
self.assertSession({})
@unittest.mock_session()
@unittest.mock.patch('NeoVintageous.nv.session._get_session_file')
def test_load_blank_session(self, get_session_file):
get_session_file.return_value = self.fixturePath('session_is_blank.json')
session.load_session()
self.assertSession({})
@unittest.mock_session()
@unittest.mock.patch('NeoVintageous.nv.session._get_session_file')
def test_load_session_does_not_exist_is_noop(self, get_session_file):
get_session_file.return_value = self.fixturePath('session_does_not_exist.json')
session.load_session()
self.assertSession({})
@unittest.mock_session()
@unittest.mock.patch('NeoVintageous.nv.session._get_session_file')
def test_load_session_basic(self, get_session_file):
get_session_file.return_value = self.fixturePath('session_basic.json')
session.load_session()
self.assertSession({
"history": {
1: {
'num': 6,
'items': {
1: 's/fizz/buzz/',
2: 'ls',
6: 'help'
}
},
2: {'num': 0, "items": {}},
3: {'num': 0, "items": {}},
4: {'num': 0, "items": {}},
5: {'num': 0, "items": {}},
},
"last_substitute_search_pattern": "fizz",
"last_substitute_string": "buzz",
"last_used_register_name": "w",
"macros": {
"w": [
[
"nv_vi_w",
{
"mode": "mode_normal",
"count": 1
}
]
]
},
})
@unittest.mock.patch.dict('NeoVintageous.nv.session._session', {
"foo": "bar",
"history": {
1: {
'num': 6,
'items': {
1: 's/fizz/buzz/',
2: 'ls',
6: 'help'
}
},
2: {'num': 0, "items": {}},
3: {'num': 0, "items": {}},
4: {'num': 0, "items": {}},
5: {'num': 0, "items": {}}
},
"last_substitute_search_pattern": "fizz",
"last_substitute_string": "buzz",
"last_used_register_name": "w",
"macros": {
"w": [
[
"nv_vi_w",
{
"mode": "mode_normal",
"count": 1
}
]
]
},
}, clear=True)
@unittest.mock.patch('NeoVintageous.nv.session._get_session_file')
def test_save_session(self, get_session_file):
self.maxDiff = None
with tempfile.TemporaryDirectory() as tmpdir:
session_file = os.path.join(tmpdir, 'test.session')
get_session_file.return_value = session_file
session.save_session()
with open(session_file, 'r', encoding='utf-8', errors='replace') as f:
with open(self.fixturePath('session_basic.json'), 'r', encoding='utf-8') as s:
self.assertEqual(
sorted(json.loads(f.read())),
sorted(json.loads(s.read())))
| null |
1,573 |
# Copyright (c) 2023 The Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
import urllib.parse
class AbstractClient(ABC):
def verify_status_code(self, status_code: int) -> None:
"""
Verifies that the status code is 200.
:param status_code: The status code to verify.
"""
if status_code == 200:
return
if status_code == 429:
raise Exception("Panic: Too many requests")
if status_code == 401:
raise Exception("Panic: Unauthorized")
if status_code == 404:
raise Exception("Panic: Not found")
if status_code == 400:
raise Exception("Panic: Bad request")
if status_code == 500:
raise Exception("Panic: Internal server error")
raise Exception(f"Panic: Unknown status code {status_code}")
def _url_validator(self, url: str) -> bool:
"""
Validates the provided URL.
:param url: The URL to be validated.
:return: True if the URL is valid, False otherwise.
"""
try:
result = urllib.parse.urlparse(url)
return all([result.scheme, result.netloc, result.path])
        except ValueError:
            return False
@abstractmethod
def get_resources(
self,
resource_id: Optional[str] = None,
resource_version: Optional[str] = None,
gem5_version: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""
:param resource_id: The ID of the Resource. Optional, if not set, all
resources will be returned.
:param resource_version: The version of the Resource. Optional, if
not set, all resource versions will be returned. Note: If `resource_id`
is not set, this parameter will be ignored.
:param gem5_version: The version of gem5. Optional, if not set, all
versions will be returned.
:return: A list of all the Resources with the given ID.
"""
raise NotImplementedError
def filter_incompatible_resources(
self,
resources_to_filter: List[Dict[str, Any]],
gem5_version: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""Returns a filtered list resources based on gem5 version
compatibility.
Note: This function assumes if the minor component of
a resource's gem5_version is not specified, the resource is compatible
with all minor versions of the same major version.
Likewise, if no hot-fix component is specified, it is assumed that
the resource is compatible with all hot-fix versions of the same
minor version.
* '20.1' would be compatible with gem5 '20.1.1.0' and '20.1.2.0'.
        * '21.5.2' would be compatible with gem5 '21.5.2.0' and '21.5.2.1'.
* '22.3.2.4' would only be compatible with gem5 '22.3.2.4'.
:param resources_to_filter: The list of resources to filter.
:param gem5_version: The gem5 version in which the filtered resources
should be compatible. If None, no filtering will be done.
        :return: The filtered list of resources.
"""
if not gem5_version:
return resources_to_filter
        filtered_resources = []
        for resource in resources_to_filter:
            for version in resource["gem5_versions"]:
                if gem5_version.startswith(version):
                    filtered_resources.append(resource)
                    break
        return filtered_resources
def METHOD_NAME(self, resource_id: str) -> List[Dict[str, Any]]:
"""
:param resource_id: The ID of the Resource.
:return: A list of all the Resources with the given ID.
"""
return self.get_resources(resource_id=resource_id)
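# Illustrative sketch (not part of the gem5 sources): how
# filter_incompatible_resources narrows a resource list by gem5 version
# prefix. The _DemoClient subclass and resource dicts are hypothetical
# stand-ins for a real client and database entries.
if __name__ == "__main__":
    class _DemoClient(AbstractClient):
        def get_resources(
            self,
            resource_id: Optional[str] = None,
            resource_version: Optional[str] = None,
            gem5_version: Optional[str] = None,
        ) -> List[Dict[str, Any]]:
            return []
    demo = _DemoClient()
    resources = [
        {"id": "riscv-disk-img", "gem5_versions": ["22.1"]},
        {"id": "arm-boot-test", "gem5_versions": ["21.2"]},
    ]
    # '22.1.1.0' starts with '22.1', so only the first resource survives.
    print(demo.filter_incompatible_resources(resources, gem5_version="22.1.1.0"))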
| null |
1,574 |
from shlex import quote
from typing import (
Optional,
Sequence,
)
from pcs.cli.nvset import nvset_dto_to_lines
from pcs.cli.resource.output import resource_agent_parameter_metadata_to_text
from pcs.common.pacemaker.cluster_property import ClusterPropertyMetadataDto
from pcs.common.pacemaker.nvset import (
CibNvsetDto,
ListCibNvsetDto,
)
from pcs.common.resource_agent.dto import ResourceAgentParameterDto
from pcs.common.str_tools import (
format_name_value_default_list,
format_name_value_list,
indent,
)
from pcs.common.types import (
StringCollection,
StringSequence,
)
class PropertyConfigurationFacade:
def __init__(
self,
properties: Sequence[CibNvsetDto],
properties_metadata: Sequence[ResourceAgentParameterDto],
readonly_properties: StringCollection,
) -> None:
self._properties = properties
self._first_nvpair_set = (
self._properties[0].nvpairs if self._properties else []
)
self._properties_metadata = properties_metadata
self._readonly_properties = readonly_properties
self._defaults_map = self.get_defaults(include_advanced=True)
self._name_nvpair_dto_map = {
nvpair_dto.name: nvpair_dto for nvpair_dto in self._first_nvpair_set
}
@classmethod
def from_properties_dtos(
cls,
properties_dto: ListCibNvsetDto,
properties_metadata_dto: ClusterPropertyMetadataDto,
) -> "PropertyConfigurationFacade":
return cls(
properties_dto.nvsets,
properties_metadata_dto.properties_metadata,
properties_metadata_dto.readonly_properties,
)
@classmethod
def from_properties_config(
cls, properties_dto: ListCibNvsetDto
) -> "PropertyConfigurationFacade":
return cls(
properties_dto.nvsets,
[],
[],
)
@classmethod
def from_properties_metadata(
cls, properties_metadata_dto: ClusterPropertyMetadataDto
) -> "PropertyConfigurationFacade":
return cls(
[],
properties_metadata_dto.properties_metadata,
properties_metadata_dto.readonly_properties,
)
@property
def properties(self) -> Sequence[CibNvsetDto]:
return self._properties
@property
def properties_metadata(self) -> Sequence[ResourceAgentParameterDto]:
return self._properties_metadata
@property
def readonly_properties(self) -> StringCollection:
return self._readonly_properties
def METHOD_NAME(
self, property_name: str, custom_default: Optional[str] = None
) -> Optional[str]:
nvpair = self._name_nvpair_dto_map.get(property_name)
return nvpair.value if nvpair else custom_default
def get_property_value_or_default(
self, property_name: str, custom_default: Optional[str] = None
) -> Optional[str]:
value = self.METHOD_NAME(property_name)
if value is not None:
return value
return self._defaults_map.get(property_name, custom_default)
def get_defaults(
self,
property_names: Optional[StringSequence] = None,
include_advanced: bool = False,
) -> dict[str, str]:
return {
metadata.name: metadata.default
for metadata in self.get_properties_metadata(
property_names, include_advanced
)
if metadata.default is not None
}
def get_properties_metadata(
self,
property_names: Optional[StringSequence] = None,
include_advanced: bool = False,
) -> Sequence[ResourceAgentParameterDto]:
if property_names:
filtered_metadata = [
metadata
for metadata in self._properties_metadata
if metadata.name in property_names
]
else:
filtered_metadata = [
metadata
for metadata in self._properties_metadata
if include_advanced or not metadata.advanced
]
deduplicated_metadata = {
metadata.name: metadata for metadata in filtered_metadata
}
return list(deduplicated_metadata.values())
def get_name_value_default_list(self) -> list[tuple[str, str, bool]]:
name_value_default_list = [
(nvpair_dto.name, nvpair_dto.value, False)
for nvpair_dto in self._first_nvpair_set
]
name_value_default_list.extend(
[
(metadata_dto.name, metadata_dto.default, True)
for metadata_dto in self.get_properties_metadata(
include_advanced=True
)
if metadata_dto.name not in self._name_nvpair_dto_map
and metadata_dto.default is not None
]
)
return name_value_default_list
def properties_to_text(
properties_facade: PropertyConfigurationFacade,
) -> list[str]:
"""
Return a text format of configured properties.
properties_facade -- cluster property configuration and metadata
"""
if properties_facade.properties:
return nvset_dto_to_lines(
properties_facade.properties[0],
nvset_label="Cluster Properties",
)
return []
def properties_to_text_with_default_mark(
properties_facade: PropertyConfigurationFacade,
property_names: Optional[StringSequence] = None,
) -> list[str]:
"""
Return text format of configured properties or property default values.
    If a property's default value is missing, the property is not displayed
    at all. If property_names is specified, then only properties from the
    list are displayed.
properties_facade -- cluster property configuration and metadata
property_names -- properties to be displayed
"""
    id_part = (
        f" {properties_facade.properties[0].id}"
        if properties_facade.properties
        else ""
    )
    lines: list[str] = [f"Cluster Properties:{id_part}"]
tuple_list = [
item
for item in properties_facade.get_name_value_default_list()
if not property_names or item[0] in property_names
]
lines.extend(indent(format_name_value_default_list(sorted(tuple_list))))
return lines
def properties_to_cmd(
properties_facade: PropertyConfigurationFacade,
) -> list[str]:
"""
Convert configured properties to the `pcs property set` command.
properties_facade -- cluster property configuration and metadata
"""
if properties_facade.properties and properties_facade.properties[0].nvpairs:
options = [
quote("=".join([nvpair.name, nvpair.value]))
for nvpair in properties_facade.properties[0].nvpairs
if nvpair.name not in properties_facade.readonly_properties
]
if options:
return ["pcs property set --force --"] + indent(options)
return []
def properties_defaults_to_text(property_dict: dict[str, str]) -> list[str]:
"""
Convert property default values to lines of text.
property_dict -- name to default value map
"""
return format_name_value_list(sorted(property_dict.items()))
def cluster_property_metadata_to_text(
metadata: Sequence[ResourceAgentParameterDto],
) -> list[str]:
"""
Convert cluster property metadata to lines of description text.
Output example:
property-name
Description: <longdesc or shortdesc>
Type: <type> / Allowed values: <enum values>
Default: <default value>
metadata - list of ResourceAgentParameterDto which is used for cluster
property metadata
"""
text: list[str] = []
for parameter_dto in metadata:
text.extend(resource_agent_parameter_metadata_to_text(parameter_dto))
return text
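# Illustrative sketch (not part of pcs): properties_defaults_to_text renders a
# name-to-default map as sorted lines via format_name_value_list. The property
# names below are hypothetical examples.
if __name__ == "__main__":
    for line in properties_defaults_to_text(
        {"stonith-enabled": "true", "batch-limit": "0"}
    ):
        print(line)  # expected: batch-limit=0, then stonith-enabled=true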
| null |
1,575 |
#/*##########################################################################
# Copyright (C) 2004-2022 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#############################################################################*/
__author__ = "E. Papillon, V.A. Sole - ESRF"
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
import logging
from PyMca5.PyMcaGui import PyMcaQt as qt
QTVERSION = qt.qVersion()
_logger = logging.getLogger(__name__)
class SpecFileMcaTable(qt.QWidget):
sigMcaDeviceSelected = qt.pyqtSignal(object)
def __init__(self, parent=None):
qt.QWidget.__init__(self, parent)
self.l = qt.QVBoxLayout(self)
        self.table = qt.QTableWidget(self)
self.table.setColumnCount(1)
self.table.setRowCount(0)
item = self.table.horizontalHeaderItem(0)
if item is None:
item = qt.QTableWidgetItem("No MCA for the selected scan",
qt.QTableWidgetItem.Type)
self.table.setHorizontalHeaderItem(0,item)
self.table.resizeColumnToContents(0)
self.table.setEditTriggers(qt.QAbstractItemView.NoEditTriggers)
self.table.setSelectionMode(qt.QAbstractItemView.MultiSelection)
self.l.addWidget(self.table)
#self.table.cellActivated[int, int].connect(self._cellActivated)
self.table.cellClicked[int, int].connect(self._cellClicked)
self.table.cellDoubleClicked[int, int].connect(self._cellDoubleClicked)
self.table._hHeader = self.table.horizontalHeader()
self.table._hHeader.sectionClicked[int].connect(self._horizontalHeaderClicked)
self.table._hHeader.menu = qt.QMenu()
self.table._hHeader.menu.addAction('ADD Image')
self.table._hHeader.menu.addAction('REMOVE Image')
self.table._hHeader.menu.addAction('REPLACE Image')
self.table._hHeader.menu.addAction('ADD Stack')
def _horizontalHeaderClicked(self, value):
if value < 0:
return
item = self.table.horizontalHeaderItem(value)
text = str(item.text())
if text.startswith("No MCA for"):
return
action = self.table._hHeader.menu.exec(self.cursor().pos())
if action is None:
return
txt = str(action.text())
ddict = {}
ddict['event'] = 'McaDeviceSelected'
ddict['mca'] = value
ddict['action'] = txt
self.sigMcaDeviceSelected.emit(ddict)
def build(self, info):
if info['NbMca'] > 0:
ncol = int(info['NbMcaDet'])
else:
ncol = 1
nrow = info['NbMca'] // ncol
self.table.setColumnCount(ncol)
self.table.setRowCount(nrow)
if nrow == 0:
item = self.table.horizontalHeaderItem(0)
item.setText("No MCA for the selected scan")
self.table.resizeColumnToContents(0)
return
for c in range(ncol):
text = "Mca %d" % (c+1)
item = self.table.horizontalHeaderItem(c)
if item is None:
item = qt.QTableWidgetItem(text,
qt.QTableWidgetItem.Type)
self.table.setHorizontalHeaderItem(c,item)
else:
item.setText(text)
self.table.resizeColumnToContents(c)
if nrow == 1:
if ncol == 1:
item = self.table.item(0, 0)
if item is None:
item = qt.QTableWidgetItem('',
qt.QTableWidgetItem.Type)
self.table.setItem(0, 0, item)
item.setSelected(True)
def _toggleCell(self, row, col):
item = self.table.item(row, col)
if item is None:
item = qt.QTableWidgetItem('X',
qt.QTableWidgetItem.Type)
self.table.setItem(row, col, item)
return
text = str(item.text())
if text == "X":
item.setText("")
else:
item.setText("X")
def _cellClicked(self, row, col):
_logger.debug("_cellClicked %d %d ", row, col)
item = self.table.item(row, col)
if item is None:
item = qt.QTableWidgetItem('',qt.QTableWidgetItem.Type)
self.table.setItem(row, col, item)
def _cellDoubleClicked(self, row, col):
_logger.debug("_cellDoubleClicked %d %d", (row, col))
#self._toggleCell(row, col)
pass
def getCurrentlySelectedMca(self):
mca = []
for item in self.table.selectedItems():
row = self.table.row(item)
col = self.table.column(item)
mca.append("%d.%d" % (row+1, col+1))
return mca
    def METHOD_NAME(self):
        mca = self.getCurrentlySelectedMca()  # they may not be X marked
        for r in range(self.table.rowCount()):
            for c in range(self.table.columnCount()):
                item = self.table.item(r, c)
                if item is not None:
                    text = str(item.text())
                    if text == "X":
                        new = "%d.%d" % (r+1, c+1)
                        if new not in mca:
                            mca.append(new)
        return mca
def setSelectedMca(self, mcalist):
for r in range(self.table.rowCount()):
for c in range(self.table.columnCount()):
item = self.table.item(r, c)
new = "%d.%d" % (r+1, c+1)
if item is not None:
if new not in mcalist:
item.setText("")
else:
item.setText("X")
else:
if new in mcalist:
self._toggleCell(r, c)
def test():
import sys
from PyMca5.PyMcaCore import SpecFileLayer
app = qt.QApplication([])
tab = SpecFileMcaTable()
d = SpecFileLayer.SpecFileLayer()
if len(sys.argv) > 1:
d.SetSource(sys.argv[1])
else:
d.SetSource('03novs060sum.mca')
info, data = d.LoadSource('1.1')
tab.build(info)
tab.setSelectedMca(["1.1"])
tab.show()
app.exec()
if __name__ == "__main__":
test()
| null |
1,576 |
import ast
from typing import Any, Dict, List, Optional, Tuple
from boa3.internal import constants
from boa3.internal.model.builtin.method.builtinmethod import IBuiltinMethod
from boa3.internal.model.expression import IExpression
from boa3.internal.model.type.collection.sequence.uint160type import UInt160Type
from boa3.internal.model.type.itype import IType
from boa3.internal.model.variable import Variable
from boa3.internal.neo.vm.opcode.Opcode import Opcode
class UInt160Method(IBuiltinMethod):
def __init__(self, return_type: UInt160Type, argument_type: IType = None):
from boa3.internal.model.type.type import Type
if argument_type is None or not self.validate_parameters(argument_type):
argument_type = Type.none
identifier = 'UInt160'
args: Dict[str, Variable] = {'object': Variable(argument_type)}
args_default = ast.parse("{0}".format(Type.int.default_value)
).body[0].value
super().__init__(identifier, args, [args_default], return_type=return_type)
@property
def _arg_object(self) -> Variable:
return self.args['object']
@property
def identifier(self) -> str:
from boa3.internal.model.type.type import Type
if self._arg_object.type is Type.none:
return self._identifier
return '-{0}_from_{1}'.format(self._identifier, self._arg_object.type._identifier)
def validate_parameters(self, *params: IExpression) -> bool:
if len(params) > 1:
return False
if len(params) == 0:
return True
from boa3.internal.model.type.itype import IType
if not isinstance(params[0], (IExpression, IType)):
return False
param_type: IType = params[0].type if isinstance(params[0], IExpression) else params[0]
from boa3.internal.model.type.type import Type
return (Type.bytes.is_type_of(param_type)
or Type.int.is_type_of(param_type))
def evaluate_literal(self, *args: Any) -> Any:
from boa3.internal.neo3.core.types import UInt160
if len(args) == 0:
return UInt160.zero().to_array()
if len(args) == 1:
arg = args[0]
if isinstance(arg, int):
from boa3.internal.neo.vm.type.Integer import Integer
arg = Integer(arg).to_byte_array(min_length=UInt160._BYTE_LEN)
if isinstance(arg, bytes):
value = UInt160(arg).to_array()
return value
return super().evaluate_literal(*args)
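    # For illustration: evaluate_literal(1) pads the int little-endian to the
    # 20-byte UInt160 width, i.e. b'\x01' + b'\x00' * 19 (assuming Neo's
    # little-endian byte order).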
@property
def _opcode(self) -> List[Tuple[Opcode, bytes]]:
from boa3.internal.neo.vm.type.Integer import Integer
from boa3.internal.model.type.type import Type
from boa3.internal.neo.vm.type.StackItem import StackItemType
return [
(Opcode.DUP, b''),
(Opcode.ISTYPE, Type.int.stack_item), # if istype(arg, int):
(Opcode.JMPIFNOT, Integer(46).to_byte_array(signed=True)),
(Opcode.DUP, b''), # assert num >= 0
(Opcode.PUSH0, b''),
(Opcode.GE, b''),
(Opcode.ASSERT, b''),
(Opcode.DUP, b''), # if len(num) < 20
(Opcode.SIZE, b''), # increase number's length to 20
(Opcode.PUSHINT8, Integer(constants.SIZE_OF_INT160).to_byte_array(signed=True)),
(Opcode.OVER, b''),
(Opcode.OVER, b''),
(Opcode.JMPGE, Integer(30).to_byte_array(signed=True)),
(Opcode.PUSHDATA1, (Integer(constants.SIZE_OF_INT160).to_byte_array(signed=True)
+ bytes(constants.SIZE_OF_INT160))),
(Opcode.REVERSE3, b''),
(Opcode.SUB, b''),
(Opcode.LEFT, b''),
(Opcode.CAT, b''),
(Opcode.JMP, Integer(4).to_byte_array()),
(Opcode.DROP, b''),
(Opcode.DROP, b''),
(Opcode.CONVERT, StackItemType.ByteString), # convert to uint160
(Opcode.DUP, b''),
(Opcode.SIZE, b''),
(Opcode.PUSHINT8, Integer(constants.SIZE_OF_INT160).to_byte_array(signed=True)),
(Opcode.NUMEQUAL, b''),
(Opcode.ASSERT, b''),
]
@property
def METHOD_NAME(self) -> int:
return len(self.args)
@property
def _body(self) -> Optional[str]:
return
def build(self, value: Any) -> IBuiltinMethod:
if type(value) == type(self._arg_object.type):
return self
if isinstance(value, list):
value = value[0] if len(value) > 0 else None
if self.validate_parameters(value):
return UInt160Method(self.return_type, value)
return super().build(value)
| null |
1,577 |
# Copyright 2017-2023 Posit Software, PBC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import yaml
def encode_yaml(val, default_flow_style=False, strict=False):
"""Returns val encoded as YAML.
Uses PyYAML `safe_dump` to serialize `val`. `default_flow_style`
is passed through to `safe_dump`.
    `strict` patches PyYAML to comply with the YAML standard for the
    single characters 'y' and 'n', which need to be quoted to retain
    their string type under strict YAML. This is for compatibility
    outside PyYAML.
"""
with StrictPatch(strict):
encoded = yaml.safe_dump(
val,
default_flow_style=default_flow_style,
indent=2,
)
return METHOD_NAME(encoded)
def METHOD_NAME(encoded):
stripped = encoded.strip()
if stripped.endswith("\n..."):
stripped = stripped[:-4]
return stripped
def decode_yaml(s):
try:
return yaml.safe_load(s)
except yaml.scanner.ScannerError as e:
raise ValueError(e) from e
def yaml_front_matter(filename):
fm_s = _yaml_front_matter_s(filename)
if not fm_s:
return {}
return yaml.safe_load(fm_s)
def _yaml_front_matter_s(filename):
lines = []
reading = False
with open(filename) as f:
for line in f:
trimmed = line.rstrip()
if not trimmed.lstrip():
continue
if trimmed == "---":
if reading:
break
reading = True
elif reading:
lines.append(trimmed)
else:
break
return "\n".join(lines) if lines else None
class StrictPatch:
"""Patches `yaml` to strictly adhere to the YAML spec*.
    May be used as a no-op with `StrictPatch(False)`.
* This patch makes no guarantee of strict correctness but rather
fixes known issues with PyYAML:
- Encoding/decoding of single char boolean chars `[yYnN]`
"""
implicit_resolver_patches = [
(
"tag:yaml.org,2002:bool",
re.compile(r"^(?:y|Y|n|N)$", re.X),
list('yYnN'),
)
]
bool_value_patches = {
"y": True,
"n": False,
}
def __init__(self, strict=True):
self.strict = strict
def __enter__(self):
if not self.strict:
return
self._apply_implicit_resolver_patches()
self._apply_bool_value_patches()
def _apply_implicit_resolver_patches(self):
for tag, pattern, first in self.implicit_resolver_patches:
yaml.resolver.Resolver.add_implicit_resolver(tag, pattern, first)
def _apply_bool_value_patches(self):
for key, val in self.bool_value_patches.items():
assert key not in yaml.constructor.SafeConstructor.bool_values, key
yaml.constructor.SafeConstructor.bool_values[key] = val
def __exit__(self, *_exc):
if not self.strict:
return
self._unapply_implicit_resolver_patches()
self._unapply_bool_value_patches()
def _unapply_implicit_resolver_patches(self):
for tag, pattern, first in self.implicit_resolver_patches:
for ch in first:
resolvers = yaml.resolver.Resolver.yaml_implicit_resolvers.get(ch)
assert resolvers
assert resolvers[-1] == (tag, pattern), (resolvers, tag, pattern)
resolvers.pop()
def _unapply_bool_value_patches(self):
for key in self.bool_value_patches:
del yaml.constructor.SafeConstructor.bool_values[key]
def patch_yaml_resolver():
"""Patch yaml parsing to support Guild specific resolution rules.
- Make '+' or '-' optional in scientific notation
- Make use of decimal '.' optional in scientific notation
This patch replaces the default 'tag:yaml.org,2002:float' resolver
with an augmented set of regex patterns. Refer to
`yaml/resolver.py` for the original patterns.
"""
yaml.resolver.Resolver.add_implicit_resolver(
"tag:yaml.org,2002:float",
# The patterns below are modified from the original set in two
# ways: the first pattern makes `[-+]` optional and the second
# is a new pattern to match scientific notation that
# does not include a decimal (e.g. `1e2`).
re.compile(
r"""^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
|[-+]?\.(?:inf|Inf|INF)
|\.(?:nan|NaN|NAN))$""",
re.X,
),
list("-+0123456789."),
)
if os.getenv("NO_PATCH_YAML") != "1":
patch_yaml_resolver()
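# Illustrative sketch (not part of Guild): with the resolver patch applied at
# import above, scientific notation without a sign or decimal parses as a
# float, and StrictPatch forces quoting of a bare 'y'. Expected values are
# noted in the comments.
if __name__ == "__main__":
    print(decode_yaml("1e2"))             # expected: 100.0 (str '1e2' unpatched)
    print(encode_yaml("y", strict=True))  # expected: 'y' (quoted string)
    print(encode_yaml("y"))               # expected: y (PyYAML default)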
| null |
1,578 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbr.endpoint import endpoint_data
class CreateHanaRestoreRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hbr', '2017-09-08', 'CreateHanaRestore')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SidAdmin(self): # String
return self.get_query_params().get('SidAdmin')
def set_SidAdmin(self, SidAdmin): # String
self.add_query_param('SidAdmin', SidAdmin)
def get_RecoveryPointInTime(self): # Long
return self.get_query_params().get('RecoveryPointInTime')
def set_RecoveryPointInTime(self, RecoveryPointInTime): # Long
self.add_query_param('RecoveryPointInTime', RecoveryPointInTime)
def get_LogPosition(self): # Long
return self.get_query_params().get('LogPosition')
def set_LogPosition(self, LogPosition): # Long
self.add_query_param('LogPosition', LogPosition)
def get_Source(self): # String
return self.get_query_params().get('Source')
def set_Source(self, Source): # String
self.add_query_param('Source', Source)
def get_ClearLog(self): # Boolean
return self.get_query_params().get('ClearLog')
def set_ClearLog(self, ClearLog): # Boolean
self.add_query_param('ClearLog', ClearLog)
def get_Mode(self): # String
return self.get_query_params().get('Mode')
def set_Mode(self, Mode): # String
self.add_query_param('Mode', Mode)
def METHOD_NAME(self): # Boolean
return self.get_query_params().get('CheckAccess')
def set_CheckAccess(self, CheckAccess): # Boolean
self.add_query_param('CheckAccess', CheckAccess)
def get_BackupId(self): # Long
return self.get_query_params().get('BackupId')
def set_BackupId(self, BackupId): # Long
self.add_query_param('BackupId', BackupId)
def get_ClusterId(self): # String
return self.get_query_params().get('ClusterId')
def set_ClusterId(self, ClusterId): # String
self.add_query_param('ClusterId', ClusterId)
def get_UseDelta(self): # Boolean
return self.get_query_params().get('UseDelta')
def set_UseDelta(self, UseDelta): # Boolean
self.add_query_param('UseDelta', UseDelta)
def get_UseCatalog(self): # Boolean
return self.get_query_params().get('UseCatalog')
def set_UseCatalog(self, UseCatalog): # Boolean
self.add_query_param('UseCatalog', UseCatalog)
def get_BackupPrefix(self): # String
return self.get_query_params().get('BackupPrefix')
def set_BackupPrefix(self, BackupPrefix): # String
self.add_query_param('BackupPrefix', BackupPrefix)
def get_DatabaseName(self): # String
return self.get_query_params().get('DatabaseName')
def set_DatabaseName(self, DatabaseName): # String
self.add_query_param('DatabaseName', DatabaseName)
def get_VolumeId(self): # Integer
return self.get_query_params().get('VolumeId')
def set_VolumeId(self, VolumeId): # Integer
self.add_query_param('VolumeId', VolumeId)
def get_SourceClusterId(self): # String
return self.get_query_params().get('SourceClusterId')
def set_SourceClusterId(self, SourceClusterId): # String
self.add_query_param('SourceClusterId', SourceClusterId)
def get_SystemCopy(self): # Boolean
return self.get_query_params().get('SystemCopy')
def set_SystemCopy(self, SystemCopy): # Boolean
self.add_query_param('SystemCopy', SystemCopy)
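# Illustrative sketch (not part of this generated file): a CreateHanaRestore
# request is dispatched through an aliyunsdkcore AcsClient. The credentials,
# region, and IDs below are placeholders.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient
    client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    request = CreateHanaRestoreRequest()
    request.set_ClusterId('<cluster-id>')
    request.set_DatabaseName('SYSTEMDB')
    # do_action_with_exception sends the request and raises on API errors
    print(client.do_action_with_exception(request))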
| null |
1,579 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import argparse
import sys
from itertools import combinations
from skupper_router_internal.tools.command import (main,
UsageError,
parse_args_skstat,
_skmanage_parser,
_skstat_parser)
from system_test import unittest
def mock_error(self, message):
raise ValueError(message)
argparse.ArgumentParser.error = mock_error # type: ignore[assignment] # Cannot assign to a method
# Since BusManager is defined in tools/skmanage.in -> tools/skmanage,
# it cannot simply be imported here.
class FakeBusManager:
def displayGeneral(self): pass
def displayConnections(self): pass
def displayRouterLinks(self): pass
def displayRouterNodes(self): pass
def displayEdges(self): pass
def displayAddresses(self): pass
def displayMemory(self): pass
def displayPolicy(self): pass
def displayVhosts(self): pass
def displayVhostgroups(self): pass
def displayVhoststats(self): pass
def displayAutolinks(self): pass
def displayLog(self): pass
def show_all(self): pass
FBM = FakeBusManager
class TestParseArgsSkstat(unittest.TestCase):
def setUp(self):
super().setUp()
self.parser = _skstat_parser(BusManager=FBM)
def test_parse_args_skstat_print_help(self):
self.parser.print_help()
def METHOD_NAME(self):
options1 = ["-g", "-c",
"-l", "-n", "-e", "-a", "-m", "--autolinks", "--log",
"--all-entities"]
options2 = ["-r", "--all-routers"]
def _call_pairs(options):
for options_pair in combinations(options, 2):
with self.assertRaises(ValueError):
self.parser.parse_args(options_pair)
_call_pairs(options1)
_call_pairs(options2)
def test_parse_args_skstat_default(self):
args = parse_args_skstat(FBM, argv=[])
self.assertEqual(FBM.displayGeneral.__name__, args.show)
def test_parse_args_skstat_method_show_matching(self):
matching = [("-g", FBM.displayGeneral.__name__),
("-c", FBM.displayConnections.__name__),
("-l", FBM.displayRouterLinks.__name__),
("-n", FBM.displayRouterNodes.__name__),
("-e", FBM.displayEdges.__name__),
("-a", FBM.displayAddresses.__name__),
("-m", FBM.displayMemory.__name__),
("--autolinks", FBM.displayAutolinks.__name__),
("--log", FBM.displayLog.__name__),
("--all-entities", FBM.show_all.__name__),
]
for option, expected in matching:
args = self.parser.parse_args([option])
self.assertEqual(expected, args.show)
def test_parse_args_skstat_limit(self):
args = self.parser.parse_args([])
self.assertEqual(None, args.limit)
args = self.parser.parse_args(["--limit", "1"])
self.assertEqual(1, args.limit)
class TestParseArgsSkmanage(unittest.TestCase):
def setUp(self):
super().setUp()
self.operations = ["HERE", "SOME", "OPERATIONS"]
self.parser = _skmanage_parser(operations=self.operations)
def test_parse_args_skmanage_print_help(self):
self.parser.print_help()
def test_parse_args_skmanage_operation_no_args(self):
argv = "-r r1 QUERY --type some --name the_name -b 127.0.0.1:5672"
opts, args = self.parser.parse_known_args(argv.split())
self.assertEqual("QUERY", args[0])
def test_parse_args_skmanage_operation_and_args(self):
argv = "-r r1 QUERY arg1=val1 --type some other=argument --name the_name -b 127.0.0.1:5672"
opts, args = self.parser.parse_known_args(argv.split())
self.assertEqual(["QUERY", "arg1=val1", "other=argument"], args)
class TestMain(unittest.TestCase):
def test_main(self):
def run_success(argv):
self.assertEqual(sys.argv, argv)
def run_raises(argv, _Exception):
run_success(argv)
raise _Exception("some")
def run_raises_UsageError(argv):
run_raises(argv, UsageError)
def run_raises_Exception(argv):
run_raises(argv, Exception)
def run_raises_KeyboardInterrupt(argv):
run_raises(argv, KeyboardInterrupt)
self.assertEqual(0, main(run_success))
failed_runs = [
            # run_raises_UsageError,  # uncommenting this exposes a bug
run_raises_Exception,
run_raises_KeyboardInterrupt,
]
for run in failed_runs:
self.assertEqual(1, main(run))
if __name__ == '__main__':
unittest.main()
| null |
1,580 |
# Drakkar-Software OctoBot-Tentacles
# Copyright (c) Drakkar-Software, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import asyncio
import time
import asyncprawcore.exceptions
import logging
import octobot_commons.constants as commons_constants
import octobot_services.channel as services_channel
import octobot_services.constants as services_constants
import octobot_services.service_feeds as service_feeds
import tentacles.Services.Services_bases as Services_bases
class RedditServiceFeedChannel(services_channel.AbstractServiceFeedChannel):
pass
class RedditServiceFeed(service_feeds.AbstractServiceFeed):
FEED_CHANNEL = RedditServiceFeedChannel
REQUIRED_SERVICES = [Services_bases.RedditService]
MAX_CONNECTION_ATTEMPTS = 10
def __init__(self, config, main_async_loop, bot_id):
service_feeds.AbstractServiceFeed.__init__(self, config, main_async_loop, bot_id)
self.subreddits = None
self.counter = 0
self.connect_attempts = 0
self.credentials_ok = False
self.listener_task = None
# merge new config into existing config
def METHOD_NAME(self, config):
if services_constants.CONFIG_REDDIT_SUBREDDITS in self.feed_config:
self.feed_config[services_constants.CONFIG_REDDIT_SUBREDDITS] = {
**self.feed_config[services_constants.CONFIG_REDDIT_SUBREDDITS],
**config[services_constants.CONFIG_REDDIT_SUBREDDITS]}
else:
self.feed_config[services_constants.CONFIG_REDDIT_SUBREDDITS] = config[
services_constants.CONFIG_REDDIT_SUBREDDITS]
def _init_subreddits(self):
self.subreddits = ""
for symbol in self.feed_config[services_constants.CONFIG_REDDIT_SUBREDDITS]:
for subreddit in self.feed_config[services_constants.CONFIG_REDDIT_SUBREDDITS][symbol]:
if subreddit not in self.subreddits:
if self.subreddits:
self.subreddits = self.subreddits + "+" + subreddit
else:
self.subreddits = self.subreddits + subreddit
def _initialize(self):
if not self.subreddits:
self._init_subreddits()
def _something_to_watch(self):
return services_constants.CONFIG_REDDIT_SUBREDDITS in self.feed_config and self.feed_config[
services_constants.CONFIG_REDDIT_SUBREDDITS]
@staticmethod
def _get_entry_weight(entry_age):
if entry_age > 0:
# entry in history => weight proportional to entry's age
# last 12 hours: weight = 4
# last 2 days: weight = 3
# last 7 days: weight = 2
# older: weight = 1
if entry_age / commons_constants.HOURS_TO_SECONDS <= 12:
return 4
elif entry_age / commons_constants.DAYS_TO_SECONDS <= 2:
return 3
elif entry_age / commons_constants.DAYS_TO_SECONDS <= 7:
return 2
else:
return 1
# new entry => max weight
return 5
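    # For illustration, with the tiers above: an entry 6 hours old weighs 4,
    # one 3 days old weighs 2, and a brand-new entry (age <= 0) weighs 5.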
async def _start_listener(self):
# avoid debug log at each asyncprawcore fetch
logging.getLogger("asyncprawcore").setLevel(logging.WARNING)
subreddit = await self.services[0].get_endpoint().subreddit(self.subreddits)
start_time = time.time()
async for entry in subreddit.stream.submissions():
self.credentials_ok = True
self.connect_attempts = 0
self.counter += 1
            # check if we are in the 100-entry history or if it's a new entry (new posts are more valuable)
            # the older the entry is, the less weight it gets
entry_age_when_feed_started_in_sec = start_time - entry.created_utc
entry_weight = self._get_entry_weight(entry_age_when_feed_started_in_sec)
await self._async_notify_consumers(
{
services_constants.FEED_METADATA: entry.subreddit.display_name.lower(),
services_constants.CONFIG_REDDIT_ENTRY: entry,
services_constants.CONFIG_REDDIT_ENTRY_WEIGHT: entry_weight
}
)
async def _start_listener_task(self):
while not self.should_stop and self.connect_attempts < self.MAX_CONNECTION_ATTEMPTS:
try:
await self._start_listener()
except asyncprawcore.exceptions.RequestException:
                # probably a connection loss, try again
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except asyncprawcore.exceptions.InvalidToken as e:
# expired, try again
self.logger.exception(e, True, f"Error when receiving Reddit feed: '{e}'")
self.logger.info(f"Try to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds.")
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except asyncprawcore.exceptions.ServerError as e:
# server error, try again
self.logger.exception(e, True, "Error when receiving Reddit feed: '{e}'")
self.logger.info(f"Try to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds.")
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except asyncprawcore.exceptions.OAuthException as e:
self.logger.exception(e, True, f"Error when receiving Reddit feed: '{e}' this may mean that reddit "
f"login info in config.json are wrong")
self.keep_running = False
self.should_stop = True
except asyncprawcore.exceptions.ResponseException as e:
message_complement = "this may mean that reddit login info in config.json are invalid." \
if not self.credentials_ok else \
f"Try to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds."
self.logger.exception(e, True,
f"Error when receiving Reddit feed: '{e}' this may mean {message_complement}")
if not self.credentials_ok:
self.connect_attempts += 1
else:
self.connect_attempts += 0.1
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except Exception as e:
self.logger.error(f"Error when receiving Reddit feed: '{e}'")
self.logger.exception(e, True, f"Error when receiving Reddit feed: '{e}'")
self.keep_running = False
self.should_stop = True
return False
async def _start_service_feed(self):
self.listener_task = asyncio.create_task(self._start_listener_task())
return True
async def stop(self):
await super().stop()
if self.listener_task is not None:
self.listener_task.cancel()
self.listener_task = None
| null |
1,581 |
# Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.
from NeoVintageous.tests import unittest
class Test_ctrl_u(unittest.FunctionalTestCase):
@unittest.mock_ui()
def test_ctrl_u(self):
self.eq('1\n2\n3\n4\n5\n|6', 'n_<C-u>', '1\n2\n|3\n4\n5\n6')
self.eq('1\n2\n3\n4\n|5\n6', 'n_<C-u>', '1\n|2\n3\n4\n5\n6')
self.eq('1\n2\n3\n|4\n5\n6', 'n_<C-u>', '|1\n2\n3\n4\n5\n6')
self.eq('1\n2\n|3\n4\n5\n6', 'n_<C-u>', '|1\n2\n3\n4\n5\n6')
self.eq('1\n|2\n3\n4\n5\n6', 'n_<C-u>', '|1\n2\n3\n4\n5\n6')
self.eq('|1\n2\n3\n4\n5\n6', 'n_<C-u>', '|1\n2\n3\n4\n5\n6')
self.eq('|1\n2\n3\n4\n5\n6', 'n_<C-u>', '|1\n2\n3\n4\n5\n6')
self.eq('1\n2\n 3\n4\n5\n6|', 'n_<C-u>', '1\n2\n |3\n4\n5\n6')
self.eq('\n1\n|2\n3\n4\n5\n6', 'n_<C-u>', '|\n1\n2\n3\n4\n5\n6')
self.eq('\n\n1\n|2\n3\n4\n5\n6', 'n_<C-u>', '|\n\n1\n2\n3\n4\n5\n6')
self.eq('\n\n\n1\n|2\n3\n4\n5\n6', 'n_<C-u>', '|\n\n\n1\n2\n3\n4\n5\n6')
def test_i_ctrl_u(self):
self.eq('fiz |buz', 'i_<C-u>', '|buz')
self.eq('fiz bu|zz', 'i_<C-u>', '|zz')
self.eq('fiz bu|zz\nx\nx', 'i_<C-u>', '|zz\nx\nx')
@unittest.mock_ui()
def test_ctrl_u_already_at_eof_invokes_bell_and_does_not_scroll(self):
self.eq('fi|zz\n2\n3\n4', 'n_<C-u>', 'fi|zz\n2\n3\n4')
self.assertBell()
@unittest.mock_ui()
def test_ctrl_u_on_empty_view_invokes_bell(self):
self.eq('|', 'n_<C-u>', '|')
self.assertBell()
@unittest.mock_ui(screen_rows=12)
def test_ctrl_u_uses_screen_size_to_calculate_number_of_lines_to_scroll(self):
self.eq('1\n2\n3\n4\n5\n6\n7\n8\n|9', 'n_<C-u>', '1\n2\n|3\n4\n5\n6\n7\n8\n9')
@unittest.mock_ui(screen_rows=20)
def test_ctrl_u_should_use_count_as_number_of_lines_to_scroll(self):
self.normal(('\n' * 10) + '|')
self.feed('n_3<C-u>')
self.assertSelection(7)
self.feed('n_4<C-u>')
self.assertSelection(3)
self.feed('n_1<C-u>')
self.assertSelection(2)
self.feed('n_5<C-u>')
self.assertSelection(0)
self.assertNoBell()
@unittest.mock_ui()
def METHOD_NAME(self):
self.eq('r_1\n2\n3xy\n4\n5\na|b|c', 'v_<C-u>', 'r_1\n2\n|3xy\n4\n5\nab|c')
self.eq('r_1\n2\n3xy\n4\n5\n|a|bc', 'v_<C-u>', 'r_1\n2\n|3xy\n4\n5\na|bc')
self.eq('r_1\n2\n3xy\n4\n5\nab|c|', 'v_<C-u>', 'r_1\n2\n|3xy\n4\n5\nabc|')
self.eq('r_1\n2\n3xy\n4\n5\n|ab|c', 'v_<C-u>', 'r_1\n2\n|3xy\n4\n5\nab|c')
self.eq('r_1\n2xy\n3xy\n4\n|5\nab|c', 'v_<C-u>', 'r_1\n|2xy\n3xy\n4\n5\nab|c')
self.eq('r_1\n2xy\n3xy\n4\n5x|y\nab|c', 'v_<C-u>', 'r_1\n|2xy\n3xy\n4\n5xy\nab|c')
self.eq('r_1xy\n2\n3\n4|x\n5\nab|c', 'v_<C-u>', 'r_|1xy\n2\n3\n4x\n5\nab|c')
self.eq('r_1xy\n2\n3\n4|x\n5x|y\n6', 'v_<C-u>', 'r_|1xy\n2\n3\n4x\n5x|y\n6')
self.eq('r_1\n2\n 3xy\n4\n5\na|b|c', 'v_<C-u>', 'r_1\n2\n |3xy\n4\n5\nab|c')
self.eq('1\n2\n3x\n4\n5xy\nf|iz|z', 'v_<C-u>', 'r_1\n2\n|3x\n4\n5xy\nfi|zz')
self.eq('1\n2\n3x\n4\n5|xy\n6|xy', 'v_<C-u>', 'r_1\n2\n|3x\n4\n5x|y\n6xy')
self.eq('1\n2\n3x\n|4xy\n5\n6x|y', 'v_<C-u>', 'r_1\n2\n|3x\n4|xy\n5\n6xy')
self.eq('1\n2x\n3\n|4xy\n5x|y\n6', 'v_<C-u>', 'r_1\n|2x\n3\n4|xy\n5xy\n6')
self.eq('|1x\n2x\n3x\n4x\n5x\n6|x', 'v_<C-u>', '|1x\n2x\n3|x\n4x\n5x\n6x')
self.eq('1\n2\n|3x\n4\n5\n6x|y', 'v_<C-u>', '1\n2\n|3|x\n4\n5\n6xy')
self.eq('1\n|2\n3\n4\n5|\n6', 'v_<C-u>', '1\n|2|\n3\n4\n5\n6')
self.eq('1\n2x\n3\n4\nf|iz|z\n6', 'v_<C-u>', 'r_1\n|2x\n3\n4\nfi|zz\n6')
@unittest.mock_ui()
def test_v_ctrl_u_invokes_bell_when_at_bof(self):
self.eq('r_fi|zz\n2\n3\na|bc', 'v_<C-u>', 'r_fi|zz\n2\n3\na|bc')
self.assertBell()
@unittest.mock_ui()
def test_V_ctrl_u_invokes_bell_when_at_sof(self):
self.eq('r_|1\n2\n3|', 'V_<C-u>', 'r_|1\n2\n3|')
self.assertBell()
@unittest.mock_ui()
def test_V_ctrl_u(self):
self.eq('r_1x\n2x\n3x\n4x\n5x\n|6x|', 'V_<C-u>', 'r_1x\n2x\n|3x\n4x\n5x\n6x|')
self.eq('r_1x\n2x\n3x\n4x\n5x\n|6x\n|', 'V_<C-u>', 'r_1x\n2x\n|3x\n4x\n5x\n6x\n|')
self.eq('r_1x\n2x\n3x\n4x\n|5x\n6x|', 'V_<C-u>', 'r_1x\n|2x\n3x\n4x\n5x\n6x|')
self.eq('r_1x\n2x\n3x\n4x\n|5x\n|6x', 'V_<C-u>', 'r_1x\n|2x\n3x\n4x\n5x\n|6x')
self.eq('r_1x\n2x\n3x\n|4x\n5x\n6x|', 'V_<C-u>', 'r_|1x\n2x\n3x\n4x\n5x\n6x|')
self.eq('r_1x\n2x\n3x\n|4x\n5x\n|6x', 'V_<C-u>', 'r_|1x\n2x\n3x\n4x\n5x\n|6x')
self.eq('r_1x\n2x\n|3x\n4x\n5x\n6x|', 'V_<C-u>', 'r_|1x\n2x\n3x\n4x\n5x\n6x|')
self.eq('r_\n\n2x\n|3x\n4x\n5x\n6x|', 'V_<C-u>', 'r_|\n\n2x\n3x\n4x\n5x\n6x|')
self.eq('r_\n\n\n2x\n|3x\n4x\n5x\n6x|', 'V_<C-u>', 'r_|\n\n\n2x\n3x\n4x\n5x\n6x|')
self.eq('1x\n2x\n3x\n4x\n5x\n|6x|', 'V_<C-u>', 'r_1x\n2x\n|3x\n4x\n5x\n6x|')
self.eq('1x\n2x\n3x\n4x\n|5x\n6x|', 'V_<C-u>', 'r_1x\n2x\n|3x\n4x\n5x\n|6x')
self.eq('1x\n2x\n3x\n|4x\n5x\n6x|', 'V_<C-u>', 'r_1x\n2x\n|3x\n4x\n|5x\n6x')
self.eq('1x\n2x\n3x\n|4x\n5x\n|6x', 'V_<C-u>', 'r_1x\n|2x\n3x\n4x\n|5x\n6x')
self.eq('1x\n2x\n|3x\n4x\n|5x\n6x', 'V_<C-u>', 'r_|1x\n2x\n3x\n|4x\n5x\n6x')
self.eq('1x\n2x\n|3x\n4x\n5x\n6x|', 'V_<C-u>', 'r_1x\n2x\n|3x\n|4x\n5x\n6x')
self.eq('1x\n|2x\n3x\n|4x\n5x\n6x', 'V_<C-u>', 'r_|1x\n2x\n|3x\n4x\n5x\n6x')
self.eq('|1x\n2x\n3x\n4x\n5x\n6x|', 'V_<C-u>', '|1x\n2x\n3x\n|4x\n5x\n6x')
@unittest.mock_ui()
def test_d(self):
self.eq('1\n2\n3\n4\n5\n|6', 'd<C-u>', '1\n2\n|6')
self.eq('1\n2\n3\n4\n|5\n6', 'd<C-u>', '1\n|5\n6')
| null |
1,582 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkehpc.endpoint import endpoint_data
class EditJobTemplateRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'EHPC', '2018-04-12', 'EditJobTemplate')
self.set_method('GET')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_StderrRedirectPath(self): # String
return self.get_query_params().get('StderrRedirectPath')
def set_StderrRedirectPath(self, StderrRedirectPath): # String
self.add_query_param('StderrRedirectPath', StderrRedirectPath)
def get_ClockTime(self): # String
return self.get_query_params().get('ClockTime')
def set_ClockTime(self, ClockTime): # String
self.add_query_param('ClockTime', ClockTime)
def get_CommandLine(self): # String
return self.get_query_params().get('CommandLine')
def set_CommandLine(self, CommandLine): # String
self.add_query_param('CommandLine', CommandLine)
def get_ArrayRequest(self): # String
return self.get_query_params().get('ArrayRequest')
def set_ArrayRequest(self, ArrayRequest): # String
self.add_query_param('ArrayRequest', ArrayRequest)
def get_UnzipCmd(self): # String
return self.get_query_params().get('UnzipCmd')
def set_UnzipCmd(self, UnzipCmd): # String
self.add_query_param('UnzipCmd', UnzipCmd)
def get_PackagePath(self): # String
return self.get_query_params().get('PackagePath')
def set_PackagePath(self, PackagePath): # String
self.add_query_param('PackagePath', PackagePath)
def get_Mem(self): # String
return self.get_query_params().get('Mem')
def set_Mem(self, Mem): # String
self.add_query_param('Mem', Mem)
def get_StdoutRedirectPath(self): # String
return self.get_query_params().get('StdoutRedirectPath')
def set_StdoutRedirectPath(self, StdoutRedirectPath): # String
self.add_query_param('StdoutRedirectPath', StdoutRedirectPath)
def get_Variables(self): # String
return self.get_query_params().get('Variables')
def set_Variables(self, Variables): # String
self.add_query_param('Variables', Variables)
def get_RunasUser(self): # String
return self.get_query_params().get('RunasUser')
def set_RunasUser(self, RunasUser): # String
self.add_query_param('RunasUser', RunasUser)
def get_ReRunable(self): # Boolean
return self.get_query_params().get('ReRunable')
def set_ReRunable(self, ReRunable): # Boolean
self.add_query_param('ReRunable', ReRunable)
def get_Thread(self): # Integer
return self.get_query_params().get('Thread')
def METHOD_NAME(self, Thread): # Integer
self.add_query_param('Thread', Thread)
def get_TemplateId(self): # String
return self.get_query_params().get('TemplateId')
def set_TemplateId(self, TemplateId): # String
self.add_query_param('TemplateId', TemplateId)
def get_Priority(self): # Integer
return self.get_query_params().get('Priority')
def set_Priority(self, Priority): # Integer
self.add_query_param('Priority', Priority)
def get_Gpu(self): # Integer
return self.get_query_params().get('Gpu')
def set_Gpu(self, Gpu): # Integer
self.add_query_param('Gpu', Gpu)
def get_WithUnzipCmd(self): # Boolean
return self.get_query_params().get('WithUnzipCmd')
def set_WithUnzipCmd(self, WithUnzipCmd): # Boolean
self.add_query_param('WithUnzipCmd', WithUnzipCmd)
def get_Node(self): # Integer
return self.get_query_params().get('Node')
def set_Node(self, Node): # Integer
self.add_query_param('Node', Node)
def get_Task(self): # Integer
return self.get_query_params().get('Task')
def set_Task(self, Task): # Integer
self.add_query_param('Task', Task)
def get_InputFileUrl(self): # String
return self.get_query_params().get('InputFileUrl')
def set_InputFileUrl(self, InputFileUrl): # String
self.add_query_param('InputFileUrl', InputFileUrl)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_Queue(self): # String
return self.get_query_params().get('Queue')
def set_Queue(self, Queue): # String
self.add_query_param('Queue', Queue)
| null |
1,583 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class AllocateDedicatedHostsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'AllocateDedicatedHosts','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_CpuOverCommitRatio(self): # Float
return self.get_query_params().get('CpuOverCommitRatio')
def set_CpuOverCommitRatio(self, CpuOverCommitRatio): # Float
self.add_query_param('CpuOverCommitRatio', CpuOverCommitRatio)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_MinQuantity(self): # Integer
return self.get_query_params().get('MinQuantity')
def set_MinQuantity(self, MinQuantity): # Integer
self.add_query_param('MinQuantity', MinQuantity)
def get_ActionOnMaintenance(self): # String
return self.get_query_params().get('ActionOnMaintenance')
def set_ActionOnMaintenance(self, ActionOnMaintenance): # String
self.add_query_param('ActionOnMaintenance', ActionOnMaintenance)
def get_DedicatedHostClusterId(self): # String
return self.get_query_params().get('DedicatedHostClusterId')
def set_DedicatedHostClusterId(self, DedicatedHostClusterId): # String
self.add_query_param('DedicatedHostClusterId', DedicatedHostClusterId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_DedicatedHostType(self): # String
return self.get_query_params().get('DedicatedHostType')
def set_DedicatedHostType(self, DedicatedHostType): # String
self.add_query_param('DedicatedHostType', DedicatedHostType)
def get_AutoRenewPeriod(self): # Integer
return self.get_query_params().get('AutoRenewPeriod')
def set_AutoRenewPeriod(self, AutoRenewPeriod): # Integer
self.add_query_param('AutoRenewPeriod', AutoRenewPeriod)
def get_Period(self): # Integer
return self.get_query_params().get('Period')
def set_Period(self, Period): # Integer
self.add_query_param('Period', Period)
def get_Quantity(self): # Integer
return self.get_query_params().get('Quantity')
def METHOD_NAME(self, Quantity): # Integer
self.add_query_param('Quantity', Quantity)
def get_DedicatedHostName(self): # String
return self.get_query_params().get('DedicatedHostName')
def set_DedicatedHostName(self, DedicatedHostName): # String
self.add_query_param('DedicatedHostName', DedicatedHostName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_AutoReleaseTime(self): # String
return self.get_query_params().get('AutoReleaseTime')
def set_AutoReleaseTime(self, AutoReleaseTime): # String
self.add_query_param('AutoReleaseTime', AutoReleaseTime)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_PeriodUnit(self): # String
return self.get_query_params().get('PeriodUnit')
def set_PeriodUnit(self, PeriodUnit): # String
self.add_query_param('PeriodUnit', PeriodUnit)
def get_AutoRenew(self): # Boolean
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # Boolean
self.add_query_param('AutoRenew', AutoRenew)
def get_NetworkAttributesSlbUdpTimeout(self): # Integer
return self.get_query_params().get('NetworkAttributes.SlbUdpTimeout')
def set_NetworkAttributesSlbUdpTimeout(self, NetworkAttributesSlbUdpTimeout): # Integer
self.add_query_param('NetworkAttributes.SlbUdpTimeout', NetworkAttributesSlbUdpTimeout)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def get_AutoPlacement(self): # String
return self.get_query_params().get('AutoPlacement')
def set_AutoPlacement(self, AutoPlacement): # String
self.add_query_param('AutoPlacement', AutoPlacement)
def get_ChargeType(self): # String
return self.get_query_params().get('ChargeType')
def set_ChargeType(self, ChargeType): # String
self.add_query_param('ChargeType', ChargeType)
def get_NetworkAttributesUdpTimeout(self): # Integer
return self.get_query_params().get('NetworkAttributes.UdpTimeout')
def set_NetworkAttributesUdpTimeout(self, NetworkAttributesUdpTimeout): # Integer
self.add_query_param('NetworkAttributes.UdpTimeout', NetworkAttributesUdpTimeout)
| null |
1,584 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvcs.endpoint import endpoint_data
class UpdateMonitorRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vcs', '2020-05-15', 'UpdateMonitor','vcs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_CorpId(self):
return self.get_body_params().get('CorpId')
def set_CorpId(self,CorpId):
self.add_body_params('CorpId', CorpId)
def get_Description(self):
return self.get_body_params().get('Description')
def set_Description(self,Description):
self.add_body_params('Description', Description)
def get_RuleName(self):
return self.get_body_params().get('RuleName')
def set_RuleName(self,RuleName):
self.add_body_params('RuleName', RuleName)
def get_PicOperateType(self):
return self.get_body_params().get('PicOperateType')
def set_PicOperateType(self,PicOperateType):
self.add_body_params('PicOperateType', PicOperateType)
def get_AttributeName(self):
return self.get_body_params().get('AttributeName')
def set_AttributeName(self,AttributeName):
self.add_body_params('AttributeName', AttributeName)
def get_AttributeOperateType(self):
return self.get_body_params().get('AttributeOperateType')
def set_AttributeOperateType(self,AttributeOperateType):
self.add_body_params('AttributeOperateType', AttributeOperateType)
def get_RuleExpression(self):
return self.get_body_params().get('RuleExpression')
def set_RuleExpression(self,RuleExpression):
self.add_body_params('RuleExpression', RuleExpression)
def get_NotifierTimeOut(self):
return self.get_body_params().get('NotifierTimeOut')
def set_NotifierTimeOut(self,NotifierTimeOut):
self.add_body_params('NotifierTimeOut', NotifierTimeOut)
def get_TaskId(self):
return self.get_body_params().get('TaskId')
def set_TaskId(self,TaskId):
self.add_body_params('TaskId', TaskId)
def get_DeviceOperateType(self):
return self.get_body_params().get('DeviceOperateType')
def set_DeviceOperateType(self,DeviceOperateType):
self.add_body_params('DeviceOperateType', DeviceOperateType)
def get_PicList(self):
return self.get_body_params().get('PicList')
def set_PicList(self,PicList):
self.add_body_params('PicList', PicList)
def get_AttributeValueList(self):
return self.get_body_params().get('AttributeValueList')
def set_AttributeValueList(self,AttributeValueList):
self.add_body_params('AttributeValueList', AttributeValueList)
def METHOD_NAME(self):
return self.get_body_params().get('NotifierAppSecret')
def set_NotifierAppSecret(self,NotifierAppSecret):
self.add_body_params('NotifierAppSecret', NotifierAppSecret)
def get_NotifierExtendValues(self):
return self.get_body_params().get('NotifierExtendValues')
def set_NotifierExtendValues(self,NotifierExtendValues):
self.add_body_params('NotifierExtendValues', NotifierExtendValues)
def get_DeviceList(self):
return self.get_body_params().get('DeviceList')
def set_DeviceList(self,DeviceList):
self.add_body_params('DeviceList', DeviceList)
def get_NotifierUrl(self):
return self.get_body_params().get('NotifierUrl')
def set_NotifierUrl(self,NotifierUrl):
self.add_body_params('NotifierUrl', NotifierUrl)
def get_NotifierType(self):
return self.get_body_params().get('NotifierType')
def set_NotifierType(self,NotifierType):
self.add_body_params('NotifierType', NotifierType)
def get_AlgorithmVendor(self):
return self.get_body_params().get('AlgorithmVendor')
def set_AlgorithmVendor(self,AlgorithmVendor):
		self.add_body_params('AlgorithmVendor', AlgorithmVendor)
| null |
1,585 |
from unittest.mock import AsyncMock, patch
import pytest
import pytest_asyncio
from db.repositories.resources_history import ResourceHistoryRepository
from models.domain.resource import Resource, ResourceHistoryItem, ResourceType
from tests_ma.test_api.test_routes.test_resource_helpers import FAKE_CREATE_TIMESTAMP
from tests_ma.test_api.conftest import create_test_user
HISTORY_ID = "59676d53-5356-45b1-981a-180c0b089839"
RESOURCE_ID = "178c1ffe-de57-495b-b1eb-9bc37d3c5087"
USER_ID = "e5accc9a-3961-4da9-b5ee-1bc8a406388b"
RESOURCE_VERSION = 1
@pytest_asyncio.fixture
async def resource_history_repo():
with patch('db.repositories.base.BaseRepository._get_container', return_value=None):
with patch('azure.cosmos.CosmosClient') as cosmos_client_mock:
resource_history_repo = await ResourceHistoryRepository.create(cosmos_client_mock)
yield resource_history_repo
@pytest.fixture
def METHOD_NAME() -> Resource:
return Resource(
id=RESOURCE_ID,
isEnabled=True,
resourcePath="/resource/path",
templateName="template_name",
templateVersion="template_version",
properties={
'display_name': 'initial display name',
'description': 'initial description',
'computed_prop': 'computed_val'
},
resourceType=ResourceType.Workspace,
etag="some-etag-value",
resourceVersion=RESOURCE_VERSION,
updatedWhen=FAKE_CREATE_TIMESTAMP,
user=create_test_user()
)
@pytest.fixture
def sample_resource_history() -> ResourceHistoryItem:
return ResourceHistoryItem(
id=HISTORY_ID,
resourceId=RESOURCE_ID,
isEnabled=True,
resourceVersion=RESOURCE_VERSION,
templateVersion="template_version",
properties={
'display_name': 'initial display name',
'description': 'initial description',
'computed_prop': 'computed_val'
},
updatedWhen=FAKE_CREATE_TIMESTAMP,
user=create_test_user()
)
@pytest.mark.asyncio
@patch('db.repositories.resources_history.ResourceHistoryRepository.save_item', return_value=AsyncMock())
async def test_create_resource_history_item(mock_save, resource_history_repo, METHOD_NAME):
resource_history = await resource_history_repo.create_resource_history_item(METHOD_NAME)
# Assertions
assert isinstance(resource_history, ResourceHistoryItem)
mock_save.assert_called_once_with(resource_history)
assert resource_history.id is not None
assert resource_history.resourceId == METHOD_NAME.id
assert resource_history.isEnabled is True
assert resource_history.properties == METHOD_NAME.properties
assert resource_history.resourceVersion == METHOD_NAME.resourceVersion
assert resource_history.updatedWhen == METHOD_NAME.updatedWhen
assert resource_history.user == METHOD_NAME.user
assert resource_history.templateVersion == METHOD_NAME.templateVersion
@pytest.mark.asyncio
@patch('db.repositories.resources_history.ResourceHistoryRepository.save_item', side_effect=Exception)
async def test_create_resource_history_item_throws_error_when_saving(mock_save, resource_history_repo, METHOD_NAME):
with pytest.raises(Exception):
resource_history = await resource_history_repo.create_resource_history_item(METHOD_NAME)
assert mock_save.called
assert resource_history.id is not None
assert resource_history.resourceId == METHOD_NAME.id
assert resource_history.isEnabled is True
assert resource_history.properties == METHOD_NAME.properties
assert resource_history.resourceVersion == METHOD_NAME.resourceVersion
assert resource_history.updatedWhen == METHOD_NAME.updatedWhen
assert resource_history.user == METHOD_NAME.user
assert resource_history.templateVersion == METHOD_NAME.templateVersion
@pytest.mark.asyncio
@patch('db.repositories.resources_history.ResourceHistoryRepository.query')
async def test_get_resource_history_by_resource_id_if_found(mock_query, resource_history_repo, sample_resource_history):
mock_query.return_value = [sample_resource_history]
result = await resource_history_repo.get_resource_history_by_resource_id(RESOURCE_ID)
assert result == mock_query.return_value
@pytest.mark.asyncio
@patch('db.repositories.resources_history.ResourceHistoryRepository.query')
async def test_get_resource_history_by_resource_id_if_not_found(mock_query, resource_history_repo):
mock_query.return_value = []
result = await resource_history_repo.get_resource_history_by_resource_id(RESOURCE_ID)
assert result == mock_query.return_value
| null |
1,586 |
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Facebook Low Resource (FLoRes) machine translation benchmark dataset."""
import collections
from etils import epath
import tensorflow_datasets.public_api as tfds
_DESCRIPTION = """\
Evaluation datasets for low-resource machine translation: Nepali-English and Sinhala-English.
"""
_CITATION = """\
@misc{guzmn2019new,
title={Two New Evaluation Datasets for Low-Resource Machine Translation: Nepali-English and Sinhala-English},
author={Francisco Guzman and Peng-Jen Chen and Myle Ott and Juan Pino and Guillaume Lample and Philipp Koehn and Vishrav Chaudhary and Marc'Aurelio Ranzato},
year={2019},
eprint={1902.01382},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
"""
_DATA_URL = "https://github.com/facebookresearch/flores/raw/master/data/wikipedia_en_ne_si_test_sets.tgz"
# Tuple that describes a single pair of files with matching translations.
# language_to_file is the map from language (2 letter string: example 'en')
# to the file path in the extracted directory.
TranslateData = collections.namedtuple(
"TranslateData", ["url", "language_to_file"]
)
class FloresConfig(tfds.core.BuilderConfig):
"""BuilderConfig for FLoRes."""
def __init__(self, *, language_pair=(None, None), **kwargs):
"""BuilderConfig for FLoRes.
Args:
      language_pair: pair of languages that will be used for translation. Should
        contain 2-letter coded strings. First will be used as source and second
        as target in supervised mode. For example: ("si", "en").
**kwargs: keyword arguments forwarded to super.
"""
name = f"{language_pair[0]}{language_pair[1]}"
description = (
f"Translation dataset from {language_pair[0]} to {language_pair[1]}."
)
super(FloresConfig, self).__init__(
name=name,
description=description,
version=tfds.core.Version("1.2.0"),
**kwargs,
)
# Validate language pair.
assert "en" in language_pair, (
"Config language pair must contain `en`, got: %s",
language_pair,
)
source, target = language_pair
non_en = source if target == "en" else target
assert non_en in ["ne", "si"], (
"Invalid non-en language in pair: %s",
non_en,
)
self.language_pair = language_pair
class Flores(tfds.core.GeneratorBasedBuilder):
"""FLoRes machine translation dataset."""
BUILDER_CONFIGS = [
FloresConfig(
language_pair=("ne", "en"),
),
FloresConfig(
language_pair=("si", "en"),
),
]
def _info(self):
source, target = self.builder_config.language_pair
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.Translation(
languages=self.builder_config.language_pair
),
supervised_keys=(source, target),
homepage="https://github.com/facebookresearch/flores/",
citation=_CITATION,
)
def _split_generators(self, dl_manager):
dl_dir = dl_manager.download_and_extract(_DATA_URL)
source, target = self.builder_config.language_pair
non_en = source if target == "en" else target
path_tmpl = (
"{dl_dir}/wikipedia_en_ne_si_test_sets/wikipedia.{split}.{non_en}-en."
"{lang}"
)
files = {}
for split in ("dev", "devtest"):
files[split] = {
"source_file": path_tmpl.format(
dl_dir=dl_dir, split=split, non_en=non_en, lang=source
),
"target_file": path_tmpl.format(
dl_dir=dl_dir, split=split, non_en=non_en, lang=target
),
}
return [
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION, gen_kwargs=files["dev"]
),
tfds.core.SplitGenerator(
name=tfds.Split.TEST, gen_kwargs=files["devtest"]
),
]
def METHOD_NAME(self, source_file, target_file):
"""This function returns the examples in the raw (text) form."""
with epath.Path(source_file).open() as f:
source_sentences = f.read().split("\n")
with epath.Path(target_file).open() as f:
target_sentences = f.read().split("\n")
assert len(target_sentences) == len(
source_sentences
), "Sizes do not match: %d vs %d for %s vs %s." % (
len(source_sentences),
len(target_sentences),
source_file,
target_file,
)
source, target = self.builder_config.language_pair
for idx, (l1, l2) in enumerate(zip(source_sentences, target_sentences)):
result = {source: l1, target: l2}
# Make sure that both translations are non-empty.
if all(result.values()):
yield idx, result
| null |
1,587 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class DeleteTransitRouterRouteEntryRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'DeleteTransitRouterRouteEntry')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_TransitRouterRouteEntryId(self): # String
return self.get_query_params().get('TransitRouterRouteEntryId')
def set_TransitRouterRouteEntryId(self, TransitRouterRouteEntryId): # String
self.add_query_param('TransitRouterRouteEntryId', TransitRouterRouteEntryId)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_TransitRouterRouteEntryNextHopType(self): # String
return self.get_query_params().get('TransitRouterRouteEntryNextHopType')
def set_TransitRouterRouteEntryNextHopType(self, TransitRouterRouteEntryNextHopType): # String
self.add_query_param('TransitRouterRouteEntryNextHopType', TransitRouterRouteEntryNextHopType)
def get_TransitRouterRouteEntryDestinationCidrBlock(self): # String
return self.get_query_params().get('TransitRouterRouteEntryDestinationCidrBlock')
def set_TransitRouterRouteEntryDestinationCidrBlock(self, TransitRouterRouteEntryDestinationCidrBlock): # String
self.add_query_param('TransitRouterRouteEntryDestinationCidrBlock', TransitRouterRouteEntryDestinationCidrBlock)
def get_TransitRouterRouteTableId(self): # String
return self.get_query_params().get('TransitRouterRouteTableId')
def set_TransitRouterRouteTableId(self, TransitRouterRouteTableId): # String
self.add_query_param('TransitRouterRouteTableId', TransitRouterRouteTableId)
def get_TransitRouterRouteEntryNextHopId(self): # String
return self.get_query_params().get('TransitRouterRouteEntryNextHopId')
def set_TransitRouterRouteEntryNextHopId(self, TransitRouterRouteEntryNextHopId): # String
self.add_query_param('TransitRouterRouteEntryNextHopId', TransitRouterRouteEntryNextHopId)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def METHOD_NAME(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
| null |
1,588 |
from typing import Callable, Sequence, Tuple, Union
import numpy as np
from ..C import FVAL, GRAD, HESS, MODE_FUN, MODE_RES, RES, SRES, ModeType
from .base import ObjectiveBase, ResultDict
class Objective(ObjectiveBase):
"""
Objective class.
    The objective class allows the user to explicitly specify functions that
    compute the function value and/or residuals, as well as the respective
    derivatives.
Denote dimensions `n` = parameters, `m` = residuals.
Parameters
----------
fun:
The objective function to be minimized. If it only computes the
objective function value, it should be of the form
``fun(x) -> float``
        where x is a 1-D array with shape (n,), and n is the parameter space
dimension.
grad:
Method for computing the gradient vector. If it is a callable,
it should be of the form
``grad(x) -> array_like, shape (n,).``
If its value is True, then fun should return the gradient as a second
output.
hess:
Method for computing the Hessian matrix. If it is a callable,
it should be of the form
``hess(x) -> array, shape (n, n).``
If its value is True, then fun should return the gradient as a
second, and the Hessian as a third output, and grad should be True as
well.
hessp:
Method for computing the Hessian vector product, i.e.
``hessp(x, v) -> array_like, shape (n,)``
computes the product H*v of the Hessian of fun at x with v.
res:
Method for computing residuals, i.e.
        ``res(x) -> array_like, shape (m,).``
sres:
Method for computing residual sensitivities. If it is a callable,
it should be of the form
``sres(x) -> array, shape (m, n).``
If its value is True, then res should return the residual
sensitivities as a second output.
x_names:
Parameter names. None if no names provided, otherwise a list of str,
length dim_full (as in the Problem class). Can be read by the
problem.
"""
def __init__(
self,
fun: Callable = None,
grad: Union[Callable, bool] = None,
hess: Callable = None,
hessp: Callable = None,
res: Callable = None,
sres: Union[Callable, bool] = None,
x_names: Sequence[str] = None,
):
self.fun = fun
self.grad = grad
self.hess = hess
self.hessp = hessp
self.res = res
self.sres = sres
super().__init__(x_names)
@property
def METHOD_NAME(self) -> bool:
"""Check whether function is defined."""
return callable(self.fun)
@property
def has_grad(self) -> bool:
"""Check whether gradient is defined."""
return callable(self.grad) or self.grad is True
@property
def has_hess(self) -> bool:
"""Check whether Hessian is defined."""
return callable(self.hess) or self.hess is True
@property
def has_hessp(self) -> bool:
"""Check whether Hessian vector product is defined."""
# Not supported yet
return False
@property
def has_res(self) -> bool:
"""Check whether residuals are defined."""
return callable(self.res)
@property
def has_sres(self) -> bool:
"""Check whether residual sensitivities are defined."""
return callable(self.sres) or self.sres is True
def get_config(self) -> dict:
"""Return basic information of the objective configuration."""
info = super().get_config()
info['x_names'] = self.x_names
sensi_order = 0
while self.check_sensi_orders(
sensi_orders=(sensi_order,), mode=MODE_FUN
):
sensi_order += 1
info['sensi_order'] = sensi_order - 1
return info
def call_unprocessed(
self,
x: np.ndarray,
sensi_orders: Tuple[int, ...],
mode: ModeType,
**kwargs,
) -> ResultDict:
"""
Call objective function without pre- or post-processing and formatting.
Returns
-------
result:
A dict containing the results.
"""
if mode == MODE_FUN:
result = self._call_mode_fun(x=x, sensi_orders=sensi_orders)
elif mode == MODE_RES:
result = self._call_mode_res(x=x, sensi_orders=sensi_orders)
else:
raise ValueError("This mode is not supported.")
return result
def _call_mode_fun(
self,
x: np.ndarray,
sensi_orders: Tuple[int, ...],
) -> ResultDict:
if not sensi_orders:
result = {}
elif sensi_orders == (0,):
if self.grad is True:
fval = self.fun(x)[0]
else:
fval = self.fun(x)
result = {FVAL: fval}
elif sensi_orders == (1,):
if self.grad is True:
grad = self.fun(x)[1]
else:
grad = self.grad(x)
result = {GRAD: grad}
elif sensi_orders == (2,):
if self.hess is True:
hess = self.fun(x)[2]
else:
hess = self.hess(x)
result = {HESS: hess}
elif sensi_orders == (0, 1):
if self.grad is True:
fval, grad = self.fun(x)[:2]
else:
fval = self.fun(x)
grad = self.grad(x)
result = {FVAL: fval, GRAD: grad}
elif sensi_orders == (0, 2):
if self.hess is True:
fval, _, hess = self.fun(x)[:3]
else:
if self.grad is True:
fval = self.fun(x)[0]
else:
fval = self.fun(x)
hess = self.hess(x)
result = {FVAL: fval, HESS: hess}
elif sensi_orders == (1, 2):
if self.hess is True:
grad, hess = self.fun(x)[1:3]
else:
hess = self.hess(x)
if self.grad is True:
grad = self.fun(x)[1]
else:
grad = self.grad(x)
result = {GRAD: grad, HESS: hess}
elif sensi_orders == (0, 1, 2):
if self.hess is True:
fval, grad, hess = self.fun(x)[0:3]
else:
hess = self.hess(x)
if self.grad is True:
fval, grad = self.fun(x)[0:2]
else:
fval = self.fun(x)
grad = self.grad(x)
result = {FVAL: fval, GRAD: grad, HESS: hess}
else:
raise ValueError("These sensitivity orders are not supported.")
return result
def _call_mode_res(
self,
x: np.ndarray,
sensi_orders: Tuple[int, ...],
) -> ResultDict:
if not sensi_orders:
result = {}
elif sensi_orders == (0,):
if self.sres is True:
res = self.res(x)[0]
else:
res = self.res(x)
result = {RES: res}
elif sensi_orders == (1,):
if self.sres is True:
sres = self.res(x)[1]
else:
sres = self.sres(x)
result = {SRES: sres}
elif sensi_orders == (0, 1):
if self.sres is True:
res, sres = self.res(x)
else:
res = self.res(x)
sres = self.sres(x)
result = {RES: res, SRES: sres}
else:
raise ValueError("These sensitivity orders are not supported.")
return result
| null |
1,589 |
#!/usr/bin/env python3
#
# Generate a Doxygen file from a KConfig specification.
#
# Copyright (C) Sierra Wireless Inc.
#
import argparse
import os
import re
import sys
import textwrap
legato_root = os.getenv("LEGATO_ROOT")
if legato_root:
sys.path.append(os.path.join(legato_root, "3rdParty", "Kconfiglib"))
sys.path.append(legato_root)
import kconfiglib
INDENT = 2
def write_header(dox, page, title):
dox.write("/** @page {0} {1}\n".format(page, title))
dox.write("Build-time configuration options.\n")
def write_footer(dox, footer):
dox.write("\n{0}\n\n**/\n".format(footer))
def write_dt(dox, content, level):
indent_str = ' ' * (level * INDENT)
wrapped = textwrap.fill(content, 100, initial_indent=indent_str + (' ' * INDENT),
subsequent_indent=indent_str + (' ' * INDENT))
dox.write("{0}<dt>\n".format(indent_str))
dox.write("{0}\n".format(wrapped))
dox.write("{0}</dt>\n".format(indent_str))
def write_dd_start(dox, level):
indent_str = ' ' * (level * INDENT)
dox.write("{0}<dd>\n".format(indent_str))
def write_dd_end(dox, level):
indent_str = ' ' * (level * INDENT)
dox.write("{0}</dd>\n".format(indent_str))
def write_content(dox, content, level):
if content:
indent_str = ' ' * (level * INDENT)
wrapped = textwrap.fill(content, 100, initial_indent=indent_str + (' ' * INDENT),
subsequent_indent=indent_str + (' ' * INDENT))
dox.write("{0}\n".format(wrapped))
def write_type(dox, type_, level):
write_content(dox, "@b Type: @c {0}<br>".format(kconfiglib.TYPE_TO_STR[type_]), level)
def format_expression(expr, skip_y=True, prefix=" if "):
expr_str = kconfiglib.expr_str(expr)
if expr_str == '"y"' and skip_y:
return ""
return prefix + re.sub(r'\b([A-Z][A-Z0-9_]*)\b', r'@ref \1', expr_str)
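# Illustrative example (symbol names assumed): for a dependency expression
# that stringifies to 'FOO && BAR', format_expression() returns
# ' if @ref FOO && @ref BAR', cross-linking each config symbol in the
# generated Doxygen; a bare "y" condition is dropped when skip_y is True.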
def write_ranges(dox, ranges, level):
if ranges:
write_content(dox, "@b Range{0}:<br>".format("s" if len(ranges) > 1 else ""), level)
write_content(dox, "<ul>", level)
for r in ranges:
condition = format_expression(r[2])
write_content(dox,
"<li><kbd>{0} - {1}{2}</kbd></li>".format(r[0].name, r[1].name, condition),
level + 1)
write_content(dox, "</ul>", level)
def write_defaults(dox, defaults, level):
if defaults:
if len(defaults) == 1:
value = format_expression(defaults[0][0], skip_y=False, prefix="")
condition = format_expression(defaults[0][1])
write_content(dox, "@b Default: <kbd>{0}{1}</kbd><br>".format(value, condition), level)
else:
write_content(dox, "@b Defaults:", level)
write_content(dox, "<ul>", level)
for d in defaults:
value = format_expression(d[0], skip_y=False, prefix="")
condition = format_expression(d[1])
write_content(dox, "<li><kbd>{0}{1}</kbd></li>".format(value, condition), level + 1)
write_content(dox, "</ul>", level)
def write_dependencies(dox, node, level):
has_deps = False
if isinstance(node.dep, tuple):
for d in node.dep:
if isinstance(d, kconfiglib.Symbol):
has_deps = True
break
if has_deps:
write_content(dox, "<b>Depends on:</b><br>", level)
write_content(dox, "<ul>", level)
for d in node.dep:
if isinstance(d, kconfiglib.Symbol):
write_content(dox, "<li><kbd>@ref {0}</kbd></li>".format(d.name), level + 1)
write_content(dox, "</ul>", level)
def METHOD_NAME(dox, node, level, state):
sym = node.item
    if sym.type != kconfiglib.UNKNOWN and sym.nodes:
if sym.name in state:
content = "<kbd>@ref {0}:</kbd> {1}"
else:
content = "<kbd>@anchor {0} {0}:</kbd> {1}"
if node.prompt:
content = content.format(sym.name, node.prompt[0])
write_dt(dox, content, level)
state[sym.name] = {}
elif node.help:
content = content.format(sym.name, "<em>Hidden. Only set by other selections.</em>")
write_dt(dox, content, level)
state[sym.name] = {}
else:
# Hide items with no help and no prompt text from output. They are purely internal.
return False
write_dd_start(dox, level)
write_type(dox, sym.orig_type, level)
write_ranges(dox, sym.ranges, level)
write_defaults(dox, sym.defaults, level)
write_dependencies(dox, node, level)
write_content(dox, node.help, level)
return True
return False
def write_choice_start(dox, node, level, state):
choice = node.item
write_dt(dox, "<kbd>{0}</kbd>".format(node.prompt[0]), level)
write_dd_start(dox, level)
write_defaults(dox, node.defaults, level)
write_content(dox, "<em>Choose one of:</em>", level)
return True
def write_menu_start(dox, node, level, state):
name = node.prompt[0] or str(node.linenr)
write_dt(dox, "{0}".format(name), level)
write_dd_start(dox, level)
return True
def write_dl_start(dox, level):
indent_str = ' ' * (level * INDENT)
dox.write("{0}<hr>\n{0}<dl>\n".format(indent_str))
def write_dl_end(dox, level):
indent_str = ' ' * (level * INDENT)
dox.write("{0}</dl>\n".format(indent_str))
def write_node(dox, node, level, state):
needs_closure = False
if isinstance(node.item, kconfiglib.Symbol):
needs_closure = METHOD_NAME(dox, node, level, state)
elif isinstance(node.item, kconfiglib.Choice):
needs_closure = write_choice_start(dox, node, level, state)
elif node.item == kconfiglib.MENU:
needs_closure = write_menu_start(dox, node, level, state)
else:
return
if node.list:
write_dl_start(dox, level + 1)
entry = node.list
        while entry:
write_node(dox, entry, level + 2, state)
entry = entry.next
write_dl_end(dox, level + 1)
if needs_closure:
write_dd_end(dox, level)
def main():
parser = argparse.ArgumentParser(
description='Generate a Doxygen file from a KConfig specification.')
parser.add_argument('-k', '--kconfig', default='KConfig', help='KConfig file to read')
parser.add_argument('-t', '--title', default=None, help='Page title')
parser.add_argument('-p', '--page', default=None, help='Page Doxygen identifier')
parser.add_argument('-f', '--footer', default='Copyright (C) Sierra Wireless Inc.',
help='Page footer text')
parser.add_argument('-d', '--dox', required=True, type=argparse.FileType('w'),
help='Doxygen file to generate')
args = parser.parse_args()
parts = os.path.split(args.kconfig)
oldwd = os.getcwd()
os.chdir(parts[0])
kconf = kconfiglib.Kconfig(parts[1])
os.chdir(oldwd)
title = args.title or kconf.mainmenu_text
page = args.page or os.path.splitext(os.path.basename(args.dox.name))[0]
state = {}
write_header(args.dox, page, title)
write_dl_start(args.dox, 0)
write_node(args.dox, kconf.top_node, 1, state)
write_dl_end(args.dox, 0)
write_footer(args.dox, args.footer)
args.dox.close()
if __name__ == "__main__":
    main()
| null |
1,590 |
# Copyright 2018-2021 The glTF-Blender-IO authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import typing
import math
from mathutils import Matrix, Vector, Quaternion, Euler
from .gltf2_blender_data_path import get_target_property_name
def list_to_mathutils(values: typing.List[float], data_path: str) -> typing.Union[Vector, Quaternion, Euler]:
"""Transform a list to blender py object."""
target = get_target_property_name(data_path)
if target == 'delta_location':
return Vector(values) # TODO Should be Vector(values) - Vector(something)?
elif target == 'delta_rotation_euler':
return Euler(values).to_quaternion() # TODO Should be Euler(values).to_quaternion() @ something?
elif target == 'location':
return Vector(values)
elif target == 'rotation_axis_angle':
angle = values[0]
axis = values[1:]
return Quaternion(axis, math.radians(angle))
elif target == 'rotation_euler':
return Euler(values).to_quaternion()
elif target == 'rotation_quaternion':
return Quaternion(values)
elif target == 'scale':
return Vector(values)
elif target == 'value':
return Vector(values)
return values
def mathutils_to_gltf(x: typing.Union[Vector, Quaternion]) -> typing.List[float]:
"""Transform a py object to glTF list."""
if isinstance(x, Vector):
return list(x)
if isinstance(x, Quaternion):
# Blender has w-first quaternion notation
return [x[1], x[2], x[3], x[0]]
else:
return list(x)
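# Illustrative check of the reordering above (values assumed):
#   mathutils_to_gltf(Quaternion((1.0, 0.0, 0.0, 0.0)))  # Blender w, x, y, z
#   -> [0.0, 0.0, 0.0, 1.0]                              # glTF x, y, z, w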
def to_yup() -> Matrix:
"""Transform to Yup."""
return Matrix(
((1.0, 0.0, 0.0, 0.0),
(0.0, 0.0, 1.0, 0.0),
(0.0, -1.0, 0.0, 0.0),
(0.0, 0.0, 0.0, 1.0))
)
def to_zup() -> Matrix:
    """Transform to Zup (inverse of to_yup)."""
    return Matrix(
        ((1.0, 0.0, 0.0, 0.0),
         (0.0, 0.0, -1.0, 0.0),
         (0.0, 1.0, 0.0, 0.0),
         (0.0, 0.0, 0.0, 1.0))
    )
def METHOD_NAME(v: typing.Union[Vector, Quaternion], data_path: str) -> typing.Union[Vector, Quaternion]:
"""Manage Yup."""
target = get_target_property_name(data_path)
swizzle_func = {
"delta_location": swizzle_yup_location,
"delta_rotation_euler": swizzle_yup_rotation,
"location": swizzle_yup_location,
"rotation_axis_angle": swizzle_yup_rotation,
"rotation_euler": swizzle_yup_rotation,
"rotation_quaternion": swizzle_yup_rotation,
"scale": swizzle_yup_scale,
"value": swizzle_yup_value
}.get(target)
if swizzle_func is None:
raise RuntimeError("Cannot transform values at {}".format(data_path))
return swizzle_func(v)
def swizzle_yup_location(loc: Vector) -> Vector:
"""Manage Yup location."""
return Vector((loc[0], loc[2], -loc[1]))
def swizzle_yup_rotation(rot: Quaternion) -> Quaternion:
"""Manage Yup rotation."""
return Quaternion((rot[0], rot[1], rot[3], -rot[2]))
def swizzle_yup_scale(scale: Vector) -> Vector:
"""Manage Yup scale."""
return Vector((scale[0], scale[2], scale[1]))
def swizzle_yup_value(value: typing.Any) -> typing.Any:
"""Manage Yup value."""
return value
def transform(v: typing.Union[Vector, Quaternion], data_path: str, transform: Matrix = Matrix.Identity(4),
              need_rotation_correction: bool = False) -> typing.Union[Vector, Quaternion]:
"""Manage transformations."""
target = get_target_property_name(data_path)
transform_func = {
"delta_location": transform_location,
"delta_rotation_euler": transform_rotation,
"location": transform_location,
"rotation_axis_angle": transform_rotation,
"rotation_euler": transform_rotation,
"rotation_quaternion": transform_rotation,
"scale": transform_scale,
"value": transform_value
}.get(target)
if transform_func is None:
raise RuntimeError("Cannot transform values at {}".format(data_path))
return transform_func(v, transform, need_rotation_correction)
def transform_location(location: Vector, transform: Matrix = Matrix.Identity(4), need_rotation_correction:bool = False) -> Vector:
"""Transform location."""
correction = Quaternion((2**0.5/2, -2**0.5/2, 0.0, 0.0))
m = Matrix.Translation(location)
if need_rotation_correction:
m @= correction.to_matrix().to_4x4()
m = transform @ m
return m.to_translation()
def transform_rotation(rotation: Quaternion, transform: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> Quaternion:
"""Transform rotation."""
rotation.normalize()
correction = Quaternion((2**0.5/2, -2**0.5/2, 0.0, 0.0))
m = rotation.to_matrix().to_4x4()
if need_rotation_correction:
m @= correction.to_matrix().to_4x4()
m = transform @ m
return m.to_quaternion()
def transform_scale(scale: Vector, transform: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> Vector:
"""Transform scale."""
m = Matrix.Identity(4)
m[0][0] = scale.x
m[1][1] = scale.y
m[2][2] = scale.z
m = transform @ m
return m.to_scale()
def transform_value(value: Vector, _: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> Vector:
"""Transform value."""
return value
def round_if_near(value: float, target: float) -> float:
"""If value is very close to target, round to target."""
return value if abs(value - target) > 2.0e-6 else target
def scale_rot_swap_matrix(rot):
"""Returns a matrix m st. Scale[s] Rot[rot] = Rot[rot] Scale[m s].
If rot.to_matrix() is a signed permutation matrix, works for any s.
Otherwise works only if s is a uniform scaling.
"""
m = nearby_signed_perm_matrix(rot) # snap to signed perm matrix
m.transpose() # invert permutation
for i in range(3):
for j in range(3):
m[i][j] = abs(m[i][j]) # discard sign
return m
def nearby_signed_perm_matrix(rot):
"""Returns a signed permutation matrix close to rot.to_matrix().
(A signed permutation matrix is like a permutation matrix, except
the non-zero entries can be ±1.)
"""
m = rot.to_matrix()
x, y, z = m[0], m[1], m[2]
# Set the largest entry in the first row to ±1
a, b, c = abs(x[0]), abs(x[1]), abs(x[2])
i = 0 if a >= b and a >= c else 1 if b >= c else 2
x[i] = 1 if x[i] > 0 else -1
x[(i+1) % 3] = 0
x[(i+2) % 3] = 0
# Same for second row: only two columns to consider now.
a, b = abs(y[(i+1) % 3]), abs(y[(i+2) % 3])
j = (i+1) % 3 if a >= b else (i+2) % 3
y[j] = 1 if y[j] > 0 else -1
y[(j+1) % 3] = 0
y[(j+2) % 3] = 0
# Same for third row: only one column left
k = (0 + 1 + 2) - i - j
z[k] = 1 if z[k] > 0 else -1
z[(k+1) % 3] = 0
z[(k+2) % 3] = 0
return m
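# Worked example (illustrative): a 90-degree rotation about Z is already a
# signed permutation, so nearby_signed_perm_matrix() returns
#   ((0, -1, 0), (1, 0, 0), (0, 0, 1))
# and scale_rot_swap_matrix() transposes it and drops the signs, yielding
#   ((0, 1, 0), (1, 0, 0), (0, 0, 1)) - i.e. the x and y scale axes swap.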
| null |
1,591 |
"""
@file
@brief This file contains the profiles treeview, used by the profile window
@author Jonathan Thomas <[email protected]>
@section LICENSE
Copyright (c) 2008-2023 OpenShot Studios, LLC
(http://www.openshotstudios.com). This file is part of
OpenShot Video Editor (http://www.openshot.org), an open-source project
dedicated to delivering high quality video editing and animation solutions
to the world.
OpenShot Video Editor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenShot Video Editor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
"""
from PyQt5.QtCore import Qt, QItemSelectionModel, QRegExp, pyqtSignal, QTimer
from PyQt5.QtWidgets import QListView, QTreeView, QAbstractItemView, QSizePolicy
from classes.app import get_app
from windows.models.profiles_model import ProfilesModel
class ProfilesTreeView(QTreeView):
    """ A TreeView QWidget used on the profile window """
    FilterCountChanged = pyqtSignal(int)
def selectionChanged(self, selected, deselected):
if deselected and deselected.first() and self.is_filter_running:
# Selection changed due to filtering... clear selections
self.selectionModel().clear()
if not self.is_filter_running and selected and selected.first() and selected.first().indexes():
# Selection changed due to user selection or init of treeview
self.selected_profile_object = selected.first().indexes()[0].data(Qt.UserRole)
super().selectionChanged(selected, deselected)
def refresh_view(self, filter_text=""):
"""Filter transitions with proxy class"""
self.is_filter_running = True
self.model().setFilterCaseSensitivity(Qt.CaseInsensitive)
self.model().setFilterRegExp(QRegExp(filter_text.lower()))
self.model().sort(Qt.DescendingOrder)
# Format columns
self.sortByColumn(0, Qt.DescendingOrder)
self.setColumnHidden(0, True)
self.is_filter_running = False
# Update filter count
self.FilterCountChanged.emit(self.profiles_model.proxy_model.rowCount())
if self.selectionModel().hasSelection():
current = self.selectionModel().currentIndex()
self.scrollTo(current)
def select_profile(self, profile_index):
"""Select a specific profile Key"""
self.selectionModel().setCurrentIndex(profile_index, QItemSelectionModel.Select | QItemSelectionModel.Rows)
def METHOD_NAME(self):
"""Return the selected profile object, if any"""
return self.selected_profile_object
def __init__(self, profiles, *args):
# Invoke parent init
        QTreeView.__init__(self, *args)
# Get a reference to the window object
self.win = get_app().window
# Get Model data
self.profiles_model = ProfilesModel(profiles)
self.selected = []
# Setup header columns
self.is_filter_running = False
self.setModel(self.profiles_model.proxy_model)
self.setIndentation(0)
self.setSelectionMode(QAbstractItemView.SingleSelection)
        self.setSelectionBehavior(QAbstractItemView.SelectRows)
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setWordWrap(True)
self.setStyleSheet('QTreeView::item { padding-top: 2px; }')
self.columns = 6
self.selected_profile_object = None
# Refresh view
self.profiles_model.update_model()
QTimer.singleShot(50, self.refresh_view)
# Resize columns (initial data)
for column in range(self.columns):
self.resizeColumnToContents(column)
| null |
1,592 |
import itertools
from pharmpy.modeling import get_observations, set_initial_estimates, set_name, set_tmdd
def product_dict(**kwargs):
keys = kwargs.keys()
for instance in itertools.product(*kwargs.values()):
yield dict(zip(keys, instance))
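# Illustrative example (argument names assumed):
#   list(product_dict(a=(1, 2), b=('x',)))
#   -> [{'a': 1, 'b': 'x'}, {'a': 2, 'b': 'x'}]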
def create_qss_models(model):
# Create qss models with different initial estimates from basic pk model
qss_base_model = set_tmdd(model, type="QSS")
cmax = get_observations(model).max()
all_inits = product_dict(
POP_KDEG=(0.5623, 17.28), POP_R_0=(0.001 * cmax, 0.01 * cmax, 0.1 * cmax, 1 * cmax)
)
qss_candidate_models = [
set_initial_estimates(set_name(qss_base_model, f"QSS{i}"), inits)
for i, inits in enumerate(all_inits, start=1)
]
return qss_candidate_models
def create_remaining_models(model, ests):
models = (
METHOD_NAME(model, ests)
+ create_cr_models(model, ests)
+ create_ib_models(model, ests)
+ create_crib_models(model, ests)
+ create_wagner_model(model, ests)
+ create_mmapp_model(model, ests)
)
return models
def create_cr_models(model, ests):
# Create cr models with different initial estimates from basic pk model and best qss ests
cr_base_model = set_tmdd(model, type="CR")
cr_base_model = set_initial_estimates(
cr_base_model,
{"POP_KINT": ests['POP_KINT'], "POP_R0": ests['POP_R0'], "IIV_R0": ests['IIV_R0']},
)
    cr1 = set_name(cr_base_model, "CR1")
    cr1 = set_initial_estimates(
        cr1, {"POP_KOFF": 0.5623, "POP_KON": 0.5623 / (ests['POP_KDC'] * ests['POP_VC'])}
    )
    cr2 = set_name(cr_base_model, "CR2")
    cr2 = set_initial_estimates(
        cr2, {"POP_KOFF": 17.78, "POP_KON": 17.78 / (ests['POP_KDC'] * ests['POP_VC'])}
    )
return [cr1, cr2]
def create_ib_models(model, ests):
# Create ib models with different initial estimates from basic pk model and best qss ests
ib_base_model = set_tmdd(model, type="IB")
ib_base_model = set_initial_estimates(
ib_base_model,
{
"POP_KINT": ests['POP_KINT'],
"POP_R0": ests['POP_R0'],
"POP_KDEG": ests['POP_KDEG'],
"IIV_R0": ests['IIV_R0'],
},
)
    ib1 = set_name(ib_base_model, "IB1")
    ib1 = set_initial_estimates(
        ib1, {"POP_KON": 0.5623 / (ests['POP_KDC'] * ests['POP_VC'])}
    )
    ib2 = set_name(ib_base_model, "IB2")
    ib2 = set_initial_estimates(
        ib2, {"POP_KON": 17.78 / (ests['POP_KDC'] * ests['POP_VC'])}
    )
return [ib1, ib2]
def create_crib_models(model, ests):
# Create crib models with different initial estimates from basic pk model and best qss ests
    crib_base_model = set_tmdd(model, type="CRIB")
crib_base_model = set_initial_estimates(
crib_base_model,
{"POP_KINT": ests['POP_KINT'], "POP_R0": ests['POP_R0'], "IIV_R0": ests['IIV_R0']},
)
    crib1 = set_name(crib_base_model, "CRIB1")
    crib1 = set_initial_estimates(
        crib1, {"POP_KON": 0.5623 / (ests['POP_KDC'] * ests['POP_VC'])}
    )
    crib2 = set_name(crib_base_model, "CRIB2")
    crib2 = set_initial_estimates(
        crib2, {"POP_KON": 17.78 / (ests['POP_KDC'] * ests['POP_VC'])}
    )
return [crib1, crib2]
def METHOD_NAME(model, ests):
# Create full models with different initial estimates from basic pk model and best qss ests
full_base_model = set_tmdd(model, type="FULL")
full_base_model = set_initial_estimates(
full_base_model,
{
"POP_KINT": ests['POP_KINT'],
"POP_R0": ests['POP_R0'],
"IIV_R0": ests['IIV_R0'],
"POP_KDEG": ests['POP_KDEG'],
"POP_KON": 0.1 / (ests['POP_KDEG'] * ests['POP_VC']),
},
)
candidates = [
set_initial_estimates(full_base_model, {'POP_KOFF': koff}) for koff in (0.1, 1, 10, 100)
]
return candidates
def create_wagner_model(model, ests):
wagner = set_tmdd(model, type="WAGNER")
wagner = set_name(wagner, "WAGNER")
wagner = set_initial_estimates(
wagner,
{
"POP_KINT": ests['POP_KINT'],
"POP_R0": ests['POP_R0'],
"IIV_R0": ests['IIV_R0'],
"POP_KM": ests['POP_KDC'] * ests['POP_VC'],
},
)
return [wagner]
def create_mmapp_model(model, ests):
mmapp = set_tmdd(model, type="MMAPP")
mmapp = set_name(mmapp, "MMAPP")
mmapp = set_initial_estimates(
mmapp,
{
"POP_KINT": ests['POP_KINT'],
"POP_R0": ests['POP_R0'],
"IIV_R0": ests['IIV_R0'],
"POP_KDEG": ests['POP_KDEG'],
},
)
return [mmapp]
| null |
1,593 |
import json
import requests
import re
misperrors = {'error': 'Error'}
mispattributes = {'input': ["hostname", "domain", "ip-src", "ip-dst", "md5", "sha1", "sha256", "sha512"],
'output': ["domain", "ip-src", "ip-dst", "text", "md5", "sha1", "sha256", "sha512", "email"]
}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'chrisdoman',
'description': 'Get information from AlienVault OTX',
'module-type': ['expansion']}
# We're not actually using the API key yet
moduleconfig = ["apikey"]
# Avoid adding windows update to enrichment etc.
def isBlacklisted(value):
    blacklist = ['0.0.0.0', '8.8.8.8', '255.255.255.255', '192.168.56.', 'time.windows.com']
    for b in blacklist:
        # Substring match so e.g. '192.168.56.' covers the whole range
        if b in value:
            return True
    return False
def valid_ip(ip):
m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip)
return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups()))
def findAll(data, keys):
a = []
if isinstance(data, dict):
for key, value in data.items():
if key == keys:
a.append(value)
else:
if isinstance(value, (dict, list)):
a.extend(findAll(value, keys))
if isinstance(data, list):
for i in data:
a.extend(findAll(i, keys))
return a
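# Illustrative example (data assumed):
#   findAll({'a': {'hash': 'h1'}, 'b': [{'hash': 'h2'}]}, 'hash')
#   -> ['h1', 'h2'] - every value stored under the key, at any depth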
def valid_email(email):
return bool(re.search(r"[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?", email))
def handler(q=False):
if q is False:
return False
q = json.loads(q)
key = q["config"]["apikey"]
r = {"results": []}
if "ip-src" in q:
r["results"] += getIP(q["ip-src"], key)
if "ip-dst" in q:
r["results"] += getIP(q["ip-dst"], key)
if "domain" in q:
r["results"] += getDomain(q["domain"], key)
if 'hostname' in q:
r["results"] += getDomain(q['hostname'], key)
if 'md5' in q:
r["results"] += METHOD_NAME(q['md5'], key)
if 'sha1' in q:
r["results"] += METHOD_NAME(q['sha1'], key)
if 'sha256' in q:
r["results"] += METHOD_NAME(q['sha256'], key)
if 'sha512' in q:
r["results"] += METHOD_NAME(q['sha512'], key)
uniq = []
for res in r["results"]:
if res not in uniq:
uniq.append(res)
r["results"] = uniq
return r
def METHOD_NAME(_hash, key):
ret = []
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/file/analysis/" + _hash).text)
for ip in findAll(req, "dst"):
if not isBlacklisted(ip) and valid_ip(ip):
ret.append({"types": ["ip-dst", "ip-src"], "values": [ip]})
for domain in findAll(req, "hostname"):
if "." in domain and not isBlacklisted(domain):
ret.append({"types": ["hostname"], "values": [domain]})
return ret
def getIP(ip, key):
ret = []
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/ip/malware/" + ip + "?limit=1000").text)
for _hash in findAll(req, "hash"):
ret.append({"types": ["sha256"], "values": [_hash]})
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/ip/passive_dns/" + ip).text)
for hostname in findAll(req, "hostname"):
if not isBlacklisted(hostname):
ret.append({"types": ["hostname"], "values": [hostname]})
return ret
def getDomain(domain, key):
ret = []
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/domain/malware/" + domain + "?limit=1000").text)
for _hash in findAll(req, "hash"):
ret.append({"types": ["sha256"], "values": [_hash]})
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/domain/whois/" + domain).text)
for _domain in findAll(req, "domain"):
ret.append({"types": ["hostname"], "values": [_domain]})
for email in findAll(req, "value"):
if valid_email(email):
ret.append({"types": ["email"], "values": [email]})
for _domain in findAll(req, "hostname"):
if "." in _domain and not isBlacklisted(_domain):
ret.append({"types": ["hostname"], "values": [_domain]})
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/hostname/passive_dns/" + domain).text)
for ip in findAll(req, "address"):
if valid_ip(ip):
ret.append({"types": ["ip-dst"], "values": [ip]})
return ret
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
| null |
1,594 |
#!/usr/bin/env python
import asyncio
import aiohttp
import logging
from typing import (
AsyncIterable,
Dict,
Optional,
Any
)
import time
import ujson
import websockets
from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource
from hummingbot.logger import HummingbotLogger
from hummingbot.connector.exchange.loopring.loopring_auth import LoopringAuth
from hummingbot.connector.exchange.loopring.loopring_api_order_book_data_source import LoopringAPIOrderBookDataSource
from hummingbot.connector.exchange.loopring.loopring_order_book import LoopringOrderBook
from hummingbot.connector.exchange.loopring.loopring_utils import get_ws_api_key
LOOPRING_WS_URL = "wss://ws.api3.loopring.io/v3/ws"
LOOPRING_ROOT_API = "https://api3.loopring.io"
class LoopringAPIUserStreamDataSource(UserStreamTrackerDataSource):
_krausds_logger: Optional[HummingbotLogger] = None
@classmethod
def logger(cls) -> HummingbotLogger:
if cls._krausds_logger is None:
cls._krausds_logger = logging.getLogger(__name__)
return cls._krausds_logger
def __init__(self, orderbook_tracker_data_source: LoopringAPIOrderBookDataSource, loopring_auth: LoopringAuth):
self._loopring_auth: LoopringAuth = loopring_auth
self._orderbook_tracker_data_source: LoopringAPIOrderBookDataSource = orderbook_tracker_data_source
self._shared_client: Optional[aiohttp.ClientSession] = None
self._last_recv_time: float = 0
super().__init__()
@property
def order_book_class(self):
return LoopringOrderBook
@property
def last_recv_time(self):
return self._last_recv_time
async def listen_for_user_stream(self, output: asyncio.Queue):
while True:
try:
ws_key: str = await get_ws_api_key()
async with websockets.connect(f"{LOOPRING_WS_URL}?wsApiKey={ws_key}") as ws:
ws: websockets.WebSocketClientProtocol = ws
topics = [{"topic": "order", "market": m} for m in self._orderbook_tracker_data_source.trading_pairs]
topics.append({
"topic": "account"
})
subscribe_request: Dict[str, Any] = {
"op": "sub",
"apiKey": self._loopring_auth.generate_auth_dict()["X-API-KEY"],
"unsubscribeAll": True,
"topics": topics
}
await ws.send(ujson.dumps(subscribe_request))
async for raw_msg in self._inner_messages(ws):
self._last_recv_time = time.time()
diff_msg = ujson.loads(raw_msg)
if 'op' in diff_msg:
continue # These messages are for control of the stream, so skip sending them to the market class
output.put_nowait(diff_msg)
except asyncio.CancelledError:
raise
except Exception:
self.logger().error("Unexpected error with Loopring WebSocket connection. "
"Retrying after 30 seconds...", exc_info=True)
await asyncio.sleep(30.0)
async def _inner_messages(self,
ws: websockets.WebSocketClientProtocol) -> AsyncIterable[str]:
"""
Generator function that returns messages from the web socket stream
:param ws: current web socket connection
:returns: message in AsyncIterable format
"""
        # Terminate the recv() loop as soon as the connection closes, so the outer loop can reconnect.
try:
while True:
msg: str = await asyncio.wait_for(ws.recv(), timeout=None) # This will throw the ConnectionClosed exception on disconnect
if msg == "ping":
await ws.send("pong") # skip returning this and handle this protocol level message here
else:
yield msg
except websockets.exceptions.ConnectionClosed:
self.logger().warning("Loopring websocket connection closed. Reconnecting...")
return
finally:
await ws.close()
async def METHOD_NAME(self):
if self._shared_client is not None and not self._shared_client.closed:
await self._shared_client.close()
| null |
1,595 |
import asyncio
import logging
from decimal import Decimal
from typing import Dict, Optional, Set
from pydantic import BaseModel
from hummingbot.connector.utils import split_hb_trading_pair
from hummingbot.core.data_type.common import TradeType
from hummingbot.core.gateway.gateway_http_client import GatewayHttpClient
from hummingbot.core.network_base import NetworkBase
from hummingbot.core.network_iterator import NetworkStatus
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.logger import HummingbotLogger
class TokenBuySellPrice(BaseModel):
base: str
quote: str
connector: str
chain: str
network: str
order_amount_in_base: Decimal
buy_price: Decimal
sell_price: Decimal
class AmmGatewayDataFeed(NetworkBase):
dex_logger: Optional[HummingbotLogger] = None
gateway_client = GatewayHttpClient.get_instance()
def __init__(
self,
connector_chain_network: str,
trading_pairs: Set[str],
order_amount_in_base: Decimal,
update_interval: float = 1.0,
) -> None:
super().__init__()
self._ev_loop = asyncio.get_event_loop()
self._price_dict: Dict[str, TokenBuySellPrice] = {}
self._update_interval = update_interval
self.fetch_data_loop_task: Optional[asyncio.Task] = None
# param required for DEX API request
self.connector_chain_network = connector_chain_network
self.trading_pairs = trading_pairs
self.order_amount_in_base = order_amount_in_base
@classmethod
def logger(cls) -> HummingbotLogger:
if cls.dex_logger is None:
cls.dex_logger = logging.getLogger(__name__)
return cls.dex_logger
@property
def name(self) -> str:
return f"AmmDataFeed[{self.connector_chain_network}]"
@property
def connector(self) -> str:
return self.connector_chain_network.split("_")[0]
@property
def chain(self) -> str:
return self.connector_chain_network.split("_")[1]
@property
def network(self) -> str:
return self.connector_chain_network.split("_")[2]
@property
def price_dict(self) -> Dict[str, TokenBuySellPrice]:
return self._price_dict
def is_ready(self) -> bool:
return len(self._price_dict) == len(self.trading_pairs)
async def check_network(self) -> NetworkStatus:
is_gateway_online = await self.gateway_client.ping_gateway()
if not is_gateway_online:
self.logger().warning("Gateway is not online. Please check your gateway connection.")
return NetworkStatus.CONNECTED if is_gateway_online else NetworkStatus.NOT_CONNECTED
async def start_network(self) -> None:
await self.stop_network()
self.fetch_data_loop_task = safe_ensure_future(self._fetch_data_loop())
async def stop_network(self) -> None:
if self.fetch_data_loop_task is not None:
self.fetch_data_loop_task.cancel()
self.fetch_data_loop_task = None
async def _fetch_data_loop(self) -> None:
while True:
try:
await self._fetch_data()
except asyncio.CancelledError:
raise
except Exception as e:
self.logger().error(
f"Error getting data from {self.name}"
f"Check network connection. Error: {e}",
)
await self.METHOD_NAME(self._update_interval)
async def _fetch_data(self) -> None:
token_price_tasks = [
asyncio.create_task(self._register_token_buy_sell_price(trading_pair))
for trading_pair in self.trading_pairs
]
await asyncio.gather(*token_price_tasks)
async def _register_token_buy_sell_price(self, trading_pair: str) -> None:
base, quote = split_hb_trading_pair(trading_pair)
token_buy_price_task = asyncio.create_task(self._request_token_price(trading_pair, TradeType.BUY))
token_sell_price_task = asyncio.create_task(self._request_token_price(trading_pair, TradeType.SELL))
self._price_dict[trading_pair] = TokenBuySellPrice(
base=base,
quote=quote,
connector=self.connector,
chain=self.chain,
network=self.network,
order_amount_in_base=self.order_amount_in_base,
buy_price=await token_buy_price_task,
sell_price=await token_sell_price_task,
)
async def _request_token_price(self, trading_pair: str, trade_type: TradeType) -> Decimal:
base, quote = split_hb_trading_pair(trading_pair)
connector, chain, network = self.connector_chain_network.split("_")
token_price = await self.gateway_client.get_price(
chain,
network,
connector,
base,
quote,
self.order_amount_in_base,
trade_type,
)
return Decimal(token_price["price"])
@staticmethod
async def METHOD_NAME(delay: float) -> None:
"""Used to mock in test cases."""
await asyncio.sleep(delay)
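# Hypothetical usage sketch (illustrative only, not part of the original file; the
# connector_chain_network string and trading pair below are made-up examples that
# follow the "<connector>_<chain>_<network>" format the class splits on):
#   feed = AmmGatewayDataFeed("uniswap_ethereum_mainnet", {"WETH-USDC"}, Decimal("1"))
#   await feed.start_network()   # starts the periodic price polling loop
#   feed.price_dict              # {"WETH-USDC": TokenBuySellPrice(...)} once ready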
| null |
1,596 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateCmsOrderRequest(RpcRequest):
def __init__(self):
		RpcRequest.__init__(self, 'Cms', '2019-01-01', 'CreateCmsOrder', 'cms')
self.set_method('POST')
def get_SmsCount(self): # String
return self.get_query_params().get('SmsCount')
def set_SmsCount(self, SmsCount): # String
self.add_query_param('SmsCount', SmsCount)
def get_AutoUseCoupon(self): # Boolean
return self.get_query_params().get('AutoUseCoupon')
def set_AutoUseCoupon(self, AutoUseCoupon): # Boolean
self.add_query_param('AutoUseCoupon', AutoUseCoupon)
def get_LogMonitorStream(self): # String
return self.get_query_params().get('LogMonitorStream')
def set_LogMonitorStream(self, LogMonitorStream): # String
self.add_query_param('LogMonitorStream', LogMonitorStream)
def get_CustomTimeSeries(self): # String
return self.get_query_params().get('CustomTimeSeries')
def set_CustomTimeSeries(self, CustomTimeSeries): # String
self.add_query_param('CustomTimeSeries', CustomTimeSeries)
def get_ApiCount(self): # String
return self.get_query_params().get('ApiCount')
def set_ApiCount(self, ApiCount): # String
self.add_query_param('ApiCount', ApiCount)
def get_PhoneCount(self): # String
return self.get_query_params().get('PhoneCount')
def set_PhoneCount(self, PhoneCount): # String
self.add_query_param('PhoneCount', PhoneCount)
def get_AutoRenewPeriod(self): # Integer
return self.get_query_params().get('AutoRenewPeriod')
def set_AutoRenewPeriod(self, AutoRenewPeriod): # Integer
self.add_query_param('AutoRenewPeriod', AutoRenewPeriod)
def get_Period(self): # Integer
return self.get_query_params().get('Period')
def set_Period(self, Period): # Integer
self.add_query_param('Period', Period)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_SuggestType(self): # String
return self.get_query_params().get('SuggestType')
def set_SuggestType(self, SuggestType): # String
self.add_query_param('SuggestType', SuggestType)
def get_EventStoreNum(self): # String
return self.get_query_params().get('EventStoreNum')
def set_EventStoreNum(self, EventStoreNum): # String
self.add_query_param('EventStoreNum', EventStoreNum)
def get_SiteTaskNum(self): # String
return self.get_query_params().get('SiteTaskNum')
def set_SiteTaskNum(self, SiteTaskNum): # String
self.add_query_param('SiteTaskNum', SiteTaskNum)
def get_PeriodUnit(self): # String
return self.get_query_params().get('PeriodUnit')
def METHOD_NAME(self, PeriodUnit): # String
self.add_query_param('PeriodUnit', PeriodUnit)
def get_SiteOperatorNum(self): # String
return self.get_query_params().get('SiteOperatorNum')
def set_SiteOperatorNum(self, SiteOperatorNum): # String
self.add_query_param('SiteOperatorNum', SiteOperatorNum)
def get_SiteEcsNum(self): # String
return self.get_query_params().get('SiteEcsNum')
def set_SiteEcsNum(self, SiteEcsNum): # String
self.add_query_param('SiteEcsNum', SiteEcsNum)
def get_EventStoreTime(self): # String
return self.get_query_params().get('EventStoreTime')
def set_EventStoreTime(self, EventStoreTime): # String
self.add_query_param('EventStoreTime', EventStoreTime)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
| null |
1,597 |
# coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictInt, StrictStr, conint, constr, validator
from lightly.openapi_generated.swagger_client.models.job_state import JobState
from lightly.openapi_generated.swagger_client.models.job_status_data_result import JobStatusDataResult
from lightly.openapi_generated.swagger_client.models.job_status_meta import JobStatusMeta
class JobStatusData(BaseModel):
"""
JobStatusData
"""
id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
dataset_id: Optional[constr(strict=True)] = Field(None, alias="datasetId", description="MongoDB ObjectId")
status: JobState = Field(...)
meta: Optional[JobStatusMeta] = None
wait_time_till_next_poll: StrictInt = Field(..., alias="waitTimeTillNextPoll", description="The time in seconds the client should wait before doing the next poll.")
created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
last_modified_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="lastModifiedAt", description="unix timestamp in milliseconds")
finished_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="finishedAt", description="unix timestamp in milliseconds")
error: Optional[StrictStr] = None
result: Optional[JobStatusDataResult] = None
__properties = ["id", "datasetId", "status", "meta", "waitTimeTillNextPoll", "createdAt", "lastModifiedAt", "finishedAt", "error", "result"]
@validator('id')
def id_validate_regular_expression(cls, value):
"""Validates the regular expression"""
if not re.match(r"^[a-f0-9]{24}$", value):
raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
return value
@validator('dataset_id')
def dataset_id_validate_regular_expression(cls, value):
"""Validates the regular expression"""
if value is None:
return value
if not re.match(r"^[a-f0-9]{24}$", value):
raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
return value
class Config:
"""Pydantic configuration"""
allow_population_by_field_name = True
validate_assignment = True
use_enum_values = True
extra = Extra.forbid
def METHOD_NAME(self, by_alias: bool = False) -> str:
"""Returns the string representation of the model"""
return pprint.pformat(self.dict(by_alias=by_alias))
def to_json(self, by_alias: bool = False) -> str:
"""Returns the JSON representation of the model"""
return json.dumps(self.to_dict(by_alias=by_alias))
@classmethod
def from_json(cls, json_str: str) -> JobStatusData:
"""Create an instance of JobStatusData from a JSON string"""
return cls.from_dict(json.loads(json_str))
def to_dict(self, by_alias: bool = False):
"""Returns the dictionary representation of the model"""
_dict = self.dict(by_alias=by_alias,
exclude={
},
exclude_none=True)
# override the default output from pydantic by calling `to_dict()` of meta
if self.meta:
_dict['meta' if by_alias else 'meta'] = self.meta.to_dict(by_alias=by_alias)
# override the default output from pydantic by calling `to_dict()` of result
if self.result:
_dict['result' if by_alias else 'result'] = self.result.to_dict(by_alias=by_alias)
return _dict
@classmethod
def from_dict(cls, obj: dict) -> JobStatusData:
"""Create an instance of JobStatusData from a dict"""
if obj is None:
return None
if not isinstance(obj, dict):
return JobStatusData.parse_obj(obj)
# raise errors for additional fields in the input
for _key in obj.keys():
if _key not in cls.__properties:
raise ValueError("Error due to additional fields (not defined in JobStatusData) in the input: " + str(obj))
_obj = JobStatusData.parse_obj({
"id": obj.get("id"),
"dataset_id": obj.get("datasetId"),
"status": obj.get("status"),
"meta": JobStatusMeta.from_dict(obj.get("meta")) if obj.get("meta") is not None else None,
"wait_time_till_next_poll": obj.get("waitTimeTillNextPoll"),
"created_at": obj.get("createdAt"),
"last_modified_at": obj.get("lastModifiedAt"),
"finished_at": obj.get("finishedAt"),
"error": obj.get("error"),
"result": JobStatusDataResult.from_dict(obj.get("result")) if obj.get("result") is not None else None
})
return _obj
| null |
1,598 |
import sys
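# not_exit raises instead of terminating; presumably intended to be patched over
# sys.exit, since pivy may call exit() on failure (see the comment below).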
def not_exit():
raise RuntimeError("runtime error in PiVy")
def get_writer(parent):
class PivyWriter(parent):
def __init__(self):
parent.__init__(self, "pivy")
self._setup_pivy()
def _setup_pivy(self):
import pivy.sogui
import pivy.coin
import os
self.pivy = pivy
# actually, pivy seems to call exit on failure. Bah.
if "DISPLAY" not in os.environ.keys():
raise RuntimeError("No display variable found")
myWindow = pivy.sogui.SoGui.init(sys.argv[0])
if myWindow is None:
raise RuntimeError("Can't open PiVy window.")
self.window = myWindow
print("scene")
scene = pivy.coin.SoSeparator()
# Create a viewer in which to see our scene graph.
viewer = pivy.sogui.SoGuiExaminerViewer(myWindow)
# Put our scene into viewer, change the title
viewer.setSceneGraph(scene)
print("title")
viewer.setTitle("IMP")
print("show")
viewer.show()
self.root = scene
self.viewer = viewer
def METHOD_NAME(self, parent, c):
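            # Attach a material with diffuse color c to the given scene-graph node.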
color = self.pivy.coin.SoMFColor()
color.setValue(c.get_red(), c.get_green(), c.get_blue())
myMaterial = self.pivy.coin.SoMaterial()
myMaterial.diffuseColor.setValue(color)
parent.addChild(myMaterial)
def handle_sphere(self, s, c, n):
sep = self.pivy.coin.SoSeparator()
self.root.addChild(sep)
s.set_was_used(True)
self.METHOD_NAME(sep, c)
tr = self.pivy.coin.SoTransform()
tr.translation.setValue(s.get_geometry().get_center()[0],
s.get_geometry().get_center()[1],
s.get_geometry().get_center()[2])
tr.scaleFactor.setValue(s.get_geometry().get_radius(),
s.get_geometry().get_radius(),
s.get_geometry().get_radius())
sep.addChild(tr)
sphere = self.pivy.coin.SoSphere()
sep.addChild(sphere)
return True
def handle_label(self, s, c, n):
txt = s.get_text()
loc = s.get_location()
s.set_was_used(True)
sep = self.pivy.coin.SoSeparator()
self.METHOD_NAME(sep, c)
tr = self.pivy.coin.SoTransform()
tr.translation.setValue(
loc.get_center()[0],
loc.get_center()[1],
loc.get_center()[2])
sep.addChild(tr)
to = self.pivy.coin.SoText2()
to.string.setValue(txt)
sep.addChild(to)
return True
def handle_cylinder(self, s, c, n):
s.set_was_used(True)
sep = self.pivy.coin.SoSeparator()
self.root.addChild(sep)
self.METHOD_NAME(sep, c)
tr = self.pivy.coin.SoTransform()
cyl = s.get_geometry()
seg = cyl.get_segment()
center = seg.get_middle_point()
#tr.translation.setValue(self.pivy.coin.SbVec3f(0, seg.get_length()/2, 0))
tr.translation.setValue(
self.pivy.coin.SbVec3f(center[0], center[1],
center[2]))
#tr.center.setValue(self.pivy.coin.SbVec3f(center[0], center[1], center[2]))
uv = seg.get_direction().get_unit_vector()
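            # Rotate the cylinder from its default Y axis onto the segment's direction.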
tr.rotation.setValue(
self.pivy.coin.SbRotation(self.pivy.coin.SbVec3f(0, 1, 0),
self.pivy.coin.SbVec3f(uv[0], uv[1], uv[2])))
# print "tr", tr.translation.getValue()[0], tr.translation.getValue()[1], tr.translation.getValue()[2]
# print "scale", tr.scaleFactor.getValue()[0], tr.scaleFactor.getValue()[1], tr.scaleFactor.getValue()[2]
# tr.rotation.getValue()
sep.addChild(tr)
            cylinder = self.pivy.coin.SoCylinder()
            cylinder.radius.setValue(cyl.get_radius())
            cylinder.height.setValue(cyl.get_segment().get_length())
            # cylinder.addPart(self.pivy.coin.SoCylinder.ALL)
            sep.addChild(cylinder)
return True
def show(self):
self.set_was_used(True)
self.pivy.sogui.SoGui.show(self.window) # Display main window
self.pivy.sogui.SoGui.mainLoop()
try:
import pivy.sogui
    except:  # deliberately broad: importing pivy can fail in odd ways (it may even call exit)
return None
return PivyWriter
| null |
1,599 |
"""Unit Tests for the MPA Dataset Pipelines Transforms Augments."""
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import random
import numpy as np
import pytest
from PIL import Image
from otx.algorithms.classification.adapters.mmcls.datasets.pipelines.transforms.augmix import (
AugMixAugment,
OpsFabric,
)
from otx.algorithms.common.adapters.mmcv.pipelines.transforms.augments import (
CythonAugments,
)
@pytest.fixture
def ops_fabric() -> OpsFabric:
return OpsFabric("Rotate", 5, {"img_mean": 128})
@pytest.mark.xfail(reason="random may not return the same value on different machines.")
class TestOpsFabric:
def test_init(self, ops_fabric: OpsFabric) -> None:
"""Test OpsFabric initialization."""
assert ops_fabric.prob == 1.0
assert ops_fabric.hparams == {"img_mean": 128}
assert ops_fabric.aug_kwargs == {
"fillcolor": 128,
"resample": (Image.BILINEAR, Image.BICUBIC),
}
assert ops_fabric.aug_factory.magnitude == 5
assert ops_fabric.aug_factory.magnitude_std == float("inf")
assert ops_fabric.aug_factory.level_fn == ops_fabric._rotate_level_to_arg
assert ops_fabric.aug_factory.aug_fn == CythonAugments.rotate
def test_randomly_negate(self) -> None:
"""Test randomly_negate function."""
random.seed(1234)
assert OpsFabric.randomly_negate(5) == -5
assert OpsFabric.randomly_negate(5) == 5
assert OpsFabric.randomly_negate(5) == -5
def test_rotate_level_to_arg(self, ops_fabric: OpsFabric) -> None:
"""Test rotate_level_to_arg function."""
assert ops_fabric._rotate_level_to_arg(0, ops_fabric.hparams) == (0,)
assert ops_fabric._rotate_level_to_arg(5, ops_fabric.hparams) == (5 / 10 * 30,)
def test_enhance_increasing_level_to_arg(self, ops_fabric: OpsFabric) -> None:
"""Test enhance_increasing_level_to_arg function."""
assert ops_fabric._enhance_increasing_level_to_arg(0, ops_fabric.hparams) == (1.0,)
assert ops_fabric._enhance_increasing_level_to_arg(5, ops_fabric.hparams) == (1.0 + 5 / 10 * 0.9,)
def test_shear_level_to_arg(self, ops_fabric: OpsFabric) -> None:
"""Test shear_level_to_arg function."""
assert ops_fabric._shear_level_to_arg(0, ops_fabric.hparams) == (0,)
assert ops_fabric._shear_level_to_arg(5, ops_fabric.hparams) == (5 / 10 * 0.3,)
def test_translate_rel_level_to_arg(self, ops_fabric: OpsFabric) -> None:
"""Test translate_rel_level_to_arg function."""
assert ops_fabric._translate_rel_level_to_arg(0, ops_fabric.hparams) == (0,)
assert ops_fabric._translate_rel_level_to_arg(5, {"translate_pct": 0.5}) == (5 / 10 * 0.5,)
def test_posterize_increasing_level_to_arg(self, ops_fabric: OpsFabric) -> None:
"""Test posterize_increasing_level_to_arg function."""
assert ops_fabric._posterize_increasing_level_to_arg(0, ops_fabric.hparams) == (4,)
assert ops_fabric._posterize_increasing_level_to_arg(5, ops_fabric.hparams) == (4 - int(5 / 10 * 4),)
def test_solarize_increasing_level_to_arg(self, ops_fabric: OpsFabric) -> None:
"""Test solarize_increasing_level_to_arg function."""
assert ops_fabric._solarize_increasing_level_to_arg(0, ops_fabric.hparams) == (0,)
assert ops_fabric._solarize_increasing_level_to_arg(5, ops_fabric.hparams) == (256 - int(5 / 10 * 256),)
def test_call(self, ops_fabric: OpsFabric) -> None:
"""Test __call__ function."""
img = Image.new("RGB", (256, 256))
transformed_img = ops_fabric(img)
assert transformed_img != img # make sure the image was actually transformed
class TestAugMixAugment:
def test_init(self) -> None:
"""Test AugMixAugment initialization."""
aug_mix_augment = AugMixAugment(config_str="augmix-m5-w3")
assert isinstance(aug_mix_augment, AugMixAugment)
assert len(aug_mix_augment.ops) > 0
def test_apply_basic(self) -> None:
"""Test _apply_basic function."""
aug_mix_augment = AugMixAugment(config_str="augmix-m5-w3")
img = Image.new("RGB", (224, 224), color=(255, 0, 0))
mixing_weights = np.float32(np.random.dirichlet([aug_mix_augment.alpha] * aug_mix_augment.width))
m = np.float32(np.random.beta(aug_mix_augment.alpha, aug_mix_augment.alpha))
mixed_img = aug_mix_augment._apply_basic(img, mixing_weights, m)
assert isinstance(mixed_img, Image.Image)
def METHOD_NAME(self) -> None:
"""Test augmix_ops function."""
aug_mix_augment = AugMixAugment(config_str="augmix-m5-w3")
assert len(aug_mix_augment.ops) > 0
assert isinstance(aug_mix_augment.alpha, float)
assert isinstance(aug_mix_augment.width, int)
assert isinstance(aug_mix_augment.depth, int)
def test_call(self) -> None:
"""Test __call__ method."""
aug_mix_augment = AugMixAugment(config_str="augmix-m5-w3")
data = {"img": np.random.randint(0, 255, size=(224, 224, 3)).astype(np.uint8)}
results = aug_mix_augment(data)
assert "augmix" in results
assert isinstance(results["img"], Image.Image)
| null |