id (int64, 0–300k) | label (string, 1–74 chars, ⌀ = null) | text (string, 4k–8k chars) |
---|---|---|
3,800 |
flush
|
import collections
import logging
import pickle
import string
import sys
from collections import OrderedDict
from gzip import GzipFile
from io import BytesIO
from prettytable import PrettyTable
from datetime import datetime
from typing import Any, IO, Dict
from .config import get_group
from .enums import StateLists
logger = logging.getLogger(__name__)
consts = get_group("core")
consts.add(
"compress_states",
default=True,
description="Seamlessly compress state files on disk. Reduces space usage and improves performance on slow disks, "
"at the cost of some slight [de]compression overhead.",
)
def interval_intersection(min1, max1, min2, max2):
"""
Given two intervals, (min1, max1) and (min2, max2) return their intersecting interval,
or None if they do not overlap.
"""
left, right = max(min1, min2), min(max1, max2)
if left < right:
return left, right
return None
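# Illustrative check (sketch, not part of the original module): the helper is
# symmetric in its two intervals and returns None when they are disjoint or
# merely touch at an endpoint, since a zero-width overlap fails `left < right`.
assert interval_intersection(0, 10, 5, 20) == (5, 10)
assert interval_intersection(0, 3, 3, 20) is None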
def printable_bytes(bytes: bytes):
return "".join([c for c in map(chr, bytes) if c in string.printable])
class CacheDict(OrderedDict):
def __init__(self, *args, max_size=30000, flush_perc=30, **kwargs):
self._max_size = max_size
self._purge_percent = flush_perc * 0.01
self._misses = 0
self._hits = 0
self._flushes = 0
super().__init__(*args, **kwargs)
def __del__(self):
try:
log = logging.getLogger(self.__class__.__name__)
log.debug(
f"DictCache: hits: {self._hits}, misses: {self._misses}, flushes: {self._flushes}, size: {self.__len__()}"
)
except TypeError:
# Prevent "TypeError: attribute of type 'NoneType' is not callable" on line 32
# TODO - figure out why this happens (I think it's only on concrete runs?)
pass
def __setitem__(self, key, value):
if len(self) > self._max_size:
self.METHOD_NAME()
return super().__setitem__(key, value)
def __contains__(self, item):
x = super().__contains__(item)
if x:
self._hits += 1
else:
self._misses += 1
return x
def METHOD_NAME(self):
self._flushes += 1
purge_count = int(len(self) * self._purge_percent)
for i in range(purge_count):
self.popitem(last=False)
self._hits -= purge_count
class StateSerializer:
"""
StateSerializer can serialize and deserialize :class:`~manticore.core.state.State` objects from and to
stream-like objects.
"""
def __init__(self):
pass
def serialize(self, state, f):
raise NotImplementedError
def deserialize(self, f):
raise NotImplementedError
class PickleSerializer(StateSerializer):
"""
A StateSerializer that uses a gzip-based Python pickle format.
"""
    DEFAULT_RECURSION: int = 0x10000  # 64K
MAX_RECURSION: int = 0x1000000 # 16.7M
COMPRESSION_LEVEL: int = 1 # minimal compression, but still gets >10x reduction
def __init__(self):
super().__init__()
sys.setrecursionlimit(PickleSerializer.DEFAULT_RECURSION)
def serialize(self, state, f):
logger.info("Serializing %s", f.name if hasattr(f, "name") else "<unknown>")
try:
pickle_dump(
state,
GzipFile(fileobj=f, mode="wb", compresslevel=PickleSerializer.COMPRESSION_LEVEL)
if consts.compress_states
else f,
)
except RuntimeError:
new_limit = sys.getrecursionlimit() * 2
if new_limit > PickleSerializer.MAX_RECURSION:
raise Exception(
f"PickleSerializer recursion limit surpassed {PickleSerializer.MAX_RECURSION}, aborting"
)
logger.info(f"Recursion soft limit {sys.getrecursionlimit()} hit, increasing")
sys.setrecursionlimit(new_limit)
self.serialize(state, f)
def deserialize(self, f):
logger.info("Deserializing %s", f.name if hasattr(f, "name") else "<unknown>")
return pickle.load(GzipFile(fileobj=f, mode="rb") if consts.compress_states else f)
def pickle_dumps(obj: Any) -> bytes:
"""
    Serializes an object as a pickle (compression, if any, is applied by the caller).
"""
# This consolidates pickling in one place so we can fix the protocol version
fp = BytesIO()
pickle_dump(obj, fp)
return fp.getvalue()
def pickle_dump(obj: Any, fp: IO[bytes]) -> None:
"""
    Serializes an object as a pickle to the given file (compression, if any, is applied by the caller).
"""
return pickle.dump(obj, fp, protocol=pickle.HIGHEST_PROTOCOL)
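# Illustrative round-trip (sketch, not part of the original module):
# pickle_dumps/pickle_dump only pin the pickle protocol; gzip compression,
# when enabled, is layered on by the caller via GzipFile (see PickleSerializer).
assert pickle.loads(pickle_dumps({"x": 1})) == {"x": 1}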
def pretty_print_state_descriptors(desc: Dict):
"""
Given a dict of state descriptors, nicely formats and prints it to stdout.
    :param desc: Dict mapping state IDs to State Descriptors, like the one returned from ManticoreBase.introspect
"""
descriptors = desc.values()
nready, nbusy, nkill, nterm = 0, 0, 0, 0
for st in descriptors:
nready += 1 if (st.state_list == StateLists.ready) else 0
nbusy += 1 if (st.state_list == StateLists.busy) else 0
        nkill += 1 if (st.state_list == StateLists.killed) else 0
nterm += 1 if (st.state_list == StateLists.terminated) else 0
print(
"Ready States:",
nready,
" | ",
"Busy States:",
nbusy,
" | ",
"Terminated States:",
nterm,
" | ",
"Killed States:",
nkill,
)
tab = PrettyTable()
tab.field_names = ["ID", "Status", "Duration", "Execs", "Execs/Sec"]
if nbusy:
now = datetime.now()
for st in descriptors:
if st.state_list == StateLists.busy:
duration = (
now - st.field_updated_at["state_list"]
) # Time since this state became Busy
execs = st.own_execs if st.own_execs is not None else 0
tab.add_row(
[
st.state_id,
st.status.value,
str(duration)[:-4],
execs,
"{:.2f}".format(execs / (now - st.created_at).total_seconds()),
]
)
print(tab)
print()
class deque(collections.deque):
"""A wrapper around collections.deque that adds a few APIs present in SyncManager.Queue"""
def empty(self) -> bool:
return len(self) == 0
def get(self):
return self.popleft()
|
3,801 |
save load
|
"""
Insert minion return data into a sqlite3 database
:maintainer: Mickey Malone <[email protected]>
:maturity: New
:depends: None
:platform: All
Sqlite3 is a serverless database that lives in a single file.
In order to use this returner the database file must exist,
have the appropriate schema defined, and be accessible to the
user whom the minion process is running as. This returner
requires the following values configured in the master or
minion config:
.. code-block:: yaml
sqlite3.database: /usr/lib/salt/salt.db
sqlite3.timeout: 5.0
Alternative configuration values can be used by prefacing the configuration keys with 'alternative.'.
Any values not found in the alternative configuration will be pulled from
the default location:
.. code-block:: yaml
alternative.sqlite3.database: /usr/lib/salt/salt.db
alternative.sqlite3.timeout: 5.0
Use the following commands to create the sqlite3 database and tables:
.. code-block:: sql
sqlite3 /usr/lib/salt/salt.db << EOF
--
-- Table structure for table 'jids'
--
CREATE TABLE jids (
jid TEXT PRIMARY KEY,
load TEXT NOT NULL
);
--
-- Table structure for table 'salt_returns'
--
CREATE TABLE salt_returns (
fun TEXT KEY,
jid TEXT KEY,
id TEXT KEY,
fun_args TEXT,
date TEXT NOT NULL,
full_ret TEXT NOT NULL,
success TEXT NOT NULL
);
EOF
To use the sqlite returner, append '--return sqlite3' to the salt command.
.. code-block:: bash
salt '*' test.ping --return sqlite3
To use the alternative configuration, append '--return_config alternative' to the salt command.
.. versionadded:: 2015.5.0
.. code-block:: bash
salt '*' test.ping --return sqlite3 --return_config alternative
To override individual configuration items, append --return_kwargs '{"key": "value"}' to the salt command.
.. versionadded:: 2016.3.0
.. code-block:: bash
salt '*' test.ping --return sqlite3 --return_kwargs '{"db": "/var/lib/salt/another-salt.db"}'
"""
import datetime
import logging
import salt.returners
import salt.utils.jid
import salt.utils.json
# Better safe than sorry here. Even though sqlite3 ships with Python's
# standard library, guard the import.
try:
import sqlite3
HAS_SQLITE3 = True
except ImportError:
HAS_SQLITE3 = False
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = "sqlite3"
def __virtual__():
if not HAS_SQLITE3:
return False, "Could not import sqlite3 returner; sqlite3 is not installed."
return __virtualname__
def _get_options(ret=None):
"""
Get the SQLite3 options from salt.
"""
attrs = {"database": "database", "timeout": "timeout"}
_options = salt.returners.get_returner_options(
__virtualname__, ret, attrs, __salt__=__salt__, __opts__=__opts__
)
return _options
def _get_conn(ret=None):
"""
Return a sqlite3 database connection
"""
# Possible todo: support detect_types, isolation_level, check_same_thread,
# factory, cached_statements. Do we really need to though?
_options = _get_options(ret)
database = _options.get("database")
timeout = _options.get("timeout")
if not database:
raise Exception('sqlite3 config option "sqlite3.database" is missing')
if not timeout:
raise Exception('sqlite3 config option "sqlite3.timeout" is missing')
log.debug("Connecting the sqlite3 database: %s timeout: %s", database, timeout)
conn = sqlite3.connect(database, timeout=float(timeout))
return conn
def _close_conn(conn):
"""
Close the sqlite3 database connection
"""
log.debug("Closing the sqlite3 database connection")
conn.commit()
conn.close()
def returner(ret):
"""
Insert minion return data into the sqlite3 database
"""
log.debug("sqlite3 returner <returner> called with data: %s", ret)
conn = _get_conn(ret)
cur = conn.cursor()
sql = """INSERT INTO salt_returns
(fun, jid, id, fun_args, date, full_ret, success)
VALUES (:fun, :jid, :id, :fun_args, :date, :full_ret, :success)"""
cur.execute(
sql,
{
"fun": ret["fun"],
"jid": ret["jid"],
"id": ret["id"],
"fun_args": str(ret["fun_args"]) if ret.get("fun_args") else None,
"date": str(datetime.datetime.now()),
"full_ret": salt.utils.json.dumps(ret["return"]),
"success": ret.get("success", ""),
},
)
_close_conn(conn)
def METHOD_NAME(jid, load, minions=None):
"""
Save the load to the specified jid
"""
log.debug("sqlite3 returner <save_load> called jid: %s load: %s", jid, load)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = """INSERT INTO jids (jid, load) VALUES (:jid, :load)"""
cur.execute(sql, {"jid": jid, "load": salt.utils.json.dumps(load)})
_close_conn(conn)
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
"""
Included for API consistency
"""
def get_load(jid):
"""
Return the load from a specified jid
"""
log.debug("sqlite3 returner <get_load> called jid: %s", jid)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = """SELECT load FROM jids WHERE jid = :jid"""
cur.execute(sql, {"jid": jid})
    data = cur.fetchone()
    _close_conn(conn)
    if data:
        return salt.utils.json.loads(data[0].encode())
    return {}
def get_jid(jid):
"""
Return the information returned from a specified jid
"""
log.debug("sqlite3 returner <get_jid> called jid: %s", jid)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = """SELECT id, full_ret FROM salt_returns WHERE jid = :jid"""
cur.execute(sql, {"jid": jid})
data = cur.fetchone()
log.debug("query result: %s", data)
ret = {}
if data and len(data) > 1:
ret = {str(data[0]): {"return": salt.utils.json.loads(data[1])}}
log.debug("ret: %s", ret)
_close_conn(conn)
return ret
def get_fun(fun):
"""
Return a dict of the last function called for all minions
"""
log.debug("sqlite3 returner <get_fun> called fun: %s", fun)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = """SELECT s.id, s.full_ret, s.jid
FROM salt_returns s
JOIN ( SELECT MAX(jid) AS jid FROM salt_returns GROUP BY fun, id) max
ON s.jid = max.jid
WHERE s.fun = :fun
"""
cur.execute(sql, {"fun": fun})
data = cur.fetchall()
    ret = {}
    if data:
        # Each row is (id, full_ret, jid); the jid column is only needed for
        # the join above, so ignore it when building the result.
        for minion, full_ret, _jid in data:
            ret[minion] = salt.utils.json.loads(full_ret)
_close_conn(conn)
return ret
def get_jids():
"""
Return a list of all job ids
"""
log.debug("sqlite3 returner <get_jids> called")
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = """SELECT jid, load FROM jids"""
cur.execute(sql)
data = cur.fetchall()
ret = {}
for jid, load in data:
ret[jid] = salt.utils.jid.format_jid_instance(jid, salt.utils.json.loads(load))
_close_conn(conn)
return ret
def get_minions():
"""
Return a list of minions
"""
log.debug("sqlite3 returner <get_minions> called")
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = """SELECT DISTINCT id FROM salt_returns"""
cur.execute(sql)
data = cur.fetchall()
ret = []
for minion in data:
ret.append(minion[0])
_close_conn(conn)
return ret
def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument
"""
Do any work necessary to prepare a JID, including sending a custom id
"""
return passed_jid if passed_jid is not None else salt.utils.jid.gen_jid(__opts__)
|
3,802 |
trace
|
"""Module that adds tracing to pandas execution.
With tracing enabled, this module will log time and call stack information of
the executed expression. Call stack information is presented with indentation
level.
For example:
import pandas as pd
import logging
import ibis.expr.datatypes as dt
import ibis.backends.pandas
from ibis.legacy.udf.vectorized import elementwise
from ibis.backends.pandas import trace
logging.basicConfig()
trace.enable()
df = pd.DataFrame(
{
'a': [1, 2, 3]
}
)
con = ibis.pandas.connect({"table1": df})
@elementwise(
input_type=[dt.double],
output_type=dt.double
)
def add_one(v):
import time
time.sleep(5)
return v + 1
table = con.table("table1")
table = table.mutate(b=add_one(table['a']))
table.execute()
Output:
DEBUG:ibis.backends.pandas.trace: main_execute Selection
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope Selection
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope PandasTable
DEBUG:ibis.backends.pandas.trace: execute_database_table_client PandasTable
DEBUG:ibis.backends.pandas.trace: execute_database_table_client PandasTable 0:00:00.000085
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope PandasTable 0:00:00.000362
DEBUG:ibis.backends.pandas.trace: execute_selection_dataframe Selection
DEBUG:ibis.backends.pandas.trace: main_execute ElementWiseVectorizedUDF
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope ElementWiseVectorizedUDF
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope TableColumn
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope PandasTable
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope PandasTable 0:00:00.000061
DEBUG:ibis.backends.pandas.trace: execute_table_column_df_or_df_groupby TableColumn
DEBUG:ibis.backends.pandas.trace: execute_table_column_df_or_df_groupby TableColumn 0:00:00.000304 # noqa: E501
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope TableColumn 0:00:00.000584
DEBUG:ibis.backends.pandas.trace: execute_udf_node ElementWiseVectorizedUDF
DEBUG:ibis.backends.pandas.trace: execute_udf_node ElementWiseVectorizedUDF 0:00:05.019173
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope ElementWiseVectorizedUDF 0:00:05.052604 # noqa: E501
DEBUG:ibis.backends.pandas.trace: main_execute ElementWiseVectorizedUDF 0:00:05.052819
DEBUG:ibis.backends.pandas.trace: execute_selection_dataframe Selection 0:00:05.054894
DEBUG:ibis.backends.pandas.trace: execute_until_in_scope Selection 0:00:05.055662
DEBUG:ibis.backends.pandas.trace: main_execute Selection 0:00:05.056556
"""
from __future__ import annotations
import functools
import logging
import traceback
from datetime import datetime
import ibis
from ibis.backends.pandas.dispatcher import TwoLevelDispatcher
from ibis.config import options
from ibis.expr import types as ir
_logger = logging.getLogger("ibis.backends.pandas.trace")
# The set of function names that are traced
_trace_funcs = set()
def enable():
"""Enable tracing."""
if options.pandas is None:
# pandas options haven't been registered yet - force module __getattr__
ibis.pandas # noqa: B018
options.pandas.enable_trace = True
logging.getLogger("ibis.backends.pandas.trace").setLevel(logging.DEBUG)
def _log_trace(func, start=None):
level = 0
current_frame = None
# Increase the current level for each traced function in the stackframe
# This way we can visualize the call stack.
for frame, _ in traceback.walk_stack(None):
current_frame = current_frame if current_frame is not None else frame
func_name = frame.f_code.co_name
if func_name in _trace_funcs:
level += 1
# We can assume we have 'args' because we only call _log_trace inside
# trace or TraceDispatcher.register
current_op = current_frame.f_locals["args"][0]
    # If the first argument is an Expr, we print its op because it's more
# informative.
if isinstance(current_op, ir.Expr):
current_op = current_op.op()
_logger.debug(
"%s %s %s %s",
" " * level,
func.__name__,
type(current_op).__qualname__,
f"{datetime.now() - start}" if start else "",
)
def METHOD_NAME(func):
"""Return a function decorator that wraps `func` with tracing."""
_trace_funcs.add(func.__name__)
@functools.wraps(func)
def traced_func(*args, **kwargs):
# Unfortunately, this function can be called before the `ibis.pandas`
# attribute has ever been accessed, which means the trace configuration
# option might never get registered and will raise an error. Accessing
# the pandas attribute here forces the option initialization
import ibis
ibis.pandas # noqa: B018
if not options.pandas.enable_trace:
return func(*args, **kwargs)
else:
start = datetime.now()
_log_trace(func)
res = func(*args, **kwargs)
_log_trace(func, start)
return res
return traced_func
class TraceTwoLevelDispatcher(TwoLevelDispatcher):
"""A Dispatcher that also wraps the registered function with tracing."""
def __init__(self, name, doc=None):
super().__init__(name, doc)
def register(self, *types, **kwargs):
"""Register a function with this Dispatcher.
The function will also be wrapped with tracing information.
"""
def _(func):
trace_func = METHOD_NAME(func)
TwoLevelDispatcher.register(self, *types, **kwargs)(trace_func)
            # Return func instead of trace_func here so that chained
            # register calls don't get wrapped multiple times.
return func
return _
|
3,803 |
label
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from typing import Any, Callable, Dict, List, Optional
import numpy as np
import torch
from torch import Tensor
from torch.utils.data import DataLoader, Dataset, random_split
from flash.core.data.data_module import DataModule
from flash.core.data.io.input import InputBase
from flash.core.data.io.input_transform import create_worker_input_transform_processor
from flash.core.utilities.imports import _BAAL_AVAILABLE, requires
from flash.core.utilities.stages import RunningStage
if _BAAL_AVAILABLE:
from baal.active.dataset import ActiveLearningDataset
from baal.active.heuristics import BALD, AbstractHeuristic
else:
class AbstractHeuristic:
pass
class BALD(AbstractHeuristic):
pass
def dataset_to_non_labelled_tensor(dataset: InputBase) -> torch.tensor:
return np.zeros(len(dataset))
def filter_unlabelled_data(dataset: InputBase) -> Dataset:
return dataset
def train_val_split(dataset: Dataset, val_size: float = 0.1):
L = len(dataset)
train_size = int(L * (1 - val_size))
val_size = L - train_size
return random_split(dataset, [train_size, val_size], generator=torch.Generator().manual_seed(42))
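# Illustrative split (sketch, not part of the original module): with the
# default val_size of 0.1, a 100-item dataset is split 90/10 with a fixed
# seed, so repeated calls return the same partition.
#   >>> train, val = train_val_split(list(range(100)))
#   >>> len(train), len(val)
#   (90, 10)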
class ActiveLearningDataModule(DataModule):
@requires("baal")
def __init__(
self,
labelled: Optional[DataModule] = None,
heuristic: "AbstractHeuristic" = BALD(),
map_dataset_to_labelled: Optional[Callable] = dataset_to_non_labelled_tensor,
filter_unlabelled_data: Optional[Callable] = filter_unlabelled_data,
initial_num_labels: Optional[int] = None,
query_size: int = 1,
val_split: Optional[float] = None,
):
"""The `ActiveLearningDataModule` handles data manipulation for ActiveLearning.
Args:
labelled: DataModule containing labelled train data for research use-case.
The labelled data would be masked.
heuristic: Sorting algorithm used to rank samples on how likely they can help with model performance.
map_dataset_to_labelled: Function used to emulate masking on labelled dataset.
filter_unlabelled_data: Function used to filter the unlabelled data while computing uncertainties.
initial_num_labels: Number of samples to randomly label to start the training with.
query_size: Number of samples to be labelled at each Active Learning loop based on the fed heuristic.
val_split: Float to split train dataset into train and validation set.
"""
super().__init__(batch_size=1)
self.labelled = labelled
self.heuristic = heuristic
self.map_dataset_to_labelled = map_dataset_to_labelled
self.filter_unlabelled_data = filter_unlabelled_data
self.initial_num_labels = initial_num_labels
self.query_size = query_size
self.val_split = val_split
self._dataset: Optional[ActiveLearningDataset] = None
if not self.labelled:
raise TypeError("The labelled `datamodule` should be provided.")
if not self.labelled.num_classes:
raise TypeError("The labelled dataset should be labelled")
if self.labelled and (self.labelled._val_input or self.labelled._predict_input):
raise TypeError("The labelled `datamodule` should have only train data.")
self._dataset = ActiveLearningDataset(
self.labelled._train_input, labelled=self.map_dataset_to_labelled(self.labelled._train_input)
)
if not self.val_split or not self.has_labelled_data:
self.val_dataloader = None
elif self.val_split < 0 or self.val_split > 1:
raise ValueError("The `val_split` should a float between 0 and 1.")
if self.labelled._test_input:
self.test_dataloader = self._test_dataloader
if hasattr(self.labelled, "on_after_batch_transfer"):
self.on_after_batch_transfer = self.labelled.on_after_batch_transfer
if not self.initial_num_labels:
warnings.warn(
"No labels provided for the initial step," "the estimated uncertainties are unreliable!", UserWarning
)
else:
self._dataset.label_randomly(self.initial_num_labels)
@property
def has_test(self) -> bool:
return bool(self.labelled._test_input)
@property
def has_labelled_data(self) -> bool:
return self._dataset.n_labelled > 0
@property
def has_unlabelled_data(self) -> bool:
return self._dataset.n_unlabelled > 0
@property
def num_classes(self) -> Optional[int]:
return getattr(self.labelled, "num_classes", None) or getattr(self.unlabelled, "num_classes", None)
def train_dataloader(self) -> "DataLoader":
if self.val_split:
self.labelled._train_input = train_val_split(self._dataset, self.val_split)[0]
else:
self.labelled._train_input = self._dataset
if self.has_labelled_data and self.val_split:
self.val_dataloader = self._val_dataloader
if self.has_labelled_data:
return self.labelled.train_dataloader()
# Return a dummy dataloader, will be replaced by the loop
return DataLoader(["dummy"])
def _val_dataloader(self) -> "DataLoader":
self.labelled._val_input = train_val_split(self._dataset, self.val_split)[1]
dataloader = self.labelled._val_dataloader()
dataloader.collate_fn = create_worker_input_transform_processor(
RunningStage.TRAINING, self.labelled.input_transform
)
return dataloader
def _test_dataloader(self) -> "DataLoader":
return self.labelled.test_dataloader()
def predict_dataloader(self) -> "DataLoader":
self.labelled._predict_input = self.filter_unlabelled_data(self._dataset.pool)
dataloader = self.labelled._predict_dataloader()
dataloader.collate_fn = create_worker_input_transform_processor(
RunningStage.TRAINING, self.labelled.input_transform
)
return dataloader
def on_after_batch_transfer(self, batch: Any, dataloader_idx: int) -> Any:
current_stage = self.trainer.state.stage
if current_stage == RunningStage.VALIDATING or current_stage == RunningStage.PREDICTING:
self.trainer.state.stage = RunningStage.TRAINING
batch = super().on_after_batch_transfer(batch, dataloader_idx)
self.trainer.state.stage = current_stage
return batch
def METHOD_NAME(self, probabilities: List[Tensor] = None, indices=None):
if probabilities is not None and indices:
            raise RuntimeError("The `probabilities` and `indices` are mutually exclusive, pass only one of them.")
if probabilities is not None and len(probabilities) != 0:
probabilities = torch.cat([p[0].unsqueeze(0) for p in probabilities], dim=0)
uncertainties = self.heuristic.get_uncertainties(probabilities)
indices = np.argsort(uncertainties)
if self._dataset is not None:
self._dataset.METHOD_NAME(indices[-self.query_size :])
def state_dict(self) -> Dict[str, Tensor]:
return self._dataset.state_dict()
def load_state_dict(self, state_dict) -> None:
return self._dataset.load_state_dict(state_dict)
|
3,804 |
make random ou id
|
import re
import string
from moto.moto_api._internal import mock_random as random
from typing import Pattern, Union
MASTER_ACCOUNT_EMAIL = "[email protected]"
DEFAULT_POLICY_ID = "p-FullAWSAccess"
ORGANIZATION_ARN_FORMAT = "arn:aws:organizations::{0}:organization/{1}"
MASTER_ACCOUNT_ARN_FORMAT = "arn:aws:organizations::{0}:account/{1}/{0}"
ACCOUNT_ARN_FORMAT = "arn:aws:organizations::{0}:account/{1}/{2}"
ROOT_ARN_FORMAT = "arn:aws:organizations::{0}:root/{1}/{2}"
OU_ARN_FORMAT = "arn:aws:organizations::{0}:ou/{1}/{2}"
SCP_ARN_FORMAT = "arn:aws:organizations::{0}:policy/{1}/service_control_policy/{2}"
AI_POLICY_ARN_FORMAT = (
"arn:aws:organizations::{0}:policy/{1}/aiservices_opt_out_policy/{2}"
)
CHARSET = string.ascii_lowercase + string.digits
ORG_ID_SIZE = 10
ROOT_ID_SIZE = 4
ACCOUNT_ID_SIZE = 12
OU_ID_SUFFIX_SIZE = 8
CREATE_ACCOUNT_STATUS_ID_SIZE = 8
POLICY_ID_SIZE = 8
EMAIL_REGEX = "^.+@[a-zA-Z0-9-.]+.[a-zA-Z]{2,3}|[0-9]{1,3}$"
ORG_ID_REGEX = rf"o-[a-z0-9]{{{ORG_ID_SIZE}}}"
ROOT_ID_REGEX = rf"r-[a-z0-9]{{{ROOT_ID_SIZE}}}"
OU_ID_REGEX = rf"ou-[a-z0-9]{{{ROOT_ID_SIZE}}}-[a-z0-9]{{{OU_ID_SUFFIX_SIZE}}}"
ACCOUNT_ID_REGEX = rf"[0-9]{{{ACCOUNT_ID_SIZE}}}"
CREATE_ACCOUNT_STATUS_ID_REGEX = rf"car-[a-z0-9]{{{CREATE_ACCOUNT_STATUS_ID_SIZE}}}"
POLICY_ID_REGEX = rf"{DEFAULT_POLICY_ID}|p-[a-z0-9]{{{POLICY_ID_SIZE}}}"
PAGINATION_MODEL = {
"list_accounts": {
"input_token": "next_token",
"limit_key": "max_results",
"limit_default": 100,
"result_key": "Accounts",
"unique_attribute": "JoinedTimestamp",
},
"list_accounts_for_parent": {
"input_token": "next_token",
"limit_key": "max_results",
"limit_default": 20,
"result_key": "Accounts",
"unique_attribute": "JoinedTimestamp",
},
"list_organizational_units_for_parent": {
"input_token": "next_token",
"limit_key": "max_results",
"limit_default": 20,
"result_key": "OrganizationalUnits",
"unique_attribute": "Id",
},
}
def make_random_org_id() -> str:
# The regex pattern for an organization ID string requires "o-"
# followed by from 10 to 32 lower-case letters or digits.
# e.g. 'o-vipjnq5z86'
return "o-" + "".join(random.choice(CHARSET) for x in range(ORG_ID_SIZE))
def make_random_root_id() -> str:
# The regex pattern for a root ID string requires "r-" followed by
# from 4 to 32 lower-case letters or digits.
# e.g. 'r-3zwx'
return "r-" + "".join(random.choice(CHARSET) for x in range(ROOT_ID_SIZE))
def METHOD_NAME(root_id: str) -> str:
# The regex pattern for an organizational unit ID string requires "ou-"
# followed by from 4 to 32 lower-case letters or digits (the ID of the root
# that contains the OU) followed by a second "-" dash and from 8 to 32
# additional lower-case letters or digits.
# e.g. ou-g8sd-5oe3bjaw
return "-".join(
[
"ou",
root_id.partition("-")[2],
"".join(random.choice(CHARSET) for x in range(OU_ID_SUFFIX_SIZE)),
]
)
def make_random_account_id() -> str:
# The regex pattern for an account ID string requires exactly 12 digits.
# e.g. '488633172133'
return "".join([random.choice(string.digits) for n in range(ACCOUNT_ID_SIZE)])
def make_random_create_account_status_id() -> str:
    # The regex pattern for a create account request ID string requires
# "car-" followed by from 8 to 32 lower-case letters or digits.
# e.g. 'car-35gxzwrp'
return "car-" + "".join(
random.choice(CHARSET) for x in range(CREATE_ACCOUNT_STATUS_ID_SIZE)
)
def make_random_policy_id() -> str:
# The regex pattern for a policy ID string requires "p-" followed by
# from 8 to 128 lower-case letters or digits.
# e.g. 'p-k2av4a8a'
return "p-" + "".join(random.choice(CHARSET) for x in range(POLICY_ID_SIZE))
def fullmatch(regex: Union[Pattern[str], str], s: str, flags: int = 0) -> bool:
"""Emulate python-3.4 re.fullmatch()."""
m = re.match(regex, s, flags=flags)
if m and m.span()[1] == len(s):
return True
return False
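# Illustrative check (sketch, not part of the original module): unlike
# re.match, the emulated fullmatch only succeeds when the pattern consumes
# the entire string.
assert fullmatch(r"[a-z0-9]+", "car35gxzwrp")
assert not fullmatch(r"[a-z0-9]+", "car-35gxzwrp")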
|
3,805 |
add feedback graphs
|
import logging
from django.utils.translation import gettext as _, ngettext
from utils.misc import reverse_tournament
from utils.tables import TabbycatTableBuilder
from .progress import FeedbackProgressForAdjudicator, FeedbackProgressForTeam
logger = logging.getLogger(__name__)
class FeedbackTableBuilder(TabbycatTableBuilder):
def add_breaking_checkbox(self, adjudicators, key="Breaking"):
breaking_header = {
'key': 'breaking',
'icon': 'award',
'tooltip': _("Whether the adj is marked as breaking (click to mark)"),
}
breaking_data = [{
'component': 'check-cell',
        'checked': adj.breaking,
'sort': adj.breaking,
'type': 'breaking',
'saveURL': reverse_tournament('adjfeedback-set-adj-breaking-status', self.tournament),
'id': adj.pk,
} for adj in adjudicators]
self.add_column(breaking_header, breaking_data)
@staticmethod
def get_formatted_adj_score(score, strong=False):
if score is None:
return _('N/A')
if strong is True:
return '<strong>%0.1f</strong>' % score
else:
return '%0.1f' % score
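    # Illustrative output (sketch, not part of the original class):
    #   get_formatted_adj_score(None)             -> 'N/A' (translated)
    #   get_formatted_adj_score(4.5)              -> '4.5'
    #   get_formatted_adj_score(4.5, strong=True) -> '<strong>4.5</strong>'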
def add_weighted_score_columns(self, adjudicators, scores):
overall_header = {
'key': 'score',
'icon': 'trending-up',
'tooltip': _("Current weighted score"),
}
overall_data = [{
'sort': scores[adj],
'text': self.get_formatted_adj_score(scores[adj], True),
'tooltip': _("This adjudicator's current rating."),
} for adj in adjudicators]
self.add_column(overall_header, overall_data)
def add_base_score_columns(self, adjudicators, editable=False):
test_header = {
'key': 'base-score',
'icon': 'file',
'tooltip': _("Base score result"),
}
if editable:
test_data = [{
'text': self.get_formatted_adj_score(adj.base_score),
'modal': adj.id,
'class': 'edit-base-score',
'tooltip': _("Click to edit base score"),
'sort': adj.base_score,
} for adj in adjudicators]
else:
test_data = [{
'text': self.get_formatted_adj_score(adj.base_score),
'tooltip': _("Assigned base score"),
'sort': adj.base_score,
} for adj in adjudicators]
self.add_column(test_header, test_data)
def add_score_difference_columns(self, adjudicators, scores):
diff_header = {
'key': 'score-difference',
'icon': 'maximize-2',
'tooltip': _("The current difference between an adjudicator's base score and current score"),
}
diff_data = [{
'text': self.get_formatted_adj_score(scores[adj] - adj.base_score),
'sort': scores[adj] - adj.base_score,
'tooltip': _("The difference between this adjudicator's base score and current score"),
} for adj in adjudicators]
self.add_column(diff_header, diff_data)
def add_score_variance_columns(self, adjudicators):
diff_header = {
'key': 'score-variance',
'icon': 'bar-chart-2',
'tooltip': _("The standard deviation of this adjudicator's current scores; with larger numbers meaning less consistent feedback scores."),
}
diff_data = [{
'text': '%0.1f' % adj.feedback_variance if adj.feedback_variance is not None else '',
'tooltip': _("The standard deviation of this adjudicator's current scores"),
} for adj in adjudicators]
self.add_column(diff_header, diff_data)
def METHOD_NAME(self, adjudicators):
nprelims = self.tournament.prelim_rounds().count()
feedback_head = {
'key': 'feedback',
'title': _('Feedback Per Round'),
'tooltip': _("Hover over the data points to show the average score received in that round"),
}
feedback_graph_data = [{
'graphData': adj.feedback_data,
'component': 'feedback-trend',
'minScore': self.tournament.pref('adj_min_score'),
'maxScore': self.tournament.pref('adj_max_score'),
'roundSeq': nprelims,
} for adj in adjudicators]
self.add_column(feedback_head, feedback_graph_data)
def add_feedback_link_columns(self, adjudicators):
link_head = {
'key': 'view-feedback',
'icon': 'eye',
}
link_cell = [{
'text': ngettext(
"View %(count)s<br>feedback",
"View %(count)s<br>feedbacks",
len(adj.feedback_data) - 1,
) % {'count': len(adj.feedback_data) - 1}, # -1 to account for base score
'class': 'view-feedback',
'sort': len(adj.feedback_data) - 1,
'link': reverse_tournament('adjfeedback-view-on-adjudicator', self.tournament, kwargs={'pk': adj.pk}),
} for adj in adjudicators]
self.add_column(link_head, link_cell)
def add_feedback_progress_columns(self, progress_list, key="P"):
def _owed_cell(progress):
owed = progress.num_unsubmitted()
cell = {
'text': owed,
'sort': owed,
'class': 'text-danger strong' if owed > 0 else 'text-success',
}
return cell
owed_header = {
'key': 'owed',
'icon': 'slash',
'tooltip': _("Unsubmitted feedback ballots"),
}
owed_data = [_owed_cell(progress) for progress in progress_list]
self.add_column(owed_header, owed_data)
if self._show_record_links:
def _record_link(progress):
if isinstance(progress, FeedbackProgressForTeam):
url_name = 'participants-team-record' if self.admin else 'participants-public-team-record'
pk = progress.team.pk
elif isinstance(progress, FeedbackProgressForAdjudicator):
url_name = 'participants-adjudicator-record' if self.admin else 'participants-public-adjudicator-record'
pk = progress.adjudicator.pk
else:
logger.error("Unrecognised progress type: %s", progress.__class__.__name__)
return ''
return reverse_tournament(url_name, self.tournament, kwargs={'pk': pk})
owed_link_header = {
'key': 'submitted',
'icon': 'check',
}
owed_link_data = [{
'text': _("View Missing Feedback"),
'link': _record_link(progress),
} for progress in progress_list]
self.add_column(owed_link_header, owed_link_data)
|
3,806 |
register huggingface local model config
|
from typing import Dict, Optional, Union
from dataclasses import dataclass
import re
import os
from helm.common.hierarchical_logger import hlog
from helm.proxy.models import (
Model,
ALL_MODELS,
MODEL_NAME_TO_MODEL,
TEXT_MODEL_TAG,
FULL_FUNCTIONALITY_TEXT_MODEL_TAG,
LOCAL_HUGGINGFACE_MODEL_TAG,
)
# The path where local HuggingFace models should be downloaded or symlinked, e.g. ./huggingface_models/llama-7b
LOCAL_HUGGINGFACE_MODEL_DIR = "huggingface_models"
@dataclass(frozen=True)
class HuggingFaceHubModelConfig:
namespace: Optional[str]
"""Name of the group or user that owns the model. e.g. 'stanford-crfm'
May be None if the model (e.g. gpt2) does not have a namespace."""
model_name: str
"""Name of the model. e.g. 'BioMedLM'
Does not include the namespace."""
revision: Optional[str]
"""Revision of the model to use e.g. 'main'.
If None, use the default revision."""
@property
def model_id(self) -> str:
"""Return the model ID.
Examples:
- 'gpt2'
- 'stanford-crfm/BioMedLM'"""
if self.namespace:
return f"{self.namespace}/{self.model_name}"
return self.model_name
def __str__(self) -> str:
"""Return the full model name used by HELM in the format "[namespace/]model_name[@revision]".
Examples:
- 'gpt2'
- 'stanford-crfm/BioMedLM'
- 'stanford-crfm/BioMedLM@main'"""
result = self.model_name
if self.namespace:
result = f"{self.namespace}/{result}"
if self.revision:
result = f"{result}@{self.revision}"
return result
@staticmethod
def from_string(raw: str) -> "HuggingFaceHubModelConfig":
"""Parses a string in the format "[namespace/]model_name[@revision]" to a HuggingFaceHubModelConfig.
Examples:
- 'gpt2'
- 'stanford-crfm/BioMedLM'
- 'stanford-crfm/BioMedLM@main'"""
pattern = r"((?P<namespace>[^/@]+)/)?(?P<model_name>[^/@]+)(@(?P<revision>[^/@]+))?"
match = re.fullmatch(pattern, raw)
if not match:
raise ValueError(f"Could not parse model name: '{raw}'; Expected format: [namespace/]model_name[@revision]")
model_name = match.group("model_name")
assert model_name
return HuggingFaceHubModelConfig(
namespace=match.group("namespace"), model_name=model_name, revision=match.group("revision")
)
@dataclass(frozen=True)
class HuggingFaceLocalModelConfig:
model_name: str
"""Name of the model. e.g. 'llama-7b'"""
path: str
"""Local path to the Hugging Face model weights.
For pre-registered local models that are already in _huggingface_model_registry below,
this will get set to LOCAL_HUGGINGFACE_MODEL_DIR by default.
Otherwise, this is specified using the flag --enable-local-huggingface-models <path>."""
@property
def model_id(self) -> str:
"""Return the model ID.
Examples:
- 'huggingface/llama-7b'"""
return f"huggingface/{self.model_name}"
def __str__(self) -> str:
"""Return the full model name used by HELM in the format "[namespace/]model_name[@revision]".
Local models don't have a revision and the namespace is set to huggingface.
Examples:
- 'huggingface/llama-7b'"""
return f"huggingface/{self.model_name}"
@staticmethod
def from_path(path: str) -> "HuggingFaceLocalModelConfig":
"""Generates a HuggingFaceHubModelConfig from a (relative or absolute) path to a local HuggingFace model."""
model_name = os.path.split(path)[-1]
return HuggingFaceLocalModelConfig(model_name=model_name, path=path)
HuggingFaceModelConfig = Union[HuggingFaceHubModelConfig, HuggingFaceLocalModelConfig]
# Initialize registry with local models from models.py
_huggingface_model_registry: Dict[str, HuggingFaceModelConfig] = {
model.name: HuggingFaceLocalModelConfig.from_path(os.path.join(LOCAL_HUGGINGFACE_MODEL_DIR, model.engine))
for model in ALL_MODELS
if LOCAL_HUGGINGFACE_MODEL_TAG in model.tags
}
def register_huggingface_hub_model_config(model_name: str) -> HuggingFaceHubModelConfig:
"""Register a AutoModelForCausalLM model from Hugging Face Model Hub for later use.
model_name format: namespace/model_name[@revision]"""
config = HuggingFaceHubModelConfig.from_string(model_name)
if config.model_id in _huggingface_model_registry:
raise ValueError(f"A Hugging Face model is already registered for model_id {model_name}")
_huggingface_model_registry[config.model_id] = config
# HELM model names require a namespace
if not config.namespace:
raise Exception("Registration of Hugging Face models without a namespace is not supported")
if config.model_id in MODEL_NAME_TO_MODEL:
raise ValueError(f"A HELM model is already registered for model name: {config.model_id}")
description = f"HuggingFace model {config.model_id}"
if config.revision:
description += f" at revision {config.revision}"
model = Model(
group=config.namespace,
name=model_name,
tags=[TEXT_MODEL_TAG, FULL_FUNCTIONALITY_TEXT_MODEL_TAG],
)
MODEL_NAME_TO_MODEL[config.model_id] = model
ALL_MODELS.append(model)
hlog(f"Registered Hugging Face model: {model} config: {config}")
return config
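# Illustrative usage (sketch; the model name below is just the example already
# used in the docstrings above, not something registered by default):
#   register_huggingface_hub_model_config("stanford-crfm/BioMedLM@main")
# registers a HELM Model with group "stanford-crfm" and makes the config
# retrievable via get_huggingface_model_config("stanford-crfm/BioMedLM").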
def METHOD_NAME(path: str) -> HuggingFaceLocalModelConfig:
"""Register a AutoModelForCausalLM model from a local directory for later use.
path: a path to your HF model"""
config = HuggingFaceLocalModelConfig.from_path(path)
if config.model_id in _huggingface_model_registry:
raise ValueError(f"A Hugging Face model is already registered for model_id {config.model_id}")
_huggingface_model_registry[config.model_id] = config
if config.model_name in MODEL_NAME_TO_MODEL:
raise ValueError(f"A HELM model is already registered for model name: {config.model_name}")
model = Model(
group="huggingface",
name=config.model_id,
tags=[TEXT_MODEL_TAG, FULL_FUNCTIONALITY_TEXT_MODEL_TAG, LOCAL_HUGGINGFACE_MODEL_TAG],
)
MODEL_NAME_TO_MODEL[config.model_id] = model
ALL_MODELS.append(model)
hlog(f"Registered Hugging Face model: {model} config: {config}")
return config
def get_huggingface_model_config(model_name: str) -> Optional[HuggingFaceModelConfig]:
"""Returns a HuggingFaceModelConfig for the model_id."""
return _huggingface_model_registry.get(model_name)
|
3,807 |
format relationship
|
"""Formatting of UML elements like attributes, operations, stereotypes, etc."""
import re
from typing import Tuple
from gaphor.core.format import format
from gaphor.i18n import gettext
from gaphor.UML import uml as UML
# Do not render if the name still contains a visibility element
no_render_pat = re.compile(r"^\s*[+#-]", re.MULTILINE | re.S)
vis_map = {"public": "+", "protected": "#", "package": "~", "private": "-"}
@format.register(UML.Property)
def format_property(
el,
visibility=False,
is_derived=False,
type=False,
multiplicity=False,
default=False,
tags=False,
note=False,
):
"""Create an OCL representation of the attribute, Returns the attribute as
a string. If one or more of the parameters (visibility, is_derived, type,
multiplicity, default and/or tags) is set, only that field is rendered.
Note that the name of the attribute is always rendered, so a parseable
string is returned.
Note that, when some of those parameters are set, parsing the string
will not give you the same result.
"""
name = el.name
if name and no_render_pat.match(name):
return name
# Render all fields if they all are set to False
if not (visibility or is_derived or type or multiplicity or default):
visibility = is_derived = type = multiplicity = default = True
s: list[str] = []
if name:
if visibility:
s.extend((vis_map[el.visibility], " "))
if is_derived and el.isDerived:
s.append("/")
s.append(name)
if type:
if el.typeValue:
s.append(f": {el.typeValue}")
elif el.type and el.type.name:
s.append(f": {el.type.name}")
if multiplicity:
s.append(format_multiplicity(el))
if default and el.defaultValue:
s.append(f" = {el.defaultValue}")
if tags and (
slots := [format(slot) for slot in el.appliedStereotype[:].slot if slot]
):
s.append(" { %s }" % ", ".join(slots))
if note and el.note:
s.append(f" # {el.note}")
return "".join(s)
def format_association_end(el) -> Tuple[str, str]:
"""Format association end."""
name = ""
if el.name:
n = [vis_map[el.visibility], " "]
if el.isDerived:
n.append("/")
n.append(el.name)
name = "".join(n)
m = [format_multiplicity(el, bare=True)]
if slots := [format(slot) for slot in el.appliedStereotype[:].slot if slot]:
m.append(" { %s }" % ",\n".join(slots))
mult = "".join(m)
return name, mult
@format.register(UML.Operation)
def format_operation(
el,
pattern=None,
visibility=False,
type=False,
multiplicity=False,
default=False,
tags=False,
direction=False,
note=False,
):
"""Create an OCL representation of the operation, Returns the operation as
a string."""
name = el.name
if not name:
return ""
if no_render_pat.match(name):
return name
# Render all fields if they all are set to False
if not (visibility or type or multiplicity or default or tags or direction):
visibility = type = multiplicity = default = tags = direction = True
s = []
if visibility:
s.append(f"{vis_map[el.visibility]} ")
s.extend(
(
name,
"(",
", ".join(
format(
p,
direction=direction,
type=type,
multiplicity=multiplicity,
default=default,
)
for p in el.ownedParameter
if p.direction != "return"
),
")",
)
)
if rr := next((p for p in el.ownedParameter if p.direction == "return"), None):
s.append(format(rr, type=type, multiplicity=multiplicity, default=default))
if note and el.note:
s.append(f" # {el.note}")
return "".join(s)
@format.register(UML.Parameter)
def format_parameter(
el, direction=False, type=False, multiplicity=False, default=False
):
if not (direction or type or multiplicity or default):
direction = type = multiplicity = default = True
s = []
name = el.name
if name and direction:
s.append(f"{el.direction} ")
s.append(name or "")
if type and el.typeValue:
s.append(f": {el.typeValue}")
if multiplicity:
s.append(format_multiplicity(el))
if default and el.defaultValue:
s.append(f" = {el.defaultValue}")
return "".join(s)
@format.register(UML.Slot)
def format_slot(el):
return f'{el.definingFeature.name} = "{el.value}"'
@format.register(UML.NamedElement)
def format_namedelement(el, **kwargs):
return el.name or ""
@format.register(UML.Pin)
def format_pin(el, **kwargs):
if not el:
return ""
s = []
s.append(el.name or "")
if el.type and el.type.name:
s.append(f": {el.type.name}")
if el.upperValue or el.lowerValue:
s.append(format_multiplicity(el))
return "".join(s)
@format.register(UML.MultiplicityElement)
def format_multiplicity(el, bare=False):
m = ""
if el.upperValue:
m = f"{el.lowerValue}..{el.upperValue}" if el.lowerValue else f"{el.upperValue}"
return f"[{m}]" if m and not bare else m
@format.register(UML.Relationship)
def METHOD_NAME(el):
return el.__class__.__name__
@format.register(UML.Generalization)
def format_generalization(el):
return gettext("general: {name}").format(name=el.general and el.general.name or "")
@format.register(UML.Dependency)
def format_dependency(el):
return gettext("supplier: {name}").format(
name=el.supplier and el.supplier.name or ""
)
@format.register(UML.Extend)
def format_extend(el):
return gettext("extend: {name}").format(
name=el.extendedCase and el.extendedCase.name or ""
)
@format.register(UML.Include)
def format_include(el):
return gettext("include: {name}").format(
name=el.addition and el.addition.name or ""
)
@format.register(UML.CallBehaviorAction)
def format_call_behavior_action_name(el):
"""Name conforms to UML2.5.1 16.3.4.1 naming description"""
if el.behavior and not el.name:
return el.behavior.name
return el.name or ""
|
3,808 |
replace refs
|
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Resource-aware json reference loaders to be used with jsonref."""
from __future__ import absolute_import, division, print_function
from flask import current_app, url_for
from jsonref import JsonLoader, JsonRef
from werkzeug.urls import url_parse
import jsonresolver
from jsonresolver.contrib.jsonref import json_loader_factory
from inspire_schemas.utils import load_schema
from inspire_utils.urls import ensure_scheme
from inspirehep.modules.pidstore.utils import get_pid_type_from_endpoint
from inspirehep.utils import record_getter
class AbstractRecordLoader(JsonLoader):
"""Base for resource-aware record loaders.
    Resolves the referred resource for the given URI by first checking against
local resources.
"""
def get_record(self, pid_type, recid):
raise NotImplementedError()
def get_remote_json(self, uri, **kwargs):
parsed_uri = url_parse(uri)
# Add http:// protocol so uri.netloc is correctly parsed.
server_name = current_app.config.get('SERVER_NAME')
parsed_server = url_parse(ensure_scheme(server_name))
if parsed_uri.netloc and parsed_uri.netloc != parsed_server.netloc:
return super(AbstractRecordLoader, self).get_remote_json(uri,
**kwargs)
path_parts = parsed_uri.path.strip('/').split('/')
if len(path_parts) < 2:
current_app.logger.error('Bad JSONref URI: {0}'.format(uri))
return None
endpoint = path_parts[-2]
pid_type = get_pid_type_from_endpoint(endpoint)
recid = path_parts[-1]
res = self.get_record(pid_type, recid)
return res
class ESJsonLoader(AbstractRecordLoader):
"""Resolve resources by retrieving them from Elasticsearch."""
def get_record(self, pid_type, recid):
try:
return record_getter.get_es_record(pid_type, recid)
except record_getter.RecordGetterError:
return None
class DatabaseJsonLoader(AbstractRecordLoader):
def get_record(self, pid_type, recid):
try:
return record_getter.get_db_record(pid_type, recid)
except record_getter.RecordGetterError:
return None
es_record_loader = ESJsonLoader()
db_record_loader = DatabaseJsonLoader()
SCHEMA_LOADER_CLS = json_loader_factory(
jsonresolver.JSONResolver(
plugins=['invenio_jsonschemas.jsonresolver']
)
)
"""Used in invenio-jsonschemas to resolve relative $ref."""
def load_resolved_schema(name):
"""Load a JSON schema with all references resolved.
Args:
name(str): name of the schema to load.
Returns:
dict: the JSON schema with resolved references.
Examples:
>>> resolved_schema = load_resolved_schema('authors')
"""
schema = load_schema(name)
return JsonRef.METHOD_NAME(
schema,
base_uri=url_for('invenio_jsonschemas.get_schema', schema_path='records/{}.json'.format(name)),
loader=SCHEMA_LOADER_CLS()
)
def METHOD_NAME(obj, source='db'):
"""Replaces record refs in obj by bypassing HTTP requests.
Any reference URI that comes from the same server and references a resource
will be resolved directly either from the database or from Elasticsearch.
:param obj:
Dict-like object for which '$ref' fields are recursively replaced.
:param source:
        Source from which to resolve the references. It can be any of:
* 'db' - resolve from Database
* 'es' - resolve from Elasticsearch
* 'http' - force using HTTP
:returns:
The same obj structure with the '$ref' fields replaced with the object
available at the given URI.
"""
loaders = {
'db': db_record_loader,
'es': es_record_loader,
'http': None
}
if source not in loaders:
raise ValueError('source must be one of {}'.format(loaders.keys()))
loader = loaders[source]
return JsonRef.METHOD_NAME(obj, loader=loader, load_on_repr=False)
|
3,809 |
select step scalar
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import matplotlib
import numpy as np
from astropy import units as u
from astropy.coordinates import BaseCoordinateFrame, UnitSphericalRepresentation
from astropy.utils.introspection import minversion
__all__ = [
"select_step_degree",
"select_step_hour",
"select_step_scalar",
"transform_contour_set_inplace",
]
MATPLOTLIB_LT_3_8 = not minversion(matplotlib, "3.8.dev")
def select_step_degree(dv):
# Modified from axis_artist, supports astropy.units
if dv > 1.0 * u.arcsec:
degree_limits_ = [1.5, 3, 7, 13, 20, 40, 70, 120, 270, 520]
degree_steps_ = [1, 2, 5, 10, 15, 30, 45, 90, 180, 360]
degree_units = [u.degree] * len(degree_steps_)
minsec_limits_ = [1.5, 2.5, 3.5, 8, 11, 18, 25, 45]
minsec_steps_ = [1, 2, 3, 5, 10, 15, 20, 30]
minute_limits_ = np.array(minsec_limits_) / 60.0
minute_units = [u.arcmin] * len(minute_limits_)
second_limits_ = np.array(minsec_limits_) / 3600.0
second_units = [u.arcsec] * len(second_limits_)
degree_limits = np.concatenate([second_limits_, minute_limits_, degree_limits_])
degree_steps = minsec_steps_ + minsec_steps_ + degree_steps_
degree_units = second_units + minute_units + degree_units
n = degree_limits.searchsorted(dv.to(u.degree))
step = degree_steps[n]
unit = degree_units[n]
return step * unit
else:
return METHOD_NAME(dv.to_value(u.arcsec)) * u.arcsec
def select_step_hour(dv):
if dv > 15.0 * u.arcsec:
hour_limits_ = [1.5, 2.5, 3.5, 5, 7, 10, 15, 21, 36]
hour_steps_ = [1, 2, 3, 4, 6, 8, 12, 18, 24]
hour_units = [u.hourangle] * len(hour_steps_)
minsec_limits_ = [1.5, 2.5, 3.5, 4.5, 5.5, 8, 11, 14, 18, 25, 45]
minsec_steps_ = [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30]
minute_limits_ = np.array(minsec_limits_) / 60.0
minute_units = [15.0 * u.arcmin] * len(minute_limits_)
second_limits_ = np.array(minsec_limits_) / 3600.0
second_units = [15.0 * u.arcsec] * len(second_limits_)
hour_limits = np.concatenate([second_limits_, minute_limits_, hour_limits_])
hour_steps = minsec_steps_ + minsec_steps_ + hour_steps_
hour_units = second_units + minute_units + hour_units
n = hour_limits.searchsorted(dv.to(u.hourangle))
step = hour_steps[n]
unit = hour_units[n]
return step * unit
else:
return METHOD_NAME(dv.to_value(15.0 * u.arcsec)) * (15.0 * u.arcsec)
def METHOD_NAME(dv):
log10_dv = np.log10(dv)
base = np.floor(log10_dv)
frac = log10_dv - base
steps = np.log10([1, 2, 5, 10])
imin = np.argmin(np.abs(frac - steps))
return 10.0 ** (base + steps[imin])
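# Illustrative check (sketch, not part of the original module): values snap to
# the nearest of {1, 2, 5, 10} scaled by a power of ten, compared in log space.
assert np.isclose(METHOD_NAME(3.0), 2.0)
assert np.isclose(METHOD_NAME(0.3), 0.2)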
def get_coord_meta(frame):
coord_meta = {}
coord_meta["type"] = ("longitude", "latitude")
from astropy.coordinates import frame_transform_graph
if isinstance(frame, str):
initial_frame = frame
frame = frame_transform_graph.lookup_name(frame)
if frame is None:
raise ValueError(f"Unknown frame: {initial_frame}")
if not isinstance(frame, BaseCoordinateFrame):
frame = frame()
names = list(frame.representation_component_names.keys())
coord_meta["name"] = names[:2]
# Add dummy data to the frame to determine the longitude wrap angle and the units
frame = frame.realize_frame(UnitSphericalRepresentation(0 * u.deg, 0 * u.deg))
coord_meta["wrap"] = (frame.spherical.lon.wrap_angle, None)
coord_meta["unit"] = (frame.spherical.lon.unit, frame.spherical.lat.unit)
return coord_meta
def transform_contour_set_inplace(cset, transform):
"""
Transform a contour set in-place using a specified
:class:`matplotlib.transform.Transform`.
Using transforms with the native Matplotlib contour/contourf can be slow if
the transforms have a non-negligible overhead (which is the case for
WCS/SkyCoord transforms) since the transform is called for each individual
contour line. It is more efficient to stack all the contour lines together
temporarily and transform them in one go.
"""
# The contours are represented as paths grouped into levels. Each can have
# one or more paths. The approach we take here is to stack the vertices of
# all paths and transform them in one go. The pos_level list helps us keep
# track of where the set of segments for each overall contour level ends.
    # The pos_segments list helps us keep track of where each segment ends for
# each contour level.
all_paths = []
pos_level = []
pos_segments = []
if MATPLOTLIB_LT_3_8:
for collection in cset.collections:
paths = collection.get_paths()
if len(paths) == 0:
continue
all_paths.append(paths)
# The last item in pos isn't needed for np.split and in fact causes
# issues if we keep it because it will cause an extra empty array to be
# returned.
pos = np.cumsum([len(x) for x in paths])
pos_segments.append(pos[:-1])
pos_level.append(pos[-1])
else:
paths = cset.get_paths()
if len(paths) > 0:
all_paths.append(paths)
# The last item in pos isn't needed for np.split and in fact causes
# issues if we keep it because it will cause an extra empty array to be
# returned.
pos = np.cumsum([len(x) for x in paths])
pos_segments.append(pos[:-1])
pos_level.append(pos[-1])
# As above the last item isn't needed
pos_level = np.cumsum(pos_level)[:-1]
# Stack all the segments into a single (n, 2) array
vertices = [path.vertices for paths in all_paths for path in paths]
if len(vertices) > 0:
vertices = np.concatenate(vertices)
else:
return
# Transform all coordinates in one go
vertices = transform.transform(vertices)
# Split up into levels again
vertices = np.split(vertices, pos_level)
# Now re-populate the segments in the line collections
for ilevel, vert in enumerate(vertices):
vert = np.split(vert, pos_segments[ilevel])
for iseg, ivert in enumerate(vert):
all_paths[ilevel][iseg].vertices = ivert
|
3,810 |
check ga state
|
import logging as log
import os
import shutil
from virttest import virsh
from virttest import utils_libvirtd
from virttest import utils_selinux
from virttest.utils_test import libvirt
from virttest.libvirt_xml import vm_xml
from virttest.utils_libvirt import libvirt_vmxml
# Using a lowercase alias is not ideal, but this is just a
# workaround to avoid changing the entire file.
logging = log.getLogger('avocado.' + __name__)
def METHOD_NAME(vm, vm_name):
"""
Check the guest agent state from guest xml
:param vm: The vm to be checked
:param vm_name: The vm's name
:return: the guest agent state
"""
# The session is just to make sure the guest
    # is fully booted up
vm.wait_for_login().close()
cur_xml = vm_xml.VMXML.new_from_dumpxml(vm_name)
channels = cur_xml.get_agent_channels()
for channel in channels:
state = channel.find('./target').get('state')
logging.debug("The guest agent state is %s", state)
agent_status = state == "connected"
return agent_status
def check_ga_function(vm_name, status_error, hotunplug_ga):
"""
Check whether guest agent function can work as expected
:param vm_name: The vm's name
:param status_error: Expect status error or not
:param hotunplug_ga: hotunplug guest agent device or not
"""
error_msg = []
if status_error:
error_msg.append("QEMU guest agent is not connected")
if hotunplug_ga:
error_msg.append("QEMU guest agent is not configured")
result = virsh.domtime(vm_name, ignore_status=True, debug=True)
libvirt.check_result(result, expected_fails=error_msg,
any_error=status_error)
def get_ga_xml(vm, vm_name):
"""
Get the xml snippet of guest agent
:param vm: The vm to get xml from
:param vm_name: The vm's name
:return: the the xml snippet of guest agent
"""
cur_xml = vm_xml.VMXML.new_from_dumpxml(vm_name)
channels = cur_xml.get_devices('channel')
ga_xml = None
for channel in channels:
target = channel['xmltreefile'].find('./target')
if target is not None:
name = target.get('name')
if name and name.startswith("org.qemu.guest_agent"):
ga_xml = channel
break
return ga_xml
def run(test, params, env):
vm_name = params.get("main_vm")
status_error = ("yes" == params.get("status_error", "no"))
start_ga = ("yes" == params.get("start_ga", "yes"))
prepare_channel = ("yes" == params.get("prepare_channel", "yes"))
src_path = params.get("src_path")
tgt_name = params.get("tgt_name", "org.qemu.guest_agent.0")
restart_libvirtd = ("yes" == params.get("restart_libvirtd"))
suspend_resume_guest = ("yes" == params.get("suspend_resume_guest"))
hotunplug_ga = ("yes" == params.get("hotunplug_ga"))
hotplug_ga_without_tgt_type = "yes" == params.get("hotplug_ga_without_tgt_type")
loop_time = int(params.get("loop_time", '1'))
dev_type = params.get("dev_type", "channel")
dev_dict = eval(params.get("dev_dict", "{}"))
label = params.get("con_label")
vm = env.get_vm(vm_name)
if src_path:
socket_file_dir = os.path.dirname(src_path)
if not os.path.exists(socket_file_dir):
os.mkdir(socket_file_dir)
shutil.chown(socket_file_dir, "qemu", "qemu")
utils_selinux.set_context_of_file(filename=socket_file_dir,
context=label)
vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
backup_xml = vmxml.copy()
vmxml.remove_agent_channels()
vmxml.sync()
try:
if prepare_channel:
vm.prepare_guest_agent(start=start_ga, channel=True,
source_path=src_path)
if restart_libvirtd:
utils_libvirtd.libvirtd_restart()
if suspend_resume_guest:
virsh.suspend(vm_name, debug=True)
virsh.resume(vm_name, debug=True)
if hotunplug_ga:
ga_xml = get_ga_xml(vm, vm_name)
result = virsh.detach_device(vm_name, ga_xml.xml)
if result.exit_status:
test.fail("hotunplug guest agent device failed, %s"
% result)
vmxml = vm_xml.VMXML.new_from_dumpxml(vm_name)
if vmxml.get_agent_channels():
test.fail("hotunplug guest agent device failed as "
"guest agent xml still exists")
else:
if start_ga != METHOD_NAME(vm, vm_name):
test.fail("guest agent device is not in correct state")
if hotplug_ga_without_tgt_type:
dev_obj = libvirt_vmxml.create_vm_device_by_type(dev_type, dev_dict)
for i in range(loop_time):
res = virsh.attach_device(vm.name, dev_obj.xml, debug=True)
libvirt.check_exit_status(res, status_error)
# Unsuccessful plugging of a device should not break the guest agent
status_error = False
check_ga_function(vm_name, status_error, hotunplug_ga)
finally:
vm.destroy()
backup_xml.sync()
|
3,811 |
is negative
|
# coding=utf-8
#
# This file is part of Hypothesis, which may be found at
# https://github.com/HypothesisWorks/hypothesis/
#
# Most of this work is copyright (C) 2013-2019 David R. MacIver
# ([email protected]), but it contains contributions by others. See
# CONTRIBUTING.rst for a full list of people who may hold copyright, and
# consult the git log if you need to determine who owns an individual
# contribution.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
#
# END HEADER
from __future__ import absolute_import, division, print_function
import math
from hypothesis.internal.compat import (
CAN_PACK_HALF_FLOAT,
quiet_raise,
struct_pack,
struct_unpack,
)
# Format codes for (int, float) sized types, used for byte-wise casts.
# See https://docs.python.org/3/library/struct.html#format-characters
STRUCT_FORMATS = {
16: (b"!H", b"!e"), # Note: 'e' is new in Python 3.6, so we have helpers
32: (b"!I", b"!f"),
64: (b"!Q", b"!d"),
}
# There are two versions of this: the one that uses Numpy to support Python
# 3.5 and earlier, and the elegant one for new versions. We use the new
# one if Numpy is unavailable too, because it's slightly faster in all cases.
def reinterpret_bits(x, from_, to):
return struct_unpack(to, struct_pack(from_, x))[0]
if not CAN_PACK_HALF_FLOAT: # pragma: no cover
try:
import numpy
except (ImportError, TypeError):
# We catch TypeError because that can be raised if Numpy is installed on
# PyPy for Python 2.7; and we only need a workaround until 2020-01-01.
pass
else:
def reinterpret_bits(x, from_, to): # noqa: F811
if from_ == b"!e":
arr = numpy.array([x], dtype=">f2")
if numpy.isfinite(x) and not numpy.isfinite(arr[0]):
quiet_raise(OverflowError("%r too large for float16" % (x,)))
buf = arr.tobytes()
else:
buf = struct_pack(from_, x)
if to == b"!e":
return float(numpy.frombuffer(buf, dtype=">f2")[0])
return struct_unpack(to, buf)[0]
def float_of(x, width):
assert width in (16, 32, 64)
if width == 64:
return float(x)
elif width == 32:
return reinterpret_bits(float(x), b"!f", b"!f")
else:
return reinterpret_bits(float(x), b"!e", b"!e")
def sign(x):
try:
return math.copysign(1.0, x)
except TypeError:
raise TypeError("Expected float but got %r of type %s" % (x, type(x).__name__))
def METHOD_NAME(x):
return sign(x) < 0
def count_between_floats(x, y, width=64):
assert x <= y
if METHOD_NAME(x):
if METHOD_NAME(y):
return float_to_int(x, width) - float_to_int(y, width) + 1
else:
return count_between_floats(x, -0.0, width) + count_between_floats(
0.0, y, width
)
else:
assert not METHOD_NAME(y)
return float_to_int(y, width) - float_to_int(x, width) + 1
def float_to_int(value, width=64):
fmt_int, fmt_flt = STRUCT_FORMATS[width]
return reinterpret_bits(value, fmt_flt, fmt_int)
def int_to_float(value, width=64):
fmt_int, fmt_flt = STRUCT_FORMATS[width]
return reinterpret_bits(value, fmt_int, fmt_flt)
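# Worked example (illustrative, not part of the module): for width=64,
#   float_to_int(1.0) == 0x3FF0000000000000
#   int_to_float(0x3FF0000000000000) == 1.0
# i.e. the byte-wise cast round-trips without changing the bit pattern.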
def next_up(value, width=64):
"""Return the first float larger than finite `val` - IEEE 754's `nextUp`.
From https://stackoverflow.com/a/10426033, with thanks to Mark Dickinson.
"""
assert isinstance(value, float)
if math.isnan(value) or (math.isinf(value) and value > 0):
return value
if value == 0.0 and METHOD_NAME(value):
return 0.0
fmt_int, fmt_flt = STRUCT_FORMATS[width]
# Note: n is signed; float_to_int returns unsigned
fmt_int = fmt_int.lower()
n = reinterpret_bits(value, fmt_flt, fmt_int)
if n >= 0:
n += 1
else:
n -= 1
return reinterpret_bits(n, fmt_int, fmt_flt)
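# Worked example (illustrative): for width=64, next_up(1.0) returns
# 1.0000000000000002 (1 + 2**-52) and next_up(0.0) returns the smallest
# positive subnormal, roughly 5e-324.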
def next_down(value, width=64):
return -next_up(-value, width)
|
3,812 |
executable exists
|
"""
Pyperclip
A cross-platform clipboard module for Python. (only handles plain text for now)
By Al Sweigart [email protected]
BSD License
Usage:
import pyperclip
pyperclip.copy('The text to be copied to the clipboard.')
spam = pyperclip.paste()
if not pyperclip.copy:
print("Copy functionality unavailable!")
On Windows, no additional modules are needed.
On Mac, the module uses pbcopy and pbpaste, which should come with the os.
On Linux, install xclip or xsel via package manager. For example, in Debian:
sudo apt-get install xclip
Otherwise on Linux, you will need the gtk, qtpy or PyQt modules installed.
qtpy also requires a python-qt-bindings module: PyQt4, PyQt5, PySide, PySide2
gtk and PyQt4 modules are not available for Python 3,
and this module does not work with PyGObject yet.
"""
__version__ = '1.5.27'
import platform
import os
import subprocess
from .clipboards import (init_osx_clipboard,
init_gtk_clipboard, init_qt_clipboard,
init_xclip_clipboard, init_xsel_clipboard,
init_klipper_clipboard, init_no_clipboard)
from .windows import init_windows_clipboard
# `import qtpy` sys.exit()s if DISPLAY is not in the environment.
# Thus, we need to detect the presence of $DISPLAY manually
# and not load qtpy if it is absent.
HAS_DISPLAY = os.getenv("DISPLAY", False)
CHECK_CMD = "where" if platform.system() == "Windows" else "which"
def METHOD_NAME(name):
return subprocess.call([CHECK_CMD, name],
stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
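# Illustrative example: on Linux the check above runs e.g. `which xclip` and
# treats a zero exit status as "the executable is available on PATH".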
def determine_clipboard():
# Determine the OS/platform and set
# the copy() and paste() functions accordingly.
if 'cygwin' in platform.system().lower():
# FIXME: pyperclip currently does not support Cygwin,
# see https://github.com/asweigart/pyperclip/issues/55
pass
elif os.name == 'nt' or platform.system() == 'Windows':
return init_windows_clipboard()
if os.name == 'mac' or platform.system() == 'Darwin':
return init_osx_clipboard()
if HAS_DISPLAY:
# Determine which command/module is installed, if any.
try:
# Check if gtk is installed
import gtk # noqa
except ImportError:
pass
else:
return init_gtk_clipboard()
try:
# qtpy is a small abstraction layer that lets you write
# applications using a single api call to either PyQt or PySide
# https://pypi.org/project/QtPy
import qtpy # noqa
except ImportError:
            # If qtpy isn't installed, fall back on importing PyQt5, or PyQt4
try:
import PyQt5 # noqa
except ImportError:
try:
import PyQt4 # noqa
except ImportError:
pass # fail fast for all non-ImportError exceptions.
else:
return init_qt_clipboard()
else:
return init_qt_clipboard()
pass
else:
return init_qt_clipboard()
if METHOD_NAME("xclip"):
return init_xclip_clipboard()
if METHOD_NAME("xsel"):
return init_xsel_clipboard()
if METHOD_NAME("klipper") and METHOD_NAME("qdbus"):
return init_klipper_clipboard()
return init_no_clipboard()
def set_clipboard(clipboard):
global copy, paste
clipboard_types = {'osx': init_osx_clipboard,
'gtk': init_gtk_clipboard,
'qt': init_qt_clipboard,
'xclip': init_xclip_clipboard,
'xsel': init_xsel_clipboard,
'klipper': init_klipper_clipboard,
'windows': init_windows_clipboard,
'no': init_no_clipboard}
copy, paste = clipboard_types[clipboard]()
copy, paste = determine_clipboard()
__all__ = ["copy", "paste"]
# pandas aliases
clipboard_get = paste
clipboard_set = copy
|
3,813 |
test close
|
# coding: utf-8
# Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department
# Distributed under the terms of "New BSD License", see the LICENSE file.
import os
from pyiron_base.project.path import ProjectPath
from pyiron_base._tests import PyironTestCase
from pyiron_base.state import state
class TestProjectPath(PyironTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
if os.name == "nt":
cls.current_dir = os.path.dirname(os.path.abspath(__file__)).replace(
"\\", "/"
)
else:
cls.current_dir = os.path.dirname(os.path.abspath(__file__))
cls.settings_configuration = state.settings.configuration.copy()
def setUp(self) -> None:
super().setUp()
state.settings.configuration["project_paths"] = [self.current_dir + "/"]
state.settings.configuration["project_check_enabled"] = True
self.project_path = ProjectPath(path=self.current_dir)
self.project_path = self.project_path.open("test_project_path")
def tearDown(self) -> None:
super().tearDown()
state.settings.configuration.update(self.settings_configuration)
self.project_path.removedirs()
def test_open(self):
with self.project_path.open("test_open") as test_open:
self.assertEqual(
test_open.path, self.current_dir + "/test_project_path/test_open/"
)
self.project_path.removedirs("test_open")
def METHOD_NAME(self):
with self.project_path.open("test_close") as METHOD_NAME:
self.assertEqual(
METHOD_NAME.path, self.current_dir + "/test_project_path/test_close/"
)
self.assertEqual(
self.project_path.path, self.current_dir + "/test_project_path/"
)
self.project_path.removedirs("test_close")
def test_copy(self):
with self.project_path.open("test_copy") as test_copy:
copied_path = test_copy.copy()
self.assertEqual(copied_path.path, test_copy.path)
self.project_path.removedirs("test_copy")
def test_removedirs(self):
self.project_path = self.project_path.open("test_removedirs")
self.project_path = self.project_path.open("..")
self.assertTrue("test_removedirs" in self.project_path.listdir())
self.project_path.removedirs("test_removedirs")
self.project_path.close()
self.assertFalse("test_removedirs" in self.project_path.listdir())
def test_path(self):
self.assertEqual(
self.project_path.path, self.current_dir + "/test_project_path/"
)
def test_root_path(self):
root_paths = state.settings.configuration["project_paths"]
self.assertIn(
self.project_path.root_path,
root_paths,
msg="root project.root_path not properly set by default. Check if `project_check_enabled`.",
)
def test_project_path(self):
root_paths = state.settings.configuration["project_paths"]
self.assertIn(
self.current_dir + "/test_project_path/",
[root_path + self.project_path.project_path for root_path in root_paths],
msg="project.project_path not properly set by default. Check if `project_check_enabled`.",
)
def test__get_project_from_path(self):
with self.subTest(
"No project_check_enabled /some/random/path not in root_path"
):
state.settings.configuration["project_check_enabled"] = False
path = "/some/random/path"
root_path, pr_path = self.project_path._get_project_from_path(path)
self.assertIs(root_path, None)
self.assertEqual(pr_path, path)
with self.subTest("project_check_enabled /some/random/path not in root_path"):
state.settings.configuration["project_check_enabled"] = True
path = "/some/random/path"
self.assertRaises(
ValueError, self.project_path._get_project_from_path, path
)
with self.subTest("No project_check_enabled /some/random/path in root_path"):
state.settings.configuration["project_check_enabled"] = False
state.settings.configuration["project_paths"] = ["/some"]
path = "/some/random/path"
root_path, pr_path = self.project_path._get_project_from_path(path)
self.assertEqual(root_path, "/some")
self.assertEqual(pr_path, "random/path")
with self.subTest("project_check_enabled /some/random/path in root_path"):
state.settings.configuration["project_check_enabled"] = True
path = "/some/random/path"
root_path, pr_path = self.project_path._get_project_from_path(path)
self.assertEqual(root_path, "/some")
self.assertEqual(pr_path, "random/path")
|
3,814 |
getsignal
|
"""
Cooperative implementation of special cases of :func:`signal.signal`.
This module is designed to work with libev's child watchers, as used
by default in :func:`gevent.os.fork`. Note that each ``SIGCHLD``
handler will be run in a new greenlet when the signal is delivered
(just like :class:`gevent.hub.signal`)
The implementations in this module are only monkey patched if
:func:`gevent.os.waitpid` is being used (the default) and if
:const:`signal.SIGCHLD` is available; see :func:`gevent.os.fork` for
information on configuring this not to be the case for advanced uses.
.. versionadded:: 1.1b4
.. versionchanged:: 1.5a4
Previously there was a backwards compatibility alias
``gevent.signal``, introduced in 1.1b4, that partly shadowed this
module, confusing humans and static analysis tools alike. That alias
has been removed. (See `gevent.signal_handler`.)
"""
from __future__ import absolute_import
from gevent._util import _NONE as _INITIAL
from gevent._util import copy_globals
import signal as _signal
__implements__ = []
__extensions__ = []
_child_handler = _INITIAL
_signal_signal = _signal.signal
_signal_getsignal = _signal.METHOD_NAME
def METHOD_NAME(signalnum):
"""
Exactly the same as :func:`signal.getsignal` except where
:const:`signal.SIGCHLD` is concerned.
For :const:`signal.SIGCHLD`, this cooperates with :func:`signal`
to provide consistent answers.
"""
if signalnum != _signal.SIGCHLD:
return _signal_getsignal(signalnum)
global _child_handler
if _child_handler is _INITIAL:
_child_handler = _signal_getsignal(_signal.SIGCHLD)
return _child_handler
def signal(signalnum, handler):
"""
Exactly the same as :func:`signal.signal` except where
:const:`signal.SIGCHLD` is concerned.
.. note::
A :const:`signal.SIGCHLD` handler installed with this function
will only be triggered for children that are forked using
:func:`gevent.os.fork` (:func:`gevent.os.fork_and_watch`);
children forked before monkey patching, or otherwise by the raw
:func:`os.fork`, will not trigger the handler installed by this
function. (It's unlikely that a SIGCHLD handler installed with
the builtin :func:`signal.signal` would be triggered either;
libev typically overwrites such a handler at the C level. At
the very least, it's full of race conditions.)
.. note::
Use of ``SIG_IGN`` and ``SIG_DFL`` may also have race conditions
with libev child watchers and the :mod:`gevent.subprocess` module.
.. versionchanged:: 1.2a1
If ``SIG_IGN`` or ``SIG_DFL`` are used to ignore ``SIGCHLD``, a
future use of ``gevent.subprocess`` and libev child watchers
will once again work. However, on Python 2, use of ``os.popen``
will fail.
.. versionchanged:: 1.1rc2
Allow using ``SIG_IGN`` and ``SIG_DFL`` to reset and ignore ``SIGCHLD``.
However, this allows the possibility of a race condition if ``gevent.subprocess``
had already been used.
"""
if signalnum != _signal.SIGCHLD:
return _signal_signal(signalnum, handler)
# TODO: raise value error if not called from the main
# greenlet, just like threads
if handler != _signal.SIG_IGN and handler != _signal.SIG_DFL and not callable(handler):
# exact same error message raised by the stdlib
raise TypeError("signal handler must be signal.SIG_IGN, signal.SIG_DFL, or a callable object")
old_handler = METHOD_NAME(signalnum)
global _child_handler
_child_handler = handler
if handler in (_signal.SIG_IGN, _signal.SIG_DFL):
# Allow resetting/ignoring this signal at the process level.
# Note that this conflicts with gevent.subprocess and other users
# of child watchers, until the next time gevent.subprocess/loop.install_sigchld()
# is called.
from gevent.hub import get_hub # Are we always safe to import here?
_signal_signal(signalnum, handler)
get_hub().loop.reset_sigchld()
return old_handler
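# Illustrative usage sketch (assumes gevent's cooperative child watchers are in
# use, i.e. children are forked via gevent.os.fork):
#
#     def on_child(signum, frame):
#         print("a child process exited")
#
#     signal(_signal.SIGCHLD, on_child)   # the signal() defined above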
def _on_child_hook():
# This is called in the hub greenlet. To let the function
# do more useful work, like use blocking functions,
# we run it in a new greenlet; see gevent.hub.signal
if callable(_child_handler):
# None is a valid value for the frame argument
from gevent import Greenlet
greenlet = Greenlet(_child_handler, _signal.SIGCHLD, None)
greenlet.switch()
import gevent.os
if 'waitpid' in gevent.os.__implements__ and hasattr(_signal, 'SIGCHLD'):
# Tightly coupled here to gevent.os and its waitpid implementation; only use these
# if necessary.
gevent.os._on_child_hook = _on_child_hook
__implements__.append("signal")
__implements__.append("getsignal")
else:
# XXX: This breaks test__all__ on windows
__extensions__.append("signal")
__extensions__.append("getsignal")
__imports__ = copy_globals(_signal, globals(),
names_to_ignore=__implements__ + __extensions__,
dunder_names_to_keep=())
__all__ = __implements__ + __extensions__
|
3,815 |
on item expanded
|
from typing import TYPE_CHECKING, Optional, Union
from PySide6.QtCore import QSize, Qt
from PySide6.QtGui import QFont, QStandardItem, QStandardItemModel
from PySide6.QtWidgets import QHBoxLayout, QHeaderView, QLabel, QTreeView, QTreeWidget, QVBoxLayout
from angrmanagement.config import Conf
from angrmanagement.logic.debugger import DebuggerWatcher
from angrmanagement.logic.debugger.bintrace import BintraceDebugger
from .view import BaseView
if TYPE_CHECKING:
from angr.knowledge_plugins import Function
try:
from bintrace import TraceEvent
except ImportError:
TraceEvent = "TraceEvent"
class CallTreeModel(QStandardItemModel):
"""
Model for the call tree.
"""
Headers = ["Function"]
def hasChildren(self, index):
item: Optional[CallTreeItem] = self.itemFromIndex(index)
if isinstance(item, CallTreeItem):
return item.expandable
return super().hasChildren(index)
def headerData(self, section, orientation, role): # pylint:disable=unused-argument
if role != Qt.DisplayRole:
return None
if section < len(self.Headers):
return self.Headers[section]
return None
class CallTreeItem(QStandardItem):
"""
Item in call tree representing a function.
"""
def __init__(self, function, event):
name = hex(function) if isinstance(function, int) else function.name
super().__init__(name)
self.function: Union[int, Function] = function
self.event: TraceEvent = event
self.populated: bool = False
self.expandable: bool = True
class CallExplorerView(BaseView):
"""
Call Explorer view.
"""
def __init__(self, workspace, instance, default_docking_position, *args, **kwargs):
super().__init__("call_explorer", workspace, instance, default_docking_position, *args, **kwargs)
self._last_updated_func: Optional[Union[int, Function]] = None
self._inhibit_update: bool = False
self.base_caption = "Call Explorer"
self._tree: Optional[QTreeWidget] = None
self._init_widgets()
self.reload()
self.width_hint = 500
self.height_hint = 400
self.updateGeometry()
self._dbg_manager = instance.debugger_mgr
self._dbg_watcher = DebuggerWatcher(self._on_debugger_state_updated, self._dbg_manager.debugger)
self._on_debugger_state_updated()
@staticmethod
def minimumSizeHint(*args, **kwargs): # pylint:disable=unused-argument
return QSize(200, 200)
def _init_widgets(self):
vlayout = QVBoxLayout()
vlayout.setSpacing(0)
vlayout.setContentsMargins(0, 0, 0, 0)
self._top_level_function_level = QLabel()
self._reset_function_label()
hlayout = QHBoxLayout()
hlayout.addWidget(self._top_level_function_level)
hlayout.setContentsMargins(3, 3, 3, 3)
vlayout.addLayout(hlayout)
self._tree = QTreeView(self)
self._model = CallTreeModel(self._tree)
self._tree.setModel(self._model)
self._tree.setFont(QFont(Conf.disasm_font))
header = self._tree.header()
header.setSectionResizeMode(QHeaderView.ResizeToContents)
self._tree.expanded.connect(self.METHOD_NAME)
self._tree.clicked.connect(self._on_item_clicked)
self._tree.doubleClicked.connect(self._on_item_double_clicked)
vlayout.addWidget(self._tree)
self.setLayout(vlayout)
#
# Events
#
def closeEvent(self, event):
self._dbg_watcher.shutdown()
super().closeEvent(event)
def _on_item_clicked(self, index):
"""
Highlights the corresponding call site.
"""
item = self._model.itemFromIndex(index)
# Do not try to update on a single click. Allow user to browse through the call tree
original_inhibit = self._inhibit_update
self._inhibit_update = True
# Replay up to just before call
dbg = self.instance.debugger_mgr.debugger
dbg.replay_to_event(dbg._btrace.get_prev_exec_event(item.event, vcpu=dbg._trace_dbg.vcpu))
self._inhibit_update = original_inhibit
def _on_item_double_clicked(self, index):
"""
Navigates into the call.
"""
item = self._model.itemFromIndex(index)
# Replay after the jump, jumping into the called function
# FIXME: Doesn't consider proper selected debugger, assumes bintrace
dbg = self.instance.debugger_mgr.debugger
dbg.replay_to_event(dbg._btrace.get_next_exec_event(item.event, vcpu=dbg._trace_dbg.vcpu))
def METHOD_NAME(self, index):
"""
Descend into call tree for this node.
"""
expanding_item = self._model.itemFromIndex(index)
if not expanding_item.populated:
dbg = self.instance.debugger_mgr.debugger
if dbg.am_none:
return
called = dbg.get_called_functions(expanding_item.event)
for func_or_addr, event in called:
expanding_item.appendRow(CallTreeItem(func_or_addr, event))
expanding_item.expandable = len(called) > 0
expanding_item.populated = True
def _on_debugger_state_updated(self):
"""
Update current call state.
"""
if self._inhibit_update:
return
dbg = self._dbg_watcher.debugger
if isinstance(dbg.am_obj, BintraceDebugger):
func = dbg.get_current_function()
if func is not None:
func = func[0]
else:
func = None
if func is self._last_updated_func:
return
self._model.clear()
self._last_updated_func = func
if func is not None and isinstance(dbg.am_obj, BintraceDebugger):
self._top_level_function_level.setText(f"Current function: {func.name}")
for func, event in dbg.get_called_functions():
self._model.appendRow(CallTreeItem(func, event))
else:
self._reset_function_label()
def _reset_function_label(self):
self._top_level_function_level.setText("Current function: Unknown")
|
3,816 |
create subprocess shell
|
__all__ = 'create_subprocess_exec', 'create_subprocess_shell'
import subprocess
from . import events
from . import protocols
from . import streams
from . import tasks
from .log import logger
PIPE = subprocess.PIPE
STDOUT = subprocess.STDOUT
DEVNULL = subprocess.DEVNULL
class SubprocessStreamProtocol(streams.FlowControlMixin,
protocols.SubprocessProtocol):
"""Like StreamReaderProtocol, but for a subprocess."""
def __init__(self, limit, loop):
super().__init__(loop=loop)
self._limit = limit
self.stdin = self.stdout = self.stderr = None
self._transport = None
self._process_exited = False
self._pipe_fds = []
self._stdin_closed = self._loop.create_future()
def __repr__(self):
info = [self.__class__.__name__]
if self.stdin is not None:
info.append(f'stdin={self.stdin!r}')
if self.stdout is not None:
info.append(f'stdout={self.stdout!r}')
if self.stderr is not None:
info.append(f'stderr={self.stderr!r}')
return '<{}>'.format(' '.join(info))
def connection_made(self, transport):
self._transport = transport
stdout_transport = transport.get_pipe_transport(1)
if stdout_transport is not None:
self.stdout = streams.StreamReader(limit=self._limit,
loop=self._loop)
self.stdout.set_transport(stdout_transport)
self._pipe_fds.append(1)
stderr_transport = transport.get_pipe_transport(2)
if stderr_transport is not None:
self.stderr = streams.StreamReader(limit=self._limit,
loop=self._loop)
self.stderr.set_transport(stderr_transport)
self._pipe_fds.append(2)
stdin_transport = transport.get_pipe_transport(0)
if stdin_transport is not None:
self.stdin = streams.StreamWriter(stdin_transport,
protocol=self,
reader=None,
loop=self._loop)
def pipe_data_received(self, fd, data):
if fd == 1:
reader = self.stdout
elif fd == 2:
reader = self.stderr
else:
reader = None
if reader is not None:
reader.feed_data(data)
def pipe_connection_lost(self, fd, exc):
if fd == 0:
pipe = self.stdin
if pipe is not None:
pipe.close()
self.connection_lost(exc)
if exc is None:
self._stdin_closed.set_result(None)
else:
self._stdin_closed.set_exception(exc)
return
if fd == 1:
reader = self.stdout
elif fd == 2:
reader = self.stderr
else:
reader = None
if reader is not None:
if exc is None:
reader.feed_eof()
else:
reader.set_exception(exc)
if fd in self._pipe_fds:
self._pipe_fds.remove(fd)
self._maybe_close_transport()
def process_exited(self):
self._process_exited = True
self._maybe_close_transport()
def _maybe_close_transport(self):
if len(self._pipe_fds) == 0 and self._process_exited:
self._transport.close()
self._transport = None
def _get_close_waiter(self, stream):
if stream is self.stdin:
return self._stdin_closed
class Process:
def __init__(self, transport, protocol, loop):
self._transport = transport
self._protocol = protocol
self._loop = loop
self.stdin = protocol.stdin
self.stdout = protocol.stdout
self.stderr = protocol.stderr
self.pid = transport.get_pid()
def __repr__(self):
return f'<{self.__class__.__name__} {self.pid}>'
@property
def returncode(self):
return self._transport.get_returncode()
async def wait(self):
"""Wait until the process exit and return the process return code."""
return await self._transport._wait()
def send_signal(self, signal):
self._transport.send_signal(signal)
def terminate(self):
self._transport.terminate()
def kill(self):
self._transport.kill()
async def _feed_stdin(self, input):
debug = self._loop.get_debug()
self.stdin.write(input)
if debug:
logger.debug(
'%r communicate: feed stdin (%s bytes)', self, len(input))
try:
await self.stdin.drain()
except (BrokenPipeError, ConnectionResetError) as exc:
# communicate() ignores BrokenPipeError and ConnectionResetError
if debug:
logger.debug('%r communicate: stdin got %r', self, exc)
if debug:
logger.debug('%r communicate: close stdin', self)
self.stdin.close()
async def _noop(self):
return None
async def _read_stream(self, fd):
transport = self._transport.get_pipe_transport(fd)
if fd == 2:
stream = self.stderr
else:
assert fd == 1
stream = self.stdout
if self._loop.get_debug():
name = 'stdout' if fd == 1 else 'stderr'
logger.debug('%r communicate: read %s', self, name)
output = await stream.read()
if self._loop.get_debug():
name = 'stdout' if fd == 1 else 'stderr'
logger.debug('%r communicate: close %s', self, name)
transport.close()
return output
async def communicate(self, input=None):
if input is not None:
stdin = self._feed_stdin(input)
else:
stdin = self._noop()
if self.stdout is not None:
stdout = self._read_stream(1)
else:
stdout = self._noop()
if self.stderr is not None:
stderr = self._read_stream(2)
else:
stderr = self._noop()
stdin, stdout, stderr = await tasks.gather(stdin, stdout, stderr)
await self.wait()
return (stdout, stderr)
async def METHOD_NAME(cmd, stdin=None, stdout=None, stderr=None,
limit=streams._DEFAULT_LIMIT, **kwds):
loop = events.get_running_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
transport, protocol = await loop.subprocess_shell(
protocol_factory,
cmd, stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
return Process(transport, protocol, loop)
async def create_subprocess_exec(program, *args, stdin=None, stdout=None,
stderr=None, limit=streams._DEFAULT_LIMIT,
**kwds):
loop = events.get_running_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
transport, protocol = await loop.subprocess_exec(
protocol_factory,
program, *args,
stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
return Process(transport, protocol, loop)
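# Illustrative usage sketch (not part of the module): run a shell command with
# the helper above and capture its output.
#
#     async def demo():
#         proc = await create_subprocess_shell("echo hello", stdout=PIPE)
#         stdout, _ = await proc.communicate()
#         return stdout.decode()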
|
3,817 |
set up module
|
#!/usr/bin/env python
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
def METHOD_NAME():
global mol, molsym, m, msym
b = 1.4
mol = gto.M(
verbose = 5,
output = '/dev/null',
atom = [
['N',( 0.000000, 0.000000, -b/2)],
['N',( 0.000000, 0.000000, b/2)], ],
basis = {'N': 'ccpvdz', },
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
molsym = gto.M(
verbose = 5,
output = '/dev/null',
atom = [
['N',( 0.000000, 0.000000, -b/2)],
['N',( 0.000000, 0.000000, b/2)], ],
basis = {'N': 'ccpvdz', },
symmetry = True
)
msym = scf.RHF(molsym)
msym.conv_tol = 1e-9
msym.scf()
def tearDownModule():
global mol, molsym, m, msym
mol.stdout.close()
molsym.stdout.close()
del mol, molsym, m, msym
class KnownValues(unittest.TestCase):
def test_mc1step_4o4e(self):
mc = mcscf.CASSCF(m, 4, 4)
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.7015375913946591, 4)
def test_mc1step_4o4e_internal_rotation(self):
mc = mcscf.CASSCF(m, 4, 4)
mc.internal_rotation = True
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.7015375913946591, 4)
def test_mc2step_4o4e(self):
mc = mcscf.CASSCF(m, 4, 4)
emc = mc.mc2step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.7015375913946591, 4)
def test_mc1step_6o6e_high_cost(self):
mc = mcscf.CASSCF(m, 6, 6)
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.980105451388, 7)
def test_mc2step_6o6e_high_cost(self):
mc = mcscf.CASSCF(m, 6, 6)
emc = mc.mc2step()[0]
self.assertAlmostEqual(emc, -108.980105451388, 7)
def test_mc1step_symm_4o4e(self):
mc = mcscf.CASSCF(msym, 4, 4)
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.7015375913946591, 4)
def test_mc2step_symm_4o4e(self):
mc = mcscf.CASSCF(msym, 4, 4)
emc = mc.mc2step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.7015375913946591, 4)
def test_mc1step_symm_6o6e_high_cost(self):
mc = mcscf.CASSCF(msym, 6, 6)
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.980105451388, 7)
def test_mc2step_symm_6o6e_high_cost(self):
mc = mcscf.CASSCF(msym, 6, 6)
emc = mc.mc2step()[0]
self.assertAlmostEqual(emc, -108.980105451388, 7)
def test_casci_4o4e(self):
mc = mcscf.CASCI(m, 4, 4)
emc = mc.casci()[0]
self.assertAlmostEqual(emc, -108.8896744464714, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.6910275883606078, 4)
def test_casci_symm_4o4e(self):
mc = mcscf.CASCI(msym, 4, 4)
emc = mc.casci()[0]
self.assertAlmostEqual(emc, -108.8896744464714, 7)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.6910275883606078, 4)
mc.wfnsym = 'A2u'
# raised by mc.fcisolver.guess_wfnsym
self.assertRaises(RuntimeError, mc.kernel)
def test_casci_from_uhf(self):
mf = scf.UHF(mol)
mf.scf()
mc = mcscf.CASCI(mf, 4, 4)
emc = mc.casci()[0]
self.assertAlmostEqual(emc, -108.8896744464714, 6)
self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
2.6910275883606078, 4)
def test_casci_from_uhf1(self):
mf = scf.UHF(mol)
mf.scf()
mc = mcscf.CASSCF(mf, 4, 4)
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
emc = mc.mc2step()[0]
self.assertAlmostEqual(emc, -108.913786407955, 7)
def test_frozen1s(self):
mc = mcscf.CASSCF(msym, 4, 4)
mc.frozen = 3
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.91373646206542, 7)
def test_frozenselect(self):
mc = mcscf.CASSCF(msym, 4, 4)
mc.frozen = [i-1 for i in [19, 20, 26, 27]]
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.91238513746941, 7)
def test_wfnsym(self):
mc = mcscf.CASSCF(msym, 4, (3,1))
mc.fcisolver.wfnsym = 14
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.74508322877787, 7)
mc.wfnsym = 'A2u'
emc = mc.mc1step()[0]
self.assertAlmostEqual(emc, -108.69019443475308, 7)
def test_ucasci(self):
mc = mcscf.UCASCI(msym, 4, (3,1))
emc = mc.kernel()[0]
self.assertAlmostEqual(emc, -108.77486560653847, 7)
def test_ucasscf_high_cost(self):
mc = mcscf.UCASSCF(msym, 4, (3,1))
emc = mc.kernel()[0]
self.assertAlmostEqual(emc, -108.80789718975041, 7)
def test_newton_casscf(self):
mc = mcscf.newton(mcscf.CASSCF(m, 4, 4)).run()
self.assertAlmostEqual(mc.e_tot, -108.9137864132358, 8)
def test_newton_casscf_symm(self):
mc = mcscf.newton(mcscf.CASSCF(msym, 4, 4)).run()
self.assertAlmostEqual(mc.e_tot, -108.9137864132358, 8)
if __name__ == "__main__":
print("Full Tests for N2")
unittest.main()
|
3,818 |
test line splitter corner cases
|
from __future__ import annotations
from typing import Optional
from unittest.mock import (
call,
patch,
)
from datalad.tests.utils_pytest import (
assert_equal,
assert_in,
assert_is_none,
)
from ..utils import (
AssemblingDecoderMixIn,
LineSplitter,
)
test_lines = [
"first line",
"second line",
"third line",
""
]
def _check_splitting_endings_separator(endings: list[str],
separator: Optional[str] = None,
keep_ends: bool = False,
check_continuation: bool = False
) -> None:
for line_ending in endings:
line_splitter = LineSplitter(separator=separator, keep_ends=keep_ends)
full_end = line_ending + separator if separator else line_ending
if separator:
expected_end = full_end if keep_ends else line_ending
else:
expected_end = line_ending if keep_ends else ""
lines = line_splitter.process(
full_end.join(test_lines)
+ full_end
+ ("fourth " if check_continuation else "")
)
assert_equal(
lines,
[line + expected_end for line in test_lines]
)
if check_continuation:
assert_equal(line_splitter.remaining_data, "fourth ")
lines = line_splitter.process("line" + full_end)
assert_equal(
lines,
["fourth line" + expected_end])
assert_is_none(line_splitter.finish_processing())
else:
assert_is_none(line_splitter.finish_processing())
def test_line_splitter_basic() -> None:
# expect lines without endings, split at standard line-endings
_check_splitting_endings_separator(["\n", "\r\n"])
_check_splitting_endings_separator(["\n", "\r\n"], check_continuation=True)
def test_line_splitter_basic_keep() -> None:
# expect lines without endings, split at standard line-endings
_check_splitting_endings_separator(["\n", "\r\n"], keep_ends=True)
_check_splitting_endings_separator(
["\n", "\r\n"],
keep_ends=True,
check_continuation=True)
def test_line_splitter_zero() -> None:
# expect lines without endings, split at standard line-endings
_check_splitting_endings_separator(["\n", "\r\n"], separator="\x00")
_check_splitting_endings_separator(
["\n", "\r\n"],
separator="\x00",
check_continuation=True)
def test_line_splitter_zero_keep() -> None:
# expect lines without endings, split at standard line-endings
_check_splitting_endings_separator(
["\n", "\r\n"],
separator="\x00",
keep_ends=True)
_check_splitting_endings_separator(
["\n", "\r\n"],
separator="\x00",
keep_ends=True,
check_continuation=True)
def METHOD_NAME() -> None:
line_splitter = LineSplitter()
lines = line_splitter.process("")
assert_equal(lines, [])
assert_equal(line_splitter.remaining_data, None)
line_splitter = LineSplitter()
lines = line_splitter.process("")
assert_equal(lines, [])
lines = line_splitter.process("\n")
assert_equal(lines, [""])
assert_equal(line_splitter.remaining_data, None)
line_splitter = LineSplitter()
lines = line_splitter.process(" a \f \r\n")
assert_equal(lines, [" a ", " "])
def test_assembling_decoder_mix_in_basic() -> None:
encoding = "utf-8"
unicode_str = "These are not ASCII: ä, ö, ü. These can be ASCII: a, o, u."
data_bytes = unicode_str.encode(encoding)
adm = AssemblingDecoderMixIn()
single_result = "".join([
adm.decode(1, bytes([data_byte]), encoding)
for data_byte in data_bytes
])
assert_equal(single_result, unicode_str)
def _decode_multiple(adm: AssemblingDecoderMixIn,
encoded_strings: list[bytes],
encoding: str,
fixed_index: Optional[int] = None) -> list[str]:
# Interleave decoding of multiple strings
decoded_chars: list[list] = [list() for _ in range(len(encoded_strings))]
for data_index in range(max([len(es) for es in encoded_strings])):
for string_index in range(len(encoded_strings)):
if data_index < len(encoded_strings[string_index]):
decoded_char = adm.decode(
string_index if fixed_index is None else fixed_index,
bytes([encoded_strings[string_index][data_index]]),
encoding)
decoded_chars[string_index].append(decoded_char)
return ["".join(decoded_list) for decoded_list in decoded_chars]
def test_assembling_decoder_mix_in_multiple() -> None:
encoding = "utf-8"
unicode_strings = [
"These are not ASCII: ä, ö, ü. These can be ASCII: a, o, u.",
"Some other weird stuff: öäöß.",
"Even weirder: 🐷🐶.",
]
encoded_strings = [
unicode_string.encode(encoding)
for unicode_string in unicode_strings
]
adm = AssemblingDecoderMixIn()
decoded_strings = _decode_multiple(adm, encoded_strings, encoding)
assert_equal(unicode_strings, decoded_strings)
def test_assembling_decoder_mix_in_multiple_fail() -> None:
encoding = "utf-8"
unicode_strings = [
"A: ä, ö, ü.",
"B: öäöß.",
"C: 🐷🐶.",
]
encoded_strings = [
unicode_string.encode(encoding)
for unicode_string in unicode_strings
]
adm = AssemblingDecoderMixIn()
decoded_strings = _decode_multiple(adm, encoded_strings, encoding, 0)
# Because the strings are not separated, we do not expect any proper
# output after single-byte encoded chars.
assert_equal(decoded_strings, ["A: ", "B: ", "C: "])
def test_assembling_decoder_mix_in_warning() -> None:
encoding = "utf-8"
data_bytes = "🐷🐶.".encode(encoding)
adm = AssemblingDecoderMixIn()
with patch("datalad.runner.utils.logger") as logger_mock:
result = adm.decode(1, data_bytes[0:1], encoding)
assert_equal(result, '')
del adm
assert_in(
call.warning("unprocessed data in AssemblingDecoderMixIn"),
logger_mock.mock_calls)
assert_in(
call.debug(
"unprocessed data in AssemblingDecoderMixIn:\n"
"fd: 1, data: b'\\xf0'\n"),
logger_mock.mock_calls)
|
3,819 |
date formatter
|
import json
import urllib.parse
import urllib.request
import dateutil.parser
from collections import OrderedDict
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
from matplotlib.transforms import blended_transform_factory
cache = '_pr_cache.txt'
# Obtain release dates using, e.g.,
#
# git log v0.4 -n 1 --format='%ai'
#
# The first two releases are commented out.
# This was in the era before PRs.
#
releases = OrderedDict([
#('0.1', u'2009-10-07 13:52:19 +0200'),
#('0.2', u'2009-11-12 14:48:45 +0200'),
#('0.3', u'2011-10-10 03:28:47 -0700'),
('0.4', '2011-12-03 14:31:32 -0800'),
('0.5', '2012-02-26 21:00:51 -0800'),
('0.6', '2012-06-24 21:37:05 -0700'),
('0.7', '2012-09-29 18:08:49 -0700'),
('0.8', '2013-03-04 20:46:09 +0100')])
month_duration = 24
def fetch_PRs(user='scikit-image', repo='scikit-image', state='open'):
params = {'state': state,
'per_page': 100,
'page': 1}
data = []
page_data = True
while page_data:
config = {'user': user,
'repo': repo,
                  'params': urllib.parse.urlencode(params)}
fetch_status = (
f"Fetching page {params['page']} (state={params['state']})"
f" from {config['user']}/{config['repo']}..."
)
print(fetch_status)
        f = urllib.request.urlopen(
            f"https://api.github.com/repos/{config['user']}/{config['repo']}/pulls?{config['params']}"
        )
params['page'] += 1
page_data = json.loads(f.read())
if 'message' in page_data and page_data['message'] == "Not Found":
page_data = []
print(f"Warning: Repo not found ({config['user']}/{config['repo']})")
else:
data.extend(page_data)
return data
def seconds_from_epoch(dates):
seconds = [(dt - epoch).total_seconds() for dt in dates]
return seconds
def get_month_bins(dates):
now = datetime.now(tz=dates[0].tzinfo)
this_month = datetime(year=now.year, month=now.month, day=1,
tzinfo=dates[0].tzinfo)
bins = [this_month - relativedelta(months=i)
for i in reversed(range(-1, month_duration))]
return seconds_from_epoch(bins)
def METHOD_NAME(value, _):
dt = epoch + timedelta(seconds=value)
return dt.strftime('%Y/%m')
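# Illustrative example: with epoch set to 2009-01-01 (as defined below), a tick
# value of 0 seconds formats as '2009/01' and 86400 * 31 seconds as '2009/02'.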
for r in releases:
releases[r] = dateutil.parser.parse(releases[r])
try:
PRs = json.loads(open(cache).read())
print('Loaded PRs from cache...')
except OSError:
PRs = fetch_PRs(user='stefanv', repo='scikits.image', state='closed')
PRs.extend(fetch_PRs(state='open'))
PRs.extend(fetch_PRs(state='closed'))
cf = open(cache, 'w')
cf.write(json.dumps(PRs))
cf.flush()
nrs = [pr['number'] for pr in PRs]
print(f'Processing {len(nrs)} pull requests...')
dates = [dateutil.parser.parse(pr['created_at']) for pr in PRs]
epoch = datetime(2009, 1, 1, tzinfo=dates[0].tzinfo)
dates_f = seconds_from_epoch(dates)
bins = get_month_bins(dates)
fig, ax = plt.subplots(figsize=(7, 5))
n, bins, _ = ax.hist(dates_f, bins=bins, color='blue', alpha=0.6)
ax.xaxis.set_major_formatter(FuncFormatter(METHOD_NAME))
ax.set_xticks(bins[2:-1:3]) # Date label every 3 months.
labels = ax.get_xticklabels()
for l in labels:
l.set_rotation(40)
l.set_size(10)
mixed_transform = blended_transform_factory(ax.transData, ax.transAxes)
for version, date in releases.items():
date = seconds_from_epoch([date])[0]
ax.axvline(date, color='black', linestyle=':', label=version)
ax.text(date, 1, version, color='r', va='bottom', ha='center',
transform=mixed_transform)
ax.set_title('Pull request activity').set_y(1.05)
ax.set_xlabel('Date')
ax.set_ylabel('PRs per month', color='blue')
cumulative = np.cumsum(n)
cumulative += len(dates) - cumulative[-1]
ax2 = ax.twinx()
ax2.plot(bins[1:], cumulative, color='black', linewidth=2)
ax2.set_ylabel('Total PRs', color='black')
plt.tight_layout()
fig.savefig('PRs.png')
plt.show()
|
3,820 |
test morph stat json
|
import json
from pathlib import Path
import tempfile
import pandas as pd
import yaml
from click.testing import CliRunner
from mock import patch
from neurom.apps.cli import cli
from neurom.exceptions import ConfigError
DATA = Path(__file__).parent.parent / 'data'
@patch('neurom.apps.cli.plt.show')
def test_viewer_matplotlib(mock):
runner = CliRunner()
filename = str(DATA / 'swc' / 'simple.swc')
result = runner.invoke(cli, ['view', filename])
assert result.exit_code == 0
mock.assert_called_once()
mock.reset_mock()
result = runner.invoke(cli, ['view', filename, '--3d'])
assert result.exit_code == 0
mock.assert_called_once()
mock.reset_mock()
result = runner.invoke(cli, ['view', filename, '--plane', 'xy'])
assert result.exit_code == 0
mock.assert_called_once()
@patch('neurom.view.plotly_impl.plot')
def test_viewer_plotly(mock):
runner = CliRunner()
filename = str(DATA / 'swc' / 'simple.swc')
result = runner.invoke(cli, ['view', filename, '--3d',
'--backend', 'plotly'])
assert result.exit_code == 0
mock.assert_called_once()
mock.reset_mock()
result = runner.invoke(cli, ['view', filename,
'--backend', 'plotly',
'--plane', 'xy'])
assert result.exit_code == 0
mock.assert_called_once()
def test_morph_stat():
runner = CliRunner()
filename = DATA / 'swc' / 'simple.swc'
with tempfile.NamedTemporaryFile() as f:
result = runner.invoke(cli, ['stats', str(filename), '--output', f.name])
assert result.exit_code == 0
df = pd.read_csv(f)
assert set(df.columns) == {'name', 'axon:max_section_lengths', 'axon:sum_section_lengths',
'axon:sum_section_volumes', 'axon:max_section_branch_orders',
'apical_dendrite:max_section_lengths',
'apical_dendrite:sum_section_lengths',
'apical_dendrite:sum_section_volumes',
'apical_dendrite:max_section_branch_orders',
'basal_dendrite:max_section_lengths',
'basal_dendrite:sum_section_lengths',
'basal_dendrite:sum_section_volumes',
'basal_dendrite:max_section_branch_orders',
'all:max_section_lengths',
'all:sum_section_lengths', 'all:sum_section_volumes',
'all:max_section_branch_orders', 'morphology:mean_soma_radius'}
def test_morph_stat_full_config():
runner = CliRunner()
filename = DATA / 'h5/v1/Neuron.h5'
with tempfile.NamedTemporaryFile() as f:
result = runner.invoke(cli, ['stats', str(filename), '--full-config', '--output', f.name])
assert result.exit_code == 0
df = pd.read_csv(f)
assert not df.empty
def test_morph_stat_invalid_config():
runner = CliRunner()
with tempfile.NamedTemporaryFile('w') as config_f:
yaml.dump({'neurite': 'invalid'}, config_f)
result = runner.invoke(cli, ['stats', '--config', config_f.name])
assert result.exit_code == 1
assert isinstance(result.exception, ConfigError)
def test_morph_stat_stdout():
runner = CliRunner()
filename = DATA / 'swc' / 'simple.swc'
result = runner.invoke(cli, ['stats', str(filename)])
assert result.exit_code == 0
def test_morph_stat_as_population():
runner = CliRunner()
filename = DATA / 'swc' / 'simple.swc'
result = runner.invoke(cli, ['stats', str(filename), '--as-population'])
assert result.exit_code == 0
def METHOD_NAME():
runner = CliRunner()
filename = DATA / 'swc' / 'simple.swc'
with tempfile.NamedTemporaryFile(suffix='.json') as f:
result = runner.invoke(cli, ['stats', str(filename), '--output', f.name])
assert result.exit_code == 0
content = json.load(f)
assert content
def test_morph_check():
runner = CliRunner()
filename = DATA / 'swc' / 'simple.swc'
with tempfile.NamedTemporaryFile() as f:
result = runner.invoke(cli, ['check', str(filename), '--output', f.name])
assert result.exit_code == 0
content = json.load(f)
assert content == {'files': {
str(filename.absolute()): {'Has basal dendrite': True,
'Has axon': True,
'Has apical dendrite': False,
'Has all nonzero segment lengths': True,
'Has all nonzero section lengths': True,
'Has all nonzero neurite radii': False,
'Has nonzero soma radius': True,
'ALL': False}},
'STATUS': 'FAIL'}
|
3,821 |
delete position limit for instrument strategy
|
from sysdata.mongodb.mongo_generic import mongoDataWithMultipleKeys
from sysdata.production.position_limits import positionLimitData
from sysobjects.production.position_limits import (
positionLimitForInstrument,
positionLimitForStrategyInstrument,
)
from sysobjects.production.tradeable_object import (
listOfInstrumentStrategies,
instrumentStrategy,
)
from syslogging.logger import *
POSITION_LIMIT_STATUS_COLLECTION = "position_limit_status"
MARKER_KEY = "marker"
MARKER_STRATEGY_INSTRUMENT = "strategy_instrument"
MARKER_INSTRUMENT = "instrument"
INSTRUMENT_KEY = "instrument_code"
STRATEGY_KEY = "strategy_name"
POSITION_LIMIT_KEY = "position_limit"
class mongoPositionLimitData(positionLimitData):
"""
Read and write data class to get override state data
"""
def __init__(self, mongo_db=None, log=get_logger("mongoPositionLimitData")):
super().__init__(log=log)
self._mongo_data = mongoDataWithMultipleKeys(
POSITION_LIMIT_STATUS_COLLECTION, mongo_db=mongo_db
)
@property
def mongo_data(self):
return self._mongo_data
def __repr__(self):
return "Data connection for position limit data, mongodb %s"
def get_all_instruments_with_limits(self) -> list:
dict_of_keys = {MARKER_KEY: MARKER_INSTRUMENT}
list_of_dicts = self.mongo_data.get_list_of_result_dicts_for_dict_keys(
dict_of_keys
)
list_of_instruments = [db_entry[INSTRUMENT_KEY] for db_entry in list_of_dicts]
return list_of_instruments
def get_all_instrument_strategies_with_limits(self) -> listOfInstrumentStrategies:
dict_of_keys = {MARKER_KEY: MARKER_STRATEGY_INSTRUMENT}
list_of_dicts = self.mongo_data.get_list_of_result_dicts_for_dict_keys(
dict_of_keys
)
list_of_instrument_strategies = [
instrumentStrategy(
strategy_name=db_entry[STRATEGY_KEY],
instrument_code=db_entry[INSTRUMENT_KEY],
)
for db_entry in list_of_dicts
]
list_of_instrument_strategies = listOfInstrumentStrategies(
list_of_instrument_strategies
)
return list_of_instrument_strategies
def METHOD_NAME(
self, instrument_strategy: instrumentStrategy
):
dict_of_keys = {
MARKER_KEY: MARKER_STRATEGY_INSTRUMENT,
STRATEGY_KEY: instrument_strategy.strategy_name,
INSTRUMENT_KEY: instrument_strategy.instrument_code,
}
self.mongo_data.delete_data_without_any_warning(dict_of_keys)
def delete_position_limit_for_instrument(self, instrument_code: str):
dict_of_keys = {MARKER_KEY: MARKER_INSTRUMENT, INSTRUMENT_KEY: instrument_code}
self.mongo_data.delete_data_without_any_warning(dict_of_keys)
def _get_abs_position_limit_for_instrument_strategy(
self, instrument_strategy: instrumentStrategy
) -> int:
dict_of_keys = {
MARKER_KEY: MARKER_STRATEGY_INSTRUMENT,
STRATEGY_KEY: instrument_strategy.strategy_name,
INSTRUMENT_KEY: instrument_strategy.instrument_code,
}
find_object_dict = self.mongo_data.get_result_dict_for_dict_keys(dict_of_keys)
position_limit = find_object_dict[POSITION_LIMIT_KEY]
return position_limit
def _get_abs_position_limit_for_instrument(
self,
instrument_code: str,
) -> int:
dict_of_keys = {MARKER_KEY: MARKER_INSTRUMENT, INSTRUMENT_KEY: instrument_code}
find_object_dict = self.mongo_data.get_result_dict_for_dict_keys(dict_of_keys)
position_limit = find_object_dict[POSITION_LIMIT_KEY]
return position_limit
def set_position_limit_for_instrument_strategy(
self, instrument_strategy: instrumentStrategy, new_position_limit: int
):
dict_of_keys = {
MARKER_KEY: MARKER_STRATEGY_INSTRUMENT,
STRATEGY_KEY: instrument_strategy.strategy_name,
INSTRUMENT_KEY: instrument_strategy.instrument_code,
}
data_dict = {POSITION_LIMIT_KEY: new_position_limit}
self.mongo_data.add_data(dict_of_keys, data_dict, allow_overwrite=True)
def set_position_limit_for_instrument(
self, instrument_code: str, new_position_limit: int
):
dict_of_keys = {MARKER_KEY: MARKER_INSTRUMENT, INSTRUMENT_KEY: instrument_code}
data_dict = {POSITION_LIMIT_KEY: new_position_limit}
self.mongo_data.add_data(dict_of_keys, data_dict, allow_overwrite=True)
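    # Illustrative example (assumed document shape; "macro" and "SOFR" are
    # hypothetical names): setting a strategy/instrument limit of 3 conceptually
    # stores a record combining the key dict and the data dict, e.g.
    #   {"marker": "strategy_instrument", "strategy_name": "macro",
    #    "instrument_code": "SOFR", "position_limit": 3}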
|
3,822 |
test method set to local ip
|
from unittest import TestCase
from lamden.crypto.wallet import Wallet
from lamden.network import Network
from lamden.peer import Peer
import asyncio
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
GET_ALL_PEERS = "get_all_peers"
GET_LATEST_BLOCK = 'get_latest_block'
class TestMultiNode(TestCase):
def setUp(self):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
self.networks = []
def tearDown(self):
for network in self.networks:
if network.running:
network.stop()
del self.networks
loop = asyncio.get_event_loop()
loop.stop()
loop.close()
def create_network(self, index=0):
network = Network(
wallet=Wallet(),
socket_ports=self.create_socket_ports(index),
)
network.ip = '127.0.0.1'
network.add_action(GET_ALL_PEERS, self.get_peer_list)
network.add_action(GET_LATEST_BLOCK, self.get_latest_block)
self.networks.append(network)
network.get_all_peers = self.get_peer_list
network.router.cred_provider.get_all_peers = self.get_peer_list
return network
def get_peer_list(self):
return [network.wallet.verifying_key for network in self.networks]
def get_latest_block(self):
return {}
def start_network(self, network):
tasks = asyncio.gather(
network.start()
)
loop = asyncio.get_event_loop()
res = loop.run_until_complete(tasks)
return res
def start_all_networks(self):
for network in self.networks:
self.start_network(network=network)
def ensure_async_process(self, process):
loop = asyncio.get_event_loop()
asyncio.set_event_loop(loop=loop)
asyncio.ensure_future(process())
def await_async_process(self, process):
tasks = asyncio.gather(
process()
)
loop = asyncio.get_event_loop()
res = loop.run_until_complete(tasks)
return res
def async_sleep(self, delay):
tasks = asyncio.gather(
asyncio.sleep(delay)
)
loop = asyncio.get_event_loop()
loop.run_until_complete(tasks)
def create_socket_ports(self, index=0):
return {
'router': 19000 + index,
'publisher': 19080 + index,
'webserver': 18080 + index
}
def test_connects_to_peer_network(self):
# Create two network instances
network_1 = self.create_network()
self.start_network(network=network_1)
self.assertTrue(network_1.running)
network_2 = self.create_network(index=1)
self.start_network(network=network_2)
self.assertTrue(network_2.running)
# connect networks to each other
network_1.connect(ip=network_2.external_address, vk=network_2.vk)
# await connections
self.async_sleep(delay=1)
# verify connections
peer_1 = network_1.get_peer(network_2.vk)
self.assertTrue(peer_1.running)
peer_2 = network_2.get_peer(network_1.vk)
self.assertTrue(peer_2.running)
def test_network_propagates_joined_peers(self):
# Create two network instances
network_1 = self.create_network()
self.start_network(network=network_1)
self.assertTrue(network_1.running)
network_2 = self.create_network(index=1)
self.start_network(network=network_2)
self.assertTrue(network_2.running)
# connect networks to each other
network_1.connect(ip=network_2.external_address, vk=network_2.vk)
# await connections
self.async_sleep(delay=1)
# verify connections
peer_1 = network_1.get_peer(network_2.vk)
self.assertTrue(peer_1.running)
peer_2 = network_2.get_peer(network_1.vk)
self.assertTrue(peer_2.running)
# Create new network
network_3 = self.create_network(index=2)
self.start_network(network=network_3)
# Join to one peer on the network
network_3.connect(ip=network_1.external_address, vk=network_1.vk)
# await connect
self.async_sleep(1)
peer_3 = network_3.get_peer(vk=network_1.vk)
self.assertTrue(peer_3.running)
# await connect
self.async_sleep(1)
# All networks joined new peer
for network in self.networks:
self.assertEqual(2, len(network.peers))
for peer in network.peers.values():
self.assertTrue(peer.running)
def test_num_of_peers_zero(self):
network_1 = self.create_network()
self.assertEqual(0, network_1.num_of_peers())
def test_num_of_peers(self):
network_1 = self.create_network()
network_1.peers['node_2'] = {}
network_1.peers['node_3'] = {}
self.assertEqual(2, network_1.num_of_peers())
def test_num_of_peers_connected_zero(self):
network_1 = self.create_network()
self.assertEqual(0, network_1.num_of_peers_connected())
def test_num_of_peers_connected(self):
network_1 = self.create_network()
network_1.peers['node_2'] = Peer()
network_1.peers['node_3'] = Peer(dealer_running=False)
self.assertEqual(1, network_1.num_of_peers_connected())
def test_all_peers_connected_True(self):
network_1 = self.create_network()
network_1.peers['node_2'] = Peer()
network_1.peers['node_3'] = Peer()
self.assertTrue(network_1.all_peers_connected())
def test_all_peers_connected_False(self):
network_1 = self.create_network()
network_1.peers['node_2'] = Peer()
network_1.peers['node_3'] = Peer(subscriber_running=False)
self.assertFalse(network_1.all_peers_connected())
def test_reconnect_peer(self):
# Create two network instances
network_1 = self.create_network()
self.start_network(network=network_1)
self.assertTrue(network_1.running)
network_2 = self.create_network(index=1)
self.start_network(network=network_2)
self.assertTrue(network_2.running)
# connect networks to each other
network_1.connect(ip=network_2.external_address, vk=network_2.vk)
# await connections
self.async_sleep(delay=1)
# Disable Network 2
network_2.router.pause()
# Call reconnect loop on other network
peer = network_1.get_peer(vk=network_2.vk)
peer.dealer.check_connection()
self.async_sleep(delay=1)
self.assertFalse(peer.is_running)
self.assertTrue(peer.reconnecting)
# Enable Network 2
network_2.router.unpause()
# await Network 1 reconnects to network 2
self.async_sleep(delay=2.5)
net_1_all_connected = network_1.all_peers_connected()
net_2_all_connected = network_2.all_peers_connected()
self.assertTrue(net_1_all_connected)
self.assertTrue(net_2_all_connected)
def METHOD_NAME(self):
network = Network(
wallet=Wallet(),
socket_ports=self.create_socket_ports(index=0),
)
network.set_to_local()
self.assertTrue(network.local)
self.assertEqual('127.0.0.1', network.external_ip)
|
3,823 |
iscase
|
'''Define SearchEngine for search dialogs.'''
import re
from Tkinter import StringVar, BooleanVar, TclError
import tkMessageBox
def get(root):
'''Return the singleton SearchEngine instance for the process.
The single SearchEngine saves settings between dialog instances.
If there is not a SearchEngine already, make one.
'''
if not hasattr(root, "_searchengine"):
root._searchengine = SearchEngine(root)
# This creates a cycle that persists until root is deleted.
return root._searchengine
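# Illustrative usage sketch (not part of the original module): any Tk root can
# serve as the anchor object, e.g.
#   root = Tk()
#   engine = get(root)
#   assert engine is get(root)   # the same instance is reused by later dialogs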
class SearchEngine:
"""Handles searching a text widget for Find, Replace, and Grep."""
def __init__(self, root):
'''Initialize Variables that save search state.
The dialogs bind these to the UI elements present in the dialogs.
'''
self.root = root # need for report_error()
self.patvar = StringVar(root, '') # search pattern
self.revar = BooleanVar(root, False) # regular expression?
self.casevar = BooleanVar(root, False) # match case?
self.wordvar = BooleanVar(root, False) # match whole word?
self.wrapvar = BooleanVar(root, True) # wrap around buffer?
self.backvar = BooleanVar(root, False) # search backwards?
# Access methods
def getpat(self):
return self.patvar.get()
def setpat(self, pat):
self.patvar.set(pat)
def isre(self):
return self.revar.get()
def METHOD_NAME(self):
return self.casevar.get()
def isword(self):
return self.wordvar.get()
def iswrap(self):
return self.wrapvar.get()
def isback(self):
return self.backvar.get()
# Higher level access methods
def setcookedpat(self, pat):
"Set pattern after escaping if re."
# called only in SearchDialog.py: 66
if self.isre():
pat = re.escape(pat)
self.setpat(pat)
def getcookedpat(self):
pat = self.getpat()
if not self.isre(): # if True, see setcookedpat
pat = re.escape(pat)
if self.isword():
pat = r"\b%s\b" % pat
return pat
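    # Illustrative example (not part of the original class): with a plain
    # (non-regex) pattern 'n.b.' and "whole word" enabled, getcookedpat()
    # returns r'\bn\.b\.\b' -- the dots are escaped first, then \b word
    # anchors are wrapped around the escaped pattern.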
def getprog(self):
"Return compiled cooked search pattern."
pat = self.getpat()
if not pat:
self.report_error(pat, "Empty regular expression")
return None
pat = self.getcookedpat()
flags = 0
if not self.METHOD_NAME():
flags = flags | re.IGNORECASE
try:
prog = re.compile(pat, flags)
except re.error as what:
args = what.args
msg = args[0]
col = args[1] if len(args) >= 2 else -1
self.report_error(pat, msg, col)
return None
return prog
def report_error(self, pat, msg, col=-1):
# Derived class could override this with something fancier
msg = "Error: " + str(msg)
if pat:
msg = msg + "\nPattern: " + str(pat)
if col >= 0:
msg = msg + "\nOffset: " + str(col)
tkMessageBox.showerror("Regular expression error",
msg, master=self.root)
def search_text(self, text, prog=None, ok=0):
'''Return (lineno, matchobj) or None for forward/backward search.
This function calls the right function with the right arguments.
        It directly returns the result of that call.
Text is a text widget. Prog is a precompiled pattern.
The ok parameter is a bit complicated as it has two effects.
        If there is a selection, the search begins at either end,
depending on the direction setting and ok, with ok meaning that
the search starts with the selection. Otherwise, search begins
at the insert mark.
To aid progress, the search functions do not return an empty
match at the starting position unless ok is True.
'''
if not prog:
prog = self.getprog()
if not prog:
return None # Compilation failed -- stop
wrap = self.wrapvar.get()
first, last = get_selection(text)
if self.isback():
if ok:
start = last
else:
start = first
line, col = get_line_col(start)
res = self.search_backward(text, prog, line, col, wrap, ok)
else:
if ok:
start = first
else:
start = last
line, col = get_line_col(start)
res = self.search_forward(text, prog, line, col, wrap, ok)
return res
def search_forward(self, text, prog, line, col, wrap, ok=0):
wrapped = 0
startline = line
chars = text.get("%d.0" % line, "%d.0" % (line+1))
while chars:
m = prog.search(chars[:-1], col)
if m:
if ok or m.end() > col:
return line, m
line = line + 1
if wrapped and line > startline:
break
col = 0
ok = 1
chars = text.get("%d.0" % line, "%d.0" % (line+1))
if not chars and wrap:
wrapped = 1
wrap = 0
line = 1
chars = text.get("1.0", "2.0")
return None
def search_backward(self, text, prog, line, col, wrap, ok=0):
wrapped = 0
startline = line
chars = text.get("%d.0" % line, "%d.0" % (line+1))
while 1:
m = search_reverse(prog, chars[:-1], col)
if m:
if ok or m.start() < col:
return line, m
line = line - 1
if wrapped and line < startline:
break
ok = 1
if line <= 0:
if not wrap:
break
wrapped = 1
wrap = 0
pos = text.index("end-1c")
line, col = map(int, pos.split("."))
chars = text.get("%d.0" % line, "%d.0" % (line+1))
col = len(chars) - 1
return None
def search_reverse(prog, chars, col):
'''Search backwards and return an re match object or None.
This is done by searching forwards until there is no match.
Prog: compiled re object with a search method returning a match.
Chars: line of text, without \\n.
Col: stop index for the search; the limit for match.end().
'''
m = prog.search(chars)
if not m:
return None
found = None
i, j = m.span() # m.start(), m.end() == match slice indexes
while i < col and j <= col:
found = m
if i == j:
j = j+1
m = prog.search(chars, j)
if not m:
break
i, j = m.span()
return found
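# Illustrative example (not part of the original module):
#   search_reverse(re.compile('a'), 'banana', 3).span() == (1, 2)
# The forward scan visits the matches starting at 1 and 3, but only a match
# that starts before col and ends at or before col is kept, so the rightmost
# acceptable match -- span (1, 2) -- is returned.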
def get_selection(text):
'''Return tuple of 'line.col' indexes from selection or insert mark.
'''
try:
first = text.index("sel.first")
last = text.index("sel.last")
except TclError:
first = last = None
if not first:
first = text.index("insert")
if not last:
last = first
return first, last
def get_line_col(index):
'''Return (line, col) tuple of ints from 'line.col' string.'''
line, col = map(int, index.split(".")) # Fails on invalid index
return line, col
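# Illustrative example (not part of the original module): get_line_col('12.5')
# returns (12, 5); Tk text indices are '<line>.<column>' strings with 1-based
# lines and 0-based columns.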
if __name__ == "__main__":
import unittest
unittest.main('idlelib.idle_test.test_searchengine', verbosity=2, exit=False)
|
3,824 |
test assign none with allow none false
|
# (C) Copyright 2005-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
"""
Tests for the Date trait type.
"""
import datetime
import unittest
from traits.testing.optional_dependencies import requires_traitsui, traitsui
from traits.api import Date, HasStrictTraits, TraitError
#: Unix epoch date.
UNIX_EPOCH = datetime.date(1970, 1, 1)
#: Windows NT epoch
NT_EPOCH = datetime.date(1600, 1, 1)
class HasDateTraits(HasStrictTraits):
#: Simple case - no default, no parameters, no metadata
simple_date = Date()
#: Date with default
epoch = Date(UNIX_EPOCH)
#: Date with default provided via keyword.
alternative_epoch = Date(default_value=NT_EPOCH)
#: Datetime instances prohibited
datetime_prohibited = Date(allow_datetime=False)
#: Datetime instances allowed
datetime_allowed = Date(allow_datetime=True)
#: None prohibited
none_prohibited = Date(allow_none=False)
#: None allowed
none_allowed = Date(allow_none=True)
#: Strictly a non-None non-datetime date
strict = Date(allow_datetime=False, allow_none=False)
class TestDate(unittest.TestCase):
def test_default(self):
obj = HasDateTraits()
self.assertEqual(obj.simple_date, None)
self.assertEqual(obj.epoch, UNIX_EPOCH)
self.assertEqual(obj.alternative_epoch, NT_EPOCH)
def test_assign_date(self):
test_date = datetime.date(1975, 2, 13)
obj = HasDateTraits()
obj.simple_date = test_date
self.assertEqual(obj.simple_date, test_date)
def test_assign_non_date(self):
obj = HasDateTraits()
with self.assertRaises(TraitError) as exception_context:
obj.simple_date = "1975-2-13"
message = str(exception_context.exception)
self.assertIn("must be a non-datetime date, but", message)
def test_assign_none_with_allow_none_not_given(self):
obj = HasDateTraits(simple_date=UNIX_EPOCH)
with self.assertRaises(TraitError) as exception_context:
obj.simple_date = None
self.assertEqual(obj.simple_date, UNIX_EPOCH)
message = str(exception_context.exception)
self.assertIn("must be a non-datetime date, but", message)
def METHOD_NAME(self):
obj = HasDateTraits(none_prohibited=UNIX_EPOCH)
with self.assertRaises(TraitError) as exception_context:
obj.none_prohibited = None
message = str(exception_context.exception)
self.assertIn("must be a non-datetime date, but", message)
def test_assign_none_with_allow_none_true(self):
obj = HasDateTraits(none_allowed=UNIX_EPOCH)
self.assertIsNotNone(obj.none_allowed)
obj.none_allowed = None
self.assertIsNone(obj.none_allowed)
def test_assign_datetime_with_allow_datetime_false(self):
test_datetime = datetime.datetime(1975, 2, 13)
obj = HasDateTraits()
with self.assertRaises(TraitError) as exception_context:
obj.datetime_prohibited = test_datetime
message = str(exception_context.exception)
self.assertIn("must be a non-datetime date, but", message)
def test_assign_datetime_with_allow_datetime_true(self):
test_datetime = datetime.datetime(1975, 2, 13)
obj = HasDateTraits()
obj.datetime_allowed = test_datetime
self.assertEqual(obj.datetime_allowed, test_datetime)
def test_assign_datetime_with_allow_datetime_not_given(self):
# For traits where "allow_datetime" is not specified, a
# DeprecationWarning should be issued on assignment of datetime.
test_date = datetime.date(2023, 1, 11)
test_datetime = datetime.datetime(1975, 2, 13)
obj = HasDateTraits(simple_date=test_date)
with self.assertRaises(TraitError) as exception_context:
obj.simple_date = test_datetime
self.assertEqual(obj.simple_date, test_date)
message = str(exception_context.exception)
self.assertIn("must be a non-datetime date, but", message)
def test_allow_none_false_allow_datetime_false(self):
obj = HasDateTraits(strict=UNIX_EPOCH)
with self.assertRaises(TraitError) as exception_context:
obj.strict = None
message = str(exception_context.exception)
self.assertIn("must be a non-datetime date, but", message)
@requires_traitsui
def test_get_editor(self):
obj = HasDateTraits()
trait = obj.base_trait("epoch")
editor_factory = trait.get_editor()
self.assertIsInstance(editor_factory, traitsui.api.DateEditor)
|
3,825 |
test copy in buffers with py error
|
import pytest
import string
from random import randrange, choice
from psycopg.pq import Format
from psycopg import sql, errors as e
from psycopg.adapt import PyFormat
from psycopg.types.numeric import Int4
from ..utils import eur, gc_collect, gc_count
from ..test_copy import sample_text, sample_binary # noqa
from ..test_copy import sample_records
from ..test_copy_async import ensure_table
from .test_copy import sample_tabledef, copyopt
pytestmark = [pytest.mark.crdb, pytest.mark.anyio]
@pytest.mark.parametrize(
"format, buffer",
[(Format.TEXT, "sample_text"), (Format.BINARY, "sample_binary")],
)
async def test_copy_in_buffers(aconn, format, buffer):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
async with cur.copy(f"copy copy_in from stdin {copyopt(format)}") as copy:
await copy.write(globals()[buffer])
await cur.execute("select * from copy_in order by 1")
data = await cur.fetchall()
assert data == sample_records
async def test_copy_in_buffers_pg_error(aconn):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
with pytest.raises(e.UniqueViolation):
async with cur.copy("copy copy_in from stdin") as copy:
await copy.write(sample_text)
await copy.write(sample_text)
assert aconn.info.transaction_status == aconn.TransactionStatus.INERROR
async def test_copy_in_str(aconn):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
async with cur.copy("copy copy_in from stdin") as copy:
await copy.write(sample_text.decode())
await cur.execute("select * from copy_in order by 1")
data = await cur.fetchall()
assert data == sample_records
@pytest.mark.xfail(reason="bad sqlstate - CRDB #81559")
async def test_copy_in_error(aconn):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
with pytest.raises(e.QueryCanceled):
async with cur.copy("copy copy_in from stdin with binary") as copy:
await copy.write(sample_text.decode())
assert aconn.info.transaction_status == aconn.TransactionStatus.INERROR
@pytest.mark.parametrize("format", Format)
async def test_copy_in_empty(aconn, format):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
async with cur.copy(f"copy copy_in from stdin {copyopt(format)}"):
pass
assert aconn.info.transaction_status == aconn.TransactionStatus.INTRANS
assert cur.rowcount == 0
@pytest.mark.slow
async def test_copy_big_size_record(aconn):
cur = aconn.cursor()
await ensure_table(cur, "id serial primary key, data text")
data = "".join(chr(randrange(1, 256)) for i in range(10 * 1024 * 1024))
async with cur.copy("copy copy_in (data) from stdin") as copy:
await copy.write_row([data])
await cur.execute("select data from copy_in limit 1")
assert (await cur.fetchone())[0] == data
@pytest.mark.slow
async def test_copy_big_size_block(aconn):
cur = aconn.cursor()
await ensure_table(cur, "id serial primary key, data text")
data = "".join(choice(string.ascii_letters) for i in range(10 * 1024 * 1024))
copy_data = data + "\n"
async with cur.copy("copy copy_in (data) from stdin") as copy:
await copy.write(copy_data)
await cur.execute("select data from copy_in limit 1")
assert (await cur.fetchone())[0] == data
async def test_copy_in_buffers_with_pg_error(aconn):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
with pytest.raises(e.UniqueViolation):
async with cur.copy("copy copy_in from stdin") as copy:
await copy.write(sample_text)
await copy.write(sample_text)
assert aconn.info.transaction_status == aconn.TransactionStatus.INERROR
@pytest.mark.parametrize("format", Format)
async def test_copy_in_records(aconn, format):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
async with cur.copy(f"copy copy_in from stdin {copyopt(format)}") as copy:
for row in sample_records:
if format == Format.BINARY:
row = tuple(
Int4(i) if isinstance(i, int) else i for i in row
) # type: ignore[assignment]
await copy.write_row(row)
await cur.execute("select * from copy_in order by 1")
data = await cur.fetchall()
assert data == sample_records
@pytest.mark.parametrize("format", Format)
async def test_copy_in_records_set_types(aconn, format):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
async with cur.copy(f"copy copy_in from stdin {copyopt(format)}") as copy:
copy.set_types(["int4", "int4", "text"])
for row in sample_records:
await copy.write_row(row)
await cur.execute("select * from copy_in order by 1")
data = await cur.fetchall()
assert data == sample_records
@pytest.mark.parametrize("format", Format)
async def test_copy_in_records_binary(aconn, format):
cur = aconn.cursor()
await ensure_table(cur, "col1 serial primary key, col2 int4, data text")
async with cur.copy(
f"copy copy_in (col2, data) from stdin {copyopt(format)}"
) as copy:
for row in sample_records:
await copy.write_row((None, row[2]))
await cur.execute("select col2, data from copy_in order by 2")
data = await cur.fetchall()
assert data == [(None, "hello"), (None, "world")]
@pytest.mark.crdb_skip("copy canceled")
async def METHOD_NAME(aconn):
cur = aconn.cursor()
await ensure_table(cur, sample_tabledef)
with pytest.raises(e.QueryCanceled) as exc:
async with cur.copy("copy copy_in from stdin") as copy:
await copy.write(sample_text)
raise Exception("nuttengoggenio")
assert "nuttengoggenio" in str(exc.value)
assert aconn.info.transaction_status == aconn.TransactionStatus.INERROR
async def test_copy_in_allchars(aconn):
cur = aconn.cursor()
await ensure_table(cur, "col1 int primary key, col2 int, data text")
async with cur.copy("copy copy_in from stdin") as copy:
for i in range(1, 256):
await copy.write_row((i, None, chr(i)))
await copy.write_row((ord(eur), None, eur))
await cur.execute(
"""
select col1 = ascii(data), col2 is null, length(data), count(*)
from copy_in group by 1, 2, 3
"""
)
data = await cur.fetchall()
assert data == [(True, True, 1, 256)]
@pytest.mark.slow
@pytest.mark.parametrize(
"fmt, set_types",
[(Format.TEXT, True), (Format.TEXT, False), (Format.BINARY, True)],
)
@pytest.mark.crdb_skip("copy array")
async def test_copy_from_leaks(aconn_cls, dsn, faker, fmt, set_types):
faker.format = PyFormat.from_pq(fmt)
faker.choose_schema(ncols=20)
faker.make_records(20)
async def work():
async with await aconn_cls.connect(dsn) as conn:
async with conn.cursor(binary=fmt) as cur:
await cur.execute(faker.drop_stmt)
await cur.execute(faker.create_stmt)
stmt = sql.SQL("copy {} ({}) from stdin {}").format(
faker.table_name,
sql.SQL(", ").join(faker.fields_names),
sql.SQL("with binary" if fmt else ""),
)
async with cur.copy(stmt) as copy:
if set_types:
copy.set_types(faker.types_names)
for row in faker.records:
await copy.write_row(row)
await cur.execute(faker.select_stmt)
recs = await cur.fetchall()
for got, want in zip(recs, faker.records):
faker.assert_record(got, want)
gc_collect()
n = []
for i in range(3):
await work()
gc_collect()
n.append(gc_count())
assert n[0] == n[1] == n[2], f"objects leaked: {n[1] - n[0]}, {n[2] - n[1]}"
|
3,826 |
test grange10
|
from unittest import TestCase
from timApp.markdown.markdownconverter import genfields, gfrange
class TestGenfields(TestCase):
def test_genfields(self):
s1 = ["d1"]
e1 = "{#d1 stem: 'd1'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in normal case")
def test_genfields2(self):
s1 = ["d(1,3)"]
e1 = "{#d1 stem: 'd1'#}{#d2 stem: 'd2'#}{#d3 stem: 'd3'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in 3 field case")
def test_genfields3(self):
s1 = ["d(1,3)"]
e1 = "{#d1 header: 'd1'#}{#d2 header: 'd2'#}{#d3 header: 'd3'#}"
r1 = genfields(s1, "", "header")
self.assertEqual(e1, r1, "Not same in 3 field header case")
def test_genfields4(self):
s1 = ["d(1,3)=demo"]
e1 = "{#d1 stem: 'demo1'#}{#d2 stem: 'demo2'#}{#d3 stem: 'demo3'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in 3 field demo case")
def test_genfields5(self):
s1 = ["d(1,2)=demo"]
e1 = "{#d1 stem: 'demo1', autosave: true, cols: 3#}{#d2 stem: 'demo2', autosave: true, cols: 3#}"
r1 = genfields(s1, "autosave: true, cols: 3")
self.assertEqual(e1, r1, "Not same in 2 field attrs case")
def test_genfields6(self):
s1 = "d(1,2)=demo;s1"
e1 = "{#d1 stem: 'demo1'#}{#d2 stem: 'demo2'#}{#s1 stem: 's1'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in 2 field attrs case")
def test_genfields7(self):
s1 = "d:cbfield(1,2)=demo;s1"
e1 = "{#d1:cbfield stem: 'demo1'#}{#d2:cbfield stem: 'demo2'#}{#s1 stem: 's1'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in 2 field attrs case")
def test_genfields8(self):
s1 = "d1:cbfield;d2:cbfield;s1"
e1 = "{#d1:cbfield stem: 'd1'#}{#d2:cbfield stem: 'd2'#}{#s1 stem: 's1'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in 2 field attrs case")
def test_genfields9(self):
s1 = "d1:cbfield=d1;d2:cbfield=d2;s1"
e1 = "{#d1:cbfield stem: 'd1'#}{#d2:cbfield stem: 'd2'#}{#s1 stem: 's1'#}"
r1 = genfields(s1)
self.assertEqual(e1, r1, "Not same in 2 field attrs case")
def test_grange(self):
s1 = "d"
e1 = "{#d1 stem: 'd1'#}{#d2 stem: 'd2'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in normal case")
def test_grange2(self):
s1 = "d=demo"
e1 = "{#d1 stem: 'demo1'#}{#d2 stem: 'demo2'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in demo case")
def test_grange3(self):
s1 = "d=demo;t"
e1 = "{#d1 stem: 'demo1'#}{#d2 stem: 'demo2'#}{#t stem: 't'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in multiple case")
def test_grange4(self):
s1 = "d;t=ta;b=tb"
e1 = "{#d1 stem: 'd1'#}{#d2 stem: 'd2'#}{#t stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in multiple case")
def test_grange5(self):
s1 = "d:cbfield;t:cbfield=ta;b=tb"
e1 = "{#d1:cbfield stem: 'd1'#}{#d2:cbfield stem: 'd2'#}{#t:cbfield stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in field type case no alias")
def test_grange6(self):
s1 = "d:cbfield=;t:cbfield=ta;b=tb"
e1 = "{#d1:cbfield stem: '1'#}{#d2:cbfield stem: '2'#}{#t:cbfield stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in field type case empty alias")
def test_grange7(self):
s1 = "d{0}a:cbfield=;t:cbfield=ta;b=tb"
e1 = "{#d1a:cbfield stem: '1'#}{#d2a:cbfield stem: '2'#}{#t:cbfield stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in field type case own format empty alias")
def test_grange8(self):
s1 = "d{0}a:cbfield;t:cbfield=ta;b=tb"
e1 = "{#d1a:cbfield stem: 'd1a'#}{#d2a:cbfield stem: 'd2a'#}{#t:cbfield stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in field type case own format no alias")
def test_grange9(self):
s1 = "d{0}a:cbfield=a{0}b;t:cbfield=ta;b=tb"
e1 = "{#d1a:cbfield stem: 'a1b'#}{#d2a:cbfield stem: 'a2b'#}{#t:cbfield stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(
e1, r1, "Not same in field type case own format format in alias"
)
def METHOD_NAME(self):
s1 = "d{0}a:cbfield=a;t:cbfield=ta;b=tb"
e1 = "{#d1a:cbfield stem: 'a1'#}{#d2a:cbfield stem: 'a2'#}{#t:cbfield stem: 'ta'#}{#b stem: 'tb'#}"
r1 = gfrange(s1, 1, 2)
self.assertEqual(e1, r1, "Not same in field type case own format alias")
def test_grange11(self):
s1 = "d"
e1 = "{#d5 stem: 'd5'#}{#d4 stem: 'd4'#}{#d3 stem: 'd3'#}"
r1 = gfrange(s1, 5, 3)
self.assertEqual(e1, r1, "Not same in 5,3")
|
3,827 |
gauge
|
from __future__ import annotations
import logging
import os
import sys
import typing as t
from functools import partial
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ... import external_typing as ext
logger = logging.getLogger(__name__)
class PrometheusClient:
def __init__(
self,
*,
multiproc: bool = True,
multiproc_dir: str | None = None,
):
"""
PrometheusClient is BentoML's own prometheus client that extends the official Python client.
It sets up a multiprocess dir for Prometheus to work in multiprocess mode, which is required
for BentoML to work in production.
.. note::
For Prometheus to behave properly, ``prometheus_client`` must be imported after this client
            is called. This is because ``prometheus_client`` relies on ``PROMETHEUS_MULTIPROC_DIR``, which
will be set by this client.
For API documentation, refer to https://docs.bentoml.com/en/latest/reference/metrics.html.
"""
if multiproc:
assert multiproc_dir is not None, "multiproc_dir must be provided"
self.multiproc = multiproc
self.multiproc_dir: str | None = multiproc_dir
self._registry = None
self._imported = False
self._pid: int | None = None
@property
def prometheus_client(self):
if self.multiproc and not self._imported:
# step 1: check environment
assert (
"prometheus_client" not in sys.modules
), "prometheus_client is already imported, multiprocessing will not work properly"
assert (
self.multiproc_dir
), f"Invalid prometheus multiproc directory: {self.multiproc_dir}"
assert os.path.isdir(self.multiproc_dir)
os.environ["PROMETHEUS_MULTIPROC_DIR"] = self.multiproc_dir
# step 2:
import prometheus_client
import prometheus_client.exposition
import prometheus_client.metrics
import prometheus_client.metrics_core
import prometheus_client.multiprocess
import prometheus_client.parser
self._imported = True
return prometheus_client
@property
def registry(self):
if self._registry is None:
if self.multiproc:
self._pid = os.getpid()
self._registry = self.prometheus_client.REGISTRY
else:
if self.multiproc:
assert self._pid is not None
assert (
os.getpid() == self._pid
), "The current process's different than the process which the prometheus client gets created"
return self._registry
def __del__(self):
self.mark_process_dead()
def mark_process_dead(self) -> None:
if self.multiproc:
assert self._pid is not None
assert (
os.getpid() == self._pid
), "The current process's different than the process which the prometheus client gets created"
self.prometheus_client.multiprocess.mark_process_dead(self._pid)
def start_http_server(self, port: int, addr: str = "") -> None:
self.prometheus_client.start_http_server(
port=port,
addr=addr,
registry=self.registry,
)
start_wsgi_server = start_http_server
def write_to_textfile(self, path: str) -> None:
"""
Write metrics to given path. This is intended to be used with
        the Node exporter textfile collector.
Args:
path: path to write the metrics to. This file must end
with '.prom' for the textfile collector to process it.
"""
self.prometheus_client.write_to_textfile(path, registry=self.registry)
def make_wsgi_app(self) -> ext.WSGIApp:
# Used by gRPC prometheus server.
return self.prometheus_client.make_wsgi_app(registry=self.registry) # type: ignore (unfinished prometheus types)
def generate_latest(self):
if self.multiproc:
registry = self.prometheus_client.CollectorRegistry()
self.prometheus_client.multiprocess.MultiProcessCollector(registry)
return self.prometheus_client.generate_latest(registry)
else:
return self.prometheus_client.generate_latest()
def text_string_to_metric_families(self) -> t.Generator[Metric, None, None]:
yield from self.prometheus_client.parser.text_string_to_metric_families(
self.generate_latest().decode("utf-8")
)
@property
def CONTENT_TYPE_LATEST(self) -> str:
"""
Returns:
str: Content type of the latest text format
"""
return self.prometheus_client.CONTENT_TYPE_LATEST
# For all of the documentation for instruments metrics below, we will extend
# upon prometheus_client's documentation, since their code segment aren't rst friendly, and
# not that easy to read.
@property
def Histogram(self):
return partial(self.prometheus_client.Histogram, registry=self.registry)
@property
def Counter(self):
return partial(self.prometheus_client.Counter, registry=self.registry)
@property
def Summary(self):
return partial(self.prometheus_client.Summary, registry=self.registry)
@property
def METHOD_NAME(self):
return partial(self.prometheus_client.METHOD_NAME, registry=self.registry)
@property
def Info(self):
raise RuntimeError("Info is not supported in Prometheus multiprocess mode.")
@property
def Enum(self):
raise RuntimeError("Enum is not supported in Prometheus multiprocess mode.")
@property
def Metric(self):
"""
A Metric family and its samples.
        This is a base class to be used by instrumentation clients. Custom collectors should use ``bentoml.metrics.metrics_core.GaugeMetricFamily``, ``bentoml.metrics.metrics_core.CounterMetricFamily``, or ``bentoml.metrics.metrics_core.SummaryMetricFamily`` instead.
"""
return partial(self.prometheus_client.Metric, registry=self.registry)
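# Illustrative usage sketch (not part of the original module; the directory and
# metric names below are hypothetical):
#   client = PrometheusClient(multiproc=True, multiproc_dir="/tmp/prometheus_multiproc")
#   request_counter = client.Counter(
#       name="request_total",
#       documentation="Total number of requests",
#       labelnames=["endpoint"],
#   )
#   request_counter.labels(endpoint="/predict").inc()
#   payload = client.generate_latest()  # aggregates samples across worker processes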
|
3,828 |
test list faq language available
|
"""
All test cases of the `faq` views.
"""
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from backoffice.models import FAQAnswerLocale, FAQEntry, FAQuestionLocale
def create_entry(name, rank, enabled):
return FAQEntry.objects.create(name=name, rank=rank, enabled=enabled)
def create_answer(question, lang, text=None):
if not text:
text = question.name
return FAQAnswerLocale.objects.create(question=question, language=lang, text=text)
class FAQuestionLocalizedListViewTestCase(TestCase):
"""
TestCase of the `FAQuestionLocalizedListView` view.
"""
default_lang = "en"
available_lang = "fr"
unavailable_lang = "zz"
def setUp(self):
self.client = APIClient()
self.faq_base_url = "/backoffice/faq/"
self.question1 = create_entry("i_dont_understand_why", 20, True)
self.question1_loc_en = FAQuestionLocale.objects.create(
question=self.question1,
language=self.default_lang,
text="I don't understand why.",
)
self.question1_loc_fr = FAQuestionLocale.objects.create(
question=self.question1,
language=self.available_lang,
text="Je ne comprends pas pourquoi.",
)
self.answer1_loc_en = create_answer(
self.question1, self.default_lang, "Tournesol aims to..."
)
self.answer1_loc_fr = create_answer(
self.question1, self.available_lang, "Tournesol cherche à..."
)
def test_anon_200_list_language_unknown(self):
"""
An anonymous user can access the FAQ.
"""
response = self.client.get(self.faq_base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_list_faq_language_unknown(self):
"""
When no HTTP Accept-Language header is present in the request, the
`en` translation must be returned.
"""
response = self.client.get(self.faq_base_url)
results = response.data["results"]
self.assertDictEqual(
results[0],
{
"name": self.question1.name,
"question": self.question1_loc_en.text,
"answer": self.answer1_loc_en.text,
},
)
def test_list_faq_language_unavailable(self):
"""
When the requested translation is not available in the database, the
`en` translation must be returned.
"""
response = self.client.get(self.faq_base_url, HTTP_ACCEPT_LANGUAGE=self.unavailable_lang)
results = response.data["results"]
self.assertDictEqual(
results[0],
{
"name": self.question1.name,
"question": self.question1_loc_en.text,
"answer": self.answer1_loc_en.text,
},
)
def METHOD_NAME(self):
"""
When a known language is requested, the matching translation must be
returned.
"""
response = self.client.get(self.faq_base_url, HTTP_ACCEPT_LANGUAGE=self.available_lang)
results = response.data["results"]
self.assertDictEqual(
results[0],
{
"name": self.question1.name,
"question": self.question1_loc_fr.text,
"answer": self.answer1_loc_fr.text,
},
)
def test_list_faq_question_without_answer(self):
"""
A question without answer must not be returned by the API.
"""
self.question1.answers.all().delete()
response = self.client.get(self.faq_base_url)
results = response.data["results"]
self.assertListEqual(results, [])
def test_list_faq_disabled_questions_dont_appear(self):
"""
Disabled questions must not be returned by the API.
"""
question = create_entry("new_question", 1, True)
create_answer(question, self.default_lang)
response = self.client.get(self.faq_base_url)
self.assertEqual(len(response.data["results"]), 2)
question.enabled = False
question.save(update_fields=["enabled"])
response = self.client.get(self.faq_base_url)
self.assertEqual(len(response.data["results"]), 1)
def test_list_faq_ordering(self):
"""
The questions must be ordered by rank.
"""
question = create_entry("first_question", self.question1.rank - 1, True)
create_answer(question, self.default_lang, "first_answer")
response = self.client.get(self.faq_base_url)
results = response.data["results"]
self.assertDictEqual(
results[0],
{
"name": "first_question",
"question": "first_question",
"answer": "first_answer",
},
)
self.assertDictEqual(
results[1],
{
"name": self.question1.name,
"question": self.question1_loc_en.text,
"answer": self.answer1_loc_en.text,
},
)
question.rank = self.question1.rank + 1
question.save(update_fields=["rank"])
response = self.client.get(self.faq_base_url)
results = response.data["results"]
self.assertDictEqual(
results[0],
{
"name": self.question1.name,
"question": self.question1_loc_en.text,
"answer": self.answer1_loc_en.text,
},
)
self.assertDictEqual(
results[1],
{
"name": "first_question",
"question": "first_question",
"answer": "first_answer",
},
)
|
3,829 |
main
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2021-2022 Hewlett Packard Enterprise, Inc. All rights reserved.
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: ilo_redfish_info
short_description: Gathers server information through iLO using Redfish APIs
version_added: 4.2.0
description:
- Builds Redfish URIs locally and sends them to iLO to
get information back.
- For use with HPE iLO operations that require Redfish OEM extensions.
extends_documentation_fragment:
- community.general.attributes
- community.general.attributes.info_module
options:
category:
required: true
description:
- List of categories to execute on iLO.
type: list
elements: str
command:
required: true
description:
- List of commands to execute on iLO.
type: list
elements: str
baseuri:
required: true
description:
- Base URI of iLO.
type: str
username:
description:
- Username for authenticating to iLO.
type: str
password:
description:
- Password for authenticating to iLO.
type: str
auth_token:
description:
- Security token for authenticating to iLO.
type: str
timeout:
description:
- Timeout in seconds for HTTP requests to iLO.
default: 10
type: int
author:
- "Bhavya B (@bhavya06)"
'''
EXAMPLES = '''
- name: Get iLO Sessions
community.general.ilo_redfish_info:
category: Sessions
command: GetiLOSessions
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
register: result_sessions
'''
RETURN = '''
ilo_redfish_info:
description: Returns iLO sessions.
type: dict
contains:
GetiLOSessions:
description: Returns the iLO session msg and whether the function executed successfully.
type: dict
contains:
ret:
description: Check variable to see if the information was successfully retrieved.
type: bool
msg:
description: Information of all active iLO sessions.
type: list
elements: dict
contains:
Description:
description: Provides a description of the resource.
type: str
Id:
description: The sessionId.
type: str
Name:
description: The name of the resource.
type: str
UserName:
description: Name to use to log in to the management processor.
type: str
returned: always
'''
CATEGORY_COMMANDS_ALL = {
"Sessions": ["GetiLOSessions"]
}
CATEGORY_COMMANDS_DEFAULT = {
"Sessions": "GetiLOSessions"
}
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.ilo_redfish_utils import iLORedfishUtils
def METHOD_NAME():
result = {}
category_list = []
module = AnsibleModule(
argument_spec=dict(
category=dict(required=True, type='list', elements='str'),
command=dict(required=True, type='list', elements='str'),
baseuri=dict(required=True),
username=dict(),
password=dict(no_log=True),
auth_token=dict(no_log=True),
timeout=dict(type='int', default=10)
),
required_together=[
('username', 'password'),
],
required_one_of=[
('username', 'auth_token'),
],
mutually_exclusive=[
('username', 'auth_token'),
],
supports_check_mode=True
)
creds = {"user": module.params['username'],
"pswd": module.params['password'],
"token": module.params['auth_token']}
timeout = module.params['timeout']
root_uri = "https://" + module.params['baseuri']
rf_utils = iLORedfishUtils(creds, root_uri, timeout, module)
# Build Category list
if "all" in module.params['category']:
for entry in CATEGORY_COMMANDS_ALL:
category_list.append(entry)
else:
# one or more categories specified
category_list = module.params['category']
for category in category_list:
command_list = []
# Build Command list for each Category
if category in CATEGORY_COMMANDS_ALL:
if not module.params['command']:
# True if we don't specify a command --> use default
command_list.append(CATEGORY_COMMANDS_DEFAULT[category])
elif "all" in module.params['command']:
for entry in CATEGORY_COMMANDS_ALL[category]:
command_list.append(entry)
# one or more commands
else:
command_list = module.params['command']
# Verify that all commands are valid
for cmd in command_list:
# Fail if even one command given is invalid
if cmd not in CATEGORY_COMMANDS_ALL[category]:
module.fail_json(msg="Invalid Command: %s" % cmd)
else:
# Fail if even one category given is invalid
module.fail_json(msg="Invalid Category: %s" % category)
# Organize by Categories / Commands
if category == "Sessions":
for command in command_list:
if command == "GetiLOSessions":
result[command] = rf_utils.get_ilo_sessions()
module.exit_json(ilo_redfish_info=result)
if __name__ == '__main__':
METHOD_NAME()
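# Illustrative note (not part of the original module): with category: [all] and
# command: [all], the loops above expand to the single supported pair
# Sessions / GetiLOSessions, so the module returns
# ilo_redfish_info == {"GetiLOSessions": rf_utils.get_ilo_sessions()}.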
|
3,830 |
get optparser
|
#!/usr/bin/python3
from pprint import pprint
import os, sys, re
import logging
import cmdln
import abichecker_dbmodel as DB
from abichecker_common import Config, CACHEDIR
from datetime import datetime, timedelta
class BoilderPlate(cmdln.Cmdln):
def __init__(self, *args, **kwargs):
cmdln.Cmdln.__init__(self, args, kwargs)
self.session = None
def METHOD_NAME(self):
parser = cmdln.CmdlnOptionParser(self)
parser.add_option("--dry", action="store_true", help="dry run")
parser.add_option("--debug", action="store_true", help="debug output")
parser.add_option("--verbose", action="store_true", help="verbose")
return parser
def postoptparse(self):
logging.basicConfig()
self.logger = logging.getLogger(self.optparser.prog)
if (self.options.debug):
self.logger.setLevel(logging.DEBUG)
elif (self.options.verbose):
self.logger.setLevel(logging.INFO)
DB.Base.metadata.create_all(DB.db_engine())
self.session = DB.db_session()
def do_list(self, subcmd, opts, *args):
"""${cmd_name}: foo bar
${cmd_usage}
${cmd_option_list}
"""
for req in self.session.query(DB.Request).all():
print('%s %s'%(req.id, req.state))
for a in req.abichecks:
print(' %s %s %s'%(a.dst_project, a.dst_package, a.result))
for r in a.reports:
print(' %s %10s %-25s %s'%(r.id, r.arch, r.dst_lib, r.result))
def do_prune(self, subcmd, opts, days):
"""${cmd_name}: prune old records
${cmd_usage}
${cmd_option_list}
"""
        oldest = datetime.today() - timedelta(days=int(days))
requests = self.session.query(DB.Request).filter(DB.Request.t_updated < oldest)
for req in requests:
for a in req.abichecks:
self.logger.info('prune %s %s %s', req.id, a.dst_project, a.dst_package)
for r in a.reports:
fn = os.path.join(CACHEDIR, r.htmlreport)
if os.path.exists(fn):
self.logger.info('removing %s', r.htmlreport)
os.unlink(fn)
self.session.delete(req)
self.session.commit()
def do_log(self, subcmd, opts, request_id):
"""${cmd_name}: foo bar
${cmd_usage}
${cmd_option_list}
"""
request = self.session.query(DB.Request).filter(DB.Request.id == request_id).one()
for log in request.log:
print(log.line)
def do_delete(self, subcmd, opts, request_id):
"""${cmd_name}: foo bar
${cmd_usage}
${cmd_option_list}
"""
request = self.session.query(DB.Request).filter(DB.Request.id == request_id).one()
self.session.delete(request)
self.session.commit()
def do_recheck(self, subcmd, opts, request_id):
"""${cmd_name}: set request id to seen
${cmd_usage}
${cmd_option_list}
"""
request = self.session.query(DB.Request).filter(DB.Request.id == request_id).one()
logentry = DB.Log(request_id = request_id,
line = 'manually setting state to seen. previous state: %s (%s)'%(request.state, request.result))
request.state = 'seen'
request.result = None
self.session.add(logentry)
self.session.commit()
@cmdln.option("--get", action="store_true", help="get some values")
@cmdln.option("--set", action="store_true", help="set some values")
@cmdln.option("--delete", action="store_true", help="delete some values")
def do_config(self, subcmd, opts, *args):
"""${cmd_name}: manage config file
${cmd_usage}
${cmd_option_list}
"""
config = Config(self.session)
if opts.set:
config.set(args[0], args[1])
elif opts.get:
print(config.get(args[0]))
elif opts.delete:
config.delete(args[0])
else:
for entry in config.settings():
print("%s=%s"%entry)
if __name__ == "__main__":
app = BoilderPlate()
sys.exit( app.main() )
|
3,831 |
download daymet
|
"""Download DayMet data and convert it to ATS format.
DayMet is downloaded in point mode based on lat-lon, then converted to
hdf5 files that ATS knows how to read.
"""
import requests
import datetime
import logging
import h5py
import sys, os
import numpy as np
def file_id(lat, lon):
"""Returns a lat-lon id for use in filenames"""
return '{:.4f}_{:.4f}'.format(lat,lon).replace('.','p')
def daymet_rest_url(lat, lon, start, end, vars=None):
"""Generates the DayMet Rest API URL."""
#
# NOTE: it is unclear why this was implemented this way -- shouldn't it use
# the standard requests dictionary-style syntax? But if it ain't broke
# don't fix it. --etc
#
daymet_vars = ['dayl', 'prcp', 'srad', 'swe', 'tmax', 'tmin', 'vp']
# check variable names are valid
if vars is None:
vars = daymet_vars
elif type(vars) is list:
for v in vars:
if v not in daymet_vars:
                raise RuntimeError('Requested DayMet variable "%s" is not a valid variable name. Valid names are "%r"'%(v, daymet_vars))
elif type(vars) is str:
if vars not in daymet_vars:
            raise RuntimeError('Requested DayMet variable "%s" is not a valid variable name. Valid names are "%r"'%(vars, daymet_vars))
vars = [vars,]
# generate the URL
base = 'https://daymet.ornl.gov/single-pixel/api/data?'
lat_str = 'lat={:.4f}'.format(lat)
lon_str = 'lon={:.4f}'.format(lon)
var_str = 'vars='+','.join(vars)
start_str = 'start={}'.format(start)
end_str = 'end={}'.format(end)
return base + '&'.join([lat_str, lon_str, var_str, start_str, end_str])
def read_daymet(filename):
"""Reads a text file of the form provided by DayMet"""
return np.genfromtxt(filename, skip_header=7, names=True, delimiter=',')
def METHOD_NAME(outdir, lat, lon, start, end, vars=None):
"""Calls the DayMet Rest API to get data and save raw data."""
url = daymet_rest_url(lat, lon, start, end, vars)
logging.info('Querying: %s'%url)
resp = requests.get(url)
if resp.status_code != requests.codes.ok:
logging.warning(' returned code: %r'%resp.status_code)
raise RuntimeError('Failed download on "%s" with error code "%r"'%(url, resp.status_code))
else:
logging.info(' returned code: %r'%resp.status_code)
filename = os.path.join(outdir, 'daymet_raw_%s.dat'%file_id(lat, lon))
logging.info(' writing to disk: %s'%filename)
with open(filename, 'w') as fid:
fid.write(resp.text)
return read_daymet(filename)
def daymet_to_ats(dat):
"""Accepts a numpy named array of DayMet data and returns a dictionary ATS data."""
dout = dict()
logging.info('Converting to ATS')
mean_air_temp_c = (dat['tmin_deg_c'] + dat['tmax_deg_c'])/2.0
precip_ms = dat['prcp_mmday'] / 1.e3 / 86400.
dout['time [s]'] = np.arange(0, len(dat), 1)*86400.
dout['air temperature [K]'] = 273.15 + mean_air_temp_c
dout['incoming shortwave radiation [W m^-2]'] = dat['dayl_s']/86400*dat['srad_Wm2']
dout['vapor pressure air [Pa]'] = dat['vp_Pa']
dout['precipitation rain [m s^-1]'] = np.where(mean_air_temp_c >= 0, precip_ms, 0)
dout['precipitation snow [m SWE s^-1]'] = np.where(mean_air_temp_c < 0, precip_ms, 0)
dout['wind speed [m s^-1]'] = 4. * np.ones_like(dout['time [s]'])
return dout
def write_ats(dat, attrs, filename):
"""Accepts a dictionary of ATS data and writes it to HDF5 file."""
logging.info('Writing ATS file: {}'.format(filename))
with h5py.File(filename, 'w') as fid:
for key, val in dat.items():
fid.create_dataset(key, data=val)
for key, val in attrs.items():
fid.attrs[key] = val
return
def get_argument_parser():
"""Gets an argparse parser for use in main"""
import argparse
parser = argparse.ArgumentParser(__doc__)
parser.add_argument('lat', type=float,
help='Latitude, in decimal form, up to 4 decimal digits')
parser.add_argument('lon', type=float,
help='Longitude, in decimal form, up to 4 decimal digits')
parser.add_argument('-d', '--directory', default='.',
help='Directory in which to place output files.')
def string_to_start_date(s):
if len(s) == 4:
return datetime.datetime(int(s),1,1).date()
else:
return datetime.datetime.strptime(s, '%Y-%m-%d').date()
parser.add_argument('-s', '--start', type=string_to_start_date,
help='Start date, either YYYY or YYYY-MM-DD')
def string_to_end_date(s):
if len(s) == 4:
return datetime.datetime(int(s),12,31).date()
else:
return datetime.datetime.strptime(s, '%Y-%m-%d').date()
parser.add_argument('-e', '--end', type=string_to_end_date,
help='End date, either YYYY or YYYY-MM-DD')
parser.add_argument('--download-only', action='store_true',
help='Only download raw data.')
parser.add_argument('--raw-file',
help='Do not download, and instead use this file as raw data.')
return parser
def validate_start_end(start, end):
"""Checks that these are valid dates for use with DayMet"""
daymet_start = datetime.date(1980, 1, 1)
daymet_end = datetime.date(2018, 12, 31)
# check start time
if start is None:
start = daymet_start
else:
if start < daymet_start:
raise RuntimeError('DayMet starts at "%r", so cannot request data starting at "%r'%(daymet_start, start))
# check end time
if end is None:
end = daymet_end
else:
if end > daymet_end:
raise RuntimeError('DayMet ends at "%r", so cannot request data ending at "%r'%(daymet_end, end))
# check end and start
if end <= start:
raise RuntimeError('Requested start time %r is after requested end time %r'%(start, end))
return start, end
def daymet_attrs(lat, lon, start, end):
# set the wind speed height, which is made up
attrs = dict()
attrs['wind speed reference height [m]'] = 2.0
attrs['DayMet latitude [deg]'] = lat
attrs['DayMet longitude [deg]'] = lon
attrs['DayMet start date'] = str(start)
attrs['DayMet end date'] = str(end)
return attrs
def daymet_filename(lat, lon):
return 'daymet_raw_%s.h5'%file_id(lat, lon)
if __name__ == '__main__':
parser = get_argument_parser()
args = parser.parse_args()
start, end = validate_start_end(args.start, args.end)
# download or read daymet data
if args.raw_file is not None:
daymet = read_daymet(args.raw_file)
else:
daymet = METHOD_NAME(args.directory, args.lat, args.lon,
start, end)
# convert to ats and write
if not args.download_only:
ats = daymet_to_ats(daymet)
attrs = daymet_attrs(args.lat, args.lon, start, end)
filename_out = daymet_filename(args.lat, args.lon)
write_ats(ats, attrs, filename_out)
sys.exit(0)
|
3,832 |
getsourcenode
|
#
# Copyright 2008, 2010 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
"""Module for handling Qt Linguist Phrase Book (.qph) files.
Extract from the `Qt Linguist Manual: Translators
<http://doc.trolltech.com/4.3/linguist-translators.html>`_:
.qph Qt Phrase Book Files are human-readable XML files containing standard
phrases and their translations. These files are created and updated by Qt
Linguist and may be used by any number of projects and applications.
A DTD to define the format does not seem to exist, but the following `code
<http://qt.gitorious.org/qt/qt/blobs/4.7/tools/linguist/shared/qph.cpp>`_
provides the reference implementation for the Qt Linguist product.
"""
from lxml import etree
from translate.lang import data
from translate.storage import lisa
class QphUnit(lisa.LISAunit):
"""A single term in the qph file."""
rootNode = "phrase"
languageNode = "source"
textNode = ""
namespace = ""
def createlanguageNode(self, lang, text, purpose):
"""Returns an xml Element setup with given parameters."""
assert purpose
langset = etree.Element(self.namespaced(purpose))
langset.text = text
return langset
def METHOD_NAME(self):
return self.xmlelement.find(self.namespaced(self.languageNode))
def _gettargetnode(self):
return self.xmlelement.find(self.namespaced("target"))
def getlanguageNodes(self):
"""We override this to get source and target nodes."""
return [
n for n in [self.METHOD_NAME(), self._gettargetnode()] if n is not None
]
def addnote(self, text, origin=None, position="append"):
"""Add a note specifically in a "definition" tag"""
current_notes = self.getnotes(origin)
self.removenotes(origin)
note = etree.SubElement(self.xmlelement, self.namespaced("definition"))
note.text = "\n".join(filter(None, [current_notes, text.strip()]))
def getnotes(self, origin=None):
# TODO: consider only responding when origin has certain values
notenode = self.xmlelement.find(self.namespaced("definition"))
comment = ""
if notenode is not None:
comment = notenode.text
return comment
def removenotes(self, origin=None):
"""Remove all the translator notes."""
note = self.xmlelement.find(self.namespaced("definition"))
if note is not None:
self.xmlelement.remove(note)
class QphFile(lisa.LISAfile):
"""Class representing a QPH file store."""
UnitClass = QphUnit
Name = "Qt Phrase Book"
Mimetypes = ["application/x-qph"]
Extensions = ["qph"]
rootNode = "QPH"
bodyNode = "QPH"
XMLskeleton = """<!DOCTYPE QPH>
<QPH>
</QPH>
"""
namespace = ""
def initbody(self):
"""Initialises self.body so it never needs to be retrieved from the XML
again.
"""
self.namespace = self.document.getroot().nsmap.get(None, None)
self.header = self.document.getroot()
self.body = self.document.getroot() # The root node contains the units
def getsourcelanguage(self):
"""Get the source language for this .qph file.
We don't implement setsourcelanguage as users really shouldn't be
altering the source language in .qph files, it should be set correctly
by the extraction tools.
:return: ISO code e.g. af, fr, pt_BR
:rtype: String
"""
lang = data.normalize_code(self.header.get("sourcelanguage", "en"))
if lang == "en-us":
return "en"
return lang
def gettargetlanguage(self):
"""Get the target language for this .qph file.
:return: ISO code e.g. af, fr, pt_BR
:rtype: String
"""
return data.normalize_code(self.header.get("language"))
def settargetlanguage(self, targetlanguage):
"""Set the target language for this .qph file to *targetlanguage*.
:param targetlanguage: ISO code e.g. af, fr, pt_BR
:type targetlanguage: String
"""
if targetlanguage:
self.header.set("language", targetlanguage)
def serialize(self, out):
"""Write the XML document to the file `out`.
We have to override this to ensure mimic the Qt convention:
- no XML declaration
"""
self.document.write(
out, pretty_print=True, xml_declaration=False, encoding="utf-8"
)
|
3,833 |
validate alert context
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from datetime import datetime
from azure.cli.core.azclierror import ValidationError, InvalidArgumentValueError, RequiredArgumentMissingError
def validate_datetime_format(namespace):
format = '%Y-%m-%d %H:%M:%S'
if namespace.schedule_start_datetime:
datetime.strptime(namespace.schedule_start_datetime, format)
if namespace.schedule_end_datetime:
datetime.strptime(namespace.schedule_end_datetime, format)
if namespace.schedule_start_datetime:
if namespace.schedule_end_datetime < namespace.schedule_start_datetime:
raise ValidationError('Argument Error: end-date is before start-date')
def validate_time_format(namespace):
format = '%H:%M:%S'
if namespace.schedule_recurrence_start_time:
datetime.strptime(namespace.schedule_recurrence_start_time, format)
if namespace.schedule_recurrence_end_time:
datetime.strptime(namespace.schedule_recurrence_end_time, format)
def validate_severity(namespace):
if namespace.filter_severity:
validate_only_equals_operator(namespace.filter_severity)
for x in namespace.filter_severity[1:]:
if x not in ['Sev0', 'Sev1', 'Sev2', 'Sev3', 'Sev4']:
raise InvalidArgumentValueError('Argument Error: filter-severity values have to be one of [Equals, NotEquals, Sev0, Sev1, Sev2, Sev3, Sev4]')
def validate_monitor_condition(namespace):
if namespace.filter_monitor_condition:
validate_only_equals_operator(namespace.filter_monitor_condition)
for x in namespace.filter_monitor_condition[1:]:
if x not in ['Fired', 'Resolved']:
raise InvalidArgumentValueError('Argument Error: filter-monitor-condition values have to be one of [Equals, NotEquals, Fired, Resolved]')
def validate_signal_type(namespace):
if namespace.filter_signal_type:
validate_only_equals_operator(namespace.filter_signal_type)
for x in namespace.filter_signal_type[1:]:
if x not in ['Metric', 'Log', 'Unknown']:
raise InvalidArgumentValueError('Argument Error: filter-signal-type values have to be one of [Equals, NotEquals, Metric, Log, Unknown]')
def validate_monitor_service(namespace):
if namespace.filter_monitor_service:
validate_only_equals_operator(namespace.filter_monitor_service)
def validate_alert_rule_name(namespace):
if namespace.filter_alert_rule_name:
validate_full_operator(namespace.filter_alert_rule_name)
def validate_alert_rule_id(namespace):
if namespace.filter_alert_rule_id:
validate_full_operator(namespace.filter_alert_rule_id)
def validate_alert_rule_description(namespace):
if namespace.filter_alert_rule_description:
validate_full_operator(namespace.filter_alert_rule_description)
def METHOD_NAME(namespace):
if namespace.filter_alert_context:
validate_full_operator(namespace.filter_alert_context)
def validate_target_resource(namespace):
if namespace.filter_target_resource:
validate_full_operator(namespace.filter_target_resource)
def validate_resource_group(namespace):
if namespace.filter_resource_group:
validate_full_operator(namespace.filter_resource_group)
def validate_resource_type(namespace):
if namespace.filter_resource_type:
validate_full_operator(namespace.filter_resource_type)
def validate_full_operator(args):
if len(args) < 2:
raise RequiredArgumentMissingError('Filter Argument Error: values length can\'t be smaller than 2')
if args[0].lower() not in ['equals', 'notequals', 'contains', 'doesnotcontain']:
        raise InvalidArgumentValueError('Filter Argument Error: operator must be one of the following: Equals, NotEquals, Contains, DoesNotContain')
def validate_only_equals_operator(args):
if len(args) < 2:
raise RequiredArgumentMissingError('Filter Argument Error: values length can\'t be smaller than 2')
if args[0].lower() not in ['equals', 'notequals']:
        raise InvalidArgumentValueError('Filter Argument Error: operator must be one of the following: Equals, NotEquals')
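# Illustrative example (not part of the original module): each filter arrives as a
# list whose first element is the operator, e.g. a severity filter given as
# "Equals Sev0 Sev1" becomes ['Equals', 'Sev0', 'Sev1'] and passes validate_severity,
# while ['Contains', 'Sev0'] is rejected because only Equals/NotEquals are allowed there.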
|
3,834 |
result
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Saranya Sridharan
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
module: pids
description: "Retrieves a list of PIDs of given process name in Ansible controller/controlled machines.Returns an empty list if no process in that name exists."
short_description: Retrieves process IDs list if the process is running otherwise return empty list
author:
- Saranya Sridharan (@saranyasridharan)
requirements:
- psutil(python module)
extends_documentation_fragment:
- community.general.attributes
attributes:
check_mode:
support: full
diff_mode:
support: none
options:
name:
description: The name of the process(es) you want to get PID(s) for.
type: str
pattern:
description: The pattern (regular expression) to match the process(es) you want to get PID(s) for.
type: str
version_added: 3.0.0
ignore_case:
description: Ignore case in pattern if using the O(pattern) option.
type: bool
default: false
version_added: 3.0.0
'''
EXAMPLES = r'''
# Pass the process name
- name: Getting process IDs of the process
community.general.pids:
name: python
register: pids_of_python
- name: Printing the process IDs obtained
ansible.builtin.debug:
msg: "PIDS of python:{{pids_of_python.pids|join(',')}}"
- name: Getting process IDs of processes matching pattern
community.general.pids:
pattern: python(2(\.7)?|3(\.6)?)?\s+myapp\.py
register: myapp_pids
'''
RETURN = '''
pids:
description: Process IDs of the given process
returned: list of none, one, or more process IDs
type: list
sample: [100,200]
'''
import abc
import re
from os.path import basename
from ansible.module_utils import six
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils import deps
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
with deps.declare("psutil"):
import psutil
class PSAdapterError(Exception):
pass
@six.add_metaclass(abc.ABCMeta)
class PSAdapter(object):
NAME_ATTRS = ('name', 'cmdline')
PATTERN_ATTRS = ('name', 'exe', 'cmdline')
def __init__(self, psutil):
self._psutil = psutil
@staticmethod
def from_package(psutil):
version = LooseVersion(psutil.__version__)
if version < LooseVersion('2.0.0'):
return PSAdapter100(psutil)
elif version < LooseVersion('5.3.0'):
return PSAdapter200(psutil)
else:
return PSAdapter530(psutil)
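    # Illustrative note (not part of the original module): psutil 1.2.1 maps to
    # PSAdapter100, psutil 4.4.2 to PSAdapter200, and psutil 5.9.x to PSAdapter530,
    # matching the API changes in how per-process attributes are retrieved.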
def get_pids_by_name(self, name):
return [p.pid for p in self._process_iter(*self.NAME_ATTRS) if self._has_name(p, name)]
def _process_iter(self, *attrs):
return self._psutil.process_iter()
def _has_name(self, proc, name):
attributes = self._get_proc_attributes(proc, *self.NAME_ATTRS)
return (compare_lower(attributes['name'], name) or
attributes['cmdline'] and compare_lower(attributes['cmdline'][0], name))
def _get_proc_attributes(self, proc, *attributes):
return dict((attribute, self._get_attribute_from_proc(proc, attribute)) for attribute in attributes)
@staticmethod
@abc.abstractmethod
def _get_attribute_from_proc(proc, attribute):
pass
def get_pids_by_pattern(self, pattern, ignore_case):
flags = 0
if ignore_case:
flags |= re.I
try:
regex = re.compile(pattern, flags)
except re.error as e:
raise PSAdapterError("'%s' is not a valid regular expression: %s" % (pattern, to_native(e)))
return [p.pid for p in self._process_iter(*self.PATTERN_ATTRS) if self._matches_regex(p, regex)]
def _matches_regex(self, proc, regex):
# See https://psutil.readthedocs.io/en/latest/#find-process-by-name for more information
attributes = self._get_proc_attributes(proc, *self.PATTERN_ATTRS)
matches_name = regex.search(to_native(attributes['name']))
matches_exe = attributes['exe'] and regex.search(basename(to_native(attributes['exe'])))
matches_cmd = attributes['cmdline'] and regex.search(to_native(' '.join(attributes['cmdline'])))
return any([matches_name, matches_exe, matches_cmd])
class PSAdapter100(PSAdapter):
def __init__(self, psutil):
super(PSAdapter100, self).__init__(psutil)
@staticmethod
def _get_attribute_from_proc(proc, attribute):
return getattr(proc, attribute)
class PSAdapter200(PSAdapter):
def __init__(self, psutil):
super(PSAdapter200, self).__init__(psutil)
@staticmethod
def _get_attribute_from_proc(proc, attribute):
method = getattr(proc, attribute)
return method()
class PSAdapter530(PSAdapter):
def __init__(self, psutil):
super(PSAdapter530, self).__init__(psutil)
def _process_iter(self, *attrs):
return self._psutil.process_iter(attrs=attrs)
@staticmethod
def _get_attribute_from_proc(proc, attribute):
return proc.info[attribute]
def compare_lower(a, b):
if a is None or b is None:
# this could just be "return False" but would lead to surprising behavior if both a and b are None
return a == b
return a.lower() == b.lower()
class Pids(object):
def __init__(self, module):
deps.validate(module)
self._ps = PSAdapter.from_package(psutil)
self._module = module
self._name = module.params['name']
self._pattern = module.params['pattern']
self._ignore_case = module.params['ignore_case']
self._pids = []
def execute(self):
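        # Exactly one of name/pattern is set, enforced by required_one_of and
        # mutually_exclusive in main().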
if self._name:
self._pids = self._ps.get_pids_by_name(self._name)
else:
try:
self._pids = self._ps.get_pids_by_pattern(self._pattern, self._ignore_case)
except PSAdapterError as e:
self._module.fail_json(msg=to_native(e))
return self._module.exit_json(**self.METHOD_NAME)
@property
def METHOD_NAME(self):
return {
'pids': self._pids,
}
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(type="str"),
pattern=dict(type="str"),
ignore_case=dict(type="bool", default=False),
),
required_one_of=[
('name', 'pattern')
],
mutually_exclusive=[
('name', 'pattern')
],
supports_check_mode=True,
)
Pids(module).execute()
if __name__ == '__main__':
main()
|
3,835 |
delete line
|
from talon import Context, Module, actions
ctx = Context()
mod = Module()
mod.apps.eclipse = """
os: windows
and app.name: eclipse.exe
"""
ctx.matches = r"""
app: eclipse
"""
@ctx.action_class("app")
class AppActions:
# talon app actions
def tab_close():
actions.key("ctrl-w")
def tab_next():
actions.key("ctrl-pagedown")
def tab_previous():
actions.key("ctrl-pageup")
# action(app.tab_reopen):
def window_close():
actions.key("alt-f4")
def window_open():
actions.key("alt-w n")
@ctx.action_class("code")
class CodeActions:
# talon code actions
def toggle_comment():
actions.key("ctrl-7")
@ctx.action_class("edit")
class EditActions:
def find(text: str):
actions.key("ctrl-f")
actions.insert(text)
def line_swap_up():
actions.key("alt-up")
def line_swap_down():
actions.key("alt-down")
def line_clone():
actions.key("ctrl-alt-down")
def jump_line(n: int):
actions.key("ctrl-l")
actions.insert(str(n))
actions.key("enter")
def METHOD_NAME():
actions.key("ctrl-d")
def indent_more():
actions.key("tab")
def indent_less():
actions.key("shift-tab")
def save_all():
actions.key("ctrl-shift-s")
@ctx.action_class("user")
class UserActions:
    # generic_snippet.py support begin
# def snippet_search(text: str):
# actions.user.vscode("Insert Snippet")
# actions.insert(text)
# def snippet_insert(text: str):
# """Inserts a snippet"""
# actions.user.vscode("Insert Snippet")
# actions.insert(text)
# actions.key("enter")
# def snippet_create():
# """Triggers snippet creation"""
# actions.user.vscode("Preferences: Configure User Snippets")
# generic_snippet.py support end
# splits.py support begin
# requires https://marketplace.eclipse.org/content/handysplit
def split_clear_all():
actions.key("alt-shift-s f")
def split_clear():
actions.key("alt-shift-s f")
# action(user.split_flip):
def split_last():
actions.key("alt-shift-s t")
def split_next():
actions.key("alt-shift-s t")
def split_window_down():
actions.key("alt-shift-s m")
def split_window_horizontally():
actions.key("alt-ctrl-s s")
def split_window_right():
actions.key("alt-shift-s m")
def split_window_up():
actions.key("alt-shift-s m")
def split_window_vertically():
actions.key("alt-shift-s s")
def split_window():
actions.key("alt-ctrl-s s")
# splits.py support end
# find_and_replace.py support begin
def find(text: str):
"""Triggers find in current editor"""
actions.key("ctrl-f")
if text:
actions.insert(text)
def find_next():
actions.key("enter")
def find_previous():
actions.key("shift-enter")
def find_everywhere(text: str):
"""Triggers find across project"""
actions.key("ctrl-h")
if text:
actions.insert(text)
# todo: these commands should only be available
# when it's focused
def find_toggle_match_by_case():
"""Toggles find match by case sensitivity"""
actions.key("alt-c")
def find_toggle_match_by_word():
"""Toggles find match by whole words"""
actions.key("alt-w")
def find_toggle_match_by_regex():
"""Toggles find match by regex"""
actions.key("alt-e")
def replace(text: str):
"""Search and replaces in the active editor"""
actions.key("ctrl-f")
if text:
actions.insert(text)
def replace_everywhere(text: str):
"""Search and replaces in the entire project"""
actions.key("alt-a f")
if text:
actions.insert(text)
def replace_confirm():
"""Confirm replace at current position"""
actions.key("alt-r")
def replace_confirm_all():
"""Confirm replace all"""
actions.key("alt-a")
def select_previous_occurrence(text: str):
actions.edit.find(text)
actions.sleep("100ms")
actions.key("alt-b alt-f enter esc")
def select_next_occurrence(text: str):
actions.edit.find(text)
actions.sleep("100ms")
actions.key("alt-f alt-o esc")
# find_and_replace.py support end
|
3,836 |
center
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path as osp
import math
from functools import partial
from PyQt5.QtCore import QPoint
from PyQt5.QtWidgets import QDesktopWidget
from qtpy import QtCore, QtWidgets
from qtpy.QtWidgets import (
QWidget,
QLabel,
QPushButton,
QGridLayout,
QKeySequenceEdit,
QMessageBox, )
from qtpy.QtGui import QIcon
from qtpy import QtCore
from qtpy.QtCore import Qt
from util import save_configs
class RecordShortcutWidget(QKeySequenceEdit):
def __init__(self, finishCallback, location):
super().__init__()
self.finishCallback = finishCallback
        # Hide the widget's window frame
self.setWindowFlags(Qt.FramelessWindowHint)
self.move(location)
self.show()
self.editingFinished.connect(lambda: finishCallback(self.keySequence()))
def keyReleaseEvent(self, ev):
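        # Any key release ends the recording and reports the captured sequence.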
self.finishCallback(self.keySequence())
class ShortcutWidget(QWidget):
def __init__(self, actions, pjpath):
super().__init__()
self.tr = partial(QtCore.QCoreApplication.translate, "ShortcutWidget")
self.setWindowTitle(self.tr("编辑快捷键"))
self.setWindowIcon(QIcon(osp.join(pjpath, "resource/Shortcut.png")))
# self.setFixedSize(self.width(), self.height())
self.actions = actions
self.recorder = None
self.initUI()
def initUI(self):
grid = QGridLayout()
self.setLayout(grid)
actions = self.actions
for idx, action in enumerate(actions):
            # With two columns the English labels are hard to read
grid.addWidget(QLabel(action.iconText()[1:]), idx // 3, idx % 3 * 3)
shortcut = action.shortcut().toString()
if len(shortcut) == 0:
shortcut = self.tr("-")
button = QPushButton(shortcut)
button.setFixedWidth(150)
button.setFixedHeight(30)
button.clicked.connect(partial(self.recordShortcut, action))
grid.addWidget(
button,
idx // 3,
idx % 3 * 3 + 1, )
def refreshUi(self):
actions = self.actions
for idx, action in enumerate(actions):
shortcut = action.shortcut().toString()
if len(shortcut) == 0:
shortcut = self.tr("-")
self.layout().itemAtPosition(
idx // 3,
idx % 3 * 3 + 1, ).widget().setText(shortcut)
def recordShortcut(self, action):
        # When opening the shortcut recorder, close the previous one if it is still open
if self.recorder is not None:
self.recorder.close()
rect = self.geometry()
x = rect.x()
y = rect.y() + rect.height()
self.recorder = RecordShortcutWidget(self.setShortcut, QPoint(x, y))
self.currentAction = action
def setShortcut(self, key):
self.recorder.close()
for a in self.actions:
if a.shortcut() == key:
key = key.toString()
msg = QMessageBox()
msg.setIcon(QMessageBox.Warning)
msg.setWindowTitle(key + " " + self.tr("快捷键冲突"))
msg.setText(key + " " + self.tr("快捷键已被") + " " + a.data(
) + " " + self.tr("使用,请设置其他快捷键或先修改") + " " + a.data() + " " +
self.tr("的快捷键"))
msg.setStandardButtons(QMessageBox.Ok)
msg.exec_()
return
key = "" if key.toString() == "Esc" else key # ESC不设置快捷键
self.currentAction.setShortcut(key)
self.refreshUi()
save_configs(None, None, self.actions)
def METHOD_NAME(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().METHOD_NAME()
qr.moveCenter(cp)
self.move(qr.topLeft())
    # Keep the shortcut recorder attached when the window moves
def moveEvent(self, event):
p = self.geometry()
x = p.x()
y = p.y() + p.height()
if self.recorder is not None:
self.recorder.move(x, y)
def closeEvent(self, event):
        # Also close the shortcut recorder when this widget closes
if self.recorder is not None:
self.recorder.close()
|
3,837 |
project
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetJobIamPolicyResult',
'AwaitableGetJobIamPolicyResult',
'get_job_iam_policy',
'get_job_iam_policy_output',
]
@pulumi.output_type
class GetJobIamPolicyResult:
"""
A collection of values returned by getJobIamPolicy.
"""
def __init__(__self__, etag=None, id=None, location=None, name=None, policy_data=None, METHOD_NAME=None):
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if policy_data and not isinstance(policy_data, str):
raise TypeError("Expected argument 'policy_data' to be a str")
pulumi.set(__self__, "policy_data", policy_data)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'project' to be a str")
pulumi.set(__self__, "project", METHOD_NAME)
@property
@pulumi.getter
def etag(self) -> str:
"""
(Computed) The etag of the IAM policy.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="policyData")
def policy_data(self) -> str:
"""
(Required only by `cloudrunv2.JobIamPolicy`) The policy data generated by
a `organizations_get_iam_policy` data source.
"""
return pulumi.get(self, "policy_data")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
return pulumi.get(self, "project")
class AwaitableGetJobIamPolicyResult(GetJobIamPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetJobIamPolicyResult(
etag=self.etag,
id=self.id,
location=self.location,
name=self.name,
policy_data=self.policy_data,
METHOD_NAME=self.METHOD_NAME)
def get_job_iam_policy(location: Optional[str] = None,
name: Optional[str] = None,
METHOD_NAME: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetJobIamPolicyResult:
"""
Retrieves the current IAM policy data for job
## example
```python
import pulumi
import pulumi_gcp as gcp
policy = gcp.cloudrunv2.get_job_iam_policy(project=google_cloud_run_v2_job["default"]["project"],
location=google_cloud_run_v2_job["default"]["location"],
name=google_cloud_run_v2_job["default"]["name"])
```
:param str location: The location of the cloud run job Used to find the parent resource to bind the IAM policy to
:param str name: Used to find the parent resource to bind the IAM policy to
:param str project: The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
"""
__args__ = dict()
__args__['location'] = location
__args__['name'] = name
__args__['project'] = METHOD_NAME
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('gcp:cloudrunv2/getJobIamPolicy:getJobIamPolicy', __args__, opts=opts, typ=GetJobIamPolicyResult).value
return AwaitableGetJobIamPolicyResult(
etag=pulumi.get(__ret__, 'etag'),
id=pulumi.get(__ret__, 'id'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
policy_data=pulumi.get(__ret__, 'policy_data'),
METHOD_NAME=pulumi.get(__ret__, 'project'))
@_utilities.lift_output_func(get_job_iam_policy)
def get_job_iam_policy_output(location: Optional[pulumi.Input[Optional[str]]] = None,
name: Optional[pulumi.Input[str]] = None,
METHOD_NAME: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetJobIamPolicyResult]:
"""
Retrieves the current IAM policy data for job
## example
```python
import pulumi
import pulumi_gcp as gcp
policy = gcp.cloudrunv2.get_job_iam_policy(project=google_cloud_run_v2_job["default"]["project"],
location=google_cloud_run_v2_job["default"]["location"],
name=google_cloud_run_v2_job["default"]["name"])
```
:param str location: The location of the cloud run job Used to find the parent resource to bind the IAM policy to
:param str name: Used to find the parent resource to bind the IAM policy to
:param str project: The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
"""
...
|
3,838 |
init gui
|
import json
import cv2
import base64
import threading
import time
from datetime import datetime
from websocket_server import WebsocketServer
import logging
import os
import rclpy
from interfaces.pose3d import ListenerPose3d
from map import Map
# Graphical User Interface Class
class GUI:
# Initialization function
# The actual initialization
def __init__(self, host, hal):
t = threading.Thread(target=self.run_server)
self.payload = {'map': ''}
self.server = None
self.client = None
self.host = host
self.acknowledge = False
self.acknowledge_lock = threading.Lock()
self.hal = hal
t.start()
        # Create the map object
self.map = Map(self.hal.pose3d)
# Explicit initialization function
# Class method, so user can call it without instantiation
@classmethod
def METHOD_NAME(cls, host, console):
# self.payload = {'map': ''}
new_instance = cls(host, console)
return new_instance
# Function to get the client
# Called when a new client is received
def get_client(self, client, server):
self.client = client
# Function to get value of Acknowledge
def get_acknowledge(self):
self.acknowledge_lock.acquire()
acknowledge = self.acknowledge
self.acknowledge_lock.release()
return acknowledge
# Function to get value of Acknowledge
def set_acknowledge(self, value):
self.acknowledge_lock.acquire()
self.acknowledge = value
self.acknowledge_lock.release()
# Update the gui
def update_gui(self):
# Payload Map Message
pos_message = self.map.getRobotCoordinates()
ang_message = self.map.getRobotAngle()
pos_message = str(pos_message + ang_message)
self.payload["map"] = pos_message
message = "#gui" + json.dumps(self.payload)
self.server.send_message(self.client, message)
# Function to read the message from websocket
# Gets called when there is an incoming message from the client
def get_message(self, client, server, message):
# Acknowledge Message for GUI Thread
if (message[:4] == "#ack"):
self.set_acknowledge(True)
# Activate the server
def run_server(self):
self.server = WebsocketServer(port=2303, host=self.host)
self.server.set_fn_new_client(self.get_client)
self.server.set_fn_message_received(self.get_message)
home_dir = os.path.expanduser('~')
logged = False
while not logged:
try:
f = open(f"{home_dir}/ws_gui.log", "w")
f.write("websocket_gui=ready")
f.close()
logged = True
except:
time.sleep(0.1)
self.server.run_forever()
# Function to reset
def reset_gui(self):
self.map.reset()
# This class decouples the user thread
# and the GUI update thread
class ThreadGUI:
def __init__(self, gui):
self.gui = gui
# Time variables
self.ideal_cycle = 80
self.measured_cycle = 80
self.iteration_counter = 0
# Function to start the execution of threads
def start(self):
self.measure_thread = threading.Thread(target=self.measure_thread)
self.thread = threading.Thread(target=self.run)
self.measure_thread.start()
self.thread.start()
print("GUI Thread Started!")
# The measuring thread to measure frequency
def measure_thread(self):
while (self.gui.client == None):
pass
previous_time = datetime.now()
while (True):
# Sleep for 2 seconds
time.sleep(2)
# Measure the current time and subtract from previous time to get real time interval
current_time = datetime.now()
dt = current_time - previous_time
ms = (dt.days * 24 * 60 * 60 + dt.seconds) * \
1000 + dt.microseconds / 1000.0
previous_time = current_time
# Get the time period
try:
# Division by zero
self.measured_cycle = ms / self.iteration_counter
except:
self.measured_cycle = 0
# Reset the counter
self.iteration_counter = 0
# The main thread of execution
def run(self):
while (self.gui.client == None):
pass
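        # Push one GUI update per iteration, wait for the client acknowledgement,
        # then sleep out the remainder of the ideal cycle time.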
while (True):
start_time = datetime.now()
self.gui.update_gui()
acknowledge_message = self.gui.get_acknowledge()
while (acknowledge_message == False):
acknowledge_message = self.gui.get_acknowledge()
self.gui.set_acknowledge(False)
finish_time = datetime.now()
self.iteration_counter = self.iteration_counter + 1
dt = finish_time - start_time
ms = (dt.days * 24 * 60 * 60 + dt.seconds) * \
1000 + dt.microseconds / 1000.0
if (ms < self.ideal_cycle):
time.sleep((self.ideal_cycle-ms) / 1000.0)
|
3,839 |
store
|
import json
import logging
import os
import urllib.error
import urllib.parse
import urllib.request
from typing import Any, Dict, List, Optional
import aiohttp
import requests
from shared import perf
from shared.pd_exception import OperationalException
logger = logging.getLogger(__name__)
def fetch(url: str, character_encoding: Optional[str] = None, force: bool = False, retry: bool = False, session: Optional[requests.Session] = None) -> str:
headers = {}
if force:
headers['Cache-Control'] = 'no-cache'
logger.info('Fetching {url} ({cache})'.format(url=url, cache='no cache' if force else 'cache ok'))
try:
p = perf.start()
if session is not None:
response = session.get(url, headers=headers)
else:
response = requests.get(url, headers=headers)
perf.check(p, 'slow_fetch', (url, headers), 'fetch')
if character_encoding is not None:
response.encoding = character_encoding
if response.status_code in [500, 502, 503]:
raise FetchException(f'Server returned a {response.status_code} from {url}')
p = perf.start()
t = response.text
took = round(perf.took(p), 2)
if took > 1:
logger.warning('Getting text from response was very slow. Setting an explicit character_encoding may help.')
return t
except (urllib.error.HTTPError, requests.exceptions.ConnectionError, TimeoutError) as e:
if retry:
return fetch(url, character_encoding, force, retry=False)
raise FetchException(e) from e
async def fetch_async(url: str) -> str:
logger.info(f'Async fetching {url}')
try:
async with aiohttp.ClientSession() as aios:
response = await aios.get(url)
return await response.text()
except (urllib.error.HTTPError, requests.exceptions.ConnectionError, aiohttp.ClientConnectorError) as e:
raise FetchException(e) from e
async def post_async_with_json(url: str, data: dict) -> str:
logger.info(f'Async posting to {url}')
try:
async with aiohttp.ClientSession() as aios:
response = await aios.post(url, json=data)
return await response.text()
except (urllib.error.HTTPError, requests.exceptions.ConnectionError) as e:
raise FetchException(e) from e
def fetch_json(url: str, character_encoding: Optional[str] = None, session: Optional[requests.Session] = None) -> Any:
try:
blob = fetch(url, character_encoding, session=session)
if blob:
return json.loads(blob)
return None
except json.decoder.JSONDecodeError as e:
logger.error('Failed to load JSON:\n{0}'.format(blob))
raise FetchException(e) from e
async def fetch_json_async(url: str) -> Any:
try:
blob = await fetch_async(url)
if blob:
return json.loads(blob)
return None
except json.decoder.JSONDecodeError:
logger.error('Failed to load JSON:\n{0}'.format(blob))
raise
async def post_json_async(url: str, data: dict) -> Any:
try:
blob = await post_async_with_json(url, data)
if blob:
return json.loads(blob)
return None
except json.decoder.JSONDecodeError:
logger.error('Failed to load JSON:\n{0}'.format(blob))
raise
def post(url: str,
data: Optional[Dict[str, str]] = None,
json_data: Optional[Any] = None,
) -> str:
logger.info('POSTing to {url} with {data} / {json_data}'.format(url=url, data=data, json_data=json_data))
try:
response = requests.post(url, data=data, json=json_data)
return response.text
except requests.exceptions.ConnectionError as e:
raise FetchException(e) from e
def METHOD_NAME(url: str, path: str) -> requests.Response:
logger.info('Storing {url} in {path}'.format(url=url, path=path))
try:
response = requests.get(url, stream=True)
with open(path, 'wb') as fout:
for chunk in response.iter_content(1024):
fout.write(chunk)
return response
except urllib.error.HTTPError as e:
raise FetchException(e) from e
except requests.exceptions.ConnectionError as e:
raise FetchException(e) from e
async def store_async(url: str, path: str) -> aiohttp.ClientResponse:
logger.info('Async storing {url} in {path}'.format(url=url, path=path))
try:
async with aiohttp.ClientSession() as aios:
response = await aios.get(url)
with open(path, 'wb') as fout:
while True:
chunk = await response.content.read(1024)
if not chunk:
break
fout.write(chunk)
return response
except (urllib.error.HTTPError, aiohttp.ClientError) as e:
raise FetchException(e) from e
class FetchException(OperationalException):
pass
def acceptable_file(filepath: str) -> bool:
return os.path.isfile(filepath) and os.path.getsize(filepath) > 1000
def escape(str_input: str, skip_double_slash: bool = False) -> str:
# Expand 'AE' into two characters. This matches the legal list and
# WotC's naming scheme in Kaladesh, and is compatible with the
# image server and scryfall.
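    # Illustrative results (assumed, not taken from the original project):
    #   escape('Æther Vial')                          -> 'aether+vial'
    #   escape('Fire // Ice', skip_double_slash=True) -> 'fire+//+ice'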
s = str_input
if skip_double_slash:
s = s.replace('//', '-split-')
s = urllib.parse.quote_plus(s.replace(u'Æ', 'AE')).lower()
if skip_double_slash:
s = s.replace('-split-', '//')
return s
def post_discord_webhook(webhook_id: str,
webhook_token: str,
message: Optional[str] = None,
username: Optional[str] = None,
avatar_url: Optional[str] = None,
embeds: Optional[List[Dict[str, Any]]] = None,
) -> bool:
if not webhook_id or not webhook_token:
return False
url = 'https://discordapp.com/api/webhooks/{id}/{token}'.format(
id=webhook_id, token=webhook_token)
post(url, json_data={
'content': message,
'username': username,
'avatar_url': avatar_url,
'embeds': embeds,
})
return True
|
3,840 |
fetch table search results
|
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from abc import ABCMeta, abstractmethod
from typing import (
Any, Dict, List, Union,
)
from amundsen_common.models.api.health_check import HealthCheck
from amundsen_common.models.search import (
Filter, HighlightOptions, SearchResponse,
)
from search_service.models.dashboard import SearchDashboardResult
from search_service.models.feature import SearchFeatureResult
from search_service.models.table import SearchTableResult
from search_service.models.user import SearchUserResult
from search_service.proxy.es_proxy_v2_1 import Resource
class BaseProxy(metaclass=ABCMeta):
"""
Base Proxy, which behaves like an interface for all
the proxy clients available in the amundsen search service
"""
def health(self) -> HealthCheck:
"""
Runs one or more series of checks on the service. Can also
optionally return additional metadata about each check (e.g.
latency to database, cpu utilization, etc.).
"""
return HealthCheck(status='ok', checks={f'{type(self).__name__}:connection': {'status': 'not checked'}})
@abstractmethod
def search(self, *,
query_term: str,
page_index: int,
results_per_page: int,
resource_types: List[Resource],
filters: List[Filter],
highlight_options: Dict[Resource, HighlightOptions]) -> SearchResponse:
pass
@abstractmethod
def METHOD_NAME(self, *,
query_term: str,
page_index: int = 0,
index: str = '') -> SearchTableResult:
pass
@abstractmethod
def fetch_dashboard_search_results(self, *,
query_term: str,
page_index: int = 0,
index: str = '') -> SearchDashboardResult:
pass
@abstractmethod
def fetch_feature_search_results(self, *,
query_term: str,
page_index: int = 0,
index: str = '') -> SearchFeatureResult:
pass
@abstractmethod
def fetch_user_search_results(self, *,
query_term: str,
page_index: int = 0,
index: str = '') -> SearchUserResult:
pass
@abstractmethod
def fetch_search_results_with_filter(self, *,
query_term: str,
search_request: dict,
page_index: int = 0,
index: str = '') -> Union[SearchTableResult,
SearchDashboardResult,
SearchFeatureResult]:
pass
@abstractmethod
def update_document(self, *,
data: List[Dict[str, Any]],
index: str = '') -> str:
pass
@abstractmethod
def update_document_by_key(self, *,
resource_key: str,
resource_type: Resource,
field: str,
value: str = None,
operation: str = 'add') -> str:
pass
@abstractmethod
def delete_document_by_key(self, *,
resource_key: str,
resource_type: Resource,
field: str,
value: str = None) -> str:
pass
@abstractmethod
def create_document(self, *,
data: List[Dict[str, Any]],
index: str = '') -> str:
pass
@abstractmethod
def delete_document(self, *,
data: List[str],
index: str = '') -> str:
pass
|
3,841 |
setup run environment
|
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack.package import *
class Libpeas(AutotoolsPackage):
"""libpeas is a gobject-based plugins engine, and is targeted at
giving every application the chance to assume its own
extensibility."""
homepage = "http://developer.gnome.org/libpeas/stable"
url = "https://download.gnome.org/sources/libpeas/1.22/libpeas-1.22.0.tar.xz"
version("1.22.0", sha256="5b2fc0f53962b25bca131a5ec0139e6fef8e254481b6e777975f7a1d2702a962")
depends_on("m4", type="build")
depends_on("autoconf", type="build")
depends_on("automake", type="build")
depends_on("libtool", type="build")
depends_on("gettext", type="build")
depends_on("pkgconfig", type="build")
depends_on("atk")
depends_on("[email protected]:")
depends_on("xmlto", type="build")
depends_on("perl", type="build")
depends_on("perl-xml-parser", type="build")
depends_on("[email protected]:")
depends_on("gobject-introspection")
depends_on("libffi")
depends_on("gtkplus")
depends_on("gdk-pixbuf")
depends_on("pango")
depends_on("gnome-common")
depends_on("py-pygobject@3:", type="build")
depends_on("python@3:3.7.9", type="build")
def url_for_version(self, version):
url = "https://download.gnome.org/sources/libpeas/"
url += "{0}/libpeas-{1}.tar.xz"
return url.format(version.up_to(2), version)
def setup_dependent_build_environment(self, env, dependent_spec):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
def setup_dependent_run_environment(self, env, dependent_spec):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
def setup_build_environment(self, env):
# Let
#
# python = self.spec['python']
# prefix = python.prefix
# pyversion = python.version.up_to(2)
# python_lib_path = os.path.join(prefix, 'Frameworks',
# 'Python.framework', 'Versions',
# pyversion)
#
# self.spec['python'].libs.ld_flags returns (on macOS)
# '-L{0} -lPython'.format(python_lib_path)
#
# e.g., for [email protected] on macOS via Homebrew, python_lib_path is
# /usr/local/opt/python/Frameworks/Python.framework/Versions/3.7
#
# This directory is correct for many purposes, but libpeas uses the
# link flag '-lpython{0}m'.format(pyversion) and does not use an
# appropriate -L flag to locate this library, so the correct -L flag
# must be appended to LDFLAGS. Furthermore, this library is not found
# in python_lib_path. However, pkg-config returns the correct
# directory, so pkg-config is used to generate the correct paths for
# LDFLAGS.
pkg_config = which("pkg-config")
python_prefix = self.spec["python"].prefix.lib.pkgconfig
python_pc_file = os.path.join(python_prefix, "python3.pc")
python_ldflags = pkg_config("--libs", python_pc_file, output=str)
env.append_path("LDFLAGS", python_ldflags)
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
def METHOD_NAME(self, env):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
def autoreconf(self, spec, prefix):
autoreconf_args = ["-ivf"]
aclocal_pkg_list = [
"pkgconfig",
"gettext",
"intltool",
"glib",
"gobject-introspection",
"gnome-common",
"gtkplus",
]
aclocal_path = os.path.join("share", "aclocal")
for pkg in aclocal_pkg_list:
autoreconf_args += ["-I", os.path.join(spec[pkg].prefix, aclocal_path)]
autoreconf = which("autoreconf")
autoreconf(*autoreconf_args)
def configure_args(self):
args = ["--disable-silent-rules", "--enable-gtk", "--enable-python3", "--disable-python2"]
return args
|
3,842 |
stop moves
|
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
from rbac.helper.common import *
import rbac.helper.errors as errors
@TestSuite
def privileges_granted_directly(self, node=None):
"""Check that a user is able to execute `SYSTEM MOVES` commands if and only if
the privilege has been granted directly.
"""
user_name = f"user_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
table_name = f"table_name_{getuid()}"
Suite(run=check_privilege,
examples=Examples("privilege on grant_target_name user_name table_name", [
tuple(list(row)+[user_name,user_name,table_name]) for row in check_privilege.examples
], args=Args(name="check privilege={privilege}", format_name=True)))
@TestSuite
def privileges_granted_via_role(self, node=None):
"""Check that a user is able to execute `SYSTEM MOVES` commands if and only if
the privilege has been granted via role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"), role(node, f"{role_name}"):
table_name = f"table_name_{getuid()}"
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Suite(run=check_privilege,
examples=Examples("privilege on grant_target_name user_name table_name", [
tuple(list(row)+[role_name,user_name,table_name]) for row in check_privilege.examples
], args=Args(name="check privilege={privilege}", format_name=True)))
@TestOutline(Suite)
@Examples("privilege on",[
("ALL", "*.*"),
("SYSTEM", "*.*"),
("SYSTEM MOVES", "table"),
("SYSTEM STOP MOVES", "table"),
("SYSTEM START MOVES", "table"),
("START MOVES", "table"),
("STOP MOVES", "table"),
])
def check_privilege(self, privilege, on, grant_target_name, user_name, table_name, node=None):
"""Run checks for commands that require SYSTEM MOVES privilege.
"""
if node is None:
node = self.context.node
Suite(test=start_moves)(privilege=privilege, on=on, grant_target_name=grant_target_name, user_name=user_name, table_name=table_name)
Suite(test=METHOD_NAME)(privilege=privilege, on=on, grant_target_name=grant_target_name, user_name=user_name, table_name=table_name)
@TestSuite
def start_moves(self, privilege, on, grant_target_name, user_name, table_name, node=None):
"""Check that user is only able to execute `SYSTEM START MOVES` when they have privilege.
"""
exitcode, message = errors.not_enough_privileges(name=user_name)
if node is None:
node = self.context.node
on = on.replace("table", f"{table_name}")
with table(node, table_name):
with Scenario("SYSTEM START MOVES without privilege"):
with When("I grant the user NONE privilege"):
node.query(f"GRANT NONE TO {grant_target_name}")
with And("I grant the user USAGE privilege"):
node.query(f"GRANT USAGE ON *.* TO {grant_target_name}")
with Then("I check the user can't start moves"):
node.query(f"SYSTEM START MOVES {table_name}", settings = [("user", f"{user_name}")],
exitcode=exitcode, message=message)
with Scenario("SYSTEM START MOVES with privilege"):
with When(f"I grant {privilege} on the table"):
node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
with Then("I check the user can start moves"):
node.query(f"SYSTEM START MOVES {table_name}", settings = [("user", f"{user_name}")])
with Scenario("SYSTEM START MOVES with revoked privilege"):
with When(f"I grant {privilege} on the table"):
node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
with And(f"I revoke {privilege} on the table"):
node.query(f"REVOKE {privilege} ON {on} FROM {grant_target_name}")
with Then("I check the user can't start moves"):
node.query(f"SYSTEM START MOVES {table_name}", settings = [("user", f"{user_name}")],
exitcode=exitcode, message=message)
@TestSuite
def METHOD_NAME(self, privilege, on, grant_target_name, user_name, table_name, node=None):
"""Check that user is only able to execute `SYSTEM STOP MOVES` when they have privilege.
"""
exitcode, message = errors.not_enough_privileges(name=user_name)
if node is None:
node = self.context.node
on = on.replace("table", f"{table_name}")
with table(node, table_name):
with Scenario("SYSTEM STOP MOVES without privilege"):
with When("I grant the user NONE privilege"):
node.query(f"GRANT NONE TO {grant_target_name}")
with And("I grant the user USAGE privilege"):
node.query(f"GRANT USAGE ON *.* TO {grant_target_name}")
with Then("I check the user can't stop moves"):
node.query(f"SYSTEM STOP MOVES {table_name}", settings = [("user", f"{user_name}")],
exitcode=exitcode, message=message)
with Scenario("SYSTEM STOP MOVES with privilege"):
with When(f"I grant {privilege} on the table"):
node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
with Then("I check the user can stop moves"):
node.query(f"SYSTEM STOP MOVES {table_name}", settings = [("user", f"{user_name}")])
with Scenario("SYSTEM STOP MOVES with revoked privilege"):
with When(f"I grant {privilege} on the table"):
node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
with And(f"I revoke {privilege} on the table"):
node.query(f"REVOKE {privilege} ON {on} FROM {grant_target_name}")
with Then("I check the user can't stop moves"):
node.query(f"SYSTEM STOP MOVES {table_name}", settings = [("user", f"{user_name}")],
exitcode=exitcode, message=message)
@TestFeature
@Name("system moves")
@Requirements(
RQ_SRS_006_RBAC_Privileges_System_Moves("1.0"),
RQ_SRS_006_RBAC_Privileges_All("1.0"),
RQ_SRS_006_RBAC_Privileges_None("1.0")
)
def feature(self, node="clickhouse1"):
"""Check the RBAC functionality of SYSTEM MOVES.
"""
self.context.node = self.context.cluster.node(node)
Suite(run=privileges_granted_directly, setup=instrument_clickhouse_server_log)
Suite(run=privileges_granted_via_role, setup=instrument_clickhouse_server_log)
|
3,843 |
clean all
|
import os
import enigma
import time
from Components.config import config
from Components.GUIComponent import GUIComponent
from Components import Harddisk
import Components.Task
from Components.VariableText import VariableText
def isTrashFolder(path):
path = os.path.realpath(path)
return getTrashFolder(path) == path
def getTrashFolder(path=None):
# Returns trash folder without symlinks
try:
if path is None or os.path.realpath(path) == "/media/autofs":
return ""
else:
trashcan = Harddisk.findMountPoint(os.path.realpath(path))
if "/movie" in path:
trashcan = os.path.join(trashcan, "movie")
elif config.usage.default_path.value in path:
# if default_path happens to not be the default /hdd/media/movie, then we can have a trash folder there instead
trashcan = os.path.join(trashcan, config.usage.default_path.value)
return os.path.realpath(os.path.join(trashcan, ".Trash"))
except:
return None
def createTrashFolder(path=None):
trash = getTrashFolder(path)
print("[Trashcan] Debug path %s => %s" % (path, trash))
if trash and os.access(os.path.split(trash)[0], os.W_OK):
if not os.path.isdir(trash):
try:
os.mkdir(trash)
except:
return None
return trash
else:
return None
def get_size(start_path="."):
total_size = 0
if start_path:
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
try:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
except:
pass
return total_size
class Trashcan:
def __init__(self, session):
self.session = session
session.nav.record_event.append(self.gotRecordEvent)
self.gotRecordEvent(None, None)
def gotRecordEvent(self, service, event):
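        # Called for every record event; try a cleanup whenever a recording ends.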
from RecordTimer import n_recordings
if event == enigma.iRecordableService.evEnd:
self.cleanIfIdle()
def destroy(self):
if self.session is not None:
self.session.nav.record_event.remove(self.gotRecordEvent)
self.session = None
def __del__(self):
self.destroy()
def cleanIfIdle(self):
# RecordTimer calls this when preparing a recording. That is a
# nice moment to clean up.
from RecordTimer import n_recordings
if n_recordings > 0:
print("[Trashcan] Recording(s) in progress:", n_recordings)
return
# If movielist_trashcan_days is 0 it means don't timeout anything -
# just use the "leave nGB settting"
#
if (config.usage.movielist_trashcan_days.value > 0):
ctimeLimit = time.time() - (config.usage.movielist_trashcan_days.value * 3600 * 24)
else:
ctimeLimit = 0
reserveBytes = 1024 * 1024 * 1024 * int(config.usage.movielist_trashcan_reserve.value)
clean(ctimeLimit, reserveBytes)
def clean(ctimeLimit, reserveBytes):
isCleaning = False
for job in Components.Task.job_manager.getPendingJobs():
jobname = str(job.name)
if jobname.startswith(_("Cleaning Trashes")):
isCleaning = True
break
if config.usage.movielist_trashcan.value and not isCleaning:
name = _("Cleaning Trashes")
job = Components.Task.Job(name)
task = CleanTrashTask(job, name)
task.openFiles(ctimeLimit, reserveBytes)
Components.Task.job_manager.AddJob(job)
elif isCleaning:
print("[Trashcan] Cleanup already running")
else:
print("[Trashcan] Disabled skipping check.")
def METHOD_NAME(path=None):
trash = getTrashFolder(path)
if not os.path.isdir(trash):
print("[Trashcan] No trash.", trash)
return 0
for root, dirs, files in os.walk(trash.encode(), topdown=False): # handle non utf-8 filenames
for name in files:
fn = os.path.join(root, name)
enigma.eBackgroundFileEraser.getInstance().erase(fn)
for name in dirs: # Remove empty directories if possible
try:
os.rmdir(os.path.join(root, name))
except:
pass
def init(session):
global instance
instance = Trashcan(session)
class CleanTrashTask(Components.Task.PythonTask):
def openFiles(self, ctimeLimit, reserveBytes):
self.ctimeLimit = ctimeLimit
self.reserveBytes = reserveBytes
def work(self):
# add the default movie path
trashcanLocations = set([os.path.join(config.usage.default_path.value)])
# add the root and the movie directory of each mount
print("[Trashcan] probing folders")
f = open("/proc/mounts", "r")
for line in f.readlines():
parts = line.strip().split()
if parts[1] == "/media/autofs":
continue
# skip network mounts unless the option to clean them is set
if (not config.usage.movielist_trashcan_network_clean.value and
(parts[1].startswith("/media/net") or parts[1].startswith("/media/autofs"))):
continue
# one trashcan in the root, one in movie subdirectory
trashcanLocations.add(parts[1])
trashcanLocations.add(os.path.join(parts[1], "movie"))
f.close()
for trashfolder in trashcanLocations:
trashfolder = os.path.join(trashfolder, ".Trash")
if os.path.isdir(trashfolder):
print("[Trashcan][work] looking in trashcan", trashfolder)
trashsize = get_size(trashfolder)
diskstat = os.statvfs(trashfolder)
free = diskstat.f_bfree * diskstat.f_bsize
bytesToRemove = self.reserveBytes - free
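                # A positive value means the trash must shrink by at least this much
                # to keep the configured reserve of free space on this filesystem.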
print("[Trashcan][work] " + str(trashfolder) + ": Size:", "{:,}".format(trashsize))
candidates = []
size = 0
for root, dirs, files in os.walk(trashfolder.encode(), topdown=False):
for name in files: # Don't delete any per-directory config files from .Trash
if (config.movielist.settings_per_directory.value and name == b".e2settings.pkl"):
continue
fn = os.path.join(root, name)
try: # file may not exist, if dual delete activities.
st = os.stat(fn)
except FileNotFoundError:
print("[Trashcan][CleanTrashTask[work] FileNotFoundError ", fn)
continue
if st.st_ctime < self.ctimeLimit or config.usage.movielist_trashcan_days.value == 0:
enigma.eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st.st_size
else:
candidates.append((st.st_ctime, fn, st.st_size))
size += st.st_size
for name in dirs: # Remove empty directories if possible
try:
os.rmdir(os.path.join(root, name))
except:
pass
candidates.sort()
# Now we have a list of ctime, candidates, size. Sorted by ctime (=deletion time)
for st_ctime, fn, st_size in candidates:
if bytesToRemove < 0:
break
try: # file may not exist if simultaneously a network trashcan and main box emptying trash
enigma.eBackgroundFileEraser.getInstance().erase(fn)
except:
pass
bytesToRemove -= st_size
size -= st_size
print("[Trashcan][work] " + str(trashfolder) + ": Size now:", "{:,}".format(size))
class TrashInfo(VariableText, GUIComponent):
FREE = 0
USED = 1
SIZE = 2
def __init__(self, path, type, update=True):
GUIComponent.__init__(self)
VariableText.__init__(self)
self.type = type
if update and path != "/media/autofs/":
self.update(path)
def update(self, path):
try:
total_size = get_size(getTrashFolder(path))
except OSError:
return -1
if self.type == self.USED:
try:
if total_size < 10000000:
total_size = _("%d KB") % (total_size >> 10)
elif total_size < 10000000000:
total_size = _("%d MB") % (total_size >> 20)
else:
total_size = _("%d GB") % (total_size >> 30)
self.setText(_("Trashcan:") + " " + total_size)
except:
# occurs when f_blocks is 0 or a similar error
self.setText("-?-")
GUI_WIDGET = enigma.eLabel
|
3,844 |
get ca certificate
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetCaCertificateResult',
'AwaitableGetCaCertificateResult',
'get_ca_certificate',
'get_ca_certificate_output',
]
@pulumi.output_type
class GetCaCertificateResult:
"""
The CA Certificate resource.
"""
def __init__(__self__, description=None, encoded_certificate=None, expiry_time_in_utc=None, id=None, issue_time_in_utc=None, name=None, provisioning_state=None, system_data=None, type=None):
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if encoded_certificate and not isinstance(encoded_certificate, str):
raise TypeError("Expected argument 'encoded_certificate' to be a str")
pulumi.set(__self__, "encoded_certificate", encoded_certificate)
if expiry_time_in_utc and not isinstance(expiry_time_in_utc, str):
raise TypeError("Expected argument 'expiry_time_in_utc' to be a str")
pulumi.set(__self__, "expiry_time_in_utc", expiry_time_in_utc)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if issue_time_in_utc and not isinstance(issue_time_in_utc, str):
raise TypeError("Expected argument 'issue_time_in_utc' to be a str")
pulumi.set(__self__, "issue_time_in_utc", issue_time_in_utc)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Description for the CA Certificate resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="encodedCertificate")
def encoded_certificate(self) -> Optional[str]:
"""
Base64 encoded PEM (Privacy Enhanced Mail) format certificate data.
"""
return pulumi.get(self, "encoded_certificate")
@property
@pulumi.getter(name="expiryTimeInUtc")
def expiry_time_in_utc(self) -> str:
"""
Certificate expiry time in UTC. This is a read-only field.
"""
return pulumi.get(self, "expiry_time_in_utc")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified identifier of the resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="issueTimeInUtc")
def issue_time_in_utc(self) -> str:
"""
Certificate issue time in UTC. This is a read-only field.
"""
return pulumi.get(self, "issue_time_in_utc")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Provisioning state of the CA Certificate resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system metadata relating to the CaCertificate resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetCaCertificateResult(GetCaCertificateResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetCaCertificateResult(
description=self.description,
encoded_certificate=self.encoded_certificate,
expiry_time_in_utc=self.expiry_time_in_utc,
id=self.id,
issue_time_in_utc=self.issue_time_in_utc,
name=self.name,
provisioning_state=self.provisioning_state,
system_data=self.system_data,
type=self.type)
def METHOD_NAME(ca_certificate_name: Optional[str] = None,
namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCaCertificateResult:
"""
Get properties of a CA certificate.
:param str ca_certificate_name: Name of the CA certificate.
:param str namespace_name: Name of the namespace.
:param str resource_group_name: The name of the resource group within the user's subscription.
"""
__args__ = dict()
__args__['caCertificateName'] = ca_certificate_name
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:eventgrid/v20230601preview:getCaCertificate', __args__, opts=opts, typ=GetCaCertificateResult).value
return AwaitableGetCaCertificateResult(
description=pulumi.get(__ret__, 'description'),
encoded_certificate=pulumi.get(__ret__, 'encoded_certificate'),
expiry_time_in_utc=pulumi.get(__ret__, 'expiry_time_in_utc'),
id=pulumi.get(__ret__, 'id'),
issue_time_in_utc=pulumi.get(__ret__, 'issue_time_in_utc'),
name=pulumi.get(__ret__, 'name'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
system_data=pulumi.get(__ret__, 'system_data'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(METHOD_NAME)
def get_ca_certificate_output(ca_certificate_name: Optional[pulumi.Input[str]] = None,
namespace_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCaCertificateResult]:
"""
Get properties of a CA certificate.
:param str ca_certificate_name: Name of the CA certificate.
:param str namespace_name: Name of the namespace.
:param str resource_group_name: The name of the resource group within the user's subscription.
"""
...
|
3,845 |
iovs n payload names
|
#!/usr/bin/env python3
import argparse
import sys
import logging
import copy
import h5py
import numpy as np
from collections import OrderedDict
import zlib
import lzma
from CondCore.CondHDF5ESSource.hdf5Writer import writeH5File
#Global tags hold a list of Tags
# Tags give the
# record name,
# list of data products
# list of IOVs
# list of payloads per IOV
# Payloads give
# a payload name and
# the serialized data for a data product
# the type of data for the data product
#
class IOVSyncValue(object):
def __init__(self, high, low):
self.high = high
self.low = low
class H5Payload(object):
def __init__(self,dataset,name, compressor):
self._dataset = dataset
self._hash = name
self._type = dataset.attrs['type']
self._memsize = dataset.attrs['memsize']
self._compressor = compressor
def name(self):
return self._hash
def actualType(self):
return self._type
def memsize(self):
return self._memsize
def data(self):
ds = self._dataset[()]
if len(ds) == self.memsize():
return ds
#was compressed
return self._compressor.decompress(ds)
class H5DataProduct(object):
def __init__(self, group, name, compressor):
self._type = group.attrs['type']
self._name = name
self._payloadGroup = group['Payloads']
self._compressor = compressor
def name(self):
return self._name
def objtype(self):
return self._type
def payloads(self):
return [H5Payload(self._payloadGroup[p],p.split('/')[-1], self._compressor) for p in self._payloadGroup]
def idToPayloadNames(self):
return { self._payloadGroup[p].id:p.split('/')[-1] for p in self._payloadGroup }
class H5Tag(object):
def __init__(self, file, group, name):
self._file = file
compressor = None
compressorName = self._file.attrs['default_payload_compressor']
if compressorName == 'lzma':
compressor = lzma
if compressorName == 'zlib':
compressor = zlib
self._group = group
self._record = self._group.attrs['record']
self._name = name
recordGroup = file['Records'][self._record]
dataProductsGroup = recordGroup['DataProducts']
self._dataProducts = [H5DataProduct(dataProductsGroup[g],g.split('/')[-1], compressor) for g in dataProductsGroup]
self._dbtags = self._group.attrs['db_tags']
self._time_type = self._group.attrs['time_type']
def record(self):
return self._record
def name(self):
return self._name
def time_type(self):
return self._time_type
def originalTagNames(self):
return self._dbtags
def METHOD_NAME(self):
#asking an h5 object for its name is a slow operation
idToName = {self._file['null_payload'].id: None}
for d in self._dataProducts:
idToName.update(d.idToPayloadNames())
first = self._group['first'][()]
last = self._group['last'][()]
payloadRefs = self._group['payload']
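        # One tuple per IOV: (first sync value, last sync value, list of payload
        # names, one per data product, with None for the null payload).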
return list(zip( (IOVSyncValue(x['high'],x['low']) for x in first),
(IOVSyncValue(x['high'],x['low']) for x in last),
([idToName[self._file[r].id] for r in refs] for refs in payloadRefs)) )
def dataProducts(self):
return self._dataProducts
class H5GlobalTag(object):
def __init__(self, filename, name):
self._file = h5py.File(filename,'r')
self._name = name
def tags(self):
#looking up names is slow so better to make cache
tagID2Name = {}
recordsGroup = self._file['Records']
for recordName in recordsGroup:
r = recordsGroup[recordName]
tagsGroup = r['Tags']
for tagName in tagsGroup:
tagID2Name[tagsGroup[tagName].id] = tagName
globalTagGroup = self._file['GlobalTags'][self._name]
return (H5Tag(self._file, self._file[t], tagID2Name[self._file[t].id]) for t in globalTagGroup['Tags'])
def main():
parser = argparse.ArgumentParser(description='Read from HDF5 file and write to HDF5 file')
parser.add_argument('input', help="Name of file to read")
parser.add_argument('name', nargs='+', help="Name of the global tag.")
parser.add_argument('--exclude', '-e', nargs='*', help = 'list of records to exclude from the file (can not be used with --include)')
    parser.add_argument('--include', '-i', nargs='*', help = 'list of the only records that should be included in the file (can not be used with --exclude)')
parser.add_argument('--output', '-o', default='test.h5cond', help='name of hdf5 output file to write')
parser.add_argument('--compressor', '-c', default='zlib', choices=['zlib', 'lzma', 'none'], help="compress data using 'zlib', 'lzma' or 'none'")
args = parser.parse_args()
if args.exclude and args.include:
print("Can not use --exclude and --include at the same time")
exit(-1)
excludeRecords = set()
if args.exclude:
excludeRecords = set(args.exclude)
includeRecords = set()
if args.include:
includeRecords = set(args.include)
writeH5File(args.output, args.name, excludeRecords, includeRecords, lambda x: H5GlobalTag(args.input, x), args.compressor)
if __name__ == '__main__':
main()
|
3,846 |
test property decorator baseclass
|
# Test case for property
# more tests are in test_descr
import sys
import unittest
from test.test_support import run_unittest
class PropertyBase(Exception):
pass
class PropertyGet(PropertyBase):
pass
class PropertySet(PropertyBase):
pass
class PropertyDel(PropertyBase):
pass
class BaseClass(object):
def __init__(self):
self._spam = 5
@property
def spam(self):
"""BaseClass.getter"""
return self._spam
@spam.setter
def spam(self, value):
self._spam = value
@spam.deleter
def spam(self):
del self._spam
class SubClass(BaseClass):
@BaseClass.spam.getter
def spam(self):
"""SubClass.getter"""
raise PropertyGet(self._spam)
@spam.setter
def spam(self, value):
raise PropertySet(self._spam)
@spam.deleter
def spam(self):
raise PropertyDel(self._spam)
class PropertyDocBase(object):
_spam = 1
def _get_spam(self):
return self._spam
spam = property(_get_spam, doc="spam spam spam")
class PropertyDocSub(PropertyDocBase):
@PropertyDocBase.spam.getter
def spam(self):
"""The decorator does not use this doc string"""
return self._spam
class PropertySubNewGetter(BaseClass):
@BaseClass.spam.getter
def spam(self):
"""new docstring"""
return 5
class PropertyNewGetter(object):
@property
def spam(self):
"""original docstring"""
return 1
@spam.getter
def spam(self):
"""new docstring"""
return 8
class PropertyTests(unittest.TestCase):
def METHOD_NAME(self):
# see #1620
base = BaseClass()
self.assertEqual(base.spam, 5)
self.assertEqual(base._spam, 5)
base.spam = 10
self.assertEqual(base.spam, 10)
self.assertEqual(base._spam, 10)
delattr(base, "spam")
self.assertTrue(not hasattr(base, "spam"))
self.assertTrue(not hasattr(base, "_spam"))
base.spam = 20
self.assertEqual(base.spam, 20)
self.assertEqual(base._spam, 20)
def test_property_decorator_subclass(self):
# see #1620
sub = SubClass()
self.assertRaises(PropertyGet, getattr, sub, "spam")
self.assertRaises(PropertySet, setattr, sub, "spam", None)
self.assertRaises(PropertyDel, delattr, sub, "spam")
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_property_decorator_subclass_doc(self):
sub = SubClass()
self.assertEqual(sub.__class__.spam.__doc__, "SubClass.getter")
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_property_decorator_baseclass_doc(self):
base = BaseClass()
self.assertEqual(base.__class__.spam.__doc__, "BaseClass.getter")
def test_property_decorator_doc(self):
base = PropertyDocBase()
sub = PropertyDocSub()
self.assertEqual(base.__class__.spam.__doc__, "spam spam spam")
self.assertEqual(sub.__class__.spam.__doc__, "spam spam spam")
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_property_getter_doc_override(self):
newgettersub = PropertySubNewGetter()
self.assertEqual(newgettersub.spam, 5)
self.assertEqual(newgettersub.__class__.spam.__doc__, "new docstring")
newgetter = PropertyNewGetter()
self.assertEqual(newgetter.spam, 8)
self.assertEqual(newgetter.__class__.spam.__doc__, "new docstring")
# Issue 5890: subclasses of property do not preserve method __doc__ strings
class PropertySub(property):
"""This is a subclass of property"""
class PropertySubSlots(property):
"""This is a subclass of property that defines __slots__"""
__slots__ = ()
class PropertySubclassTests(unittest.TestCase):
def test_slots_docstring_copy_exception(self):
try:
class Foo(object):
@PropertySubSlots
def spam(self):
"""Trying to copy this docstring will raise an exception"""
return 1
except AttributeError:
pass
else:
raise Exception("AttributeError not raised")
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_docstring_copy(self):
class Foo(object):
@PropertySub
def spam(self):
"""spam wrapped in property subclass"""
return 1
self.assertEqual(
Foo.spam.__doc__,
"spam wrapped in property subclass")
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_property_setter_copies_getter_docstring(self):
class Foo(object):
def __init__(self): self._spam = 1
@PropertySub
def spam(self):
"""spam wrapped in property subclass"""
return self._spam
@spam.setter
def spam(self, value):
"""this docstring is ignored"""
self._spam = value
foo = Foo()
self.assertEqual(foo.spam, 1)
foo.spam = 2
self.assertEqual(foo.spam, 2)
self.assertEqual(
Foo.spam.__doc__,
"spam wrapped in property subclass")
class FooSub(Foo):
@Foo.spam.setter
def spam(self, value):
"""another ignored docstring"""
self._spam = 'eggs'
foosub = FooSub()
self.assertEqual(foosub.spam, 1)
foosub.spam = 7
self.assertEqual(foosub.spam, 'eggs')
self.assertEqual(
FooSub.spam.__doc__,
"spam wrapped in property subclass")
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_property_new_getter_new_docstring(self):
class Foo(object):
@PropertySub
def spam(self):
"""a docstring"""
return 1
@spam.getter
def spam(self):
"""a new docstring"""
return 2
self.assertEqual(Foo.spam.__doc__, "a new docstring")
class FooBase(object):
@PropertySub
def spam(self):
"""a docstring"""
return 1
class Foo2(FooBase):
@FooBase.spam.getter
def spam(self):
"""a new docstring"""
return 2
self.assertEqual(Foo2.spam.__doc__, "a new docstring")
def test_main():
run_unittest(PropertyTests, PropertySubclassTests)
if __name__ == '__main__':
test_main()
|
3,847 |
test import vmtk scripts
|
## Program: VMTK
## Language: Python
## Date: January 10, 2018
## Version: 1.4
## Copyright (c) Richard Izzo, Luca Antiga, All rights reserved.
## See LICENSE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
## Note: this code was contributed by
## Richard Izzo (Github @rlizzo)
## University at Buffalo
import pytest
import unittest
import os
@pytest.fixture(scope='function')
def vmtk_scripts():
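# Every vmtk script module (core plus contributed) that the test below expects to import cleanly.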
allscripts = [
'vmtk.vmtkactivetubes',
'vmtk.vmtkbifurcationprofiles',
'vmtk.vmtkbifurcationreferencesystems',
'vmtk.vmtkbifurcationsections',
'vmtk.vmtkbifurcationvectors',
'vmtk.vmtkboundarylayer',
'vmtk.vmtkboundaryreferencesystems',
'vmtk.vmtkbranchclipper',
'vmtk.vmtkbranchextractor',
'vmtk.vmtkbranchgeometry',
'vmtk.vmtkbranchmapping',
'vmtk.vmtkbranchmetrics',
'vmtk.vmtkbranchpatching',
'vmtk.vmtkbranchsections',
'vmtk.vmtkcenterlineattributes',
'vmtk.vmtkcenterlinegeometry',
'vmtk.vmtkcenterlineinterpolation',
'vmtk.vmtkcenterlinelabeler',
'vmtk.vmtkcenterlinemerge',
'vmtk.vmtkcenterlinemodeller',
'vmtk.vmtkcenterlineoffsetattributes',
'vmtk.vmtkcenterlineresampling',
'vmtk.vmtkcenterlines',
'vmtk.vmtkcenterlinesnetwork',
'vmtk.vmtkcenterlinestonumpy',
'vmtk.vmtkcenterlinesections',
'vmtk.vmtkcenterlinesmoothing',
'vmtk.vmtkcenterlineviewer',
'vmtk.vmtkdelaunayvoronoi',
'vmtk.vmtkdistancetocenterlines',
'vmtk.vmtkendpointextractor',
'vmtk.vmtkflowextensions',
'vmtk.vmtkicpregistration',
'vmtk.vmtkimagebinarize',
'vmtk.vmtkimagecast',
'vmtk.vmtkimagecompose',
'vmtk.vmtkimagecurvedmpr',
'vmtk.vmtkimagefeaturecorrection',
'vmtk.vmtkimagefeatures',
'vmtk.vmtkimageinitialization',
'vmtk.vmtkimagemipviewer',
'vmtk.vmtkimagemorphology',
'vmtk.vmtkimagenormalize',
'vmtk.vmtkimageobjectenhancement',
'vmtk.vmtkimageotsuthresholds',
'vmtk.vmtkimagereader',
'vmtk.vmtkimagereslice',
'vmtk.vmtkimageseeder',
'vmtk.vmtkimageshiftscale',
'vmtk.vmtkimagesmoothing',
'vmtk.vmtkimagetonumpy',
'vmtk.vmtkimageviewer',
'vmtk.vmtkimagevesselenhancement',
'vmtk.vmtkimagevoipainter',
'vmtk.vmtkimagevoiselector',
'vmtk.vmtkimagewriter',
'vmtk.vmtklevelsetsegmentation',
'vmtk.vmtklineartoquadratic',
'vmtk.vmtklineresampling',
'vmtk.vmtklocalgeometry',
'vmtk.vmtkmarchingcubes',
'vmtk.vmtkmesharrayoperation',
'vmtk.vmtkmeshboundaryinspector',
'vmtk.vmtkmeshbranchclipper',
'vmtk.vmtkmeshclipper',
'vmtk.vmtkmeshconnectivity',
'vmtk.vmtkmeshcutter',
'vmtk.vmtkmeshdatareader',
'vmtk.vmtkmeshextractpointdata',
'vmtk.vmtkmeshlambda2',
'vmtk.vmtkmeshlinearize',
'vmtk.vmtkmeshgenerator',
'vmtk.vmtkmeshmergetimesteps',
'vmtk.vmtkmeshpolyballevaluation',
'vmtk.vmtkmeshprojection',
'vmtk.vmtkmeshreader',
'vmtk.vmtkmeshscaling',
'vmtk.vmtkmeshtetrahedralize',
'vmtk.vmtkmeshtosurface',
'vmtk.vmtkmeshtransform',
'vmtk.vmtkmeshtransformtoras',
'vmtk.vmtkmeshvectorfromcomponents',
'vmtk.vmtkmeshviewer',
'vmtk.vmtkmeshvolume',
'vmtk.vmtkmeshvorticityhelicity',
'vmtk.vmtkmeshwallshearrate',
'vmtk.vmtkmeshwriter',
'vmtk.vmtknetworkeditor',
'vmtk.vmtknetworkextraction',
'vmtk.vmtknetworkwriter',
'vmtk.vmtknumpyreader',
'vmtk.vmtknumpytocenterlines',
'vmtk.vmtknumpytoimage',
'vmtk.vmtknumpytosurface',
'vmtk.vmtknumpywriter',
'vmtk.vmtkparticletracer',
'vmtk.vmtkpathlineanimator',
'vmtk.vmtkpointsplitextractor',
'vmtk.vmtkpointtransform',
'vmtk.vmtkpolyballmodeller',
'vmtk.vmtkpotentialfit',
'vmtk.vmtkpythonscript',
'vmtk.vmtkrenderer',
'vmtk.vmtkrendertoimage',
'vmtk.vmtkrbfinterpolation',
'vmtk.vmtksurfaceappend',
'vmtk.vmtksurfacearraysmoothing',
'vmtk.vmtksurfacearrayoperation',
'vmtk.vmtksurfacebooleanoperation',
'vmtk.vmtksurfacecapper',
'vmtk.vmtksurfacecelldatatopointdata',
'vmtk.vmtksurfacecenterlineprojection',
'vmtk.vmtksurfaceclipper',
'vmtk.vmtksurfacecliploop',
'vmtk.vmtksurfaceconnectivity',
'vmtk.vmtksurfacecurvature',
'vmtk.vmtksurfacedecimation',
'vmtk.vmtksurfacedistance',
'vmtk.vmtksurfaceendclipper',
'vmtk.vmtksurfacekiteremoval',
'vmtk.vmtksurfaceloopextraction',
'vmtk.vmtksurfacemassproperties',
'vmtk.vmtksurfacemodeller',
'vmtk.vmtksurfacenormals',
'vmtk.vmtksurfacepointdatatocelldata',
'vmtk.vmtksurfacepolyballevaluation',
'vmtk.vmtksurfaceprojection',
'vmtk.vmtksurfacereader',
'vmtk.vmtksurfacereferencesystemtransform',
'vmtk.vmtksurfaceregiondrawing',
'vmtk.vmtksurfaceremeshing',
'vmtk.vmtksurfacescaling',
'vmtk.vmtksurfacesmoothing',
'vmtk.vmtksurfacesubdivision',
'vmtk.vmtksurfacetonumpy',
'vmtk.vmtksurfacetransform',
'vmtk.vmtksurfacetransforminteractive',
'vmtk.vmtksurfacetransformtoras',
'vmtk.vmtksurfacetriangle',
'vmtk.vmtksurfacetomesh',
'vmtk.vmtksurfaceviewer',
'vmtk.vmtksurfacewriter',
'vmtk.vmtksurfmesh',
'vmtk.vmtktetgen',
'vmtk.vmtktetringenerator',
'vmtk.vmtkboundarylayer2',
'vmtk.vmtkcenterlinestonumpy',
'vmtk.vmtkdijkstradistancetopoints',
'vmtk.vmtkdistancetospheres',
'vmtk.vmtkentityrenumber',
'vmtk.vmtkgeodesicsurfaceresolution',
'vmtk.vmtkimagetonumpy',
'vmtk.vmtkmeshaddexternallayer',
'vmtk.vmtkmeshclipcenterlines',
'vmtk.vmtkmeshmerge',
'vmtk.vmtkmeshtetrahedralize2',
'vmtk.vmtkmeshviewer2',
'vmtk.vmtkmeshwriter2',
'vmtk.vmtknumpyreader',
'vmtk.vmtknumpytocenterlines',
'vmtk.vmtknumpytoimage',
'vmtk.vmtknumpytosurface',
'vmtk.vmtknumpywriter',
'vmtk.vmtksurfaceextractinnercylinder',
'vmtk.vmtksurfaceresolution',
'vmtk.vmtksurfacetonumpy',
'vmtk.vmtksurfacewriter2',
'vmtk.vmtkthreshold' ]
return allscripts
def METHOD_NAME(vmtk_scripts):
import importlib
print(vmtk_scripts)
for name in vmtk_scripts:
assert importlib.import_module(name)
|
3,848 |
test has enough time for trial avg
|
import pytest
from autogluon.common import space
from autogluon.core.scheduler.seq_scheduler import LocalSequentialScheduler
cls = LocalSequentialScheduler
def test_get_average_trial_time_():
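# Trial durations are 2s, 4s and 6s; the helper should return the running mean after each trial: 2.0, 3.0, 4.0.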
running_time = cls.get_average_trial_time_(0, avg_trial_run_time=None, trial_start_time=100, time_end=102)
assert running_time == 2
running_time = cls.get_average_trial_time_(1, avg_trial_run_time=running_time, trial_start_time=110, time_end=114)
assert running_time == 3.0
running_time = cls.get_average_trial_time_(2, avg_trial_run_time=running_time, trial_start_time=120, time_end=126)
assert running_time == 4.0
def test_has_enough_time_for_trial__enough_time__no_avg_time():
# Enough time - no average time
assert cls.has_enough_time_for_trial_(time_out=10, time_start=100, trial_start_time=105, trial_end_time=106, avg_trial_run_time=None)
def test_has_enough_time_for_trial__enough_time__avg_time_allows_trials():
# Enough time - average time allows more trial
assert cls.has_enough_time_for_trial_(time_out=10, time_start=100, trial_start_time=105, trial_end_time=106, avg_trial_run_time=1)
def test_has_enough_time_for_trial__enough_time__avg_time_not_allows_trials():
# Enough time - average time does not allow more trial
assert not cls.has_enough_time_for_trial_(time_out=10, time_start=100, trial_start_time=105, trial_end_time=106, avg_trial_run_time=5)
def test_has_enough_time_for_trial__time_exceeded_no_avg_time():
# Time exceeded - no average time
assert not cls.has_enough_time_for_trial_(time_out=10, time_start=100, trial_start_time=105, trial_end_time=116, avg_trial_run_time=None)
def METHOD_NAME():
# Time exceeded - zero average trial time
assert not cls.has_enough_time_for_trial_(time_out=10, time_start=100, trial_start_time=105, trial_end_time=116, avg_trial_run_time=0)
def test_has_enough_time_for_trial__enough_time__avg_time_not_allows_trials_by_fill_factor():
# Enough time - average time does not allow more trial
assert not cls.has_enough_time_for_trial_(time_out=10, time_start=100, trial_start_time=105, trial_end_time=106, avg_trial_run_time=1, fill_factor=5)
def test_LocalSequentialScheduler_no_criteria():
search_space = {"lr": space.Real(1e-2, 1e-1, log=True)}
def _train_fn_():
pass
with pytest.raises(AssertionError, match="Need stopping criterion: Either num_trials or time_out"):
LocalSequentialScheduler(train_fn=_train_fn_, search_space=search_space, reward_attr="reward_attr", resource={})
def test_search_space():
search_space = dict(
a=space.Real(1e-3, 1e-2, log=True),
b=space.Real(1e-3, 1e-2),
c=space.Int(1, 10),
d=space.Categorical("a", "b", "c", "d"),
e=space.Bool(),
)
def train_fn(args, reporter):
a, b, c, d, e = args["a"], args["b"], args["c"], args["d"], args["e"]
assert a <= 1e-2 and a >= 1e-3
assert b <= 1e-2 and b >= 1e-3
assert c <= 10 and c >= 1
assert d in ["a", "b", "c", "d"]
assert e in [True, False]
reporter(epoch=1, accuracy=0)
scheduler = LocalSequentialScheduler(
train_fn,
search_space=search_space,
resource={"num_cpus": "all", "num_gpus": 0},
num_trials=10,
reward_attr="accuracy",
time_attr="epoch",
checkpoint=None,
)
scheduler.run()
def test_scheduler_can_handle_failing_jobs():
trails_outcomes = []
best_result = [-1]
search_space = dict(a=space.Real(0, 1))
def train_fn(args, reporter):
test_should_fail = args["a"] > 0.7
trails_outcomes.append(test_should_fail)
if test_should_fail:
raise Exception("Failed Trial")
elif args["a"] > best_result[0]:
best_result[0] = args["a"]
reporter(epoch=1, accuracy=args["a"])
scheduler = LocalSequentialScheduler(
train_fn,
search_space=search_space,
resource={"num_cpus": "all", "num_gpus": 0},
num_trials=10,
reward_attr="accuracy",
time_attr="epoch",
checkpoint=None,
)
scheduler.run()
actual_runs = []
for trial in scheduler.training_history.values():
is_failed = False
for i in trial:
if "traceback" in i:
is_failed = True
break
actual_runs.append(is_failed)
assert trails_outcomes == actual_runs
assert scheduler.get_best_reward() == best_result[0]
assert scheduler.get_best_config() == {"a": best_result[0]}
|
3,849 |
client exited server
|
# Copyright 2017-2020 Palantir Technologies, Inc.
# Copyright 2021- Python Language Server Contributors.
import os
import time
import multiprocessing
import sys
from threading import Thread
from flaky import flaky
from pylsp_jsonrpc.exceptions import JsonRpcMethodNotFound
import pytest
from pylsp.python_lsp import start_io_lang_server, PythonLSPServer
CALL_TIMEOUT = 10
RUNNING_IN_CI = bool(os.environ.get('CI'))
def start_client(client):
client.start()
class _ClientServer:
""" A class to setup a client/server pair """
def __init__(self, check_parent_process=False):
# Client to Server pipe
csr, csw = os.pipe()
# Server to client pipe
scr, scw = os.pipe()
if os.name == 'nt':
ParallelKind = Thread
else:
if sys.version_info[:2] >= (3, 8):
ParallelKind = multiprocessing.get_context("fork").Process
else:
ParallelKind = multiprocessing.Process
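# The server end reads the client-to-server pipe (csr) and writes the server-to-client pipe (scw); the client below gets the opposite ends.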
self.process = ParallelKind(target=start_io_lang_server, args=(
os.fdopen(csr, 'rb'), os.fdopen(scw, 'wb'), check_parent_process, PythonLSPServer
))
self.process.start()
self.client = PythonLSPServer(os.fdopen(scr, 'rb'), os.fdopen(csw, 'wb'), start_io_lang_server)
self.client_thread = Thread(target=start_client, args=[self.client])
self.client_thread.daemon = True
self.client_thread.start()
@pytest.fixture
def client_server():
""" A fixture that sets up a client/server pair and shuts down the server
This client/server pair does not support checking parent process aliveness
"""
client_server_pair = _ClientServer()
yield client_server_pair.client
shutdown_response = client_server_pair.client._endpoint.request('shutdown').result(timeout=CALL_TIMEOUT)
assert shutdown_response is None
client_server_pair.client._endpoint.notify('exit')
@pytest.fixture
def METHOD_NAME():
""" A fixture that sets up a client/server pair that support checking parent process aliveness
and assert the server has already exited
"""
client_server_pair = _ClientServer(True)
# yield client_server_pair.client
yield client_server_pair
assert client_server_pair.process.is_alive() is False
@flaky(max_runs=10, min_passes=1)
@pytest.mark.skipif(sys.platform == 'darwin', reason='Too flaky on Mac')
def test_initialize(client_server): # pylint: disable=redefined-outer-name
response = client_server._endpoint.request('initialize', {
'rootPath': os.path.dirname(__file__),
'initializationOptions': {}
}).result(timeout=CALL_TIMEOUT)
assert 'capabilities' in response
@flaky(max_runs=10, min_passes=1)
@pytest.mark.skipif(not sys.platform.startswith('linux'), reason='Skipped on win and flaky on mac')
def test_exit_with_parent_process_died(METHOD_NAME): # pylint: disable=redefined-outer-name
# language server should have already exited before responding
lsp_server, mock_process = METHOD_NAME.client, METHOD_NAME.process
# with pytest.raises(Exception):
lsp_server._endpoint.request('initialize', {
'processId': mock_process.pid,
'rootPath': os.path.dirname(__file__),
'initializationOptions': {}
}).result(timeout=CALL_TIMEOUT)
mock_process.terminate()
time.sleep(CALL_TIMEOUT)
assert not METHOD_NAME.client_thread.is_alive()
@flaky(max_runs=10, min_passes=1)
@pytest.mark.skipif(sys.platform.startswith('linux'), reason='Fails on linux')
def test_not_exit_without_check_parent_process_flag(client_server): # pylint: disable=redefined-outer-name
response = client_server._endpoint.request('initialize', {
'processId': 1234,
'rootPath': os.path.dirname(__file__),
'initializationOptions': {}
}).result(timeout=CALL_TIMEOUT)
assert 'capabilities' in response
@flaky(max_runs=10, min_passes=1)
@pytest.mark.skipif(RUNNING_IN_CI, reason='This test is hanging on CI')
def test_missing_message(client_server): # pylint: disable=redefined-outer-name
with pytest.raises(JsonRpcMethodNotFound):
client_server._endpoint.request('unknown_method').result(timeout=CALL_TIMEOUT)
|
3,850 |
to dict
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.28
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1PriorityClassList(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[V1PriorityClass]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""V1PriorityClassList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this V1PriorityClassList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1PriorityClassList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1PriorityClassList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1PriorityClassList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1PriorityClassList. # noqa: E501
items is the list of PriorityClasses # noqa: E501
:return: The items of this V1PriorityClassList. # noqa: E501
:rtype: list[V1PriorityClass]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1PriorityClassList.
items is the list of PriorityClasses # noqa: E501
:param items: The items of this V1PriorityClassList. # noqa: E501
:type: list[V1PriorityClass]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1PriorityClassList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1PriorityClassList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1PriorityClassList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1PriorityClassList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1PriorityClassList. # noqa: E501
:return: The metadata of this V1PriorityClassList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1PriorityClassList.
:param metadata: The metadata of this V1PriorityClassList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def METHOD_NAME(self):
"""Returns the model properties as a dict"""
result = {}
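# Recursively convert nested models (and lists/dicts of models) into plain dictionaries.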
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.METHOD_NAME() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.METHOD_NAME()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].METHOD_NAME())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.METHOD_NAME())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1PriorityClassList):
return False
return self.METHOD_NAME() == other.METHOD_NAME()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1PriorityClassList):
return True
return self.METHOD_NAME() != other.METHOD_NAME()
|
3,851 |
find by
|
# Copyright © 2022 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds data for request tracker."""
from __future__ import annotations
from enum import auto
from typing import List
from legal_api.utils.base import BaseEnum
from legal_api.utils.datetime import datetime
from legal_api.utils.legislation_datetime import LegislationDatetime
from .db import db
class RequestTracker(db.Model): # pylint: disable=too-many-instance-attributes
"""This class manages the request tracker."""
class ServiceName(BaseEnum):
"""Render an Enum of the service name."""
BN_HUB = auto()
class RequestType(BaseEnum):
"""Render an Enum of the service name."""
INFORM_CRA = auto()
GET_BN = auto()
CHANGE_DELIVERY_ADDRESS = auto()
CHANGE_MAILING_ADDRESS = auto()
CHANGE_NAME = auto()
CHANGE_STATUS = auto()
CHANGE_PARTY = auto()
__tablename__ = 'request_tracker'
id = db.Column(db.Integer, primary_key=True)
request_type = db.Column('request_type', db.Enum(RequestType), nullable=False)
is_processed = db.Column('is_processed', db.Boolean, default=False)
request_object = db.Column(db.Text)
response_object = db.Column(db.Text)
retry_number = db.Column('retry_number', db.Integer, default=0, nullable=False)
service_name = db.Column('service_name', db.Enum(ServiceName), nullable=False)
creation_date = db.Column('creation_date', db.DateTime(timezone=True), default=datetime.utcnow)
last_modified = db.Column('last_modified', db.DateTime(timezone=True), default=datetime.utcnow)
is_admin = db.Column('is_admin', db.Boolean, default=False)
message_id = db.Column('message_id', db.String(60))
# parent keys
business_id = db.Column('business_id', db.Integer, db.ForeignKey('businesses.id'), index=True)
filing_id = db.Column('filing_id', db.Integer, db.ForeignKey('filings.id'), index=True)
@property
def json(self) -> dict:
"""Return the request tracker as a json object."""
return {
'id': self.id,
'requestType': self.request_type.name,
'isProcessed': self.is_processed,
'serviceName': self.service_name.name,
'isAdmin': self.is_admin,
'creationDate': LegislationDatetime.as_legislation_timezone(self.creation_date).isoformat()
}
def save(self):
"""Save the object to the database immediately."""
db.session.add(self)
db.session.commit()
@classmethod
def find_by_id(cls, request_tracker_id: int) -> RequestTracker:
"""Return the request tracker matching the id."""
request_tracker = None
if request_tracker_id:
request_tracker = cls.query.filter_by(id=request_tracker_id).one_or_none()
return request_tracker
@classmethod
def METHOD_NAME(cls, # pylint: disable=too-many-arguments
business_id: int,
service_name: ServiceName,
request_type: RequestType = None,
filing_id: int = None,
is_admin: bool = None,
message_id: str = None) -> List[RequestTracker]:
"""Return the request tracker matching."""
query = db.session.query(RequestTracker). \
filter(RequestTracker.business_id == business_id). \
filter(RequestTracker.service_name == service_name)
if request_type:
query = query.filter(RequestTracker.request_type == request_type)
if filing_id:
query = query.filter(RequestTracker.filing_id == filing_id)
if is_admin:
query = query.filter(RequestTracker.is_admin == is_admin)
if message_id:
query = query.filter(RequestTracker.message_id == message_id)
request_trackers = query.order_by(RequestTracker.id).all()
return request_trackers
|
3,852 |
test autodetects
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import pretend
import pytest
from warehouse import db
from warehouse.cli import shell
class TestAutoDetection:
def test_bpython(self, monkeypatch):
monkeypatch.setitem(sys.modules, "bpython", pretend.stub())
assert shell.autodetect() == "bpython"
def test_bpython_over_ipython(self, monkeypatch):
monkeypatch.setitem(sys.modules, "bpython", pretend.stub())
monkeypatch.setitem(sys.modules, "IPython", pretend.stub())
assert shell.autodetect() == "bpython"
def test_ipython(self, monkeypatch):
monkeypatch.setitem(sys.modules, "IPython", pretend.stub())
assert shell.autodetect() == "ipython"
def test_plain(self, monkeypatch):
monkeypatch.setitem(sys.modules, "plain", pretend.stub())
assert shell.autodetect() == "plain"
class TestShells:
def test_bpython(self, monkeypatch):
bpython_mod = pretend.stub(embed=pretend.call_recorder(lambda a: None))
monkeypatch.setitem(sys.modules, "bpython", bpython_mod)
shell.bpython(one="two")
assert bpython_mod.embed.calls == [pretend.call({"one": "two"})]
def test_ipython(self, monkeypatch):
ipython_mod = pretend.stub(
start_ipython=pretend.call_recorder(lambda argv, user_ns: None)
)
monkeypatch.setitem(sys.modules, "IPython", ipython_mod)
shell.ipython(two="one")
assert ipython_mod.start_ipython.calls == [
pretend.call(argv=[], user_ns={"two": "one"})
]
def test_plain(self, monkeypatch):
code_mod = pretend.stub(interact=pretend.call_recorder(lambda local: None))
monkeypatch.setitem(sys.modules, "code", code_mod)
shell.plain(three="four")
assert code_mod.interact.calls == [pretend.call(local={"three": "four"})]
class TestCLIShell:
def METHOD_NAME(self, monkeypatch, cli):
autodetect = pretend.call_recorder(lambda: "plain")
monkeypatch.setattr(shell, "autodetect", autodetect)
session = pretend.stub()
session_cls = pretend.call_recorder(lambda bind: session)
monkeypatch.setattr(db, "Session", session_cls)
plain = pretend.call_recorder(lambda **kw: None)
monkeypatch.setattr(shell, "plain", plain)
engine = pretend.stub()
config = pretend.stub(registry={"sqlalchemy.engine": engine})
result = cli.invoke(shell.shell, obj=config)
assert result.exit_code == 0
assert autodetect.calls == [pretend.call()]
assert session_cls.calls == [pretend.call(bind=engine)]
assert plain.calls == [pretend.call(config=config, db=session)]
@pytest.mark.parametrize("type_", ["bpython", "ipython", "plain"])
def test_specify_type(self, monkeypatch, cli, type_):
autodetect = pretend.call_recorder(lambda: "plain")
monkeypatch.setattr(shell, "autodetect", autodetect)
session = pretend.stub()
session_cls = pretend.call_recorder(lambda bind: session)
monkeypatch.setattr(db, "Session", session_cls)
runner = pretend.call_recorder(lambda **kw: None)
monkeypatch.setattr(shell, type_, runner)
engine = pretend.stub()
config = pretend.stub(registry={"sqlalchemy.engine": engine})
result = cli.invoke(shell.shell, ["--type", type_], obj=config)
assert result.exit_code == 0
assert autodetect.calls == []
assert session_cls.calls == [pretend.call(bind=engine)]
assert runner.calls == [pretend.call(config=config, db=session)]
@pytest.mark.parametrize("type_", ["bpython", "ipython", "plain"])
def test_unavailable_shell(self, monkeypatch, cli, type_):
autodetect = pretend.call_recorder(lambda: "plain")
monkeypatch.setattr(shell, "autodetect", autodetect)
session = pretend.stub()
session_cls = pretend.call_recorder(lambda bind: session)
monkeypatch.setattr(db, "Session", session_cls)
@pretend.call_recorder
def runner(**kw):
raise ImportError
monkeypatch.setattr(shell, type_, runner)
engine = pretend.stub()
config = pretend.stub(registry={"sqlalchemy.engine": engine})
result = cli.invoke(shell.shell, ["--type", type_], obj=config)
assert result.exit_code == 1
assert autodetect.calls == []
assert session_cls.calls == [pretend.call(bind=engine)]
assert runner.calls == [pretend.call(config=config, db=session)]
|
3,853 |
output vcd
|
import re
import simplejson as sjson
from pathlib import Path
from fud.stages import Stage, SourceType, Source
from fud.utils import shell, TmpDir, log
from fud.stages.verilator.json_to_dat import convert2dat, convert2json
from fud.stages import futil
import fud.errors as errors
class IcarusBaseStage(Stage):
"""
Stage to run Verilog programs with Icarus Verilog
"""
name = "icarus-verilog"
def __init__(self, is_vcd, desc):
super().__init__(
src_state="icarus-verilog",
target_state="vcd" if is_vcd else "dat",
input_type=SourceType.Path,
output_type=SourceType.Stream,
description=desc,
)
self.is_vcd = is_vcd
self.object_name = "main.vvp"
@staticmethod
def pre_install():
pass
@staticmethod
def defaults():
parent = Path(__file__).parent.resolve()
test_bench = parent / "./tb.sv"
return {
"exec": "iverilog",
"testbench": str(test_bench.resolve()),
"round_float_to_fixed": True,
}
def known_opts(self):
return ["exec", "testbench", "round_float_to_fixed"]
def _define_steps(self, input_data, builder, config):
testbench = config["stages", self.name, "testbench"]
cmd = config["stages", self.name, "exec"]
# Step 1: Make a new temporary directory
@builder.step()
def mktmp() -> SourceType.Directory:
"""
Make temporary directory to store Icarus Verilog build files.
"""
return TmpDir()
# Step 2a: Dynamically retrieve the value of stages.verilog.data
@builder.step(
description="Dynamically retrieve the value of stages.verilog.data"
)
def get_verilog_data() -> SourceType.Path:
data_path = config.get(["stages", "verilog", "data"])
path = Path(data_path) if data_path else None
return Source(path, SourceType.Path)
# Step 2b: check if we need verilog.data to be passed
@builder.step()
def check_verilog_for_mem_read(
verilog_src: SourceType.String, data_path: SourceType.Path
):
"""
Read input verilog to see if `icarus-verilog.data` needs to be set.
"""
# If verilog.data exists, do nothing
if not data_path.data and "readmemh" in verilog_src:
raise errors.MissingDynamicConfiguration("verilog.data")
# Step 2: Transform data from JSON to Dat.
@builder.step()
def json_to_dat(tmp_dir: SourceType.Directory, json_path: SourceType.Path):
"""
Converts a `json` data format into a series of `.dat` files.
"""
round_float_to_fixed = config["stages", self.name, "round_float_to_fixed"]
# if verilog.data was not given, do nothing
if json_path.data:
convert2dat(
tmp_dir.name,
sjson.load(open(json_path.data), use_decimal=True),
"dat",
round_float_to_fixed,
)
# Step 3: compile with Icarus Verilog (iverilog)
cmd = " ".join(
[
cmd,
"-g2012",
"-o",
"{exec_path}",
testbench,
"{input_path}",
]
)
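# {exec_path} and {input_path} are placeholders filled in by compile_with_iverilog below.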
@builder.step(description=cmd)
def compile_with_iverilog(
input_path: SourceType.Path, tmpdir: SourceType.Directory
) -> SourceType.Stream:
return shell(
cmd.format(
input_path=str(input_path),
exec_path=f"{tmpdir.name}/{self.object_name}",
),
stdout_as_debug=True,
)
# Step 4: simulate
@builder.step()
def simulate(tmpdir: SourceType.Directory) -> SourceType.Stream:
"""
Simulates compiled icarus verilog program.
"""
cycle_limit = config["stages", "verilog", "cycle_limit"]
return shell(
[
f"{tmpdir.name}/{self.object_name}",
f"+DATA={tmpdir.name}",
f"+CYCLE_LIMIT={str(cycle_limit)}",
f"+OUT={tmpdir.name}/output.vcd",
f"+NOTRACE={0 if self.is_vcd else 1}",
]
)
# Step 5 (self.is_vcd == True): extract VCD
@builder.step()
def METHOD_NAME(tmpdir: SourceType.Directory) -> SourceType.Stream:
"""
Return the generated `output.vcd`.
"""
# return stream instead of path because tmpdir gets deleted
# before the next stage runs
return (Path(tmpdir.name) / "output.vcd").open("rb")
# Step 5 (self.is_vcd == False): extract cycles + data
@builder.step()
def output_json(
simulated_output: SourceType.String, tmpdir: SourceType.Directory
) -> SourceType.Stream:
"""
Convert .dat files back into a json file
"""
found = re.search(r"reached limit of\s+(\d+) cycles", simulated_output)
if found is not None:
raise errors.CycleLimitedReached("verilog", found.group(1))
r = re.search(r"Simulated\s+((-)?\d+) cycles", simulated_output)
cycle_count = int(r.group(1)) if r is not None else 0
if cycle_count < 0:
log.warn("Cycle count is less than 0")
data = {
"cycles": cycle_count,
"memories": convert2json(tmpdir.name, "out"),
}
# Write to a file so we can return a stream.
out = Path(tmpdir.name) / "output.json"
with out.open("w") as f:
sjson.dump(data, f, indent=2, sort_keys=True, use_decimal=True)
return out.open("rb")
@builder.step()
def cleanup(tmpdir: SourceType.Directory):
"""
Cleanup build files
"""
tmpdir.remove()
# Schedule
tmpdir = mktmp()
data_path = get_verilog_data()
# data_path_exists: bool = (
# config.get(["stages", "verilog", "data"]) or
# config.get(["stages", "mrxl", "data"])
# )
# fail early if the Verilog reads memories but no data file was provided
check_verilog_for_mem_read(input_data, data_path)
# convert any provided JSON data into .dat files
json_to_dat(tmpdir, data_path)
compile_with_iverilog(input_data, tmpdir)
stdout = simulate(tmpdir)
result = None
if self.is_vcd:
result = METHOD_NAME(tmpdir)
else:
result = output_json(stdout, tmpdir)
cleanup(tmpdir)
return result
class FutilToIcarus(futil.CalyxStage):
"""
Stage to transform Calyx into icarus-verilog simulatable Verilog
"""
# No name since CalyxStage already defines names
@staticmethod
def pre_install():
pass
def __init__(self):
super().__init__(
"icarus-verilog",
"-b verilog --disable-verify",
"Compile Calyx to Verilog instrumented for simulation",
)
class IcarusToVCDStage(IcarusBaseStage):
"""
Stage to generate VCD files by simulating through Icarus
"""
def __init__(self):
super().__init__(True, "Runs Verilog programs with Icarus and generates VCD")
class IcarusToJsonStage(IcarusBaseStage):
"""
Stage to generate VCD files by simulating through Icarus
"""
def __init__(self):
super().__init__(
False,
"Runs Verilog programs with Icarus and generates JSON memory file",
)
# Export the defined stages to fud
__STAGES__ = [FutilToIcarus, IcarusToVCDStage, IcarusToJsonStage]
|
3,854 |
from wallet ws
|
import asyncio
import uvloop
uvloop.install() # noqa
import pytest
import pytest_asyncio
from fastapi.testclient import TestClient
from httpx import AsyncClient
from lnbits.app import create_app
from lnbits.core.crud import create_account, create_wallet
from lnbits.core.models import CreateInvoice
from lnbits.core.services import update_wallet_balance
from lnbits.core.views.api import api_payments_create_invoice
from lnbits.db import Database
from lnbits.settings import settings
from tests.helpers import get_hold_invoice, get_random_invoice_data, get_real_invoice
# dont install extensions for tests
settings.lnbits_extensions_default_install = []
@pytest_asyncio.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
loop.close()
# use session scope to run once before and once after all tests
@pytest_asyncio.fixture(scope="session")
async def app():
app = create_app()
await app.router.startup()
yield app
await app.router.shutdown()
@pytest_asyncio.fixture(scope="session")
async def client(app):
client = AsyncClient(app=app, base_url=f"http://{settings.host}:{settings.port}")
yield client
await client.aclose()
@pytest.fixture(scope="session")
def test_client(app):
return TestClient(app)
@pytest_asyncio.fixture(scope="session")
async def db():
yield Database("database")
@pytest_asyncio.fixture(scope="session")
async def from_user():
user = await create_account()
yield user
@pytest_asyncio.fixture(scope="session")
async def from_wallet(from_user):
user = from_user
wallet = await create_wallet(user_id=user.id, wallet_name="test_wallet_from")
await update_wallet_balance(
wallet_id=wallet.id,
amount=999999999,
)
yield wallet
@pytest_asyncio.fixture
async def METHOD_NAME(from_wallet, test_client):
# wait a bit in order to avoid receiving topup notification
await asyncio.sleep(0.1)
with test_client.websocket_connect(f"/api/v1/ws/{from_wallet.id}") as ws:
yield ws
@pytest_asyncio.fixture(scope="session")
async def to_user():
user = await create_account()
yield user
@pytest_asyncio.fixture(scope="session")
async def to_wallet(to_user):
user = to_user
wallet = await create_wallet(user_id=user.id, wallet_name="test_wallet_to")
await update_wallet_balance(
wallet_id=wallet.id,
amount=999999999,
)
yield wallet
@pytest_asyncio.fixture
async def to_wallet_ws(to_wallet, test_client):
# wait a bit in order to avoid receiving topup notification
await asyncio.sleep(0.1)
with test_client.websocket_connect(f"/api/v1/ws/{to_wallet.id}") as ws:
yield ws
@pytest_asyncio.fixture(scope="session")
async def inkey_headers_from(from_wallet):
wallet = from_wallet
yield {
"X-Api-Key": wallet.inkey,
"Content-type": "application/json",
}
@pytest_asyncio.fixture(scope="session")
async def adminkey_headers_from(from_wallet):
wallet = from_wallet
yield {
"X-Api-Key": wallet.adminkey,
"Content-type": "application/json",
}
@pytest_asyncio.fixture(scope="session")
async def inkey_headers_to(to_wallet):
wallet = to_wallet
yield {
"X-Api-Key": wallet.inkey,
"Content-type": "application/json",
}
@pytest_asyncio.fixture(scope="session")
async def adminkey_headers_to(to_wallet):
wallet = to_wallet
yield {
"X-Api-Key": wallet.adminkey,
"Content-type": "application/json",
}
@pytest_asyncio.fixture(scope="session")
async def invoice(to_wallet):
data = await get_random_invoice_data()
invoiceData = CreateInvoice(**data)
invoice = await api_payments_create_invoice(invoiceData, to_wallet)
yield invoice
del invoice
@pytest_asyncio.fixture(scope="function")
async def real_invoice():
invoice = get_real_invoice(100)
yield {"bolt11": invoice["payment_request"]}
del invoice
@pytest_asyncio.fixture(scope="function")
async def hold_invoice():
invoice = get_hold_invoice(100)
yield invoice
del invoice
|
3,855 |
test parse request with invalid body
|
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=unused-variable
import json
from typing import Callable
from uuid import UUID
import pytest
from aiohttp import web
from aiohttp.test_utils import TestClient
from faker import Faker
from pydantic import BaseModel, Extra, Field
from servicelib.aiohttp.requests_validation import (
parse_request_body_as,
parse_request_path_parameters_as,
parse_request_query_parameters_as,
)
from servicelib.json_serialization import json_dumps
RQT_USERID_KEY = f"{__name__}.user_id"
APP_SECRET_KEY = f"{__name__}.secret"
def jsonable_encoder(data):
# Neither models_library nor fastapi is part of the requirements.
# Q&D replacement for fastapi.encoders.jsonable_encoder
return json.loads(json_dumps(data))
class MyRequestContext(BaseModel):
user_id: int = Field(alias=RQT_USERID_KEY)
secret: str = Field(alias=APP_SECRET_KEY)
@classmethod
def create_fake(cls, faker: Faker):
return cls(user_id=faker.pyint(), secret=faker.password())
class MyRequestPathParams(BaseModel):
project_uuid: UUID
class Config:
extra = Extra.forbid
@classmethod
def create_fake(cls, faker: Faker):
return cls(project_uuid=faker.uuid4())
class MyRequestQueryParams(BaseModel):
is_ok: bool = True
label: str
def as_params(self, **kwargs) -> dict[str, str]:
data = self.dict(**kwargs)
return {k: f"{v}" for k, v in data.items()}
@classmethod
def create_fake(cls, faker: Faker):
return cls(is_ok=faker.pybool(), label=faker.word())
class Sub(BaseModel):
a: float = 33
@classmethod
def create_fake(cls, faker: Faker):
return cls(a=faker.pyfloat())
class MyBody(BaseModel):
x: int
y: bool = False
z: Sub
@classmethod
def create_fake(cls, faker: Faker):
return cls(x=faker.pyint(), y=faker.pybool(), z=Sub.create_fake(faker))
@pytest.fixture
def client(event_loop, aiohttp_client: Callable, faker: Faker) -> TestClient:
"""
Some app that:
- creates app and request context
- has a handler that parses request params, query and body
"""
async def _handler(request: web.Request) -> web.Response:
# --------- UNDER TEST -------
# NOTE: app context does NOT need to be validated every time!
context = MyRequestContext.parse_obj({**dict(request.app), **dict(request)})
path_params = parse_request_path_parameters_as(
MyRequestPathParams, request, use_enveloped_error_v1=False
)
query_params = parse_request_query_parameters_as(
MyRequestQueryParams, request, use_enveloped_error_v1=False
)
body = await parse_request_body_as(
MyBody, request, use_enveloped_error_v1=False
)
# ---------------------------
return web.json_response(
{
"parameters": path_params.dict(),
"queries": query_params.dict(),
"body": body.dict(),
"context": context.dict(),
},
dumps=json_dumps,
)
# ---
@web.middleware
async def _middleware(request: web.Request, handler):
# request context
request[RQT_USERID_KEY] = 42
request["RQT_IGNORE_CONTEXT"] = "not interesting"
resp = await handler(request)
return resp
app = web.Application(
middlewares=[
_middleware,
]
)
# app context
app[APP_SECRET_KEY] = faker.password()
app["APP_IGNORE_CONTEXT"] = "not interesting"
# adds handler
app.add_routes([web.get("/projects/{project_uuid}", _handler)])
return event_loop.run_until_complete(aiohttp_client(app))
@pytest.fixture
def path_params(faker: Faker):
path_params = MyRequestPathParams.create_fake(faker)
return path_params
@pytest.fixture
def query_params(faker: Faker) -> MyRequestQueryParams:
return MyRequestQueryParams.create_fake(faker)
@pytest.fixture
def body(faker: Faker) -> MyBody:
return MyBody.create_fake(faker)
async def test_parse_request_as(
client: TestClient,
path_params: MyRequestPathParams,
query_params: MyRequestQueryParams,
body: MyBody,
):
assert client.app
r = await client.get(
f"/projects/{path_params.project_uuid}",
params=query_params.as_params(),
json=body.dict(),
)
assert r.status == web.HTTPOk.status_code, f"{await r.text()}"
got = await r.json()
assert got["parameters"] == jsonable_encoder(path_params.dict())
assert got["queries"] == jsonable_encoder(query_params.dict())
assert got["body"] == body.dict()
assert got["context"] == {
"secret": client.app[APP_SECRET_KEY],
"user_id": 42,
}
async def test_parse_request_with_invalid_path_params(
client: TestClient,
query_params: MyRequestQueryParams,
body: MyBody,
):
r = await client.get(
"/projects/invalid-uuid",
params=query_params.as_params(),
json=body.dict(),
)
assert r.status == web.HTTPUnprocessableEntity.status_code, f"{await r.text()}"
response_body = await r.json()
assert response_body["error"].pop("resource")
assert response_body == {
"error": {
"msg": "Invalid parameter/s 'project_uuid' in request path",
"details": [
{
"loc": "project_uuid",
"msg": "value is not a valid uuid",
"type": "type_error.uuid",
}
],
}
}
async def test_parse_request_with_invalid_query_params(
client: TestClient,
path_params: MyRequestPathParams,
body: MyBody,
):
r = await client.get(
f"/projects/{path_params.project_uuid}",
params={},
json=body.dict(),
)
assert r.status == web.HTTPUnprocessableEntity.status_code, f"{await r.text()}"
response_body = await r.json()
assert response_body["error"].pop("resource")
assert response_body == {
"error": {
"msg": "Invalid parameter/s 'label' in request query",
"details": [
{
"loc": "label",
"msg": "field required",
"type": "value_error.missing",
}
],
}
}
async def METHOD_NAME(
client: TestClient,
path_params: MyRequestPathParams,
query_params: MyRequestQueryParams,
):
r = await client.get(
f"/projects/{path_params.project_uuid}",
params=query_params.as_params(),
json={"invalid": "body"},
)
assert r.status == web.HTTPUnprocessableEntity.status_code, f"{await r.text()}"
response_body = await r.json()
assert response_body["error"].pop("resource")
assert response_body == {
"error": {
"msg": "Invalid field/s 'x, z' in request body",
"details": [
{
"loc": "x",
"msg": "field required",
"type": "value_error.missing",
},
{
"loc": "z",
"msg": "field required",
"type": "value_error.missing",
},
],
}
}
async def test_parse_request_with_invalid_json_body(
client: TestClient,
path_params: MyRequestPathParams,
query_params: MyRequestQueryParams,
):
r = await client.get(
f"/projects/{path_params.project_uuid}",
params=query_params.as_params(),
data=b"[ 1 2, 3 'broken-json' ]",
)
body = await r.text()
assert r.status == web.HTTPBadRequest.status_code, body
|
3,856 |
set up
|
import unittest
import tempfile
import os
import six
import caffe
class SimpleLayer(caffe.Layer):
"""A layer that just multiplies by ten"""
def setup(self, bottom, top):
pass
def reshape(self, bottom, top):
top[0].reshape(*bottom[0].data.shape)
def forward(self, bottom, top):
top[0].data[...] = 10 * bottom[0].data
def backward(self, top, propagate_down, bottom):
bottom[0].diff[...] = 10 * top[0].diff
class ExceptionLayer(caffe.Layer):
"""A layer for checking exceptions from Python"""
def setup(self, bottom, top):
raise RuntimeError
class ParameterLayer(caffe.Layer):
"""A layer that just multiplies by ten"""
def setup(self, bottom, top):
self.blobs.add_blob(1)
self.blobs[0].data[0] = 0
def reshape(self, bottom, top):
top[0].reshape(*bottom[0].data.shape)
def forward(self, bottom, top):
pass
def backward(self, top, propagate_down, bottom):
self.blobs[0].diff[0] = 1
class PhaseLayer(caffe.Layer):
"""A layer for checking attribute `phase`"""
def setup(self, bottom, top):
pass
def reshape(self, bottom, top):
top[0].reshape()
def forward(self, bottom, top):
top[0].data[()] = self.phase
def python_net_file():
with tempfile.NamedTemporaryFile(mode='w+', delete=False) as f:
f.write("""name: 'pythonnet' force_backward: true
input: 'data' input_shape { dim: 10 dim: 9 dim: 8 }
layer { type: 'Python' name: 'one' bottom: 'data' top: 'one'
python_param { module: 'test_python_layer' layer: 'SimpleLayer' } }
layer { type: 'Python' name: 'two' bottom: 'one' top: 'two'
python_param { module: 'test_python_layer' layer: 'SimpleLayer' } }
layer { type: 'Python' name: 'three' bottom: 'two' top: 'three'
python_param { module: 'test_python_layer' layer: 'SimpleLayer' } }""")
return f.name
def exception_net_file():
with tempfile.NamedTemporaryFile(mode='w+', delete=False) as f:
f.write("""name: 'pythonnet' force_backward: true
input: 'data' input_shape { dim: 10 dim: 9 dim: 8 }
layer { type: 'Python' name: 'layer' bottom: 'data' top: 'top'
python_param { module: 'test_python_layer' layer: 'ExceptionLayer' } }
""")
return f.name
def parameter_net_file():
with tempfile.NamedTemporaryFile(mode='w+', delete=False) as f:
f.write("""name: 'pythonnet' force_backward: true
input: 'data' input_shape { dim: 10 dim: 9 dim: 8 }
layer { type: 'Python' name: 'layer' bottom: 'data' top: 'top'
python_param { module: 'test_python_layer' layer: 'ParameterLayer' } }
""")
return f.name
def phase_net_file():
with tempfile.NamedTemporaryFile(mode='w+', delete=False) as f:
f.write("""name: 'pythonnet' force_backward: true
layer { type: 'Python' name: 'layer' top: 'phase'
python_param { module: 'test_python_layer' layer: 'PhaseLayer' } }
""")
return f.name
@unittest.skipIf('Python' not in caffe.layer_type_list(),
'Caffe built without Python layer support')
class TestPythonLayer(unittest.TestCase):
def METHOD_NAME(self):
net_file = python_net_file()
self.net = caffe.Net(net_file, caffe.TRAIN)
os.remove(net_file)
def test_forward(self):
x = 8
self.net.blobs['data'].data[...] = x
self.net.forward()
for y in self.net.blobs['three'].data.flat:
self.assertEqual(y, 10**3 * x)
def test_backward(self):
x = 7
self.net.blobs['three'].diff[...] = x
self.net.backward()
for y in self.net.blobs['data'].diff.flat:
self.assertEqual(y, 10**3 * x)
def test_reshape(self):
s = 4
self.net.blobs['data'].reshape(s, s, s, s)
self.net.forward()
for blob in six.itervalues(self.net.blobs):
for d in blob.data.shape:
self.assertEqual(s, d)
def test_exception(self):
net_file = exception_net_file()
self.assertRaises(RuntimeError, caffe.Net, net_file, caffe.TEST)
os.remove(net_file)
def test_parameter(self):
net_file = parameter_net_file()
net = caffe.Net(net_file, caffe.TRAIN)
# Test forward and backward
net.forward()
net.backward()
layer = net.layers[list(net._layer_names).index('layer')]
self.assertEqual(layer.blobs[0].data[0], 0)
self.assertEqual(layer.blobs[0].diff[0], 1)
layer.blobs[0].data[0] += layer.blobs[0].diff[0]
self.assertEqual(layer.blobs[0].data[0], 1)
# Test saving and loading
h, caffemodel_file = tempfile.mkstemp()
net.save(caffemodel_file)
layer.blobs[0].data[0] = -1
self.assertEqual(layer.blobs[0].data[0], -1)
net.copy_from(caffemodel_file)
self.assertEqual(layer.blobs[0].data[0], 1)
os.remove(caffemodel_file)
# Test weight sharing
net2 = caffe.Net(net_file, caffe.TRAIN)
net2.share_with(net)
layer = net2.layers[list(net2._layer_names).index('layer')]
self.assertEqual(layer.blobs[0].data[0], 1)
os.remove(net_file)
def test_phase(self):
net_file = phase_net_file()
for phase in caffe.TRAIN, caffe.TEST:
net = caffe.Net(net_file, phase)
self.assertEqual(net.forward()['phase'], phase)
|
3,857 |
maybe wrap constant expr
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module providing various functionality from the top-level pandas namespace.
"""
import re
from typing import Mapping
import pandas as pd
from apache_beam.dataframe import expressions
from apache_beam.dataframe import frame_base
from apache_beam.dataframe import partitionings
def _call_on_first_arg(name):
def wrapper(target, *args, **kwargs):
if isinstance(target, frame_base.DeferredBase):
return getattr(target, name)(*args, **kwargs)
else:
return getattr(pd, name)(target, *args, **kwargs)
return staticmethod(wrapper)
def METHOD_NAME(res):
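# Wrap a concrete pandas result in a ConstantExpression so it behaves like a deferred frame; res[0:0] supplies an empty proxy of the same type.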
if type(res) in frame_base.DeferredBase._pandas_type_map:
return frame_base.DeferredBase.wrap(
expressions.ConstantExpression(res, res[0:0]))
else:
return res
def _defer_to_pandas(name):
func = getattr(pd, name)
def wrapper(*args, **kwargs):
res = func(*args, **kwargs)
return METHOD_NAME(res)
return staticmethod(wrapper)
def _defer_to_pandas_maybe_elementwise(name):
""" Same as _defer_to_pandas, except it handles DeferredBase args, assuming
the function can be processed elementwise. """
func = getattr(pd, name)
def wrapper(*args, **kwargs):
if any(isinstance(arg, frame_base.DeferredBase)
for arg in args + tuple(kwargs.values())):
return frame_base._elementwise_function(func, name)(*args, **kwargs)
res = func(*args, **kwargs)
return METHOD_NAME(res)
return staticmethod(wrapper)
def _is_top_level_function(o):
return (
callable(o) and not isinstance(o, type) and hasattr(o, '__name__') and
re.match('[a-z].*', o.__name__))
class DeferredPandasModule(object):
array = _defer_to_pandas('array')
bdate_range = _defer_to_pandas('bdate_range')
@staticmethod
@frame_base.args_to_kwargs(pd)
@frame_base.populate_defaults(pd)
def concat(
objs,
axis,
join,
ignore_index,
keys,
levels,
names,
verify_integrity,
sort,
copy):
if ignore_index:
raise NotImplementedError('concat(ignore_index)')
if levels:
raise NotImplementedError('concat(levels)')
if isinstance(objs, Mapping):
if keys is None:
keys = list(objs.keys())
objs = [objs[k] for k in keys]
else:
objs = list(objs)
if keys is None:
preserves_partitioning = partitionings.Arbitrary()
else:
# Index 0 will be a new index for keys, only partitioning by the original
# indexes (1 to N) will be preserved.
nlevels = min(o._expr.proxy().index.nlevels for o in objs)
preserves_partitioning = partitionings.Index(
[i for i in range(1, nlevels + 1)])
deferred_none = expressions.ConstantExpression(None)
exprs = [deferred_none if o is None else o._expr for o in objs]
if axis in (1, 'columns'):
required_partitioning = partitionings.Index()
elif verify_integrity:
required_partitioning = partitionings.Index()
else:
required_partitioning = partitionings.Arbitrary()
return frame_base.DeferredBase.wrap(
expressions.ComputedExpression(
'concat',
lambda *objs: pd.concat(
objs,
axis=axis,
join=join,
ignore_index=ignore_index,
keys=keys,
levels=levels,
names=names,
verify_integrity=verify_integrity), # yapf break
exprs,
requires_partition_by=required_partitioning,
preserves_partition_by=preserves_partitioning))
date_range = _defer_to_pandas('date_range')
describe_option = _defer_to_pandas('describe_option')
factorize = _call_on_first_arg('factorize')
get_option = _defer_to_pandas('get_option')
interval_range = _defer_to_pandas('interval_range')
isna = _call_on_first_arg('isna')
isnull = _call_on_first_arg('isnull')
json_normalize = _defer_to_pandas('json_normalize')
melt = _call_on_first_arg('melt')
merge = _call_on_first_arg('merge')
melt = _call_on_first_arg('melt')
merge_ordered = frame_base.wont_implement_method(
pd, 'merge_ordered', reason='order-sensitive')
notna = _call_on_first_arg('notna')
notnull = _call_on_first_arg('notnull')
option_context = _defer_to_pandas('option_context')
period_range = _defer_to_pandas('period_range')
pivot = _call_on_first_arg('pivot')
pivot_table = _call_on_first_arg('pivot_table')
show_versions = _defer_to_pandas('show_versions')
test = frame_base.wont_implement_method(
pd,
'test',
explanation="because it is an internal pandas testing utility.")
timedelta_range = _defer_to_pandas('timedelta_range')
to_pickle = frame_base.wont_implement_method(
pd, 'to_pickle', reason='order-sensitive')
to_datetime = _defer_to_pandas_maybe_elementwise('to_datetime')
notna = _call_on_first_arg('notna')
def __getattr__(self, name):
if name.startswith('read_'):
def func(*args, **kwargs):
raise frame_base.WontImplementError(
'Use p | apache_beam.dataframe.io.%s' % name)
return func
res = getattr(pd, name)
if _is_top_level_function(res):
return frame_base.not_implemented_method(name, base_type=pd)
else:
return res
pd_wrapper = DeferredPandasModule()
|
3,858 |
test dataset from text keep in memory
|
import pytest
from datasets import Dataset, DatasetDict, Features, NamedSplit, Value
from datasets.io.text import TextDatasetReader
from ..utils import assert_arrow_memory_doesnt_increase, assert_arrow_memory_increases
def _check_text_dataset(dataset, expected_features):
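# Every reader variant is expected to produce four rows in a single "text" column with the requested dtype.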
assert isinstance(dataset, Dataset)
assert dataset.num_rows == 4
assert dataset.num_columns == 1
assert dataset.column_names == ["text"]
for feature, expected_dtype in expected_features.items():
assert dataset.features[feature].dtype == expected_dtype
@pytest.mark.parametrize("keep_in_memory", [False, True])
def METHOD_NAME(keep_in_memory, text_path, tmp_path):
cache_dir = tmp_path / "cache"
expected_features = {"text": "string"}
with assert_arrow_memory_increases() if keep_in_memory else assert_arrow_memory_doesnt_increase():
dataset = TextDatasetReader(text_path, cache_dir=cache_dir, keep_in_memory=keep_in_memory).read()
_check_text_dataset(dataset, expected_features)
@pytest.mark.parametrize(
"features",
[
None,
{"text": "string"},
{"text": "int32"},
{"text": "float32"},
],
)
def test_dataset_from_text_features(features, text_path, tmp_path):
cache_dir = tmp_path / "cache"
default_expected_features = {"text": "string"}
expected_features = features.copy() if features else default_expected_features
features = (
Features({feature: Value(dtype) for feature, dtype in features.items()}) if features is not None else None
)
dataset = TextDatasetReader(text_path, features=features, cache_dir=cache_dir).read()
_check_text_dataset(dataset, expected_features)
@pytest.mark.parametrize("split", [None, NamedSplit("train"), "train", "test"])
def test_dataset_from_text_split(split, text_path, tmp_path):
cache_dir = tmp_path / "cache"
expected_features = {"text": "string"}
dataset = TextDatasetReader(text_path, cache_dir=cache_dir, split=split).read()
_check_text_dataset(dataset, expected_features)
assert dataset.split == split if split else "train"
@pytest.mark.parametrize("path_type", [str, list])
def test_dataset_from_text_path_type(path_type, text_path, tmp_path):
if issubclass(path_type, str):
path = text_path
elif issubclass(path_type, list):
path = [text_path]
cache_dir = tmp_path / "cache"
expected_features = {"text": "string"}
dataset = TextDatasetReader(path, cache_dir=cache_dir).read()
_check_text_dataset(dataset, expected_features)
def _check_text_datasetdict(dataset_dict, expected_features, splits=("train",)):
assert isinstance(dataset_dict, DatasetDict)
for split in splits:
dataset = dataset_dict[split]
assert dataset.num_rows == 4
assert dataset.num_columns == 1
assert dataset.column_names == ["text"]
for feature, expected_dtype in expected_features.items():
assert dataset.features[feature].dtype == expected_dtype
@pytest.mark.parametrize("keep_in_memory", [False, True])
def test_datasetdict_from_text_keep_in_memory(keep_in_memory, text_path, tmp_path):
cache_dir = tmp_path / "cache"
expected_features = {"text": "string"}
with assert_arrow_memory_increases() if keep_in_memory else assert_arrow_memory_doesnt_increase():
dataset = TextDatasetReader({"train": text_path}, cache_dir=cache_dir, keep_in_memory=keep_in_memory).read()
_check_text_datasetdict(dataset, expected_features)
@pytest.mark.parametrize(
"features",
[
None,
{"text": "string"},
{"text": "int32"},
{"text": "float32"},
],
)
def test_datasetdict_from_text_features(features, text_path, tmp_path):
cache_dir = tmp_path / "cache"
    # text datasets default to the "string" dtype unless features are explicitly provided
default_expected_features = {"text": "string"}
expected_features = features.copy() if features else default_expected_features
features = (
Features({feature: Value(dtype) for feature, dtype in features.items()}) if features is not None else None
)
dataset = TextDatasetReader({"train": text_path}, features=features, cache_dir=cache_dir).read()
_check_text_datasetdict(dataset, expected_features)
@pytest.mark.parametrize("split", [None, NamedSplit("train"), "train", "test"])
def test_datasetdict_from_text_split(split, text_path, tmp_path):
if split:
path = {split: text_path}
else:
split = "train"
path = {"train": text_path, "test": text_path}
cache_dir = tmp_path / "cache"
expected_features = {"text": "string"}
dataset = TextDatasetReader(path, cache_dir=cache_dir).read()
_check_text_datasetdict(dataset, expected_features, splits=list(path.keys()))
assert all(dataset[split].split == split for split in path.keys())
|
3,859 |
parse proto libraries
|
import os
import re
import textwrap
def grpc_target_name(internal_name):
return f"grpc_{internal_name}"
class _ProtoLibrary:
name: str = None
srcs: list = None
deps: list = None
is_used: bool = False
def __init__(self) -> None:
self.srcs = []
self.deps = set(["protobuf::libprotobuf"]) # Add to all libraries even if not explicitly set
self.is_used = True
def validate(self, source_folder, all_deps):
        # Check that all files exist
for it in self.srcs:
assert os.path.exists(os.path.join(source_folder, it)), f"{self.name} - file '{it}' doesn't exist"
        # Check that all deps exist
for it in self.deps:
assert it in all_deps, f"{self.name} - dep '{it}' not found"
def dumps(self):
import json
return json.dumps({
"name": self.name,
"srcs": self.srcs,
"deps": list(self.deps),
}, indent=4)
@property
def cmake_target(self):
return grpc_target_name(self.name)
@property
def cmake_deps(self):
def to_cmake_target(item):
if item.startswith("//"):
return item[2:].replace("/", "_").replace(":", "_")
return item
return [to_cmake_target(it) for it in self.deps]
@property
def cmake_content(self):
content = f"\n\n# {self.cmake_target}\n"
content += "\n".join([f"#{it}" for it in self.dumps().split('\n')])
content += "\n"
if not self.srcs:
content += textwrap.dedent(f"""\
add_library({self.cmake_target} INTERFACE)
""")
else:
content += textwrap.dedent(f"""\
set({self.cmake_target}_PROTOS {" ".join(["${CMAKE_SOURCE_DIR}/"+it for it in self.srcs])})
add_library({self.cmake_target} ${{{self.cmake_target}_PROTOS}})
target_include_directories({self.cmake_target} PUBLIC ${{CMAKE_BINARY_DIR}})
target_compile_features({self.cmake_target} PUBLIC cxx_std_11)
protobuf_generate(LANGUAGE cpp
TARGET {self.cmake_target}
PROTOS ${{{self.cmake_target}_PROTOS}}
IMPORT_DIRS ${{IMPORT_DIRS}}
)
""")
if self.deps:
content += textwrap.dedent(f"""\
target_link_libraries({self.cmake_target} {"PUBLIC" if self.srcs else "INTERFACE"} {" ".join(self.cmake_deps)})
""")
return content
def METHOD_NAME(filename, source_folder, error):
# Generate the libraries to build dynamically
re_name = re.compile(r'name = "(.*)"')
re_srcs_oneline = re.compile(r'srcs = \["(.*)"\],')
re_deps_oneline = re.compile(r'deps = \["(.*)"\],')
re_add_varname = re.compile(r'] \+ (.*),')
proto_libraries = []
basedir = os.path.dirname(filename)
proto_library = None
def parsing_sources(line):
proto_path = os.path.relpath(os.path.join(basedir, line.strip(",").strip("\"")), source_folder).replace('\\', '/')
proto_library.srcs.append(proto_path)
def parsing_deps(line):
line = line.strip(",").strip("\"")
if line.startswith("@com_google_protobuf//:"):
proto_library.deps.add("protobuf::libprotobuf")
elif line.startswith("@com_google_googleapis//"):
proto_library.deps.add("googleapis::googleapis")
elif line.startswith(":"):
proto_library.deps.add(grpc_target_name(line[1:]))
else:
error(f"Unrecognized dep: {line} -- {os.path.relpath(filename, source_folder)}")
with open(filename, 'r') as f:
action = None
parsing_variable = None
variables = {}
for line in f.readlines():
line = line.strip()
if line == "proto_library(":
                assert proto_library is None
proto_library = _ProtoLibrary()
            elif parsing_variable is not None:
if line == "]":
parsing_variable = None
else:
parsing_variable(line)
            elif proto_library is not None:
if line.startswith("name ="):
proto_library.name = re_name.search(line).group(1)
elif line.startswith("srcs = "):
m = re_srcs_oneline.search(line)
if m:
parsing_sources(m.group(1))
else:
action = parsing_sources
elif line.startswith("deps = "):
m = re_deps_oneline.search(line)
if m:
parsing_deps(m.group(1))
else:
action = parsing_deps
elif line.startswith("visibility = "):
pass
elif line == ")":
proto_libraries.append(proto_library)
proto_library = None
action = None
elif line == "],":
action = None
elif line.startswith("] + "):
varname = re_add_varname.search(line).group(1)
for it in variables[varname]:
action(it)
elif action:
action(line)
return proto_libraries
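# Illustrative sketch (an assumption for clarity, not part of the original recipe):
# one way the parser above could be driven to emit CMake targets. The BUILD path and
# source folder below are placeholders only.
#
#   libs = METHOD_NAME("src/proto/BUILD", source_folder=".", error=print)
#   known = {lib.cmake_target for lib in libs} | {"protobuf::libprotobuf", "googleapis::googleapis"}
#   for lib in libs:
#       lib.validate(".", known)
#       print(lib.cmake_content)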
|
3,860 |
split generators
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""COTE: Chinese Opinion Target Extraction."""
import csv
import os
import datasets
logger = datasets.logging.get_logger(__name__)
_CITATION = """\
@inproceedings{li2018character,
title={Character-based bilstm-crf incorporating pos and dictionaries for chinese opinion target extraction},
author={Li, Yanzeng and Liu, Tingwen and Li, Diying and Li, Quangang and Shi, Jinqiao and Wang, Yanqiu},
booktitle={Asian Conference on Machine Learning},
pages={518--533},
year={2018},
organization={PMLR}
}
"""
_DESCRIPTION = """\
COTE, a dataset for Opinion Target Extraction (OTE) in sentiment analysis, which aims to extract the opinion target from a given text. This dataset covers data crawled from Baidu, Dianping, and Mafengwo.
For more information, refer to https://www.luge.ai/#/luge/dataDetail?id=19.
"""
_COTE_URLs = {
# pylint: disable=line-too-long
"bd": "https://paddlenlp.bj.bcebos.com/datasets/COTE-BD.zip",
"mfw": "https://paddlenlp.bj.bcebos.com/datasets/COTE-MFW.zip",
"dp": "https://paddlenlp.bj.bcebos.com/datasets/COTE-DP.zip",
# pylint: enable=line-too-long
}
class COTEConfig(datasets.BuilderConfig):
"""BuilderConfig for COTE."""
def __init__(self, data_url=None, data_dir=None, **kwargs):
"""BuilderConfig for COTE.
Args:
data_url: `string`, url to download the zip file.
data_dir: `string`, the path to the folder containing the tsv files in the downloaded zip.
**kwargs: keyword arguments forwarded to super.
"""
super(COTEConfig, self).__init__(**kwargs)
self.data_url = data_url
self.data_dir = data_dir
class COTE(datasets.GeneratorBasedBuilder):
"""COTE: Chinese Opinion Target Extraction."""
BUILDER_CONFIGS = [
COTEConfig(
name="bd",
data_url=_COTE_URLs["bd"],
data_dir="COTE-BD",
version=datasets.Version("1.0.0", ""),
description="COTE-BD crawled on baidu.",
),
COTEConfig(
name="mfw",
data_url=_COTE_URLs["mfw"],
data_dir="COTE-MFW",
version=datasets.Version("1.0.0", ""),
description="COTE-MFW crawled on Mafengwo.",
),
COTEConfig(
name="dp",
data_url=_COTE_URLs["dp"],
data_dir="COTE-DP",
version=datasets.Version("1.0.0", ""),
description="COTE-DP crawled on Dianping.",
),
]
def _info(self):
features = {
"id": datasets.Value("int32"),
"text_a": datasets.Value("string"),
"label": datasets.Value("string"),
}
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(features),
homepage="https://www.luge.ai/#/luge/dataDetail?id=19",
citation=_CITATION,
)
def METHOD_NAME(self, dl_manager):
downloaded_dir = dl_manager.download_and_extract(self.config.data_url)
data_dir = os.path.join(downloaded_dir, self.config.data_dir)
train_split = datasets.SplitGenerator(
name=datasets.Split.TRAIN, gen_kwargs={"filepath": os.path.join(data_dir, "train.tsv"), "split": "train"}
)
test_split = datasets.SplitGenerator(
name=datasets.Split.TEST, gen_kwargs={"filepath": os.path.join(data_dir, "test.tsv"), "split": "test"}
)
return [train_split, test_split]
def _generate_examples(self, filepath, split):
"""This function returns the examples in the raw (text) form."""
logger.info("generating examples from = %s", filepath)
with open(filepath, encoding="utf8") as f:
reader = csv.DictReader(f, delimiter="\t", quoting=csv.QUOTE_NONE)
for idx, row in enumerate(reader):
example = {}
example["id"] = idx
example["text_a"] = row["text_a"]
if split == "train":
example["label"] = row["label"]
else:
example["label"] = ""
# Filter out corrupted rows.
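                # (the for/else below yields the example only when no field is None)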
for value in example.values():
if value is None:
break
else:
yield idx, example
|
3,861 |
on validation end
|
# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Timer ^^^^^"""
import logging
import time
from datetime import timedelta
from typing import Any, Dict, Optional, Union
import lightning.pytorch as pl
from lightning.pytorch.callbacks.callback import Callback
from lightning.pytorch.trainer.states import RunningStage
from lightning.pytorch.utilities import LightningEnum
from lightning.pytorch.utilities.exceptions import MisconfigurationException
from lightning.pytorch.utilities.rank_zero import rank_zero_info
log = logging.getLogger(__name__)
class Interval(LightningEnum):
step = "step"
epoch = "epoch"
class Timer(Callback):
"""The Timer callback tracks the time spent in the training, validation, and test loops and interrupts the Trainer
if the given time limit for the training loop is reached.
Args:
        duration: A string in the format DD:HH:MM:SS (days, hours, minutes, seconds), or a :class:`datetime.timedelta`,
or a dict containing key-value compatible with :class:`~datetime.timedelta`.
interval: Determines if the interruption happens on epoch level or mid-epoch.
Can be either ``"epoch"`` or ``"step"``.
verbose: Set this to ``False`` to suppress logging messages.
Raises:
MisconfigurationException:
If ``interval`` is not one of the supported choices.
Example::
from lightning.pytorch import Trainer
from lightning.pytorch.callbacks import Timer
# stop training after 12 hours
timer = Timer(duration="00:12:00:00")
# or provide a datetime.timedelta
from datetime import timedelta
timer = Timer(duration=timedelta(weeks=1))
# or provide a dictionary
timer = Timer(duration=dict(weeks=4, days=2))
# force training to stop after given time limit
trainer = Trainer(callbacks=[timer])
# query training/validation/test time (in seconds)
timer.time_elapsed("train")
timer.start_time("validate")
timer.end_time("test")
"""
def __init__(
self,
duration: Optional[Union[str, timedelta, Dict[str, int]]] = None,
interval: str = Interval.step,
verbose: bool = True,
) -> None:
super().__init__()
if isinstance(duration, str):
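            # e.g. "00:12:00:00" (12 hours) is split into [days, hours, minutes, seconds]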
dhms = duration.strip().split(":")
dhms = [int(i) for i in dhms]
duration = timedelta(days=dhms[0], hours=dhms[1], minutes=dhms[2], seconds=dhms[3])
if isinstance(duration, dict):
duration = timedelta(**duration)
if interval not in set(Interval):
raise MisconfigurationException(
f"Unsupported parameter value `Timer(interval={interval})`. Possible choices are:"
f" {', '.join(set(Interval))}"
)
self._duration = duration.total_seconds() if duration is not None else None
self._interval = interval
self._verbose = verbose
self._start_time: Dict[RunningStage, Optional[float]] = {stage: None for stage in RunningStage}
self._end_time: Dict[RunningStage, Optional[float]] = {stage: None for stage in RunningStage}
self._offset = 0
def start_time(self, stage: str = RunningStage.TRAINING) -> Optional[float]:
"""Return the start time of a particular stage (in seconds)"""
stage = RunningStage(stage)
return self._start_time[stage]
def end_time(self, stage: str = RunningStage.TRAINING) -> Optional[float]:
"""Return the end time of a particular stage (in seconds)"""
stage = RunningStage(stage)
return self._end_time[stage]
def time_elapsed(self, stage: str = RunningStage.TRAINING) -> float:
"""Return the time elapsed for a particular stage (in seconds)"""
start = self.start_time(stage)
end = self.end_time(stage)
offset = self._offset if stage == RunningStage.TRAINING else 0
if start is None:
return offset
if end is None:
return time.monotonic() - start + offset
return end - start + offset
def time_remaining(self, stage: str = RunningStage.TRAINING) -> Optional[float]:
"""Return the time remaining for a particular stage (in seconds)"""
if self._duration is not None:
return self._duration - self.time_elapsed(stage)
return None
def on_train_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._start_time[RunningStage.TRAINING] = time.monotonic()
def on_train_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._end_time[RunningStage.TRAINING] = time.monotonic()
def on_validation_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._start_time[RunningStage.VALIDATING] = time.monotonic()
def METHOD_NAME(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._end_time[RunningStage.VALIDATING] = time.monotonic()
def on_test_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._start_time[RunningStage.TESTING] = time.monotonic()
def on_test_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._end_time[RunningStage.TESTING] = time.monotonic()
def on_fit_start(self, trainer: "pl.Trainer", *args: Any, **kwargs: Any) -> None:
# this checks the time after the state is reloaded, regardless of the interval.
# this is necessary in case we load a state whose timer is already depleted
if self._duration is None:
return
self._check_time_remaining(trainer)
def on_train_batch_end(self, trainer: "pl.Trainer", *args: Any, **kwargs: Any) -> None:
if self._interval != Interval.step or self._duration is None:
return
self._check_time_remaining(trainer)
def on_train_epoch_end(self, trainer: "pl.Trainer", *args: Any, **kwargs: Any) -> None:
if self._interval != Interval.epoch or self._duration is None:
return
self._check_time_remaining(trainer)
def state_dict(self) -> Dict[str, Any]:
return {"time_elapsed": {stage.value: self.time_elapsed(stage) for stage in RunningStage}}
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
time_elapsed = state_dict.get("time_elapsed", {})
self._offset = time_elapsed.get(RunningStage.TRAINING.value, 0)
def _check_time_remaining(self, trainer: "pl.Trainer") -> None:
assert self._duration is not None
should_stop = self.time_elapsed() >= self._duration
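        # broadcast so that every rank reaches the same stopping decision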
should_stop = trainer.strategy.broadcast(should_stop)
trainer.should_stop = trainer.should_stop or should_stop
if should_stop and self._verbose:
elapsed = timedelta(seconds=int(self.time_elapsed(RunningStage.TRAINING)))
rank_zero_info(f"Time limit reached. Elapsed time is {elapsed}. Signaling Trainer to stop.")
|
3,862 |
test evaluate
|
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import os
import unittest
from os.path import join as pathjoin
import torch.distributed as dist
from monai.bundle import ConfigParser, ConfigWorkflow
from monai.fl.client.monai_algo import MonaiAlgo
from monai.fl.utils.constants import ExtraItems
from monai.fl.utils.exchange_object import ExchangeObject
from monai.networks import get_state_dict
from tests.utils import DistCall, DistTestCase, SkipIfBeforePyTorchVersion, SkipIfNoModule, skip_if_no_cuda
_root_dir = os.path.abspath(pathjoin(os.path.dirname(__file__)))
_data_dir = pathjoin(_root_dir, "testing_data")
_logging_file = pathjoin(_data_dir, "logging.conf")
@SkipIfNoModule("ignite")
@SkipIfBeforePyTorchVersion((1, 11, 1))
class TestFLMonaiAlgo(DistTestCase):
@DistCall(nnodes=1, nproc_per_node=2, init_method="no_init")
@skip_if_no_cuda
def test_train(self):
train_configs = [pathjoin(_data_dir, "config_fl_train.json"), pathjoin(_data_dir, "multi_gpu_train.json")]
eval_configs = [
pathjoin(_data_dir, "config_fl_train.json"),
pathjoin(_data_dir, "config_fl_evaluate.json"),
pathjoin(_data_dir, "multi_gpu_evaluate.json"),
]
train_workflow = ConfigWorkflow(config_file=train_configs, workflow_type="train", logging_file=_logging_file)
# simulate the case that this application has specific requirements for a bundle workflow
train_workflow.add_property(name="loader", required=True, config_id="train#training_transforms#0", desc="NA")
# initialize algo
algo = MonaiAlgo(
bundle_root=_data_dir,
train_workflow=ConfigWorkflow(config_file=train_configs, workflow_type="train", logging_file=_logging_file),
eval_workflow=ConfigWorkflow(config_file=eval_configs, workflow_type="train", logging_file=_logging_file),
config_filters_filename=pathjoin(_root_dir, "testing_data", "config_fl_filters.json"),
)
algo.initialize(extra={ExtraItems.CLIENT_NAME: "test_fl"})
self.assertTrue(dist.get_rank() in (0, 1))
# initialize model
parser = ConfigParser()
parser.read_config(train_configs)
parser.parse()
network = parser.get_parsed_content("network")
data = ExchangeObject(weights=get_state_dict(network))
# test train
for i in range(2):
print(f"Testing round {i+1} of {2}...")
# test evaluate
metric_eo = algo.evaluate(data=data, extra={})
self.assertIsInstance(metric_eo, ExchangeObject)
metric = metric_eo.metrics
self.assertIsInstance(metric["accuracy"], float)
# test train
algo.train(data=data, extra={})
weights_eo = algo.get_weights()
self.assertIsInstance(weights_eo, ExchangeObject)
self.assertTrue(weights_eo.is_valid_weights())
self.assertIsInstance(weights_eo.weights, dict)
self.assertTrue(len(weights_eo.weights) > 0)
@DistCall(nnodes=1, nproc_per_node=2, init_method="no_init")
@skip_if_no_cuda
def METHOD_NAME(self):
config_file = [
pathjoin(_data_dir, "config_fl_train.json"),
pathjoin(_data_dir, "config_fl_evaluate.json"),
pathjoin(_data_dir, "multi_gpu_evaluate.json"),
]
# initialize algo
algo = MonaiAlgo(
bundle_root=_data_dir,
config_train_filename=None,
eval_workflow=ConfigWorkflow(config_file=config_file, workflow_type="train", logging_file=_logging_file),
config_filters_filename=pathjoin(_data_dir, "config_fl_filters.json"),
)
algo.initialize(extra={ExtraItems.CLIENT_NAME: "test_fl"})
self.assertTrue(dist.get_rank() in (0, 1))
# initialize model
parser = ConfigParser()
parser.read_config(
[pathjoin(_data_dir, "config_fl_train.json"), pathjoin(_data_dir, "config_fl_evaluate.json")]
)
parser.parse()
network = parser.get_parsed_content("network")
data = ExchangeObject(weights=get_state_dict(network))
# test evaluate
metric_eo = algo.evaluate(data=data, extra={})
self.assertIsInstance(metric_eo, ExchangeObject)
metric = metric_eo.metrics
self.assertIsInstance(metric["accuracy"], float)
if __name__ == "__main__":
unittest.main()
|
3,863 |
update set
|
import json
import os
import time
from typing import Any, Dict, List, Set, Tuple
from ..util.setting import (
CompilerType,
JSON_FOLDER_BASE_DIR,
TestList,
TestPlatform,
TestStatusType,
)
from ..util.utils import (
detect_compiler_type,
print_error,
print_time,
related_to_test_list,
)
from .parser.coverage_record import CoverageRecord
from .parser.gcov_coverage_parser import GcovCoverageParser
from .parser.llvm_coverage_parser import LlvmCoverageParser
from .print_report import (
file_oriented_report,
html_oriented_report,
line_oriented_report,
)
# coverage_records: Dict[str, LineInfo] = {}
covered_lines: Dict[str, Set[int]] = {}
uncovered_lines: Dict[str, Set[int]] = {}
tests_type: TestStatusType = {"success": set(), "partial": set(), "fail": set()}
def transform_file_name(
file_path: str, interested_folders: List[str], platform: TestPlatform
) -> str:
remove_patterns: Set[str] = {".DEFAULT.cpp", ".AVX.cpp", ".AVX2.cpp"}
for pattern in remove_patterns:
file_path = file_path.replace(pattern, "")
# if user has specified interested folder
if interested_folders:
for folder in interested_folders:
if folder in file_path:
return file_path[file_path.find(folder) :]
# remove pytorch base folder path
if platform == TestPlatform.OSS:
from package.oss.utils import get_pytorch_folder # type: ignore[import]
        pytorch_folder = get_pytorch_folder()
        assert file_path.startswith(pytorch_folder)
        file_path = file_path[len(pytorch_folder) + 1 :]
return file_path
def is_intrested_file(
file_path: str, interested_folders: List[str], platform: TestPlatform
) -> bool:
ignored_patterns = ["cuda", "aten/gen_aten", "aten/aten_", "build/"]
if any(pattern in file_path for pattern in ignored_patterns):
return False
    # ignore files that do not belong to pytorch
if platform == TestPlatform.OSS:
from package.oss.utils import get_pytorch_folder
if not file_path.startswith(get_pytorch_folder()):
return False
# if user has specified interested folder
if interested_folders:
for folder in interested_folders:
            interested_folder_path = folder if folder.endswith("/") else f"{folder}/"
            if interested_folder_path in file_path:
return True
return False
else:
return True
def get_json_obj(json_file: str) -> Tuple[Any, int]:
"""
    Sometimes at the start of the file, llvm/gcov complains "fail to find coverage data",
    then we need to skip these lines
    -- success read: 0 - this json file has the full json coverage information
    -- partial success: 1 - this json file starts with some error prompt, but still has the coverage information
    -- fail to read: 2 - this json file doesn't have any coverage information
"""
read_status = -1
with open(json_file) as f:
lines = f.readlines()
for line in lines:
try:
json_obj = json.loads(line)
except json.JSONDecodeError:
read_status = 1
continue
else:
if read_status == -1:
                    # no JSONDecodeError encountered before this line, so report success
read_status = 0
return (json_obj, read_status)
return None, 2
def parse_json(json_file: str, platform: TestPlatform) -> List[CoverageRecord]:
print("start parse:", json_file)
json_obj, read_status = get_json_obj(json_file)
if read_status == 0:
tests_type["success"].add(json_file)
elif read_status == 1:
tests_type["partial"].add(json_file)
else:
tests_type["fail"].add(json_file)
raise RuntimeError(
"Fail to do code coverage! Fail to load json file: ", json_file
)
cov_type = detect_compiler_type(platform)
coverage_records: List[CoverageRecord] = []
if cov_type == CompilerType.CLANG:
coverage_records = LlvmCoverageParser(json_obj).parse("fbcode")
# print(coverage_records)
elif cov_type == CompilerType.GCC:
coverage_records = GcovCoverageParser(json_obj).parse()
return coverage_records
def parse_jsons(
test_list: TestList, interested_folders: List[str], platform: TestPlatform
) -> None:
g = os.walk(JSON_FOLDER_BASE_DIR)
for path, _, file_list in g:
for file_name in file_list:
if file_name.endswith(".json"):
# if compiler is clang, we only analyze related json / when compiler is gcc, we analyze all jsons
cov_type = detect_compiler_type(platform)
if cov_type == CompilerType.CLANG and not related_to_test_list(
file_name, test_list
):
continue
json_file = os.path.join(path, file_name)
try:
coverage_records = parse_json(json_file, platform)
except RuntimeError:
print_error("Fail to load json file: ", json_file)
continue
# collect information from each target's export file and merge them together:
update_coverage(coverage_records, interested_folders, platform)
def update_coverage(
coverage_records: List[CoverageRecord],
interested_folders: List[str],
platform: TestPlatform,
) -> None:
for item in coverage_records:
# extract information for the record
record = item.to_dict()
file_path = record["filepath"]
if not is_intrested_file(file_path, interested_folders, platform):
continue
covered_range = record["covered_lines"]
uncovered_range = record["uncovered_lines"]
# transform file name: remote/13223/caffe2/aten -> caffe2/aten
file_path = transform_file_name(file_path, interested_folders, platform)
# if file not exists, add it into dictionary
if file_path not in covered_lines:
covered_lines[file_path] = set()
if file_path not in uncovered_lines:
uncovered_lines[file_path] = set()
# update this file's covered and uncovered lines
if covered_range is not None:
covered_lines[file_path].update(covered_range)
if uncovered_range is not None:
uncovered_lines[file_path].update(uncovered_range)
def METHOD_NAME() -> None:
for file_name in covered_lines:
# difference_update
uncovered_lines[file_name].difference_update(covered_lines[file_name])
def summarize_jsons(
test_list: TestList,
interested_folders: List[str],
coverage_only: List[str],
platform: TestPlatform,
) -> None:
start_time = time.time()
if detect_compiler_type(platform) == CompilerType.GCC:
html_oriented_report()
else:
parse_jsons(test_list, interested_folders, platform)
METHOD_NAME()
line_oriented_report(
test_list,
tests_type,
interested_folders,
coverage_only,
covered_lines,
uncovered_lines,
)
file_oriented_report(
test_list,
tests_type,
interested_folders,
coverage_only,
covered_lines,
uncovered_lines,
)
print_time("summary jsons take time: ", start_time)
|
3,864 |
main
|
"""Sopel Config Command Line Interface (CLI): ``sopel-config``"""
from __future__ import annotations
import argparse
import os
from . import utils
def build_parser():
"""Build and configure an argument parser for ``sopel-config``.
:return: the argument parser
:rtype: :class:`argparse.ArgumentParser`
"""
parser = argparse.ArgumentParser(
description='Sopel configuration tool')
# Subparser: sopel-config <sub-parser> <sub-options>
subparsers = parser.add_subparsers(
help='Actions to perform',
dest='action')
# sopel-config list
list_parser = subparsers.add_parser(
'list',
help="List available configurations from Sopel's config directory",
description="""
List available configurations from Sopel's config directory
with the extension "{ext}". Use option ``--config-dir`` to use a
specific config directory.
""".format(ext='.cfg'))
utils.add_common_arguments(list_parser)
list_parser.add_argument(
'-e', '--ext', '--extension',
dest='extension',
default='.cfg',
        help='Filter by extension (defaults to "%(default)s")')
list_parser.add_argument(
'-p', '--path',
action='store_true',
dest='display_path',
default=False,
help='Display a list of absolute filenames instead of their names')
# sopel-config init
init_parser = subparsers.add_parser(
'init',
help='Initialize Sopel configuration file',
description='Initialize Sopel configuration file')
utils.add_common_arguments(init_parser)
# sopel-config get <section> <key>
get_parser = subparsers.add_parser(
'get',
help="Get a configuration option's value",
description="Get a configuration option's value",
)
get_parser.add_argument(
'section',
help='The name of the section to look in',
)
get_parser.add_argument(
'option',
help='The name of the option to retrieve',
)
utils.add_common_arguments(get_parser)
return parser
def handle_list(options):
"""Display a list of configurations available in Sopel's config directory.
:param options: parsed arguments
:type options: :class:`argparse.Namespace`
:return: 0 if everything went fine
This command displays an unordered list of config names from Sopel's
config directory, without their extensions::
$ sopel-config list
default
custom
By default, the config directory is ``~/.sopel``. To select a different
    config directory, the ``--config-dir`` option can be used.
It is possible to filter by extension using the
``-e``/``--ext``/``--extension`` option; default is ``.cfg``
(the ``.`` prefix is not required).
"""
configdir = options.configdir
display_path = options.display_path
extension = options.extension
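    # the leading "." may be omitted on the command line; normalise it here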
if not extension.startswith('.'):
extension = '.' + extension
configs = utils.enumerate_configs(configdir, extension)
found = False
for config_filename in configs:
found = True
if display_path:
print(os.path.join(configdir, config_filename))
else:
name, _ = os.path.splitext(config_filename)
print(name)
if not found:
utils.stderr('No config file found at this location: %s' % configdir)
utils.stderr('Use `sopel-config init` to create a new config file.')
return 0 # successful operation
def handle_init(options):
"""Use config wizard to initialize a new configuration file for the bot.
:param options: parsed arguments
:type options: :class:`argparse.Namespace`
:return: 0 if everything went fine;
1 if the file is invalid or if it already exists
.. note::
Due to how the config wizard works, the configuration filename's
extension **must be** ``.cfg``.
"""
config_filename = utils.find_config(options.configdir, options.config)
config_name, ext = os.path.splitext(config_filename)
if ext and ext != '.cfg':
utils.stderr('Configuration wizard accepts .cfg files only')
return 1
elif not ext:
config_filename = config_name + '.cfg'
if os.path.isfile(config_filename):
utils.stderr('Configuration file %s already exists' % config_filename)
return 1
print('Starting Sopel config wizard for: %s' % config_filename)
try:
utils.wizard(config_name)
except KeyboardInterrupt:
utils.stderr('\nOperation cancelled; no file has been created.')
return 1 # cancelled operation
return 0 # successful operation
def handle_get(options):
"""Read the settings to display the value of ``<section> <key>``.
:param options: parsed arguments
:type options: :class:`argparse.Namespace`
:return: 0 if everything went fine;
1 if the section and/or key does not exist;
2 if the settings can't be loaded
"""
try:
settings = utils.load_settings(options)
except Exception as error:
utils.stderr(error)
return 2
section = options.section
option = options.option
# Making sure the section.option exists
if not settings.parser.has_section(section):
utils.stderr('Section "%s" does not exist' % section)
return 1
if not settings.parser.has_option(section, option):
utils.stderr(
'Section "%s" does not have a "%s" option' % (section, option))
return 1
# Display the value
print(settings.get(section, option))
return 0 # successful operation
def METHOD_NAME():
"""Console entry point for ``sopel-config``."""
parser = build_parser()
options = parser.parse_args()
action = options.action
if not action:
parser.print_help()
return
try:
# init command does not require existing settings
if action == 'list':
return handle_list(options)
elif action == 'init':
return handle_init(options)
elif action == 'get':
return handle_get(options)
except KeyboardInterrupt:
# ctrl+c was used, nothing to report here
pass
|
3,865 |
test factory
|
##########################################################################
#
# Copyright (c) 2022, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import IECore
import GafferTest
import GafferScene
class PlaceholderTest( GafferTest.TestCase ) :
def METHOD_NAME( self ) :
o = IECore.Object.create( "IECoreScenePreview::Placeholder" )
self.assertIsInstance( o, GafferScene.Private.IECoreScenePreview.Placeholder )
self.assertEqual( o.getBound(), imath.Box3f() )
self.assertEqual( o.getMode(), GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Default )
def testBound( self ) :
o = GafferScene.Private.IECoreScenePreview.Placeholder(
imath.Box3f( imath.V3f( 1, 2, 3 ), imath.V3f( 4, 5, 6 ) )
)
self.assertEqual(
o.getBound(),
imath.Box3f( imath.V3f( 1, 2, 3 ), imath.V3f( 4, 5, 6 ) )
)
o.setBound( imath.Box3f( imath.V3f( 2, 3, 4 ), imath.V3f( 5, 6, 7 ) ) )
self.assertEqual(
o.getBound(),
imath.Box3f( imath.V3f( 2, 3, 4 ), imath.V3f( 5, 6, 7 ) )
)
def testMode( self ) :
o = GafferScene.Private.IECoreScenePreview.Placeholder()
self.assertEqual(
o.getMode(),
GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Default
)
o.setMode( GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Excluded )
self.assertEqual(
o.getMode(),
GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Excluded
)
o2 = GafferScene.Private.IECoreScenePreview.Placeholder( mode = GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Excluded )
self.assertEqual(
o2.getMode(),
GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Excluded
)
def testCopy( self ) :
o = GafferScene.Private.IECoreScenePreview.Placeholder(
imath.Box3f( imath.V3f( 1, 2, 3 ), imath.V3f( 4, 5, 6 ) ),
GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Excluded,
)
o2 = o.copy()
self.assertEqual( o.getBound(), o2.getBound() )
self.assertEqual( o.getMode(), o2.getMode() )
self.assertEqual( o, o2 )
o2.setBound( imath.Box3f( imath.V3f( 2, 3, 4 ), imath.V3f( 5, 6, 7 ) ) )
self.assertNotEqual( o.getBound(), o2.getBound() )
o2.setMode( GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Default )
self.assertNotEqual( o.getMode(), o2.getMode() )
self.assertNotEqual( o, o2 )
def testSerialisation( self ) :
o = GafferScene.Private.IECoreScenePreview.Placeholder(
imath.Box3f( imath.V3f( 1, 2, 3 ), imath.V3f( 4, 5, 6 ) ),
GafferScene.Private.IECoreScenePreview.Placeholder.Mode.Excluded
)
m = IECore.MemoryIndexedIO( IECore.CharVectorData(), [], IECore.IndexedIO.OpenMode.Write )
o.save( m, "o" )
m2 = IECore.MemoryIndexedIO( m.buffer(), [], IECore.IndexedIO.OpenMode.Read )
o2 = IECore.Object.load( m2, "o" )
self.assertEqual( o.getBound(), o2.getBound() )
self.assertEqual( o.getMode(), o2.getMode() )
self.assertEqual( o, o2 )
if __name__ == "__main__":
unittest.main()
|
3,866 |
gp params scaling
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import math
from typing import Dict, Tuple
from gluonts.mx import Tensor
from gluonts.mx.distribution.distribution import getF, softplus
from . import Kernel, KernelOutputDict
class PeriodicKernel(Kernel):
r"""
Computes a covariance matrix based on the Periodic kernel
between inputs :math:`\mathbf{x_1}` and :math:`\mathbf{x_2}`:
:math:`k_{\text{Per}}(\mathbf{x_1}, \mathbf{x_2}) = \theta_0 \exp \left
(\frac{-2\sin^2(\theta_2 \pi \|\mathbf{x_1} - \mathbf{x_2}\|)}
{\theta_1^2} \right)`,
where :math:`\theta_0` is the amplitude parameter,
:math:`\theta_1` is the length scale parameter and
:math:`\theta_2` is the frequency parameter.
"""
def __init__(
self,
amplitude: Tensor,
length_scale: Tensor,
frequency: Tensor,
F=None,
) -> None:
"""
Parameters
----------
amplitude : Tensor
Periodic kernel amplitude hyper-parameter of shape
(batch_size, 1, 1).
length_scale : Tensor
            Periodic kernel length scale hyper-parameter of shape
(batch_size, 1, 1).
frequency : Tensor
Periodic kernel hyper-parameter of shape (batch_size, 1, 1).
F : ModuleType
A module that can either refer to the Symbol API or the NDArray
API in MXNet.
"""
self.F = F if F else getF(amplitude)
self.amplitude = amplitude
self.length_scale = length_scale
self.frequency = frequency
def kernel_matrix(self, x1: Tensor, x2: Tensor) -> Tensor:
"""
Parameters
--------------------
x1 : Tensor
Feature data of shape (batch_size, history_length, num_features).
x2 : Tensor
Feature data of shape (batch_size, history_length, num_features).
Returns
--------------------
Tensor
Periodic kernel matrix of shape (batch_size, history_length,
history_length).
"""
self._compute_square_dist(self.F, x1, x2)
return self.F.broadcast_mul(
self.amplitude,
self.F.exp(
self.F.broadcast_div(
-2
* self.F.sin(
self.F.broadcast_mul(
self.frequency,
math.pi
* self.F.sqrt(self.F.abs(self.square_dist)),
)
)
** 2,
self.length_scale**2,
)
),
)
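# Worked example (illustrative note, not from the original module): for identical
# inputs x1 == x2 the squared distance is zero, so sin(0) = 0, the exponent vanishes
# and k(x, x) reduces to the amplitude theta_0. Off-diagonal entries decay with the
# squared sine of (frequency * pi * |x1 - x2|) divided by the squared length scale,
# exactly as in the formula quoted in the PeriodicKernel class docstring above.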
class PeriodicKernelOutput(KernelOutputDict):
args_dim: Dict[str, int] = {
"amplitude": 1,
"length_scale": 1,
"frequency": 1,
}
kernel_cls: type = PeriodicKernel
def METHOD_NAME(
self, F, past_target: Tensor, past_time_feat: Tensor
) -> Tuple[Tensor, Tensor, Tensor, Tensor]:
"""
This function returns the scales for the GP Periodic Kernel hyper-
parameters by using the standard deviations of the past_target and
past_time_features.
Parameters
----------
F : ModuleType
A module that can either refer to the Symbol API or the NDArray
API in MXNet.
past_target : Tensor
Training time series values of shape (batch_size, context_length).
past_time_feat : Tensor
Training features of shape (batch_size, context_length,
num_features).
Returns
-------
Tuple
Three scaled GP hyper-parameters for the Periodic Kernel and scaled
model noise hyper-parameter. Each is a Tensor of shape
(batch_size, 1, 1).
"""
axis = 1
sigma_scaling = (
self.compute_std(F, past_target, axis=axis) / math.sqrt(2)
).expand_dims(axis=axis)
amplitude_scaling = sigma_scaling**2
length_scale_scaling = F.broadcast_mul(
F.mean(self.compute_std(F, past_time_feat, axis=axis)),
F.ones_like(amplitude_scaling),
)
# TODO: Define scaling for the frequency
frequency_scaling = F.ones_like(amplitude_scaling)
return (
amplitude_scaling,
length_scale_scaling,
frequency_scaling,
sigma_scaling,
)
@classmethod
def domain_map(cls, F, amplitude, length_scale, frequency):
r"""
        This function applies the softplus to the Periodic Kernel
hyper-parameters.
Parameters
----------
F
A module that can either refer to the Symbol API or the NDArray
API in MXNet.
amplitude
Periodic kernel amplitude hyper-parameter of shape
(batch_size, 1, 1).
length_scale
            Periodic kernel length scale hyper-parameter of shape
(batch_size, 1, 1).
frequency
Periodic kernel hyper-parameter of shape (batch_size, 1, 1).
Returns
-------
Tuple[Tensor, Tensor, Tensor]
Three GP Periodic kernel hyper-parameters.
Each is a Tensor of shape: (batch_size, 1, 1).
"""
amplitude = softplus(F, amplitude)
length_scale = softplus(F, length_scale)
frequency = softplus(F, frequency)
return amplitude, length_scale, frequency
|
3,867 |
on 200
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"vm list-sizes",
)
class ListSizes(AAZCommand):
"""List available sizes for VMs.
:example: List the available VM sizes in the West US region.
az vm list-sizes -l westus
"""
_aaz_info = {
"version": "2020-06-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/providers/microsoft.compute/locations/{}/vmsizes", "2020-06-01"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
self._execute_operations()
return self._output()
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.location = AAZResourceLocationArg(
required=True,
id_part="name",
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.VirtualMachineSizesList(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True)
return result
class VirtualMachineSizesList(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.METHOD_NAME(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/vmSizes",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"location", self.ctx.args.location,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2020-06-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def METHOD_NAME(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.value = AAZListType()
value = cls._schema_on_200.value
value.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element
_element.max_data_disk_count = AAZIntType(
serialized_name="maxDataDiskCount",
)
_element.memory_in_mb = AAZIntType(
serialized_name="memoryInMB",
)
_element.name = AAZStrType()
_element.number_of_cores = AAZIntType(
serialized_name="numberOfCores",
)
_element.os_disk_size_in_mb = AAZIntType(
serialized_name="osDiskSizeInMB",
)
_element.resource_disk_size_in_mb = AAZIntType(
serialized_name="resourceDiskSizeInMB",
)
return cls._schema_on_200
class _ListSizesHelper:
"""Helper class for ListSizes"""
__all__ = ["ListSizes"]
|
3,868 |
null prefix op
|
#!/usr/bin/env python
from __future__ import print_function
"""
arith_parse.py: Parse shell-like and C-like arithmetic.
"""
import sys
from asdl import tdop
from asdl.tdop import CompositeNode
from asdl import arith_ast
op_id = arith_ast.op_id_e # TODO: Rename this back.
#
# Null Denotation -- token that takes nothing on the left
#
def NullConstant(p, token, bp):
if token.type == 'number':
return arith_ast.Const(token.val)
# We have to wrap a string in some kind of variant.
if token.type == 'name':
return arith_ast.ArithVar(token.val)
raise AssertionError(token.type)
def NullParen(p, token, bp):
""" Arithmetic grouping """
r = p.ParseUntil(bp)
p.Eat(')')
return r
def METHOD_NAME(p, token, bp):
"""Prefix operator.
Low precedence: return, raise, etc.
return x+y is return (x+y), not (return x) + y
High precedence: logical negation, bitwise complement, etc.
!x && y is (!x) && y, not !(x && y)
"""
r = p.ParseUntil(bp)
return CompositeNode(token, [r])
def NullIncDec(p, token, bp):
""" ++x or ++x[1] """
right = p.ParseUntil(bp)
if right.token.type not in ('name', 'get'):
raise tdop.ParseError("Can't assign to %r (%s)" % (right, right.token))
return CompositeNode(token, [right])
#
# Left Denotation -- token that takes an expression on the left
#
def LeftIncDec(p, token, left, rbp):
""" For i++ and i--
"""
if left.token.type not in ('name', 'get'):
raise tdop.ParseError("Can't assign to %r (%s)" % (left, left.token))
token.type = 'post' + token.type
return CompositeNode(token, [left])
def LeftIndex(p, token, left, unused_bp):
""" index f[x+1] """
# f[x] or f[x][y]
if not isinstance(left, arith_ast.ArithVar):
raise tdop.ParseError("%s can't be indexed" % left)
index = p.ParseUntil(0)
if p.AtToken(':'):
p.Next()
end = p.ParseUntil(0)
else:
end = None
p.Eat(']')
# TODO: If you see ], then
# 1:4
# 1:4:2
# Both end and step are optional
if end:
return arith_ast.Slice(left, index, end, None)
else:
return arith_ast.Index(left, index)
def LeftTernary(p, token, left, bp):
""" e.g. a > 1 ? x : y """
true_expr = p.ParseUntil(bp)
p.Eat(':')
false_expr = p.ParseUntil(bp)
children = [left, true_expr, false_expr]
return CompositeNode(token, children)
def LeftBinaryOp(p, token, left, rbp):
""" Normal binary operator like 1+2 or 2*3, etc. """
if token.val == '+':
op_id_ = op_id.Plus
elif token.val == '-':
op_id_ = op_id.Minus
elif token.val == '*':
op_id_ = op_id.Star
else:
raise AssertionError(token.val)
return arith_ast.ArithBinary(op_id_, left, p.ParseUntil(rbp))
def LeftAssign(p, token, left, rbp):
""" Normal binary operator like 1+2 or 2*3, etc. """
# x += 1, or a[i] += 1
if left.token.type not in ('name', 'get'):
raise tdop.ParseError("Can't assign to %r (%s)" % (left, left.token))
return CompositeNode(token, [left, p.ParseUntil(rbp)])
def LeftComma(p, token, left, rbp):
""" foo, bar, baz
Could be sequencing operator, or tuple without parens
"""
r = p.ParseUntil(rbp)
if left.token.type == ',': # Keep adding more children
left.children.append(r)
return left
children = [left, r]
return CompositeNode(token, children)
# For overloading of , inside function calls
COMMA_PREC = 1
def LeftFuncCall(p, token, left, unused_bp):
""" Function call f(a, b). """
args = []
# f(x) or f[i](x)
if not isinstance(left, arith_ast.ArithVar):
raise tdop.ParseError("%s can't be called" % left)
func_name = left.name # get a string
while not p.AtToken(')'):
    # We don't want to grab the comma, e.g. it is NOT a sequence operator. So
    # use COMMA_PREC so that ParseUntil stops before the comma.
args.append(p.ParseUntil(COMMA_PREC))
if p.AtToken(','):
p.Next()
p.Eat(")")
return arith_ast.FuncCall(func_name, args)
def MakeShellParserSpec():
"""
Create a parser.
Compare the code below with this table of C operator precedence:
http://en.cppreference.com/w/c/language/operator_precedence
"""
spec = tdop.ParserSpec()
spec.Left(31, LeftIncDec, ['++', '--'])
spec.Left(31, LeftFuncCall, ['('])
spec.Left(31, LeftIndex, ['['])
# 29 -- binds to everything except function call, indexing, postfix ops
spec.Null(29, NullIncDec, ['++', '--'])
spec.Null(29, METHOD_NAME, ['+', '!', '~', '-'])
# Right associative: 2 ** 3 ** 2 == 2 ** (3 ** 2)
spec.LeftRightAssoc(27, LeftBinaryOp, ['**'])
spec.Left(25, LeftBinaryOp, ['*', '/', '%'])
spec.Left(23, LeftBinaryOp, ['+', '-'])
spec.Left(21, LeftBinaryOp, ['<<', '>>'])
spec.Left(19, LeftBinaryOp, ['<', '>', '<=', '>='])
spec.Left(17, LeftBinaryOp, ['!=', '=='])
spec.Left(15, LeftBinaryOp, ['&'])
spec.Left(13, LeftBinaryOp, ['^'])
spec.Left(11, LeftBinaryOp, ['|'])
spec.Left(9, LeftBinaryOp, ['&&'])
spec.Left(7, LeftBinaryOp, ['||'])
spec.LeftRightAssoc(5, LeftTernary, ['?'])
# Right associative: a = b = 2 is a = (b = 2)
spec.LeftRightAssoc(3, LeftAssign, [
'=',
'+=', '-=', '*=', '/=', '%=',
'<<=', '>>=', '&=', '^=', '|='])
spec.Left(COMMA_PREC, LeftComma, [','])
# 0 precedence -- doesn't bind until )
spec.Null(0, NullParen, ['(']) # for grouping
# -1 precedence -- never used
spec.Null(-1, NullConstant, ['name', 'number'])
spec.Null(-1, tdop.NullError, [')', ']', ':', 'eof'])
return spec
def MakeParser(s):
"""Used by tests."""
spec = MakeShellParserSpec()
lexer = tdop.Tokenize(s)
p = tdop.Parser(spec, lexer)
return p
def ParseShell(s, expected=None):
"""Used by tests."""
p = MakeParser(s)
tree = p.Parse()
sexpr = repr(tree)
if expected is not None:
assert sexpr == expected, '%r != %r' % (sexpr, expected)
#print('%-40s %s' % (s, sexpr))
return tree
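# Usage sketch (an assumption, mirroring how the tests call ParseShell): precedence
# comes from the spec above, for example
#   ParseShell('1 + 2 * 3')   # '*' (prec 25) binds tighter than '+' (prec 23)
#   ParseShell('a = b = 2')   # '=' is right-associative, so this parses as a = (b = 2)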
def main(argv):
try:
s = argv[1]
except IndexError:
print('Usage: ./arith_parse.py EXPRESSION')
else:
try:
tree = ParseShell(s)
except tdop.ParseError as e:
print('Error parsing %r: %s' % (s, e), file=sys.stderr)
if __name__ == '__main__':
main(sys.argv)
|
3,869 |
test vswitch create invalid controller
|
# Copyright Contributors to the Feilong Project.
# SPDX-License-Identifier: Apache-2.0
# Copyright 2017,2018 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import jwt
import mock
import unittest
from zvmsdk import exception
from zvmsdk import config
from zvmsdk.sdkwsgi.handlers import vswitch
from zvmsdk.sdkwsgi import util
CONF = config.CONF
FAKE_UUID = '00000000-0000-0000-0000-000000000000'
def set_conf(section, opt, value):
CONF[section][opt] = value
class FakeResp(object):
def __init__(self):
self.body = {}
class FakeReq(object):
def __init__(self):
self.headers = {}
self.environ = {}
self.__name__ = ''
self.response = FakeResp()
def __getitem__(self, name):
return self.headers
class HandlersGuestTest(unittest.TestCase):
def setUp(self):
set_conf('wsgi', 'auth', 'none')
expired_elapse = datetime.timedelta(seconds=100)
expired_time = datetime.datetime.utcnow() + expired_elapse
payload = jwt.encode({'exp': expired_time}, 'username')
self.req = FakeReq()
self.req.headers['X-Auth-Token'] = payload
@mock.patch.object(vswitch.VswitchAction, 'list')
def test_vswitch_list(self, mock_list):
mock_list.return_value = ''
vswitch.vswitch_list(self.req)
self.assertTrue(mock_list.called)
@mock.patch.object(vswitch.VswitchAction, 'create')
def test_vswitch_create(self, mock_create):
mock_create.return_value = {}
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234 abcd 123F",
"port_type": 1,
"controller": "*"}}"""
self.req.body = body_str
vswitch.vswitch_create(self.req)
body = util.extract_json(body_str)
mock_create.assert_called_once_with(body=body)
@mock.patch.object(vswitch.VswitchAction, 'create')
def test_vswitch_create_with_userid_controller(self, mock_create):
mock_create.return_value = {}
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234 abcd 123F",
"port_type": 1,
"controller": "userid01"}}"""
self.req.body = body_str
vswitch.vswitch_create(self.req)
body = util.extract_json(body_str)
mock_create.assert_called_once_with(body=body)
def test_vswitch_create_invalidname(self):
body_str = '{"vswitch": {"name": "", "rdev": "1234"}}'
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_rdevlist(self):
body_str = '{"vswitch": {"name": "name1", "rdev": "12345 sss"}}'
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
@mock.patch.object(util, 'wsgi_path_item')
@mock.patch.object(vswitch.VswitchAction, 'delete')
def test_vswitch_delete(self, mock_delete, mock_name):
mock_delete.return_value = {}
mock_name.return_value = 'vsw1'
vswitch.vswitch_delete(self.req)
mock_delete.assert_called_once_with('vsw1')
@mock.patch.object(util, 'wsgi_path_item')
@mock.patch.object(vswitch.VswitchAction, 'query')
def test_vswitch_query(self, mock_query, mock_name):
mock_query.return_value = {}
mock_name.return_value = 'vsw1'
vswitch.vswitch_query(self.req)
mock_query.assert_called_once_with('vsw1')
def test_vswitch_create_invalid_connection(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"connection": 3}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_queue_mem(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"queue_mem": 10}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_network_type(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"network_type": 3}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_update(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"update": 4}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_vid(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"vid": -1}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_native_vid(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"native_vid": 4096}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_router(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"router": 3}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def test_vswitch_create_invalid_grvp(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"gvrp": 3}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
def METHOD_NAME(self):
body_str = """{"vswitch": {"name": "name1",
"rdev": "1234",
"controller": "node12345"}}"""
self.req.body = body_str
self.assertRaises(exception.ValidationError, vswitch.vswitch_create,
self.req)
@mock.patch.object(util, 'wsgi_path_item')
@mock.patch.object(vswitch.VswitchAction, 'update')
def test_vswitch_update(self, mock_update, mock_name):
mock_name.return_value = 'vsw1'
body_str = '{"vswitch": {"grant_userid": "user1"}}'
mock_update.return_value = {}
self.req.body = body_str
vswitch.vswitch_update(self.req)
body = util.extract_json(body_str)
mock_update.assert_called_once_with('vsw1', body=body)
|
3,870 |
set logo static mono
|
# SPDX-License-Identifier: GPL-2.0-or-later
from openrazer_daemon.dbus_services import endpoint
def set_led_effect_common(self, zone: str, effect: str) -> None:
driver_path = self.get_driver_path(zone + '_led_effect')
with open(driver_path, 'w') as driver_file:
driver_file.write(effect)
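# Values written to the '<zone>_led_effect' sysfs node: the callers below use '0' for static and '2' for pulsate.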
@endpoint('razer.device.lighting.backlight', 'getBacklightActive', out_sig='b')
def get_backlight_active(self):
"""
Get if the backlight is lit up
"""
self.logger.debug("DBus call get_backlight_active")
return self.zone["backlight"]["active"]
@endpoint('razer.device.lighting.backlight', 'setBacklightActive', in_sig='b')
def set_backlight_active(self, active):
"""
        Set if the backlight is lit up
"""
self.logger.debug("DBus call set_backlight_active")
# remember status
self.set_persistence("backlight", "active", bool(active))
driver_path = self.get_driver_path('backlight_led_state')
with open(driver_path, 'w') as driver_file:
driver_file.write('1' if active else '0')
@endpoint('razer.device.lighting.backlight', 'getBacklightBrightness', out_sig='d')
def get_backlight_brightness(self):
"""
Get the device's brightness
"""
self.logger.debug("DBus call get_backlight_brightness")
return self.zone["backlight"]["brightness"]
@endpoint('razer.device.lighting.backlight', 'setBacklightBrightness', in_sig='d')
def set_backlight_brightness(self, brightness):
"""
Set the device's brightness
"""
self.logger.debug("DBus call set_backlight_brightness")
driver_path = self.get_driver_path('backlight_led_brightness')
self.method_args['brightness'] = brightness
if brightness > 100:
brightness = 100
elif brightness < 0:
brightness = 0
self.set_persistence("backlight", "brightness", int(brightness))
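        # Scale the user-facing 0-100 brightness to the 0-255 range written to the driver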
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.logo', 'getLogoActive', out_sig='b')
def get_logo_active(self):
"""
Get if the logo is lit up
"""
self.logger.debug("DBus call get_logo_active")
return self.zone["logo"]["active"]
@endpoint('razer.device.lighting.logo', 'setLogoActive', in_sig='b')
def set_logo_active(self, active):
"""
Set if the logo is lit up
"""
self.logger.debug("DBus call set_logo_active")
# remember status
self.set_persistence("logo", "active", bool(active))
driver_path = self.get_driver_path('logo_led_state')
with open(driver_path, 'w') as driver_file:
driver_file.write('1' if active else '0')
@endpoint('razer.device.lighting.logo', 'getLogoBrightness', out_sig='d')
def get_logo_brightness(self):
"""
Get the device's brightness
"""
self.logger.debug("DBus call get_logo_brightness")
return self.zone["logo"]["brightness"]
@endpoint('razer.device.lighting.logo', 'setLogoBrightness', in_sig='d')
def set_logo_brightness(self, brightness):
"""
Set the device's brightness
"""
self.logger.debug("DBus call set_logo_brightness")
driver_path = self.get_driver_path('logo_led_brightness')
self.method_args['brightness'] = brightness
if brightness > 100:
brightness = 100
elif brightness < 0:
brightness = 0
self.set_persistence("logo", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.logo', 'setLogoStaticMono')
def METHOD_NAME(self):
"""
Set the device to static colour
"""
self.logger.debug("DBus call set_logo_static_mono")
# Notify others
self.send_effect_event('setStatic')
set_led_effect_common(self, 'logo', '0')
@endpoint('razer.device.lighting.logo', 'setLogoPulsateMono')
def set_logo_pulsate_mono(self):
"""
Set the device to pulsate
"""
self.logger.debug("DBus call set_logo_pulsate_mono")
# Notify others
self.send_effect_event('setPulsate')
set_led_effect_common(self, 'logo', '2')
@endpoint('razer.device.lighting.scroll', 'getScrollActive', out_sig='b')
def get_scroll_active(self):
"""
        Get if the scroll wheel is lit up
"""
self.logger.debug("DBus call get_scroll_active")
return self.zone["scroll"]["active"]
@endpoint('razer.device.lighting.scroll', 'setScrollActive', in_sig='b')
def set_scroll_active(self, active):
"""
        Set if the scroll wheel is lit up
"""
self.logger.debug("DBus call set_scroll_active")
# remember status
self.set_persistence("scroll", "active", bool(active))
driver_path = self.get_driver_path('scroll_led_state')
with open(driver_path, 'w') as driver_file:
driver_file.write('1' if active else '0')
@endpoint('razer.device.lighting.scroll', 'getScrollBrightness', out_sig='d')
def get_scroll_brightness(self):
"""
Get the device's brightness
"""
self.logger.debug("DBus call get_scroll_brightness")
return self.zone["scroll"]["brightness"]
@endpoint('razer.device.lighting.scroll', 'setScrollBrightness', in_sig='d')
def set_scroll_brightness(self, brightness):
"""
Set the device's brightness
"""
self.logger.debug("DBus call set_scroll_brightness")
driver_path = self.get_driver_path('scroll_led_brightness')
self.method_args['brightness'] = brightness
if brightness > 100:
brightness = 100
elif brightness < 0:
brightness = 0
self.set_persistence("scroll", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.scroll', 'setScrollStaticMono')
def set_scroll_static_mono(self):
"""
Set the device to static colour
"""
self.logger.debug("DBus call set_scroll_static_mono")
# Notify others
self.send_effect_event('setStatic')
set_led_effect_common(self, 'scroll', '0')
@endpoint('razer.device.lighting.scroll', 'setScrollPulsateMono')
def set_scroll_pulsate_mono(self):
"""
Set the device to pulsate
"""
self.logger.debug("DBus call set_scroll_pulsate_mono")
# Notify others
self.send_effect_event('setPulsate')
set_led_effect_common(self, 'scroll', '2')
|
3,871 |
call event from name
|
# (c) Copyright IBM Corp. 2010, 2017. All Rights Reserved.
"""py.test config"""
from __future__ import print_function
import pytest
import sys
import threading
import collections
from time import sleep
from collections import deque
from circuits.core.manager import TIMEOUT
from circuits import handler, BaseComponent, Debugger, Manager
class Watcher(BaseComponent):
def init(self):
self._lock = threading.Lock()
self._events = deque()
@handler(channel="*", priority=999.9)
def _on_event(self, event, *args, **kwargs):
# print("WATCHER GOT ", event)
with self._lock:
self._events.append(event)
def clear(self):
self._events.clear()
def wait(self, name, parent=None, channel=None, timeout=6.0):
for i in range(int(timeout / TIMEOUT)):
with self._lock:
for event in self._events:
if event.name == name and event.waitingHandlers == 0:
if (channel is None) or (channel in event.channels):
if parent:
# match a parent of this event
p = event
while p:
if p == parent:
return event
p = p.parent
else:
e = event
self._events.remove(event)
return e
sleep(TIMEOUT)
else:
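            # for/else: the loop finished without returning, so no matching event arrived before the timeout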
return False
class Flag(object):
status = False
def METHOD_NAME(manager, event, event_name, *channels):
fired = False
value = None
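    # drive the waitEvent generator and fire the event on its first tick so the wait can observe it complete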
for r in manager.waitEvent(event_name):
if not fired:
fired = True
value = manager.fire(event, *channels)
sleep(0.1)
return value
def call_event(manager, event, *channels):
return METHOD_NAME(manager, event, event.name, *channels)
class WaitEvent(object):
def __init__(self, manager, name, channel=None, timeout=6.0):
if channel is None:
channel = getattr(manager, "channel", None)
self.timeout = timeout
self.manager = manager
flag = Flag()
@handler(name, channel=channel)
def on_event(self, *args, **kwargs):
flag.status = True
self.handler = self.manager.addHandler(on_event)
self.flag = flag
def wait(self):
try:
for i in range(int(self.timeout / TIMEOUT)):
if self.flag.status:
return True
sleep(TIMEOUT)
finally:
self.manager.removeHandler(self.handler)
def wait_for(obj, attr, value=True, timeout=3.0):
from circuits.core.manager import TIMEOUT
for i in range(int(timeout / TIMEOUT)):
if isinstance(value, collections.Callable):
if value(obj, attr):
return True
elif getattr(obj, attr) == value:
return True
sleep(TIMEOUT)
@pytest.fixture(scope="class")
def manager(request):
manager = Manager()
def finalizer():
manager.stop()
request.addfinalizer(finalizer)
waiter = WaitEvent(manager, "started")
manager.start()
assert waiter.wait()
if request.config.option.verbose:
verbose = True
else:
verbose = False
Debugger(events=verbose).register(manager)
return manager
@pytest.fixture(scope="class")
def watcher(request, manager):
watcher = Watcher().register(manager)
def finalizer():
waiter = WaitEvent(manager, "unregistered")
watcher.unregister()
waiter.wait()
request.addfinalizer(finalizer)
return watcher
def pytest_configure():
pytest.WaitEvent = WaitEvent
pytest.wait_for = wait_for
pytest.call_event = call_event
pytest.PLATFORM = sys.platform
pytest.PYVER = sys.version_info[:3]
pytest.METHOD_NAME = METHOD_NAME
|
3,872 |
test zero
|
#!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
import numpy as np
testdir = join(filedir(), 'nsgconstantq')
class TestNSGConstantQ(TestCase):
def initNsgconstantq(self, inputSize=2048, fmin=27, fmax=10000, binsPerOctave=24, normalize='none'):
return NSGConstantQ(inputSize=inputSize,
minFrequency=fmin,
maxFrequency=fmax,
binsPerOctave=binsPerOctave,
sampleRate=44100,
rasterize='full',
phaseMode='global',
gamma=0,
normalize=normalize,
window='hannnsgcq',
)
def testRegression(self):
input = essentia.array(np.sin(2 * np.pi * 1000 * np.arange(2048) / 44100))
# Compared against the implementation of the MATLAB CQT_toolbox_2013
expected = np.array([ 0.01764389 +8.19244758e-06j, -0.00327444 +1.78957267e-03j,
-0.00379942 +1.00535053e-02j, 0.00479218 +8.65996905e-03j,
0.00636455 -1.14715385e-03j, -0.00165716 -6.73704576e-03j,
-0.00948407 +1.39929814e-03j, -0.00132517 +9.10799044e-03j,
0.00804364 +2.70849478e-03j, 0.00373902 -8.13302867e-03j,
-0.00733613 -6.00933843e-03j, -0.00738841 +5.56821084e-03j,
0.00371405 +8.43253605e-03j, 0.00904939 -1.72925594e-03j,
0.00034281 -9.21268760e-03j, -0.00891524 -2.47832619e-03j,
-0.00459810 +8.25670810e-03j, 0.00651840 +6.09559784e-03j,
0.00661061 -5.63534139e-03j, -0.00441447 -8.19178966e-03j,
-0.00905809 +1.89702405e-03j, 0.00139695 +6.62663074e-03j,
0.00708779 -1.61311132e-03j, 0.00229181 -9.95998412e-03j,
-0.00574295 -7.79506339e-03j, -0.00166257 +5.33548630e-04j])
output = np.mean(self.initNsgconstantq(normalize='sine')(input)[0],axis=0)
self.assertAlmostEqualVector(np.abs(expected), np.abs(output), 1e-6)
def testDC(self):
# Checks the DC component of the transform
        input = essentia.array(np.ones(2**11))
        # Second output of NSGConstantQ contains the DC information needed for the inverse transform.
DCfilter = self.initNsgconstantq()(input)[1]
# Integrates the energy. DC filter should contain all the energy of the signal in this case.
DCenergy = np.sum(DCfilter)
inputEnergy = np.sum(input)
self.assertEqual(inputEnergy , DCenergy)
def testNyquist(self):
inputSize = 2**11
signalNyquist = [-1, 1] * int(inputSize / 2)
CQ, DC, Nyquist = self.initNsgconstantq(inputSize=inputSize)(signalNyquist)
# Checks that all the energy is contained in the Nyquist band
self.assertEqual(np.sum(np.abs(CQ)), 0)
self.assertEqual(np.sum(np.abs(DC)), 0)
self.assertGreater(np.sum(np.abs(Nyquist)), 0)
def METHOD_NAME(self):
inputSize = 2**11
signalZero = [0] * inputSize
output = np.abs(np.mean(self.initNsgconstantq()(signalZero)[0]))
self.assertEqual(0, output)
def testEmpty(self):
# Checks whether an empty input vector yields an exception
self.assertComputeFails(self.initNsgconstantq(), [])
def testOne(self,normalize='none'):
# Checks for a single value
self.assertComputeFails(self.initNsgconstantq(), [1])
def testInvalidParam(self):
self.assertConfigureFails(self.initNsgconstantq(), {'inputSize': -1})
self.assertConfigureFails(self.initNsgconstantq(), {'inputSize': 0})
self.assertConfigureFails(self.initNsgconstantq(), {'minFrequency': 30000})
self.assertConfigureFails(self.initNsgconstantq(), {'minFrequency': 1000,
'maxFrequency': 500})
self.assertConfigureFails(self.initNsgconstantq(), {'maxFrequency': 0})
self.assertConfigureFails(self.initNsgconstantq(), {'binsPerOctave': 0})
self.assertConfigureFails(self.initNsgconstantq(), {'sampleRate': 0})
self.assertConfigureFails(self.initNsgconstantq(), {'gamma': -1})
self.assertConfigureFails(self.initNsgconstantq(), {'minimumWindow': 1})
self.assertConfigureFails(self.initNsgconstantq(), {'windowSizeFactor': 0})
self.assertConfigureFails(self.initNsgconstantq(), {'minimumWindow': 1})
def testOddInput(self):
        # Checks that compute does not fail for odd-sized input (it formerly did).
a = np.ones(4099, dtype='float32')
NSGConstantQ()(a)
suite = allTests(TestNSGConstantQ)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
|
3,873 |
get keyring service
|
import requests
import typing_extensions
from bugwarrior import config
from bugwarrior.db import CACHE_REGION as cache
from bugwarrior.services import IssueService, Issue, ServiceClient
import logging
log = logging.getLogger(__name__)
class TaigaConfig(config.ServiceConfig):
service: typing_extensions.Literal['taiga']
base_uri: config.StrippedTrailingSlashUrl
auth_token: str
include_tasks: bool = False
label_template: str = '{{label}}'
class TaigaIssue(Issue):
SUMMARY = 'taigasummary'
URL = 'taigaurl'
FOREIGN_ID = 'taigaid'
UDAS = {
SUMMARY: {
'type': 'string',
'label': 'Taiga Summary'
},
URL: {
'type': 'string',
'label': 'Taiga URL',
},
FOREIGN_ID: {
'type': 'numeric',
'label': 'Taiga Issue ID'
},
}
UNIQUE_KEY = (URL, )
def to_taskwarrior(self):
return {
'project': self.extra['project'],
'annotations': self.extra['annotations'],
self.URL: self.extra['url'],
'priority': self.origin['default_priority'],
'tags': self.get_tags(),
self.FOREIGN_ID: self.record['ref'],
self.SUMMARY: self.record['subject'],
}
def get_tags(self):
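        # tags may be plain strings or sequences (e.g. [name, color]); keep only the first element as the tag name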
return [x if isinstance(x, str) else x[0] for x in self.record['tags']]
def get_default_description(self):
return self.build_default_description(
title=self.record['subject'],
url=self.get_processed_url(self.extra['url']),
number=self.record['ref'],
cls='issue',
)
class TaigaService(IssueService, ServiceClient):
ISSUE_CLASS = TaigaIssue
CONFIG_SCHEMA = TaigaConfig
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.auth_token = self.get_password('auth_token')
self.session = requests.session()
self.session.headers.update({
'Accept': 'application/json',
'Authorization': 'Bearer %s' % self.auth_token,
})
@staticmethod
def METHOD_NAME(config):
return f"taiga://{config.base_uri}"
def get_service_metadata(self):
return {
'url': self.config.base_uri,
'label_template': self.config.label_template,
}
def get_owner(self, issue):
# TODO
raise NotImplementedError(
"This service has not implemented support for 'only_if_assigned'.")
def _issues(self, userid, task_type, task_type_plural, task_type_short):
log.debug('Getting %s' % task_type_plural)
response = self.session.get(
self.config.base_uri + '/api/v1/' + task_type_plural,
params={'assigned_to': userid, 'status__is_closed': "false"})
tasks = response.json()
for task in tasks:
project = self.get_project(task['project'])
extra = {
'project': project['slug'],
'annotations': self.annotations(task, project, task_type, task_type_short),
'url': self.build_url(task, project, task_type_short),
}
yield self.get_issue_for_record(task, extra)
def issues(self):
url = self.config.base_uri + '/api/v1/users/me'
me = self.session.get(url)
data = me.json()
# Check for errors and bail if we failed.
if '_error_message' in data:
raise RuntimeError("{_error_type} {_error_message}".format(**data))
# Otherwise, proceed.
userid = data['id']
yield from self._issues(userid, 'userstory', 'userstories', 'us')
if self.config.include_tasks:
yield from self._issues(userid, 'task', 'tasks', 'task')
@cache.cache_on_arguments()
def get_project(self, project_id):
url = '%s/api/v1/projects/%i' % (self.config.base_uri, project_id)
return self.json_response(self.session.get(url))
def build_url(self, task, project, task_type):
return '%s/project/%s/%s/%i' % (
self.config.base_uri, project['slug'], task_type, task['ref'])
def annotations(self, task, project, task_type, task_type_short):
url = f"{self.config.base_uri}/api/v1/history/{task_type}/{task['id']}"
response = self.session.get(url)
history = response.json()
return self.build_annotations(
((
item['user']['username'],
item['comment'],
) for item in history if item['comment']),
self.build_url(task, project, task_type_short)
)
|
3,874 |
test get insights report bad id
|
"""Test the insights report endpoint."""
import json
import tarfile
from io import BytesIO
import pytest
from django.test import override_settings
from rest_framework import status
from api.common.common_report import create_filename
from api.models import DeploymentsReport
from tests.factories import DeploymentReportFactory, SystemFingerprintFactory
@pytest.mark.dbcompat
@pytest.mark.django_db
class TestInsightsReport:
"""Tests against the Insights reports function."""
def test_get_insights_report_200_exists(self, django_client):
"""Retrieve insights report."""
deployment_report = DeploymentReportFactory(
number_of_fingerprints=3,
status=DeploymentsReport.STATUS_COMPLETE,
)
url = f"/api/v1/reports/{deployment_report.id}/insights/"
# mock slice size so we can expect 2 slices on this test
with override_settings(QPC_INSIGHTS_REPORT_SLICE_SIZE=2):
response = django_client.get(url)
assert response.status_code == 200, response.json()
response_json = response.json()
self.validate_data(response_json, deployment_report)
def test_get_insights_report_tarball_200_exists(self, django_client):
"""Retrieve insights report."""
deployments_report = DeploymentReportFactory(
number_of_fingerprints=11,
status=DeploymentsReport.STATUS_COMPLETE,
)
url = f"/api/v1/reports/{deployments_report.id}/insights/?format=tar.gz"
# mock slice size so we can expect 2 slices on this test
with override_settings(QPC_INSIGHTS_REPORT_SLICE_SIZE=10):
response = django_client.get(url)
assert response.status_code == 200
# reformat tarball to match json report
with tarfile.open(fileobj=BytesIO(response.content)) as tar:
restored_files = (
json.loads(tar.extractfile(file).read()) for file in tar.getmembers()
)
# tar.getnames / getmembers follow the same order, allowing this one-liner
data = dict(zip(tar.getnames(), restored_files))
self.validate_data(data, deployments_report)
def validate_data(self, data: dict, deployment_report: DeploymentsReport):
"""Validate insights report data."""
assert create_filename("metadata", "json", deployment_report.id) in data.keys()
report_slices = {}
metadata_filename = f"report_id_{deployment_report.id}/metadata.json"
for key in data:
assert f"report_id_{deployment_report.id}/" in key
if key != metadata_filename:
report_slices[key] = data[key]
# metadata slice number_hosts matches the actual
# number of hosts in a slice
report_slices_in_metadata = data[metadata_filename]["report_slices"]
assert len(report_slices_in_metadata) == 2
total_returned_hosts_num = 0
for key_1, key_2 in zip(report_slices_in_metadata, report_slices):
assert report_slices_in_metadata[key_1]["number_hosts"] == len(
report_slices[key_2]["hosts"]
)
# used later to check for the total size
total_returned_hosts_num += len(report_slices[key_2]["hosts"])
# no hosts lost
returned_host_names = {
host["bios_uuid"]
for slice_key in report_slices
for host in report_slices[slice_key]["hosts"]
}
expected_host_names = {
host.bios_uuid for host in deployment_report.system_fingerprints.all()
}
assert returned_host_names == expected_host_names
# sum of all hosts in a slice is equal to
# the total number of host (before call)
assert total_returned_hosts_num == len(expected_host_names)
def test_get_insights_report_200_generate_exists(self, django_client):
"""Retrieve insights report."""
deployment_report = DeploymentReportFactory(
status=DeploymentsReport.STATUS_COMPLETE,
)
url = f"/api/v1/reports/{deployment_report.id}/insights/"
response = django_client.get(url)
assert response.status_code == 200
response_json = response.json()
assert (
create_filename("metadata", "json", deployment_report.id)
in response_json.keys()
)
for key in response_json:
assert f"report_id_{deployment_report.id}/" in key
def test_get_insights_report_404_no_canonical(self, django_client):
"""Retrieve insights report."""
deployment_report = DeploymentReportFactory.create(
number_of_fingerprints=0,
status=DeploymentsReport.STATUS_COMPLETE,
)
# fingerprint without canonical facts
SystemFingerprintFactory.create(
deployment_report_id=deployment_report.id,
name=None,
bios_uuid=None,
insights_client_id=None,
ip_addresses=None,
mac_addresses=None,
subscription_manager_id=None,
cloud_provider=None,
)
assert deployment_report.system_fingerprints.count() == 1
url = f"/api/v1/reports/{deployment_report.id}/insights/"
response = django_client.get(url)
assert response.status_code == status.HTTP_404_NOT_FOUND
def METHOD_NAME(self, django_client):
"""Fail to get a report for bad id."""
url = "/api/v1/reports/string/insights/"
# Query API
response = django_client.get(url)
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_get_insights_nonexistent(self, django_client):
"""Fail to get a report for report id that doesn't exist."""
url = "/api/v1/reports/999/insights/"
# Query API
response = django_client.get(url)
assert response.status_code == status.HTTP_404_NOT_FOUND
|
3,875 |
id
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetServiceTaskResult',
'AwaitableGetServiceTaskResult',
'get_service_task',
'get_service_task_output',
]
@pulumi.output_type
class GetServiceTaskResult:
"""
A task resource
"""
def __init__(__self__, etag=None, METHOD_NAME=None, name=None, properties=None, system_data=None, type=None):
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", METHOD_NAME)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
HTTP strong entity tag value. This is ignored if submitted.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> Any:
"""
Custom task properties
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Metadata pertaining to creation and last modification of the resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetServiceTaskResult(GetServiceTaskResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetServiceTaskResult(
etag=self.etag,
METHOD_NAME=self.METHOD_NAME,
name=self.name,
properties=self.properties,
system_data=self.system_data,
type=self.type)
def get_service_task(expand: Optional[str] = None,
group_name: Optional[str] = None,
service_name: Optional[str] = None,
task_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetServiceTaskResult:
"""
The service tasks resource is a nested, proxy-only resource representing work performed by a DMS (classic) instance. The GET method retrieves information about a service task.
:param str expand: Expand the response
:param str group_name: Name of the resource group
:param str service_name: Name of the service
:param str task_name: Name of the Task
"""
__args__ = dict()
__args__['expand'] = expand
__args__['groupName'] = group_name
__args__['serviceName'] = service_name
__args__['taskName'] = task_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:datamigration/v20220330preview:getServiceTask', __args__, opts=opts, typ=GetServiceTaskResult).value
return AwaitableGetServiceTaskResult(
etag=pulumi.get(__ret__, 'etag'),
METHOD_NAME=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
properties=pulumi.get(__ret__, 'properties'),
system_data=pulumi.get(__ret__, 'system_data'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_service_task)
def get_service_task_output(expand: Optional[pulumi.Input[Optional[str]]] = None,
group_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
task_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetServiceTaskResult]:
"""
The service tasks resource is a nested, proxy-only resource representing work performed by a DMS (classic) instance. The GET method retrieves information about a service task.
:param str expand: Expand the response
:param str group_name: Name of the resource group
:param str service_name: Name of the service
:param str task_name: Name of the Task
"""
...
|
3,876 |
get is new
|
from math import ceil
from django.urls import reverse
from rest_framework import serializers
from ...categories.serializers import CategorySerializer
from ...core.serializers import MutableFields
from ...notifications.threads import ThreadNotifications
from ..models import Thread
from .poll import PollSerializer
from .threadparticipant import ThreadParticipantSerializer
__all__ = ["ThreadSerializer", "PrivateThreadSerializer", "ThreadsListSerializer"]
BasicCategorySerializer = CategorySerializer.subset_fields(
"id",
"parent",
"name",
"short_name",
"color",
"description",
"is_closed",
"css_class",
"level",
"lft",
"rght",
"is_read",
"url",
)
class ThreadSerializer(serializers.ModelSerializer, MutableFields):
category = BasicCategorySerializer(many=False, read_only=True)
acl = serializers.SerializerMethodField()
has_unapproved_posts = serializers.SerializerMethodField()
is_new = serializers.SerializerMethodField()
is_read = serializers.SerializerMethodField()
path = BasicCategorySerializer(many=True, read_only=True)
poll = PollSerializer(many=False, read_only=True)
pages = serializers.SerializerMethodField()
best_answer = serializers.PrimaryKeyRelatedField(read_only=True)
best_answer_marked_by = serializers.PrimaryKeyRelatedField(read_only=True)
notifications = serializers.SerializerMethodField()
starter = serializers.SerializerMethodField()
last_poster = serializers.SerializerMethodField()
api = serializers.SerializerMethodField()
url = serializers.SerializerMethodField()
class Meta:
model = Thread
fields = [
"id",
"category",
"title",
"replies",
"has_unapproved_posts",
"started_on",
"starter_name",
"last_post_on",
"last_post_is_event",
"last_post",
"last_poster_name",
"is_unapproved",
"is_hidden",
"is_closed",
"weight",
"best_answer",
"best_answer_is_protected",
"best_answer_marked_on",
"best_answer_marked_by",
"best_answer_marked_by_name",
"best_answer_marked_by_slug",
"acl",
"is_new",
"is_read",
"path",
"poll",
"notifications",
"starter",
"last_poster",
"pages",
"api",
"url",
]
def get_acl(self, obj):
try:
return obj.acl
except AttributeError:
return {}
def get_has_unapproved_posts(self, obj):
try:
acl = obj.acl
except AttributeError:
return False
return acl.get("can_approve") and obj.has_unapproved_posts
def METHOD_NAME(self, obj):
try:
return obj.is_new
except AttributeError:
return None
def get_is_read(self, obj):
try:
return obj.is_read
except AttributeError:
return None
def get_participants(self, obj):
return ThreadParticipantSerializer(obj.participants_list, many=True).data
def get_notifications(self, obj):
if self.context:
watched_thread = self.context.get("watched_thread")
if watched_thread:
if watched_thread.send_emails:
return ThreadNotifications.SITE_AND_EMAIL
return ThreadNotifications.SITE_ONLY
watched_threads = self.context.get("watched_threads")
if watched_threads:
return watched_threads.get(obj.id)
return None
def get_starter(self, obj):
if obj.starter_id:
return {
"id": obj.starter_id,
"username": obj.starter.username,
"real_name": obj.starter.get_real_name(),
"avatars": obj.starter.avatars,
}
def get_last_poster(self, obj):
if obj.last_poster_id:
return {
"id": obj.last_poster_id,
"username": obj.last_poster.username,
"real_name": obj.last_poster.get_real_name(),
"avatars": obj.last_poster.avatars,
}
def get_pages(self, obj):
settings = self.context["settings"]
posts_per_page = settings.posts_per_page - 1
posts_per_page_orphans = settings.posts_per_page_orphans
if posts_per_page_orphans:
posts_per_page_orphans += 1
total_posts = obj.replies + 1
if total_posts <= posts_per_page + posts_per_page_orphans:
return 1
hits = total_posts - posts_per_page_orphans
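        # e.g. posts_per_page=10 (9 here) and orphans=5 (6 here): 16 total posts exceed 9 + 6, so hits=10 and ceil(10 / 9) = 2 pages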
return ceil(hits / posts_per_page)
def get_api(self, obj):
return {
"index": obj.get_api_url(),
"editor": obj.get_editor_api_url(),
"merge": obj.get_merge_api_url(),
"poll": obj.get_poll_api_url(),
"watch": obj.get_watch_api_url(),
"posts": {
"index": obj.get_posts_api_url(),
"merge": obj.get_post_merge_api_url(),
"move": obj.get_post_move_api_url(),
"split": obj.get_post_split_api_url(),
},
}
def get_url(self, obj):
return {
"index": obj.get_absolute_url(),
"new_post": obj.get_new_post_url(),
"last_post": obj.get_last_post_url(),
"best_answer": obj.get_best_answer_url(),
"unapproved_post": obj.get_unapproved_post_url(),
"starter": self.get_starter_url(obj),
"last_poster": self.get_last_poster_url(obj),
}
def get_starter_url(self, obj):
if obj.starter_id:
return reverse(
"misago:user", kwargs={"slug": obj.starter_slug, "pk": obj.starter_id}
)
def get_last_poster_url(self, obj):
if obj.last_poster_id:
return reverse(
"misago:user",
kwargs={"slug": obj.last_poster_slug, "pk": obj.last_poster_id},
)
class PrivateThreadSerializer(ThreadSerializer):
participants = serializers.SerializerMethodField()
class Meta:
model = Thread
fields = ThreadSerializer.Meta.fields + ["participants"]
class ThreadsListSerializer(ThreadSerializer):
category = serializers.PrimaryKeyRelatedField(read_only=True)
last_post = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Thread
fields = ThreadSerializer.Meta.fields + ["has_poll"]
ThreadsListSerializer = ThreadsListSerializer.exclude_fields("path", "poll")
|
3,877 |
mat power
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Matrix functions contains iterative methods for M^p."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
def matrix_square_root(mat_a, mat_a_size, iter_count=100, ridge_epsilon=1e-4):
"""Iterative method to get matrix square root.
Stable iterations for the matrix square root, Nicholas J. Higham
Page 231, Eq 2.6b
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.6.8799&rep=rep1&type=pdf
Args:
    mat_a: the symmetric PSD matrix whose matrix square root is to be computed
mat_a_size: size of mat_a.
iter_count: Maximum number of iterations.
ridge_epsilon: Ridge epsilon added to make the matrix positive definite.
Returns:
mat_a^0.5
"""
def _iter_condition(i, unused_mat_y, unused_old_mat_y, unused_mat_z,
unused_old_mat_z, err, old_err):
    # This method requires that we check for divergence at every step.
return math_ops.logical_and(i < iter_count, err < old_err)
def _iter_body(i, mat_y, unused_old_mat_y, mat_z, unused_old_mat_z, err,
unused_old_err):
current_iterate = 0.5 * (3.0 * identity - math_ops.matmul(mat_z, mat_y))
current_mat_y = math_ops.matmul(mat_y, current_iterate)
current_mat_z = math_ops.matmul(current_iterate, mat_z)
# Compute the error in approximation.
mat_sqrt_a = current_mat_y * math_ops.sqrt(norm)
mat_a_approx = math_ops.matmul(mat_sqrt_a, mat_sqrt_a)
residual = mat_a - mat_a_approx
current_err = math_ops.sqrt(math_ops.reduce_sum(residual * residual)) / norm
return i + 1, current_mat_y, mat_y, current_mat_z, mat_z, current_err, err
identity = linalg_ops.eye(math_ops.cast(mat_a_size, dtypes.int32))
mat_a = mat_a + ridge_epsilon * identity
norm = math_ops.sqrt(math_ops.reduce_sum(mat_a * mat_a))
mat_init_y = mat_a / norm
mat_init_z = identity
init_err = norm
_, _, prev_mat_y, _, _, _, _ = control_flow_ops.while_loop(
_iter_condition, _iter_body, [
0, mat_init_y, mat_init_y, mat_init_z, mat_init_z, init_err,
init_err + 1.0
])
return prev_mat_y * math_ops.sqrt(norm)
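# Rough usage sketch (assumed call site, not part of the original module): for a
# symmetric PSD matrix `mat_a` of shape [n, n],
#   mat_sqrt = matrix_square_root(mat_a, n)
# should give matmul(mat_sqrt, mat_sqrt) ~= mat_a + ridge_epsilon * eye(n).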
def matrix_inverse_pth_root(mat_g,
mat_g_size,
alpha,
iter_count=100,
epsilon=1e-6,
ridge_epsilon=1e-6):
"""Computes mat_g^alpha, where alpha = -1/p, p a positive integer.
We use an iterative Schur-Newton method from equation 3.2 on page 9 of:
A Schur-Newton Method for the Matrix p-th Root and its Inverse
by Chun-Hua Guo and Nicholas J. Higham
SIAM Journal on Matrix Analysis and Applications,
2006, Vol. 28, No. 3 : pp. 788-804
https://pdfs.semanticscholar.org/0abe/7f77433cf5908bfe2b79aa91af881da83858.pdf
Args:
    mat_g: the symmetric PSD matrix whose power is to be computed
mat_g_size: size of mat_g.
alpha: exponent, must be -1/p for p a positive integer.
iter_count: Maximum number of iterations.
epsilon: accuracy indicator, useful for early termination.
ridge_epsilon: Ridge epsilon added to make the matrix positive definite.
Returns:
mat_g^alpha
"""
identity = linalg_ops.eye(math_ops.cast(mat_g_size, dtypes.int32))
def METHOD_NAME(mat_m, p):
"""Computes mat_m^p, for p a positive integer.
Power p is known at graph compile time, so no need for loop and cond.
Args:
mat_m: a square matrix
p: a positive integer
Returns:
mat_m^p
"""
assert p == int(p) and p > 0
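    # Exponentiation by squaring: e.g. p = 5 (binary 101) multiplies in mat_m and mat_m**4 while skipping mat_m**2.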
power = None
while p > 0:
if p % 2 == 1:
power = math_ops.matmul(mat_m, power) if power is not None else mat_m
p //= 2
mat_m = math_ops.matmul(mat_m, mat_m)
return power
def _iter_condition(i, mat_m, _):
return math_ops.logical_and(
i < iter_count,
math_ops.reduce_max(math_ops.abs(mat_m - identity)) > epsilon)
def _iter_body(i, mat_m, mat_x):
mat_m_i = (1 - alpha) * identity + alpha * mat_m
return (i + 1, math_ops.matmul(METHOD_NAME(mat_m_i, -1.0 / alpha), mat_m),
math_ops.matmul(mat_x, mat_m_i))
if mat_g_size == 1:
mat_h = math_ops.pow(mat_g + ridge_epsilon, alpha)
else:
damped_mat_g = mat_g + ridge_epsilon * identity
z = (1 - 1 / alpha) / (2 * linalg_ops.norm(damped_mat_g))
# The best value for z is
# (1 - 1/alpha) * (c_max^{-alpha} - c_min^{-alpha}) /
# (c_max^{1-alpha} - c_min^{1-alpha})
# where c_max and c_min are the largest and smallest singular values of
# damped_mat_g.
# The above estimate assumes that c_max > c_min * 2^p. (p = -1/alpha)
# Can replace above line by the one below, but it is less accurate,
# hence needs more iterations to converge.
# z = (1 - 1/alpha) / math_ops.trace(damped_mat_g)
# If we want the method to always converge, use z = 1 / norm(damped_mat_g)
# or z = 1 / math_ops.trace(damped_mat_g), but these can result in many
# extra iterations.
_, _, mat_h = control_flow_ops.while_loop(
_iter_condition, _iter_body,
[0, damped_mat_g * z, identity * math_ops.pow(z, -alpha)])
return mat_h
|
3,878 |
migrate old containers
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Docker module implementation.
"""
from gns3.local_config import LocalConfig
from gns3.controller import Controller
from gns3.template_manager import TemplateManager
from gns3.template import Template
from ..module import Module
from .docker_vm import DockerVM
from .settings import DOCKER_SETTINGS, DOCKER_CONTAINER_SETTINGS
import logging
log = logging.getLogger(__name__)
class Docker(Module):
"""
Docker module.
"""
def __init__(self):
super().__init__()
self._loadSettings()
def _saveSettings(self):
"""
Saves the settings to the persistent settings file.
"""
LocalConfig.instance().saveSectionSettings(self.__class__.__name__, self._settings)
def _loadSettings(self):
"""
Loads the settings from the persistent settings file.
"""
local_config = LocalConfig.instance()
self._settings = local_config.loadSectionSettings(self.__class__.__name__, DOCKER_SETTINGS)
# migrate container settings to the controller (templates are managed on server side starting with version 2.0)
Controller.instance().connected_signal.connect(self.METHOD_NAME)
def METHOD_NAME(self):
"""
Migrate local container settings to the controller.
"""
if self._settings.get("containers"):
templates = []
for container in self._settings.get("containers"):
container_settings = DOCKER_CONTAINER_SETTINGS.copy()
container_settings.update(container)
templates.append(Template(container_settings))
TemplateManager.instance().updateList(templates)
self._settings["containers"] = []
self._saveSettings()
@staticmethod
def configurationPage():
"""
Returns the configuration page for this module.
:returns: QWidget object
"""
from .pages.docker_vm_configuration_page import DockerVMConfigurationPage
return DockerVMConfigurationPage
def getDockerImagesFromServer(self, compute_id, callback):
"""
Gets the Docker images list from a server.
:param server: server to send the request to
:param callback: callback for the reply from the server
"""
Controller.instance().getCompute("/docker/images", compute_id, callback)
@staticmethod
def getNodeClass(node_type, platform=None):
"""
Returns the class corresponding to node type.
:param node_type: node type (string)
:param platform: not used
:returns: class or None
"""
if node_type == "docker":
return DockerVM
return None
@staticmethod
def classes():
"""
Returns all the node classes supported by this module.
:returns: list of classes
"""
return [DockerVM]
@staticmethod
def preferencePages():
"""
Returns the preference pages for this module.
:returns: QWidget object list
"""
from .pages.docker_preferences_page import DockerPreferencesPage
from .pages.docker_vm_preferences_page import DockerVMPreferencesPage
return [DockerPreferencesPage, DockerVMPreferencesPage]
@staticmethod
def instance():
"""
Singleton to return only one instance of Docker module.
:returns: instance of Docker
"""
if not hasattr(Docker, "_instance"):
Docker._instance = Docker()
return Docker._instance
def __str__(self):
"""
Returns the module name.
"""
return "docker"
|
3,879 |
load dataplotly project settings
|
# coding=utf-8
"""Django signal receivers
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the Mozilla Public License 2.0.
"""
__author__ = '[email protected]'
__date__ = '2020-09-16'
__copyright__ = 'Copyright 2015 - 2020, Gis3w'
from django.conf import settings as g3wsettings
from django.dispatch import receiver
from django.apps import apps
from django.db.models.signals import post_save
from django.templatetags.static import static
from django.template import loader
from core.signals import load_layer_actions, load_js_modules
from qdjango.signals import load_qdjango_project_file, post_save_qdjango_project_file
from qdjango.utils.data import QgisProject
from qdjango.models import Layer
from qgis.PyQt.QtXml import QDomDocument
from qgis.PyQt.QtCore import QFile
from core.signals import initconfig_plugin_start
from .utils.qplotly_settings import QplotlySettings
from .utils.qplotly_factory import QplotlyFactoring
from .models import QplotlyWidget
import plotly
if plotly.__version__ != '2.5.1':
import plotly.graph_objects as go
import logging
logger = logging.getLogger('django.request')
@receiver(load_qdjango_project_file)
def METHOD_NAME(sender, **kwargs):
    """Load the DataPlotly settings from the QGIS project DOM document
    and put the data into the sender (QgisProject instance)"""
if not isinstance(sender, QgisProject) or not g3wsettings.LOAD_QPLOTLY_FROM_PROJECT:
return
    # To avoid multithreading issues, load the XML file into a local DOM document.
doc = QDomDocument('QgsProject')
file = QFile(sender.qgs_project.fileName())
doc.setContent(file)
settings = QplotlySettings()
read = settings.read_from_project(doc)
file.close()
if not read or settings.source_layer_id is None:
        logger.info('DataPlotly settings not found in the project DOM document.')
return
sender.qplotly = {
'qgs_layer_id': settings.source_layer_id,
'type': settings.plot_type,
'title': settings.layout['title'],
'selected_features_only': settings.properties['selected_features_only'],
'visible_features_only': settings.properties['visible_features_only'],
'xml': settings.write_xml_db().toString()
}
@receiver(post_save_qdjango_project_file)
def save_dataplotly_project_settings(sender, **kwargs):
"""Save qplotly settings into db"""
if not isinstance(sender, QgisProject):
return
if hasattr(sender, 'qplotly'):
layer = sender.instance.layer_set.get(qgs_layer_id=sender.qplotly['qgs_layer_id'])
qplw, created = QplotlyWidget.objects.update_or_create(defaults={
'datasource': layer.datasource,
'type': sender.qplotly['type'],
'title': sender.qplotly['title'],
'xml': sender.qplotly['xml'],
'selected_features_only': sender.qplotly['selected_features_only'],
'visible_features_only': sender.qplotly['visible_features_only']
}, project=sender.instance)
qplw.layers.add(layer)
@receiver(post_save, sender=Layer)
def update_widget(sender, **kwargs):
"""
Update Qplotly widget data when layer datasource change
"""
# only for update
if kwargs['created']:
return
layer = kwargs['instance']
# search for widget
widgets = layer.qplotlywidget_set.all()
for widget in widgets:
if widget.datasource != layer.datasource:
widget.datasource = layer.datasource
widget.save()
@receiver(initconfig_plugin_start)
def set_initconfig_value(sender, **kwargs):
"""Set base editing data for initconfig"""
project = apps.get_app_config(kwargs['projectType']).get_model('project').objects.get(pk=kwargs['project'])
plots = []
plot_config = config = {
'scrollZoom': True,
'editable': True,
'modeBarButtonsToRemove': ['sendDataToCloud', 'editInChartStudio']
}
layers = project.layer_set.all()
for layer in layers:
qplotly_widgets = layer.qplotlywidget_set.all()
for qplotly_widget in qplotly_widgets:
# load settings from db
settings = QplotlySettings()
if not settings.read_from_model(qplotly_widget):
continue
            # instantiate a QplotlyFactoring from the widget settings
factory = QplotlyFactoring(settings, request=None, layer=None)
factory.build_layout()
if plotly.__version__ != '2.5.1':
fig = go.Figure(layout=factory.layout)
layout = fig.to_dict()['layout']
else:
layout = factory.layout
plots.append({
'id': qplotly_widget.pk,
'qgs_layer_id': layer.qgs_layer_id,
'selected_features_only': qplotly_widget.selected_features_only,
'visible_features_only': qplotly_widget.visible_features_only,
'show': qplotly_widget.show_on_start_client,
'plot': {
'type': settings.plot_type,
'layout': layout,
'config': plot_config
}
})
# no plots no 'qplotly' section
if len(plots) == 0:
return
return {
'qplotly': {
'gid': "{}:{}".format(kwargs['projectType'], kwargs['project']),
'jsscripts': [
static('qplotly/polyfill.min.js'),
static('qplotly/plotly-1.52.2.min.js')
],
'plots': plots
}
}
@receiver(load_js_modules)
def get_js_modules(sender, **kwargs):
"""Add qplotly js scripts"""
try:
if sender.resolver_match.view_name == 'qdjango-project-layers-list':
return 'qplotly/js/widget.js'
except Exception as e:
logger.error(str(e))
@receiver(load_layer_actions)
def qplottly_layer_action(sender, **kwargs):
"""
    Return the HTML for the qplotly action of a project layer.
"""
# only admin and editor1 or editor2:
if sender.has_perm('change_project', kwargs['layer'].project) and \
kwargs['layer'].layer_type in (
Layer.TYPES.postgres,
Layer.TYPES.spatialite,
Layer.TYPES.ogr,
Layer.TYPES.mssql,
Layer.TYPES.oracle
):
try:
app_configs = apps.get_app_config(kwargs['app_name']).configs
except:
app_configs = object()
kwargs['as_col'] = True
template = loader.get_template('qplotly/layer_action.html')
return template.render(kwargs)
|
3,880 |
not ga backward compatibility change expected team
|
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
import shutil
from typing import List
import git
import pytest
import yaml
from connector_ops import acceptance_test_config_checks
@pytest.fixture
def mock_diffed_branched(mocker):
airbyte_repo = git.Repo(search_parent_directories=True)
mocker.patch.object(acceptance_test_config_checks.utils, "DIFFED_BRANCH", airbyte_repo.active_branch)
return airbyte_repo.active_branch
@pytest.fixture
def pokeapi_acceptance_test_config_path():
return "airbyte-integrations/connectors/source-pokeapi/acceptance-test-config.yml"
@pytest.fixture
def ga_connector_file():
return "airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml"
@pytest.fixture
def METHOD_NAME(tmp_path, pokeapi_acceptance_test_config_path) -> List:
expected_teams = [{"any-of": list(acceptance_test_config_checks.BACKWARD_COMPATIBILITY_REVIEWERS)}]
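    # back up the real acceptance-test config, append the change under test, then restore it after the test yields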
backup_path = tmp_path / "backup_poke_acceptance"
shutil.copyfile(pokeapi_acceptance_test_config_path, backup_path)
with open(pokeapi_acceptance_test_config_path, "a") as acceptance_test_config_file:
acceptance_test_config_file.write("disable_for_version: 0.0.0")
yield expected_teams
shutil.copyfile(backup_path, pokeapi_acceptance_test_config_path)
@pytest.fixture
def not_ga_test_strictness_level_change_expected_team(tmp_path, pokeapi_acceptance_test_config_path) -> List:
expected_teams = [{"any-of": list(acceptance_test_config_checks.TEST_STRICTNESS_LEVEL_REVIEWERS)}]
backup_path = tmp_path / "non_ga_acceptance_test_config.backup"
shutil.copyfile(pokeapi_acceptance_test_config_path, backup_path)
with open(pokeapi_acceptance_test_config_path, "a") as acceptance_test_config_file:
acceptance_test_config_file.write("test_strictness_level: foo")
yield expected_teams
shutil.copyfile(backup_path, pokeapi_acceptance_test_config_path)
@pytest.fixture
def not_ga_bypass_reason_file_change_expected_team(tmp_path, pokeapi_acceptance_test_config_path):
expected_teams = []
backup_path = tmp_path / "non_ga_acceptance_test_config.backup"
shutil.copyfile(pokeapi_acceptance_test_config_path, backup_path)
with open(pokeapi_acceptance_test_config_path, "a") as acceptance_test_config_file:
acceptance_test_config_file.write("bypass_reason:")
yield expected_teams
shutil.copyfile(backup_path, pokeapi_acceptance_test_config_path)
@pytest.fixture
def not_ga_not_tracked_change_expected_team(tmp_path, pokeapi_acceptance_test_config_path):
expected_teams = []
backup_path = tmp_path / "non_ga_acceptance_test_config.backup"
shutil.copyfile(pokeapi_acceptance_test_config_path, backup_path)
with open(pokeapi_acceptance_test_config_path, "a") as acceptance_test_config_file:
acceptance_test_config_file.write("not_tracked")
yield expected_teams
shutil.copyfile(backup_path, pokeapi_acceptance_test_config_path)
@pytest.fixture
def ga_connector_file_change_expected_team(tmp_path, ga_connector_file):
expected_teams = list(acceptance_test_config_checks.GA_CONNECTOR_REVIEWERS)
backup_path = tmp_path / "ga_acceptance_test_config.backup"
shutil.copyfile(ga_connector_file, backup_path)
with open(ga_connector_file, "a") as ga_acceptance_test_config_file:
ga_acceptance_test_config_file.write("foobar")
yield expected_teams
shutil.copyfile(backup_path, ga_connector_file)
@pytest.fixture
def ga_connector_backward_compatibility_file_change_expected_team(tmp_path, ga_connector_file):
expected_teams = [{"any-of": list(acceptance_test_config_checks.BACKWARD_COMPATIBILITY_REVIEWERS)}]
backup_path = tmp_path / "ga_acceptance_test_config.backup"
shutil.copyfile(ga_connector_file, backup_path)
with open(ga_connector_file, "a") as ga_acceptance_test_config_file:
ga_acceptance_test_config_file.write("disable_for_version: 0.0.0")
yield expected_teams
shutil.copyfile(backup_path, ga_connector_file)
@pytest.fixture
def ga_connector_bypass_reason_file_change_expected_team(tmp_path, ga_connector_file):
expected_teams = [{"any-of": list(acceptance_test_config_checks.GA_BYPASS_REASON_REVIEWERS)}]
backup_path = tmp_path / "ga_acceptance_test_config.backup"
shutil.copyfile(ga_connector_file, backup_path)
with open(ga_connector_file, "a") as ga_acceptance_test_config_file:
ga_acceptance_test_config_file.write("bypass_reason:")
yield expected_teams
shutil.copyfile(backup_path, ga_connector_file)
@pytest.fixture
def ga_connector_test_strictness_level_file_change_expected_team(tmp_path, ga_connector_file):
expected_teams = [{"any-of": list(acceptance_test_config_checks.TEST_STRICTNESS_LEVEL_REVIEWERS)}]
backup_path = tmp_path / "ga_acceptance_test_config.backup"
shutil.copyfile(ga_connector_file, backup_path)
with open(ga_connector_file, "a") as ga_acceptance_test_config_file:
ga_acceptance_test_config_file.write("test_strictness_level: 0.0.0")
yield expected_teams
shutil.copyfile(backup_path, ga_connector_file)
def verify_no_requirements_file_was_generated(captured: str):
assert captured.out.split("\n")[0].split("=")[-1] == "false"
def verify_requirements_file_was_generated(captured: str):
assert captured.out.split("\n")[0].split("=")[-1] == "true"
def verify_review_requirements_file_contains_expected_teams(requirements_file_path: str, expected_teams: List):
with open(requirements_file_path, "r") as requirements_file:
requirements = yaml.safe_load(requirements_file)
assert requirements[0]["teams"] == expected_teams
def check_review_requirements_file(capsys, expected_teams: List):
acceptance_test_config_checks.write_review_requirements_file()
captured = capsys.readouterr()
if not expected_teams:
verify_no_requirements_file_was_generated(captured)
else:
verify_requirements_file_was_generated(captured)
requirements_file_path = acceptance_test_config_checks.REVIEW_REQUIREMENTS_FILE_PATH
verify_review_requirements_file_contains_expected_teams(requirements_file_path, expected_teams)
def test_find_mandatory_reviewers_backward_compatibility(mock_diffed_branched, capsys, METHOD_NAME):
check_review_requirements_file(capsys, METHOD_NAME)
def test_find_mandatory_reviewers_test_strictness_level(mock_diffed_branched, capsys, not_ga_test_strictness_level_change_expected_team):
check_review_requirements_file(capsys, not_ga_test_strictness_level_change_expected_team)
def test_find_mandatory_reviewers_not_ga_bypass_reason(mock_diffed_branched, capsys, not_ga_bypass_reason_file_change_expected_team):
check_review_requirements_file(capsys, not_ga_bypass_reason_file_change_expected_team)
def test_find_mandatory_reviewers_ga(mock_diffed_branched, capsys, ga_connector_file_change_expected_team):
check_review_requirements_file(capsys, ga_connector_file_change_expected_team)
def test_find_mandatory_reviewers_ga_backward_compatibility(
mock_diffed_branched, capsys, ga_connector_backward_compatibility_file_change_expected_team
):
check_review_requirements_file(capsys, ga_connector_backward_compatibility_file_change_expected_team)
def test_find_mandatory_reviewers_ga_bypass_reason(mock_diffed_branched, capsys, ga_connector_bypass_reason_file_change_expected_team):
check_review_requirements_file(capsys, ga_connector_bypass_reason_file_change_expected_team)
def test_find_mandatory_reviewers_ga_test_strictness_level(
mock_diffed_branched, capsys, ga_connector_test_strictness_level_file_change_expected_team
):
check_review_requirements_file(capsys, ga_connector_test_strictness_level_file_change_expected_team)
def test_find_mandatory_reviewers_no_tracked_changed(mock_diffed_branched, capsys, not_ga_not_tracked_change_expected_team):
check_review_requirements_file(capsys, not_ga_not_tracked_change_expected_team)
|
3,881 |
date age
|
"""Add a few specific filters to Jinja2."""
from __future__ import annotations
import re
from calendar import timegm
from datetime import date, datetime
from functools import wraps
from typing import Any, Callable, Optional, Union
import bleach
import dateutil.parser
import flask_babel as babel
from babel.dates import DateTimePattern, format_timedelta, parse_pattern
from deprecated import deprecated
from jinja2 import Environment, escape, evalcontextfilter
from jinja2.nodes import EvalContext
from markupsafe import Markup
from pytz import utc
from werkzeug.routing import BuildError
from abilian.core.util import local_dt, slugify, utc_dt
from .util import url_for
def autoescape(filter_func: Callable) -> Callable:
"""Decorator to autoescape result from filters."""
@evalcontextfilter
@wraps(filter_func)
def _autoescape(eval_ctx: EvalContext, *args: str, **kwargs: Any) -> Markup | str:
result = filter_func(*args, **kwargs)
if eval_ctx.autoescape:
result = Markup(result)
return result
return _autoescape
@autoescape
def nl2br(value: str) -> Markup:
"""Replace newlines with <br />."""
result = escape(value).replace("\n", Markup("<br />\n"))
return result
_PARAGRAPH_RE = re.compile(r"(?:\r\n|\r|\n){2,}")
@autoescape
def paragraphs(value: str) -> str:
"""Blank lines delimitates paragraphs."""
result = "\n\n".join(
"<p>{}</p>".format(p.strip().replace("\n", Markup("<br />\n")))
for p in _PARAGRAPH_RE.split(escape(value))
)
return result
def labelize(s: str) -> str:
return " ".join(w.capitalize() for w in s.split("_"))
def filesize(d: int | str) -> Markup:
if not isinstance(d, int):
d = int(d)
if d < 1000:
s = f"{d:d} B"
elif d < int(1e4):
s = f"{d / 1000.0:.1f} kB"
elif d < int(1e6):
s = f"{d / 1000.0:.0f} kB"
elif d < int(1e7):
s = f"{d / 1000000.0:.1f} MB"
elif d < int(1e9):
s = f"{d / 1000000.0:.0f} MB"
elif d < int(1e10):
s = f"{d / 1000000000.0:.1f} GB"
else:
s = f"{d / 1000000000.0:.0f} GB"
return Markup(s)
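# Illustrative sketch (not part of the original module): expected outputs of
# filesize() for a few representative byte counts, derived from the thresholds above.
def _filesize_examples() -> None:
    assert filesize(999) == "999 B"
    assert filesize(1500) == "1.5 kB"       # one decimal below 10 kB
    assert filesize(2_500_000) == "2.5 MB"  # one decimal below 10 MB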
def roughsize(size: int, above: int = 20, mod: int = 10) -> str:
"""6 -> '6' 15 -> '15' 134 -> '130+'."""
if size < above:
return str(size)
return f"{(size - size % mod):d}+"
def datetimeparse(s) -> datetime | None:
"""Parse a string date time to a datetime object.
Suitable for dates serialized with .isoformat()
:return: None, or an aware datetime instance, tz=UTC.
"""
try:
dt = dateutil.parser.parse(s)
except ValueError:
return None
return utc_dt(dt)
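# Illustrative sketch (not part of the original module): datetimeparse() accepts
# .isoformat() strings and, per the docstring, returns an aware UTC datetime
# (the tz handling itself comes from utc_dt()); unparsable input yields None.
def _datetimeparse_examples() -> None:
    parsed = datetimeparse("2023-05-01T12:00:00")
    assert parsed is not None and parsed.tzinfo is not None
    assert datetimeparse("not a date") is None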
def age(
dt: datetime | None,
now: datetime | None = None,
add_direction: bool = True,
date_threshold: Any | None = None,
) -> str:
"""
:param dt: :class:`datetime<datetime>` instance to format
:param now: :class:`datetime<datetime>` instance to compare to `dt`
:param add_direction: if `True`, will add "in" or "ago" (example for `en`
        locale) to the time difference `dt - now`, i.e. "in 9 min." or "9 min. ago"
    :param date_threshold: above this threshold, a formatted date is used instead of an
elapsed time indication. Supported values: "day".
"""
# Fail silently for now XXX
if not dt:
return ""
if not now:
now = datetime.utcnow()
locale = babel.get_locale()
dt = utc_dt(dt)
now = utc_dt(now)
delta = dt - now
if date_threshold is not None:
dy, dw, dd = dt_cal = dt.isocalendar()
ny, nw, nd = now_cal = now.isocalendar()
if dt_cal != now_cal:
# not same day
remove_year = dy == ny
date_fmt = locale.date_formats["long"].pattern
time_fmt = locale.time_formats["short"].pattern
fmt = locale.datetime_formats["medium"]
if remove_year:
date_fmt = date_fmt.replace("y", "").strip()
# remove leading or trailing spaces, comma, etc...
date_fmt = re.sub("^[^A-Za-z]*|[^A-Za-z]*$", "", date_fmt)
fmt = fmt.format(time_fmt, date_fmt)
return babel.format_datetime(dt, format=fmt)
# don't use (flask.ext.)babel.format_timedelta: as of Flask-Babel 0.9 it
# doesn't support "threshold" arg.
return format_timedelta(
delta,
locale=locale,
granularity="minute",
threshold=0.9,
add_direction=add_direction,
)
def METHOD_NAME(dt: datetime | None, now: datetime | None = None) -> str:
# Fail silently for now XXX
if not dt:
return ""
formatted_date = babel.format_datetime(dt, format="yyyy-MM-dd HH:mm")
return f"{formatted_date} ({age(dt, now)})"
@deprecated
def date_fmt(value, format="EE, d MMMM y"):
"""
@deprecated: use flask_babel's dateformat filter instead.
"""
if isinstance(value, date):
return babel.format_date(value, format)
else:
return babel.format_date(local_dt(value), format)
def babel2datepicker(pattern: DateTimePattern) -> str:
"""Convert date format from babel (http://babel.pocoo.org/docs/dates/#date-
    fields) to a format understood by bootstrap-datepicker."""
if not isinstance(pattern, DateTimePattern):
pattern = parse_pattern(pattern)
map_fmt = {
# days
"d": "dd",
"dd": "dd",
"EEE": "D",
"EEEE": "DD",
"EEEEE": "D", # narrow name => short name
# months
"M": "mm",
"MM": "mm",
"MMM": "M",
"MMMM": "MM",
# years
"y": "yyyy",
"yy": "yyyy",
"yyy": "yyyy",
"yyyy": "yyyy",
# time picker format
# hours
"h": "%I",
"hh": "%I",
"H": "%H",
"HH": "%H",
# minutes,
"m": "%M",
"mm": "%M",
# seconds
"s": "%S",
"ss": "%S",
# am/pm
"a": "%p",
}
return pattern.format % map_fmt
# Doesn't work yet. TZ issues.
def to_timestamp(dt):
utc_datetime = dt.astimezone(utc)
return timegm(utc_datetime.timetuple()) + utc_datetime.microsecond / 1e6
def abbrev(s: str, max_size: int) -> str:
if len(s) <= max_size:
return s
else:
h = max_size // 2 - 1
return f"{s[0:h]}...{s[-h:]}"
def bool2check(val, true="\u2713", false=""):
"""Filter value as boolean and show check mark (✓) or nothing."""
return true if val else false
@autoescape
def linkify(s: str) -> Markup:
return Markup(bleach.linkify(s))
def obj_to_url(obj):
"""Find url for obj using :func:`url_for`, return empty string is not
found.
:func:`url_for` is also provided in jinja context, the filtering version is
forgiving when `obj` has no default view set.
"""
try:
return url_for(obj)
except BuildError:
return ""
def init_filters(env: Environment):
env.filters["nl2br"] = nl2br
env.filters["paragraphs"] = paragraphs
env.filters["date_age"] = METHOD_NAME
env.filters["datetimeparse"] = datetimeparse
env.filters["age"] = age
env.filters["date"] = date_fmt
env.filters["babel2datepicker"] = babel2datepicker
env.filters["to_timestamp"] = to_timestamp
env.filters["url_for"] = obj_to_url
env.filters["abbrev"] = abbrev
env.filters["filesize"] = filesize
env.filters["roughsize"] = roughsize
env.filters["labelize"] = labelize
env.filters["linkify"] = linkify
env.filters["toslug"] = slugify
env.filters["bool2check"] = bool2check
|
3,882 |
count alnum
|
# Copyright © Michal Čihař <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""Database specific code to extend Django."""
from django.db import connection, models
from django.db.models import Case, IntegerField, Sum, When
from django.db.models.lookups import Contains, Exact, PatternLookup, Regex
from .inv_regex import invert_re
ESCAPED = frozenset(".\\+*?[^]$(){}=!<>|:-")
PG_TRGM = "CREATE INDEX {0}_{1}_fulltext ON trans_{0} USING GIN ({1} gin_trgm_ops {2})"
PG_DROP = "DROP INDEX {0}_{1}_fulltext"
MY_FTX = "CREATE FULLTEXT INDEX {0}_{1}_fulltext ON trans_{0}({1})"
MY_DROP = "ALTER TABLE trans_{0} DROP INDEX {0}_{1}_fulltext"
def conditional_sum(value=1, **cond):
"""Wrapper to generate SUM on boolean/enum values."""
return Sum(Case(When(then=value, **cond), default=0, output_field=IntegerField()))
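# Illustrative sketch (not part of the original module): conditional_sum() is meant
# to be used inside aggregate()/annotate(); the model and state values below are
# hypothetical stand-ins, not actual project identifiers.
#
#   stats = Unit.objects.aggregate(
#       translated=conditional_sum(1, state=STATE_TRANSLATED),
#       fuzzy=conditional_sum(1, state=STATE_FUZZY),
#   )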
def using_postgresql():
return connection.vendor == "postgresql"
def adjust_similarity_threshold(value: float):
"""
Adjusts pg_trgm.similarity_threshold for the % operator.
    Ideally we would use similarity() directly in the search, but that does not
    seem to use the index, while using % does.
"""
if not using_postgresql():
return
current_similarity = getattr(connection, "weblate_similarity", -1)
# Ignore small differences
if abs(current_similarity - value) < 0.05:
return
with connection.cursor() as cursor:
        # The SELECT has to be executed first as otherwise the trgm extension
        # might not yet be loaded and setting the GUC would not be possible.
if current_similarity == -1:
cursor.execute("SELECT show_limit()")
# Adjust threshold
cursor.execute("SELECT set_limit(%s)", [value])
connection.weblate_similarity = value
def METHOD_NAME(string):
return sum(map(str.isalnum, string))
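# For example, METHOD_NAME("a-b") == 2 while METHOD_NAME("abc") == 3; the lookups
# below fall back to non-indexed comparisons when the value has fewer than three
# alphanumeric characters, presumably because a trigram index needs at least one
# full trigram to be useful.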
class PostgreSQLFallbackLookup(PatternLookup):
def __init__(self, lhs, rhs):
self.orig_lhs = lhs
self.orig_rhs = rhs
super().__init__(lhs, rhs)
def needs_fallback(self):
return isinstance(self.orig_rhs, str) and METHOD_NAME(self.orig_rhs) < 3
class FallbackStringMixin:
"""Avoid using index for lhs by concatenating to a string."""
def process_lhs(self, compiler, connection, lhs=None):
lhs_sql, params = super().process_lhs(compiler, connection, lhs)
return f"{lhs_sql} || ''", params
class PostgreSQLRegexFallbackLookup(FallbackStringMixin, Regex):
pass
class PostgreContainsFallbackLookup(FallbackStringMixin, Contains):
pass
class PostgreExactFallbackLookup(FallbackStringMixin, Exact):
pass
class PostgreSQLRegexLookup(Regex):
def __init__(self, lhs, rhs):
self.orig_lhs = lhs
self.orig_rhs = rhs
super().__init__(lhs, rhs)
def needs_fallback(self):
if not isinstance(self.orig_rhs, str):
return False
return (
min((METHOD_NAME(match) for match in invert_re(self.orig_rhs)), default=0)
< 3
)
def as_sql(self, compiler, connection):
if self.needs_fallback():
return PostgreSQLRegexFallbackLookup(self.orig_lhs, self.orig_rhs).as_sql(
compiler, connection
)
return super().as_sql(compiler, connection)
class PostgreSQLSearchLookup(PostgreSQLFallbackLookup):
lookup_name = "search"
param_pattern = "%s"
def as_sql(self, compiler, connection):
if self.needs_fallback():
return PostgreContainsFallbackLookup(self.orig_lhs, self.orig_rhs).as_sql(
compiler, connection
)
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return f"{lhs} %% {rhs} = true", params
class MySQLSearchLookup(models.Lookup):
lookup_name = "search"
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return f"MATCH ({lhs}) AGAINST ({rhs} IN NATURAL LANGUAGE MODE)", params
class PostgreSQLSubstringLookup(PostgreSQLFallbackLookup):
"""
Case insensitive substring lookup.
    This is essentially the same as icontains in Django, but it utilizes the
    ILIKE operator, which can use a pg_trgm index.
"""
lookup_name = "substring"
def as_sql(self, compiler, connection):
if self.needs_fallback():
return PostgreContainsFallbackLookup(self.orig_lhs, self.orig_rhs).as_sql(
compiler, connection
)
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return f"{lhs} ILIKE {rhs}", params
def re_escape(pattern):
"""
Escape for use in database regexp match.
This is based on re.escape, but that one escapes too much.
"""
string = list(pattern)
for i, char in enumerate(pattern):
if char == "\000":
string[i] = "\\000"
elif char in ESCAPED:
string[i] = "\\" + char
return "".join(string)
|
3,883 |
type
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetDatastoreResult',
'AwaitableGetDatastoreResult',
'get_datastore',
'get_datastore_output',
]
@pulumi.output_type
class GetDatastoreResult:
"""
A datastore resource
"""
def __init__(__self__, disk_pool_volume=None, id=None, name=None, net_app_volume=None, provisioning_state=None, status=None, METHOD_NAME=None):
if disk_pool_volume and not isinstance(disk_pool_volume, dict):
raise TypeError("Expected argument 'disk_pool_volume' to be a dict")
pulumi.set(__self__, "disk_pool_volume", disk_pool_volume)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if net_app_volume and not isinstance(net_app_volume, dict):
raise TypeError("Expected argument 'net_app_volume' to be a dict")
pulumi.set(__self__, "net_app_volume", net_app_volume)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", METHOD_NAME)
@property
@pulumi.getter(name="diskPoolVolume")
def disk_pool_volume(self) -> Optional['outputs.DiskPoolVolumeResponse']:
"""
An iSCSI volume
"""
return pulumi.get(self, "disk_pool_volume")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="netAppVolume")
def net_app_volume(self) -> Optional['outputs.NetAppVolumeResponse']:
"""
An Azure NetApp Files volume
"""
return pulumi.get(self, "net_app_volume")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The state of the datastore provisioning
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def status(self) -> str:
"""
The operational status of the datastore
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetDatastoreResult(GetDatastoreResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetDatastoreResult(
disk_pool_volume=self.disk_pool_volume,
id=self.id,
name=self.name,
net_app_volume=self.net_app_volume,
provisioning_state=self.provisioning_state,
status=self.status,
METHOD_NAME=self.METHOD_NAME)
def get_datastore(cluster_name: Optional[str] = None,
datastore_name: Optional[str] = None,
private_cloud_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDatastoreResult:
"""
A datastore resource
:param str cluster_name: Name of the cluster in the private cloud
:param str datastore_name: Name of the datastore in the private cloud cluster
:param str private_cloud_name: Name of the private cloud
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['clusterName'] = cluster_name
__args__['datastoreName'] = datastore_name
__args__['privateCloudName'] = private_cloud_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:avs/v20230301:getDatastore', __args__, opts=opts, typ=GetDatastoreResult).value
return AwaitableGetDatastoreResult(
disk_pool_volume=pulumi.get(__ret__, 'disk_pool_volume'),
id=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
net_app_volume=pulumi.get(__ret__, 'net_app_volume'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
status=pulumi.get(__ret__, 'status'),
METHOD_NAME=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_datastore)
def get_datastore_output(cluster_name: Optional[pulumi.Input[str]] = None,
datastore_name: Optional[pulumi.Input[str]] = None,
private_cloud_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDatastoreResult]:
"""
A datastore resource
:param str cluster_name: Name of the cluster in the private cloud
:param str datastore_name: Name of the datastore in the private cloud cluster
:param str private_cloud_name: Name of the private cloud
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
...
|
3,884 |
route queries
|
import asyncio
import os
from operator import itemgetter
from pathlib import Path
from random import randint, sample
from urllib.parse import parse_qs
import asyncpg
import jinja2
import orjson
async def pg_setup():
global pool
pool = await asyncpg.create_pool(
user=os.getenv('PGUSER', 'benchmarkdbuser'),
password=os.getenv('PGPASS', 'benchmarkdbpass'),
database='hello_world',
host='tfb-database',
port=5432
)
SQL_SELECT = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1'
SQL_UPDATE = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2'
ROW_ADD = [0, 'Additional fortune added at request time.']
JSON_RESPONSE = {
'type': 'http.response.start',
'status': 200,
'headers': [
[b'content-type', b'application/json'],
]
}
HTML_RESPONSE = {
'type': 'http.response.start',
'status': 200,
'headers': [
[b'content-type', b'text/html; charset=utf-8'],
]
}
PLAINTEXT_RESPONSE = {
'type': 'http.response.start',
'status': 200,
'headers': [
[b'content-type', b'text/plain; charset=utf-8'],
]
}
pool = None
key = itemgetter(1)
json_dumps = orjson.dumps
with Path('templates/fortune.html').open('r') as f:
template = jinja2.Template(f.read())
asyncio.get_event_loop().run_until_complete(pg_setup())
def get_num_queries(scope):
try:
query_string = scope['query_string']
query_count = int(parse_qs(query_string)[b'queries'][0])
except (KeyError, IndexError, ValueError):
return 1
if query_count < 1:
return 1
if query_count > 500:
return 500
return query_count
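# Illustrative sketch (not part of the original module): the ?queries= value is
# clamped to the 1..500 range and falls back to 1 when missing or malformed.
def _get_num_queries_examples() -> None:
    assert get_num_queries({'query_string': b'queries=20'}) == 20
    assert get_num_queries({'query_string': b'queries=9999'}) == 500
    assert get_num_queries({'query_string': b''}) == 1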
async def route_json(scope, receive, send):
await send(JSON_RESPONSE)
await send({
'type': 'http.response.body',
'body': json_dumps({'message': 'Hello, world!'}),
'more_body': False
})
async def route_db(scope, receive, send):
row_id = randint(1, 10000)
async with pool.acquire() as connection:
number = await connection.fetchval(SQL_SELECT, row_id)
await send(JSON_RESPONSE)
await send({
'type': 'http.response.body',
'body': json_dumps({'id': row_id, 'randomNumber': number}),
'more_body': False
})
async def METHOD_NAME(scope, receive, send):
num_queries = get_num_queries(scope)
row_ids = sample(range(1, 10000), num_queries)
worlds = []
async with pool.acquire() as connection:
statement = await connection.prepare(SQL_SELECT)
for row_id in row_ids:
number = await statement.fetchval(row_id)
worlds.append({'id': row_id, 'randomNumber': number})
await send(JSON_RESPONSE)
await send({
'type': 'http.response.body',
'body': json_dumps(worlds),
'more_body': False
})
async def route_fortunes(scope, receive, send):
async with pool.acquire() as connection:
fortunes = await connection.fetch('SELECT * FROM Fortune')
fortunes.append(ROW_ADD)
fortunes.sort(key=key)
content = template.render(fortunes=fortunes).encode('utf-8')
await send(HTML_RESPONSE)
await send({
'type': 'http.response.body',
'body': content,
'more_body': False
})
async def route_updates(scope, receive, send):
num_queries = get_num_queries(scope)
updates = list(zip(
sample(range(1, 10000), num_queries),
sorted(sample(range(1, 10000), num_queries))
))
worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates]
async with pool.acquire() as connection:
statement = await connection.prepare(SQL_SELECT)
for row_id, _ in updates:
await statement.fetchval(row_id)
await connection.executemany(SQL_UPDATE, updates)
await send(JSON_RESPONSE)
await send({
'type': 'http.response.body',
'body': json_dumps(worlds),
'more_body': False
})
async def route_plaintext(scope, receive, send):
await send(PLAINTEXT_RESPONSE)
await send({
'type': 'http.response.body',
'body': b'Hello, world!',
'more_body': False
})
async def handle_404(scope, receive, send):
await send(PLAINTEXT_RESPONSE)
await send({
'type': 'http.response.body',
'body': b'Not found',
'more_body': False
})
routes = {
'/json': route_json,
'/db': route_db,
'/queries': METHOD_NAME,
'/fortunes': route_fortunes,
'/updates': route_updates,
'/plaintext': route_plaintext
}
def main(scope, receive, send):
handler = routes.get(scope['path'], handle_404)
return handler(scope, receive, send)
|
3,885 |
test harnesses
|
#!/usr/bin/env python3
from collections import defaultdict
import os
import re
import unittest
from openpilot.common.basedir import BASEDIR
from openpilot.selfdrive.car.car_helpers import interfaces, get_interface_attr
from openpilot.selfdrive.car.docs import CARS_MD_OUT, CARS_MD_TEMPLATE, generate_cars_md, get_all_car_info
from openpilot.selfdrive.car.docs_definitions import Cable, Column, PartType, Star
from openpilot.selfdrive.car.honda.values import CAR as HONDA
from openpilot.selfdrive.debug.dump_car_info import dump_car_info
from openpilot.selfdrive.debug.print_docs_diff import print_car_info_diff
class TestCarDocs(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.all_cars = get_all_car_info()
def test_generator(self):
generated_cars_md = generate_cars_md(self.all_cars, CARS_MD_TEMPLATE)
with open(CARS_MD_OUT, "r") as f:
current_cars_md = f.read()
self.assertEqual(generated_cars_md, current_cars_md,
"Run selfdrive/car/docs.py to update the compatibility documentation")
def test_docs_diff(self):
dump_path = os.path.join(BASEDIR, "selfdrive", "car", "tests", "cars_dump")
dump_car_info(dump_path)
print_car_info_diff(dump_path)
os.remove(dump_path)
def test_duplicate_years(self):
make_model_years = defaultdict(list)
for car in self.all_cars:
with self.subTest(car_info_name=car.name):
make_model = (car.make, car.model)
for year in car.year_list:
self.assertNotIn(year, make_model_years[make_model], f"{car.name}: Duplicate model year")
make_model_years[make_model].append(year)
def test_missing_car_info(self):
all_car_info_platforms = get_interface_attr("CAR_INFO", combine_brands=True).keys()
for platform in sorted(interfaces.keys()):
with self.subTest(platform=platform):
self.assertTrue(platform in all_car_info_platforms, "Platform: {} doesn't exist in CarInfo".format(platform))
def test_naming_conventions(self):
# Asserts market-standard car naming conventions by brand
for car in self.all_cars:
with self.subTest(car=car):
tokens = car.model.lower().split(" ")
if car.car_name == "hyundai":
self.assertNotIn("phev", tokens, "Use `Plug-in Hybrid`")
self.assertNotIn("hev", tokens, "Use `Hybrid`")
if "plug-in hybrid" in car.model.lower():
self.assertIn("Plug-in Hybrid", car.model, "Use correct capitalization")
if car.make != "Kia":
self.assertNotIn("ev", tokens, "Use `Electric`")
elif car.car_name == "toyota":
if "rav4" in tokens:
self.assertIn("RAV4", car.model, "Use correct capitalization")
def test_torque_star(self):
# Asserts brand-specific assumptions around steering torque star
for car in self.all_cars:
with self.subTest(car=car):
# honda sanity check, it's the definition of a no torque star
if car.car_fingerprint in (HONDA.ACCORD, HONDA.CIVIC, HONDA.CRV, HONDA.ODYSSEY, HONDA.PILOT):
self.assertEqual(car.row[Column.STEERING_TORQUE], Star.EMPTY, f"{car.name} has full torque star")
elif car.car_name in ("toyota", "hyundai"):
self.assertNotEqual(car.row[Column.STEERING_TORQUE], Star.EMPTY, f"{car.name} has no torque star")
def test_year_format(self):
for car in self.all_cars:
with self.subTest(car=car):
self.assertIsNone(re.search(r"\d{4}-\d{4}", car.name), f"Format years correctly: {car.name}")
def METHOD_NAME(self):
for car in self.all_cars:
with self.subTest(car=car):
if car.name == "comma body":
raise unittest.SkipTest
car_part_type = [p.part_type for p in car.car_parts.all_parts()]
car_parts = list(car.car_parts.all_parts())
self.assertTrue(len(car_parts) > 0, f"Need to specify car parts: {car.name}")
self.assertTrue(car_part_type.count(PartType.connector) == 1, f"Need to specify one harness connector: {car.name}")
self.assertTrue(car_part_type.count(PartType.mount) == 1, f"Need to specify one mount: {car.name}")
self.assertTrue(Cable.right_angle_obd_c_cable_1_5ft in car_parts, f"Need to specify a right angle OBD-C cable (1.5ft): {car.name}")
if __name__ == "__main__":
unittest.main()
|
3,886 |
restore file
|
#
# Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For any questions about this software or licensing,
# please email [email protected] or [email protected].
#
import os
import sys
import time
import subprocess
import logging
from threading import Timer
from framework import TCTPyCliTest
from framework import Config
from framework import logit
class MMCloudTest(TCTPyCliTest):
mmcloud_cmd = "mmcloudgateway"
def __init__(self, description):
# start_clear_bd = False --> don't clear test base directory
super(MMCloudTest, self).__init__(description, clear_base_dir = False)
def setup(self):
if hasattr(self, 'filename') and hasattr(self, 'filesize'):
self.create_file_on_fs(self.filename, self.filesize)
super(MMCloudTest, self).setup()
def run(self):
super(MMCloudTest, self).run()
def teardown(self):
# No, thanks!
pass
# Create a file on GPFS
def create_file_on_fs(self, filename, filesize):
file_to_create = os.path.join(self.working_dir, filename)
logging.info("Creating file %s with size %dB", file_to_create, filesize)
with open(file_to_create, 'wb') as fout:
fout.write(os.urandom(filesize))
fout.flush()
# Modify/Update on file
def update_file_on_fs(self, filename, update_size):
file_to_update = os.path.join(self.working_dir, filename)
with open(file_to_update, 'ab') as fout:
fout.write(os.urandom(update_size))
fout.flush()
# Delete file(stub) on GPFS
def delete_local_file(self, filename):
cmd = "rm -f %s" % (filename)
self.with_cli(cmd)
return self
"""
Manual Tiering Operations
"""
# Migrate file to cloud
def migrate_file(self, filename):
cmd = "%s files migrate %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# List file details
def list_file(self, filename):
cmd = "%s files list %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# Recall file from cloud
def recall_file(self, filename):
cmd = "%s files recall %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# Restore file from cloud (when original stub file on GPFS was deleted)
def restore_file_with_dry_run(self, filename):
cmd = "%s files restore -v --dry-run %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# Restore file from cloud (when original stub file on GPFS was deleted)
def METHOD_NAME(self, filename):
cmd = "%s files restore %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# If a migrated file is removed from the file system, reconcile removes the
    # corresponding cloud objects and references contained in the cloud directory
def reconcile_files(self, option):
cmd = "%s files reconcile %s" % (self.mmcloud_cmd, option)
self.with_cli(cmd)
return self
# Delete local file and cloud data
def delete_with_delete_local_file(self, filename, keep_last_cloud_file = 'No'):
cmd = "%s files delete --delete-local-file %s" % (self.mmcloud_cmd, filename)
if keep_last_cloud_file != 'No':
cmd = "%s files delete --delete-local-file --keep-last-cloud-file %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# Recall cloud data back to local file and delete cloud data
def delete_with_recall_cloud_file(self, filename, keep_last_cloud_file = 'No'):
cmd = "%s files delete --recall-cloud-file %s" % (self.mmcloud_cmd, filename)
if keep_last_cloud_file != 'No':
cmd = "%s files delete --recall-cloud-file --keep-last-cloud-file %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# Only delete the extended attribute (requires the file data to be present locally.
# If the file is in the Non-Resident state, this operation will fail.)
def delete_with_require_local_file(self, filename, keep_last_cloud_file = 'No'):
cmd = "%s files delete --require-local-file %s" % (self.mmcloud_cmd, filename)
if keep_last_cloud_file != 'No':
cmd = "%s files delete --require-local-file --keep-last-cloud-file %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
# List files present on cloud
def cloud_list(self, path):
cmd = "%s files cloudList --path %s --recursive" % (self.mmcloud_cmd, path)
self.with_cli(cmd)
return self
# List versions of file migrated to cloud
def cloud_list_with_file_versions(self, filename):
cmd = "%s files cloudList --file-versions %s" % (self.mmcloud_cmd, filename)
self.with_cli(cmd)
return self
|
3,887 |
format config error
|
import argparse
from typing import (
Any,
Collection,
Dict,
Iterable,
Iterator,
List,
Literal,
MutableMapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
import jinja2
from synapse.config import ( # noqa: F401
account_validity,
api,
appservice,
auth,
background_updates,
cache,
captcha,
cas,
consent,
database,
emailconfig,
experimental,
federation,
jwt,
key,
logger,
metrics,
modules,
oembed,
oidc,
password_auth_providers,
push,
ratelimiting,
redis,
registration,
repository,
retention,
room,
room_directory,
saml2,
server,
server_notices,
spam_checker,
sso,
stats,
third_party_event_rules,
tls,
tracer,
user_directory,
voip,
workers,
)
from synapse.types import StrSequence
class ConfigError(Exception):
def __init__(self, msg: str, path: Optional[StrSequence] = None):
self.msg = msg
self.path = path
def METHOD_NAME(e: ConfigError) -> Iterator[str]: ...
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS: str
MISSING_REPORT_STATS_SPIEL: str
MISSING_SERVER_NAME: str
def path_exists(file_path: str) -> bool: ...
TRootConfig = TypeVar("TRootConfig", bound="RootConfig")
class RootConfig:
server: server.ServerConfig
experimental: experimental.ExperimentalConfig
tls: tls.TlsConfig
database: database.DatabaseConfig
logging: logger.LoggingConfig
ratelimiting: ratelimiting.RatelimitConfig
media: repository.ContentRepositoryConfig
oembed: oembed.OembedConfig
captcha: captcha.CaptchaConfig
voip: voip.VoipConfig
registration: registration.RegistrationConfig
account_validity: account_validity.AccountValidityConfig
metrics: metrics.MetricsConfig
api: api.ApiConfig
appservice: appservice.AppServiceConfig
key: key.KeyConfig
saml2: saml2.SAML2Config
cas: cas.CasConfig
sso: sso.SSOConfig
oidc: oidc.OIDCConfig
jwt: jwt.JWTConfig
auth: auth.AuthConfig
email: emailconfig.EmailConfig
worker: workers.WorkerConfig
authproviders: password_auth_providers.PasswordAuthProviderConfig
push: push.PushConfig
spamchecker: spam_checker.SpamCheckerConfig
room: room.RoomConfig
userdirectory: user_directory.UserDirectoryConfig
consent: consent.ConsentConfig
stats: stats.StatsConfig
servernotices: server_notices.ServerNoticesConfig
roomdirectory: room_directory.RoomDirectoryConfig
thirdpartyrules: third_party_event_rules.ThirdPartyRulesConfig
tracing: tracer.TracerConfig
redis: redis.RedisConfig
modules: modules.ModulesConfig
caches: cache.CacheConfig
federation: federation.FederationConfig
retention: retention.RetentionConfig
background_updates: background_updates.BackgroundUpdateConfig
config_classes: List[Type["Config"]] = ...
config_files: List[str]
def __init__(self, config_files: Collection[str] = ...) -> None: ...
def invoke_all(
self, func_name: str, *args: Any, **kwargs: Any
) -> MutableMapping[str, Any]: ...
@classmethod
def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ...
def parse_config_dict(
self, config_dict: Dict[str, Any], config_dir_path: str, data_dir_path: str
) -> None: ...
def generate_config(
self,
config_dir_path: str,
data_dir_path: str,
server_name: str,
generate_secrets: bool = ...,
report_stats: Optional[bool] = ...,
open_private_ports: bool = ...,
listeners: Optional[Any] = ...,
tls_certificate_path: Optional[str] = ...,
tls_private_key_path: Optional[str] = ...,
) -> str: ...
@classmethod
def load_or_generate_config(
cls: Type[TRootConfig], description: str, argv: List[str]
) -> Optional[TRootConfig]: ...
@classmethod
def load_config(
cls: Type[TRootConfig], description: str, argv: List[str]
) -> TRootConfig: ...
@classmethod
def add_arguments_to_parser(
cls, config_parser: argparse.ArgumentParser
) -> None: ...
@classmethod
def load_config_with_parser(
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv: List[str]
) -> Tuple[TRootConfig, argparse.Namespace]: ...
def generate_missing_files(
self, config_dict: dict, config_dir_path: str
) -> None: ...
@overload
def reload_config_section(
self, section_name: Literal["caches"]
) -> cache.CacheConfig: ...
@overload
def reload_config_section(self, section_name: str) -> "Config": ...
class Config:
root: RootConfig
default_template_dir: str
def __init__(self, root_config: Optional[RootConfig] = ...) -> None: ...
@staticmethod
def parse_size(value: Union[str, int]) -> int: ...
@staticmethod
def parse_duration(value: Union[str, int]) -> int: ...
@staticmethod
def abspath(file_path: Optional[str]) -> str: ...
@classmethod
def path_exists(cls, file_path: str) -> bool: ...
@classmethod
def check_file(cls, file_path: str, config_name: str) -> str: ...
@classmethod
def ensure_directory(cls, dir_path: str) -> str: ...
@classmethod
def read_file(cls, file_path: str, config_name: str) -> str: ...
def read_template(self, filenames: str) -> jinja2.Template: ...
def read_templates(
self,
filenames: List[str],
custom_template_directories: Optional[Iterable[str]] = None,
) -> List[jinja2.Template]: ...
def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: ...
def find_config_files(search_paths: List[str]) -> List[str]: ...
class ShardedWorkerHandlingConfig:
instances: List[str]
def __init__(self, instances: List[str]) -> None: ...
def should_handle(self, instance_name: str, key: str) -> bool: ... # noqa: F811
class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
def get_instance(self, key: str) -> str: ... # noqa: F811
def read_file(file_path: Any, config_path: Iterable[str]) -> str: ...
|
3,888 |
restore tilt gravity calibration points
|
from app.models import BrewPiDevice, Beer, FermentationProfile
from gravity.models import GravitySensor, GravityLog, TiltTempCalibrationPoint, TiltGravityCalibrationPoint, \
TiltConfiguration, TiltBridge, IspindelConfiguration, IspindelGravityCalibrationPoint
from constance import config
def restore_brewpi_devices(obj_list:list, update:bool) -> list:
"""Loop through a list of BrewPiDevice object dicts, call each one's from_dict() method, and then save the object"""
restore_status = []
for obj_dict in obj_list:
device = BrewPiDevice.from_dict(obj_dict, update=update)
device.save()
restore_status.append({'uuid': device.uuid, 'success': True})
return restore_status
def restore_beers(obj_list:list, update:bool) -> list:
"""Loop through a list of Beer object dicts, call each one's from_dict() method, and then save the object"""
restore_status = []
for obj_dict in obj_list:
beer = Beer.from_dict(obj_dict, update=update)
beer.save()
restore_status.append({'uuid': beer.uuid, 'success': True})
return restore_status
def restore_fermentation_profiles(obj_list:list, update:bool) -> list:
"""Loop through a list of FermentationProfile object dicts, call each one's from_dict() method, and then save the
object. This also implicitly restores all associated FermentationProfilePoint objects."""
restore_status = []
for obj_dict in obj_list:
profile = FermentationProfile.load_from_dict(obj_dict, update=update)
restore_status.append({'uuid': profile.uuid, 'success': True})
return restore_status
# Gravity functions
def restore_gravity_sensors(obj_list:list, update:bool) -> list:
"""Loop through a list of GravitySensor object dicts, call each one's from_dict() method, and then save the object"""
restore_status = []
for obj_dict in obj_list:
sensor = GravitySensor.from_dict(obj_dict, update=update)
sensor.save()
restore_status.append({'uuid': sensor.uuid, 'success': True})
return restore_status
def restore_gravity_logs(obj_list:list, update:bool) -> list:
"""Loop through a list of GravityLog object dicts, call each one's from_dict() method, and then save the object"""
restore_status = []
for obj_dict in obj_list:
log = GravityLog.from_dict(obj_dict, update=update)
log.save()
restore_status.append({'uuid': log.uuid, 'success': True})
return restore_status
def restore_tilt_temp_calibration_points(obj_list:list, update:bool) -> list:
"""Loop through a list of TiltTempCalibrationPoint object dicts, call each one's from_dict() method, and then save
the object"""
restore_status = []
for obj_dict in obj_list:
point = TiltTempCalibrationPoint.from_dict(obj_dict, update=update)
point.save()
restore_status.append({'uuid': point.uuid, 'success': True})
return restore_status
def METHOD_NAME(obj_list:list, update:bool) -> list:
"""Loop through a list of TiltGravityCalibrationPoint object dicts, call each one's from_dict() method, and then
save the object"""
restore_status = []
for obj_dict in obj_list:
point = TiltGravityCalibrationPoint.from_dict(obj_dict, update=update)
point.save()
restore_status.append({'uuid': point.uuid, 'success': True})
return restore_status
def restore_tilt_configurations(obj_list:list, update:bool) -> list:
"""Loop through a list of TiltConfiguration object dicts, call each one's from_dict() method, and then save the
object"""
restore_status = []
for obj_dict in obj_list:
tilt_config = TiltConfiguration.from_dict(obj_dict, update=update)
tilt_config.save()
restore_status.append({'uuid': tilt_config.uuid, 'success': True})
return restore_status
def restore_tiltbridges(obj_list:list, update:bool) -> list:
"""Loop through a list of TiltBridge object dicts, call each one's from_dict() method, and then save the object"""
restore_status = []
for obj_dict in obj_list:
bridge = TiltBridge.from_dict(obj_dict, update=update)
bridge.save()
restore_status.append({'uuid': bridge.uuid, 'success': True})
return restore_status
def restore_ispindel_configurations(obj_list:list, update:bool) -> list:
"""Loop through a list of IspindelConfiguration object dicts, call each one's from_dict() method, and then save the
object"""
restore_status = []
for obj_dict in obj_list:
ispindel_config = IspindelConfiguration.from_dict(obj_dict, update=update)
ispindel_config.save()
restore_status.append({'uuid': ispindel_config.uuid, 'success': True})
return restore_status
def restore_ispindel_gravity_calibration_points(obj_list:list, update:bool) -> list:
"""Loop through a list of IspindelGravityCalibrationPoint object dicts, call each one's from_dict() method, and
then save the object"""
restore_status = []
for obj_dict in obj_list:
point = IspindelGravityCalibrationPoint.from_dict(obj_dict, update=update)
point.save()
restore_status.append({'uuid': point.uuid, 'success': True})
return restore_status
def restore_fermentrack_configuration_options(obj_dict:dict):
"""Work through a dict containing all the Constance options, updating each setting to match what we were passed"""
if 'BREWERY_NAME' in obj_dict:
config.BREWERY_NAME = obj_dict['BREWERY_NAME']
if 'DATE_TIME_FORMAT_DISPLAY' in obj_dict:
config.DATE_TIME_FORMAT_DISPLAY = obj_dict['DATE_TIME_FORMAT_DISPLAY']
if 'REQUIRE_LOGIN_FOR_DASHBOARD' in obj_dict:
config.REQUIRE_LOGIN_FOR_DASHBOARD = obj_dict['REQUIRE_LOGIN_FOR_DASHBOARD']
if 'TEMPERATURE_FORMAT' in obj_dict:
config.TEMPERATURE_FORMAT = obj_dict['TEMPERATURE_FORMAT']
if 'GRAVITY_DISPLAY_FORMAT' in obj_dict:
config.GRAVITY_DISPLAY_FORMAT = obj_dict['GRAVITY_DISPLAY_FORMAT']
if 'USER_HAS_COMPLETED_CONFIGURATION' in obj_dict:
config.USER_HAS_COMPLETED_CONFIGURATION = obj_dict['USER_HAS_COMPLETED_CONFIGURATION']
if 'TEMP_CONTROL_SUPPORT_ENABLED' in obj_dict:
config.TEMP_CONTROL_SUPPORT_ENABLED = obj_dict['TEMP_CONTROL_SUPPORT_ENABLED']
if 'GRAVITY_SUPPORT_ENABLED' in obj_dict:
config.GRAVITY_SUPPORT_ENABLED = obj_dict['GRAVITY_SUPPORT_ENABLED']
# if 'LAST_GIT_CHECK' in obj_dict:
# config.LAST_GIT_CHECK = obj_dict['LAST_GIT_CHECK']
# if 'GIT_UPDATE_TYPE' in obj_dict:
# config.GIT_UPDATE_TYPE = obj_dict['GIT_UPDATE_TYPE']
if 'ALLOW_GIT_BRANCH_SWITCHING' in obj_dict:
config.ALLOW_GIT_BRANCH_SWITCHING = obj_dict['ALLOW_GIT_BRANCH_SWITCHING']
if 'PREFERRED_TIMEZONE' in obj_dict:
config.PREFERRED_TIMEZONE = obj_dict['PREFERRED_TIMEZONE']
if 'GRAPH_BEER_TEMP_COLOR' in obj_dict:
config.GRAPH_BEER_TEMP_COLOR = obj_dict['GRAPH_BEER_TEMP_COLOR']
if 'GRAPH_BEER_SET_COLOR' in obj_dict:
config.GRAPH_BEER_SET_COLOR = obj_dict['GRAPH_BEER_SET_COLOR']
if 'GRAPH_FRIDGE_TEMP_COLOR' in obj_dict:
config.GRAPH_FRIDGE_TEMP_COLOR = obj_dict['GRAPH_FRIDGE_TEMP_COLOR']
if 'GRAPH_FRIDGE_SET_COLOR' in obj_dict:
config.GRAPH_FRIDGE_SET_COLOR = obj_dict['GRAPH_FRIDGE_SET_COLOR']
if 'GRAPH_ROOM_TEMP_COLOR' in obj_dict:
config.GRAPH_ROOM_TEMP_COLOR = obj_dict['GRAPH_ROOM_TEMP_COLOR']
if 'GRAPH_GRAVITY_COLOR' in obj_dict:
config.GRAPH_GRAVITY_COLOR = obj_dict['GRAPH_GRAVITY_COLOR']
if 'GRAPH_GRAVITY_TEMP_COLOR' in obj_dict:
config.GRAPH_GRAVITY_TEMP_COLOR = obj_dict['GRAPH_GRAVITY_TEMP_COLOR']
if 'CUSTOM_THEME' in obj_dict:
config.CUSTOM_THEME = obj_dict['CUSTOM_THEME']
|
3,889 |
db for read
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2023 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404
from graphql_jwt.compat import get_operation_name
from graphql_jwt.settings import jwt_settings
from . import tenant
def allow_any(info, **kwargs):
# This code is based on S.B. answer to StackOverflow question
# "How to solve 'NoneType' object has no attribute 'fields' in
# Graphene-django" (https://stackoverflow.com/a/71296685).
try:
operation_name = get_operation_name(info.operation.operation).title()
operation_type = info.schema.get_type(operation_name)
if hasattr(operation_type, 'fields'):
field = operation_type.fields.get(info.field_name)
if field is None:
return False
else:
return False
graphene_type = getattr(field.type, "graphene_type", None)
return graphene_type is not None and issubclass(
graphene_type, tuple(jwt_settings.JWT_ALLOW_ANY_CLASSES)
)
except Exception as e:
return False
class TenantDatabaseMiddleware:
"""
Middleware to select a database depending on the user and the header.
    When the user-header pair does not match any tenant, it returns a 404 error.
    For unauthenticated users it returns the 'default' database to allow login.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
database = tenant.tenant_from_username_header(request)
if database:
tenant.set_db_tenant(database)
response = self.get_response(request)
tenant.unset_db_tenant()
return response
else:
raise Http404("Tenant not found in SortingHat.")
class TenantDatabaseRouter:
"""
This class routes database queries to the right database.
Queries to applications with labels in 'auth_app_labels' will use the 'default' database.
Queries to 'core.tenant' model will use the 'default' database too.
Queries to a different model will obtain the database name from a threading local variable
that is set for every request using a middleware.
"""
auth_app_labels = {'auth', 'contenttypes', 'admin'}
def METHOD_NAME(self, model, **hints):
if model._meta.app_label in self.auth_app_labels:
return 'default'
elif model._meta.app_label == 'core' and model._meta.model_name == 'tenant':
return 'default'
return tenant.get_db_tenant()
def db_for_write(self, model, **hints):
if model._meta.app_label in self.auth_app_labels:
return 'default'
elif model._meta.app_label == 'core' and model._meta.model_name == 'tenant':
return 'default'
return tenant.get_db_tenant()
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in the auth or contenttypes apps is
involved.
"""
if (
obj1._meta.app_label in self.auth_app_labels or
obj2._meta.app_label in self.auth_app_labels
):
return True
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
"""
Make sure the 'auth', 'contenttypes', 'admin' and 'core.tenant' apps
and models only appear in the 'default' database. Don't include any
other model in that database.
"""
if app_label in self.auth_app_labels:
return db == 'default'
elif app_label == 'core' and model_name == 'tenant':
return db == 'default'
elif db == 'default':
return False
else:
return None
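# Illustrative sketch (not part of the original module): the middleware and router
# above are typically wired up in Django settings; the dotted paths below are
# hypothetical placeholders, not the project's actual module paths.
#
#   DATABASE_ROUTERS = ["myproject.middleware.TenantDatabaseRouter"]
#   MIDDLEWARE = [
#       ...,
#       "myproject.middleware.TenantDatabaseMiddleware",
#   ]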
|
3,890 |
link
|
import sys
from _typeshed import BytesPath, FileDescriptorOrPath, GenericPath, ReadableBuffer, StrOrBytesPath, StrPath
from asyncio.events import AbstractEventLoop
from collections.abc import Sequence
from os import _ScandirIterator, stat_result
from typing import Any, AnyStr, overload
from aiofiles import ospath
from aiofiles.ospath import wrap as wrap
__all__ = [
"path",
"stat",
"rename",
"renames",
"replace",
"remove",
"unlink",
"mkdir",
"makedirs",
"rmdir",
"removedirs",
"link",
"symlink",
"readlink",
"listdir",
"scandir",
"access",
"wrap",
]
if sys.platform != "win32":
__all__ += ["statvfs", "sendfile"]
path = ospath
async def stat(
path: FileDescriptorOrPath,
*,
dir_fd: int | None = None,
follow_symlinks: bool = True,
loop: AbstractEventLoop | None = ...,
executor: Any = ...,
) -> stat_result: ...
async def rename(
src: StrOrBytesPath,
dst: StrOrBytesPath,
*,
src_dir_fd: int | None = None,
dst_dir_fd: int | None = None,
loop: AbstractEventLoop | None = ...,
executor: Any = ...,
) -> None: ...
async def renames(
old: StrOrBytesPath, new: StrOrBytesPath, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> None: ...
async def replace(
src: StrOrBytesPath,
dst: StrOrBytesPath,
*,
src_dir_fd: int | None = None,
dst_dir_fd: int | None = None,
loop: AbstractEventLoop | None = ...,
executor: Any = ...,
) -> None: ...
async def remove(
path: StrOrBytesPath, *, dir_fd: int | None = None, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> None: ...
async def unlink(
path: StrOrBytesPath, *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> None: ...
async def mkdir(
path: StrOrBytesPath, mode: int = 511, *, dir_fd: int | None = None, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> None: ...
async def makedirs(
name: StrOrBytesPath, mode: int = 511, exist_ok: bool = False, *, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> None: ...
async def METHOD_NAME(
src: StrOrBytesPath,
dst: StrOrBytesPath,
*,
src_dir_fd: int | None = ...,
dst_dir_fd: int | None = ...,
follow_symlinks: bool = ...,
loop: AbstractEventLoop | None = ...,
executor: Any = ...,
) -> None: ...
async def symlink(
src: StrOrBytesPath,
dst: StrOrBytesPath,
target_is_directory: bool = ...,
*,
dir_fd: int | None = ...,
loop: AbstractEventLoop | None = ...,
executor: Any = ...,
) -> None: ...
async def readlink(
path: AnyStr, *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> AnyStr: ...
async def rmdir(
path: StrOrBytesPath, *, dir_fd: int | None = None, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> None: ...
async def removedirs(name: StrOrBytesPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> None: ...
@overload
async def scandir(path: None = None, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> _ScandirIterator[str]: ...
@overload
async def scandir(path: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> _ScandirIterator[str]: ...
@overload
async def scandir(
path: GenericPath[AnyStr], *, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> _ScandirIterator[AnyStr]: ...
@overload
async def listdir(path: StrPath | None, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> list[str]: ...
@overload
async def listdir(path: BytesPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> list[bytes]: ...
@overload
async def listdir(path: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> list[str]: ...
async def access(
path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True
) -> bool: ...
if sys.platform != "win32":
from os import statvfs_result
@overload
async def sendfile(
out_fd: int, in_fd: int, offset: int | None, count: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> int: ...
@overload
async def sendfile(
out_fd: int,
in_fd: int,
offset: int,
count: int,
headers: Sequence[ReadableBuffer] = ...,
trailers: Sequence[ReadableBuffer] = ...,
flags: int = ...,
*,
loop: AbstractEventLoop | None = ...,
executor: Any = ...,
) -> int: ... # FreeBSD and Mac OS X only
async def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only
|
3,891 |
exp
|
#!/usr/bin/env python3
#
# Cross Platform and Multi Architecture Advanced Binary Emulation Framework
#
from eth_utils.toolz import (
curry,
)
from ... import constants
from ..._utils.numeric import (
unsigned_to_signed,
signed_to_unsigned,
ceil8,
)
from ...vm.computation import BaseComputation
def add(computation: BaseComputation) -> None:
"""
Addition
"""
left, right = computation.stack_pop_ints(2)
result = (left + right) & constants.UINT_256_MAX
computation.stack_push_int(result)
def addmod(computation: BaseComputation) -> None:
"""
Modulo Addition
"""
left, right, mod = computation.stack_pop_ints(3)
if mod == 0:
result = 0
else:
result = (left + right) % mod
computation.stack_push_int(result)
def sub(computation: BaseComputation) -> None:
"""
Subtraction
"""
left, right = computation.stack_pop_ints(2)
result = (left - right) & constants.UINT_256_MAX
computation.stack_push_int(result)
def mod(computation: BaseComputation) -> None:
"""
Modulo
"""
value, mod = computation.stack_pop_ints(2)
if mod == 0:
result = 0
else:
result = value % mod
computation.stack_push_int(result)
def smod(computation: BaseComputation) -> None:
"""
Signed Modulo
"""
value, mod = map(
unsigned_to_signed,
computation.stack_pop_ints(2),
)
pos_or_neg = -1 if value < 0 else 1
if mod == 0:
result = 0
else:
result = (abs(value) % abs(mod) * pos_or_neg) & constants.UINT_256_MAX
computation.stack_push_int(signed_to_unsigned(result))
def mul(computation: BaseComputation) -> None:
"""
Multiplication
"""
left, right = computation.stack_pop_ints(2)
result = (left * right) & constants.UINT_256_MAX
computation.stack_push_int(result)
def mulmod(computation: BaseComputation) -> None:
"""
Modulo Multiplication
"""
left, right, mod = computation.stack_pop_ints(3)
if mod == 0:
result = 0
else:
result = (left * right) % mod
computation.stack_push_int(result)
def div(computation: BaseComputation) -> None:
"""
Division
"""
numerator, denominator = computation.stack_pop_ints(2)
if denominator == 0:
result = 0
else:
result = (numerator // denominator) & constants.UINT_256_MAX
computation.stack_push_int(result)
def sdiv(computation: BaseComputation) -> None:
"""
Signed Division
"""
numerator, denominator = map(
unsigned_to_signed,
computation.stack_pop_ints(2),
)
pos_or_neg = -1 if numerator * denominator < 0 else 1
if denominator == 0:
result = 0
else:
result = (pos_or_neg * (abs(numerator) // abs(denominator)))
computation.stack_push_int(signed_to_unsigned(result))
@curry
def METHOD_NAME(computation: BaseComputation, gas_per_byte: int) -> None:
"""
Exponentiation
"""
base, exponent = computation.stack_pop_ints(2)
bit_size = exponent.bit_length()
byte_size = ceil8(bit_size) // 8
if exponent == 0:
result = 1
elif base == 0:
result = 0
else:
result = pow(base, exponent, constants.UINT_256_CEILING)
computation.consume_gas(
gas_per_byte * byte_size,
reason="EXP: exponent bytes",
)
computation.stack_push_int(result)
def signextend(computation: BaseComputation) -> None:
"""
Signed Extend
"""
bits, value = computation.stack_pop_ints(2)
if bits <= 31:
testbit = bits * 8 + 7
sign_bit = (1 << testbit)
if value & sign_bit:
result = value | (constants.UINT_256_CEILING - sign_bit)
else:
result = value & (sign_bit - 1)
else:
result = value
computation.stack_push_int(result)
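# Illustrative sketch (not part of the original module): with bits == 0 the low
# byte is treated as signed, so 0xff sign-extends to -1 in two's complement,
# i.e. all 256 bits set.
def _signextend_example() -> None:
    bits, value = 0, 0xFF
    sign_bit = 1 << (bits * 8 + 7)  # 0x80, the sign bit of the low byte
    assert value & sign_bit
    extended = value | (constants.UINT_256_CEILING - sign_bit)
    assert extended == constants.UINT_256_MAX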
def shl(computation: BaseComputation) -> None:
"""
Bitwise left shift
"""
shift_length, value = computation.stack_pop_ints(2)
if shift_length >= 256:
result = 0
else:
result = (value << shift_length) & constants.UINT_256_MAX
computation.stack_push_int(result)
def shr(computation: BaseComputation) -> None:
"""
Bitwise right shift
"""
shift_length, value = computation.stack_pop_ints(2)
if shift_length >= 256:
result = 0
else:
result = (value >> shift_length) & constants.UINT_256_MAX
computation.stack_push_int(result)
def sar(computation: BaseComputation) -> None:
"""
Arithmetic bitwise right shift
"""
shift_length, value = computation.stack_pop_ints(2)
value = unsigned_to_signed(value)
if shift_length >= 256:
result = 0 if value >= 0 else constants.UINT_255_NEGATIVE_ONE
else:
result = (value >> shift_length) & constants.UINT_256_MAX
computation.stack_push_int(result)
|
3,892 |
shutdown
|
# -*- coding: utf-8
# backend
# *******
import sys
import traceback
from twisted.application import service
from twisted.internet import reactor, defer
from twisted.python.log import ILogObserver
from twisted.python.log import addObserver
from twisted.web import resource, server
from globaleaks.jobs import job, jobs_list
from globaleaks.services import tor
from globaleaks.db import create_db, init_db, update_db, \
sync_refresh_tenant_cache, sync_clean_untracked_files, sync_initialize_snimap
from globaleaks.rest.api import APIResourceWrapper
from globaleaks.settings import Settings
from globaleaks.state import State
from globaleaks.utils.log import log, openLogFile, logFormatter, LogObserver
from globaleaks.utils.sock import listen_tcp_on_sock, listen_tls_on_sock
# Set Gzip Encoder compression level to 1 prioritizing speed over compression
server.GzipEncoderFactory.compressLevel = 1
def fail_startup(excep):
log.err("ERROR: Cannot start GlobaLeaks. Please manually examine the exception.")
log.err("EXCEPTION: %s", excep)
    log.debug('TRACE: %s', traceback.format_exc())
if reactor.running:
reactor.stop()
class Request(server.Request):
log_ip_and_ua = False
class Site(server.Site):
requestFactory = Request
def _openLogFile(self, path):
return openLogFile(path, Settings.log_file_size, Settings.num_log_files)
class Service(service.Service):
_shutdown = False
def __init__(self):
self.state = State
self.arw = resource.EncodingResourceWrapper(APIResourceWrapper(), [server.GzipEncoderFactory()])
self.api_factory = Site(self.arw, logPath=Settings.accesslogfile, logFormatter=logFormatter)
self.api_factory.displayTracebacks = False
def startService(self):
reactor.callLater(0, self.deferred_start)
def METHOD_NAME(self):
d = defer.Deferred()
def _shutdown(_):
if self._shutdown:
return
self._shutdown = True
self.state.orm_tp.stop()
d.callback(None)
reactor.callLater(30, _shutdown, None)
self.stop_jobs().addBoth(_shutdown)
return d
def start_jobs(self):
for j in jobs_list:
self.state.jobs.append(j())
self.state.tor = tor.Tor()
self.state.services.append(self.state.tor)
self.state.jobs_monitor = job.JobsMonitor(self.state.jobs)
def stop_jobs(self):
deferred_list = []
for job in self.state.jobs + self.state.services:
deferred_list.append(defer.maybeDeferred(job.stop))
if self.state.jobs_monitor is not None:
deferred_list.append(self.state.jobs_monitor.stop())
self.state.jobs_monitor = None
return defer.DeferredList(deferred_list)
def _deferred_start(self):
ret = update_db()
if ret == -1:
reactor.stop()
return
if ret == 0:
create_db()
init_db()
sync_clean_untracked_files()
if self.state.settings.migrate_only:
reactor.stop()
return
sync_refresh_tenant_cache()
sync_initialize_snimap()
self.state.orm_tp.start()
reactor.addSystemEventTrigger('before', 'shutdown', self.METHOD_NAME)
for sock in self.state.http_socks:
listen_tcp_on_sock(reactor, sock.fileno(), self.api_factory)
for sock in self.state.https_socks:
listen_tls_on_sock(reactor,
fd=sock.fileno(),
contextFactory=self.state.snimap,
factory=self.api_factory)
self.start_jobs()
self.print_listening_interfaces()
@defer.inlineCallbacks
def deferred_start(self):
try:
yield self._deferred_start()
except Exception as excep:
fail_startup(excep)
def print_listening_interfaces(self):
print("GlobaLeaks is now running and accessible at the following urls:")
tenant_cache = self.state.tenants[1].cache
if self.state.settings.devel_mode:
print("- [HTTPS]: https://127.0.0.1:8443")
elif tenant_cache.reachable_via_web:
hostname = tenant_cache.hostname if tenant_cache.hostname else '0.0.0.0'
print("- [HTTPS]: https://%s" % hostname)
if tenant_cache.onionservice:
print("- [Tor]: http://%s" % tenant_cache.onionservice)
try:
application = service.Application('GlobaLeaks')
logfile = openLogFile(Settings.logfile, Settings.log_file_size, Settings.num_log_files)
if Settings.nodaemon:
addObserver(LogObserver(logfile).emit)
else:
application.setComponent(ILogObserver, LogObserver(logfile).emit)
Service().setServiceParent(application)
except Exception as excep:
fail_startup(excep)
# Exit with non-zero exit code to signal systemd/systemV
sys.exit(55)
|
3,893 |
name
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetAuthorizationResult',
'AwaitableGetAuthorizationResult',
'get_authorization',
'get_authorization_output',
]
@pulumi.output_type
class GetAuthorizationResult:
"""
ExpressRoute Circuit Authorization
"""
def __init__(__self__, express_route_authorization_id=None, express_route_authorization_key=None, express_route_id=None, id=None, METHOD_NAME=None, provisioning_state=None, type=None):
if express_route_authorization_id and not isinstance(express_route_authorization_id, str):
raise TypeError("Expected argument 'express_route_authorization_id' to be a str")
pulumi.set(__self__, "express_route_authorization_id", express_route_authorization_id)
if express_route_authorization_key and not isinstance(express_route_authorization_key, str):
raise TypeError("Expected argument 'express_route_authorization_key' to be a str")
pulumi.set(__self__, "express_route_authorization_key", express_route_authorization_key)
if express_route_id and not isinstance(express_route_id, str):
raise TypeError("Expected argument 'express_route_id' to be a str")
pulumi.set(__self__, "express_route_id", express_route_id)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", METHOD_NAME)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(METHOD_NAME="expressRouteAuthorizationId")
def express_route_authorization_id(self) -> str:
"""
The ID of the ExpressRoute Circuit Authorization
"""
return pulumi.get(self, "express_route_authorization_id")
@property
@pulumi.getter(METHOD_NAME="expressRouteAuthorizationKey")
def express_route_authorization_key(self) -> str:
"""
The key of the ExpressRoute Circuit Authorization
"""
return pulumi.get(self, "express_route_authorization_key")
@property
@pulumi.getter(METHOD_NAME="expressRouteId")
def express_route_id(self) -> Optional[str]:
"""
The ID of the ExpressRoute Circuit
"""
return pulumi.get(self, "express_route_id")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(METHOD_NAME="provisioningState")
def provisioning_state(self) -> str:
"""
The state of the ExpressRoute Circuit Authorization provisioning
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetAuthorizationResult(GetAuthorizationResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAuthorizationResult(
express_route_authorization_id=self.express_route_authorization_id,
express_route_authorization_key=self.express_route_authorization_key,
express_route_id=self.express_route_id,
id=self.id,
METHOD_NAME=self.METHOD_NAME,
provisioning_state=self.provisioning_state,
type=self.type)
def get_authorization(authorization_name: Optional[str] = None,
private_cloud_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAuthorizationResult:
"""
ExpressRoute Circuit Authorization
Azure REST API version: 2022-05-01.
:param str authorization_name: Name of the ExpressRoute Circuit Authorization in the private cloud
:param str private_cloud_name: Name of the private cloud
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['authorizationName'] = authorization_name
__args__['privateCloudName'] = private_cloud_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:avs:getAuthorization', __args__, opts=opts, typ=GetAuthorizationResult).value
return AwaitableGetAuthorizationResult(
express_route_authorization_id=pulumi.get(__ret__, 'express_route_authorization_id'),
express_route_authorization_key=pulumi.get(__ret__, 'express_route_authorization_key'),
express_route_id=pulumi.get(__ret__, 'express_route_id'),
id=pulumi.get(__ret__, 'id'),
METHOD_NAME=pulumi.get(__ret__, 'name'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_authorization)
def get_authorization_output(authorization_name: Optional[pulumi.Input[str]] = None,
private_cloud_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAuthorizationResult]:
"""
ExpressRoute Circuit Authorization
Azure REST API version: 2022-05-01.
:param str authorization_name: Name of the ExpressRoute Circuit Authorization in the private cloud
:param str private_cloud_name: Name of the private cloud
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
...
|
3,894 |
alt weight changed
|
# (C) Copyright 2005-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
"""
Test the 'add_trait_listener', 'remove_trait_listener' interface to the
HasTraits class.
"""
import contextlib
import io
import logging
import sys
import threading
import time
import unittest
from traits.api import HasTraits, Str, Int, Float, Any, Event
from traits.api import push_exception_handler, pop_exception_handler
@contextlib.contextmanager
def captured_stderr():
"""
Return a context manager that directs all stderr output to a string.
"""
new_stderr = io.StringIO()
original_stderr = sys.stderr
sys.stderr = new_stderr
try:
yield new_stderr
finally:
sys.stderr = original_stderr
class GenerateEvents(HasTraits):
name = Str
age = Int
weight = Float
events = {} # dict of events
class ListenEvents(HasTraits):
# 'GenerateEvents' event interface:
# the events are stored in the dict 'events'
def _name_changed(self, object, name, old, new):
events["_name_changed"] = (name, old, new)
def _age_changed(self, object, name, old, new):
events["_age_changed"] = (name, old, new)
def _weight_changed(self, object, name, old, new):
events["_weight_changed"] = (name, old, new)
def alt_name_changed(self, object, name, old, new):
events["alt_name_changed"] = (name, old, new)
def METHOD_NAME(self, object, name, old, new):
events["alt_weight_changed"] = (name, old, new)
class GenerateFailingEvents(HasTraits):
name = Str
def _name_changed(self):
raise RuntimeError
class TestListeners(unittest.TestCase):
def test_listeners(self):
global events
# FIXME: comparing floats
ge = GenerateEvents()
le = ListenEvents()
# Starting test: No Listeners
ge.trait_set(name="Joe", age=22, weight=152.0)
# Adding default listener
ge.add_trait_listener(le)
events = {}
ge.trait_set(name="Mike", age=34, weight=178.0)
self.assertEqual(
events,
{
"_age_changed": ("age", 22, 34),
"_weight_changed": ("weight", 152.0, 178.0),
"_name_changed": ("name", "Joe", "Mike"),
},
)
# Adding alternate listener
ge.add_trait_listener(le, "alt")
events = {}
ge.trait_set(name="Gertrude", age=39, weight=108.0)
self.assertEqual(
events,
{
"_age_changed": ("age", 34, 39),
"_name_changed": ("name", "Mike", "Gertrude"),
"_weight_changed": ("weight", 178.0, 108.0),
"alt_name_changed": ("name", "Mike", "Gertrude"),
"alt_weight_changed": ("weight", 178.0, 108.0),
},
)
# Removing default listener
ge.remove_trait_listener(le)
events = {}
ge.trait_set(name="Sally", age=46, weight=118.0)
self.assertEqual(
events,
{
"alt_name_changed": ("name", "Gertrude", "Sally"),
"alt_weight_changed": ("weight", 108.0, 118.0),
},
)
# Removing alternate listener
ge.remove_trait_listener(le, "alt")
events = {}
ge.trait_set(name="Ralph", age=29, weight=198.0)
self.assertEqual(events, {})
def test_trait_exception_handler_can_access_exception(self):
""" Tests if trait exception handlers can access the traceback of the
exception.
"""
from traits import trait_notifiers
def _handle_exception(obj, name, old, new):
self.assertIsNotNone(sys.exc_info()[0])
ge = GenerateFailingEvents()
try:
trait_notifiers.push_exception_handler(
_handle_exception, reraise_exceptions=False, main=True
)
ge.trait_set(name="John Cleese")
finally:
trait_notifiers.pop_exception_handler()
def test_exceptions_logged(self):
# Check that default exception handling logs the exception.
ge = GenerateFailingEvents()
traits_logger = logging.getLogger("traits")
with self.assertLogs(
logger=traits_logger, level=logging.ERROR) as log_watcher:
ge.name = "Terry Jones"
self.assertEqual(len(log_watcher.records), 1)
log_record = log_watcher.records[0]
self.assertIn(
"Exception occurred in traits notification handler",
log_record.message,
)
_, exc_value, exc_traceback = log_record.exc_info
self.assertIsInstance(exc_value, RuntimeError)
self.assertIsNotNone(exc_traceback)
class A(HasTraits):
exception = Any
foo = Event
def foo_changed_handler(self):
pass
def foo_writer(a, stop_event):
while not stop_event.is_set():
try:
a.foo = True
except Exception as e:
a.exception = e
class TestRaceCondition(unittest.TestCase):
def setUp(self):
push_exception_handler(
handler=lambda *args: None, reraise_exceptions=True, main=True
)
def tearDown(self):
pop_exception_handler()
def test_listener_thread_safety(self):
# Regression test for GitHub issue #56
a = A()
stop_event = threading.Event()
t = threading.Thread(target=foo_writer, args=(a, stop_event))
t.start()
for _ in range(100):
a.on_trait_change(a.foo_changed_handler, "foo")
time.sleep(0.0001) # encourage thread-switch
a.on_trait_change(a.foo_changed_handler, "foo", remove=True)
stop_event.set()
t.join()
self.assertTrue(a.exception is None)
def test_listener_deleted_race(self):
# Regression test for exception that occurred when the listener_deleted
# method is called after the dispose method on a
# TraitsChangeNotifyWrapper.
class SlowListener(HasTraits):
def handle_age_change(self):
time.sleep(1.0)
def worker_thread(event_source, start_event):
# Wait until the listener is set up on the main thread, then fire
# the event.
start_event.wait()
event_source.age = 11
def main_thread(event_source, start_event):
listener = SlowListener()
event_source.on_trait_change(listener.handle_age_change, "age")
start_event.set()
# Allow time to make sure that we're in the middle of handling an
# event.
time.sleep(0.5)
event_source.on_trait_change(
listener.handle_age_change, "age", remove=True
)
# Previously, a ValueError would be raised on the worker thread
# during (normal refcount-based) garbage collection. That
# ValueError is ignored by the Python system, so the only
# visible effect is the output to stderr.
with captured_stderr() as s:
start_event = threading.Event()
event_source = GenerateEvents(age=10)
t = threading.Thread(
target=worker_thread, args=(event_source, start_event)
)
t.start()
main_thread(event_source, start_event)
t.join()
self.assertNotIn("Exception", s.getvalue())
|
3,895 |
get word offsets
|
# A part of NonVisual Desktop Access (NVDA)
# This file is covered by the GNU General Public License.
# See the file COPYING for more details.
# Copyright (C) 2010-2022 NV Access Limited, Soronel Haetir, Babbage B.V., Francisco Del Roio,
# Leonard de Ruijter
import objbase
import comtypes
import comtypes.client.dynamic
from locationHelper import RectLTWH
from logHandler import log
import textInfos.offsets
from NVDAObjects.behaviors import EditableText, EditableTextWithoutAutoSelectDetection
from NVDAObjects.window import Window
from comtypes.automation import IDispatch
from NVDAObjects.window import DisplayModelEditableText
from NVDAObjects.IAccessible import IAccessible
from NVDAObjects.UIA import UIA, WpfTextView, UIATextInfo
from enum import IntEnum
import appModuleHandler
import controlTypes
import threading
import UIAHandler
# A few helpful constants
# vsWindowType Enum
class VsWindowType(IntEnum):
ToolWindow = 15
Document = 16
Output = 17
# Scroll bar selector
SB_HORZ = 0
SB_VERT = 1
class AppModule(appModuleHandler.AppModule):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._DTECache = {}
vsMajor, vsMinor, rest = self.productVersion.split(".", 2)
self.vsMajor, self.vsMinor = int(vsMajor), int(vsMinor)
def chooseNVDAObjectOverlayClasses(self, obj, clsList):
if WpfTextView in clsList:
clsList.remove(WpfTextView)
clsList.insert(0, VsWpfTextView)
# Only use this overlay class if the top level automation object for the IDE can be retrieved,
# as it will not work otherwise.
elif obj.windowClassName == "VsTextEditPane" and self.DTE:
try:
clsList.remove(DisplayModelEditableText)
except ValueError:
pass
clsList[0:0] = [VsTextEditPane, EditableTextWithoutAutoSelectDetection]
elif (
(self.vsMajor == 15 and self.vsMinor >= 3)
or self.vsMajor >= 16
):
if obj.role == controlTypes.Role.TREEVIEWITEM and obj.windowClassName == "LiteTreeView32":
clsList.insert(0, ObjectsTreeItem)
def _getDTE(self):
# Retrieve the top level automation object for the IDE
bctx = objbase.CreateBindCtx()
ROT = objbase.GetRunningObjectTable()
for mon in ROT:
displayName = mon.GetDisplayName(bctx, None)
if displayName == f"!VisualStudio.DTE.{self.vsMajor}.0:{self.processID}":
return comtypes.client.dynamic.Dispatch(ROT.GetObject(mon).QueryInterface(IDispatch))
else:
# None found.
log.debugWarning("No top level automation object found", exc_info=True)
return None
def _get_DTE(self):
thread = threading.get_ident()
# Return the already fetched instance if there is one.
DTE = self._DTECache.get(thread)
if DTE:
return DTE
DTE = self._DTECache[thread] = self._getDTE()
return DTE
class VsWpfTextViewTextInfo(UIATextInfo):
def _getLineNumberString(self, textRange):
# Visual Studio exposes line numbers as part of the actual text.
# We want to store the line number in a format field instead.
lineNumberRange = textRange.Clone()
lineNumberRange.MoveEndpointByRange(
UIAHandler.TextPatternRangeEndpoint_End,
lineNumberRange,
UIAHandler.TextPatternRangeEndpoint_Start
)
return lineNumberRange.GetText(-1)
def _getFormatFieldAtRange(self, textRange, formatConfig, ignoreMixedValues=False):
formatField = super()._getFormatFieldAtRange(textRange, formatConfig, ignoreMixedValues=ignoreMixedValues)
if not formatField or not formatConfig['reportLineNumber']:
return formatField
lineNumberStr = self._getLineNumberString(textRange)
if lineNumberStr:
try:
formatField.field['line-number'] = int(lineNumberStr)
except ValueError:
log.debugWarning(
f"Couldn't parse {lineNumberStr} as integer to report a line number",
exc_info=True
)
return formatField
def _getTextFromUIARange(self, textRange):
text = super()._getTextFromUIARange(textRange)
lineNumberStr = self._getLineNumberString(textRange)
return text[(0 if not lineNumberStr else len(lineNumberStr)):]
class VsWpfTextView(WpfTextView):
TextInfo = VsWpfTextViewTextInfo
class VsTextEditPaneTextInfo(textInfos.offsets.OffsetsTextInfo):
def _get__selectionObject(self):
window = self.obj._window
if window.Type == VsWindowType.Document:
selection = window.Selection
elif window.Type == VsWindowType.Output:
selection = window.Object.ActivePane.TextDocument.Selection
elif window.Type == VsWindowType.ToolWindow:
selection = window.Object.TextDocument.Selection
else:
raise RuntimeError(f"Unknown window type: {window.Kind}")
self._selectionObject = selection
return selection
def _createEditPoint(self):
return self._selectionObject.ActivePoint.CreateEditPoint()
def _getCaretOffset(self):
return self._createEditPoint().AbsoluteCharOffset - 1
def _setCaretOffset(self, offset):
self._selectionObject.MoveToAbsoluteOffset(offset + 1)
def _setSelectionOffsets(self, start, end):
self._selectionObject.MoveToAbsoluteOffset(start + 1)
self._selectionObject.MoveToAbsoluteOffset(end + 1, True)
def _getSelectionOffsets(self):
caretPos = self._getCaretOffset()
anchorPos = self._selectionObject.AnchorPoint.CreateEditPoint().AbsoluteCharOffset - 1
return min(caretPos, anchorPos), max(caretPos, anchorPos)
def _getTextRange(self, start, end):
editPointStart = self._createEditPoint()
editPointStart.MoveToAbsoluteOffset(start + 1)
return editPointStart.GetText(end - start)
def METHOD_NAME(self, offset):
editPointEnd = self._createEditPoint()
editPointEnd.MoveToAbsoluteOffset(offset + 1)
editPointEnd.WordRight()
editPointStart = editPointEnd.CreateEditPoint()
editPointStart.WordLeft()
return editPointStart.AbsoluteCharOffset - 1, editPointEnd.AbsoluteCharOffset - 1
def _getLineOffsets(self, offset):
editPointStart = self._createEditPoint()
editPointStart.MoveToAbsoluteOffset(offset + 1)
editPointStart.StartOfLine()
editPointEnd = editPointStart.CreateEditPoint()
editPointEnd.EndOfLine()
# Offsets are one based and exclusive
return editPointStart.AbsoluteCharOffset - 1, editPointEnd.AbsoluteCharOffset
def _getLineNumFromOffset(self, offset):
editPoint = self._createEditPoint()
editPoint.MoveToAbsoluteOffset(offset + 1)
return editPoint.Line
def _getStoryLength(self):
editPoint = self._createEditPoint()
editPoint.EndOfDocument()
return editPoint.AbsoluteCharOffset - 1
class VsTextEditPane(EditableText, Window):
def _get_TextInfo(self):
try:
if self._window.Type in iter(VsWindowType):
return VsTextEditPaneTextInfo
else:
log.debugWarning(
f"Retrieved Visual Studio window object, but unknown type: {self._window.Type}"
)
except Exception:
log.debugWarning("Couldn't retrieve Visual Studio window object", exc_info=True)
return super().TextInfo
def initOverlayClass(self):
self._window = self.appModule.DTE.ActiveWindow
def _get_location(self):
if not isinstance(self, UIA):
return RectLTWH(
self._window.Left,
self._window.Top,
self._window.Width,
self._window.Height
)
return super().location
def event_valueChange(self):
pass
class ObjectsTreeItem(IAccessible):
def _get_focusRedirect(self):
"""
Returns the correct focused item in the object explorer trees
"""
if controlTypes.State.FOCUSED not in self.states:
# Object explorer tree views have a bad IAccessible implementation.
# When expanding a primary node and going to secondary node, the
# focus is placed to the next root node, so we need to redirect
# it to the real focused widget. Fortunately, the states are
# still correct and we can detect if this is really focused or not.
return self.objectWithFocus()
def _get_positionInfo(self):
return {
"level": int(self.IAccessibleObject.accValue(self.IAccessibleChildID))
}
|
3,896 |
search checklist templates
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from ..common import CommonService_pb2 as common_dot_CommonService__pb2
from ..registry import ChecklistService_pb2 as registry_dot_ChecklistService__pb2
class ChecklistServiceStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.searchChecklistItemValues = channel.unary_unary(
'/ai.verta.registry.ChecklistService/searchChecklistItemValues',
request_serializer=registry_dot_ChecklistService__pb2.SearchChecklistItemValues.SerializeToString,
response_deserializer=registry_dot_ChecklistService__pb2.SearchChecklistItemValues.Response.FromString,
)
self.METHOD_NAME = channel.unary_unary(
'/ai.verta.registry.ChecklistService/searchChecklistTemplates',
request_serializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.SerializeToString,
response_deserializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.Response.FromString,
)
self.searchChecklistTemplatesV1 = channel.unary_unary(
'/ai.verta.registry.ChecklistService/searchChecklistTemplatesV1',
request_serializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.SerializeToString,
response_deserializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.Response.FromString,
)
self.setChecklistItemValues = channel.unary_unary(
'/ai.verta.registry.ChecklistService/setChecklistItemValues',
request_serializer=registry_dot_ChecklistService__pb2.SetChecklistItemValues.SerializeToString,
response_deserializer=registry_dot_ChecklistService__pb2.SetChecklistItemValues.Response.FromString,
)
self.setChecklistTemplate = channel.unary_unary(
'/ai.verta.registry.ChecklistService/setChecklistTemplate',
request_serializer=registry_dot_ChecklistService__pb2.SetChecklistTemplate.SerializeToString,
response_deserializer=registry_dot_ChecklistService__pb2.SetChecklistTemplate.Response.FromString,
)
self.deleteChecklistTemplate = channel.unary_unary(
'/ai.verta.registry.ChecklistService/deleteChecklistTemplate',
request_serializer=registry_dot_ChecklistService__pb2.DeleteChecklistTemplate.SerializeToString,
response_deserializer=common_dot_CommonService__pb2.Empty.FromString,
)
class ChecklistServiceServicer(object):
# missing associated documentation comment in .proto file
pass
def searchChecklistItemValues(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def METHOD_NAME(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def searchChecklistTemplatesV1(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def setChecklistItemValues(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def setChecklistTemplate(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def deleteChecklistTemplate(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_ChecklistServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'searchChecklistItemValues': grpc.unary_unary_rpc_method_handler(
servicer.searchChecklistItemValues,
request_deserializer=registry_dot_ChecklistService__pb2.SearchChecklistItemValues.FromString,
response_serializer=registry_dot_ChecklistService__pb2.SearchChecklistItemValues.Response.SerializeToString,
),
'searchChecklistTemplates': grpc.unary_unary_rpc_method_handler(
servicer.METHOD_NAME,
request_deserializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.FromString,
response_serializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.Response.SerializeToString,
),
'searchChecklistTemplatesV1': grpc.unary_unary_rpc_method_handler(
servicer.searchChecklistTemplatesV1,
request_deserializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.FromString,
response_serializer=registry_dot_ChecklistService__pb2.SearchChecklistTemplates.Response.SerializeToString,
),
'setChecklistItemValues': grpc.unary_unary_rpc_method_handler(
servicer.setChecklistItemValues,
request_deserializer=registry_dot_ChecklistService__pb2.SetChecklistItemValues.FromString,
response_serializer=registry_dot_ChecklistService__pb2.SetChecklistItemValues.Response.SerializeToString,
),
'setChecklistTemplate': grpc.unary_unary_rpc_method_handler(
servicer.setChecklistTemplate,
request_deserializer=registry_dot_ChecklistService__pb2.SetChecklistTemplate.FromString,
response_serializer=registry_dot_ChecklistService__pb2.SetChecklistTemplate.Response.SerializeToString,
),
'deleteChecklistTemplate': grpc.unary_unary_rpc_method_handler(
servicer.deleteChecklistTemplate,
request_deserializer=registry_dot_ChecklistService__pb2.DeleteChecklistTemplate.FromString,
response_serializer=common_dot_CommonService__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'ai.verta.registry.ChecklistService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
|
3,897 |
execute operations
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network private-dns zone wait",
)
class Wait(AAZWaitCommand):
"""Place the CLI in a waiting state until a condition is met.
"""
_aaz_info = {
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/privatednszones/{}", "2018-09-01"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
self.METHOD_NAME()
return self._output()
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.name = AAZStrArg(
options=["-n", "--name"],
help="Name of the Private DNS zone.",
required=True,
id_part="name",
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
return cls._args_schema
def METHOD_NAME(self):
self.pre_operations()
self.PrivateZonesGet(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance, client_flatten=False)
return result
class PrivateZonesGet(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/privateDnsZones/{privateZoneName}",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"privateZoneName", self.ctx.args.name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2018-09-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.etag = AAZStrType()
_schema_on_200.id = AAZStrType(
flags={"read_only": True},
)
_schema_on_200.location = AAZStrType()
_schema_on_200.name = AAZStrType(
flags={"read_only": True},
)
_schema_on_200.properties = AAZObjectType(
flags={"client_flatten": True},
)
_schema_on_200.tags = AAZDictType()
_schema_on_200.type = AAZStrType(
flags={"read_only": True},
)
properties = cls._schema_on_200.properties
properties.max_number_of_record_sets = AAZIntType(
serialized_name="maxNumberOfRecordSets",
flags={"read_only": True},
)
properties.max_number_of_virtual_network_links = AAZIntType(
serialized_name="maxNumberOfVirtualNetworkLinks",
flags={"read_only": True},
)
properties.max_number_of_virtual_network_links_with_registration = AAZIntType(
serialized_name="maxNumberOfVirtualNetworkLinksWithRegistration",
flags={"read_only": True},
)
properties.number_of_record_sets = AAZIntType(
serialized_name="numberOfRecordSets",
flags={"read_only": True},
)
properties.number_of_virtual_network_links = AAZIntType(
serialized_name="numberOfVirtualNetworkLinks",
flags={"read_only": True},
)
properties.number_of_virtual_network_links_with_registration = AAZIntType(
serialized_name="numberOfVirtualNetworkLinksWithRegistration",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
tags = cls._schema_on_200.tags
tags.Element = AAZStrType()
return cls._schema_on_200
class _WaitHelper:
"""Helper class for Wait"""
__all__ = ["Wait"]
|
3,898 |
create bento servicer
|
from __future__ import annotations
import logging
import sys
import typing as t
from typing import TYPE_CHECKING
import anyio
from ......exceptions import BentoMLException
from ......exceptions import InvalidArgument
from ......grpc.utils import grpc_status_code
from ......grpc.utils import import_generated_stubs
from ......grpc.utils import import_grpc
from ......grpc.utils import validate_proto_fields
from .....utils import LazyLoader
from .....utils import is_async_callable
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from logging import _ExcInfoType as ExcInfoType # type: ignore (private warning)
import grpc
from google.protobuf import struct_pb2
from ......grpc.types import BentoServicerContext
from ......grpc.v1 import service_pb2 as pb
from ......grpc.v1 import service_pb2_grpc as services
from .....service.service import Service
else:
grpc, _ = import_grpc()
pb, services = import_generated_stubs(version="v1")
struct_pb2 = LazyLoader("struct_pb2", globals(), "google.protobuf.struct_pb2")
def log_exception(request: pb.Request, exc_info: ExcInfoType) -> None:
# gRPC will always send a POST request.
logger.error("Exception on /%s [POST]", request.api_name, exc_info=exc_info)
def METHOD_NAME(service: Service) -> services.BentoServiceServicer:
"""
This is the actual implementation of BentoServicer.
Main inference entrypoint will be invoked via /bentoml.grpc.<version>.BentoService/Call
"""
class BentoServiceImpl(services.BentoServiceServicer):
"""An asyncio implementation of BentoService servicer."""
async def Call( # type: ignore (no async types) # pylint: disable=invalid-overridden-method
self,
request: pb.Request,
context: BentoServicerContext,
) -> pb.Response | None:
if request.api_name not in service.apis:
raise InvalidArgument(
f"given 'api_name' is not defined in {service.name}",
) from None
api = service.apis[request.api_name]
response = pb.Response()
output = None
# NOTE: since IODescriptor.proto_fields is a tuple, the order is preserved.
# This is important so that we know the order of fields to process.
# We will use fields descriptor to determine how to process that request.
try:
# we will check if the given fields list contains a pb.Multipart.
input_proto = getattr(
request,
validate_proto_fields(request.WhichOneof("content"), api.input),
)
input_data = await api.input.from_proto(input_proto)
if is_async_callable(api.func):
if api.multi_input:
output = await api.func(**input_data)
else:
output = await api.func(input_data)
else:
if api.multi_input:
output = await anyio.to_thread.run_sync(lambda: api.func(**input_data))
else:
output = await anyio.to_thread.run_sync(api.func, input_data)
res = await api.output.to_proto(output)
# TODO(aarnphm): support multiple proto fields
response = pb.Response(**{api.output.proto_fields[0]: res})
except BentoMLException as e:
log_exception(request, sys.exc_info())
if output is not None:
import inspect
signature = inspect.signature(api.output.to_proto)
param = next(iter(signature.parameters.values()))
ann = ""
if param is not inspect.Parameter.empty:
ann = param.annotation
# more descriptive errors if output is available
logger.error(
"Function '%s' has 'input=%s,output=%s' as IO descriptor, and returns 'result=%s', while expected return type is '%s'",
api.name,
api.input,
api.output,
type(output),
ann,
)
await context.abort(code=grpc_status_code(e), details=e.message)
except (RuntimeError, TypeError, NotImplementedError):
log_exception(request, sys.exc_info())
await context.abort(
code=grpc.StatusCode.INTERNAL,
details="A runtime error has occurred, see stacktrace from logs.",
)
except Exception: # pylint: disable=broad-except
log_exception(request, sys.exc_info())
await context.abort(
code=grpc.StatusCode.INTERNAL,
details="An error has occurred in BentoML user code when handling this request, find the error details in server logs.",
)
return response
async def ServiceMetadata( # type: ignore (no async types) # pylint: disable=invalid-overridden-method
self,
request: pb.ServiceMetadataRequest, # pylint: disable=unused-argument
context: BentoServicerContext, # pylint: disable=unused-argument
) -> pb.ServiceMetadataResponse:
return pb.ServiceMetadataResponse(
name=service.name,
docs=service.doc,
apis=[
pb.ServiceMetadataResponse.InferenceAPI(
name=api.name,
docs=api.doc,
input=make_descriptor_spec(
api.input.to_spec(), pb.ServiceMetadataResponse
),
output=make_descriptor_spec(
api.output.to_spec(), pb.ServiceMetadataResponse
),
)
for api in service.apis.values()
],
)
return BentoServiceImpl()
if TYPE_CHECKING:
NestedDictStrAny = dict[str, dict[str, t.Any] | t.Any]
TupleAny = tuple[t.Any, ...]
def _tuple_converter(d: NestedDictStrAny | None) -> NestedDictStrAny | None:
# handles case for struct_pb2.Value where nested items are tuple.
# if that is the case, then convert to list.
# This dict is only one level deep, as we don't allow nested Multipart.
if d is not None:
for key, value in d.items():
if isinstance(value, tuple):
d[key] = list(t.cast("TupleAny", value))
elif isinstance(value, dict):
d[key] = _tuple_converter(t.cast("NestedDictStrAny", value))
return d
def make_descriptor_spec(
spec: dict[str, t.Any] | None, pb: type[pb.ServiceMetadataResponse]
) -> pb.ServiceMetadataResponse.DescriptorMetadata:
from .....io_descriptors.json import parse_dict_to_proto
if spec is not None:
descriptor_id = spec.pop("id")
return pb.DescriptorMetadata(
descriptor_id=descriptor_id,
attributes=struct_pb2.Struct(
fields={
key: parse_dict_to_proto(
_tuple_converter(value), struct_pb2.Value()
)
for key, value in spec.items()
}
),
)
return pb.DescriptorMetadata()
|
3,899 |
package
|
import os
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.env import Environment, VirtualBuildEnv
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.gnu import AutotoolsToolchain, Autotools
from conan.tools.microsoft import check_min_vs, is_msvc, unix_path, unix_path_package_info_legacy
from conan.tools.scm import Version
required_conan_version = ">=1.57.0"
class GetTextConan(ConanFile):
name = "gettext"
package_type = "application"
description = "An internationalization and localization system for multilingual programs"
topics = ("intl", "libintl", "i18n")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://www.gnu.org/software/gettext"
license = "GPL-3.0-or-later"
settings = "os", "arch", "compiler", "build_type"
@property
def _settings_build(self):
return getattr(self, "settings_build", self.settings)
def export_sources(self):
export_conandata_patches(self)
def configure(self):
self.settings.rm_safe("compiler.libcxx")
self.settings.rm_safe("compiler.cppstd")
def requirements(self):
self.requires("libiconv/1.17")
def build_requirements(self):
if self._settings_build.os == "Windows" and not self.conf.get("tools.microsoft.bash:path", check_type=str):
self.win_bash = True
self.tool_requires("msys2/cci.latest")
if is_msvc(self):
self.build_requires("automake/1.16.5")
def validate(self):
if Version(self.version) < "0.21" and is_msvc(self):
raise ConanInvalidConfiguration("MSVC builds of gettext for versions < 0.21 are not supported.") # FIXME: it used to be possible. What changed?
def package_id(self):
del self.info.settings.compiler
def source(self):
get(self, **self.conan_data["sources"][self.version],
destination=self.source_folder, strip_root=True)
def generate(self):
env = VirtualBuildEnv(self)
env.generate()
tc = AutotoolsToolchain(self)
libiconv = self.dependencies["libiconv"]
libiconv_root = unix_path(self, libiconv.package_folder)
tc.configure_args.extend([
"HELP2MAN=/bin/true",
"EMACS=no",
"--datarootdir=${prefix}/res",
"--with-libiconv-prefix={}".format(libiconv_root),
"--disable-shared",
"--disable-static",
"--disable-nls",
"--disable-dependency-tracking",
"--enable-relocatable",
"--disable-c++",
"--disable-java",
"--disable-csharp",
"--disable-libasprintf",
"--disable-curses",
])
if is_msvc(self):
if check_min_vs(self, "180", raise_invalid=False):
tc.extra_cflags.append("-FS") #TODO: reference github issue
# The flag above `--with-libiconv-prefix` fails to correctly detect libiconv on windows+msvc
# so it needs an extra nudge. We could use `AutotoolsDeps` but it's currently affected by the
# following outstanding issue: https://github.com/conan-io/conan/issues/12784
iconv_includedir = unix_path(self, libiconv.cpp_info.aggregated_components().includedirs[0])
iconv_libdir = unix_path(self, libiconv.cpp_info.aggregated_components().libdirs[0])
tc.extra_cflags.append(f"-I{iconv_includedir}")
tc.extra_ldflags.append(f"-L{iconv_libdir}")
env = Environment()
compile_wrapper = self.dependencies.build["automake"].conf_info.get("user.automake:compile-wrapper")
lib_wrapper = self.dependencies.build["automake"].conf_info.get("user.automake:lib-wrapper")
env.define("CC", "{} cl -nologo".format(unix_path(self, compile_wrapper)))
env.define("LD", "link -nologo")
env.define("NM", "dumpbin -symbols")
env.define("STRIP", ":")
env.define("AR", "{} lib".format(unix_path(self, lib_wrapper)))
env.define("RANLIB", ":")
# One of the checks performed by the configure script requires this as a preprocessor flag
# rather than a C compiler flag
env.prepend("CPPFLAGS", f"-I{iconv_includedir}")
windres_arch = {"x86": "i686", "x86_64": "x86-64"}[str(self.settings.arch)]
env.define("RC", f"windres --target=pe-{windres_arch}")
env.vars(self).save_script("conanbuild_msvc")
tc.generate()
def build(self):
apply_conandata_patches(self)
autotools = Autotools(self)
autotools.configure()
autotools.make()
def METHOD_NAME(self):
autotools = Autotools(self)
autotools.install()
copy(self, pattern="COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
rmdir(self, os.path.join(self.package_folder, "lib"))
rmdir(self, os.path.join(self.package_folder, "include"))
rmdir(self, os.path.join(self.package_folder, "share", "doc"))
rmdir(self, os.path.join(self.package_folder, "share", "info"))
rmdir(self, os.path.join(self.package_folder, "share", "man"))
def package_info(self):
self.cpp_info.libdirs = []
self.cpp_info.includedirs = []
aclocal = os.path.join(self.package_folder, "res", "aclocal")
autopoint = os.path.join(self.package_folder, "bin", "autopoint")
self.buildenv_info.append_path("ACLOCAL_PATH", aclocal)
self.buildenv_info.define_path("AUTOPOINT", autopoint)
# TODO: the following can be removed when the recipe supports Conan >= 2.0 only
bindir = os.path.join(self.package_folder, "bin")
self.output.info("Appending PATH environment variable: {}".format(bindir))
self.env_info.PATH.append(bindir)
self.output.info("Appending AUTOMAKE_CONAN_INCLUDES environment variable: {}".format(aclocal))
self.env_info.AUTOMAKE_CONAN_INCLUDES.append(unix_path_package_info_legacy(self, aclocal))
self.output.info("Setting AUTOPOINT environment variable: {}".format(autopoint))
self.env_info.AUTOPOINT = unix_path_package_info_legacy(self, autopoint)
|