id (int64, 0 to 300k) | label (string, 1 to 74 chars, nullable) | text (string, 4k to 8k chars) |
---|---|---|
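Each row below pairs a numeric id with a label (which appears to be the natural-language form of the method masked as METHOD_NAME in the code) and a text field holding the surrounding source file. A minimal sketch of iterating such records in Python follows; the JSON Lines filename and the exact field names are assumptions about how the table might be exported, not part of this dump.

import json

def iter_records(path="methods.jsonl"):
    """Yield (id, label, text) tuples from a JSON Lines export of the table below."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            yield row["id"], row["label"], row["text"]

# Example: count how many samples still contain the METHOD_NAME placeholder.
# placeholder_count = sum("METHOD_NAME" in text for _, _, text in iter_records())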
4,300 |
set up
|
# coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the controller managing incoming feedback reports."""
from __future__ import annotations
import datetime
from core import android_validation_constants
from core.domain import app_feedback_report_domain
from core.platform import models
from core.tests import test_utils
from typing import Any, Dict, Sequence
MYPY = False
if MYPY: # pragma: no cover
from mypy_imports import app_feedback_report_models
(app_feedback_report_models,) = models.Registry.import_models(
[models.Names.APP_FEEDBACK_REPORT])
REPORT_JSON: app_feedback_report_domain.AndroidFeedbackReportDict = {
'platform_type': 'android',
'android_report_info_schema_version': 1,
'app_context': {
'entry_point': {
'entry_point_name': 'navigation_drawer',
'entry_point_exploration_id': None,
'entry_point_story_id': None,
'entry_point_topic_id': None,
'entry_point_subtopic_id': None,
},
'text_size': 'large_text_size',
'text_language_code': 'en',
'audio_language_code': 'en',
'only_allows_wifi_download_and_update': True,
'automatically_update_topics': False,
'account_is_profile_admin': False,
'event_logs': ['example', 'event'],
'logcat_logs': ['example', 'log']
},
'device_context': {
'android_device_model': 'example_model',
'android_sdk_version': 23,
'build_fingerprint': 'example_fingerprint_id',
'network_type': 'wifi'
},
'report_submission_timestamp_sec': 1615519337,
'report_submission_utc_offset_hrs': 0,
'system_context': {
'platform_version': '0.1-alpha-abcdef1234',
'package_version_code': 1,
'android_device_country_locale_code': 'in',
'android_device_language_locale_code': 'en'
},
'user_supplied_feedback': {
'report_type': 'suggestion',
'category': 'language_suggestion',
'user_feedback_selected_items': [],
'user_feedback_other_text_input': 'french'
}
}
# Webtest requires explicit str-types rather than UNICODE for headers.
ANDROID_API_KEY_STRING = str(android_validation_constants.ANDROID_API_KEY) # pylint: disable=disallowed-function-calls
ANDROID_APP_PACKAGE_NAME_STRING = str( # pylint: disable=disallowed-function-calls
android_validation_constants.ANDROID_APP_PACKAGE_NAME)
ANDROID_APP_VERSION_NAME_STRING = str('1.0.0-flavor-commithash') # pylint: disable=disallowed-function-calls
ANDROID_APP_VERSION_CODE_STRING = str('2') # pylint: disable=disallowed-function-calls
class IncomingAndroidFeedbackReportHandlerTests(test_utils.GenericTestBase):
def METHOD_NAME(self) -> None:
super().METHOD_NAME()
self.payload = {
'report': REPORT_JSON
}
def test_incoming_report_saves_to_storage(self) -> None:
# Webapp header values must be Python str types, otherwise an
# AssertionError for "not a string" is thrown.
headers = {
'api_key': ANDROID_API_KEY_STRING,
'app_package_name': ANDROID_APP_PACKAGE_NAME_STRING,
'app_version_name': ANDROID_APP_VERSION_NAME_STRING,
'app_version_code': ANDROID_APP_VERSION_CODE_STRING
}
self._post_json_with_test_headers(self.payload, headers)
report_model_class = app_feedback_report_models.AppFeedbackReportModel
all_reports: Sequence[
app_feedback_report_models.AppFeedbackReportModel] = (
report_model_class.get_all().fetch())
self.assertEqual(len(all_reports), 1)
report_model: app_feedback_report_models.AppFeedbackReportModel = (
all_reports[0])
self.assertEqual(report_model.platform, 'android')
self.assertEqual(
report_model.submitted_on,
datetime.datetime.fromtimestamp(1615519337))
def test_incoming_report_with_invalid_headers_raises_exception(
self
) -> None:
token = self.get_new_csrf_token()
# Webtest requires explicit str-types headers.
invalid_headers = {
'api_key': str('bad_key'), # pylint: disable=disallowed-function-calls
'app_package_name': str('bad_package_name'), # pylint: disable=disallowed-function-calls
'app_version_name': str('bad_version_name'), # pylint: disable=disallowed-function-calls
'app_version_code': str('bad_version_code'), # pylint: disable=disallowed-function-calls
}
response = self.post_json(
android_validation_constants.INCOMING_ANDROID_FEEDBACK_REPORT_URL,
self.payload, headers=invalid_headers, csrf_token=token,
expected_status_int=401)
self.assertEqual(
response['error'],
'The incoming request is not a valid Oppia Android request.')
def test_incoming_report_with_no_headers_raises_exception(self) -> None:
token = self.get_new_csrf_token()
self.post_json(
android_validation_constants.INCOMING_ANDROID_FEEDBACK_REPORT_URL,
self.payload, csrf_token=token, expected_status_int=500)
# Here we use type Any because this method returns a JSON response dict
# whose values can be of different types, such as int, bool and str.
def _post_json_with_test_headers(
self,
payload: Dict[
str, app_feedback_report_domain.AndroidFeedbackReportDict
],
headers: Dict[str, str],
expected_status: int=200
) -> Dict[str, Any]:
"""Sends a post request usint str-type representations of the header
values so that header validation is successful.
Args:
payload: dict. The request payload of a feedback report.
headers: dict. The request headers; values must be str-type for
webtest to properly parse them.
expected_status: int. The expected response status of the
request.
Returns:
dict. The JSON response for the request in dict form.
"""
# Webapp requires the header values to be str-types, so they must have
# parity for the tests to correctly check these fields.
token = self.get_new_csrf_token()
with self.swap(
android_validation_constants, 'ANDROID_API_KEY',
ANDROID_API_KEY_STRING):
with self.swap(
android_validation_constants, 'ANDROID_APP_PACKAGE_NAME',
ANDROID_APP_PACKAGE_NAME_STRING):
return (
self.post_json(
android_validation_constants.INCOMING_ANDROID_FEEDBACK_REPORT_URL, # pylint: disable=line-too-long
payload, headers=headers, csrf_token=token,
expected_status_int=expected_status
)
)
|
4,301 |
test create forcefield from url iterator
|
import pytest
from openff.toolkit import ForceField
class StaleForceFieldTests:
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_create_forcefield_from_file_list(self):
# These offxml files are located in the package data path, which is automatically installed and searched
file_paths = [smirnoff99Frosst_offxml_file_path, tip3p_offxml_file_path]
# Create a forcefield from multiple offxml files
ForceField(file_paths)
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_create_forcefield_from_file_path_iterator(self):
# These offxml files are located in the package data path, which is automatically installed and searched
file_paths = [smirnoff99Frosst_offxml_file_path, tip3p_offxml_file_path]
# A generator should work as well
ForceField(iter(file_paths))
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_create_gbsa(self):
"""Test reading of ffxml files with GBSA support."""
ForceField("test_forcefields/Frosst_AlkEthOH_GBSA.offxml")
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_create_forcefield_from_url(self):
urls = [
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/test_forcefield.offxml",
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/tip3p.offxml",
]
# Test creation with smirnoff99frosst URL
ForceField(urls[0])
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_create_forcefield_from_url_list(self):
urls = [
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/test_forcefield.offxml",
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/tip3p.offxml",
]
# Test creation with multiple URLs
ForceField(urls)
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def METHOD_NAME(self):
urls = [
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/test_forcefield.offxml",
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/tip3p.offxml",
]
# A generator should work as well
ForceField(iter(urls))
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_charge_increment(self):
"""Test parameter assignment using smirnoff99Frosst on laromustine with ChargeIncrementModel."""
molecules_file_path = get_data_file_path("molecules/laromustine_tripos.mol2")
molecule = Molecule.from_file(molecules_file_path)
forcefield = ForceField(
["test_forcefields/test_forcefield.offxml", "chargeincrement-test"]
)
check_system_creation_from_molecule(forcefield, molecule)
# TODO: We can't implement a test for chargeincrement yet because we
# haven't settled on a SMIRNOFF spec for chargeincrementmodel
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_create_system_molecules_parmatfrosst_gbsa(self):
"""Test creation of a System object from small molecules to test parm@frosst force field with GBSA support."""
molecules_file_path = get_data_file_path(
"molecules/AlkEthOH_test_filt1_tripos.mol2"
)
check_parameter_assignment(
offxml_file_path="test_forcefields/Frosst_AlkEthOH_GBSA.offxml",
molecules_file_path=molecules_file_path,
)
# TODO: Figure out if we just want to check that energy is finite (this is what the original test did),
# or compare numerically to a reference system.
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_deep_copy(self):
force_field = ForceField(smirnoff99Frosst_offxml_file_path)
# Deep copy
force_field2 = copy.deepcopy(force_field)
assert_forcefields_equal(
force_field,
force_field2,
"ForceField deep copy does not match original ForceField",
)
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
# TODO: This should check the output of forcefield.to_dict
def test_serialize(self):
force_field = ForceField(smirnoff99Frosst_offxml_file_path)
# Serialize/deserialize
serialized_forcefield = force_field.__getstate__()
force_field2 = ForceField.__setstate__(serialized_forcefield)
assert_forcefields_equal(
force_field,
force_field2,
"Deserialized serialized ForceField does not match original ForceField",
)
@pytest.mark.skip(reason="Needs to be updated for 0.2.0 syntax")
def test_electrostatics_options(self):
"""Test parameter assignment using smirnoff99Frosst on laromustine with various long-range electrostatics options."""
from functools import partial
molecules_file_path = get_data_file_path("molecules/laromustine_tripos.mol2")
molecule = Molecule.from_file(molecules_file_path)
forcefield = ForceField(
[smirnoff99Frosst_offxml_file_path, charge_increment_offxml_file_path]
)
for method in ["PME", "reaction-field", "Coulomb"]:
# Change electrostatics method
forcefield.forces["Electrostatics"].method = method
f = partial(check_system_creation_from_molecule, forcefield, molecule)
f.description = "Testing {} parameter assignment using molecule {}".format(
offxml_file_path, molecule.name
)
# yield f
# TODO: Implement a similar test, where we compare OpenMM energy evals from an
# AMBER-parameterized system to OFF-parameterized systems
class StaleIOTests:
@pytest.mark.skip(reason="Needs to be updated for 1.0.0 syntax")
def test_to_xml(self):
forcefield = ForceField(smirnoff99Frosst_offxml_filename)
# Retrieve XML as a string
xml = forcefield.to_xml()
# Restore ForceField from XML
forcefield2 = ForceField(xml)
assert_forcefields_equal(
cls.forcefield,
forcefield2,
"ForceField serialized to XML does not match original ForceField",
)
# TODO: Remove ForceField from this whole file. All tests should be for converting between hierarchical SMIRNOFF
# dicts and XML
@pytest.mark.skip(reason="Needs to be updated for 1.0.0 syntax")
def test_save(self):
"""Test writing and reading of SMIRNOFF in XML format."""
forcefield = ForceField(smirnoff99Frosst_offxml_filename)
# Write XML to a file
with TemporaryDirectory() as tmpdir:
offxml_tmpfile = os.path.join(tmpdir, "forcefield.offxml")
forcefield.save(offxml_tmpfile)
forcefield2 = ForceField(offxml_tmpfile)
assert_forcefields_equal(
cls.forcefield,
forcefield2,
"ForceField written to .offxml does not match original ForceField",
)
|
4,302 |
test wer 7
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test error rate."""
import unittest
from paddlespeech.s2t.utils import error_rate
class TestParse(unittest.TestCase):
def test_wer_1(self):
ref = 'i UM the PHONE IS i LEFT THE portable PHONE UPSTAIRS last night'
hyp = 'i GOT IT TO the FULLEST i LOVE TO portable FROM OF STORES last '\
'night'
word_error_rate = error_rate.wer(ref, hyp)
self.assertTrue(abs(word_error_rate - 0.769230769231) < 1e-6)
def test_wer_2(self):
ref = 'as any in england i would say said gamewell proudly that is '\
'in his day'
hyp = 'as any in england i would say said came well proudly that is '\
'in his day'
word_error_rate = error_rate.wer(ref, hyp)
self.assertTrue(abs(word_error_rate - 0.1333333) < 1e-6)
def test_wer_3(self):
ref = 'the lieutenant governor lilburn w boggs afterward governor '\
'was a pronounced mormon hater and throughout the period of '\
'the troubles he manifested sympathy with the persecutors'
hyp = 'the lieutenant governor little bit how bags afterward '\
'governor was a pronounced warman hater and throughout the '\
'period of th troubles he manifests sympathy with the '\
'persecutors'
word_error_rate = error_rate.wer(ref, hyp)
self.assertTrue(abs(word_error_rate - 0.2692307692) < 1e-6)
def test_wer_4(self):
ref = 'the wood flamed up splendidly under the large brewing copper '\
'and it sighed so deeply'
hyp = 'the wood flame do splendidly under the large brewing copper '\
'and its side so deeply'
word_error_rate = error_rate.wer(ref, hyp)
self.assertTrue(abs(word_error_rate - 0.2666666667) < 1e-6)
def test_wer_5(self):
ref = 'all the morning they trudged up the mountain path and at noon '\
'unc and ojo sat on a fallen tree trunk and ate the last of '\
'the bread which the old munchkin had placed in his pocket'
hyp = 'all the morning they trudged up the mountain path and at noon '\
'unc in ojo sat on a fallen tree trunk and ate the last of '\
'the bread which the old munchkin had placed in his pocket'
word_error_rate = error_rate.wer(ref, hyp)
self.assertTrue(abs(word_error_rate - 0.027027027) < 1e-6)
def test_wer_6(self):
ref = 'i UM the PHONE IS i LEFT THE portable PHONE UPSTAIRS last night'
word_error_rate = error_rate.wer(ref, ref)
self.assertEqual(word_error_rate, 0.0)
def METHOD_NAME(self):
ref = ' '
hyp = 'Hypothesis sentence'
with self.assertRaises(ValueError):
word_error_rate = error_rate.wer(ref, hyp)
def test_cer_1(self):
ref = 'werewolf'
hyp = 'weae wolf'
char_error_rate = error_rate.cer(ref, hyp)
self.assertTrue(abs(char_error_rate - 0.25) < 1e-6)
def test_cer_2(self):
ref = 'werewolf'
hyp = 'weae wolf'
char_error_rate = error_rate.cer(ref, hyp, remove_space=True)
self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)
def test_cer_3(self):
ref = 'were wolf'
hyp = 'were wolf'
char_error_rate = error_rate.cer(ref, hyp)
self.assertTrue(abs(char_error_rate - 0.0) < 1e-6)
def test_cer_4(self):
ref = 'werewolf'
char_error_rate = error_rate.cer(ref, ref)
self.assertEqual(char_error_rate, 0.0)
def test_cer_5(self):
ref = u'我是中国人'
hyp = u'我是 美洲人'
char_error_rate = error_rate.cer(ref, hyp)
self.assertTrue(abs(char_error_rate - 0.6) < 1e-6)
def test_cer_6(self):
ref = u'我 是 中 国 人'
hyp = u'我 是 美 洲 人'
char_error_rate = error_rate.cer(ref, hyp, remove_space=True)
self.assertTrue(abs(char_error_rate - 0.4) < 1e-6)
def test_cer_7(self):
ref = u'我是中国人'
char_error_rate = error_rate.cer(ref, ref)
self.assertEqual(char_error_rate, 0.0)
def test_cer_8(self):
ref = ''
hyp = 'Hypothesis'
with self.assertRaises(ValueError):
char_error_rate = error_rate.cer(ref, hyp)
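# A minimal reference sketch, not part of paddlespeech.s2t.utils.error_rate, of the
# metric these tests assume: WER is the word-level Levenshtein distance divided by
# the number of reference words, and CER is the same quantity computed over
# characters (optionally with spaces removed). In test_wer_2, "gamewell" ->
# "came well" costs one substitution plus one insertion, giving 2 / 15 ~= 0.1333.
def _levenshtein(ref, hyp):
    # Classic dynamic-programming edit distance between two sequences.
    prev = list(range(len(hyp) + 1))
    for i, r in enumerate(ref, 1):
        cur = [i]
        for j, h in enumerate(hyp, 1):
            cur.append(min(prev[j] + 1,              # deletion
                           cur[j - 1] + 1,           # insertion
                           prev[j - 1] + (r != h)))  # substitution
        prev = cur
    return prev[-1]

def _reference_wer(ref_sentence, hyp_sentence):
    ref_words = ref_sentence.split()
    if not ref_words:
        # Mirrors the ValueError the tests above expect for an empty reference.
        raise ValueError('Reference sentence must contain at least one word.')
    return _levenshtein(ref_words, hyp_sentence.split()) / len(ref_words)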
if __name__ == '__main__':
unittest.main()
|
4,303 |
attempt
|
from flask import Blueprint
from CTFd.models import (
ChallengeFiles,
Challenges,
Fails,
Flags,
Hints,
Solves,
Tags,
db,
)
from CTFd.plugins import register_plugin_assets_directory
from CTFd.plugins.flags import FlagException, get_flag_class
from CTFd.utils.uploads import delete_file
from CTFd.utils.user import get_ip
class BaseChallenge(object):
id = None
name = None
templates = {}
scripts = {}
challenge_model = Challenges
@classmethod
def create(cls, request):
"""
This method is used to process the challenge creation request.
:param request:
:return:
"""
data = request.form or request.get_json()
challenge = cls.challenge_model(**data)
db.session.add(challenge)
db.session.commit()
return challenge
@classmethod
def read(cls, challenge):
"""
This method is used to access the data of a challenge in a format processable by the front end.
:param challenge:
:return: Challenge object, data dictionary to be returned to the user
"""
data = {
"id": challenge.id,
"name": challenge.name,
"value": challenge.value,
"description": challenge.description,
"connection_info": challenge.connection_info,
"next_id": challenge.next_id,
"category": challenge.category,
"state": challenge.state,
"max_attempts": challenge.max_attempts,
"type": challenge.type,
"type_data": {
"id": cls.id,
"name": cls.name,
"templates": cls.templates,
"scripts": cls.scripts,
},
}
return data
@classmethod
def update(cls, challenge, request):
"""
This method is used to update the information associated with a challenge. This should be kept strictly to the
Challenges table and any child tables.
:param challenge:
:param request:
:return:
"""
data = request.form or request.get_json()
for attr, value in data.items():
setattr(challenge, attr, value)
db.session.commit()
return challenge
@classmethod
def delete(cls, challenge):
"""
This method is used to delete the resources used by a challenge.
:param challenge:
:return:
"""
Fails.query.filter_by(challenge_id=challenge.id).delete()
Solves.query.filter_by(challenge_id=challenge.id).delete()
Flags.query.filter_by(challenge_id=challenge.id).delete()
files = ChallengeFiles.query.filter_by(challenge_id=challenge.id).all()
for f in files:
delete_file(f.id)
ChallengeFiles.query.filter_by(challenge_id=challenge.id).delete()
Tags.query.filter_by(challenge_id=challenge.id).delete()
Hints.query.filter_by(challenge_id=challenge.id).delete()
Challenges.query.filter_by(id=challenge.id).delete()
cls.challenge_model.query.filter_by(id=challenge.id).delete()
db.session.commit()
@classmethod
def METHOD_NAME(cls, challenge, request):
"""
This method is used to check whether a given input is right or wrong. It does not make any changes and should
return a boolean for correctness and a string to be shown to the user. It is also in charge of parsing the
user's input from the request itself.
:param challenge: The Challenge object from the database
:param request: The request the user submitted
:return: (boolean, string)
"""
data = request.form or request.get_json()
submission = data["submission"].strip()
flags = Flags.query.filter_by(challenge_id=challenge.id).all()
for flag in flags:
try:
if get_flag_class(flag.type).compare(flag, submission):
return True, "Correct"
except FlagException as e:
return False, str(e)
return False, "Incorrect"
@classmethod
def solve(cls, user, team, challenge, request):
"""
This method is used to insert Solves into the database in order to mark a challenge as solved.
:param user: The User object from the database
:param team: The Team object from the database
:param challenge: The Challenge object from the database
:param request: The request the user submitted
:return:
"""
data = request.form or request.get_json()
submission = data["submission"].strip()
solve = Solves(
user_id=user.id,
team_id=team.id if team else None,
challenge_id=challenge.id,
ip=get_ip(req=request),
provided=submission,
)
db.session.add(solve)
db.session.commit()
@classmethod
def fail(cls, user, team, challenge, request):
"""
This method is used to insert Fails into the database in order to mark an answer incorrect.
:param user: The User object from the database
:param team: The Team object from the database
:param challenge: The Challenge object from the database
:param request: The request the user submitted
:return:
"""
data = request.form or request.get_json()
submission = data["submission"].strip()
wrong = Fails(
user_id=user.id,
team_id=team.id if team else None,
challenge_id=challenge.id,
ip=get_ip(request),
provided=submission,
)
db.session.add(wrong)
db.session.commit()
class CTFdStandardChallenge(BaseChallenge):
id = "standard" # Unique identifier used to register challenges
name = "standard" # Name of a challenge type
templates = { # Templates used for each aspect of challenge editing & viewing
"create": "/plugins/challenges/assets/create.html",
"update": "/plugins/challenges/assets/update.html",
"view": "/plugins/challenges/assets/view.html",
}
scripts = { # Scripts that are loaded when a template is loaded
"create": "/plugins/challenges/assets/create.js",
"update": "/plugins/challenges/assets/update.js",
"view": "/plugins/challenges/assets/view.js",
}
# Route at which files are accessible. This must be registered using register_plugin_assets_directory()
route = "/plugins/challenges/assets/"
# Blueprint used to access the static_folder directory.
blueprint = Blueprint(
"standard", __name__, template_folder="templates", static_folder="assets"
)
challenge_model = Challenges
def get_chal_class(class_id):
"""
Utility function used to get the corresponding class from a class ID.
:param class_id: String representing the class ID
:return: Challenge class
"""
cls = CHALLENGE_CLASSES.get(class_id)
if cls is None:
raise KeyError
return cls
"""
Global dictionary used to hold all the Challenge Type classes used by CTFd. Insert into this dictionary to register
your Challenge Type.
"""
CHALLENGE_CLASSES = {"standard": CTFdStandardChallenge}
def load(app):
register_plugin_assets_directory(app, base_path="/plugins/challenges/assets/")
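# A hedged sketch, not part of CTFd itself, of how a separate plugin would register
# its own challenge type through CHALLENGE_CLASSES, as the docstring above describes.
# The "bonus" identifier, the class name and the asset paths are illustrative
# assumptions; only the registration pattern is taken from this module.
#
# class BonusChallenge(BaseChallenge):
#     id = "bonus"
#     name = "bonus"
#     templates = {
#         "create": "/plugins/bonus_challenges/assets/create.html",
#         "update": "/plugins/bonus_challenges/assets/update.html",
#         "view": "/plugins/bonus_challenges/assets/view.html",
#     }
#     scripts = {
#         "create": "/plugins/bonus_challenges/assets/create.js",
#         "update": "/plugins/bonus_challenges/assets/update.js",
#         "view": "/plugins/bonus_challenges/assets/view.js",
#     }
#     challenge_model = Challenges
#
# def load(app):
#     register_plugin_assets_directory(app, base_path="/plugins/bonus_challenges/assets/")
#     CHALLENGE_CLASSES["bonus"] = BonusChallenge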
|
4,304 |
test scaler depth thor
|
import hashlib
import os
import imageio
import numpy as np
from torchvision.transforms import transforms
from allenact.utils.tensor_utils import ScaleBothSides
from constants import ABS_PATH_OF_TOP_LEVEL_DIR
to_pil = transforms.ToPILImage() # Same as used by the vision sensors
class TestPillowRescaling(object):
def _load_thor_img(self) -> np.ndarray:
img_path = os.path.join(
ABS_PATH_OF_TOP_LEVEL_DIR, "docs/img/iTHOR_framework.jpg"
)
img = imageio.imread(img_path)
return img
def _get_img_hash(self, img: np.ndarray) -> str:
img_hash = hashlib.sha1(np.ascontiguousarray(img))
return img_hash.hexdigest()
def _random_rgb_image(self, width: int, height: int, seed: int) -> np.ndarray:
s = np.random.get_state()
np.random.seed(seed)
img = np.random.randint(
low=0, high=256, size=(width, height, 3), dtype=np.uint8
)
np.random.set_state(s)
return img
def _random_depthmap(
self, width: int, height: int, max_depth: float, seed: int
) -> np.ndarray:
s = np.random.get_state()
np.random.seed(seed)
img = max_depth * np.random.rand(width, height, 1)
np.random.set_state(s)
return np.float32(img)
def test_scaler_rgb_thor(self):
thor_img_arr = np.uint8(self._load_thor_img())
assert (
self._get_img_hash(thor_img_arr)
== "80ff8a342b4f74966796eee91babde31409d0457"
)
img = to_pil(thor_img_arr)
scaler = ScaleBothSides(width=75, height=75)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "2c47057aa188240cb21b2edc39e0f269c1085bac"
)
scaler = ScaleBothSides(width=500, height=600)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "faf0be2b9ec9bfd23a1b7b465c86ad961d03c259"
)
def test_scaler_rgb_random(self):
arr = self._random_rgb_image(width=100, height=100, seed=1)
assert self._get_img_hash(arr) == "d01bd8ba151ab790fde9a8cc29aa8a3c63147334"
img = to_pil(arr)
scaler = ScaleBothSides(width=60, height=60)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "22473537e50d5e39abeeec4f92dbfde51c754010"
)
scaler = ScaleBothSides(width=1000, height=800)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "5e5b955981e4ee3b5e22287536040d001a31fbd3"
)
def METHOD_NAME(self):
thor_depth_arr = 5 * np.float32(self._load_thor_img()).sum(-1)
thor_depth_arr /= thor_depth_arr.max()
assert (
self._get_img_hash(thor_depth_arr)
== "d3c1474400ba57ed78f52cf4ba6a4c2a1d90516c"
)
img = to_pil(thor_depth_arr)
scaler = ScaleBothSides(width=75, height=75)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "6a879beb6bed49021e438c1e3af7a62c428a44d8"
)
scaler = ScaleBothSides(width=500, height=600)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "79f11fb741ae638afca40125e4c501f54b22cc01"
)
def test_scaler_depth_random(self):
depth_arr = self._random_depthmap(width=96, height=103, max_depth=5.0, seed=1)
assert (
self._get_img_hash(depth_arr) == "cbd8ca127951ffafb6848536d9d731970a5397e9"
)
img = to_pil(depth_arr)
scaler = ScaleBothSides(width=60, height=60)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "5bed173f2d783fb2badcde9b43904ef85a1a5820"
)
scaler = ScaleBothSides(width=1000, height=800)
scaled_img = np.array(scaler(img))
assert (
self._get_img_hash(scaled_img) == "9dceb7f77d767888f24a84c00913c0cf4ccd9d49"
)
if __name__ == "__main__":
TestPillowRescaling().test_scaler_rgb_thor()
TestPillowRescaling().test_scaler_rgb_random()
TestPillowRescaling().METHOD_NAME()
TestPillowRescaling().test_scaler_depth_random()
|
4,305 |
add arguments
|
from django.core.management import BaseCommand
from corehq.pillows.app_submission_tracker import (
SqlAppFormSubmissionTrackerReindexerFactory,
UserAppFormSubmissionReindexerFactory,
)
from corehq.pillows.application import AppReindexerFactory
from corehq.pillows.case import SqlCaseReindexerFactory
from corehq.pillows.case_search import (
CaseSearchReindexerFactory,
ResumableCaseSearchReindexerFactory,
)
from corehq.pillows.domain import DomainReindexerFactory
from corehq.pillows.group import GroupReindexerFactory
from corehq.pillows.groups_to_user import GroupToUserReindexerFactory
from corehq.pillows.sms import SmsReindexerFactory
from corehq.pillows.synclog import UpdateUserSyncHistoryReindexerFactory
from corehq.pillows.user import UserReindexerFactory
from corehq.pillows.xform import SqlFormReindexerFactory
from corehq.util.test_utils import unit_testing_only
USAGE = """Reindex a pillowtop index.
To get help for a specific reindexer, use:
./manage.py ptop_reindexer_v2 [reindexer] -h
"""
FACTORIES = [
DomainReindexerFactory,
UserReindexerFactory,
GroupReindexerFactory,
GroupToUserReindexerFactory,
SqlCaseReindexerFactory,
SqlFormReindexerFactory,
CaseSearchReindexerFactory,
ResumableCaseSearchReindexerFactory,
SmsReindexerFactory,
AppReindexerFactory,
SqlAppFormSubmissionTrackerReindexerFactory,
UpdateUserSyncHistoryReindexerFactory,
UserAppFormSubmissionReindexerFactory,
]
FACTORIES_BY_SLUG = {
factory.slug: factory
for factory in FACTORIES
}
@unit_testing_only
def reindex_and_clean(slug, **options):
reindexer = FACTORIES_BY_SLUG[slug](**options).build()
reindexer.clean()
reindexer.reindex()
class SubCommand(BaseCommand):
subcommands = {}
def run_from_argv(self, argv):
self.subcommand = None
if len(argv) >= 3 and argv[2] in self.subcommands:
self.subcommand = argv[2]
argv = argv[0:2] + argv[3:]
super(SubCommand, self).run_from_argv(argv)
else:
super(SubCommand, self).run_from_argv(argv)
def create_parser(self, prog_name, command_name):
parser = super(SubCommand, self).create_parser(prog_name, command_name)
if self.subcommand:
self.add_subcommand_arguments(parser, self.subcommand)
return parser
def METHOD_NAME(self, parser):
if not self.subcommand:
parser.add_argument(
'subcommand',
choices=list(self.subcommands),
)
self.add_global_arguments(parser)
def add_subcommand_arguments(self, parser, subcommand):
pass
def add_global_arguments(self, parser):
pass
class Command(SubCommand):
help = USAGE
subcommands = FACTORIES_BY_SLUG
def add_global_arguments(self, parser):
parser.add_argument(
'--cleanup',
action='store_true',
dest='cleanup',
default=False,
help='Clean index (delete data) before reindexing.'
)
parser.add_argument(
'--noinput',
action='store_true',
dest='noinput',
default=False,
help='Skip important confirmation warnings.'
)
def add_subcommand_arguments(self, parser, subcommand):
FACTORIES_BY_SLUG[subcommand].METHOD_NAME(parser)
def handle(self, **options):
cleanup = options.pop('cleanup')
noinput = options.pop('noinput')
for option in [
'settings',
'pythonpath',
'verbosity',
'traceback',
'no_color',
'force_color',
'skip_checks'
]:
options.pop(option, None)
def confirm():
return input("Are you sure you want to delete the current index (if it exists)? y/n\n") == 'y'
factory = FACTORIES_BY_SLUG[self.subcommand](**options)
reindexer = factory.build()
if cleanup and (noinput or confirm()):
reindexer.clean()
reindexer.reindex()
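# A hedged, illustrative sketch (not part of corehq) of the per-factory hook this
# command delegates to: each reindexer factory exposes the classmethod referenced via
# METHOD_NAME above (per the row label, add_arguments), which
# add_subcommand_arguments() forwards the parser to. The class name and the
# --batch-size option are assumptions for illustration only.
#
# class ExampleReindexerFactory:
#     slug = 'example'
#
#     @classmethod
#     def add_arguments(cls, parser):
#         parser.add_argument(
#             '--batch-size', type=int, default=1000,
#             help='Number of documents to process per batch.'
#         )
#
#     def __init__(self, **options):
#         self.batch_size = options.get('batch_size', 1000)
#
#     def build(self):
#         ...  # return a reindexer configured with self.batch_size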
|
4,306 |
group
|
from abc import ABC
from logging import Logger
from fastapi import Depends
from pydantic import UUID4
from sqlalchemy.orm import Session
from mealie.core.config import get_app_dirs, get_app_settings
from mealie.core.dependencies.dependencies import get_admin_user, get_current_user, get_integration_id
from mealie.core.exceptions import mealie_registered_exceptions
from mealie.core.root_logger import get_logger
from mealie.core.settings.directories import AppDirectories
from mealie.core.settings.settings import AppSettings
from mealie.db.db_setup import generate_session
from mealie.lang import local_provider
from mealie.lang.providers import Translator
from mealie.repos.all_repositories import AllRepositories
from mealie.routes._base.checks import OperationChecks
from mealie.schema.user.user import GroupInDB, PrivateUser
from mealie.services.event_bus_service.event_bus_service import EventBusService
from mealie.services.event_bus_service.event_types import EventDocumentDataBase, EventTypes
class _BaseController(ABC):
session: Session = Depends(generate_session)
translator: Translator = Depends(local_provider)
_repos: AllRepositories | None
_logger: Logger | None
_settings: AppSettings | None
_folders: AppDirectories | None
@property
def t(self):
return self.translator.t if self.translator else local_provider().t
@property
def repos(self):
if not self._repos:
self._repos = AllRepositories(self.session)
return self._repos
@property
def logger(self) -> Logger:
if not self._logger:
self._logger = get_logger()
return self._logger
@property
def settings(self) -> AppSettings:
if not self._settings:
self._settings = get_app_settings()
return self._settings
@property
def folders(self) -> AppDirectories:
if not self._folders:
self._folders = get_app_dirs()
return self._folders
class Config:
arbitrary_types_allowed = True
class BasePublicController(_BaseController):
"""
This is a base class for all public (unauthenticated) controllers in the API.
It includes the common SharedDependencies and some common methods used
by all public controllers.
"""
...
class BaseUserController(_BaseController):
"""
This is a base class for all User restricted controllers in the API.
It includes the common SharedDependencies and some common methods used
by all User restricted controllers.
"""
user: PrivateUser = Depends(get_current_user)
integration_id: str = Depends(get_integration_id)
translator: Translator = Depends(local_provider)
# Manual Cache
_checks: OperationChecks
def registered_exceptions(self, ex: type[Exception]) -> str:
registered = {
**mealie_registered_exceptions(self.translator),
}
return registered.get(ex, self.t("generic.server-error"))
@property
def group_id(self) -> UUID4:
return self.user.group_id
@property
def METHOD_NAME(self) -> GroupInDB:
return self.repos.groups.get_one(self.group_id)
@property
def checks(self) -> OperationChecks:
if not self._checks:
self._checks = OperationChecks(self.user)
return self._checks
class BaseAdminController(BaseUserController):
"""
This is a base class for all Admin restricted controllers in the API.
It includes the common Shared Dependencies and some common methods used
by all Admin controllers.
"""
user: PrivateUser = Depends(get_admin_user)
class BaseCrudController(BaseUserController):
"""
Base class for all CRUD controllers to facilitate common CRUD functions.
"""
event_bus: EventBusService = Depends(EventBusService.create)
def publish_event(self, event_type: EventTypes, document_data: EventDocumentDataBase, message: str = "") -> None:
self.event_bus.dispatch(
integration_id=self.integration_id,
group_id=self.group_id,
event_type=event_type,
document_data=document_data,
message=message,
)
|
4,307 |
test linestring
|
import numpy as np
import pytest
import shapely
from shapely import LinearRing, LineString, Point
from shapely.coords import CoordinateSequence
def test_from_coordinate_sequence():
# From coordinate tuples
line = LineString([(1.0, 2.0), (3.0, 4.0)])
assert len(line.coords) == 2
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
line = LineString([(1.0, 2.0), (3.0, 4.0)])
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
def test_from_coordinate_sequence_3D():
line = LineString([(1.0, 2.0, 3.0), (3.0, 4.0, 5.0)])
assert line.has_z
assert line.coords[:] == [(1.0, 2.0, 3.0), (3.0, 4.0, 5.0)]
def test_from_points():
# From Points
line = LineString([Point(1.0, 2.0), Point(3.0, 4.0)])
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
line = LineString([Point(1.0, 2.0), Point(3.0, 4.0)])
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
def test_from_mix():
# From mix of tuples and Points
line = LineString([Point(1.0, 2.0), (2.0, 3.0), Point(3.0, 4.0)])
assert line.coords[:] == [(1.0, 2.0), (2.0, 3.0), (3.0, 4.0)]
def test_from_linestring():
# From another linestring
line = LineString([(1.0, 2.0), (3.0, 4.0)])
copy = LineString(line)
assert copy.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
assert copy.geom_type == "LineString"
def test_from_linearring():
coords = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]
ring = LinearRing(coords)
copy = LineString(ring)
assert copy.coords[:] == coords
assert copy.geom_type == "LineString"
def test_from_linestring_z():
coords = [(1.0, 2.0, 3.0), (4.0, 5.0, 6.0)]
line = LineString(coords)
copy = LineString(line)
assert copy.coords[:] == coords
assert copy.geom_type == "LineString"
def test_from_generator():
gen = (coord for coord in [(1.0, 2.0), (3.0, 4.0)])
line = LineString(gen)
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
def test_from_empty():
line = LineString()
assert line.is_empty
assert isinstance(line.coords, CoordinateSequence)
assert line.coords[:] == []
line = LineString([])
assert line.is_empty
assert isinstance(line.coords, CoordinateSequence)
assert line.coords[:] == []
def test_from_numpy():
# Construct from a numpy array
line = LineString(np.array([[1.0, 2.0], [3.0, 4.0]]))
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
def test_numpy_empty_linestring_coords():
# Check empty
line = LineString([])
la = np.asarray(line.coords)
assert la.shape == (0, 2)
def test_numpy_object_array():
geom = LineString([(0.0, 0.0), (0.0, 1.0)])
ar = np.empty(1, object)
ar[:] = [geom]
assert ar[0] == geom
@pytest.mark.filterwarnings("ignore:Creating an ndarray from ragged nested sequences:")
def test_from_invalid_dim():
# TODO(shapely-2.0) better error message?
# pytest.raises(ValueError, match="at least 2 coordinate tuples|at least 2 coordinates"):
with pytest.raises(shapely.GEOSException):
LineString([(1, 2)])
# exact error depends on numpy version
with pytest.raises((ValueError, TypeError)):
LineString([(1, 2, 3), (4, 5)])
with pytest.raises((ValueError, TypeError)):
LineString([(1, 2), (3, 4, 5)])
msg = r"The ordinate \(last\) dimension should be 2 or 3, got {}"
with pytest.raises(ValueError, match=msg.format(4)):
LineString([(1, 2, 3, 4), (4, 5, 6, 7)])
with pytest.raises(ValueError, match=msg.format(1)):
LineString([(1,), (4,)])
def test_from_single_coordinate():
"""Test for issue #486"""
coords = [[-122.185933073564, 37.3629353839073]]
with pytest.raises(shapely.GEOSException):
ls = LineString(coords)
ls.geom_type # caused segfault before fix
class TestLineString:
def METHOD_NAME(self):
# From coordinate tuples
line = LineString([(1.0, 2.0), (3.0, 4.0)])
assert len(line.coords) == 2
assert line.coords[:] == [(1.0, 2.0), (3.0, 4.0)]
# Bounds
assert line.bounds == (1.0, 2.0, 3.0, 4.0)
# Coordinate access
assert tuple(line.coords) == ((1.0, 2.0), (3.0, 4.0))
assert line.coords[0] == (1.0, 2.0)
assert line.coords[1] == (3.0, 4.0)
with pytest.raises(IndexError):
line.coords[2] # index out of range
# Geo interface
assert line.__geo_interface__ == {
"type": "LineString",
"coordinates": ((1.0, 2.0), (3.0, 4.0)),
}
def test_linestring_empty(self):
# Test Non-operability of Null geometry
l_null = LineString()
assert l_null.wkt == "LINESTRING EMPTY"
assert l_null.length == 0.0
def test_equals_argument_order(self):
"""
Test equals predicate functions correctly regardless of the order
of the inputs. See issue #317.
"""
coords = ((0, 0), (1, 0), (1, 1), (0, 0))
ls = LineString(coords)
lr = LinearRing(coords)
assert ls.__eq__(lr) is False # previously incorrectly returned True
assert lr.__eq__(ls) is False
assert (ls == lr) is False
assert (lr == ls) is False
ls_clone = LineString(coords)
lr_clone = LinearRing(coords)
assert ls.__eq__(ls_clone) is True
assert lr.__eq__(lr_clone) is True
assert (ls == ls_clone) is True
assert (lr == lr_clone) is True
def test_numpy_linestring_coords(self):
from numpy.testing import assert_array_equal
line = LineString([(1.0, 2.0), (3.0, 4.0)])
expected = np.array([[1.0, 2.0], [3.0, 4.0]])
# Coordinate sequences can be adapted as well
la = np.asarray(line.coords)
assert_array_equal(la, expected)
def test_linestring_immutable():
line = LineString([(1.0, 2.0), (3.0, 4.0)])
with pytest.raises(AttributeError):
line.coords = [(-1.0, -1.0), (1.0, 1.0)]
with pytest.raises(TypeError):
line.coords[0] = (-1.0, -1.0)
def test_linestring_array_coercion():
# don't convert to array of coordinates, keep objects
line = LineString([(1.0, 2.0), (3.0, 4.0)])
arr = np.array(line)
assert arr.ndim == 0
assert arr.size == 1
assert arr.dtype == np.dtype("object")
assert arr.item() == line
|
4,308 |
test glob nested directory
|
import glob
import os
import os.path
import shutil
import sys
import unittest
from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink, create_empty_file
class GlobTests(unittest.TestCase):
def norm(self, *parts):
return os.path.normpath(os.path.join(self.tempdir, *parts))
def mktemp(self, *parts):
filename = self.norm(*parts)
base, file = os.path.split(filename)
if not os.path.exists(base):
os.makedirs(base)
create_empty_file(filename)
def setUp(self):
self.tempdir = TESTFN + "_dir"
self.mktemp("a", "D")
self.mktemp("aab", "F")
self.mktemp(".aa", "G")
self.mktemp(".bb", "H")
self.mktemp("aaa", "zzzF")
self.mktemp("ZZZ")
self.mktemp("a", "bcd", "EF")
self.mktemp("a", "bcd", "efg", "ha")
if can_symlink():
os.symlink(self.norm("broken"), self.norm("sym1"))
os.symlink("broken", self.norm("sym2"))
os.symlink(os.path.join("a", "bcd"), self.norm("sym3"))
def tearDown(self):
shutil.rmtree(self.tempdir)
def glob(self, *parts):
if len(parts) == 1:
pattern = parts[0]
else:
pattern = os.path.join(*parts)
p = os.path.join(self.tempdir, pattern)
res = glob.glob(p)
self.assertEqual(list(glob.iglob(p)), res)
bres = [os.fsencode(x) for x in res]
# Bytes globbing is not supported on MicroPython
# self.assertEqual(glob.glob(os.fsencode(p)), bres)
# self.assertEqual(list(glob.iglob(os.fsencode(p))), bres)
return res
def assertSequencesEqual_noorder(self, l1, l2):
l1 = list(l1)
l2 = list(l2)
self.assertEqual(set(l1), set(l2))
self.assertEqual(sorted(l1), sorted(l2))
def test_glob_literal(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob("a"), [self.norm("a")])
eq(self.glob("a", "D"), [self.norm("a", "D")])
eq(self.glob("aab"), [self.norm("aab")])
eq(self.glob("zymurgy"), [])
res = glob.glob("*")
self.assertEqual({type(r) for r in res}, {str})
res = glob.glob(os.path.join(os.curdir, "*"))
self.assertEqual({type(r) for r in res}, {str})
# res = glob.glob(b'*')
# self.assertEqual({type(r) for r in res}, {bytes})
# res = glob.glob(os.path.join(os.fsencode(os.curdir), b'*'))
# self.assertEqual({type(r) for r in res}, {bytes})
def test_glob_one_directory(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob("a*"), map(self.norm, ["a", "aab", "aaa"]))
eq(self.glob("*a"), map(self.norm, ["a", "aaa"]))
eq(self.glob(".*"), map(self.norm, [".aa", ".bb"]))
eq(self.glob("?aa"), map(self.norm, ["aaa"]))
eq(self.glob("aa?"), map(self.norm, ["aaa", "aab"]))
eq(self.glob("aa[ab]"), map(self.norm, ["aaa", "aab"]))
eq(self.glob("*q"), [])
def METHOD_NAME(self):
eq = self.assertSequencesEqual_noorder
if os.path.normcase("abCD") == "abCD":
# case-sensitive filesystem
eq(self.glob("a", "bcd", "E*"), [self.norm("a", "bcd", "EF")])
else:
# case insensitive filesystem
eq(
self.glob("a", "bcd", "E*"),
[self.norm("a", "bcd", "EF"), self.norm("a", "bcd", "efg")],
)
eq(self.glob("a", "bcd", "*g"), [self.norm("a", "bcd", "efg")])
def test_glob_directory_names(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob("*", "D"), [self.norm("a", "D")])
eq(self.glob("*", "*a"), [])
eq(self.glob("a", "*", "*", "*a"), [self.norm("a", "bcd", "efg", "ha")])
eq(self.glob("?a?", "*F"), [self.norm("aaa", "zzzF"), self.norm("aab", "F")])
def test_glob_directory_with_trailing_slash(self):
# Patterns ending with a slash shouldn't match non-dirs
res = glob.glob(self.norm("Z*Z") + os.sep)
self.assertEqual(res, [])
res = glob.glob(self.norm("ZZZ") + os.sep)
self.assertEqual(res, [])
# When there is a wildcard pattern which ends with os.sep, glob()
# doesn't blow up.
res = glob.glob(self.norm("aa*") + os.sep)
self.assertEqual(len(res), 2)
# either of these results is reasonable
self.assertIn(
set(res),
[
{self.norm("aaa"), self.norm("aab")},
{self.norm("aaa") + os.sep, self.norm("aab") + os.sep},
],
)
@unittest.skip("unsupported on MicroPython")
def test_glob_bytes_directory_with_trailing_slash(self):
# Same as test_glob_directory_with_trailing_slash, but with a
# bytes argument.
res = glob.glob(os.fsencode(self.norm("Z*Z") + os.sep))
self.assertEqual(res, [])
res = glob.glob(os.fsencode(self.norm("ZZZ") + os.sep))
self.assertEqual(res, [])
res = glob.glob(os.fsencode(self.norm("aa*") + os.sep))
self.assertEqual(len(res), 2)
# either of these results is reasonable
self.assertIn(
set(res),
[
{os.fsencode(self.norm("aaa")), os.fsencode(self.norm("aab"))},
{os.fsencode(self.norm("aaa") + os.sep), os.fsencode(self.norm("aab") + os.sep)},
],
)
@skip_unless_symlink
def test_glob_symlinks(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob("sym3"), [self.norm("sym3")])
eq(self.glob("sym3", "*"), [self.norm("sym3", "EF"), self.norm("sym3", "efg")])
self.assertIn(
self.glob("sym3" + os.sep), [[self.norm("sym3")], [self.norm("sym3") + os.sep]]
)
eq(
self.glob("*", "*F"),
[self.norm("aaa", "zzzF"), self.norm("aab", "F"), self.norm("sym3", "EF")],
)
@skip_unless_symlink
def test_glob_broken_symlinks(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob("sym*"), [self.norm("sym1"), self.norm("sym2"), self.norm("sym3")])
eq(self.glob("sym1"), [self.norm("sym1")])
eq(self.glob("sym2"), [self.norm("sym2")])
@unittest.skipUnless(sys.platform == "win32", "Win32 specific test")
def test_glob_magic_in_drive(self):
eq = self.assertSequencesEqual_noorder
eq(glob.glob("*:"), [])
eq(glob.glob(b"*:"), [])
eq(glob.glob("?:"), [])
eq(glob.glob(b"?:"), [])
eq(glob.glob("\\\\?\\c:\\"), ["\\\\?\\c:\\"])
eq(glob.glob(b"\\\\?\\c:\\"), [b"\\\\?\\c:\\"])
eq(glob.glob("\\\\*\\*\\"), [])
eq(glob.glob(b"\\\\*\\*\\"), [])
def test_main():
run_unittest(GlobTests)
if __name__ == "__main__":
test_main()
|
4,309 |
test second christmas day
|
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Authors: dr-prodigy <[email protected]> (c) 2017-2023
# ryanss <[email protected]> (c) 2014-2017
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from holidays.countries.latvia import Latvia, LV, LVA
from tests.common import TestCase
class TestLatvia(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass(Latvia, years=range(1990, 2050))
def test_country_aliases(self):
self.assertCountryAliases(Latvia, LV, LVA)
def test_no_holidays(self):
self.assertNoHolidays(Latvia(years=1989))
def test_special_holidays(self):
self.assertHoliday(
"2018-07-09",
"2018-09-24",
"2023-05-29",
"2023-07-10",
)
def test_new_years(self):
self.assertHolidayName("Jaunais Gads", (f"{year}-01-01" for year in range(1990, 2050)))
def test_good_friday(self):
self.assertHolidayName(
"Lielā Piektdiena",
"2019-04-19",
"2020-04-10",
"2021-04-02",
"2022-04-15",
"2023-04-07",
)
def test_easter(self):
self.assertHolidayName(
"Lieldienas",
"2019-04-21",
"2020-04-12",
"2021-04-04",
"2022-04-17",
"2023-04-09",
)
def test_easter_monday(self):
self.assertHolidayName(
"Otrās Lieldienas",
"2019-04-22",
"2020-04-13",
"2021-04-05",
"2022-04-18",
"2023-04-10",
)
def test_labor_day(self):
self.assertHolidayName("Darba svētki", (f"{year}-05-01" for year in range(1990, 2050)))
def test_restoration_of_independence_day(self):
name = "Latvijas Republikas Neatkarības atjaunošanas diena"
self.assertHolidayName(name, (f"{year}-05-04" for year in range(2002, 2050)))
self.assertNoHoliday(f"{year}-05-04" for year in range(1990, 2002))
self.assertNoHolidayName(name, range(1990, 2002))
dt = (
"2008-05-05",
"2013-05-06",
"2014-05-05",
"2019-05-06",
)
self.assertHolidayName(f"{name} (brīvdiena)", dt)
self.assertNoNonObservedHoliday(dt)
def test_mothers_day(self):
self.assertHolidayName(
"Mātes diena",
"2019-05-12",
"2020-05-10",
"2021-05-09",
"2022-05-08",
"2023-05-14",
)
def test_midsummer_eve(self):
self.assertHolidayName("Līgo diena", (f"{year}-06-23" for year in range(1990, 2050)))
def test_midsummer_day(self):
self.assertHolidayName("Jāņu diena", (f"{year}-06-24" for year in range(1990, 2050)))
def test_republic_proclamation_day(self):
name = "Latvijas Republikas proklamēšanas diena"
self.assertHolidayName(name, (f"{year}-11-18" for year in range(1990, 2050)))
dt = (
"2007-11-19",
"2012-11-19",
"2017-11-20",
"2018-11-19",
"2023-11-20",
)
self.assertHolidayName(f"{name} (brīvdiena)", dt)
self.assertNoNonObservedHoliday(dt)
def test_christmas_eve(self):
name = "Ziemassvētku vakars"
self.assertHolidayName(name, (f"{year}-12-24" for year in range(2007, 2050)))
self.assertNoHoliday(f"{year}-12-24" for year in range(1990, 2007))
self.assertNoHolidayName(name, range(1990, 2007))
def test_christmas_day(self):
self.assertHolidayName("Ziemassvētki", (f"{year}-12-25" for year in range(1990, 2050)))
def METHOD_NAME(self):
self.assertHolidayName(
"Otrie Ziemassvētki", (f"{year}-12-26" for year in range(1990, 2050))
)
def test_new_years_eve(self):
self.assertHolidayName("Vecgada vakars", (f"{year}-12-31" for year in range(1990, 2050)))
def test_l10n_default(self):
self.assertLocalizedHolidays(
("2022-01-01", "Jaunais Gads"),
("2022-04-15", "Lielā Piektdiena"),
("2022-04-17", "Lieldienas"),
("2022-04-18", "Otrās Lieldienas"),
("2022-05-01", "Darba svētki"),
("2022-05-04", "Latvijas Republikas Neatkarības atjaunošanas diena"),
("2022-05-08", "Mātes diena"),
("2022-06-23", "Līgo diena"),
("2022-06-24", "Jāņu diena"),
("2022-11-18", "Latvijas Republikas proklamēšanas diena"),
("2022-12-24", "Ziemassvētku vakars"),
("2022-12-25", "Ziemassvētki"),
("2022-12-26", "Otrie Ziemassvētki"),
("2022-12-31", "Vecgada vakars"),
)
def test_l10n_en_us(self):
self.assertLocalizedHolidays(
"en_US",
("2022-01-01", "New Year's Day"),
("2022-04-15", "Good Friday"),
("2022-04-17", "Easter"),
("2022-04-18", "Easter Monday"),
("2022-05-01", "Labor Day"),
("2022-05-04", "Restoration of Independence Day"),
("2022-05-08", "Mother's Day"),
("2022-06-23", "Midsummer Eve"),
("2022-06-24", "Midsummer Day"),
("2022-11-18", "Republic of Latvia Proclamation Day"),
("2022-12-24", "Christmas Eve"),
("2022-12-25", "Christmas Day"),
("2022-12-26", "Second Day of Christmas"),
("2022-12-31", "New Year's Eve"),
)
def test_l10n_uk(self):
self.assertLocalizedHolidays(
"uk",
("2022-01-01", "Новий рік"),
("2022-04-15", "Страсна пʼятниця"),
("2022-04-17", "Великдень"),
("2022-04-18", "Великодній понеділок"),
("2022-05-01", "День праці"),
("2022-05-04", "День відновлення незалежности Латвійської Республіки"),
("2022-05-08", "День матері"),
("2022-06-23", "Ліго"),
("2022-06-24", "Янів день"),
("2022-11-18", "День проголошення Латвійської Республіки"),
("2022-12-24", "Святий вечір"),
("2022-12-25", "Різдво Христове"),
("2022-12-26", "Другий день Різдва"),
("2022-12-31", "Переддень Нового року"),
)
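# A short usage sketch, illustrative only and not one of the test cases, showing the
# dict-style lookups these assertions exercise; the expected name mirrors
# test_l10n_default above.
def _example_latvia_usage():
    from datetime import date
    lv = Latvia(years=2022)
    assert date(2022, 12, 26) in lv  # membership check by date object
    assert lv.get(date(2022, 12, 26)) == "Otrie Ziemassvētki"  # default (Latvian) name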
|
4,310 |
parse times
|
from datetime import datetime, time, timedelta
from city_scrapers_core.constants import (
ADVISORY_COMMITTEE,
BOARD,
FORUM,
NOT_CLASSIFIED,
)
from city_scrapers_core.items import Meeting
from city_scrapers_core.spiders import CityScrapersSpider
class ChiSsa62Spider(CityScrapersSpider):
name = "chi_ssa_62"
agency = "Chicago Special Service Area #62 Sauganash"
timezone = "America/Chicago"
start_urls = ["http://escc60646.com/our_events/?date1=all"]
def parse(self, response):
"""
`parse` should always `yield` Meeting items.
"""
for item in response.xpath('//li[@class="event "]'):
times = self.METHOD_NAME(item)
if times[0] is None or (datetime.now() - times[0]) > timedelta(days=90):
continue
title = self._parse_title(item)
classification = self._parse_classification(item)
if title and classification is not NOT_CLASSIFIED:
meeting = Meeting(
title=title,
description=self._parse_description(item),
classification=classification,
start=times[0],
end=times[1],
all_day=False,
time_notes="",
location=self._parse_location(item),
links=self._parse_links(item),
source=self._parse_source(item),
)
meeting["status"] = self._get_status(meeting)
meeting["id"] = self._get_id(meeting)
yield meeting
@staticmethod
def _parse_title(item):
"""Parse or generate meeting title."""
title = item.xpath(".//div[@class='event-title']/h3/a/text()").get()
if title:
title = title.replace("SSA #62", "").strip()
return title
@staticmethod
def _parse_description(item):
"""Parse or generate meeting description."""
return item.xpath(
"string(.//div[@class='event-content']/p[not(descendant::a)])"
).get()
@staticmethod
def _parse_classification(item):
"""Parse or generate classification from allowed options."""
title = item.xpath(".//div[@class='event-title']/h3/a/text()").get()
if title:
if "board" in title.lower():
return BOARD
elif "advisory" in title.lower() or "commission" in title.lower():
return ADVISORY_COMMITTEE
elif "forum" in title.lower():
return FORUM
return NOT_CLASSIFIED
@staticmethod
def METHOD_NAME(item):
"""Parse start and end datetimes as a naive datetime object."""
month = item.xpath('.//div[@class="event-month"]/text()').get().strip()
day = item.xpath('.//div[@class="event-day"]/text()').get().strip()
year = item.xpath('.//div[@class="event-year"]/text()').get().strip()
ddate = datetime.strptime("%s %s %s" % (month, day, year), "%b %d %Y").date()
time_str = item.xpath('.//span[@class="event-time"]/text()').get()
if time_str is None:
return None, None
times = time_str.strip().replace(".", "")
if not ("a" in times or "p" in times) or "cancel" in times.lower():
return datetime.combine(ddate, time()), None
ttimes = times.split("-")
ampm = ttimes[0].split(" ")[1]
try:
starttime = datetime.strptime(ttimes[0].strip(), "%I:%M %p").time()
except ValueError:
# Ignoring error with Halloween
return None, None
start = datetime.combine(ddate, starttime)
if len(ttimes) > 1:
if not ("a" in ttimes[1] or "p" in ttimes[1]):
ttimes[1] = ttimes[1] + " " + ampm
endtime = datetime.strptime(ttimes[1].strip(), "%I:%M %p").time()
end = datetime.combine(ddate, endtime)
else:
end = None
return start, end
@staticmethod
def _parse_location(item):
"""Parse or generate location."""
location = item.xpath('.//span[@class="event-location"]/text()').get().strip()
location_split = location.split(",")
if len(location_split) == 1 or any(char.isdigit() for char in location[0]):
name = ""
address = location.strip()
else:
name = location_split[0].strip()
address = ", ".join(x.strip() for x in location_split[1:])
address += ", Chicago, IL"
return {"name": name, "address": address}
@staticmethod
def _parse_links(item):
"""Parse or generate links."""
title = item.xpath(".//div[@class='event-content']/p/a/text()").get()
href = item.xpath(".//div[@class='event-content']/p/a/@href").get()
if title is None:
return []
return [{"href": href, "title": title}]
@staticmethod
def _parse_source(item):
"""Parse or generate source."""
return item.xpath(".//div[@class='event-title']/h3/a/@href").get()
|
4,311 |
test priority sort under blank
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# https://www.opensource.org/licenses/mit-license.php
#
###############################################################################
import sys
import os
import unittest
# Ensure python finds the local simpletap module
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from basetest import Task, TestCase
class TestPrioritySorting(TestCase):
@classmethod
def setUpClass(cls):
"""Executed once before any test in the class"""
cls.t = Task()
cls.t.config("verbose", "nothing")
cls.t("add H pri:H")
cls.t("add M pri:M")
cls.t("add L pri:L")
cls.t("add _")
def setUp(self):
"""Executed before each test in the class"""
def test_priority_sort_under_H(self):
"""Verify priority.under:H works"""
code, out, err = self.t("priority.under:H ls")
self.assertNotIn("H", out)
self.assertIn("M", out)
self.assertIn("L", out)
self.assertIn("_", out)
def test_priority_sort_under_M(self):
"""Verify priority.under:M works"""
code, out, err = self.t("priority.under:M ls")
self.assertNotIn("H", out)
self.assertNotIn("M", out)
self.assertIn("L", out)
self.assertIn("_", out)
def test_priority_sort_under_L(self):
"""Verify priority.under:L works"""
code, out, err = self.t("priority.under:L ls")
self.assertNotIn("H", out)
self.assertNotIn("M", out)
self.assertNotIn("L", out)
self.assertIn("_", out)
def METHOD_NAME(self):
"""Verify priority.under: works"""
code, out, err = self.t.runError("priority.under: ls")
# No output with 'rc.verbose:nothing'
def test_priority_sort_over_H(self):
"""Verify priority.over:H works"""
code, out, err = self.t.runError("priority.over:H ls")
# No output with 'rc.verbose:nothing'
def test_priority_sort_over_M(self):
"""Verify priority.over:M works"""
code, out, err = self.t("priority.over:M ls")
self.assertIn("H", out)
self.assertNotIn("M", out)
self.assertNotIn("L", out)
self.assertNotIn("_", out)
def test_priority_sort_over_L(self):
"""Verify priority.over:L works"""
code, out, err = self.t("priority.over:L ls")
self.assertIn("H", out)
self.assertIn("M", out)
self.assertNotIn("L", out)
self.assertNotIn("_", out)
def test_priority_sort_over_blank(self):
"""Verify priority.over: works"""
code, out, err = self.t("priority.over: ls")
self.assertIn("H", out)
self.assertIn("M", out)
self.assertIn("L", out)
self.assertNotIn("_", out)
if __name__ == "__main__":
from simpletap import TAPTestRunner
unittest.main(testRunner=TAPTestRunner())
# vim: ai sts=4 et sw=4 ft=python
|
4,312 |
test symmetry
|
from pyiron_atomistics import Project
import numpy as np
import unittest
import os
class TestSphinx(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.file_location = os.path.dirname(os.path.abspath(__file__))
cls.project = Project('SPX_CHECK_ALL')
cls.a_Fe = 2.83
cls.a_Al = 4.024
@classmethod
def tearDownClass(cls):
cls.project.remove(enable=True)
def test_Fe_nonmag(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Fe_nonmag')
job.structure = self.project.create.structure.ase.bulk('Fe', a=self.a_Fe)
job.calc_static()
job.run()
self.assertLess(
np.linalg.norm(job['output/generic/forces']),
1.0e-4,
'Forces wrong'
)
self.assertTrue(
np.allclose(
job.structure.positions,
job['output/generic/positions'][-1],
),
'Positions not correctly parsed'
)
self.assertTrue(
np.allclose(
job.structure.cell,
job['output/generic/cells'][-1]
),
'Cells not correctly parsed'
)
self.assertFalse(
'atom_spins' in job['output/generic/dft'].list_nodes(),
'spins present'
)
self.assertAlmostEqual(
job['output/generic/volume'][-1],
np.linalg.det(job.structure.cell),
4,
msg='Volume wrong'
)
self.assertTrue(
np.allclose(
job.structure.positions, job['output/generic/positions'][0]
),
'Positions not parsed properly'
)
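        # Now run a ferromagnetic reference calculation so its total energy can be compared with the nonmagnetic job below.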
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Fe_ferro')
job.structure = self.project.create.structure.ase.bulk('Fe', a=self.a_Fe)
job.structure.set_initial_magnetic_moments(len(job.structure) * [2])
job.calc_static()
job.run()
self.assertLess(
self.project.load('spx_Fe_ferro')['output/generic/energy_tot'][0],
self.project.load('spx_Fe_nonmag')['output/generic/energy_tot'][0],
            'Ferromagnetic BCC Fe should have a lower energy than the nonmagnetic state'
)
def test_Fe_ferro_C(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Fe_ferro_C')
job.structure = self.project.create.structure.ase.bulk('Fe', a=self.a_Fe)
job.structure.set_initial_magnetic_moments(len(job.structure) * [2])
job.structure += self.project.create_atoms(
elements=['C'], positions=[[0, 0, 0.5 * self.a_Fe]], magmoms=[0]
)
job.calc_static()
job.run()
self.assertTrue(
np.allclose(job.structure.positions, job['output/generic/positions'][-1]),
'Positions not correctly parsed'
)
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Al')
job.structure = self.project.create.structure.ase.bulk('Al', a=self.a_Al)
job.calc_static()
job.run()
job = job.restart(from_charge_density=False, from_wave_functions=False)
job.run()
self.assertTrue(
'spx_Al_restart' in list(self.project.job_table().job), 'restart job not found'
)
self.assertAlmostEqual(
self.project.load('spx_Al')['output/generic/energy_tot'][-1],
self.project.load('spx_Al_restart')['output/generic/energy_tot'][-1],
4,
msg='Energy value after restart too different'
)
def test_Al_minimize(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Al_minimize')
job.structure = self.project.create.structure.ase.bulk('Al', a=self.a_Al, cubic=True)
job.structure.positions[0, 0] += 0.01
job.calc_minimize()
job.run()
E = job['output/generic/energy_tot']
self.assertGreater(E[0], E[-1], 'Energy not decreased')
def test_check_overlap(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_check_overlap')
job.structure = self.project.create.structure.ase.bulk('Fe', a=2.832)
job.set_check_overlap(False)
job.calc_static()
job.run()
def METHOD_NAME(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_symmetry')
job.structure = self.project.create.structure.ase.bulk('Fe', a=2.832)
job.fix_symmetry = False
job.calc_static()
job.run()
def test_Fe_ferro_constraint(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Fe_ferro_constraint')
job.structure = self.project.create.structure.ase.bulk('Fe', a=self.a_Fe)
job.structure.set_initial_magnetic_moments(len(job.structure) * [2])
job.fix_spin_constraint = True
job.calc_static()
job.run()
self.assertTrue(
np.allclose(
job['output/generic/dft/atom_spins'],
job.structure.get_initial_magnetic_moments()
),
'Magnetic moments either not properly parsed or constraining not working'
)
def test_Al_save_memory(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Al_save_memory')
job.structure = self.project.create.structure.ase.bulk('Al', a=self.a_Al)
job.input['SaveMemory'] = True
job.calc_static()
job.run()
def test_Al_interactive(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'spx_Al_interactive')
job.structure = self.project.create.structure.ase.bulk('Al', a=self.a_Al)
job.structure.positions[0, 0] += 0.01
job.server.run_mode.interactive = True
job.calc_static()
minim = job.create_job(self.project.job_type.SxExtOptInteractive, 'sxextopt_Al')
minim.run()
def test_nonmodal2(self):
job = self.project.create_job(self.project.job_type.Sphinx, 'nonmodal2')
job.structure = self.project.create.structure.ase.bulk('Al', a=self.a_Al)
job.calc_static()
job.save()
job_reload = self.project.load(job.job_name)
job_reload.run()
self.assertTrue(job['output/generic/dft/bands_e_fermi'] is not None)
self.assertTrue(job_reload.status.finished)
def test_sxextopt_Fe(self):
spx = self.project.create_job('Sphinx', 'spx_sxextopt_Fe')
spx.structure = self.project.create.structure.ase.bulk('Fe', a=2)
spx.structure.set_initial_magnetic_moments(len(spx.structure) * [2])
spx.server.run_mode.interactive = True
spx.calc_static()
sxextopt = self.project.create_job('SxExtOptInteractive', 'sxextopt_Fe')
sxextopt.ref_job = spx
sxextopt.save()
sxextopt = self.project.load('sxextopt_Fe')
sxextopt.run()
if __name__ == "__main__":
unittest.main()
|
4,313 |
set label
|
from fontTools import ttLib
from fontTools.misc.textTools import safeEval
from fontTools.ttLib.tables.DefaultTable import DefaultTable
import sys
import os
import logging
log = logging.getLogger(__name__)
class TTXParseError(Exception):
pass
BUFSIZE = 0x4000
class XMLReader(object):
def __init__(
self, fileOrPath, ttFont, progress=None, quiet=None, contentOnly=False
):
if fileOrPath == "-":
fileOrPath = sys.stdin
if not hasattr(fileOrPath, "read"):
self.file = open(fileOrPath, "rb")
self._closeStream = True
else:
# assume readable file object
self.file = fileOrPath
self._closeStream = False
self.ttFont = ttFont
self.progress = progress
if quiet is not None:
from fontTools.misc.loggingTools import deprecateArgument
deprecateArgument("quiet", "configure logging instead")
self.quiet = quiet
self.root = None
self.contentStack = []
self.contentOnly = contentOnly
self.stackSize = 0
def read(self, rootless=False):
if rootless:
self.stackSize += 1
if self.progress:
self.file.seek(0, 2)
fileSize = self.file.tell()
self.progress.set(0, fileSize // 100 or 1)
self.file.seek(0)
self._parseFile(self.file)
if self._closeStream:
self.close()
if rootless:
self.stackSize -= 1
def close(self):
self.file.close()
def _parseFile(self, file):
from xml.parsers.expat import ParserCreate
parser = ParserCreate()
parser.StartElementHandler = self._startElementHandler
parser.EndElementHandler = self._endElementHandler
parser.CharacterDataHandler = self._characterDataHandler
pos = 0
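        # Feed the file to expat in fixed-size chunks; an empty read marks EOF, so finalize the parse and stop.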
while True:
chunk = file.read(BUFSIZE)
if not chunk:
parser.Parse(chunk, 1)
break
pos = pos + len(chunk)
if self.progress:
self.progress.set(pos // 100)
parser.Parse(chunk, 0)
def _startElementHandler(self, name, attrs):
if self.stackSize == 1 and self.contentOnly:
# We already know the table we're parsing, skip
# parsing the table tag and continue to
# stack '2' which begins parsing content
self.contentStack.append([])
self.stackSize = 2
return
stackSize = self.stackSize
self.stackSize = stackSize + 1
subFile = attrs.get("src")
if subFile is not None:
if hasattr(self.file, "name"):
# if file has a name, get its parent directory
dirname = os.path.dirname(self.file.name)
else:
# else fall back to using the current working directory
dirname = os.getcwd()
subFile = os.path.join(dirname, subFile)
if not stackSize:
if name != "ttFont":
raise TTXParseError("illegal root tag: %s" % name)
if self.ttFont.reader is None and not self.ttFont.tables:
sfntVersion = attrs.get("sfntVersion")
if sfntVersion is not None:
if len(sfntVersion) != 4:
sfntVersion = safeEval('"' + sfntVersion + '"')
self.ttFont.sfntVersion = sfntVersion
self.contentStack.append([])
elif stackSize == 1:
if subFile is not None:
subReader = XMLReader(subFile, self.ttFont, self.progress)
subReader.read()
self.contentStack.append([])
return
tag = ttLib.xmlToTag(name)
msg = "Parsing '%s' table..." % tag
if self.progress:
self.progress.METHOD_NAME(msg)
log.info(msg)
if tag == "GlyphOrder":
tableClass = ttLib.GlyphOrder
elif "ERROR" in attrs or ("raw" in attrs and safeEval(attrs["raw"])):
tableClass = DefaultTable
else:
tableClass = ttLib.getTableClass(tag)
if tableClass is None:
tableClass = DefaultTable
if tag == "loca" and tag in self.ttFont:
# Special-case the 'loca' table as we need the
# original if the 'glyf' table isn't recompiled.
self.currentTable = self.ttFont[tag]
else:
self.currentTable = tableClass(tag)
self.ttFont[tag] = self.currentTable
self.contentStack.append([])
elif stackSize == 2 and subFile is not None:
subReader = XMLReader(subFile, self.ttFont, self.progress, contentOnly=True)
subReader.read()
self.contentStack.append([])
self.root = subReader.root
elif stackSize == 2:
self.contentStack.append([])
self.root = (name, attrs, self.contentStack[-1])
else:
l = []
self.contentStack[-1].append((name, attrs, l))
self.contentStack.append(l)
def _characterDataHandler(self, data):
if self.stackSize > 1:
# parser parses in chunks, so we may get multiple calls
# for the same text node; thus we need to append the data
# to the last item in the content stack:
# https://github.com/fonttools/fonttools/issues/2614
if (
data != "\n"
and self.contentStack[-1]
and isinstance(self.contentStack[-1][-1], str)
and self.contentStack[-1][-1] != "\n"
):
self.contentStack[-1][-1] += data
else:
self.contentStack[-1].append(data)
def _endElementHandler(self, name):
self.stackSize = self.stackSize - 1
del self.contentStack[-1]
if not self.contentOnly:
if self.stackSize == 1:
self.root = None
elif self.stackSize == 2:
name, attrs, content = self.root
self.currentTable.fromXML(name, attrs, content, self.ttFont)
self.root = None
class ProgressPrinter(object):
def __init__(self, title, maxval=100):
print(title)
def set(self, val, maxval=None):
pass
def increment(self, val=1):
pass
def METHOD_NAME(self, text):
print(text)
|
4,314 |
test two pairs first ranked by largest
|
# These tests are auto-generated with test data from:
# https://github.com/exercism/problem-specifications/tree/main/exercises/poker/canonical-data.json
# File last updated on 2023-07-19
import unittest
from poker import (
best_hands,
)
class PokerTest(unittest.TestCase):
def test_single_hand_always_wins(self):
self.assertEqual(best_hands(["4S 5S 7H 8D JC"]), ["4S 5S 7H 8D JC"])
def test_highest_card_out_of_all_hands_wins(self):
self.assertEqual(
best_hands(["4D 5S 6S 8D 3C", "2S 4C 7S 9H 10H", "3S 4S 5D 6H JH"]),
["3S 4S 5D 6H JH"],
)
def test_a_tie_has_multiple_winners(self):
self.assertEqual(
best_hands(
[
"4D 5S 6S 8D 3C",
"2S 4C 7S 9H 10H",
"3S 4S 5D 6H JH",
"3H 4H 5C 6C JD",
]
),
["3S 4S 5D 6H JH", "3H 4H 5C 6C JD"],
)
def test_multiple_hands_with_the_same_high_cards_tie_compares_next_highest_ranked_down_to_last_card(
self,
):
self.assertEqual(
best_hands(["3S 5H 6S 8D 7H", "2S 5D 6D 8C 7S"]), ["3S 5H 6S 8D 7H"]
)
def test_one_pair_beats_high_card(self):
self.assertEqual(
best_hands(["4S 5H 6C 8D KH", "2S 4H 6S 4D JH"]), ["2S 4H 6S 4D JH"]
)
def test_highest_pair_wins(self):
self.assertEqual(
best_hands(["4S 2H 6S 2D JH", "2S 4H 6C 4D JD"]), ["2S 4H 6C 4D JD"]
)
def test_two_pairs_beats_one_pair(self):
self.assertEqual(
best_hands(["2S 8H 6S 8D JH", "4S 5H 4C 8C 5C"]), ["4S 5H 4C 8C 5C"]
)
def test_both_hands_have_two_pairs_highest_ranked_pair_wins(self):
self.assertEqual(
best_hands(["2S 8H 2D 8D 3H", "4S 5H 4C 8S 5D"]), ["2S 8H 2D 8D 3H"]
)
def test_both_hands_have_two_pairs_with_the_same_highest_ranked_pair_tie_goes_to_low_pair(
self,
):
self.assertEqual(
best_hands(["2S QS 2C QD JH", "JD QH JS 8D QC"]), ["JD QH JS 8D QC"]
)
def test_both_hands_have_two_identically_ranked_pairs_tie_goes_to_remaining_card_kicker(
self,
):
self.assertEqual(
best_hands(["JD QH JS 8D QC", "JS QS JC 2D QD"]), ["JD QH JS 8D QC"]
)
def test_both_hands_have_two_pairs_that_add_to_the_same_value_win_goes_to_highest_pair(
self,
):
self.assertEqual(
best_hands(["6S 6H 3S 3H AS", "7H 7S 2H 2S AC"]), ["7H 7S 2H 2S AC"]
)
def METHOD_NAME(self):
self.assertEqual(
best_hands(["5C 2S 5S 4H 4C", "6S 2S 6H 7C 2C"]), ["6S 2S 6H 7C 2C"]
)
def test_three_of_a_kind_beats_two_pair(self):
self.assertEqual(
best_hands(["2S 8H 2H 8D JH", "4S 5H 4C 8S 4H"]), ["4S 5H 4C 8S 4H"]
)
def test_both_hands_have_three_of_a_kind_tie_goes_to_highest_ranked_triplet(self):
self.assertEqual(
best_hands(["2S 2H 2C 8D JH", "4S AH AS 8C AD"]), ["4S AH AS 8C AD"]
)
def test_with_multiple_decks_two_players_can_have_same_three_of_a_kind_ties_go_to_highest_remaining_cards(
self,
):
self.assertEqual(
best_hands(["4S AH AS 7C AD", "4S AH AS 8C AD"]), ["4S AH AS 8C AD"]
)
def test_a_straight_beats_three_of_a_kind(self):
self.assertEqual(
best_hands(["4S 5H 4C 8D 4H", "3S 4D 2S 6D 5C"]), ["3S 4D 2S 6D 5C"]
)
def test_aces_can_end_a_straight_10_j_q_k_a(self):
self.assertEqual(
best_hands(["4S 5H 4C 8D 4H", "10D JH QS KD AC"]), ["10D JH QS KD AC"]
)
def test_aces_can_start_a_straight_a_2_3_4_5(self):
self.assertEqual(
best_hands(["4S 5H 4C 8D 4H", "4D AH 3S 2D 5C"]), ["4D AH 3S 2D 5C"]
)
def test_aces_cannot_be_in_the_middle_of_a_straight_q_k_a_2_3(self):
self.assertEqual(
best_hands(["2C 3D 7H 5H 2S", "QS KH AC 2D 3S"]), ["2C 3D 7H 5H 2S"]
)
def test_both_hands_with_a_straight_tie_goes_to_highest_ranked_card(self):
self.assertEqual(
best_hands(["4S 6C 7S 8D 5H", "5S 7H 8S 9D 6H"]), ["5S 7H 8S 9D 6H"]
)
def test_even_though_an_ace_is_usually_high_a_5_high_straight_is_the_lowest_scoring_straight(
self,
):
self.assertEqual(
best_hands(["2H 3C 4D 5D 6H", "4S AH 3S 2D 5H"]), ["2H 3C 4D 5D 6H"]
)
def test_flush_beats_a_straight(self):
self.assertEqual(
best_hands(["4C 6H 7D 8D 5H", "2S 4S 5S 6S 7S"]), ["2S 4S 5S 6S 7S"]
)
def test_both_hands_have_a_flush_tie_goes_to_high_card_down_to_the_last_one_if_necessary(
self,
):
self.assertEqual(
best_hands(["4H 7H 8H 9H 6H", "2S 4S 5S 6S 7S"]), ["4H 7H 8H 9H 6H"]
)
def test_full_house_beats_a_flush(self):
self.assertEqual(
best_hands(["3H 6H 7H 8H 5H", "4S 5H 4C 5D 4H"]), ["4S 5H 4C 5D 4H"]
)
def test_both_hands_have_a_full_house_tie_goes_to_highest_ranked_triplet(self):
self.assertEqual(
best_hands(["4H 4S 4D 9S 9D", "5H 5S 5D 8S 8D"]), ["5H 5S 5D 8S 8D"]
)
def test_with_multiple_decks_both_hands_have_a_full_house_with_the_same_triplet_tie_goes_to_the_pair(
self,
):
self.assertEqual(
best_hands(["5H 5S 5D 9S 9D", "5H 5S 5D 8S 8D"]), ["5H 5S 5D 9S 9D"]
)
def test_four_of_a_kind_beats_a_full_house(self):
self.assertEqual(
best_hands(["4S 5H 4D 5D 4H", "3S 3H 2S 3D 3C"]), ["3S 3H 2S 3D 3C"]
)
def test_both_hands_have_four_of_a_kind_tie_goes_to_high_quad(self):
self.assertEqual(
best_hands(["2S 2H 2C 8D 2D", "4S 5H 5S 5D 5C"]), ["4S 5H 5S 5D 5C"]
)
def test_with_multiple_decks_both_hands_with_identical_four_of_a_kind_tie_determined_by_kicker(
self,
):
self.assertEqual(
best_hands(["3S 3H 2S 3D 3C", "3S 3H 4S 3D 3C"]), ["3S 3H 4S 3D 3C"]
)
def test_straight_flush_beats_four_of_a_kind(self):
self.assertEqual(
best_hands(["4S 5H 5S 5D 5C", "7S 8S 9S 6S 10S"]), ["7S 8S 9S 6S 10S"]
)
def test_aces_can_end_a_straight_flush_10_j_q_k_a(self):
self.assertEqual(
best_hands(["KC AH AS AD AC", "10C JC QC KC AC"]), ["10C JC QC KC AC"]
)
def test_aces_can_start_a_straight_flush_a_2_3_4_5(self):
self.assertEqual(
best_hands(["KS AH AS AD AC", "4H AH 3H 2H 5H"]), ["4H AH 3H 2H 5H"]
)
def test_aces_cannot_be_in_the_middle_of_a_straight_flush_q_k_a_2_3(self):
self.assertEqual(
best_hands(["2C AC QC 10C KC", "QH KH AH 2H 3H"]), ["2C AC QC 10C KC"]
)
def test_both_hands_have_a_straight_flush_tie_goes_to_highest_ranked_card(self):
self.assertEqual(
best_hands(["4H 6H 7H 8H 5H", "5S 7S 8S 9S 6S"]), ["5S 7S 8S 9S 6S"]
)
def test_even_though_an_ace_is_usually_high_a_5_high_straight_flush_is_the_lowest_scoring_straight_flush(
self,
):
self.assertEqual(
best_hands(["2H 3H 4H 5H 6H", "4D AD 3D 2D 5D"]), ["2H 3H 4H 5H 6H"]
)
|
4,315 |
get receivers
|
from celery import shared_task
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import EmailMultiAlternatives, get_connection
from django.core.paginator import Paginator
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.utils.timezone import now
from grandchallenge.core.templatetags.bleach import md2html
from grandchallenge.emails.models import Email
from grandchallenge.emails.utils import SendActionChoices
from grandchallenge.subdomains.utils import reverse
def METHOD_NAME(action):
if action == SendActionChoices.MAILING_LIST:
receivers = (
get_user_model()
.objects.filter(
user_profile__receive_newsletter=True, is_active=True
)
.order_by("pk")
)
elif action == SendActionChoices.STAFF:
receivers = (
get_user_model()
.objects.filter(is_staff=True, is_active=True)
.order_by("pk")
)
elif action == SendActionChoices.CHALLENGE_ADMINS:
receivers = (
get_user_model()
.objects.filter(
groups__admins_of_challenge__isnull=False,
user_profile__receive_newsletter=True,
is_active=True,
)
.distinct()
.order_by("pk")
)
elif action == SendActionChoices.READER_STUDY_EDITORS:
receivers = (
get_user_model()
.objects.filter(
groups__editors_of_readerstudy__isnull=False,
user_profile__receive_newsletter=True,
is_active=True,
)
.distinct()
.order_by("pk")
)
elif action == SendActionChoices.ALGORITHM_EDITORS:
receivers = (
get_user_model()
.objects.filter(
groups__editors_of_algorithm__isnull=False,
user_profile__receive_newsletter=True,
is_active=True,
)
.distinct()
.order_by("pk")
)
return receivers
def send_mass_html_email(datatuple):
connection = get_connection()
messages = []
for subject, message, sender, recipient, html in datatuple:
email = EmailMultiAlternatives(
subject, message, sender, recipient, connection=connection
)
email.attach_alternative(html, "text/html")
messages.append(email)
return connection.send_messages(messages)
@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS["acks-late-micro-short"])
def send_bulk_email(action, email_pk):
try:
email = Email.objects.filter(sent=False).get(pk=email_pk)
except ObjectDoesNotExist:
return
subject = email.subject
body = email.body
html_body = md2html(body)
receivers = METHOD_NAME(action=action)
paginator = Paginator(receivers, 100)
site = Site.objects.get_current()
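    # Resume from the last processed batch if a previous send was interrupted.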
if email.status_report:
start_page = email.status_report["last_processed_batch"]
else:
start_page = 0
for page_nr in paginator.page_range[start_page:]:
messages = []
for recipient in paginator.page(page_nr).object_list:
user = get_user_model().objects.get(pk=recipient.pk)
link = reverse(
"profile-update", kwargs={"username": user.username}
)
html_content = render_to_string(
"vendored/mailgun_transactional_emails/action.html",
{
"title": subject,
"username": user.username,
"content": html_body,
"link": link,
},
)
html_content_without_linebreaks = html_content.replace("\n", "")
text_content = strip_tags(html_content_without_linebreaks)
messages.append(
(
f"[{site.domain.lower()}] {subject}",
text_content,
settings.DEFAULT_FROM_EMAIL,
[user.email],
html_content_without_linebreaks,
)
)
send_mass_html_email(messages)
email.status_report = {"last_processed_batch": page_nr}
email.save()
email.sent = True
email.sent_at = now()
email.status_report = None
email.save()
|
4,316 |
connect selection changed
|
from enigma import eEPGCache, eListbox, eListboxPythonMultiContent, eServiceReference, eSize
from Components.GUIComponent import GUIComponent
from Tools.Alternatives import CompareWithAlternatives
from Tools.Directories import SCOPE_CURRENT_SKIN, resolveFilename
from Tools.LoadPixmap import LoadPixmap
from skin import parseScale
class EPGListBase(GUIComponent):
def __init__(self, session, selChangedCB=None):
GUIComponent.__init__(self)
self.session = session
self.onSelChanged = []
if selChangedCB is not None:
self.onSelChanged.append(selChangedCB)
self.l = eListboxPythonMultiContent()
self.epgcache = eEPGCache.getInstance()
# Load the common clock icons.
self.clocks = [
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_pre.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_post.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_prepost.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_zap.png")),
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_zaprec.png"))
]
self.selclocks = [
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_selpre.png")) or self.clocks[0],
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_selpost.png")) or self.clocks[1],
LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_selprepost.png")) or self.clocks[2],
self.clocks[3],
self.clocks[4],
self.clocks[5]
]
self.autotimericon = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "icons/epgclock_autotimer.png"))
try:
from Plugins.SystemPlugins.IceTV import loadIceTVIcon
self.icetvicon = loadIceTVIcon("epgclock_icetv.png")
except ImportError:
self.icetvicon = None
self.listHeight = 0
self.listWidth = 0
self.skinItemHeight = 0
self.numberOfRows = 0
def applySkin(self, desktop, screen):
if self.skinAttributes is not None:
attribs = []
for (attrib, value) in self.skinAttributes:
if attrib == "itemHeight":
self.skinItemHeight = parseScale(value)
elif attrib == "NumberOfRows": # for compatibility with ATV skins
self.numberOfRows = int(value)
else:
attribs.append((attrib, value))
self.skinAttributes = attribs
rc = GUIComponent.applySkin(self, desktop, screen)
self.skinListHeight = self.listHeight = self.instance.size().height()
self.listWidth = self.instance.size().width()
self.setFontsize()
self.setItemsPerPage()
return rc
def setItemsPerPage(self, defaultItemHeight=54):
numberOfRows = self.epgConfig.itemsperpage.value or self.numberOfRows
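        # Item height fallback order: items-per-page config, skin NumberOfRows, skin itemHeight, then the default.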
itemHeight = (self.skinListHeight // numberOfRows if numberOfRows > 0 else self.skinItemHeight) or defaultItemHeight
self.l.setItemHeight(itemHeight)
self.instance.resize(eSize(self.listWidth, self.skinListHeight // itemHeight * itemHeight))
self.listHeight = self.instance.size().height()
self.listWidth = self.instance.size().width()
self.itemHeight = itemHeight
def getEventFromId(self, service, eventId):
event = None
if self.epgcache is not None and eventId is not None:
event = self.epgcache.lookupEventId(service.ref, eventId)
return event
def getSelectionPosition(self):
# Adjust absolute index to index in displayed view
rowCount = self.listHeight // self.itemHeight
index = self.l.getCurrentSelectionIndex() % rowCount
sely = self.instance.position().y() + self.itemHeight * index
if sely >= self.instance.position().y() + self.listHeight:
sely -= self.listHeight
return self.listWidth, sely
def getIndexFromService(self, serviceref):
if serviceref is not None:
for x in range(len(self.list)):
if CompareWithAlternatives(self.list[x][0], serviceref):
return x
if CompareWithAlternatives(self.list[x][1], serviceref):
return x
return None
def getCurrentIndex(self):
return self.instance.getCurrentIndex()
def moveToService(self, serviceref):
if not serviceref:
return
newIdx = self.getIndexFromService(serviceref)
if newIdx is None:
newIdx = 0
self.setCurrentIndex(newIdx)
def setCurrentIndex(self, index):
if self.instance is not None:
self.instance.moveSelectionTo(index)
def moveTo(self, dir):
if self.instance is not None:
self.instance.moveSelection(dir)
def getCurrent(self):
tmp = self.l.getCurrentSelection()
if tmp is None:
return None, None
service = eServiceReference(tmp[0])
eventId = tmp[1]
event = self.getEventFromId(service, eventId)
return event, service
    def METHOD_NAME(self, func):
        if not self.onSelChanged.count(func):
            self.onSelChanged.append(func)
    def disconnectSelectionChanged(self, func):
        self.onSelChanged.remove(func)
def selectionChanged(self):
for x in self.onSelChanged:
if x is not None:
x()
GUI_WIDGET = eListbox
def selectionEnabled(self, enabled):
if self.instance is not None:
self.instance.setSelectionEnable(enabled)
def getPixmapsForTimer(self, timer, matchType, selected=False):
if timer is None:
return (None, None)
autoTimerIcon = None
if matchType == 3:
# recording whole event, add timer type onto pixmap lookup index
matchType += 2 if timer.always_zap else 1 if timer.justplay else 0
autoTimerIcon = self.icetvicon if hasattr(timer, "ice_timer_id") and timer.ice_timer_id else (self.autotimericon if timer.isAutoTimer else None)
return self.selclocks[matchType] if selected else self.clocks[matchType], autoTimerIcon
def queryEPG(self, list):
try:
return self.epgcache.lookupEvent(list)
except:
print("[EPGListBase] queryEPG failed\n", list)
import traceback
traceback.print_exc()
return []
|
4,317 |
simulate
|
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.
#
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Collection, MutableMapping, Optional, Tuple, Union
from .utils import NamespacedClient
class IngestClient(NamespacedClient):
def get_pipeline(
self,
*,
id: Optional[Any] = ...,
master_timeout: Optional[Any] = ...,
cluster_manager_timeout: Optional[Any] = ...,
summary: Optional[Any] = ...,
pretty: Optional[bool] = ...,
human: Optional[bool] = ...,
error_trace: Optional[bool] = ...,
format: Optional[str] = ...,
filter_path: Optional[Union[str, Collection[str]]] = ...,
request_timeout: Optional[Union[int, float]] = ...,
ignore: Optional[Union[int, Collection[int]]] = ...,
opaque_id: Optional[str] = ...,
http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
api_key: Optional[Union[str, Tuple[str, str]]] = ...,
params: Optional[MutableMapping[str, Any]] = ...,
headers: Optional[MutableMapping[str, str]] = ...,
) -> Any: ...
def put_pipeline(
self,
id: Any,
*,
body: Any,
master_timeout: Optional[Any] = ...,
cluster_manager_timeout: Optional[Any] = ...,
timeout: Optional[Any] = ...,
pretty: Optional[bool] = ...,
human: Optional[bool] = ...,
error_trace: Optional[bool] = ...,
format: Optional[str] = ...,
filter_path: Optional[Union[str, Collection[str]]] = ...,
request_timeout: Optional[Union[int, float]] = ...,
ignore: Optional[Union[int, Collection[int]]] = ...,
opaque_id: Optional[str] = ...,
http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
api_key: Optional[Union[str, Tuple[str, str]]] = ...,
params: Optional[MutableMapping[str, Any]] = ...,
headers: Optional[MutableMapping[str, str]] = ...,
) -> Any: ...
def delete_pipeline(
self,
id: Any,
*,
master_timeout: Optional[Any] = ...,
cluster_manager_timeout: Optional[Any] = ...,
timeout: Optional[Any] = ...,
pretty: Optional[bool] = ...,
human: Optional[bool] = ...,
error_trace: Optional[bool] = ...,
format: Optional[str] = ...,
filter_path: Optional[Union[str, Collection[str]]] = ...,
request_timeout: Optional[Union[int, float]] = ...,
ignore: Optional[Union[int, Collection[int]]] = ...,
opaque_id: Optional[str] = ...,
http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
api_key: Optional[Union[str, Tuple[str, str]]] = ...,
params: Optional[MutableMapping[str, Any]] = ...,
headers: Optional[MutableMapping[str, str]] = ...,
) -> Any: ...
def METHOD_NAME(
self,
*,
body: Any,
id: Optional[Any] = ...,
verbose: Optional[Any] = ...,
pretty: Optional[bool] = ...,
human: Optional[bool] = ...,
error_trace: Optional[bool] = ...,
format: Optional[str] = ...,
filter_path: Optional[Union[str, Collection[str]]] = ...,
request_timeout: Optional[Union[int, float]] = ...,
ignore: Optional[Union[int, Collection[int]]] = ...,
opaque_id: Optional[str] = ...,
http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
api_key: Optional[Union[str, Tuple[str, str]]] = ...,
params: Optional[MutableMapping[str, Any]] = ...,
headers: Optional[MutableMapping[str, str]] = ...,
) -> Any: ...
def processor_grok(
self,
*,
pretty: Optional[bool] = ...,
human: Optional[bool] = ...,
error_trace: Optional[bool] = ...,
format: Optional[str] = ...,
filter_path: Optional[Union[str, Collection[str]]] = ...,
request_timeout: Optional[Union[int, float]] = ...,
ignore: Optional[Union[int, Collection[int]]] = ...,
opaque_id: Optional[str] = ...,
http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
api_key: Optional[Union[str, Tuple[str, str]]] = ...,
params: Optional[MutableMapping[str, Any]] = ...,
headers: Optional[MutableMapping[str, str]] = ...,
) -> Any: ...
def geo_ip_stats(
self,
*,
pretty: Optional[bool] = ...,
human: Optional[bool] = ...,
error_trace: Optional[bool] = ...,
format: Optional[str] = ...,
filter_path: Optional[Union[str, Collection[str]]] = ...,
request_timeout: Optional[Union[int, float]] = ...,
ignore: Optional[Union[int, Collection[int]]] = ...,
opaque_id: Optional[str] = ...,
http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
api_key: Optional[Union[str, Tuple[str, str]]] = ...,
params: Optional[MutableMapping[str, Any]] = ...,
headers: Optional[MutableMapping[str, str]] = ...,
) -> Any: ...
|
4,318 |
test add remove column
|
import uuid
import sqlalchemy
from alembic.autogenerate import compare_metadata
from django.test.testcases import TestCase, SimpleTestCase
from nose.tools import assert_list_equal
from corehq.sql_db.connections import connection_manager, DEFAULT_ENGINE_ID
from ..alembic_diffs import (
DiffTypes,
SimpleDiff,
get_migration_context,
get_tables_to_rebuild,
reformat_alembic_diffs,
)
def test_flatten_raw_diffs():
raw_diffs = [
[('diff1', None)],
[('diff2', None)],
('diff3', None),
]
flattened = reformat_alembic_diffs(raw_diffs)
assert_list_equal(flattened, [
SimpleDiff('diff1', None, None, ('diff1', None)),
SimpleDiff('diff2', None, None, ('diff1', None)),
SimpleDiff('diff3', None, None, ('diff1', None)),
])
class TestAlembicDiffs(TestCase):
@classmethod
def setUpClass(cls):
super(TestAlembicDiffs, cls).setUpClass()
cls.engine = connection_manager.get_engine()
cls.metadata = sqlalchemy.MetaData()
cls.table_name = 'diff_table_' + uuid.uuid4().hex
sqlalchemy.Table(
cls.table_name, cls.metadata,
sqlalchemy.Column('user_id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('user_name', sqlalchemy.String(16), nullable=False),
sqlalchemy.Column('email_address', sqlalchemy.String(60), key='email'),
sqlalchemy.Column('password', sqlalchemy.String(20), nullable=False),
)
cls.metadata.create_all(cls.engine)
@classmethod
def tearDownClass(cls):
cls.metadata.drop_all(cls.engine)
connection_manager.dispose_engine(DEFAULT_ENGINE_ID)
super(TestAlembicDiffs, cls).tearDownClass()
def setUp(self):
self.transaction_context = self.engine.begin()
self.connection = self.transaction_context.__enter__()
def tearDown(self):
self.transaction_context.__exit__(None, None, None)
def test_table_filter(self):
migration_context = get_migration_context(self.engine.connect(), [self.table_name])
sqlalchemy.Table('new_table', self.metadata)
raw_diffs = compare_metadata(migration_context, self.metadata)
diffs = reformat_alembic_diffs(raw_diffs)
self.assertEqual(0, len(diffs))
def test_add_remove_table(self):
metadata = sqlalchemy.MetaData()
sqlalchemy.Table('new_table', metadata)
self._test_diffs(metadata, {
SimpleDiff(DiffTypes.ADD_TABLE, 'new_table', None, None),
SimpleDiff(DiffTypes.REMOVE_TABLE, self.table_name, None, None),
})
def METHOD_NAME(self):
metadata = sqlalchemy.MetaData()
sqlalchemy.Table(
self.table_name, metadata,
sqlalchemy.Column('user_id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('user_name', sqlalchemy.String(16), nullable=False),
sqlalchemy.Column('email_address', sqlalchemy.String(60), key='email'),
sqlalchemy.Column('new_password', sqlalchemy.String(20), nullable=False)
)
diffs = self._test_diffs(metadata, {
SimpleDiff(DiffTypes.ADD_COLUMN, self.table_name, 'new_password', None),
SimpleDiff(DiffTypes.REMOVE_COLUMN, self.table_name, 'password', None)
})
# check that we can get the column via the property
self.assertIsNotNone(diffs[0].column)
self.assertIsNotNone(diffs[1].column)
def test_modify_column(self):
metadata = sqlalchemy.MetaData()
sqlalchemy.Table(
self.table_name, metadata,
sqlalchemy.Column('user_id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('user_name', sqlalchemy.String(16), nullable=True),
sqlalchemy.Column('email_address', sqlalchemy.String(60), key='email'),
sqlalchemy.Column('password', sqlalchemy.Integer, nullable=False)
)
self._test_diffs(metadata, {
SimpleDiff(DiffTypes.MODIFY_TYPE, self.table_name, 'password', None),
SimpleDiff(DiffTypes.MODIFY_NULLABLE, self.table_name, 'user_name', None),
})
def _test_diffs(self, metadata, expected_diffs):
migration_context = get_migration_context(
self.engine.connect(),
[self.table_name, 'new_table']
)
raw_diffs = compare_metadata(migration_context, metadata)
diffs = reformat_alembic_diffs(raw_diffs)
self.assertEqual(set(diffs), expected_diffs)
return diffs
class TestTablesToRebuild(SimpleTestCase):
def test_filter_by_type(self):
diffs = {
SimpleDiff(type_, type_, None, None)
for type_ in DiffTypes.ALL
}
tables = get_tables_to_rebuild(diffs)
self.assertEqual(
tables,
set(DiffTypes.TYPES_FOR_REBUILD)
)
|
4,319 |
test plugins connection aws ssm put file
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from io import StringIO
import pytest
from unittest.mock import patch, MagicMock
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
if not HAS_BOTO3:
    pytestmark = pytest.mark.skip("These tests require the python modules 'boto3' and 'botocore'")
class TestConnectionBaseClass:
@patch("os.path.exists")
@patch("subprocess.Popen")
@patch("select.poll")
@patch("boto3.client")
def test_plugins_connection_aws_ssm_start_session(self, boto_client, s_poll, s_popen, mock_ospe):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.get_option = MagicMock()
conn.get_option.side_effect = ["i1234", "executable", "abcd", "i1234"]
conn.host = "abc"
mock_ospe.return_value = True
boto3 = MagicMock()
boto3.client("ssm").return_value = MagicMock()
conn.start_session = MagicMock()
conn._session_id = MagicMock()
conn._session_id.return_value = "s1"
s_popen.return_value.stdin.write = MagicMock()
s_poll.return_value = MagicMock()
s_poll.return_value.register = MagicMock()
s_popen.return_value.poll = MagicMock()
s_popen.return_value.poll.return_value = None
conn._stdin_readline = MagicMock()
conn._stdin_readline.return_value = "abc123"
conn.SESSION_START = "abc"
conn.start_session()
@patch("random.choice")
def test_plugins_connection_aws_ssm_exec_command(self, r_choice):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
r_choice.side_effect = ["a", "a", "a", "a", "a", "b", "b", "b", "b", "b"]
conn.MARK_LENGTH = 5
conn._session = MagicMock()
conn._session.stdin.write = MagicMock()
conn._wrap_command = MagicMock()
conn._wrap_command.return_value = "cmd1"
conn._flush_stderr = MagicMock()
conn._windows = MagicMock()
conn._windows.return_value = True
conn._session.poll = MagicMock()
conn._session.poll.return_value = None
conn._timeout = MagicMock()
conn._poll_stdout = MagicMock()
conn._poll_stdout.poll = MagicMock()
conn._poll_stdout.poll.return_value = True
conn._session.stdout = MagicMock()
conn._session.stdout.readline = MagicMock()
conn._post_process = MagicMock()
conn._post_process.return_value = "test"
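        # Simulated session output: start mark, command output, exit status, end mark (marks are MARK_LENGTH characters long).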
conn._session.stdout.readline.side_effect = iter(["aaaaa\n", "Hi\n", "0\n", "bbbbb\n"])
conn.get_option = MagicMock()
conn.get_option.return_value = 1
returncode = "a"
stdout = "b"
return (returncode, stdout, conn._flush_stderr)
def test_plugins_connection_aws_ssm_prepare_terminal(self):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.is_windows = MagicMock()
conn.is_windows.return_value = True
def test_plugins_connection_aws_ssm_wrap_command(self):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.is_windows = MagicMock()
conn.is_windows.return_value = True
return "windows1"
def test_plugins_connection_aws_ssm_post_process(self):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.is_windows = MagicMock()
conn.is_windows.return_value = True
conn.stdout = MagicMock()
returncode = 0
return returncode, conn.stdout
@patch("subprocess.Popen")
def test_plugins_connection_aws_ssm_flush_stderr(self, s_popen):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.poll_stderr = MagicMock()
conn.poll_stderr.register = MagicMock()
conn.stderr = None
s_popen.poll().return_value = 123
return conn.stderr
# XXX This isn't doing anything
# def test_plugins_connection_aws_ssm_get_url(self):
# pc = PlayContext()
# new_stdin = StringIO()
# connection_loader.get('community.aws.aws_ssm', pc, new_stdin)
# boto3 = MagicMock()
# boto3.client('s3').return_value = MagicMock()
# boto3.generate_presigned_url.return_value = MagicMock()
# return (boto3.generate_presigned_url.return_value)
@patch("os.path.exists")
def METHOD_NAME(self, mock_ospe):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn._connect = MagicMock()
conn._file_transport_command = MagicMock()
conn._file_transport_command.return_value = (0, "stdout", "stderr")
conn.put_file("/in/file", "/out/file")
def test_plugins_connection_aws_ssm_fetch_file(self):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn._connect = MagicMock()
conn._file_transport_command = MagicMock()
conn._file_transport_command.return_value = (0, "stdout", "stderr")
conn.fetch_file("/in/file", "/out/file")
@patch("subprocess.check_output")
@patch("boto3.client")
def test_plugins_connection_file_transport_command(self, boto_client, s_check_output):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.get_option = MagicMock()
conn.get_option.side_effect = ["1", "2", "3", "4", "5"]
conn._get_url = MagicMock()
conn._get_url.side_effect = ["url1", "url2"]
boto3 = MagicMock()
boto3.client("s3").return_value = MagicMock()
conn.get_option.return_value = 1
get_command = MagicMock()
put_command = MagicMock()
conn.exec_command = MagicMock()
conn.exec_command.return_value = (put_command, None, False)
conn.download_fileobj = MagicMock()
conn.exec_command(put_command, in_data=None, sudoable=False)
conn.exec_command(get_command, in_data=None, sudoable=False)
@patch("subprocess.check_output")
def test_plugins_connection_aws_ssm_close(self, s_check_output):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get("community.aws.aws_ssm", pc, new_stdin)
conn.instance_id = "i-12345"
conn._session_id = True
conn.get_option = MagicMock()
conn.get_option.side_effect = ["/abc", "pqr"]
conn._session = MagicMock()
conn._session.terminate = MagicMock()
conn._session.communicate = MagicMock()
conn._terminate_session = MagicMock()
conn._terminate_session.return_value = ""
conn._session_id = MagicMock()
conn._session_id.return_value = "a"
conn._client = MagicMock()
conn.close()
|
4,320 |
init attributes
|
# -*- coding: utf-8 -*-
############################ Copyrights and license ############################
# #
# Copyright 2018 bbi-yggy <[email protected]> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
from __future__ import absolute_import
import github.GithubObject
# NOTE: There is currently no way to get cards "in triage" for a project.
# https://platform.github.community/t/moving-github-project-cards-that-are-in-triage/3784
#
# See also https://developer.github.com/v4/object/projectcard for the next generation GitHub API,
# which may point the way to where the API is likely headed and what might come back to v3. E.g. ProjectCard.content member.
class ProjectCard(github.GithubObject.CompletableGithubObject):
"""
This class represents Project Cards. The reference can be found here https://developer.github.com/v3/projects/cards
"""
def __repr__(self):
return self.get__repr__({"id": self._id.value})
@property
def archived(self):
"""
:type: bool
"""
return self._archived.value
@property
def column_url(self):
"""
:type: string
"""
return self._column_url.value
@property
def content_url(self):
"""
:type: string
"""
return self._content_url.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
return self._created_at.value
@property
def creator(self):
"""
:type: :class:`github.NamedUser.NamedUser`
"""
return self._creator.value
@property
def id(self):
"""
:type: integer
"""
return self._id.value
@property
def node_id(self):
"""
:type: string
"""
return self._node_id.value
@property
def note(self):
"""
:type: string
"""
return self._note.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
return self._url.value
# Note that the content_url for any card will be an "issue" URL, from
    # which you can retrieve either an Issue or a PullRequest. Unfortunately
# the API doesn't make it clear which you are dealing with.
def get_content(self, content_type=github.GithubObject.NotSet):
"""
:calls: `GET /repos/:owner/:repo/pulls/:number <https://developer.github.com/v3/pulls/#get-a-single-pull-request>`_
:rtype: :class:`github.PullRequest.PullRequest` or :class:`github.Issue.Issue`
"""
if self.content_url is None:
return None
if content_type == "PullRequest":
headers, data = self._requester.requestJsonAndCheck(
"GET", self.content_url.replace("issues", "pulls")
)
return github.PullRequest.PullRequest(
self._requester, headers, data, completed=True
)
elif content_type is github.GithubObject.NotSet or content_type == "Issue":
headers, data = self._requester.requestJsonAndCheck("GET", self.content_url)
return github.Issue.Issue(self._requester, headers, data, completed=True)
else:
assert False, "Unknown content type: %s" % content_type
def METHOD_NAME(self):
self._archived = github.GithubObject.NotSet
self._column_url = github.GithubObject.NotSet
self._content_url = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._creator = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._node_id = github.GithubObject.NotSet
self._note = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "archived" in attributes: # pragma no branch
self._archived = self._makeBoolAttribute(attributes["archived"])
if "column_url" in attributes: # pragma no branch
self._column_url = self._makeStringAttribute(attributes["column_url"])
if "content_url" in attributes: # pragma no branch
self._content_url = self._makeStringAttribute(attributes["content_url"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "creator" in attributes: # pragma no branch
self._creator = self._makeClassAttribute(
github.NamedUser.NamedUser, attributes["creator"]
)
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "node_id" in attributes: # pragma no branch
self._node_id = self._makeStringAttribute(attributes["node_id"])
if "note" in attributes: # pragma no branch
self._note = self._makeStringAttribute(attributes["note"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
|
4,321 |
delete distinct counts
|
from sentry.tsdb.base import BaseTSDB
class DummyTSDB(BaseTSDB):
"""
A no-op time-series storage.
"""
def incr(self, model, key, timestamp=None, count=1, environment_id=None):
self.validate_arguments([model], [environment_id])
def merge(self, model, destination, sources, timestamp=None, environment_ids=None):
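        # Always include the global (None) environment when validating arguments.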
environment_ids = (set(environment_ids) if environment_ids is not None else set()).union(
[None]
)
self.validate_arguments([model], environment_ids)
def delete(self, models, keys, start=None, end=None, timestamp=None, environment_ids=None):
environment_ids = (set(environment_ids) if environment_ids is not None else set()).union(
[None]
)
self.validate_arguments(models, environment_ids)
def get_range(
self,
model,
keys,
start,
end,
rollup=None,
environment_ids=None,
use_cache=False,
jitter_value=None,
tenant_ids=None,
referrer_suffix=None,
):
self.validate_arguments([model], environment_ids if environment_ids is not None else [None])
_, series = self.get_optimal_rollup_series(start, end, rollup)
return {k: [(ts, 0) for ts in series] for k in keys}
def record(self, model, key, values, timestamp=None, environment_id=None):
self.validate_arguments([model], [environment_id])
def get_distinct_counts_series(
self, model, keys, start, end=None, rollup=None, environment_id=None
):
self.validate_arguments([model], [environment_id])
_, series = self.get_optimal_rollup_series(start, end, rollup)
return {k: [(ts, 0) for ts in series] for k in keys}
def get_distinct_counts_totals(
self,
model,
keys,
start,
end=None,
rollup=None,
environment_id=None,
use_cache=False,
jitter_value=None,
tenant_ids=None,
referrer_suffix=None,
):
self.validate_arguments([model], [environment_id])
return {k: 0 for k in keys}
def get_distinct_counts_union(
self, model, keys, start, end=None, rollup=None, environment_id=None
):
self.validate_arguments([model], [environment_id])
return 0
def merge_distinct_counts(
self, model, destination, sources, timestamp=None, environment_ids=None
):
environment_ids = (set(environment_ids) if environment_ids is not None else set()).union(
[None]
)
self.validate_arguments([model], environment_ids)
def METHOD_NAME(
self, models, keys, start=None, end=None, timestamp=None, environment_ids=None
):
environment_ids = (set(environment_ids) if environment_ids is not None else set()).union(
[None]
)
self.validate_arguments(models, environment_ids)
def record_frequency_multi(self, requests, timestamp=None, environment_id=None):
self.validate_arguments([model for model, request in requests], [environment_id])
def get_most_frequent(
self, model, keys, start, end=None, rollup=None, limit=None, environment_id=None
):
self.validate_arguments([model], [environment_id])
return {key: [] for key in keys}
def get_most_frequent_series(
self, model, keys, start, end=None, rollup=None, limit=None, environment_id=None
):
self.validate_arguments([model], [environment_id])
rollup, series = self.get_optimal_rollup_series(start, end, rollup)
return {key: [(timestamp, {}) for timestamp in series] for key in keys}
def get_frequency_series(self, model, items, start, end=None, rollup=None, environment_id=None):
self.validate_arguments([model], [environment_id])
rollup, series = self.get_optimal_rollup_series(start, end, rollup)
results = {}
for key, members in items.items():
result = results[key] = []
for timestamp in series:
result.append((timestamp, {k: 0.0 for k in members}))
return results
def get_frequency_totals(self, model, items, start, end=None, rollup=None, environment_id=None):
self.validate_arguments([model], [environment_id])
results = {}
for key, members in items.items():
results[key] = {member: 0.0 for member in members}
return results
def merge_frequencies(self, model, destination, sources, timestamp=None, environment_ids=None):
environment_ids = list(
(set(environment_ids) if environment_ids is not None else set()).union([None])
)
self.validate_arguments([model], environment_ids)
def delete_frequencies(
self, models, keys, start=None, end=None, timestamp=None, environment_ids=None
):
environment_ids = (set(environment_ids) if environment_ids is not None else set()).union(
[None]
)
self.validate_arguments(models, environment_ids)
def flush(self):
pass
|
4,322 |
test to edge index
|
import os.path as osp
import torch
import torch_geometric.typing
from torch_geometric.profile import benchmark
from torch_geometric.testing import is_full_test, withPackage
from torch_geometric.typing import SparseTensor
from torch_geometric.utils import (
dense_to_sparse,
is_sparse,
is_torch_sparse_tensor,
to_edge_index,
to_torch_coo_tensor,
to_torch_csc_tensor,
to_torch_csr_tensor,
)
def test_dense_to_sparse():
adj = torch.tensor([
[3.0, 1.0],
[2.0, 0.0],
])
edge_index, edge_attr = dense_to_sparse(adj)
assert edge_index.tolist() == [[0, 0, 1], [0, 1, 0]]
assert edge_attr.tolist() == [3, 1, 2]
if is_full_test():
jit = torch.jit.script(dense_to_sparse)
edge_index, edge_attr = jit(adj)
assert edge_index.tolist() == [[0, 0, 1], [0, 1, 0]]
assert edge_attr.tolist() == [3, 1, 2]
adj = torch.tensor([[
[3.0, 1.0],
[2.0, 0.0],
], [
[0.0, 1.0],
[0.0, 2.0],
]])
edge_index, edge_attr = dense_to_sparse(adj)
assert edge_index.tolist() == [[0, 0, 1, 2, 3], [0, 1, 0, 3, 3]]
assert edge_attr.tolist() == [3, 1, 2, 1, 2]
if is_full_test():
jit = torch.jit.script(dense_to_sparse)
edge_index, edge_attr = jit(adj)
assert edge_index.tolist() == [[0, 0, 1, 2, 3], [0, 1, 0, 3, 3]]
assert edge_attr.tolist() == [3, 1, 2, 1, 2]
def test_dense_to_sparse_bipartite():
edge_index, edge_attr = dense_to_sparse(torch.rand(2, 10, 5))
assert edge_index[0].max() == 19
assert edge_index[1].max() == 9
def test_is_torch_sparse_tensor():
x = torch.randn(5, 5)
assert not is_torch_sparse_tensor(x)
assert is_torch_sparse_tensor(x.to_sparse())
if torch_geometric.typing.WITH_TORCH_SPARSE:
assert not is_torch_sparse_tensor(SparseTensor.from_dense(x))
def test_is_sparse():
x = torch.randn(5, 5)
assert not is_sparse(x)
assert is_sparse(x.to_sparse())
if torch_geometric.typing.WITH_TORCH_SPARSE:
assert is_sparse(SparseTensor.from_dense(x))
def test_to_torch_coo_tensor():
edge_index = torch.tensor([
[0, 1, 1, 2, 2, 3],
[1, 0, 2, 1, 3, 2],
])
edge_attr = torch.randn(edge_index.size(1), 8)
adj = to_torch_coo_tensor(edge_index, is_coalesced=False)
assert adj.is_coalesced()
assert adj.size() == (4, 4)
assert adj.layout == torch.sparse_coo
assert torch.allclose(adj.indices(), edge_index)
adj = to_torch_coo_tensor(edge_index, is_coalesced=True)
assert adj.is_coalesced()
assert adj.size() == (4, 4)
assert adj.layout == torch.sparse_coo
assert torch.allclose(adj.indices(), edge_index)
adj = to_torch_coo_tensor(edge_index, size=6)
assert adj.size() == (6, 6)
assert adj.layout == torch.sparse_coo
assert torch.allclose(adj.indices(), edge_index)
adj = to_torch_coo_tensor(edge_index, edge_attr)
assert adj.size() == (4, 4, 8)
assert adj.layout == torch.sparse_coo
assert torch.allclose(adj.indices(), edge_index)
assert torch.allclose(adj.values(), edge_attr)
if is_full_test():
jit = torch.jit.script(to_torch_coo_tensor)
adj = jit(edge_index, edge_attr)
assert adj.size() == (4, 4, 8)
assert adj.layout == torch.sparse_coo
assert torch.allclose(adj.indices(), edge_index)
assert torch.allclose(adj.values(), edge_attr)
def test_to_torch_csr_tensor():
edge_index = torch.tensor([
[0, 1, 1, 2, 2, 3],
[1, 0, 2, 1, 3, 2],
])
adj = to_torch_csr_tensor(edge_index)
assert adj.size() == (4, 4)
assert adj.layout == torch.sparse_csr
assert torch.allclose(adj.to_sparse_coo().coalesce().indices(), edge_index)
edge_weight = torch.randn(edge_index.size(1))
adj = to_torch_csr_tensor(edge_index, edge_weight)
assert adj.size() == (4, 4)
assert adj.layout == torch.sparse_csr
coo = adj.to_sparse_coo().coalesce()
assert torch.allclose(coo.indices(), edge_index)
assert torch.allclose(coo.values(), edge_weight)
if torch_geometric.typing.WITH_PT20:
edge_attr = torch.randn(edge_index.size(1), 8)
adj = to_torch_csr_tensor(edge_index, edge_attr)
assert adj.size() == (4, 4, 8)
assert adj.layout == torch.sparse_csr
coo = adj.to_sparse_coo().coalesce()
assert torch.allclose(coo.indices(), edge_index)
assert torch.allclose(coo.values(), edge_attr)
@withPackage('torch>=1.12.0')
def test_to_torch_csc_tensor():
edge_index = torch.tensor([
[0, 1, 1, 2, 2, 3],
[1, 0, 2, 1, 3, 2],
])
adj = to_torch_csc_tensor(edge_index)
assert adj.size() == (4, 4)
assert adj.layout == torch.sparse_csc
adj_coo = adj.to_sparse_coo().coalesce()
if torch_geometric.typing.WITH_PT20:
assert torch.allclose(adj_coo.indices(), edge_index)
else:
assert torch.allclose(adj_coo.indices().flip([0]), edge_index)
edge_weight = torch.randn(edge_index.size(1))
adj = to_torch_csc_tensor(edge_index, edge_weight)
assert adj.size() == (4, 4)
assert adj.layout == torch.sparse_csc
adj_coo = adj.to_sparse_coo().coalesce()
if torch_geometric.typing.WITH_PT20:
assert torch.allclose(adj_coo.indices(), edge_index)
assert torch.allclose(adj_coo.values(), edge_weight)
else:
perm = adj_coo.indices()[0].argsort()
assert torch.allclose(adj_coo.indices()[:, perm], edge_index)
assert torch.allclose(adj_coo.values()[perm], edge_weight)
if torch_geometric.typing.WITH_PT20:
edge_attr = torch.randn(edge_index.size(1), 8)
adj = to_torch_csc_tensor(edge_index, edge_attr)
assert adj.size() == (4, 4, 8)
assert adj.layout == torch.sparse_csc
assert torch.allclose(adj.to_sparse_coo().coalesce().indices(),
edge_index)
assert torch.allclose(adj.to_sparse_coo().coalesce().values(),
edge_attr)
@withPackage('torch>=2.1.0')
def test_to_torch_coo_tensor_save_load(tmp_path):
edge_index = torch.tensor([
[0, 1, 1, 2, 2, 3],
[1, 0, 2, 1, 3, 2],
])
adj = to_torch_coo_tensor(edge_index, is_coalesced=False)
assert adj.is_coalesced()
path = osp.join(tmp_path, 'adj.t')
torch.save(adj, path)
adj = torch.load(path)
assert adj.is_coalesced()
def METHOD_NAME():
adj = torch.tensor([
[0., 1., 0., 0.],
[1., 0., 1., 0.],
[0., 1., 0., 1.],
[0., 0., 1., 0.],
]).to_sparse()
edge_index, edge_attr = to_edge_index(adj)
assert edge_index.tolist() == [[0, 1, 1, 2, 2, 3], [1, 0, 2, 1, 3, 2]]
assert edge_attr.tolist() == [1., 1., 1., 1., 1., 1.]
if is_full_test():
jit = torch.jit.script(to_edge_index)
edge_index, edge_attr = jit(adj)
assert edge_index.tolist() == [[0, 1, 1, 2, 2, 3], [1, 0, 2, 1, 3, 2]]
assert edge_attr.tolist() == [1., 1., 1., 1., 1., 1.]
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--device', type=str, default='cuda')
args = parser.parse_args()
num_nodes, num_edges = 10_000, 200_000
edge_index = torch.randint(num_nodes, (2, num_edges), device=args.device)
benchmark(
funcs=[
SparseTensor.from_edge_index, to_torch_coo_tensor,
to_torch_csr_tensor, to_torch_csc_tensor
],
func_names=['SparseTensor', 'To COO', 'To CSR', 'To CSC'],
args=(edge_index, None, (num_nodes, num_nodes)),
num_steps=50 if args.device == 'cpu' else 500,
num_warmups=10 if args.device == 'cpu' else 100,
)
|
4,323 |
test can change
|
# Copyright 2012 Christoph Reiter
# 2020 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from unittest.mock import Mock
import quodlibet.config
from quodlibet import app
from quodlibet.formats import AudioFile
from quodlibet.plugins.editing import EditTagsPlugin
from quodlibet.qltk.edittags import (SplitValues, SplitDisc, SplitTitle,
SplitArranger, AddTagDialog,
AudioFileGroup, EditTags, ListEntry,
Comment, EditTagsPluginHandler)
from quodlibet.qltk.properties import SongProperties
from tests import TestCase, init_fake_app, destroy_fake_app
class DummyEditPlugin(EditTagsPlugin):
activations = []
def activated(self, tag, value):
self.activations.append((tag, value))
return super().activated(tag, value)
class TEditTags(TestCase):
def setUp(self):
init_fake_app()
quodlibet.config.init()
def tearDown(self):
quodlibet.config.quit()
destroy_fake_app()
def test_items(self):
SplitValues("foo", "bar").destroy()
SplitDisc("foo", "bar").destroy()
SplitTitle("foo", "bar").destroy()
SplitArranger("foo", "bar").destroy()
def test_addtag_dialog(self):
AddTagDialog(None, ["artist"], app.library).destroy()
def test_edit_tags_starts(self):
props = SongProperties(app.library, [], quodlibet.app.window)
EditTags(props, app.library)
def test_edit_tags_popup_menu(self):
song = AudioFile({"~filename": "/dev/null", "artist": "Person",
"album": "Dj Bars of FOO"})
props = SongProperties(app.library, [song], app.window)
box = EditTags(props, app.library)
# Add a fake plugin
plugin_cls = DummyEditPlugin
box.handler = Mock(EditTagsPluginHandler)
box.handler.plugins = [plugin_cls]
model = box._view.get_model()
# Make sure there's a row
tag, value = "artist", song("artist")
entry = ListEntry(tag, Comment(value))
model.append(row=[entry])
box._group_info = AudioFileGroup([song])
box._view.select_by_func(lambda _: True)
# Prevent weird mouse stuff failing in tests
box._view.ensure_popup_selection = lambda: False
box._popup_menu(box._view, props)
box.show()
assert plugin_cls.activations == [(tag, value)]
class GroupSong(AudioFile):
def __init__(self, can_multiple=True, can_change=True, cant_change=[]):
self._can_multiple = can_multiple
self._can_change = can_change
self._cant_change = cant_change
def can_multiple_values(self, key=None):
if key is None:
return self._can_multiple
if self._can_multiple is True:
return True
return key in self._can_multiple
def can_change(self, key=None):
if key is None:
return self._can_change
if self._can_change is True:
return key not in self._cant_change
return key in self._can_change
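# Illustrative note (not part of the original test module): with the semantics
# above, GroupSong(can_change=["foo"]).can_change("foo") is True while
# GroupSong(can_change=["foo"]).can_change("bar") is False, and
# GroupSong(cant_change=["baz"]).can_change("baz") is False because the default
# can_change=True only excludes keys listed in cant_change.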
class TAudioFileGroup(TestCase):
def test_multiple_values(self):
group = AudioFileGroup([GroupSong(True), GroupSong(True)])
self.assertTrue(group.can_multiple_values() is True)
self.assertTrue(group.can_multiple_values("foo") is True)
group = AudioFileGroup([GroupSong(["ha"]), GroupSong(True)])
self.assertEqual(group.can_multiple_values(), {"ha"})
self.assertFalse(group.can_multiple_values("foo"))
self.assertTrue(group.can_multiple_values("ha"))
group = AudioFileGroup([GroupSong(["foo", "ha"]), GroupSong(["ha"])])
self.assertEqual(group.can_multiple_values(), {"ha"})
self.assertFalse(group.can_multiple_values("foo"))
self.assertTrue(group.can_multiple_values("ha"))
def METHOD_NAME(self):
group = AudioFileGroup(
[GroupSong(can_change=True), GroupSong(can_change=True)])
self.assertTrue(group.can_change() is True)
self.assertTrue(group.can_change("foo") is True)
group = AudioFileGroup(
[GroupSong(can_change=["foo", "ha"]),
GroupSong(can_change=["ha"])])
self.assertEqual(group.can_change(), {"ha"})
self.assertFalse(group.can_change("foo"))
self.assertTrue(group.can_change("ha"))
group = AudioFileGroup([GroupSong(), GroupSong(cant_change=["baz"])])
self.assertTrue(group.can_change())
self.assertFalse(group.can_change("baz"))
|
4,324 |
my detect version
|
# Copyright 2023, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Hacks for scons that we apply.
We block some tools from the standard scan, there is e.g. no need to ask
what fortran version we have installed to compile with Nuitka.
Also we hack the gcc version detection to fix some bugs in it, and to avoid
scanning for g++ when we have gcc installed, but only if that is not too old
a version.
"""
import os
import re
import SCons.Tool.gcc # pylint: disable=I0021,import-error
from SCons.Script import Environment # pylint: disable=I0021,import-error
from nuitka.Tracing import scons_details_logger
from nuitka.utils.Execution import executeProcess
from nuitka.utils.FileOperations import openTextFile
from nuitka.utils.Utils import isLinux, isMacOS
from .SconsUtils import decodeData, getExecutablePath, isGccName
# Cache for detected versions.
v_cache = {}
# Prevent these programs from being found, avoiding the burden of tool init.
_blocked_tools = (
# TODO: Where the fallback is needed, g++ needs to be scanned or else it
# cannot be used.
# "g++",
"c++",
"f95",
"f90",
"f77",
"gfortran",
"ifort",
"javah",
"tar",
"dmd",
"gdc",
"flex",
"bison",
"ranlib",
"ar",
"ldc2",
"pdflatex",
"pdftex",
"latex",
"tex",
"dvipdf",
"dvips",
"gs",
"swig",
"ifl",
"rpcgen",
"rpmbuild",
"bk",
"p4",
"m4",
"ml",
"icc",
"sccs",
"rcs",
"cvs",
"as",
"gas",
"nasm",
)
def METHOD_NAME(cc):
if isGccName(cc) or "clang" in cc:
command = (
cc,
"-dumpversion",
)
else:
command = (
cc,
"--version",
)
stdout, stderr, exit_code = executeProcess(command)
if exit_code != 0:
scons_details_logger.info(
"Error, error exit from '%s' (%d) gave %r." % (command, exit_code, stderr)
)
return None
line = stdout.splitlines()[0]
if str is not bytes and type(line) is bytes:
line = decodeData(line)
line = line.strip()
match = re.findall(r"[0-9]+(?:\.[0-9]+)+", line)
if match:
version = match[0]
else:
# gcc 8 or higher
version = line.strip()
version = tuple(int(part) for part in version.split("."))
return version
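# Illustrative example (not from the original source): for a first stdout line
# such as "4.8.2" the regular expression above matches and the function returns
# (4, 8, 2); for a bare "12" (as printed by some newer gcc builds) the regex
# does not match, the whole line is used, and the result is the tuple (12,).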
def myDetectVersion(env, cc):
"""Return the version of the GNU compiler, or None if it is not a GNU compiler."""
cc = env.subst(cc)
if not cc:
return None
if "++" in os.path.basename(cc):
return None
# Make path absolute, to improve cache hit rate.
cc = getExecutablePath(cc, env)
if cc is None:
return None
if cc not in v_cache:
v_cache[cc] = METHOD_NAME(cc)
scons_details_logger.info("CC '%s' version check gives %r" % (cc, v_cache[cc]))
return v_cache[cc]
def myDetect(self, progs):
# Don't consider Fortran, tar, D, c++, we don't need it. We do manual
# fallback
for blocked_tool in _blocked_tools:
if blocked_tool in progs:
return None
# Note: Actually, with our inline copy, this is maybe not supposed to
# happen at all
return orig_detect(self, progs)
# The original value will be used in our form.
orig_detect = Environment.Detect
def getEnhancedToolDetect():
SCons.Tool.gcc.detect_version = myDetectVersion
# Allow CondaCC to be detected if it is in PATH.
if isLinux():
SCons.Tool.gcc.compilers.insert(0, "x86_64-conda-linux-gnu-gcc")
if isMacOS() and "CONDA_TOOLCHAIN_BUILD" in os.environ:
SCons.Tool.gcc.compilers.insert(
0, "%s-clang" % os.environ["CONDA_TOOLCHAIN_BUILD"]
)
return myDetect
def makeGccUseLinkerFile(source_files, module_mode, env):
tmp_linker_filename = os.path.join(env.source_dir, "@link_input.txt")
# Note: For Windows, it's done in mingw.py because of its use of
# a class rather than a string here, that is not working for the
# monkey patching.
if type(env["SHLINKCOM"]) is str:
env["SHLINKCOM"] = env["SHLINKCOM"].replace(
"$SOURCES", "@%s" % env.get("ESCAPE", lambda x: x)(tmp_linker_filename)
)
env["LINKCOM"] = env["LINKCOM"].replace(
"$SOURCES", "@%s" % env.get("ESCAPE", lambda x: x)(tmp_linker_filename)
)
with openTextFile(tmp_linker_filename, "w") as tmpfile:
for filename in source_files:
filename = ".".join(filename.split(".")[:-1]) + (
".os" if module_mode and os.name != "nt" else ".o"
)
if os.name == "nt":
filename = filename.replace(os.path.sep, "/")
tmpfile.write('"%s"\n' % filename)
tmpfile.write(env.subst("$SOURCES"))
|
4,325 |
add args
|
# The MIT License (MIT)
# Copyright © 2021 Yuma Rao
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the “Software”), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
# the Software.
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import argparse
import bittensor
from rich import print
from typing import List, Dict
from torch import FloatTensor
from langchain.llms import AlephAlpha
class AlephAlphaMiner(bittensor.BasePromptingMiner):
@classmethod
def check_config(cls, config: "bittensor.Config"):
assert (
config.aleph.api_key != None
), "the miner requires passing --aleph.api_key as an argument of the config."
@classmethod
def METHOD_NAME(cls, parser: argparse.ArgumentParser):
parser.add_argument(
"--aleph.api_key", type=str, help="AlephAlpha API key.", required=True
)
parser.add_argument(
"--aleph.model",
type=str,
help="Model name to use.",
default="luminous-base",
)
parser.add_argument(
"--aleph.maximum_tokens",
type=int,
help="The maximum number of tokens to be generated.",
default=64,
)
parser.add_argument(
"--aleph.temperature",
type=float,
help="A non-negative float that tunes the degree of randomness in generation.",
default=0.0,
)
parser.add_argument(
"--aleph.stop_sequences",
type=List[str],
help="Stop tokens.",
default=["user: ", "bot: ", "system: "],
)
parser.add_argument(
"--aleph.top_k",
type=int,
help="Number of most likely tokens to consider at each step.",
default=0,
)
parser.add_argument(
"--aleph.top_p",
type=float,
help="Total probability mass of tokens to consider at each step.",
default=0.0,
)
def __init__(self):
super(AlephAlphaMiner, self).__init__()
print(self.config)
self.model = AlephAlpha(
aleph_alpha_api_key=self.config.aleph.api_key,
model=self.config.aleph.model,
maximum_tokens=self.config.aleph.maximum_tokens,
temperature=self.config.aleph.temperature,
top_k=self.config.aleph.top_k,
top_p=self.config.aleph.top_p,
stop_sequences=self.config.aleph.stop_sequences,
)
def backward(
self, messages: List[Dict[str, str]], response: str, rewards: FloatTensor
) -> str:
pass
@staticmethod
def _process_history(history: List[Dict[str, str]]) -> str:
processed_history = ""
for message in history:
if message["role"] == "system":
processed_history += "system: " + message["content"] + "\n"
if message["role"] == "assistant":
processed_history += "assistant: " + message["content"] + "\n"
if message["role"] == "user":
processed_history += "user: " + message["content"] + "\n"
return processed_history
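# Illustrative example (not part of the original miner): given
# [{"role": "user", "content": "hi"}, {"role": "assistant", "content": "hello"}]
# _process_history returns the string "user: hi\nassistant: hello\n".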
def blacklist(self, forward_call: "bittensor.BittensorCall"):
return False
def forward(self, messages: List[Dict[str, str]]) -> str:
bittensor.logging.info("messages", str(messages))
history = self._process_history(messages)
bittensor.logging.info("history", str(history))
resp = self.model(history)
bittensor.logging.info("response", str(resp))
return resp
if __name__ == "__main__":
bittensor.utils.version_checking()
AlephAlphaMiner().run()
|
4,326 |
administration
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetInstanceDetailsResult',
'AwaitableGetInstanceDetailsResult',
'get_instance_details',
'get_instance_details_output',
]
@pulumi.output_type
class GetInstanceDetailsResult:
"""
Represents an instance of a DFP instance resource.
"""
def __init__(__self__, METHOD_NAME=None, id=None, location=None, name=None, provisioning_state=None, system_data=None, tags=None, type=None):
if METHOD_NAME and not isinstance(METHOD_NAME, dict):
raise TypeError("Expected argument 'administration' to be a dict")
pulumi.set(__self__, "administration", METHOD_NAME)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def METHOD_NAME(self) -> Optional['outputs.DFPInstanceAdministratorsResponse']:
"""
A collection of DFP instance administrators
"""
return pulumi.get(self, "administration")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
Location of the DFP resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The current deployment state of DFP resource. The provisioningState is to indicate states for resource provisioning.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Metadata pertaining to creation and last modification of the resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Key-value pairs of additional resource provisioning properties.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetInstanceDetailsResult(GetInstanceDetailsResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetInstanceDetailsResult(
METHOD_NAME=self.METHOD_NAME,
id=self.id,
location=self.location,
name=self.name,
provisioning_state=self.provisioning_state,
system_data=self.system_data,
tags=self.tags,
type=self.type)
def get_instance_details(instance_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetInstanceDetailsResult:
"""
Gets details about the specified instances.
:param str instance_name: The name of the instance. It must be a minimum of 3 characters, and a maximum of 63.
:param str resource_group_name: The name of the Azure Resource group of which a given DFP instance is part. This name must be at least 1 character in length, and no more than 90.
"""
__args__ = dict()
__args__['instanceName'] = instance_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:dynamics365fraudprotection/v20210201preview:getInstanceDetails', __args__, opts=opts, typ=GetInstanceDetailsResult).value
return AwaitableGetInstanceDetailsResult(
METHOD_NAME=pulumi.get(__ret__, 'administration'),
id=pulumi.get(__ret__, 'id'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
system_data=pulumi.get(__ret__, 'system_data'),
tags=pulumi.get(__ret__, 'tags'),
type=pulumi.get(__ret__, 'type'))
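# Illustrative usage only (the names below are hypothetical, not taken from this
# generated module):
#
#     details = get_instance_details(instance_name="my-dfp-instance",
#                                     resource_group_name="my-resource-group")
#     print(details.provisioning_state)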
@_utilities.lift_output_func(get_instance_details)
def get_instance_details_output(instance_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetInstanceDetailsResult]:
"""
Gets details about the specified instances.
:param str instance_name: The name of the instance. It must be a minimum of 3 characters, and a maximum of 63.
:param str resource_group_name: The name of the Azure Resource group of which a given DFP instance is part. This name must be at least 1 character in length, and no more than 90.
"""
...
|
4,327 |
test training init command tutorial zenodo
|
"""Tests for the ``training_init`` command."""
import os
from .test_utils import (
CliTestCase,
skip_if_environ,
TEST_DATA_DIR,
)
class CmdTrainingInitTestCase(CliTestCase):
"""Container class defining test cases for the ``training_init`` command."""
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def test_training_init_command_by_default(self):
"""Test training_init command with only topic name."""
with self._isolate():
training_init_command = ["training_init", "--topic_name", "test"]
self._check_exit_code(training_init_command, exit_code=0)
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def test_training_init_command_topic(self):
"""Test training_init command to create new topic."""
with self._isolate():
# working test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--topic_title",
"Topic title",
"--topic_target",
"use",
"--topic_summary",
"Summary",
]
self._check_exit_code(training_init_command, exit_code=0)
# failing test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--topic_title",
"Topic title",
"--topic_target",
"test",
"--topic_summary",
"Summary",
]
self._check_exit_code(training_init_command, exit_code=2)
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def test_training_init_command_tutorial_no_topic(self):
"""Test training_init command with tutorial but no topic."""
with self._isolate():
# failing test: a tutorial without a topic is rejected
training_init_command = ["training_init", "--tutorial_name", "test"]
self._check_exit_code(training_init_command, exit_code=2)
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def test_training_init_command_tutorial(self):
"""Test training_init command to create new tutorial."""
with self._isolate():
# working test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--tutorial_name",
"test",
"--tutorial_title",
"Title of the tutorial",
"--hands_on",
"--slides",
]
self._check_exit_code(training_init_command, exit_code=0)
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def METHOD_NAME(self):
"""Test training_init command to create new tutorial with zenodo."""
with self._isolate():
datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml")
# not working test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--tutorial_name",
"test",
"--zenodo_link",
"https://zenodo.org/record/1321885",
]
self._check_exit_code(training_init_command, exit_code=1)
# working
training_init_command = [
"training_init",
"--topic_name",
"test",
"--tutorial_name",
"test",
"--zenodo_link",
"https://zenodo.org/record/1321885",
"--datatypes",
datatype,
]
self._check_exit_code(training_init_command, exit_code=0)
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def test_training_init_command_tutorial_local_wf(self):
"""Test training_init command to create new tutorial with local workflow."""
with self._isolate():
test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga")
# working test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--tutorial_name",
"test",
"--workflow",
test_workflow,
]
self._check_exit_code(training_init_command, exit_code=0)
@skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
def test_training_init_command_tutorial_remote_wf(self):
"""Test training_init command to create new tutorial with workflow on running instance."""
with self._isolate():
# not working test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--tutorial_name",
"test",
"--workflow_id",
"ID",
]
self._check_exit_code(training_init_command, exit_code=1)
# working test
training_init_command = [
"training_init",
"--topic_name",
"test",
"--tutorial_name",
"test",
"--workflow_id",
"ID",
"--galaxy_url",
"https://usegalaxy.eu/",
"--galaxy_api_key",
"API",
]
self._check_exit_code(training_init_command, exit_code=0)
|
4,328 |
read seg
|
"""I/O for SEG format.
This is the output of DNAcopy segmentation, widely used to serialize segment
data.
The format is BED-like, but with a header row included and the
columns:
- ID, "sampleName"
- chrom, "chromosome"
- loc.start, "start"
- loc.end, "end"
- num.mark, "nbrOfLoci" (optional)
- seg.mean, "mean"
See: https://software.broadinstitute.org/software/igv/SEG
"""
import collections
import csv
import logging
import math
from itertools import zip_longest
import pandas as pd
from Bio.File import as_handle
LOG2_10 = math.log(10, 2) # To convert log10 values to log2
CSV_ERRORS = (
# Base class for pandas parsing errors, including CSV
pd.errors.ParserError,
# Raised by the pandas 'python' CSV parser, at some point, I think
csv.Error,
)
def METHOD_NAME(
infile, sample_id=None, chrom_names=None, chrom_prefix=None, from_log10=False
):
"""Read one sample from a SEG file.
Parameters
----------
sample_id : string, int or None
If a string identifier, return the sample matching that ID. If a
positive integer, return the sample at that index position, counting
from 0. If None (default), return the first sample in the file.
chrom_names : dict
Map (string) chromosome IDs to names. (Applied before chrom_prefix.)
e.g. {'23': 'X', '24': 'Y', '25': 'M'}
chrom_prefix : str
Prepend this string to chromosome names. (Usually 'chr' or None)
from_log10 : bool
Convert values from log10 to log2.
Returns
-------
DataFrame of the selected sample's segments.
"""
results = parse_seg(infile, chrom_names, chrom_prefix, from_log10)
if isinstance(sample_id, int):
# Select sample by index number
for i, (_sid, dframe) in enumerate(results):
if i == sample_id:
return dframe
else:
raise IndexError(f"No sample index {sample_id} found in SEG file")
elif isinstance(sample_id, str):
# Select sample by name
for sid, dframe in results:
if sid == sample_id:
return dframe
else:
raise IndexError(f"No sample ID '{sample_id}' found in SEG file")
else:
# Select the first sample
sid, dframe = next(results)
try:
next(results)
except StopIteration:
pass
else:
logging.warning(
"WARNING: SEG file contains multiple samples; "
"returning the first sample '%s'",
sid,
)
return dframe
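# Illustrative usage only (the file name is hypothetical): read the first sample
# from a SEG file and prepend "chr" to chromosome names:
#
#     dframe = METHOD_NAME("segments.seg", chrom_prefix="chr")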
def parse_seg(infile, chrom_names=None, chrom_prefix=None, from_log10=False):
"""Parse a SEG file as an iterable of samples.
Coordinates are automatically converted from 1-indexed to half-open
0-indexed (Python-style indexing).
Parameters
----------
chrom_names : dict
Map (string) chromosome IDs to names. (Applied before chrom_prefix.)
e.g. {'23': 'X', '24': 'Y', '25': 'M'}
chrom_prefix : str
Prepend this string to chromosome names. (Usually 'chr' or None)
from_log10 : bool
Convert values from log10 to log2.
Yields
------
Tuple of (string sample ID, DataFrame of segments)
"""
# Scan through any leading garbage to find the header
with as_handle(infile) as handle:
n_tabs = None
for line in handle:
n_tabs = line.count("\t")
if n_tabs == 0:
# Skip misc. R output (e.g. "WARNING...") before the header
continue
if n_tabs == 5:
col_names = [
"sample_id",
"chromosome",
"start",
"end",
"probes",
"log2",
]
elif n_tabs == 4:
col_names = ["sample_id", "chromosome", "start", "end", "log2"]
else:
raise ValueError(
f"SEG format expects 5 or 6 columns; found {n_tabs + 1}: {line}"
)
break
else:
raise ValueError("SEG file contains no data")
# Parse the SEG file contents
try:
dframe = pd.read_csv(
handle,
sep="\t",
names=col_names,
header=None,
# * pandas.io.common.CParserError: Error
# tokenizing data. C error: Calling
# read(nbytes) on source failed. Try
# engine='python'.
engine="python",
# * engine='c' only:
# na_filter=False,
# dtype={
# 'sample_id': 'str',
# 'chromosome': 'str',
# 'start': 'int',
# 'end': 'int',
# 'log2': 'float'
# },
)
dframe["sample_id"] = dframe["sample_id"].astype("str")
dframe["chromosome"] = dframe["chromosome"].astype("str")
except CSV_ERRORS as err:
raise ValueError(
f"Unexpected dataframe contents:\n{err}\n" + next(handle)
) from err
# Calculate values for output columns
if chrom_names:
dframe["chromosome"] = dframe["chromosome"].replace(chrom_names)
if chrom_prefix:
dframe["chromosome"] = dframe["chromosome"].apply(lambda c: chrom_prefix + c)
if from_log10:
dframe["log2"] *= LOG2_10
dframe["gene"] = "-"
dframe["start"] -= 1
keep_columns = dframe.columns.drop(["sample_id"])
for sid, sample in dframe.groupby(by="sample_id", sort=False):
yield sid, sample.loc[:, keep_columns]
def write_seg(dframe, sample_id=None, chrom_ids=None):
"""Format a dataframe or list of dataframes as SEG.
To put multiple samples into one SEG table, pass `dframe` and `sample_id`
as equal-length lists of data tables and sample IDs in matching order.
"""
assert sample_id is not None
if isinstance(dframe, pd.DataFrame):
first = dframe
first_sid = sample_id
sids = dframes = None
else:
assert not isinstance(sample_id, str)
dframes = iter(dframe)
sids = iter(sample_id)
first = next(dframes)
first_sid = next(sids)
if chrom_ids in (None, True):
chrom_ids = create_chrom_ids(first)
results = [format_seg(first, first_sid, chrom_ids)]
if dframes is not None:
# Unpack matching lists of data and sample IDs
results.extend(
format_seg(subframe, sid, chrom_ids)
for subframe, sid in zip_longest(dframes, sids)
)
return pd.concat(results)
def format_seg(dframe, sample_id, chrom_ids):
"""Transform `dframe` contents to match SEG format."""
assert dframe is not None
assert sample_id is not None
chroms = dframe.chromosome.replace(chrom_ids) if chrom_ids else dframe.chromosome
rename_cols = {"log2": "seg.mean", "start": "loc.start", "end": "loc.end"}
# NB: in some programs the "sampleName" column is labeled "ID"
reindex_cols = ["ID", "chrom", "loc.start", "loc.end", "seg.mean"]
if "probes" in dframe:
rename_cols["probes"] = "num.mark" # or num_probes
reindex_cols.insert(-1, "num.mark")
return (
dframe.assign(ID=sample_id, chrom=chroms, start=dframe.start + 1)
.rename(columns=rename_cols)
.reindex(columns=reindex_cols)
)
def create_chrom_ids(segments):
"""Map chromosome names to integers in the order encountered."""
mapping = collections.OrderedDict(
(chrom, i + 1)
for i, chrom in enumerate(segments.chromosome.drop_duplicates())
if str(i + 1) != chrom
)
return mapping
|
4,329 |
on message received
|
"""
This module works with CAN data in ASCII log files (*.log).
It is is compatible with "candump -L" from the canutils program
(https://github.com/linux-can/can-utils).
"""
import logging
from typing import Any, Generator, TextIO, Union
from can.message import Message
from ..typechecking import StringPathLike
from .generic import TextIOMessageReader, TextIOMessageWriter
log = logging.getLogger("can.io.canutils")
CAN_MSG_EXT = 0x80000000
CAN_ERR_FLAG = 0x20000000
CAN_ERR_BUSERROR = 0x00000080
CAN_ERR_DLC = 8
CANFD_BRS = 0x01
CANFD_ESI = 0x02
class CanutilsLogReader(TextIOMessageReader):
"""
Iterator over CAN messages from a .log Logging File (candump -L).
.. note::
.log-format looks for example like this:
``(0.0) vcan0 001#8d00100100820100``
"""
file: TextIO
def __init__(
self,
file: Union[StringPathLike, TextIO],
**kwargs: Any,
) -> None:
"""
:param file: a path-like object or a file-like object to read from.
If this is a file-like object, it has to be opened in text
read mode, not binary read mode.
"""
super().__init__(file, mode="r")
def __iter__(self) -> Generator[Message, None, None]:
for line in self.file:
# skip empty lines
temp = line.strip()
if not temp:
continue
channel_string: str
if temp[-2:].lower() in (" r", " t"):
timestamp_string, channel_string, frame, is_rx_string = temp.split()
is_rx = is_rx_string.strip().lower() == "r"
else:
timestamp_string, channel_string, frame = temp.split()
is_rx = True
timestamp = float(timestamp_string[1:-1])
can_id_string, data = frame.split("#", maxsplit=1)
channel: Union[int, str]
if channel_string.isdigit():
channel = int(channel_string)
else:
channel = channel_string
is_extended = len(can_id_string) > 3
can_id = int(can_id_string, 16)
is_fd = False
brs = False
esi = False
if data and data[0] == "#":
is_fd = True
fd_flags = int(data[1])
brs = bool(fd_flags & CANFD_BRS)
esi = bool(fd_flags & CANFD_ESI)
data = data[2:]
if data and data[0].lower() == "r":
is_remote_frame = True
if len(data) > 1:
dlc = int(data[1:])
else:
dlc = 0
data_bin = None
else:
is_remote_frame = False
dlc = len(data) // 2
data_bin = bytearray()
for i in range(0, len(data), 2):
data_bin.append(int(data[i : (i + 2)], 16))
if can_id & CAN_ERR_FLAG and can_id & CAN_ERR_BUSERROR:
msg = Message(timestamp=timestamp, is_error_frame=True)
else:
msg = Message(
timestamp=timestamp,
arbitration_id=can_id & 0x1FFFFFFF,
is_extended_id=is_extended,
is_remote_frame=is_remote_frame,
is_fd=is_fd,
is_rx=is_rx,
bitrate_switch=brs,
error_state_indicator=esi,
dlc=dlc,
data=data_bin,
channel=channel,
)
yield msg
self.stop()
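# Illustrative example (not part of the original module): the line
# "(0.0) vcan0 001#8d00100100820100" from the class docstring is parsed into a
# Message with timestamp 0.0, channel "vcan0", standard (non-extended)
# arbitration_id 0x001 and the eight data bytes 8D 00 10 01 00 82 01 00.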
class CanutilsLogWriter(TextIOMessageWriter):
"""Logs CAN data to an ASCII log file (.log).
This class is compatible with "candump -L".
If a message has a timestamp smaller than the previous one (or 0 or None),
it gets assigned the timestamp that was written for the last message.
If the first message does not have a timestamp, it is set to zero.
"""
def __init__(
self,
file: Union[StringPathLike, TextIO],
channel: str = "vcan0",
append: bool = False,
**kwargs: Any,
):
"""
:param file: a path-like object or a file-like object to write to.
If this is a file-like object, it has to be opened in text
write mode, not binary write mode.
:param channel: a default channel to use when the message does not
have a channel set
:param bool append: if set to `True` messages are appended to
the file, else the file is truncated
"""
mode = "a" if append else "w"
super().__init__(file, mode=mode)
self.channel = channel
self.last_timestamp = None
def METHOD_NAME(self, msg):
# this is the case for the very first message:
if self.last_timestamp is None:
self.last_timestamp = msg.timestamp or 0.0
# figure out the correct timestamp
if msg.timestamp is None or msg.timestamp < self.last_timestamp:
timestamp = self.last_timestamp
else:
timestamp = msg.timestamp
channel = msg.channel if msg.channel is not None else self.channel
if isinstance(channel, int) or isinstance(channel, str) and channel.isdigit():
channel = f"can{channel}"
framestr = f"({timestamp:f}) {channel}"
if msg.is_error_frame:
framestr += f" {CAN_ERR_FLAG | CAN_ERR_BUSERROR:08X}#"
elif msg.is_extended_id:
framestr += f" {msg.arbitration_id:08X}#"
else:
framestr += f" {msg.arbitration_id:03X}#"
if msg.is_error_frame:
eol = "\n"
else:
eol = " R\n" if msg.is_rx else " T\n"
if msg.is_remote_frame:
framestr += f"R{eol}"
else:
if msg.is_fd:
fd_flags = 0
if msg.bitrate_switch:
fd_flags |= CANFD_BRS
if msg.error_state_indicator:
fd_flags |= CANFD_ESI
framestr += f"#{fd_flags:X}"
framestr += f"{msg.data.hex().upper()}{eol}"
self.file.write(framestr)
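# Illustrative example (not part of the original module): a received standard-id
# data frame with arbitration_id 0x123, data b"\xde\xad\xbe\xef", channel "vcan0"
# and timestamp 1621412234.365 is written as
# "(1621412234.365000) vcan0 123#DEADBEEF R".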
|
4,330 |
get ref url
|
# -*- coding: utf-8 -*-
from sqlalchemy.orm import Session
import tempfile
import transaction
from wsgidav import util
from wsgidav.dav_error import DAVError
from wsgidav.dav_error import HTTP_FORBIDDEN
from tracim_backend.app_models.contents import content_type_list
from tracim_backend.exceptions import TracimException
from tracim_backend.lib.core.content import ContentApi
from tracim_backend.models.data import ActionDescription
from tracim_backend.models.data import Content
from tracim_backend.models.data import Workspace
from tracim_backend.models.revision_protection import new_revision
class HistoryType(object):
Deleted = "deleted"
Archived = "archived"
Standard = "standard"
All = "all"
class SpecialFolderExtension(object):
Deleted = "/.deleted"
Archived = "/.archived"
History = "/.history"
class FakeFileStream(object):
"""
Fake a FileStream that we're giving to wsgidav to receive data and create files / new revisions
There are two scenarios:
- when a new file is created, wsgidav will call the method createEmptyResource and expect to get a _DAVResource
which should have both 'beginWrite' and 'endWrite' methods implemented
- when a file which already exists is updated, it is going to call the 'beginWrite' function of the _DAVResource
to get a filestream and write content into it
In the first scenario, the transfer takes two parts: it first creates the resource (createEmptyResource)
then adds its content (beginWrite, write, close...). If we went without this class, we would create two revisions
of the file upon creating a new file, which is not what we want.
"""
def __init__(
self,
session: Session,
content_api: ContentApi,
workspace: Workspace,
path: str,
file_name: str = "",
content: Content = None,
parent: Content = None,
):
"""
:param content_api:
:param workspace:
:param path:
:param file_name:
:param content:
:param parent:
"""
# TODO - G.M - 2019-06-13 - use true streaming mechanism,
# instead of a true streaming mechanism we use
# a temporary file to avoid big file in memory without needing to refactor all
# upload mechanism of WebDAV
# see https://github.com/tracim/tracim/issues/1911
self.temp_file = tempfile.NamedTemporaryFile(suffix="tracim_webdav_upload_")
self._session = session
self._content = content
self._file_name = file_name if file_name != "" else self._content.file_name
self._api = content_api
self._workspace = workspace
self._parent = parent
self._path = path
def METHOD_NAME(self) -> str:
"""
As wsgidav expects to receive a _DAVResource upon creating a new resource, this method's result is used
by the Windows client to establish both the file's path and the file's name
"""
return self._path
def beginWrite(self, contentType) -> "FakeFileStream":
"""
Called by wsgidav; it expects a filestream which possesses both 'write' and 'close' operations to write
the file content.
"""
return self
def endWrite(self, withErrors: bool):
"""
Called by request_server when finished writing everything.
As we call the operations that create new content or a new revision in the close operation, which is
called before endWrite, there is nothing to do here.
"""
pass
def write(self, s: str):
"""
Called by request_server when writing content to files, we put it inside a filestream
"""
self.temp_file.write(s)
def close(self):
"""
Called by request_server when the file content has been written. We either add a new content or create
a new revision
"""
self.temp_file.seek(0)
if self._content is None:
self.create_file()
else:
self.update_file()
transaction.commit()
self.temp_file.close()
def create_file(self):
"""
Called when this is a new file; will create a new Content initialized with the correct content
"""
is_temporary = self._file_name.startswith(".~") or self._file_name.startswith("~")
try:
with self._session.no_autoflush:
file = self._api.create(
filename=self._file_name,
content_type_slug=content_type_list.File.slug,
workspace=self._workspace,
parent=self._parent,
is_temporary=is_temporary,
do_save=False,
)
self._api.update_file_data(
file,
self._file_name,
util.guessMimeType(self._file_name),
self.temp_file,
)
except TracimException as exc:
raise DAVError(HTTP_FORBIDDEN) from exc
self._api.save(file, ActionDescription.CREATION)
def update_file(self):
"""
Called when we're updating an existing content; we create a new revision and update the file content
"""
try:
with new_revision(session=self._session, content=self._content, tm=transaction.manager):
self._api.update_file_data(
self._content,
self._file_name,
util.guessMimeType(self._content.file_name),
self.temp_file,
)
except TracimException as exc:
raise DAVError(HTTP_FORBIDDEN) from exc
self._api.save(self._content, ActionDescription.REVISION)
def supportEtag(self):
return False
|
4,331 |
find caller
|
# Copyright 2019 ICON Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
from logging import DEBUG, INFO, WARNING, ERROR, currentframe
from .utils import LoggerUtil
default_logger = logging.Logger("python")
# This code is mainly copied from the python logging module, with minor modifications
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame.
#
if hasattr(sys, 'frozen'): #support for py2exe
_srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif __file__[-4:].lower() in ['.pyc', '.pyo']:
_srcfile = __file__[:-4] + '.py'
else:
_srcfile = __file__
_srcfile = os.path.normcase(_srcfile)
class Logger(object):
@classmethod
def load_config(cls, config: dict, handler=None):
LoggerUtil.apply_config(default_logger, config, handler)
@classmethod
def print_config(cls, config: dict):
LoggerUtil.print_config(default_logger, config)
@classmethod
def isDebugEnabled(cls) -> bool:
return default_logger.isEnabledFor(DEBUG)
@classmethod
def debug(cls, msg: str, tag: str = "DEBUG"):
if default_logger.isEnabledFor(DEBUG):
cls._log(DEBUG, LoggerUtil.make_log_msg(tag, msg))
@classmethod
def info(cls, msg: str, tag: str = "INFO"):
if default_logger.isEnabledFor(INFO):
cls._log(INFO, LoggerUtil.make_log_msg(tag, msg))
@classmethod
def warning(cls, msg: str, tag: str = "WARN"):
if default_logger.isEnabledFor(WARNING):
# redirect warning to info
cls.info(msg, tag)
@classmethod
def error(cls, msg: str, tag: str = "ERROR"):
if default_logger.isEnabledFor(ERROR):
# redirect error to info
cls.info(msg, tag)
@classmethod
def exception(cls, msg: str, tag: str = "LOG"):
if default_logger.isEnabledFor(DEBUG):
cls._log(INFO, LoggerUtil.make_log_msg(tag, msg), exc_info=True)
elif default_logger.isEnabledFor(INFO):
cls._log(INFO, LoggerUtil.make_log_msg(tag, msg))
@classmethod
def _log(cls, level, msg, args=None, exc_info=None, extra=None):
"""
Low-level logging routine which creates a LogRecord and then calls
all the handlers of this logger to handle the record.
"""
# Add wrapping functionality here.
if _srcfile:
# IronPython doesn't track Python frames, so findCaller throws an
# exception on some versions of IronPython. We trap it here so that
# IronPython can use logging.
try:
fn, lno, func = cls.METHOD_NAME()
except ValueError:
fn, lno, func = "(unknown file)", 0, "(unknown function)"
else:
fn, lno, func = "(unknown file)", 0, "(unknown function)"
if exc_info:
if not isinstance(exc_info, tuple):
exc_info = sys.exc_info()
record = default_logger.makeRecord(
default_logger.name, level, fn, lno, msg, args, exc_info, func, extra)
default_logger.handle(record)
@classmethod
def METHOD_NAME(cls):
"""
Find the stack frame of the caller so that we can note the source
file name, line number and function name.
"""
f = currentframe()
# On some versions of IronPython, currentframe() returns None if
# IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)"
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
f = f.f_back
continue
rv = (co.co_filename, f.f_lineno, co.co_name)
break
return rv
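# Illustrative usage only (the config keys accepted by load_config depend on
# LoggerUtil.apply_config and are not shown here):
#
#     Logger.load_config(config)          # hypothetical config dict
#     Logger.debug("connection opened", tag="NET")
#     Logger.exception("unexpected failure")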
class SystemLogger(Logger):
@classmethod
def exception(cls, msg: str, tag: str = "LOG"):
if default_logger.isEnabledFor(WARNING):
cls._log(WARNING, LoggerUtil.make_log_msg(tag, msg), exc_info=True)
|
4,332 |
get duplicated values
|
import binascii
import os
import secrets
from dataclasses import dataclass
from typing import List, Literal, Optional, Tuple, Type, Union, overload
import graphene
from django.core.exceptions import ValidationError
from graphene import ObjectType
from graphql.error import GraphQLError
from ....plugins.const import APP_ID_PREFIX
from ....thumbnail import FILE_NAME_MAX_LENGTH
from ....webhook.event_types import WebhookEventAsyncType
from ..validators import validate_if_int_or_uuid
def snake_to_camel_case(name):
"""Convert snake_case variable name to camelCase."""
if isinstance(name, str):
split_name = name.split("_")
return split_name[0] + "".join(map(str.capitalize, split_name[1:]))
return name
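# Illustrative example (not part of the original module):
# snake_to_camel_case("total_net_amount") returns "totalNetAmount", while a
# non-string input such as 42 is returned unchanged.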
def str_to_enum(name):
"""Create an enum value from a string."""
return name.replace(" ", "_").replace("-", "_").upper()
def get_duplicates_items(first_list, second_list):
"""Return items that appear on both provided lists."""
if first_list and second_list:
return set(first_list) & set(second_list)
return []
def METHOD_NAME(values):
"""Return set of duplicated values."""
if values:
return {value for value in values if values.count(value) > 1}
return {}
@overload
def from_global_id_or_error(
global_id: str,
only_type: Union[ObjectType, str, None] = None,
raise_error: Literal[True] = True,
) -> Tuple[str, str]:
...
@overload
def from_global_id_or_error(
global_id: str,
only_type: Union[Type[ObjectType], str, None] = None,
raise_error: bool = False,
) -> Union[Tuple[str, str], Tuple[str, None]]:
...
def from_global_id_or_error(
global_id: str,
only_type: Union[Type[ObjectType], str, None] = None,
raise_error: bool = False,
):
"""Resolve global ID or raise GraphQLError.
Validates if given ID is a proper ID handled by Saleor.
Valid IDs formats, base64 encoded:
'app:<int>:<str>' : External app ID with 'app' prefix
'<type>:<int>' : Internal ID containing object type and ID as integer
'<type>:<UUID>' : Internal ID containing object type and UUID
Optionally validate the object type, if `only_type` is provided,
raise GraphQLError when `raise_error` is set to True.
Returns tuple: (type, id).
"""
try:
type_, id_ = graphene.Node.from_global_id(global_id)
except (binascii.Error, UnicodeDecodeError, ValueError):
raise GraphQLError(f"Couldn't resolve id: {global_id}.")
if type_ == APP_ID_PREFIX:
id_ = global_id
else:
if not validate_if_int_or_uuid(id_):
raise GraphQLError(f"Error occurred during ID - {global_id} validation.")
if only_type and str(type_) != str(only_type):
if not raise_error:
return type_, None
raise GraphQLError(f"Must receive a {only_type} id.")
return type_, id_
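# Illustrative example (not part of the original module): a global ID built with
# graphene.Node.to_global_id("Product", 123) is the base64 string
# "UHJvZHVjdDoxMjM=", and from_global_id_or_error(that_id, "Product") returns
# the tuple ("Product", "123").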
def from_global_id_or_none(
global_id, only_type: Union[ObjectType, str, None] = None, raise_error: bool = False
):
if not global_id:
return None
return from_global_id_or_error(global_id, only_type, raise_error)[1]
def to_global_id_or_none(instance):
class_name = instance.__class__.__name__
if instance is None or instance.pk is None:
return None
return graphene.Node.to_global_id(class_name, instance.pk)
def add_hash_to_file_name(file):
"""Add unique text fragment to the file name to prevent file overriding."""
file_name, format = os.path.splitext(file._name)
file_name = file_name[:FILE_NAME_MAX_LENGTH]
hash = secrets.token_hex(nbytes=4)
new_name = f"{file_name}_{hash}{format}"
file._name = new_name
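# Illustrative example (not part of the original module): a file named
# "cover.jpg" would be renamed to something like "cover_1a2b3c4d.jpg", where the
# 8 hex characters come from secrets.token_hex(nbytes=4).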
def raise_validation_error(field=None, message=None, code=None):
raise ValidationError({field: ValidationError(message, code=code)})
def ext_ref_to_global_id_or_error(model, external_reference):
"""Convert external reference to global id."""
internal_id = (
model.objects.filter(external_reference=external_reference)
.values_list("id", flat=True)
.first()
)
if internal_id:
return graphene.Node.to_global_id(model.__name__, internal_id)
else:
raise_validation_error(
field="externalReference",
message=f"Couldn't resolve to a node: {external_reference}",
code="not_found",
)
@dataclass
class WebhookEventInfo:
type: str
description: Optional[str] = None
CHECKOUT_CALCULATE_TAXES_MESSAGE = (
"Optionally triggered when checkout prices are expired."
)
def message_webhook_events(webhook_events: List[WebhookEventInfo]) -> str:
description = "\n\nTriggers the following webhook events:"
for event in webhook_events:
webhook_type = "async" if event.type in WebhookEventAsyncType.ALL else "sync"
description += f"\n- {event.type.upper()} ({webhook_type})"
if event.description:
description += f": {event.description}"
return description
|
4,333 |
prepare
|
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for ingesting mlmodel services
"""
from abc import ABC, abstractmethod
from typing import Any, Iterable, List, Optional, Set
from metadata.generated.schema.api.data.createMlModel import CreateMlModelRequest
from metadata.generated.schema.entity.data.mlmodel import (
MlFeature,
MlHyperParameter,
MlModel,
MlStore,
)
from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import (
OpenMetadataConnection,
)
from metadata.generated.schema.entity.services.mlmodelService import (
MlModelConnection,
MlModelService,
)
from metadata.generated.schema.metadataIngestion.mlmodelServiceMetadataPipeline import (
MlModelServiceMetadataPipeline,
)
from metadata.generated.schema.metadataIngestion.workflow import (
Source as WorkflowSource,
)
from metadata.ingestion.api.delete import delete_entity_from_source
from metadata.ingestion.api.models import Either
from metadata.ingestion.api.steps import Source
from metadata.ingestion.api.topology_runner import TopologyRunnerMixin
from metadata.ingestion.models.delete_entity import DeleteEntity
from metadata.ingestion.models.topology import (
NodeStage,
ServiceTopology,
TopologyNode,
create_source_context,
)
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.ingestion.source.connections import get_connection, get_test_connection_fn
from metadata.utils import fqn
from metadata.utils.logger import ingestion_logger
logger = ingestion_logger()
class MlModelServiceTopology(ServiceTopology):
"""
Defines the hierarchy in MlModel Services.
service -> MlModel
We could have a topology validator. We can only consume
data that has been produced by any parent node.
"""
root = TopologyNode(
producer="get_services",
stages=[
NodeStage(
type_=MlModelService,
context="mlmodel_service",
processor="yield_create_request_mlmodel_service",
overwrite=False,
must_return=True,
),
],
children=["mlmodel"],
post_process=["mark_mlmodels_as_deleted"],
)
mlmodel = TopologyNode(
producer="get_mlmodels",
stages=[
NodeStage(
type_=MlModel,
context="mlmodels",
processor="yield_mlmodel",
consumer=["mlmodel_service"],
),
],
)
class MlModelServiceSource(TopologyRunnerMixin, Source, ABC):
"""
Base class for MlModel services.
It implements the topology and context
"""
source_config: MlModelServiceMetadataPipeline
config: WorkflowSource
# Big union of types we want to fetch dynamically
service_connection: MlModelConnection.__fields__["config"].type_
topology = MlModelServiceTopology()
context = create_source_context(topology)
mlmodel_source_state: Set = set()
def __init__(
self,
config: WorkflowSource,
metadata_config: OpenMetadataConnection,
):
super().__init__()
self.config = config
self.metadata_config = metadata_config
self.metadata = OpenMetadata(metadata_config)
self.service_connection = self.config.serviceConnection.__root__.config
self.source_config: MlModelServiceMetadataPipeline = (
self.config.sourceConfig.config
)
self.connection = get_connection(self.service_connection)
# Flag the connection for the test connection
self.connection_obj = self.connection
self.test_connection()
self.client = self.connection
def get_services(self) -> Iterable[WorkflowSource]:
yield self.config
def yield_create_request_mlmodel_service(self, config: WorkflowSource):
yield Either(
right=self.metadata.get_create_service_from_source(
entity=MlModelService, config=config
)
)
@abstractmethod
def get_mlmodels(self, *args, **kwargs) -> Iterable[Any]:
"""
Method to list all models to process.
Here is where filtering happens
"""
@abstractmethod
def yield_mlmodel(self, *args, **kwargs) -> Iterable[Either[CreateMlModelRequest]]:
"""Method to return MlModel Entities"""
@abstractmethod
def _get_hyper_params(self, *args, **kwargs) -> Optional[List[MlHyperParameter]]:
"""Get the Hyper Parameters from the MlModel"""
@abstractmethod
def _get_ml_store(self, *args, **kwargs) -> Optional[MlStore]:
"""Get the Ml Store from the model version object"""
@abstractmethod
def _get_ml_features(self, *args, **kwargs) -> Optional[List[MlFeature]]:
"""Pick up features"""
@abstractmethod
def _get_algorithm(self, *args, **kwargs) -> str:
"""Return the algorithm for a given model"""
def close(self):
"""By default, nothing to close"""
def test_connection(self) -> None:
test_connection_fn = get_test_connection_fn(self.service_connection)
test_connection_fn(self.metadata, self.connection_obj, self.service_connection)
def mark_mlmodels_as_deleted(self) -> Iterable[Either[DeleteEntity]]:
"""Method to mark the mlmodels as deleted"""
if self.source_config.markDeletedMlModels:
yield from delete_entity_from_source(
metadata=self.metadata,
entity_type=MlModel,
entity_source_state=self.mlmodel_source_state,
mark_deleted_entity=self.source_config.markDeletedMlModels,
params={
"service": self.context.mlmodel_service.fullyQualifiedName.__root__
},
)
def register_record(self, mlmodel_request: CreateMlModelRequest) -> None:
"""
Mark the mlmodel record as scanned and update
the mlmodel_source_state
"""
mlmodel_fqn = fqn.build(
self.metadata,
entity_type=MlModel,
service_name=mlmodel_request.service.__root__,
mlmodel_name=mlmodel_request.name.__root__,
)
self.mlmodel_source_state.add(mlmodel_fqn)
def METHOD_NAME(self):
"""By default, nothing to prepare"""
|
4,334 |
work on bugs task
|
# Copyright © 2019 Red Hat, Inc. and others.
#
# This file is part of Bodhi.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Asynchronous tasks for Bodhi."""
import logging
import sys
import typing
import celery
from bodhi.server import bugs, buildsys, initialize_db
from bodhi.server.config import config
from bodhi.server.exceptions import ExternalCallException
from bodhi.server.util import pyfile_to_module
# Workaround https://github.com/celery/celery/issues/5416
if celery.version_info < (4, 3) and sys.version_info >= (3, 7): # pragma: no cover
from re import Pattern
from celery.app.routes import re as routes_re
routes_re._pattern_type = Pattern
log = logging.getLogger('bodhi')
# The Celery app object.
app = celery.Celery()
app.config_from_object(pyfile_to_module(config["celery_config"], "celeryconfig"))
def _do_init():
config.load_config()
initialize_db(config)
buildsys.setup_buildsystem(config)
bugs.set_bugtracker()
@app.task(name="compose", ignore_result=True)
def compose(api_version: int, **kwargs):
"""Trigger the compose.
All arguments besides the ``api_version`` will be transmitted to the task handler.
Args:
api_version: Version of the task API. Change it if the handling of the
arguments has changed in the task handler.
"""
# Import here to avoid an import loop.
# The compose task is routed independently in the configuration, therefore
# the task will not be attempted on a host that does not have the composer
# installed.
from bodhi.server.tasks.composer import ComposerHandler
log.info("Received a compose order")
_do_init()
composer = ComposerHandler()
composer.run(api_version=api_version, data=kwargs)
@app.task(name="handle_update", ignore_result=True)
def handle_update(api_version: int, **kwargs):
"""Trigger the Updates handler.
All arguments besides the ``api_version`` will be transmitted to the task handler.
Args:
api_version: Version of the task API. Change it if the handling of the
arguments has changed in the task handler.
"""
from .updates import UpdatesHandler # Avoid an import loop
log.info("Received an update handling order")
_do_init()
handler = UpdatesHandler()
handler.run(api_version=api_version, data=kwargs)
@app.task(name="approve_testing")
def approve_testing_task(**kwargs):
"""Trigger the approve testing job. This is a periodic task."""
from .approve_testing import main
log.info("Received an approve testing order")
_do_init()
main()
@app.task(name="check_policies")
def check_policies_task(**kwargs):
"""Trigger the check policies job. This is a periodic task."""
from .check_policies import main
log.info("Received a check policies order")
_do_init()
main()
@app.task(name="check_signed_builds")
def check_signed_builds_task(**kwargs):
"""Trigger the check signed builds job. This is a periodic task."""
from .check_signed_builds import main
log.info("Received a check signed builds order")
_do_init()
main()
@app.task(name="clean_old_composes")
def clean_old_composes_task(num_to_keep: int, **kwargs):
"""Trigger the clean old composes job. This is a periodic task."""
from .clean_old_composes import main
log.info("Received a clean old composes order")
_do_init()
main(num_to_keep)
@app.task(name="expire_overrides")
def expire_overrides_task(**kwargs):
"""Trigger the expire overrides job. This is a periodic task."""
from .expire_overrides import main
log.info("Received a expire overrides order")
_do_init()
main()
@app.task(name="handle_side_and_related_tags", ignore_result=True)
def handle_side_and_related_tags_task(
builds: typing.List[str],
pending_signing_tag: str,
from_tag: str,
pending_testing_tag: typing.Optional[str] = None,
candidate_tag: typing.Optional[str] = None):
"""Handle side-tags and related tags for updates in Koji."""
from .handle_side_and_related_tags import main
log.info("Received an order for handling update tags")
_do_init()
main(builds, pending_signing_tag, from_tag, pending_testing_tag, candidate_tag)
@app.task(name="tag_update_builds", ignore_result=True)
def tag_update_builds_task(tag: str, builds: typing.List[str]):
"""Handle tagging builds for an update in Koji."""
from .tag_update_builds import main
log.info("Received an order to tag builds for an update")
_do_init()
main(tag, builds)
@app.task(name="bodhi.server.tasks.work_on_bugs", autoretry_for=(ExternalCallException,),
retry_kwargs={'max_retries': 5}, retry_backoff=True)
def METHOD_NAME(update: str, bugs: typing.List[int]):
"""Iterate the list of bugs, retrieving information from Bugzilla and modifying them."""
from .work_on_bugs import main
log.info("Received an order to fetch bugs and update their details")
_do_init()
main(update, bugs)
@app.task(name="bodhi.server.tasks.fetch_test_cases", autoretry_for=(ExternalCallException,),
retry_kwargs={'max_retries': 5}, retry_backoff=True)
def fetch_test_cases_task(update: str):
"""Query the wiki for test cases for each package on the given update."""
from .fetch_test_cases import main
log.info("Received an order to fetch test cases")
_do_init()
main(update)
|
4,335 |
suite
|
import os
import yaml
from unittest.mock import patch
from tempfile import mkdtemp
from shutil import rmtree
import unittest
from beets import ui
from beets import config
from test.helper import TestHelper
from beets.library import Library
class ConfigCommandTest(unittest.TestCase, TestHelper):
def setUp(self):
self.lib = Library(':memory:')
self.temp_dir = mkdtemp()
if 'EDITOR' in os.environ:
del os.environ['EDITOR']
os.environ['BEETSDIR'] = self.temp_dir
self.config_path = os.path.join(self.temp_dir, 'config.yaml')
with open(self.config_path, 'w') as file:
file.write('library: lib\n')
file.write('option: value\n')
file.write('password: password_value')
self.cli_config_path = os.path.join(self.temp_dir, 'cli_config.yaml')
with open(self.cli_config_path, 'w') as file:
file.write('option: cli overwrite')
config.clear()
config['password'].redact = True
config._materialized = False
def tearDown(self):
rmtree(self.temp_dir)
def _run_with_yaml_output(self, *args):
output = self.run_with_output(*args)
return yaml.safe_load(output)
def test_show_user_config(self):
output = self._run_with_yaml_output('config', '-c')
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'password_value')
def test_show_user_config_with_defaults(self):
output = self._run_with_yaml_output('config', '-dc')
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'password_value')
self.assertEqual(output['library'], 'lib')
self.assertEqual(output['import']['timid'], False)
def test_show_user_config_with_cli(self):
output = self._run_with_yaml_output('--config', self.cli_config_path,
'config')
self.assertEqual(output['library'], 'lib')
self.assertEqual(output['option'], 'cli overwrite')
def test_show_redacted_user_config(self):
output = self._run_with_yaml_output('config')
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'REDACTED')
def test_show_redacted_user_config_with_defaults(self):
output = self._run_with_yaml_output('config', '-d')
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'REDACTED')
self.assertEqual(output['import']['timid'], False)
def test_config_paths(self):
output = self.run_with_output('config', '-p')
paths = output.split('\n')
self.assertEqual(len(paths), 2)
self.assertEqual(paths[0], self.config_path)
def test_config_paths_with_cli(self):
output = self.run_with_output('--config', self.cli_config_path,
'config', '-p')
paths = output.split('\n')
self.assertEqual(len(paths), 3)
self.assertEqual(paths[0], self.cli_config_path)
def test_edit_config_with_editor_env(self):
os.environ['EDITOR'] = 'myeditor'
with patch('os.execlp') as execlp:
self.run_command('config', '-e')
execlp.assert_called_once_with(
'myeditor', 'myeditor', self.config_path)
def test_edit_config_with_automatic_open(self):
with patch('beets.util.open_anything') as open:
open.return_value = 'please_open'
with patch('os.execlp') as execlp:
self.run_command('config', '-e')
execlp.assert_called_once_with(
'please_open', 'please_open', self.config_path)
def test_config_editor_not_found(self):
with self.assertRaises(ui.UserError) as user_error:
with patch('os.execlp') as execlp:
execlp.side_effect = OSError('here is problem')
self.run_command('config', '-e')
self.assertIn('Could not edit configuration',
str(user_error.exception))
self.assertIn('here is problem', str(user_error.exception))
def test_edit_invalid_config_file(self):
with open(self.config_path, 'w') as file:
file.write('invalid: [')
config.clear()
config._materialized = False
os.environ['EDITOR'] = 'myeditor'
with patch('os.execlp') as execlp:
self.run_command('config', '-e')
execlp.assert_called_once_with(
'myeditor', 'myeditor', self.config_path)
def METHOD_NAME():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
4,336 |
get results
|
# This file is part of the MapProxy project.
# Copyright (C) 2011 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MAX_MAP_ASYNC_THREADS = 20
try:
import Queue
except ImportError:
import queue as Queue
import sys
import threading
from mapproxy.config import base_config
from mapproxy.config import local_base_config
from mapproxy.compat import PY2
import logging
log_system = logging.getLogger('mapproxy.system')
class AsyncResult(object):
def __init__(self, result=None, exception=None):
self.result = result
self.exception = exception
def __repr__(self):
return "<AsyncResult result='%s' exception='%s'>" % (
self.result, self.exception)
def _result_iter(results, use_result_objects=False):
for result in results:
if use_result_objects:
exception = None
if (isinstance(result, tuple) and len(result) == 3 and
isinstance(result[1], Exception)):
exception = result
result = None
yield AsyncResult(result, exception)
else:
yield result
class ThreadWorker(threading.Thread):
def __init__(self, task_queue, result_queue):
threading.Thread.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
self.base_config = base_config()
def run(self):
with local_base_config(self.base_config):
while True:
task = self.task_queue.get()
if task is None:
self.task_queue.task_done()
break
exec_id, func, args = task
try:
result = func(*args)
except Exception:
result = sys.exc_info()
self.result_queue.put((exec_id, result))
self.task_queue.task_done()
def _consume_queue(queue):
"""
Get all items from queue.
"""
while not queue.empty():
try:
queue.get(block=False)
queue.task_done()
except Queue.Empty:
pass
class ThreadPool(object):
def __init__(self, size=4):
self.pool_size = size
self.task_queue = Queue.Queue()
self.result_queue = Queue.Queue()
self.pool = None
def map_each(self, func_args, raise_exceptions):
"""
args should be a list of function arg tuples.
map_each calls each function with the given arg.
"""
if self.pool_size < 2:
for func, arg in func_args:
try:
yield func(*arg)
except Exception:
yield sys.exc_info()
return
self.pool = self._init_pool()
i = 0
for i, (func, arg) in enumerate(func_args):
self.task_queue.put((i, func, arg))
results = {}
next_result = 0
for value in self.METHOD_NAME(next_result, results, raise_exceptions):
yield value
next_result += 1
self.task_queue.join()
for value in self.METHOD_NAME(next_result, results, raise_exceptions):
yield value
next_result += 1
self.shutdown()
def _single_call(self, func, args, use_result_objects):
try:
result = func(*args)
except Exception:
if not use_result_objects:
raise
result = sys.exc_info()
return _result_iter([result], use_result_objects)
def map(self, func, *args, **kw):
return list(self.imap(func, *args, **kw))
def imap(self, func, *args, **kw):
use_result_objects = kw.get('use_result_objects', False)
if len(args[0]) == 1:
return self._single_call(func, next(iter(zip(*args))), use_result_objects)
return _result_iter(self.map_each([(func, arg) for arg in zip(*args)], raise_exceptions=not use_result_objects),
use_result_objects)
def starmap(self, func, args, **kw):
use_result_objects = kw.get('use_result_objects', False)
if len(args[0]) == 1:
return self._single_call(func, args[0], use_result_objects)
return _result_iter(self.map_each([(func, arg) for arg in args], raise_exceptions=not use_result_objects),
use_result_objects)
def starcall(self, args, **kw):
def call(func, *args):
return func(*args)
return self.starmap(call, args, **kw)
def METHOD_NAME(self, next_result, results, raise_exceptions):
for i, value in self._fetch_results(raise_exceptions):
if i == next_result:
yield value
next_result += 1
while next_result in results:
yield results.pop(next_result)
next_result += 1
else:
results[i] = value
def _fetch_results(self, raise_exceptions):
while not self.task_queue.empty() or not self.result_queue.empty():
task_result = self.result_queue.get()
if (raise_exceptions and isinstance(task_result[1], tuple) and
len(task_result[1]) == 3 and
isinstance(task_result[1][1], Exception)):
self.shutdown(force=True)
exc_class, exc, tb = task_result[1]
if PY2:
exec('raise exc_class, exc, tb')
else:
raise exc.with_traceback(tb)
yield task_result
def shutdown(self, force=False):
"""
Send shutdown sentinel to all executor threads. If `force` is True,
clean task_queue and result_queue.
"""
if force:
_consume_queue(self.task_queue)
_consume_queue(self.result_queue)
for _ in range(self.pool_size):
self.task_queue.put(None)
def _init_pool(self):
if self.pool_size < 2:
return []
pool = []
for _ in range(self.pool_size):
t = ThreadWorker(self.task_queue, self.result_queue)
t.daemon = True
t.start()
pool.append(t)
return pool
def imap(func, *args):
pool = ThreadPool(min(len(args[0]), MAX_MAP_ASYNC_THREADS))
return pool.imap(func, *args)
def starmap(func, args):
pool = ThreadPool(min(len(args[0]), MAX_MAP_ASYNC_THREADS))
return pool.starmap(func, args)
def starcall(args):
pool = ThreadPool(min(len(args[0]), MAX_MAP_ASYNC_THREADS))
return pool.starcall(args)
def run_non_blocking(func, args, kw={}):
return func(*args, **kw)
Pool = ThreadPool
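# Minimal usage sketch (not part of the original module). ``fetch`` is a
# made-up callable; the point is that ``imap`` yields results in argument
# order while the calls are spread over the worker threads started above.
#
#     def fetch(url):
#         return len(url)
#
#     pool = ThreadPool(size=4)
#     for result in pool.imap(fetch, ['http://a', 'http://b', 'http://c']):
#         print(result)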
|
4,337 |
print compartment info
|
#
# @file printMulti.py
# @brief multi print example
# @author Frank Bergmann
#
# <!--------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright (C) 2009-2013 jointly by the following organizations:
# 1. California Institute of Technology, Pasadena, CA, USA
# 2. EMBL European Bioinformatics Institute (EMBL-EBI), Hinxton, UK
#
# Copyright (C) 2006-2008 by the California Institute of Technology,
# Pasadena, CA, USA
#
# Copyright (C) 2002-2005 jointly by the following organizations:
# 1. California Institute of Technology, Pasadena, CA, USA
# 2. Japan Science and Technology Agency, Japan
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# ------------------------------------------------------------------------ -->
#
import sys
from libsbml import *
def METHOD_NAME(compartment):
plugin = compartment.getPlugin("multi")
name = compartment.getName() if compartment.isSetName() else compartment.getId()
print("Compartment {0}: isType = {1}".format(name, plugin.getIsType()))
for i in range(plugin.getNumCompartmentReferences()):
current = plugin.getCompartmentReference(i)
print(" compartmentReference: {0}".format(current.getCompartment()))
print('')
def printSpeciesInfo(species):
plugin = species.getPlugin("multi")
name = species.getName() if species.isSetName() else species.getId()
print("Species {0}: speciesType = {1}".format(name, plugin.getSpeciesType()))
for i in range(plugin.getNumOutwardBindingSites()):
current = plugin.getOutwardBindingSite(i)
print(" outwardBindingSite bindingStatus={0} component={1}".format(
BindingStatus_toString(current.getBindingStatus()), current.getComponent()))
for i in range(plugin.getNumSpeciesFeatures()):
current = plugin.getSpeciesFeature(i)
print(" speciesFeature speciesFeatureType={0} occur={1}".format(current.getSpeciesFeatureType(), current.getOccur()))
for j in range(current.getNumSpeciesFeatureValues()):
element = current.getSpeciesFeatureValue(j)
print(" speciesFeatureValue value={0}".format(element.getValue()))
print('')
def printMultiMathReferences(node):
if node is None:
return
for i in range(node.getNumChildren()):
current = node.getChild(i)
printMultiMathReferences(current)
plugin = node.getPlugin("multi")
if plugin is None:
return
if plugin.isSetRepresentationType():
print(" math representationType={0}".format(plugin.getRepresentationType()))
if plugin.isSetSpeciesReference():
print(" math speciesReference={0}".format(plugin.getSpeciesReference()))
def printReactionInfo(reaction):
name = reaction.getName() if reaction.isSetName() else reaction.getId()
isIntraSpeciesReaction = isinstance(reaction, IntraSpeciesReaction)
print("Reaction {0}: isIntraSpeciesReaction={1}".format(name, isIntraSpeciesReaction))
for i in range(reaction.getNumReactants()):
current = reaction.getReactant(i)
plugin = current.getPlugin("multi")
if plugin is None:
continue
print(" reactant {0}: compartmentReference={1}".format(current.getSpecies(), plugin.getCompartmentReference()))
for j in range(plugin.getNumSpeciesTypeComponentMapInProducts()):
element = plugin.getSpeciesTypeComponentMapInProduct(j)
print(" speciesTypeComponentMapInProduct: reactant={0} reactantComponent={1} productComponent={2}".format(
element.getReactant(), element.getReactantComponent(), element.getProductComponent()))
print('')
for i in range(reaction.getNumProducts()):
current = reaction.getProduct(i)
plugin = current.getPlugin("multi")
if plugin is None:
continue
print(" product {0}: compartmentReference={1}".format(current.getSpecies(), plugin.getCompartmentReference()))
for j in range(plugin.getNumSpeciesTypeComponentMapInProducts()):
element = plugin.getSpeciesTypeComponentMapInProduct(j)
print(" speciesTypeComponentMapInProduct: reactant={0} reactantComponent={1} productComponent={2}".format(
element.getReactant(), element.getReactantComponent(), element.getProductComponent()))
print('')
if not reaction.isSetKineticLaw() or not reaction.getKineticLaw().isSetMath():
print('')
return
printMultiMathReferences(reaction.getKineticLaw().getMath())
print('')
def printModelInfo(model):
plugin = model.getPlugin("multi")
for i in range (plugin.getNumMultiSpeciesTypes()):
current = plugin.getMultiSpeciesType(i)
isBindingSiteSpeciesType = isinstance(current, BindingSiteSpeciesType)
print("speciesType id={0} name={1} compartment={2} isBindingSiteSpeciesType={3}".format(current.getId(), current.getName(), current.getCompartment(), isBindingSiteSpeciesType))
for j in range(current.getNumSpeciesTypeInstances()):
element = current.getSpeciesTypeInstance(j)
print(" speciesTypeInstance id={0} name={1} speciesType={2}".format(element.getId(), element.getName(), element.getSpeciesType()))
for j in range(current.getNumSpeciesTypeComponentIndexes()):
element = current.getSpeciesTypeComponentIndex(j)
print(" speciesTypeComponentIndex id={0} component={1}".format(element.getId(), element.getComponent()))
for j in range(current.getNumInSpeciesTypeBonds()):
element = current.getInSpeciesTypeBond(j)
print(" inSpeciesTypeBond bindingSite1={0} bindingSite2={1}".format(element.getBindingSite1(), element.getBindingSite2()))
print('')
print('')
def printMulti(fileName):
document = readSBMLFromFile(fileName)
if document.getNumErrors(LIBSBML_SEV_ERROR) > 0:
document.printErrors()
return
model = document.getModel()
# print multi model information
printModelInfo(model)
# print multi compartment information
for i in range(model.getNumCompartments()):
METHOD_NAME(model.getCompartment(i))
# print multi species information
for i in range(model.getNumSpecies()):
printSpeciesInfo(model.getSpecies(i))
# print multi reaction information
for i in range(model.getNumReactions()):
printReactionInfo(model.getReaction(i))
if __name__ == "__main__":
if len(sys.argv) < 2:
print("usage: printMulti sbml-file")
else:
printMulti(sys.argv[1])
|
4,338 |
test clean successful stage outs
|
#!/usr/bin/env python
"""
testing file manager
"""
from __future__ import print_function
import logging
import os.path
import shutil
import tempfile
import unittest
import WMCore.Storage.StageOutError
from WMCore.Storage.FileManager import StageInMgr, StageOutMgr, DeleteMgr
class FileManagerTest(unittest.TestCase):
def setUp(self):
self.testDir = None
def tearDown(self):
if self.testDir is not None:
try:
shutil.rmtree(self.testDir)
except Exception:
# meh, if it fails, I guess something weird happened
pass
def testStageFile(self):
pass
# def stageFile(self, fileToStage, stageOut = True):
def testDelete(self):
pass
# def deleteLFN(self, lfn):
def testInitialiseSiteConf(self):
pass
# def initialiseSiteConf(self):
def testInitialiseOverride(self):
# def initialiseOverride(self):
pass
def testGetTransferDetails(self):
pass
# def getTransferDetails(self, lfn, currentMethod):
def testStageIn(self):
pass
def testStageOut(self):
pass
# def stageIn(self,fileToStage):
# def stageOut(self,fileToStage):
def test_doTransfer(self):
pass
# def _doTransfer(self, currentMethod, methodCounter, lfn, pfn, stageOut):
def METHOD_NAME(self):
pass
# def cleanSuccessfulStageOuts(self):
def testSearchTFC(self):
pass
# def searchTFC(self, lfn):
def testStageOutMgrWrapperWin(self):
fileForTransfer = {'LFN': '/etc/hosts', \
'PFN': 'file:///etc/hosts', \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageOutMgr(**{
'command': 'test-win',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': ''})
wrapper(fileForTransfer)
def testStageOutMgrWrapperFail(self):
fileForTransfer = {'LFN': 'failtest', \
'PFN': 'failtest', \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageOutMgr(numberOfRetries=1,
retryPauseTime=0, **{
'command': 'test-fail',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': ''})
self.assertRaises(WMCore.Storage.StageOutError.StageOutError, wrapper.__call__, fileForTransfer)
def testStageOutMgrWrapperRealCopy(self):
self.testDir = tempfile.mkdtemp()
fileForTransfer = {'LFN': '/etc/hosts', \
'PFN': '/etc/hosts', \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageOutMgr(**{
'command': 'cp',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': self.testDir})
wrapper(fileForTransfer)
self.assertTrue(os.path.exists(os.path.join(self.testDir, '/etc/hosts')))
def testStageOutMgrWrapperRealCopyFallback(self):
self.testDir = tempfile.mkdtemp()
fileForTransfer = {'LFN': '/etc/hosts', \
'PFN': '/etc/hosts', \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageOutMgr(**{
'command': 'testFallbackToOldBackend',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': self.testDir})
wrapper(fileForTransfer)
self.assertTrue(os.path.exists(os.path.join(self.testDir, '/etc/hosts')))
def testStageInMgrWrapperWin(self):
fileForTransfer = {'LFN': '/etc/hosts', \
'PFN': '/etc/hosts', \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageInMgr(**{
'command': 'test-win',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': ''})
wrapper(fileForTransfer)
def testStageInMgrWrapperFail(self):
fileForTransfer = {'LFN': 'failtest', \
'PFN': 'failtest', \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageInMgr(numberOfRetries=1,
retryPauseTime=0, **{
'command': 'test-fail',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': ''})
self.assertRaises(WMCore.Storage.StageOutError.StageOutError, wrapper.__call__, fileForTransfer)
def testStageInMgrWrapperRealCopy(self):
self.testDir = tempfile.mkdtemp()
shutil.copy('/etc/hosts', self.testDir + '/INPUT')
fileForTransfer = {'LFN': '/INPUT', \
'PFN': '%s/etc/hosts' % self.testDir, \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageInMgr(**{
'command': 'cp',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': self.testDir})
wrapper(fileForTransfer)
def testStageInMgrWrapperRealCopyFallback(self):
self.testDir = tempfile.mkdtemp()
shutil.copy('/etc/hosts', self.testDir + '/INPUT')
fileForTransfer = {'LFN': '/INPUT', \
'PFN': '%s/etc/hosts' % self.testDir, \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageInMgr(**{
'command': 'testFallbackToOldBackend',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': self.testDir})
wrapper(fileForTransfer)
def testDeleteMgrWrapper(self):
self.testDir = tempfile.mkdtemp()
shutil.copy('/etc/hosts', self.testDir + '/INPUT')
fileForTransfer = {'LFN': '/INPUT', \
'PFN': '%s/etc/hosts' % self.testDir, \
'PNN': None, \
'StageOutCommand': None}
wrapper = StageInMgr(**{
'command': 'cp',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': self.testDir})
retval = wrapper(fileForTransfer)
print("got the retval %s" % retval)
wrapper = DeleteMgr(**{
'command': 'cp',
'option': '',
'phedex-node': 'test-win',
'lfn-prefix': self.testDir})
wrapper(retval)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main()
|
4,339 |
test protect unauthorized
|
from c2corg_api.models.document import Document, DocumentGeometry
from c2corg_api.models.waypoint import Waypoint, WaypointLocale
from c2corg_api.tests.views import BaseTestRest
from c2corg_api.views.document import DocumentRest
class BaseProtectTest(BaseTestRest):
def setUp(self): # noqa
super(BaseProtectTest, self).setUp()
contributor_id = self.global_userids['contributor']
self.waypoint = Waypoint(
waypoint_type='summit', elevation=2203)
self.locale = WaypointLocale(
lang='en', title='Mont Granier', description='...')
self.waypoint.locales.append(self.locale)
self.waypoint.geometry = DocumentGeometry(
geom='SRID=3857;POINT(635956 5723604)')
self.session.add(self.waypoint)
self.session.flush()
DocumentRest.create_new_version(self.waypoint, contributor_id)
self.waypoint2 = Waypoint(
protected=True,
waypoint_type='summit', elevation=2203)
self.locale2 = WaypointLocale(
lang='en', title='Mont Granier2', description='...')
self.waypoint2.locales.append(self.locale2)
self.waypoint2.geometry = DocumentGeometry(
geom='SRID=3857;POINT(635956 5723604)')
self.session.add(self.waypoint2)
self.session.flush()
DocumentRest.create_new_version(self.waypoint2, contributor_id)
self.session.flush()
def is_protected(self, document_id):
document = self.session.query(Document).get(document_id)
self.session.refresh(document)
return document.protected
class TestDocumentProtectRest(BaseProtectTest):
def setUp(self): # noqa
super(TestDocumentProtectRest, self).setUp()
self._prefix = '/documents/protect'
def METHOD_NAME(self):
self.app_post_json(self._prefix, {}, status=403)
headers = self.add_authorization_header(username='contributor')
self.app_post_json(self._prefix, {}, headers=headers, status=403)
def test_protect(self):
request_body = {
'document_id': self.waypoint.document_id
}
headers = self.add_authorization_header(username='moderator')
self.app_post_json(
self._prefix, request_body, status=200, headers=headers)
self.assertTrue(self.is_protected(self.waypoint.document_id))
def test_protect_already_protected_user(self):
""" Test that protecting an already protected document
does not raise an error.
"""
request_body = {
'document_id': self.waypoint2.document_id
}
headers = self.add_authorization_header(username='moderator')
self.app_post_json(
self._prefix, request_body, status=200, headers=headers)
self.assertTrue(self.is_protected(self.waypoint2.document_id))
def test_protected_invalid_document_id(self):
request_body = {
'document_id': -1
}
headers = self.add_authorization_header(username='moderator')
self.app_post_json(
self._prefix, request_body, status=400, headers=headers)
class TestDocumentUnprotectRest(BaseProtectTest):
def setUp(self): # noqa
super(TestDocumentUnprotectRest, self).setUp()
self._prefix = '/documents/unprotect'
def test_unprotect_unauthorized(self):
self.app_post_json(self._prefix, {}, status=403)
headers = self.add_authorization_header(username='contributor')
self.app_post_json(self._prefix, {}, headers=headers, status=403)
def test_unprotect(self):
request_body = {
'document_id': self.waypoint2.document_id
}
headers = self.add_authorization_header(username='moderator')
self.app_post_json(
self._prefix, request_body, status=200, headers=headers)
self.assertFalse(self.is_protected(self.waypoint2.document_id))
def test_unprotect_already_unprotected_user(self):
""" Test that unprotecting an already unprotected document
does not raise an error.
"""
request_body = {
'document_id': self.waypoint.document_id
}
headers = self.add_authorization_header(username='moderator')
self.app_post_json(
self._prefix, request_body, status=200, headers=headers)
self.assertFalse(self.is_protected(self.waypoint.document_id))
def test_protected_invalid_document_id(self):
request_body = {
'document_id': -1
}
headers = self.add_authorization_header(username='moderator')
self.app_post_json(
self._prefix, request_body, status=400, headers=headers)
|
4,340 |
test should get obj type name list
|
import unittest
import os
from unittest import mock
from unittest.mock import MagicMock
import aim.sdk.utils as utils
class TestUtils(unittest.TestCase):
def setUp(self):
self.maxDiff = None
def test_should_search_aim_repo_not_found_back_slash(self):
# arrange
path = '/'
# act
path, found = utils.search_aim_repo(path)
# assert
self.assertFalse(found)
self.assertIsNone(path)
@mock.patch('os.path.exists')
def test_should_search_aim_repo_not_found_dot_back_slash(self, mock_os_path_exists: mock.MagicMock):
# arrange
path = '/'
built_dir = os.path.dirname(__file__)
build_dir_split = built_dir.split(os.sep)
mock_os_path_exists.side_effect = [False] * len(build_dir_split)
# act
path, found = utils.search_aim_repo(path)
# assert
self.assertFalse(found)
self.assertIsNone(path)
def test_should_generate_run_hash_default_len(self):
# arrange
expected_hash_len = 24
# act
actual_hash = utils.generate_run_hash()
# assert
self.assertIsInstance(actual_hash, str)
self.assertEqual(expected_hash_len, len(actual_hash))
def test_should_generate_run_hash_twelve_char_long(self):
# arrange
hash_length = 12
expected_hash_len = 12
# act
actual_hash = utils.generate_run_hash(hash_length)
# assert
self.assertIsInstance(actual_hash, str)
self.assertEqual(expected_hash_len, len(actual_hash))
@mock.patch('uuid.uuid4')
def test_should_generate_run_hash_six_char_long(self, mock_uuid: MagicMock):
# arrange
expected_uuid_hex = '16710b81-ccab-4409-bd79-50a770b565a6'
mock_uuid.return_value.hex = expected_uuid_hex
hash_length = 6
expected_hash_len = 6
# act
actual_hash = utils.generate_run_hash(hash_length)
# assert
self.assertIsInstance(actual_hash, str)
self.assertEqual(expected_hash_len, len(actual_hash))
self.assertEqual(expected_uuid_hex[:expected_hash_len], actual_hash)
mock_uuid.assert_called_once()
def test_should_get_obj_type_name_str(self):
# arrange
obj = 'hello aim!'
expected_type = 'str'
# act
actual_type = utils.get_object_typename(obj)
# assert
self.assertEqual(expected_type, actual_type)
def test_should_get_clean_repo_path(self):
# arrange
path = '/'
expected_repo_path = os.getcwd()
# act
actual_repo_path = utils.clean_repo_path(path)
# assert
self.assertEqual(expected_repo_path, actual_repo_path)
def test_should_get_clean_repo_path_empty_str(self):
# arrange
path = ''
expected_repo_path = ''
# act
actual_repo_path = utils.clean_repo_path(path)
# assert
self.assertEqual(expected_repo_path, actual_repo_path)
def METHOD_NAME(self):
# arrange
obj = [None]
expected_type = 'list(unknown)'
# act
actual_type = utils.get_object_typename(obj)
# assert
self.assertEqual(expected_type, actual_type)
def test_should_get_obj_type_name_bool(self):
# arrange
obj = True
expected_type = 'int'
# act
actual_type = utils.get_object_typename(obj)
# assert
self.assertEqual(expected_type, actual_type)
def test_should_check_types_compatibility_int(self):
# arrange
data_type = 'int'
base_data_type = 'int'
# act
actual_compatibility = utils.check_types_compatibility(data_type, base_data_type, None)
# assert
self.assertTrue(actual_compatibility)
def test_should_check_types_compatibility_base_list(self):
# arrange
data_type = 'list(str)'
base_data_type = 'list'
# act
actual_compatibility = utils.check_types_compatibility(data_type, base_data_type, None)
# assert
self.assertTrue(actual_compatibility)
def test_should_check_types_compatibility_base_list_str(self):
# arrange
data_type = 'list'
base_data_type = 'list(str)'
# act
actual_compatibility = utils.check_types_compatibility(data_type, base_data_type, None)
# assert
self.assertTrue(actual_compatibility)
|
4,341 |
test cuts basic
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desitarget.cmx.
"""
import unittest
import sys
from pkg_resources import resource_filename
import os.path
from uuid import uuid4
import numbers
import warnings
from astropy.io import fits
from astropy.table import Table
import fitsio
import numpy as np
import healpy as hp
from desitarget import io
from desitarget.cmx import cmx_cuts as cuts
_macos = sys.platform == 'darwin'
class TestCMX(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.datadir = resource_filename('desitarget.test', 't')
cls.tractorfiles = sorted(io.list_tractorfiles(cls.datadir))
cls.sweepfiles = sorted(io.list_sweepfiles(cls.datadir))
cls.cmxdir = resource_filename('desitarget.test', 't3')
# ADM find which HEALPixels are covered by test sweeps files.
cls.nside = 32
pixlist = []
for fn in cls.sweepfiles:
objs = fitsio.read(fn)
theta, phi = np.radians(90-objs["DEC"]), np.radians(objs["RA"])
pixels = hp.ang2pix(cls.nside, theta, phi, nest=True)
pixlist.append(pixels)
cls.pix = np.unique(pixlist)
# ADM set up the GAIA_DIR environment variable.
cls.gaiadir_orig = os.getenv("GAIA_DIR")
os.environ["GAIA_DIR"] = resource_filename('desitarget.test', 't4')
@classmethod
def tearDownClass(cls):
# ADM reset GAIA_DIR environment variable.
if cls.gaiadir_orig is not None:
os.environ["GAIA_DIR"] = cls.gaiadir_orig
def setUp(self):
# Treat a specific warning as an error (could turn off if this
# becomes problematic)
warnings.filterwarnings('error', '.*Calling nonzero on 0d arrays.*')
def METHOD_NAME(self):
"""Test cuts work with either data or filenames
"""
# ADM test for tractor files.
# ADM No QSO cuts for speed. This doesn't affect coverage.
cmx, pshift = cuts.apply_cuts(self.tractorfiles[0],
cmxdir=self.cmxdir, noqso=True)
data = io.read_tractor(self.tractorfiles[0])
cmx2, pshift2 = cuts.apply_cuts(data,
cmxdir=self.cmxdir, noqso=True)
self.assertTrue(np.all(cmx == cmx2))
self.assertTrue(np.all(pshift == pshift2))
# ADM test for sweeps files.
# ADM No QSO cuts for speed. This doesn't affect coverage.
cmx, pshift = cuts.apply_cuts(self.sweepfiles[0],
cmxdir=self.cmxdir, noqso=True)
data = io.read_tractor(self.sweepfiles[0])
cmx2, pshift2 = cuts.apply_cuts(data,
cmxdir=self.cmxdir, noqso=True)
self.assertTrue(np.all(cmx == cmx2))
self.assertTrue(np.all(pshift == pshift2))
def _test_table_row(self, targets):
"""Test cuts work with tables from several I/O libraries
"""
# ADM add the DR7/DR8 data columns if they aren't there yet.
# ADM can remove this once DR8 is finalized.
if "MASKBITS" not in targets.dtype.names:
targets = io.add_dr8_columns(targets)
cmx, pshift = cuts.apply_cuts(targets,
cmxdir=self.cmxdir)
self.assertEqual(len(cmx), len(targets))
cmx, pshift = cuts.apply_cuts(targets[0],
cmxdir=self.cmxdir)
self.assertTrue(isinstance(cmx, numbers.Integral), 'CMX_TARGET mask not an int')
def test_astropy_fits(self):
"""Test astropy.fits I/O library
"""
targets = fits.getdata(self.tractorfiles[0])
self._test_table_row(targets)
def test_astropy_table(self):
"""Test astropy tables I/O library
"""
targets = Table.read(self.tractorfiles[0])
self._test_table_row(targets)
def test_numpy_ndarray(self):
"""Test fitsio I/O library
"""
targets = fitsio.read(self.tractorfiles[0], upper=True)
self._test_table_row(targets)
def test_select_targets(self):
"""Test select targets works with either data or filenames
"""
# ADM parallelization across pixels only works for sweep files.
for filelist in [self.sweepfiles]:
# ADM No QSO cuts and limit to pixels for speed.
# ADM This doesn't affect coverage.
targets = cuts.select_targets(filelist, numproc=1, test=True,
cmxdir=self.cmxdir, noqso=True,
nside=self.nside, pixlist=self.pix)
t1 = cuts.select_targets(filelist[0:1], numproc=1, test=True,
cmxdir=self.cmxdir, noqso=True,
nside=self.nside, pixlist=self.pix)
t2 = cuts.select_targets(filelist[0], numproc=1, test=True,
cmxdir=self.cmxdir, noqso=True,
nside=self.nside, pixlist=self.pix)
for col in t1.dtype.names:
try:
notNaN = ~np.isnan(t1[col])
except TypeError: # - can't check string columns for NaN
notNaN = np.ones(len(t1), dtype=bool)
self.assertTrue(np.all(t1[col][notNaN] == t2[col][notNaN]))
def test_missing_files(self):
"""Test the code will die gracefully if input files are missing
"""
with self.assertRaises(ValueError):
targets = cuts.select_targets(['blat.foo1234', ], numproc=1)
@unittest.skipIf(_macos, "Skipping parallel test that fails on macOS.")
def test_parallel_select(self):
"""Test multiprocessing parallelization works
"""
for nproc in [1, 2]:
# ADM parallelization across pixels only works for sweep files.
for filelist in [self.sweepfiles]:
# ADM No QSO cuts for speed. Doesn't affect coverage.
targets = cuts.select_targets(filelist, numproc=nproc, test=True,
cmxdir=self.cmxdir, noqso=True,
nside=self.nside, pixlist=self.pix)
self.assertTrue('CMX_TARGET' in targets.dtype.names)
self.assertEqual(len(targets), np.count_nonzero(targets['CMX_TARGET']))
if __name__ == '__main__':
unittest.main()
def test_suite():
"""Allows testing of only this module with the command:
python setup.py test -m desitarget.test.test_cmx
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
4,342 |
test lists unique by tuple funcs
|
# This file is part of Hypothesis, which may be found at
# https://github.com/HypothesisWorks/hypothesis/
#
# Copyright the Hypothesis Authors.
# Individual contributors are listed in AUTHORS.rst and the git log.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
from collections import OrderedDict
from random import Random
import pytest
from hypothesis import given, settings
from hypothesis.strategies import (
booleans,
dictionaries,
fixed_dictionaries,
frozensets,
integers,
lists,
none,
nothing,
sets,
text,
tuples,
)
from tests.common.debug import find_any, minimal
from tests.common.utils import flaky
@pytest.mark.parametrize(
("col", "strat"),
[
((), tuples()),
([], lists(none(), max_size=0)),
(set(), sets(none(), max_size=0)),
(frozenset(), frozensets(none(), max_size=0)),
({}, fixed_dictionaries({})),
({}, fixed_dictionaries({}, optional={})),
(OrderedDict(), fixed_dictionaries(OrderedDict(), optional=OrderedDict())),
({}, fixed_dictionaries({}, optional={1: booleans()})),
({0: False}, fixed_dictionaries({0: booleans()}, optional={1: booleans()})),
({}, fixed_dictionaries({}, optional={(): booleans(), 0: booleans()})),
([], lists(nothing())),
([], lists(nothing(), unique=True)),
],
)
def test_find_empty_collection_gives_empty(col, strat):
assert minimal(strat, lambda x: True) == col
@pytest.mark.parametrize(
("coltype", "strat"), [(list, lists), (set, sets), (frozenset, frozensets)]
)
def test_find_non_empty_collection_gives_single_zero(coltype, strat):
assert minimal(strat(integers()), bool) == coltype((0,))
@pytest.mark.parametrize(
("coltype", "strat"), [(list, lists), (set, sets), (frozenset, frozensets)]
)
def test_minimizes_to_empty(coltype, strat):
assert minimal(strat(integers()), lambda x: True) == coltype()
def test_minimizes_list_of_lists():
xs = minimal(lists(lists(booleans())), lambda x: any(x) and not all(x))
xs.sort()
assert xs == [[], [False]]
@given(sets(integers(0, 100), min_size=2, max_size=10))
@settings(max_examples=100)
def test_sets_are_size_bounded(xs):
assert 2 <= len(xs) <= 10
def test_ordered_dictionaries_preserve_keys():
r = Random()
keys = list(range(100))
r.shuffle(keys)
x = fixed_dictionaries(OrderedDict([(k, booleans()) for k in keys])).example()
assert list(x.keys()) == keys
@given(fixed_dictionaries({}, optional={0: booleans(), 1: nothing(), 2: booleans()}))
def test_fixed_dictionaries_with_optional_and_empty_keys(d):
assert 1 not in d
@pytest.mark.parametrize("n", range(10))
def test_lists_of_fixed_length(n):
assert minimal(lists(integers(), min_size=n, max_size=n), lambda x: True) == [0] * n
@pytest.mark.parametrize("n", range(10))
def test_sets_of_fixed_length(n):
x = minimal(sets(integers(), min_size=n, max_size=n), lambda x: True)
assert len(x) == n
if not n:
assert x == set()
else:
assert x == set(range(min(x), min(x) + n))
@pytest.mark.parametrize("n", range(10))
def test_dictionaries_of_fixed_length(n):
x = set(
minimal(
dictionaries(integers(), booleans(), min_size=n, max_size=n), lambda x: True
).keys()
)
if not n:
assert x == set()
else:
assert x == set(range(min(x), min(x) + n))
@pytest.mark.parametrize("n", range(10))
def test_lists_of_lower_bounded_length(n):
x = minimal(lists(integers(), min_size=n), lambda x: sum(x) >= 2 * n)
assert n <= len(x) <= 2 * n
assert all(t >= 0 for t in x)
assert len(x) == n or all(t > 0 for t in x)
assert sum(x) == 2 * n
@flaky(min_passes=1, max_runs=3)
def test_can_find_unique_lists_of_non_set_order():
# This test checks that our strategy for unique lists doesn't accidentally
# depend on the iteration order of sets.
#
# Unfortunately, that means that *this* test has to rely on set iteration
# order. That makes it tricky to debug on CPython, because set iteration
# order changes every time the process is launched.
#
# To get around this, define the PYTHONHASHSEED environment variable to
# a consistent value. This could be 0, or it could be the PYTHONHASHSEED
# value listed in a failure log from CI.
ls = minimal(
lists(text(), min_size=2, unique=True),
lambda x: list(set(reversed(x))) != x, # noqa: C414 # yes, reverse inside set
)
assert len(set(ls)) == len(ls)
assert len(ls) == 2
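# To reproduce a particular failure locally, one would typically pin the hash
# seed before running the test (illustrative command; adjust the file path to
# wherever this module lives in your checkout):
#
#     PYTHONHASHSEED=0 python -m pytest test_simple_collections.py -k non_set_order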
def test_can_draw_empty_list_from_unsatisfiable_strategy():
assert find_any(lists(integers().filter(lambda s: False))) == []
def test_can_draw_empty_set_from_unsatisfiable_strategy():
assert find_any(sets(integers().filter(lambda s: False))) == set()
@given(lists(sets(none()), min_size=10))
def test_small_sized_sets(x):
pass
def test_minimize_dicts_with_incompatible_keys():
assert minimal(
fixed_dictionaries({1: booleans(), "hi": lists(booleans())}), lambda x: True
) == {1: False, "hi": []}
@given(
lists(
tuples(integers(), integers()),
min_size=2,
unique_by=(lambda x: x[0], lambda x: x[1]),
)
)
def METHOD_NAME(ls):
firstitems, seconditems = zip(*ls)
assert len(set(firstitems)) == len(firstitems)
assert len(set(seconditems)) == len(seconditems)
|
4,343 |
assert float
|
#!/usr/bin/env python
from __future__ import division
import sys, os
current_line = ""
def time_to_millis(time_in):
time_in = time_in.strip().replace(" ", "")
result = "NaN"
if "ms" in time_in:
result = str(METHOD_NAME(time_in.replace("ms", "")))
elif "s" in time_in:
result = str(METHOD_NAME(time_in.replace("s", "")) * 1000.0)
elif "m" in time_in:
result = str(METHOD_NAME(time_in.replace("m", "")) * 60.0 * 1000.0)
elif "h" in time_in:
result = str(METHOD_NAME(time_in.replace("h", "")) * 60.0 * 60.0 * 1000.0)
return result
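# Expected conversions, following the branches above (illustrative):
#   time_to_millis("250ms") -> "250.0"
#   time_to_millis("2s")    -> "2000.0"
#   time_to_millis("3m")    -> "180000.0"
#   time_to_millis("1h")    -> "3600000.0"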
def size_to_MiB(size_in):
size_in = format_token(size_in)
base_size = 1.0
if "b" in size_in:
base_size = 8.0
size_in = size_in.replace("i", "")
size_in = size_in.replace("I", "")
size_in = size_in.replace("b", "")
size_in = size_in.replace("B", "")
if "G" in size_in or "g" in size_in:
size_in = str(
METHOD_NAME(size_in.replace("G", "").replace("g", "")) / 1024.0 / 1024.0 / base_size
)
elif "M" in size_in or "m" in size_in:
size_in = str(METHOD_NAME(size_in.replace("M", "").replace("m", "")) / 1024.0 / base_size)
elif "K" in size_in or "k" in size_in:
size_in = str(METHOD_NAME(size_in.replace("K", "").replace("k", "")) / 1024.0 / base_size)
# we are now in byte
return str(METHOD_NAME(size_in) * 1024.0 * 1024.0)
def contains(line_in, on_tokens):
for token in on_tokens:
if token not in line_in:
return False
return True
def METHOD_NAME(input_str):
try:
return float(input_str)
except ValueError:
global current_line
print("Error, line: " + current_line)
exit(1)
def format_token(input_str):
return (
input_str.strip()
.replace(" ", "")
.replace("(", "")
.replace(")", "")
.replace("%", "")
.replace(",", "")
)
# 1 indexed
def get_token(line_in, index):
return str(line_in.split(" ")[index - 1])
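# e.g. get_token("branches # 1234", 3) -> "1234"; indices are 1-based as noted above.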
def stringify(value_map, key):
if key not in value_map:
return str(float("NaN"))
else:
return str(value_map[key])
def get_all_value(value_map, key_list):
return_str = ""
for key in key_list:
return_str += stringify(value_map, key) + ","
# the trailing comma is kept so the row lines up with the header built by get_all_key
return return_str
def get_all_key(value_map, key_list):
return_str = ""
for key in key_list:
return_str += key + ","
return return_str
def insert_string(value_map, key, input_str):
value_map[key] = format_token(input_str)
def insert_decimal(value_map, key, input_str):
value_map[key] = str(METHOD_NAME(format_token(input_str)))
def parse_line(benchmarks, line):
line.strip()
line = " ".join(line.split())
global current_line
current_line = line
if contains(line, {"Executing simulation with", "threads"}):
insert_decimal(benchmarks, "max_thread_count", get_token(line, 8))
elif contains(line, {"Simulating", "vectors"}):
insert_decimal(benchmarks, "number_of_vectors", get_token(line, 2))
elif contains(line, {"Nodes:"}):
insert_decimal(benchmarks, "number_of_nodes", get_token(line, 2))
elif contains(line, {"Connections:"}):
insert_decimal(benchmarks, "number_of_connections", get_token(line, 2))
elif contains(line, {"Threads:"}):
insert_decimal(benchmarks, "used_threads", get_token(line, 2))
elif contains(line, {"Degree:"}):
insert_decimal(benchmarks, "degree", get_token(line, 2))
elif contains(line, {"Stages:"}):
insert_decimal(benchmarks, "number_of_stages", get_token(line, 2))
elif contains(line, {"Simulation time:"}):
insert_decimal(benchmarks, "simulation_time", time_to_millis(get_token(line, 3)))
elif contains(line, {"Elapsed time:"}):
insert_decimal(benchmarks, "elapsed_time", time_to_millis(get_token(line, 3)))
elif contains(line, {"Coverage:"}):
insert_decimal(benchmarks, "percent_coverage", get_token(line, 3))
elif contains(line, {"Odin ran with exit status:"}):
insert_decimal(benchmarks, "exit_code", get_token(line, 6))
elif contains(line, {"Odin II took", "seconds", "(max_rss"}):
insert_decimal(benchmarks, "total_time", time_to_millis(get_token(line, 4) + "s"))
insert_decimal(benchmarks, "max_rss", size_to_MiB(get_token(line, 7) + get_token(line, 8)))
elif contains(line, {"context-switches #"}):
insert_decimal(benchmarks, "context_switches", get_token(line, 1))
elif contains(line, {"cpu-migrations #"}):
insert_decimal(benchmarks, "cpu_migration", get_token(line, 1))
elif contains(line, {"page-faults #"}):
insert_decimal(benchmarks, "page_faults", get_token(line, 1))
elif contains(line, {"stalled-cycles-frontend #"}):
insert_decimal(benchmarks, "stalled_cycle_frontend", get_token(line, 1))
elif contains(line, {"stalled-cycles-backend #"}):
insert_decimal(benchmarks, "stalled_cycle_backend", get_token(line, 1))
elif contains(line, {"cycles #"}):
insert_decimal(benchmarks, "cycles", get_token(line, 1))
elif contains(line, {"branches #"}):
insert_decimal(benchmarks, "branches", get_token(line, 1))
elif contains(line, {"branch-misses #"}):
insert_decimal(benchmarks, "branch_misses", get_token(line, 1))
elif contains(line, {"LLC-loads #"}):
insert_decimal(benchmarks, "llc_loads", get_token(line, 1))
elif contains(line, {"LLC-load-misses #"}):
insert_decimal(benchmarks, "llc_load_miss", get_token(line, 1))
elif contains(line, {"CPU:"}):
insert_decimal(benchmarks, "percent_cpu_usage", get_token(line, 2))
elif contains(line, {"Minor PF:"}):
insert_decimal(benchmarks, "minor_page_faults", get_token(line, 3))
def main():
benchmarks = {}
key_list = [
"max_thread_count",
"number_of_vectors",
"number_of_nodes",
"number_of_connections",
"used_threads",
"degree",
"number_of_stages",
"simulation_time",
"elapsed_time",
"percent_coverage",
"exit_code",
"total_time",
"max_rss",
"context_switches",
"cpu_migration",
"page_faults",
"stalled_cycle_frontend",
"stalled_cycle_backend",
"cycles",
"branches",
"branch_misses",
"llc_loads",
"llc_load_miss",
"percent_cpu_usage",
"minor_page_faults",
]
if len(sys.argv) < 4:
print(
"Wrong number of argument, expecting ./exec <input.log> <output.csv> <... (header value pair)>"
)
exit(-1)
log_file_to_parse = sys.argv[1]
output_file = sys.argv[2]
fileContext = open(log_file_to_parse, "r")
for wholeLine in fileContext:
parse_line(benchmarks, wholeLine)
f = open(output_file, "w+")
header_in = ""
values_in = ""
i = 0
while i < (len(sys.argv) - 3):
header_in += sys.argv[i + 3] + ","
values_in += sys.argv[i + 4] + ","
i += 2
header_in += get_all_key(benchmarks, key_list) + "\n"
values_in += get_all_value(benchmarks, key_list) + "\n"
f.write(header_in)
f.write(values_in)
f.close()
exit(0)
if __name__ == "__main__":
main()
|
4,344 |
test avi event rfc5424
|
# Copyright 2019 Splunk, Inc.
#
# Use of this source code is governed by a BSD-2-clause-style
# license that can be found in the LICENSE-BSD2 file or at
# https://opensource.org/licenses/BSD-2-Clause
import shortuuid
from jinja2 import Environment, select_autoescape
from .sendmessage import sendsingle
from .splunkutils import splunk_single
from .timeutils import time_operations
import datetime
import pytest
env = Environment(autoescape=select_autoescape(default_for_string=False))
test_rfc5424 = [
r'{{ mark }}1 {{ iso }} {{ host }} aer01-abc-cde-fgh 0 711603 - "adf":1,"virtualservice":"virtualservice-12345-678-9810-b456-123456","vs_ip":"10.0.0.1","client_ip":"10.0.0.1","client_src_port":123,"client_dest_port":123,"start_timestamp":"2020-05-07T14:11:52.550629Z","report_timestamp":"2020-05-07T14:11:52.550629Z","connection_ended":1,"mss":1500,"rx_bytes":99,"rx_pkts":1,"service_engine":"aer01-abc-cde-fgh","log_id":711603,"server_ip":"0.0.0.0","server_conn_src_ip":"0.0.0.0","significant_log":["ADF_CLIENT_DNS_FAILED_GS_DOWN"],"dns_fqdn":"abc-cde-efg.cisco.com","dns_qtype":"DNS_RECORD_A","gslbservice":"gslbservice-xyz","gslbservice_name":"Naga-GSLB","dns_etype":"DNS_ENTRY_GSLB","protocol":"PROTOCOL_UDP","dns_request":{"question_count":1,"identifier":12345},"vs_name":"aer01-abc-cde-fgh"'
]
test_data_rfc = [
r'{{ mark }}{{ date }} {{ avi_time }} {{ host }} Avi-Controller - - - INFO [abc-cde.gen: reason: Syslog for Confiqg Events occured] At 2020-04-07 15:27:10+00:00 event USER_AUTHORIZED_BY_RULE occurred on object abc-cde.gen in tenant admin as User abc-cde.gen was authorized by mapping rule user is member of groups "["abcd-efgh-ij-klmn"]" and ignore user attribute values.'
]
test_data_JSON = [
r'{{ mark }}{{ date }} {{ avi_time }} {{ host }} Avi-Controller - - - INFO [abc-cde.gen: reason: Syslog for Config Events occured] {"level": "ALERT_LOW", "timestamp": "2020-04-07 15:35:26", "obj_name": "abc-cde.gen", "tenant_uuid": "admin", "summary": "Syslog for Config Events occured", "obj_key": "abc-cde.gen", "reason": "threshold_exceeded", "obj_uuid": "abc-cde.gen", "related_objects": [""], "threshold": 0, "events": [{"obj_type": "USER", "tenant_name": "", "event_id": "USER_AUTHORIZED_BY_RULE", "related_uuids": ["abc-cde.gen"], "event_details": {"config_user_authrz_rule_details": {"roles": "readonly-all", "tenants": "All Tenants", "user": "abc-cde.gen", "rule": "user is member of groups \"[\"abcd-efgh-ij-klmn\"]\" and ignore user attribute values"}}, "event_description": "User abc-cde.gen was authorized by mapping rule user is member of groups \"[\"abcd-efgh-ij-klmn\"]\" and ignore user attribute values", "module": "CONFIG", "report_timestamp": "2020-04-07 15:35:26", "internal": "EVENT_EXTERNAL", "event_pages": ["EVENT_PAGE_AUDIT", "EVENT_PAGE_ALL"], "context": "EVENT_CONTEXT_CONFIG", "obj_name": "abc-cde.gen", "obj_uuid": "abc-cde.gen", "tenant": "admin"}], "name": "abc-syslog"}'
]
test_data_no_host = [
r'{{ mark }} {{ bsd }} {{ host }} [{{ date }} {{ avi_time }}: Avi-Controller: INFO: ] [abc-cde.gen: reason: Syslog for Config Events occured] At 2020-04-07 15:32:09+00:00 event USER_AUTHORIZED_BY_RULE occurred on object abc-cde.gen in tenant admin as User abc-cde.gen was authorized by mapping rule user is member of groups "["abcd-efgh-ij-klmn"]" and ignore user attribute values. {{ host }} '
]
@pytest.mark.parametrize("event", test_data_rfc)
def test_avi_event_rfc(
record_property, setup_splunk, setup_sc4s, get_host_key, event
):
host = get_host_key
dt = datetime.datetime.now()
_, bsd, _, date, _, _, epoch = time_operations(dt)
avi_time = dt.strftime("%H:%M:%S,%f")[:-3]
epoch = epoch[:-3]
mt = env.from_string(event + "\n")
message = mt.render(mark="<46>", bsd=bsd, host=host, date=date, avi_time=avi_time)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string(
'search _time={{ epoch }} index=netops host="{{ host }}" sourcetype="avi:events"'
)
search = st.render(epoch=epoch, host=host)
result_count, _ = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", result_count)
record_property("message", message)
assert result_count == 1
@pytest.mark.parametrize("event", test_data_JSON)
def test_avi_event_JSON(
record_property, setup_splunk, setup_sc4s, get_host_key, event
):
host = get_host_key
dt = datetime.datetime.now()
_, bsd, _, date, _, _, epoch = time_operations(dt)
avi_time = dt.strftime("%H:%M:%S,%f")[:-3]
epoch = epoch[:-3]
mt = env.from_string(event + "\n")
message = mt.render(mark="<46>", bsd=bsd, host=host, date=date, avi_time=avi_time)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string(
'search _time={{ epoch }} index=netops host="{{ host }}" sourcetype="avi:events"'
)
search = st.render(epoch=epoch, host=host)
result_count, _ = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", result_count)
record_property("message", message)
assert result_count == 1
@pytest.mark.parametrize("event", test_data_no_host)
def test_avi_event_no_host(
record_property, setup_splunk, setup_sc4s, get_host_key, event
):
host = get_host_key
dt = datetime.datetime.now()
_, bsd, _, date, _, _, epoch = time_operations(dt)
avi_time = dt.strftime("%H:%M:%S,%f")[:-3]
epoch = epoch[:-3]
mt = env.from_string(event + "\n")
message = mt.render(mark="<46>", bsd=bsd, date=date, avi_time=avi_time, host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string(
'search _time={{ epoch }} index=netops sourcetype="avi:events" {{ host }}'
)
search = st.render(epoch=epoch, host=host)
result_count, _ = splunk_single(setup_splunk, search)
record_property("resultCount", result_count)
record_property("message", message)
assert result_count == 1
@pytest.mark.parametrize("event", test_rfc5424)
def METHOD_NAME(
record_property, setup_splunk, setup_sc4s, get_host_key, event
):
host = get_host_key
dt = datetime.datetime.now()
iso, _, _, _, _, _, epoch = time_operations(dt)
# Tune time functions
epoch = epoch[:-3]
mt = env.from_string(event + "\n")
message = mt.render(mark="<134>", iso=iso, host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string('search _time={{ epoch }} index=netops sourcetype="avi:logs"')
search = st.render(epoch=epoch, host=host)
result_count, _ = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", result_count)
record_property("message", message)
assert result_count == 1
|
4,345 |
test incorrectly formatted variables
|
import pytest
from unittest import mock
from awx.main.models import UnifiedJob, UnifiedJobTemplate, WorkflowJob, WorkflowJobNode, WorkflowApprovalTemplate, Job, User, Project, JobTemplate, Inventory
from awx.main.constants import JOB_VARIABLE_PREFIXES
def METHOD_NAME():
bad_data = '{"bar":"foo'
accepted, ignored, errors = UnifiedJobTemplate().accept_or_ignore_variables(bad_data)
assert not accepted
assert ignored == bad_data
assert 'Cannot parse as JSON' in str(errors['extra_vars'][0])
def test_unified_job_workflow_attributes():
with mock.patch('django.db.ConnectionRouter.db_for_write'):
job = UnifiedJob(id=1, name="job-1", launch_type="workflow")
job.unified_job_node = WorkflowJobNode(workflow_job=WorkflowJob(pk=1))
assert job.spawned_by_workflow is True
assert job.workflow_job_id == 1
def mock_on_commit(f):
f()
@pytest.fixture
def unified_job(mocker):
mocker.patch.object(UnifiedJob, 'can_cancel', return_value=True)
j = UnifiedJob()
j.status = 'pending'
j.cancel_flag = None
j.save = mocker.MagicMock()
j.websocket_emit_status = mocker.MagicMock()
j.fallback_cancel = mocker.MagicMock()
return j
def test_cancel(unified_job):
with mock.patch('awx.main.models.unified_jobs.connection.on_commit', wraps=mock_on_commit):
unified_job.cancel()
assert unified_job.cancel_flag is True
assert unified_job.status == 'canceled'
assert unified_job.job_explanation == ''
# Note: the websocket emit status check is just reflecting the state of the current code.
# Some more thought may want to go into only emitting canceled if/when the job record
# status is changed to canceled. Unlike, currently, where it's emitted unconditionally.
unified_job.websocket_emit_status.assert_called_with("canceled")
assert [(args, kwargs) for args, kwargs in unified_job.save.call_args_list] == [
((), {'update_fields': ['cancel_flag', 'start_args']}),
((), {'update_fields': ['status']}),
]
def test_cancel_job_explanation(unified_job):
job_explanation = 'giggity giggity'
with mock.patch('awx.main.models.unified_jobs.connection.on_commit'):
unified_job.cancel(job_explanation=job_explanation)
assert unified_job.job_explanation == job_explanation
assert [(args, kwargs) for args, kwargs in unified_job.save.call_args_list] == [
((), {'update_fields': ['cancel_flag', 'start_args', 'job_explanation']}),
((), {'update_fields': ['status']}),
]
def test_organization_copy_to_jobs():
"""
All unified job types should infer their organization from their template organization
"""
for cls in UnifiedJobTemplate.__subclasses__():
if cls is WorkflowApprovalTemplate:
continue # these do not track organization
assert 'organization' in cls._get_unified_job_field_names(), cls
def test_log_representation():
"""
Common representation used inside of log messages
"""
uj = UnifiedJob(status='running', id=4)
job = Job(status='running', id=4)
assert job.log_format == 'job 4 (running)'
assert uj.log_format == 'unified_job 4 (running)'
class TestMetaVars:
"""
Corresponding functional test exists for cases with indirect relationships
"""
def test_job_metavars(self):
maker = User(username='joe', pk=47, id=47)
inv = Inventory(name='example-inv', id=45)
result_hash = {}
for name in JOB_VARIABLE_PREFIXES:
result_hash['{}_job_id'.format(name)] = 42
result_hash['{}_job_launch_type'.format(name)] = 'manual'
result_hash['{}_user_name'.format(name)] = 'joe'
result_hash['{}_user_email'.format(name)] = ''
result_hash['{}_user_first_name'.format(name)] = ''
result_hash['{}_user_last_name'.format(name)] = ''
result_hash['{}_user_id'.format(name)] = 47
result_hash['{}_inventory_id'.format(name)] = 45
result_hash['{}_inventory_name'.format(name)] = 'example-inv'
result_hash['{}_execution_node'.format(name)] = 'example-exec-node'
assert (
Job(name='fake-job', pk=42, id=42, launch_type='manual', created_by=maker, inventory=inv, execution_node='example-exec-node').awx_meta_vars()
== result_hash
)
def test_project_update_metavars(self):
data = Job(
name='fake-job',
pk=40,
id=40,
launch_type='manual',
project=Project(name='jobs-sync', scm_revision='12345444'),
job_template=JobTemplate(name='jobs-jt', id=92, pk=92),
).awx_meta_vars()
for name in JOB_VARIABLE_PREFIXES:
assert data['{}_project_revision'.format(name)] == '12345444'
assert '{}_job_template_id'.format(name) in data
assert data['{}_job_template_id'.format(name)] == 92
assert data['{}_job_template_name'.format(name)] == 'jobs-jt'
|
4,346 |
test force set system identity valid input
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
from argparse import Namespace
from azure.cli.core.azclierror import InvalidArgumentValueError
from ...._app_managed_identity_validator import (validate_app_force_set_system_identity_or_warning,
validate_app_force_set_user_identity_or_warning)
FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_0 = "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/fake-rg/providers/microsoft.managedidentity/userassignedidentities/fake-identity-name-0"
FAKE_UPPER_USER_IDENTITY_RESOURCE_ID_0 = FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_0.upper()
FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_1 = "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/fake-rg/providers/microsoft.managedidentity/userassignedidentities/fake-identity-name-1"
FAKE_UPPER_USER_IDENTITY_RESOURCE_ID_1 = FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_1.upper()
class TestAppForceSetSystemIdentityValidator(unittest.TestCase):
def METHOD_NAME(self):
ns = Namespace(system_assigned="DISAble")
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue("disable", ns.system_assigned)
def test_force_set_system_identity_valid_input_2(self):
ns = Namespace(system_assigned="disable")
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue("disable", ns.system_assigned)
def test_force_set_system_identity_valid_input_3(self):
ns = Namespace(system_assigned="DISABLE")
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue("disable", ns.system_assigned)
def test_force_set_system_identity_valid_input_4(self):
ns = Namespace(system_assigned="enAble")
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue("enable", ns.system_assigned)
def test_force_set_system_identity_valid_input_5(self):
ns = Namespace(system_assigned="enable")
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue("enable", ns.system_assigned)
def test_force_set_system_identity_valid_input_6(self):
ns = Namespace(system_assigned="ENABLE")
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue("enable", ns.system_assigned)
def test_force_set_system_identity_invalid_input(self):
ns = Namespace(system_assigned="randomestring")
with self.assertRaises(InvalidArgumentValueError) as context:
validate_app_force_set_system_identity_or_warning(ns)
self.assertTrue('Allowed values for "system-assigned" are:' in str(context.exception))
class TestAppForceSetUserIdentityValidator(unittest.TestCase):
def test_valid_input_1(self):
ns = Namespace(user_assigned=["DISable"])
validate_app_force_set_user_identity_or_warning(ns)
self.assertEquals("disable", ns.user_assigned[0])
def test_valid_input_2(self):
ns = Namespace(user_assigned=["disable"])
validate_app_force_set_user_identity_or_warning(ns)
self.assertEquals("disable", ns.user_assigned[0])
def test_valid_input_3(self):
ns = Namespace(user_assigned=["DISABLE"])
validate_app_force_set_user_identity_or_warning(ns)
self.assertEquals("disable", ns.user_assigned[0])
def test_valid_input_4(self):
ns = Namespace(user_assigned=[FAKE_UPPER_USER_IDENTITY_RESOURCE_ID_0])
validate_app_force_set_user_identity_or_warning(ns)
        self.assertEqual(FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_0, ns.user_assigned[0])
def test_valid_input_5(self):
ns = Namespace(user_assigned=[FAKE_UPPER_USER_IDENTITY_RESOURCE_ID_0, FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_1])
validate_app_force_set_user_identity_or_warning(ns)
        self.assertEqual(FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_0, ns.user_assigned[0])
        self.assertEqual(FAKE_LOWER_USER_IDENTITY_RESOURCE_ID_1, ns.user_assigned[1])
def test_invalid_input_1(self):
ns = Namespace(user_assigned=["random_input"])
with self.assertRaises(InvalidArgumentValueError) as context:
validate_app_force_set_user_identity_or_warning(ns)
self.assertTrue('Allowed values for "user-assigned" are:' in str(context.exception))
def test_invalid_input_2(self):
ns = Namespace(user_assigned=["ua1", "ua2"])
with self.assertRaises(InvalidArgumentValueError) as context:
validate_app_force_set_user_identity_or_warning(ns)
self.assertTrue('Invalid user-assigned managed identity resource ID' in str(context.exception))
|
4,347 |
die
|
#!/usr/bin/env python3
#
# Permission to use, copy, modify, distribute, and sell this software
# and its documentation for any purpose is hereby granted without
# fee, provided that the above copyright notice appear in all copies
# and that both that copyright notice and this permission notice
# appear in supporting documentation, and that the name of Red Hat
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission. Red
# Hat makes no representations about the suitability of this software
# for any purpose. It is provided "as is" without express or implied
# warranty.
#
# THE AUTHORS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
import argparse
import configparser
import sys
from pathlib import Path
try:
import libevdev
import pyudev
except ModuleNotFoundError as e:
print("Error: {}".format(str(e)), file=sys.stderr)
print(
"One or more python modules are missing. Please install those "
"modules and re-run this tool."
)
sys.exit(1)
class Ansi:
clearline = "\x1B[K"
@classmethod
def up(cls, count):
return f"\x1B[{count}A"
@classmethod
def down(cls, count):
return f"\x1B[{count}B"
@classmethod
def right(cls, count):
return f"\x1B[{count}C"
@classmethod
def left(cls, count):
return f"\x1B[{count}D"
def METHOD_NAME(msg):
print(msg, file=sys.stderr)
sys.exit(1)
def select_device():
context = pyudev.Context()
for device in context.list_devices(subsystem="input"):
if device.get("ID_INPUT_TABLET", 0) and (device.device_node or "").startswith(
"/dev/input/event"
):
name = device.get("NAME", None)
if not name:
name = next(
(p.get("NAME") for p in device.ancestors if p.get("NAME")),
"unknown",
)
print("Using {}: {}".format(name or "unknown", device.device_node))
return device.device_node
METHOD_NAME("Unable to find a tablet device.")
def record_events(ns):
with open(ns.device_path, "rb") as fd:
d = libevdev.Device(fd)
if not d.absinfo[libevdev.EV_ABS.ABS_MISC]:
METHOD_NAME("Device only supports generic styli")
tool_bits = set(
c for c in libevdev.EV_KEY.codes if c.name.startswith("BTN_TOOL_")
)
styli = {} # dict of (type, serial) = proximity_state
current_type, current_serial = 0, 0
in_prox = False
dirty = False
print("Please put tool in proximity")
try:
while True:
for event in d.events():
if event.matches(libevdev.EV_ABS.ABS_MISC):
if event.value != 0:
current_type = event.value
dirty = True
elif event.matches(libevdev.EV_MSC.MSC_SERIAL):
if event.value != 0:
current_serial = event.value & 0xFFFFFFFF
dirty = True
elif event.code in tool_bits:
# print(f'Current prox: {event.value}')
in_prox = event.value != 0
dirty = True
elif event.matches(libevdev.EV_SYN.SYN_REPORT) and dirty:
dirty = False
print(
f"{Ansi.up(len(styli))}{Ansi.left(10000)}{Ansi.clearline}",
end="",
)
styli[(current_type, current_serial)] = in_prox
for s, prox in styli.items():
tid, serial = s
print(
f"Tool id {tid:#x} serial {serial:#x} in-proximity: {prox} "
)
except KeyboardInterrupt:
print("Terminating")
return [s[0] for s in styli.keys()]
def load_data_files():
lookup_paths = (
("./data/",),
("@DATADIR@", "@ETCDIR@"),
("/usr/share/libwacom/", "/etc/libwacom/"),
)
stylusfiles = []
for paths in lookup_paths:
stylusfiles = []
for p in paths:
files = list(Path(p).glob("*.stylus"))
if files:
stylusfiles += files
if any(stylusfiles):
break
else:
METHOD_NAME("Unable to find a libwacom.stylus data file")
print(f'Using stylus file(s): {", ".join([str(s) for s in stylusfiles])}')
styli = {}
for path in stylusfiles:
config = configparser.ConfigParser()
config.read(path)
for stylus_id in config.sections():
sid = int(stylus_id, 16)
styli[sid] = config[stylus_id].get("Group", sid)
return styli
def main():
parser = argparse.ArgumentParser(description="Tool to show tablet stylus ids")
parser.add_argument(
"device_path", nargs="?", default=None, help="Path to the /dev/input/event node"
)
ns = parser.parse_args()
if not ns.device_path:
ns.device_path = select_device()
all_styli = load_data_files()
styli = record_events(ns)
groups = []
for sid in styli:
if sid in all_styli:
groups.append(all_styli[sid])
else:
print(f"Unknown stylus id {sid:#x}. New entry needed")
print("Suggested line for .tablet file:")
print(f"Styli={';'.join(set(groups))}")
if __name__ == "__main__":
try:
main()
except PermissionError:
METHOD_NAME("Insufficient permissions, please run me as root")
|
4,348 |
parse args
|
# Copyright (c) 2019-2023 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import ast
import importlib
import os
import sys
import copy
from pathlib import Path
import onnx
import torch
import torch.onnx
INPUT_DTYPE_TO_TORCH = {
'bool': torch.bool,
'double': torch.double,
'float': torch.float,
'half': torch.half,
'int32': torch.int32,
'int8': torch.int8,
'long': torch.long,
'short': torch.short,
'uint8': torch.uint8,
}
class prepend_to_path:
def __init__(self, paths):
        self._prepended_paths = paths
self._original_path = None
def __enter__(self):
self._original_path = copy.deepcopy(sys.path)
        if self._prepended_paths is not None:
            sys.path = self._prepended_paths + sys.path
def __exit__(self, type, value, traceback):
if self._original_path is not None:
sys.path = self._original_path
def is_sequence(element):
return isinstance(element, (list, tuple))
def shapes_arg(values):
"""Checks that the argument represents a tensor shape or a sequence of tensor shapes"""
shapes = ast.literal_eval(values)
if not is_sequence(shapes):
raise argparse.ArgumentTypeError(
'{!r}: must be a sequence'.format(shapes))
if not all(is_sequence(shape) for shape in shapes):
shapes = (shapes, )
for shape in shapes:
if not is_sequence(shape):
raise argparse.ArgumentTypeError(
'{!r}: must be a sequence'.format(shape))
for value in shape:
if not isinstance(value, int) or value < 0:
raise argparse.ArgumentTypeError(
                    'Argument {!r} must be a non-negative integer'.format(value))
return shapes
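# Illustrative inputs/outputs for shapes_arg (hypothetical values, not taken
# from the original command line):
#   shapes_arg('[1,3,224,224]')               -> ([1, 3, 224, 224],)
#   shapes_arg('[[1,3,224,224],[1,1,64,64]]') -> [[1, 3, 224, 224], [1, 1, 64, 64]]
#   shapes_arg('[1,-3]')                      -> raises argparse.ArgumentTypeError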
def model_parameter(parameter):
param, value = parameter.split('=', 1)
try:
value = eval(value, {}, {})
except NameError as err:
print('Cannot evaluate {!r} value in {}. For string values use "{}=\'{}\'" (with all quotes).'
.format(value, parameter, param, value))
sys.exit(err)
return param, value
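# Illustrative behaviour of model_parameter (hypothetical argument values):
#   model_parameter("num_classes=10")  -> ('num_classes', 10)
#   model_parameter("name='resnet50'") -> ('name', 'resnet50')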
def METHOD_NAME():
"""Parse input arguments"""
parser = argparse.ArgumentParser(
description='Conversion of pretrained models from PyTorch to ONNX')
parser.add_argument('--model-name', type=str, required=True,
help='Model to convert. May be class name or name of constructor function')
parser.add_argument('--weights', type=str,
help='Path to the weights in PyTorch\'s format')
parser.add_argument('--input-shapes', metavar='SHAPE[,SHAPE...]', type=shapes_arg, required=True,
help='Comma-separated shapes of the input blobs. Example: [1,1,256,256],[1,3,256,256],...')
parser.add_argument('--output-file', type=Path, required=True,
help='Path to the output ONNX model')
parser.add_argument('--model-path', type=str, action='append', dest='model_paths',
help='Path to PyTorch model\'s source code')
parser.add_argument('--import-module', type=str, required=True,
help='Name of module, which contains model\'s constructor')
parser.add_argument('--input-names', type=str, metavar='L[,L...]', required=True,
help='Comma-separated names of the input layers')
parser.add_argument('--output-names', type=str, metavar='L[,L...]', required=True,
help='Comma-separated names of the output layers')
parser.add_argument('--model-param', type=model_parameter, default=[], action='append',
help='Pair "name"="value" of model constructor parameter')
parser.add_argument('--inputs-dtype', type=str, required=False, choices=INPUT_DTYPE_TO_TORCH, default='float',
help='Data type for inputs')
parser.add_argument('--conversion-param', type=model_parameter, default=[], action='append',
help='Additional parameter for export')
parser.add_argument('--opset_version', type=int,
default=11, help='The ONNX opset version')
return parser.METHOD_NAME()
def load_model(model_name, weights, model_paths, module_name, model_params):
"""Import model and load pretrained weights"""
with prepend_to_path(model_paths):
try:
module = importlib.import_module(module_name)
creator = getattr(module, model_name)
model = creator(**model_params)
except ImportError as err:
if model_paths:
print('Module {} in {} doesn\'t exist. Check import path and name'.format(
model_name, os.pathsep.join(model_paths)))
else:
print(
'Module {} doesn\'t exist. Check if it is installed'.format(model_name))
sys.exit(err)
except AttributeError as err:
print('ERROR: Module {} contains no class or function with name {}!'
.format(module_name, model_name))
sys.exit(err)
try:
if weights:
model.load_state_dict(torch.load(weights, map_location='cpu'))
except RuntimeError as err:
print('ERROR: Weights from {} cannot be loaded for model {}! Check matching between model and weights'.format(
weights, model_name))
sys.exit(err)
return model
@torch.no_grad()
def convert_to_onnx(model, input_shapes, output_file, input_names, output_names, inputs_dtype, conversion_params,
opset_version):
"""Convert PyTorch model to ONNX and check the resulting onnx model"""
output_file.parent.mkdir(parents=True, exist_ok=True)
model.eval()
dummy_inputs = tuple(
torch.zeros(input_shape, dtype=INPUT_DTYPE_TO_TORCH[inputs_dtype])
for input_shape in input_shapes)
model(*dummy_inputs)
torch.onnx.export(model, dummy_inputs, str(output_file), verbose=False, opset_version=opset_version,
input_names=input_names.split(','), output_names=output_names.split(','), **conversion_params)
model = onnx.load(str(output_file))
try:
onnx.checker.check_model(model)
print('ONNX check passed successfully.')
except onnx.onnx_cpp2py_export.checker.ValidationError as exc:
sys.exit('ONNX check failed with error: ' + str(exc))
def main():
args = METHOD_NAME()
model = load_model(args.model_name, args.weights,
args.model_paths, args.import_module, dict(args.model_param))
convert_to_onnx(model, args.input_shapes, args.output_file, args.input_names, args.output_names, args.inputs_dtype,
dict(args.conversion_param), args.opset_version)
if __name__ == '__main__':
main()
|
4,349 |
put
|
"""Pyro transport module for kombu.
Pyro transport, and Kombu Broker daemon.
Requires the :mod:`Pyro4` library to be installed.
Features
========
* Type: Virtual
* Supports Direct: Yes
* Supports Topic: Yes
* Supports Fanout: No
* Supports Priority: No
* Supports TTL: No
Connection String
=================
To use the Pyro transport with Kombu, use a URL of the form:
.. code-block::
pyro://localhost/kombu.broker
The hostname is where the transport will be looking for a Pyro name server,
which is used in turn to locate the kombu.broker Pyro service.
This broker can be launched by simply executing this transport module directly,
with the command: ``python -m kombu.transport.pyro``
Transport Options
=================
"""
from __future__ import annotations
import sys
from queue import Empty, Queue
from kombu.exceptions import reraise
from kombu.log import get_logger
from kombu.utils.objects import cached_property
from . import virtual
try:
import Pyro4 as pyro
from Pyro4.errors import NamingError
from Pyro4.util import SerializerBase
except ImportError: # pragma: no cover
pyro = NamingError = SerializerBase = None
DEFAULT_PORT = 9090
E_NAMESERVER = """\
Unable to locate pyro nameserver on host {0.hostname}\
"""
E_LOOKUP = """\
Unable to lookup '{0.virtual_host}' in pyro nameserver on host {0.hostname}\
"""
logger = get_logger(__name__)
class Channel(virtual.Channel):
"""Pyro Channel."""
def close(self):
super().close()
if self.shared_queues:
self.shared_queues._pyroRelease()
def queues(self):
return self.shared_queues.get_queue_names()
def _new_queue(self, queue, **kwargs):
if queue not in self.queues():
self.shared_queues.new_queue(queue)
def _has_queue(self, queue, **kwargs):
return self.shared_queues.has_queue(queue)
def _get(self, queue, timeout=None):
queue = self._queue_for(queue)
return self.shared_queues.get(queue)
def _queue_for(self, queue):
if queue not in self.queues():
self.shared_queues.new_queue(queue)
return queue
def METHOD_NAME(self, queue, message, **kwargs):
queue = self._queue_for(queue)
self.shared_queues.put(queue, message)
def _size(self, queue):
return self.shared_queues.size(queue)
def _delete(self, queue, *args, **kwargs):
self.shared_queues.delete(queue)
def _purge(self, queue):
return self.shared_queues.purge(queue)
def after_reply_message_received(self, queue):
pass
@cached_property
def shared_queues(self):
return self.connection.shared_queues
class Transport(virtual.Transport):
"""Pyro Transport."""
Channel = Channel
#: memory backend state is global.
# TODO: To be checked whether state can be per-Transport
global_state = virtual.BrokerState()
default_port = DEFAULT_PORT
driver_type = driver_name = 'pyro'
def __init__(self, client, **kwargs):
super().__init__(client, **kwargs)
self.state = self.global_state
def _open(self):
logger.debug("trying Pyro nameserver to find the broker daemon")
conninfo = self.client
try:
nameserver = pyro.locateNS(host=conninfo.hostname,
port=self.default_port)
except NamingError:
reraise(NamingError, NamingError(E_NAMESERVER.format(conninfo)),
sys.exc_info()[2])
try:
# name of registered pyro object
uri = nameserver.lookup(conninfo.virtual_host)
return pyro.Proxy(uri)
except NamingError:
reraise(NamingError, NamingError(E_LOOKUP.format(conninfo)),
sys.exc_info()[2])
def driver_version(self):
return pyro.__version__
@cached_property
def shared_queues(self):
return self._open()
if pyro is not None:
SerializerBase.register_dict_to_class("queue.Empty",
lambda cls, data: Empty())
@pyro.expose
@pyro.behavior(instance_mode="single")
class KombuBroker:
"""Kombu Broker used by the Pyro transport.
You have to run this as a separate (Pyro) service.
"""
def __init__(self):
self.queues = {}
def get_queue_names(self):
return list(self.queues)
def new_queue(self, queue):
if queue in self.queues:
return # silently ignore the fact that queue already exists
self.queues[queue] = Queue()
def has_queue(self, queue):
return queue in self.queues
def get(self, queue):
return self.queues[queue].get(block=False)
def put(self, queue, message):
self.queues[queue].put(message)
def size(self, queue):
return self.queues[queue].qsize()
def delete(self, queue):
del self.queues[queue]
def purge(self, queue):
while True:
try:
                    self.queues[queue].get(block=False)
except Empty:
break
# launch a Kombu Broker daemon with the command:
# ``python -m kombu.transport.pyro``
if __name__ == "__main__":
print("Launching Broker for Kombu's Pyro transport.")
with pyro.Daemon() as daemon:
print("(Expecting a Pyro name server at {}:{})"
.format(pyro.config.NS_HOST, pyro.config.NS_PORT))
with pyro.locateNS() as ns:
print("You can connect with Kombu using the url "
"'pyro://{}/kombu.broker'".format(pyro.config.NS_HOST))
uri = daemon.register(KombuBroker)
ns.register("kombu.broker", uri)
daemon.requestLoop()
|
4,350 |
test operator
|
import kdb, unittest
class Key(unittest.TestCase):
def setUp(self):
self.key = kdb.Key("user:/foo/bar",
kdb.KEY_VALUE, "value",
kdb.KEY_META, "by", "manuel",
kdb.KEY_META, "owner", "myowner"
)
self.bkey = kdb.Key("system:/bkey",
kdb.KEY_VALUE, b"bvalue\0\0",
kdb.KEY_END,
kdb.KEY_META, "lost", "lost"
)
def test_ctor(self):
self.assertIsInstance(self.key, kdb.Key)
self.assertIsInstance(self.bkey, kdb.Key)
k = kdb.Key("/cascading/key")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key("spec:/key")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key("proc:/key")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key("dir:/key")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key("user:/key")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key("system:/key")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key()
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
with self.assertRaises(kdb.KeyInvalidName):
k = kdb.Key("wrongname")
k = kdb.Key("user:/foo")
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key(self.key)
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
k = kdb.Key(self.key.dup())
self.assertIsInstance(k, kdb.Key)
self.assertTrue(k.isValid())
self.assertEqual(k, self.key)
k.name = "user:/copied"
self.assertNotEqual(k, self.key)
def METHOD_NAME(self):
self.assertNotEqual(self.key, self.bkey)
self.assertEqual(kdb.Key(self.key), self.key)
self.assertEqual(self.key, kdb.Key("user:/foo/bar", kdb.KEY_META, "owner", "myowner"))
self.assertEqual(kdb.Key(), kdb.Key())
self.assertNotEqual(kdb.Key("user:/key1"), kdb.Key("user:/key2"))
self.assertTrue(kdb.Key("user:/key1") == kdb.Key("user:/key1"))
self.assertTrue(kdb.Key("user:/key1") != kdb.Key("user:/key2"))
self.assertTrue(kdb.Key("user:/key1") < kdb.Key("user:/key2"))
self.assertTrue(kdb.Key("user:/key1") <= kdb.Key("user:/key2"))
self.assertTrue(kdb.Key("user:/key2") > kdb.Key("user:/key1"))
self.assertTrue(kdb.Key("user:/key2") >= kdb.Key("user:/key1"))
self.assertTrue(bool(self.key))
self.assertTrue(bool(self.bkey))
self.assertEqual(str(self.key), "user:/foo/bar")
self.assertEqual(str(self.bkey), "system:/bkey")
self.assertEqual(len(self.key), 3)
self.assertEqual(len(self.bkey), 2)
self.assertEqual(repr(self.key), "kdb.Key('user:/foo/bar')")
self.assertEqual(repr(self.bkey), "kdb.Key('system:/bkey')")
with self.assertRaises(TypeError):
hash(kdb.Key("user:/not_name_locked"))
def test_properties(self):
self.assertEqual(self.key.name, "user:/foo/bar")
self.assertEqual(self.key.value, "value")
self.assertEqual(self.key.basename, "bar")
self.assertEqual(self.bkey.name, "system:/bkey")
self.assertEqual(self.bkey.value, b"bvalue\0\0")
self.assertEqual(self.bkey.basename, "bkey")
k = kdb.Key("user:/key1", kdb.KEY_VALUE, "value")
self.assertFalse(k.isBinary())
self.assertIsNone(k.getMeta("binary"))
k.name = "system:/key2"
k.basename = "key3"
k.value = b"bvalue\0\0"
self.assertEqual(k.name, "system:/key3")
self.assertEqual(k.value, b"bvalue\0\0")
self.assertTrue(k.isBinary())
self.assertIsInstance(self.bkey.getMeta("binary"), kdb.Key)
self.assertEqual(kdb.Key("user:/key1", "value").value, "value")
self.assertEqual(kdb.Key("user:/key1", b"bvalue\0\0").value, b"bvalue\0\0")
k = kdb.Key("user:/key2")
with self.assertRaises(kdb.KeyInvalidName):
k.name = "foo"
def test_functions(self):
self.assertTrue(self.key.isUser())
self.assertTrue(self.bkey.isSystem())
self.assertTrue(self.key.isString())
self.assertTrue(self.bkey.isBinary())
self.assertTrue(self.key.isBelow(kdb.Key("user:/foo")))
self.assertFalse(self.key.isNameLocked())
self.assertFalse(self.key.isValueLocked())
self.assertFalse(self.key.isMetaLocked())
k = kdb.Key("user:/key1", kdb.KEY_VALUE, "value")
self.assertEqual(k.get(), "value")
k.set(b"bvalue\0\0")
self.assertEqual(k.get(), b"bvalue\0\0")
def test_meta(self):
self.assertIsInstance(self.key.getMeta("owner"), kdb.Key)
self.assertEqual(self.key.getMeta("owner").name, "meta:/owner")
self.assertEqual(self.key.getMeta("owner").value, "myowner")
self.assertEqual(self.key.getMeta("by").value, "manuel")
self.assertTrue(self.key.getMeta("by").isNameLocked())
self.assertTrue(self.key.getMeta("by").isValueLocked())
self.assertTrue(self.key.getMeta("by").isMetaLocked())
self.assertFalse(self.key.hasMeta("doesnt_exist"))
self.assertIsNone(self.key.getMeta("doesnt_exist"))
self.assertTrue(bool(self.bkey.getMeta("binary")))
self.assertIsNone(self.bkey.getMeta("owner"))
k = kdb.Key("user:/key1")
k.setMeta("foo", "bar")
self.assertEqual(k.getMeta("foo").value, "bar")
k = kdb.Key("user:/key1", { "foo2": "bar2", "foo3": "bar3" })
self.assertEqual(k.getMeta("foo2").value, "bar2")
self.assertEqual(k.getMeta("foo3").value, "bar3")
self.assertEqual(sum(1 for _ in self.key.getMeta()), 2)
self.assertEqual(sum(1 for _ in self.bkey.getMeta()), 1)
def test_python_copy(self):
import copy
k = copy.copy(self.key)
self.assertEqual(k, self.key)
k.name = "user:/copied"
self.assertNotEqual(k, self.key)
def test_iterator(self):
k = kdb.Key("user:/a\/b/c")
self.assertEqual(sum(1 for _ in k), 3)
self.assertEqual(sum(1 for _ in reversed(k)), 3)
self.assertEqual(iter(k).value(), "".join([chr(kdb.KEY_NS_USER)]))
self.assertEqual(reversed(k).value(), "c")
def test_helpers(self):
with self.assertRaises(ValueError):
kdb.Key("user:/noarray").array_elements()
parts = kdb.Key("user:/some/array/#_12").array_elements()
self.assertEqual(parts.index, 12)
self.assertEqual(parts.name, "user:/some/array")
self.assertEqual(parts.basename, "array")
if __name__ == '__main__':
unittest.main()
|
4,351 |
pad list
|
# Copyright (c) 2020 Mobvoi Inc (Binbin Zhang)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Modified from ESPnet(https://github.com/espnet/espnet)
"""Unility functions for Transformer."""
import math
from typing import List, Tuple
import torch
from torch.nn.utils.rnn import pad_sequence
IGNORE_ID = -1
def METHOD_NAME(xs: List[torch.Tensor], pad_value: int):
"""Perform padding for the list of tensors.
Args:
xs (List): List of Tensors [(T_1, `*`), (T_2, `*`), ..., (T_B, `*`)].
pad_value (float): Value for padding.
Returns:
Tensor: Padded tensor (B, Tmax, `*`).
Examples:
>>> x = [torch.ones(4), torch.ones(2), torch.ones(1)]
>>> x
[tensor([1., 1., 1., 1.]), tensor([1., 1.]), tensor([1.])]
>>> pad_list(x, 0)
tensor([[1., 1., 1., 1.],
[1., 1., 0., 0.],
[1., 0., 0., 0.]])
"""
n_batch = len(xs)
max_len = max([x.size(0) for x in xs])
pad = torch.zeros(n_batch, max_len, dtype=xs[0].dtype, device=xs[0].device)
pad = pad.fill_(pad_value)
for i in range(n_batch):
pad[i, :xs[i].size(0)] = xs[i]
return pad
def add_blank(ys_pad: torch.Tensor, blank: int,
ignore_id: int) -> torch.Tensor:
""" Prepad blank for transducer predictor
Args:
ys_pad (torch.Tensor): batch of padded target sequences (B, Lmax)
blank (int): index of <blank>
Returns:
ys_in (torch.Tensor) : (B, Lmax + 1)
Examples:
>>> blank = 0
>>> ignore_id = -1
>>> ys_pad
tensor([[ 1, 2, 3, 4, 5],
[ 4, 5, 6, -1, -1],
[ 7, 8, 9, -1, -1]], dtype=torch.int32)
>>> ys_in = add_blank(ys_pad, 0, -1)
>>> ys_in
tensor([[0, 1, 2, 3, 4, 5],
[0, 4, 5, 6, 0, 0],
[0, 7, 8, 9, 0, 0]])
"""
bs = ys_pad.size(0)
_blank = torch.tensor([blank],
dtype=torch.long,
requires_grad=False,
device=ys_pad.device)
_blank = _blank.repeat(bs).unsqueeze(1) # [bs,1]
out = torch.cat([_blank, ys_pad], dim=1) # [bs, Lmax+1]
return torch.where(out == ignore_id, blank, out)
def add_sos_eos(ys_pad: torch.Tensor, sos: int, eos: int,
ignore_id: int) -> Tuple[torch.Tensor, torch.Tensor]:
"""Add <sos> and <eos> labels.
Args:
ys_pad (torch.Tensor): batch of padded target sequences (B, Lmax)
sos (int): index of <sos>
        eos (int): index of <eos>
ignore_id (int): index of padding
Returns:
ys_in (torch.Tensor) : (B, Lmax + 1)
ys_out (torch.Tensor) : (B, Lmax + 1)
Examples:
>>> sos_id = 10
>>> eos_id = 11
>>> ignore_id = -1
>>> ys_pad
tensor([[ 1, 2, 3, 4, 5],
[ 4, 5, 6, -1, -1],
[ 7, 8, 9, -1, -1]], dtype=torch.int32)
>>> ys_in,ys_out=add_sos_eos(ys_pad, sos_id , eos_id, ignore_id)
>>> ys_in
tensor([[10, 1, 2, 3, 4, 5],
[10, 4, 5, 6, 11, 11],
[10, 7, 8, 9, 11, 11]])
>>> ys_out
tensor([[ 1, 2, 3, 4, 5, 11],
[ 4, 5, 6, 11, -1, -1],
[ 7, 8, 9, 11, -1, -1]])
"""
_sos = torch.tensor([sos],
dtype=torch.long,
requires_grad=False,
device=ys_pad.device)
_eos = torch.tensor([eos],
dtype=torch.long,
requires_grad=False,
device=ys_pad.device)
ys = [y[y != ignore_id] for y in ys_pad] # parse padded ys
ys_in = [torch.cat([_sos, y], dim=0) for y in ys]
ys_out = [torch.cat([y, _eos], dim=0) for y in ys]
return METHOD_NAME(ys_in, eos), METHOD_NAME(ys_out, ignore_id)
def reverse_pad_list(ys_pad: torch.Tensor,
ys_lens: torch.Tensor,
pad_value: float = -1.0) -> torch.Tensor:
"""Reverse padding for the list of tensors.
Args:
ys_pad (tensor): The padded tensor (B, Tokenmax).
ys_lens (tensor): The lens of token seqs (B)
pad_value (int): Value for padding.
Returns:
Tensor: Padded tensor (B, Tokenmax).
Examples:
>>> x
tensor([[1, 2, 3, 4], [5, 6, 7, 0], [8, 9, 0, 0]])
        >>> reverse_pad_list(x, torch.tensor([4, 3, 2]), 0.0)
tensor([[4, 3, 2, 1],
[7, 6, 5, 0],
[9, 8, 0, 0]])
"""
r_ys_pad = pad_sequence([(torch.flip(y.int()[:i], [0]))
for y, i in zip(ys_pad, ys_lens)], True,
pad_value)
return r_ys_pad
def th_accuracy(pad_outputs: torch.Tensor, pad_targets: torch.Tensor,
ignore_label: int) -> float:
"""Calculate accuracy.
Args:
pad_outputs (Tensor): Prediction tensors (B * Lmax, D).
pad_targets (LongTensor): Target label tensors (B, Lmax).
ignore_label (int): Ignore label id.
Returns:
float: Accuracy value (0.0 - 1.0).
"""
pad_pred = pad_outputs.view(pad_targets.size(0), pad_targets.size(1),
pad_outputs.size(1)).argmax(2)
mask = pad_targets != ignore_label
numerator = torch.sum(
pad_pred.masked_select(mask) == pad_targets.masked_select(mask))
denominator = torch.sum(mask)
return float(numerator) / float(denominator)
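# Shape sketch for th_accuracy (illustrative sizes only): with B=2, Lmax=3 and
# D=5 classes, pad_outputs is (6, 5) and pad_targets is (2, 3); positions whose
# target equals ignore_label are excluded from both numerator and denominator.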
def get_rnn(rnn_type: str) -> torch.nn.Module:
assert rnn_type in ["rnn", "lstm", "gru"]
if rnn_type == "rnn":
return torch.nn.RNN
elif rnn_type == "lstm":
return torch.nn.LSTM
else:
return torch.nn.GRU
def get_activation(act):
"""Return activation function."""
# Lazy load to avoid unused import
from wenet.transformer.swish import Swish
activation_funcs = {
"hardtanh": torch.nn.Hardtanh,
"tanh": torch.nn.Tanh,
"relu": torch.nn.ReLU,
"selu": torch.nn.SELU,
"swish": getattr(torch.nn, "SiLU", Swish),
"gelu": torch.nn.GELU
}
return activation_funcs[act]()
def get_subsample(config):
input_layer = config["encoder_conf"]["input_layer"]
assert input_layer in ["conv2d", "conv2d6", "conv2d8"]
if input_layer == "conv2d":
return 4
elif input_layer == "conv2d6":
return 6
elif input_layer == "conv2d8":
return 8
def remove_duplicates_and_blank(hyp: List[int]) -> List[int]:
new_hyp: List[int] = []
cur = 0
while cur < len(hyp):
if hyp[cur] != 0:
new_hyp.append(hyp[cur])
prev = cur
while cur < len(hyp) and hyp[cur] == hyp[prev]:
cur += 1
return new_hyp
def replace_duplicates_with_blank(hyp: List[int]) -> List[int]:
new_hyp: List[int] = []
cur = 0
while cur < len(hyp):
new_hyp.append(hyp[cur])
prev = cur
cur += 1
while cur < len(hyp) and hyp[cur] == hyp[prev] and hyp[cur] != 0:
new_hyp.append(0)
cur += 1
return new_hyp
def log_add(args: List[int]) -> float:
"""
Stable log add
"""
if all(a == -float('inf') for a in args):
return -float('inf')
a_max = max(args)
lsp = math.log(sum(math.exp(a - a_max) for a in args))
return a_max + lsp
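# Numeric sketch for log_add (illustrative values only): it behaves like
# log(sum(exp(x) for x in args)) computed stably, e.g.
#   log_add([math.log(0.1), math.log(0.2)]) ~= math.log(0.3)
#   log_add([-float('inf'), -float('inf')]) == -float('inf')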
|
4,352 |
schemas
|
from sqlalchemy.testing.requirements import Requirements
from alembic import util
from alembic.util import sqla_compat
from ..testing import exclusions
class SuiteRequirements(Requirements):
@property
def METHOD_NAME(self):
"""Target database must support external schemas, and have one
named 'test_schema'."""
return exclusions.open()
@property
def autocommit_isolation(self):
"""target database should support 'AUTOCOMMIT' isolation level"""
return exclusions.closed()
@property
def materialized_views(self):
"""needed for sqlalchemy compat"""
return exclusions.closed()
@property
def unique_constraint_reflection(self):
def doesnt_have_check_uq_constraints(config):
from sqlalchemy import inspect
insp = inspect(config.db)
try:
insp.get_unique_constraints("x")
except NotImplementedError:
return True
except TypeError:
return True
except Exception:
pass
return False
return exclusions.skip_if(doesnt_have_check_uq_constraints)
@property
def sequences(self):
"""Target database must support SEQUENCEs."""
return exclusions.only_if(
[lambda config: config.db.dialect.supports_sequences],
"no sequence support",
)
@property
def foreign_key_match(self):
return exclusions.open()
@property
def foreign_key_constraint_reflection(self):
return exclusions.open()
@property
def check_constraints_w_enforcement(self):
"""Target database must support check constraints
and also enforce them."""
return exclusions.open()
@property
def reflects_pk_names(self):
return exclusions.closed()
@property
def reflects_fk_options(self):
return exclusions.closed()
@property
def sqlalchemy_14(self):
return exclusions.skip_if(
lambda config: not util.sqla_14,
"SQLAlchemy 1.4 or greater required",
)
@property
def sqlalchemy_1x(self):
return exclusions.skip_if(
lambda config: util.sqla_2,
"SQLAlchemy 1.x test",
)
@property
def sqlalchemy_2(self):
return exclusions.skip_if(
lambda config: not util.sqla_2,
"SQLAlchemy 2.x test",
)
@property
def comments(self):
return exclusions.only_if(
lambda config: config.db.dialect.supports_comments
)
@property
def alter_column(self):
return exclusions.open()
@property
def computed_columns(self):
return exclusions.closed()
@property
def computed_columns_api(self):
return exclusions.only_if(
exclusions.BooleanPredicate(sqla_compat.has_computed)
)
@property
def computed_reflects_normally(self):
return exclusions.only_if(
exclusions.BooleanPredicate(sqla_compat.has_computed_reflection)
)
@property
def computed_reflects_as_server_default(self):
return exclusions.closed()
@property
def computed_doesnt_reflect_as_server_default(self):
return exclusions.closed()
@property
def autoincrement_on_composite_pk(self):
return exclusions.closed()
@property
def fk_ondelete_is_reflected(self):
return exclusions.closed()
@property
def fk_onupdate_is_reflected(self):
return exclusions.closed()
@property
def fk_onupdate(self):
return exclusions.open()
@property
def fk_ondelete_restrict(self):
return exclusions.open()
@property
def fk_onupdate_restrict(self):
return exclusions.open()
@property
def fk_ondelete_noaction(self):
return exclusions.open()
@property
def fk_initially(self):
return exclusions.closed()
@property
def fk_deferrable(self):
return exclusions.closed()
@property
def fk_deferrable_is_reflected(self):
return exclusions.closed()
@property
def fk_names(self):
return exclusions.open()
@property
def integer_subtype_comparisons(self):
return exclusions.open()
@property
def no_name_normalize(self):
return exclusions.skip_if(
lambda config: config.db.dialect.requires_name_normalize
)
@property
def identity_columns(self):
return exclusions.closed()
@property
def identity_columns_alter(self):
return exclusions.closed()
@property
def identity_columns_api(self):
return exclusions.only_if(
exclusions.BooleanPredicate(sqla_compat.has_identity)
)
@property
def supports_identity_on_null(self):
return exclusions.closed()
|
4,353 |
get file diff
|
from future import standard_library
standard_library.install_aliases()
import base64
import copy
from io import BytesIO
import os
import re
import tempfile
from multiprocessing.dummy import Pool
import requests
from django.conf import settings
from django.core.files.storage import default_storage
from le_utils.constants import file_formats
from PIL import Image
from PIL import ImageFile
from contentcuration.api import write_raw_content_to_storage
from contentcuration.models import File
from contentcuration.models import generate_object_storage_name
ImageFile.LOAD_TRUNCATED_IMAGES = True
THUMBNAIL_WIDTH = 400
def create_file_from_contents(contents, ext=None, node=None, preset_id=None, uploaded_by=None):
checksum, _, path = write_raw_content_to_storage(contents, ext=ext)
result = File(
file_format_id=ext,
file_size=default_storage.size(path),
checksum=checksum,
preset_id=preset_id,
contentnode=node,
uploaded_by=uploaded_by
)
result.file_on_disk.name = path
result.save()
return result
def METHOD_NAME(files):
"""Given a list of filenames as strings, find the filenames that aren't in our
    storage, and return them.
"""
# We use a thread pool in here, making direct HEAD requests to the storage URL
# to see if the objects exist.
# The threaded method is found to be the fastest -- see
# https://gist.github.com/aronasorman/57b8c01e5ed2b7cbf876e7734b7b9f38
# for benchmarking details.
ret = []
session = requests.Session()
def check_file_url(f):
filepath = generate_object_storage_name(os.path.splitext(f)[0], f)
url = "/".join([settings.AWS_S3_ENDPOINT_URL, settings.AWS_S3_BUCKET_NAME, filepath])
resp = session.head(url)
if resp.status_code != 200:
ret.append(f)
# use a pool of 3 threads to make our queries
pool = Pool(3)
pool.map(check_file_url, files)
return ret
def duplicate_file(file_object, node=None, assessment_item=None, preset_id=None, save=True):
if not file_object:
return None
file_copy = copy.copy(file_object)
file_copy.id = None
file_copy.contentnode = node
file_copy.assessment_item = assessment_item
file_copy.preset_id = preset_id or file_object.preset_id
if save:
file_copy.save()
return file_copy
def get_thumbnail_encoding(filename, dimension=THUMBNAIL_WIDTH):
"""
Generates a base64 encoding for a thumbnail
Args:
filename (str): thumbnail to generate encoding from (must be in storage already)
dimension (int, optional): desired width of thumbnail. Defaults to 400.
Returns base64 encoding of resized thumbnail
"""
if filename.startswith("data:image"):
return filename
checksum, ext = os.path.splitext(filename.split("?")[0])
outbuffer = BytesIO()
# make sure the aspect ratio between width and height is 16:9
thumbnail_size = [dimension, round(dimension / 1.77)]
try:
if not filename.startswith(settings.STATIC_ROOT):
filename = generate_object_storage_name(checksum, filename)
inbuffer = default_storage.open(filename, 'rb')
else:
inbuffer = open(filename, 'rb')
if not inbuffer:
raise AssertionError
with Image.open(inbuffer) as image:
image_format = image.format
# Note: Image.thumbnail ensures that the image will fit in the
# specified thumbnail size, but it retains the original image's
# aspect ratio. So a square image will remain square rather
# than being distorted to a 16:9 aspect ratio. This removes
# the need to make any changes like cropping the image.
image.thumbnail(thumbnail_size, Image.ANTIALIAS)
image.save(outbuffer, image_format)
return "data:image/{};base64,{}".format(ext[1:], base64.b64encode(outbuffer.getvalue()).decode('utf-8'))
finally:
# Try to close the inbuffer if it has been created
try:
inbuffer.close()
except UnboundLocalError:
pass
outbuffer.close()
BASE64_REGEX_STR = r'data:image\/([A-Za-z]*);base64,((?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)*)'
BASE64_REGEX = re.compile(BASE64_REGEX_STR, flags=re.IGNORECASE)
def get_base64_encoding(text):
""" get_base64_encoding: Get the first base64 match or None
Args:
text (str): text to check for base64 encoding
Returns: First match in text
"""
return BASE64_REGEX.search(text)
def write_base64_to_file(encoding, fpath_out):
""" write_base64_to_file: Convert base64 image to file
Args:
encoding (str): base64 encoded string
fpath_out (str): path to file to write
Returns: None
"""
encoding_match = get_base64_encoding(encoding)
if not encoding_match:
raise AssertionError("Error writing to file: Invalid base64 encoding")
with open(fpath_out, "wb") as target_file:
target_file.write(base64.decodebytes(encoding_match.group(2).encode('utf-8')))
def create_thumbnail_from_base64(encoding, file_format_id=file_formats.PNG, preset_id=None, uploaded_by=None):
"""
Takes encoding and makes it into a file object
Args:
encoding (str): base64 to make into an image file
file_format_id (str): what the extension should be
preset_id (str): what the preset should be
uploaded_by (<User>): who uploaded the image
Returns <File> object with the file_on_disk being the image file generated from the encoding
"""
fd, path = tempfile.mkstemp()
try:
write_base64_to_file(encoding, path)
with open(path, 'rb') as tf:
return create_file_from_contents(tf.read(), ext=file_format_id, preset_id=preset_id, uploaded_by=uploaded_by)
finally:
os.close(fd)
|
4,354 |
test middleware allowed hosts
|
from typing import TYPE_CHECKING, Any, cast
import pytest
from litestar import get
from litestar.config.allowed_hosts import AllowedHostsConfig
from litestar.exceptions import ImproperlyConfiguredException
from litestar.middleware import MiddlewareProtocol
from litestar.middleware.allowed_hosts import AllowedHostsMiddleware
from litestar.status_codes import HTTP_200_OK, HTTP_400_BAD_REQUEST
from litestar.testing import create_test_client
if TYPE_CHECKING:
from litestar.types import Receive, Scope, Send
class DummyApp(MiddlewareProtocol): # pyright: ignore
async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None:
return
def test_allowed_hosts_middleware() -> None:
@get(path="/")
def handler() -> None:
...
client = create_test_client(route_handlers=[handler], allowed_hosts=["*.example.com", "moishe.zuchmir.com"])
unpacked_middleware = []
cur = client.app.asgi_router.root_route_map_node.children["/"].asgi_handlers["GET"][0]
while hasattr(cur, "app"):
unpacked_middleware.append(cur)
cur = cast("Any", cur.app)
unpacked_middleware.append(cur)
assert len(unpacked_middleware) == 4
allowed_hosts_middleware = cast("Any", unpacked_middleware[1])
assert isinstance(allowed_hosts_middleware, AllowedHostsMiddleware)
assert allowed_hosts_middleware.allowed_hosts_regex.pattern == ".*\\.example.com$|moishe.zuchmir.com" # type: ignore
def test_allowed_hosts_middleware_hosts_regex() -> None:
config = AllowedHostsConfig(allowed_hosts=["*.example.com", "moishe.zuchmir.com"])
middleware = AllowedHostsMiddleware(app=DummyApp(), config=config) # type: ignore
assert middleware.allowed_hosts_regex is not None
assert middleware.allowed_hosts_regex.pattern == ".*\\.example.com$|moishe.zuchmir.com"
assert middleware.allowed_hosts_regex.fullmatch("www.example.com")
assert middleware.allowed_hosts_regex.fullmatch("other.example.com")
assert middleware.allowed_hosts_regex.fullmatch("x.y.z.example.com")
assert middleware.allowed_hosts_regex.fullmatch("moishe.zuchmir.com")
assert not middleware.allowed_hosts_regex.fullmatch("www.example.x.com")
assert not middleware.allowed_hosts_regex.fullmatch("josh.zuchmir.com")
assert not middleware.allowed_hosts_regex.fullmatch("x.moishe.zuchmir.com")
assert not middleware.allowed_hosts_regex.fullmatch("moishe.zuchmir.x.com")
def test_allowed_hosts_middleware_redirect_regex() -> None:
config = AllowedHostsConfig(
allowed_hosts=["*.example.com", "www.moishe.zuchmir.com", "www.yada.bada.bing.io", "example.com"]
)
middleware = AllowedHostsMiddleware(app=DummyApp(), config=config) # type: ignore
assert middleware.redirect_domains is not None
assert middleware.redirect_domains.pattern == "moishe.zuchmir.com|yada.bada.bing.io"
assert middleware.redirect_domains.fullmatch("moishe.zuchmir.com")
assert middleware.redirect_domains.fullmatch("yada.bada.bing.io")
def METHOD_NAME() -> None:
@get("/")
def handler() -> dict:
return {"hello": "world"}
config = AllowedHostsConfig(allowed_hosts=["*.example.com", "moishe.zuchmir.com"])
with create_test_client(handler, allowed_hosts=config) as client:
client.base_url = "http://x.example.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_200_OK
client.base_url = "http://x.y.example.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_200_OK
client.base_url = "http://moishe.zuchmir.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_200_OK
client.base_url = "http://x.moishe.zuchmir.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_400_BAD_REQUEST
client.base_url = "http://x.example.x.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_400_BAD_REQUEST
def test_middleware_allow_all() -> None:
@get("/")
def handler() -> dict:
return {"hello": "world"}
# contrived case - but if "*" is in hosts, we allow all.
config = AllowedHostsConfig(allowed_hosts=["*", "*.example.com", "moishe.zuchmir.com"])
with create_test_client(handler, allowed_hosts=config) as client:
client.base_url = "http://any.domain.allowed.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_200_OK
def test_middleware_redirect_on_www_by_default() -> None:
@get("/")
def handler() -> dict:
return {"hello": "world"}
config = AllowedHostsConfig(allowed_hosts=["www.moishe.zuchmir.com"])
with create_test_client(handler, allowed_hosts=config) as client:
client.base_url = "http://moishe.zuchmir.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_200_OK
assert str(response.url) == "http://www.moishe.zuchmir.com/"
def test_middleware_does_not_redirect_when_off() -> None:
@get("/")
def handler() -> dict:
return {"hello": "world"}
config = AllowedHostsConfig(allowed_hosts=["www.moishe.zuchmir.com"], www_redirect=False)
with create_test_client(handler, allowed_hosts=config) as client:
client.base_url = "http://moishe.zuchmir.com" # type: ignore
response = client.get("/")
assert response.status_code == HTTP_400_BAD_REQUEST
def test_validation_raises_for_wrong_wildcard_domain() -> None:
with pytest.raises(ImproperlyConfiguredException):
AllowedHostsConfig(allowed_hosts=["www.moishe.*.com"])
|
4,355 |
projection path
|
from abc import ABC, abstractmethod
from datetime import datetime
class QAMetric(ABC):
def __init__(self):
# ...
pass
@property
@abstractmethod
def METHOD_NAME(self):
pass
@property
@abstractmethod
def actual_path(self):
pass
@property
@abstractmethod
def name(self):
pass
@property
@abstractmethod
def threshold(self):
""" Can be either a percent or a raw value cutoff. If it's a raw cut off we
need to overwrite the method isAboveThreshold to handle correctly
"""
pass
@classmethod
def diff(cls, value1, value2):
return round(abs(value2 - value1), 4)
@classmethod
def threshold_diff_value(cls, value1, value2):
average = (value2 + value1) / 2
if average < 20:
            # Changes between small numbers are noisy, so skip the percent comparison for them.
return False
return (abs(average - value1) / value1) * 100
@classmethod
def isAboveThreshold(cls, value1, value2):
return cls.threshold_diff_value(value1, value2) > cls.threshold
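# Worked example of the percent threshold (illustrative numbers only): for
# values 100 and 120 the average is 110, so threshold_diff_value returns
# abs(110 - 100) / 100 * 100 = 10.0; a subclass with threshold = 10 treats the
# pair as within threshold because isAboveThreshold requires strictly > 10.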
class HospitalBedsRequiredTS(QAMetric):
name = "HospitalBedsRequiredTS"
METHOD_NAME = ["timeseries", "hospitalBedsRequired"]
actual_path = None
threshold = 10
class HospitalBedCapacityTS(QAMetric):
name = "HospitalBedCapacityTS"
METHOD_NAME = ["timeseries", "hospitalBedCapacity"]
actual_path = None
threshold = 10
class ICUBedCovidUsageTS(QAMetric):
name = "ICUBedCovidUsageTS"
METHOD_NAME = ["timeseries", "ICUBedsInUse"]
actual_path = ["actualsTimeseries", "ICUBeds", "currentUsageCovid"]
threshold = 10
class ICUBedTotalUsageTS(QAMetric):
name = "ICUBedTotalUsageTS"
METHOD_NAME = None
actual_path = ["actualsTimeseries", "ICUBeds", "currentUsageTotal"]
threshold = 10
class ICUBedTotalCapacityTS(QAMetric):
name = "ICUBedTotalCapacityTS"
METHOD_NAME = ["timeseries", "ICUBedCapacity"]
actual_path = ["actualsTimeseries", "ICUBeds", "totalCapacity"]
threshold = 10
class RtIndicatorTS(QAMetric):
name = "RtIndicatorTS"
METHOD_NAME = ["timeseries", "RtIndicator"]
actual_path = None
threshold = 0.1
@classmethod
def threshold_diff_value(cls, value1, value2):
return cls.diff(value2, value1)
class RtIndicatorCI90TS(QAMetric):
name = "RtIndicatorCI90TS"
METHOD_NAME = ["timeseries", "RtIndicatorCI90"]
actual_path = None
threshold = 0.1
@classmethod
def threshold_diff_value(cls, value1, value2):
return cls.diff(value2, value1)
class CumulativePositiveTestsTS(QAMetric):
name = "CumulativePositiveTestsTS"
METHOD_NAME = ["timeseries", "cumulativePositiveTests"]
actual_path = ["actualsTimeseries", "cumulativePositiveTests"]
threshold = 10
class CumulativeNegativeTestsTS(QAMetric):
name = "CumulativeNegativeTestsTS"
METHOD_NAME = ["timeseries", "cumulativeNegativeTests"]
actual_path = ["actualsTimeseries", "cumulativeNegativeTests"]
threshold = 10
class CumulativeInfectedTS(QAMetric):
name = "CumulativeInfectedTS"
METHOD_NAME = ["timeseries", "cumulativeInfected"]
actual_path = None
threshold = 10
class CumulativeDeathsTS(QAMetric):
name = "CumulativeDeathsTS"
METHOD_NAME = ["timeseries", "cumulativeDeaths"]
actual_path = None
threshold = 10
class CurrentTestPositiveRate(QAMetric):
name = "CurrentTestPositiveRate"
METHOD_NAME = None
actual_path = ["actuals", "cumulativePositiveTests"]
threshold = 10
class Population(QAMetric):
name = "Population"
METHOD_NAME = None
actual_path = ["actuals", "population"]
threshold = 0
class CurrentRT(QAMetric):
name = "rt"
METHOD_NAME = ["projections", "Rt"]
actual_path = None
threshold = 0.1
class HospitalShortageStartDate(QAMetric):
name = "HospitalShortageStartDate"
METHOD_NAME = ["projections", "totalHospitalBeds", "shortageStartDate"]
actual_path = None
threshold = 1
@classmethod
def threshold_diff_value(cls, value1, value2):
return cls.diff(value2, value1)
@classmethod
def diff(cls, value1, value2):
value1_date = datetime.strptime(value1, "%Y-%m-%d")
value2_date = datetime.strptime(value2, "%Y-%m-%d")
        delta = value1_date - value2_date
return abs(delta.days)
CURRENT_METRICS = [
CurrentTestPositiveRate,
Population,
CurrentRT,
HospitalShortageStartDate,
]
TIMESERIES_METRICS = [
HospitalBedsRequiredTS,
HospitalBedCapacityTS,
ICUBedCovidUsageTS,
ICUBedTotalUsageTS,
ICUBedTotalCapacityTS,
RtIndicatorTS,
RtIndicatorCI90TS,
CumulativePositiveTestsTS,
CumulativeNegativeTestsTS,
CumulativeInfectedTS,
]
|
4,356 |
test all
|
# The Hazard Library
# Copyright (C) 2012-2023 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module
:mod:`openquake.hazardlib.gsim.raghukanth_iyengar_2007_test`
defines
:class:`RaghukanthIyengar2007TestCase`
:class:`RaghukanthIyengar2007KoynaWarnaTestCase`
:class:`RaghukanthIyengar2007SouthernTestCase`
:class:`RaghukanthIyengar2007WesternCentralTestCase`
for testing of
:class:`openquake.hazardlib.gsim.raghukanth_iyengar_2007.RaghukanthIyengar2007`
and subclasses of same.
"""
import warnings
import numpy as np
from openquake.hazardlib import gsim
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
from openquake.hazardlib.gsim.raghukanth_iyengar_2007 import (
RaghukanthIyengar2007,
RaghukanthIyengar2007KoynaWarna,
RaghukanthIyengar2007Southern,
RaghukanthIyengar2007WesternCentral)
class RaghukanthIyengar2007TestCase(BaseGSIMTestCase):
"""
Mean value data obtained by digitizing Figure 5 using
http://arohatgi.info/WebPlotDigitizer/app/ .
"""
GSIM_CLASS = RaghukanthIyengar2007
MEAN_FILE = 'RAIY07/RAIY07_PI_MEAN.csv'
SIGMA_FILE = 'RAIY07/RAIY07_PI_STD_TOTAL.csv'
TOL_PERCENT = 11.
def METHOD_NAME(self):
# Ensure that means match reference dataset
self.check(self.MEAN_FILE, self.SIGMA_FILE,
max_discrep_percentage=self.TOL_PERCENT)
def test_warning(self):
# Warning should be thrown for any vs30 below limit for NEHRP class D
ctx = gsim.base.RuptureContext()
ctx.sids = np.uint32([0])
# set reasonable default values
gmpe = self.GSIM_CLASS()
ctx.mag = np.array([6.5])
ctx.rhypo = ctx.rrup = np.array([100.])
im_type = sorted(gmpe.COEFFS_BEDROCK.sa_coeffs)[0]
std_types = list(gmpe.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
# set critical value to trigger warning
ctx.vs30 = np.array([170.])
with warnings.catch_warnings(record=True) as warning_stream:
warnings.simplefilter('always')
mean = gmpe.get_mean_and_stddevs(
ctx, ctx, ctx, im_type, std_types)[0]
# confirm type and content of warning
assert len(warning_stream) == 1
assert issubclass(warning_stream[-1].category, UserWarning)
assert 'not supported' in str(warning_stream[-1].message).lower()
assert np.all(np.isnan(mean))
class RaghukanthIyengar2007KoynaWarnaTestCase(RaghukanthIyengar2007TestCase):
"""
Mean bedrock motions obtained by digitizing Figure 3 using
http://arohatgi.info/WebPlotDigitizer/app/ .
"""
GSIM_CLASS = RaghukanthIyengar2007KoynaWarna
MEAN_FILE = 'RAIY07/RAIY07_KW_MEAN.csv'
SIGMA_FILE = 'RAIY07/RAIY07_KW_STD_TOTAL.csv'
TOL_PERCENT = 1.5
class RaghukanthIyengar2007SouthernTestCase(RaghukanthIyengar2007TestCase):
"""
Mean bedrock motions obtained by digitizing Figure 3 using
http://arohatgi.info/WebPlotDigitizer/app/ .
"""
GSIM_CLASS = RaghukanthIyengar2007Southern
MEAN_FILE = 'RAIY07/RAIY07_SI_MEAN.csv'
SIGMA_FILE = 'RAIY07/RAIY07_SI_STD_TOTAL.csv'
TOL_PERCENT = 10.
class RaghukanthIyengar2007WesternCentralTestCase(
RaghukanthIyengar2007TestCase):
"""
Mean bedrock motions obtained by digitizing Figure 3 using
http://arohatgi.info/WebPlotDigitizer/app/ .
"""
GSIM_CLASS = RaghukanthIyengar2007WesternCentral
MEAN_FILE = 'RAIY07/RAIY07_WC_MEAN.csv'
SIGMA_FILE = 'RAIY07/RAIY07_WC_STD_TOTAL.csv'
TOL_PERCENT = 2.
|
4,357 |
eb transferred
|
# -*- test-case-name: twisted.names.test.test_names -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
__all__ = ["SecondaryAuthority", "SecondaryAuthorityService"]
from twisted.application import service
from twisted.internet import defer, task
from twisted.names import client, common, dns, resolve
from twisted.names.authority import FileAuthority
from twisted.python import failure, log
from twisted.python.compat import nativeString
class SecondaryAuthorityService(service.Service):
"""
A service that keeps one or more authorities up to date by doing hourly
zone transfers from a master.
@ivar primary: IP address of the master.
@type primary: L{str}
@ivar domains: An authority for each domain mirrored from the master.
@type domains: L{list} of L{SecondaryAuthority}
"""
calls = None
_port = 53
def __init__(self, primary, domains):
"""
@param primary: The IP address of the server from which to perform
zone transfers.
@type primary: L{str}
@param domains: A sequence of domain names for which to perform
zone transfers.
@type domains: L{list} of L{bytes}
"""
self.primary = nativeString(primary)
self.domains = [SecondaryAuthority(primary, d) for d in domains]
@classmethod
def fromServerAddressAndDomains(cls, serverAddress, domains):
"""
Construct a new L{SecondaryAuthorityService} from a tuple giving a
server address and a C{str} giving the name of a domain for which this
is an authority.
@param serverAddress: A two-tuple, the first element of which is a
C{str} giving an IP address and the second element of which is a
C{int} giving a port number. Together, these define where zone
transfers will be attempted from.
@param domains: Domain names for which to perform zone transfers.
@type domains: sequence of L{bytes}
@return: A new instance of L{SecondaryAuthorityService}.
"""
primary, port = serverAddress
service = cls(primary, [])
service._port = port
service.domains = [
SecondaryAuthority.fromServerAddressAndDomain(serverAddress, d)
for d in domains
]
return service
def getAuthority(self):
"""
Get a resolver for the transferred domains.
@rtype: L{ResolverChain}
"""
return resolve.ResolverChain(self.domains)
def startService(self):
service.Service.startService(self)
self.calls = [task.LoopingCall(d.transfer) for d in self.domains]
i = 0
from twisted.internet import reactor
for c in self.calls:
# XXX Add errbacks, respect proper timeouts
reactor.callLater(i, c.start, 60 * 60)
i += 1
def stopService(self):
service.Service.stopService(self)
for c in self.calls:
c.stop()
class SecondaryAuthority(FileAuthority):
"""
An Authority that keeps itself updated by performing zone transfers.
@ivar primary: The IP address of the server from which zone transfers will
be attempted.
@type primary: L{str}
@ivar _port: The port number of the server from which zone transfers will
be attempted.
@type _port: L{int}
@ivar domain: The domain for which this is the secondary authority.
@type domain: L{bytes}
@ivar _reactor: The reactor to use to perform the zone transfers, or
L{None} to use the global reactor.
"""
transferring = False
soa = records = None
_port = 53
_reactor = None
def __init__(self, primaryIP, domain):
"""
@param domain: The domain for which this will be the secondary
authority.
@type domain: L{bytes} or L{str}
"""
# Yep. Skip over FileAuthority.__init__. This is a hack until we have
# a good composition-based API for the complicated DNS record lookup
# logic we want to share.
common.ResolverBase.__init__(self)
self.primary = nativeString(primaryIP)
self.domain = dns.domainString(domain)
@classmethod
def fromServerAddressAndDomain(cls, serverAddress, domain):
"""
Construct a new L{SecondaryAuthority} from a tuple giving a server
address and a C{bytes} giving the name of a domain for which this is an
authority.
@param serverAddress: A two-tuple, the first element of which is a
C{str} giving an IP address and the second element of which is a
C{int} giving a port number. Together, these define where zone
transfers will be attempted from.
@param domain: A C{bytes} giving the domain to transfer.
@type domain: L{bytes}
@return: A new instance of L{SecondaryAuthority}.
"""
primary, port = serverAddress
secondary = cls(primary, domain)
secondary._port = port
return secondary
def transfer(self):
"""
Attempt a zone transfer.
@returns: A L{Deferred} that fires with L{None} when attempted zone
transfer has completed.
"""
# FIXME: This logic doesn't avoid duplicate transfers
# https://twistedmatrix.com/trac/ticket/9754
        if self.transferring:
            # A transfer is already in progress; return an already-fired
            # Deferred so callers such as update() still receive a Deferred.
            return defer.succeed(None)
        self.transferring = True
reactor = self._reactor
if reactor is None:
from twisted.internet import reactor
resolver = client.Resolver(
servers=[(self.primary, self._port)], reactor=reactor
)
return (
resolver.lookupZone(self.domain)
.addCallback(self._cbZone)
.addErrback(self._ebZone)
)
def _lookup(self, name, cls, type, timeout=None):
if not self.soa or not self.records:
# No transfer has occurred yet. Fail non-authoritatively so that
# the caller can try elsewhere.
return defer.fail(failure.Failure(dns.DomainError(name)))
return FileAuthority._lookup(self, name, cls, type, timeout)
def _cbZone(self, zone):
ans, _, _ = zone
self.records = r = {}
for rec in ans:
if not self.soa and rec.type == dns.SOA:
self.soa = (rec.name.name.lower(), rec.payload)
else:
r.setdefault(rec.name.name.lower(), []).append(rec.payload)
def _ebZone(self, failure):
log.msg(
"Updating %s from %s failed during zone transfer"
% (self.domain, self.primary)
)
log.err(failure)
def update(self):
self.transfer().addCallbacks(self._cbTransferred, self.METHOD_NAME)
def _cbTransferred(self, result):
self.transferring = False
def METHOD_NAME(self, failure):
        self.transferring = False
log.msg(
"Transferring %s from %s failed after zone transfer"
% (self.domain, self.primary)
)
log.err(failure)
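# A minimal usage sketch, not part of the module above; the master address,
# port, and domain names are assumed placeholders. It only exercises the
# constructors and accessors defined above.
def _exampleSecondaryService():  # pragma: no cover - illustration only
    svc = SecondaryAuthorityService.fromServerAddressAndDomains(
        ("10.0.0.53", 53), [b"example.com", b"example.org"]
    )
    application = service.Application("secondary-dns")
    svc.setServiceParent(application)
    return svc.getAuthority()  # ResolverChain over the mirrored zones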
|
4,358 |
test try busy
|
from contextlib import contextmanager
from multiprocessing import get_all_start_methods
from multiprocessing import get_start_method
from multiprocessing import Pipe
from multiprocessing import Process
from multiprocessing import set_start_method
from time import sleep
from django.db import transaction
from django.test.testcases import SimpleTestCase
from django_concurrent_tests.management.commands.concurrent_call_wrapper import use_test_databases
from mock import mock
from mock import patch
from pytest import mark
from pytest import raises
from contentcuration.db.advisory_lock import advisory_lock
from contentcuration.db.advisory_lock import AdvisoryLockBusy
from contentcuration.db.advisory_lock import execute_lock
from contentcuration.db.advisory_lock import try_advisory_lock
TEST_LOCK = 1337
# flake8 doesn't like the parameterized formatting
# flake8: noqa
@mark.parametrize("key1, key2, unlock, session, shared, wait, expected_query", [
# transaction level
(1, None, False, False, False, True, "SELECT pg_advisory_xact_lock(%s) AS lock;"),
(3, None, False, False, True, True, "SELECT pg_advisory_xact_lock_shared(%s) AS lock;"),
(4, None, False, False, True, False, "SELECT pg_try_advisory_xact_lock_shared(%s) AS lock;"),
(5, None, False, False, False, False, "SELECT pg_try_advisory_xact_lock(%s) AS lock;"),
(6, 1, False, False, False, True, "SELECT pg_advisory_xact_lock(%s, %s) AS lock;"),
(7, 2, False, False, True, True, "SELECT pg_advisory_xact_lock_shared(%s, %s) AS lock;"),
(8, 3, False, False, True, False, "SELECT pg_try_advisory_xact_lock_shared(%s, %s) AS lock;"),
(9, 4, False, False, False, False, "SELECT pg_try_advisory_xact_lock(%s, %s) AS lock;"),
# session level
(10, None, False, True, False, True, "SELECT pg_advisory_lock(%s) AS lock;"),
(11, None, True, True, False, True, "SELECT pg_advisory_unlock(%s) AS lock;"),
(12, None, False, True, True, True, "SELECT pg_advisory_lock_shared(%s) AS lock;"),
(13, None, True, True, True, True, "SELECT pg_advisory_unlock_shared(%s) AS lock;"),
(14, None, False, True, False, False, "SELECT pg_try_advisory_lock(%s) AS lock;"),
(15, None, True, True, False, False, "SELECT pg_try_advisory_unlock(%s) AS lock;"),
(16, None, False, True, True, False, "SELECT pg_try_advisory_lock_shared(%s) AS lock;"),
(17, None, True, True, True, False, "SELECT pg_try_advisory_unlock_shared(%s) AS lock;"),
(18, 1, False, True, False, True, "SELECT pg_advisory_lock(%s, %s) AS lock;"),
(19, 2, True, True, False, True, "SELECT pg_advisory_unlock(%s, %s) AS lock;"),
(20, 3, False, True, True, True, "SELECT pg_advisory_lock_shared(%s, %s) AS lock;"),
(21, 4, True, True, True, True, "SELECT pg_advisory_unlock_shared(%s, %s) AS lock;"),
(22, 5, False, True, False, False, "SELECT pg_try_advisory_lock(%s, %s) AS lock;"),
(23, 6, True, True, False, False, "SELECT pg_try_advisory_unlock(%s, %s) AS lock;"),
(24, 7, False, True, True, False, "SELECT pg_try_advisory_lock_shared(%s, %s) AS lock;"),
(25, 8, True, True, True, False, "SELECT pg_try_advisory_unlock_shared(%s, %s) AS lock;"),
])
def test_execute_lock(key1, key2, unlock, session, shared, wait, expected_query):
with patch("contentcuration.db.advisory_lock.connection") as conn:
cursor = mock.Mock()
conn.cursor.return_value.__enter__.return_value = cursor
conn.in_atomic_block.return_value = not session
cursor.execute.return_value = True
with execute_lock(key1, key2=key2, unlock=unlock, session=session, shared=shared, wait=wait) as c:
assert c == cursor
expected_params = [key1]
if key2 is not None:
expected_params.append(key2)
query, params = cursor.execute.call_args_list[0][0]
assert query == expected_query
assert params == expected_params
@mark.parametrize("unlock, in_atomic_block", [
(False, False),
(True, False),
(True, True),
])
def test_execute_lock__not_implemented(unlock, in_atomic_block):
with patch("contentcuration.db.advisory_lock.connection") as conn:
conn.in_atomic_block = in_atomic_block
with raises(NotImplementedError):
with execute_lock(99, key2=99, unlock=unlock, session=False, shared=False, wait=False):
pass
START_SIGNAL = 'START_SIGNAL'
END_SIGNAL = 'END_SIGNAL'
SLEEP_SEC = 0.1
def wait_for(conn, signal):
while True:
msg = conn.recv()
if msg == signal:
break
sleep(SLEEP_SEC)
def child_lock(conn, shared):
# make sure we're connecting to the test database
use_test_databases()
with transaction.atomic():
advisory_lock(TEST_LOCK, shared=shared)
sleep(SLEEP_SEC)
conn.send(START_SIGNAL)
wait_for(conn, END_SIGNAL)
# set to spawn, otherwise process would inherit connections, meaning queries would still be in
# the same transaction. If we can't use spawn, then we'll mark the test skipped
skipped = True
start_method = get_start_method(allow_none=True)
if start_method == "spawn":
skipped = False
elif start_method is None and "spawn" in get_all_start_methods():
set_start_method("spawn")
skipped = False
@mark.skipif(skipped, reason="Requires spawn capability")
class AdvisoryLockDatabaseTest(SimpleTestCase):
"""
Test case that creates simultaneous locking situations
"""
# this test manages its own transactions
allow_database_queries = True
databases = ["default"]
@contextmanager
def child_lock(self, shared=False):
parent_conn, child_conn = Pipe()
p = Process(target=child_lock, args=(child_conn, shared))
p.start()
try:
with transaction.atomic():
wait_for(parent_conn, START_SIGNAL)
yield parent_conn
finally:
parent_conn.send(END_SIGNAL)
p.join(2)
@mark.timeout(30)
def test_shared(self):
with self.child_lock(shared=True):
# this won't raise an error because shared mode should allow
# both locks simultaneously
try_advisory_lock(TEST_LOCK, shared=True)
@mark.timeout(30)
def METHOD_NAME(self):
with self.child_lock(shared=False):
# since the lock should already be acquired, this will raise the error
with raises(AdvisoryLockBusy):
try_advisory_lock(TEST_LOCK)
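# A hedged usage sketch (not one of the tests above) based only on the calls
# exercised in this module; SOME_KEY is a made-up lock key.
def _example_locking():  # pragma: no cover - illustration only
    SOME_KEY = 42
    with transaction.atomic():
        # Blocks until the transaction-level lock is granted; it is released
        # automatically when the transaction ends.
        advisory_lock(SOME_KEY)
    with transaction.atomic():
        try:
            # Fails fast instead of waiting if another session holds the lock.
            try_advisory_lock(SOME_KEY)
        except AdvisoryLockBusy:
            pass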
|
4,359 |
main
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests colabs using Jupyter notebook."""
from __future__ import print_function
import os
import sys
from absl import app
from absl import flags
from absl import logging
from nbconvert.preprocessors import CellExecutionError
from nbconvert.preprocessors import ExecutePreprocessor
import nbformat
flags.DEFINE_string('single_colab', None, 'Path to a single colab to run.')
flags.DEFINE_string(
'output_dir',
'/tmp/notebook_tests',
'Full path for executed notebooks and artifacts.',
)
flags.DEFINE_boolean(
'debug', False, 'Debug logging if true. Otherwise info only.'
)
flags.DEFINE_boolean(
'override_pip_install_agents',
True,
'If true a replace is done to prevent notebooks from '
'installing tf-agents (often tf-agents-nightly)',
)
FLAGS = flags.FLAGS
def execute_test(file_path, result_path):
"""Executes a single notebook.
Args:
file_path: Path to the notebook to execute.
result_path: Path to store the resulting notebook.
Returns:
bool: True if the notebook does not have any errors, False otherwise.
Raises:
Exception if an unexpected error occurs executing the notebook.
"""
try:
with open(file_path, 'r') as f:
filedata = f.read()
if FLAGS.override_pip_install_agents:
# Replaces pip install tf-agents with a noop. If this gets any bigger,
# refactor
filedata = filedata.replace(
'pip install tf-agents-nightly[reverb]', 'pip --version'
)
filedata = filedata.replace(
'pip install tf-agents-nightly', 'pip --version'
)
filedata = filedata.replace(
'pip install tf-agents[reverb]', 'pip --version'
)
filedata = filedata.replace(
'pip install --pre tf-agents[reverb]', 'pip --version'
)
filedata = filedata.replace('pip install tf-agents', 'pip --version')
filedata = filedata.replace(
'pip install --pre tf-agents', 'pip --version'
)
nb = nbformat.reads(filedata, as_version=4)
ep = ExecutePreprocessor(timeout=3600, kernel_name='python3')
try:
ep.preprocess(nb, {'metadata': {'path': FLAGS.output_dir}})
except CellExecutionError as cex:
logging.error('ERROR executing:%s', file_path)
logging.error(cex)
return False
with open(result_path, 'w', encoding='utf-8') as fo:
nbformat.write(nb, fo)
return True
except Exception as e: # pylint: disable=W0703
logging.error('Unexpected ERROR: in %s', file_path)
logging.error(e)
def get_test_suite():
"""Returns list of all notebooks to run."""
colab_path = './'
test_notebooks = []
for dirpath, _, filenames in os.walk(colab_path):
for filename in filenames:
if filename.endswith('ipynb'):
if '7_SAC_minitaur_tutorial.ipynb' in filename:
logging.info(
'Skipping 7_SAC_minitaur_tutorial.ipynb. It takes 8 hours to run.'
)
continue
test_notebooks.append(os.path.join(dirpath, filename))
else:
logging.debug('Skipping non-notebook file:%s', filename)
continue
return test_notebooks
def run():
"""Runs all notebooks and reports results."""
os.makedirs(FLAGS.output_dir, exist_ok=True)
if FLAGS.single_colab:
filenames = [FLAGS.single_colab]
else:
filenames = get_test_suite()
passed = []
failed = []
filenames.sort()
for filename in filenames:
logging.info('Testing %s ...', filename)
result_path = os.path.join(
FLAGS.output_dir, 'executed_' + os.path.basename(filename)
)
if execute_test(filename, result_path):
passed.append(filename)
else:
failed.append(filename)
logging.info('\n\n################# Report #################')
logging.info('%d passed, %d failed', len(passed), len(failed))
for p_result in passed:
logging.info('%s OK', p_result)
for f_result in failed:
logging.info('%s FAILED', f_result)
if failed:
sys.exit(1)
def METHOD_NAME(_):
logging.set_verbosity(logging.INFO)
if FLAGS.debug:
logging.set_verbosity(logging.DEBUG)
run()
if __name__ == '__main__':
app.run(METHOD_NAME)
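# Hedged invocation sketch; the script filename is assumed, the flag names come
# from the flag definitions above:
#   python colab_tests.py --single_colab=./some_tutorial.ipynb \
#       --output_dir=/tmp/notebook_tests --nooverride_pip_install_agents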
|
4,360 |
delete objects
|
import io
import logging
from minio import Minio
from minio.error import InvalidResponseError
from urllib3.poolmanager import PoolManager
from .base import Repository
logger = logging.getLogger(__name__)
class MINIORepository(Repository):
"""
"""
client = None
def __init__(self, config):
super().__init__()
try:
access_key = config['storage_access_key']
except Exception:
access_key = 'minio'
try:
secret_key = config['storage_secret_key']
except Exception:
secret_key = 'minio123'
try:
self.bucket = config['storage_bucket']
except Exception:
self.bucket = 'fedn-models'
try:
self.context_bucket = config['context_bucket']
except Exception:
            self.context_bucket = 'fedn-context'
try:
self.secure_mode = bool(config['storage_secure_mode'])
except Exception:
self.secure_mode = False
if not self.secure_mode:
print(
"\n\n\nWARNING : S3/MINIO RUNNING IN **INSECURE** MODE! THIS IS NOT FOR PRODUCTION!\n\n\n")
if self.secure_mode:
manager = PoolManager(
num_pools=100, cert_reqs='CERT_NONE', assert_hostname=False)
self.client = Minio("{0}:{1}".format(config['storage_hostname'], config['storage_port']),
access_key=access_key,
secret_key=secret_key,
secure=self.secure_mode, http_client=manager)
else:
self.client = Minio("{0}:{1}".format(config['storage_hostname'], config['storage_port']),
access_key=access_key,
secret_key=secret_key,
secure=self.secure_mode)
# TODO: generalize
self.context_bucket = 'fedn-context'
self.create_bucket(self.context_bucket)
self.create_bucket(self.bucket)
def create_bucket(self, bucket_name):
"""
:param bucket_name:
"""
found = self.client.bucket_exists(bucket_name)
if not found:
try:
self.client.make_bucket(bucket_name)
except InvalidResponseError:
raise
def set_artifact(self, instance_name, instance, is_file=False, bucket=''):
""" Instance must be a byte-like object. """
if bucket == '':
bucket = self.bucket
if is_file:
self.client.fput_object(bucket, instance_name, instance)
else:
try:
self.client.put_object(
bucket, instance_name, io.BytesIO(instance), len(instance))
except Exception as e:
raise Exception("Could not load data into bytes {}".format(e))
return True
def get_artifact(self, instance_name, bucket=''):
"""
:param instance_name:
:param bucket:
:return:
"""
if bucket == '':
bucket = self.bucket
try:
data = self.client.get_object(bucket, instance_name)
return data.read()
except Exception as e:
raise Exception("Could not fetch data from bucket, {}".format(e))
def get_artifact_stream(self, instance_name):
"""
:param instance_name:
:return:
"""
try:
data = self.client.get_object(self.bucket, instance_name)
return data
except Exception as e:
raise Exception("Could not fetch data from bucket, {}".format(e))
def list_artifacts(self):
"""
:return:
"""
objects_to_delete = []
try:
objs = self.client.list_objects(self.bucket)
for obj in objs:
print(obj.object_name)
objects_to_delete.append(obj.object_name)
except Exception:
raise Exception(
"Could not list models in bucket {}".format(self.bucket))
return objects_to_delete
    def delete_artifact(self, instance_name, bucket=''):
        """Delete a single artifact from a bucket.
        :param instance_name: Object name of the artifact to delete.
        :param bucket: Bucket to delete from; defaults to the model bucket.
        """
if not bucket:
bucket = self.bucket
try:
self.client.remove_object(bucket, instance_name)
except InvalidResponseError as err:
print(err)
print('Could not delete artifact: {}'.format(instance_name))
def METHOD_NAME(self):
"""
"""
objects_to_delete = self.list_artifacts()
try:
# Remove list of objects.
errors = self.client.remove_objects(
self.bucket, objects_to_delete
)
for del_err in errors:
print("Deletion Error: {}".format(del_err))
except Exception:
print('Could not delete objects: {}'.format(objects_to_delete))
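# A minimal usage sketch, not part of the class above; every configuration
# value below is an assumed placeholder.
if __name__ == "__main__":  # pragma: no cover - illustration only
    example_config = {
        'storage_access_key': 'minio',
        'storage_secret_key': 'minio123',
        'storage_bucket': 'fedn-models',
        'context_bucket': 'fedn-context',
        'storage_secure_mode': False,
        'storage_hostname': 'localhost',
        'storage_port': 9000,
    }
    repo = MINIORepository(example_config)
    repo.set_artifact('model-1.bin', b'\x00\x01\x02')
    restored = repo.get_artifact('model-1.bin')
    assert restored == b'\x00\x01\x02'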
|
4,361 |
config options string
|
from builtins import str
from collections import OrderedDict
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.contrib.auth.models import User
import simplejson as json
from tendenci.apps.memberships.models import MembershipType
from tendenci.apps.perms.models import TendenciBaseModel
from tendenci.apps.invoices.models import Invoice
from tendenci.apps.reports.utils import get_ct_nice_name
REPORT_TYPE_CHOICES = (
('invoices', "Invoices"),
)
CONFIG_OPTIONS = {
'invoice_display': {
"label": "Which invoices",
"options": OrderedDict(sorted({
'all': {
"label": 'All Invoices',
"filter": {}
},
'no-balance': {
"label": 'No Open Balance',
"filter": {"balance": 0}
},
'has-balance': {
"label": 'Has an Open Balance',
"filter": {"balance__gt": 0}
}
}.items()))
},
'invoice_status': {
"label": "What Status",
"options": OrderedDict(sorted({
'all': {
"label": 'All Statuses',
"filter": {}
},
'no-balance': {
"label": 'Only Estimates',
"filter": {"status_detail__iexact": "estimate"}
},
'has-balance': {
"label": 'Only Tendered',
"filter": {"status_detail__iexact": "tendered"}
}
}.items()))
}
}
class Report(TendenciBaseModel):
"""
A Report represents a set of configurations for reporting
on data from other models.
"""
type = models.CharField(max_length=100)
config = models.TextField(blank=True)
class Meta:
verbose_name = _('Report')
verbose_name_plural = _('Reports')
def __str__(self):
return "%s %s " % (self.pk, str(self.type))
def get_absolute_url(self):
return reverse('report_detail', args=[self.pk])
def runs(self):
return Run.objects.filter(report=self).order_by('-create_dt')
def config_options_dict(self):
if self.config:
return json.loads(self.config)
return u''
def config_options(self):
if self.config:
options = json.loads(self.config)
output = []
for opt_key, opt_val in options.items():
if opt_key in CONFIG_OPTIONS:
config_option = CONFIG_OPTIONS[opt_key]
config_dict = {
'label': config_option['label'],
'value': config_option['options'][opt_val]['label']
}
output.append(config_dict)
elif opt_key == "invoice_object_type":
value = ", ".join(sorted([get_ct_nice_name(i) for i in opt_val]))
if sorted(opt_val) == sorted([str(i['object_type']) for i in Invoice.objects.values('object_type').distinct()]):
value = "All Apps"
config_dict = {
'label': "Which Apps",
'value': value
}
output.append(config_dict)
elif opt_key == "invoice_membership_filter":
try:
item = MembershipType.objects.get(pk=opt_val)
output.append({
'label': 'Membership Filter',
'value': '%s members only' % item.name
})
                    except Exception:
pass
return output
return u''
def METHOD_NAME(self):
if self.config_options():
return '; '.join([i['value'] for i in self.config_options()])
return u''
RUN_STATUS_CHOICES = (
('unstarted', 'Unstarted'),
('running', 'Running'),
('complete', 'Complete'),
('error', 'Error'),
)
RUN_TYPE_CHOICES = (
('html', 'HTML'),
('html-extended', 'HTML Extended'),
('html-summary', 'HTML Summary')
)
class Run(models.Model):
"""
A Run tracks the start, end, and output of generating
the results from a Report object.
A Report can be 'run' multiple times with different
range start and end times as well as output in different
modes like HTML or PDF.
"""
report = models.ForeignKey(Report, on_delete=models.CASCADE)
create_dt = models.DateTimeField(auto_now_add=True)
start_dt = models.DateTimeField(null=True)
complete_dt = models.DateTimeField(null=True)
range_start_dt = models.DateTimeField(null=True)
range_end_dt = models.DateTimeField(null=True)
creator = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
creator_username = models.CharField(max_length=200, default="", blank=True)
status = models.CharField(choices=RUN_STATUS_CHOICES, max_length=20, default="unstarted")
output = models.TextField(blank=True)
output_type = models.CharField(choices=RUN_TYPE_CHOICES, max_length=20, default="html")
class Meta:
verbose_name = _('Run')
verbose_name_plural = _('Runs')
def __str__(self):
return "Run %s for report %s" % (self.pk, self.report.pk)
def get_absolute_url(self):
return reverse('report_run_detail', args=[self.report.pk, self.pk])
def get_output_url(self):
return reverse('report_run_output', args=[self.report.pk, self.pk])
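# A hedged illustration of how a Report's JSON config flows through
# CONFIG_OPTIONS; the option values below are made up but use keys defined
# above, and the commented output is what config_options() would build.
def _example_report_config():  # pragma: no cover - illustration only
    report = Report(type='invoices', config=json.dumps({
        'invoice_display': 'has-balance',
        'invoice_status': 'all',
    }))
    # report.config_options() ->
    #   [{'label': 'Which invoices', 'value': 'Has an Open Balance'},
    #    {'label': 'What Status', 'value': 'All Statuses'}]
    # joining the values gives: 'Has an Open Balance; All Statuses'
    return report.config_options()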
|
4,362 |
get file
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
import datetime
import time
import pytz
from apache_beam.io.aws.clients.s3 import messages
class FakeFile(object):
def __init__(self, bucket, key, contents, etag=None):
self.bucket = bucket
self.key = key
self.contents = contents
self.last_modified = time.time()
if not etag:
self.etag = '"%s-1"' % ('x' * 32)
else:
self.etag = etag
def get_metadata(self):
last_modified_datetime = None
if self.last_modified:
last_modified_datetime = datetime.datetime.fromtimestamp(
self.last_modified, pytz.utc)
return messages.Item(
self.etag,
self.key,
last_modified_datetime,
len(self.contents),
mime_type=None)
class FakeS3Client(object):
def __init__(self):
self.files = {}
self.list_continuation_tokens = {}
self.multipart_uploads = {}
# boto3 has different behavior when running some operations against a bucket
# that exists vs. against one that doesn't. To emulate that behavior, the
# mock client keeps a set of bucket names that it knows "exist".
self.known_buckets = set()
def add_file(self, f):
self.files[(f.bucket, f.key)] = f
if f.bucket not in self.known_buckets:
self.known_buckets.add(f.bucket)
def METHOD_NAME(self, bucket, obj):
try:
return self.files[bucket, obj]
    except KeyError:
raise messages.S3ClientError('Not Found', 404)
def delete_file(self, bucket, obj):
del self.files[(bucket, obj)]
def get_object_metadata(self, request):
r"""Retrieves an object's metadata.
Args:
request: (GetRequest) input message
Returns:
(Item) The response message.
"""
# TODO: Do we want to mock out a lack of credentials?
file_ = self.METHOD_NAME(request.bucket, request.object)
return file_.get_metadata()
def list(self, request):
bucket = request.bucket
prefix = request.prefix or ''
matching_files = []
for file_bucket, file_name in sorted(iter(self.files)):
if bucket == file_bucket and file_name.startswith(prefix):
file_object = self.METHOD_NAME(file_bucket, file_name).get_metadata()
matching_files.append(file_object)
if not matching_files:
message = 'Tried to list nonexistent S3 path: s3://%s/%s' % (
bucket, prefix)
raise messages.S3ClientError(message, 404)
# Handle pagination.
items_per_page = 5
if not request.continuation_token:
range_start = 0
else:
if request.continuation_token not in self.list_continuation_tokens:
raise ValueError('Invalid page token.')
range_start = self.list_continuation_tokens[request.continuation_token]
del self.list_continuation_tokens[request.continuation_token]
result = messages.ListResponse(
items=matching_files[range_start:range_start + items_per_page])
if range_start + items_per_page < len(matching_files):
next_range_start = range_start + items_per_page
next_continuation_token = '_page_token_%s_%s_%d' % (
bucket, prefix, next_range_start)
self.list_continuation_tokens[next_continuation_token] = next_range_start
result.next_token = next_continuation_token
return result
def get_range(self, request, start, end):
r"""Retrieves an object.
Args:
request: (GetRequest) request
Returns:
(bytes) The response message.
"""
file_ = self.METHOD_NAME(request.bucket, request.object)
# Replicates S3's behavior, per the spec here:
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35
if start < 0 or end <= start:
return file_.contents
return file_.contents[start:end]
def delete(self, request):
if request.bucket not in self.known_buckets:
raise messages.S3ClientError('The specified bucket does not exist', 404)
if (request.bucket, request.object) in self.files:
self.delete_file(request.bucket, request.object)
else:
# S3 doesn't raise an error if you try to delete a nonexistent file from
# an extant bucket
return
def delete_batch(self, request):
deleted, failed, errors = [], [], []
for object in request.objects:
try:
delete_request = messages.DeleteRequest(request.bucket, object)
self.delete(delete_request)
deleted.append(object)
except messages.S3ClientError as e:
failed.append(object)
errors.append(e)
return messages.DeleteBatchResponse(deleted, failed, errors)
def copy(self, request):
src_file = self.METHOD_NAME(request.src_bucket, request.src_key)
dest_file = FakeFile(
request.dest_bucket, request.dest_key, src_file.contents)
self.add_file(dest_file)
def create_multipart_upload(self, request):
# Create hash of bucket and key
# Store upload_id internally
upload_id = request.bucket + request.object
self.multipart_uploads[upload_id] = {}
return messages.UploadResponse(upload_id)
def upload_part(self, request):
# Save off bytes passed to internal data store
upload_id, part_number = request.upload_id, request.part_number
if part_number < 0 or not isinstance(part_number, int):
raise messages.S3ClientError(
'Param validation failed on part number', 400)
if upload_id not in self.multipart_uploads:
raise messages.S3ClientError('The specified upload does not exist', 404)
self.multipart_uploads[upload_id][part_number] = request.bytes
etag = '"%s"' % ('x' * 32)
return messages.UploadPartResponse(etag, part_number)
def complete_multipart_upload(self, request):
MIN_PART_SIZE = 5 * 2**10 # 5 KiB
parts_received = self.multipart_uploads[request.upload_id]
# Check that we got all the parts that they intended to send
part_numbers_to_confirm = set(part['PartNumber'] for part in request.parts)
# Make sure all the expected parts are present
if part_numbers_to_confirm != set(parts_received.keys()):
raise messages.S3ClientError(
'One or more of the specified parts could not be found', 400)
# Sort by part number
sorted_parts = sorted(parts_received.items(), key=lambda pair: pair[0])
sorted_bytes = [bytes_ for (_, bytes_) in sorted_parts]
# Make sure that the parts aren't too small (except the last part)
part_sizes = [len(bytes_) for bytes_ in sorted_bytes]
if any(size < MIN_PART_SIZE for size in part_sizes[:-1]):
e_message = """
All parts but the last must be larger than %d bytes
""" % MIN_PART_SIZE
raise messages.S3ClientError(e_message, 400)
# String together all bytes for the given upload
final_contents = b''.join(sorted_bytes)
# Create FakeFile object
num_parts = len(parts_received)
etag = '"%s-%d"' % ('x' * 32, num_parts)
file_ = FakeFile(request.bucket, request.object, final_contents, etag=etag)
# Store FakeFile in self.files
self.add_file(file_)
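# A hedged sketch of the fake client's list pagination (bucket and key names
# are made up); it assumes messages.ListRequest takes bucket/prefix arguments,
# which is how the mock's list() reads its request object.
def _example_pagination():  # pragma: no cover - illustration only
  client = FakeS3Client()
  for i in range(7):
    client.add_file(FakeFile('some-bucket', 'prefix/key%d' % i, b'data'))
  request = messages.ListRequest(bucket='some-bucket', prefix='prefix/')
  first_page = client.list(request)  # at most 5 items per page
  request.continuation_token = first_page.next_token
  second_page = client.list(request)  # the remaining 2 items
  return first_page, second_page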
|
4,363 |
test build results with index
|
import pytest
import xdsl.dialects.pdl as pdl
from xdsl.dialects.builtin import ArrayAttr, IntegerAttr, StringAttr, i32, i64
from xdsl.ir import Block
from xdsl.utils.exceptions import VerifyException
from xdsl.utils.test_value import TestSSAValue
type_type = pdl.TypeType()
attribute_type = pdl.AttributeType()
value_type = pdl.ValueType()
operation_type = pdl.OperationType()
block = Block(
arg_types=[
type_type,
attribute_type,
value_type,
operation_type,
]
)
type_val, attr_val, val_val, op_val = block.args
def test_build_anc():
anc = pdl.ApplyNativeConstraintOp("anc", [type_val])
assert anc.attributes["name"] == StringAttr("anc")
assert anc.args == (type_val,)
def test_build_anr():
anr = pdl.ApplyNativeRewriteOp("anr", [type_val], [attribute_type])
assert anr.attributes["name"] == StringAttr("anr")
assert anr.args == (type_val,)
assert len(anr.results) == 1
assert [r.type for r in anr.results] == [attribute_type]
def test_build_rewrite():
r = pdl.RewriteOp(
name="r", root=None, external_args=[type_val, attr_val], body=None
)
assert r.attributes["name"] == StringAttr("r")
assert r.external_args == (type_val, attr_val)
assert len(r.results) == 0
assert r.body is None
r1 = pdl.RewriteOp(name="r", root=None, external_args=[type_val, attr_val])
assert r1.body is not None
def test_build_operation_replace():
operation = pdl.OperationOp(
op_name="operation",
attribute_value_names=ArrayAttr([StringAttr("name")]),
operand_values=[val_val],
attribute_values=[attr_val],
type_values=[type_val],
)
assert operation.opName == StringAttr("operation")
assert operation.attributeValueNames == ArrayAttr([StringAttr("name")])
assert operation.operand_values == (val_val,)
assert operation.attribute_values == (attr_val,)
assert operation.type_values == (type_val,)
replace = pdl.ReplaceOp(op_value=op_val, repl_operation=operation.results[0])
replace.verify()
assert replace.op_value == op_val
assert replace.repl_operation == operation.results[0]
assert replace.repl_values == ()
replace = pdl.ReplaceOp(op_value=op_val, repl_values=[val_val])
replace.verify()
assert replace.op_value == op_val
assert replace.repl_operation is None
assert replace.repl_values == (val_val,)
with pytest.raises(VerifyException):
replace = pdl.ReplaceOp(op_value=op_val)
replace.verify()
with pytest.raises(VerifyException):
replace = pdl.ReplaceOp(
op_value=op_val, repl_operation=operation.results[0], repl_values=[val_val]
)
replace.verify()
def test_build_result():
res = pdl.ResultOp(IntegerAttr.from_int_and_width(1, 32), parent=op_val)
assert res.index == IntegerAttr.from_int_and_width(1, 32)
assert res.parent_ == op_val
def test_build_results():
res = pdl.ResultsOp(op_val)
assert res.parent_ == op_val
assert res.index is None
assert res.val.type == pdl.RangeType(pdl.ValueType())
def METHOD_NAME():
res = pdl.ResultsOp(op_val, IntegerAttr.from_int_and_width(1, 32))
assert res.parent_ == op_val
assert res.index == IntegerAttr.from_int_and_width(1, 32)
assert res.val.type == pdl.RangeType(pdl.ValueType())
def test_build_results_with_index_and_type():
res = pdl.ResultsOp(op_val, IntegerAttr.from_int_and_width(1, 32), pdl.ValueType())
assert res.parent_ == op_val
assert res.index == IntegerAttr.from_int_and_width(1, 32)
assert res.val.type == pdl.ValueType()
def test_build_type():
pdl_type = pdl.TypeOp()
assert pdl_type.constantType is None
pdl_type = pdl.TypeOp(i32)
assert pdl_type.constantType == i32
def test_build_types():
pdl_type = pdl.TypesOp()
assert pdl_type.constantTypes is None
pdl_type = pdl.TypesOp((i32, i64, i32))
assert pdl_type.constantTypes == ArrayAttr((i32, i64, i32))
def test_build_operand():
operand = pdl.OperandOp(val_val)
assert operand.value_type == val_val
def test_range():
val1 = TestSSAValue(pdl.ValueType())
val2 = TestSSAValue(pdl.RangeType(pdl.ValueType()))
val3 = TestSSAValue(pdl.ValueType())
range_op = pdl.RangeOp((val1, val2, val3))
assert range_op.arguments == (val1, val2, val3)
assert range_op.result.type == pdl.RangeType(pdl.ValueType())
def test_empty_range():
return_type = pdl.RangeType(pdl.ValueType())
empty_range = pdl.RangeOp((), return_type)
assert len(empty_range.arguments) == 0
assert empty_range.result.type == return_type
def test_range_cannot_infer():
with pytest.raises(ValueError):
pdl.RangeOp(()) # Cannot infer return type
|
4,364 |
wsgi app
|
# pytest configuration for weasyl db test fixture.
# The filename conftest.py is magical, do not change.
import errno
import json
import os
import shutil
import pytest
import pyramid.testing
from sqlalchemy.dialects.postgresql import psycopg2
from webtest import TestApp as TestApp_
from weasyl import config
config._in_test = True # noqa
from libweasyl import cache
from libweasyl.cache import ThreadCacheProxy
from libweasyl.configuration import configure_libweasyl
from libweasyl.models.tables import metadata
from weasyl import (
commishinfo,
define,
emailer,
macro,
media,
middleware,
)
from weasyl.controllers.routes import setup_routes_and_views
from weasyl.wsgi import make_wsgi_app
cache.region.configure(
'dogpile.cache.memory',
wrap=[ThreadCacheProxy],
)
define.metric = lambda *a, **kw: None
configure_libweasyl(
dbsession=define.sessionmaker,
base_file_path=macro.MACRO_STORAGE_ROOT,
staff_config_dict={},
media_link_formatter_callback=media.format_media_link,
)
@pytest.fixture(scope='session', autouse=True)
def setupdb(request):
define.engine.execute('DROP SCHEMA public CASCADE')
define.engine.execute('CREATE SCHEMA public')
define.engine.execute('CREATE EXTENSION HSTORE')
define.engine.execute('CREATE EXTENSION FUZZYSTRMATCH')
# hstore oids changed; de-memoize them and create new connections
define.engine.dialect._hstore_oids = psycopg2.PGDialect_psycopg2._hstore_oids.__get__(define.engine.dialect)
define.engine.dispose()
define.meta.create_all(define.engine)
@pytest.fixture(autouse=True)
def empty_storage():
try:
os.mkdir(macro.MACRO_STORAGE_ROOT)
except OSError as e:
if e.errno == errno.EEXIST:
raise Exception("Storage directory should not exist when running tests")
raise
os.mkdir(macro.MACRO_SYS_LOG_PATH)
os.mkdir(os.path.join(macro.MACRO_STORAGE_ROOT, 'static'))
os.mkdir(os.path.join(macro.MACRO_STORAGE_ROOT, 'static', 'media'))
os.symlink('ad', os.path.join(macro.MACRO_STORAGE_ROOT, 'static', 'media', 'ax'))
try:
yield
finally:
shutil.rmtree(macro.MACRO_STORAGE_ROOT)
@pytest.fixture(autouse=True)
def setup_request_environment(request):
pyramid_request = pyramid.testing.DummyRequest()
pyramid_request.set_property(middleware.pg_connection_request_property, name='pg_connection', reify=True)
pyramid_request.set_property(middleware.userid_request_property, name='userid', reify=True)
pyramid_request.web_input = middleware.web_input_request_method
pyramid_request.environ['HTTP_X_FORWARDED_FOR'] = '127.0.0.1'
pyramid_request.client_addr = '127.0.0.1'
setup_routes_and_views(pyramid.testing.setUp(request=pyramid_request))
def tear_down():
pyramid_request.pg_connection.close()
pyramid.testing.tearDown()
request.addfinalizer(tear_down)
@pytest.fixture(autouse=True)
def lower_bcrypt_rounds(monkeypatch):
monkeypatch.setattr(macro, 'MACRO_BCRYPT_ROUNDS', 4)
@pytest.fixture(autouse=True)
def drop_email(monkeypatch):
def drop_send(mailto, subject, content):
pass
monkeypatch.setattr(emailer, 'send', drop_send)
@pytest.fixture
def db(request):
db = define.connect()
def tear_down():
""" Clears all rows from the test database. """
db.flush()
for table in reversed(metadata.sorted_tables):
db.execute(table.delete())
request.addfinalizer(tear_down)
if request.cls is not None:
request.cls.db = db
return db
@pytest.fixture(name='cache')
def cache_(request):
cache.region.configure(
'dogpile.cache.memory',
wrap=[ThreadCacheProxy],
replace_existing_backend=True,
)
yield
ThreadCacheProxy.zap_cache()
@pytest.fixture(autouse=True)
def template_cache():
define._template_cache.clear()
@pytest.fixture(autouse=True)
def no_csrf(monkeypatch):
monkeypatch.setattr(define, 'is_csrf_valid', lambda request: True)
@pytest.fixture(autouse=True)
def deterministic_marketplace_tests(monkeypatch):
rates = """{"base":"USD","date":"2017-04-03","rates":{"AUD":1.3143,"BGN":1.8345,"BRL":3.1248,"CAD":1.3347,"CHF":1.002,"CNY":6.8871,"CZK":25.367,"DKK":6.9763,"GBP":0.79974,"HKD":7.7721,"HRK":6.9698,"HUF":289.54,"IDR":13322.0,"ILS":3.6291,"INR":64.985,"JPY":111.28,"KRW":1117.2,"MXN":18.74,"MYR":4.4275,"NOK":8.5797,"NZD":1.4282,"PHP":50.142,"PLN":3.9658,"RON":4.2674,"RUB":56.355,"SEK":8.9246,"SGD":1.3975,"THB":34.385,"TRY":3.6423,"ZAR":13.555,"EUR":0.938}}"""
def _fetch_rates():
return json.loads(rates)
monkeypatch.setattr(commishinfo, '_fetch_rates', _fetch_rates)
@pytest.fixture(scope='session')
def METHOD_NAME():
return make_wsgi_app(configure_cache=False)
class TestApp(TestApp_):
def do_request(self, req, status=None, expect_errors=None):
if 'wsgi.input' in req.environ:
req.environ['wsgi.input_terminated'] = True
return super().do_request(req, status, expect_errors)
@pytest.fixture()
def app(METHOD_NAME):
return TestApp(METHOD_NAME, extra_environ={'HTTP_X_FORWARDED_FOR': '::1'})
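# A hedged sketch (not a fixture) of how a test module might use the `app`
# fixture defined above; the URL and status check are illustrative only.
def _example_usage_of_app_fixture(app):  # pragma: no cover - illustration only
    resp = app.get('/', status='*')  # accept any status; just exercise the WSGI app
    assert resp.status_int < 500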
|
4,365 |
test send sms fake to phone number
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
from devtools_testutils import AzureRecordedTestCase, is_live, recorded_by_proxy, set_bodiless_matcher
from _shared.utils import get_http_logging_policy
from devtools_testutils.fake_credentials import FakeTokenCredential
from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from acs_sms_test_case import ACSSMSTestCase
from azure.communication.sms import SmsClient
class TestClient(ACSSMSTestCase):
def setup_method(self):
super().setUp()
set_bodiless_matcher()
@recorded_by_proxy
def test_send_sms_single(self):
sms_client = self.create_client_from_connection_string()
# calling send() with sms values
sms_responses = sms_client.send(
from_=self.phone_number,
to=self.phone_number,
message="Hello World via SMS")
assert len(sms_responses) == 1
self.verify_successful_sms_response(sms_responses[0])
@recorded_by_proxy
def test_send_sms_multiple_with_options(self):
sms_client = self.create_client_from_connection_string()
# calling send() with sms values
sms_responses = sms_client.send(
from_=self.phone_number,
to=[self.phone_number, self.phone_number],
message="Hello World via SMS",
enable_delivery_report=True, # optional property
tag="custom-tag") # optional property
assert len(sms_responses) == 2
self.verify_successful_sms_response(sms_responses[0])
self.verify_successful_sms_response(sms_responses[1])
@recorded_by_proxy
def test_send_sms_from_managed_identity(self):
if not is_live():
credential = FakeTokenCredential()
else:
credential = DefaultAzureCredential()
sms_client = SmsClient(
self.endpoint,
credential,
http_logging_policy=get_http_logging_policy()
)
# calling send() with sms values
sms_responses = sms_client.send(
from_=self.phone_number,
to=[self.phone_number],
message="Hello World via SMS")
assert len(sms_responses) == 1
self.verify_successful_sms_response(sms_responses[0])
@recorded_by_proxy
def test_send_sms_fake_from_phone_number(self):
sms_client = self.create_client_from_connection_string()
with pytest.raises(HttpResponseError) as ex:
# calling send() with sms values
sms_client.send(
from_="+15550000000",
to=[self.phone_number],
message="Hello World via SMS")
assert str(
ex.value.status_code) == "401"
assert ex.value.message is not None
@recorded_by_proxy
def METHOD_NAME(self):
sms_client = self.create_client_from_connection_string()
with pytest.raises(HttpResponseError) as ex:
sms_responses = sms_client.send(
from_=self.phone_number,
to=["Ad155500000000000"],
message="Hello World via SMS")
        assert str(ex.value.status_code) == "400"
@recorded_by_proxy
def test_send_sms_unauthorized_from_phone_number(self):
sms_client = self.create_client_from_connection_string()
with pytest.raises(HttpResponseError) as ex:
# calling send() with sms values
sms_client.send(
from_="+14255550123",
to=[self.phone_number],
message="Hello World via SMS")
assert str(ex.value.status_code) == "401"
assert ex.value.message is not None
@pytest.mark.live_test_only
@recorded_by_proxy
def test_send_sms_unique_message_ids(self):
sms_client = self.create_client_from_connection_string()
# calling send() with sms values
sms_responses_1 = sms_client.send(
from_=self.phone_number,
to=[self.phone_number],
message="Hello World via SMS")
# calling send() again with the same sms values
sms_responses_2 = sms_client.send(
from_=self.phone_number,
to=[self.phone_number],
message="Hello World via SMS")
self.verify_successful_sms_response(sms_responses_1[0])
self.verify_successful_sms_response(sms_responses_2[0])
# message ids should be unique due to having a different idempotency key
assert sms_responses_1[0].message_id != sms_responses_2[0].message_id
def verify_successful_sms_response(self, sms_response):
if self.is_live:
assert sms_response.to == self.phone_number
assert sms_response.message_id is not None
assert sms_response.http_status_code == 202
assert sms_response.error_message is None
assert sms_response.successful
def create_client_from_connection_string(self):
return SmsClient.from_connection_string(
self.connection_str,
http_logging_policy=get_http_logging_policy()
)
|
4,366 |
power
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Rotation around the Y axis."""
import math
from math import pi
from typing import Optional, Union
import numpy
from qiskit.circuit.controlledgate import ControlledGate
from qiskit.circuit.gate import Gate
from qiskit.circuit.quantumregister import QuantumRegister
from qiskit.circuit.parameterexpression import ParameterValueType
class RYGate(Gate):
r"""Single-qubit rotation about the Y axis.
Can be applied to a :class:`~qiskit.circuit.QuantumCircuit`
with the :meth:`~qiskit.circuit.QuantumCircuit.ry` method.
**Circuit symbol:**
.. parsed-literal::
┌───────┐
q_0: ┤ Ry(ϴ) ├
└───────┘
**Matrix Representation:**
.. math::
\newcommand{\th}{\frac{\theta}{2}}
RY(\theta) = \exp\left(-i \th Y\right) =
\begin{pmatrix}
\cos\left(\th\right) & -\sin\left(\th\right) \\
\sin\left(\th\right) & \cos\left(\th\right)
\end{pmatrix}
"""
def __init__(self, theta: ParameterValueType, label: Optional[str] = None):
"""Create new RY gate."""
super().__init__("ry", 1, [theta], label=label)
def _define(self):
"""
gate ry(theta) a { r(theta, pi/2) a; }
"""
# pylint: disable=cyclic-import
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .r import RGate
q = QuantumRegister(1, "q")
qc = QuantumCircuit(q, name=self.name)
rules = [(RGate(self.params[0], pi / 2), [q[0]], [])]
for instr, qargs, cargs in rules:
qc._append(instr, qargs, cargs)
self.definition = qc
def control(
self,
num_ctrl_qubits: int = 1,
label: Optional[str] = None,
ctrl_state: Optional[Union[str, int]] = None,
):
"""Return a (multi-)controlled-RY gate.
Args:
num_ctrl_qubits (int): number of control qubits.
label (str or None): An optional label for the gate [Default: None]
ctrl_state (int or str or None): control state expressed as integer,
string (e.g. '110'), or None. If None, use all 1s.
Returns:
ControlledGate: controlled version of this gate.
"""
if num_ctrl_qubits == 1:
gate = CRYGate(self.params[0], label=label, ctrl_state=ctrl_state)
gate.base_gate.label = self.label
return gate
return super().control(num_ctrl_qubits=num_ctrl_qubits, label=label, ctrl_state=ctrl_state)
def inverse(self):
r"""Return inverted RY gate.
:math:`RY(\lambda)^{\dagger} = RY(-\lambda)`
"""
return RYGate(-self.params[0])
def __array__(self, dtype=None):
"""Return a numpy.array for the RY gate."""
cos = math.cos(self.params[0] / 2)
sin = math.sin(self.params[0] / 2)
return numpy.array([[cos, -sin], [sin, cos]], dtype=dtype)
def METHOD_NAME(self, exponent: float):
"""Raise gate to a power."""
(theta,) = self.params
return RYGate(exponent * theta)
class CRYGate(ControlledGate):
r"""Controlled-RY gate.
Can be applied to a :class:`~qiskit.circuit.QuantumCircuit`
with the :meth:`~qiskit.circuit.QuantumCircuit.cry` method.
**Circuit symbol:**
.. parsed-literal::
q_0: ────■────
┌───┴───┐
q_1: ┤ Ry(ϴ) ├
└───────┘
**Matrix representation:**
.. math::
\newcommand{\th}{\frac{\theta}{2}}
CRY(\theta)\ q_0, q_1 =
I \otimes |0\rangle\langle 0| + RY(\theta) \otimes |1\rangle\langle 1| =
\begin{pmatrix}
1 & 0 & 0 & 0 \\
0 & \cos\left(\th\right) & 0 & -\sin\left(\th\right) \\
0 & 0 & 1 & 0 \\
0 & \sin\left(\th\right) & 0 & \cos\left(\th\right)
\end{pmatrix}
.. note::
In Qiskit's convention, higher qubit indices are more significant
(little endian convention). In many textbooks, controlled gates are
presented with the assumption of more significant qubits as control,
which in our case would be q_1. Thus a textbook matrix for this
gate will be:
.. parsed-literal::
┌───────┐
q_0: ┤ Ry(ϴ) ├
└───┬───┘
q_1: ────■────
.. math::
\newcommand{\th}{\frac{\theta}{2}}
CRY(\theta)\ q_1, q_0 =
|0\rangle\langle 0| \otimes I + |1\rangle\langle 1| \otimes RY(\theta) =
\begin{pmatrix}
1 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & \cos\left(\th\right) & -\sin\left(\th\right) \\
0 & 0 & \sin\left(\th\right) & \cos\left(\th\right)
\end{pmatrix}
"""
def __init__(
self,
theta: ParameterValueType,
label: Optional[str] = None,
ctrl_state: Optional[Union[str, int]] = None,
):
"""Create new CRY gate."""
super().__init__(
"cry",
2,
[theta],
num_ctrl_qubits=1,
label=label,
ctrl_state=ctrl_state,
base_gate=RYGate(theta),
)
def _define(self):
"""
gate cry(lambda) a,b
{ u3(lambda/2,0,0) b; cx a,b;
u3(-lambda/2,0,0) b; cx a,b;
}
"""
# pylint: disable=cyclic-import
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .x import CXGate
# q_0: ─────────────■───────────────■──
# ┌─────────┐┌─┴─┐┌─────────┐┌─┴─┐
# q_1: ┤ Ry(λ/2) ├┤ X ├┤ Ry(λ/2) ├┤ X ├
# └─────────┘└───┘└─────────┘└───┘
q = QuantumRegister(2, "q")
qc = QuantumCircuit(q, name=self.name)
rules = [
(RYGate(self.params[0] / 2), [q[1]], []),
(CXGate(), [q[0], q[1]], []),
(RYGate(-self.params[0] / 2), [q[1]], []),
(CXGate(), [q[0], q[1]], []),
]
for instr, qargs, cargs in rules:
qc._append(instr, qargs, cargs)
self.definition = qc
def inverse(self):
"""Return inverse CRY gate (i.e. with the negative rotation angle)."""
return CRYGate(-self.params[0], ctrl_state=self.ctrl_state)
def __array__(self, dtype=None):
"""Return a numpy.array for the CRY gate."""
half_theta = float(self.params[0]) / 2
cos = math.cos(half_theta)
sin = math.sin(half_theta)
if self.ctrl_state:
return numpy.array(
[[1, 0, 0, 0], [0, cos, 0, -sin], [0, 0, 1, 0], [0, sin, 0, cos]], dtype=dtype
)
else:
return numpy.array(
[[cos, 0, -sin, 0], [0, 1, 0, 0], [sin, 0, cos, 0], [0, 0, 0, 1]], dtype=dtype
)
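# A small usage sketch, not part of the Qiskit module above: it appends the
# gates defined here to a circuit and checks the power identity RY(θ)^2 = RY(2θ).
def _example_ry_usage():  # pragma: no cover - illustration only
    from qiskit.circuit.quantumcircuit import QuantumCircuit

    qc = QuantumCircuit(2)
    qc.append(RYGate(pi / 4), [0])
    qc.append(CRYGate(pi / 4), [0, 1])
    doubled = RYGate(pi / 2).METHOD_NAME(2)  # the exponent folds into the angle
    assert numpy.allclose(doubled.__array__(), RYGate(pi).__array__())
    return qc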
|
4,367 |
test email sent to group
|
#########################################################################
#
# Copyright (C) 2020 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from unittest.mock import patch
from django.contrib.auth.models import Group
from user_messages.models import Thread, Message, GroupMemberThread
from geonode.people.models import Profile
from geonode.tests.base import GeoNodeBaseTestSupport
from geonode.messaging.notifications import message_received_notification
class TestSendEmail(GeoNodeBaseTestSupport):
def setUp(self):
self.p = Profile.objects.create(username="test", email="[email protected]")
self.p1 = Profile.objects.create(username="test1")
self.sender = Profile.objects.create(username="sender", email="[email protected]")
self.t = Thread.objects.create(
subject="test",
)
self.t2 = Thread.objects.create(
subject="test2",
)
GroupMemberThread.objects.create(thread=self.t2, group=Group.objects.get(pk=1), user=self.p)
GroupMemberThread.objects.create(thread=self.t2, group=Group.objects.get(pk=1), user=self.p1)
self.m2 = Message.objects.create(content="test", thread=self.t2, sender=self.sender)
self.m = Message.objects.create(content="test", thread=self.t, sender=self.sender)
self.t.single_users.add(self.p)
self.t.single_users.add(self.p1)
self.t2.group_users.add(self.p)
self.t2.group_users.add(self.p1)
@patch("geonode.notifications_backend.EmailBackend.deliver")
def test_email_sent(self, email_message):
with self.settings(
ASYNC_SIGNALS=False,
NOTIFICATION_ENABLED=True,
NOTIFICATIONS_MODULE="pinax.notifications",
PINAX_NOTIFICATIONS_QUEUE_ALL=False,
):
message_received_notification(message=self.m)
email_message.assert_called_once()
@patch("geonode.notifications_backend.EmailBackend.deliver")
def test_email_sent_many(self, email_message):
with self.settings(
ASYNC_SIGNALS=False,
NOTIFICATION_ENABLED=True,
NOTIFICATIONS_MODULE="pinax.notifications",
PINAX_NOTIFICATIONS_QUEUE_ALL=False,
):
self.p1.email = "[email protected]"
self.p1.save()
message_received_notification(message=self.m)
self.assertEqual(email_message.call_count, 2)
@patch("geonode.notifications_backend.EmailBackend.deliver")
def METHOD_NAME(self, email_message):
with self.settings(
ASYNC_SIGNALS=False,
NOTIFICATION_ENABLED=True,
NOTIFICATIONS_MODULE="pinax.notifications",
PINAX_NOTIFICATIONS_QUEUE_ALL=False,
):
self.p1.email = "[email protected]"
self.p1.save()
message_received_notification(message=self.m2)
self.assertEqual(email_message.call_count, 2)
@patch("geonode.notifications_backend.EmailBackend.deliver")
def test_email_sent_to_group_single(self, email_message):
with self.settings(
ASYNC_SIGNALS=False,
NOTIFICATION_ENABLED=True,
NOTIFICATIONS_MODULE="pinax.notifications",
PINAX_NOTIFICATIONS_QUEUE_ALL=False,
):
message_received_notification(message=self.m2)
self.assertEqual(email_message.call_count, 1)
|
4,368 |
test context processor no tos
|
from django.urls import reverse
from ...users.test import AuthenticatedUserTestCase
from ..context_processors import legal_links
from ..models import Agreement
class MockRequest:
def __init__(self, user):
self.user = user
self.frontend_context = {}
def get_host(self):
return "testhost.com"
class PrivacyPolicyTests(AuthenticatedUserTestCase):
def setUp(self):
super().setUp()
Agreement.objects.invalidate_cache()
def tearDown(self):
Agreement.objects.invalidate_cache()
def test_context_processor_no_policy(self):
"""context processor has no TOS link"""
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": None,
"TERMS_OF_SERVICE_TITLE": None,
"TERMS_OF_SERVICE_URL": None,
"PRIVACY_POLICY_ID": None,
"PRIVACY_POLICY_TITLE": None,
"PRIVACY_POLICY_URL": None,
"misago_agreement": None,
},
)
def test_context_processor_misago_policy(self):
"""context processor has TOS link to Misago view"""
agreement = Agreement.objects.create(
type=Agreement.TYPE_PRIVACY, text="Lorem ipsum", is_active=True
)
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": None,
"TERMS_OF_SERVICE_TITLE": None,
"TERMS_OF_SERVICE_URL": None,
"PRIVACY_POLICY_ID": agreement.id,
"PRIVACY_POLICY_TITLE": "Privacy policy",
"PRIVACY_POLICY_URL": reverse("misago:privacy-policy"),
"misago_agreement": {
"type": "Privacy policy",
"title": "Privacy policy",
"link": None,
"text": "<p>Lorem ipsum</p>",
},
},
)
def test_context_processor_remote_policy(self):
"""context processor has TOS link to remote url"""
agreement = Agreement.objects.create(
type=Agreement.TYPE_PRIVACY, link="http://test.com", is_active=True
)
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": None,
"TERMS_OF_SERVICE_TITLE": None,
"TERMS_OF_SERVICE_URL": None,
"PRIVACY_POLICY_ID": agreement.id,
"PRIVACY_POLICY_TITLE": "Privacy policy",
"PRIVACY_POLICY_URL": "http://test.com",
"misago_agreement": {
"type": "Privacy policy",
"title": "Privacy policy",
"link": "http://test.com",
"text": None,
},
},
)
# set misago view too
agreement.text = "Lorem ipsum"
agreement.save()
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": None,
"TERMS_OF_SERVICE_TITLE": None,
"TERMS_OF_SERVICE_URL": None,
"PRIVACY_POLICY_ID": agreement.id,
"PRIVACY_POLICY_TITLE": "Privacy policy",
"PRIVACY_POLICY_URL": "http://test.com",
"misago_agreement": {
"type": "Privacy policy",
"title": "Privacy policy",
"link": "http://test.com",
"text": "<p>Lorem ipsum</p>",
},
},
)
class TermsOfServiceTests(AuthenticatedUserTestCase):
def setUp(self):
super().setUp()
Agreement.objects.invalidate_cache()
def tearDown(self):
Agreement.objects.invalidate_cache()
def METHOD_NAME(self):
"""context processor has no TOS link"""
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": None,
"TERMS_OF_SERVICE_TITLE": None,
"TERMS_OF_SERVICE_URL": None,
"PRIVACY_POLICY_ID": None,
"PRIVACY_POLICY_TITLE": None,
"PRIVACY_POLICY_URL": None,
"misago_agreement": None,
},
)
def test_context_processor_misago_tos(self):
"""context processor has TOS link to Misago view"""
agreement = Agreement.objects.create(
type=Agreement.TYPE_TOS, text="Lorem ipsum", is_active=True
)
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": agreement.id,
"TERMS_OF_SERVICE_TITLE": "Terms of service",
"TERMS_OF_SERVICE_URL": reverse("misago:terms-of-service"),
"PRIVACY_POLICY_ID": None,
"PRIVACY_POLICY_TITLE": None,
"PRIVACY_POLICY_URL": None,
"misago_agreement": {
"type": "Terms of service",
"title": "Terms of service",
"link": None,
"text": "<p>Lorem ipsum</p>",
},
},
)
def test_context_processor_remote_tos(self):
"""context processor has TOS link to remote url"""
agreement = Agreement.objects.create(
type=Agreement.TYPE_TOS, link="http://test.com", is_active=True
)
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": agreement.id,
"TERMS_OF_SERVICE_TITLE": "Terms of service",
"TERMS_OF_SERVICE_URL": "http://test.com",
"PRIVACY_POLICY_ID": None,
"PRIVACY_POLICY_TITLE": None,
"PRIVACY_POLICY_URL": None,
"misago_agreement": {
"type": "Terms of service",
"title": "Terms of service",
"link": "http://test.com",
"text": None,
},
},
)
# set misago view too
agreement.text = "Lorem ipsum"
agreement.save()
context_dict = legal_links(MockRequest(self.user))
self.assertEqual(
context_dict,
{
"TERMS_OF_SERVICE_ID": agreement.id,
"TERMS_OF_SERVICE_TITLE": "Terms of service",
"TERMS_OF_SERVICE_URL": "http://test.com",
"PRIVACY_POLICY_ID": None,
"PRIVACY_POLICY_TITLE": None,
"PRIVACY_POLICY_URL": None,
"misago_agreement": {
"type": "Terms of service",
"title": "Terms of service",
"link": "http://test.com",
"text": "<p>Lorem ipsum</p>",
},
},
)
|
4,369 |
test fetch guild preview
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Nekokatt
# Copyright (c) 2021-present davfsa
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import mock
import pytest
from hikari import channels
from hikari.events import typing_events
from tests.hikari import hikari_test_helpers
class TestTypingEvent:
@pytest.fixture()
def event(self):
cls = hikari_test_helpers.mock_class_namespace(
typing_events.TypingEvent, channel_id=123, user_id=456, timestamp=object(), shard=object()
)
return cls()
def test_get_user_when_no_cache(self, event):
event = hikari_test_helpers.mock_class_namespace(typing_events.TypingEvent, app=None)()
assert event.get_user() is None
def test_get_user(self, event):
assert event.get_user() is event.app.cache.get_user.return_value
def test_trigger_typing(self, event):
event.app.rest.trigger_typing = mock.Mock()
result = event.trigger_typing()
event.app.rest.trigger_typing.assert_called_once_with(123)
assert result is event.app.rest.trigger_typing.return_value
class TestGuildTypingEvent:
@pytest.fixture()
def event(self):
cls = hikari_test_helpers.mock_class_namespace(typing_events.GuildTypingEvent)
return cls(
channel_id=123,
timestamp=object(),
shard=object(),
guild_id=789,
member=mock.Mock(id=456, app=mock.Mock(rest=mock.AsyncMock())),
)
def test_app_property(self, event):
assert event.app is event.member.app
def test_get_channel_when_no_cache(self):
event = hikari_test_helpers.mock_class_namespace(typing_events.GuildTypingEvent, app=None, init_=False)()
assert event.get_channel() is None
@pytest.mark.parametrize("guild_channel_impl", [channels.GuildNewsChannel, channels.GuildTextChannel])
def test_get_channel(self, event, guild_channel_impl):
event.app.cache.get_guild_channel = mock.Mock(return_value=mock.Mock(spec_set=guild_channel_impl))
result = event.get_channel()
assert result is event.app.cache.get_guild_channel.return_value
event.app.cache.get_guild_channel.assert_called_once_with(123)
@pytest.mark.asyncio()
async def test_get_guild_when_no_cache(self):
event = hikari_test_helpers.mock_class_namespace(typing_events.GuildTypingEvent, app=None, init_=False)()
assert event.get_guild() is None
def test_get_guild_when_available(self, event):
result = event.get_guild()
assert result is event.app.cache.get_available_guild.return_value
event.app.cache.get_available_guild.assert_called_once_with(789)
event.app.cache.get_unavailable_guild.assert_not_called()
def test_get_guild_when_unavailable(self, event):
event.app.cache.get_available_guild.return_value = None
result = event.get_guild()
assert result is event.app.cache.get_unavailable_guild.return_value
event.app.cache.get_unavailable_guild.assert_called_once_with(789)
event.app.cache.get_available_guild.assert_called_once_with(789)
def test_user_id(self, event):
assert event.user_id == event.member.id
assert event.user_id == 456
@pytest.mark.asyncio()
@pytest.mark.parametrize("guild_channel_impl", [channels.GuildNewsChannel, channels.GuildTextChannel])
async def test_fetch_channel(self, event, guild_channel_impl):
event.app.rest.fetch_channel = mock.AsyncMock(return_value=mock.Mock(spec_set=guild_channel_impl))
await event.fetch_channel()
event.app.rest.fetch_channel.assert_awaited_once_with(123)
@pytest.mark.asyncio()
async def test_fetch_guild(self, event):
await event.fetch_guild()
event.app.rest.fetch_guild.assert_awaited_once_with(789)
@pytest.mark.asyncio()
async def METHOD_NAME(self, event):
await event.fetch_guild_preview()
event.app.rest.fetch_guild_preview.assert_awaited_once_with(789)
@pytest.mark.asyncio()
async def test_fetch_member(self, event):
await event.fetch_member()
event.app.rest.fetch_member.assert_awaited_once_with(789, 456)
@pytest.mark.asyncio()
class TestDMTypingEvent:
@pytest.fixture()
def event(self):
cls = hikari_test_helpers.mock_class_namespace(typing_events.DMTypingEvent)
return cls(
channel_id=123, timestamp=object(), shard=object(), app=mock.Mock(rest=mock.AsyncMock()), user_id=456
)
async def test_fetch_channel(self, event):
event.app.rest.fetch_channel = mock.AsyncMock(return_value=mock.Mock(spec_set=channels.DMChannel))
await event.fetch_channel()
event.app.rest.fetch_channel.assert_awaited_once_with(123)
async def test_fetch_user(self, event):
await event.fetch_user()
event.app.rest.fetch_user.assert_awaited_once_with(456)
|
4,370 |
build validation result from rest object
|
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=protected-access
import logging
import typing
import msrest
from azure.ai.ml._vendor.azure_resources.models import (
Deployment,
DeploymentProperties,
DeploymentValidateResult,
ErrorResponse,
)
from azure.ai.ml.entities._mixins import RestTranslatableMixin
from .core import MutableValidationResult, ValidationResultBuilder
module_logger = logging.getLogger(__name__)
class PreflightResource(msrest.serialization.Model):
"""Specified resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_attribute_map = {
"type": {"key": "type", "type": "str"},
"name": {"key": "name", "type": "str"},
"location": {"key": "location", "type": "str"},
"api_version": {"key": "apiversion", "type": "str"},
"properties": {"key": "properties", "type": "object"},
}
def __init__(self, **kwargs):
super(PreflightResource, self).__init__(**kwargs)
self.name = kwargs.get("name", None)
self.type = kwargs.get("type", None)
self.location = kwargs.get("location", None)
self.properties = kwargs.get("properties", None)
self.api_version = kwargs.get("api_version", None)
class ValidationTemplateRequest(msrest.serialization.Model):
"""Export resource group template request parameters.
:param resources: The rest objects to be validated.
:type resources: list[_models.Resource]
:param options: The export template options. A CSV-formatted list containing zero or more of
the following: 'IncludeParameterDefaultValue', 'IncludeComments',
'SkipResourceNameParameterization', 'SkipAllParameterization'.
:type options: str
"""
_attribute_map = {
"resources": {"key": "resources", "type": "[PreflightResource]"},
"content_version": {"key": "contentVersion", "type": "str"},
"parameters": {"key": "parameters", "type": "object"},
"_schema": {
"key": "$schema",
"type": "str",
"default": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
},
}
def __init__(self, **kwargs):
super(ValidationTemplateRequest, self).__init__(**kwargs)
self._schema = kwargs.get("_schema", None)
self.content_version = kwargs.get("content_version", None)
self.parameters = kwargs.get("parameters", None)
self.resources = kwargs.get("resources", None)
class RemoteValidatableMixin(RestTranslatableMixin):
@classmethod
def _get_resource_type(cls) -> str:
"""Return resource type to be used in remote validation.
Should be overridden by subclass.
:return: The resource type
:rtype: str
"""
raise NotImplementedError()
def _get_resource_name_version(self) -> typing.Tuple[str, str]:
"""Return resource name and version to be used in remote validation.
Should be overridden by subclass.
:return: The name and version
:rtype: typing.Tuple[str, str]
"""
raise NotImplementedError()
def _to_preflight_resource(self, location: str, workspace_name: str) -> PreflightResource:
"""Return the preflight resource to be used in remote validation.
:param location: The location of the resource.
:type location: str
:param workspace_name: The workspace name
:type workspace_name: str
:return: The preflight resource
:rtype: PreflightResource
"""
name, version = self._get_resource_name_version()
return PreflightResource(
type=self._get_resource_type(),
name=f"{workspace_name}/{name}/{version}",
location=location,
properties=self._to_rest_object().properties,
api_version="2023-03-01-preview",
)
def _build_rest_object_for_remote_validation(self, location: str, workspace_name: str) -> Deployment:
return Deployment(
properties=DeploymentProperties(
mode="Incremental",
template=ValidationTemplateRequest(
_schema="https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
content_version="1.0.0.0",
parameters={},
resources=[self._to_preflight_resource(location=location, workspace_name=workspace_name)],
),
)
)
@classmethod
def METHOD_NAME(cls, rest_obj: DeploymentValidateResult) -> MutableValidationResult:
"""Create a validation result from a rest object. Note that the created validation result does not have
target_obj so should only be used for merging.
:param rest_obj: The Deployment Validate REST obj
:type rest_obj: DeploymentValidateResult
:return: The validation result created from rest_obj
:rtype: MutableValidationResult
"""
if not rest_obj.error or not rest_obj.error.details:
return ValidationResultBuilder.success()
result = MutableValidationResult(target_obj=None)
details: typing.List[ErrorResponse] = rest_obj.error.details
for detail in details:
result.append_error(
message=detail.message,
yaml_path=detail.target.replace("/", "."),
error_code=detail.code,
# will always be UserError for now, not sure if innerError can be passed back
)
return result
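# Hedged usage sketch (not part of the SDK): shows how the classmethod above turns a
# DeploymentValidateResult-like object into a MutableValidationResult. The
# SimpleNamespace stand-ins only mimic the attributes the method reads (error,
# details, message, target, code); all values are invented for illustration.
def _example_build_validation_result():
    from types import SimpleNamespace

    detail = SimpleNamespace(
        message="Field 'command' is required",
        target="properties/command",
        code="UserError",
    )
    rest_obj = SimpleNamespace(error=SimpleNamespace(details=[detail]))
    # Expected to produce a single error whose yaml_path is "properties.command".
    return RemoteValidatableMixin.METHOD_NAME(rest_obj)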
|
4,371 |
temppath
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
# Modifications copyright (C) 2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import contextlib
import multiprocessing
import os
import shutil
import sys
import tempfile
import time
import traceback
import cloudpickle
import mock
from horovod.runner.util.threads import in_thread
def mpi_env_rank_and_size():
"""Get MPI rank and size from environment variables and return them as a
tuple of integers.
Most MPI implementations have an `mpirun` or `mpiexec` command that will
run an MPI executable and set up all communication necessary between the
different processors. As part of that set up, they will set environment
variables that contain the rank and size of the MPI_COMM_WORLD
communicator. We can read those environment variables from Python in order
to ensure that `hvd.rank()` and `hvd.size()` return the expected values.
Since MPI is just a standard, not an implementation, implementations
typically choose their own environment variable names. This function tries
    to support several different implementations, but really it only needs to
support whatever implementation we want to use for the TensorFlow test
suite.
If this is not running under MPI, then defaults of rank zero and size one
are returned. (This is appropriate because when you call MPI_Init in an
application not started with mpirun, it will create a new independent
communicator with only one process in it.)
"""
rank_env = 'PMI_RANK OMPI_COMM_WORLD_RANK'.split()
size_env = 'PMI_SIZE OMPI_COMM_WORLD_SIZE'.split()
for rank_var, size_var in zip(rank_env, size_env):
rank = os.environ.get(rank_var)
size = os.environ.get(size_var)
if rank is not None and size is not None:
return int(rank), int(size)
# Default to rank zero and size one if there are no environment variables
return 0, 1
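# Hedged example (not used by the tests): demonstrates the documented fallback of
# mpi_env_rank_and_size when no MPI launcher has populated the environment.
def _example_rank_and_size_fallback():
    # Outside `mpirun`/`mpiexec`, neither PMI_RANK nor OMPI_COMM_WORLD_RANK is set,
    # so the helper above falls back to rank 0 and size 1.
    rank, size = mpi_env_rank_and_size()
    return rank == 0 and size == 1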
def delay(func, seconds):
"""Delays the execution of func in a separate thread by given seconds."""
def fn():
time.sleep(seconds)
func()
return in_thread(target=fn)
def wait(func, timeout=None):
"""Wait for func to return True until timeout."""
start = int(time.time())
while not func():
time.sleep(0.1)
if timeout is not None and int(time.time()) - start > timeout:
raise TimeoutError('Timed out waiting for func to return True')
@contextlib.contextmanager
def capture(stdout=None, stderr=None):
out = sys.stdout
err = sys.stderr
if stdout is not None:
sys.stdout = stdout
if stderr is not None:
sys.stderr = stderr
try:
yield
finally:
if stdout is not None:
sys.stdout.seek(0)
sys.stdout = out
if stderr is not None:
sys.stderr.seek(0)
sys.stderr = err
@contextlib.contextmanager
def tempdir():
dirpath = tempfile.mkdtemp()
try:
yield dirpath
finally:
shutil.rmtree(dirpath)
@contextlib.contextmanager
def METHOD_NAME():
dir_path = tempfile.TemporaryDirectory()
    path = os.path.join(dir_path.name, 'temp_test_file')
try:
yield path
finally:
dir_path.cleanup()
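# Hedged usage sketch (helper name invented): the context manager above yields a file
# path inside a fresh temporary directory and removes the directory once the block
# exits, so nothing leaks between tests.
def _example_temppath_roundtrip():
    with METHOD_NAME() as path:
        with open(path, "w") as handle:
            handle.write("scratch data")  # the file exists only inside this block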
@contextlib.contextmanager
def override_args(tool=None, *args):
old = sys.argv[:]
try:
if tool:
sys.argv[0] = tool
sys.argv[1:] = args
yield
finally:
sys.argv = old
@contextlib.contextmanager
def override_env(env):
old = os.environ
try:
os.environ = env
yield
finally:
os.environ = old
@contextlib.contextmanager
def undo(fn):
try:
yield
finally:
fn()
@contextlib.contextmanager
def is_built(gloo_is_built, mpi_is_built):
"""
Patches the gloo_built and mpi_built methods called from horovod.runner.run.run_controller
to return the given booleans. That method is used by horovod.spark.run to determine which
    controller to use. Patching these methods allows testing horovod.spark.run without an MPI
    implementation being installed.
:param gloo_is_built: boolean returned by gloo_built
:param mpi_is_built: boolean returned by mpi_built
:return: mocked gloo_built and mpi_built methods
"""
with mock.patch("horovod.runner.launch.gloo_built", return_value=gloo_is_built) as g:
with mock.patch("horovod.runner.launch.mpi_built", return_value=mpi_is_built) as m:
yield g, m
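# Hedged usage sketch (function name invented): forcing the Gloo code path in a test
# regardless of which controllers are actually compiled in, via is_built above.
def _example_force_gloo_controller():
    with is_built(gloo_is_built=True, mpi_is_built=False) as (gloo_mock, mpi_mock):
        # Anything under test that consults horovod.runner.launch.gloo_built() or
        # mpi_built() now sees Gloo as available and MPI as missing.
        return gloo_mock, mpi_mock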
@contextlib.contextmanager
def mpi_implementation_flags(flags=["--mock-mpi-impl-flags"],
binding_args=["--mock-mpi-binding-args"],
mpi=None):
"""
Patches the _get_mpi_implementation_flags method used by horovod.runner.mpi_run to retrieve
    MPI implementation specific command line flags. Patching this method allows testing mpi_run
    without an MPI implementation being installed.
:param flags: mock flags
:return: the mocked method
"""
with mock.patch("horovod.runner.mpi_run._get_mpi_implementation_flags", return_value=(flags, binding_args, mpi)) as m:
yield m
@contextlib.contextmanager
def lsf_and_jsrun(lsf_exists, jsrun_installed):
"""
Patches the lsf.LSFUtils.using_lsf and is_jsrun_installed methods called from
horovod.runner.run.run_controller to return the given booleans.
:param lsf_exists: boolean returned by lsf.LSFUtils.using_lsf
:param jsrun_installed: boolean returned by is_jsrun_installed
:return: mocked methods
"""
with mock.patch("horovod.runner.launch.lsf.LSFUtils.using_lsf", return_value=lsf_exists) as u:
with mock.patch("horovod.runner.launch.is_jsrun_installed", return_value=jsrun_installed) as i:
yield u, i
def _subproc_wrapper(fn, queue, *args, **kwargs):
fn = cloudpickle.loads(fn)
try:
results = fn(*args, **kwargs)
except Exception as e:
traceback.print_exc(file=sys.stderr)
results = e
queue.put(results)
def spawn(fn):
def wrapped_fn(*args, **kwargs):
ctx = multiprocessing.get_context('spawn')
queue = ctx.Queue()
p = ctx.Process(
target=_subproc_wrapper,
args=(cloudpickle.dumps(fn), queue, *args),
kwargs=kwargs)
p.start()
p.join()
results = queue.get()
if isinstance(results, Exception):
raise RuntimeError(f'Spawned subprocess raised {type(results).__name__}, '
f'check log output above for stack trace.')
return results
return wrapped_fn
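# Hedged example (invented function, not referenced elsewhere): spawn above runs the
# decorated callable in a fresh 'spawn' subprocess and returns its result, so a crash
# in native code cannot take down the parent test process.
@spawn
def _example_square(value):
    return value * value  # runs in the child; the result travels back via the queue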
def skip_or_fail_gpu_test(test, message):
"""Fails the test if GPUs are required, otherwise skips."""
if int(os.environ.get('HOROVOD_TEST_GPU', 0)):
test.fail(message)
else:
test.skipTest(message)
|
4,372 |
login
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_HTTPError
from ..utils import (
ExtractorError,
int_or_none,
urlencode_postdata,
)
class AtresPlayerIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?atresplayer\.com/[^/]+/[^/]+/[^/]+/[^/]+/(?P<display_id>.+?)_(?P<id>[0-9a-f]{24})'
_NETRC_MACHINE = 'atresplayer'
_TESTS = [
{
'url': 'https://www.atresplayer.com/antena3/series/pequenas-coincidencias/temporada-1/capitulo-7-asuntos-pendientes_5d4aa2c57ed1a88fc715a615/',
'info_dict': {
'id': '5d4aa2c57ed1a88fc715a615',
'ext': 'mp4',
'title': 'Capítulo 7: Asuntos pendientes',
'description': 'md5:7634cdcb4d50d5381bedf93efb537fbc',
'duration': 3413,
},
'params': {
'format': 'bestvideo',
},
'skip': 'This video is only available for registered users'
},
{
'url': 'https://www.atresplayer.com/lasexta/programas/el-club-de-la-comedia/temporada-4/capitulo-10-especial-solidario-nochebuena_5ad08edf986b2855ed47adc4/',
'only_matching': True,
},
{
'url': 'https://www.atresplayer.com/antena3/series/el-secreto-de-puente-viejo/el-chico-de-los-tres-lunares/capitulo-977-29-12-14_5ad51046986b2886722ccdea/',
'only_matching': True,
},
]
_API_BASE = 'https://api.atresplayer.com/'
def _real_initialize(self):
self.METHOD_NAME()
def _handle_error(self, e, code):
if isinstance(e.cause, compat_HTTPError) and e.cause.code == code:
error = self._parse_json(e.cause.read(), None)
if error.get('error') == 'required_registered':
self.raise_login_required()
raise ExtractorError(error['error_description'], expected=True)
raise
def METHOD_NAME(self):
username, password = self._get_login_info()
if username is None:
return
self._request_webpage(
self._API_BASE + 'login', None, 'Downloading login page')
try:
target_url = self._download_json(
'https://account.atresmedia.com/api/login', None,
'Logging in', headers={
'Content-Type': 'application/x-www-form-urlencoded'
}, data=urlencode_postdata({
'username': username,
'password': password,
}))['targetUrl']
except ExtractorError as e:
self._handle_error(e, 400)
self._request_webpage(target_url, None, 'Following Target URL')
def _real_extract(self, url):
display_id, video_id = re.match(self._VALID_URL, url).groups()
try:
episode = self._download_json(
self._API_BASE + 'client/v1/player/episode/' + video_id, video_id)
except ExtractorError as e:
self._handle_error(e, 403)
title = episode['titulo']
formats = []
for source in episode.get('sources', []):
src = source.get('src')
if not src:
continue
src_type = source.get('type')
if src_type == 'application/vnd.apple.mpegurl':
formats.extend(self._extract_m3u8_formats(
src, video_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False))
elif src_type == 'application/dash+xml':
formats.extend(self._extract_mpd_formats(
src, video_id, mpd_id='dash', fatal=False))
self._sort_formats(formats)
heartbeat = episode.get('heartbeat') or {}
omniture = episode.get('omniture') or {}
get_meta = lambda x: heartbeat.get(x) or omniture.get(x)
return {
'display_id': display_id,
'id': video_id,
'title': title,
'description': episode.get('descripcion'),
'thumbnail': episode.get('imgPoster'),
'duration': int_or_none(episode.get('duration')),
'formats': formats,
'channel': get_meta('channel'),
'season': get_meta('season'),
'episode_number': int_or_none(get_meta('episodeNumber')),
}
|
4,373 |
pause tracing
|
# SPDX-License-Identifier: MIT
import datetime
from m1n1.constructutils import show_struct_trace, Ver
from m1n1.utils import *
Ver.set_version(hv.u)
trace_device("/arm-io/sgx", True)
#trace_device("/arm-io/pmp", True)
#trace_device("/arm-io/gfx-asc", False)
from m1n1.trace.agx import AGXTracer
AGXTracer = AGXTracer._reloadcls(True)
agx_tracer = AGXTracer(hv, "/arm-io/gfx-asc", verbose=1)
agx_tracer.trace_kernmap = False
agx_tracer.trace_kernva = False
agx_tracer.trace_usermap = False
sgx = hv.adt["/arm-io/sgx"]
freqs = []
voltages = []
for j in range(8):
for i, v in enumerate(voltages):
if j != 0:
v = 1
sgx.perf_states[i+j*len(voltages)].freq = freqs[i] * 1000000
sgx.perf_states[i+j*len(voltages)].volt = v
sgx.perf_states_sram[i+j*len(voltages)].freq = freqs[i] * 1000000
sgx.perf_states_sram[i+j*len(voltages)].volt = 1
if j >= 1:
getattr(sgx, f"perf_states{j}")[i].freq = freqs[i] * 1000000
getattr(sgx, f"perf_states{j}")[i].volt = v
getattr(sgx, f"perf_states_sram{j}")[i].freq = freqs[i] * 1000000
getattr(sgx, f"perf_states_sram{j}")[i].volt = 1
def after_init():
plat = hv.adt.compatible[0].lower()
fname = f"initdata/{datetime.datetime.now().isoformat()}-{plat}.log"
idlog = open(fname, "w")
print(f"Platform: {plat}", file=idlog)
fw = hv.adt["/chosen"].firmware_version.split(b"\0")[0].decode("ascii")
print(f"Firmware: {fw}", file=idlog)
sfw = hv.adt["/chosen"].system_firmware_version
print(f"System firmware: {sfw}", file=idlog)
print(file=idlog)
print("ADT SGX:", file=idlog)
print(sgx, file=idlog)
open("adt_hv.txt","w").write(str(hv.adt))
print("InitData:", file=idlog)
print(agx_tracer.state.initdata, file=idlog)
power = [int(i) for i in agx_tracer.state.initdata.regionB.hwdata_b.rel_max_powers]
volt = [int(i[0]) for i in agx_tracer.state.initdata.regionB.hwdata_b.voltages]
freq = [int(i) for i in agx_tracer.state.initdata.regionB.hwdata_b.frequencies]
print("p/v", [p/max(1, v) for p,v in zip(power,volt)])
print("p/f", [p/max(1, f) for p,f in zip(power,freq)])
print("p/v2", [p/max(1, (v*v)) for p,v in zip(power,volt)])
hv.reboot()
agx_tracer.after_init_hook = after_init
#agx_tracer.encoder_id_filter = lambda i: (i >> 16) == 0xc0de
agx_tracer.start()
def resume_tracing(ctx):
fname = f"{datetime.datetime.now().isoformat()}.log"
hv.set_logfile(open(f"gfxlogs/{fname}", "a"))
agx_tracer.resume()
return True
def METHOD_NAME(ctx):
agx_tracer.pause()
hv.set_logfile(None)
return True
hv.add_hvcall(100, resume_tracing)
hv.add_hvcall(101, METHOD_NAME)
mode = TraceMode.SYNC
trace_range(irange(agx_tracer.gpu_region, agx_tracer.gpu_region_size), mode=mode, name="gpu_region")
trace_range(irange(agx_tracer.gfx_shared_region, agx_tracer.gfx_shared_region_size), mode=mode, name="gfx_shared_region")
## Trace the entire mmio range around the GPU
node = hv.adt["/arm-io/sgx"]
addr, size = node.get_reg(0)
hv.trace_range(irange(addr, 0x1000000), TraceMode.SYNC, name="sgx")
#hv.trace_range(irange(addr, 0x1000000), TraceMode.OFF, name="sgx")
hv.trace_range(irange(0x204017030, 8), TraceMode.SYNC, name="faultcode")
trace_device("/arm-io/sgx", True)
trace_device("/arm-io/gfx-asc", False)
def trace_all_gfx_io():
# These are all the IO ranges that get mapped into the UAT iommu pagetable
# Trace them so we can see if any of them are being written by the CPU
# page (8): fa010020000 ... fa010023fff -> 000000020e100000 [8000020e100447]
hv.trace_range(irange(0x20e100000, 0x4000), mode=TraceMode.SYNC)
# page (10): fa010028000 ... fa01002bfff -> 000000028e104000 [c000028e104447]
hv.trace_range(irange(0x20e100000, 0x4000), mode=TraceMode.SYNC)
# page (22): fa010058000 ... fa01005bfff -> 000000028e494000 [8000028e494447]
hv.trace_range(irange(0x28e494000, 0x4000), mode=TraceMode.SYNC)
# page (28): fa010070000 ... fa010073fff -> 0000000204d60000 [c0000204d60447]
hv.trace_range(irange(0x204d60000, 0x4000), mode=TraceMode.SYNC)
# page (30): fa010078000 ... fa01007bfff -> 0000000200000000 [c0000200000447]
# to
# page (83): fa01014c000 ... fa01014ffff -> 00000002000d4000 [c00002000d4447]
hv.trace_range(irange(0x200000000, 0xd5000), mode=TraceMode.SYNC)
# page (84): fa010150000 ... fa010153fff -> 0000000201000000 [c0000201000447]
#page (137): fa010224000 ... fa010227fff -> 00000002010d4000 [c00002010d4447]
hv.trace_range(irange(0x201000000, 0xd5000), mode=TraceMode.SYNC)
# page (138): fa010228000 ... fa01022bfff -> 0000000202000000 [c0000202000447]
# page (191): fa0102fc000 ... fa0102fffff -> 00000002020d4000 [c00002020d4447]
hv.trace_range(irange(0x202000000, 0xd5000), mode=TraceMode.SYNC)
# page (192): fa010300000 ... fa010303fff -> 0000000203000000 [c0000203000447]
hv.trace_range(irange(0x203000000, 0xd5000), mode=TraceMode.SYNC)
hv.trace_range(irange(0x204000000, 0xd5000), mode=TraceMode.SYNC)
hv.trace_range(irange(0x205000000, 0xd5000), mode=TraceMode.SYNC)
hv.trace_range(irange(0x206000000, 0xd5000), mode=TraceMode.SYNC)
hv.trace_range(irange(0x207000000, 0xd5000), mode=TraceMode.SYNC)
# page (464): fa010740000 ... fa010743fff -> 00000002643c4000 [c00002643c4447]
hv.trace_range(irange(0x2643c4000, 0x4000), mode=TraceMode.SYNC)
# page (466): fa010748000 ... fa01074bfff -> 000000028e3d0000 [c000028e3d0447]
hv.trace_range(irange(0x28e3d0000, 0x4000), mode=TraceMode.SYNC)
# page (468): fa010750000 ... fa010753fff -> 000000028e3c0000 [8000028e3c0447]
hv.trace_range(irange(0x28e3c0000, 0x4000), mode=TraceMode.SYNC)
# page (8): f9100020000 ... f9100023fff -> 0000000406000000 [60000406000447]
# page (263): f910041c000 ... f910041ffff -> 00000004063fc000 [600004063fc447]
hv.trace_range(irange(0x2643c4000, 0x63fc000), mode=TraceMode.SYNC)
def trace_gpu_irqs():
# Trace sgx interrupts
node = hv.adt["/arm-io/sgx"]
for irq in getattr(node, "interrupts"):
hv.trace_irq(f"{node.name} {irq}", irq, 1, hv.IRQTRACE_IRQ)
## Trace gfx-asc interrupts
#node = hv.adt["/arm-io/gfx-asc"]
#for irq in getattr(node, "interrupts"):
#hv.trace_irq(f"{node.name} {irq}", irq, 1, hv.IRQTRACE_IRQ)
trace_gpu_irqs()
|
4,374 |
get service key
|
from typing import Dict, List, Optional, Union
from hikaru.model.rel_1_26 import Container, Volume
from kubernetes.client import V1Container, V1Volume
from pydantic import BaseModel
class EnvVar(BaseModel):
name: str
value: str
class Resources(BaseModel):
limits: Dict[str, str]
requests: Dict[str, str]
def __eq__(self, other):
if not isinstance(other, Resources):
return NotImplemented
return self.limits == other.limits and self.requests == other.requests
class ContainerInfo(BaseModel):
name: str
image: str
env: List[EnvVar]
resources: Resources
ports: List[int] = []
@staticmethod
def get_container_info(container: V1Container) -> "ContainerInfo":
env = (
[EnvVar(name=env.name, value=env.value) for env in container.env if env.name and env.value]
if container.env
else []
)
limits = container.resources.limits if container.resources.limits else {}
requests = container.resources.requests if container.resources.requests else {}
resources = Resources(limits=limits, requests=requests)
ports = [p.container_port for p in container.ports] if container.ports else []
return ContainerInfo(name=container.name, image=container.image, env=env, resources=resources, ports=ports)
@staticmethod
def get_container_info_k8(container: Container) -> "ContainerInfo":
env = (
[EnvVar(name=env.name, value=env.value) for env in container.env if env.name and env.value]
if container.env
else []
)
limits = container.resources.limits if container.resources.limits else {}
requests = container.resources.requests if container.resources.requests else {}
resources = Resources(limits=limits, requests=requests)
ports = [p.containerPort for p in container.ports] if container.ports else []
return ContainerInfo(name=container.name, image=container.image, env=env, resources=resources, ports=ports)
def __eq__(self, other):
if not isinstance(other, ContainerInfo):
return NotImplemented
return (
self.name == other.name
and self.image == other.image
and self.resources == other.resources
and sorted(self.env, key=lambda x: x.name) == sorted(other.env, key=lambda x: x.name)
)
class VolumeInfo(BaseModel):
name: str
persistent_volume_claim: Optional[Dict[str, str]]
@staticmethod
def get_volume_info(volume: Union[V1Volume, Volume]) -> "VolumeInfo":
if hasattr(volume, "persistent_volume_claim") and hasattr(volume.persistent_volume_claim, "claim_name"):
return VolumeInfo(
name=volume.name, persistent_volume_claim={"claim_name": volume.persistent_volume_claim.claim_name}
)
return VolumeInfo(name=volume.name)
class ServiceConfig(BaseModel):
labels: Dict[str, str]
containers: List[ContainerInfo]
volumes: List[VolumeInfo]
def __eq__(self, other):
if not isinstance(other, ServiceConfig):
return NotImplemented
# pydantic comparison bug of nested lists and dicts not in the same order
return (
sorted(self.containers, key=lambda x: x.name) == sorted(other.containers, key=lambda x: x.name)
and sorted(self.volumes, key=lambda x: x.name) == sorted(other.volumes, key=lambda x: x.name)
            and self.labels == other.labels
)
class ServiceInfo(BaseModel):
resource_version: int = 0
name: str
service_type: str
namespace: str
classification: str = "None"
deleted: bool = False
service_config: Optional[ServiceConfig]
ready_pods: int = 0
total_pods: int = 0
is_helm_release: Optional[bool]
def METHOD_NAME(self) -> str:
return f"{self.namespace}/{self.service_type}/{self.name}"
def __eq__(self, other):
if not isinstance(other, ServiceInfo):
return NotImplemented
return (
self.name == other.name and
self.service_type == other.service_type and
self.namespace == other.namespace and
self.classification == other.classification and
self.is_helm_release == other.is_helm_release and
self.deleted == other.deleted and
self.service_config == other.service_config and
self.ready_pods == other.ready_pods and
self.total_pods == other.total_pods
)
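# Hedged example (values invented): METHOD_NAME above builds a stable identifier of
# the form "<namespace>/<service_type>/<name>" for a service.
def _example_service_key():
    info = ServiceInfo(name="web", service_type="Deployment", namespace="prod")
    return info.METHOD_NAME()  # -> "prod/Deployment/web"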
|
4,375 |
authentication details
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetConnectorResult',
'AwaitableGetConnectorResult',
'get_connector',
'get_connector_output',
]
@pulumi.output_type
class GetConnectorResult:
"""
The connector setting
"""
def __init__(__self__, METHOD_NAME=None, hybrid_compute_settings=None, id=None, name=None, type=None):
if METHOD_NAME and not isinstance(METHOD_NAME, dict):
raise TypeError("Expected argument 'authentication_details' to be a dict")
pulumi.set(__self__, "authentication_details", METHOD_NAME)
if hybrid_compute_settings and not isinstance(hybrid_compute_settings, dict):
raise TypeError("Expected argument 'hybrid_compute_settings' to be a dict")
pulumi.set(__self__, "hybrid_compute_settings", hybrid_compute_settings)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="authenticationDetails")
def METHOD_NAME(self) -> Optional[Any]:
"""
Settings for authentication management, these settings are relevant only for the cloud connector.
"""
return pulumi.get(self, "authentication_details")
@property
@pulumi.getter(name="hybridComputeSettings")
def hybrid_compute_settings(self) -> Optional['outputs.HybridComputeSettingsPropertiesResponse']:
"""
Settings for hybrid compute management. These settings are relevant only for Arc autoProvision (Hybrid Compute).
"""
return pulumi.get(self, "hybrid_compute_settings")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
class AwaitableGetConnectorResult(GetConnectorResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetConnectorResult(
METHOD_NAME=self.METHOD_NAME,
hybrid_compute_settings=self.hybrid_compute_settings,
id=self.id,
name=self.name,
type=self.type)
def get_connector(connector_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetConnectorResult:
"""
Details of a specific cloud account connector
Azure REST API version: 2020-01-01-preview.
:param str connector_name: Name of the cloud account connector
"""
__args__ = dict()
__args__['connectorName'] = connector_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:security:getConnector', __args__, opts=opts, typ=GetConnectorResult).value
return AwaitableGetConnectorResult(
METHOD_NAME=pulumi.get(__ret__, 'authentication_details'),
hybrid_compute_settings=pulumi.get(__ret__, 'hybrid_compute_settings'),
id=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_connector)
def get_connector_output(connector_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetConnectorResult]:
"""
Details of a specific cloud account connector
Azure REST API version: 2020-01-01-preview.
:param str connector_name: Name of the cloud account connector
"""
...
|
4,376 |
expand
|
import torch
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.distributions import constraints
from torch.distributions.exp_family import ExponentialFamily
__all__ = ["Dirichlet"]
# This helper is exposed for testing.
def _Dirichlet_backward(x, concentration, grad_output):
total = concentration.sum(-1, True).expand_as(concentration)
grad = torch._dirichlet_grad(x, concentration, total)
return grad * (grad_output - (x * grad_output).sum(-1, True))
class _Dirichlet(Function):
@staticmethod
def forward(ctx, concentration):
x = torch._sample_dirichlet(concentration)
ctx.save_for_backward(x, concentration)
return x
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
x, concentration = ctx.saved_tensors
return _Dirichlet_backward(x, concentration, grad_output)
class Dirichlet(ExponentialFamily):
r"""
Creates a Dirichlet distribution parameterized by concentration :attr:`concentration`.
Example::
>>> # xdoctest: +IGNORE_WANT("non-deterministic")
>>> m = Dirichlet(torch.tensor([0.5, 0.5]))
>>> m.sample() # Dirichlet distributed with concentration [0.5, 0.5]
tensor([ 0.1046, 0.8954])
Args:
concentration (Tensor): concentration parameter of the distribution
(often referred to as alpha)
"""
arg_constraints = {
"concentration": constraints.independent(constraints.positive, 1)
}
support = constraints.simplex
has_rsample = True
def __init__(self, concentration, validate_args=None):
if concentration.dim() < 1:
raise ValueError(
"`concentration` parameter must be at least one-dimensional."
)
self.concentration = concentration
batch_shape, event_shape = concentration.shape[:-1], concentration.shape[-1:]
super().__init__(batch_shape, event_shape, validate_args=validate_args)
def METHOD_NAME(self, batch_shape, _instance=None):
new = self._get_checked_instance(Dirichlet, _instance)
batch_shape = torch.Size(batch_shape)
new.concentration = self.concentration.METHOD_NAME(batch_shape + self.event_shape)
super(Dirichlet, new).__init__(
batch_shape, self.event_shape, validate_args=False
)
new._validate_args = self._validate_args
return new
def rsample(self, sample_shape=()):
shape = self._extended_shape(sample_shape)
concentration = self.concentration.METHOD_NAME(shape)
return _Dirichlet.apply(concentration)
def log_prob(self, value):
if self._validate_args:
self._validate_sample(value)
return (
torch.xlogy(self.concentration - 1.0, value).sum(-1)
+ torch.lgamma(self.concentration.sum(-1))
- torch.lgamma(self.concentration).sum(-1)
)
@property
def mean(self):
return self.concentration / self.concentration.sum(-1, True)
@property
def mode(self):
concentrationm1 = (self.concentration - 1).clamp(min=0.0)
mode = concentrationm1 / concentrationm1.sum(-1, True)
mask = (self.concentration < 1).all(axis=-1)
mode[mask] = torch.nn.functional.one_hot(
mode[mask].argmax(axis=-1), concentrationm1.shape[-1]
).to(mode)
return mode
@property
def variance(self):
con0 = self.concentration.sum(-1, True)
return (
self.concentration
* (con0 - self.concentration)
/ (con0.pow(2) * (con0 + 1))
)
def entropy(self):
k = self.concentration.size(-1)
a0 = self.concentration.sum(-1)
return (
torch.lgamma(self.concentration).sum(-1)
- torch.lgamma(a0)
- (k - a0) * torch.digamma(a0)
- ((self.concentration - 1.0) * torch.digamma(self.concentration)).sum(-1)
)
@property
def _natural_params(self):
return (self.concentration,)
def _log_normalizer(self, x):
return x.lgamma().sum(-1) - torch.lgamma(x.sum(-1))
|
4,377 |
get replication recovery services provider
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetReplicationRecoveryServicesProviderResult',
'AwaitableGetReplicationRecoveryServicesProviderResult',
'get_replication_recovery_services_provider',
'get_replication_recovery_services_provider_output',
]
@pulumi.output_type
class GetReplicationRecoveryServicesProviderResult:
"""
Provider details.
"""
def __init__(__self__, id=None, location=None, name=None, properties=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource Location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.RecoveryServicesProviderPropertiesResponse':
"""
Provider properties.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource Type
"""
return pulumi.get(self, "type")
class AwaitableGetReplicationRecoveryServicesProviderResult(GetReplicationRecoveryServicesProviderResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetReplicationRecoveryServicesProviderResult(
id=self.id,
location=self.location,
name=self.name,
properties=self.properties,
type=self.type)
def METHOD_NAME(fabric_name: Optional[str] = None,
provider_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetReplicationRecoveryServicesProviderResult:
"""
Gets the details of registered recovery services provider.
:param str fabric_name: Fabric name.
:param str provider_name: Recovery services provider name.
:param str resource_group_name: The name of the resource group where the recovery services vault is present.
:param str resource_name: The name of the recovery services vault.
"""
__args__ = dict()
__args__['fabricName'] = fabric_name
__args__['providerName'] = provider_name
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:recoveryservices/v20230401:getReplicationRecoveryServicesProvider', __args__, opts=opts, typ=GetReplicationRecoveryServicesProviderResult).value
return AwaitableGetReplicationRecoveryServicesProviderResult(
id=pulumi.get(__ret__, 'id'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
properties=pulumi.get(__ret__, 'properties'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(METHOD_NAME)
def get_replication_recovery_services_provider_output(fabric_name: Optional[pulumi.Input[str]] = None,
provider_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetReplicationRecoveryServicesProviderResult]:
"""
Gets the details of registered recovery services provider.
:param str fabric_name: Fabric name.
:param str provider_name: Recovery services provider name.
:param str resource_group_name: The name of the resource group where the recovery services vault is present.
:param str resource_name: The name of the recovery services vault.
"""
...
|
4,378 |
logout with server docker mode
|
import os
import platform
import click
from fedml.computing.scheduler.comm_utils import sys_utils
from .device_server_constants import ServerConstants
from .device_server_runner import FedMLServerRunner
def login_with_server_docker_mode(userid, version, docker_rank):
account_id = userid
# Get os name
sys_name = platform.system()
if sys_name == "Darwin":
sys_name = "MacOS"
# Get data directory
cur_dir = ServerConstants.get_fedml_home_dir()
# Set default version if the version argument is empty
if version == "":
version = "release"
# Set registry server and image path based on the version.
if version == "dev":
image_dir = "/x6k8q1x9"
elif version == "release":
image_dir = "/x6k8q1x9"
elif version == "test":
image_dir = "/s8w2q1c1"
registry_server = "public.ecr.aws"
# Set image tags based on the version
tag = version
# Set client agent image path and client image path
client_image_name = "fedml-edge-server:" + tag
image_path = image_dir + "/" + client_image_name
edge_server_image = registry_server + image_path
# Get device id based on your machine MAC address.
os_name = sys_name
device_id = "{}@Rank{}".format(FedMLServerRunner.get_device_id(), str(docker_rank))
# Set environment variables for client agent docker
env_account_id = account_id
env_version = version
env_current_running_dir = cur_dir
env_current_os_name = os_name
env_current_device_id = device_id
# Cleanup the running docker
click.echo("Your FedML edge server is being deployed, please wait for a moment...")
# Pull client agent docker
fedml_docker_name = "fedml_edge_server_{}".format(str(docker_rank))
click.echo("Now is pulling fedml docker server.........................")
os.system("docker pull " + edge_server_image)
click.echo("Now is opening fedml docker server.........................")
docker_stop_proc = ServerConstants.exec_console_with_shell_script_list(['docker', 'stop', fedml_docker_name])
_, _, _ = ServerConstants.get_console_pipe_out_err_results(docker_stop_proc)
docker_rm_proc = ServerConstants.exec_console_with_shell_script_list(['docker', 'rm', fedml_docker_name])
_, _, _ = ServerConstants.get_console_pipe_out_err_results(docker_rm_proc)
# Compose the command for running the client agent docker
fedml_server_home_dir = os.path.join(env_current_running_dir, "docker", "rank-"+str(docker_rank))
os.makedirs(fedml_server_home_dir, exist_ok=True)
docker_run_cmd = "docker run --name " + fedml_docker_name + \
" -v " + fedml_server_home_dir + ":/home/fedml/fedml-server" + \
" --env ACCOUNT_ID=" + str(env_account_id) + \
" --env FEDML_VERSION=" + env_version + \
" --env SERVER_DEVICE_ID=" + env_current_device_id + \
" --env SERVER_OS_NAME=" + env_current_os_name + \
" -d " + edge_server_image
# Run the client agent docker
os.system(docker_run_cmd)
# Get the running state for the client agent docker
docker_ps_process = ServerConstants.exec_console_with_shell_script_list(['docker', 'ps', '-a'],
should_capture_stdout_err=True)
ret_code, out, err = ServerConstants.get_console_pipe_out_err_results(docker_ps_process)
is_deployment_ok = False
if out is not None:
out_str = sys_utils.decode_our_err_result(out)
if str(out_str).find(fedml_docker_name) != -1 and str(out_str).find("Up") != -1:
is_deployment_ok = True
if err is not None:
err_str = sys_utils.decode_our_err_result(err)
if str(err_str).find(fedml_docker_name) != -1 and str(err_str).find("Up") != -1:
is_deployment_ok = True
if is_deployment_ok:
print("\n\nCongratulations, your device is connected to the FedML MLOps platform successfully!")
print(
"Your unique device ID is "
+ str(env_current_device_id)
+ "\n"
)
logs_with_server_docker_mode(docker_rank)
else:
click.echo("Oops, you failed to deploy the FedML client agent.")
click.echo("Please check whether your Docker Application is installed and running normally!")
def METHOD_NAME(docker_rank):
fedml_docker_name = "fedml_edge_server_{}".format(str(docker_rank))
click.echo("Logout.........................")
os.system("docker stop {}".format(fedml_docker_name))
os.system("docker rm {}".format(fedml_docker_name))
def logs_with_server_docker_mode(docker_rank):
fedml_docker_name = "fedml_edge_server_{}".format(str(docker_rank))
docker_name_format = 'name={}'.format(fedml_docker_name)
docker_name_proc = ServerConstants.exec_console_with_shell_script_list(['docker', 'ps', '-aqf', docker_name_format],
should_capture_stdout_err=True)
_, out_id, err_id = ServerConstants.get_console_pipe_out_err_results(docker_name_proc)
if out_id is not None:
out_id_str = sys_utils.decode_our_err_result(out_id)
docker_logs_cmd = 'docker logs -f {}'.format(out_id_str)
os.system(docker_logs_cmd)
if __name__ == "__main__":
login_with_server_docker_mode("214", "dev", 1)
#logout_with_server_docker_mode(1)
|
4,379 |
reset state
|
from keras_core import backend
from keras_core import initializers
from keras_core import ops
from keras_core.api_export import keras_core_export
from keras_core.utils.naming import auto_name
from keras_core.utils.tracking import Tracker
@keras_core_export(["keras_core.Metric", "keras_core.metrics.Metric"])
class Metric:
"""Encapsulates metric logic and state.
Args:
name: (Optional) string name of the metric instance.
dtype: (Optional) data type of the metric result.
Standalone usage:
```python
m = SomeMetric(...)
for input in ...:
m.update_state(input)
print('Final result: ', m.result())
```
Usage with `compile()` API:
```python
model = keras_core.Sequential()
model.add(keras_core.layers.Dense(64, activation='relu'))
model.add(keras_core.layers.Dense(64, activation='relu'))
model.add(keras_core.layers.Dense(10, activation='softmax'))
model.compile(optimizer=keras_core.optimizers.RMSprop(0.01),
loss=keras_core.losses.CategoricalCrossentropy(),
metrics=[keras_core.metrics.CategoricalAccuracy()])
data = np.random.random((1000, 32))
labels = np.random.random((1000, 10))
model.fit(data, labels, epochs=10)
```
To be implemented by subclasses:
* `__init__()`: All state variables should be created in this method by
calling `self.add_variable()` like: `self.var = self.add_variable(...)`
* `update_state()`: Has all updates to the state variables like:
`self.var.assign(...)`.
* `result()`: Computes and returns a scalar value or a dict of scalar values
for the metric from the state variables.
Example subclass implementation:
```python
class BinaryTruePositives(Metric):
def __init__(self, name='binary_true_positives', **kwargs):
super().__init__(name=name, **kwargs)
self.true_positives = self.add_variable(
shape=(),
initializer='zeros',
name='true_positives'
)
def update_state(self, y_true, y_pred, sample_weight=None):
y_true = ops.cast(y_true, "bool")
y_pred = ops.cast(y_pred, "bool")
values = ops.logical_and(
ops.equal(y_true, True), ops.equal(y_pred, True))
values = ops.cast(values, self.dtype)
if sample_weight is not None:
sample_weight = ops.cast(sample_weight, self.dtype)
sample_weight = ops.broadcast_to(
sample_weight, ops.shape(values)
)
values = ops.multiply(values, sample_weight)
self.true_positives.assign(self.true_positives + ops.sum(values))
def result(self):
return self.true_positives
```
"""
def __init__(self, dtype=None, name=None):
self.name = name or auto_name(self.__class__.__name__)
self._dtype = dtype
self._metrics = []
self._variables = []
self._tracker = Tracker(
{
"variables": (
lambda x: isinstance(x, backend.Variable),
self._variables,
),
"metrics": (lambda x: isinstance(x, Metric), self._metrics),
}
)
def METHOD_NAME(self):
"""Reset all of the metric state variables.
This function is called between epochs/steps,
when a metric is evaluated during training.
"""
for v in self.variables:
v.assign(ops.zeros(v.shape, dtype=v.dtype))
def update_state(self, *args, **kwargs):
"""Accumulate statistics for the metric."""
raise NotImplementedError
def stateless_update_state(self, metric_variables, *args, **kwargs):
if len(metric_variables) != len(self.variables):
raise ValueError(
"Argument `metric_variables` must be a list of tensors "
f"corresponding 1:1 to {self.__class__.__name__}().variables. "
f"Received list with length {len(metric_variables)}, but "
f"expected {len(self.variables)} variables."
)
# Gather variable mapping
mapping = list(zip(self.variables, metric_variables))
# Call in stateless scope
with backend.StatelessScope(state_mapping=mapping) as scope:
self.update_state(*args, **kwargs)
# Gather updated variables
metric_variables = []
for v in self.variables:
new_v = scope.get_current_value(v)
if new_v is not None:
metric_variables.append(new_v)
else:
metric_variables.append(v)
return metric_variables
def result(self):
"""Compute the current metric value.
Returns:
A scalar tensor, or a dictionary of scalar tensors.
"""
raise NotImplementedError
def stateless_result(self, metric_variables):
if len(metric_variables) != len(self.variables):
raise ValueError(
"Argument `metric_variables` must be a list of tensors "
f"corresponding 1:1 to {self.__class__.__name__}().variables. "
f"Received list with length {len(metric_variables)}, but "
f"expected {len(self.variables)} variables."
)
# Gather variable mapping
mapping = list(zip(self.variables, metric_variables))
# Call in stateless scope
with backend.StatelessScope(state_mapping=mapping):
res = self.result()
return res
@property
def dtype(self):
return self._dtype
def add_variable(self, shape, initializer, dtype=None, name=None):
self._check_super_called()
with backend.name_scope(self.name, caller=self):
initializer = initializers.get(initializer)
variable = backend.Variable(
initializer=initializer,
shape=shape,
dtype=dtype,
trainable=False,
name=name,
)
# Prevent double-tracking
self._tracker.add_to_store("variables", variable)
return variable
def add_weight(self, shape=(), initializer=None, dtype=None, name=None):
# Backwards compatibility alias
return self.add_variable(
shape=shape, initializer=initializer, dtype=dtype, name=name
)
@property
def variables(self):
variables = self._variables[:]
for metric in self._metrics:
variables.extend(metric._variables)
return variables
def __call__(self, *args, **kwargs):
self._check_super_called()
self.update_state(*args, **kwargs)
return self.result()
def get_config(self):
"""Return the serializable config of the metric."""
return {"name": self.name, "dtype": self.dtype}
@classmethod
def from_config(cls, config):
return cls(**config)
def __setattr__(self, name, value):
# Track Variables, Layers, Metrics
if hasattr(self, "_tracker"):
value = self._tracker.track(value)
return super().__setattr__(name, value)
def _check_super_called(self):
if not hasattr(self, "_tracker"):
raise RuntimeError(
"You forgot to call `super().__init__()` "
"in the `__init__()` method. Go add it!"
)
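# Hedged usage sketch (names and flow are assumptions): how a purely functional
# training loop might drive the stateless_* helpers above without mutating the
# metric's own variables.
def _example_stateless_metric_step(metric, metric_variables, *update_args):
    # Compute the updated variable values for this batch...
    metric_variables = metric.stateless_update_state(metric_variables, *update_args)
    # ...then read the result back from the externally held state.
    return metric.stateless_result(metric_variables), metric_variables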
|
4,380 |
get sample list
|
from flask import Blueprint, Response, jsonify, request
from mxcubecore import HardwareRepository as HWR
def init_route(app, server, url_prefix):
bp = Blueprint("sample_changer", __name__, url_prefix=url_prefix)
@bp.route("/samples_list", methods=["GET"])
@server.restrict
def METHOD_NAME():
app.sample_changer.METHOD_NAME()
return jsonify(app.lims.sample_list_get())
@bp.route("/sync_with_crims", methods=["GET"])
@server.require_control
@server.restrict
def sync_with_crims():
return app.sample_changer.sync_with_crims()
@bp.route("/state", methods=["GET"])
@server.restrict
def get_sc_state():
state = HWR.beamline.sample_changer.get_status().upper()
return jsonify({"state": state})
@bp.route("/loaded_sample", methods=["GET"])
@server.restrict
def get_loaded_sample():
address, barcode = app.sample_changer.get_loaded_sample()
return jsonify({"address": address, "barcode": barcode})
@bp.route("/contents", methods=["GET"])
@server.restrict
def get_sc_contents_view():
return jsonify(app.sample_changer.get_sc_contents())
@bp.route("/select/<loc>", methods=["GET"])
    @server.require_control
@server.restrict
def select_location(loc):
HWR.beamline.sample_changer.select(loc)
return app.sample_changer.get_sc_contents()
@bp.route("/scan/<loc>", methods=["GET"])
@server.require_control
@server.restrict
def scan_location(loc):
# do a recursive scan
HWR.beamline.sample_changer.scan(loc, True)
return app.sample_changer.get_sc_contents()
@bp.route("/unmount_current", methods=["POST"])
@server.require_control
@server.restrict
def unmount_current():
try:
res = app.sample_changer.unmount_current()
except Exception as ex:
res = (
"Cannot unload sample",
409,
{"Content-Type": "application/json", "message": str(ex)},
)
return jsonify(res)
@bp.route("/mount", methods=["POST"])
@server.require_control
@server.restrict
def mount_sample():
resp = Response(status=200)
try:
resp = jsonify(app.sample_changer.mount_sample(request.get_json()))
except Exception as ex:
resp = (
"Cannot load sample",
409,
{"Content-Type": "application/json", "message": str(ex)},
)
return resp
@bp.route("/unmount", methods=["POST"])
@server.require_control
@server.restrict
def unmount_sample():
try:
resp = jsonify(
app.sample_changer.unmount_sample(request.get_json()["sample"])
)
except Exception as ex:
return (
"Cannot unload sample",
409,
{"Content-Type": "application/json", "message": str(ex)},
)
return resp
@bp.route("/capacity", methods=["GET"])
@server.restrict
def get_sc_capacity():
try:
ret = app.sample_changer.get_capacity()
except Exception:
return Response(status=409)
else:
return jsonify(capacity=ret)
@bp.route("/get_maintenance_cmds", methods=["GET"])
@server.restrict
def get_maintenance_cmds():
try:
ret = app.sample_changer.get_maintenance_cmds()
except Exception:
return Response(status=409)
else:
return jsonify(cmds=ret)
@bp.route("/get_global_state", methods=["GET"])
@server.restrict
def get_global_state():
try:
ret = app.sample_changer.get_global_state()
if ret:
state, cmdstate, msg = ret
else:
return jsonify(ret)
except Exception:
return Response(status=409)
else:
return jsonify(state=state, commands_state=cmdstate, message=msg)
@bp.route("/get_initial_state", methods=["GET"])
@server.restrict
def get_initial_state():
return jsonify(app.sample_changer.get_initial_state())
@bp.route("/send_command/<cmdparts>/<args>", methods=["GET"])
@server.require_control
@server.restrict
def send_command(cmdparts, args=None):
try:
ret = HWR.beamline.sample_changer_maintenance.send_command(cmdparts, args)
except Exception as ex:
msg = str(ex)
msg = msg.replace("\n", " - ")
return (
"Cannot execute command",
406,
{"Content-Type": "application/json", "message": msg},
)
else:
return jsonify(response=ret)
return bp
|
4,381 |
ra dec2 xy
|
""" Plots points on an all-sky sinusoidal projection plot. """
import numpy as np
import matplotlib.pyplot as plt
class AllSkyPlot(object):
def __init__(self, ax_handle=None):
self.ra0 = 180.0
if ax_handle is None:
self.fig = plt.figure()
self.ax = self.fig.add_subplot(1, 1, 1, facecolor='black')
else:
self.ax = ax_handle
self.fig = plt.gcf()
# Set background color
self.fig.patch.set_facecolor('black')
# Set equal aspect ratio
self.ax.set_aspect('equal')
# # Set tick color
# self.ax.tick_params(axis='x', colors='0.5')
# self.ax.tick_params(axis='y', colors='0.5')
# Turn off ticks
self.ax.tick_params(labeltop=False, labelright=False, labelbottom=False, labelleft=False)
self.plotGrid()
def METHOD_NAME(self, ra, dec):
# Compute projected coordinates
x = ((180 - ra)%360 - self.ra0)*np.cos(np.radians(dec))
y = dec
return x, y
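    # Hedged worked example of the sinusoidal projection above (values purely
    # illustrative): with ra0 = 180, a point at RA = 90 deg, Dec = 60 deg maps to
    # x = ((180 - 90) % 360 - 180) * cos(60 deg) = -90 * 0.5 = -45 and y = 60.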
def plot(self, ra_array, dec_array, max_break_deg=30, **kwargs):
# If there are more than one point, check for 0/360 wraparounds in RA
if isinstance(ra_array, list) or isinstance(ra_array, np.ndarray):
ra_array = np.array(ra_array)
ra_array = (180 - ra_array)%360
dec_array = np.array(dec_array)
coord_list = []
# Find large breaks in RA and plot them separately
ra_diff = np.abs(ra_array[:-1] - ra_array[1:])
break_indices = np.where(ra_diff > max_break_deg)[0]
if not len(break_indices):
coord_list = [[ra_array, dec_array]]
else:
prev_break_idx = 0
for break_idx in break_indices:
ra_temp = ra_array[prev_break_idx:break_idx + 1]
dec_temp = dec_array[prev_break_idx:break_idx + 1]
prev_break_idx = break_idx + 1
coord_list.append([ra_temp, dec_temp])
coord_list.append([ra_array[break_idx + 1:], dec_array[break_idx + 1:]])
else:
coord_list = [[180 - ra_array, dec_array]]
# Plot all segments
for i, (ra_temp, dec_temp) in enumerate(coord_list):
x, y = self.METHOD_NAME(180 - ra_temp, dec_temp)
# Make sure that all plotted lines have the same color
if i > 0:
color = plt_handle[0].get_color()
# Add color to kwargs
if 'color' not in kwargs:
kwargs['color'] = color
plt_handle = self.ax.plot(x, y, **kwargs)
def scatter(self, ra_array, dec_array, **kwargs):
x, y = self.METHOD_NAME(ra_array, dec_array)
self.ax.scatter(x, y, **kwargs)
def plotGrid(self, step=15):
# Plot a meridian and parallel grid
ra_grid = np.sort(np.append(np.arange(0, 360 + step, step), [180.0001]))
dec_grid = np.arange(-90, 90 + step, step)
# Plot meridians
for ra in ra_grid[:-1]:
# Increase number of points for meridian plot so they are smoother
step_finer = step/5
dec_arr = np.arange(-90, 90 + step_finer, step_finer)
ra_temp = np.zeros_like(dec_arr) + ra
x_grid, y_grid = self.METHOD_NAME(ra_temp, dec_arr)
self.ax.plot(x_grid, y_grid, linestyle='dotted', alpha=0.5, color='silver')
# Plot parallels
for dec in dec_grid:
dec_temp = np.zeros_like(ra_grid) + dec
self.plot(ra_grid, dec_temp, linestyle='dotted', alpha=0.5, color='silver')
# Plot dec ticks
for dec in dec_grid[::2]:
x, y = self.METHOD_NAME(0, dec)
if dec > 0:
va = 'bottom'
else:
va = 'top'
self.ax.text(x, y, r"{:+d}$^\circ$".format(dec), color='0.5', ha='center', va=va, size=7)
# Plot every other RA tick and skip 0 and 360
ra_ticks = np.sort(np.append(np.arange(0, 360, 2*step), [180.0001]))
for ra in ra_ticks:
# Offset RA so 0 starts in the middle and increases to the left
#ra_text = (180 - ra)%360
x, y = self.METHOD_NAME(ra, 0)
self.ax.text(x, y, r"{:+d}$^\circ$".format(int(ra)), color='0.5', ha='center', va='top', size=7)
def beautify(self):
self.ax.set_xlim([-180, 180])
self.ax.set_ylim([-90, 90])
self.fig.tight_layout()
def show(self):
self.beautify()
plt.show()
if __name__ == "__main__":
allsky_plot = AllSkyPlot()
ra_array = np.arange(0, 2000, 1)
dec_array = np.linspace(-90, 90, len(ra_array))
allsky_plot.plot(ra_array, dec_array, color='green', linestyle='dashed')
allsky_plot.show()
|
4,382 |
test slope
|
# Copyright (C) 2021 Open Source Robotics Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import unittest
from gz.math7 import Line2d
from gz.math7 import Vector2d
class TestLine2d(unittest.TestCase):
def test_construction(self):
line_a = Line2d(0, 0, 10, 10)
self.assertAlmostEqual(line_a[0].x(), 0.0)
self.assertAlmostEqual(line_a[0].y(), 0.0)
self.assertAlmostEqual(line_a[1].x(), 10.0)
self.assertAlmostEqual(line_a[1].y(), 10.0)
line_b = Line2d(Vector2d(1, 2), Vector2d(3, 4))
self.assertAlmostEqual(line_b[0].x(), 1.0)
self.assertAlmostEqual(line_b[0].y(), 2.0)
self.assertAlmostEqual(line_b[1].x(), 3.0)
self.assertAlmostEqual(line_b[1].y(), 4.0)
self.assertAlmostEqual(line_b[2].x(), line_b[1].x())
def test_length(self):
line_a = Line2d(0, 0, 10, 10)
self.assertAlmostEqual(line_a.length(), math.sqrt(200), delta=1e-10)
def METHOD_NAME(self):
line = Line2d(0, 0, 10, 10)
self.assertAlmostEqual(line.slope(), 1.0, delta=1e-10)
line = Line2d(0, 0, 0, 10)
self.assertTrue(math.isnan(line.slope()))
line = Line2d(-10, 0, 100, 0)
self.assertAlmostEqual(line.slope(), 0.0)
def test_parallel_line(self):
# Line is always parallel with itself
line = Line2d(0, 0, 10, 0)
self.assertTrue(line.parallel(line, 1e-10))
# Degenerate line segment
# Still expect Line is parallel with itself
line = Line2d(0, 0, 0, 0)
self.assertTrue(line.parallel(line, 1e-10))
line_a = Line2d(0, 0, 10, 0)
line_b = Line2d(0, 0, 10, 0)
self.assertTrue(line_a.parallel(line_b, 1e-10))
line_b.set(0, 0, 0, 10)
self.assertFalse(line_a.parallel(line_b))
line_b.set(0, 10, 10, 10)
self.assertTrue(line_a.parallel(line_b))
line_b.set(0, 10, 10, 10.00001)
self.assertFalse(line_a.parallel(line_b, 1e-10))
self.assertFalse(line_a.parallel(line_b))
self.assertTrue(line_a.parallel(line_b, 1e-3))
def test_collinear_line(self):
# Line is always collinear with itself
line = Line2d(0, 0, 10, 0)
self.assertTrue(line.collinear(line, 1e-10))
line_a = Line2d(0, 0, 10, 0)
line_b = Line2d(0, 0, 10, 0)
self.assertTrue(line_a.collinear(line_b, 1e-10))
line_b.set(0, 10, 10, 10)
self.assertFalse(line_a.collinear(line_b))
line_b.set(9, 0, 10, 0.00001)
self.assertFalse(line_a.collinear(line_b, 1e-10))
self.assertFalse(line_a.collinear(line_b))
self.assertTrue(line_a.collinear(line_b, 1e-3))
def test_collinear_point(self):
line_a = Line2d(0, 0, 10, 0)
pt = Vector2d(0, 0)
self.assertTrue(line_a.collinear(pt))
pt_line = Line2d(pt, pt)
self.assertTrue(line_a.collinear(pt_line))
pt.set(1000, 0)
self.assertTrue(line_a.collinear(pt, 1e-10))
pt_line = Line2d(pt, pt)
self.assertTrue(line_a.parallel(pt_line))
self.assertFalse(line_a.intersect(pt_line))
self.assertFalse(line_a.collinear(pt_line, 1e-10))
pt.set(10, 0)
pt_line.set(pt, pt)
self.assertTrue(line_a.collinear(pt_line, 1e-10))
pt.set(0, 0.00001)
self.assertFalse(line_a.collinear(pt))
self.assertTrue(line_a.collinear(pt, 1e-3))
pt_line = Line2d(pt, pt)
self.assertFalse(line_a.collinear(pt_line))
self.assertTrue(line_a.parallel(pt_line))
self.assertFalse(line_a.intersect(pt_line))
self.assertTrue(line_a.intersect(pt_line, 1e-2))
self.assertTrue(line_a.collinear(pt_line, 1e-3))
pt.set(0, -0.00001)
self.assertFalse(line_a.collinear(pt))
self.assertTrue(line_a.collinear(pt, 1e-3))
pt_line = Line2d(pt, pt)
self.assertFalse(line_a.collinear(pt_line))
self.assertTrue(line_a.collinear(pt_line, 1e-4))
def test_intersect(self):
pt = Vector2d()
# parallel horizontal lines
line_a = Line2d(1, 1, 2, 1)
line_b = Line2d(1, 2, 2, 2)
self.assertFalse(line_a.intersect(line_b, pt))
# parallel vertical lines
line_a.set(1, 1, 1, 10)
line_b.set(2, 1, 2, 10)
self.assertFalse(line_a.intersect(line_b, pt))
# Two lines that form an inverted T with a gap
line_a.set(1, 1, 1, 10)
line_b.set(0, 0, 2, 0)
self.assertFalse(line_a.intersect(line_b, pt))
# Two lines that form a T with a gap
line_a.set(1, 1, 1, 10)
line_b.set(0, 10.1, 2, 10.1)
self.assertFalse(line_a.intersect(line_b, pt))
# Two lines that form an inverted T with a gap
line_a.set(0, -10, 0, 10)
line_b.set(1, 0, 10, 0)
self.assertFalse(line_a.intersect(line_b, pt))
# Two lines that form a T with a gap
line_a.set(0, -10, 0, 10)
line_b.set(-1, 0, -10, 0)
self.assertFalse(line_a.intersect(line_b, pt))
# Two collinear lines, one starts where the other stopped
line_a.set(1, 1, 1, 10)
line_b.set(1, 10, 1, 11)
self.assertTrue(line_a.intersect(line_b, pt))
self.assertEqual(pt, Vector2d(1, 10))
# Two collinear lines, one overlaps the other
line_a.set(0, 0, 0, 10)
line_b.set(0, 9, 0, 11)
self.assertTrue(line_a.intersect(line_b, pt))
self.assertEqual(pt, Vector2d(0, 9))
# Two collinear lines, one overlaps the other
line_a.set(0, 0, 0, 10)
line_b.set(0, -10, 0, 1)
self.assertTrue(line_a.intersect(line_b, pt))
self.assertEqual(pt, Vector2d(0, 1))
# Two intersecting lines
line_a.set(0, 0, 10, 10)
line_b.set(0, 10, 10, 0)
self.assertTrue(line_a.intersect(line_b, pt))
self.assertEqual(pt, Vector2d(5, 5))
def test_equality(self):
line_a = Line2d(1, 1, 2, 1)
line_b = Line2d(1, 2, 2, 2)
self.assertTrue(line_a != line_b)
self.assertTrue(line_a == line_a)
line_b.set(1, 1, 2, 1.1)
self.assertFalse(line_a == line_b)
line_b.set(1, 1, 2.1, 1)
self.assertFalse(line_a == line_b)
line_b.set(1, 1.1, 2, 1)
self.assertFalse(line_a == line_b)
line_b.set(1.1, 1, 2, 1)
self.assertFalse(line_a == line_b)
def test_serialization(self):
line = Line2d(0, 1, 2, 3)
self.assertEqual(str(line), "0 1 2 3")
if __name__ == '__main__':
unittest.main()
|
4,383 |
probe
|
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
from osbenchmark import exceptions
from osbenchmark.utils import io, process
MIN_REQUIRED_MAJOR_VERSION = 2
VERSION_REGEX = r'.* ([0-9]+)\.([0-9]+)\..*'
def probed(f):
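# Decorator: before invoking the wrapped function, verify that git can be run and that
# its major version is at least MIN_REQUIRED_MAJOR_VERSION; raise SystemSetupError otherwise.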
def METHOD_NAME(src, *args, **kwargs):
# Probe for -C
try:
out, _, status = process.run_subprocess_with_out_and_err(
"git -C {} --version".format(io.escape_path(src)))
except FileNotFoundError:
status = 1
if status != 0:
raise exceptions.SystemSetupError("Error invoking 'git', please install (or re-install).")
match = re.search(VERSION_REGEX, out)
if not match or int(match.group(1)) < MIN_REQUIRED_MAJOR_VERSION:
raise exceptions.SystemSetupError("OpenSearch Benchmark requires at least version 2 of git. "
f"You have {out}. Please update git.")
return f(src, *args, **kwargs)
return METHOD_NAME
def is_working_copy(src):
"""
Checks whether the given directory is a git working copy.
:param src: A directory. May or may not exist.
:return: True iff the given directory is a git working copy.
"""
return os.path.exists(src) and os.path.exists(os.path.join(src, ".git"))
def clone(src, remote):
io.ensure_dir(src)
# Don't swallow subprocess output, user might need to enter credentials...
if process.run_subprocess_with_logging("git clone %s %s" % (remote, io.escape_path(src))):
raise exceptions.SupplyError("Could not clone from [%s] to [%s]" % (remote, src))
@probed
def fetch(src, remote="origin"):
if process.run_subprocess_with_logging("git -C {0} fetch --prune --tags {1}".format(io.escape_path(src), remote)):
raise exceptions.SupplyError("Could not fetch source tree from [%s]" % remote)
@probed
def checkout(src_dir, branch="main"):
if process.run_subprocess_with_logging("git -C {0} checkout {1}".format(io.escape_path(src_dir), branch)):
raise exceptions.SupplyError("Could not checkout [%s]. Do you have uncommitted changes?" % branch)
@probed
def rebase(src_dir, remote="origin", branch="main"):
checkout(src_dir, branch)
if process.run_subprocess_with_logging("git -C {0} rebase {1}/{2}".format(io.escape_path(src_dir), remote, branch)):
raise exceptions.SupplyError("Could not rebase on branch [%s]" % branch)
@probed
def pull(src_dir, remote="origin", branch="main"):
fetch(src_dir, remote)
rebase(src_dir, remote, branch)
@probed
def pull_ts(src_dir, ts):
fetch(src_dir)
clean_src = io.escape_path(src_dir)
revision = process.run_subprocess_with_output(
"git -C {0} rev-list -n 1 --before=\"{1}\" --date=iso8601 origin/main".format(clean_src, ts))[0].strip()
if process.run_subprocess_with_logging("git -C {0} checkout {1}".format(clean_src, revision)):
raise exceptions.SupplyError("Could not checkout source tree for timestamped revision [%s]" % ts)
@probed
def pull_revision(src_dir, revision):
fetch(src_dir)
if process.run_subprocess_with_logging("git -C {0} checkout {1}".format(io.escape_path(src_dir), revision)):
raise exceptions.SupplyError("Could not checkout source tree for revision [%s]" % revision)
@probed
def head_revision(src_dir):
return process.run_subprocess_with_output("git -C {0} rev-parse --short HEAD".format(
io.escape_path(src_dir)))[0].strip()
@probed
def current_branch(src_dir):
return process.run_subprocess_with_output("git -C {0} rev-parse --abbrev-ref HEAD".format(
io.escape_path(src_dir)))[0].strip()
@probed
def branches(src_dir, remote=True):
clean_src = io.escape_path(src_dir)
if remote:
# Because of compatibility issues with Git 2.40.0+, updated --format='%(refname:short)' to --format='%(refname)'
return _cleanup_remote_branch_names(process.run_subprocess_with_output(
"git -C {src} for-each-ref refs/remotes/ --format='%(refname)'".format(src=clean_src)))
else:
return _cleanup_local_branch_names(
process.run_subprocess_with_output(
"git -C {src} for-each-ref refs/heads/ --format='%(refname:short)'".format(src=clean_src)))
@probed
def tags(src_dir):
return _cleanup_tag_names(process.run_subprocess_with_output("git -C {0} tag".format(io.escape_path(src_dir))))
def _cleanup_remote_branch_names(branch_names):
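# Keep only the branch name after the last '/' (dropping the remote prefix) and skip the symbolic HEAD reference.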
return [(b[b.rindex("/") + 1:]).strip() for b in branch_names if not b.endswith("/HEAD")]
def _cleanup_local_branch_names(branch_names):
return [b.strip() for b in branch_names if not b.endswith("HEAD")]
def _cleanup_tag_names(tag_names):
return [t.strip() for t in tag_names]
|
4,384 |
url parameters
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network vpn-connection shared-key show",
)
class Show(AAZCommand):
"""Retrieve a VPN connection shared key.
:example: View the shared key of a connection.
az network vpn-connection shared-key show -g MyResourceGroup --connection-name MyConnection
:example: Retrieve a VPN connection shared key.
az network vpn-connection shared-key show --connection-name MyConnection --resource-group MyResourceGroup --subscription MySubscription
"""
_aaz_info = {
"version": "2015-06-15",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/connections/{}/sharedkey", "2015-06-15"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
self._execute_operations()
return self._output()
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
_args_schema.connection_name = AAZStrArg(
options=["--connection-name"],
help="Connection name.",
required=True,
id_part="name",
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.VirtualNetworkGatewayConnectionsGetSharedKey(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
return result
class VirtualNetworkGatewayConnectionsGetSharedKey(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey",
**self.METHOD_NAME
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def METHOD_NAME(self):
parameters = {
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
**self.serialize_url_param(
"virtualNetworkGatewayConnectionName", self.ctx.args.connection_name,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2015-06-15",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.value = AAZStrType()
return cls._schema_on_200
class _ShowHelper:
"""Helper class for Show"""
__all__ = ["Show"]
|
4,385 |
is outside build area
|
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from copy import deepcopy
from typing import cast, Dict, List, Optional
from UM.Application import Application
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Polygon import Polygon # For typing.
from UM.Scene.SceneNode import SceneNode
from UM.Scene.SceneNodeDecorator import SceneNodeDecorator # To cast the deepcopy of every decorator back to SceneNodeDecorator.
import cura.CuraApplication # To get the build plate.
from cura.Settings.ExtruderStack import ExtruderStack # For typing.
from cura.Settings.SettingOverrideDecorator import SettingOverrideDecorator # For per-object settings.
class CuraSceneNode(SceneNode):
"""Scene nodes that are models are only seen when selecting the corresponding build plate
Note that many other nodes can just be UM SceneNode objects.
"""
def __init__(self, parent: Optional["SceneNode"] = None, visible: bool = True, name: str = "", no_setting_override: bool = False) -> None:
super().__init__(parent = parent, visible = visible, name = name)
if not no_setting_override:
self.addDecorator(SettingOverrideDecorator()) # Now we always have a getActiveExtruderPosition, unless explicitly disabled
self._outside_buildarea = False
def setOutsideBuildArea(self, new_value: bool) -> None:
self._outside_buildarea = new_value
def METHOD_NAME(self) -> bool:
return self._outside_buildarea or self.callDecoration("getBuildPlateNumber") < 0
def isVisible(self) -> bool:
return super().isVisible() and self.callDecoration("getBuildPlateNumber") == cura.CuraApplication.CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
def isSelectable(self) -> bool:
return super().isSelectable() and self.callDecoration("getBuildPlateNumber") == cura.CuraApplication.CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
def isSupportMesh(self) -> bool:
per_mesh_stack = self.callDecoration("getStack")
if not per_mesh_stack:
return False
return per_mesh_stack.getProperty("support_mesh", "value")
def getPrintingExtruder(self) -> Optional[ExtruderStack]:
"""Get the extruder used to print this node. If there is no active node, then the extruder in position zero is returned
TODO The best way to do it is by adding the setActiveExtruder decorator to every node when is loaded
"""
global_container_stack = Application.getInstance().getGlobalContainerStack()
if global_container_stack is None:
return None
per_mesh_stack = self.callDecoration("getStack")
extruders = global_container_stack.extruderList
# Use the support extruder instead of the active extruder if this is a support_mesh
if per_mesh_stack:
if per_mesh_stack.getProperty("support_mesh", "value"):
return extruders[int(global_container_stack.getExtruderPositionValueWithDefault("support_extruder_nr"))]
# It's only set if you explicitly choose an extruder
extruder_id = self.callDecoration("getActiveExtruder")
for extruder in extruders:
# Find out the extruder if we know the id.
if extruder_id is not None:
if extruder_id == extruder.getId():
return extruder
else: # If the id is unknown, then return the extruder in the position 0
try:
if extruder.getMetaDataEntry("position", default = "0") == "0": # Check if the position is zero
return extruder
except ValueError:
continue
# This point should never be reached
return None
def getDiffuseColor(self) -> List[float]:
"""Return the color of the material used to print this model"""
printing_extruder = self.getPrintingExtruder()
material_color = "#808080" # Fallback color
if printing_extruder is not None and printing_extruder.material:
material_color = printing_extruder.material.getMetaDataEntry("color_code", default = material_color)
# Colors are passed as rgb hex strings (eg "#ffffff"), and the shader needs
# an rgba list of floats (eg [1.0, 1.0, 1.0, 1.0])
return [
int(material_color[1:3], 16) / 255,
int(material_color[3:5], 16) / 255,
int(material_color[5:7], 16) / 255,
1.0
]
def collidesWithAreas(self, areas: List[Polygon]) -> bool:
"""Return if any area collides with the convex hull of this scene node"""
convex_hull = self.callDecoration("getPrintingArea")
if convex_hull:
if not convex_hull.isValid():
return False
# Check for collisions between provided areas and the object
for area in areas:
overlap = convex_hull.intersectsPolygon(area)
if overlap is None:
continue
return True
return False
def _calculateAABB(self) -> None:
"""Override of SceneNode._calculateAABB to exclude non-printing-meshes from bounding box"""
self._aabb = None
if self._mesh_data:
self._aabb = self._mesh_data.getExtents(self.getWorldTransformation(copy = False))
for child in self.getAllChildren():
if child.callDecoration("isNonPrintingMesh"):
# Non-printing-meshes inside a group should not affect push apart or drop to build plate
continue
child_bb = child.getBoundingBox()
if child_bb is None or child_bb.minimum == child_bb.maximum:
# Child had a degenerate bounding box, such as an empty group. Don't count it along.
continue
if self._aabb is None:
self._aabb = child_bb
else:
self._aabb = self._aabb + child_bb
if self._aabb is None: # No children that should be included? Just use your own position then, but it's an invalid AABB.
position = self.getWorldPosition()
self._aabb = AxisAlignedBox(minimum = position, maximum = position)
def __deepcopy__(self, memo: Dict[int, object]) -> "CuraSceneNode":
"""Taken from SceneNode, but replaced SceneNode with CuraSceneNode"""
copy = CuraSceneNode(no_setting_override = True) # Setting override will be added later
copy.setTransformation(self.getLocalTransformation(copy= False))
copy.setMeshData(self._mesh_data)
copy.setVisible(cast(bool, deepcopy(self._visible, memo)))
copy.source_mime_type = cast(str, deepcopy(self.source_mime_type, memo))
copy._selectable = cast(bool, deepcopy(self._selectable, memo))
copy._name = cast(str, deepcopy(self._name, memo))
for decorator in self._decorators:
copy.addDecorator(cast(SceneNodeDecorator, deepcopy(decorator, memo)))
for child in self._children:
copy.addChild(cast(SceneNode, deepcopy(child, memo)))
self.calculateBoundingBoxMesh()
return copy
def transformChanged(self) -> None:
self._transformChanged()
|
4,386 |
callback
|
import contextlib
import gc
import inspect
import os
import traceback
import sqlalchemy
import sqlalchemy.event
from sqlalchemy import create_engine, text
from .. import core, get_halo
from ..config import testing_db_backend, testing_db_password, testing_db_user
def _as_halos(hlist, session=None):
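# Normalise a (possibly nested) list of halos and halo identifiers into halo objects,
# resolving identifiers via get_halo() against the given or default session; None entries pass through.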
if session is None:
session = core.get_default_session()
rvals = []
for h in hlist:
if h is None:
rvals.append(None)
elif isinstance(h, core.halo.SimulationObjectBase):
rvals.append(h)
elif isinstance(h, list) or isinstance(h, tuple):
rvals.append(_as_halos(h, session))
else:
rvals.append(get_halo(h, session))
return rvals
def _halos_to_strings(hlist):
if len(hlist)==0:
return "(empty list)"
else:
strlist = []
for h in hlist:
if isinstance(h, list) or isinstance(h, tuple):
strlist.append(_halos_to_strings(h))
elif isinstance(h,str):
strlist.append(h)
else:
strlist.append(h.path)
return str(strlist)
def halolists_equal(hl1, hl2, session=None):
"""Return True if hl1 and hl2 are equivalent lists of halos"""
hl1 = _as_halos(hl1, session)
hl2 = _as_halos(hl2, session)
return len(hl1)==len(hl2) and all([h1==h2 for h1, h2 in zip(hl1,hl2)])
def assert_halolists_equal(hl1, hl2, session=None):
equal = halolists_equal(hl1, hl2, session=session)
assert equal, "Not equal: %s %s"%(_halos_to_strings(hl1),_halos_to_strings(hl2))
@contextlib.contextmanager
def autorevert():
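# Run the enclosed block against an isolated session bound to its own transaction,
# then roll everything back and restore the previous default session.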
old_session = core.get_default_session()
connection = core.get_default_engine().connect()
transaction = connection.begin()
isolated_session = core.Session(bind=connection)
core.set_default_session(isolated_session)
yield
transaction.rollback()
isolated_session.close()
connection.close()
core.set_default_session(old_session)
@contextlib.contextmanager
def assert_connections_all_closed():
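# Track connection-pool checkouts and checkins while the block runs; on exit, print the
# creation stack of any connection that was never checked back in and assert that none leaked.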
num_connections = [0,0]
connection_details = {}
def on_checkout(dbapi_conn, connection_rec, connection_proxy):
num_connections[0]+=1
num_connections[1]+=1
connection_details[id(connection_rec)] = traceback.extract_stack()
def on_checkin(dbapi_conn, connection_rec):
if id(connection_rec) in connection_details:
num_connections[0]-=1
del connection_details[id(connection_rec)]
gc.collect()
sqlalchemy.event.listen(core.get_default_engine().pool, 'checkout', on_checkout)
sqlalchemy.event.listen(core.get_default_engine().pool, 'checkin', on_checkin)
yield
gc.collect()
sqlalchemy.event.remove(core.get_default_engine().pool, 'checkout', on_checkout)
sqlalchemy.event.remove(core.get_default_engine().pool, 'checkin', on_checkin)
for k,v in connection_details.items():
print("object id",k,"not checked in; was created here:")
for line in traceback.format_list(v):
print(" ",line)
assert num_connections[0]==0, "%d (of %d) connections were not closed"%(num_connections[0], num_connections[1])
class SqlExecutionTracker:
"""Logs queries performed against the given sqlalchemy connection.
Based on https://stackoverflow.com/questions/19073099/how-to-count-sqlalchemy-queries-in-unit-tests
Usage:
with SqlExecutionTracker(conn) as ctr:
conn.execute("SELECT 1")
conn.execute("SELECT 1")
assert ctr.count == 2
assert "select" in ctr
assert "update" not in ctr
"""
def __init__(self, conn=None):
if conn is None:
conn = core.get_default_engine()
self.conn = conn
self._queries = []
self._stacks = []
def __enter__(self):
sqlalchemy.event.listen(self.conn, 'after_execute', self.METHOD_NAME)
return self
def __exit__(self, *_):
sqlalchemy.event.remove(self.conn, 'after_execute', self.METHOD_NAME)
@property
def count(self):
return len(self._queries)
def get_statement(self, i):
return self._queries[i]
def count_statements_containing(self, search_string):
return sum(self.statements_contain(search_string))
def traceback_statements_containing(self, search_string):
return [tb for include, tb in zip(self.statements_contain(search_string),
self._stacks)
if include]
def statements_contain(self, search_string):
return [search_string.lower() in q.lower() for q in self._queries]
def __contains__(self, search_string):
return (any(self.statements_contain(search_string)))
def METHOD_NAME(self, conn, query, *_):
self._queries.append(str(query))
self._stacks.append("".join(traceback.format_list(traceback.extract_stack()[:-2])))
def init_blank_db_for_testing(**init_kwargs):
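# Create a fresh, empty test database: close any open connection, name the database after the
# calling test module (unless testing_db_name is given), then either recreate a SQLite file under
# test_dbs/ or drop and recreate a database on the configured backend.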
core.close_db()
try:
os.mkdir("test_dbs")
except OSError:
pass
caller_fname = os.path.basename(inspect.getframeinfo(inspect.currentframe().f_back)[0])[:-3]
testing_db_name = init_kwargs.pop("testing_db_name", caller_fname)
if testing_db_backend == "sqlite":
db_name = f"test_dbs/{testing_db_name}.db"
try:
os.remove(db_name)
except OSError:
pass
core.init_db(f"sqlite:///{db_name}", **init_kwargs)
else:
db_url = f"{testing_db_backend}://{testing_db_user}:{testing_db_password}@localhost"
engine = create_engine(db_url)
with engine.connect() as conn:
conn.execute(text("COMMIT"))
conn.execute(text(f"DROP DATABASE IF EXISTS {testing_db_name}"))
conn.execute(text("COMMIT"))
conn.execute(text(f"CREATE DATABASE {testing_db_name}"))
core.init_db(f"{db_url}/{testing_db_name}", **init_kwargs)
|
4,387 |
stop
|
# Modified to work with Exaile - Brian Parma
#
# Copyright (C) 2008 Erik Hetzner
# This file is part of Spydaap. Spydaap is free software: you can
# redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# Spydaap is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Spydaap. If not, see <https://www.gnu.org/licenses/>.
import http.server
import socketserver
import logging
import select
import socket
import os
import spydaap
import spydaap.daap
import spydaap.metadata
import spydaap.containers
import spydaap.cache
import spydaap.server
import spydaap.zeroconfimpl
from xl import common, event, xdg
# Notes for debugging:
# You might want to run
# handle SIGPIPE nostop
# when debugging this code in gdb.
"""
Notes for hunting down errors:
If you run this plugin and a client stops playback on any file, expect this
traceback:
Exception happened during processing of request from ('192.168.122.1', 34394)
Traceback (most recent call last):
File "/usr/lib64/python2.7/SocketServer.py", line 596, in process_request_thread
self.finish_request(request, client_address)
File "/usr/lib64/python2.7/SocketServer.py", line 331, in finish_request
self.RequestHandlerClass(request, client_address, self)
File "/usr/lib64/python2.7/SocketServer.py", line 654, in __init__
self.finish()
File "/usr/lib64/python2.7/SocketServer.py", line 713, in finish
self.wfile.close()
File "/usr/lib64/python2.7/socket.py", line 283, in close
self.flush()
File "/usr/lib64/python2.7/socket.py", line 307, in flush
self._sock.sendall(view[write_offset:write_offset+buffer_size])
error: [Errno 32] broken pipe
This traceback is a result of getting a SIGPIPE, which is expected. The fact
that it is not being handled in the python standard library is a bug which has
been reported to https://bugs.python.org/issue14574
See also: https://stackoverflow.com/questions/6063416/
"""
logger = logging.getLogger('daapserver')
__all__ = ['DaapServer']
class MyThreadedHTTPServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
"""Handle requests in a separate thread."""
timeout = 1
daemon_threads = True
def __init__(self, *args):
if ':' in args[0][0]:
self.address_family = socket.AF_INET6
http.server.HTTPServer.__init__(self, *args)
class DaapServer:
def __init__(self, library, name=spydaap.server_name, host='', port=spydaap.port):
# Thread.__init__(self)
self.host = host
self.port = port
self.library = library
self.name = name
self.httpd = None
self.handler = None
self.__cache = spydaap.cache.Cache(os.path.join(xdg.cache_home, 'daapserver'))
self.__cache.clean()
# Set a callback that will let us propagate library changes to clients
event.add_callback(self.update_rev, 'libraries_modified', library.collection)
def update_rev(self, *args):
if self.handler is not None:
# Updating the server revision, so if a client checks
# it can see the library has changed
self.handler.daap_server_revision += 1
logger.info(
'Libraries Changed, incrementing revision to %d.'
% self.handler.daap_server_revision
)
self.__cache.clean()
def set(self, **kwargs):
for key in kwargs:
setattr(self, key, kwargs[key])
@common.threaded
def run(self):
self.zeroconf = spydaap.zeroconfimpl.ZeroconfImpl(
self.name, self.port, stype="_daap._tcp"
)
self.handler = spydaap.server.makeDAAPHandlerClass(
str(self.name), self.__cache, self.library, []
)
self.httpd = MyThreadedHTTPServer((self.host, self.port), self.handler)
# signal.signal(signal.SIGTERM, make_shutdown(httpd))
# signal.signal(signal.SIGHUP, rebuild_cache)
if self.httpd.address_family == socket.AF_INET:
self.zeroconf.publish(ipv4=True, ipv6=False)
else:
self.zeroconf.publish(ipv4=False, ipv6=True)
try:
try:
logger.info("DAAP server: Listening.")
self.httpd.serve_forever()
except select.error:
pass
except KeyboardInterrupt:
self.httpd.shutdown()
logger.info("DAAP server: Shutting down.")
self.zeroconf.unpublish()
self.httpd = None
def start(self):
if self.httpd is None:
self.run()
return True
return False
def METHOD_NAME(self):
if self.httpd is not None:
self.httpd.shutdown()
self.httpd.socket.close()
return True
return False
def stop_server(self):
self.METHOD_NAME()
# def rebuild_cache(signum=None, frame=None):
# md_cache.build(os.path.abspath(spydaap.media_path))
# container_cache.clean()
# container_cache.build(md_cache)
# cache.clean()
# def really_main():
# rebuild_cache()
# zeroconf = spydaap.zeroconfimpl.ZeroconfImpl(spydaap.server_name,
# spydaap.port,
# stype="_daap._tcp")
# zeroconf.publish()
# logger.warning("Listening.")
# httpd = MyThreadedHTTPServer(('0.0.0.0', spydaap.port),
# spydaap.server.makeDAAPHandlerClass(spydaap.server_name, cache, md_cache, container_cache))
#
## signal.signal(signal.SIGTERM, make_shutdown(httpd))
## signal.signal(signal.SIGHUP, rebuild_cache)
#
# try:
# try:
# httpd.serve_forever()
# except select.error:
# pass
# except KeyboardInterrupt:
# httpd.force_stop()
# logger.warning("Shutting down.")
# zeroconf.unpublish()
# def main():
# really_main()
# if __name__ == "__main__":
# main()
|
4,388 |
tags
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetCostCategoryResult',
'AwaitableGetCostCategoryResult',
'get_cost_category',
'get_cost_category_output',
]
@pulumi.output_type
class GetCostCategoryResult:
"""
A collection of values returned by getCostCategory.
"""
def __init__(__self__, cost_category_arn=None, default_value=None, effective_end=None, effective_start=None, id=None, name=None, rule_version=None, rules=None, split_charge_rules=None, METHOD_NAME=None):
if cost_category_arn and not isinstance(cost_category_arn, str):
raise TypeError("Expected argument 'cost_category_arn' to be a str")
pulumi.set(__self__, "cost_category_arn", cost_category_arn)
if default_value and not isinstance(default_value, str):
raise TypeError("Expected argument 'default_value' to be a str")
pulumi.set(__self__, "default_value", default_value)
if effective_end and not isinstance(effective_end, str):
raise TypeError("Expected argument 'effective_end' to be a str")
pulumi.set(__self__, "effective_end", effective_end)
if effective_start and not isinstance(effective_start, str):
raise TypeError("Expected argument 'effective_start' to be a str")
pulumi.set(__self__, "effective_start", effective_start)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if rule_version and not isinstance(rule_version, str):
raise TypeError("Expected argument 'rule_version' to be a str")
pulumi.set(__self__, "rule_version", rule_version)
if rules and not isinstance(rules, list):
raise TypeError("Expected argument 'rules' to be a list")
pulumi.set(__self__, "rules", rules)
if split_charge_rules and not isinstance(split_charge_rules, list):
raise TypeError("Expected argument 'split_charge_rules' to be a list")
pulumi.set(__self__, "split_charge_rules", split_charge_rules)
if METHOD_NAME and not isinstance(METHOD_NAME, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", METHOD_NAME)
@property
@pulumi.getter(name="costCategoryArn")
def cost_category_arn(self) -> str:
return pulumi.get(self, "cost_category_arn")
@property
@pulumi.getter(name="defaultValue")
def default_value(self) -> str:
"""
Default value for the cost category.
"""
return pulumi.get(self, "default_value")
@property
@pulumi.getter(name="effectiveEnd")
def effective_end(self) -> str:
"""
Effective end date of your Cost Category.
"""
return pulumi.get(self, "effective_end")
@property
@pulumi.getter(name="effectiveStart")
def effective_start(self) -> str:
"""
Effective start date of your Cost Category.
"""
return pulumi.get(self, "effective_start")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="ruleVersion")
def rule_version(self) -> str:
"""
Rule schema version in this particular Cost Category.
"""
return pulumi.get(self, "rule_version")
@property
@pulumi.getter
def rules(self) -> Sequence['outputs.GetCostCategoryRuleResult']:
"""
Configuration block for the `Expression` object used to categorize costs. See below.
"""
return pulumi.get(self, "rules")
@property
@pulumi.getter(name="splitChargeRules")
def split_charge_rules(self) -> Sequence['outputs.GetCostCategorySplitChargeRuleResult']:
"""
Configuration block for the split charge rules used to allocate your charges between your Cost Category values. See below.
"""
return pulumi.get(self, "split_charge_rules")
@property
@pulumi.getter
def METHOD_NAME(self) -> Mapping[str, str]:
"""
Configuration block for the specific `Tag` to use for `Expression`. See below.
"""
return pulumi.get(self, "tags")
class AwaitableGetCostCategoryResult(GetCostCategoryResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetCostCategoryResult(
cost_category_arn=self.cost_category_arn,
default_value=self.default_value,
effective_end=self.effective_end,
effective_start=self.effective_start,
id=self.id,
name=self.name,
rule_version=self.rule_version,
rules=self.rules,
split_charge_rules=self.split_charge_rules,
METHOD_NAME=self.METHOD_NAME)
def get_cost_category(cost_category_arn: Optional[str] = None,
METHOD_NAME: Optional[Mapping[str, str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCostCategoryResult:
"""
Provides details about a specific CostExplorer Cost Category.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.costexplorer.get_cost_category(cost_category_arn="costCategoryARN")
```
:param str cost_category_arn: Unique name for the Cost Category.
:param Mapping[str, str] tags: Configuration block for the specific `Tag` to use for `Expression`. See below.
"""
__args__ = dict()
__args__['costCategoryArn'] = cost_category_arn
__args__['tags'] = METHOD_NAME
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('aws:costexplorer/getCostCategory:getCostCategory', __args__, opts=opts, typ=GetCostCategoryResult).value
return AwaitableGetCostCategoryResult(
cost_category_arn=pulumi.get(__ret__, 'cost_category_arn'),
default_value=pulumi.get(__ret__, 'default_value'),
effective_end=pulumi.get(__ret__, 'effective_end'),
effective_start=pulumi.get(__ret__, 'effective_start'),
id=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
rule_version=pulumi.get(__ret__, 'rule_version'),
rules=pulumi.get(__ret__, 'rules'),
split_charge_rules=pulumi.get(__ret__, 'split_charge_rules'),
METHOD_NAME=pulumi.get(__ret__, 'tags'))
@_utilities.lift_output_func(get_cost_category)
def get_cost_category_output(cost_category_arn: Optional[pulumi.Input[str]] = None,
METHOD_NAME: Optional[pulumi.Input[Optional[Mapping[str, str]]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCostCategoryResult]:
"""
Provides details about a specific CostExplorer Cost Category.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.costexplorer.get_cost_category(cost_category_arn="costCategoryARN")
```
:param str cost_category_arn: Unique name for the Cost Category.
:param Mapping[str, str] tags: Configuration block for the specific `Tag` to use for `Expression`. See below.
"""
...
|
4,389 |
test modulated
|
import numpy as np
import pytest
from aspire.image import Image
from aspire.utils import gaussian_2d, utest_tolerance
from aspire.utils.coor_trans import grid_2d
from aspire.utils.random import randn
from aspire.volume import Volume
# Parameter combinations for testing 2D bases
# Each tuple represents (resolution in pixels, datatype of basis)
basis_params_2d = [
(8, np.float32),
(8, np.float64),
(16, np.float32),
(16, np.float64),
(32, np.float32),
(32, np.float64),
]
basis_params_3d = [
(8, np.float32),
(8, np.float64),
]
pswf_params_2d = [
(8, np.float64),
]
def show_basis_params(basis):
# print descriptive test name for parametrized test
# run pytest with option -rA to see explicitly
return f"{basis.nres}-{basis.dtype}"
class Steerable2DMixin:
"""
Inheriting Test class will expect all Steerable2DMixin functions to take a Basis object
as a parameter.
"""
def testIndices(self, basis):
ell_max = basis.ell_max
k_max = basis.k_max
indices = basis.indices()
i = 0
for ell in range(ell_max + 1):
if ell == 0:
sgns = [1]
else:
sgns = [1, -1]
for sgn in sgns:
for k in range(k_max[ell]):
assert indices["ells"][i] == ell
assert indices["sgns"][i] == sgn
assert indices["ks"][i] == k
i += 1
def testGaussianExpand(self, basis):
# Offset slightly
x0 = 0.50
y0 = 0.75
L = basis.nres
# Want sigma to be as large as possible without the Gaussian
# spilling too much outside the central disk.
sigma = L / 8
im1 = gaussian_2d(L, mu=(x0, y0), sigma=sigma, dtype=basis.dtype)
coef = basis.expand(im1)
im2 = basis.evaluate(coef)
if isinstance(im2, Image):
im2 = im2.asnumpy()
im2 = im2[0]
# For small L there's too much clipping at high freqs to get 1e-3
# accuracy.
if L < 32:
atol = 1e-2
else:
atol = 1e-3
assert im1.shape == im2.shape
assert np.allclose(im1, im2, atol=atol)
def testIsotropic(self, basis):
L = basis.nres
sigma = L / 8
im = gaussian_2d(L, sigma=sigma, dtype=basis.dtype)
coef = basis.expand(im)
ells = basis.indices()["ells"]
energy_outside = np.sum(np.abs(coef[ells != 0]) ** 2)
energy_total = np.sum(np.abs(coef) ** 2)
energy_ratio = energy_outside / energy_total
assert energy_ratio < 0.01
def METHOD_NAME(self, basis):
L = basis.nres
if L < 32:
pytest.skip()
ell = 1
sigma = L / 8
im = gaussian_2d(L, sigma=sigma, dtype=basis.dtype)
g2d = grid_2d(L)
for trig_fun in (np.sin, np.cos):
im1 = im * trig_fun(ell * g2d["phi"])
coef = basis.expand(im1)
ells = basis.indices()["ells"]
energy_outside = np.sum(np.abs(coef[ells != ell]) ** 2)
energy_total = np.sum(np.abs(coef) ** 2)
energy_ratio = energy_outside / energy_total
assert energy_ratio < 0.10
def testEvaluateExpand(self, basis):
coef1 = randn(basis.count, seed=self.seed)
coef1 = coef1.astype(basis.dtype)
im = basis.evaluate(coef1)
if isinstance(im, Image):
im = im.asnumpy()
coef2 = basis.expand(im)[0]
assert coef1.shape == coef2.shape
assert np.allclose(coef1, coef2, atol=utest_tolerance(basis.dtype))
def testAdjoint(self, basis):
u = randn(basis.count, seed=self.seed)
u = u.astype(basis.dtype)
Au = basis.evaluate(u)
if isinstance(Au, Image):
Au = Au.asnumpy()
x = Image(randn(*basis.sz, seed=self.seed), dtype=basis.dtype)
ATx = basis.evaluate_t(x)
Au_dot_x = np.sum(Au * x.asnumpy())
u_dot_ATx = np.sum(u * ATx)
assert Au_dot_x.shape == u_dot_ATx.shape
assert np.isclose(Au_dot_x, u_dot_ATx)
class UniversalBasisMixin:
"""
Inheriting Test class will expect all UniversalBasisMixin functions to take a Basis object
as a parameter.
"""
def getClass(self, basis):
if basis.ndim == 2:
return Image
elif basis.ndim == 3:
return Volume
def testEvaluate(self, basis):
# evaluate should take a NumPy array of type basis.coefficient_dtype
# and return an Image/Volume
_class = self.getClass(basis)
result = basis.evaluate(np.zeros((basis.count), dtype=basis.coefficient_dtype))
assert isinstance(result, _class)
def testEvaluate_t(self, basis):
# evaluate_t should take an Image/Volume and return a NumPy array of type
# basis.coefficient_dtype
_class = self.getClass(basis)
result = basis.evaluate_t(
_class(np.zeros((basis.nres,) * basis.ndim, dtype=basis.dtype))
)
assert isinstance(result, np.ndarray)
assert result.dtype == basis.coefficient_dtype
def testExpand(self, basis):
_class = self.getClass(basis)
# expand should take an Image/Volume and return a NumPy array of type
# basis.coefficient_dtype
result = basis.expand(
_class(np.zeros((basis.nres,) * basis.ndim, dtype=basis.dtype))
)
assert isinstance(result, np.ndarray)
assert result.dtype == basis.coefficient_dtype
def testInitWithIntSize(self, basis):
# make sure we can instantiate with just an int as a shortcut
assert (basis.nres,) * basis.ndim == basis.__class__(basis.nres).sz
|
4,390 |
inject result data outputs
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from pipeline.component_framework.component import Component
from pipeline.core.flow import Service, StaticIntervalGenerator
from pipeline.core.flow.io import StringItemSchema
from plugin_service.conf import PLUGIN_LOGGER
from plugin_service.exceptions import PluginServiceException
from plugin_service.plugin_client import PluginServiceApiClient
from django.utils.translation import ugettext_lazy as _
logger = logging.getLogger(PLUGIN_LOGGER)
class State:
EMPTY = 1
POLL = 2
CALLBACK = 3
SUCCESS = 4
FAIL = 5
UNFINISHED_STATES = {State.POLL, State.CALLBACK}
class RemotePluginService(Service):
interval = StaticIntervalGenerator(5)
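# Re-poll the remote plugin service every 5 seconds while the invocation remains in an unfinished (POLL/CALLBACK) state.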
def outputs_format(self):
return [
self.OutputItem(
name="Trace ID", key="trace_id", type="string", schema=StringItemSchema(description="Trace ID")
),
]
def execute(self, data, parent_data):
plugin_code = data.get_one_of_inputs("plugin_code")
plugin_version = data.get_one_of_inputs("plugin_version")
try:
plugin_client = PluginServiceApiClient(plugin_code)
except PluginServiceException as e:
message = _(f"第三方插件client初始化失败, 错误内容: {e}")
logger.error(message)
data.set_outputs("ex_data", message)
return False
detail_result = plugin_client.get_detail(plugin_version)
if not detail_result["result"]:
message = _(f"获取第三方插件详情失败, 错误内容: {detail_result['message']}")
logger.error(message)
data.set_outputs("ex_data", message)
return False
plugin_context = dict(
[
(key, parent_data.inputs[key])
for key in detail_result["data"]["context_inputs"]["properties"].keys()
if key in parent_data.inputs
]
)
ok, result_data = plugin_client.invoke(plugin_version, {"inputs": data.inputs, "context": plugin_context})
if not ok:
message = _(
f"调用第三方插件invoke接口错误, 错误内容: {result_data['message']}, trace_id: {result_data.get('trace_id')}"
)
logger.error(message)
data.set_outputs("ex_data", message)
return False
data.set_outputs("trace_id", result_data["trace_id"])
self.METHOD_NAME(data, result_data)
state = result_data["state"]
if state == State.FAIL:
data.set_outputs("ex_data", result_data["err"])
return False
if state in UNFINISHED_STATES:
setattr(self, "__need_schedule__", True)
return True
def schedule(self, data, parent_data, callback_data=None):
plugin_code = data.get_one_of_inputs("plugin_code")
trace_id = data.get_one_of_outputs("trace_id")
try:
plugin_client = PluginServiceApiClient(plugin_code)
except PluginServiceException as e:
message = _(f"第三方插件client初始化失败, 错误内容: {e}")
logger.error(message)
data.set_outputs("ex_data", message)
return False
ok, result_data = plugin_client.get_schedule(trace_id)
if not ok:
message = (
f"remote plugin service schedule error: {result_data['message']}, "
f"trace_id: {result_data.get('trace_id') or trace_id}"
)
logger.error(message)
data.set_outputs("ex_data", message)
return False
self.METHOD_NAME(data, result_data)
state = result_data["state"]
if state == State.FAIL:
message = _("请通过第三方节点日志查看任务失败原因")
logger.error(message)
logger.error(f"[remote plugin service state failed]: {result_data}")
data.set_outputs("ex_data", result_data["outputs"].get("err") or message)
return False
if state in UNFINISHED_STATES:
setattr(self, "__need_schedule__", True)
if state == State.SUCCESS:
self.finish_schedule()
return True
@staticmethod
def METHOD_NAME(data, result_data):
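# Copy every key of the plugin's returned outputs into this node's output data.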
outputs = result_data.get("outputs") or {}
for key, output in outputs.items():
data.set_outputs(key, output)
class RemotePluginComponent(Component):
code = "remote_plugin"
name = "RemotePlugin"
bound_service = RemotePluginService
version = "1.0.0"
|
4,391 |
make dummy inst
|
from collections.abc import Generator
import pytest
import qcodes.validators as vals
from qcodes.parameters import Parameter, create_on_off_val_mapping
from qcodes.tests.instrument_mocks import DummyInstrument
from .conftest import ParameterMemory
@pytest.fixture(name="dummyinst")
def METHOD_NAME() -> Generator[DummyInstrument, None, None]:
inst = DummyInstrument('dummy_holder')
try:
yield inst
finally:
inst.close()
def test_val_mapping_basic() -> None:
# We store value external to cache
# to allow testing of interaction with cache
mem = ParameterMemory()
p = Parameter('p', set_cmd=mem.set, get_cmd=mem.get,
val_mapping={'off': 0, 'on': 1},
vals=vals.Enum('off', 'on'))
p('off')
assert p.cache.raw_value == 0
assert mem.get() == 0
assert p() == 'off'
mem.set(1)
assert p() == 'on'
# implicit mapping to ints
mem.set('0')
assert p() == 'off'
# unrecognized response
mem.set(2)
with pytest.raises(KeyError):
p()
mem.set(1)
p.cache.set('off')
assert p.get_latest() == 'off'
# Nothing has been passed to the "instrument" at ``cache.set``
# call, hence the following assertions should hold
assert mem.get() == 1
assert p() == 'on'
assert p.get_latest() == 'on'
def test_val_mapping_with_parsers() -> None:
# We store value external to cache
# to allow testing of interaction with cache
mem = ParameterMemory()
# # set_parser with val_mapping
# Parameter('p', set_cmd=mem.set, get_cmd=mem.get,
# val_mapping={'off': 0, 'on': 1},
# set_parser=mem.parse_set_p)
# get_parser with val_mapping
p = Parameter('p', set_cmd=mem.set_p_prefixed,
get_cmd=mem.get, get_parser=mem.strip_prefix,
val_mapping={'off': 0, 'on': 1},
vals=vals.Enum('off', 'on'))
p('off')
assert mem.get() == 'PVAL: 0'
# this is slight strange. Since it uses a custom set_cmd
# rather than a set_parser the raw_value does not match
# what is actually sent to the instrument
assert p.cache.raw_value == 0
assert p() == 'off'
mem.set('PVAL: 1')
assert p() == 'on'
p.cache.set('off')
assert p.get_latest() == 'off'
# Nothing has been passed to the "instrument" at ``cache.set``
# call, hence the following assertions should hold
assert mem.get() == 'PVAL: 1'
assert p() == 'on'
assert p.get_latest() == 'on'
assert p.cache.get() == 'on'
def test_on_off_val_mapping() -> None:
instrument_value_for_on = 'on_'
instrument_value_for_off = 'off_'
parameter_return_value_for_on = True
parameter_return_value_for_off = False
mem = ParameterMemory()
p = Parameter('p', set_cmd=mem.set, get_cmd=mem.get,
val_mapping=create_on_off_val_mapping(
on_val=instrument_value_for_on,
off_val=instrument_value_for_off))
test_data = [(instrument_value_for_on,
parameter_return_value_for_on,
('On', 'on', 'ON', 1, True)),
(instrument_value_for_off,
parameter_return_value_for_off,
('Off', 'off', 'OFF', 0, False))]
for instr_value, parameter_return_value, inputs in test_data:
for inp in inputs:
# Setting parameter with any of the `inputs` is allowed
p(inp)
# For any value from the `inputs`, what gets send to the
# instrument is on_val/off_val which are specified in
# `create_on_off_val_mapping`
assert mem.get() == instr_value
# When getting a value of the parameter, only specific
# values are returned instead of `inputs`
assert p() == parameter_return_value
def test_val_mapping_on_instrument(dummyinst: DummyInstrument) -> None:
dummyinst.add_parameter('myparameter', set_cmd=None, get_cmd=None,
val_mapping={'A': 0, 'B': 1})
dummyinst.myparameter('A')
assert dummyinst.myparameter() == 'A'
assert dummyinst.myparameter() == 'A'
assert dummyinst.myparameter.raw_value == 0
|
4,392 |
test check virtual
|
"""
:codeauthor: Gareth J. Greenaway <[email protected]>
"""
import copy
import logging
import pytest
from saltfactories.utils import random_string
import salt.proxy.netmiko_px as netmiko_proxy
from tests.support.mock import MagicMock, patch
log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def proxy_minion_config_module(salt_master_factory):
factory = salt_master_factory.salt_proxy_minion_daemon(
random_string("proxy-minion-"),
)
return factory.config
@pytest.fixture
def proxy_minion_config(proxy_minion_config_module):
return copy.deepcopy(proxy_minion_config_module)
@pytest.fixture
def configure_loader_modules():
return {netmiko_proxy: {}}
class MockNetmikoConnection:
def is_alive(self):
return False
def send_config_set(self, *args, **kwargs):
return args, kwargs
def METHOD_NAME():
"""
check netmiko_proxy virtual method - return value
"""
with patch.object(netmiko_proxy, "HAS_NETMIKO", True):
result = netmiko_proxy.__virtual__()
assert "netmiko" in result
expected = (
False,
"The netmiko proxy module requires netmiko library to be installed.",
)
with patch.object(netmiko_proxy, "HAS_NETMIKO", False):
result = netmiko_proxy.__virtual__()
assert expected == result
def test_init_skip_connect_on_init_true(proxy_minion_config):
"""
check netmiko_proxy init method
"""
proxy_minion_config["skip_connect_on_init"] = True
assert netmiko_proxy.init(proxy_minion_config)
assert "netmiko_device" in netmiko_proxy.__context__
netmiko_device = netmiko_proxy.__context__["netmiko_device"]
assert "args" in netmiko_device
assert "initialized" in netmiko_device
assert not netmiko_device["initialized"]
assert "up" in netmiko_device
assert netmiko_device["up"]
assert "always_alive" in netmiko_device
assert "connection" not in netmiko_device
def test_init_skip_connect_on_init_false(proxy_minion_config):
"""
check netmiko_proxy init method
"""
proxy_minion_config["skip_connect_on_init"] = False
mock_make_con = MagicMock()
with patch.object(netmiko_proxy, "make_con", mock_make_con):
assert netmiko_proxy.init(proxy_minion_config) is None
assert "netmiko_device" in netmiko_proxy.__context__
netmiko_device = netmiko_proxy.__context__["netmiko_device"]
assert "args" in netmiko_device
assert "initialized" in netmiko_device
assert netmiko_device["initialized"]
assert "up" in netmiko_device
assert netmiko_device["up"]
assert "always_alive" in netmiko_device
assert "connection" in netmiko_device
def test_init_connection_timeout(proxy_minion_config):
"""
check that connection_timeout is removed from args
before being passed along.
"""
proxy_minion_config["connection_timeout"] = 60
mock_make_con = MagicMock()
with patch.object(netmiko_proxy, "make_con", mock_make_con):
assert netmiko_proxy.init(proxy_minion_config) is None
assert "netmiko_device" in netmiko_proxy.__context__
netmiko_device = netmiko_proxy.__context__["netmiko_device"]
assert "args" in netmiko_device
assert "connection_timeout" not in netmiko_device["args"]
def test_make_con(proxy_minion_config):
"""
check netmiko_proxy make_con method
"""
proxy_minion_config["skip_connect_on_init"] = True
netmiko_proxy.init(proxy_minion_config)
mock_connection = MockNetmikoConnection
with patch.object(netmiko_proxy, "ConnectHandler", mock_connection, create=True):
result = netmiko_proxy.make_con()
assert result is not None
def test_make_con_raise_exception(proxy_minion_config):
"""
check netmiko_proxy make_con method
"""
class ExpectedException(Exception):
"""
Custom exception class expected to be raised as a side_effect
"""
def raise_exception(*arg, **kwarg):
raise ExpectedException("expected")
proxy_minion_config["skip_connect_on_init"] = True
netmiko_proxy.init(proxy_minion_config)
with patch.object(netmiko_proxy, "DEFAULT_CONNECTION_TIMEOUT", 0), patch.object(
netmiko_proxy, "ConnectHandler", raise_exception, create=True
):
with pytest.raises(ExpectedException):
netmiko_proxy.make_con(0)
def test_ping(proxy_minion_config):
"""
check netmiko_proxy ping method
"""
proxy_minion_config["skip_connect_on_init"] = True
netmiko_proxy.init(proxy_minion_config)
result = netmiko_proxy.ping()
assert result is True
def test_alive(proxy_minion_config, subtests):
"""
check netmiko_proxy alive method
"""
# Always alive False with skip_connect_on_init on True
# should return alive as True
with subtests.test("skip_connect_on_init=True, proxy_always_alive=False"):
proxy_minion_config["skip_connect_on_init"] = True
proxy_minion_config["proxy_always_alive"] = False
netmiko_proxy.init(proxy_minion_config)
result = netmiko_proxy.alive(proxy_minion_config)
assert result
# Always alive True with skip_connect_on_init on True
# should return alive as False
with subtests.test("skip_connect_on_init=True, proxy_always_alive=True"):
proxy_minion_config["skip_connect_on_init"] = True
proxy_minion_config["proxy_always_alive"] = True
netmiko_proxy.init(proxy_minion_config)
result = netmiko_proxy.alive(proxy_minion_config)
assert not result
# Always alive True with skip_connect_on_init on False
# should return alive as True
with subtests.test("skip_connect_on_init=False, proxy_always_alive=True"):
proxy_minion_config["skip_connect_on_init"] = False
proxy_minion_config["proxy_always_alive"] = True
mock_make_con = MagicMock()
with patch.object(netmiko_proxy, "make_con", mock_make_con):
netmiko_proxy.init(proxy_minion_config)
result = netmiko_proxy.alive(proxy_minion_config)
assert result
def test_initialized(proxy_minion_config):
"""
check netmiko_proxy initialized method
"""
proxy_minion_config["skip_connect_on_init"] = True
netmiko_proxy.init(proxy_minion_config)
result = netmiko_proxy.initialized()
assert not result
# With skip_connect_on_init set to False the connection is made during init,
# so initialized should return True
proxy_minion_config["skip_connect_on_init"] = False
mock_make_con = MagicMock()
with patch.object(netmiko_proxy, "make_con", mock_make_con):
netmiko_proxy.init(proxy_minion_config)
result = netmiko_proxy.initialized()
assert result
|
4,393 |
init
|
# (C) Copyright 2004-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
""" Defines the HTML "editor" for the QT4 user interface toolkit.
HTML editors interpret and display HTML-formatted text, but do not
modify it.
"""
import webbrowser
from pyface.qt import QtCore, QtGui
from traits.api import Str
from .editor import Editor
try:
from pyface.qt import QtWebKit
# Subclass of QWebPage for QtWebEngine support
class ExternallyOpeningWebPage(QtWebKit.QWebPage):
"""QWebEnginePage subclass that opens links in system browser
This subclass is only used when we are given a QWebEnginePage which is
pretending to be a QWebPage and we want the open_external feature
of the Editor.
This overrides the acceptNavigationRequest method to open links
in an external browser. All other navigation requests are handled
internally as per the base class.
"""
def acceptNavigationRequest(self, url, type, isMainFrame):
if type == QtWebKit.QWebPage.NavigationTypeLinkClicked:
webbrowser.open_new(url.toString())
return False
else:
return super().acceptNavigationRequest(url, type, isMainFrame)
WebView = QtWebKit.QWebView
HAS_WEB_VIEW = True
except Exception:
WebView = QtGui.QTextBrowser
HAS_WEB_VIEW = False
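# If neither QtWebKit nor a QtWebEngine shim is importable, fall back to a
# plain QTextBrowser, which renders simple HTML but has no web engine features.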
# -------------------------------------------------------------------------
# 'SimpleEditor' class:
# -------------------------------------------------------------------------
class SimpleEditor(Editor):
"""Simple style editor for HTML."""
# -------------------------------------------------------------------------
# Trait definitions:
# -------------------------------------------------------------------------
#: Is the HTML editor scrollable? This value overrides the default.
scrollable = True
#: External objects referenced in the HTML are relative to this URL
base_url = Str()
def METHOD_NAME(self, parent):
"""Finishes initializing the editor by creating the underlying toolkit
widget.
"""
self.control = WebView()
self.control.setSizePolicy(
QtGui.QSizePolicy.Policy.Expanding, QtGui.QSizePolicy.Policy.Expanding
)
if self.factory.open_externally:
if HAS_WEB_VIEW:
page = self.control.page()
if hasattr(page, 'setLinkDelegationPolicy'):
# QtWebKit
page.setLinkDelegationPolicy(
QtWebKit.QWebPage.DelegateAllLinks
)
page.linkClicked.connect(self._link_clicked)
else:
# QtWebEngine pretending to be QtWebKit
# We need the subclass defined above instead of the
# regular web page so that links are opened externally
page = ExternallyOpeningWebPage(self.control)
self.control.setPage(page)
else:
# take over handling clicks on links
self.control.setOpenLinks(False)
self.control.anchorClicked.connect(self._link_clicked)
self.base_url = self.factory.base_url
self.sync_value(self.factory.base_url_name, "base_url", "from")
def dispose(self):
"""Disposes of the contents of an editor."""
if self.control is not None and self.factory.open_externally:
if HAS_WEB_VIEW:
page = self.control.page()
if hasattr(page, 'setLinkDelegationPolicy'):
# QtWebKit-only cleanup
page.linkClicked.disconnect(self._link_clicked)
else:
# QTextBrowser clean-up
self.control.anchorClicked.disconnect(self._link_clicked)
super().dispose()
def update_editor(self):
"""Updates the editor when the object trait changes external to the
editor.
"""
text = self.str_value
if self.factory.format_text:
text = self.factory.parse_text(text)
if self.base_url and HAS_WEB_VIEW:
url = self.base_url
if not url.endswith("/"):
url += "/"
self.control.setHtml(text, QtCore.QUrl.fromLocalFile(url))
else:
self.control.setHtml(text)
# -- Event Handlers -------------------------------------------------------
def _base_url_changed(self):
self.update_editor()
def _link_clicked(self, url):
webbrowser.open_new(url.toString())
|
4,394 |
package id
|
import os
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, cmake_layout, CMakeToolchain
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.microsoft import check_min_vs, is_msvc
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
required_conan_version = ">=1.52.0"
class UTConan(ConanFile):
name = "boost-ext-ut"
description = ("C++20 single header/single module, "
"macro-free micro Unit Testing Framework")
topics = ("ut", "header-only", "unit-test", "test", "tdd", "bdd")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://boost-ext.github.io/ut/"
license = "BSL-1.0"
settings = "os", "compiler", "arch", "build_type"
no_copy_source = True
options = { "disable_module": [True, False], }
default_options = { "disable_module": False, }
@property
def _minimum_cpp_standard(self):
return 17 if self.settings.compiler in ["clang", "gcc"] and Version(self.version) <= "1.1.8" else 20
@property
def _minimum_compilers_version(self):
return {
"apple-clang": "11" if Version(self.version) < "1.1.8" else "12",
"clang": "9",
"gcc": "9",
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if Version(self.version) <= "1.1.8":
del self.options.disable_module
elif is_msvc(self):
self.options.disable_module = True
def layout(self):
cmake_layout(self, src_folder="src")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, self._minimum_cpp_standard)
if Version(self.version) <= "1.1.8" and is_msvc(self):
raise ConanInvalidConfiguration(f"{self.ref} may not be built with MSVC. "
"Please use at least version 1.1.9 with MSVC.")
if is_msvc(self):
check_min_vs(self, "192")
if not self.options.get_safe("disable_module", True):
self.output.warn("The 'disable_module' option must be enabled when using MSVC.")
if not is_msvc(self):
min_version = self._minimum_compilers_version.get(
str(self.settings.compiler))
if not min_version:
self.output.warn(f"{self.ref} recipe lacks information about the {self.settings.compiler} "
"compiler support.")
else:
if Version(self.settings.compiler.version) < min_version:
raise ConanInvalidConfiguration(
f"{self.ref} requires C++{self._minimum_cpp_standard} support. "
f"The current compiler {self.settings.compiler} {self.settings.compiler.version} does not support it.")
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.cache_variables["BOOST_UT_BUILD_BENCHMARKS"] = False
tc.cache_variables["BOOST_UT_BUILD_EXAMPLES"] = False
tc.cache_variables["BOOST_UT_BUILD_TESTS"] = not self.conf.get("tools.build:skip_test", default=True, check_type=bool)
tc.cache_variables["PROJECT_DISABLE_VERSION_SUFFIX"] = True
disable_module = self.options.get_safe("disable_module")
if disable_module:
tc.cache_variables["BOOST_UT_DISABLE_MODULE"] = disable_module
tc.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
copy(self, "LICENSE*", self.source_folder, os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
def METHOD_NAME(self):
self.info.clear()
def package_info(self):
newer_than_1_1_8 = Version(self.version) > "1.1.8"
namespace = "Boost" if newer_than_1_1_8 else "boost"
self.cpp_info.set_property("cmake_file_name", "ut")
self.cpp_info.set_property("cmake_target_name", f"{namespace}::ut")
self.cpp_info.names["cmake_find_package"] = namespace
self.cpp_info.names["cmake_find_package_multi"] = namespace
self.cpp_info.filenames["cmake_find_package"] = "ut"
self.cpp_info.filenames["cmake_find_package_multi"] = "ut"
self.cpp_info.components["ut"].names["cmake_find_package"] = "ut"
self.cpp_info.components["ut"].names["cmake_find_package_multi"] = "ut"
if newer_than_1_1_8:
self.cpp_info.components["ut"].includedirs = [os.path.join("include", "ut-" + self.version, "include")]
if self.options.get_safe("disable_module"):
self.cpp_info.components["ut"].defines = ["BOOST_UT_DISABLE_MODULE=1"]
|
4,395 |
exists
|
#!/usr/bin/python3
"""Fetch OSTree commits from an repository
Uses ostree to pull specific commits from (remote) repositories
at the provided `url`. Can verify the commit, if one or more
gpg keys are provided via `gpgkeys`.
"""
import os
import subprocess
import sys
import uuid
from osbuild import sources
from osbuild.util.ostree import show
from osbuild.util.rhsm import Subscriptions
SCHEMA = """
"additionalProperties": false,
"definitions": {
"item": {
"description": "The commits to fetch indexed their checksum",
"type": "object",
"additionalProperties": false,
"patternProperties": {
"[0-9a-f]{5,64}": {
"type": "object",
"additionalProperties": false,
"required": ["remote"],
"properties": {
"remote": {
"type": "object",
"additionalProperties": false,
"required": ["url"],
"properties": {
"url": {
"type": "string",
"description": "URL of the repository."
},
"contenturl": {
"type": "string",
"description": "content URL of the repository."
},
"gpgkeys": {
"type": "array",
"items": {
"type": "string",
"description": "GPG keys to verify the commits"
}
},
"secrets": {
"type": "object",
"additionalProperties": false,
"required": [
"name"
],
"properties": {
"name": {
"type": "string",
"description": "Name of the secrets provider."
}
}
}
}
}
}
}
}
}
},
"properties": {
"items": {"$ref": "#/definitions/item"},
"commits": {"$ref": "#/definitions/item"}
},
"oneOf": [{
"required": ["items"]
}, {
"required": ["commits"]
}]
"""
def ostree(*args, _input=None, **kwargs):
args = list(args) + [f'--{k}={v}' for k, v in kwargs.items()]
print("ostree " + " ".join(args), file=sys.stderr)
subprocess.run(["ostree"] + args,
encoding="utf-8",
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
input=_input,
check=True)
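# For example, ostree("pull", remote, commit, repo=self.repo) runs the command
# `ostree pull <remote> <commit> --repo=<repo>`; keyword arguments are turned
# into `--key=value` flags by the helper above.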
class OSTreeSource(sources.SourceService):
content_type = "org.osbuild.ostree"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.repo = None
def fetch_one(self, checksum, desc):
commit = checksum
remote = desc["remote"]
url = remote["url"]
gpg = remote.get("gpgkeys", [])
uid = str(uuid.uuid4())
remote_add_args = []
if not gpg:
remote_add_args = ["--no-gpg-verify"]
if "contenturl" in remote:
remote_add_args.append(f"--contenturl={remote['contenturl']}")
if remote.get("secrets", {}).get("name") == "org.osbuild.rhsm.consumer":
secrets = Subscriptions.get_consumer_secrets()
remote_add_args.append(f"--set=tls-client-key-path={secrets['consumer_key']}")
remote_add_args.append(f"--set=tls-client-cert-path={secrets['consumer_cert']}")
ostree("remote", "add",
uid, url,
*remote_add_args,
repo=self.repo)
for key in gpg:
ostree("remote", "gpg-import", "--stdin", uid,
repo=self.repo, _input=key)
# Transfer the commit: remote → cache
print(f"pulling {commit}", file=sys.stderr)
ostree("pull", uid, commit, repo=self.repo)
# Remove the temporary remotes again
ostree("remote", "delete", uid,
repo=self.repo)
def setup(self, args):
super().setup(args)
# Prepare the cache and the output repo
self.repo = os.path.join(self.cache, "repo")
ostree("init", mode="archive", repo=self.repo)
# Make sure the cache repository uses locks to protect the metadata during
# shared access. This is the default since `2018.5`, but let's document this
# explicitly here.
ostree("config", "set", "repo.locking", "true", repo=self.repo)
# pylint: disable=[no-self-use]
def METHOD_NAME(self, checksum, _desc):
try:
show(self.repo, checksum)
except RuntimeError:
return False
return True
def main():
service = OSTreeSource.from_args(sys.argv[1:])
service.main()
if __name__ == '__main__':
main()
|
4,396 |
test quorum
|
#!/usr/bin/env python3
# group: rw quick
#
# Test the rate limit of QMP events
#
# Copyright (C) 2016 Igalia, S.L.
# Author: Alberto Garcia <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import iotests
imgs = (os.path.join(iotests.test_dir, 'quorum0.img'),
os.path.join(iotests.test_dir, 'quorum1.img'),
os.path.join(iotests.test_dir, 'quorum2.img'))
img_conf = (os.path.join(iotests.test_dir, 'quorum0.conf'),
os.path.join(iotests.test_dir, 'quorum1.conf'),
os.path.join(iotests.test_dir, 'quorum2.conf'))
event_rate = 1000000000
sector_size = 512
offset = 10
class TestQuorumEvents(iotests.QMPTestCase):
read_pattern = 'quorum'
def create_blkdebug_file(self, blkdebug_file, bad_sector):
file = open(blkdebug_file, 'w')
file.write('''
[inject-error]
event = "read_aio"
errno = "5"
sector = "%d"
''' % bad_sector)
file.close()
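    # The generated blkdebug config injects an EIO error (errno 5) into every
    # read_aio request touching bad_sector, which is what later triggers the
    # QUORUM_REPORT_BAD events this test waits for.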
@iotests.skip_if_unsupported(['quorum'])
def setUp(self):
driveopts = ['driver=quorum', 'vote-threshold=2']
driveopts.append('read-pattern=%s' % self.read_pattern)
for i in range(len(imgs)):
iotests.qemu_img('create', '-f', iotests.imgfmt, imgs[i], '1M')
self.create_blkdebug_file(img_conf[i], i + offset)
driveopts.append('children.%d.driver=%s' % (i, iotests.imgfmt))
driveopts.append('children.%d.file.driver=blkdebug' % i)
driveopts.append('children.%d.file.config=%s' % (i, img_conf[i]))
driveopts.append('children.%d.file.image.filename=%s' % (i, imgs[i]))
driveopts.append('children.%d.node-name=img%d' % (i, i))
self.vm = iotests.VM()
self.vm.add_drive(None, opts = ','.join(driveopts))
self.vm.launch()
def tearDown(self):
self.vm.shutdown()
for i in range(len(imgs)):
os.remove(imgs[i])
os.remove(img_conf[i])
def do_check_event(self, node, sector = 0):
if node == None:
self.assertEqual(self.vm.get_qmp_event(), None)
return
for event in self.vm.get_qmp_events(wait=True):
if event['event'] == 'QUORUM_REPORT_BAD':
self.assert_qmp(event, 'data/node-name', node)
self.assert_qmp(event, 'data/sector-num', sector)
def METHOD_NAME(self):
# Generate an error and get an event
self.vm.hmp_qemu_io("drive0", "aio_read %d %d" %
(offset * sector_size, sector_size))
self.vm.qtest("clock_step 10")
self.do_check_event('img0', offset)
# I/O errors in the same child: only one event is emitted
delay = 10
for i in range(3):
self.vm.hmp_qemu_io("drive0", "aio_read %d %d" %
(offset * sector_size, sector_size))
self.vm.qtest("clock_step %d" % delay)
self.do_check_event(None)
# Wait enough so the event is finally emitted
self.vm.qtest("clock_step %d" % (2 * event_rate))
self.do_check_event('img0', offset)
# I/O errors in the same child: all events are emitted
delay = 2 * event_rate
for i in range(3):
self.vm.hmp_qemu_io("drive0", "aio_read %d %d" %
(offset * sector_size, sector_size))
self.vm.qtest("clock_step %d" % delay)
self.do_check_event('img0', offset)
# I/O errors in different children: all events are emitted
delay = 10
for i in range(len(imgs)):
self.vm.hmp_qemu_io("drive0", "aio_read %d %d" %
((offset + i) * sector_size, sector_size))
self.vm.qtest("clock_step %d" % delay)
# In fifo mode only errors in the first child are detected
if i > 0 and self.read_pattern == 'fifo':
self.do_check_event(None)
else:
self.do_check_event('img%d' % i, offset + i)
# I/O errors in different children: all events are emitted
delay = 2 * event_rate
for i in range(len(imgs)):
self.vm.hmp_qemu_io("drive0", "aio_read %d %d" %
((offset + i) * sector_size, sector_size))
self.vm.qtest("clock_step %d" % delay)
# In fifo mode only errors in the first child are detected
if i > 0 and self.read_pattern == 'fifo':
self.do_check_event(None)
else:
self.do_check_event('img%d' % i, offset + i)
# No more pending events
self.do_check_event(None)
class TestFifoQuorumEvents(TestQuorumEvents):
read_pattern = 'fifo'
if __name__ == '__main__':
iotests.verify_quorum()
iotests.main(supported_fmts=["raw"],
supported_protocols=["file"])
|
4,397 |
episode done
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
from abc import ABC, abstractmethod
from queue import Queue
from parlai.core.agents import Agent
class ChatServiceAgent(Agent, ABC):
"""
Base class for a person on a chat service that can act in a ParlAI world.
"""
def __init__(self, opt, manager, receiver_id, task_id):
super().__init__(opt)
self.manager = manager
self.id = receiver_id
self.task_id = task_id
self.acted_packets = {}
self._data = {}
self.msg_queue = Queue()
self.observed_packets = {}
self.message_request_time = None
self.stored_data = {}
self.message_partners = []
# initialize stored data
self.set_stored_data()
@property
def data(self):
"""
ChatServiceAgent data property.
"""
return self._data
@data.setter
def data(self, value):
"""
Setter for ChatServiceAgent.data.
The data within a ChatServiceAgent is persistent, in the sense that keys
_cannot_ be removed from the data. This is important to ensure persistence
of agent state across various parts of the ChatService pipeline.
To ensure this property, we call `agent._data.update(value)` when explicitly
setting the `data` property of an agent. This protects against cases where,
e.g., the `__init__` function sets a property for the agent, and then
later someone manually sets `agent.data = new_data`.
"""
self._data.update(value)
@abstractmethod
def observe(self, act):
"""
Send an agent a message through the manager.
"""
pass
def _send_payload(self, receiver_id, data, quick_replies=None, persona_id=None):
"""
Send a payload through the message manager.
:param receiver_id:
int identifier for agent to send message to
:param data:
object data to send
:param quick_replies:
list of quick replies
:param persona_id:
identifier of persona
:return:
a dictionary of a json response from the manager observing a payload
"""
return self.manager.observe_payload(
receiver_id, data, quick_replies, persona_id
)
@abstractmethod
def put_data(self, message):
"""
Put data into the message queue if it hasn't already been seen.
"""
pass
def _queue_action(self, action, act_id, act_data=None):
"""
Add an action to the queue with given id and info if it hasn't already been
seen.
:param action:
action to be added to message queue
:param act_id:
an identifier to check if the action has been seen or to
mark the action as seen
:param act_data:
any data about the given action you may want to record when
marking it as seen
"""
if act_id not in self.acted_packets:
self.acted_packets[act_id] = act_data
self.msg_queue.put(action)
def set_stored_data(self):
"""
Gets agent state data from manager.
"""
agent_state = self.manager.get_agent_state(self.id)
if agent_state is not None and hasattr(agent_state, 'stored_data'):
self.stored_data = agent_state.stored_data
def get_new_act_message(self):
"""
Get a new act message if one exists, return None otherwise.
"""
if not self.msg_queue.empty():
return self.msg_queue.get()
return None
def act(self):
"""
Pulls a message from the message queue.
If none exist returns None.
"""
msg = self.get_new_act_message()
return msg
def _check_timeout(self, timeout=None):
"""
Return whether more time than the timeout amount has passed.
"""
if timeout:
return time.time() - self.message_request_time > timeout
return False
def act_blocking(self, timeout=None):
"""
Repeatedly loop until we retrieve a message from the queue.
"""
while True:
if self.message_request_time is None:
self.message_request_time = time.time()
msg = self.act()
if msg is not None:
self.message_request_time = None
return msg
if self._check_timeout(timeout):
return None
time.sleep(0.2)
def METHOD_NAME(self):
"""
Return whether or not this agent believes the conversation to be done.
"""
return self.manager.shutting_down
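# A minimal, purely illustrative subclass (not part of this module) showing
# which abstract methods a concrete chat-service integration has to provide;
# the message payload shape used here is an assumption, not a real service API.
class EchoServiceAgent(ChatServiceAgent):
    def observe(self, act):
        # Forward the outgoing text to the user via the manager.
        return self._send_payload(self.id, {'text': act.get('text', '')})
    def put_data(self, message):
        # Deduplicate on a message id and enqueue the incoming action.
        act = {'id': self.id, 'text': message.get('text', ''), 'episode_done': False}
        self._queue_action(act, message.get('message_id'))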
|
4,398 |
decode definition
|
from typing import TYPE_CHECKING
from trezor.messages import EthereumNetworkInfo, EthereumTokenInfo
from trezor.wire import DataError
if TYPE_CHECKING:
from typing import TypeVar
from typing_extensions import Self
DefType = TypeVar("DefType", EthereumNetworkInfo, EthereumTokenInfo)
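# Layout of an encoded definition, as implied by the parsing below (derived
# from the reader calls, not from an external spec):
#   FORMAT_VERSION prefix | 1-byte definition type | uint32-le data version |
#   uint16-le payload length | payload | 1-byte proof length |
#   32-byte Merkle proof entries | 1-byte sigmask | 64-byte CoSi signature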
def METHOD_NAME(definition: bytes, expected_type: type[DefType]) -> DefType:
from trezor.crypto.cosi import verify as cosi_verify
from trezor.crypto.hashlib import sha256
from trezor.enums import EthereumDefinitionType
from trezor.protobuf import decode as protobuf_decode
from trezor.utils import BufferReader
from apps.common import readers
from . import definitions_constants as consts
# check network definition
r = BufferReader(definition)
expected_type_number = EthereumDefinitionType.NETWORK
# TODO: can't check equality of MsgDefObjs now, so we check the name
if expected_type.MESSAGE_NAME == EthereumTokenInfo.MESSAGE_NAME:
expected_type_number = EthereumDefinitionType.TOKEN
try:
# first check format version
if r.read_memoryview(len(consts.FORMAT_VERSION)) != consts.FORMAT_VERSION:
raise DataError("Invalid Ethereum definition")
# second check the type of the data
if r.get() != expected_type_number:
raise DataError("Definition type mismatch")
# third check data version
if readers.read_uint32_le(r) < consts.MIN_DATA_VERSION:
raise DataError("Definition is outdated")
# get payload
payload_length = readers.read_uint16_le(r)
payload = r.read_memoryview(payload_length)
# at the end compute Merkle tree root hash using
# provided leaf data (payload with prefix) and proof
hasher = sha256(b"\x00")
hasher.update(memoryview(definition)[: r.offset])
hash = hasher.digest()
proof_length = r.get()
for _ in range(proof_length):
proof_entry = r.read_memoryview(32)
hash_a = min(hash, proof_entry)
hash_b = max(hash, proof_entry)
hasher = sha256(b"\x01")
hasher.update(hash_a)
hasher.update(hash_b)
hash = hasher.digest()
sigmask = r.get()
signature = r.read_memoryview(64)
if r.remaining_count():
raise DataError("Invalid Ethereum definition")
except EOFError:
raise DataError("Invalid Ethereum definition")
# verify signature
result = cosi_verify(signature, hash, consts.THRESHOLD, consts.PUBLIC_KEYS, sigmask)
if __debug__:
debug_result = cosi_verify(
signature, hash, consts.THRESHOLD, consts.DEV_PUBLIC_KEYS, sigmask
)
result = result or debug_result
if not result:
raise DataError("Invalid definition signature")
# decode it if it's OK
try:
return protobuf_decode(payload, expected_type, True)
except ValueError:
raise DataError("Invalid Ethereum definition")
class Definitions:
"""Class that holds Ethereum definitions - network and tokens.
Prefers built-in definitions over encoded ones.
"""
def __init__(
self, network: EthereumNetworkInfo, tokens: dict[bytes, EthereumTokenInfo]
) -> None:
self.network = network
self._tokens = tokens
@classmethod
def from_encoded(
cls,
encoded_network: bytes | None,
encoded_token: bytes | None,
chain_id: int | None = None,
slip44: int | None = None,
) -> Self:
from .networks import UNKNOWN_NETWORK, by_chain_id, by_slip44
network = UNKNOWN_NETWORK
tokens: dict[bytes, EthereumTokenInfo] = {}
# if we have a built-in definition, use it
if chain_id is not None:
network = by_chain_id(chain_id)
elif slip44 is not None:
network = by_slip44(slip44)
else:
# ignore encoded definitions if we can't match them to request details
return cls(UNKNOWN_NETWORK, {})
if network is UNKNOWN_NETWORK and encoded_network is not None:
network = METHOD_NAME(encoded_network, EthereumNetworkInfo)
if network is UNKNOWN_NETWORK:
# ignore tokens if we don't have a network
return cls(UNKNOWN_NETWORK, {})
if chain_id is not None and network.chain_id != chain_id:
raise DataError("Network definition mismatch")
if slip44 is not None and network.slip44 != slip44:
raise DataError("Network definition mismatch")
# get token definition
if encoded_token is not None:
token = METHOD_NAME(encoded_token, EthereumTokenInfo)
# Ignore token if it doesn't match the network instead of raising an error.
# This might help us in the future if we allow multiple networks/tokens
# in the same message.
if token.chain_id == network.chain_id:
tokens[token.address] = token
return cls(network, tokens)
def get_token(self, address: bytes) -> EthereumTokenInfo:
from .tokens import UNKNOWN_TOKEN, token_by_chain_address
# if we have a built-in definition, use it
token = token_by_chain_address(self.network.chain_id, address)
if token is not None:
return token
if address in self._tokens:
return self._tokens[address]
return UNKNOWN_TOKEN
|
4,399 |
handle poi
|
# Copyright (c) 2001-2022, Hove and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Hove (www.hove.com).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from jormungandr.parking_space_availability.abstract_provider_manager import AbstractProviderManager
from jormungandr.utils import can_connect_to_database
import logging
import datetime
POI_TYPE_ID = 'poi_type:amenity:bicycle_rental'
class BssProviderManager(AbstractProviderManager):
def __init__(self, bss_providers_configuration, providers_getter=None, update_interval=60):
super(BssProviderManager, self).__init__()
self._bss_providers_legacy = []
self._bss_providers = {}
self._bss_providers_last_update = {}
self._last_update = datetime.datetime(1970, 1, 1)
self._update_interval = update_interval
self._providers_getter = providers_getter
for configuration in bss_providers_configuration:
arguments = configuration.get('args', {})
self._bss_providers_legacy.append(self._init_class(configuration['class'], arguments))
def update_config(self):
if (
self._last_update + datetime.timedelta(seconds=self._update_interval) > datetime.datetime.utcnow()
or not self._providers_getter
):
return
logger = logging.getLogger(__name__)
# If database is not accessible we update the value of self._last_update and exit
if not can_connect_to_database():
logger.debug('Database is not accessible')
self._last_update = datetime.datetime.utcnow()
return
logger.debug('updating bss providers')
self._last_update = datetime.datetime.utcnow()
try:
providers = self._providers_getter()
except Exception as e:
logger.exception('No access to table bss_provider (error: {})'.format(e))
# database is not accessible, so let's use the values already present in self._bss_providers and
# self._bss_providers_legacy
# avoid sending query to the database for another update_interval
self._last_update = datetime.datetime.utcnow()
return
if not providers:
logger.debug('all providers have been disabled')
self._bss_providers = {}
self._bss_providers_last_update = {}
return
for provider in providers:
# it's a new bss provider or it has been updated, we add it
if (
provider.id not in self._bss_providers_last_update
or provider.last_update() > self._bss_providers_last_update[provider.id]
):
self.update_provider(provider)
# remove deleted providers
for to_delete in set(self._bss_providers.keys()) - {p.id for p in providers}:
del self._bss_providers[to_delete]
del self._bss_providers_last_update[to_delete]
logger.info('deleting bss provider %s', to_delete)
def update_provider(self, provider):
logger = logging.getLogger(__name__)
try:
self._bss_providers[provider.id] = self._init_class(provider.klass, provider.full_args())
self._bss_providers_last_update[provider.id] = provider.last_update()
except Exception:
logger.exception('impossible to initialize bss provider')
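    # Only POIs of type bicycle_rental are decorated: when a provider matches
    # the POI, its real-time availability is attached under the 'stands' key.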
def METHOD_NAME(self, item):
if 'poi_type' in item and item['poi_type']['id'] == POI_TYPE_ID:
provider = self._find_provider(item)
if provider:
item['stands'] = provider.get_informations(item)
return provider
return None
# TODO use public version everywhere
def _get_providers(self):
self.update_config()
# providers from the database have priority on legacies providers
return list(self._bss_providers.values()) + self._bss_providers_legacy
def get_providers(self):
return self._get_providers()
|