id int64 0–300k | label stringlengths 1–74 ⌀ | text stringlengths 4k–8k |
---|---|---|
1,600 | test international womens day |
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Authors: dr-prodigy <[email protected]> (c) 2017-2023
# ryanss <[email protected]> (c) 2014-2017
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
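# Minimal usage sketch of the class under test (dates are illustrative; the
# dictionary-style lookups are standard python-holidays behaviour):
#     ru_holidays = Russia(years=2018)
#     "2018-05-09" in ru_holidays      # True -- Victory Day
#     ru_holidays.get("2018-05-09")    # "День Победы"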
from holidays.countries.russia import Russia, RU, RUS
from tests.common import TestCase
class TestRussia(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass(Russia, years=range(1991, 2025))
def test_country_aliases(self):
self.assertCountryAliases(Russia, RU, RUS)
def test_no_holidays(self):
self.assertNoHolidays(Russia(years=1990))
def test_special_holidays(self):
self.assertHoliday(
"2023-02-24",
"2023-05-08",
)
def test_new_year(self):
name_1 = "Новый год"
name_2 = "Новогодние каникулы"
self.assertHolidayName(name_1, (f"{year}-01-01" for year in range(1991, 2005)))
self.assertHolidayName(name_1, (f"{year}-01-02" for year in range(1993, 2005)))
self.assertNoHoliday(f"{year}-01-02" for year in range(1991, 1992))
for year in range(2005, 2025):
self.assertHolidayName(
name_2,
f"{year}-01-01",
f"{year}-01-02",
f"{year}-01-03",
f"{year}-01-04",
f"{year}-01-05",
)
for year in range(2013, 2025):
self.assertHolidayName(name_2, f"{year}-01-06", f"{year}-01-08")
for year in range(1991, 2005):
self.assertNoHoliday(f"{year}-01-03", f"{year}-01-04", f"{year}-01-05")
for year in range(1991, 2013):
self.assertNoHoliday(f"{year}-01-06", f"{year}-01-08")
self.assertNoHolidayName(name_1, range(2005, 2025))
self.assertNoHolidayName(name_2, range(1991, 2005))
def test_christmas_day(self):
self.assertHolidayName(
"Рождество Христово", (f"{year}-01-07" for year in range(1991, 2025))
)
def test_defender_of_fatherland_day(self):
name = "День защитника Отечества"
self.assertHolidayName(name, (f"{year}-02-23" for year in range(2002, 2025)))
self.assertNoHoliday(f"{year}-02-23" for year in range(1991, 2002))
self.assertNoHolidayName(name, range(1991, 2002))
def METHOD_NAME(self):
self.assertHolidayName(
"Международный женский день", (f"{year}-03-08" for year in range(1991, 2025))
)
def test_labor_day(self):
name_1 = "День международной солидарности трудящихся"
name_2 = "Праздник Весны и Труда"
self.assertHolidayName(name_1, "1991-05-01", "1991-05-02")
self.assertHolidayName(name_2, (f"{year}-05-01" for year in range(1992, 2025)))
self.assertHolidayName(name_2, (f"{year}-05-02" for year in range(1992, 2005)))
self.assertNoHoliday(f"{year}-05-02" for year in range(2005, 2025))
self.assertNoHolidayName(name_1, range(1992, 2025))
self.assertNoHolidayName(name_2, 1991)
def test_victory_day(self):
self.assertHolidayName("День Победы", (f"{year}-05-09" for year in range(1991, 2025)))
def test_russia_day(self):
name_1 = "День принятия Декларации о государственном суверенитете Российской Федерации"
name_2 = "День России"
self.assertHolidayName(name_1, (f"{year}-06-12" for year in range(1992, 2002)))
self.assertHolidayName(name_2, (f"{year}-06-12" for year in range(2002, 2025)))
self.assertNoHoliday("1991-06-12")
self.assertNoHolidayName(name_1, 1991, range(2002, 2025))
self.assertNoHolidayName(name_2, range(1991, 2002))
def test_unity_day(self):
name = "День народного единства"
self.assertHolidayName(name, (f"{year}-11-04" for year in range(2005, 2025)))
self.assertNoHoliday(f"{year}-11-04" for year in range(1991, 2005))
self.assertNoHolidayName(name, range(1991, 2005))
def test_october_revolution(self):
name_1 = "Годовщина Великой Октябрьской социалистической революции"
name_2 = "День согласия и примирения"
self.assertHolidayName(name_1, (f"{year}-11-07" for year in range(1991, 1996)))
self.assertHolidayName(name_1, "1991-11-08")
self.assertHolidayName(name_2, (f"{year}-11-07" for year in range(1996, 2005)))
self.assertNoHoliday(f"{year}-11-07" for year in range(2005, 2025))
self.assertNoHoliday(f"{year}-11-08" for year in range(1992, 2025))
self.assertNoHolidayName(name_1, range(1996, 2025))
self.assertNoHolidayName(name_2, range(1991, 1996), range(2005, 2025))
def test_2018(self):
self.assertHolidays(
Russia(years=2018),
("2018-01-01", "Новогодние каникулы"),
("2018-01-02", "Новогодние каникулы"),
("2018-01-03", "Новогодние каникулы"),
("2018-01-04", "Новогодние каникулы"),
("2018-01-05", "Новогодние каникулы"),
("2018-01-06", "Новогодние каникулы"),
("2018-01-07", "Рождество Христово"),
("2018-01-08", "Новогодние каникулы"),
("2018-02-23", "День защитника Отечества"),
("2018-03-08", "Международный женский день"),
("2018-05-01", "Праздник Весны и Труда"),
("2018-05-09", "День Победы"),
("2018-06-12", "День России"),
("2018-11-04", "День народного единства"),
)
def test_l10n_default(self):
self.assertLocalizedHolidays(
("2018-01-01", "Новогодние каникулы"),
("2018-01-02", "Новогодние каникулы"),
("2018-01-03", "Новогодние каникулы"),
("2018-01-04", "Новогодние каникулы"),
("2018-01-05", "Новогодние каникулы"),
("2018-01-06", "Новогодние каникулы"),
("2018-01-07", "Рождество Христово"),
("2018-01-08", "Новогодние каникулы"),
("2018-02-23", "День защитника Отечества"),
("2018-03-08", "Международный женский день"),
("2018-05-01", "Праздник Весны и Труда"),
("2018-05-09", "День Победы"),
("2018-06-12", "День России"),
("2018-11-04", "День народного единства"),
)
def test_l10n_en_us(self):
self.assertLocalizedHolidays(
"en_US",
("2018-01-01", "New Year Holidays"),
("2018-01-02", "New Year Holidays"),
("2018-01-03", "New Year Holidays"),
("2018-01-04", "New Year Holidays"),
("2018-01-05", "New Year Holidays"),
("2018-01-06", "New Year Holidays"),
("2018-01-07", "Christmas Day"),
("2018-01-08", "New Year Holidays"),
("2018-02-23", "Fatherland Defender's Day"),
("2018-03-08", "International Women's Day"),
("2018-05-01", "Holiday of Spring and Labor"),
("2018-05-09", "Victory Day"),
("2018-06-12", "Russia Day"),
("2018-11-04", "Unity Day"),
)
| 1,601 | forms save |
# flake8: noqa
from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.forms import models as model_forms
from django.forms.formsets import all_valid
from django.http import HttpResponseRedirect
from django.utils.encoding import force_str
from django.views import generic
class MultiFormMixin(generic.base.ContextMixin):
forms = {}
success_url = None
prefix = None
def forms_invalid(self, forms):
"""
If any form is invalid, re-render the context data with the
data-filled forms and errors.
"""
return self.render_to_response(self.get_context_data(forms=forms))
def forms_valid(self, forms):
"""
If all forms are valid, redirect to the supplied URL.
"""
return HttpResponseRedirect(self.get_success_url())
def get_context_data(self, **kwargs):
"""
Insert the forms into the context dict.
"""
if "forms" not in kwargs:
kwargs["forms"] = self.get_forms()
return super().get_context_data(**kwargs)
def get_forms(self):
"""
Returns instances of the forms to be used in this view.
"""
forms = {}
for name in self.forms.keys():
form_class = self.get_form_class(name)
if form_class:
forms[name] = form_class(**self.get_form_kwargs(name))
return forms
def _get_from_name(self, name, key, default=None):
form = self.forms.get(name)
if form:
return form.get(key, default)
def get_form_class(self, name):
"""
Returns the form class to be used with the named form.
"""
return self._get_from_name(name, "form_class")
def get_initial(self, name):
"""
Returns the initial data to use for the named form.
"""
initial = self._get_from_name(name, "initial", {})
return initial.copy()
def get_prefix(self, name):
"""
Returns the prefix to use for the named form.
"""
if self.prefix:
return "{}_{}".format(self.prefix, name)
return name
def get_form_kwargs(self, name):
"""
Returns the keyword arguments for instantiating the named form.
"""
kwargs = {
"initial": self.get_initial(name),
"prefix": self.get_prefix(name),
}
if self.request.method in ("POST", "PUT"):
kwargs.update(
{
"data": self.request.POST,
"files": self.request.FILES,
}
)
return kwargs
def get_success_url(self):
"""
Returns the supplied success URL.
"""
if self.success_url:
# Forcing possible reverse_lazy evaluation
url = force_str(self.success_url)
else:
raise ImproperlyConfigured("No URL to redirect to. Provide a success_url.")
return url
class MultiModelFormMixin(MultiFormMixin):
objects = {}
def forms_valid(self, forms):
"""
If all forms are valid, save the associated models.
"""
self.objects = self.METHOD_NAME(forms)
self.forms_save_m2m(forms)
return super().forms_valid(forms)
def METHOD_NAME(self, forms, commit=True):
"""
Save all the forms in one transaction.
"""
objects = {}
with transaction.atomic():
for name in self.forms.keys():
if hasattr(forms[name], "save"):
objects[name] = forms[name].save(commit)
return objects
def forms_save_m2m(self, forms):
"""
Calls save_m2m on every form where it is available.
Has to be called after the forms have been saved.
"""
for form in forms.values():
if hasattr(form, "save_m2m"):
form.save_m2m()
def get_form_class(self, name):
"""
Returns the form class to be used with the named form.
"""
fields = self._get_from_name(name, "fields")
form_class = self._get_from_name(name, "form_class")
model = self._get_from_name(name, "model")
if fields is not None and form_class:
raise ImproperlyConfigured(
"Specifying both 'fields' and 'form_class' is not permitted."
)
if form_class:
return form_class
elif model is not None:
if fields is None:
raise ImproperlyConfigured(
"Using MultiModelFormMixin (base class of %s) without "
"the 'fields' attribute is prohibited." % self.__class__.__name__
)
return model_forms.modelform_factory(model, fields=fields)
def get_form_kwargs(self, name):
"""
Returns the keyword arguments for instantiating the named form.
"""
kwargs = super().get_form_kwargs(name)
instance = self.get_instance(name)
if instance:
kwargs.update({"instance": instance})
return kwargs
def get_instance(self, name):
"""
Returns the instance object used for instantiating the named form.
        If no instance (None) is returned, the Django BaseModelForm
creates a default instance of the provided model.
"""
pass
class ProcessMultiFormView(generic.View):
def get(self, request, *args, **kwargs):
"""
Handles GET requests and instantiates a blank version of the form.
"""
return self.render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
"""
Handles POST requests, instantiating a form instance with the passed
        POST variables and then checking them for validity.
"""
forms = self.get_forms()
if all_valid(forms.values()):
return self.forms_valid(forms)
else:
return self.forms_invalid(forms)
def put(self, *args, **kwargs):
return self.post(*args, **kwargs)
class BaseMultiFormView(MultiFormMixin, ProcessMultiFormView):
"""
A base view for displaying multiple forms.
"""
class BaseMultiModelFormView(MultiModelFormMixin, ProcessMultiFormView):
"""
A base view for displaying multiple forms that may contain ModelForms.
"""
| 1,602 | test gravity attribute |
#! /usr/bin/env python
"""
Unit tests for landlab.components.flexure.flexure
"""
import numpy as np
import pytest
from landlab import RasterModelGrid
from landlab.components import Flexure
(_SHAPE, _SPACING, _ORIGIN) = ((20, 20), (10e3, 10e3), (0.0, 0.0))
def test_method_names():
grid = RasterModelGrid((20, 20), xy_spacing=10e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
assert Flexure(grid, method="airy").method == "airy"
assert Flexure(grid, method="flexure").method == "flexure"
with pytest.raises(ValueError):
Flexure(grid, method="bad-name")
def test_eet_attribute():
grid = RasterModelGrid((20, 20), xy_spacing=10e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
for val in (10e3, 1e3):
assert Flexure(grid, eet=val).eet == pytest.approx(val)
with pytest.raises(ValueError):
assert Flexure(grid, eet=-10e3)
def test_youngs_attribute():
grid = RasterModelGrid((20, 20), xy_spacing=10e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
for val in (10e3, 1e3):
assert Flexure(grid, youngs=val).youngs == pytest.approx(val)
def METHOD_NAME():
grid = RasterModelGrid((20, 20), xy_spacing=10e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
for val in (10e3, 1e3):
assert Flexure(grid, gravity=val).gravity == pytest.approx(val)
def test_rho_mantle_attribute():
grid = RasterModelGrid((20, 20), xy_spacing=10e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
for val in (10e3, 1e3):
assert Flexure(grid, rho_mantle=val).rho_mantle == pytest.approx(val)
def test_name(flex):
assert flex.name == "Flexure"
def test_input_var_names(flex):
assert flex.input_var_names == ("lithosphere__overlying_pressure_increment",)
def test_output_var_names(flex):
assert flex.output_var_names == ("lithosphere_surface__elevation_increment",)
def test_var_units(flex):
assert set(flex.input_var_names) | set(flex.output_var_names) == set(
dict(flex.units).keys()
)
assert flex.var_units("lithosphere_surface__elevation_increment") == "m"
assert flex.var_units("lithosphere__overlying_pressure_increment") == "Pa"
def test_grid_shape(flex):
assert flex.grid.number_of_node_rows == _SHAPE[0]
assert flex.grid.number_of_node_columns == _SHAPE[1]
def test_grid_x_extent(flex):
assert flex.grid.extent[1] == (_SHAPE[1] - 1) * _SPACING[1]
def test_grid_y_extent(flex):
assert flex.grid.extent[0] == (_SHAPE[0] - 1) * _SPACING[0]
def test_field_getters(flex):
for name in flex.grid["node"]:
field = flex.grid["node"][name]
assert isinstance(field, np.ndarray)
assert field.shape == (
flex.grid.number_of_node_rows * flex.grid.number_of_node_columns,
)
with pytest.raises(KeyError):
flex.grid["not_a_var_name"]
def test_field_initialized_to_zero(flex):
for name in flex.grid["node"]:
field = flex.grid["node"][name]
assert np.all(field == 0.0)
def test_update():
n = 11
n_mid = (n - 1) // 2
i_mid = np.ravel_multi_index((n_mid, n_mid), (n, n))
load_0 = 1e9
grid = RasterModelGrid((n, n), xy_spacing=1e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
flex = Flexure(grid, method="flexure")
load = grid.at_node["lithosphere__overlying_pressure_increment"]
load[i_mid] = load_0
flex.update()
dz = flex.grid.at_node["lithosphere_surface__elevation_increment"].reshape((n, n))
assert np.argmax(dz) == i_mid
assert dz[n_mid, n_mid] > 0.0
assert np.all(dz[:, n_mid::-1] == dz[:, n_mid:])
assert np.all(dz[n_mid::-1, :] == dz[n_mid:, :])
def test_subside_loads():
n, load_0 = 11, 1e9
grid = RasterModelGrid((n, n), xy_spacing=1e3)
grid.add_zeros("lithosphere__overlying_pressure_increment", at="node")
flex = Flexure(grid, method="flexure")
grid.at_node["lithosphere__overlying_pressure_increment"][0] = load_0
flex.update()
dz_expected = flex.grid.at_node["lithosphere_surface__elevation_increment"]
load = np.zeros((n, n))
load[0, 0] = load_0
dz = flex.subside_loads(load)
assert np.all(dz.flatten() == pytest.approx(dz_expected))
out = np.zeros((n, n))
dz = flex.subside_loads(load, out=out)
assert dz is out
| 1,603 | test port bind failure recovery |
"""Tests for kernel connection utilities"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import errno
import json
import os
from tempfile import TemporaryDirectory
from typing import no_type_check
from unittest.mock import patch
import pytest
import zmq
from traitlets.config.loader import Config
from ipykernel import connect
from ipykernel.kernelapp import IPKernelApp
from .utils import TemporaryWorkingDirectory
sample_info: dict = {
"ip": "1.2.3.4",
"transport": "ipc",
"shell_port": 1,
"hb_port": 2,
"iopub_port": 3,
"stdin_port": 4,
"control_port": 5,
"key": b"abc123",
"signature_scheme": "hmac-md5",
}
class DummyKernelApp(IPKernelApp):
def _default_shell_port(self):
return 0
def initialize(self, argv=None):
self.init_profile_dir()
self.init_connection_file()
def test_get_connection_file():
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = "kernel.json"
app = DummyKernelApp(config=cfg, connection_file=cf)
app.initialize()
profile_cf = os.path.join(app.connection_dir, cf)
assert profile_cf == app.abs_connection_file
with open(profile_cf, "w") as f:
f.write("{}")
assert os.path.exists(profile_cf)
assert connect.get_connection_file(app) == profile_cf
app.connection_file = cf
assert connect.get_connection_file(app) == profile_cf
def test_get_connection_info():
with TemporaryDirectory() as d:
cf = os.path.join(d, "kernel.json")
connect.write_connection_file(cf, **sample_info)
json_info = connect.get_connection_info(cf)
info = connect.get_connection_info(cf, unpack=True)
assert isinstance(json_info, str)
sub_info = {k: v for k, v in info.items() if k in sample_info}
assert sub_info == sample_info
info2 = json.loads(json_info)
info2["key"] = info2["key"].encode("utf-8")
        sub_info2 = {k: v for k, v in info2.items() if k in sample_info}
assert sub_info2 == sample_info
def test_port_bind_failure_raises(request):
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = "kernel.json"
app = DummyKernelApp(config=cfg, connection_file=cf)
request.addfinalizer(app.close)
app.initialize()
with patch.object(app, "_try_bind_socket") as mock_try_bind:
mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types")
with pytest.raises(zmq.ZMQError):
app.init_sockets()
assert mock_try_bind.call_count == 1
@no_type_check
def METHOD_NAME(request):
try:
errno.WSAEADDRINUSE
except AttributeError:
# Fake windows address in-use code
p = patch.object(errno, "WSAEADDRINUSE", 12345, create=True)
p.start()
request.addfinalizer(p.stop)
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = "kernel.json"
app = DummyKernelApp(config=cfg, connection_file=cf)
request.addfinalizer(app.close)
app.initialize()
with patch.object(app, "_try_bind_socket") as mock_try_bind:
mock_try_bind.side_effect = [
zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"),
zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows"),
] + [0] * 100
# Shouldn't raise anything as retries will kick in
app.init_sockets()
def test_port_bind_failure_gives_up_retries(request):
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = "kernel.json"
app = DummyKernelApp(config=cfg, connection_file=cf)
request.addfinalizer(app.close)
app.initialize()
with patch.object(app, "_try_bind_socket") as mock_try_bind:
mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind")
with pytest.raises(zmq.ZMQError):
app.init_sockets()
assert mock_try_bind.call_count == 100
| 1,604 | get versions |
# -*- coding: utf-8 -*-
"""
(c) 2014-2020 - Copyright Red Hat Inc
Authors:
Pierre-Yves Chibon <[email protected]>
Ralph Bean <[email protected]>
Michal Konecny <[email protected]>
"""
from anitya.lib import xml2dict
from anitya.lib.backends import BaseBackend
from anitya.lib.exceptions import AnityaPluginException
class PypiBackend(BaseBackend):
"""The PyPI class for project hosted on PyPI."""
name = "PyPI"
examples = [
"https://pypi.python.org/pypi/arrow",
"https://pypi.org/project/fedmsg/",
]
@classmethod
def get_version(cls, project):
"""Method called to retrieve the latest version of the projects
provided, project that relies on the backend of this plugin.
:arg Project project: a :class:`anitya.db.models.Project` object whose backend
corresponds to the current plugin.
:return: the latest version found upstream
:return type: str
:raise AnityaPluginException: a
:class:`anitya.lib.exceptions.AnityaPluginException` exception
when the version cannot be retrieved correctly
"""
url = cls.get_version_url(project)
last_change = project.get_time_last_created_version()
try:
req = cls.call_url(url, last_change=last_change)
except Exception as err: # pragma: no cover
raise AnityaPluginException(f"Could not contact {url}") from err
# Not modified
if req.status_code == 304:
return None
try:
data = req.json()
except Exception as err: # pragma: no cover
raise AnityaPluginException(f"No JSON returned by {url}") from err
return data["info"]["version"]
@classmethod
def get_version_url(cls, project):
"""Method called to retrieve the url used to check for new version
of the project provided, project that relies on the backend of this plugin.
Attributes:
project (:obj:`anitya.db.models.Project`): Project object whose backend
corresponds to the current plugin.
Returns:
str: url used for version checking
"""
url = f"https://pypi.org/pypi/{project.name}/json"
return url
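    # For example (illustrative): a project named "requests" resolves to
    # https://pypi.org/pypi/requests/json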
@classmethod
def METHOD_NAME(cls, project):
"""Method called to retrieve all the versions (that can be found)
of the projects provided, project that relies on the backend of
this plugin.
:arg Project project: a :class:`anitya.db.models.Project` object whose backend
corresponds to the current plugin.
:return: a list of all the possible releases found
:return type: list
:raise AnityaPluginException: a
:class:`anitya.lib.exceptions.AnityaPluginException` exception
when the versions cannot be retrieved correctly
"""
url = cls.get_version_url(project)
last_change = project.get_time_last_created_version()
try:
req = cls.call_url(url, last_change=last_change)
except Exception as err: # pragma: no cover
raise AnityaPluginException(f"Could not contact {url}") from err
# Not modified
if req.status_code == 304:
return []
try:
data = req.json()
except Exception as err: # pragma: no cover
raise AnityaPluginException(f"No JSON returned by {url}") from err
# Filter yanked versions
unyanked_versions = []
# Just return empty list if "releases" key is missing in json
if "releases" not in data:
return []
for version in data["releases"].keys():
if not data["releases"][version] == []:
if "yanked" in data["releases"][version][0]:
if data["releases"][version][0]["yanked"]:
continue
            # Old releases don't contain metadata
unyanked_versions.append(version)
# Filter retrieved versions
filtered_versions = cls.filter_versions(
unyanked_versions,
project.version_filter,
)
return filtered_versions
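    # Sketch of the JSON shape the yanked-version filter above walks over
    # (fields abridged; values are illustrative):
    #     {
    #       "info": {"version": "2.0.1"},
    #       "releases": {
    #           "2.0.1": [{"yanked": false, ...}],
    #           "1.9.0": [{"yanked": true, ...}],   # skipped above
    #           "0.1":   []                         # old release without files
    #       }
    #     }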
@classmethod
def check_feed(cls):
"""Return a generator over the latest 40 uploads to PyPI
by querying an RSS feed.
"""
url = "https://pypi.org/rss/updates.xml"
try:
response = cls.call_url(url)
except Exception as err: # pragma: no cover
raise AnityaPluginException(f"Could not contact {url}") from err
try:
parser = xml2dict.XML2Dict()
data = parser.fromstring(response.text)
except Exception as err: # pragma: no cover
raise AnityaPluginException(f"No XML returned by {url}") from err
items = data["rss"]["channel"]["item"]
for entry in items:
title = entry["title"]["value"]
name, version = title.rsplit(None, 1)
homepage = f"https://pypi.org/project/{name}/"
yield name, homepage, cls.name, version
| 1,605 | test create python bundle |
import unittest
from os.path import join
from unittest import mock
from pythonforandroid.recipes.python3 import (
NDK_API_LOWER_THAN_SUPPORTED_MESSAGE,
)
from pythonforandroid.util import BuildInterruptingException, build_platform
from tests.recipes.recipe_lib_test import RecipeCtx
class TestPython3Recipe(RecipeCtx, unittest.TestCase):
"""
TestCase for recipe :mod:`~pythonforandroid.recipes.python3`
"""
recipe_name = "python3"
expected_compiler = (
f"/opt/android/android-ndk/toolchains/"
f"llvm/prebuilt/{build_platform}/bin/clang"
)
def test_property__libpython(self):
self.assertEqual(
self.recipe._libpython,
f'libpython{self.recipe.link_version}.so'
)
@mock.patch('pythonforandroid.recipes.python3.Path.is_file')
def test_should_build(self, mock_is_file):
# in case that python lib exists, we shouldn't trigger the build
self.assertFalse(self.recipe.should_build(self.arch))
# in case that python lib doesn't exist, we should trigger the build
mock_is_file.return_value = False
self.assertTrue(self.recipe.should_build(self.arch))
def test_include_root(self):
expected_include_dir = join(
self.recipe.get_build_dir(self.arch.arch), 'Include',
)
self.assertEqual(
expected_include_dir, self.recipe.include_root(self.arch.arch)
)
def test_link_root(self):
expected_link_root = join(
self.recipe.get_build_dir(self.arch.arch), 'android-build',
)
self.assertEqual(
expected_link_root, self.recipe.link_root(self.arch.arch)
)
@mock.patch("pythonforandroid.recipes.python3.subprocess.call")
def test_compile_python_files(self, mock_subprocess):
fake_compile_dir = '/fake/compile/dir'
hostpy = self.recipe.ctx.hostpython = '/fake/hostpython3'
self.recipe.compile_python_files(fake_compile_dir)
mock_subprocess.assert_called_once_with(
[hostpy, '-OO', '-m', 'compileall', '-b', '-f', fake_compile_dir],
)
@mock.patch("pythonforandroid.recipe.Recipe.check_recipe_choices")
@mock.patch("pythonforandroid.archs.find_executable")
def test_get_recipe_env(
self,
mock_find_executable,
mock_check_recipe_choices,
):
"""
Test that method
:meth:`~pythonforandroid.recipes.python3.Python3Recipe.get_recipe_env`
returns the expected flags
"""
mock_find_executable.return_value = self.expected_compiler
mock_check_recipe_choices.return_value = sorted(
self.ctx.recipe_build_order
)
env = self.recipe.get_recipe_env(self.arch)
self.assertIn('-fPIC -DANDROID', env["CFLAGS"])
self.assertEqual(env["CC"], self.arch.get_clang_exe(with_target=True))
# make sure that the mocked methods are actually called
mock_check_recipe_choices.assert_called()
def test_set_libs_flags(self):
# todo: properly check `Python3Recipe.set_lib_flags`
pass
# These decorators are to mock calls to `get_recipe_env`
# and `set_libs_flags`, since these calls are tested separately
@mock.patch("pythonforandroid.util.chdir")
@mock.patch("pythonforandroid.util.makedirs")
@mock.patch("pythonforandroid.archs.find_executable")
def test_build_arch(
self,
mock_find_executable,
mock_makedirs,
mock_chdir):
mock_find_executable.return_value = self.expected_compiler
# specific `build_arch` mocks
with mock.patch(
"builtins.open",
mock.mock_open(read_data="#define ZLIB_VERSION 1.1\nfoo")
) as mock_open_zlib, mock.patch(
"pythonforandroid.recipes.python3.sh.Command"
) as mock_sh_command, mock.patch(
"pythonforandroid.recipes.python3.sh.make"
) as mock_make, mock.patch(
"pythonforandroid.recipes.python3.sh.cp"
) as mock_cp:
self.recipe.build_arch(self.arch)
# make sure that the mocked methods are actually called
recipe_build_dir = self.recipe.get_build_dir(self.arch.arch)
sh_command_calls = {
f"{recipe_build_dir}/config.guess",
f"{recipe_build_dir}/configure",
}
for command in sh_command_calls:
self.assertIn(
mock.call(command),
mock_sh_command.mock_calls,
)
mock_open_zlib.assert_called()
self.assertEqual(mock_make.call_count, 1)
for make_call, kw in mock_make.call_args_list:
self.assertIn(
f'INSTSONAME={self.recipe._libpython}', make_call
)
mock_cp.assert_called_with(
"pyconfig.h", join(recipe_build_dir, 'Include'),
)
mock_makedirs.assert_called()
mock_chdir.assert_called()
def test_build_arch_wrong_ndk_api(self):
# we check ndk_api using recipe's ctx
self.recipe.ctx.ndk_api = 20
with self.assertRaises(BuildInterruptingException) as e:
self.recipe.build_arch(self.arch)
self.assertEqual(
e.exception.args[0],
NDK_API_LOWER_THAN_SUPPORTED_MESSAGE.format(
ndk_api=self.recipe.ctx.ndk_api,
min_ndk_api=self.recipe.MIN_NDK_API,
),
)
# restore recipe's ctx or we could get failures with other test,
# since we share `self.recipe with all the tests of the class
self.recipe.ctx.ndk_api = self.ctx.ndk_api
@mock.patch('shutil.copystat')
@mock.patch('shutil.copyfile')
@mock.patch("pythonforandroid.util.chdir")
@mock.patch("pythonforandroid.util.makedirs")
@mock.patch("pythonforandroid.util.walk")
@mock.patch("pythonforandroid.recipes.python3.sh.find")
@mock.patch("pythonforandroid.recipes.python3.sh.cp")
@mock.patch("pythonforandroid.recipes.python3.sh.zip")
@mock.patch("pythonforandroid.recipes.python3.subprocess.call")
def METHOD_NAME(
self,
mock_subprocess,
mock_sh_zip,
mock_sh_cp,
mock_sh_find,
mock_walk,
mock_makedirs,
mock_chdir,
mock_copyfile,
mock_copystat,
):
fake_compile_dir = '/fake/compile/dir'
simulated_walk_result = [
["/fake_dir", ["__pycache__", "Lib"], ["README", "setup.py"]],
["/fake_dir/Lib", ["ctypes"], ["abc.pyc", "abc.py"]],
["/fake_dir/Lib/ctypes", [], ["util.pyc", "util.py"]],
]
mock_walk.return_value = simulated_walk_result
self.recipe.create_python_bundle(fake_compile_dir, self.arch)
recipe_build_dir = self.recipe.get_build_dir(self.arch.arch)
modules_build_dir = join(
recipe_build_dir,
'android-build',
'build',
'lib.linux{}-{}-{}'.format(
'2' if self.recipe.version[0] == '2' else '',
self.arch.command_prefix.split('-')[0],
self.recipe.major_minor_version_string
))
expected_sp_paths = [
modules_build_dir,
join(recipe_build_dir, 'Lib'),
self.ctx.get_python_install_dir(self.arch.arch),
]
for n, (sp_call, kw) in enumerate(mock_subprocess.call_args_list):
self.assertEqual(sp_call[0][-1], expected_sp_paths[n])
# we expect two calls to `walk_valid_filens`
self.assertEqual(len(mock_walk.call_args_list), 2)
mock_sh_zip.assert_called()
mock_sh_cp.assert_called()
mock_sh_find.assert_called()
mock_makedirs.assert_called()
mock_chdir.assert_called()
mock_copyfile.assert_called()
mock_copystat.assert_called()
| 1,606 | list ports |
"""
Defines the OvsBridgeDevice class.
Copyright 2017 Red Hat, Inc.
Licensed under the GNU General Public License, version 2 as
published by the Free Software Foundation; see COPYING for details.
"""
__author__ = """
[email protected] (Ondrej Lichtner)
"""
import re
import pprint
from lnst.Common.ExecCmd import exec_cmd
from lnst.Common.DeviceError import DeviceError
from lnst.Devices.Device import Device
from lnst.Devices.SoftDevice import SoftDevice
class OvsBridgeDevice(SoftDevice):
_name_template = "t_ovsbr"
def __init__(self, ifmanager, *args, **kwargs):
super(OvsBridgeDevice, self).__init__(ifmanager)
self._type_init()
@classmethod
def _type_init(cls):
exec_cmd("systemctl start openvswitch.service", die_on_err=False)
def _create(self):
exec_cmd("ovs-vsctl add-br %s" % self.name)
def destroy(self):
exec_cmd("ovs-vsctl del-br %s" % self.name)
def _dict_to_keyvalues(self, options):
opts = ""
for opt_name, opt_value in options.items():
opts += " %s=%s" % (opt_name, opt_value)
return opts
def _interface_cmd(self, interface, options):
keyvalues = self._dict_to_keyvalues(options)
cmd = ""
if len(keyvalues):
cmd = " -- set Interface {} {}".format(interface, keyvalues)
return cmd
def _format_ovs_json_value(self, value):
formatted_value = None
if type(value) == list:
value_type = value[0]
if value_type == 'map':
formatted_value = value[1]
elif value_type == 'set':
formatted_value = value[1]
elif value_type == 'uuid':
formatted_value = value[1]
else:
raise Exception("Unknown type in ovs json output: {}".format(
value_type))
else:
formatted_value = value
return formatted_value
def _format_ovs_json(self, ovs_json):
headings = ovs_json['headings']
data = ovs_json['data']
formatted_data = []
for data_entry in data:
formatted_fields = {}
for i, entry_value in enumerate(data_entry):
formatted_fields[headings[i]] = self._format_ovs_json_value(entry_value)
formatted_data.append(formatted_fields)
return formatted_data
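    # Illustrative shape of the `ovs-vsctl --format json list ...` output that
    # the two helpers above unwrap (columns and values abridged):
    #     {
    #       "headings": ["_uuid", "name", "interfaces", ...],
    #       "data": [
    #           [["uuid", "1e9c..."], "t_ovsbr0", ["uuid", "ab12..."], ...]
    #       ]
    #     }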
def METHOD_NAME(self):
out_json = exec_cmd("ovs-vsctl --format json list port",
log_outputs=False, json=True)[0]
return self._format_ovs_json(out_json)
def _list_interfaces(self):
out_json = exec_cmd("ovs-vsctl --format json list interface",
log_outputs=False, json=True)[0]
return self._format_ovs_json(out_json)
def port_add(self, device=None, port_options={}, interface_options={}):
if device is None:
dev_name = interface_options.get('name',
self._if_manager.assign_name(interface_options['type']))
else:
dev_name = device.name
exec_cmd("ovs-vsctl add-port {} {}{}{}".format(self.name, dev_name,
self._dict_to_keyvalues(port_options),
self._interface_cmd(dev_name, interface_options)))
iface = None
if 'type' in interface_options and interface_options['type'] == 'internal':
iface = self._if_manager.get_device_by_name(dev_name)
iface._enable()
return iface
def port_del(self, dev):
if isinstance(dev, Device):
exec_cmd("ovs-vsctl del-port %s %s" % (self.name, dev.name))
elif isinstance(dev, str):
exec_cmd("ovs-vsctl del-port %s %s" % (self.name, dev))
else:
raise DeviceError("Invalid port_del argument %s" % str(dev))
def bond_add(self, port_name, devices, **kwargs):
dev_names = ""
for dev in devices:
dev_names += " %s" % dev.name
options = ""
for opt_name, opt_value in kwargs.items():
options += " %s=%s" % (opt_name, opt_value)
exec_cmd("ovs-vsctl add-bond %s %s %s %s" % (self.name, port_name,
dev_names, options))
def bond_del(self, dev):
self.port_del(dev)
def tunnel_add(self, tunnel_type, options):
options_copy = options.copy()
options_copy['type'] = tunnel_type
self.port_add(device=None, interface_options=options_copy)
def tunnel_del(self, name):
self.port_del(name)
def flow_add(self, entry):
exec_cmd("ovs-ofctl add-flow %s '%s'" % (self.name, entry))
def flows_add(self, entries):
for entry in entries:
self.flow_add(entry)
def flows_del(self, entry):
exec_cmd("ovs-ofctl del-flows %s" % (self.name))
@property
def ports(self):
ports = self.METHOD_NAME()
interfaces = self._list_interfaces()
filtered_ports = {}
for port in ports:
port_iface_uuid = port['interfaces']
port_ifaces = [ iface for iface in interfaces if iface['_uuid'] == port_iface_uuid ]
if len(port_ifaces):
port_iface = port_ifaces[0]
filtered_ports[port['name']] = {
'interface': port_iface['name'],
'type': port_iface['type'],
'options': port_iface['options'],
}
return filtered_ports
@property
def tunnels(self):
tunnels = self.ports.copy()
for port in self.ports.keys():
if tunnels[port]['type'] in ['', 'internal']:
del tunnels[port]
return tunnels
@property
def bonds(self):
bonds = {}
bond_list = []
out = exec_cmd("ovs-appctl bond/list", log_outputs=False)[0]
for line in out.split('\n'):
if line:
bond_list.append(line.split('\t'))
for bond in bond_list[1:]:
bonds[bond[0]] = {'type' : bond[1], 'slaves' : bond[3]}
return bonds
@property
def flows_str(self):
flows = []
ignore_exprs = [r"cookie", r"duration", r"n_packets",
r"n_bytes", r"idle_age"]
out = exec_cmd("ovs-ofctl dump-flows %s" % self.name,
log_outputs=False)[0]
for line in out.split('\n'):
if line:
flows.append(line.split(', '))
for flow in flows[1:]:
for entry in list(flow):
for expr in ignore_exprs:
if re.search(expr, entry):
del flow[flow.index(entry)]
break
return pprint.pformat(flows[1:])
| 1,607 | get sp i temperature |
"""Class to interface with the SPI Rack Qutech Delft."""
from qblox_instruments import SpiRack
from qibo.config import log, raise_error
from qibolab.instruments.abstract import Instrument, InstrumentException
class SPI(Instrument):
property_wrapper = lambda parent, device, *parameter: property(
lambda self: device.get(parameter[0]),
lambda self, x: parent._set_device_parameter(device, *parameter, value=x),
)
def __init__(self, name, address):
super().__init__(name, address)
self.device: SpiRack = None
self.s4g_modules_settings = {}
self.d5a_modules_settings = {}
self.dacs = {}
self.device_parameters = {}
def connect(self):
"""Connects to the instrument using the IP address set in the runcard."""
if not self.is_connected:
for attempt in range(3):
try:
self.device = SpiRack(self.name, self.address)
self.is_connected = True
break
except KeyError as exc:
log.info(f"Unable to connect:\n{str(exc)}\nRetrying...")
self.name += "_" + str(attempt)
except Exception as exc:
log.info(f"Unable to connect:\n{str(exc)}\nRetrying...")
if not self.is_connected:
raise InstrumentException(self, f"Unable to connect to {self.name}")
else:
raise_error(Exception, "There is an open connection to the instrument already")
def _set_device_parameter(self, target, *parameters, value):
if self.is_connected:
key = target.name + "." + parameters[0]
if not key in self.device_parameters:
for parameter in parameters:
if not hasattr(target, parameter):
raise Exception(f"The instrument {self.name} does not have parameters {parameter}")
target.set(parameter, value)
self.device_parameters[key] = value
elif self.device_parameters[key] != value:
for parameter in parameters:
target.set(parameter, value)
self.device_parameters[key] = value
else:
raise Exception("There is no connection to the instrument {self.name}")
def setup(self, **kwargs):
# Init S4g and D5a modules in SPI mapped on runcard
if self.is_connected:
# TODO: Check data format from yml
# Make d5g modules optional in runcard
# Define span values in setup
# Implement parameters cache
# export current / voltage properties (and make them sweepable)
if "s4g_modules" in kwargs:
self.s4g_modules_settings = kwargs["s4g_modules"]
if "d5a_modules" in kwargs:
self.d5a_modules_settings = kwargs["d5a_modules"]
for channel, settings in self.s4g_modules_settings.items():
module_number = settings[0]
port_number = settings[1]
module_name = f"S4g_module{module_number}"
current = settings[2]
if not module_name in self.device.instrument_modules:
self.device.add_spi_module(settings[0], "S4g", module_name)
device = self.device.instrument_modules[module_name].instrument_modules["dac" + str(port_number - 1)]
self.dacs[channel] = type(
"S4g_dac",
(),
{
"current": self.property_wrapper(device, "current"),
"device": device,
},
)()
self.dacs[channel].device.span("range_min_bi")
# self.dacs[channel].current = current
for channel, settings in self.d5a_modules_settings.items():
module_number = settings[0]
port_number = settings[1]
module_name = f"D5a_module{module_number}"
voltage = settings[2]
if not module_name in self.device.instrument_modules:
self.device.add_spi_module(settings[0], "D5a", module_name)
device = self.device.instrument_modules[module_name].instrument_modules["dac" + str(port_number - 1)]
self.dacs[channel] = type(
"D5a_dac",
(),
{
"voltage": self.property_wrapper(device, "voltage"),
"device": device,
},
)()
self.dacs[channel].device.span("range_min_bi")
# self.dacs[channel].voltage = voltage
else:
raise_error(Exception, "There is no connection to the instrument")
def set_SPI_DACS_to_cero(self):
self.device.set_dacs_zero()
def get_SPI_IDN(self):
return self.device.IDN()
def METHOD_NAME(self):
return self.device.temperature()
def get_SPI_battery_voltage(self):
return self.device.battery_voltages()
def disconnect(self):
if self.is_connected:
self.is_connected = False
def close(self):
if self.is_connected:
self.device.close()
self.is_connected = False
def start(self):
# Set the dacs to the values stored for each qubit in the runcard
if self.is_connected:
for channel, settings in self.s4g_modules_settings.items():
current = settings[2]
                # Check the module's present current and warn before changing it
if abs(self.dacs[channel].current) > 0.010:
log.info(
f"WARNING: S4g module {settings[0]} - port {settings[1]} current was: {self.dacs[channel].current}, now setting current to: {current}"
)
self.dacs[channel].current = current
for channel, settings in self.d5a_modules_settings.items():
voltage = settings[2]
                # Check the module's present voltage and warn before changing it
if abs(self.dacs[channel].voltage) > 0.010:
log.info(
f"WARNING: D5a module {settings[0]} - port {settings[1]} voltage was: {self.dacs[channel].voltage}, now setting voltage to: {voltage}"
)
self.dacs[channel].voltage = voltage
def stop(self):
# if self.is_connected:
# self.device.set_dacs_zero()
return
| 1,608 | get available firmware version |
#!/usr/bin/env python
########################################################################
# DELLEMC S5224F
#
# Module contains an implementation of SONiC Platform Base API and
# provides the Components' (e.g., BIOS, CPLD, FPGA, BMC etc.) available in
# the platform
#
########################################################################
try:
import subprocess
from sonic_platform_base.component_base import ComponentBase
import sonic_platform.hwaccess as hwaccess
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
def get_bios_version():
return subprocess.check_output(['dmidecode', '-s',
'system-version']).decode('utf-8').strip()
def get_fpga_version():
val = hwaccess.pci_get_value('/sys/bus/pci/devices/0000:03:00.0/resource0', 0)
return '{}.{}'.format((val >> 8) & 0xff, val & 0xff)
def get_bmc_version():
return subprocess.check_output(
['cat', '/sys/class/ipmi/ipmi0/device/bmc/firmware_revision']
).decode('utf-8').strip()
def get_cpld_version(bus, i2caddr):
return '{}.{}'.format(hwaccess.i2c_get(bus, i2caddr, 1),
hwaccess.i2c_get(bus, i2caddr, 0)
)
def get_cpld0_version():
return get_cpld_version(601, 0x31)
def get_cpld1_version():
return get_cpld_version(600, 0x30)
class Component(ComponentBase):
"""DellEMC Platform-specific Component class"""
CHASSIS_COMPONENTS = [
['BIOS',
'Performs initialization of hardware components during booting',
get_bios_version
],
['FPGA',
'Used for managing the system LEDs',
get_fpga_version
],
['BMC',
'Platform management controller for on-board temperature monitoring, in-chassis power, Fan and LED control',
get_bmc_version
],
['System CPLD',
'Used for managing the CPU power sequence and CPU states',
get_cpld0_version
],
['Slave CPLD 1',
'Used for managing SFP28/QSFP28 port transceivers (SFP28 1-24, QSFP28 1-4)',
get_cpld1_version
]
]
def __init__(self, component_index = 0):
self.index = component_index
self.name = self.CHASSIS_COMPONENTS[self.index][0]
self.description = self.CHASSIS_COMPONENTS[self.index][1]
self.version = None
def get_name(self):
"""
Retrieves the name of the component
Returns:
A string containing the name of the component
"""
return self.name
def get_description(self):
"""
Retrieves the description of the component
Returns:
A string containing the description of the component
"""
return self.description
def get_firmware_version(self):
"""
Retrieves the firmware version of the component
Returns:
A string containing the firmware version of the component
"""
        if self.version is None:
self.version = self.CHASSIS_COMPONENTS[self.index][2]()
return self.version
def install_firmware(self, image_path):
"""
Installs firmware to the component
Args:
image_path: A string, path to firmware image
Returns:
A boolean, True if install was successful, False if not
"""
return False
def get_presence(self):
"""
Retrieves the presence of the component
Returns:
bool: True if present, False if not
"""
return True
def get_model(self):
"""
Retrieves the part number of the component
Returns:
string: Part number of component
"""
return 'NA'
def get_serial(self):
"""
Retrieves the serial number of the component
Returns:
string: Serial number of component
"""
return 'NA'
def get_status(self):
"""
Retrieves the operational status of the component
Returns:
bool: True if component is operating properly, False if not
"""
return True
def get_position_in_parent(self):
"""
Retrieves 1-based relative physical position in parent device.
Returns:
integer: The 1-based relative physical position in parent
device or -1 if cannot determine the position
"""
return -1
def is_replaceable(self):
"""
Indicate whether component is replaceable.
Returns:
bool: True if it is replaceable.
"""
return False
def METHOD_NAME(self, image_path):
"""
Retrieves the available firmware version of the component
Note: the firmware version will be read from image
Args:
image_path: A string, path to firmware image
Returns:
A string containing the available firmware version of the component
"""
return "N/A"
def get_firmware_update_notification(self, image_path):
"""
Retrieves a notification on what should be done in order to complete
the component firmware update
Args:
image_path: A string, path to firmware image
Returns:
A string containing the component firmware update notification if required.
By default 'None' value will be used, which indicates that no actions are required
"""
return "None"
def update_firmware(self, image_path):
"""
Updates firmware of the component
This API performs firmware update: it assumes firmware installation and loading in a single call.
In case platform component requires some extra steps (apart from calling Low Level Utility)
to load the installed firmware (e.g, reboot, power cycle, etc.) - this will be done automatically by API
Args:
image_path: A string, path to firmware image
Raises:
RuntimeError: update failed
"""
return False
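# Hypothetical usage sketch (not part of the platform API): enumerate the fixed
# component list defined above and report the detected firmware versions.
# Note that the version getters touch platform hardware, so this only runs on
# the target switch.
#     components = [Component(i) for i in range(len(Component.CHASSIS_COMPONENTS))]
#     for c in components:
#         print(c.get_name(), c.get_firmware_version())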
| 1,609 | on service removed |
# Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from queue import Queue
from threading import Thread, Event
from time import time
from typing import Optional
from zeroconf import Zeroconf, ServiceBrowser, ServiceStateChange, ServiceInfo
from UM.Logger import Logger
from UM.Signal import Signal
from cura.CuraApplication import CuraApplication
class ZeroConfClient:
"""The ZeroConfClient handles all network discovery logic.
It emits signals when new network services were found or disappeared.
"""
# The discovery protocol name for Ultimaker printers.
ZERO_CONF_NAME = u"_ultimaker._tcp.local."
# Signals emitted when new services were discovered or removed on the network.
addedNetworkCluster = Signal()
removedNetworkCluster = Signal()
def __init__(self) -> None:
self._zero_conf = None # type: Optional[Zeroconf]
self._zero_conf_browser = None # type: Optional[ServiceBrowser]
self._service_changed_request_queue = None # type: Optional[Queue]
self._service_changed_request_event = None # type: Optional[Event]
self._service_changed_request_thread = None # type: Optional[Thread]
def start(self) -> None:
"""The ZeroConf service changed requests are handled in a separate thread so we don't block the UI.
We can also re-schedule the requests when they fail to get detailed service info.
Any new or re-reschedule requests will be appended to the request queue and the thread will process them.
"""
self._service_changed_request_queue = Queue()
self._service_changed_request_event = Event()
try:
self._zero_conf = Zeroconf()
# CURA-6855 catch WinErrors
except OSError:
Logger.logException("e", "Failed to create zeroconf instance.")
return
self._service_changed_request_thread = Thread(target = self._handleOnServiceChangedRequests, daemon = True, name = "ZeroConfServiceChangedThread")
self._service_changed_request_thread.start()
self._zero_conf_browser = ServiceBrowser(self._zero_conf, self.ZERO_CONF_NAME, [self._queueService])
# Cleanup ZeroConf resources.
def stop(self) -> None:
if self._zero_conf is not None:
self._zero_conf.close()
self._zero_conf = None
if self._zero_conf_browser is not None:
self._zero_conf_browser.cancel()
self._zero_conf_browser = None
def _queueService(self, zeroconf: Zeroconf, service_type, name: str, state_change: ServiceStateChange) -> None:
"""Handles a change is discovered network services."""
item = (zeroconf, service_type, name, state_change)
if not self._service_changed_request_queue or not self._service_changed_request_event:
return
self._service_changed_request_queue.put(item)
self._service_changed_request_event.set()
def _handleOnServiceChangedRequests(self) -> None:
"""Callback for when a ZeroConf service has changes."""
if not self._service_changed_request_queue or not self._service_changed_request_event:
return
while True:
# Wait for the event to be set
self._service_changed_request_event.wait(timeout=5.0)
# Stop if the application is shutting down
if CuraApplication.getInstance().isShuttingDown():
return
self._service_changed_request_event.clear()
# Handle all pending requests
reschedule_requests = [] # A list of requests that have failed so later they will get re-scheduled
while not self._service_changed_request_queue.empty():
request = self._service_changed_request_queue.get()
zeroconf, service_type, name, state_change = request
try:
result = self._onServiceChanged(zeroconf, service_type, name, state_change)
if not result:
reschedule_requests.append(request)
except Exception:
Logger.logException("e", "Failed to get service info for [%s] [%s], the request will be rescheduled",
service_type, name)
reschedule_requests.append(request)
# Re-schedule the failed requests if any
if reschedule_requests:
for request in reschedule_requests:
self._service_changed_request_queue.put(request)
def _onServiceChanged(self, zero_conf: Zeroconf, service_type: str, name: str,
state_change: ServiceStateChange) -> bool:
"""Handler for zeroConf detection.
Return True or False indicating if the process succeeded.
Note that this function can take over 3 seconds to complete. Be careful calling it from the main thread.
"""
if state_change == ServiceStateChange.Added:
return self._onServiceAdded(zero_conf, service_type, name)
elif state_change == ServiceStateChange.Removed:
return self.METHOD_NAME(name)
return True
def _onServiceAdded(self, zero_conf: Zeroconf, service_type: str, name: str) -> bool:
"""Handler for when a ZeroConf service was added."""
# First try getting info from zero-conf cache
info = ServiceInfo(service_type, name, properties={})
for record in zero_conf.cache.entries_with_name(name.lower()):
info.update_record(zero_conf, time(), record)
for record in zero_conf.cache.entries_with_name(info.server):
info.update_record(zero_conf, time(), record)
if hasattr(info, "addresses") and info.addresses:
break
# Request more data if info is not complete
if not hasattr(info, "addresses") or not info.addresses:
new_info = zero_conf.get_service_info(service_type, name)
if new_info is not None:
info = new_info
if info and hasattr(info, "addresses") and info.addresses:
type_of_device = info.properties.get(b"type", None)
if type_of_device:
if type_of_device == b"printer":
address = '.'.join(map(str, info.addresses[0]))
self.addedNetworkCluster.emit(str(name), address, info.properties)
else:
Logger.log("w", "The type of the found device is '%s', not 'printer'." % type_of_device)
else:
Logger.log("w", "Could not get information about %s" % name)
return False
return True
def METHOD_NAME(self, name: str) -> bool:
"""Handler for when a ZeroConf service was removed."""
Logger.log("d", "ZeroConf service removed: %s" % name)
self.removedNetworkCluster.emit(str(name))
return True
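# Hypothetical wiring sketch (the callbacks are assumptions; only ZeroConfClient
# and its signals are defined above):
#     client = ZeroConfClient()
#     client.addedNetworkCluster.connect(lambda name, address, properties: ...)
#     client.removedNetworkCluster.connect(lambda name: ...)
#     client.start()   # begins browsing for "_ultimaker._tcp.local." services
#     ...
#     client.stop()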
| 1,610 | check if service unavailable response is received |
#!/usr/bin/env python3
# ====================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# ====================================
"""HttpClient base class."""
import os
import sys
import configuration3 as configuration
import serializerfactory
import locallogger
import re as regex
class HttpClient(object):
"""Base class to provide common attributes and functionality to all HttpClient implementation."""
ACCEPT_HEADER_KEY = "Accept"
CONTENT_TYPE_HEADER_KEY = "Content-Type"
CONNECTION_HEADER_KEY = "Connection"
USER_AGENT_HEADER_KEY = "User-Agent"
APP_JSON_HEADER_VALUE = "application/json"
KEEP_ALIVE_HEADER_VALUE = "keep-alive"
GET = "GET"
POST = "POST"
PUT = "PUT"
DELETE = "DELETE"
def __init__(self, cert_path, key_path, insecure=False, proxy_configuration=None):
self.cert_path = cert_path
self.key_path = key_path
self.insecure = insecure
self.proxy_configuration = proxy_configuration
# validate presence of cert/key in case they were removed after process creation
if (cert_path is not None and not os.path.isfile(self.cert_path)) or \
(key_path is not None and not os.path.isfile(self.key_path)):
print(cert_path)
raise Exception("Invalid certificate or key file path.")
self.default_headers = {self.ACCEPT_HEADER_KEY: self.APP_JSON_HEADER_VALUE,
self.CONNECTION_HEADER_KEY: self.KEEP_ALIVE_HEADER_VALUE,
self.USER_AGENT_HEADER_KEY: "AzureAutomationHybridWorker/" +
configuration.get_worker_version()}
self.json = serializerfactory.get_serializer(sys.version_info)
@staticmethod
def merge_headers(client_headers, request_headers):
"""Merges client_headers and request_headers into a single dictionary. If a request_header key is also present
in the client_headers, the request_header value will override the client_header one.
Args:
client_headers : dictionary, the default client's headers.
request_headers : dictionary, request specific headers.
Returns:
A dictionary containing a set of both the client_headers and the request_headers
"""
if request_headers is not None:
            client_headers.update(request_headers.copy())
            request_headers = client_headers
else:
request_headers = client_headers.copy()
return request_headers
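    # Illustrative example of the documented merge behaviour, where the
    # request-specific value wins on key collisions:
    #     merge_headers({"Accept": "application/json"}, {"Accept": "text/plain"})
    #     -> {"Accept": "text/plain"}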
def get(self, url, headers=None):
"""Issues a GET request to the provided url using the provided headers.
Args:
url : string , the URl.
headers : dictionary, contains the headers key value pair (defaults to None).
Returns:
A RequestResponse
"""
pass
def post(self, url, headers=None, data=None):
"""Issues a POST request to the provided url using the provided headers.
Args:
url : string , the URl.
headers : dictionary, contains the headers key value pair (defaults to None).
data : dictionary, contains the non-serialized request body (defaults to None).
Returns:
A RequestResponse
"""
pass
def put(self, url, headers=None, data=None):
"""Issues a PUT request to the provided url using the provided headers.
Args:
url : string , the URl.
headers : dictionary, contains the headers key value pair (defaults to None).
data : dictionary, contains the non-serialized request body (defaults to None).
Returns:
A RequestResponse
"""
pass
def delete(self, url, headers=None, data=None):
"""Issues a DELETE request to the provided url using the provided headers.
Args:
url : string , the URl.
headers : dictionary, contains the headers key value pair (defaults to None).
data : dictionary, contains the non-serialized request body (defaults to None).
Returns:
A RequestResponse
"""
pass
class RequestResponse(object):
"""Encapsulates all request response for http clients. Will also deserialize the response when the raw response
data is deserializable.
"""
@staticmethod
def METHOD_NAME(response_body):
SERVICE_UNAVAILABLE_STR = 'Service Unavailable'
HTML_HEADER_TAG_REGEX = '<h[0-9]+>(.*?)</h[0-9]+>'
HTML_BODY_TAG_REGEX = '<BODY>(.*?)</BODY>'
HTML_PARAGRAPH_TAG_REGEX = '<p>(.*?)</p>'
response_body = response_body.decode() if isinstance(response_body, bytes) else response_body
response_body = regex.compile(HTML_BODY_TAG_REGEX).findall(response_body)
if len(response_body) >= 1:
response_body = response_body[0]
headers = regex.compile(HTML_HEADER_TAG_REGEX).findall(response_body)
# explicit check of service unavailable
            if len(headers) >= 1 and SERVICE_UNAVAILABLE_STR in headers:
detailed_response = regex.compile(HTML_PARAGRAPH_TAG_REGEX).findall(response_body)
resultant_response = ""
if detailed_response is not None:
for response in detailed_response:
resultant_response = resultant_response + response + "\n"
return resultant_response
return None
def __init__(self, status_code, raw_response_data=None):
self.status_code = int(status_code)
self.raw_data = raw_response_data
self.json = serializerfactory.get_serializer(sys.version_info)
if raw_response_data is not None:
try:
self.raw_data = self.raw_data.decode() if isinstance(self.raw_data, bytes) else self.raw_data
self.deserialized_data = self.json.loads(self.raw_data)
except ValueError:
import tracer
self.deserialized_data = None
self.raw_data = self.raw_data.decode() if isinstance(self.raw_data, bytes) else self.raw_data
service_unavailable_check_result = self.METHOD_NAME(self.raw_data)
if service_unavailable_check_result is not None:
tracer.log_warning_trace("Request to service failed because the service was unavailable. Detailed response is %s" %(service_unavailable_check_result))
| 1,611 | compile |
# ./python/air/backend/linalg_on_tensors.py -*- Python -*-
#
# Copyright (C) 2022, Xilinx Inc.
# Copyright (C) 2022, Advanced Micro Devices, Inc.
# SPDX-License-Identifier: MIT
import torch
import torch_mlir.ir
import torch_mlir.passmanager
from torch_mlir.dynamo import make_simple_dynamo_backend
import air.mlir.ir
import air.mlir.passmanager
from torch_mlir_e2e_test.linalg_on_tensors_backends.refbackend import RefBackendLinalgOnTensorsBackend
from .abc import AirBackend
import air.compiler.util
import air.compiler.aircc.main as aircc
import ctypes
from pathlib import Path
from typing import List
path = Path(air.backend.__file__).resolve().parent
try:
ctypes.CDLL(f"{path}/../../../runtime_lib/airhost/libairhost_shared.so", mode=ctypes.RTLD_GLOBAL)
except:
pass
import air.mlir._mlir_libs._airRt as airrt
__all__ = [
"LinalgOnTensorsAirBackend",
"make_dynamo_backend",
"LINALG_MEMREF_TO_AIR_PIPELINE"
]
LINALG_MEMREF_TO_AIR_PIPELINE = "builtin.module("+",".join([
"air-linalg-codegen",
"canonicalize",
"cse",
"air-par-to-herd",
"air-copy-to-dma",
"canonicalize",
"cse"
])+")"
class LinalgOnTensorsAirBackend(AirBackend):
"""Main entry-point for the linalg-on-tensors based AIR backend.
This currently uses the torch-mlir linalg-on-tensors RefBackend
for JIT execution. aircc produces a RefBackend compatible wrapper
function for AIR generated host code. The wrapper is compiled and
executed by RefBackend when invoked from python. The load method
ensures that the AIR runtime is initialized and that the AIR binary
is loaded into memory before any compiled functions are invoked.
The unload method should be called to unload the binary and release
runtime resources.
"""
def __init__(self):
super().__init__()
self.handle = None
self.refbackend = RefBackendLinalgOnTensorsBackend()
def __del__(self):
self.unload()
def METHOD_NAME(self, imported_module: torch_mlir.ir.Module, pipeline=None,
verbose=False, segment_offset=None, segment_size=None):
"""Compiles an imported module, with a flat list of functions.
The module is expected to be in linalg-on-tensors + scalar code form.
Args:
imported_module: The MLIR module consisting of funcs in the torch
dialect.
pipeline: The custom lowering pipeline to use for lowering. First
`air.compiler.util.LINALG_TENSOR_TO_MEMREF_PIPELINE` is applied,
then `pipeline`.
The default is `air.backend.linalg_on_tensors.LINALG_MEMREF_TO_AIR_PIPELINE`
segment_offset: default location for generated segments as [colOffset, rowOffset]
segment_size: default size for generated segments as [numCols, numRows]
Returns:
An opaque, backend specific compiled artifact object that can be
passed to `load`.
"""
if segment_offset is None:
segment_offset = [7, 2]
if segment_size is None:
segment_size = [10, 6]
if pipeline is None:
pipeline = LINALG_MEMREF_TO_AIR_PIPELINE
if type(imported_module) is torch_mlir.ir.Module:
with imported_module.context:
pm = torch_mlir.passmanager.PassManager.parse('builtin.module(refback-mlprogram-bufferize)')
pm.run(imported_module)
with air.mlir.ir.Context():
air_module = air.mlir.ir.Module.parse(str(imported_module))
pm = air.mlir.passmanager.PassManager.parse(
air.compiler.util.LINALG_TENSOR_TO_MEMREF_PIPELINE)
if verbose:
print("Running MLIR pass pipeline: ",
air.compiler.util.LINALG_TENSOR_TO_MEMREF_PIPELINE)
pm.run(air_module)
if verbose:
print("Running MLIR pass pipeline: ", pipeline)
pm = air.mlir.passmanager.PassManager.parse(pipeline)
pm.run(air_module)
if verbose:
print("AIR Module:")
print(air_module)
aircc_options = ['torch.mlir', '--shared', '-o', 'torch.mlir.so']
aircc_options = aircc_options + \
[f"-row-offset={segment_offset[1]}",
f"-col-offset={segment_offset[0]}"]
aircc_options = aircc_options + \
[f"-num-rows={segment_size[1]}",
f"-num-cols={segment_size[0]}"]
if verbose:
aircc_options = aircc_options + ['-v']
aircc.run(air_module,aircc_options)
with open("air_project/refback.torch.mlir") as f:
imported_module = torch_mlir.ir.Module.parse(f.read(),imported_module.context)
return self.refbackend.METHOD_NAME(imported_module)
def load(self, module):
"""Load a compiled artifact into the air runtime."""
airrt.host.init()
a = airrt.host.get_agents()
q = airrt.host.queue_create(a[0])
self.handle = airrt.host.module_load_from_file("./torch.mlir.so", q)
return self.refbackend.load(module)
def unload(self):
"""Unload any loaded module and shutdown the air runtime."""
if self.handle:
airrt.host.module_unload(self.handle)
self.handle = None
airrt.host.shut_down()
def make_dynamo_backend(pipeline=None, verbose=False,
segment_offset=None, segment_size=None):
"""Make a PyTorch dynamo backend using LinalgOnTensorsAirBackend.
Args:
pipeline: The custom lowering pipeline to use for lowering. First
`air.compiler.util.LINALG_TENSOR_TO_MEMREF_PIPELINE` is applied,
then `pipeline`.
The default is `air.backend.linalg_on_tensors.LINALG_MEMREF_TO_AIR_PIPELINE`
verbose: enable verbose output
segment_offset: default location for generated segments as [colOffset, rowOffset]
segment_size: default size for generated segments as [numCols, numRows]
Returns:
A PyTorch dynamo backend
"""
backend = LinalgOnTensorsAirBackend()
@make_simple_dynamo_backend
def air_backend(fx_graph: torch.fx.GraphModule,
example_inputs: List[torch.Tensor]):
# get the linalg mlir of the model from torch_mlir
mlir_module = torch_mlir.METHOD_NAME(
fx_graph, example_inputs,
output_type=torch_mlir.OutputType.LINALG_ON_TENSORS)
# compile the mlir model with aircc
compiled = backend.METHOD_NAME(mlir_module, pipeline=pipeline,
verbose=verbose, segment_offset=segment_offset,
segment_size=segment_size)
# return a function for invoking the compiled model
def compiled_callable(*inputs):
inputs = [x.numpy() for x in inputs]
loaded = backend.load(compiled)
result = loaded.forward(*inputs)
backend.unload()
return torch.from_numpy(result)
return compiled_callable
return air_backend
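# Usage sketch (illustrative only, not part of the original backend): the model,
# input shape, and use of torch._dynamo below are assumptions for demonstration;
# running it also requires a host with the AIR runtime and aircc available.
#
#   import torch._dynamo as dynamo
#
#   class SmallMLP(torch.nn.Module):
#       def __init__(self):
#           super().__init__()
#           self.fc = torch.nn.Linear(16, 16)
#       def forward(self, x):
#           return torch.relu(self.fc(x))
#
#   backend = make_dynamo_backend(verbose=True)
#   compiled_model = dynamo.optimize(backend)(SmallMLP())
#   output = compiled_model(torch.randn(4, 16))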
|
1,612 |
get cloudwatch client
|
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module containing AWS Client
"""
from enum import Enum
from typing import Any, Optional
import boto3
from boto3 import Session
from pydantic import BaseModel
from metadata.generated.schema.security.credentials.awsCredentials import AWSCredentials
from metadata.ingestion.models.custom_pydantic import CustomSecretStr
from metadata.utils.logger import utils_logger
logger = utils_logger()
class AWSServices(Enum):
S3 = "s3"
CLOUDWATCH = "cloudwatch"
DYNAMO_DB = "dynamodb"
GLUE = "glue"
SAGEMAKER = "sagemaker"
KINESIS = "kinesis"
QUICKSIGHT = "quicksight"
ATHENA = "athena"
RDS = "rds"
class AWSAssumeRoleException(Exception):
"""
Exception class to handle assume role related issues
"""
class AWSAssumeRoleCredentialWrapper(BaseModel):
accessKeyId: str
secretAccessKey: CustomSecretStr
sessionToken: Optional[str]
class AWSClient:
"""
AWSClient creates a boto3 Session client based on AWSCredentials.
"""
def __init__(self, config: "AWSCredentials"):
self.config = (
config
if isinstance(config, AWSCredentials)
else (AWSCredentials.parse_obj(config) if config else config)
)
@staticmethod
def get_assume_role_config(
config: AWSCredentials,
) -> Optional[AWSAssumeRoleCredentialWrapper]:
"""
Get temporary credentials from assumed role
"""
session = AWSClient._get_session(
config.awsAccessKeyId,
config.awsSecretAccessKey,
config.awsSessionToken,
config.awsRegion,
config.profileName,
)
sts_client = session.client("sts")
if config.assumeRoleSourceIdentity:
resp = sts_client.assume_role(
RoleArn=config.assumeRoleArn,
RoleSessionName=config.assumeRoleSessionName,
SourceIdentity=config.assumeRoleSourceIdentity,
)
else:
resp = sts_client.assume_role(
RoleArn=config.assumeRoleArn,
RoleSessionName=config.assumeRoleSessionName,
)
if resp:
credentials = resp.get("Credentials", {})
return AWSAssumeRoleCredentialWrapper(
accessKeyId=credentials.get("AccessKeyId"),
secretAccessKey=credentials.get("SecretAccessKey"),
sessionToken=credentials.get("SessionToken"),
)
return None
@staticmethod
def _get_session(
aws_access_key_id: Optional[str],
aws_secret_access_key: Optional[CustomSecretStr],
aws_session_token: Optional[str],
aws_region: str,
profile=None,
) -> Session:
"""
The only required param for boto3 is the region.
The remaining credentials fall back to the strategies described in
https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials
"""
return Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key.get_secret_value()
if aws_secret_access_key
else None,
aws_session_token=aws_session_token,
region_name=aws_region,
profile_name=profile,
)
def create_session(self) -> Session:
if self.config.assumeRoleArn:
assume_creds = AWSClient.get_assume_role_config(self.config)
if assume_creds:
return AWSClient._get_session(
assume_creds.accessKeyId,
assume_creds.secretAccessKey,
assume_creds.sessionToken,
self.config.awsRegion,
self.config.profileName,
)
return AWSClient._get_session(
self.config.awsAccessKeyId,
self.config.awsSecretAccessKey,
self.config.awsSessionToken,
self.config.awsRegion,
self.config.profileName,
)
def get_client(self, service_name: str) -> Any:
# initialize the client depending on the AWSCredentials passed
if self.config is not None:
logger.info(f"Getting AWS client for service [{service_name}]")
session = self.create_session()
if self.config.endPointURL is not None:
return session.client(
service_name=service_name, endpoint_url=self.config.endPointURL
)
return session.client(service_name=service_name)
logger.info(f"Getting AWS default client for service [{service_name}]")
# initialized with the credentials loaded from running machine
return boto3.client(service_name=service_name)
def get_resource(self, service_name: str) -> Any:
session = self.create_session()
if self.config.endPointURL is not None:
return session.resource(
service_name=service_name, endpoint_url=self.config.endPointURL
)
return session.resource(service_name=service_name)
def get_rds_client(self):
return self.get_client(AWSServices.RDS.value)
def get_s3_client(self):
return self.get_client(AWSServices.S3.value)
def METHOD_NAME(self):
return self.get_client(AWSServices.CLOUDWATCH.value)
def get_dynamo_client(self):
return self.get_resource(AWSServices.DYNAMO_DB.value)
def get_glue_client(self):
return self.get_client(AWSServices.GLUE.value)
def get_sagemaker_client(self):
return self.get_client(AWSServices.SAGEMAKER.value)
def get_kinesis_client(self):
return self.get_client(AWSServices.KINESIS.value)
def get_quicksight_client(self):
return self.get_client(AWSServices.QUICKSIGHT.value)
def get_athena_client(self):
return self.get_client(AWSServices.ATHENA.value)
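# Usage sketch (illustrative only, not part of the original module): the region
# below is a placeholder; any other required AWSCredentials fields follow the
# schema referenced above.
#
#   creds = AWSCredentials(awsRegion="us-east-1")                 # hypothetical config
#   client = AWSClient(creds)
#   cloudwatch = client.get_client(AWSServices.CLOUDWATCH.value)  # plain boto3 client
#   s3 = client.get_s3_client()                                   # same thing via a helper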
|
1,613 |
test get yearly contributions
|
from scrape_up import github
class UserTest():
def __init__(self, username):
self.username = username
#SetUp
self.user = github.Users(username=self.username)
def test_followers(self):
followers = self.user.followers()
return followers
def test_following(self):
following = self.user.following()
return following
def test_get_avatar(self):
avatar = self.user.get_avatar()
return avatar
def test_get_bio(self):
bio = self.user.get_bio()
return bio
def test_get_repo(self):
repos = self.user.get_repo()
return repos
def test_repo_count(self):
repo_count = self.user.repo_count()
return repo_count
def test_star_count(self):
star_count = self.user.star_count()
return star_count
def METHOD_NAME(self):
contributions = self.user.get_yearly_contributions()
return contributions
def test_get_repositories(self):
repositories = self.user.get_repositories()
return repositories
def test_get_starred_repos(self):
starred_repos = self.user.get_starred_repos()
return starred_repos
def test_pull_requests(self):
pull_requests = self.user.pul_requests()
return pull_requests
def test_get_followers(self):
followers = self.user.get_followers()
return followers
def test_get_following_users(self):
following_users = self.user.get_following_users()
return following_users
def test_get_achievements(self):
achievements = self.user.get_achievements()
return achievements
def test_get_status(self):
status = self.user.get_status()
return status
def test_get_contribution_streak(self):
contribution_streak = self.user.get_contribution_streak()
return contribution_streak
def test_get_repository_details(self):
repository_details = self.user.get_repository_details()
return repository_details
def test_get_branch(self):
branches = self.user.get_branch()
return branches
def test_get_merged_pull_requests(self):
merged_pull_requests = self.user.get_merged_pull_requests()
return merged_pull_requests
class RepositoryTest:
def __init__(self, username, repo):
self.username = username
self.repo = repo
#SetUp
self.repository = github.Repository(self.username, repo)
def test_fork_count(self):
fork_count = self.repository.fork_count()
return fork_count
def test_get_contributors(self):
contributors = self.repository.get_contributors()
return contributors
def test_topics(self):
topics = self.repository.topics()
return topics
def test_pull_requests(self):
pull_requests = self.repository.pull_requests()
return pull_requests
def test_last_updated_at(self):
last_updated_at = self.repository.last_update_at()
return last_updated_at
def test_tags(self):
tags = self.repository.tags()
return tags
def test_releases(self):
releases = self.repository.releases()
return releases
def test_issues_count(self):
issues_count = self.repository.issues_count()
return issues_count
def test_readme(self):
readme_path = self.repository.readme()
return readme_path
def test_get_pull_requests_ids(self):
pull_requests_ids = self.repository.get_pull_requests_ids()
return pull_requests_ids
def test_get_issues(self):
issues = self.repository.get_issues()
return issues
def test_commits(self):
commits = self.repository.commits()
return commits
def test_get_readme(self):
readme = self.repository.get_readme()
return readme
def test_get_environment(self):
environment = self.repository.get_environment()
return environment
def test_watch_count(self):
watch_count = self.repository.watch_count()
return watch_count
def test_all_watchers(self):
watchers = self.repository.all_watchers()
return watchers
class IssueTest:
def __init__(self, username, repo, issue_no):
self.username = username
self.repo = repo
self.issue_no = issue_no
#SetUp
self.issue = github.Issue(self.username, self.repo, self.issue_no)
def test_assignees(self):
assignees = self.issue.assignees()
return assignees
def test_labels(self):
labels = self.issue.labels()
return labels
def test_opened_by(self):
opened_by = self.issue.opened_by()
return opened_by
def test_title(self):
title = self.issue.title()
return title
def test_is_milestone(self):
milestone = self.issue.is_milestone()
return milestone
def test_opened_at(self):
opened_at = self.issue.opened_at()
return opened_at
class PullRequestTest:
def __init__(self, username, repo, pr_no):
self.username = username
self.repo = repo
self.pr_no = pr_no
#SetUp
self.pull_request = github.PullRequest(self.username, self.repo, self.pr_no)
def test_commits(self):
commits = self.pull_request.commits()
return commits
def test_title(self):
title = self.pull_request.title()
return title
def test_labels(self):
labels = self.pull_request.labels()
return labels
def test_files_changed(self):
files_changed = self.pull_request.files_changed()
return files_changed
def test_reviewers(self):
reviewers = self.pull_request.reviewers()
return reviewers
class OrganizationTest:
def __init__(self, org_name):
self.org_name = org_name
#SetUp
self.organization = github.Organization(self.org_name)
def test_top_topics(self):
top_topics = self.organization.top_topics()
return top_topics
def test_followers(self):
followers = self.organization.followers()
return followers
def test_top_languages(self):
top_languages = self.organization.top_languages()
return top_languages
def test_avatar(self):
avatar = self.organization.avatar()
return avatar
def test_repositories(self):
repositories = self.organization.repositories()
return repositories
def test_people(self):
people = self.organization.people()
return people
def test_peoples(self):
peoples = self.organization.peoples()
return peoples
def test_get_location(self):
location = self.organization.get_location()
return location
def test_repository_details(self):
repository_details = self.organization.repository_details()
return repository_details
def test_pinned_repository(self):
pinned_repository = self.organization.pinned_repository()
return pinned_repository
def test_get_organization_links(self):
organization_links = self.organization.get_organization_links()
return organization_links
|
1,614 |
list tokens redirect
|
# ContentDB
# Copyright (C) 2018-21 rubenwardy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from flask import render_template, redirect, request, session, url_for, abort
from flask_babel import lazy_gettext
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import InputRequired, Length
from wtforms_sqlalchemy.fields import QuerySelectField
from app.models import db, User, APIToken, Permission
from app.utils import random_string
from . import bp
from ..users.settings import get_setting_tabs
class CreateAPIToken(FlaskForm):
name = StringField(lazy_gettext("Name"), [InputRequired(), Length(1, 30)])
package = QuerySelectField(lazy_gettext("Limit to package"), allow_blank=True,
get_pk=lambda a: a.id, get_label=lambda a: a.title)
submit = SubmitField(lazy_gettext("Save"))
@bp.route("/user/tokens/")
@login_required
def METHOD_NAME():
return redirect(url_for("api.list_tokens", username=current_user.username))
@bp.route("/users/<username>/tokens/")
@login_required
def list_tokens(username):
user = User.query.filter_by(username=username).first()
if user is None:
abort(404)
if not user.check_perm(current_user, Permission.CREATE_TOKEN):
abort(403)
return render_template("api/list_tokens.html", user=user, tabs=get_setting_tabs(user), current_tab="api_tokens")
@bp.route("/users/<username>/tokens/new/", methods=["GET", "POST"])
@bp.route("/users/<username>/tokens/<int:id>/edit/", methods=["GET", "POST"])
@login_required
def create_edit_token(username, id=None):
user = User.query.filter_by(username=username).first()
if user is None:
abort(404)
if not user.check_perm(current_user, Permission.CREATE_TOKEN):
abort(403)
is_new = id is None
token = None
access_token = None
if not is_new:
token = APIToken.query.get(id)
if token is None:
abort(404)
elif token.owner != user:
abort(403)
access_token = session.pop("token_" + str(token.id), None)
form = CreateAPIToken(formdata=request.form, obj=token)
form.package.query_factory = lambda: user.maintained_packages.all()
if form.validate_on_submit():
if is_new:
token = APIToken()
db.session.add(token)
token.owner = user
token.access_token = random_string(32)
form.populate_obj(token)
db.session.commit()
if is_new:
# Store token so it can be shown in the edit page
session["token_" + str(token.id)] = token.access_token
return redirect(url_for("api.create_edit_token", username=username, id=token.id))
return render_template("api/create_edit_token.html", user=user, form=form, token=token, access_token=access_token)
@bp.route("/users/<username>/tokens/<int:id>/reset/", methods=["POST"])
@login_required
def reset_token(username, id):
user = User.query.filter_by(username=username).first()
if user is None:
abort(404)
if not user.check_perm(current_user, Permission.CREATE_TOKEN):
abort(403)
token = APIToken.query.get(id)
if token is None:
abort(404)
elif token.owner != user:
abort(403)
token.access_token = random_string(32)
db.session.commit() # save
# Store token so it can be shown in the edit page
session["token_" + str(token.id)] = token.access_token
return redirect(url_for("api.create_edit_token", username=username, id=token.id))
@bp.route("/users/<username>/tokens/<int:id>/delete/", methods=["POST"])
@login_required
def delete_token(username, id):
user = User.query.filter_by(username=username).first()
if user is None:
abort(404)
if not user.check_perm(current_user, Permission.CREATE_TOKEN):
abort(403)
token = APIToken.query.get(id)
if token is None:
abort(404)
elif token.owner != user:
abort(403)
db.session.delete(token)
db.session.commit()
return redirect(url_for("api.list_tokens", username=username))
|
1,615 |
out bounce
|
"""
Define a series of easing functions for more natural-looking animations.
Taken from https://easings.net/ and translated from JavaScript.
"""
from math import cos, pi, sin, sqrt
def _in_out_expo(x: float) -> float:
"""https://easings.net/#easeInOutExpo"""
if 0 < x < 0.5:
return pow(2, 20 * x - 10) / 2
elif 0.5 <= x < 1:
return (2 - pow(2, -20 * x + 10)) / 2
else:
return x # x in (0, 1)
def _in_out_circ(x: float) -> float:
"""https://easings.net/#easeInOutCirc"""
if x < 0.5:
return (1 - sqrt(1 - pow(2 * x, 2))) / 2
else:
return (sqrt(1 - pow(-2 * x + 2, 2)) + 1) / 2
def _in_out_back(x: float) -> float:
"""https://easings.net/#easeInOutBack"""
c = 1.70158 * 1.525
if x < 0.5:
return (pow(2 * x, 2) * ((c + 1) * 2 * x - c)) / 2
else:
return (pow(2 * x - 2, 2) * ((c + 1) * (x * 2 - 2) + c) + 2) / 2
def _in_elastic(x: float) -> float:
"""https://easings.net/#easeInElastic"""
c = 2 * pi / 3
if 0 < x < 1:
return -pow(2, 10 * x - 10) * sin((x * 10 - 10.75) * c)
else:
return x # x in (0, 1)
def _in_out_elastic(x: float) -> float:
"""https://easings.net/#easeInOutElastic"""
c = 2 * pi / 4.5
if 0 < x < 0.5:
return -(pow(2, 20 * x - 10) * sin((20 * x - 11.125) * c)) / 2
elif 0.5 <= x < 1:
return (pow(2, -20 * x + 10) * sin((20 * x - 11.125) * c)) / 2 + 1
else:
return x # x in (0, 1)
def _out_elastic(x: float) -> float:
"""https://easings.net/#easeInOutElastic"""
c = 2 * pi / 3
if 0 < x < 1:
return pow(2, -10 * x) * sin((x * 10 - 0.75) * c) + 1
else:
return x # x in (0, 1)
def METHOD_NAME(x: float) -> float:
"""https://easings.net/#easeOutBounce"""
n, d = 7.5625, 2.75
if x < 1 / d:
return n * x * x
elif x < 2 / d:
x_ = x - 1.5 / d
return n * x_ * x_ + 0.75
elif x < 2.5 / d:
x_ = x - 2.25 / d
return n * x_ * x_ + 0.9375
else:
x_ = x - 2.625 / d
return n * x_ * x_ + 0.984375
def _in_bounce(x: float) -> float:
"""https://easings.net/#easeInBounce"""
return 1 - METHOD_NAME(1 - x)
def _in_out_bounce(x: float) -> float:
"""https://easings.net/#easeInOutBounce"""
if x < 0.5:
return (1 - METHOD_NAME(1 - 2 * x)) / 2
else:
return (1 + METHOD_NAME(2 * x - 1)) / 2
EASING = {
"none": lambda x: 1.0,
"round": lambda x: 0.0 if x < 0.5 else 1.0,
"linear": lambda x: x,
"in_sine": lambda x: 1 - cos((x * pi) / 2),
"in_out_sine": lambda x: -(cos(x * pi) - 1) / 2,
"out_sine": lambda x: sin((x * pi) / 2),
"in_quad": lambda x: x * x,
"in_out_quad": lambda x: 2 * x * x if x < 0.5 else 1 - pow(-2 * x + 2, 2) / 2,
"out_quad": lambda x: 1 - pow(1 - x, 2),
"in_cubic": lambda x: x * x * x,
"in_out_cubic": lambda x: 4 * x * x * x if x < 0.5 else 1 - pow(-2 * x + 2, 3) / 2,
"out_cubic": lambda x: 1 - pow(1 - x, 3),
"in_quart": lambda x: pow(x, 4),
"in_out_quart": lambda x: 8 * pow(x, 4) if x < 0.5 else 1 - pow(-2 * x + 2, 4) / 2,
"out_quart": lambda x: 1 - pow(1 - x, 4),
"in_quint": lambda x: pow(x, 5),
"in_out_quint": lambda x: 16 * pow(x, 5) if x < 0.5 else 1 - pow(-2 * x + 2, 5) / 2,
"out_quint": lambda x: 1 - pow(1 - x, 5),
"in_expo": lambda x: pow(2, 10 * x - 10) if x else 0,
"in_out_expo": _in_out_expo,
"out_expo": lambda x: 1 - pow(2, -10 * x) if x != 1 else 1,
"in_circ": lambda x: 1 - sqrt(1 - pow(x, 2)),
"in_out_circ": _in_out_circ,
"out_circ": lambda x: sqrt(1 - pow(x - 1, 2)),
"in_back": lambda x: 2.70158 * pow(x, 3) - 1.70158 * pow(x, 2),
"in_out_back": _in_out_back,
"out_back": lambda x: 1 + 2.70158 * pow(x - 1, 3) + 1.70158 * pow(x - 1, 2),
"in_elastic": _in_elastic,
"in_out_elastic": _in_out_elastic,
"out_elastic": _out_elastic,
"in_bounce": _in_bounce,
"in_out_bounce": _in_out_bounce,
"out_bounce": METHOD_NAME,
}
DEFAULT_EASING = "in_out_cubic"
DEFAULT_SCROLL_EASING = "out_cubic"
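# Usage sketch (illustrative only): an easing function maps a normalized time
# t in [0, 1] to an eased progress value, so a frame value can be computed as
# start + eased * (end - start). The 10-step sampling below is arbitrary.
#
#   ease = EASING["out_bounce"]
#   samples = [round(ease(step / 10), 3) for step in range(11)]
#   # starts at 0.0, "bounces" as it approaches the target, and ends exactly at 1.0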
|
1,616 |
process py
|
import tensorflow as tf
import tensorflow_datasets as tfds
import tensorflow_text as tf_text
from transformers import TFBertForSequenceClassification
from transformers import TFDistilBertForSequenceClassification
tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
tf.config.experimental_connect_to_cluster(tpu)
tf.tpu.experimental.initialize_tpu_system(tpu)
strategy = tf.distribute.experimental.TPUStrategy(tpu)
ds_train, ds_info = tfds.load('amazon_us_reviews/Books_v1_02',
split='train[:5%]',
with_info=True,
data_dir="gs://weilin-bert-test")
MAX_SEQ_LEN = 512
bert_tokenizer = tf_text.BertTokenizer(
vocab_lookup_table='gs://weilin-bert-test/vocab.txt',
token_out_type=tf.int64,
lower_case=True)
def preprocessing_fn(inputs):
"""Preprocess input column of text into transformed columns of.
* input token ids
* input mask
* input type ids
"""
CLS_ID = tf.constant(101, dtype=tf.int64)
SEP_ID = tf.constant(102, dtype=tf.int64)
PAD_ID = tf.constant(0, dtype=tf.int64)
def tokenize_text(text, sequence_length=MAX_SEQ_LEN):
"""
Perform the BERT preprocessing from text -> input token ids
"""
# convert text into token ids
tokens = bert_tokenizer.tokenize(text)
# flatten the output ragged tensors
tokens = tokens.merge_dims(1, 2)[:, :sequence_length]
# Add start and end token ids to the id sequence
start_tokens = tf.fill([tf.shape(text)[0], 1], CLS_ID)
end_tokens = tf.fill([tf.shape(text)[0], 1], SEP_ID)
tokens = tokens[:, :sequence_length - 2]
tokens = tf.concat([start_tokens, tokens, end_tokens], axis=1)
# truncate sequences greater than MAX_SEQ_LEN
tokens = tokens[:, :sequence_length]
# pad shorter sequences with the pad token id
tokens = tokens.to_tensor(default_value=PAD_ID)
pad = sequence_length - tf.shape(tokens)[1]
tokens = tf.pad(tokens, [[0, 0], [0, pad]], constant_values=PAD_ID)
# and finally reshape the word token ids to fit the output
# data structure of TFT
return tf.reshape(tokens, [-1, sequence_length])
def preprocess_bert_input(text):
"""
Convert input text into the input_word_ids, input_mask, input_type_ids
"""
input_word_ids = tokenize_text(text)
input_mask = tf.cast(input_word_ids > 0, tf.int64)
input_mask = tf.reshape(input_mask, [-1, MAX_SEQ_LEN])
zeros_dims = tf.stack(tf.shape(input_mask))
input_type_ids = tf.fill(zeros_dims, 0)
input_type_ids = tf.cast(input_type_ids, tf.int64)
return (tf.squeeze(input_word_ids,
axis=0), tf.squeeze(input_mask, axis=0),
tf.squeeze(input_type_ids, axis=0))
input_word_ids, input_mask, input_type_ids = preprocess_bert_input(
[inputs['data']['review_body']])
return (dict({
'input_ids': input_word_ids,
'token_type_ids': input_type_ids,
'attention_mask': input_mask
}), inputs['data']['star_rating'])
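# Shape sketch (illustrative, assuming a single-example input as mapped below):
# preprocessing_fn is expected to yield a features dict of three int64 tensors of
# shape [MAX_SEQ_LEN] plus the star_rating label, e.g.
#   features, label = preprocessing_fn(example)
#   features['input_ids'].shape        # (512,)
#   features['attention_mask'].shape   # (512,)
#   features['token_type_ids'].shape   # (512,)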
from transformers import BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased')
def dataset_fn(ds):
return ds.filter(lambda x: x['data']['helpful_votes'] >= 7)
ds_train_filtered = ds_train.apply(dataset_fn)
def process(example):
return (dict(tokenizer(
example['data']['review_body'].numpy().decode('utf-8')),
truncation=True,
padding=True), example['data']['star_rating'].numpy())
def METHOD_NAME(inp1, inp2):
return [
dict(tokenizer(inp1.numpy().decode('utf-8')),
truncation=True,
padding=True),
inp2.numpy()
]
ds_train_filtered_2 = ds_train_filtered.map(preprocessing_fn)
tf.keras.mixed_precision.experimental.set_policy('mixed_bfloat16')
with strategy.scope():
model = TFBertForSequenceClassification.from_pretrained('bert-base-uncased',
num_labels=1)
optimizer = tf.keras.optimizers.Adam(learning_rate=5e-5)
model.compile(optimizer=optimizer,
loss=model.compute_loss) # can also use any keras loss fn
model.summary()
inuse_dataset = ds_train_filtered_2.shuffle(1000).batch(256).prefetch(
tf.data.experimental.AUTOTUNE)
model.fit(inuse_dataset, epochs=1, batch_size=256)
|
1,617 |
test almost identical vectors
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for kernelized_utils.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from absl.testing import parameterized
from tensorflow.python.framework import constant_op
from tensorflow.python.keras.utils import kernelized_utils
from tensorflow.python.platform import test
def _exact_gaussian(stddev):
return functools.partial(
kernelized_utils.exact_gaussian_kernel, stddev=stddev)
def _exact_laplacian(stddev):
return functools.partial(
kernelized_utils.exact_laplacian_kernel, stddev=stddev)
class KernelizedUtilsTest(test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=10.0), [[1.0]]),
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def test_equal_vectors(self, exact_kernel_fn, expected_values):
"""Identical vectors give exactly the identity kernel value."""
x = constant_op.constant([0.5, -0.5, -0.5, 0.5])
y = constant_op.constant([0.5, -0.5, -0.5, 0.5])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
# x and y are identical and therefore K(x, y) will be precisely equal to
# the identity value of the kernel.
self.assertAllClose(expected_values, exact_kernel, atol=1e-6)
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=10.0), [[1.0]]),
('laplacian', _exact_laplacian(stddev=50.0), [[1.0]]))
def METHOD_NAME(self, exact_kernel_fn, expected_values):
"""Almost identical vectors give the identity kernel value."""
x = constant_op.constant([1.0, 0.4, -2.1, -1.1])
y = constant_op.constant([1.01, 0.39, -2.099, -1.101])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
# x and y are almost identical and therefore K(x, y) will be almost equal to
# the identity value of the kernel.
self.assertAllClose(expected_values, exact_kernel, atol=1e-3)
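# Worked check (added for clarity; assumes the usual definitions
# K_gauss(x, y) = exp(-||x - y||^2 / (2 * stddev^2)) and
# K_lap(x, y) = exp(-||x - y||_1 / stddev)): here ||x - y||^2 is roughly 2e-4,
# so with stddev=10 the Gaussian kernel is exp(-1e-6) ~ 1.0, well within the
# 1e-3 tolerance used above; the Laplacian case is analogous.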
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=1.0), [[0.99], [0.977]]),
('laplacian', _exact_laplacian(stddev=5.0), [[0.96], [0.94]]))
def test_similar_matrices(self, exact_kernel_fn, expected_values):
"""Pairwise "close" vectors give high kernel values (similarity scores)."""
x = constant_op.constant([1.0, 3.4, -2.1, 0.9, 3.3, -2.0], shape=[2, 3])
y = constant_op.constant([1.1, 3.35, -2.05])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
# The 2 rows of x are close to y. The pairwise kernel values (similarity
# scores) are somewhat close to the identity value of the kernel.
self.assertAllClose(expected_values, exact_kernel, atol=1e-2)
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=2.0), [[.997, .279], [.251, 1.],
[.164, 0.019]]),
('laplacian', _exact_laplacian(stddev=2.0), [[.904, .128], [.116, 1.],
[.07, 0.027]]))
def test_matrices_varying_similarity(self, exact_kernel_fn, expected_values):
"""Test matrices with row vectors of varying pairwise similarity."""
x = constant_op.constant([1.0, 2., -2., 0.9, 3.3, -1.0], shape=[3, 2])
y = constant_op.constant([1.1, 2.1, -2., 0.9], shape=[2, 2])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
self.assertAllClose(expected_values, exact_kernel, atol=1e-2)
@parameterized.named_parameters(
('gaussian', _exact_gaussian(stddev=1.0), [[0.0]]),
('laplacian', _exact_laplacian(stddev=1.0), [[0.0]]))
def test_completely_dissimilar_vectors(self, exact_kernel_fn,
expected_values):
"""Very dissimilar vectors give very low similarity scores."""
x = constant_op.constant([1.0, 3.4, -2.1, -5.1])
y = constant_op.constant([0.5, 2.1, 1.0, 3.0])
exact_kernel = exact_kernel_fn(x, y)
shape = exact_kernel.shape.as_list()
self.assertLen(shape, 2)
# x and y are very "far" from each other and so the corresponding kernel
# value will be very low.
self.assertAllClose(expected_values, exact_kernel, atol=1e-2)
if __name__ == '__main__':
test.main()
|
1,618 |
test mark boundaries
|
import numpy as np
import pytest
from numpy.testing import assert_array_equal, assert_allclose
from skimage._shared.utils import _supported_float_type
from skimage.segmentation import find_boundaries, mark_boundaries
white = (1, 1, 1)
def test_find_boundaries():
image = np.zeros((10, 10), dtype=np.uint8)
image[2:7, 2:7] = 1
ref = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
result = find_boundaries(image)
assert_array_equal(result, ref)
def test_find_boundaries_bool():
image = np.zeros((5, 5), dtype=bool)
image[2:5, 2:5] = True
ref = np.array([[False, False, False, False, False],
[False, False, True, True, True],
[False, True, True, True, True],
[False, True, True, False, False],
[False, True, True, False, False]], dtype=bool)
result = find_boundaries(image)
assert_array_equal(result, ref)
@pytest.mark.parametrize(
'dtype', [np.uint8, np.float16, np.float32, np.float64]
)
def METHOD_NAME(dtype):
image = np.zeros((10, 10), dtype=dtype)
label_image = np.zeros((10, 10), dtype=np.uint8)
label_image[2:7, 2:7] = 1
ref = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
marked = mark_boundaries(image, label_image, color=white, mode='thick')
assert marked.dtype == _supported_float_type(dtype)
result = np.mean(marked, axis=-1)
assert_array_equal(result, ref)
ref = np.array([[0, 2, 2, 2, 2, 2, 2, 2, 0, 0],
[2, 2, 1, 1, 1, 1, 1, 2, 2, 0],
[2, 1, 1, 1, 1, 1, 1, 1, 2, 0],
[2, 1, 1, 2, 2, 2, 1, 1, 2, 0],
[2, 1, 1, 2, 0, 2, 1, 1, 2, 0],
[2, 1, 1, 2, 2, 2, 1, 1, 2, 0],
[2, 1, 1, 1, 1, 1, 1, 1, 2, 0],
[2, 2, 1, 1, 1, 1, 1, 2, 2, 0],
[0, 2, 2, 2, 2, 2, 2, 2, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
marked = mark_boundaries(image, label_image, color=white,
outline_color=(2, 2, 2), mode='thick')
result = np.mean(marked, axis=-1)
assert_array_equal(result, ref)
def test_mark_boundaries_bool():
image = np.zeros((10, 10), dtype=bool)
label_image = np.zeros((10, 10), dtype=np.uint8)
label_image[2:7, 2:7] = 1
ref = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
marked = mark_boundaries(image, label_image, color=white, mode='thick')
result = np.mean(marked, axis=-1)
assert_array_equal(result, ref)
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
def test_mark_boundaries_subpixel(dtype):
labels = np.array([[0, 0, 0, 0],
[0, 0, 5, 0],
[0, 1, 5, 0],
[0, 0, 5, 0],
[0, 0, 0, 0]], dtype=np.uint8)
np.random.seed(0)
image = np.round(np.random.rand(*labels.shape), 2)
image = image.astype(dtype, copy=False)
marked = mark_boundaries(image, labels, color=white, mode='subpixel')
assert marked.dtype == _supported_float_type(dtype)
marked_proj = np.round(np.mean(marked, axis=-1), 2)
ref_result = np.array(
[[ 0.55, 0.63, 0.72, 0.69, 0.6 , 0.55, 0.54],
[ 0.45, 0.58, 0.72, 1. , 1. , 1. , 0.69],
[ 0.42, 0.54, 0.65, 1. , 0.44, 1. , 0.89],
[ 0.69, 1. , 1. , 1. , 0.69, 1. , 0.83],
[ 0.96, 1. , 0.38, 1. , 0.79, 1. , 0.53],
[ 0.89, 1. , 1. , 1. , 0.38, 1. , 0.16],
[ 0.57, 0.78, 0.93, 1. , 0.07, 1. , 0.09],
[ 0.2 , 0.52, 0.92, 1. , 1. , 1. , 0.54],
[ 0.02, 0.35, 0.83, 0.9 , 0.78, 0.81, 0.87]])
assert_allclose(marked_proj, ref_result, atol=0.01)
@pytest.mark.parametrize('mode', ['thick', 'inner', 'outer', 'subpixel'])
def test_boundaries_constant_image(mode):
"""A constant-valued image has not boundaries."""
ones = np.ones((8, 8), dtype=int)
b = find_boundaries(ones, mode=mode)
assert np.all(b == 0)
|
1,619 |
question
|
"""A PyQt5 dialog to show a message and let the user check a box
Example usage:
checked = OptionalMessageDialog.msg(self, "Disclaimer",
"This is beta software, and you are using it at your own risk!",
)
said_yes, checked = OptionalMessageDialog.question(self, "QtWidgets.Question",
"Are you sure you wish to do this?",
)
"""
#
# Copyright 2012-2014 ComicTagger Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
from PyQt5 import QtCore, QtWidgets
logger = logging.getLogger(__name__)
StyleMessage = 0
StyleQuestion = 1
class OptionalMessageDialog(QtWidgets.QDialog):
def __init__(
self, parent: QtWidgets.QWidget, style: int, title: str, msg: str, checked: bool = False, check_text: str = ""
) -> None:
super().__init__(parent)
self.setWindowTitle(title)
self.was_accepted = False
layout = QtWidgets.QVBoxLayout(self)
self.theLabel = QtWidgets.QLabel(msg)
self.theLabel.setWordWrap(True)
self.theLabel.setTextFormat(QtCore.Qt.TextFormat.RichText)
self.theLabel.setOpenExternalLinks(True)
self.theLabel.setTextInteractionFlags(
QtCore.Qt.TextInteractionFlag.TextSelectableByMouse
| QtCore.Qt.TextInteractionFlag.LinksAccessibleByMouse
| QtCore.Qt.TextInteractionFlag.LinksAccessibleByKeyboard
)
layout.addWidget(self.theLabel)
layout.insertSpacing(-1, 10)
if not check_text:
if style == StyleQuestion:
check_text = "Remember this answer"
else:
check_text = "Don't show this message again"
self.theCheckBox = QtWidgets.QCheckBox(check_text)
self.theCheckBox.setChecked(checked)
layout.addWidget(self.theCheckBox)
btnbox_style: QtWidgets.QDialogButtonBox.StandardButtons | QtWidgets.QDialogButtonBox.StandardButton
if style == StyleQuestion:
btnbox_style = QtWidgets.QDialogButtonBox.StandardButton.Yes | QtWidgets.QDialogButtonBox.StandardButton.No
else:
btnbox_style = QtWidgets.QDialogButtonBox.StandardButton.Ok
self.theButtonBox = QtWidgets.QDialogButtonBox(btnbox_style, parent=self)
self.theButtonBox.accepted.connect(self.accept)
self.theButtonBox.rejected.connect(self.reject)
layout.addWidget(self.theButtonBox)
def accept(self) -> None:
self.was_accepted = True
QtWidgets.QDialog.accept(self)
def reject(self) -> None:
self.was_accepted = False
QtWidgets.QDialog.reject(self)
@staticmethod
def msg(parent: QtWidgets.QWidget, title: str, msg: str, checked: bool = False, check_text: str = "") -> bool:
d = OptionalMessageDialog(parent, StyleMessage, title, msg, checked=checked, check_text=check_text)
d.exec()
return d.theCheckBox.isChecked()
@staticmethod
def METHOD_NAME(
parent: QtWidgets.QWidget, title: str, msg: str, checked: bool = False, check_text: str = ""
) -> tuple[bool, bool]:
d = OptionalMessageDialog(parent, StyleQuestion, title, msg, checked=checked, check_text=check_text)
d.exec()
return d.was_accepted, d.theCheckBox.isChecked()
@staticmethod
def msg_no_checkbox(
parent: QtWidgets.QWidget, title: str, msg: str, checked: bool = False, check_text: str = ""
) -> bool:
d = OptionalMessageDialog(parent, StyleMessage, title, msg, checked=checked, check_text=check_text)
d.theCheckBox.hide()
d.exec()
return d.theCheckBox.isChecked()
|
1,620 |
test invalid param
|
#!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
from essentia import *
from numpy import sort
class TestSlicer(TestCase):
def slice(self, startTimes, endTimes):
nSlices = len(startTimes)
if nSlices != len(endTimes):
print("Test cannot be computed")
exit(1)
input = list(range(max(endTimes)))
# expected values:
expected = []
orderedTimes = []
for i in range(nSlices):
time = (startTimes[i], endTimes[i])
orderedTimes.append(time)
orderedTimes = sorted(orderedTimes, key=lambda x: x[0])
for i in range(nSlices):
expected.append(input[orderedTimes[i][0]:orderedTimes[i][1]])
result = Slicer(startTimes = startTimes,
endTimes = endTimes,
timeUnits="samples")(input)
self.assertEqual(len(result), len(expected))
for i in range(nSlices):
self.assertEqualVector(result[i], expected[i])
def testEqualSize(self):
startTimes = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90]
endTimes = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
self.slice(startTimes, endTimes)
def testDifferentSize(self):
startTimes = [0, 11, 22, 33, 44, 55, 66, 77, 88, 99]
endTimes = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
self.slice(startTimes, endTimes)
def testOverlap(self):
startTimes = [0, 11, 22, 33, 44, 0, 6, 5, 88, 19]
endTimes = [30, 60, 45, 100, 50, 60, 10, 50, 100, 99]
self.slice(startTimes, endTimes)
def METHOD_NAME(self):
# startTime later than endTime:
startTimes = [35, 11]
endTimes = [30, 60]
self.assertConfigureFails(Slicer(), {'startTimes' : startTimes,
'endTimes' : endTimes})
self.assertConfigureFails(Slicer(), {'timeUnits' : 'unknown'})
def testEmpty(self):
startTimes = [0, 11]
endTimes = [30, 60]
result = Slicer(startTimes = startTimes,
endTimes = endTimes,
timeUnits="samples")([])
self.assertEqualVector(result, [])
def testOneSample(self):
startTimes = [0]
endTimes = [1.0/44100.0]
result = Slicer(startTimes = startTimes,
endTimes = endTimes,
timeUnits="seconds")([1])
self.assertEqualVector(result, [1])
def testVeryLargeStartAndEndTimes(self):
# no slices if times are beyond the input length:
startTimes = [100]
endTimes = [101]
result = Slicer(startTimes = startTimes,
endTimes = endTimes,
timeUnits="samples")([1]*50)
self.assertEqual(result, [])
def testEndTimePastEof(self):
# no slices if times are beyond the input length:
startTimes = [0]
endTimes = [100]
result = Slicer(startTimes = startTimes,
endTimes = endTimes,
timeUnits="seconds")([1])
self.assertEqualVector(result, [])
def Overflow(self):
self.assertConfigureFails(Slicer(), {'sampleRate' : 44100,
'startTimes' : [2147483649.0],
'endTimes' : [2147483649.5],
'timeUnits' : 'seconds'})
suite = allTests(TestSlicer)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
|
1,621 |
test
|
#
# Copyright (C) 2001-2023 NLTK Project
# Author: Masato Hagiwara <[email protected]>
# URL: <https://www.nltk.org/>
# For license information, see LICENSE.TXT
import sys
from nltk.corpus.reader import util
from nltk.corpus.reader.api import *
from nltk.corpus.reader.util import *
class ChasenCorpusReader(CorpusReader):
def __init__(self, root, fileids, encoding="utf8", sent_splitter=None):
self._sent_splitter = sent_splitter
CorpusReader.__init__(self, root, fileids, encoding)
def words(self, fileids=None):
return concat(
[
ChasenCorpusView(fileid, enc, False, False, False, self._sent_splitter)
for (fileid, enc) in self.abspaths(fileids, True)
]
)
def tagged_words(self, fileids=None):
return concat(
[
ChasenCorpusView(fileid, enc, True, False, False, self._sent_splitter)
for (fileid, enc) in self.abspaths(fileids, True)
]
)
def sents(self, fileids=None):
return concat(
[
ChasenCorpusView(fileid, enc, False, True, False, self._sent_splitter)
for (fileid, enc) in self.abspaths(fileids, True)
]
)
def tagged_sents(self, fileids=None):
return concat(
[
ChasenCorpusView(fileid, enc, True, True, False, self._sent_splitter)
for (fileid, enc) in self.abspaths(fileids, True)
]
)
def paras(self, fileids=None):
return concat(
[
ChasenCorpusView(fileid, enc, False, True, True, self._sent_splitter)
for (fileid, enc) in self.abspaths(fileids, True)
]
)
def tagged_paras(self, fileids=None):
return concat(
[
ChasenCorpusView(fileid, enc, True, True, True, self._sent_splitter)
for (fileid, enc) in self.abspaths(fileids, True)
]
)
class ChasenCorpusView(StreamBackedCorpusView):
"""
A specialized corpus view for ChasenReader. Similar to ``TaggedCorpusView``,
but it uses a fixed word tokenizer and sentence tokenizer.
"""
def __init__(
self,
corpus_file,
encoding,
tagged,
group_by_sent,
group_by_para,
sent_splitter=None,
):
self._tagged = tagged
self._group_by_sent = group_by_sent
self._group_by_para = group_by_para
self._sent_splitter = sent_splitter
StreamBackedCorpusView.__init__(self, corpus_file, encoding=encoding)
def read_block(self, stream):
"""Reads one paragraph at a time."""
block = []
for para_str in read_regexp_block(stream, r".", r"^EOS\n"):
para = []
sent = []
for line in para_str.splitlines():
_eos = line.strip() == "EOS"
_cells = line.split("\t")
w = (_cells[0], "\t".join(_cells[1:]))
if not _eos:
sent.append(w)
if _eos or (self._sent_splitter and self._sent_splitter(w)):
if not self._tagged:
sent = [w for (w, t) in sent]
if self._group_by_sent:
para.append(sent)
else:
para.extend(sent)
sent = []
if len(sent) > 0:
if not self._tagged:
sent = [w for (w, t) in sent]
if self._group_by_sent:
para.append(sent)
else:
para.extend(sent)
if self._group_by_para:
block.append(para)
else:
block.extend(para)
return block
def demo():
import nltk
from nltk.corpus.util import LazyCorpusLoader
jeita = LazyCorpusLoader("jeita", ChasenCorpusReader, r".*chasen", encoding="utf-8")
print("/".join(jeita.words()[22100:22140]))
print(
"\nEOS\n".join(
"\n".join("{}/{}".format(w[0], w[1].split("\t")[2]) for w in sent)
for sent in jeita.tagged_sents()[2170:2173]
)
)
def METHOD_NAME():
from nltk.corpus.util import LazyCorpusLoader
jeita = LazyCorpusLoader("jeita", ChasenCorpusReader, r".*chasen", encoding="utf-8")
assert isinstance(jeita.tagged_words()[0][1], str)
if __name__ == "__main__":
demo()
METHOD_NAME()
|
1,622 |
primary key
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'ListNamespaceKeysResult',
'AwaitableListNamespaceKeysResult',
'list_namespace_keys',
'list_namespace_keys_output',
]
@pulumi.output_type
class ListNamespaceKeysResult:
"""
Response for the POST request that returns Namespace or NotificationHub access keys (connection strings).
"""
def __init__(__self__, key_name=None, primary_connection_string=None, METHOD_NAME=None, secondary_connection_string=None, secondary_key=None):
if key_name and not isinstance(key_name, str):
raise TypeError("Expected argument 'key_name' to be a str")
pulumi.set(__self__, "key_name", key_name)
if primary_connection_string and not isinstance(primary_connection_string, str):
raise TypeError("Expected argument 'primary_connection_string' to be a str")
pulumi.set(__self__, "primary_connection_string", primary_connection_string)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'primary_key' to be a str")
pulumi.set(__self__, "primary_key", METHOD_NAME)
if secondary_connection_string and not isinstance(secondary_connection_string, str):
raise TypeError("Expected argument 'secondary_connection_string' to be a str")
pulumi.set(__self__, "secondary_connection_string", secondary_connection_string)
if secondary_key and not isinstance(secondary_key, str):
raise TypeError("Expected argument 'secondary_key' to be a str")
pulumi.set(__self__, "secondary_key", secondary_key)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> str:
"""
Gets or sets keyName of the created AuthorizationRule
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter(name="primaryConnectionString")
def primary_connection_string(self) -> str:
"""
Gets or sets primaryConnectionString of the AuthorizationRule.
"""
return pulumi.get(self, "primary_connection_string")
@property
@pulumi.getter(name="primaryKey")
def METHOD_NAME(self) -> str:
"""
Gets or sets primaryKey of the created AuthorizationRule.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter(name="secondaryConnectionString")
def secondary_connection_string(self) -> str:
"""
Gets or sets secondaryConnectionString of the created
AuthorizationRule
"""
return pulumi.get(self, "secondary_connection_string")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> str:
"""
Gets or sets secondaryKey of the created AuthorizationRule
"""
return pulumi.get(self, "secondary_key")
class AwaitableListNamespaceKeysResult(ListNamespaceKeysResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListNamespaceKeysResult(
key_name=self.key_name,
primary_connection_string=self.primary_connection_string,
METHOD_NAME=self.METHOD_NAME,
secondary_connection_string=self.secondary_connection_string,
secondary_key=self.secondary_key)
def list_namespace_keys(authorization_rule_name: Optional[str] = None,
namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListNamespaceKeysResult:
"""
Response for the POST request that returns Namespace or NotificationHub access keys (connection strings).
:param str authorization_rule_name: Authorization Rule Name
:param str namespace_name: Namespace name
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['authorizationRuleName'] = authorization_rule_name
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:notificationhubs/v20230901:listNamespaceKeys', __args__, opts=opts, typ=ListNamespaceKeysResult).value
return AwaitableListNamespaceKeysResult(
key_name=pulumi.get(__ret__, 'key_name'),
primary_connection_string=pulumi.get(__ret__, 'primary_connection_string'),
METHOD_NAME=pulumi.get(__ret__, 'primary_key'),
secondary_connection_string=pulumi.get(__ret__, 'secondary_connection_string'),
secondary_key=pulumi.get(__ret__, 'secondary_key'))
@_utilities.lift_output_func(list_namespace_keys)
def list_namespace_keys_output(authorization_rule_name: Optional[pulumi.Input[str]] = None,
namespace_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListNamespaceKeysResult]:
"""
Response for the POST request that returns Namespace or NotificationHub access keys (connection strings).
:param str authorization_rule_name: Authorization Rule Name
:param str namespace_name: Namespace name
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
...
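# Usage sketch (illustrative only): the resource names below are placeholders,
# shown to indicate how the keys are typically read inside a Pulumi program.
#
#   keys = list_namespace_keys(
#       authorization_rule_name="RootManageSharedAccessKey",   # hypothetical rule
#       namespace_name="my-notification-namespace",            # hypothetical namespace
#       resource_group_name="my-resource-group",               # hypothetical group
#   )
#   pulumi.export("primaryConnectionString", keys.primary_connection_string)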
|
1,623 |
forms
|
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from typing import Any
from django import METHOD_NAME
from django.contrib.admin.options import ModelAdmin
from django.db.models import Model
from django.db.models.fields import AutoField
from django.METHOD_NAME import BaseForm
from django.METHOD_NAME.boundfield import BoundField
from django.METHOD_NAME.models import ModelForm
from django.METHOD_NAME.utils import ErrorDict, ErrorList
from django.METHOD_NAME.widgets import Media, Widget
from django.utils.safestring import SafeString
from typing_extensions import TypedDict
ACTION_CHECKBOX_NAME: str
class ActionForm(METHOD_NAME.Form):
action: Any
select_across: Any
checkbox: Any
class _PrepopulatedDict(TypedDict):
field: BoundField
dependencies: list[BoundField]
class AdminForm:
prepopulated_fields: list[_PrepopulatedDict]
model_admin: ModelAdmin | None
readonly_fields: Sequence[str]
form: ModelForm
fieldsets: list[tuple[Any, dict[str, list[str]]]]
def __init__(
self,
form: ModelForm,
fieldsets: list[tuple[Any, dict[str, list[str]]]],
prepopulated_fields: Mapping[str, Iterable[str]],
readonly_fields: Sequence[str] | None = ...,
model_admin: ModelAdmin | None = ...,
) -> None: ...
def __iter__(self) -> Iterator[Fieldset]: ...
@property
def errors(self) -> ErrorDict: ...
@property
def non_field_errors(self) -> Callable[[], ErrorList]: ...
@property
def media(self) -> Media: ...
class Fieldset:
form: ModelForm
classes: str
description: str | None
model_admin: ModelAdmin | None
readonly_fields: Sequence[str]
def __init__(
self,
form: ModelForm,
name: Any | None = ...,
readonly_fields: Sequence[str] = ...,
fields: Sequence[str] = ...,
classes: Iterable[str] = ...,
description: str | None = ...,
model_admin: ModelAdmin | None = ...,
) -> None: ...
@property
def media(self) -> Media: ...
def __iter__(self) -> Iterator[Fieldline]: ...
class Fieldline:
form: ModelForm
fields: Sequence[str]
has_visible_field: bool
model_admin: ModelAdmin | None
readonly_fields: Sequence[str]
def __init__(
self,
form: ModelForm,
field: str | Sequence[str],
readonly_fields: Sequence[str] | None = ...,
model_admin: ModelAdmin | None = ...,
) -> None: ...
def __iter__(self) -> Iterator[AdminField | AdminReadonlyField]: ...
def errors(self) -> SafeString: ...
class AdminField:
field: BoundField
is_first: bool
is_checkbox: bool
is_readonly: bool
def __init__(self, form: ModelForm, field: str, is_first: bool) -> None: ...
def label_tag(self) -> SafeString: ...
def errors(self) -> SafeString: ...
class _FieldDictT(TypedDict):
name: str
label: str
help_text: str
field: Callable[[Model], Any] | str
class AdminReadonlyField:
field: _FieldDictT
form: ModelForm
model_admin: ModelAdmin | None
is_first: bool
is_checkbox: bool
is_readonly: bool
empty_value_display: Any
def __init__(
self,
form: ModelForm,
field: Callable[[Model], Any] | str,
is_first: bool,
model_admin: ModelAdmin | None = ...,
) -> None: ...
def label_tag(self) -> SafeString: ...
def contents(self) -> SafeString: ...
class InlineAdminFormSet:
opts: Any
formset: Any
fieldsets: Any
model_admin: ModelAdmin | None
readonly_fields: Sequence[str]
prepopulated_fields: dict[str, Any]
classes: str
has_add_permission: bool
has_change_permission: bool
has_delete_permission: bool
has_view_permission: bool
def __init__(
self,
inline: Any,
formset: Any,
fieldsets: Any,
prepopulated_fields: dict[str, Any] | None = ...,
readonly_fields: Sequence[str] | None = ...,
model_admin: ModelAdmin | None = ...,
has_add_permission: bool = ...,
has_change_permission: bool = ...,
has_delete_permission: bool = ...,
has_view_permission: bool = ...,
) -> None: ...
def __iter__(self) -> Iterator[InlineAdminForm]: ...
def fields(self) -> Iterator[dict[str, dict[str, bool] | bool | Widget | str]]: ...
def inline_formset_data(self) -> str: ...
@property
def METHOD_NAME(self) -> list[BaseForm]: ...
@property
def non_form_errors(self) -> Callable[[], ErrorList]: ...
@property
def media(self) -> Media: ...
class InlineAdminForm(AdminForm):
formset: Any
original: bool | None
show_url: bool
absolute_url: str | None
def __init__(
self,
formset: Any,
form: ModelForm,
fieldsets: Any,
prepopulated_fields: Any,
original: bool | None,
readonly_fields: Sequence[str] | None = ...,
model_admin: ModelAdmin | None = ...,
view_on_site_url: str | None = ...,
) -> None: ...
def __iter__(self) -> Iterator[InlineFieldset]: ...
def needs_explicit_pk_field(self) -> bool | AutoField: ...
def pk_field(self) -> AdminField: ...
def fk_field(self) -> AdminField: ...
def deletion_field(self) -> AdminField: ...
class InlineFieldset(Fieldset):
formset: Any
def __init__(self, formset: Any, *args: Any, **kwargs: Any) -> None: ...
def __iter__(self) -> Iterator[Fieldline]: ...
class AdminErrorList(METHOD_NAME.utils.ErrorList):
def __init__(self, form: ModelForm, inline_formsets: Any) -> None: ...
|
1,624 |
main
|
#! /usr/bin/env python3
import os
from subprocess import call
import platform
import shutil
import time
dockerComposeFilename = 'docker-compose-coop.yml'
containerLocalHostAddressOrg = '127.0.0.1'
def AddVehicle(composeFile,containerLocalHostAddress,vehicleId,lastNetworkOctet,lastEndpointOctet,gossipBind,amasePort,configurationFile,tadPort1,tadPort2,tadPort3):
# backup old run directory and create new one.
runDir = 'Vehicle_{}'.format(vehicleId)
if os.path.isdir(runDir):
shutil.rmtree(runDir)
os.mkdir(runDir)
# fix container's host address and add gossip addresses
fin = open(configurationFile, 'r')
stringFileIn = fin.read()
# fix container's host address in the config file
stringFileIn = stringFileIn.replace(containerLocalHostAddressOrg,containerLocalHostAddress)
# change zyre config to use gossip
zyreEntryOrg = '<Bridge Type="LmcpObjectNetworkZeroMqZyreBridge" NetworkDevice="en0">'
zyreEntryNew = '<Bridge Type="LmcpObjectNetworkZeroMqZyreBridge" ZyreEndpoint="tcp://172.28.6.{0}:39801" GossipEndpoint="tcp://172.28.6.{1}:39810" GossipBind="{2}">'.format(lastNetworkOctet,lastEndpointOctet,gossipBind)
stringFileIn = stringFileIn.replace(zyreEntryOrg,zyreEntryNew)
#save the changes to a new file
cfgFileNew = '{0}_new.xml'.format(configurationFile)
fout = open('{0}/{1}'.format(runDir,cfgFileNew), 'w')
fout.write(stringFileIn)
fout.close()
# build new docker service (new container)
composeFile.write(' V{0}:\n'.format(vehicleId))
composeFile.write(' image: "uxas/uxas-deploy:x86_64"\n')
composeFile.write(' ports:\n')
    composeFile.write('      - "{0}:{1}:{1}"\n'.format(containerLocalHostAddress,amasePort))
if tadPort1 and tadPort2:
composeFile.write(' - "{1}:{1}"\n'.format(containerLocalHostAddress,tadPort1))
composeFile.write(' - "{1}:{1}"\n'.format(containerLocalHostAddress,tadPort2))
if tadPort3:
        composeFile.write('      - "{0}:{1}:{1}"\n'.format(containerLocalHostAddress,tadPort3))
composeFile.write(' volumes:\n')
composeFile.write(' - type: "bind"\n')
composeFile.write(' source: "./"\n')
composeFile.write(' target: "/working"\n')
composeFile.write(' networks:\n')
composeFile.write(' uxas_net_coop:\n')
composeFile.write(' ipv4_address: "172.28.6.{0}"\n'.format(lastNetworkOctet))
composeFile.write(' working_dir: /working/{0}\n'.format(runDir))
composeFile.write(' entrypoint: ["/uxas","-cfgPath", "{0}"]\n'.format(cfgFileNew))
composeFile.write('\n')
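# For illustration (values taken from the defaults used further down in this script,
# not emitted verbatim): a call with vehicleId=1000, lastNetworkOctet=10 and
# amasePort=7056 writes a service block along these lines into the compose file:
#
#   V1000:
#     image: "uxas/uxas-deploy:x86_64"
#     ports:
#       - "127.0.0.1:7056:7056"
#     volumes:
#       - type: "bind"
#         source: "./"
#         target: "/working"
#     networks:
#       uxas_net_coop:
#         ipv4_address: "172.28.6.10"
#     working_dir: /working/Vehicle_1000
#     entrypoint: ["/uxas","-cfgPath", "cfgDistributedCooperation_1000.xml_new.xml"]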
def BuildDockerComposeFile(dockerComposeFilename,containerLocalHostAddress):
composeFile = open(dockerComposeFilename,'w')
composeFile.write("version: '3.2'\n\n")
composeFile.write('services:\n')
vehicleId = 1000
lastNetworkOctet = 10
lastEndpointOctet = 10
amasePort = 7056
cfgFile = 'cfgDistributedCooperation_1000.xml'
gossipBind = 'true'
AddVehicle(composeFile,containerLocalHostAddress,vehicleId,lastNetworkOctet,lastEndpointOctet,gossipBind,amasePort,cfgFile,[],[],[])
vehicleId = 2000
lastNetworkOctet = 20
    lastEndpointOctet = lastEndpointOctet  # unchanged: this vehicle connects to vehicle 1000's gossip endpoint instead of binding its own
amasePort = 7057
cfgFile = 'cfgDistributedCooperation_2000.xml'
gossipBind = 'false'
AddVehicle(composeFile,containerLocalHostAddress,vehicleId,lastNetworkOctet,lastEndpointOctet,gossipBind,amasePort,cfgFile,5560,5561,9876)
composeFile.write('networks:\n')
composeFile.write(' default:\n')
composeFile.write(' external:\n')
composeFile.write(' name: bridge\n')
composeFile.write(' uxas_net_coop:\n')
composeFile.write(' driver: bridge\n')
composeFile.write(' ipam:\n')
composeFile.write(' driver: default\n')
composeFile.write(' config:\n')
composeFile.write(' -\n')
composeFile.write(' subnet: 172.28.6.0/24\n')
composeFile.write('\n')
composeFile.close()
def METHOD_NAME():
osType = platform.system()
containerLocalHostAddress = containerLocalHostAddressOrg # NOTE: see the file: `OpenUxAS/docker/00c_README_OS_Differences.md`
if osType =='Linux':
pass
elif osType =='Darwin':
containerLocalHostAddress = '192.168.65.2'
networkCfg = ''
elif osType =='Windows':
containerLocalHostAddress = '10.0.75.1'
networkCfg = ''
BuildDockerComposeFile(dockerComposeFilename,containerLocalHostAddress)
print('\n** close any running containers **')
cmd = 'docker-compose -f {} kill'.format(dockerComposeFilename)
print('{}\n'.format(cmd))
call(cmd,shell=True)
print('\n** start new containers **')
cmd = 'docker-compose -f {} up'.format(dockerComposeFilename)
print('{}\n'.format(cmd))
call(cmd,shell=True)
if __name__ == '__main__':
METHOD_NAME()
|
1,625 |
test
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('../')
from auto_scan_test import AutoScanTest, IgnoreReasons
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import unittest
import hypothesis
from hypothesis import given, settings, seed, example, assume, reproduce_failure
import hypothesis.strategies as st
import numpy as np
from functools import partial
class TestCropOp(AutoScanTest):
def __init__(self, *args, **kwargs):
AutoScanTest.__init__(self, *args, **kwargs)
self.enable_testing_on_place(
TargetType.Host,
PrecisionType.FP32,
DataLayoutType.Any,
thread=[1, 4])
def is_program_valid(self,
program_config: ProgramConfig,
predictor_config: CxxConfig) -> bool:
return True
def sample_program_configs(self, draw):
shape_value_min = 1
shape_value_max = 25
input_data_x_shape = draw(
st.lists(
st.integers(
min_value=shape_value_min, max_value=shape_value_max),
min_size=1,
max_size=6))
input_data_y_shape = input_data_x_shape
x_dims_size = len(input_data_x_shape)
input_x_data_type = draw(st.sampled_from([np.float32]))
has_input_y = draw(st.booleans())
has_input_offsets = draw(st.booleans())
offsets = [
np.random.randint(
low=0, high=input_data_x_shape[i]) for i in range(x_dims_size)
]
shape = [
input_data_x_shape[i] - offsets[i] for i in range(x_dims_size)
]
def gen_input_data_x_y(*args, **kwargs):
return np.random.randint(0, 20,
kwargs['shape']).astype(kwargs['dtype'])
def gen_input_data_offsets():
return np.array(offsets).astype(np.int32)
def GenOpInputsAndAttrs():
def GenOpInputs():
inputs = {"X": ["input_data_x"]}
inputs_tensor = {
"input_data_x": TensorConfig(data_gen=partial(
gen_input_data_x_y,
shape=input_data_x_shape,
dtype=input_x_data_type))
}
if has_input_y:
inputs["Y"] = ["input_data_y"]
inputs_tensor["input_data_y"] = TensorConfig(
data_gen=partial(
gen_input_data_x_y, shape=shape, dtype=np.int32))
if has_input_offsets:
inputs["Offsets"] = ["input_data_offsets"]
inputs_tensor["input_data_offsets"] = TensorConfig(
data_gen=partial(gen_input_data_offsets))
return inputs, inputs_tensor
inputs, inputs_tensor = GenOpInputs()
def GenOpAttrs():
attrs = {"offsets": offsets, "shape": shape}
if "Offsets" in inputs:
attrs['offsets'] = []
return attrs
attrs = GenOpAttrs()
return inputs, inputs_tensor, attrs
inputs, inputs_tensor, attrs = GenOpInputsAndAttrs()
crop_op = OpConfig(
type="crop",
inputs=inputs,
outputs={"Out": ["output_data"]},
attrs=attrs)
crop_op.outputs_dtype = {"output_data": input_x_data_type}
program_config = ProgramConfig(
ops=[crop_op],
weights={},
inputs=inputs_tensor,
outputs=["output_data"])
return program_config
def sample_predictor_configs(self):
config = CxxConfig()
return self.get_predictor_configs(), ["crop"], (1e-5, 1e-5)
def add_ignore_pass_case(self):
pass
def METHOD_NAME(self, *args, **kwargs):
self.run_and_statis(quant=False, max_examples=300)
if __name__ == "__main__":
unittest.main(argv=[''])
|
1,626 |
main
|
#!/usr/bin/env python3
import fnmatch
import os
import re
import ntpath
import sys
import argparse
def check_config_style(filepath):
bad_count_file = 0
def pushClosing(t):
closingStack.append(closing.expr)
closing << Literal( closingFor[t[0]] )
def popClosing():
closing << closingStack.pop()
with open(filepath, 'r', encoding='utf-8', errors='ignore') as file:
content = file.read()
        # Store all brackets we find in this file, so we can validate everything at the end
brackets_list = []
# To check if we are in a comment block
isInCommentBlock = False
checkIfInComment = False
# Used in case we are in a line comment (//)
ignoreTillEndOfLine = False
# Used in case we are in a comment block (/* */). This is true if we detect a * inside a comment block.
# If the next character is a /, it means we end our comment block.
checkIfNextIsClosingBlock = False
# We ignore everything inside a string
isInString = False
# Used to store the starting type of a string, so we can match that to the end of a string
inStringType = '';
lastIsCurlyBrace = False
checkForSemiColumn = False
# Extra information so we know what line we find errors at
lineNumber = 1
indexOfCharacter = 0
# Parse all characters in the content of this file to search for potential errors
for c in content:
if (lastIsCurlyBrace):
lastIsCurlyBrace = False
if c == '\n': # Keeping track of our line numbers
lineNumber += 1 # so we can print accurate line number information when we detect a possible error
if (isInString): # while we are in a string, we can ignore everything else, except the end of the string
if (c == inStringType):
isInString = False
# if we are not in a comment block, we will check if we are at the start of one or count the () {} and []
elif (isInCommentBlock == False):
# This means we have encountered a /, so we are now checking if this is an inline comment or a comment block
if (checkIfInComment):
checkIfInComment = False
if c == '*': # if the next character after / is a *, we are at the start of a comment block
isInCommentBlock = True
                    elif (c == '/'): # Otherwise, check whether this is a line comment
                        ignoreTillEndOfLine = True # a line comment is a / followed by another / (//); we ignore everything after it on this line
if (isInCommentBlock == False):
if (ignoreTillEndOfLine): # we are in a line comment, just continue going through the characters until we find an end of line
if (c == '\n'):
ignoreTillEndOfLine = False
else: # validate brackets
if (c == '"' or c == "'"):
isInString = True
inStringType = c
elif (c == '/'):
checkIfInComment = True
elif (c == '('):
brackets_list.append('(')
elif (c == ')'):
if (len(brackets_list) > 0 and brackets_list[-1] in ['{', '[']):
print("ERROR: Possible missing round bracket ')' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append(')')
elif (c == '['):
brackets_list.append('[')
elif (c == ']'):
if (len(brackets_list) > 0 and brackets_list[-1] in ['{', '(']):
print("ERROR: Possible missing square bracket ']' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append(']')
elif (c == '{'):
brackets_list.append('{')
elif (c == '}'):
lastIsCurlyBrace = True
if (len(brackets_list) > 0 and brackets_list[-1] in ['(', '[']):
print("ERROR: Possible missing curly brace '}}' detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
brackets_list.append('}')
elif (c== '\t'):
print("ERROR: Tab detected at {0} Line number: {1}".format(filepath,lineNumber))
bad_count_file += 1
else: # Look for the end of our comment block
if (c == '*'):
checkIfNextIsClosingBlock = True;
elif (checkIfNextIsClosingBlock):
if (c == '/'):
isInCommentBlock = False
elif (c != '*'):
checkIfNextIsClosingBlock = False
indexOfCharacter += 1
if brackets_list.count('[') != brackets_list.count(']'):
print("ERROR: A possible missing square bracket [ or ] in file {0} [ = {1} ] = {2}".format(filepath,brackets_list.count('['),brackets_list.count(']')))
bad_count_file += 1
if brackets_list.count('(') != brackets_list.count(')'):
print("ERROR: A possible missing round bracket ( or ) in file {0} ( = {1} ) = {2}".format(filepath,brackets_list.count('('),brackets_list.count(')')))
bad_count_file += 1
if brackets_list.count('{') != brackets_list.count('}'):
print("ERROR: A possible missing curly brace {{ or }} in file {0} {{ = {1} }} = {2}".format(filepath,brackets_list.count('{'),brackets_list.count('}')))
bad_count_file += 1
return bad_count_file
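# Illustrative behaviour (hypothetical input, not taken from a real addon): for a
# config line such as "bad[] = {1, 2);" the parser reports a possible missing
# round bracket at the ')' because the most recent unmatched opener is '{', and
# the final counts then also flag the unbalanced '{'/'}' and '('/')' totals.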
def METHOD_NAME():
print("Validating Config Style")
sqf_list = []
bad_count = 0
parser = argparse.ArgumentParser()
parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default="")
args = parser.parse_args()
# Allow running from root directory as well as from inside the tools directory
rootDir = "../addons"
if (os.path.exists("addons")):
rootDir = "addons"
for root, dirnames, filenames in os.walk(rootDir + '/' + args.module):
for filename in fnmatch.filter(filenames, '*.cpp'):
sqf_list.append(os.path.join(root, filename))
for filename in fnmatch.filter(filenames, '*.hpp'):
sqf_list.append(os.path.join(root, filename))
for filename in sqf_list:
bad_count = bad_count + check_config_style(filename)
print("------\nChecked {0} files\nErrors detected: {1}".format(len(sqf_list), bad_count))
if (bad_count == 0):
print("Config validation PASSED")
else:
print("Config validation FAILED")
return bad_count
if __name__ == "__main__":
sys.exit(METHOD_NAME())
|
1,627 |
reslice
|
from multiprocessing import Pool
import warnings
import numpy as np
from scipy.ndimage import affine_transform
from dipy.utils.multiproc import determine_num_processes
def _affine_transform(kwargs):
with warnings.catch_warnings():
warnings.filterwarnings("ignore", message=".*scipy.*18.*",
category=UserWarning)
return affine_transform(**kwargs)
def METHOD_NAME(data, affine, zooms, new_zooms, order=1, mode='constant', cval=0,
num_processes=1):
""" Reslice data with new voxel resolution defined by ``new_zooms``.
Parameters
----------
data : array, shape (I,J,K) or (I,J,K,N)
3d volume or 4d volume with datasets
affine : array, shape (4,4)
mapping from voxel coordinates to world coordinates
zooms : tuple, shape (3,)
voxel size for (i,j,k) dimensions
new_zooms : tuple, shape (3,)
new voxel size for (i,j,k) after resampling
order : int, from 0 to 5
order of interpolation for resampling/reslicing,
        0 nearest interpolation, 1 trilinear, etc.
        If you don't want any smoothing, 0 is the option you need.
mode : string ('constant', 'nearest', 'reflect' or 'wrap')
Points outside the boundaries of the input are filled according
to the given mode.
cval : float
Value used for points outside the boundaries of the input if
mode='constant'.
num_processes : int, optional
Split the calculation to a pool of children processes. This only
applies to 4D `data` arrays. Default is 1. If < 0 the maximal number
of cores minus ``num_processes + 1`` is used (enter -1 to use as many
cores as possible). 0 raises an error.
Returns
-------
data2 : array, shape (I,J,K) or (I,J,K,N)
datasets resampled into isotropic voxel size
affine2 : array, shape (4,4)
new affine for the resampled image
Examples
--------
>>> from dipy.io.image import load_nifti
>>> from dipy.align.reslice import reslice
>>> from dipy.data import get_fnames
>>> f_name = get_fnames('aniso_vox')
>>> data, affine, zooms = load_nifti(f_name, return_voxsize=True)
>>> data.shape == (58, 58, 24)
True
>>> zooms
(4.0, 4.0, 5.0)
>>> new_zooms = (3.,3.,3.)
>>> new_zooms
(3.0, 3.0, 3.0)
>>> data2, affine2 = reslice(data, affine, zooms, new_zooms)
>>> data2.shape == (77, 77, 40)
True
"""
num_processes = determine_num_processes(num_processes)
# We are suppressing warnings emitted by scipy >= 0.18,
# described in https://github.com/dipy/dipy/issues/1107.
# These warnings are not relevant to us, as long as our offset
# input to scipy's affine_transform is [0, 0, 0]
with warnings.catch_warnings():
warnings.filterwarnings("ignore", message=".*scipy.*18.*",
category=UserWarning)
new_zooms = np.array(new_zooms, dtype='f8')
zooms = np.array(zooms, dtype='f8')
R = new_zooms / zooms
new_shape = zooms / new_zooms * np.array(data.shape[:3])
new_shape = tuple(np.round(new_shape).astype('i8'))
kwargs = {'matrix': R, 'output_shape': new_shape, 'order': order,
'mode': mode, 'cval': cval}
if data.ndim == 3:
data2 = affine_transform(input=data, **kwargs)
if data.ndim == 4:
data2 = np.zeros(new_shape+(data.shape[-1],), data.dtype)
if num_processes == 1:
for i in range(data.shape[-1]):
affine_transform(input=data[..., i], output=data2[..., i],
**kwargs)
else:
params = []
for i in range(data.shape[-1]):
_kwargs = {'input': data[..., i]}
_kwargs.update(kwargs)
params.append(_kwargs)
pool = Pool(num_processes)
for i, res in enumerate(pool.imap(_affine_transform, params)):
data2[..., i] = res
pool.close()
Rx = np.eye(4)
Rx[:3, :3] = np.diag(R)
affine2 = np.dot(affine, Rx)
return data2, affine2
|
1,628 |
load required audio
|
import os
from glob import glob
from typing import Dict, List
import librosa
import numpy as np
import torch
import torchaudio
from scipy.io.wavfile import read
from TTS.utils.audio.torch_transforms import TorchSTFT
def load_wav_to_torch(full_path):
sampling_rate, data = read(full_path)
if data.dtype == np.int32:
norm_fix = 2**31
elif data.dtype == np.int16:
norm_fix = 2**15
elif data.dtype == np.float16 or data.dtype == np.float32:
norm_fix = 1.0
else:
raise NotImplementedError(f"Provided data dtype not supported: {data.dtype}")
return (torch.FloatTensor(data.astype(np.float32)) / norm_fix, sampling_rate)
def check_audio(audio, audiopath: str):
# Check some assumptions about audio range. This should be automatically fixed in load_wav_to_torch, but might not be in some edge cases, where we should squawk.
# '2' is arbitrarily chosen since it seems like audio will often "overdrive" the [-1,1] bounds.
if torch.any(audio > 2) or not torch.any(audio < 0):
print(f"Error with {audiopath}. Max={audio.max()} min={audio.min()}")
audio.clip_(-1, 1)
def read_audio_file(audiopath: str):
if audiopath[-4:] == ".wav":
audio, lsr = load_wav_to_torch(audiopath)
elif audiopath[-4:] == ".mp3":
audio, lsr = librosa.load(audiopath, sr=None)
audio = torch.FloatTensor(audio)
else:
assert False, f"Unsupported audio format provided: {audiopath[-4:]}"
# Remove any channel data.
if len(audio.shape) > 1:
if audio.shape[0] < 5:
audio = audio[0]
else:
assert audio.shape[1] < 5
audio = audio[:, 0]
return audio, lsr
def METHOD_NAME(audiopath: str):
audio, lsr = read_audio_file(audiopath)
audios = [torchaudio.functional.resample(audio, lsr, sampling_rate) for sampling_rate in (22050, 24000)]
for audio in audios:
check_audio(audio, audiopath)
return [audio.unsqueeze(0) for audio in audios]
def load_audio(audiopath, sampling_rate):
audio, lsr = read_audio_file(audiopath)
if lsr != sampling_rate:
audio = torchaudio.functional.resample(audio, lsr, sampling_rate)
check_audio(audio, audiopath)
return audio.unsqueeze(0)
TACOTRON_MEL_MAX = 2.3143386840820312
TACOTRON_MEL_MIN = -11.512925148010254
def denormalize_tacotron_mel(norm_mel):
return ((norm_mel + 1) / 2) * (TACOTRON_MEL_MAX - TACOTRON_MEL_MIN) + TACOTRON_MEL_MIN
def normalize_tacotron_mel(mel):
return 2 * ((mel - TACOTRON_MEL_MIN) / (TACOTRON_MEL_MAX - TACOTRON_MEL_MIN)) - 1
def dynamic_range_compression(x, C=1, clip_val=1e-5):
"""
PARAMS
------
C: compression factor
"""
return torch.log(torch.clamp(x, min=clip_val) * C)
def dynamic_range_decompression(x, C=1):
"""
PARAMS
------
C: compression factor used to compress
"""
return torch.exp(x) / C
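# Sanity-check sketch (illustrative only, not part of the original module): the two
# functions above are inverses for values above clip_val, since
# exp(log(clamp(x) * C)) / C == x, e.g.:
#   x = torch.rand(4) + 0.1
#   assert torch.allclose(dynamic_range_decompression(dynamic_range_compression(x)), x)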
def get_voices(extra_voice_dirs: List[str] = []):
dirs = extra_voice_dirs
voices: Dict[str, List[str]] = {}
for d in dirs:
subs = os.listdir(d)
for sub in subs:
subj = os.path.join(d, sub)
if os.path.isdir(subj):
voices[sub] = list(glob(f"{subj}/*.wav")) + list(glob(f"{subj}/*.mp3")) + list(glob(f"{subj}/*.pth"))
return voices
def load_voice(voice: str, extra_voice_dirs: List[str] = []):
if voice == "random":
return None, None
voices = get_voices(extra_voice_dirs)
paths = voices[voice]
if len(paths) == 1 and paths[0].endswith(".pth"):
return None, torch.load(paths[0])
else:
conds = []
for cond_path in paths:
c = METHOD_NAME(cond_path)
conds.append(c)
return conds, None
def load_voices(voices: List[str], extra_voice_dirs: List[str] = []):
latents = []
clips = []
for voice in voices:
if voice == "random":
if len(voices) > 1:
print("Cannot combine a random voice with a non-random voice. Just using a random voice.")
return None, None
clip, latent = load_voice(voice, extra_voice_dirs)
if latent is None:
assert (
len(latents) == 0
), "Can only combine raw audio voices or latent voices, not both. Do it yourself if you want this."
clips.extend(clip)
elif clip is None:
assert (
len(clips) == 0
), "Can only combine raw audio voices or latent voices, not both. Do it yourself if you want this."
latents.append(latent)
if len(latents) == 0:
return clips, None
else:
latents_0 = torch.stack([l[0] for l in latents], dim=0).mean(dim=0)
latents_1 = torch.stack([l[1] for l in latents], dim=0).mean(dim=0)
latents = (latents_0, latents_1)
return None, latents
def wav_to_univnet_mel(wav, do_normalization=False, device="cuda"):
stft = TorchSTFT(
n_fft=1024,
hop_length=256,
win_length=1024,
use_mel=True,
n_mels=100,
sample_rate=24000,
mel_fmin=0,
mel_fmax=12000,
)
stft = stft.to(device)
mel = stft(wav)
mel = dynamic_range_compression(mel)
if do_normalization:
mel = normalize_tacotron_mel(mel)
return mel
|
1,629 |
hex to int
|
#!/usr/bin/env python3
#
# Copyright (C), 2022 Intel Corporation.
# Copyright (c), 2018-2021, SISSA (International School for Advanced Studies).
#
# SPDX-License-Identifier: BSD-3-Clause
#
import sys, os
from decimal import Decimal
from copy import copy
import operator
import elementpath
# Allow this script to find the library module at misc/config_tools/library.
#
# TODO: Reshuffle the module structure of the configuration toolset for clearer imports.
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
import library.rdt as rdt
BaseParser = elementpath.XPath2Parser
class CustomParser(BaseParser):
if hasattr(BaseParser, "SYMBOLS"):
SYMBOLS = BaseParser.SYMBOLS | {
# Bit-wise operations
'bitwise-and',
'bits-of',
'has',
'duplicate-values',
'number-of-clos-id-needed',
}
method = CustomParser.method
function = CustomParser.function
###
# Custom functions
OPERATORS_MAP = {
'bitwise-and': operator.and_
}
def METHOD_NAME(value):
if hasattr(value, 'text'):
value = value.text
if isinstance(value, int):
return value
elif isinstance(value, (float, Decimal)):
return int(value)
elif isinstance(value, str) and value.startswith("0x"):
return int(value, base=16)
else:
raise TypeError('invalid type {!r} for integer'.format(type(value)))
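# Illustrative conversions handled above: METHOD_NAME(7) -> 7, METHOD_NAME(Decimal("3")) -> 3,
# METHOD_NAME("0x1f") -> 31, and an XML element node whose .text is "0x10" -> 16;
# any other input raises TypeError.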
@method(function('bitwise-and', nargs=2))
def evaluate(self, context=None):
def aux(op):
op1 = self.get_argument(context, 0)
op2 = self.get_argument(context, 1)
try:
return op(METHOD_NAME(op1), METHOD_NAME(op2))
except ValueError as err:
raise self.error('FORG0001', err) from None
except TypeError as err:
raise self.error('XPTY0004', err)
return aux(OPERATORS_MAP[self.symbol])
@method(function('bits-of', nargs=1))
def evaluate_bits_of(self, context=None):
op = self.get_argument(context)
try:
value = METHOD_NAME(op)
for idx, bit in enumerate(reversed(bin(value)[2:])):
if bit == '1':
yield idx
except TypeError as err:
raise self.error('XPTY0004', err)
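# Worked example (illustrative): for the value 0x16 (0b10110), bits-of yields 1, 2
# and 4, i.e. the zero-based positions of the set bits counted from the least
# significant bit.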
@method(function('has', nargs=2))
def evaluate_has_function(self, context=None):
arg2 = self.get_argument(context, index=1, cls=str)
for item in self[0].select(context):
value = self.data_value(item)
if value == arg2:
return True
return False
@method(function('duplicate-values', nargs=1))
def select_duplicate_values_function(self, context=None):
def duplicate_values():
results = []
reported = []
for item in self[0].select(context):
value = self.data_value(item)
if context is not None:
context.item = value
if value in results:
if value not in reported:
yield value
reported.append(value)
else:
results.append(value)
yield from duplicate_values()
@method(function('number-of-clos-id-needed', nargs=1))
def evaluate_number_of_clos_id_needed(self, context=None):
op = self.get_argument(context, index=0)
if op is not None:
if isinstance(op, elementpath.TypedElement):
op = op.elem
# This function may be invoked when the xmlschema library parses the data check schemas, in which case `op` will
# be an object of class Xsd11Element. Only attempt to calculate the needed CLOS IDs when a real acrn-config node
# is given.
if hasattr(op, "xpath"):
return len(rdt.get_policy_list(op))
return 0
###
# Collection of counter examples
class Hashable:
def __init__(self, obj):
self.obj = obj
def __hash__(self):
return id(self.obj)
def copy_context(context):
ret = copy(context)
if hasattr(context, 'counter_example'):
ret.counter_example = dict()
return ret
def add_counter_example(context, private_context, kvlist):
if hasattr(context, 'counter_example'):
context.counter_example.update(kvlist)
if private_context:
context.counter_example.update(private_context.counter_example)
@method('every')
@method('some')
def evaluate(self, context=None):
if context is None:
raise self.missing_context()
some = self.symbol == 'some'
varrefs = [Hashable(self[k]) for k in range(0, len(self) - 1, 2)]
varnames = [self[k][0].value for k in range(0, len(self) - 1, 2)]
selectors = [self[k].select for k in range(1, len(self) - 1, 2)]
for results in copy(context).iter_product(selectors, varnames):
private_context = copy_context(context)
private_context.variables.update(x for x in zip(varnames, results))
if self.boolean_value([x for x in self[-1].select(private_context)]):
if some:
add_counter_example(context, private_context, zip(varrefs, results))
return True
elif not some:
add_counter_example(context, private_context, zip(varrefs, results))
return False
return not some
elementpath.XPath2Parser = CustomParser
|
1,630 |
test inference superresolution fp16
|
# coding=utf-8
# Copyright 2023 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import unittest
import numpy as np
import torch
from diffusers import DDIMScheduler, LDMSuperResolutionPipeline, UNet2DModel, VQModel
from diffusers.utils import PIL_INTERPOLATION
from diffusers.utils.testing_utils import (
enable_full_determinism,
floats_tensor,
load_image,
require_torch,
slow,
torch_device,
)
enable_full_determinism()
class LDMSuperResolutionPipelineFastTests(unittest.TestCase):
@property
def dummy_image(self):
batch_size = 1
num_channels = 3
sizes = (32, 32)
image = floats_tensor((batch_size, num_channels) + sizes, rng=random.Random(0)).to(torch_device)
return image
@property
def dummy_uncond_unet(self):
torch.manual_seed(0)
model = UNet2DModel(
block_out_channels=(32, 64),
layers_per_block=2,
sample_size=32,
in_channels=6,
out_channels=3,
down_block_types=("DownBlock2D", "AttnDownBlock2D"),
up_block_types=("AttnUpBlock2D", "UpBlock2D"),
)
return model
@property
def dummy_vq_model(self):
torch.manual_seed(0)
model = VQModel(
block_out_channels=[32, 64],
in_channels=3,
out_channels=3,
down_block_types=["DownEncoderBlock2D", "DownEncoderBlock2D"],
up_block_types=["UpDecoderBlock2D", "UpDecoderBlock2D"],
latent_channels=3,
)
return model
def test_inference_superresolution(self):
device = "cpu"
unet = self.dummy_uncond_unet
scheduler = DDIMScheduler()
vqvae = self.dummy_vq_model
ldm = LDMSuperResolutionPipeline(unet=unet, vqvae=vqvae, scheduler=scheduler)
ldm.to(device)
ldm.set_progress_bar_config(disable=None)
init_image = self.dummy_image.to(device)
generator = torch.Generator(device=device).manual_seed(0)
image = ldm(image=init_image, generator=generator, num_inference_steps=2, output_type="numpy").images
image_slice = image[0, -3:, -3:, -1]
assert image.shape == (1, 64, 64, 3)
expected_slice = np.array([0.8678, 0.8245, 0.6381, 0.6830, 0.4385, 0.5599, 0.4641, 0.6201, 0.5150])
assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
@unittest.skipIf(torch_device != "cuda", "This test requires a GPU")
def METHOD_NAME(self):
unet = self.dummy_uncond_unet
scheduler = DDIMScheduler()
vqvae = self.dummy_vq_model
# put models in fp16
unet = unet.half()
vqvae = vqvae.half()
ldm = LDMSuperResolutionPipeline(unet=unet, vqvae=vqvae, scheduler=scheduler)
ldm.to(torch_device)
ldm.set_progress_bar_config(disable=None)
init_image = self.dummy_image.to(torch_device)
image = ldm(init_image, num_inference_steps=2, output_type="numpy").images
assert image.shape == (1, 64, 64, 3)
@slow
@require_torch
class LDMSuperResolutionPipelineIntegrationTests(unittest.TestCase):
def test_inference_superresolution(self):
init_image = load_image(
"https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main"
"/vq_diffusion/teddy_bear_pool.png"
)
init_image = init_image.resize((64, 64), resample=PIL_INTERPOLATION["lanczos"])
ldm = LDMSuperResolutionPipeline.from_pretrained("duongna/ldm-super-resolution", device_map="auto")
ldm.set_progress_bar_config(disable=None)
generator = torch.manual_seed(0)
image = ldm(image=init_image, generator=generator, num_inference_steps=20, output_type="numpy").images
image_slice = image[0, -3:, -3:, -1]
assert image.shape == (1, 256, 256, 3)
expected_slice = np.array([0.7644, 0.7679, 0.7642, 0.7633, 0.7666, 0.7560, 0.7425, 0.7257, 0.6907])
assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
|
1,631 |
test no composes html
|
# Copyright 2017-2019 Red Hat, Inc. and others.
#
# This file is part of Bodhi.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""This module contains tests for bodhi.server.services.composes."""
from pyramid import testing
from bodhi.server import models
from bodhi.server.services import composes
from .. import base
try:
# Pyramid >= 2.0
from pyramid.authorization import Allow, Everyone
except ImportError:
# Pyramid < 2.0
from pyramid.security import Allow, Everyone
class TestCompose__init__(base.BasePyTestCase):
"""This class contains tests for the Compose.__init__() method."""
def test___init__(self):
"""Assert the request is stored properly."""
request = testing.DummyRequest()
composes_resource = composes.Composes(request)
assert composes_resource.request is request
class TestCompose__acl__(base.BasePyTestCase):
"""This class contains tests for the Compose.__acl__() method."""
def test___acl__(self):
"""Assert the permissions are correct."""
request = testing.DummyRequest()
composes_resource = composes.Composes(request)
acls = composes_resource.__acl__()
assert acls == [(Allow, Everyone, 'view_composes')]
class TestComposeCollectionGet(base.BasePyTestCase):
"""This class contains tests for the Compose.collection_get() method."""
def test_default_accept(self):
"""Test that an Accept header of */* gets the default JSON response."""
update = models.Update.query.first()
compose = models.Compose(release=update.release, request=update.request)
self.db.add(compose)
self.db.flush()
response = self.app.get('/composes/', status=200, headers={'Accept': '*/*'})
assert response.json == {'composes': [compose.__json__()]}
def METHOD_NAME(self):
"""Assert correct behavior for html interface when there are no composes."""
response = self.app.get('/composes/', status=200, headers={'Accept': 'text/html'})
        # The page should still render and report that there are no active composes
assert 'no active composes' in response
def test_no_composes_json(self):
"""Assert correct behavior for json interface when there are no composes."""
response = self.app.get('/composes/', status=200, headers={'Accept': 'application/json'})
assert response.json == {'composes': []}
def test_with_compose_html(self):
"""Assert correct behavior for the html interface when there is a compose."""
update = models.Update.query.first()
compose = models.Compose(release=update.release, request=update.request)
self.db.add(compose)
self.db.flush()
response = self.app.get('/composes/', status=200, headers={'Accept': 'text/html'})
# The Composes header should still appear in the page
assert '<h3 class="fw-bold m-0">Composes</h3>' in response
assert '/composes/{}/{}'.format(compose.release.name, compose.request.value) in response
assert compose.state.description in response
def test_with_compose_json(self):
"""Assert correct behavior for the json interface when there is a compose."""
update = models.Update.query.first()
compose = models.Compose(release=update.release, request=update.request)
self.db.add(compose)
self.db.flush()
response = self.app.get('/composes/', status=200, headers={'Accept': 'application/json'})
assert response.json == {'composes': [compose.__json__()]}
class TestComposeGet(base.BasePyTestCase):
"""This class contains tests for the Compose.get() method."""
def test_404_compose(self):
"""Assert a 404 error code when there isn't a Compose matching the URL."""
release = models.Release.query.first()
self.app.get('/composes/{}/testing'.format(release.name), status=404,
headers={'Accept': 'text/html'})
def test_404_release(self):
"""Assert a 404 error code when the release component of the URL does not exist."""
self.app.get('/composes/dne/testing', status=404, headers={'Accept': 'text/html'})
def test_404_request(self):
"""Assert a 404 error code when the request component of the URL does not exist."""
release = models.Release.query.first()
self.app.get('/composes/{}/hahahwhatisthis'.format(release.name), status=404,
headers={'Accept': 'text/html'})
def test_with_compose_html(self):
"""Assert correct behavior from the html renderer when there is a compose."""
update = models.Update.query.first()
update.locked = True
compose = models.Compose(release=update.release, request=update.request)
self.db.add(compose)
self.db.flush()
response = self.app.get(
'/composes/{}/{}'.format(compose.release.name, compose.request.value),
status=200, headers={'Accept': 'text/html'})
assert compose.state.description in response
assert '{} {}'.format(compose.release.name, compose.request.value) in response
assert update.get_title(amp=True, nvr=True, beautify=True) in response
def test_with_compose_json(self):
"""Assert correct behavior from the json renderer when there is a compose."""
update = models.Update.query.first()
update.locked = True
compose = models.Compose(release=update.release, request=update.request)
self.db.add(compose)
self.db.flush()
response = self.app.get(
'/composes/{}/{}'.format(compose.release.name, compose.request.value),
status=200, headers={'Accept': 'application/json'})
assert response.json == {'compose': compose.__json__()}
|
1,632 |
dm hash buffer32
|
# Copyright 2020-2023 The Defold Foundation
# Copyright 2014-2020 King
# Copyright 2009-2014 Ragnar Svensson, Christian Murray
# Licensed under the Defold License version 1.0 (the "License"); you may not use
# this file except in compliance with the License.
#
# You may obtain a copy of the License, together with FAQs at
# https://www.defold.com/license
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import ctypes, os, sys, platform
# NOTE: The output here is parsed later on, so don't print invalid code!
if platform.architecture()[0] == '32bit':
raise Exception("32 bit hosts are not supported!")
machine = platform.machine() # x86_64 or arm64
if sys.platform == "darwin":
libname = "libdlib_shared.dylib"
libdir = "lib/%s-macos" % machine
elif sys.platform in ("linux", "linux2"): # support both python3 and python2
libname = "libdlib_shared.so"
libdir = "lib/x86_64-linux"
elif sys.platform == "win32":
libname = "dlib_shared.dll"
libdir = "lib/x86_64-win32"
dlib = None
try:
# First try to load from the build directory
    # This is only used when running unit-tests. A bit budget, but it works.
dlib = ctypes.cdll.LoadLibrary(os.path.join('build/default/src', libname))
except:
pass
if not dlib:
# If not found load from default location in DYNAMO_HOME
dlib = ctypes.cdll.LoadLibrary(os.path.join(os.environ['DYNAMO_HOME'], libdir, libname))
dlib.METHOD_NAME.argtypes = [ctypes.c_char_p, ctypes.c_uint32]
dlib.METHOD_NAME.restype = ctypes.c_uint32
dlib.dmHashBuffer64.argtypes = [ctypes.c_char_p, ctypes.c_uint32]
dlib.dmHashBuffer64.restype = ctypes.c_uint64
# DM_DLLEXPORT int _MaxCompressedSize(int uncompressed_size, int* max_compressed_size)
dlib.LZ4MaxCompressedSize.argtypes = [ctypes.c_int, ctypes.POINTER(ctypes.c_int)]
dlib.LZ4MaxCompressedSize.restype = ctypes.c_int
# DM_DLLEXPORT int _CompressBuffer(const void* buffer, uint32_t buffer_size, void* compressed_buffer, int* compressed_size)
dlib.LZ4CompressBuffer.argtypes = [ctypes.c_void_p, ctypes.c_uint32, ctypes.c_void_p, ctypes.POINTER(ctypes.c_int)]
dlib.LZ4CompressBuffer.restype = ctypes.c_int
# DM_DLLEXPORT int _DecompressBuffer(const void* buffer, uint32_t buffer_size, void* decompressed_buffer, uint32_t max_output, int* decompressed_size)
dlib.LZ4DecompressBuffer.argtypes = [ctypes.c_void_p, ctypes.c_uint32, ctypes.c_void_p, ctypes.c_uint32, ctypes.POINTER(ctypes.c_int)]
dlib.LZ4DecompressBuffer.restype = ctypes.c_int
# int EncryptXTeaCTR(uint8_t* data, uint32_t datalen, const uint8_t* key, uint32_t keylen)
dlib.EncryptXTeaCTR.argtypes = [ctypes.c_void_p, ctypes.c_uint32, ctypes.c_void_p, ctypes.c_uint32]
dlib.EncryptXTeaCTR.restype = ctypes.c_int
# int DecryptXTeaCTR(uint8_t* data, uint32_t datalen, const uint8_t* key, uint32_t keylen)
dlib.DecryptXTeaCTR.argtypes = [ctypes.c_void_p, ctypes.c_uint32, ctypes.c_void_p, ctypes.c_uint32]
dlib.DecryptXTeaCTR.restype = ctypes.c_int
def METHOD_NAME(buf):
return dlib.METHOD_NAME(buf.encode('ascii'), len(buf))
def dmHashBuffer64(buf):
return dlib.dmHashBuffer64(buf.encode('ascii'), len(buf))
def dmLZ4MaxCompressedSize(uncompressed_size):
mcs = ctypes.c_int()
res = dlib.LZ4MaxCompressedSize(uncompressed_size, ctypes.byref(mcs))
if res != 0:
        raise Exception('dlib.LZ4MaxCompressedSize failed! Error code: %d' % res)
return mcs.value
def dmLZ4CompressBuffer(buf, buf_len, max_out_len):
outbuf = ctypes.create_string_buffer(max_out_len)
outlen = ctypes.c_int()
res = dlib.LZ4CompressBuffer(buf, buf_len, outbuf, ctypes.byref(outlen))
if res != 0:
        raise Exception('dlib.LZ4CompressBuffer failed! Error code: %d' % res)
return ctypes.string_at(outbuf.raw, outlen.value)
def dmLZ4DecompressBuffer(buf, max_out_len):
outbuf = ctypes.create_string_buffer(max_out_len)
outlen = ctypes.c_int()
res = dlib.LZ4DecompressBuffer(buf, len(buf), outbuf, max_out_len, ctypes.byref(outlen))
if res != 0:
        raise Exception('dlib.LZ4DecompressBuffer failed! Error code: %d' % res)
return ctypes.string_at(outbuf.raw, outlen.value)
def dmEncryptXTeaCTR(buf, key):
outbuf = ctypes.create_string_buffer(buf)
res = dlib.EncryptXTeaCTR(outbuf, len(outbuf), key, len(key))
if res != 0:
        raise Exception('dlib.EncryptXTeaCTR failed! Error code: %d' % res)
return ctypes.string_at(outbuf.raw, len(buf))
def dmDecryptXTeaCTR(buf, key):
outbuf = ctypes.create_string_buffer(buf)
res = dlib.DecryptXTeaCTR(outbuf, len(outbuf), key, len(key))
if res != 0:
        raise Exception('dlib.DecryptXTeaCTR failed! Error code: %d' % res)
return ctypes.string_at(outbuf.raw, len(buf))
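# Minimal usage sketch (illustrative only; the payload and key below are arbitrary
# sample values, not part of the Defold tooling). Guarded so that importing this
# module stays free of side effects.
if __name__ == '__main__':
    payload = b'example payload for the dlib wrappers' * 4
    max_size = dmLZ4MaxCompressedSize(len(payload))
    compressed = dmLZ4CompressBuffer(payload, len(payload), max_size)
    assert dmLZ4DecompressBuffer(compressed, len(payload)) == payload
    key = b'0123456789abcdef'
    assert dmDecryptXTeaCTR(dmEncryptXTeaCTR(payload, key), key) == payload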
|
1,633 |
qtapp loop nonblocking
|
# -*- coding: utf-8 -*-
import sys
import utool as ut
from wbia.guitool.__PYQT__ import GUITOOL_PYQT_VERSION # NOQA
from wbia.guitool.__PYQT__ import QtWidgets # NOQA
from wbia.guitool.__PYQT__ import QtCore
ut.noinject(__name__, '[guitool.main]', DEBUG=False)
IS_ROOT_WINDOW = False
QAPP = None
VERBOSE = '--verbose' in sys.argv
QUIET = '--quiet' in sys.argv
def get_qtapp():
global QAPP
return QAPP
class GuitoolApplication(QtWidgets.QApplication):
"""
http://codeprogress.com/python/libraries/pyqt/showPyQTExample.php?index=378&key=QApplicationKeyPressGlobally
"""
def __init__(self, args):
super(GuitoolApplication, self).__init__(args)
self.log_keys = False
self.keylog = []
def notify(self, receiver, event):
if event.type() == QtCore.QEvent.KeyPress:
if self.log_keys:
key = event.text()
print('key = {!r}'.format(key))
self.keylog.append(key)
# QtWidgets.QMessageBox.information(
# None, "Received Key Press Event!!", "You Pressed: " + event.text())
# Call Base Class Method to Continue Normal Event Processing
return super(GuitoolApplication, self).notify(receiver, event)
def start_keylog(self):
self.log_keys = True
def ensure_qtapp():
global IS_ROOT_WINDOW
global QAPP
if QAPP is not None:
return QAPP, IS_ROOT_WINDOW
parent_qapp = QtCore.QCoreApplication.instance()
if parent_qapp is None: # if not in qtconsole
if not QUIET:
print('[guitool] Init new QApplication')
QAPP = GuitoolApplication(sys.argv)
if GUITOOL_PYQT_VERSION == 4:
QAPP.setStyle('plastique')
else:
# http://stackoverflow.com/questions/38154702/how-to-install-new-qstyle-for-pyqt
# QAPP.setStyle('Windows')
# QAPP.setStyle('WindowsXP')
# QAPP.setStyle('WindowsVista')
# available_styles = QtWidgets.QStyleFactory().keys()
# print('available_styles = %r' % (available_styles,))
# QAPP.setStyle('Fusion')
QAPP.setStyle('GTK+')
# QAPP.setStyle('windows')
# QAPP.setStyle('cleanlooks')
# QAPP.setStyle('motif')
# QAPP.setDesktopSettingsAware(True)
# QAPP.setStyle('cde')
# "windows", "motif", "cde", "plastique" and "cleanlooks" and depending on the platform, "windowsxp", "windowsvista" and "macintosh"
# print('QAPP = %r' % QAPP)
assert QAPP is not None
IS_ROOT_WINDOW = True
else:
if not QUIET:
print('[guitool] Using parent QApplication')
QAPP = parent_qapp
IS_ROOT_WINDOW = False
return QAPP, IS_ROOT_WINDOW
ensure_qapp = ensure_qtapp
def activate_qwindow(qwin):
global QAPP
if not QUIET:
print('[guitool] qapp.setActiveWindow(qwin)')
qwin.show()
QAPP.setActiveWindow(qwin)
def METHOD_NAME(qwin=None, **kwargs):
"""
Fixme:
        In order to have a non-blocking qt application, the app must have been started
with IPython.lib.inputhook.enable_gui
import IPython.lib.inputhook
IPython.lib.inputhook.enable_gui('qt4')
Actually lib.inputhook is depricated
Maybe IPython.terminal.pt_inputhooks
import IPython.terminal.pt_inputhooks
inputhook = IPython.terminal.pt_inputhooks.get_inputhook_func('qt4')
"""
global QAPP
# from IPython.lib.inputhook import enable_qt4
import IPython.lib.guisupport
if not QUIET:
print('[guitool] Starting ipython qt hook')
# enable_qt4()
if GUITOOL_PYQT_VERSION == 4:
IPython.lib.guisupport.start_event_loop_qt4(QAPP)
else:
IPython.lib.guisupport.start_event_loop_qt5(QAPP)
# if '__PYQT__' in sys.modules:
# from wbia.guitool.__PYQT__ import QtCore
# from IPython.lib.inputhook import enable_qt4
# from IPython.lib.guisupport import start_event_loop_qt4
# qapp = QtCore.QCoreApplication.instance()
# # qapp.exec_()
# print('[ut.dbg] Starting ipython qt4 hook')
# enable_qt4()
# start_event_loop_qt4(qapp)
def remove_pyqt_input_hook():
QtCore.pyqtRemoveInputHook()
def qtapp_loop(
qwin=None,
ipy=False,
enable_activate_qwin=True,
frequency=420,
init_signals=True,
**kwargs,
):
r"""
Args:
qwin (None): (default = None)
ipy (bool): set to True if running with IPython (default = False)
enable_activate_qwin (bool): (default = True)
frequency (int): frequency to ping python interpreter (default = 420)
init_signals (bool): if False, handle terminal signals yourself (default = True)
CommandLine:
python -m wbia.guitool.guitool_main --test-qtapp_loop
"""
global QAPP
# if not QUIET and VERBOSE:
if not QUIET:
print('[guitool.qtapp_loop()] ENTERING')
print('[guitool.qtapp_loop()] starting qt app loop: qwin={!r}'.format(qwin))
if enable_activate_qwin and (qwin is not None):
activate_qwindow(qwin)
qwin.timer = ping_python_interpreter(frequency=frequency)
elif qwin is None:
print('[guitool] Warning: need to specify qwin for ctrl+c to work')
if init_signals:
# allow ctrl+c to exit the program
_init_signals()
if IS_ROOT_WINDOW:
if not QUIET:
print('[guitool.qtapp_loop()] qapp.exec_() # runing main loop')
if not ipy:
# old_excepthook = sys.excepthook
# def qt_excepthook(type_, value, traceback):
# print('QT EXCEPTION HOOK')
# old_excepthook(type_, value, traceback)
# #QAPP.quit()
# exit_application()
# sys.exit(1)
# sys.excepthook = qt_excepthook
try:
retcode = QAPP.exec_()
print('QAPP retcode = {!r}'.format(retcode))
QAPP.exit(retcode)
except Exception as ex:
print('QException: %r' % ex)
raise
else:
if not QUIET:
print('[guitool.qtapp_loop()] not execing')
if not QUIET:
print('[guitool.qtapp_loop()] EXITING')
def ping_python_interpreter(frequency=420): # 4200):
"""Create a QTimer which lets the python catch ctrl+c"""
if not QUIET and VERBOSE:
print('[guitool] pinging python interpreter for ctrl+c freq=%r' % frequency)
timer = QtCore.QTimer()
def ping_func():
# print('lub dub')
return None
timer.ping_func = ping_func
timer.timeout.connect(timer.ping_func)
timer.start(frequency)
return timer
# @atexit.register
def exit_application():
if ut.NOT_QUIET:
print('[guitool] exiting application')
QtWidgets.qApp.quit()
def _on_ctrl_c(signal, frame):
print('[guitool.guitool_main] Caught ctrl+c. sys.exit(0)...')
sys.exit(0)
# -----------------------
# private init functions
def _init_signals():
import signal
# print('initializing qt ctrl+c signal')
signal.signal(signal.SIGINT, _on_ctrl_c)
|
1,634 |
init vars
|
#!/usr/bin/env python
#============================================================================
# Copyright (C) Microsoft Corporation, All rights reserved.
#============================================================================
import os
import imp
import re
import codecs
import shutil
import string
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
conf_path = '/etc/opt/microsoft/omsagent/conf/omsagent.d/customlog.conf'
def METHOD_NAME(CustomLogObjects):
new_customlogs = []
if CustomLogObjects is not None :
for customlog in CustomLogObjects:
if customlog['LogName'].value is not None:
customlog['LogName']=customlog['LogName'].value
new_filepaths = []
if customlog['FilePath'] is not None and len(customlog['FilePath'].value) > 0:
for filepath in customlog['FilePath'].value:
if filepath is not None and len(filepath) > 0:
new_filepaths.append(filepath)
if len(new_filepaths) > 0:
customlog['FilePath'] = new_filepaths
new_customlogs.append(customlog)
CustomLogObjects = new_customlogs
def Set_Marshall(Name, EnableCustomLogConfiguration = False, CustomLogObjects = None):
METHOD_NAME(CustomLogObjects)
Set(EnableCustomLogConfiguration, CustomLogObjects)
return [0]
def Test_Marshall(Name, EnableCustomLogConfiguration = False, CustomLogObjects = None):
METHOD_NAME(CustomLogObjects)
return Test(EnableCustomLogConfiguration, CustomLogObjects)
def Get_Marshall(Name, EnableCustomLogConfiguration = False, CustomLogObjects = None):
arg_names = list(locals().keys())
METHOD_NAME(CustomLogObjects)
CurrentCustomLogObjects = Get(EnableCustomLogConfiguration, CustomLogObjects)
Name = protocol.MI_String(Name)
EnableCustomLogConfiguration = protocol.MI_Boolean(EnableCustomLogConfiguration)
for customlog in CurrentCustomLogObjects:
customlog['LogName'] = protocol.MI_String(customlog['LogName'])
if customlog['FilePath'] is not None and len(customlog['FilePath']):
customlog['FilePath'] = protocol.MI_StringA(customlog['FilePath'])
CustomLogObjects = protocol.MI_InstanceA(CurrentCustomLogObjects)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return 0, retd
def Set(EnableCustomLogConfiguration, CustomLogObjects):
if Test(EnableCustomLogConfiguration, CustomLogObjects) == [0]:
return [0]
UpdateConf(CustomLogObjects)
return [0]
def Test(EnableCustomLogConfiguration, CustomLogObjects):
if EnableCustomLogConfiguration:
CurrentCustomLogObjects = ReadConf()
if CurrentCustomLogObjects is None and CustomLogObjects is None:
return [0]
elif CurrentCustomLogObjects is None or CustomLogObjects is None:
return [-1]
sorted(CustomLogObjects, key = lambda p:p['LogName'])
for customlog in CustomLogObjects:
customlog['FilePath'].sort()
sorted(CurrentCustomLogObjects, key = lambda p:p['LogName'])
for customlog in CurrentCustomLogObjects:
customlog['FilePath'].sort()
if CustomLogObjects != CurrentCustomLogObjects:
return [-1]
return [0]
def Get(EnableCustomLogConfiguration, CustomLogObjects):
CurrentCustomLogObjects = ReadConf()
return CurrentCustomLogObjects
def ReadConf():
if not os.path.isfile(conf_path):
return [];
txt = codecs.open(conf_path, 'r', 'utf8').read()
customlog_src_srch_str=r'\n<source>\n type tail.*?path (.*?)\n.*?pos_file /var/opt/microsoft/omsagent/state/(.*?)\.pos.*?tag oms\.blob\.CustomLog\.(.*?)\.\*.*?format none.*?</source>\n'
customlog_src_srch=re.compile(customlog_src_srch_str,re.M|re.S)
new_customlogs=[]
sources=customlog_src_srch.findall(txt)
for source in sources:
s_filepaths=[]
if len(source[0]):
s_filepaths = source[0].split(',')
logname=source[1]
new_customlogs.append({'FilePath':s_filepaths,'LogName':logname})
return new_customlogs
def UpdateConf(CustomLogObjects):
header = '# this file is configured by the OMS service\n'
new_source = ''
if CustomLogObjects is not None:
for customlog in CustomLogObjects:
logname = customlog['LogName']
filepaths = ','.join(customlog['FilePath'])
new_source+='\n<source>\n type tail\n path ' + filepaths + '\n pos_file /var/opt/microsoft/omsagent/state/' + logname + '.pos\n read_from_head false\n tag oms.blob.CustomLog.' + logname + '.*\n format none\n</source>\n'
txt = header + new_source
if os.path.isfile(conf_path):
shutil.copy2(conf_path, conf_path + '.bak')
codecs.open(conf_path, 'w', 'utf8').write(txt)
os.system('sudo /opt/microsoft/omsagent/bin/service_control restart')
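# For illustration (hypothetical values, not taken from a real workspace): a custom
# log object with LogName 'MyApp_CL' and FilePath ['/var/log/myapp.log'] makes
# UpdateConf emit a source block equivalent to:
#
#   <source>
#     type tail
#     path /var/log/myapp.log
#     pos_file /var/opt/microsoft/omsagent/state/MyApp_CL.pos
#     read_from_head false
#     tag oms.blob.CustomLog.MyApp_CL.*
#     format none
#   </source>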
|
1,635 |
test service enable
|
import pytest
import salt.utils.path
import salt.utils.platform
import salt.utils.systemd
from tests.support.case import ModuleCase
@pytest.mark.destructive_test
@pytest.mark.windows_whitelisted
class ServiceModuleTest(ModuleCase):
"""
Module testing the service module
"""
def setUp(self):
self.service_name = "cron"
cmd_name = "crontab"
os_family = self.run_function("grains.get", ["os_family"])
os_release = self.run_function("grains.get", ["osrelease"])
if os_family == "RedHat":
if os_release[0] == "7":
self.skipTest(
"Disabled on CentOS 7 until we can fix SSH connection issues."
)
self.service_name = "crond"
elif os_family == "Arch":
self.service_name = "sshd"
cmd_name = "systemctl"
elif os_family == "NILinuxRT":
self.service_name = "syslog"
cmd_name = "syslog-ng"
elif os_family == "MacOS":
self.service_name = "com.apple.AirPlayXPCHelper"
elif salt.utils.platform.is_windows():
self.service_name = "Spooler"
self.pre_srv_status = self.run_function("service.status", [self.service_name])
self.pre_srv_enabled = (
True
if self.service_name in self.run_function("service.get_enabled")
else False
)
if (
salt.utils.path.which(cmd_name) is None
and not salt.utils.platform.is_windows()
):
self.skipTest("{} is not installed".format(cmd_name))
def tearDown(self):
post_srv_status = self.run_function("service.status", [self.service_name])
post_srv_enabled = (
True
if self.service_name in self.run_function("service.get_enabled")
else False
)
if post_srv_status != self.pre_srv_status:
if self.pre_srv_status:
self.run_function("service.enable", [self.service_name])
else:
self.run_function("service.disable", [self.service_name])
if post_srv_enabled != self.pre_srv_enabled:
if self.pre_srv_enabled:
self.run_function("service.enable", [self.service_name])
else:
self.run_function("service.disable", [self.service_name])
del self.service_name
@pytest.mark.flaky(max_runs=4)
@pytest.mark.slow_test
def test_service_status_running(self):
"""
test service.status execution module
when service is running
"""
self.run_function("service.start", [self.service_name])
check_service = self.run_function("service.status", [self.service_name])
self.assertTrue(check_service)
@pytest.mark.slow_test
def test_service_status_dead(self):
"""
test service.status execution module
when service is dead
"""
self.run_function("service.stop", [self.service_name])
check_service = self.run_function("service.status", [self.service_name])
self.assertFalse(check_service)
@pytest.mark.slow_test
def test_service_restart(self):
"""
test service.restart
"""
self.assertTrue(self.run_function("service.restart", [self.service_name]))
@pytest.mark.slow_test
def METHOD_NAME(self):
"""
test service.get_enabled and service.enable module
"""
# disable service before test
self.assertTrue(self.run_function("service.disable", [self.service_name]))
self.assertTrue(self.run_function("service.enable", [self.service_name]))
self.assertIn(self.service_name, self.run_function("service.get_enabled"))
@pytest.mark.slow_test
def test_service_disable(self):
"""
test service.get_disabled and service.disable module
"""
# enable service before test
self.assertTrue(self.run_function("service.enable", [self.service_name]))
self.assertTrue(self.run_function("service.disable", [self.service_name]))
if salt.utils.platform.is_darwin():
self.assertTrue(self.run_function("service.disabled", [self.service_name]))
else:
self.assertIn(self.service_name, self.run_function("service.get_disabled"))
@pytest.mark.slow_test
def test_service_disable_doesnot_exist(self):
"""
test service.get_disabled and service.disable module
when service name does not exist
"""
# enable service before test
srv_name = "doesnotexist"
enable = self.run_function("service.enable", [srv_name])
systemd = salt.utils.systemd.booted()
# check service was not enabled
try:
self.assertFalse(enable)
except AssertionError:
self.assertIn("ERROR", enable)
# check service was not disabled
if (
tuple(
self.run_function("grains.item", ["osrelease_info"])["osrelease_info"]
)
== (14, 0o4)
and not systemd
):
            # currently upstart does not have a mechanism to report whether disabling a service fails when the service does not exist
self.assertTrue(self.run_function("service.disable", [srv_name]))
elif (
self.run_function("grains.item", ["os"])["os"] == "Debian"
and self.run_function("grains.item", ["osmajorrelease"])["osmajorrelease"]
< 9
and systemd
):
# currently disabling a service via systemd that does not exist
# on Debian 8 results in a True return code
self.assertTrue(self.run_function("service.disable", [srv_name]))
else:
try:
disable = self.run_function("service.disable", [srv_name])
self.assertFalse(disable)
except AssertionError:
self.assertTrue("error" in disable.lower())
if salt.utils.platform.is_darwin():
self.assertEqual(
self.run_function("service.disabled", [srv_name]),
"ERROR: Service not found: {}".format(srv_name),
)
else:
self.assertNotIn(srv_name, self.run_function("service.get_disabled"))
@pytest.mark.skip_unless_on_windows
@pytest.mark.slow_test
def test_service_get_service_name(self):
"""
test service.get_service_name
"""
ret = self.run_function("service.get_service_name")
self.assertIn(self.service_name, ret.values())
|
1,636 |
test fetch guild
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Nekokatt
# Copyright (c) 2021-present davfsa
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import mock
import pytest
from hikari import channels
from hikari.events import typing_events
from tests.hikari import hikari_test_helpers
class TestTypingEvent:
@pytest.fixture()
def event(self):
cls = hikari_test_helpers.mock_class_namespace(
typing_events.TypingEvent, channel_id=123, user_id=456, timestamp=object(), shard=object()
)
return cls()
def test_get_user_when_no_cache(self, event):
event = hikari_test_helpers.mock_class_namespace(typing_events.TypingEvent, app=None)()
assert event.get_user() is None
def test_get_user(self, event):
assert event.get_user() is event.app.cache.get_user.return_value
def test_trigger_typing(self, event):
event.app.rest.trigger_typing = mock.Mock()
result = event.trigger_typing()
event.app.rest.trigger_typing.assert_called_once_with(123)
assert result is event.app.rest.trigger_typing.return_value
class TestGuildTypingEvent:
@pytest.fixture()
def event(self):
cls = hikari_test_helpers.mock_class_namespace(typing_events.GuildTypingEvent)
return cls(
channel_id=123,
timestamp=object(),
shard=object(),
guild_id=789,
member=mock.Mock(id=456, app=mock.Mock(rest=mock.AsyncMock())),
)
def test_app_property(self, event):
assert event.app is event.member.app
def test_get_channel_when_no_cache(self):
event = hikari_test_helpers.mock_class_namespace(typing_events.GuildTypingEvent, app=None, init_=False)()
assert event.get_channel() is None
@pytest.mark.parametrize("guild_channel_impl", [channels.GuildNewsChannel, channels.GuildTextChannel])
def test_get_channel(self, event, guild_channel_impl):
event.app.cache.get_guild_channel = mock.Mock(return_value=mock.Mock(spec_set=guild_channel_impl))
result = event.get_channel()
assert result is event.app.cache.get_guild_channel.return_value
event.app.cache.get_guild_channel.assert_called_once_with(123)
@pytest.mark.asyncio()
async def test_get_guild_when_no_cache(self):
event = hikari_test_helpers.mock_class_namespace(typing_events.GuildTypingEvent, app=None, init_=False)()
assert event.get_guild() is None
def test_get_guild_when_available(self, event):
result = event.get_guild()
assert result is event.app.cache.get_available_guild.return_value
event.app.cache.get_available_guild.assert_called_once_with(789)
event.app.cache.get_unavailable_guild.assert_not_called()
def test_get_guild_when_unavailable(self, event):
event.app.cache.get_available_guild.return_value = None
result = event.get_guild()
assert result is event.app.cache.get_unavailable_guild.return_value
event.app.cache.get_unavailable_guild.assert_called_once_with(789)
event.app.cache.get_available_guild.assert_called_once_with(789)
def test_user_id(self, event):
assert event.user_id == event.member.id
assert event.user_id == 456
@pytest.mark.asyncio()
@pytest.mark.parametrize("guild_channel_impl", [channels.GuildNewsChannel, channels.GuildTextChannel])
async def test_fetch_channel(self, event, guild_channel_impl):
event.app.rest.fetch_channel = mock.AsyncMock(return_value=mock.Mock(spec_set=guild_channel_impl))
await event.fetch_channel()
event.app.rest.fetch_channel.assert_awaited_once_with(123)
@pytest.mark.asyncio()
async def METHOD_NAME(self, event):
await event.fetch_guild()
event.app.rest.fetch_guild.assert_awaited_once_with(789)
@pytest.mark.asyncio()
async def test_fetch_guild_preview(self, event):
await event.fetch_guild_preview()
event.app.rest.fetch_guild_preview.assert_awaited_once_with(789)
@pytest.mark.asyncio()
async def test_fetch_member(self, event):
await event.fetch_member()
event.app.rest.fetch_member.assert_awaited_once_with(789, 456)
@pytest.mark.asyncio()
class TestDMTypingEvent:
@pytest.fixture()
def event(self):
cls = hikari_test_helpers.mock_class_namespace(typing_events.DMTypingEvent)
return cls(
channel_id=123, timestamp=object(), shard=object(), app=mock.Mock(rest=mock.AsyncMock()), user_id=456
)
async def test_fetch_channel(self, event):
event.app.rest.fetch_channel = mock.AsyncMock(return_value=mock.Mock(spec_set=channels.DMChannel))
await event.fetch_channel()
event.app.rest.fetch_channel.assert_awaited_once_with(123)
async def test_fetch_user(self, event):
await event.fetch_user()
event.app.rest.fetch_user.assert_awaited_once_with(456)
|
1,637 |
process project updates
|
"""
This module contains signal handler for region outbox messages.
These receivers are triggered on the region silo as outbox messages
are drained. Receivers are expected to make local state changes (tombstones)
and perform RPC calls to propagate changes to Control Silo.
"""
from __future__ import annotations
from typing import Any
from django.dispatch import receiver
from sentry.models import (
Organization,
OrganizationMember,
OutboxCategory,
Project,
process_region_outbox,
)
from sentry.models.team import Team
from sentry.receivers.outbox import maybe_process_tombstone
from sentry.services.hybrid_cloud.auth import auth_service
from sentry.services.hybrid_cloud.identity import identity_service
from sentry.services.hybrid_cloud.log import AuditLogEvent, UserIpEvent, log_rpc_service
from sentry.services.hybrid_cloud.organization_mapping import organization_mapping_service
from sentry.services.hybrid_cloud.organization_mapping.serial import (
update_organization_mapping_from_instance,
)
from sentry.services.hybrid_cloud.organizationmember_mapping import (
RpcOrganizationMemberMappingUpdate,
organizationmember_mapping_service,
)
from sentry.services.hybrid_cloud.orgauthtoken import orgauthtoken_rpc_service
from sentry.types.region import get_local_region
@receiver(process_region_outbox, sender=OutboxCategory.AUDIT_LOG_EVENT)
def process_audit_log_event(payload: Any, **kwds: Any):
if payload is not None:
log_rpc_service.record_audit_log(event=AuditLogEvent(**payload))
@receiver(process_region_outbox, sender=OutboxCategory.ORGAUTHTOKEN_UPDATE)
def process_orgauthtoken_update(payload: Any, **kwds: Any):
if payload is not None:
orgauthtoken_rpc_service.update_orgauthtoken(**payload)
@receiver(process_region_outbox, sender=OutboxCategory.USER_IP_EVENT)
def process_user_ip_event(payload: Any, **kwds: Any):
if payload is not None:
log_rpc_service.record_user_ip(event=UserIpEvent(**payload))
@receiver(process_region_outbox, sender=OutboxCategory.ORGANIZATION_MEMBER_UPDATE)
def process_organization_member_updates(
object_identifier: int, payload: Any, shard_identifier: int, **kwds: Any
):
if (org_member := OrganizationMember.objects.filter(id=object_identifier).last()) is None:
# Delete all identities that may have been associated. This is an implicit cascade.
if payload and payload.get("user_id") is not None:
identity_service.delete_identities(
user_id=payload["user_id"], organization_id=shard_identifier
)
organizationmember_mapping_service.delete(
organizationmember_id=object_identifier,
organization_id=shard_identifier,
)
return
rpc_org_member_update = RpcOrganizationMemberMappingUpdate.from_orm(org_member)
organizationmember_mapping_service.upsert_mapping(
organizationmember_id=org_member.id,
organization_id=shard_identifier,
mapping=rpc_org_member_update,
)
@receiver(process_region_outbox, sender=OutboxCategory.TEAM_UPDATE)
def process_team_updates(
object_identifier: int, payload: Any, shard_identifier: int, **kwargs: Any
):
maybe_process_tombstone(Team, object_identifier)
@receiver(process_region_outbox, sender=OutboxCategory.ORGANIZATION_UPDATE)
def process_organization_updates(object_identifier: int, **kwds: Any):
if (org := maybe_process_tombstone(Organization, object_identifier)) is None:
organization_mapping_service.delete(organization_id=object_identifier)
return
update = update_organization_mapping_from_instance(org, get_local_region())
organization_mapping_service.upsert(organization_id=org.id, update=update)
@receiver(process_region_outbox, sender=OutboxCategory.PROJECT_UPDATE)
def METHOD_NAME(object_identifier: int, **kwds: Any):
if (proj := maybe_process_tombstone(Project, object_identifier)) is None:
return
    proj  # no-op; nothing further to do for project updates beyond the tombstone handling above
@receiver(process_region_outbox, sender=OutboxCategory.ORGANIZATION_MAPPING_CUSTOMER_ID_UPDATE)
def process_organization_mapping_customer_id_update(
object_identifier: int, payload: Any, **kwds: Any
):
if (org := maybe_process_tombstone(Organization, object_identifier)) is None:
return
if payload and "customer_id" in payload:
organization_mapping_service.update(
organization_id=org.id, update={"customer_id": payload["customer_id"]}
)
@receiver(process_region_outbox, sender=OutboxCategory.DISABLE_AUTH_PROVIDER)
def process_disable_auth_provider(object_identifier: int, **kwds: Any):
auth_service.disable_provider(provider_id=object_identifier)
|
1,638 |
get async read session
|
""" Setup database to perform CRUD transactions
"""
import logging
from typing import Generator, AsyncGenerator
from contextlib import contextmanager, asynccontextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from .. import config
logger = logging.getLogger(__name__)
write_user = config.get('POSTGRES_WRITE_USER', 'wps')
read_user = config.get('POSTGRES_READ_USER', 'wpsread')
postgres_password = config.get('POSTGRES_PASSWORD', 'wps')
postgres_write_host = config.get('POSTGRES_WRITE_HOST', 'localhost')
postgres_read_host = config.get('POSTGRES_READ_HOST', 'localhost')
postgres_port = config.get('POSTGRES_PORT', '5432')
postgres_database = config.get('POSTGRES_DATABASE', 'wps')
DB_WRITE_STRING = f'postgresql://{write_user}:{postgres_password}@{postgres_write_host}:{postgres_port}/{postgres_database}'
DB_READ_STRING = f'postgresql://{read_user}:{postgres_password}@{postgres_read_host}:{postgres_port}/{postgres_database}'
ASYNC_DB_READ_STRING = f'postgresql+asyncpg://{read_user}:{postgres_password}@{postgres_read_host}:{postgres_port}/{postgres_database}'
ASYNC_DB_WRITE_STRING = f'postgresql+asyncpg://{write_user}:{postgres_password}@{postgres_write_host}:{postgres_port}/{postgres_database}'
# connect to database - defaulting to always use utc timezone
connect_args = {'options': '-c timezone=utc'}
_write_engine = create_engine(DB_WRITE_STRING, connect_args=connect_args)
# use pre-ping on read, as connections are quite often stale due to how few users we have at the moment.
_read_engine = create_engine(
DB_READ_STRING,
pool_size=int(config.get('POSTGRES_POOL_SIZE', 5)),
max_overflow=int(config.get('POSTGRES_MAX_OVERFLOW', 10)),
pool_pre_ping=True, connect_args=connect_args)
# TODO: figure out connection pooling? pre-ping etc.?
_async_read_engine = create_async_engine(ASYNC_DB_READ_STRING, connect_args={"timeout": 30})
_async_write_engine = create_async_engine(ASYNC_DB_WRITE_STRING)
# bind session to database
# avoid using these variables anywhere outside of context manager - if
# sessions are not closed, it will result in the api running out of
# connections and becoming non-responsive.
_write_session = sessionmaker(
autocommit=False, autoflush=False, bind=_write_engine)
_read_session = sessionmaker(
autocommit=False, autoflush=False, bind=_read_engine)
_async_read_sessionmaker = sessionmaker(
autocommit=False, autoflush=False, bind=_async_read_engine, class_=AsyncSession)
_async_write_sessionmaker = sessionmaker(
autocommit=False, autoflush=False, bind=_async_write_engine, class_=AsyncSession)
def _get_write_session() -> Session:
""" abstraction used for mocking out a write session """
return _write_session()
def _get_read_session() -> Session:
""" abstraction used for mocking out a read session """
return _read_session()
def METHOD_NAME() -> AsyncSession:
""" abstraction used for mocking out a read session """
return _async_read_sessionmaker()
def _get_async_write_session() -> AsyncSession:
""" abstraction used for mocking out a read session """
return _async_write_sessionmaker()
@asynccontextmanager
async def get_async_read_session_scope() -> AsyncGenerator[AsyncSession, None]:
""" Return a session scope for async read session """
session = METHOD_NAME()
try:
yield session
finally:
await session.close()
@asynccontextmanager
async def get_async_write_session_scope() -> AsyncGenerator[AsyncSession, None]:
""" Return a session scope for async read session """
session = _get_async_write_session()
try:
yield session
await session.commit()
except:
await session.rollback()
raise
finally:
await session.close()
@contextmanager
def get_read_session_scope() -> Generator[Session, None, None]:
"""Provide a transactional scope around a series of operations.
THIS METHOD IS DEPRECATED! PLEASE MOVE TO USING: get_async_read_session_scope
"""
session = _get_read_session()
try:
yield session
finally:
logger.info('session closed by context manager')
session.close()
@contextmanager
def get_write_session_scope() -> Generator[Session, None, None]:
"""Provide a transactional scope around a series of operations.
THIS METHOD IS DEPRECATED! PLEASE MOVE TO USING: get_async_write_session_scope
"""
session = _get_write_session()
try:
yield session
session.commit()
except:
session.rollback()
raise
finally:
session.close()
|
1,639 |
test auto encoder hp struct
|
# Copyright 2023 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import absltest
from flax.core import Scope, Array, init, nn, unfreeze
import jax
from jax import numpy as jnp, random
from flax import struct
from jax.scipy.linalg import expm
from dataclasses import dataclass, InitVar
from typing import Any, Callable, Sequence, NamedTuple, Any
def mlp(scope: Scope, x: Array, hidden: int, out: int):
x = scope.child(nn.dense, 'hidden')(x, hidden)
x = nn.relu(x)
return scope.child(nn.dense, 'out')(x, out)
@dataclass
class AutoEncoder:
latents: int
features: int
hidden: int
def __call__(self, scope, x):
z = self.encode(scope, x)
return self.decode(scope, z)
def encode(self, scope, x):
return scope.child(mlp, 'encoder')(x, self.hidden, self.latents)
def decode(self, scope, z):
return scope.child(mlp, 'decoder')(z, self.hidden, self.features)
def module_method(fn, name=None):
if name is None:
name = fn.__name__ if hasattr(fn, '__name__') else None
def wrapper(self, *args, **kwargs):
scope = self.scope.rewound()
mod_fn = lambda scope: fn(self, scope, *args, **kwargs)
return scope.child(mod_fn, name)()
return wrapper
@dataclass
class AutoEncoder2:
scope: Scope
latents: int
features: int
hidden: int
def __call__(self, x):
z = self.encode(x)
return self.decode(z)
@module_method
def encode(self, scope, x):
return mlp(scope, x, self.hidden, self.latents)
@module_method
def decode(self, scope, z):
return mlp(scope, z, self.hidden, self.features)
@dataclass
class AutoEncoder3:
encode: Callable
decode: Callable
@staticmethod
def create(scope, hidden: int, latents: int, features: int):
enc = scope.child(mlp, 'encode', hidden=hidden, out=latents)
dec = scope.child(mlp, 'decode', hidden=hidden, out=features)
return AutoEncoder3(enc, dec)
def __call__(self, x):
z = self.encode(x)
return self.decode(z)
class AutoEncoderTest(absltest.TestCase):
def METHOD_NAME(self):
ae = AutoEncoder(latents=2, features=4, hidden=3)
x = jnp.ones((1, 4))
x_r, variables = init(ae)(random.PRNGKey(0), x)
self.assertEqual(x.shape, x_r.shape)
variable_shapes = unfreeze(
jax.tree_util.tree_map(jnp.shape, variables['params'])
)
self.assertEqual(
variable_shapes,
{
'encoder': {
'hidden': {'kernel': (4, 3), 'bias': (3,)},
'out': {'kernel': (3, 2), 'bias': (2,)},
},
'decoder': {
'hidden': {'kernel': (2, 3), 'bias': (3,)},
'out': {'kernel': (3, 4), 'bias': (4,)},
},
},
)
def test_auto_encoder_with_scope(self):
ae = lambda scope, x: AutoEncoder2(scope, latents=2, features=4, hidden=3)(
x
)
x = jnp.ones((1, 4))
x_r, variables = init(ae)(random.PRNGKey(0), x)
self.assertEqual(x.shape, x_r.shape)
variable_shapes = unfreeze(
jax.tree_util.tree_map(jnp.shape, variables['params'])
)
self.assertEqual(
variable_shapes,
{
'encode': {
'hidden': {'kernel': (4, 3), 'bias': (3,)},
'out': {'kernel': (3, 2), 'bias': (2,)},
},
'decode': {
'hidden': {'kernel': (2, 3), 'bias': (3,)},
'out': {'kernel': (3, 4), 'bias': (4,)},
},
},
)
def test_auto_encoder_bind_method(self):
ae = lambda scope, x: AutoEncoder3.create(
scope, latents=2, features=4, hidden=3
)(x)
x = jnp.ones((1, 4))
x_r, variables = init(ae)(random.PRNGKey(0), x)
self.assertEqual(x.shape, x_r.shape)
variable_shapes = unfreeze(
jax.tree_util.tree_map(jnp.shape, variables['params'])
)
self.assertEqual(
variable_shapes,
{
'encode': {
'hidden': {'kernel': (4, 3), 'bias': (3,)},
'out': {'kernel': (3, 2), 'bias': (2,)},
},
'decode': {
'hidden': {'kernel': (2, 3), 'bias': (3,)},
'out': {'kernel': (3, 4), 'bias': (4,)},
},
},
)
if __name__ == '__main__':
absltest.main()
|
1,640 |
set override
|
import contextlib
import functools
import inspect
import sys
from collections import defaultdict
from typing import DefaultDict, List, Optional, Tuple
class GeneratorStats:
_warn_cache: DefaultDict[str, int] = defaultdict(int)
_error_cache: DefaultDict[str, int] = defaultdict(int)
_traces: List[Tuple[Optional[str], Optional[str], str]] = []
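    # each entry in _traces is a (sourcefile, lineno, breadcrumb name) tuple; see add_trace_message()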
_trace_lineno = False
_blue = ''
_red = ''
_yellow = ''
_clear = ''
def __getattr__(self, name):
if 'silent' not in self.__dict__:
from drf_spectacular.settings import spectacular_settings
self.silent = spectacular_settings.DISABLE_ERRORS_AND_WARNINGS
try:
return self.__dict__[name]
except KeyError:
raise AttributeError(name)
def __bool__(self):
return bool(self._warn_cache or self._error_cache)
@contextlib.contextmanager
def silence(self):
self.silent, tmp = True, self.silent
try:
yield
finally:
self.silent = tmp
def reset(self):
self._warn_cache.clear()
self._error_cache.clear()
def enable_color(self):
self._blue = '\033[0;34m'
self._red = '\033[0;31m'
self._yellow = '\033[0;33m'
self._clear = '\033[0m'
def enable_trace_lineno(self):
self._trace_lineno = True
def _get_current_trace(self):
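        # prefer the innermost (most recently pushed) trace entry that has a known source file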
source_locations = [t for t in self._traces if t[0]]
if source_locations:
sourcefile, lineno, _ = source_locations[-1]
source_location = f'{sourcefile}:{lineno}' if lineno else sourcefile
else:
source_location = ''
breadcrumbs = ' > '.join(t[2] for t in self._traces)
return source_location, breadcrumbs
def emit(self, msg, severity):
assert severity in ['warning', 'error']
cache = self._warn_cache if severity == 'warning' else self._error_cache
source_location, breadcrumbs = self._get_current_trace()
prefix = f'{self._blue}{source_location}: ' if source_location else ''
prefix += self._yellow if severity == 'warning' else self._red
prefix += f'{severity.capitalize()}'
prefix += f' [{breadcrumbs}]: ' if breadcrumbs else ': '
msg = prefix + self._clear + str(msg)
if not self.silent and msg not in cache:
print(msg, file=sys.stderr)
cache[msg] += 1
def emit_summary(self):
if not self.silent and (self._warn_cache or self._error_cache):
print(
f'\nSchema generation summary:\n'
f'Warnings: {sum(self._warn_cache.values())} ({len(self._warn_cache)} unique)\n'
f'Errors: {sum(self._error_cache.values())} ({len(self._error_cache)} unique)\n',
file=sys.stderr
)
GENERATOR_STATS = GeneratorStats()
def warn(msg, delayed=None):
if delayed:
warnings = get_override(delayed, 'warnings', [])
warnings.append(msg)
METHOD_NAME(delayed, 'warnings', warnings)
else:
GENERATOR_STATS.emit(msg, 'warning')
def error(msg, delayed=None):
if delayed:
errors = get_override(delayed, 'errors', [])
errors.append(msg)
METHOD_NAME(delayed, 'errors', errors)
else:
GENERATOR_STATS.emit(msg, 'error')
def reset_generator_stats():
GENERATOR_STATS.reset()
@contextlib.contextmanager
def add_trace_message(obj):
"""
Adds a message to be used as a prefix when emitting warnings and errors.
"""
sourcefile, lineno = _get_source_location(obj)
GENERATOR_STATS._traces.append((sourcefile, lineno, obj.__name__))
yield
GENERATOR_STATS._traces.pop()
@functools.lru_cache(maxsize=1000)
def _get_source_location(obj):
try:
sourcefile = inspect.getsourcefile(obj)
except: # noqa: E722
sourcefile = None
try:
# This is a rather expensive operation. Only do it when explicitly enabled (CLI)
# and cache results to speed up some recurring objects like serializers.
lineno = inspect.getsourcelines(obj)[1] if GENERATOR_STATS._trace_lineno else None
except: # noqa: E722
lineno = None
return sourcefile, lineno
def has_override(obj, prop):
if isinstance(obj, functools.partial):
obj = obj.func
if not hasattr(obj, '_spectacular_annotation'):
return False
if prop not in obj._spectacular_annotation:
return False
return True
def get_override(obj, prop, default=None):
if isinstance(obj, functools.partial):
obj = obj.func
if not has_override(obj, prop):
return default
return obj._spectacular_annotation[prop]
def METHOD_NAME(obj, prop, value):
if not hasattr(obj, '_spectacular_annotation'):
obj._spectacular_annotation = {}
elif '_spectacular_annotation' not in obj.__dict__:
obj._spectacular_annotation = obj._spectacular_annotation.copy()
obj._spectacular_annotation[prop] = value
return obj
def get_view_method_names(view, schema=None):
schema = schema or view.schema
return [
item for item in dir(view) if callable(getattr(view, item)) and (
item in view.http_method_names
or item in schema.method_mapping.values()
or item == 'list'
or hasattr(getattr(view, item), 'mapping')
)
]
def isolate_view_method(view, method_name):
"""
Prevent modifying a view method which is derived from other views. Changes to
a derived method would leak into the view where the method originated from.
Break derivation by wrapping the method and explicitly setting it on the view.
"""
method = getattr(view, method_name)
# no isolation is required if the view method is not derived.
# @api_view is a special case that also breaks isolation. It proxies all view
# methods through a single handler function, which then also requires isolation.
if method_name in view.__dict__ and method.__name__ != 'handler':
return method
@functools.wraps(method)
def wrapped_method(self, request, *args, **kwargs):
return method(self, request, *args, **kwargs)
# wraps() will only create a shallow copy of method.__dict__. Updates to "kwargs"
# via @extend_schema would leak to the original method. Isolate by creating a copy.
if hasattr(method, 'kwargs'):
wrapped_method.kwargs = method.kwargs.copy()
setattr(view, method_name, wrapped_method)
return wrapped_method
def cache(user_function):
""" simple polyfill for python < 3.9 """
return functools.lru_cache(maxsize=None)(user_function)
|
1,641 |
test backup cancel
|
#!/usr/bin/env python3
# group: rw
#
# Test nbd reconnect
#
# Copyright (c) 2019 Virtuozzo International GmbH.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time
import os
import iotests
from iotests import qemu_img_create, file_path, qemu_nbd_popen
disk_a, disk_b, nbd_sock = file_path('disk_a', 'disk_b', 'nbd-sock')
nbd_uri = 'nbd+unix:///?socket=' + nbd_sock
wait_limit = 3.0
wait_step = 0.2
class TestNbdReconnect(iotests.QMPTestCase):
def init_vm(self, disk_size):
qemu_img_create('-f', iotests.imgfmt, disk_a, str(disk_size))
qemu_img_create('-f', iotests.imgfmt, disk_b, str(disk_size))
self.vm = iotests.VM().add_drive(disk_a)
self.vm.launch()
self.vm.hmp_qemu_io('drive0', 'write 0 {}'.format(disk_size))
def tearDown(self):
self.vm.shutdown()
os.remove(disk_a)
os.remove(disk_b)
def start_job(self, job):
"""Stat job with nbd target and kill the server"""
assert job in ('blockdev-backup', 'blockdev-mirror')
with qemu_nbd_popen('-k', nbd_sock, '-f', iotests.imgfmt, disk_b):
result = self.vm.qmp('blockdev-add',
**{'node_name': 'backup0',
'driver': 'raw',
'file': {'driver': 'nbd',
'server': {'type': 'unix',
'path': nbd_sock},
'reconnect-delay': 10}})
self.assert_qmp(result, 'return', {})
result = self.vm.qmp(job, device='drive0',
sync='full', target='backup0',
speed=(1 * 1024 * 1024))
self.assert_qmp(result, 'return', {})
# Wait for some progress
t = 0.0
while t < wait_limit:
jobs = self.vm.qmp('query-block-jobs')['return']
if jobs and jobs[0]['offset'] > 0:
break
time.sleep(wait_step)
t += wait_step
self.assertTrue(jobs and jobs[0]['offset'] > 0) # job started
jobs = self.vm.qmp('query-block-jobs')['return']
# Check that job is still in progress
self.assertTrue(jobs)
self.assertTrue(jobs[0]['offset'] < jobs[0]['len'])
result = self.vm.qmp('block-job-set-speed', device='drive0', speed=0)
self.assert_qmp(result, 'return', {})
# Emulate server down time for 1 second
time.sleep(1)
def test_backup(self):
size = 5 * 1024 * 1024
self.init_vm(size)
self.start_job('blockdev-backup')
with qemu_nbd_popen('-k', nbd_sock, '-f', iotests.imgfmt, disk_b):
e = self.vm.event_wait('BLOCK_JOB_COMPLETED')
self.assertEqual(e['data']['offset'], size)
result = self.vm.qmp('blockdev-del', node_name='backup0')
self.assert_qmp(result, 'return', {})
def cancel_job(self):
result = self.vm.qmp('block-job-cancel', device='drive0', force=True)
self.assert_qmp(result, 'return', {})
start_t = time.time()
self.vm.event_wait('BLOCK_JOB_CANCELLED')
delta_t = time.time() - start_t
self.assertTrue(delta_t < 2.0)
def test_mirror_cancel(self):
# Mirror speed limit doesn't work well enough, it seems that mirror
# will run many parallel requests anyway. MAX_IN_FLIGHT is 16 and
# MAX_IO_BYTES is 1M in mirror.c, so let's use 20M disk.
self.init_vm(20 * 1024 * 1024)
self.start_job('blockdev-mirror')
self.cancel_job()
def METHOD_NAME(self):
self.init_vm(5 * 1024 * 1024)
self.start_job('blockdev-backup')
self.cancel_job()
if __name__ == '__main__':
iotests.main(supported_fmts=['qcow2'])
|
1,642 |
is finished
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import functools
import operator
import time
import click
class TestStatus(enum.Enum):
PASSED = enum.auto()
FAILED = enum.auto()
SKIPPED = enum.auto()
WARNED = enum.auto()
ERRORED = enum.auto()
@classmethod
def from_string(cls, value: str):
value = value.lower()
if value.startswith(("failed", "fail")):
return cls.FAILED
if value.startswith(("passed", "pass", "success", "ok")):
return cls.PASSED
if value.startswith(("skipped", "skip", "ignore", "ignored")):
return cls.SKIPPED
if value.startswith("WARNING"):
return cls.WARNED
raise ValueError(f"Unknown test status `{value}`")
def to_ansi_color(self):
if self == TestStatus.FAILED:
return "red"
if self == TestStatus.PASSED:
return "green"
return "yellow"
class TestCaseSource:
def __init__(self, filename, line=None):
self.filename = filename
self.line = line
class TestCase:
def __init__( # pylint: disable=too-many-arguments
self,
name,
status,
message=None,
stdout=None,
source=None,
duration=0,
exception=None,
):
assert isinstance(status, TestStatus)
if status == TestStatus.ERRORED:
assert isinstance(exception, Exception)
self.name = name.strip()
self.status = status
self.message = message
self.stdout = stdout
self.source = source
self.duration = duration
self.exception = exception
def humanize(self):
parts = []
if self.source:
parts.append("%s:%d: " % (self.source.filename, self.source.line))
parts.append(self.name)
if self.message:
parts.append(": " + self.message)
parts.extend(
[
"\t",
"[%s]" % click.style(self.status.name, fg=self.status.to_ansi_color()),
]
)
return "".join(parts)
class TestSuite:
def __init__(self, env_name, test_name, finished=False, test_dir=None):
self.env_name = env_name
self.test_name = test_name
self.test_dir = test_dir
self.timestamp = 0
self.duration = 0
self._cases = []
self._finished = finished
@property
def cases(self):
return self._cases
@property
def status(self):
for s in (TestStatus.ERRORED, TestStatus.FAILED):
if self.get_status_nums(s):
return s
if self._cases and any(c.status == TestStatus.PASSED for c in self._cases):
return TestStatus.PASSED
return TestStatus.SKIPPED
def get_status_nums(self, status):
return len([True for c in self._cases if c.status == status])
def add_case(self, case: TestCase):
assert isinstance(case, TestCase)
self._cases.append(case)
def METHOD_NAME(self):
return self._finished
def on_start(self):
self.timestamp = time.time()
def on_finish(self):
if self.METHOD_NAME():
return
self._finished = True
self.duration = time.time() - self.timestamp
class TestResult:
def __init__(self, project_dir):
self.project_dir = project_dir
self._suites = []
@property
def suites(self):
return self._suites
def add_suite(self, suite):
assert isinstance(suite, TestSuite)
self._suites.append(suite)
@property
def duration(self):
return functools.reduce(operator.add, [s.duration for s in self._suites])
@property
def case_nums(self):
return functools.reduce(operator.add, [len(s.cases) for s in self._suites])
@property
def is_errored(self):
return any(s.status == TestStatus.ERRORED for s in self._suites)
def get_status_nums(self, status):
return functools.reduce(
operator.add, [s.get_status_nums(status) for s in self._suites]
)
|
1,643 |
get models
|
import requests
import csv
import re
from collections import defaultdict
default_map_name = 'covid19map'
base_url = 'https://%s.elixir-luxembourg.org/minerva/api/'
resource_url = ('https://git-r3lab.uni.lu/covid/models/-/raw/master/'
'Integration/MINERVA_build/resources.csv')
def get_config(map_name=default_map_name):
url = (base_url % map_name) + 'configuration/'
res = requests.get(url)
res.raise_for_status()
return res.json()
def get_project_id_from_config(config):
options = config.get('options', [])
for option in options:
if option.get('type') == 'DEFAULT_MAP':
return option.get('value')
return None
def get_latest_project_id(map_name=default_map_name):
url = (base_url % map_name) + 'projects/'
res = requests.get(url)
projects = res.json()
if '_' not in map_name:
map_name = map_name.replace('map', '_map')
# Find projects that start with the map name and end with the date
    p = r'%s_\d{2}[a-zA-Z]{3}\d{2}$' % map_name
# Pick project with latest creation date
latest_project = max(
[pr for pr in projects if re.match(p, pr['projectId'])],
key=lambda pr: pr['creationDate'])
project_id = latest_project['projectId']
return project_id
def METHOD_NAME(project_id, map_name=default_map_name):
url = (base_url % map_name) + ('projects/%s/models/' % project_id)
res = requests.get(url)
res.raise_for_status()
return res.json()
def get_model_elements(model_id, project_id, map_name=default_map_name):
url = (base_url % map_name) + \
('projects/%s/models/%s/' % (project_id, model_id)) + \
'bioEntities/elements/?columns=id,name,type,elementId,complexId,references'
res = requests.get(url)
res.raise_for_status()
return res.json()
def get_all_model_elements(models, project_id, map_name=default_map_name):
all_elements = []
for model in models:
model_id = model['idObject']
model_elements = get_model_elements(model_id, project_id,
map_name)
all_elements += model_elements
return all_elements
def get_element_references(element):
refs = element.get('references', [])
references = [(ref.get('type'), ref.get('resource')) for ref in refs]
if element.get('name'):
references.append(('TEXT', element['name']))
return references
def get_all_valid_element_refs(map_name=default_map_name):
project_id = get_latest_project_id(map_name)
models = METHOD_NAME(project_id, map_name)
all_model_elements = get_all_model_elements(models, project_id,
map_name)
element_refs = [get_element_references(element) for element
in all_model_elements]
valid_element_refs = [ref for ref in element_refs if ref]
return valid_element_refs
def get_ids_to_refs(model_id, map_name=default_map_name):
project_id = get_latest_project_id(map_name)
model_elements = get_model_elements(model_id, project_id, map_name)
object_ids_to_element_ids = {}
ids_to_refs = {}
complex_ids_to_members = defaultdict(set)
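    # complexId values are MINERVA internal object ids, so keep an id -> elementId map to translate complex keys below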
for element in model_elements:
object_ids_to_element_ids[element['id']] = element['elementId']
ref = get_element_references(element)
if ref:
ids_to_refs[element['elementId']] = ref
if element.get('complexId'):
complex_ids_to_members[element['complexId']].add(
element['elementId'])
complex_members = {}
for complex_id, members in complex_ids_to_members.items():
complex_members[object_ids_to_element_ids[complex_id]] = members
return ids_to_refs, complex_members
def get_model_ids(map_name=default_map_name):
project_id = get_latest_project_id(map_name)
models = METHOD_NAME(project_id, map_name)
model_names_to_ids = {}
for model in models:
model_names_to_ids[model['name']] = model['idObject']
return model_names_to_ids
def get_sif_filenames_to_ids(map_name=default_map_name):
model_names_to_ids = get_model_ids(map_name=map_name)
filenames_to_ids = {}
res = requests.get(resource_url)
csv_reader = csv.reader(res.text.splitlines())
for row in csv_reader:
model_name = row[3]
if model_name in model_names_to_ids:
fname = row[1].split('/')[-1][:-4] + '_raw.sif'
filenames_to_ids[fname] = model_names_to_ids[model_name]
return filenames_to_ids
|
1,644 |
set data from form
|
from App.config import getConfiguration
from plone.base import PloneMessageFactory as _
from plone.base.interfaces import IBundleRegistry
from plone.registry.interfaces import IRegistry
from Products.CMFPlone.resources.browser.resource import update_resource_registry_mtime
from Products.Five.browser import BrowserView
from Products.statusmessages.interfaces import IStatusMessage
from zope.component import getUtility
import operator
class ResourceRegistryControlPanelView(BrowserView):
@property
def _bundles(self):
registry = getUtility(IRegistry)
return registry.collectionOfInterface(
IBundleRegistry, prefix="plone.bundles", check=False
)
@property
def bundles_data(self):
result = []
for name, record in self._bundles.items():
result.append(
{
"name": name,
"safe_name": name.replace(".", "-"),
"jscompilation": record.jscompilation,
"csscompilation": record.csscompilation,
"expression": record.expression,
"enabled": record.enabled,
"depends": record.depends,
"load_async": record.load_async,
"load_defer": record.load_defer,
}
)
result = list(sorted(result, key=operator.itemgetter("name")))
result.append(
{
"name": "",
"safe_name": "",
"jscompilation": "",
"csscompilation": "",
"expression": "",
"enabled": False,
"depends": "",
"load_async": False,
"load_defer": False,
}
)
return result
def global_debug_mode(self):
return getConfiguration().debug_mode
def debug_mode(self):
registry = getUtility(IRegistry)
return registry["plone.resources.development"]
def _add(self):
name = self.request.form.get("name", None)
if name is None or name == "":
IStatusMessage(self.request).addStatusMessage(
_("Name can not be empty."), "error"
)
return
bundles = self._bundles
if name in bundles:
IStatusMessage(self.request).addStatusMessage(
_("Record ${name} already exists.", mapping=dict(name=name)), "error"
)
return
record = bundles.add(name)
self.METHOD_NAME(record)
IStatusMessage(self.request).addStatusMessage(
_("Record ${name} created.", mapping=dict(name=name)), "info"
)
def _update(self):
new_name = self.request.form.get("name", None)
if new_name is None or new_name == "":
IStatusMessage(self.request).addStatusMessage(
_("Name can not be empty."), "error"
)
return
original_name = self.request.form.get("original_name", None)
bundles = self._bundles
if new_name != original_name:
if original_name not in bundles:
IStatusMessage(self.request).addStatusMessage(
_("Expected record missing."), "error"
)
return
if new_name in bundles:
IStatusMessage(self.request).addStatusMessage(
_(
"Record name ${new_name} already taken.",
mapping=dict(new_name=new_name),
),
"error",
)
return
record = bundles[original_name]
del bundles[original_name]
# update prefix
record.__prefix__ = record.__prefix__.replace(original_name, new_name)
bundles[new_name] = record
else:
record = bundles[original_name]
self.METHOD_NAME(record)
IStatusMessage(self.request).addStatusMessage(_("Changes saved."), "info")
def METHOD_NAME(self, record):
names = record.__schema__.names()
data = {k: v for k, v in self.request.form.items() if k in names}
bool_names = ["enabled", "load_async", "load_defer"]
for bool_name in bool_names:
data[bool_name] = bool_name in data
for field_name, value in data.items():
full_name = record.__prefix__ + field_name
record.__registry__[full_name] = value
self._switch_cache(False)
def _delete(self):
name = self.request.form.get("original_name", None)
bundles = self._bundles
if name not in bundles:
IStatusMessage(self.request).addStatusMessage(
_("Expected record ${name} missing.", mapping=dict(name=name)), "error"
)
return
del bundles[name]
self._switch_cache(False)
IStatusMessage(self.request).addStatusMessage(_("Record deleted."), "info")
def _switch_cache(self, state):
registry = getUtility(IRegistry)
registry["plone.resources.development"] = state
def process_form(self):
if self.request["method"] != "POST":
return
action = self.request.form["action"]
if action == "add":
self._add()
elif action == "update":
self._update()
elif action == "delete":
self._delete()
elif action == "activate_cache":
self._switch_cache(True)
elif action == "deactivate_cache":
self._switch_cache(False)
else:
raise ValueError("Invalid form data")
update_resource_registry_mtime()
self.request.response.redirect(self.request["ACTUAL_URL"])
|
1,645 |
test filter out disabled capabilities ignore partially
|
import sublime
from LSP.plugin.core.settings import read_client_config, update_client_config
from LSP.plugin.core.views import get_uri_and_position_from_location
from LSP.plugin.core.views import to_encoded_filename
from os import environ
from os.path import dirname, pathsep
from unittesting import DeferrableTestCase
import unittest
import sys
test_file_path = dirname(__file__) + "/testfile.txt"
class ConfigParsingTests(DeferrableTestCase):
def test_can_parse_old_client_settings(self):
settings = {
"command": ["pyls"],
"scopes": ["text.html.vue"],
"syntaxes": ["Packages/Python/Python.sublime-syntax"], # it should use this one
"languageId": "java"
}
config = read_client_config("pyls", settings)
self.assertEqual(config.selector, "source.python")
self.assertEqual(config.priority_selector, "(text.html.vue)")
def test_can_parse_client_settings_with_languages(self):
settings = {
"command": ["pyls"],
# Check that "selector" will be "source.python"
"languages": [{"languageId": "python"}]
}
config = read_client_config("pyls", settings)
self.assertEqual(config.selector, "(source.python)")
self.assertEqual(config.priority_selector, "(source.python)")
def test_can_parse_settings_with_selector(self):
settings = {
"command": ["pyls"],
"selector": "source.python"
}
config = read_client_config("pyls", settings)
self.assertEqual(config.selector, "source.python")
self.assertEqual(config.priority_selector, "source.python")
def test_can_update_config(self):
settings = {
"command": ["pyls"],
"document_selector": "source.python",
"languageId": "python"
}
config = read_client_config("pyls", settings)
config = update_client_config(config, {"enabled": True})
self.assertEqual(config.enabled, True)
def test_can_read_experimental_capabilities(self):
experimental_capabilities = {
"foo": 1,
"bar": True,
"baz": "abc"
}
settings = {
"command": ["pyls"],
"document_selector": "source.python",
"languageId": "python",
"experimental_capabilities": experimental_capabilities
}
config = read_client_config("pyls", settings)
self.assertEqual(config.experimental_capabilities, experimental_capabilities)
def test_transport_config_extends_env_path(self):
settings = {
"command": ["pyls"],
"selector": "source.python",
"env": {
"PATH": "/a/b/"
}
}
config = read_client_config("pyls", settings)
transport_config = config.resolve_transport_config({})
original_path = environ.copy()['PATH']
resolved_path = transport_config.env['PATH']
self.assertEqual(resolved_path, '/a/b/{}{}'.format(pathsep, original_path))
def test_list_in_environment(self):
settings = {
"command": ["pyls"],
"selector": "source.python",
"env": {
"FOO": ["C:/hello", "X:/there", "Y:/$foobar"],
"BAR": "baz"
}
}
config = read_client_config("pyls", settings)
resolved = config.resolve_transport_config({"foobar": "asdf"})
if sublime.platform() == "windows":
self.assertEqual(resolved.env["FOO"], "C:/hello;X:/there;Y:/asdf")
else:
self.assertEqual(resolved.env["FOO"], "C:/hello:X:/there:Y:/asdf")
self.assertEqual(resolved.env["BAR"], "baz")
def test_disabled_capabilities(self):
settings = {
"command": ["pyls"],
"selector": "source.python",
"disabled_capabilities": {
"colorProvider": True,
"completionProvider": {"triggerCharacters": True},
"codeActionProvider": True
}
}
config = read_client_config("pyls", settings)
self.assertTrue(config.is_disabled_capability("colorProvider"))
# If only a sub path is disabled, the entire capability should not be disabled as a whole
self.assertFalse(config.is_disabled_capability("completionProvider"))
# This sub path should be disabled
self.assertTrue(config.is_disabled_capability("completionProvider.triggerCharacters"))
# But not this sub path
self.assertFalse(config.is_disabled_capability("completionProvider.resolveProvider"))
# The entire codeActionProvider is disabled
self.assertTrue(config.is_disabled_capability("codeActionProvider"))
# If codeActionProvider is disabled, all of its sub paths should be disabled as well
self.assertTrue(config.is_disabled_capability("codeActionProvider.codeActionKinds"))
# This one should be enabled
self.assertFalse(config.is_disabled_capability("definitionProvider"))
def METHOD_NAME(self):
settings = {
"command": ["pyls"],
"selector": "source.python",
"disabled_capabilities": {"completionProvider": {"triggerCharacters": True}}
}
config = read_client_config("pyls", settings)
capability_path = "completionProvider"
options = {"triggerCharacters": ["!"], "resolveProvider": True}
options = config.filter_out_disabled_capabilities(capability_path, options)
self.assertNotIn("triggerCharacters", options)
self.assertIn("resolveProvider", options)
@unittest.skipIf(sys.platform.startswith("win"), "requires non-Windows")
def test_path_maps(self):
config = read_client_config("asdf", {
"command": ["asdf"],
"selector": "source.foo",
"path_maps": [
{
"local": "/home/user/projects/myproject",
"remote": "/workspace"
},
{
"local": "/home/user/projects/another",
"remote": "/workspace2"
}
]
})
uri = config.map_client_path_to_server_uri("/home/user/projects/myproject/file.js")
self.assertEqual(uri, "file:///workspace/file.js")
uri = config.map_client_path_to_server_uri("/home/user/projects/another/foo.js")
self.assertEqual(uri, "file:///workspace2/foo.js")
uri = config.map_client_path_to_server_uri("/some/path/with/no/mapping.py")
self.assertEqual(uri, "file:///some/path/with/no/mapping.py")
path = config.map_server_uri_to_client_path("file:///workspace/bar.html")
self.assertEqual(path, "/home/user/projects/myproject/bar.html")
path = config.map_server_uri_to_client_path("file:///workspace2/style.css")
self.assertEqual(path, "/home/user/projects/another/style.css")
# Test to_encoded_filename
uri, position = get_uri_and_position_from_location({
'uri': 'file:///foo/bar',
'range': {'start': {'line': 0, 'character': 5}}
}) # type: ignore
path = config.map_server_uri_to_client_path(uri)
self.assertEqual(to_encoded_filename(path, position), '/foo/bar:1:6')
uri, position = get_uri_and_position_from_location({
'targetUri': 'file:///foo/bar',
'targetSelectionRange': {'start': {'line': 1234, 'character': 4321}}
}) # type: ignore
path = config.map_server_uri_to_client_path(uri)
self.assertEqual(to_encoded_filename(path, position), '/foo/bar:1235:4322')
|
1,646 |
on request
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from typing import ( Union, Optional )
from azure.core.pipeline import PipelineRequest
from azure.core.pipeline.policies import ( SansIOHTTPPolicy, BearerTokenCredentialPolicy, AzureKeyCredentialPolicy )
from azure.core.credentials import ( TokenCredential, AzureKeyCredential )
from ._client import TextTranslationClient as ServiceClientGenerated
DEFAULT_TOKEN_SCOPE = "https://api.microsofttranslator.com/"
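# default token scope used when a plain TokenCredential is supplied without explicit credential_scopes (see set_authentication_policy)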
def patch_sdk():
"""Do not remove from this file.
`patch_sdk` is a last resort escape hatch that allows you to do customizations
you can't accomplish using the techniques described in
https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
class TranslatorCredential:
""" Credential for Translator Service. It is using combination of Resource key and region.
"""
def __init__(self, key: str, region: str) -> None:
self.key = key
self.region = region
def update(self, key: str) -> None:
"""Update the key.
This can be used when you've regenerated your service key and want
to update long-lived clients.
:param str key: The key used to authenticate to an Azure service
:raises: ValueError or TypeError
"""
if not key:
raise ValueError("The key used for updating can not be None or empty")
if not isinstance(key, str):
raise TypeError("The key used for updating must be a string.")
self.key = key
class TranslatorAuthenticationPolicy(SansIOHTTPPolicy):
""" Translator Authentication Policy. Adds both authentication headers that are required.
Ocp-Apim-Subscription-Region header contains region of the Translator resource.
Ocp-Apim-Subscription-Key header contains API key of the Translator resource.
"""
def __init__(self, credential: TranslatorCredential):
self.credential = credential
def METHOD_NAME(self, request: PipelineRequest) -> None:
request.http_request.headers["Ocp-Apim-Subscription-Key"] = self.credential.key
request.http_request.headers["Ocp-Apim-Subscription-Region"] = self.credential.region
def get_translation_endpoint(endpoint, api_version):
if not endpoint:
endpoint = "https://api.cognitive.microsofttranslator.com"
translator_endpoint: str = ""
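    # Cognitive Services resource endpoints (host contains "cognitiveservices") need the translator route and API version appended; dedicated translator endpoints are used as-is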
if "cognitiveservices" in endpoint:
translator_endpoint = endpoint + "/translator/text/v" + api_version
else:
translator_endpoint = endpoint
return translator_endpoint
def set_authentication_policy(credential, kwargs):
if isinstance(credential, TranslatorCredential):
if not kwargs.get("authentication_policy"):
kwargs["authentication_policy"] = TranslatorAuthenticationPolicy(credential)
elif isinstance(credential, AzureKeyCredential):
if not kwargs.get("authentication_policy"):
kwargs["authentication_policy"] = AzureKeyCredentialPolicy(
name="Ocp-Apim-Subscription-Key", credential=credential)
elif hasattr(credential, "get_token"):
if not kwargs.get("authentication_policy"):
kwargs["authentication_policy"] = BearerTokenCredentialPolicy(credential, *kwargs.pop("credential_scopes", [DEFAULT_TOKEN_SCOPE]), kwargs)
class TextTranslationClient(ServiceClientGenerated):
"""Text translation is a cloud-based REST API feature of the Translator service that uses neural
machine translation technology to enable quick and accurate source-to-target text translation
in real time across all supported languages.
The following methods are supported by the Text Translation feature:
Languages. Returns a list of languages supported by Translate, Transliterate, and Dictionary
Lookup operations.
Translate. Renders single source-language text to multiple target-language texts with a single
request.
Transliterate. Converts characters or letters of a source language to the corresponding
characters or letters of a target language.
    Detect. Returns the source language code and a boolean variable denoting whether the
detected language is supported for text translation and transliteration.
Dictionary lookup. Returns equivalent words for the source term in the target language.
Dictionary example Returns grammatical structure and context examples for the source term and
target term pair.
Combinations of endpoint and credential values:
str + AzureKeyCredential - used custom domain translator endpoint
str + TokenCredential - used for regional endpoint with token authentication
str + TranslatorCredential - used for National Clouds
None + AzureKeyCredential - used for global translator endpoint with global Translator resource
None + Token - general translator endpoint with token authentication
None + TranslatorCredential - general translator endpoint with regional Translator resource
:param endpoint: Supported Text Translation endpoints (protocol and hostname, for example:
https://api.cognitive.microsofttranslator.com). Required.
:type endpoint: str
:param credential: Credential used to authenticate with the Translator service
:type credential: Union[AzureKeyCredential , TokenCredential , TranslatorCredential]
:keyword api_version: Default value is "3.0". Note that overriding this default value may
result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(
self,
credential: Union[AzureKeyCredential , TokenCredential , TranslatorCredential],
*,
endpoint: Optional[str] = None,
api_version = "3.0",
**kwargs):
set_authentication_policy(credential, kwargs)
translation_endpoint = get_translation_endpoint(endpoint, api_version)
super().__init__(
endpoint=translation_endpoint,
api_version=api_version,
**kwargs
)
__all__ = ["TextTranslationClient", "TranslatorCredential"]
|
1,647 |
test 2 paragraphs long line
|
"""
Python Markdown
A Python implementation of John Gruber's Markdown.
Documentation: https://python-markdown.github.io/
GitHub: https://github.com/Python-Markdown/markdown/
PyPI: https://pypi.org/project/Markdown/
Started by Manfred Stienstra (http://www.dwerg.net/).
Maintained for a few years by Yuri Takhteyev (http://www.freewisdom.org).
Currently maintained by Waylan Limberg (https://github.com/waylan),
Dmitry Shachnev (https://github.com/mitya57) and Isaac Muse (https://github.com/facelessuser).
Copyright 2007-2018 The Python Markdown Project (v. 1.7 and later)
Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
Copyright 2004 Manfred Stienstra (the original version)
License: BSD (see LICENSE.md for details).
"""
from markdown.test_tools import TestCase
class TestParagraphBlocks(TestCase):
def test_simple_paragraph(self):
self.assertMarkdownRenders(
'A simple paragraph.',
'<p>A simple paragraph.</p>'
)
def test_blank_line_before_paragraph(self):
self.assertMarkdownRenders(
'\nA paragraph preceded by a blank line.',
'<p>A paragraph preceded by a blank line.</p>'
)
def test_multiline_paragraph(self):
self.assertMarkdownRenders(
self.dedent(
"""
This is a paragraph
on multiple lines
with hard returns.
"""
),
self.dedent(
"""
<p>This is a paragraph
on multiple lines
with hard returns.</p>
"""
)
)
def test_paragraph_long_line(self):
self.assertMarkdownRenders(
'A very long long long long long long long long long long long long long long long long long long long '
'long long long long long long long long long long long long long paragraph on 1 line.',
'<p>A very long long long long long long long long long long long long long long long long long long '
'long long long long long long long long long long long long long long paragraph on 1 line.</p>'
)
def METHOD_NAME(self):
self.assertMarkdownRenders(
'A very long long long long long long long long long long long long long long long long long long long '
'long long long long long long long long long long long long long paragraph on 1 line.\n\n'
'A new long long long long long long long long long long long long long long long '
'long paragraph on 1 line.',
'<p>A very long long long long long long long long long long long long long long long long long long '
'long long long long long long long long long long long long long long paragraph on 1 line.</p>\n'
'<p>A new long long long long long long long long long long long long long long long '
'long paragraph on 1 line.</p>'
)
def test_consecutive_paragraphs(self):
self.assertMarkdownRenders(
self.dedent(
"""
Paragraph 1.
Paragraph 2.
"""
),
self.dedent(
"""
<p>Paragraph 1.</p>
<p>Paragraph 2.</p>
"""
)
)
def test_consecutive_paragraphs_tab(self):
self.assertMarkdownRenders(
self.dedent(
"""
Paragraph followed by a line with a tab only.
\t
Paragraph after a line with a tab only.
"""
),
self.dedent(
"""
<p>Paragraph followed by a line with a tab only.</p>
<p>Paragraph after a line with a tab only.</p>
"""
)
)
def test_consecutive_paragraphs_space(self):
self.assertMarkdownRenders(
self.dedent(
"""
Paragraph followed by a line with a space only.
Paragraph after a line with a space only.
"""
),
self.dedent(
"""
<p>Paragraph followed by a line with a space only.</p>
<p>Paragraph after a line with a space only.</p>
"""
)
)
def test_consecutive_multiline_paragraphs(self):
self.assertMarkdownRenders(
self.dedent(
"""
Paragraph 1, line 1.
Paragraph 1, line 2.
Paragraph 2, line 1.
Paragraph 2, line 2.
"""
),
self.dedent(
"""
<p>Paragraph 1, line 1.
Paragraph 1, line 2.</p>
<p>Paragraph 2, line 1.
Paragraph 2, line 2.</p>
"""
)
)
def test_paragraph_leading_space(self):
self.assertMarkdownRenders(
' A paragraph with 1 leading space.',
'<p>A paragraph with 1 leading space.</p>'
)
def test_paragraph_2_leading_spaces(self):
self.assertMarkdownRenders(
' A paragraph with 2 leading spaces.',
'<p>A paragraph with 2 leading spaces.</p>'
)
def test_paragraph_3_leading_spaces(self):
self.assertMarkdownRenders(
' A paragraph with 3 leading spaces.',
'<p>A paragraph with 3 leading spaces.</p>'
)
def test_paragraph_trailing_leading_space(self):
self.assertMarkdownRenders(
' A paragraph with 1 trailing and 1 leading space. ',
'<p>A paragraph with 1 trailing and 1 leading space. </p>'
)
def test_paragraph_trailing_tab(self):
self.assertMarkdownRenders(
'A paragraph with 1 trailing tab.\t',
'<p>A paragraph with 1 trailing tab. </p>'
)
def test_paragraphs_CR(self):
self.assertMarkdownRenders(
'Paragraph 1, line 1.\rParagraph 1, line 2.\r\rParagraph 2, line 1.\rParagraph 2, line 2.\r',
self.dedent(
"""
<p>Paragraph 1, line 1.
Paragraph 1, line 2.</p>
<p>Paragraph 2, line 1.
Paragraph 2, line 2.</p>
"""
)
)
def test_paragraphs_LF(self):
self.assertMarkdownRenders(
'Paragraph 1, line 1.\nParagraph 1, line 2.\n\nParagraph 2, line 1.\nParagraph 2, line 2.\n',
self.dedent(
"""
<p>Paragraph 1, line 1.
Paragraph 1, line 2.</p>
<p>Paragraph 2, line 1.
Paragraph 2, line 2.</p>
"""
)
)
def test_paragraphs_CR_LF(self):
self.assertMarkdownRenders(
'Paragraph 1, line 1.\r\nParagraph 1, line 2.\r\n\r\nParagraph 2, line 1.\r\nParagraph 2, line 2.\r\n',
self.dedent(
"""
<p>Paragraph 1, line 1.
Paragraph 1, line 2.</p>
<p>Paragraph 2, line 1.
Paragraph 2, line 2.</p>
"""
)
)
|
1,648 |
generate key pair
|
"""
Ephemeral Elliptic Curve Diffie-Hellman (ECDH) key exchange
RFC 5656, Section 4
"""
from hashlib import sha256, sha384, sha512
from paramiko.common import byte_chr
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import serialization
from binascii import hexlify
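# message numbers 30 and 31 are SSH_MSG_KEX_ECDH_INIT / SSH_MSG_KEX_ECDH_REPLY (RFC 5656, section 7.1)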
_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32)
c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)]
class KexNistp256:
name = "ecdh-sha2-nistp256"
hash_algo = sha256
curve = ec.SECP256R1()
def __init__(self, transport):
self.transport = transport
# private key, client public and server public keys
self.P = 0
self.Q_C = None
self.Q_S = None
def start_kex(self):
self.METHOD_NAME()
if self.transport.server_mode:
self.transport._expect_packet(_MSG_KEXECDH_INIT)
return
m = Message()
m.add_byte(c_MSG_KEXECDH_INIT)
# SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion
m.add_string(
self.Q_C.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
self.transport._send_message(m)
self.transport._expect_packet(_MSG_KEXECDH_REPLY)
def parse_next(self, ptype, m):
if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT):
return self._parse_kexecdh_init(m)
elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY):
return self._parse_kexecdh_reply(m)
raise SSHException(
"KexECDH asked to handle packet type {:d}".format(ptype)
)
def METHOD_NAME(self):
self.P = ec.generate_private_key(self.curve, default_backend())
if self.transport.server_mode:
self.Q_S = self.P.public_key()
return
self.Q_C = self.P.public_key()
def _parse_kexecdh_init(self, m):
Q_C_bytes = m.get_string()
self.Q_C = ec.EllipticCurvePublicKey.from_encoded_point(
self.curve, Q_C_bytes
)
K_S = self.transport.get_server_key().asbytes()
K = self.P.exchange(ec.ECDH(), self.Q_C)
K = int(hexlify(K), 16)
# compute exchange hash
hm = Message()
hm.add(
self.transport.remote_version,
self.transport.local_version,
self.transport.remote_kex_init,
self.transport.local_kex_init,
)
hm.add_string(K_S)
hm.add_string(Q_C_bytes)
# SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion
hm.add_string(
self.Q_S.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
hm.add_mpint(int(K))
H = self.hash_algo(hm.asbytes()).digest()
self.transport._set_K_H(K, H)
sig = self.transport.get_server_key().sign_ssh_data(
H, self.transport.host_key_type
)
# construct reply
m = Message()
m.add_byte(c_MSG_KEXECDH_REPLY)
m.add_string(K_S)
m.add_string(
self.Q_S.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
m.add_string(sig)
self.transport._send_message(m)
self.transport._activate_outbound()
def _parse_kexecdh_reply(self, m):
K_S = m.get_string()
Q_S_bytes = m.get_string()
self.Q_S = ec.EllipticCurvePublicKey.from_encoded_point(
self.curve, Q_S_bytes
)
sig = m.get_binary()
K = self.P.exchange(ec.ECDH(), self.Q_S)
K = int(hexlify(K), 16)
# compute exchange hash and verify signature
hm = Message()
hm.add(
self.transport.local_version,
self.transport.remote_version,
self.transport.local_kex_init,
self.transport.remote_kex_init,
)
hm.add_string(K_S)
# SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion
hm.add_string(
self.Q_C.public_bytes(
serialization.Encoding.X962,
serialization.PublicFormat.UncompressedPoint,
)
)
hm.add_string(Q_S_bytes)
hm.add_mpint(K)
self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest())
self.transport._verify_key(K_S, sig)
self.transport._activate_outbound()
class KexNistp384(KexNistp256):
name = "ecdh-sha2-nistp384"
hash_algo = sha384
curve = ec.SECP384R1()
class KexNistp521(KexNistp256):
name = "ecdh-sha2-nistp521"
hash_algo = sha512
curve = ec.SECP521R1()
|
1,649 |
is running
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import signal
import subprocess
import sys
import time
from platformio.compat import (
IS_WINDOWS,
aio_create_task,
aio_get_running_loop,
get_locale_encoding,
)
class DebugSubprocessProtocol(asyncio.SubprocessProtocol):
def __init__(self, factory):
self.factory = factory
self._is_exited = False
def connection_made(self, transport):
self.factory.connection_made(transport)
def pipe_data_received(self, fd, data):
pipe_to_cb = [
self.factory.stdin_data_received,
self.factory.stdout_data_received,
self.factory.stderr_data_received,
]
pipe_to_cb[fd](data)
def connection_lost(self, exc):
self.process_exited()
def process_exited(self):
if self._is_exited:
return
self.factory.process_exited()
self._is_exited = True
class DebugBaseProcess:
STDOUT_CHUNK_SIZE = 2048
LOG_FILE = None
def __init__(self):
self.transport = None
self._is_running = False
self._last_activity = 0
self._exit_future = None
self._stdin_read_task = None
self._std_encoding = get_locale_encoding()
async def spawn(self, *args, **kwargs):
wait_until_exit = False
if "wait_until_exit" in kwargs:
wait_until_exit = kwargs["wait_until_exit"]
del kwargs["wait_until_exit"]
for pipe in ("stdin", "stdout", "stderr"):
if pipe not in kwargs:
kwargs[pipe] = subprocess.PIPE
loop = aio_get_running_loop()
await loop.subprocess_exec(
lambda: DebugSubprocessProtocol(self), *args, **kwargs
)
if wait_until_exit:
self._exit_future = loop.create_future()
await self._exit_future
def METHOD_NAME(self):
return self._is_running
def connection_made(self, transport):
self._is_running = True
self.transport = transport
def connect_stdin_pipe(self):
self._stdin_read_task = aio_create_task(self._read_stdin_pipe())
async def _read_stdin_pipe(self):
loop = aio_get_running_loop()
if IS_WINDOWS:
while True:
self.stdin_data_received(
await loop.run_in_executor(None, sys.stdin.buffer.readline)
)
else:
reader = asyncio.StreamReader()
protocol = asyncio.StreamReaderProtocol(reader)
await loop.connect_read_pipe(lambda: protocol, sys.stdin)
while True:
self.stdin_data_received(await reader.readline())
def stdin_data_received(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
def stdout_data_received(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
while data:
chunk = data[: self.STDOUT_CHUNK_SIZE]
print(chunk.decode(self._std_encoding, "replace"), end="", flush=True)
data = data[self.STDOUT_CHUNK_SIZE :]
def stderr_data_received(self, data):
self._last_activity = time.time()
if self.LOG_FILE:
with open(self.LOG_FILE, "ab") as fp:
fp.write(data)
print(
data.decode(self._std_encoding, "replace"),
end="",
file=sys.stderr,
flush=True,
)
def process_exited(self):
self._is_running = False
self._last_activity = time.time()
# Allow terminating via SIGINT/CTRL+C
signal.signal(signal.SIGINT, signal.default_int_handler)
if self._stdin_read_task:
self._stdin_read_task.cancel()
self._stdin_read_task = None
if self._exit_future:
self._exit_future.set_result(True)
self._exit_future = None
def terminate(self):
if not self.METHOD_NAME() or not self.transport:
return
try:
self.transport.kill()
self.transport.close()
except: # pylint: disable=bare-except
pass
|
1,650 |
tcsendbreak
|
import sys
from _typeshed import FileDescriptorLike
from typing import Any
from typing_extensions import TypeAlias
if sys.platform != "win32":
# Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints.
_Attr: TypeAlias = list[int | list[bytes | int]]
B0: int
B1000000: int
B110: int
B115200: int
B1152000: int
B1200: int
B134: int
B150: int
B1500000: int
B1800: int
B19200: int
B200: int
B2000000: int
B230400: int
B2400: int
B2500000: int
B300: int
B3000000: int
B3500000: int
B38400: int
B4000000: int
B460800: int
B4800: int
B50: int
B500000: int
B57600: int
B576000: int
B600: int
B75: int
B921600: int
B9600: int
BRKINT: int
BS0: int
BS1: int
BSDLY: int
CBAUD: int
CBAUDEX: int
CDEL: int
CDSUSP: int
CEOF: int
CEOL: int
CEOL2: int
CEOT: int
CERASE: int
CESC: int
CFLUSH: int
CIBAUD: int
CINTR: int
CKILL: int
CLNEXT: int
CLOCAL: int
CNUL: int
COMMON: int
CQUIT: int
CR0: int
CR1: int
CR2: int
CR3: int
CRDLY: int
CREAD: int
CRPRNT: int
CRTSCTS: int
CS5: int
CS6: int
CS7: int
CS8: int
CSIZE: int
CSTART: int
CSTOP: int
CSTOPB: int
CSUSP: int
CSWTCH: int
CWERASE: int
ECHO: int
ECHOCTL: int
ECHOE: int
ECHOK: int
ECHOKE: int
ECHONL: int
ECHOPRT: int
EXTA: int
EXTB: int
FF0: int
FF1: int
FFDLY: int
FIOASYNC: int
FIOCLEX: int
FIONBIO: int
FIONCLEX: int
FIONREAD: int
FLUSHO: int
HUPCL: int
IBSHIFT: int
ICANON: int
ICRNL: int
IEXTEN: int
IGNBRK: int
IGNCR: int
IGNPAR: int
IMAXBEL: int
INIT_C_CC: int
INLCR: int
INPCK: int
IOCSIZE_MASK: int
IOCSIZE_SHIFT: int
ISIG: int
ISTRIP: int
IUCLC: int
IXANY: int
IXOFF: int
IXON: int
N_MOUSE: int
N_PPP: int
N_SLIP: int
N_STRIP: int
N_TTY: int
NCC: int
NCCS: int
NL0: int
NL1: int
NLDLY: int
NOFLSH: int
NSWTCH: int
OCRNL: int
OFDEL: int
OFILL: int
OLCUC: int
ONLCR: int
ONLRET: int
ONOCR: int
OPOST: int
PARENB: int
PARMRK: int
PARODD: int
PENDIN: int
TAB0: int
TAB1: int
TAB2: int
TAB3: int
TABDLY: int
TCFLSH: int
TCGETA: int
TCGETS: int
TCIFLUSH: int
TCIOFF: int
TCIOFLUSH: int
TCION: int
TCOFLUSH: int
TCOOFF: int
TCOON: int
TCSADRAIN: int
TCSAFLUSH: int
TCSANOW: int
TCSASOFT: int
TCSBRK: int
TCSBRKP: int
TCSETA: int
TCSETAF: int
TCSETAW: int
TCSETS: int
TCSETSF: int
TCSETSW: int
TCXONC: int
TIOCCONS: int
TIOCEXCL: int
TIOCGETD: int
TIOCGICOUNT: int
TIOCGLCKTRMIOS: int
TIOCGPGRP: int
TIOCGSERIAL: int
TIOCGSIZE: int
TIOCGSOFTCAR: int
TIOCGWINSZ: int
TIOCINQ: int
TIOCLINUX: int
TIOCM_CAR: int
TIOCM_CD: int
TIOCM_CTS: int
TIOCM_DSR: int
TIOCM_DTR: int
TIOCM_LE: int
TIOCM_RI: int
TIOCM_RNG: int
TIOCM_RTS: int
TIOCM_SR: int
TIOCM_ST: int
TIOCMBIC: int
TIOCMBIS: int
TIOCMGET: int
TIOCMIWAIT: int
TIOCMSET: int
TIOCNOTTY: int
TIOCNXCL: int
TIOCOUTQ: int
TIOCPKT_DATA: int
TIOCPKT_DOSTOP: int
TIOCPKT_FLUSHREAD: int
TIOCPKT_FLUSHWRITE: int
TIOCPKT_NOSTOP: int
TIOCPKT_START: int
TIOCPKT_STOP: int
TIOCPKT: int
TIOCSCTTY: int
TIOCSER_TEMT: int
TIOCSERCONFIG: int
TIOCSERGETLSR: int
TIOCSERGETMULTI: int
TIOCSERGSTRUCT: int
TIOCSERGWILD: int
TIOCSERSETMULTI: int
TIOCSERSWILD: int
TIOCSETD: int
TIOCSLCKTRMIOS: int
TIOCSPGRP: int
TIOCSSERIAL: int
TIOCSSIZE: int
TIOCSSOFTCAR: int
TIOCSTI: int
TIOCSWINSZ: int
TIOCTTYGSTRUCT: int
TOSTOP: int
VDISCARD: int
VEOF: int
VEOL: int
VEOL2: int
VERASE: int
VINTR: int
VKILL: int
VLNEXT: int
VMIN: int
VQUIT: int
VREPRINT: int
VSTART: int
VSTOP: int
VSUSP: int
VSWTC: int
VSWTCH: int
VT0: int
VT1: int
VTDLY: int
VTIME: int
VWERASE: int
XCASE: int
XTABS: int
def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... # Returns _Attr; we use Any to avoid a union in the return type
def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ...
def METHOD_NAME(__fd: FileDescriptorLike, __duration: int) -> None: ...
def tcdrain(__fd: FileDescriptorLike) -> None: ...
def tcflush(__fd: FileDescriptorLike, __queue: int) -> None: ...
def tcflow(__fd: FileDescriptorLike, __action: int) -> None: ...
if sys.version_info >= (3, 11):
def tcgetwinsize(__fd: FileDescriptorLike) -> tuple[int, int]: ...
def tcsetwinsize(__fd: FileDescriptorLike, __winsize: tuple[int, int]) -> None: ...
class error(Exception): ...
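# A minimal sketch of how these bindings are typically used (illustrative
# comment only; it is not part of the stub):
#   import sys, termios
#   fd = sys.stdin.fileno()
#   attrs = termios.tcgetattr(fd)    # the 7-element _Attr list described above
#   attrs[3] &= ~termios.ECHO        # index 3 is lflag; clear local echo
#   termios.tcsetattr(fd, termios.TCSADRAIN, attrs)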
|
1,651 |
write meson build
|
from __future__ import annotations
import errno
import shutil
import subprocess
from pathlib import Path
from ._backend import Backend
from string import Template
import warnings
class MesonTemplate:
"""Template meson build file generation class."""
def __init__(
self,
modulename: str,
sources: list[Path],
deps: list[str],
object_files: list[Path],
linker_args: list[str],
c_args: list[str],
build_type: str,
):
self.modulename = modulename
self.build_template_path = (
Path(__file__).parent.absolute() / "meson.build.template"
)
self.sources = sources
self.deps = deps
self.substitutions = {}
self.objects = object_files
self.pipeline = [
self.initialize_template,
self.sources_substitution,
self.deps_substitution,
]
self.build_type = build_type
def meson_build_template(self) -> str:
if not self.build_template_path.is_file():
raise FileNotFoundError(
errno.ENOENT,
"Meson build template"
f" {self.build_template_path.absolute()}"
" does not exist.",
)
return self.build_template_path.read_text()
def initialize_template(self) -> None:
self.substitutions["modulename"] = self.modulename
self.substitutions["buildtype"] = self.build_type
def sources_substitution(self) -> None:
indent = " " * 21
self.substitutions["source_list"] = f",\n{indent}".join(
[f"'{source}'" for source in self.sources]
)
def deps_substitution(self) -> None:
indent = " " * 21
self.substitutions["dep_list"] = f",\n{indent}".join(
[f"dependency('{dep}')" for dep in self.deps]
)
def generate_meson_build(self):
for node in self.pipeline:
node()
template = Template(self.meson_build_template())
return template.substitute(self.substitutions)
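def _example_meson_template_usage():
    # Hypothetical helper, illustration only: shows how MesonTemplate above is
    # driven. The module name, source file and dependency are made up, and a
    # real call still requires meson.build.template to exist next to this file.
    template = MesonTemplate(
        "example",
        [Path("examplemodule.c")],
        deps=["lapack"],
        object_files=[],
        linker_args=[],
        c_args=[],
        build_type="release",
    )
    return template.generate_meson_build()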
class MesonBackend(Backend):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.dependencies = self.extra_dat.get("dependencies", [])
self.meson_build_dir = "bbdir"
self.build_type = (
"debug" if any("debug" in flag for flag in self.fc_flags) else "release"
)
def _move_exec_to_root(self, build_dir: Path):
walk_dir = Path(build_dir) / self.meson_build_dir
path_objects = walk_dir.glob(f"{self.modulename}*.so")
for path_object in path_objects:
shutil.move(path_object, Path.cwd())
def _get_build_command(self):
return [
"meson",
"setup",
self.meson_build_dir,
]
def METHOD_NAME(self, build_dir: Path) -> None:
"""Writes the meson build file at specified location"""
meson_template = MesonTemplate(
self.modulename,
self.sources,
self.dependencies,
self.extra_objects,
self.flib_flags,
self.fc_flags,
self.build_type,
)
src = meson_template.generate_meson_build()
Path(build_dir).mkdir(parents=True, exist_ok=True)
meson_build_file = Path(build_dir) / "meson.build"
meson_build_file.write_text(src)
return meson_build_file
def run_meson(self, build_dir: Path):
completed_process = subprocess.run(self._get_build_command(), cwd=build_dir)
if completed_process.returncode != 0:
raise subprocess.CalledProcessError(
completed_process.returncode, completed_process.args
)
completed_process = subprocess.run(
["meson", "compile", "-C", self.meson_build_dir], cwd=build_dir
)
if completed_process.returncode != 0:
raise subprocess.CalledProcessError(
completed_process.returncode, completed_process.args
)
def compile(self) -> None:
self.sources = _prepare_sources(self.modulename, self.sources, self.build_dir)
self.METHOD_NAME(self.build_dir)
self.run_meson(self.build_dir)
self._move_exec_to_root(self.build_dir)
def _prepare_sources(mname, sources, bdir):
extended_sources = sources.copy()
Path(bdir).mkdir(parents=True, exist_ok=True)
# Copy sources
for source in sources:
shutil.copy(source, bdir)
generated_sources = [
Path(f"{mname}module.c"),
Path(f"{mname}-f2pywrappers2.f90"),
Path(f"{mname}-f2pywrappers.f"),
]
bdir = Path(bdir)
for generated_source in generated_sources:
if generated_source.exists():
shutil.copy(generated_source, bdir / generated_source.name)
extended_sources.append(generated_source.name)
generated_source.unlink()
extended_sources = [
Path(source).name
for source in extended_sources
if not Path(source).suffix == ".pyf"
]
return extended_sources
|
1,652 |
test mack total parameter risk
|
### Building out a dev environment with a working copy
### of R ChainLadder is difficult. These tests are
### Currently inactive, but available should the compatibility
### of the installs improve at a later date.
import numpy as np
import pytest
import chainladder as cl
try:
from rpy2.robjects.packages import importr
from rpy2.robjects import r
CL = importr("ChainLadder")
except:
pass
def mack_r(data, alpha, est_sigma, tail):
if tail:
return r(
'mack<-MackChainLadder({},alpha={}, est.sigma="{}", tail=TRUE)'.format(
data, alpha, est_sigma
)
)
else:
return r(
'mack<-MackChainLadder({},alpha={}, est.sigma="{}")'.format(
data, alpha, est_sigma
)
)
def mack_p(data, average, est_sigma, tail):
if tail:
return cl.MackChainladder().fit(
cl.TailCurve(curve="exponential").fit_transform(
cl.Development(
average=average, sigma_interpolation=est_sigma
).fit_transform(cl.load_sample(data))
)
)
else:
return cl.MackChainladder().fit(
cl.Development(
average=average, sigma_interpolation=est_sigma
).fit_transform(cl.load_sample(data))
)
data = ["ABC", "MW2008"]
tail = [True, False]
averages = [("simple", 0), ("volume", 1), ("regression", 2)]
est_sigma = [("log-linear", "log-linear"), ("mack", "Mack")]
@pytest.mark.r
@pytest.mark.parametrize("data", data)
@pytest.mark.parametrize("averages", averages)
@pytest.mark.parametrize("est_sigma", est_sigma)
@pytest.mark.parametrize("tail", tail)
def test_mack_full_std_err(data, averages, est_sigma, tail, atol):
df = mack_r(data, averages[1], est_sigma[1], tail).rx("F.se")
p = mack_p(data, averages[0], est_sigma[0], tail).full_std_err_
xp = p.get_array_module()
p = p.values[0, 0, :, :][:, :-1] if not tail else p.values[0, 0, :, :]
r = xp.array(df[0])
assert xp.allclose(r, p, atol=atol)
@pytest.mark.r
@pytest.mark.parametrize("data", data)
@pytest.mark.parametrize("averages", averages)
@pytest.mark.parametrize("est_sigma", est_sigma)
@pytest.mark.parametrize("tail", tail)
def test_mack_process_risk(data, averages, est_sigma, tail, atol):
df = mack_r(data, averages[1], est_sigma[1], tail).rx("Mack.ProcessRisk")
p = mack_p(data, averages[0], est_sigma[0], tail).process_risk_
xp = p.get_array_module()
p = p.values[0, 0, :, :][:, :-1] if not tail else p.values[0, 0, :, :]
r = xp.array(df[0])
assert xp.allclose(r, p, atol=atol)
@pytest.mark.r
@pytest.mark.parametrize("data", data)
@pytest.mark.parametrize("averages", averages)
@pytest.mark.parametrize("est_sigma", est_sigma)
@pytest.mark.parametrize("tail", tail)
def test_mack_parameter_risk(data, averages, est_sigma, tail, atol):
df = mack_r(data, averages[1], est_sigma[1], tail).rx("Mack.ParameterRisk")
p = mack_p(data, averages[0], est_sigma[0], tail).parameter_risk_
xp = p.get_array_module()
p = p.values[0, 0, :, :][:, :-1] if not tail else p.values[0, 0, :, :]
r = xp.array(df[0])
assert xp.allclose(r, p, atol=atol)
@pytest.mark.r
@pytest.mark.parametrize("data", data)
@pytest.mark.parametrize("averages", averages)
@pytest.mark.parametrize("est_sigma", est_sigma)
@pytest.mark.parametrize("tail", tail)
def test_mack_total_process_risk(data, averages, est_sigma, tail, atol):
df = mack_r(data, averages[1], est_sigma[1], tail).rx("Total.ProcessRisk")
p = mack_p(data, averages[0], est_sigma[0], tail).total_process_risk_
xp = p.get_array_module()
p = p.values[0, 0, :, :][:, :-1] if not tail else p.values[0, 0, :, :]
r = xp.array(df[0])[None, ...]
assert xp.allclose(r, xp.nan_to_num(p), atol=atol)
@pytest.mark.r
@pytest.mark.parametrize("data", data)
@pytest.mark.parametrize("averages", averages)
@pytest.mark.parametrize("est_sigma", est_sigma)
@pytest.mark.parametrize("tail", tail)
def METHOD_NAME(data, averages, est_sigma, tail, atol):
df = mack_r(data, averages[1], est_sigma[1], tail).rx("Total.ParameterRisk")
p = mack_p(data, averages[0], est_sigma[0], tail).total_parameter_risk_
xp = p.get_array_module()
p = p.values[0, 0, :, :][:, :-1] if not tail else p.values[0, 0, :, :]
r = xp.array(df[0])[None]
assert xp.allclose(r, xp.nan_to_num(p), atol=atol)
@pytest.mark.r
@pytest.mark.parametrize("data", data)
@pytest.mark.parametrize("averages", averages)
@pytest.mark.parametrize("est_sigma", est_sigma)
@pytest.mark.parametrize("tail", tail)
def test_mack_mack_std_err_(data, averages, est_sigma, tail, atol):
df = mack_r(data, averages[1], est_sigma[1], tail).rx("Mack.S.E")
p = mack_p(data, averages[0], est_sigma[0], tail).mack_std_err_
xp = p.get_array_module()
p = p.values[0, 0, :, :][:, :-1] if not tail else p.values[0, 0, :, :]
r = xp.array(df[0])
assert xp.allclose(r, xp.nan_to_num(p), atol=atol)
@pytest.mark.r
def test_mack_asymmetric():
r("Paid <- matrix(NA, 45, 45)")
r("Paid[seq(1,45,4),] <- qpaid")
out = r("M <- MackChainLadder(Paid)")
tri = cl.load_sample("quarterly")["paid"]
xp = tri.get_array_module()
assert round(float(xp.array(out.rx("Mack.S.E")[0])[-1, -1]), 2) == round(
float(cl.MackChainladder().fit(tri).summary_.to_frame(origin_as_datetime=False).iloc[-1, -1]), 2
)
|
1,653 |
test algebraic field
|
"""repr() printing tests."""
import pytest
from diofant import (FF, QQ, ZZ, Abs, Catalan, Dummy, E, EulerGamma, Float,
Function, GoldenRatio, I, ImmutableMatrix, Integer,
Matrix, Rational, Symbol, Wild, WildFunction, false,
field, grlex, nan, ones, oo, pi, ring, root, sin, sqrt,
srepr, true, zoo)
from diofant.abc import x, y
from diofant.core.exprtools import Factors
__all__ = ()
# eval(repr(expr)) == expr has to succeed in the right environment. The right
# environment is the scope of "from diofant import *" for most cases.
ENV: dict[str, object] = {}
imports = ['from diofant import *',
'from diofant.domains.integerring import GMPYIntegerRing, PythonIntegerRing',
'from diofant.domains.rationalfield import GMPYRationalField, PythonRationalField',
'from diofant.polys.orderings import GradedLexOrder, LexOrder']
exec('\n'.join(imports), ENV) # pylint: disable=exec-used
def sT(expr, string):
# Tests that repr delivers the expected string and that
# the condition eval(repr(expr))==expr holds.
assert repr(expr) == string
assert eval(string, ENV) == expr # pylint: disable=eval-used
def test_printmethod():
class R(Abs):
def _diofantrepr(self, printer):
return f'foo({printer._print(self.args[0])})'
assert repr(R(x)) == "foo(Symbol('x'))"
def test_Add():
sT(x + y, "Add(Symbol('x'), Symbol('y'))")
assert srepr(x**2 + 1, order='lex') == ("Add(Pow(Symbol('x'), "
'Integer(2)), Integer(1))')
def test_Function():
sT(Function('f')(x), "Function('f')(Symbol('x'))")
# test unapplied Function
sT(Function('f'), "Function('f')")
sT(sin(x), "sin(Symbol('x'))")
sT(sin, 'sin')
def test_Singletons():
sT(Catalan, 'Catalan')
sT(zoo, 'zoo')
sT(EulerGamma, 'EulerGamma')
sT(E, 'E')
sT(GoldenRatio, 'GoldenRatio')
sT(Rational(1, 2), 'Rational(1, 2)')
sT(I, 'I')
sT(oo, 'oo')
sT(nan, 'nan')
sT(-oo, '-oo')
sT(Integer(-1), 'Integer(-1)')
sT(Integer(1), 'Integer(1)')
sT(pi, 'pi')
sT(Integer(0), 'Integer(0)')
def test_Integer():
sT(Integer(4), 'Integer(4)')
def test_list():
sT([x, Integer(4)], "[Symbol('x'), Integer(4)]")
def test_Matrix():
for cls, name in [(Matrix, 'MutableDenseMatrix'), (ImmutableMatrix, 'ImmutableMatrix')]:
sT(cls([[x**+1, 1], [y, x + y]]),
f"{name}([[Symbol('x'), Integer(1)], [Symbol('y'), Add(Symbol('x'), Symbol('y'))]])")
sT(cls(), f'{name}([])')
sT(cls([[x**+1, 1], [y, x + y]]), f"{name}([[Symbol('x'), Integer(1)], [Symbol('y'), Add(Symbol('x'), Symbol('y'))]])")
def test_empty_Matrix():
sT(ones(0, 3), 'MutableDenseMatrix(0, 3, [])')
sT(ones(4, 0), 'MutableDenseMatrix(4, 0, [])')
sT(ones(0, 0), 'MutableDenseMatrix([])')
def test_Rational():
sT(Rational(1, 3), 'Rational(1, 3)')
sT(Rational(-1, 3), 'Rational(-1, 3)')
def test_Factors():
assert repr(Factors(x*y**2)) == 'Factors({x: 1, y: 2})'
def test_AlgebraicElement():
K = QQ.algebraic_field(sqrt(2))
a = K.unit
sT(a, f'AlgebraicField({QQ!r}, Pow(Integer(2), Rational(1, 2)))([Integer(0), Integer(1)])')
K = QQ.algebraic_field(root(-2, 3))
a = K.unit
sT(a, f'AlgebraicField({QQ!r}, Pow(Integer(-2), Rational(1, 3)))([Integer(0), Integer(1)])')
def test_Float():
sT(Float('1.23', dps=3), "Float('1.22998', dps=3)")
sT(Float('1.23456789', dps=9), "Float('1.23456788994', dps=9)")
sT(Float('1.234567890123456789', dps=19),
"Float('1.234567890123456789013', dps=19)")
sT(Float(
'0.60038617995049726', 15), "Float('0.60038617995049726', dps=15)")
def test_Symbol():
sT(x, "Symbol('x')")
sT(y, "Symbol('y')")
sT(Symbol('x', negative=True), "Symbol('x', negative=True)")
def test_Symbol_two_assumptions():
x = Symbol('x', negative=0, integer=1)
# order could vary
s1 = "Symbol('x', integer=True, negative=False)"
s2 = "Symbol('x', negative=False, integer=True)"
assert repr(x) in (s1, s2)
assert eval(repr(x), ENV) == x # pylint: disable=eval-used
def test_Symbol_no_special_commutative_treatment():
sT(Symbol('x'), "Symbol('x')")
sT(Symbol('x', commutative=False), "Symbol('x', commutative=False)")
sT(Symbol('x', commutative=0), "Symbol('x', commutative=False)")
sT(Symbol('x', commutative=True), "Symbol('x', commutative=True)")
sT(Symbol('x', commutative=1), "Symbol('x', commutative=True)")
def test_Wild():
sT(Wild('x', even=True), "Wild('x', even=True)")
def test_Dummy():
# cannot use sT here
d = Dummy('d', nonzero=True)
assert repr(d) == "Dummy('d', nonzero=True)"
def test_Dummy_from_Symbol():
# should not get the full dictionary of assumptions
n = Symbol('n', integer=True)
d = n.as_dummy()
assert repr(d) == "Dummy('n', integer=True)"
def test_tuple():
sT((x,), "(Symbol('x'),)")
sT((x, y), "(Symbol('x'), Symbol('y'))")
def test_WildFunction():
sT(WildFunction('w'), "WildFunction('w')")
def test_settings():
pytest.raises(TypeError, lambda: srepr(x, method='garbage'))
def test_Mul():
sT(3*x**3*y, "Mul(Integer(3), Pow(Symbol('x'), Integer(3)), Symbol('y'))")
def test_FiniteField():
sT(FF(2), 'GF(2)')
F4 = FF(2, [1, 1, 1])
repr(F4.one) # not raises
def test_PolynomialRing():
sT(ZZ.inject('x'), f"UnivarPolynomialRing({ZZ!r}, (Symbol('x'),), LexOrder())")
sT(QQ.poly_ring('x', 'y', order=grlex),
f"PolynomialRing({QQ!r}, (Symbol('x'), Symbol('y')), GradedLexOrder())")
sT(ZZ.inject('x', 'y', 'z', 't').eject('t'),
f"PolynomialRing(UnivarPolynomialRing({ZZ!r}, (Symbol('t'),), "
"LexOrder()), (Symbol('x'), Symbol('y'), Symbol('z')), LexOrder())")
def test_FractionField():
sT(ZZ.inject('x').field, f"FractionField({ZZ!r}, (Symbol('x'),), LexOrder())")
sT(QQ.frac_field('x', 'y', order=grlex),
f"FractionField({QQ!r}, (Symbol('x'), Symbol('y')), GradedLexOrder())")
sT(ZZ.inject('x', 'y', 'z', 't').eject('t').field,
f"FractionField(UnivarPolynomialRing({ZZ!r}, (Symbol('t'),), LexOrder()), "
"(Symbol('x'), Symbol('y'), Symbol('z')), LexOrder())")
def test_PolyElement():
R, x, y = ring('x y', ZZ)
g = R.domain.dtype
assert repr(3*x**2*y + 1) == (f"PolyElement(PolynomialRing({ZZ!r}, (Symbol('x'), "
"Symbol('y')), LexOrder()), [((2, 1), "
f'{g(3)!r}), ((0, 0), {g(1)!r})])')
def test_FracElement():
F, x, y = field('x y', ZZ)
g = F.domain.dtype
assert repr((3*x**2*y + 1)/(x - y**2)) == (f"FracElement(FractionField({ZZ!r}, (Symbol('x'), "
f"Symbol('y')), LexOrder()), [((2, 1), {g(3)!r}), "
f'((0, 0), {g(1)!r})], [((1, 0), {g(1)!r}), '
f'((0, 2), {g(-1)!r})])')
def test_BooleanAtom():
assert repr(true) == 'true'
assert repr(false) == 'false'
def METHOD_NAME():
sT(QQ.algebraic_field(sqrt(2)),
f'AlgebraicField({QQ!r}, Pow(Integer(2), Rational(1, 2)))')
|
1,654 |
check sha1sum file
|
#!/usr/bin/env python
import argparse
import hashlib
import os
import sys
import requests
# Here I am disabling warnings as they pollute long download
# screens. However, I am passing a more readable warning to the user
# instructing them.
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from tqdm import tqdm
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def BuildDirectories(filepath):
d = os.path.dirname(filepath)
if not os.path.isdir(d):
os.makedirs(d)
def filehash(filepath):
blocksize = 64 * 1024
sha = hashlib.sha1()
with open(filepath, "rb") as fp:
while True:
data = fp.read(blocksize)
if not data:
break
sha.update(data)
return sha.hexdigest()
def GenerateSha1sumFile(root, suffix=".nc"):
lines = ""
for topdir, dirs, files in os.walk(root):
if topdir.startswith("_"):
continue
if topdir.startswith("./_"):
continue
for fpath in [os.path.join(topdir, f) for f in files]:
if not fpath.endswith(suffix):
continue
size = os.path.getsize(fpath)
sha = filehash(fpath)
name = os.path.relpath(fpath, root)
lines += "%s %s\n" % (sha, name)
return lines
def METHOD_NAME(sha1sumfile, root):
needs_updating = []
with open(sha1sumfile) as f:
lines = f.readlines()
for line in lines:
line = line.split()
sha1sum, filename = line
fpath = os.path.join(root, filename)
if os.path.isfile(fpath):
if sha1sum != filehash(fpath):
needs_updating.append(filename)
else:
needs_updating.append(filename)
return needs_updating
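# The SHA1SUM file consumed above is the output of GenerateSha1sumFile: one
# line per file, holding a SHA-1 hex digest followed by the path relative to
# the root. The entries below are invented purely to illustrate the layout:
#   356a192b7913b04c54574d18c28d46e6395428ab CERES/rsus/rsus_0.5x0.5.nc
#   da4b9237bacccdf19c0760cab7aec4a8359010b0 GPCP2/pr/pr_2.5x2.5.nc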
# default value is ILAMB_ROOT if set
local_root = "./"
if "ILAMB_ROOT" in os.environ:
local_root = os.environ["ILAMB_ROOT"]
# parse options
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--local_root",
dest="local_root",
metavar="PATH",
type=str,
default=local_root,
help="Location on your system.",
)
parser.add_argument(
"--remote_root",
dest="remote_root",
metavar="PATH",
type=str,
default="https://www.ilamb.org/ILAMB-Data/",
help="Location on the remote system.",
)
parser.add_argument(
"-c",
"--create",
dest="create",
action="store_true",
help="Enable to create a sha1sum check file of the contents of the local root",
)
parser.add_argument(
"--no-check-certificate",
dest="check",
action="store_true",
help="Enable to skip checking authenticity of the downloaded certificate",
)
parser.add_argument(
"-y",
dest="auto_accept",
action="store_true",
help="Enable to automatically accept the query to download files",
)
args = parser.parse_args()
# use create mode if you want to make a checksum file of a directory
if args.create:
with open(args.local_root + "/SHA1SUM", mode="w") as f:
f.write(GenerateSha1sumFile(args.local_root))
sys.exit()
print(
"\nComparing remote location:\n\n\t%s\n\nTo local location:\n\n\t%s"
% (args.remote_root, args.local_root)
)
# download and build the sha1sum check files
try:
resp = requests.get(args.remote_root + "/SHA1SUM", verify=(not args.check))
except requests.exceptions.SSLError:
print(
"""
SSLError: The certificate from the remote site you contacted could not
be verified. If you trust this site (for example if you are connecting
to our server https://www.ilamb.org) then you may rerun ilamb-fetch
with the --no-check-certificate option which will bypass the
certificate check step.
"""
)
sys.exit(1)
with open(args.local_root + "/SHA1SUM", "wb") as f:
f.write(resp.content)
if "404 Not Found" in open(args.local_root + "/SHA1SUM").read():
raise ValueError(
"Could not find the sha1 sum file: %s" % (args.remote_root + "/SHA1SUM")
)
needs_updating = METHOD_NAME(args.local_root + "/SHA1SUM", args.local_root)
if len(needs_updating) == 0:
print("\nAll your data is up-to-date and clean.\n")
os.system("rm -f " + args.local_root + "/SHA1SUM")
sys.exit()
print("\nI found the following files which are missing, out of date, or corrupt:\n")
for key in needs_updating:
print("\t%s%s" % (args.local_root, key))
if args.auto_accept:
reply = "y"
else:
reply = str(input("\nCalculate Total Download size? [y/n] ")).lower().strip()
if reply[0] == "y":
total_download_size = 0
with tqdm(total=len(needs_updating)) as pbar:
for key in needs_updating:
resp = requests.get(
args.remote_root + "/" + key, stream=True, verify=(not args.check)
)
total_download_size += int(resp.headers.get("content-length"))
pbar.update(1)
print("\nTotal download size: %6.1f MB" % (total_download_size / 1e6))
if args.auto_accept:
reply = "y"
else:
reply = str(input("\nDownload replacements? [y/n] ")).lower().strip()
if reply[0] == "y":
print(" ")
for key in needs_updating:
print("\tDownloading %s/%s..." % (args.remote_root, key))
BuildDirectories(args.local_root + "/" + key)
resp = requests.get(
args.remote_root + "/" + key, stream=True, verify=(not args.check)
)
total_size = int(resp.headers.get("content-length"))
initial_pos = 0
file = args.local_root + "/" + key
with open(file, "wb") as f:
with tqdm(
total=total_size,
unit="B",
unit_scale=True,
desc=file,
initial=initial_pos,
ascii=True,
) as pbar:
for ch in resp.iter_content(chunk_size=1024):
if ch:
f.write(ch)
pbar.update(len(ch))
print("\nDownload complete. Rerun ilamb-fetch to check file integrity.\n")
os.system("rm -f " + args.local_root + "/SHA1SUM")
|
1,655 |
dict key
|
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
import SCons.Memoize
# Enable memoization counting
SCons.Memoize.EnableMemoization()
class FakeObject:
def __init__(self) -> None:
self._memo = {}
def METHOD_NAME(self, argument):
return argument
@SCons.Memoize.CountDictCall(METHOD_NAME)
def dict(self, argument):
memo_key = argument
try:
memo_dict = self._memo['dict']
except KeyError:
memo_dict = {}
self._memo['dict'] = memo_dict
else:
try:
return memo_dict[memo_key]
except KeyError:
pass
result = self.compute_dict(argument)
memo_dict[memo_key] = result
return result
@SCons.Memoize.CountMethodCall
def value(self):
try:
return self._memo['value']
except KeyError:
pass
result = self.compute_value()
self._memo['value'] = result
return result
def get_memoizer_counter(self, name):
return SCons.Memoize.CounterList.get(self.__class__.__name__+'.'+name, None)
class Returner:
def __init__(self, result) -> None:
self.result = result
self.calls = 0
def __call__(self, *args, **kw):
self.calls = self.calls + 1
return self.result
class CountDictTestCase(unittest.TestCase):
def test___call__(self) -> None:
"""Calling a Memoized dict method
"""
obj = FakeObject()
called = []
fd1 = Returner(1)
fd2 = Returner(2)
obj.compute_dict = fd1
r = obj.dict(11)
assert r == 1, r
obj.compute_dict = fd2
r = obj.dict(12)
assert r == 2, r
r = obj.dict(11)
assert r == 1, r
obj.compute_dict = fd1
r = obj.dict(11)
assert r == 1, r
r = obj.dict(12)
assert r == 2, r
assert fd1.calls == 1, fd1.calls
assert fd2.calls == 1, fd2.calls
c = obj.get_memoizer_counter('dict')
assert c.hit == 3, c.hit
assert c.miss == 2, c.miss
class CountValueTestCase(unittest.TestCase):
def test___call__(self) -> None:
"""Calling a Memoized value method
"""
obj = FakeObject()
called = []
fv1 = Returner(1)
fv2 = Returner(2)
obj.compute_value = fv1
r = obj.value()
assert r == 1, r
r = obj.value()
assert r == 1, r
obj.compute_value = fv2
r = obj.value()
assert r == 1, r
r = obj.value()
assert r == 1, r
assert fv1.calls == 1, fv1.calls
assert fv2.calls == 0, fv2.calls
c = obj.get_memoizer_counter('value')
assert c.hit == 3, c.hit
assert c.miss == 1, c.miss
if __name__ == "__main__":
unittest.main()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
1,656 |
calc partiality anisotropy set
|
from __future__ import absolute_import, division, print_function
from cctbx.array_family import flex
from scitbx.matrix import sqr, col
from cctbx.crystal_orientation import crystal_orientation, basis_type
import math
import numpy as np
class partiality_handler(object):
"""
mod_partiality:
1. Calculate partiality for given
miller indices, crystal orientation, unit cell, wavelength.
2. Calculate spot centroid delta distance
"""
def __init__(self):
"""
Initialize parameters
"""
def calc_full_refl(self, I_o_p_set, sin_theta_over_lambda_sq_set,
G, B, p_set, rs_set, flag_volume_correction=True):
I_o_full_set = I_o_p_set/(G * flex.exp(-2*B*sin_theta_over_lambda_sq_set) * p_set)
return I_o_full_set
def calc_spot_radius(self, a_star_matrix, miller_indices, wavelength):
#calculate spot_radius based on rms delta_S for all spots
S0 = -1*col((0,0,1./wavelength))
sd_array = a_star_matrix.elems * miller_indices.as_vec3_double() + S0.elems
rh_set = sd_array.norms() - (1/wavelength)
return rh_set.standard_deviation_of_the_sample()
def voigt(self, x, sig, nu):
if nu < 0:
nu = 0
elif nu > 1:
nu = 1
f1 = nu * math.sqrt(math.log(2)/math.pi) * flex.exp(-4*math.log(2)*((x/sig)**2)) * (1/abs(sig))
f2 = (1-nu)/(math.pi*abs(sig)*(1+(4*((x/sig)**2))))
f3 = ((nu * math.sqrt(math.log(2)/math.pi))/abs(sig)) + ((1-nu)/(math.pi*abs(sig)))
svx = (f1 + f2)/f3
return svx
def lognpdf(self, x, FWHM, zero):
#find sig from root of this function
zero = np.abs(zero)
sig_range = np.arange(50)/100
t = sig_range * math.sqrt(math.log(4))
sig_set = np.array([sig_range[np.argmin(np.abs(( fwhm - (zero * (np.exp(t) - np.exp(-1*t))) )))] for fwhm in FWHM])
#calc x0
x0 = math.log(zero) + sig_set**2
g = 1/( sig_set * math.sqrt(2*math.pi) * np.exp(x0-((sig_set**2)/2)) )
#calc lognpdf
X = zero - x
f1 = 1/( X * sig_set * math.sqrt(2*math.pi) )
f2 = np.exp( -1 * (np.log(X)-x0)**2 / (2*(sig_set**2)) )
svx = flex.double(f1 * f2 / g)
return svx
def METHOD_NAME(self, my_uc, rotx, roty, miller_indices,
ry, rz, r0, re, nu,
bragg_angle_set, alpha_angle_set, wavelength, crystal_init_orientation,
spot_pred_x_mm_set, spot_pred_y_mm_set, detector_distance_mm,
partiality_model, flag_beam_divergence):
#use III.4 in Winkler et al 1979 (A35; P901) for set of miller indices
O = sqr(my_uc.orthogonalization_matrix()).transpose()
R = sqr(crystal_init_orientation.crystal_rotation_matrix()).transpose()
CO = crystal_orientation(O*R, basis_type.direct)
CO_rotate = CO.rotate_thru((1,0,0), rotx
).rotate_thru((0,1,0), roty)
A_star = sqr(CO_rotate.reciprocal_matrix())
S0 = -1*col((0,0,1./wavelength))
#calculate rs
rs_set = r0 + (re * flex.tan(bragg_angle_set))
if flag_beam_divergence:
rs_set += ((ry * flex.cos(alpha_angle_set))**2 + (rz * flex.sin(alpha_angle_set))**2)**(1/2)
#calculate rh
x = A_star.elems * miller_indices.as_vec3_double()
sd_array = x + S0.elems
rh_set = sd_array.norms() - (1/wavelength)
#calculate partiality
if partiality_model == "Lorentzian":
partiality_set = ((rs_set**2)/((2*(rh_set**2))+(rs_set**2)))
elif partiality_model == "Voigt":
partiality_set = self.voigt(rh_set, rs_set, nu)
elif partiality_model == "Lognormal":
partiality_set = self.lognpdf(rh_set, rs_set, nu)
#calculate delta_xy
if sum(spot_pred_y_mm_set) == 0:
#hack for dials integration - spot_pred_x_mm_set is s1 * to be fixed *
delta_xy_set = (spot_pred_x_mm_set - sd_array).norms()
else:
d_ratio = -detector_distance_mm/sd_array.parts()[2]
calc_xy_array = flex.vec3_double(sd_array.parts()[0]*d_ratio, \
sd_array.parts()[1]*d_ratio, flex.double([0]*len(d_ratio)))
pred_xy_array = flex.vec3_double(spot_pred_x_mm_set, spot_pred_y_mm_set, flex.double([0]*len(d_ratio)))
delta_xy_set = (pred_xy_array - calc_xy_array).norms()
return partiality_set, delta_xy_set, rs_set, rh_set
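# For reference, the partiality models evaluated in the method above reduce to
# (notation taken from the code; rs_set is the reciprocal-space spot radius and
# rh_set the distance of each reciprocal-lattice point from the Ewald sphere):
#   Lorentzian: p = rs**2 / (2*rh**2 + rs**2)
#   Voigt:      p = voigt(rh, rs, nu)
#   Lognormal:  p = lognpdf(rh, rs, nu)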
|
1,657 |
test get highest notification setting value
|
from sentry.models import User
from sentry.notifications.helpers import (
get_highest_notification_setting_value,
get_most_specific_notification_setting_value,
)
from sentry.notifications.types import (
NotificationScopeType,
NotificationSettingOptionValues,
NotificationSettingTypes,
)
from sentry.services.hybrid_cloud.actor import ActorType, RpcActor
from sentry.testutils.cases import TestCase
from sentry.testutils.silo import control_silo_test
from sentry.types.integrations import ExternalProviders
@control_silo_test(stable=True)
class GetMostSpecificNotificationSettingValueTestCase(TestCase):
def setUp(self) -> None:
self.user = self.create_user()
def test_get_most_specific_notification_setting_value_empty_workflow(self):
value = get_most_specific_notification_setting_value(
notification_settings_by_scope={},
recipient=RpcActor(id=self.user.id, actor_type=ActorType.USER),
parent_id=1,
type=NotificationSettingTypes.WORKFLOW,
)
assert value == NotificationSettingOptionValues.SUBSCRIBE_ONLY
def test_get_most_specific_notification_setting_value_empty_alerts(self):
value = get_most_specific_notification_setting_value(
notification_settings_by_scope={},
recipient=RpcActor(id=self.user.id, actor_type=ActorType.USER),
parent_id=1,
type=NotificationSettingTypes.ISSUE_ALERTS,
)
assert value == NotificationSettingOptionValues.ALWAYS
def test_get_most_specific_notification_setting_value_user(self):
notification_settings_by_scope = {
NotificationScopeType.USER: {
self.user.id: {
ExternalProviders.SLACK: NotificationSettingOptionValues.NEVER,
ExternalProviders.EMAIL: NotificationSettingOptionValues.ALWAYS,
},
},
}
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
recipient=RpcActor(id=self.user.id, actor_type=ActorType.USER),
parent_id=1,
type=NotificationSettingTypes.ISSUE_ALERTS,
)
assert value == NotificationSettingOptionValues.ALWAYS
def test_get_most_specific_notification_setting_value(self):
project_id = 1
notification_settings_by_scope = {
NotificationScopeType.USER: {
self.user.id: {
ExternalProviders.SLACK: NotificationSettingOptionValues.NEVER,
ExternalProviders.EMAIL: NotificationSettingOptionValues.ALWAYS,
},
},
NotificationScopeType.PROJECT: {
project_id: {
ExternalProviders.SLACK: NotificationSettingOptionValues.NEVER,
ExternalProviders.EMAIL: NotificationSettingOptionValues.NEVER,
},
},
}
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
recipient=RpcActor(id=self.user.id, actor_type=ActorType.USER),
parent_id=project_id,
type=NotificationSettingTypes.ISSUE_ALERTS,
)
assert value == NotificationSettingOptionValues.NEVER
class GetHighestNotificationSettingValueTestCase(TestCase):
def setUp(self) -> None:
self.user = User(id=1)
def test_get_highest_notification_setting_value_empty(self):
assert get_highest_notification_setting_value({}) is None
def METHOD_NAME(self):
value = get_highest_notification_setting_value(
{
ExternalProviders.SLACK: NotificationSettingOptionValues.NEVER,
ExternalProviders.EMAIL: NotificationSettingOptionValues.ALWAYS,
}
)
assert value == NotificationSettingOptionValues.ALWAYS
def test_get_highest_notification_setting_value_never(self):
value = get_highest_notification_setting_value(
{
ExternalProviders.SLACK: NotificationSettingOptionValues.NEVER,
ExternalProviders.EMAIL: NotificationSettingOptionValues.NEVER,
}
)
assert value == NotificationSettingOptionValues.NEVER
|
1,658 |
install
|
#
# SPDX-License-Identifier: BSD-2-Clause
#
# Copyright (c) 2022 Alex Richardson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from .crosscompileproject import CompilationTargets, CrossCompileCMakeProject, GitRepository
class BuildDBus(CrossCompileCMakeProject):
target = "dbus"
supported_architectures = CompilationTargets.ALL_FREEBSD_AND_CHERIBSD_TARGETS + CompilationTargets.ALL_NATIVE
repository = GitRepository("https://gitlab.freedesktop.org/dbus/dbus.git",
old_urls=[b"https://gitlab.freedesktop.org/arichardson/dbus.git"])
dependencies = ("libexpat",)
ctest_script_extra_args = ["--test-timeout", str(120 * 60)] # Tests can take a long time to run
def setup(self):
super().setup()
# Disable documentation to reduce dependencies
self.add_cmake_options(DBUS_ENABLE_DOXYGEN_DOCS=False, DBUS_ENABLE_XML_DOCS=False)
# Work around https://gitlab.freedesktop.org/pkg-config/pkg-config/-/issues/52:
self.add_cmake_options(DBUS_RELOCATABLE=False)
# Skip glib support for now:
self.add_cmake_options(DBUS_WITH_GLIB=False)
if not self.compiling_for_host():
self.add_cmake_options(DBUS_SESSION_SOCKET_DIR="/tmp")
self.add_cmake_options(TEST_SOCKET_DIR="/tmp") # Don't try to create test sockets on SMBFS
self.add_cmake_options(CMAKE_INSTALL_LOCALSTATEDIR="/var") # don't use /usr/local/var/
# Testing malloc failures makes the testsuite painfully slow.
self.ctest_environment["DBUS_TEST_MALLOC_FAILURES"] = "0"
def METHOD_NAME(self, **kwargs):
super().METHOD_NAME()
if not self.compiling_for_host() and self.target_info.is_freebsd():
self.write_file(self.rootfs_dir / "etc/rc.conf.d/dbus", contents="dbus_enable=\"YES\"\n",
overwrite=True, print_verbose_only=False)
# Slightly modified version of https://cgit.freebsd.org/ports/plain/devel/dbus/files/dbus.in
# to add the necessary users on-demand and chmod/chown the rsync'd files
self.write_file(self.rootfs_dir / self.target_info.localbase / "etc/rc.d/dbus", contents=f"""#!/bin/sh
# PROVIDE: dbus
# REQUIRE: DAEMON ldconfig
#
# Add the following lines to /etc/rc.conf to enable the D-BUS messaging system:
#
# dbus_enable="YES"
#
. /etc/rc.subr
: ${{dbus_enable=${{gnome_enable-NO}}}} ${{dbus_flags="--system"}}
name=dbus
rcvar=dbus_enable
command="{self.install_prefix}/bin/dbus-daemon"
pidfile="/var/run/dbus/pid"
start_precmd="dbus_prestart"
stop_postcmd="dbus_poststop"
dbus_prestart()
{{
# See UIDs and GIDs in freebsd-ports
if ! pw group show messagebus > /dev/null ; then
pw groupadd -n messagebus -g 556
fi
if ! pw user show messagebus > /dev/null ; then
pw useradd -n messagebus -u 556 -c "D-BUS Daemon User" -d /nonexistent -s /usr/sbin/nologin -g 556
fi
chown root:messagebus {self.install_prefix}/libexec/dbus-daemon-launch-helper
chmod 4750 {self.install_prefix}/libexec/dbus-daemon-launch-helper
chmod -R u+rwX,go+rX,go-w {self.install_prefix}/share/dbus-1 {self.install_prefix}/etc/dbus-1
mkdir -p /var/lib/dbus
{self.install_prefix}/bin/dbus-uuidgen --ensure
mkdir -p /var/run/dbus
}}
dbus_poststop()
{{
rm -f $pidfile
}}
load_rc_config ${{name}}
run_rc_command "$1"
""", overwrite=True, mode=0o755)
|
1,659 |
get args parser
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
import argparse
import os
import sys
import os.path as osp
import torch
ROOT = os.getcwd()
if str(ROOT) not in sys.path:
sys.path.append(str(ROOT))
from yolov6.utils.events import LOGGER
from yolov6.core.inferer import Inferer
def METHOD_NAME(add_help=True):
parser = argparse.ArgumentParser(description='YOLOv6 PyTorch Inference.', add_help=add_help)
parser.add_argument('--weights', type=str, default='weights/yolov6s.pt', help='model path(s) for inference.')
parser.add_argument('--source', type=str, default='data/images', help='the source path, e.g. image-file/dir.')
parser.add_argument('--yaml', type=str, default='data/coco.yaml', help='data yaml file.')
parser.add_argument('--img-size', type=int, default=640, help='the image size (h,w) for inference.')
parser.add_argument('--conf-thres', type=float, default=0.25, help='confidence threshold for inference.')
parser.add_argument('--iou-thres', type=float, default=0.45, help='NMS IoU threshold for inference.')
parser.add_argument('--max-det', type=int, default=1000, help='maximal inferences per image.')
parser.add_argument('--device', default='0', help='device to run our model i.e. 0 or 0,1,2,3 or cpu.')
parser.add_argument('--save-txt', action='store_true', help='save results to *.txt.')
parser.add_argument('--save-img', action='store_false', help='save visualized inference results.')
parser.add_argument('--classes', nargs='+', type=int, help='filter by classes, e.g. --classes 0, or --classes 0 2 3.')
parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS.')
parser.add_argument('--project', default='runs/inference', help='save inference results to project/name.')
parser.add_argument('--name', default='exp', help='save inference results to project/name.')
parser.add_argument('--hide-labels', default=False, action='store_true', help='hide labels.')
parser.add_argument('--hide-conf', default=False, action='store_true', help='hide confidences.')
parser.add_argument('--half', action='store_true', help='whether to use FP16 half-precision inference.')
args = parser.parse_args()
LOGGER.info(args)
return args
@torch.no_grad()
def run(weights=osp.join(ROOT, 'yolov6s.pt'),
source=osp.join(ROOT, 'data/images'),
yaml=None,
img_size=640,
conf_thres=0.25,
iou_thres=0.45,
max_det=1000,
device='',
save_txt=False,
save_img=True,
classes=None,
agnostic_nms=False,
project=osp.join(ROOT, 'runs/inference'),
name='exp',
hide_labels=False,
hide_conf=False,
half=False,
):
""" Inference process
This function is the main process of inference, supporting image files or dirs containing images.
Args:
weights: The path of model.pt, e.g. yolov6s.pt
source: Source path, supporting image files or dirs containing images.
yaml: Data yaml file.
img_size: Inference image-size, e.g. 640
conf_thres: Confidence threshold in inference, e.g. 0.25
iou_thres: NMS IOU threshold in inference, e.g. 0.45
max_det: Maximal detections per image, e.g. 1000
device: Cuda device, e.g. 0, or 0,1,2,3 or cpu
save_txt: Save results to *.txt
save_img: Save visualized inference results
classes: Filter by class: --class 0, or --class 0 2 3
agnostic_nms: Class-agnostic NMS
project: Save results to project/name
name: Save results to project/name, e.g. 'exp'
line_thickness: Bounding box thickness (pixels), e.g. 3
hide_labels: Hide labels, e.g. False
hide_conf: Hide confidences
half: Use FP16 half-precision inference, e.g. False
"""
# create save dir
save_dir = osp.join(project, name)
if (save_img or save_txt) and not osp.exists(save_dir):
os.makedirs(save_dir)
else:
LOGGER.warning('Save directory already existed')
if save_txt:
os.mkdir(osp.join(save_dir, 'labels'))
# Inference
inferer = Inferer(source, weights, device, yaml, img_size, half)
inferer.infer(conf_thres, iou_thres, classes, agnostic_nms, max_det, save_dir, save_txt, save_img, hide_labels, hide_conf)
if save_txt or save_img:
LOGGER.info(f"Results saved to {save_dir}")
def main(args):
run(**vars(args))
if __name__ == "__main__":
args = METHOD_NAME()
main(args)
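# Example command line (file paths and device id are illustrative only, not
# prescribed by this script):
#   python infer.py --weights weights/yolov6s.pt --source data/images \
#       --yaml data/coco.yaml --device 0 --save-txt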
|
1,660 |
test files
|
import testing
from testing import value_eq,object_eq
from testing import divert_nexus_log,restore_nexus_log
associated_files = dict()
def get_filenames():
filenames = [
'Fe.aug-cc-pwcv5z-dk.0.bas',
'Fe.aug-cc-pwcv5z-dk.0.gbs',
'Fe.BFD_VQZ.bas',
'Fe.BFD_VQZ.gbs',
'Fe.stuttgart_rsc_1997.0.bas',
'Fe.stuttgart_rsc_1997.0.gbs',
'Fe.stuttgart_rsc_1997_ecp.0.bas',
'Fe.stuttgart_rsc_1997_ecp.0.gbs',
]
return filenames
#end def get_filenames
def get_files():
return testing.collect_unit_test_file_paths('basisset',associated_files)
#end def get_files
def METHOD_NAME():
filenames = get_filenames()
files = get_files()
assert(set(files.keys())==set(filenames))
#end def test_files
def test_import():
import basisset
from basisset import BasisSets
from basisset import process_gaussian_text
from basisset import GaussianBasisSet
#end def test_import
def test_basissets():
from basisset import BasisSets
from basisset import BasisFile
from basisset import gamessBasisFile
# empty initialization
BasisSets()
BasisFile()
gamessBasisFile()
filenames = get_filenames()
files = get_files()
f = [files[fn] for fn in filenames]
# standard initialization
divert_nexus_log()
bf = BasisFile(f[1])
gbf = gamessBasisFile(f[0])
bs = BasisSets(f[2:]+[bf,gbf])
restore_nexus_log()
assert(bf.element=='Fe')
assert(bf.filename=='Fe.aug-cc-pwcv5z-dk.0.gbs')
assert(gbf.element=='Fe')
assert(gbf.filename=='Fe.aug-cc-pwcv5z-dk.0.bas')
assert(gbf.text.startswith('IRON'))
assert(gbf.text.endswith('1 1.3776500 1.0000000'))
assert(len(gbf.text.strip())==21135)
for fn in filenames:
assert(fn in bs)
assert(isinstance(bs[fn],BasisFile))
if fn.endswith('.bas'):
assert(isinstance(bs[fn],gamessBasisFile))
#end if
#end for
#end def test_basissets
def test_process_gaussian_text():
from basisset import process_gaussian_text
filenames = get_filenames()
files = get_files()
bs_ref = {
'Fe.aug-cc-pwcv5z-dk.0.bas' : 503,
'Fe.aug-cc-pwcv5z-dk.0.gbs' : 503,
'Fe.BFD_VQZ.bas' : 132,
'Fe.BFD_VQZ.gbs' : 132,
'Fe.stuttgart_rsc_1997.0.bas' : 38,
'Fe.stuttgart_rsc_1997.0.gbs' : 38,
}
pp_ref = {
'Fe.BFD_VQZ.bas' : ( 9, 132 ),
'Fe.BFD_VQZ.gbs' : ( 13, 132 ),
'Fe.stuttgart_rsc_1997.0.bas' : ( 13, 38 ),
'Fe.stuttgart_rsc_1997.0.gbs' : ( 17, 38 ),
'Fe.stuttgart_rsc_1997_ecp.0.bas' : ( 13, None ),
'Fe.stuttgart_rsc_1997_ecp.0.gbs' : ( 17, None ),
}
for fn in filenames:
if fn.endswith('.bas'):
format = 'gamess'
elif fn.endswith('.gbs'):
format = 'gaussian'
else:
format = None
#end if
f = open(files[fn],'r')
text = f.read()
f.close()
bs = process_gaussian_text(text,format,pp=False)
if fn in bs_ref:
assert(len(bs)==bs_ref[fn])
#end if
pp,bs = process_gaussian_text(text,format)
if fn in pp_ref:
ppr,bsr = pp_ref[fn]
assert(len(pp)==ppr)
if bsr is None:
assert(bs is None)
else:
assert(len(bs)==bsr)
#end if
#end if
#end for
#end def test_process_gaussian_text
def test_gaussianbasisset():
from basisset import GaussianBasisSet
filenames = get_filenames()
files = get_files()
GaussianBasisSet()
ref = {
'Fe.aug-cc-pwcv5z-dk.0.bas' : 49,
'Fe.aug-cc-pwcv5z-dk.0.gbs' : 49,
'Fe.BFD_VQZ.bas' : 23,
'Fe.BFD_VQZ.gbs' : 23,
'Fe.stuttgart_rsc_1997.0.bas' : 15,
'Fe.stuttgart_rsc_1997.0.gbs' : 15,
}
gbs = dict()
for fn in filenames:
if 'ecp' not in fn:
if fn.endswith('.bas'):
format = 'gamess'
elif fn.endswith('.gbs'):
format = 'gaussian'
else:
format = None
#end if
bs = GaussianBasisSet(files[fn],format)
assert(bs.name=='Fe')
assert(len(bs.basis)==ref[fn])
text = bs.write(format=format)
text = 'header line\n'+text
bs2 = GaussianBasisSet()
bs2.read_text(text,format=format)
assert(object_eq(bs.basis,bs2.basis))
#end if
#end for
#end def test_gaussianbasisset
|
1,661 |
test discover collision
|
# -----------------------------------------------------------------------------
# Copyright (c) 2012 - 2018, Anaconda, Inc. and Intake contributors
# All rights reserved.
#
# The full license is in the LICENSE file, distributed with this software.
# -----------------------------------------------------------------------------
import os
import os.path
import shlex
import subprocess
import sys
import pytest
import intake
from intake.source import discovery
@pytest.fixture
def extra_pythonpath():
basedir = os.path.dirname(__file__)
extra_path = os.path.join(basedir, "plugin_searchpath")
# Put extra directory on the python path
sys.path.append(extra_path)
yield extra_path
# Return python path back to normal
sys.path.remove(extra_path)
def test_package_scan(extra_pythonpath, tmp_config_path):
"This tests a non-public function."
# Default path (sys.path)
results = discovery._package_scan()
assert "foo" in results
# Explicit path
results = discovery._package_scan(path=[extra_pythonpath])
assert "foo" in results
def test_discover_cli(extra_pythonpath, tmp_config_path):
env = os.environ.copy()
env["INTAKE_CONF_FILE"] = tmp_config_path
env["PYTHONPATH"] = extra_pythonpath
# directory is not automatically scanned any more
subprocess.call(shlex.split("intake drivers enable foo intake_foo.FooPlugin"), stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env)
out = subprocess.check_output(shlex.split("intake drivers list"), stderr=subprocess.STDOUT, env=env)
assert b"foo" in out
assert out.index(b"Disabled") > out.index(b"foo")
subprocess.check_output(shlex.split("intake drivers disable foo"), stderr=subprocess.STDOUT, env=env)
out = subprocess.check_output(shlex.split("intake drivers list"), stderr=subprocess.STDOUT, env=env)
assert b"foo" in out
assert out.index(b"Disabled") < out.index(b"foo")
def test_discover(extra_pythonpath, tmp_config_path):
drivers = intake.source.discovery.DriverSouces(do_scan=True)
with pytest.warns(PendingDeprecationWarning):
assert "foo" in drivers.scanned
registry = intake.source.DriverRegistry(drivers)
# Check that package scan (name-based) discovery worked.
assert "foo" in registry
registry["foo"]()
# Check that entrypoints-based discovery worked.
assert "some_test_driver" in registry
registry["some_test_driver"]()
# Now again, turning off the package scan.
drivers = intake.source.discovery.DriverSouces()
registry = intake.source.DriverRegistry(drivers)
# Check that package scan (name-based) discovery did *not* happen.
assert "foo" not in registry
# Check that entrypoints-based discovery worked.
assert "some_test_driver" in registry
registry["some_test_driver"]()
def test_enable_and_disable(extra_pythonpath, tmp_config_path):
# Disable and then enable a package scan result.
try:
drivers = intake.source.discovery.DriverSouces(do_scan=True)
registry = intake.source.DriverRegistry(drivers)
assert "foo" in registry
drivers.disable("foo")
with pytest.warns(PendingDeprecationWarning):
assert "foo" in discovery.drivers.scanned
assert "foo" not in registry
drivers.enable("foo", "intake_foo.FooPlugin")
assert "foo" in registry
finally:
drivers.enable("foo", "intake_foo.FooPlugin")
# Disable and then enable an entrypoint result.
try:
drivers.disable("some_test_driver")
assert "some_test_driver" not in registry
drivers.enable("some_test_driver", "driver_with_entrypoints.SomeTestDriver")
assert "some_test_driver" in registry
finally:
drivers.enable("some_test_driver", "driver_with_entrypoints.SomeTestDriver")
def test_register_and_unregister(extra_pythonpath, tmp_config_path):
registry = intake.source.registry
assert "bar" not in registry
with pytest.raises(ImportError):
from intake import open_bar
intake.register_driver("bar", "intake_foo.FooPlugin")
assert "bar" in registry
from intake import open_bar # noqa
intake.unregister_driver("bar")
assert "bar" not in registry
with pytest.raises(ImportError):
from intake import open_bar # noqa
def METHOD_NAME(extra_pythonpath, tmp_config_path):
with pytest.warns(UserWarning):
discovery._package_scan(plugin_prefix="collision_")
|
1,662 |
get window geometry
|
"Zoom a window to maximum height."
import re
import sys
import tkinter
class WmInfoGatheringError(Exception):
pass
class ZoomHeight:
# Cached values for maximized window dimensions, one for each set
# of screen dimensions.
_max_height_and_y_coords = {}
def __init__(self, editwin):
self.editwin = editwin
self.top = self.editwin.top
def zoom_height_event(self, event=None):
zoomed = self.zoom_height()
if zoomed is None:
self.top.bell()
else:
menu_status = 'Restore' if zoomed else 'Zoom'
self.editwin.update_menu_label(menu='options', index='* Height',
label=f'{menu_status} Height')
return "break"
def zoom_height(self):
top = self.top
width, height, x, y = METHOD_NAME(top)
if top.wm_state() != 'normal':
# Can't zoom/restore window height for windows not in the 'normal'
# state, e.g. maximized and full-screen windows.
return None
try:
maxheight, maxy = self.get_max_height_and_y_coord()
except WmInfoGatheringError:
return None
if height != maxheight:
# Maximize the window's height.
set_window_geometry(top, (width, maxheight, x, maxy))
return True
else:
# Restore the window's height.
#
# .wm_geometry('') makes the window revert to the size requested
# by the widgets it contains.
top.wm_geometry('')
return False
def get_max_height_and_y_coord(self):
top = self.top
screen_dimensions = (top.winfo_screenwidth(),
top.winfo_screenheight())
if screen_dimensions not in self._max_height_and_y_coords:
orig_state = top.wm_state()
# Get window geometry info for maximized windows.
try:
top.wm_state('zoomed')
except tkinter.TclError:
# The 'zoomed' state is not supported by some esoteric WMs,
# such as Xvfb.
raise WmInfoGatheringError(
'Failed getting geometry of maximized windows, because ' +
'the "zoomed" window state is unavailable.')
top.update()
maxwidth, maxheight, maxx, maxy = METHOD_NAME(top)
if sys.platform == 'win32':
# On Windows, the returned Y coordinate is the one before
# maximizing, so we use 0 which is correct unless a user puts
# their dock on the top of the screen (very rare).
maxy = 0
maxrooty = top.winfo_rooty()
# Get the "root y" coordinate for non-maximized windows with their
# y coordinate set to that of maximized windows. This is needed
# to properly handle different title bar heights for non-maximized
# vs. maximized windows, as seen e.g. in Windows 10.
top.wm_state('normal')
top.update()
orig_geom = METHOD_NAME(top)
max_y_geom = orig_geom[:3] + (maxy,)
set_window_geometry(top, max_y_geom)
top.update()
max_y_geom_rooty = top.winfo_rooty()
# Adjust the maximum window height to account for the different
# title bar heights of non-maximized vs. maximized windows.
maxheight += maxrooty - max_y_geom_rooty
self._max_height_and_y_coords[screen_dimensions] = maxheight, maxy
set_window_geometry(top, orig_geom)
top.wm_state(orig_state)
return self._max_height_and_y_coords[screen_dimensions]
def METHOD_NAME(top):
geom = top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
return tuple(map(int, m.groups()))
def set_window_geometry(top, geometry):
top.wm_geometry("{:d}x{:d}+{:d}+{:d}".format(*geometry))
if __name__ == "__main__":
from unittest import main
main('idlelib.idle_test.test_zoomheight', verbosity=2, exit=False)
# Add htest?
|
1,663 |
global msg domain lang
|
from django.utils.translation import gettext as _
from django.utils.translation import gettext_noop
from corehq.apps.translations.models import SMSTranslations
from corehq.util.translation import localize
MSG_GENERIC_ERROR = "sms.survey.restart"
MSG_TOUCHFORMS_DOWN = "sms.survey.temporarilydown"
MSG_TOUCHFORMS_ERROR = "sms.survey.internalerror"
MSG_CHOICE_OUT_OF_RANGE = "sms.validation.outofrange"
MSG_INVALID_CHOICE = "sms.validation.invalidchoice"
MSG_INVALID_INT = "sms.validation.invalidint"
MSG_INVALID_INT_RANGE = "sms.validation.invalidintrange"
MSG_INVALID_FLOAT = "sms.validation.invalidfloat"
MSG_INVALID_LONG = "sms.validation.invalidlong"
MSG_INVALID_DATE = "sms.validation.invaliddate"
MSG_INVALID_TIME = "sms.validation.invalidtime"
MSG_KEYWORD_NOT_FOUND = "sms.keyword.notfound"
MSG_START_KEYWORD_USAGE = "sms.keyword.startusage"
MSG_UNKNOWN_GLOBAL_KEYWORD = "sms.keyword.unknownglobal"
MSG_FIELD_REQUIRED = "sms.survey.fieldrequired"
MSG_EXPECTED_NAMED_ARGS_SEPARATOR = "sms.structured.missingseparator"
MSG_MULTIPLE_ANSWERS_FOUND = "sms.structured.multipleanswers"
MSG_MULTIPLE_QUESTIONS_MATCH = "sms.structured.ambiguousanswer"
MSG_MISSING_EXTERNAL_ID = "sms.caselookup.missingexternalid"
MSG_CASE_NOT_FOUND = "sms.caselookup.casenotfound"
MSG_MULTIPLE_CASES_FOUND = "sms.caselookup.multiplecasesfound"
MSG_FIELD_DESCRIPTOR = "sms.survey.fielddescriptor"
MSG_FORM_NOT_FOUND = "sms.survey.formnotfound"
MSG_FORM_ERROR = "sms.survey.formerror"
MSG_OPTED_IN = "sms.opt.in"
MSG_OPTED_OUT = "sms.opt.out"
MSG_DUPLICATE_USERNAME = "sms.validation.duplicateusername"
MSG_USERNAME_TOO_LONG = "sms.validation.usernametoolong"
MSG_VERIFICATION_START_WITH_REPLY = "sms.verify.startwithreplyto"
MSG_VERIFICATION_START_WITHOUT_REPLY = "sms.verify.startwithoutreplyto"
MSG_VERIFICATION_SUCCESSFUL = "sms.verify.successful"
MSG_REGISTRATION_WELCOME_CASE = "sms.registration.welcome.case"
MSG_REGISTRATION_WELCOME_MOBILE_WORKER = "sms.registration.welcome.mobileworker"
_MESSAGES = {
MSG_GENERIC_ERROR: gettext_noop("An error has occurred. Please try restarting the survey."),
MSG_TOUCHFORMS_DOWN: gettext_noop(
"Our system is receiving a lot of messages now. "
"Can you re-send in 15 minutes? Apologies for the inconvenience!"),
MSG_TOUCHFORMS_ERROR: gettext_noop("Internal server error."),
MSG_CHOICE_OUT_OF_RANGE: gettext_noop("Answer is out of range."),
MSG_INVALID_CHOICE: gettext_noop("Invalid choice."),
MSG_INVALID_INT: gettext_noop("Invalid integer entered."),
MSG_INVALID_INT_RANGE:
gettext_noop("Invalid integer entered, expected a number between -2147483648 and 2147483647."),
MSG_INVALID_FLOAT: gettext_noop("Invalid decimal number entered."),
MSG_INVALID_LONG: gettext_noop("Invalid long integer entered."),
MSG_INVALID_DATE: gettext_noop("Invalid date format: expected {0}."),
MSG_INVALID_TIME: gettext_noop("Invalid time format: expected HHMM (24-hour)."),
MSG_KEYWORD_NOT_FOUND: gettext_noop("Keyword not found: '{0}'"),
MSG_START_KEYWORD_USAGE: gettext_noop("Usage: {0} <keyword>"),
MSG_UNKNOWN_GLOBAL_KEYWORD: gettext_noop("Unknown command: '{0}'"),
MSG_FIELD_REQUIRED: gettext_noop("This field is required."),
MSG_EXPECTED_NAMED_ARGS_SEPARATOR: gettext_noop("Expected name and value to be joined by '{0}'."),
MSG_MULTIPLE_ANSWERS_FOUND: gettext_noop("More than one answer found for '{0}'"),
MSG_MULTIPLE_QUESTIONS_MATCH: gettext_noop("More than one question matches '{0}'"),
MSG_MISSING_EXTERNAL_ID: gettext_noop("Please provide an external id for the case."),
MSG_CASE_NOT_FOUND: gettext_noop("Case with the given external id was not found."),
MSG_MULTIPLE_CASES_FOUND: gettext_noop("More than one case was found with the given external id."),
MSG_FIELD_DESCRIPTOR: gettext_noop("Field '{0}': "),
MSG_FORM_NOT_FOUND: gettext_noop("Could not find the survey being requested."),
MSG_FORM_ERROR: gettext_noop("There is a configuration error with this survey. "
"Please contact your administrator."),
MSG_OPTED_IN: gettext_noop("You have opted-in to receive messages from"
" CommCareHQ. To opt-out, reply to this number with {0}"),
MSG_OPTED_OUT: gettext_noop("You have opted-out from receiving"
" messages from CommCareHQ. To opt-in, reply to this number with {0}"),
MSG_DUPLICATE_USERNAME: gettext_noop("CommCare user {0} already exists"),
MSG_USERNAME_TOO_LONG: gettext_noop("Username {0} is too long. Must be under {1} characters."),
MSG_VERIFICATION_START_WITH_REPLY: gettext_noop("Welcome to CommCareHQ! Is this phone used by {0}? "
"If yes, reply '123' to {1} to start using SMS with CommCareHQ."),
MSG_VERIFICATION_START_WITHOUT_REPLY: gettext_noop("Welcome to CommCareHQ! Is this phone used by {0}? "
"If yes, reply '123' to start using SMS with CommCareHQ."),
MSG_VERIFICATION_SUCCESSFUL: gettext_noop("Thank you. This phone has been verified for "
"using SMS with CommCareHQ"),
MSG_REGISTRATION_WELCOME_CASE: gettext_noop("Thank you for registering with CommCareHQ."),
MSG_REGISTRATION_WELCOME_MOBILE_WORKER: gettext_noop("Thank you for registering with CommCareHQ."),
}
def get_message(msg_id, verified_number=None, context=None, domain=None, language=None):
"""
Translates the message according to the user's and domain's preferences.
msg_id - one of the MSG_* constants above
verified_number - pass in the PhoneNumber of a contact in order to
use this contact's domain and language to translate
context - some messages require additional parameters; pass them as a
tuple or list
domain - if the contact doesn't have a verified number, pass the domain
in here to use this domain's translation doc
language - if the contact doesn't have a verified number, pass the language
code in here to use this language
"""
default_msg = _MESSAGES.get(msg_id, "")
if domain:
translations = SMSTranslations.objects.filter(domain=domain).first()
elif verified_number:
translations = SMSTranslations.objects.filter(domain=verified_number.domain).first()
else:
translations = None
if language:
user_lang = language
else:
try:
user_lang = verified_number.owner.get_language_code()
except:
user_lang = None
def get_translation(lang):
return translations.translations.get(lang, {}).get(msg_id, None)
def domain_msg_user_lang():
if translations and user_lang in translations.langs:
return get_translation(user_lang)
else:
return None
def domain_msg_domain_lang():
if translations and translations.default_lang:
return get_translation(translations.default_lang)
else:
return None
def global_msg_user_lang():
result = None
if user_lang:
with localize(user_lang):
result = _(default_msg)
return result if result != default_msg else None
def METHOD_NAME():
result = None
if translations and translations.default_lang:
with localize(translations.default_lang):
result = _(default_msg)
return result if result != default_msg else None
msg = (
domain_msg_user_lang() or
domain_msg_domain_lang() or
global_msg_user_lang() or
METHOD_NAME() or
default_msg
)
if context:
msg = msg.format(*context)
return msg
|
1,664 |
items
|
import numpy as np
from pyNastran.femutils.utils import unique2d
#from pyNastran.dev.bdf_vectorized.cards.elements.solid.ctetra4 import volume4
#from pyNastran.dev.bdf_vectorized.cards.elements.solid.chexa8 import quad_area_centroid
#from pyNastran.dev.bdf_vectorized.cards.elements.solid.cpenta6 import tri_area_centroid
#from pyNastran.dev.bdf_vectorized.cards.elements.shell.cquad4 import _cquad4_normal_A
#from pyNastran.dev.bdf_vectorized.cards.elements.shell.ctria3 import _ctria3_normal_A
from pyNastran.dev.bdf_vectorized.cards.elements.utils import build_groups #, asarray
class Properties:
def __init__(self, model):
"""
Defines the Properties object.
Parameters
----------
model : BDF
the BDF object
"""
self.model = model
self.nproperties = 0
#: stores PSHELL, PCOMP, PCOMPG
self.properties_shell = model.properties_shell
# shear
#: stores PSHEAR
self.pshear = model.pshear
# spring
self.pelas = model.pelas
# bush
self.pbush = model.pbush
# rods
#self.conrod = model.conrod
#self.crod = model.crod
self.prod = model.prod
# mass
#: stores CONM1, CONM2, CMASS1, CMASS2, CMASS3, CMASS4, CMASS5, PMASS
self.mass = model.mass
# bars
#: stores PBAR, PBARL
self.properties_bar = model.properties_bar
# beams
#: stores PBEAM, PBEAML
self.properties_beam = model.properties_beam
# solids
#: stores PSOLID, PLSOLID
self.properties_solid = model.properties_solid
# created by this class
self.property_ids = None
self.n = None
self.property_groups = None
def build(self):
ptypes = self._get_property_types(nlimit=False)
self.n = 0
for props in ptypes:
assert props is not None, props
#props.build()
self.nproperties += props.n
pids = check_duplicate('property_id', ptypes, self.model.log)
self.property_ids = np.array(list(pids), dtype='int32')
self.property_ids.sort()
self.property_groups = build_groups(ptypes, 'property_id')
def get_properties(self, property_ids=None):
return self.model.elements.get_properties(property_ids)
def _get_property_types(self, nlimit=True):
"""
Parameters
----------
nlimit : bool; default=True
limit the outputs to objects with data
"""
types = [
self.prod, self.pelas, self.pbush,
self.properties_bar.pbar, self.properties_bar.pbarl,
self.properties_beam.pbeam, self.properties_beam.pbeaml,
self.pshear,
#self.properties_shell,
self.properties_shell.pshell,
self.properties_shell.pcomp,
self.properties_shell.pcompg,
#self.properties_solid,
self.properties_solid.psolid,
#self.properties_solid.plsolid,
]
if nlimit:
types2 = []
for etype in types:
if etype.n > 0:
types2.append(etype)
types = types2
return types
#def get_property_typemap(self):
#TypeMap = {
#'PELAS' : self.pelas,
#'PROD' : self.prod,
#'PSHEAR' : self.pshear,
#'PBAR' : self.properties_bar.pbar,
#'PBARL' : self.properties_bar.pbarl,
#'PBEAM' : self.properties_beam.pbeam,
#'PBEAML' : self.properties_beam.pbeaml,
##'PBUSH' : self.pbush,
#'PSHELL' : self.properties_shell.pshell,
#'PCOMP' : self.properties_shell.pcomp,
#'PCOMPG' : self.properties_shell.pcompg,
#'PSOLID' : self.properties_solid.psolid,
#}
#return TypeMap
def __len__(self):
return self.model.elements.np
def __iter__(self):
pids = self.model.elements.property_ids
for pid in pids:
yield pid
def values(self):
pids = self.model.elements.property_ids
for pid in pids:
yield self.__getitem__(pid)
def METHOD_NAME(self):
pids = self.model.elements.property_ids
for pid in pids:
yield pid, self.__getitem__(pid)
def __getitem__(self, property_ids):
return self.model.elements.get_properties(property_ids)
def check_duplicate(name, objs, log):
unique_vals = set()
for obj in objs:
if hasattr(obj, name):
vals = getattr(obj, name)
if len(vals):
#self.model.log.debug("%s vals = %s for class %s" % (
#name, vals, obj.__class__.__name__))
unique_vals.update(list(vals))
#print unique_vals
else:
#print(" %s has no %s" % (obj.__class__.__name__, name))
pass
#print("unique %s = %s\n" %(name, unique_vals))
if len(unique_vals) == 0:
log.info("unique %s = %s" %(name, unique_vals)) # fails for CONRODs
#raise RuntimeError
#print('unique %s = %s' % (name, unique_vals))
return unique_vals
def group_elements_by_property_type_and_element_type(elements, pid_data):
"""
group elements of the same type by property type
same element type & different property id (e.g. CTRIA3 PSHELL/PCOMP) -> different group
different element type & same property id (e.g. CTRIA3/CQUAD4 PSHELL) -> different group
    same element type & same property id -> same group
we do this in order to think about one property at a time and not
have to do a lot of special work to handle different methods for
getting the mass
"""
# find unique groups
#print("pid_data = \n%s\n" % str(pid_data))
pid_elementnum = unique2d(pid_data[:, 1:])
data2 = {}
etype_map = {
1 : 'CROD', 5: 'CONROD',
2 : 'CBEAM', 3 : 'CBAR',
4 : 'CSHEAR',
10 : 'CELAS1', 11 : 'CELAS2', 12 : 'CELAS3', 13 : 'CELAS4',
73 : 'CTRIA3', 144 : 'CQUAD4',
60 : 'CTETRA4', 61 : 'CTETRA10',
62 : 'CPENTA6', 63 : 'CPENTA15',
64 : 'CHEXA8', 65 : 'CHEXA20',
}
#self.model.log.debug("pid_elementnum = \n%s\n" % str(pid_elementnum))
for (pid, element_num) in pid_elementnum:
if pid not in elements.property_ids:
print('Property pid=%s does not exist' % pid)
#continue
i = np.where(pid_data[:, 1] == pid)[0]
#self.model.log.debug("pid=%i element_num=%s Step #1=> \n%s\n" % (
#pid, element_num, pid_data[i, :]))
j = np.where(pid_data[i, 2] == element_num)[0]
eids = pid_data[i[j], 0]
#self.model.log.debug("pid=%i element_num=%s eids=%s Step #2=> \n%s\n" % (
#pid, element_num, eids, pid_data[i[j], :]))
element_type = etype_map[element_num]
data2[(pid, element_type)] = eids
return data2
|
1,665 |
test require login
|
from django.test import TestCase
from .testutils import *
# For now we just have sanity checks for the templates used
# This could be enhanced by verifying the context data
class HomeTestCase(TestCase):
def setUp(self):
pass
def test_template(self):
response = self.client.get(reverse('home'))
self.assertTemplateUsed(response, 'tournament/home.html')
class LeagueHomeTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(league_url('team', 'league_home'))
self.assertTemplateUsed(response, 'tournament/team_league_home.html')
response = self.client.get(league_url('lone', 'league_home'))
self.assertTemplateUsed(response, 'tournament/lone_league_home.html')
class SeasonLandingTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'season_landing'))
self.assertTemplateUsed(response, 'tournament/team_season_landing.html')
response = self.client.get(season_url('lone', 'season_landing'))
self.assertTemplateUsed(response, 'tournament/lone_season_landing.html')
for s in Season.objects.all():
s.is_completed = True
s.save()
response = self.client.get(season_url('team', 'season_landing'))
self.assertTemplateUsed(response, 'tournament/team_completed_season_landing.html')
response = self.client.get(season_url('lone', 'season_landing'))
self.assertTemplateUsed(response, 'tournament/lone_completed_season_landing.html')
class RostersTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'rosters'))
self.assertTemplateUsed(response, 'tournament/team_rosters.html')
response = self.client.get(season_url('lone', 'rosters'))
self.assertEqual(404, response.status_code)
class StandingsTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'standings'))
self.assertTemplateUsed(response, 'tournament/team_standings.html')
response = self.client.get(season_url('lone', 'standings'))
self.assertTemplateUsed(response, 'tournament/lone_standings.html')
class CrosstableTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'crosstable'))
self.assertTemplateUsed(response, 'tournament/team_crosstable.html')
response = self.client.get(season_url('lone', 'crosstable'))
self.assertEqual(404, response.status_code)
class WallchartTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'wallchart'))
self.assertEqual(404, response.status_code)
response = self.client.get(season_url('lone', 'wallchart'))
self.assertTemplateUsed(response, 'tournament/lone_wallchart.html')
class PairingsTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'pairings'))
self.assertTemplateUsed(response, 'tournament/team_pairings.html')
response = self.client.get(season_url('lone', 'pairings'))
self.assertTemplateUsed(response, 'tournament/lone_pairings.html')
class StatsTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
def test_template(self):
response = self.client.get(season_url('team', 'stats'))
self.assertTemplateUsed(response, 'tournament/team_stats.html')
response = self.client.get(season_url('lone', 'stats'))
self.assertTemplateUsed(response, 'tournament/lone_stats.html')
class RegisterTestCase(TestCase):
def setUp(self):
createCommonLeagueData()
User.objects.create_user('Player1', password='test')
def METHOD_NAME(self):
response = self.client.get(season_url('team', 'register'))
self.assertRedirects(response, league_url('team', 'login'), fetch_redirect_response=False)
def test_template(self):
self.client.login(username='Player1', password='test')
response = self.client.get(season_url('team', 'register'))
self.assertTemplateUsed(response, 'tournament/registration_closed.html')
season = get_season('team')
season.registration_open = True
season.save()
response = self.client.get(season_url('team', 'register'))
self.assertTemplateUsed(response, 'tournament/register.html')
response = self.client.get(season_url('team', 'registration_success'))
self.assertTemplateUsed(response, 'tournament/registration_success.html')
def test_register_text(self):
user = User.objects.first()
self.client.login(username='Player1', password='test')
for league_type in ['team', 'lone']:
response = self.client.get(league_url(league_type, 'league_home'))
self.assertNotContains(response, 'Register')
self.assertNotContains(response, 'Change Registration')
season = get_season(league_type)
season.registration_open = True
season.save()
response = self.client.get(league_url(league_type, 'league_home'))
self.assertContains(response, 'Register')
self.assertNotContains(response, 'Change Registration')
registration = create_reg(season, user.username)
registration.classical_rating = 1600
registration.save()
response = self.client.get(league_url(league_type, 'league_home'))
self.assertContains(response, 'Change Registration')
self.assertNotContains(response, 'Register')
user.username = user.username.lower()
user.save()
response = self.client.get(league_url(league_type, 'league_home'))
self.assertContains(response, 'Change Registration')
self.assertNotContains(response, 'Register')
registration.status = 'rejected'
registration.save()
response = self.client.get(league_url(league_type, 'league_home'))
self.assertNotContains(response, 'Register')
self.assertNotContains(response, 'Change Registration')
|
1,666 |
test asyncio
|
import asyncio
import collections
import sys
import time
import pytest
from ddtrace.profiling import _asyncio
from ddtrace.profiling import profiler
from ddtrace.profiling.collector import stack_event
from ddtrace.profiling.collector.stack import StackCollector
from . import _asyncio_compat
def patch_stack_collector(stack_collector):
"""
    Patch a stack collector so we can count how many times it has run
"""
def _collect(self):
self.run_count += 1
return self._orig_collect()
stack_collector.run_count = 0
orig = stack_collector.collect
stack_collector._orig_collect = orig
stack_collector.collect = _collect.__get__(stack_collector)
@pytest.mark.skipif(not _asyncio_compat.PY36_AND_LATER, reason="Python > 3.5 needed")
def METHOD_NAME(tmp_path, monkeypatch) -> None:
sleep_time = 0.2
max_wait_for_collector_seconds = 60 # 1 minute timeout
async def stuff(collector) -> None:
count = collector.run_count
start_time = time.time()
while collector.run_count == count and (time.time() < start_time + max_wait_for_collector_seconds):
await asyncio.sleep(sleep_time)
async def hello(collector) -> None:
t1 = _asyncio_compat.create_task(stuff(collector), name="sleep 1")
t2 = _asyncio_compat.create_task(stuff(collector), name="sleep 2")
await stuff(collector)
return (t1, t2)
monkeypatch.setenv("DD_PROFILING_CAPTURE_PCT", "100")
monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", str(tmp_path / "pprof"))
# start a complete profiler so asyncio policy is setup
p = profiler.Profiler()
stack_collector = [collector for collector in p._profiler._collectors if type(collector) == StackCollector][0]
patch_stack_collector(stack_collector)
p.start()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
if _asyncio_compat.PY38_AND_LATER:
maintask = loop.create_task(hello(stack_collector), name="main")
else:
maintask = loop.create_task(hello(stack_collector))
    # Wait for the collector to run at least once on this thread while it is doing something
    # (this can take 2.5+ seconds at times)
count = stack_collector.run_count
start_time = time.time()
while count == stack_collector.run_count and (time.time() < start_time + max_wait_for_collector_seconds):
pass
t1, t2 = loop.run_until_complete(maintask)
events = p._profiler._recorder.reset()
p.stop()
wall_time_ns = collections.defaultdict(lambda: 0)
t1_name = _asyncio._task_get_name(t1)
t2_name = _asyncio._task_get_name(t2)
cpu_time_found = False
main_thread_ran_test = False
stack_sample_events = events[stack_event.StackSampleEvent]
for event in stack_sample_events:
wall_time_ns[event.task_name] += event.wall_time_ns
# This assertion does not work reliably on Python < 3.7
if _asyncio_compat.PY37_AND_LATER:
first_line_this_test_class = METHOD_NAME.__code__.co_firstlineno
co_filename, lineno, co_name, class_name = event.frames[0]
if event.task_name == "main":
assert event.thread_name == "MainThread"
assert len(event.frames) == 1
assert co_filename == __file__
assert first_line_this_test_class + 9 <= lineno <= first_line_this_test_class + 15
assert co_name == "hello"
assert class_name == ""
assert event.nframes == 1
elif event.task_name in (t1_name, t2_name):
assert event.thread_name == "MainThread"
assert co_filename == __file__
assert first_line_this_test_class + 4 <= lineno <= first_line_this_test_class + 9
assert co_name == "stuff"
assert class_name == ""
assert event.nframes == 1
if event.thread_name == "MainThread" and event.task_name is None:
# Make sure we account CPU time
if event.cpu_time_ns > 0:
cpu_time_found = True
for frame in event.frames:
if frame[0] == __file__ and frame[2] == "test_asyncio":
main_thread_ran_test = True
assert main_thread_ran_test
if _asyncio_compat.PY38_AND_LATER:
# We don't know the name of this task for Python < 3.8
assert wall_time_ns["main"] > 0, (wall_time_ns, stack_sample_events)
assert wall_time_ns[t1_name] > 0
assert wall_time_ns[t2_name] > 0
if sys.platform != "win32":
# Windows seems to get 0 CPU for this
assert cpu_time_found
|
1,667 |
test extend list deduplicated
|
import itertools
import inspect
import binascii
import pytest
from dlt.common.runners import Venv
from dlt.common.utils import (graph_find_scc_nodes, flatten_list_of_str_or_dicts, digest128, graph_edges_to_nodes, map_nested_in_place,
reveal_pseudo_secret, obfuscate_pseudo_secret, get_module_name, concat_strings_with_limit, increase_row_count,
merge_row_count, extend_list_deduplicated)
def test_flatten_list_of_str_or_dicts() -> None:
l_d = [{"a": "b"}, "c", 1, [2]]
d_d = flatten_list_of_str_or_dicts(l_d)
assert d_d == {"a": "b", "c": None, "1": None, "[2]": None}
# key clash
l_d = [{"a": "b"}, "a"]
with pytest.raises(KeyError):
d_d = flatten_list_of_str_or_dicts(l_d)
def test_digest128_length() -> None:
assert len(digest128("hash it")) == 120/6
def test_map_dicts_in_place() -> None:
_d = {
"a": "1",
"b": ["a", "b", ["a", "b"], {"a": "c"}],
"c": {
"d": "e",
"e": ["a", 2]
}
}
exp_d = {'a': '11', 'b': ['aa', 'bb', ['aa', 'bb'], {'a': 'cc'}], 'c': {'d': 'ee', 'e': ['aa', 4]}}
assert map_nested_in_place(lambda v: v*2, _d) == exp_d
# in place
assert _d == exp_d
_l = ["a", "b", ["a", "b"], {"a": "c"}]
exp_l = ["aa", "bb", ["aa", "bb"], {"a": "cc"}]
assert map_nested_in_place(lambda v: v*2, _l) == exp_l
assert _l == exp_l
with pytest.raises(ValueError):
map_nested_in_place(lambda v: v*2, "a")
def test_pseudo_obfuscation() -> None:
pseudo_key = b"test-key"
value = "test test test"
obfuscated = obfuscate_pseudo_secret(value, pseudo_key)
assert value == reveal_pseudo_secret(obfuscated, pseudo_key)
# make sure base64 decoding errors are raised
with pytest.raises(binascii.Error):
reveal_pseudo_secret("ABBYA", pseudo_key)
def test_get_module_name() -> None:
m = inspect.getmodule(test_pseudo_obfuscation)
assert get_module_name(m) == "test_utils"
from tests.common.cases.modules.uniq_mod_121 import find_my_module
m = inspect.getmodule(find_my_module)
assert get_module_name(m) == "uniq_mod_121"
# use exec to get __main__ exception
mod_name = Venv.restore_current().run_script("tests/common/cases/modules/uniq_mod_121.py")
assert mod_name.strip() == "uniq_mod_121"
def test_concat_strings_with_limit() -> None:
assert list(concat_strings_with_limit([], " ", 15)) == []
philosopher = ["Bertrand Russell"]
assert list(concat_strings_with_limit(philosopher, ";\n", 15)) == ["Bertrand Russell"]
# only two strings will be merged (22 chars total)
philosophers = ["Bertrand Russell", "Ludwig Wittgenstein", "G.E. Moore", "J.L. Mackie", "Alfred Tarski"]
moore_merged = ['Bertrand Russell', 'Ludwig Wittgenstein', 'G.E. Moore J.L. Mackie', 'Alfred Tarski']
moore_merged_2 = ['Bertrand Russell', 'Ludwig Wittgenstein', 'G.E. Moore;\nJ.L. Mackie', 'Alfred Tarski']
assert list(concat_strings_with_limit(philosophers, " ", 22)) == moore_merged
# none will be merged
assert list(concat_strings_with_limit(philosophers, ";\n", 22)) == philosophers
# again 1
assert list(concat_strings_with_limit(philosophers, ";\n", 23)) == moore_merged_2
# all merged
assert list(concat_strings_with_limit(philosophers, ";\n", 1024)) == [";\n".join(philosophers)]
# none will be merged, all below limit
assert list(concat_strings_with_limit(philosophers, ";\n", 1)) == philosophers
def test_find_scc_nodes() -> None:
edges = [('A', 'B'), ('B', 'C'), ('D', 'E'), ('F', 'G'), ('G', 'H'), ('I', 'I'), ('J', 'J')]
def _comp(s):
return sorted([tuple(sorted(c)) for c in s])
components = _comp([{"A", "B", "C"}, {"D", "E"}, {"F", "G", "H"}, {"I"}, {"J"}])
scc1 = graph_find_scc_nodes(graph_edges_to_nodes(edges, directed=False))
for perm_edges in itertools.permutations(edges):
scc2 = graph_find_scc_nodes(graph_edges_to_nodes(perm_edges, directed=False))
assert _comp(scc1) == _comp(scc2) == components
scc1 = scc2
# add edge that should not change the component
edges.append(("A", "C"))
scc2 = graph_find_scc_nodes(graph_edges_to_nodes(edges, directed=False))
assert _comp(scc2) == components
def test_graph_edges_to_nodes() -> None:
edges = [('A', 'B'), ('A', 'C'), ('B', 'C'), ('D', 'E'), ('F', 'G'), ('G', 'H'), ('I', 'I'), ('J', 'J')]
graph = {"A": {"B", "C"}, "B": {"C"}, "C": set(), "D": {"E"}, "E": set(), "F": {"G"}, "G": {"H"}, "H": set(), "I": set(), "J": set()}
g1 = graph_edges_to_nodes(edges)
for perm_edges in itertools.permutations(edges):
g2 = graph_edges_to_nodes(perm_edges)
# same graph no matter the order of edges
assert g1 == g2 == graph
g1 = g2
# test a few edge cases
assert graph_edges_to_nodes([]) == {}
# ignores double edge
assert graph_edges_to_nodes([('A', 'B'), ('A', 'B')]) == {'A': {'B'}, 'B': set()}
def test_increase_row_counts() -> None:
counts = {}
increase_row_count(counts, "table1", 1)
increase_row_count(counts, "table2", 0)
increase_row_count(counts, "table3", 10)
assert counts == {
"table1": 1,
"table2": 0,
"table3": 10
}
increase_row_count(counts, "table1", 2)
increase_row_count(counts, "table2", 3)
increase_row_count(counts, "table3", 4)
assert counts == {
"table1": 3,
"table2": 3,
"table3": 14
}
def test_merge_row_counts() -> None:
rc1 = {
"table1": 3,
"table2": 3,
}
merge_row_count(rc1, {
"table2": 5,
"table3": 20,
})
assert rc1 == {
"table1": 3,
"table2": 8,
"table3": 20
}
merge_row_count(rc1, {
"table2": 5,
"table3": 20,
"table4": 2
})
assert rc1 == {
"table1": 3,
"table2": 13,
"table3": 40,
"table4": 2
}
def METHOD_NAME() -> None:
assert extend_list_deduplicated(["one", "two", "three"], ["four", "five", "six"]) == ["one", "two", "three", "four", "five", "six"]
assert extend_list_deduplicated(["one", "two", "three", "six"], ["two", "four", "five", "six"]) == ["one", "two", "three", "six", "four", "five"]
assert extend_list_deduplicated(["one", "two", "three"], ["one", "two", "three"]) == ["one", "two", "three"]
assert extend_list_deduplicated([], ["one", "two", "three"]) == ["one", "two", "three"]
|
1,668 |
sort items
|
from __future__ import absolute_import, division, print_function
import wxtbx.bitmaps
from libtbx.queuing_system_utils import sge_utils
from libtbx.utils import Sorry
import wx
try :
from wx.lib.agw.genericmessagedialog import GenericMessageDialog
except ImportError :
GenericMessageDialog = wx.MessageBox
import sys, time
import getpass
user = getpass.getuser()
job_attrs = ["job_id", "state", "name", "user", "submit", "queue"]
job_labels = ["Job ID", "Status", "Name", "User", "Start time", "Queue"]
status_codes = ["d", "E", "h", "r", "R", "s", "S", "t", "T", "w"]
status_imgs = [3, 4, 0, 1, 0, 0, 0, 0, 0, 2]
col_sizes = [wx.LIST_AUTOSIZE] * 4 + [200,200]
class qsub_list_data(object):
def __init__(self):
self._data = []
self._sortby = None
self._sort_descending = False
def Refresh(self):
self._data = sge_utils.qstat_parse()
if self._sortby is not None :
self.METHOD_NAME(self._sortby, swap_order=False)
def GetItemCount(self):
return len(self._data)
def GetItemText(self, item, col):
return getattr(self._data[item], job_attrs[col])
def GetItemImage(self, item):
status = self._data[item].state[-1]
img_id = status_imgs[status_codes.index(status)]
return img_id
def METHOD_NAME(self, col, swap_order=True):
if swap_order :
if self._sortby == col :
self._sort_descending = (not self._sort_descending)
else :
self._sort_descending = False
if col == 0 :
self._data.sort(key=lambda element: int(element.job_id))
elif col == 4 :
fmt = "%m/%d/%Y %H:%M:%S"
self._data.sort(key=lambda element: time.strptime(element.submit, fmt))
else :
attr = job_attrs[col]
self._data.sort(key=lambda element: getattr(element, attr))
if self._sort_descending :
self._data.reverse()
self._sortby = col
def GetOwners(self, job_ids, as_list=False):
names = []
for job in self._data :
if job.job_id in job_ids :
names.append(job.user)
if as_list :
return names
return list(set(names))
def GetNames(self, job_ids):
names = []
for job in self._data :
if job.job_id in job_ids :
names.append(job.name)
return names
class qsub_list_view(wx.ListCtrl):
def __init__(self, *args, **kwds):
wx.ListCtrl.__init__(self, *args, **kwds)
self.SetupImages()
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnSelect, self)
self.Bind(wx.EVT_LIST_ITEM_DESELECTED, self.OnDeSelect, self)
self.Bind(wx.EVT_LEFT_DCLICK, self.OnDoubleClick, self)
self.Bind(wx.EVT_RIGHT_DOWN, self.OnRightClick, self)
self.Bind(wx.EVT_LIST_COL_CLICK, self.OnSort, self)
for i, label in enumerate(job_labels):
self.InsertColumn(i, label)
self.SetColumnWidth(i, col_sizes[i]) #wx.LIST_AUTOSIZE)
self.dataSource = qsub_list_data()
self.RefreshAllItems()
def SetupImages(self):
if wxtbx.bitmaps.icon_lib is None :
return
il = wx.ImageList(16, 16, True)
#il.Add(wx.EmptyBitmap(16,16)) #EmptyImage(16,16).ConvertToBitmap())
for icon in ["blank", "run", "recur", "stop", "status_unknown"] :
bmp = wxtbx.bitmaps.fetch_icon_bitmap("actions", icon, 16)
il.Add(bmp)
self.AssignImageList(il, wx.IMAGE_LIST_SMALL)
def OnGetItemImage(self, item):
return self.dataSource.GetItemImage(item)
def OnGetItemAttr(self, item):
pass
def OnGetItemText(self, item, col):
return self.dataSource.GetItemText(item, col)
def RefreshAllItems(self):
n_items = self.dataSource.GetItemCount()
self.SetItemCount(n_items)
if (n_items > 0):
self.RefreshItems(0, n_items - 1)
def GetSelectedJobIDs(self):
jobs = []
item = self.GetFirstSelected()
while item != -1 :
jobs.append(self.dataSource.GetItemText(item, 0))
item = self.GetNextSelected(item)
return jobs
def GetOwners(self, job_ids, as_list=False):
return self.dataSource.GetOwners(job_ids, as_list=as_list)
def GetNames(self, job_ids):
return self.dataSource.GetNames(job_ids)
def OnSelect(self, event):
pass
def OnDeSelect(self, event):
pass
def OnDoubleClick(self, event):
pass
def OnRightClick(self, event):
pass
def OnSort(self, event):
col = event.GetColumn()
self.dataSource.METHOD_NAME(col)
self.RefreshAllItems()
def Update(self):
self.dataSource.Refresh()
self.RefreshAllItems()
class queue_list_frame(wx.Frame):
def __init__(self, *args, **kwds):
wx.Frame.__init__(self, *args, **kwds)
self.statusbar = self.CreateStatusBar()
self.SetupToolbar()
self.list_ctrl = qsub_list_view(parent=self,
id=-1,
size=(800,600),
style=wx.LC_REPORT|wx.LC_VIRTUAL)
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.Bind(wx.EVT_WINDOW_DESTROY, self.OnDestroy)
self.Update()
self._timer = wx.Timer(owner=self)
self.Bind(wx.EVT_TIMER, self.OnUpdate)
self.statusbar.Bind(wx.EVT_LEFT_DCLICK, self.OnUpdate)
self._timer.Start(10000)
def SetupToolbar(self):
if wxtbx.bitmaps.icon_lib is None :
return
self.toolbar = self.CreateToolBar(style=wx.TB_TEXT)
commands = [
("actions","reload", "OnUpdate", "Refresh list"),
("actions","editdelete", "OnDelete", "Delete selected"),
]
for (icon_class, icon_name, fname, label) in commands :
bmp = wxtbx.bitmaps.fetch_icon_bitmap(icon_class, icon_name, 32)
tool_button = self.toolbar.AddLabelTool(-1, label, bmp,
shortHelp=label, kind=wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, getattr(self, fname), tool_button)
self.toolbar.Realize()
def OnClose(self, event):
self.Destroy()
def OnDestroy(self, event):
pass
def OnUpdate(self, event):
self.Update()
def OnDelete(self, event):
job_ids = self.list_ctrl.GetSelectedJobIDs()
if len(job_ids) == 0 :
return
users = self.list_ctrl.GetOwners(job_ids)
if (len(users) > 1) or (not user in users):
raise Sorry("At least one job selected for deletion is owned by a "
"different user; this interface only allows you to delete your own "+
"jobs.")
if self.ConfirmDelete(job_ids):
try :
success = sge_utils.qdel(job_ids=job_ids)
except RuntimeError as e :
raise Sorry("Error executing 'qdel' command: %s" % str(e))
else :
GenericMessageDialog("Job(s) deleted successfuly.", style=wx.OK)
def SetUpdateInterval(self, interval) : # in seconds, not ms
self._timer.Stop()
self._timer.Start(interval * 1000)
def Update(self):
self.list_ctrl.Update()
self.statusbar.SetStatusText("Last updated at %s" % get_time())
def ConfirmDelete(self, job_ids):
pass
def get_time():
return time.strftime("%m-%d-%y %H:%M:%S", time.localtime())
#-----------------------------------------------------------------------
def run(args):
app = wx.App(0)
frame = queue_list_frame(None, -1, "SGE Queue Status")
frame.Show()
frame.Fit()
app.MainLoop()
if __name__ == "__main__" :
run(sys.argv[1:])
|
1,669 |
extract source
|
import requests
import os
from bs4 import BeautifulSoup
import urllib.request
import re
import time
import sys
from selenium import webdriver
__noted__ = "fixes shamelessly stolen from dunnousername without credit" # Just don't delete this
webpage = "http://beaumontpd.org/crime-statistics/"
"""
Click the links that lead to the files and copy their paths. **NOTE:** Ensure that the file paths all match; otherwise remove a path level until they do.
Also ensure that the domain stays the same.
Verify on the page that the href to the file contains the domain; if it doesn't, uncomment the lines that use `domain`.
"""
web_path = "/DocumentCenter/Index/"
domain = "https://www.beaumontca.gov/"
sleep_time = 5 # Set to desired sleep time
driver = webdriver.Chrome(
    executable_path=r"C:\chromedriver_win32\chromedriver.exe"
) # Will likely have to change the executable_path
save_dir = "./data/"
if not os.path.exists(save_dir):
os.makedirs(save_dir)
html_page = requests.get(webpage).text
soup = BeautifulSoup(html_page, "html.parser")
# print(soup)
url_name = []
"""
Gets the urls that lead to the DocumentCenter, and saves them in source.txt
"""
def METHOD_NAME(soup):
for link in soup.findAll("a"):
if link.get("href") is None:
continue
if not "DocumentCenter" in link["href"]:
continue
print(link.get("href"))
url = str(link["href"])
name = str(link).split(">")
print(name)
# name = name[:name.rindex('.')]
with open("source.txt", "a") as output:
output.write(url + ", " + name[1].strip("/</a").replace(" ", "_") + "\n")
# Uncomment following line if domain is not in href, and comment out line above
# output.write(domain + url + ", " + name.strip("/") + "\n")
print("Done")
def extract_info(soup, year):
driver.get(soup)
# Identify elements with tagname <a>
links = driver.find_elements_by_tag_name("a")
for link in links:
try:
if "pdf" in link.get_attribute("class"):
print(link.get_attribute("href"))
url = link.get_attribute("href")
name_index = url.rfind("/")
name = url[name_index:]
print("NAME: " + name)
print(year)
# name = name[:name.rindex('.')]
with open("url_name.txt", "a") as output:
# output.write(url)
# Uncomment following line if domain is not in href, and comment out line above
output.write(
url
+ ", "
+ name.strip("/</a").replace(" ", "_")
+ ","
+ year.rstrip()
+ "\n"
)
print("Done")
except KeyError:
pass
def get_files(save_dir, sleep_time):
if not os.path.isfile("url_name.txt"):
return
with open("url_name.txt", "r") as input_file:
for line in input_file:
if not line.isspace():
save_dir2 = save_dir
line_list = line.split(", ")
print(line_list)
try:
url_2 = line_list[0]
file_name = line_list[1].replace("-", "_")
year_folder = line_list[2].rstrip() + "/"
# year_folder = year_folder
print("Year_folder: " + year_folder)
except IndexError:
print(line_list)
pass
if not os.path.exists(save_dir + year_folder.strip("\n")):
os.makedirs(save_dir + year_folder.strip("\n"))
save_dir2 = save_dir + year_folder
if os.path.exists(save_dir2 + file_name) == False:
pdf = urllib.request.urlopen(url_2)
with open(save_dir2 + file_name + ".pdf", "wb") as file:
file.write(pdf.read())
file.close()
time.sleep(sleep_time)
print("Sleep")
input_file.close()
# os.remove("url_name.txt")
try:
os.remove("url_name.txt")
except FileNotFoundError:
pass
METHOD_NAME(soup)
with open("source.txt", "r") as f:
for line in f:
# Iterates over path to DocumentCenter, to send to extract_info, which will then extract the pdfs from the DocumentCenter
if not line.isspace():
line = line.split(",")
extract_info(line[0], line[1])
with open("source.txt", "r") as f:
for line in f:
# Iterates over path to DocumentCenter, to send to extract_info, which will then extract the pdfs from the DocumentCenter
if not line.isspace():
line = line.split(",")
get_files(save_dir, sleep_time)
try:
os.remove("source.txt")
except FileNotFoundError:
pass
|
1,670 |
eval residual
|
"""
Homogenized nonlinear hyperelastic material with evolving microstructure
deformation at each macroscopic quadrature point.
Run in parallel using::
mpiexec -n 4 sfepy-run --app=bvp-mM --debug-mpi sfepy/examples/homogenization/nonlinear_hyperelastic_mM.py
"""
import numpy as nm
import six
from sfepy import data_dir, base_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la
from sfepy.discrete.evaluate import Evaluator
hyperelastic_data = {}
def post_process(out, pb, state, extend=False):
if isinstance(state, dict):
pass
else:
pb.update_materials_flag = 2
stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
mode='el_avg')
out['cauchy_stress'] = Struct(name='output_data',
mode='cell',
data=stress,
dofs=None)
strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
mode='el_avg')
out['green_strain'] = Struct(name='output_data',
mode='cell',
data=strain,
dofs=None)
pb.update_materials_flag = 0
if pb.conf.options.get('recover_micro', False):
happ = pb.homogen_app
if pb.ts.step == 0:
rname = pb.conf.options.recovery_region
rcells = pb.domain.regions[rname].get_cells()
sh = hyperelastic_data['homog_mat_shape']
happ.app_options.store_micro_idxs = sh[1] * rcells
else:
hpb = happ.problem
recovery_hook = hpb.conf.options.get('recovery_hook', None)
if recovery_hook is not None:
recovery_hook = hpb.conf.get_function(recovery_hook)
rname = pb.conf.options.recovery_region
rcoors = []
for ii in happ.app_options.store_micro_idxs:
key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
if key in happ.micro_state_cache:
rcoors.append(happ.micro_state_cache[key])
recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)
return out
def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
if problem.update_materials_flag == 2 and mode == 'qp':
out = hyperelastic_data['homog_mat']
return {k: nm.array(v) for k, v in six.iteritems(out)}
elif problem.update_materials_flag == 0 or not mode == 'qp':
return
output('get_homog_mat')
dim = problem.domain.mesh.dim
update_var = problem.conf.options.mesh_update_variables[0]
state_u = problem.equations.variables[update_var]
state_u.field.clear_mappings()
family_data = problem.family_data(state_u, term.region,
term.integral, term.act_integration)
mtx_f = family_data.mtx_f.reshape((coors.shape[0],)
+ family_data.mtx_f.shape[-2:])
if hasattr(problem, 'mtx_f_prev'):
rel_mtx_f = la.dot_sequences(mtx_f, nm.linalg.inv(problem.mtx_f_prev),
'AB')
else:
rel_mtx_f = mtx_f
problem.mtx_f_prev = mtx_f.copy()
macro_data = {'mtx_e': rel_mtx_f - nm.eye(dim)} # '*' - macro strain
out = get_homog_coefs_nonlinear(ts, coors, mode, macro_data,
term=term, problem=problem,
iteration=problem.iiter, **kwargs)
out['E'] = 0.5 * (la.dot_sequences(mtx_f, mtx_f, 'ATB') - nm.eye(dim))
hyperelastic_data['time'] = ts.step
hyperelastic_data['homog_mat_shape'] = family_data.det_f.shape[:2]
hyperelastic_data['homog_mat'] = \
{k: nm.array(v) for k, v in six.iteritems(out)}
return out
def ulf_iteration_hook(pb, nls, vec, it, err, err0):
Evaluator.new_ulf_iteration(pb, nls, vec, it, err, err0)
pb.iiter = it
pb.update_materials_flag = True
pb.update_materials()
pb.update_materials_flag = False
class MyEvaluator(Evaluator):
def METHOD_NAME(self, vec, is_full=False):
if not is_full:
vec = self.problem.equations.make_full_vec(vec)
vec_r = self.problem.equations.eval_residuals(vec * 0)
return vec_r
def ulf_init(pb):
pb.family_data = HyperElasticULFamilyData()
pb_vars = pb.get_variables()
pb_vars['u'].init_data()
pb.update_materials_flag = True
pb.iiter = 0
options = {
'output_dir': 'output',
'mesh_update_variables': ['u'],
'nls_iter_hook': ulf_iteration_hook,
'pre_process_hook': ulf_init,
'micro_filename': (base_dir +
'/examples/homogenization/nonlinear_homogenization.py'),
'recover_micro': True,
'recovery_region': 'Recovery',
'post_process_hook': post_process,
'user_evaluator': MyEvaluator,
}
materials = {
'solid': 'get_homog',
}
fields = {
'displacement': ('real', 'vector', 'Omega', 1),
}
variables = {
'u': ('unknown field', 'displacement'),
'v': ('test field', 'displacement', 'u'),
}
filename_mesh = data_dir + '/meshes/2d/its2D.mesh'
regions = {
'Omega': 'all',
'Left': ('vertices in (x < 0.001)', 'facet'),
'Bottom': ('vertices in (y < 0.001 )', 'facet'),
'Recovery': ('cell 49, 81', 'cell'),
}
ebcs = {
'l': ('Left', {'u.all': 0.0}),
'b': ('Bottom', {'u.all': 'move_bottom'}),
}
centre = nm.array([0, 0], dtype=nm.float64)
def move_bottom(ts, coor, **kwargs):
from sfepy.linalg import rotation_matrix2d
vec = coor[:, 0:2] - centre
angle = 3 * ts.step
print('angle:', angle)
mtx = rotation_matrix2d(angle)
out = nm.dot(vec, mtx) - vec
return out
functions = {
'move_bottom': (move_bottom,),
'get_homog': (get_homog_mat,),
}
equations = {
'balance_of_forces':
"""dw_nonsym_elastic.1.Omega(solid.A, v, u)
= - dw_lin_prestress.1.Omega(solid.S, v)""",
}
solvers = {
'ls': ('ls.scipy_direct', {}),
'newton': ('nls.newton', {
'eps_a': 1e-3,
'eps_r': 1e-3,
'i_max': 20,
}),
'ts': ('ts.simple', {
't0': 0,
't1': 1,
'n_step': 3 + 1,
'verbose': 1,
})
}
|
1,671 |
create step outputs
|
import asyncio
import inspect
from typing import (
Any,
AsyncIterator,
Callable,
Iterator,
List,
Mapping,
Sequence,
Set,
TypeVar,
Union,
)
from typing_extensions import TypeAlias
import dagster._check as check
from dagster._core.definitions import (
AssetCheckEvaluation,
AssetCheckResult,
AssetMaterialization,
AssetObservation,
DynamicOutput,
ExpectationResult,
Node,
NodeHandle,
Output,
)
from dagster._core.definitions.asset_layer import AssetLayer
from dagster._core.definitions.op_definition import OpComputeFunction
from dagster._core.definitions.result import MaterializeResult
from dagster._core.errors import DagsterExecutionStepExecutionError, DagsterInvariantViolationError
from dagster._core.events import DagsterEvent
from dagster._core.execution.context.compute import OpExecutionContext
from dagster._core.execution.context.system import StepExecutionContext
from dagster._core.system_config.objects import ResolvedRunConfig
from dagster._utils import iterate_with_context
from .outputs import StepOutput, StepOutputProperties
from .utils import op_execution_error_boundary
T = TypeVar("T")
OpOutputUnion: TypeAlias = Union[
DynamicOutput[Any],
Output[Any],
AssetMaterialization,
ExpectationResult,
AssetObservation,
DagsterEvent,
AssetCheckEvaluation,
AssetCheckResult,
MaterializeResult,
]
def METHOD_NAME(
node: Node, handle: NodeHandle, resolved_run_config: ResolvedRunConfig, asset_layer: AssetLayer
) -> Sequence[StepOutput]:
check.inst_param(node, "node", Node)
check.inst_param(handle, "handle", NodeHandle)
# the run config has the node output name configured
config_output_names: Set[str] = set()
current_handle = handle
while current_handle:
op_config = resolved_run_config.ops[current_handle.to_string()]
current_handle = current_handle.parent
config_output_names = config_output_names.union(op_config.outputs.output_names)
step_outputs: List[StepOutput] = []
for name, output_def in node.definition.output_dict.items():
asset_info = asset_layer.asset_info_for_output(handle, name)
step_outputs.append(
StepOutput(
node_handle=handle,
name=output_def.name,
dagster_type_key=output_def.dagster_type.key,
properties=StepOutputProperties(
is_required=output_def.is_required,
is_dynamic=output_def.is_dynamic,
is_asset=asset_info is not None,
should_materialize=output_def.name in config_output_names,
asset_key=asset_info.key if asset_info and asset_info.is_required else None,
is_asset_partitioned=bool(asset_info.partitions_def) if asset_info else False,
asset_check_handle=asset_layer.asset_check_handle_for_output(handle, name),
),
)
)
return step_outputs
def _validate_event(event: Any, step_context: StepExecutionContext) -> OpOutputUnion:
if not isinstance(
event,
(
DynamicOutput,
Output,
AssetMaterialization,
ExpectationResult,
AssetObservation,
DagsterEvent,
AssetCheckResult,
AssetCheckEvaluation,
MaterializeResult,
),
):
raise DagsterInvariantViolationError(
(
"Compute function for {described_node} yielded a value of type {type_} "
"rather than an instance of Output, AssetMaterialization, or ExpectationResult."
" Values yielded by {node_type}s must be wrapped in one of these types. If your "
"{node_type} has a single output and yields no other events, you may want to use "
"`return` instead of `yield` in the body of your {node_type} compute function. If "
"you are already using `return`, and you expected to return a value of type "
"{type_}, you may be inadvertently returning a generator rather than the value "
# f"you expected. Value is {str(event[0])}"
).format(
described_node=step_context.describe_op(),
type_=type(event),
node_type=step_context.op_def.node_type_str,
)
)
return event
def gen_from_async_gen(async_gen: AsyncIterator[T]) -> Iterator[T]:
    # This is a prime use case for asyncio.Runner, but that is new in 3.11 and no appealing backport was found
loop = asyncio.new_event_loop()
try:
while True:
try:
yield loop.run_until_complete(async_gen.__anext__())
except StopAsyncIteration:
return
finally:
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
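# Usage sketch (hypothetical names, not from the original module): wrap an async
# generator so it can be consumed from synchronous code, e.g.
#   for event in gen_from_async_gen(some_async_gen()):
#       handle(event)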
def _yield_compute_results(
step_context: StepExecutionContext, inputs: Mapping[str, Any], compute_fn: Callable
) -> Iterator[OpOutputUnion]:
check.inst_param(step_context, "step_context", StepExecutionContext)
context = OpExecutionContext(step_context)
user_event_generator = compute_fn(context, inputs)
if isinstance(user_event_generator, Output):
raise DagsterInvariantViolationError(
(
"Compute function for {described_node} returned an Output rather than "
"yielding it. The compute_fn of the {node_type} must yield "
"its results"
).format(
described_node=step_context.describe_op(),
node_type=step_context.op_def.node_type_str,
)
)
if user_event_generator is None:
return
if inspect.isasyncgen(user_event_generator):
user_event_generator = gen_from_async_gen(user_event_generator)
op_label = step_context.describe_op()
for event in iterate_with_context(
lambda: op_execution_error_boundary(
DagsterExecutionStepExecutionError,
msg_fn=lambda: f"Error occurred while executing {op_label}:",
step_context=step_context,
step_key=step_context.step.key,
op_def_name=step_context.op_def.name,
op_name=step_context.op.name,
),
user_event_generator,
):
if context.has_events():
yield from context.consume_events()
yield _validate_event(event, step_context)
if context.has_events():
yield from context.consume_events()
def execute_core_compute(
step_context: StepExecutionContext, inputs: Mapping[str, Any], compute_fn: OpComputeFunction
) -> Iterator[OpOutputUnion]:
"""Execute the user-specified compute for the op. Wrap in an error boundary and do
all relevant logging and metrics tracking.
"""
check.inst_param(step_context, "step_context", StepExecutionContext)
check.mapping_param(inputs, "inputs", key_type=str)
step = step_context.step
emitted_result_names = set()
for step_output in _yield_compute_results(step_context, inputs, compute_fn):
yield step_output
if isinstance(step_output, (DynamicOutput, Output)):
emitted_result_names.add(step_output.output_name)
expected_op_output_names = {
output.name for output in step.step_outputs if not output.properties.asset_check_handle
}
omitted_outputs = expected_op_output_names.difference(emitted_result_names)
if omitted_outputs:
step_context.log.info(
f"{step_context.op_def.node_type_str} '{step.node_handle}' did not fire "
f"outputs {omitted_outputs!r}"
)
|
1,672 |
primary key
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'ListCommunicationServiceKeysResult',
'AwaitableListCommunicationServiceKeysResult',
'list_communication_service_keys',
'list_communication_service_keys_output',
]
@pulumi.output_type
class ListCommunicationServiceKeysResult:
"""
A class representing the access keys of a CommunicationService.
"""
def __init__(__self__, primary_connection_string=None, METHOD_NAME=None, secondary_connection_string=None, secondary_key=None):
if primary_connection_string and not isinstance(primary_connection_string, str):
raise TypeError("Expected argument 'primary_connection_string' to be a str")
pulumi.set(__self__, "primary_connection_string", primary_connection_string)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'primary_key' to be a str")
pulumi.set(__self__, "primary_key", METHOD_NAME)
if secondary_connection_string and not isinstance(secondary_connection_string, str):
raise TypeError("Expected argument 'secondary_connection_string' to be a str")
pulumi.set(__self__, "secondary_connection_string", secondary_connection_string)
if secondary_key and not isinstance(secondary_key, str):
raise TypeError("Expected argument 'secondary_key' to be a str")
pulumi.set(__self__, "secondary_key", secondary_key)
@property
@pulumi.getter(name="primaryConnectionString")
def primary_connection_string(self) -> Optional[str]:
"""
CommunicationService connection string constructed via the primaryKey
"""
return pulumi.get(self, "primary_connection_string")
@property
@pulumi.getter(name="primaryKey")
def METHOD_NAME(self) -> Optional[str]:
"""
The primary access key.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter(name="secondaryConnectionString")
def secondary_connection_string(self) -> Optional[str]:
"""
CommunicationService connection string constructed via the secondaryKey
"""
return pulumi.get(self, "secondary_connection_string")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> Optional[str]:
"""
The secondary access key.
"""
return pulumi.get(self, "secondary_key")
class AwaitableListCommunicationServiceKeysResult(ListCommunicationServiceKeysResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListCommunicationServiceKeysResult(
primary_connection_string=self.primary_connection_string,
METHOD_NAME=self.METHOD_NAME,
secondary_connection_string=self.secondary_connection_string,
secondary_key=self.secondary_key)
def list_communication_service_keys(communication_service_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListCommunicationServiceKeysResult:
"""
Get the access keys of the CommunicationService resource.
Azure REST API version: 2023-03-31.
:param str communication_service_name: The name of the CommunicationService resource.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['communicationServiceName'] = communication_service_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:communication:listCommunicationServiceKeys', __args__, opts=opts, typ=ListCommunicationServiceKeysResult).value
return AwaitableListCommunicationServiceKeysResult(
primary_connection_string=pulumi.get(__ret__, 'primary_connection_string'),
METHOD_NAME=pulumi.get(__ret__, 'primary_key'),
secondary_connection_string=pulumi.get(__ret__, 'secondary_connection_string'),
secondary_key=pulumi.get(__ret__, 'secondary_key'))
@_utilities.lift_output_func(list_communication_service_keys)
def list_communication_service_keys_output(communication_service_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListCommunicationServiceKeysResult]:
"""
Get the access keys of the CommunicationService resource.
Azure REST API version: 2023-03-31.
:param str communication_service_name: The name of the CommunicationService resource.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
...
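# Illustrative usage sketch (not part of the generated SDK; the resource names
# below are hypothetical):
#
#   keys = list_communication_service_keys(
#       communication_service_name="example-acs",
#       resource_group_name="example-rg")
#   pulumi.export("secondaryConnectionString", keys.secondary_connection_string)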
|
1,673 |
test run through message interrupt save and
|
# -*- coding: utf-8 -*-
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.event import BpmnEvent
from SpiffWorkflow.bpmn.specs.event_definitions.message import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'matth'
class MessageNonInterruptTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, self.subprocesses = self.load_workflow_spec(
'Test-Workflows/*.bpmn20.xml',
'sid-b0903a88-fe74-4f93-b912-47b815ea8d1c',
False)
def testRunThroughHappySaveAndRestore(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.save_restore()
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.save_restore()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.do_next_exclusive_step('Do Something That Takes A Long Time')
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def METHOD_NAME(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.save_restore()
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.save_restore()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.workflow.do_engine_steps()
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
        self.assertEqual(3, len(self.workflow.get_tasks(TaskState.WAITING)))
self.do_next_named_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughHappy(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.do_next_exclusive_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterrupt(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.workflow.do_engine_steps()
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(3, len(self.workflow.get_tasks(TaskState.WAITING)))
self.do_next_named_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptOtherOrder(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.do_next_exclusive_step('Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.workflow.do_engine_steps()
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptOtherOrderSaveAndRestore(self):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.save_restore()
self.do_next_exclusive_step(
'Select Test', choice='Message Non Interrupt')
self.workflow.do_engine_steps()
self.save_restore()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.catch(BpmnEvent(MessageEventDefinition('Test Message'), {}))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.WAITING)))
self.assertEqual(2, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Do Something That Takes A Long Time')
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
|
1,674 |
main
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def prettify(response, verbose=True):
if verbose:
print(json.dumps(response, indent=4, ensure_ascii=False))
print()
return response
def METHOD_NAME(config="../../config.yaml", namespace=""):
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host[0]
guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role='guest', party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
# get DataTransform party instance of guest
data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
# configure DataTransform for guest
data_transform_0_guest_party_instance.component_param(with_label=True)
# get and configure DataTransform party instance of host
data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
# define Intersection components
intersection_0 = Intersection(name="intersection_0")
pipeline.add_component(reader_0)
pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
lr_param = {
"name": "hetero_sshe_lr_0",
"penalty": "L2",
"optimizer": "sgd",
"tol": 0.0001,
"alpha": 0.01,
"max_iter": 30,
"early_stop": "weight_diff",
"batch_size": -1,
"learning_rate": 0.15,
"init_param": {
"init_method": "zeros",
"fit_intercept": False
},
"encrypt_param": {
"key_length": 1024
},
"reveal_every_iter": False,
"reveal_strategy": "respectively"
}
hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))
pipeline.compile()
# fit model
pipeline.fit()
# query component summary
prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
prettify(pipeline.get_component("evaluation_0").get_summary())
pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])
predict_pipeline = PipeLine()
# add data reader onto predict pipeline
predict_pipeline.add_component(reader_0)
# add selected components from train pipeline onto predict pipeline
# specify data source
predict_pipeline.add_component(
pipeline, data=Data(
predict_input={
pipeline.data_transform_0.input.data: reader_0.output.data}))
# run predict model
predict_pipeline.predict()
return pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
METHOD_NAME(args.config)
else:
METHOD_NAME()
|
1,675 |
create space path
|
from urllib.parse import urlparse
import squish
from os import makedirs
from os.path import exists, join
from helpers.SpaceHelper import get_space_id
from helpers.ConfigHelper import get_config, set_config
from helpers.SyncHelper import listenSyncStatusForItem
def substituteInLineCodes(value):
value = value.replace('%local_server%', get_config('localBackendUrl'))
value = value.replace('%secure_local_server%', get_config('secureLocalBackendUrl'))
value = value.replace('%client_root_sync_path%', get_config('clientRootSyncPath'))
value = value.replace('%current_user_sync_path%', get_config('currentUserSyncPath'))
value = value.replace(
'%local_server_hostname%', urlparse(get_config('localBackendUrl')).netloc
)
return value
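# Illustrative example (config values are hypothetical): with
# localBackendUrl set to 'https://localhost:9200', the placeholder
# '%local_server_hostname%' above is replaced by 'localhost:9200'
# (the netloc of the parsed URL).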
def getClientDetails(context):
clientDetails = {'server': '', 'user': '', 'password': ''}
for row in context.table[0:]:
row[1] = substituteInLineCodes(row[1])
if row[0] == 'server':
clientDetails.update({'server': row[1]})
elif row[0] == 'user':
clientDetails.update({'user': row[1]})
elif row[0] == 'password':
clientDetails.update({'password': row[1]})
return clientDetails
def createUserSyncPath(username):
# '' at the end adds '/' to the path
userSyncPath = join(get_config('clientRootSyncPath'), username, '')
if not exists(userSyncPath):
makedirs(userSyncPath)
setCurrentUserSyncPath(userSyncPath)
return userSyncPath
def METHOD_NAME(space='Personal'):
spacePath = join(get_config('currentUserSyncPath'), space, '')
if not exists(spacePath):
makedirs(spacePath)
return spacePath
def setCurrentUserSyncPath(syncPath):
set_config('currentUserSyncPath', syncPath)
def getResourcePath(resource='', user='', space=''):
sync_path = get_config('currentUserSyncPath')
if user:
sync_path = user
if get_config('ocis'):
space = space or get_config('syncConnectionName')
sync_path = join(sync_path, space)
sync_path = join(get_config('clientRootSyncPath'), sync_path)
resource = resource.replace(sync_path, '').strip('/')
return join(
sync_path,
resource,
)
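# Illustrative example (values are hypothetical, not taken from the test config):
# with clientRootSyncPath='/tmp/client-bdd' and ocis disabled,
#   getResourcePath('folder1/file.txt', user='Alice')
# resolves to '/tmp/client-bdd/Alice/folder1/file.txt'.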
def getCurrentUserSyncPath():
return get_config('currentUserSyncPath')
def startClient():
squish.startApplication(
"owncloud -s"
+ " --logfile "
+ get_config('clientLogFile')
+ " --logdebug"
+ " --logflush"
+ " --confdir "
+ get_config('clientConfigDir')
)
def getPollingInterval():
pollingInterval = '''[ownCloud]
remotePollInterval={pollingInterval}
'''
args = {'pollingInterval': 5000}
pollingInterval = pollingInterval.format(**args)
return pollingInterval
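# For illustration only: with the default value above, getPollingInterval()
# returns the config snippet
#   [ownCloud]
#   remotePollInterval=5000
# which setUpClient() appends to the generated account settings below.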
def setUpClient(username, displayName, space="Personal"):
userSetting = '''
[Accounts]
0/Folders/1/davUrl={url}
0/Folders/1/ignoreHiddenFiles=true
0/Folders/1/localPath={client_sync_path}
0/Folders/1/displayString={displayString}
0/Folders/1/paused=false
0/Folders/1/targetPath=/
0/Folders/1/version=2
0/Folders/1/virtualFilesMode=off
0/dav_user={davUserName}
0/display-name={displayUserName}
0/http_CredentialVersion=1
0/http_oauth={oauth}
0/http_user={davUserName}
0/url={local_server}
0/user={displayUserFirstName}
0/version=1
version=2
'''
userSetting = userSetting + getPollingInterval()
syncPath = createUserSyncPath(username)
dav_endpoint = join("remote.php/dav/files", username)
server_url = get_config('localBackendUrl')
is_ocis = get_config('ocis')
if is_ocis:
set_config('syncConnectionName', space)
syncPath = METHOD_NAME(space)
if space == "Personal":
space = displayName
dav_endpoint = join("dav/spaces", get_space_id(space, username))
args = {
'url': join(server_url, dav_endpoint, ''),
'displayString': get_config('syncConnectionName'),
'displayUserName': displayName,
'davUserName': username if is_ocis else username.lower(),
'displayUserFirstName': displayName.split()[0],
'client_sync_path': syncPath,
'local_server': server_url,
'oauth': 'true' if is_ocis else 'false',
}
userSetting = userSetting.format(**args)
    with open(get_config('clientConfigFile'), "w") as configFile:
        configFile.write(userSetting)
startClient()
listenSyncStatusForItem(syncPath)
|
1,676 |
count max reuse followers
|
from flask import g, current_app
from werkzeug.local import LocalProxy
from udata.models import db, WithMetrics
from udata.core.organization.models import Organization
from udata.core.dataset.models import Dataset
from udata.core.reuse.models import Reuse
__all__ = ('Site', 'SiteSettings')
DEFAULT_FEED_SIZE = 20
class SiteSettings(db.EmbeddedDocument):
home_datasets = db.ListField(db.ReferenceField(Dataset))
home_reuses = db.ListField(db.ReferenceField(Reuse))
class Site(WithMetrics, db.Document):
id = db.StringField(primary_key=True)
title = db.StringField(required=True)
keywords = db.ListField(db.StringField())
feed_size = db.IntField(required=True, default=DEFAULT_FEED_SIZE)
configs = db.DictField()
themes = db.DictField()
settings = db.EmbeddedDocumentField(SiteSettings, default=SiteSettings)
__metrics_keys__ = [
'max_dataset_followers',
'max_dataset_reuses',
'max_reuse_datasets',
'max_reuse_followers',
'max_org_followers',
'max_org_reuses',
'max_org_datasets',
'datasets',
'discussions',
'followers',
'organizations',
'public-service',
'resources',
'reuses',
'users',
]
def __str__(self):
return self.title or ''
def count_users(self):
from udata.models import User
self.metrics['users'] = User.objects(confirmed_at__ne=None, deleted=None).count()
self.save()
def count_org(self):
from udata.models import Organization
self.metrics['organizations'] = Organization.objects.visible().count()
self.save()
def count_org_for_badge(self, badge_kind):
from udata.models import Organization
self.metrics[badge_kind] = Organization.objects(badges__kind=badge_kind).count()
self.save()
def count_datasets(self):
from udata.models import Dataset
self.metrics['datasets'] = Dataset.objects.visible().count()
self.save()
def count_resources(self):
self.metrics['resources'] = next(Dataset.objects.visible().aggregate(
{'$project': {'resources': 1}},
            {'$unwind': '$resources'},
{'$group': {'_id': 'result', 'count': {'$sum': 1}}}
), {}).get('count', 0)
self.save()
def count_reuses(self):
self.metrics['reuses'] = Reuse.objects.visible().count()
self.save()
def count_followers(self):
from udata.models import Follow
self.metrics['followers'] = Follow.objects(until=None).count()
self.save()
def count_discussions(self):
from udata.models import Discussion
self.metrics['discussions'] = Discussion.objects.count()
self.save()
def count_max_dataset_followers(self):
dataset = (Dataset.objects(metrics__followers__gt=0).visible()
.order_by('-metrics.followers').first())
self.metrics['max_dataset_followers'] = dataset.metrics['followers'] if dataset else 0
self.save()
def count_max_dataset_reuses(self):
dataset = (Dataset.objects(metrics__reuses__gt=0).visible()
.order_by('-metrics.reuses').first())
self.metrics['max_dataset_reuses'] = dataset.metrics['reuses'] if dataset else 0
self.save()
def count_max_reuse_datasets(self):
reuse = (Reuse.objects(metrics__datasets__gt=0).visible()
.order_by('-metrics.datasets').first())
self.metrics['max_reuse_datasets'] = reuse.metrics['datasets'] if reuse else 0
self.save()
def METHOD_NAME(self):
reuse = (Reuse.objects(metrics__followers__gt=0).visible()
.order_by('-metrics.followers').first())
self.metrics['max_reuse_followers'] = reuse.metrics['followers'] if reuse else 0
self.save()
def count_max_org_followers(self):
org = (Organization.objects(metrics__followers__gt=0).visible()
.order_by('-metrics.followers').first())
self.metrics['max_org_followers'] = org.metrics['followers'] if org else 0
self.save()
def count_max_org_reuses(self):
org = (Organization.objects(metrics__reuses__gt=0).visible()
.order_by('-metrics.reuses').first())
self.metrics['max_org_reuses'] = org.metrics['reuses'] if org else 0
self.save()
def count_max_org_datasets(self):
org = (Organization.objects(metrics__datasets__gt=0).visible()
.order_by('-metrics.datasets').first())
self.metrics['max_org_datasets'] = org.metrics['datasets'] if org else 0
self.save()
def get_current_site():
if getattr(g, 'site', None) is None:
site_id = current_app.config['SITE_ID']
g.site, _ = Site.objects.get_or_create(id=site_id, defaults={
'title': current_app.config.get('SITE_TITLE'),
'keywords': current_app.config.get('SITE_KEYWORDS', []),
})
return g.site
current_site = LocalProxy(get_current_site)
@Dataset.on_delete.connect
def remove_from_home_datasets(dataset):
if dataset in current_site.settings.home_datasets:
current_site.settings.home_datasets.remove(dataset)
current_site.save()
@Reuse.on_delete.connect
def remove_from_home_reuses(reuse):
if reuse in current_site.settings.home_reuses:
current_site.settings.home_reuses.remove(reuse)
current_site.save()
|
1,677 |
test unicast missing port
|
"""Functional tests for scanning.
The tests here are supposed to cover non-protocol specific aspects of
scanning, like scanning for a specific device or derive device model.
Two "generic" protocols (MRP and AirPlay) have been arbitrarily chosen
to have something to test with (could have been other protocols). They
are just called "service1" and "service2" to emphasize that the specific
protocols are irrelevant. Later, service3 was added as well...
"""
from ipaddress import ip_address
import pytest
from pyatv.const import DeviceModel, Protocol
from tests import fake_udns
from tests.conftest import Scanner
from tests.utils import assert_device
SERVICE_1_ID = "mrp_id_1"
SERVICE_1_NAME = "MRP ATV"
SERVICE_1_SERVICE_NAME = "MRP Service"
SERVICE_1_IP = "10.0.0.1"
SERVICE_2_ID = "AA:BB:CC:DD:EE:FF"
SERVICE_2_NAME = "AirPlay ATV"
SERVICE_2_IP = "10.0.0.2"
SERVICE_3_ID = "raopid"
SERVICE_3_NAME = "AirPlay ATV"
DEFAULT_KNOCK_PORTS = {3689, 7000, 49152, 32498}
pytestmark = pytest.mark.asyncio
def service1(model=None):
return fake_udns.mrp_service(
SERVICE_1_SERVICE_NAME,
SERVICE_1_NAME,
SERVICE_1_ID,
addresses=[SERVICE_1_IP],
model=model,
)
def service2(address=SERVICE_1_IP):
return fake_udns.airplay_service(SERVICE_2_NAME, SERVICE_2_ID, addresses=[address])
def service3():
return fake_udns.raop_service(
SERVICE_3_NAME, SERVICE_3_ID, addresses=[SERVICE_1_IP], port=5000
)
def mrp_service_tvos_15():
return fake_udns.mrp_service(
SERVICE_1_SERVICE_NAME,
SERVICE_1_NAME,
SERVICE_1_ID,
addresses=[SERVICE_1_IP],
version="19J346",
)
async def test_multicast_scan_no_device_found(multicast_scan: Scanner):
atvs = await multicast_scan()
assert len(atvs) == 0
async def test_multicast_scan_for_particular_device(udns_server, multicast_scan):
udns_server.add_service(service1())
udns_server.add_service(service2(address=SERVICE_1_IP))
udns_server.add_service(service3())
atvs = await multicast_scan(identifier={SERVICE_1_ID, SERVICE_2_ID})
assert len(atvs) == 1
assert atvs[0].name == SERVICE_2_NAME
async def test_multicast_scan_for_specific_devices(udns_server, multicast_scan):
udns_server.add_service(service1())
udns_server.add_service(service2(address=SERVICE_2_IP))
atvs = await multicast_scan(identifier=SERVICE_2_ID)
assert len(atvs) == 1
assert atvs[0].name == SERVICE_2_NAME
assert atvs[0].address == ip_address(SERVICE_2_IP)
async def test_multicast_scan_deep_sleeping_device(
udns_server, multicast_scan: Scanner
):
udns_server.sleep_proxy = True
udns_server.add_service(service1())
atvs = await multicast_scan()
assert len(atvs) == 1
assert atvs[0].deep_sleep
async def test_multicast_scan_device_info(udns_server, multicast_scan: Scanner):
udns_server.add_service(service1())
udns_server.add_service(service2())
atvs = await multicast_scan()
assert len(atvs) == 1
device_info = atvs[0].device_info
assert device_info.mac == SERVICE_2_ID
async def test_multicast_scan_device_model(udns_server, multicast_scan: Scanner):
udns_server.add_service(service1(model="J105aAP"))
atvs = await multicast_scan(protocol=Protocol.MRP)
assert len(atvs) == 1
device_info = atvs[0].device_info
assert device_info.model == DeviceModel.Gen4K
async def test_multicast_filter_multiple_protocols(
udns_server, multicast_scan: Scanner
):
udns_server.add_service(service1())
udns_server.add_service(service2())
udns_server.add_service(service3())
atvs = await multicast_scan(protocol={Protocol.MRP, Protocol.RAOP})
assert len(atvs) == 1
atv = atvs[0]
assert len(atv.services) == 2
assert atv.get_service(Protocol.MRP) is not None
assert atv.get_service(Protocol.RAOP) is not None
async def test_multicast_mrp_tvos15_disabled(udns_server, multicast_scan: Scanner):
udns_server.add_service(mrp_service_tvos_15())
atvs = await multicast_scan()
assert len(atvs) == 1
atv = atvs[0]
assert not atv.get_service(Protocol.MRP).enabled
async def test_unicast_scan_no_results(unicast_scan: Scanner):
atvs = await unicast_scan()
assert len(atvs) == 0
async def METHOD_NAME(udns_server, unicast_scan: Scanner):
udns_server.add_service(
fake_udns.FakeDnsService("dummy", SERVICE_1_IP, None, None, None)
)
atvs = await unicast_scan()
assert len(atvs) == 0
async def test_unicast_missing_properties(udns_server, unicast_scan: Scanner):
udns_server.add_service(
fake_udns.FakeDnsService("dummy", SERVICE_1_IP, 1234, None, None)
)
atvs = await unicast_scan()
assert len(atvs) == 0
async def test_unicast_scan_device_info(udns_server, unicast_scan: Scanner):
udns_server.add_service(service1())
udns_server.add_service(service2())
atvs = await unicast_scan()
assert len(atvs) == 1
device_info = atvs[0].device_info
assert device_info.mac == SERVICE_2_ID
async def test_unicast_scan_device_model(udns_server, unicast_scan: Scanner):
udns_server.add_service(service1(model="J105aAP"))
atvs = await unicast_scan()
assert len(atvs) == 1
device_info = atvs[0].device_info
assert device_info.model == DeviceModel.Gen4K
async def test_unicast_scan_port_knock(unicast_scan: Scanner, stub_knock_server):
await unicast_scan()
assert stub_knock_server.ports == DEFAULT_KNOCK_PORTS
assert stub_knock_server.knock_count == 1
async def test_unicast_filter_multiple_protocols(udns_server, unicast_scan: Scanner):
udns_server.add_service(service1())
udns_server.add_service(service2())
udns_server.add_service(service3())
atvs = await unicast_scan(protocol={Protocol.MRP, Protocol.RAOP})
assert len(atvs) == 1
atv = atvs[0]
assert len(atv.services) == 2
assert atv.get_service(Protocol.MRP) is not None
assert atv.get_service(Protocol.RAOP) is not None
async def test_unicast_mrp_tvos15_disabled(udns_server, unicast_scan: Scanner):
udns_server.add_service(mrp_service_tvos_15())
atvs = await unicast_scan()
assert len(atvs) == 1
atv = atvs[0]
assert not atv.get_service(Protocol.MRP).enabled
|
1,678 |
compilers minimum version
|
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.files import get, copy, rmdir, replace_in_file
from conan.tools.build import check_min_cppstd
from conan.tools.scm import Version
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
import os
required_conan_version = ">=1.53.0"
class SeasocksConan(ConanFile):
name = "seasocks"
description = "A tiny embeddable C++ HTTP and WebSocket server for Linux"
license = "BSD-2-Clause"
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/mattgodbolt/seasocks"
topics = ("embeddable", "webserver", "websockets")
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"with_zlib": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"with_zlib": True,
}
@property
def _min_cppstd(self):
return 11 if Version(self.version) < "1.4.5" else 17
@property
def METHOD_NAME(self):
if Version(self.version) < "1.4.5":
return {}
else:
return {
"Visual Studio": "16",
"msvc": "191",
"gcc": "7",
"clang": "7",
"apple-clang": "10",
}
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def requirements(self):
if self.options.with_zlib:
self.requires("zlib/1.2.13")
def validate(self):
if self.settings.os not in ["Linux", "FreeBSD"]:
raise ConanInvalidConfiguration(f"{self.ref} doesn't support this os")
if self.settings.compiler.cppstd:
check_min_cppstd(self, self._min_cppstd)
minimum_version = self.METHOD_NAME.get(str(self.settings.compiler), False)
if minimum_version and Version(self.settings.compiler.version) < minimum_version:
raise ConanInvalidConfiguration(
f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
)
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def _patch_sources(self):
# No warnings as errors
cmakelists = os.path.join(self.source_folder, "CMakeLists.txt")
replace_in_file(self, cmakelists, "-Werror", "")
replace_in_file(self, cmakelists, "-pedantic-errors", "")
def generate(self):
tc = CMakeToolchain(self)
tc.variables["DEFLATE_SUPPORT"] = self.options.with_zlib
tc.variables["SEASOCKS_SHARED"] = self.options.shared
tc.variables["SEASOCKS_EXAMPLE_APP"] = False
tc.variables["UNITTESTS"] = False
tc.generate()
deps = CMakeDeps(self)
deps.generate()
def build(self):
self._patch_sources()
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
copy(self, pattern="LICENSE", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
rmdir(self, os.path.join(self.package_folder, "share"))
def package_info(self):
self.cpp_info.set_property("cmake_file_name", "Seasocks")
self.cpp_info.set_property("cmake_target_name", "Seasocks::seasocks")
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
self.cpp_info.components["libseasocks"].libs = ["seasocks"]
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["libseasocks"].system_libs.extend(["pthread", "m"])
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "Seasocks"
self.cpp_info.names["cmake_find_package_multi"] = "Seasocks"
self.cpp_info.components["libseasocks"].names["cmake_find_package"] = "seasocks"
self.cpp_info.components["libseasocks"].names["cmake_find_package_multi"] = "seasocks"
self.cpp_info.components["libseasocks"].set_property("cmake_target_name", "Seasocks::seasocks")
if self.options.with_zlib:
self.cpp_info.components["libseasocks"].requires = ["zlib::zlib"]
|
1,679 |
remove by hash
|
"""
ssh_key_management: Endpoints for managing SSH keys on the robot
"""
import contextlib
import functools
import hashlib
import ipaddress
import logging
import os
from typing import (
Any,
Generator,
IO,
List,
Tuple,
)
from aiohttp import web
from .handler_type import Handler
LOG = logging.getLogger(__name__)
def require_linklocal(handler: Handler) -> Handler:
"""Ensure the decorated is only called if the request is linklocal.
The host ip address should be in the X-Host-IP header (provided by nginx)
"""
@functools.wraps(handler)
async def decorated(request: web.Request) -> web.Response:
ipaddr_str = request.headers.get("x-host-ip")
invalid_req_data = {
"error": "bad-interface",
"message": (
f"The endpoint {request.rel_url}"
f" can only be used from link-local connections."
f" Make sure you're connected to this robot directly by cable"
f" and using this robot's wired IP address"
f" (not its wireless IP address)."
),
}
if not ipaddr_str:
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data=invalid_req_data, status=403
)
try:
addr = ipaddress.ip_address(ipaddr_str)
except ValueError:
LOG.exception(f"Couldn't parse host ip address {ipaddr_str}")
raise
if not addr.is_link_local:
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data=invalid_req_data, status=403
)
return await handler(request)
return decorated
@contextlib.contextmanager
def authorized_keys(mode: str = "r") -> Generator[IO[Any], None, None]:
"""Open the authorized_keys file. Separate function for mocking.
:param mode: As :py:meth:`open`
"""
path = "/var/home/.ssh/authorized_keys"
if not os.path.exists(path):
os.makedirs(os.path.dirname(path))
open(path, "w").close()
with open(path, mode) as ak:
yield ak
def get_keys() -> List[Tuple[str, str]]:
"""Return a list of tuples of [md5(pubkey), pubkey]"""
with authorized_keys() as ak:
return [
(hashlib.new("md5", line.encode()).hexdigest(), line)
for line in ak.read().split("\n")
if line.strip()
]
def METHOD_NAME(hashval: str) -> None:
    """Remove the key whose md5 sum matches hashval.
    :raises: KeyError if the hashval wasn't found
    """
    key_details = get_keys()
    if not any(keyhash == hashval for keyhash, _ in key_details):
        raise KeyError(hashval)
    # Rewrite authorized_keys with every key except the one being removed
    with authorized_keys("w") as ak:
        for keyhash, key in key_details:
            if keyhash != hashval:
                ak.write(f"{key}\n")
def key_present(hashval: str) -> bool:
"""Check if the key whose md5 is hashval is in authorized_keys
:returns: ``True`` if the key is present, ``False`` otherwise
"""
return hashval in [keyhash for keyhash, _ in get_keys()]
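# Illustration (not part of the original module): the hash compared here is the
# hex MD5 digest of the full public-key line, mirroring get_keys(), e.g.
#   hashlib.new("md5", "ssh-rsa AAAA... user@host".encode()).hexdigest()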
@require_linklocal
async def list_keys(request: web.Request) -> web.Response:
"""List keys in the authorized_keys file.
GET /server/ssh_keys
-> 200 OK {"public_keys": [{"key_md5": md5 hex digest, "key": key string}]}
(or 403 if not from the link-local connection)
"""
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
{
"public_keys": [
{"key_md5": details[0], "key": details[1]} for details in get_keys()
]
},
status=200,
)
@require_linklocal
async def add(request: web.Request) -> web.Response:
"""Add a public key to the authorized_keys file.
POST /server/ssh_keys {"key": key string}
-> 201 Created
If the key string doesn't look like an openssh public key, rejects with 400
"""
def key_error(error: str, message: str) -> web.Response:
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data={"error": error, "message": message}, status=400
)
body = await request.json()
if "key" not in body or not isinstance(body["key"], str):
return key_error("no-key", 'No "key" element in body')
pubkey = body["key"]
# Do some fairly minor sanitization; dropbear will ignore invalid keys but
# we still don’t want to have a bunch of invalid data in there
pubkey_parts = pubkey.split()
if len(pubkey_parts) == 0:
return key_error("bad-key", "Key is empty")
alg = pubkey_parts[0]
# We don’t allow dss so this has to be rsa or ecdsa and shouldn’t start
# with restrictions
if alg != "ssh-rsa" and not alg.startswith("ecdsa"):
LOG.warning(f"weird keyfile uploaded: starts with {alg}")
return key_error("bad-key", f"Key starts with invalid algorithm {alg}")
if "\n" in pubkey[:-1]:
LOG.warning("Newlines in keyfile that shouldn't be there")
return key_error("bad-key", "Key has a newline")
# This is a more or less correct key we can write
if "\n" == pubkey[-1]:
pubkey = pubkey[:-1]
hashval = hashlib.new("md5", pubkey.encode()).hexdigest()
if not key_present(hashval):
with authorized_keys("a") as ak:
ak.write(f"{pubkey}\n")
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data={"message": f"Added key {hashval}", "key_md5": hashval}, status=201
)
@require_linklocal
async def clear(request: web.Request) -> web.Response:
"""Clear all public keys from authorized_keys
DELETE /server/ssh_keys
-> 200 OK if successful
(or 403 if not from the link-local connection)
"""
with authorized_keys("w") as ak:
ak.write("\n".join([]) + "\n")
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data={
"message": "Keys cleared. " "Restart robot to take effect",
"restart_url": "/server/restart",
},
status=200,
)
@require_linklocal
async def remove(request: web.Request) -> web.Response:
"""Remove a public key from authorized_keys
DELETE /server/ssh_keys/:key_md5_hexdigest
-> 200 OK if the key was found
-> 404 Not Found otherwise
"""
requested_hash = request.match_info["key_md5"]
new_keys: List[str] = []
found = False
for keyhash, key in get_keys():
if keyhash == requested_hash:
found = True
else:
new_keys.append(key)
if not found:
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data={
"error": "invalid-key-hash",
"message": f"No such key md5 {requested_hash}",
},
status=404,
)
with authorized_keys("w") as ak:
ak.write("\n".join(new_keys) + "\n")
return web.json_response( # type: ignore[no-untyped-call,no-any-return]
data={
"message": f"Key {requested_hash} deleted. " "Restart robot to take effect",
"restart_url": "/server/restart",
},
status=200,
)
|
1,680 |
clean name
|
"""
Copyright 2016, 2017 UFPE - Universidade Federal de Pernambuco
This file is part of the Amadeus Learning Management System program, or simply Amadeus LMS.
Amadeus LMS is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation (FSF), version 2 of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License, under the title "LICENSE", along with this program; if not, write to the Free Software Foundation (FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
"""
# coding=utf-8
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.utils.html import strip_tags
from django.forms.models import inlineformset_factory
from subjects.models import Tag
from .models import Bulletin
from file_resubmit.widgets import ResubmitFileWidget
from pendencies.forms import PendenciesLimitedForm
from pendencies.models import Pendencies
class BulletinForm(forms.ModelForm):
subject = None
MAX_UPLOAD_SIZE = 1024*1024
def __init__(self, *args, **kwargs):
super(BulletinForm, self).__init__(*args, **kwargs)
self.subject = kwargs['initial'].get('subject', None)
if self.instance.id:
self.subject = self.instance.topic.subject
self.initial['tags'] = ", ".join(self.instance.tags.all().values_list("name", flat = True))
tags = forms.CharField(label = _('Tags'), required = False)
class Meta:
model = Bulletin
fields = ['name', 'content', 'brief_description', 'show_window', 'visible','file_content','indicators']
labels = {
'name': _('Bulletin name'),
'content': _('Bulletin content'),
}
widgets = {
'content': forms.Textarea,
'brief_description': forms.Textarea,
'file_content': ResubmitFileWidget(attrs={'accept':'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet,application/vnd.ms-excel,application/vnd.oasis.opendocument.spreadsheet,text/csv'}),
'indicators': ResubmitFileWidget(attrs={'accept':'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet,application/vnd.ms-excel,application/vnd.oasis.opendocument.spreadsheet,text/csv'}),
}
def METHOD_NAME(self):
name = self.cleaned_data.get('name', '')
topics = self.subject.topic_subject.all()
for topic in topics:
if self.instance.id:
same_name = topic.resource_topic.filter(name__unaccent__iexact = name).exclude(id = self.instance.id).count()
else:
same_name = topic.resource_topic.filter(name__unaccent__iexact = name).count()
if same_name > 0:
self._errors['name'] = [_('This subject already has a bulletin with this name')]
return ValueError
return name
def clean_file_content(self):
file_content = self.cleaned_data.get('file_content', False)
if file_content:
if hasattr(file_content, '_size'):
if file_content._size > self.MAX_UPLOAD_SIZE:
self._errors['file_content'] = [_("The file is too large. It should have less than 1MB.")]
return ValueError
elif not self.instance.pk:
self._errors['file_content'] = [_('This field is required.')]
return ValueError
return file_content
def clean_indicators(self):
indicators = self.cleaned_data.get('indicators', False)
if indicators:
if hasattr(indicators, '_size'):
if indicators._size > self.MAX_UPLOAD_SIZE:
self._errors['indicators'] = [_("The file is too large. It should have less than 1MB.")]
return ValueError
elif not self.instance.pk:
self._errors['indicators'] = [_('This field is required.')]
return ValueError
return indicators
def save(self, commit = True):
super(BulletinForm, self).save(commit = True)
self.instance.save()
previous_tags = self.instance.tags.all()
tags = self.cleaned_data['tags'].split(",")
        # Excluding unwanted tags
for prev in previous_tags:
if not prev.name in tags:
self.instance.tags.remove(prev)
for tag in tags:
tag = tag.strip()
exist = Tag.objects.filter(name = tag).exists()
if exist:
new_tag = Tag.objects.get(name = tag)
else:
new_tag = Tag.objects.create(name = tag)
if not new_tag in self.instance.tags.all():
self.instance.tags.add(new_tag)
return self.instance
class FormModalMessage(forms.Form):
MAX_UPLOAD_SIZE = 10*1024*1024
comment = forms.CharField(widget=forms.Textarea,label=_("Message"))
image = forms.FileField(widget=ResubmitFileWidget(attrs={'accept':'image/*'}),required=False)
def clean_comment(self):
comment = self.cleaned_data.get('comment', '')
cleaned_comment = strip_tags(comment)
if cleaned_comment == '':
self._errors['comment'] = [_('This field is required.')]
return ValueError
return comment
def clean_image(self):
image = self.cleaned_data.get('image', False)
if image:
if hasattr(image, '_size'):
if image._size > self.MAX_UPLOAD_SIZE:
self._errors['image'] = [_("The image is too large. It should have less than 10MB.")]
return ValueError
return image
InlinePendenciesFormset = inlineformset_factory(Bulletin, Pendencies, form = PendenciesLimitedForm, extra = 1, max_num = 3, validate_max = True, can_delete = True)
|
1,681 |
tear down
|
# Copyright 2018 gevent contributors. See LICENSE for details.
import os
import unittest
import sys
from gevent import _config
class TestResolver(unittest.TestCase):
old_resolver = None
def setUp(self):
if 'GEVENT_RESOLVER' in os.environ:
self.old_resolver = os.environ['GEVENT_RESOLVER']
del os.environ['GEVENT_RESOLVER']
def METHOD_NAME(self):
if self.old_resolver:
os.environ['GEVENT_RESOLVER'] = self.old_resolver
def test_key(self):
self.assertEqual(_config.Resolver.environment_key, 'GEVENT_RESOLVER')
def test_default(self):
from gevent.resolver.thread import Resolver
conf = _config.Resolver()
self.assertEqual(conf.get(), Resolver)
def test_env(self):
from gevent.resolver.blocking import Resolver
os.environ['GEVENT_RESOLVER'] = 'foo,bar,block,dnspython'
conf = _config.Resolver()
self.assertEqual(conf.get(), Resolver)
os.environ['GEVENT_RESOLVER'] = 'dnspython'
# The existing value is unchanged
self.assertEqual(conf.get(), Resolver)
# A new object reflects it
try:
from gevent.resolver.dnspython import Resolver as DResolver
except ImportError: # pragma: no cover
# dnspython is optional; skip it.
import warnings
warnings.warn('dnspython not installed')
else:
conf = _config.Resolver()
self.assertEqual(conf.get(), DResolver)
def test_set_str_long(self):
from gevent.resolver.blocking import Resolver
conf = _config.Resolver()
conf.set('gevent.resolver.blocking.Resolver')
self.assertEqual(conf.get(), Resolver)
def test_set_str_short(self):
from gevent.resolver.blocking import Resolver
conf = _config.Resolver()
conf.set('block')
self.assertEqual(conf.get(), Resolver)
def test_set_class(self):
from gevent.resolver.blocking import Resolver
conf = _config.Resolver()
conf.set(Resolver)
self.assertEqual(conf.get(), Resolver)
def test_set_through_config(self):
from gevent.resolver.thread import Resolver as Default
from gevent.resolver.blocking import Resolver
conf = _config.Config()
self.assertEqual(conf.resolver, Default)
conf.resolver = 'block'
self.assertEqual(conf.resolver, Resolver)
class TestFunctions(unittest.TestCase):
def test_validate_bool(self):
self.assertTrue(_config.validate_bool('on'))
self.assertTrue(_config.validate_bool('1'))
self.assertFalse(_config.validate_bool('off'))
self.assertFalse(_config.validate_bool('0'))
self.assertFalse(_config.validate_bool(''))
with self.assertRaises(ValueError):
_config.validate_bool(' hmm ')
def test_validate_invalid(self):
with self.assertRaises(ValueError):
_config.validate_invalid(self)
class TestConfig(unittest.TestCase):
def test__dir__(self):
self.assertEqual(sorted(_config.config.settings),
sorted(dir(_config.config)))
def test_getattr(self):
# Bypass the property that might be set here
self.assertIsNotNone(_config.config.__getattr__('resolver'))
def test__getattr__invalid(self):
with self.assertRaises(AttributeError):
getattr(_config.config, 'no_such_setting')
def test_set_invalid(self):
with self.assertRaises(AttributeError):
_config.config.set('no such setting', True)
class TestImportableSetting(unittest.TestCase):
def test_empty_list(self):
i = _config.ImportableSetting()
with self.assertRaisesRegex(ImportError,
"Cannot import from empty list"):
i._import_one_of([])
def test_path_not_supported(self):
import warnings
i = _config.ImportableSetting()
path = list(sys.path)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
with self.assertRaisesRegex(ImportError,
"Cannot import 'foo/bar/gevent.no_such_module'"):
i._import_one('foo/bar/gevent.no_such_module')
# We restored the path
self.assertEqual(path, sys.path)
# We did not issue a warning
self.assertEqual(len(w), 0)
def test_non_string(self):
i = _config.ImportableSetting()
self.assertIs(i._import_one(self), self)
def test_get_options(self):
i = _config.ImportableSetting()
self.assertEqual({}, i.get_options())
i.shortname_map = {'foo': 'bad/path'}
options = i.get_options()
self.assertIn('foo', options)
if __name__ == '__main__':
unittest.main()
|
1,682 |
log
|
import argparse
import os
import numpy as np
import timeit
import tensorflow as tf
import horovod.tensorflow as hvd
from tensorflow.keras import applications
# Benchmark settings
parser = argparse.ArgumentParser(description='TensorFlow Synthetic Benchmark',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--fp16-allreduce', action='store_true', default=False,
help='use fp16 compression during allreduce')
parser.add_argument('--model', type=str, default='ResNet50',
help='model to benchmark')
parser.add_argument('--batch-size', type=int, default=32,
help='input batch size')
parser.add_argument('--num-warmup-batches', type=int, default=10,
help='number of warm-up batches that don\'t count towards benchmark')
parser.add_argument('--num-batches-per-iter', type=int, default=10,
help='number of batches per benchmark iteration')
parser.add_argument('--num-iters', type=int, default=10,
help='number of benchmark iterations')
parser.add_argument('--eager', action='store_true', default=False,
help='enables eager execution')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--use-adasum', action='store_true', default=False,
help='use adasum algorithm to do reduction')
args = parser.parse_args()
args.cuda = not args.no_cuda
hvd.init()
# Horovod: pin GPU to be used to process local rank (one GPU per process)
config = tf.ConfigProto()
if args.cuda:
config.gpu_options.allow_growth = True
config.gpu_options.visible_device_list = str(hvd.local_rank())
else:
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
config.gpu_options.allow_growth = False
config.gpu_options.visible_device_list = ''
if args.eager:
tf.enable_eager_execution(config)
# Set up standard model.
model = getattr(applications, args.model)(weights=None)
lr_scaler = hvd.size()
# By default, Adasum doesn't need scaling when increasing batch size. If used with NCCL,
# scale lr by local_size
if args.use_adasum:
lr_scaler = hvd.local_size() if args.cuda and hvd.nccl_built() else 1
opt = tf.train.GradientDescentOptimizer(0.01 * lr_scaler)
# Horovod: (optional) compression algorithm.
compression = hvd.Compression.fp16 if args.fp16_allreduce else hvd.Compression.none
# Horovod: wrap optimizer with DistributedOptimizer.
opt = hvd.DistributedOptimizer(opt, compression=compression, op=hvd.Adasum if args.use_adasum else hvd.Average)
init = tf.global_variables_initializer()
bcast_op = hvd.broadcast_global_variables(0)
data = tf.random_uniform([args.batch_size, 224, 224, 3])
target = tf.random_uniform([args.batch_size, 1], minval=0, maxval=999, dtype=tf.int64)
def loss_function():
probs = model(data, training=True)
return tf.losses.sparse_softmax_cross_entropy(target, probs)
def METHOD_NAME(s, nl=True):
if hvd.rank() != 0:
return
print(s, end='\n' if nl else '')
METHOD_NAME('Model: %s' % args.model)
METHOD_NAME('Batch size: %d' % args.batch_size)
device = 'GPU' if args.cuda else 'CPU'
METHOD_NAME('Number of %ss: %d' % (device, hvd.size()))
def run(benchmark_step):
# Warm-up
METHOD_NAME('Running warmup...')
timeit.timeit(benchmark_step, number=args.num_warmup_batches)
# Benchmark
METHOD_NAME('Running benchmark...')
img_secs = []
for x in range(args.num_iters):
time = timeit.timeit(benchmark_step, number=args.num_batches_per_iter)
img_sec = args.batch_size * args.num_batches_per_iter / time
METHOD_NAME('Iter #%d: %.1f img/sec per %s' % (x, img_sec, device))
img_secs.append(img_sec)
# Results
img_sec_mean = np.mean(img_secs)
img_sec_conf = 1.96 * np.std(img_secs)
METHOD_NAME('Img/sec per %s: %.1f +-%.1f' % (device, img_sec_mean, img_sec_conf))
METHOD_NAME('Total img/sec on %d %s(s): %.1f +-%.1f' %
(hvd.size(), device, hvd.size() * img_sec_mean, hvd.size() * img_sec_conf))
if tf.executing_eagerly():
with tf.device(device):
run(lambda: opt.minimize(loss_function, var_list=model.trainable_variables))
else:
with tf.Session(config=config) as session:
init.run()
bcast_op.run()
loss = loss_function()
train_opt = opt.minimize(loss)
run(lambda: session.run(train_opt))
|
1,683 |
iterkeys
|
from _typeshed import StrPath, SupportsKeysAndGetItem
from collections.abc import Container, Iterable, Iterator, Mapping, MutableMapping, Sequence
from typing import TypeVar, overload
from typing_extensions import Literal, TypeAlias
from uuid import UUID
_T = TypeVar("_T")
_Token: TypeAlias = (
tuple[Literal["EMPTY"], str, None]
| tuple[Literal["COMMENT"], str, None]
| tuple[Literal["SECTION"], str, tuple[str, ...]]
| tuple[Literal["KV"], str, tuple[str, str, str]]
)
def get_app_dir(app_name: str, roaming: bool = ..., force_posix: bool = ...) -> str: ...
class Dialect:
def __init__(
self,
ns_sep: str = ...,
kv_sep: str = ...,
quotes: Sequence[str] = ...,
true: Sequence[str] = ...,
false: Sequence[str] = ...,
comments: Container[str] = ...,
allow_escaping: bool = ...,
linesep: str | None = ...,
) -> None: ...
@property
def ns_sep(self) -> str: ...
@property
def kv_sep(self) -> str: ...
@property
def quotes(self) -> Sequence[str]: ...
@property
def true(self) -> Sequence[str]: ...
@property
def false(self) -> Sequence[str]: ...
@property
def comments(self) -> Container[str]: ...
@property
def allow_escaping(self) -> bool: ...
@property
def linesep(self) -> str | None: ...
def get_actual_linesep(self) -> str: ...
def get_strippable_lineseps(self) -> str: ...
def kv_serialize(self, key: str, val: str | None) -> str | None: ...
def escape(self, value: str, quote: str | None = ...) -> str: ...
def unescape(self, value: str) -> str: ...
def to_string(self, value: bool | float | str) -> str: ...
def dict_from_iterable(self, iterable: Iterable[str]) -> MutableMapping[str, str]: ...
def tokenize(self, iterable: Iterable[str]) -> Iterator[_Token]: ...
def update_tokens(
self, old_tokens: Iterable[_Token], changes: SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]]
) -> list[_Token]: ...
default_dialect: Dialect
class IniData(MutableMapping[str, str]):
def __init__(self, mapping: Mapping[str, str] | None = ..., dialect: Dialect | None = ...) -> None: ...
@property
def dialect(self) -> Dialect: ...
@property
def is_dirty(self) -> bool: ...
def get_updated_lines(self, line_iter: Iterable[_Token] | None = ...) -> list[_Token]: ...
def discard(self) -> None: ...
def rollover(self) -> None: ...
def to_dict(self) -> dict[str, str]: ...
def __len__(self) -> int: ...
@overload
def get(self, name: str) -> str | None: ...
@overload
def get(self, name: str, default: _T) -> str | _T: ...
@overload
def get_ascii(self, name: str) -> str | None: ...
@overload
def get_ascii(self, name: str, default: _T) -> str | _T: ...
@overload
def get_bool(self, name: str) -> bool: ...
@overload
def get_bool(self, name: str, default: _T) -> bool | _T: ...
@overload
def get_int(self, name: str) -> int | None: ...
@overload
def get_int(self, name: str, default: _T = ...) -> int | _T: ...
@overload
def get_float(self, name: str) -> float | None: ...
@overload
def get_float(self, name: str, default: _T) -> float | _T: ...
@overload
def get_uuid(self, name: str) -> UUID | None: ...
@overload
def get_uuid(self, name: str, default: _T) -> UUID | _T: ...
def itersections(self) -> Iterator[str]: ...
def sections(self) -> Iterator[str]: ...
def iteritems(self) -> Iterator[tuple[str, str]]: ...
def METHOD_NAME(self) -> Iterator[str]: ...
def itervalues(self) -> Iterator[str]: ...
# NB: keys, items, values currently return a generator, which is
# incompatible with the views returned by Mappings
def items(self) -> Iterator[tuple[str, str]]: ... # type: ignore[override]
def keys(self) -> Iterator[str]: ... # type: ignore[override]
def __iter__(self) -> Iterator[str]: ...
def values(self) -> Iterator[str]: ... # type: ignore[override]
def section_as_dict(self, section: str) -> dict[str, str]: ...
def __getitem__(self, name: str) -> str: ...
def __setitem__(self, name: str, value: str) -> None: ...
def __delitem__(self, name: str) -> None: ...
class IniFile(IniData):
def __init__(self, filename: StrPath, encoding: str | None = ..., dialect: Dialect | None = ...) -> None: ...
@property
def filename(self) -> str: ...
@property
def encoding(self) -> str | None: ...
@property
def is_new(self) -> bool: ...
def save(self, create_folder: bool = ...) -> None: ...
class AppIniFile(IniFile):
def __init__(
self,
app_name: str,
filename: StrPath,
roaming: bool = ...,
force_posix: bool = ...,
encoding: str | None = ...,
dialect: Dialect | None = ...,
) -> None: ...
|
1,684 |
test eval gives lambda custom globals
|
# Test the most dynamic corner cases of Python's runtime semantics.
import builtins
import unittest
from test.support import swap_item, swap_attr
class RebindBuiltinsTests(unittest.TestCase):
"""Test all the ways that we can change/shadow globals/builtins."""
def configure_func(self, func, *args):
"""Perform TestCase-specific configuration on a function before testing.
By default, this does nothing. Example usage: spinning a function so
that a JIT will optimize it. Subclasses should override this as needed.
Args:
func: function to configure.
*args: any arguments that should be passed to func, if calling it.
Returns:
Nothing. Work will be performed on func in-place.
"""
pass
def test_globals_shadow_builtins(self):
# Modify globals() to shadow an entry in builtins.
def foo():
return len([1, 2, 3])
self.configure_func(foo)
self.assertEqual(foo(), 3)
with swap_item(globals(), "len", lambda x: 7):
self.assertEqual(foo(), 7)
def test_modify_builtins(self):
# Modify the builtins module directly.
def foo():
return len([1, 2, 3])
self.configure_func(foo)
self.assertEqual(foo(), 3)
with swap_attr(builtins, "len", lambda x: 7):
self.assertEqual(foo(), 7)
def test_modify_builtins_while_generator_active(self):
# Modify the builtins out from under a live generator.
def foo():
x = range(3)
yield len(x)
yield len(x)
self.configure_func(foo)
g = foo()
self.assertEqual(next(g), 3)
with swap_attr(builtins, "len", lambda x: 7):
self.assertEqual(next(g), 7)
def test_modify_builtins_from_leaf_function(self):
# Verify that modifications made by leaf functions percolate up the
# callstack.
with swap_attr(builtins, "len", len):
def bar():
builtins.len = lambda x: 4
def foo(modifier):
l = []
l.append(len(range(7)))
modifier()
l.append(len(range(7)))
return l
self.configure_func(foo, lambda: None)
self.assertEqual(foo(bar), [7, 4])
def test_cannot_change_globals_or_builtins_with_eval(self):
def foo():
return len([1, 2, 3])
self.configure_func(foo)
# Note that this *doesn't* change the definition of len() seen by foo().
builtins_dict = {"len": lambda x: 7}
globals_dict = {"foo": foo, "__builtins__": builtins_dict,
"len": lambda x: 8}
self.assertEqual(eval("foo()", globals_dict), 3)
self.assertEqual(eval("foo()", {"foo": foo}), 3)
def test_cannot_change_globals_or_builtins_with_exec(self):
def foo():
return len([1, 2, 3])
self.configure_func(foo)
globals_dict = {"foo": foo}
exec("x = foo()", globals_dict)
self.assertEqual(globals_dict["x"], 3)
# Note that this *doesn't* change the definition of len() seen by foo().
builtins_dict = {"len": lambda x: 7}
globals_dict = {"foo": foo, "__builtins__": builtins_dict,
"len": lambda x: 8}
exec("x = foo()", globals_dict)
self.assertEqual(globals_dict["x"], 3)
def test_cannot_replace_builtins_dict_while_active(self):
def foo():
x = range(3)
yield len(x)
yield len(x)
self.configure_func(foo)
g = foo()
self.assertEqual(next(g), 3)
with swap_item(globals(), "__builtins__", {"len": lambda x: 7}):
self.assertEqual(next(g), 3)
def test_cannot_replace_builtins_dict_between_calls(self):
def foo():
return len([1, 2, 3])
self.configure_func(foo)
self.assertEqual(foo(), 3)
with swap_item(globals(), "__builtins__", {"len": lambda x: 7}):
self.assertEqual(foo(), 3)
def METHOD_NAME(self):
globals_dict = {"len": lambda x: 7}
foo = eval("lambda: len([])", globals_dict)
self.configure_func(foo)
self.assertEqual(foo(), 7)
if __name__ == "__main__":
unittest.main()
|
1,685 |
test st video from url
|
# Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""st.video unit tests"""
from io import BytesIO
import numpy as np
import streamlit as st
from streamlit.runtime.media_file_storage import MediaFileStorageError
from streamlit.runtime.memory_media_file_storage import _calculate_file_id
from streamlit.web.server.server import MEDIA_ENDPOINT
from tests.delta_generator_test_case import DeltaGeneratorTestCase
class VideoTest(DeltaGeneratorTestCase):
def test_st_video_from_bytes(self):
"""Test st.video using fake bytes data."""
# Make up some bytes to pretend we have a video. The server should not vet
# the video before sending it to the browser.
fake_video_data = "\x12\x10\x35\x44\x55\x66".encode("utf-8")
st.video(fake_video_data)
el = self.get_delta_from_queue().new_element
# locate resultant file in InMemoryFileManager and test its properties.
file_id = _calculate_file_id(fake_video_data, "video/mp4")
media_file = self.media_file_storage.get_file(file_id)
self.assertIsNotNone(media_file)
self.assertEqual(media_file.mimetype, "video/mp4")
self.assertEqual(self.media_file_storage.get_url(file_id), el.video.url)
def METHOD_NAME(self):
"""We can pass a URL directly to st.video"""
some_url = "http://www.marmosetcare.com/video/in-the-wild/intro.webm"
st.video(some_url)
el = self.get_delta_from_queue().new_element
self.assertEqual(el.video.url, some_url)
def test_youtube_urls_transformed_to_embed_links(self):
"""Youtube URLs should be transformed into embed links."""
yt_urls = (
"https://youtu.be/_T8LGqJtuGc",
"https://www.youtube.com/watch?v=kmfC-i9WgH0",
"https://www.youtube.com/embed/sSn4e1lLVpA",
)
yt_embeds = (
"https://www.youtube.com/embed/_T8LGqJtuGc",
"https://www.youtube.com/embed/kmfC-i9WgH0",
"https://www.youtube.com/embed/sSn4e1lLVpA",
)
# url should be transformed into an embed link (or left alone).
for x in range(0, len(yt_urls)):
st.video(yt_urls[x])
el = self.get_delta_from_queue().new_element
self.assertEqual(el.video.url, yt_embeds[x])
def test_st_video_raises_on_bad_filename(self):
"""A non-URL string is assumed to be a filename. A file we can't
open will result in an error.
"""
with self.assertRaises(MediaFileStorageError):
st.video("not/a/real/file")
def test_st_video_from_none(self):
"""st.video(None) is not an error."""
st.video(None)
el = self.get_delta_from_queue().new_element
self.assertEqual(el.video.url, "")
def test_st_video_other_inputs(self):
"""Test that our other data types don't result in an error."""
st.video(b"bytes_data")
st.video("str_data".encode("utf-8"))
st.video(BytesIO(b"bytesio_data"))
st.video(np.array([0, 1, 2, 3]))
def test_st_video_options(self):
"""Test st.video with options."""
fake_video_data = "\x11\x22\x33\x44\x55\x66".encode("utf-8")
st.video(fake_video_data, format="video/mp4", start_time=10)
el = self.get_delta_from_queue().new_element
self.assertEqual(el.video.start_time, 10)
self.assertTrue(el.video.url.startswith(MEDIA_ENDPOINT))
self.assertTrue(
_calculate_file_id(fake_video_data, "video/mp4") in el.video.url
)
|
1,686 |
thread run
|
import logging
import re
import threading
from typing import Dict, Optional
from urllib.parse import urlparse
from flask import request
from requests.models import Request
from requests.structures import CaseInsensitiveDict
from localstack import config
from localstack.constants import APPLICATION_JSON, APPLICATION_XML, HEADER_CONTENT_TYPE
from localstack.utils.aws import aws_stack
from localstack.utils.aws.aws_responses import (
is_json_request,
requests_error_response,
requests_response,
requests_to_flask_response,
)
from localstack.utils.coverage_docs import get_coverage_link_for_service
from localstack.utils.patch import patch
from localstack.utils.strings import snake_to_camel_case
from localstack.utils.threads import FuncThread
LOG = logging.getLogger(__name__)
THREAD_LOCAL = threading.local()
MARKER_APIGW_REQUEST_REGION = "__apigw_request_region__"
AWS_REGION_REGEX = r"(us(-gov)?|ap|ca|cn|eu|sa)-(central|(north|south)?(east|west)?)-\d"
def get_proxy_request_for_thread():
try:
return THREAD_LOCAL.request_context
except Exception:
return None
def get_flask_request_for_thread():
try:
# Append/cache a converted request (requests.Request) to the thread-local Flask request.
# We use this request object as the invocation context, which may be modified in other places,
# e.g., when manually configuring the region in the request context of an incoming API call.
if not hasattr(request, "_converted_request"):
request._converted_request = Request(
url=request.path,
data=request.data,
headers=CaseInsensitiveDict(request.headers),
method=request.method,
)
return request._converted_request
except Exception as e:
# swallow error: "Working outside of request context."
if "Working outside" in str(e):
return None
raise
def extract_region_from_auth_header(headers):
auth = headers.get("Authorization") or ""
region = re.sub(r".*Credential=[^/]+/[^/]+/([^/]+)/.*", r"\1", auth)
if region == auth:
return None
return region
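# Illustrative note (not part of the original module): the Credential scope of a
# SigV4 Authorization header looks roughly like
#   "AWS4-HMAC-SHA256 Credential=AKIAEXAMPLE/20230101/us-east-1/s3/aws4_request, ..."
# so the helper above returns "us-east-1" for such a header, and None when no
# Credential scope is present.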
def extract_region_from_headers(headers):
region = headers.get(MARKER_APIGW_REQUEST_REGION)
# Fix region lookup for certain requests, e.g., API gateway invocations
# that do not contain region details in the Authorization header.
if region:
return region
region = extract_region_from_auth_header(headers)
if not region:
# fall back to local region
region = aws_stack.get_local_region()
return region
def get_request_context():
candidates = [get_proxy_request_for_thread, get_flask_request_for_thread]
for req in candidates:
context = req()
if context is not None:
return context
class RequestContextManager:
"""Context manager which sets the given request context (i.e., region) for the scope of the block."""
def __init__(self, request_context):
self.request_context = request_context
def __enter__(self):
THREAD_LOCAL.request_context = self.request_context
def __exit__(self, type, value, traceback):
THREAD_LOCAL.request_context = None
def get_region_from_request_context():
"""look up region from request context"""
if config.USE_SINGLE_REGION:
return
request_context = get_request_context()
if not request_context:
return
return extract_region_from_headers(request_context.headers)
def configure_region_for_current_request(region_name: str, service_name: str):
"""Manually configure (potentially overwrite) the region in the current request context. This may be
used by API endpoints that are invoked directly by the user (without specifying AWS Authorization
headers), to still enable transparent region lookup via aws_stack.get_region() ..."""
# TODO: leaving import here for now, to avoid circular dependency
from localstack.utils.aws import aws_stack
request_context = get_request_context()
if not request_context:
LOG.info(
"Unable to set region '%s' in undefined request context: %s",
region_name,
request_context,
)
return
headers = request_context.headers
auth_header = headers.get("Authorization")
auth_header = auth_header or aws_stack.mock_aws_request_headers(service_name)["Authorization"]
auth_header = auth_header.replace("/%s/" % aws_stack.get_region(), "/%s/" % region_name)
try:
headers["Authorization"] = auth_header
except Exception as e:
if "immutable" not in str(e):
raise
_context_to_update = get_proxy_request_for_thread() or request
_context_to_update.headers = CaseInsensitiveDict({**headers, "Authorization": auth_header})
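# Usage sketch (assumption, not from the original code): calling
# configure_region_for_current_request("eu-west-1", "s3") rewrites the region part of
# the current request's Authorization header, e.g. ".../us-east-1/s3/..." becomes
# ".../eu-west-1/s3/...", so later aws_stack.get_region() lookups observe the new region.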
def mock_request_for_region(region_name: str, service_name: str = "dummy") -> Request:
result = Request()
result.headers["Authorization"] = aws_stack.mock_aws_request_headers(
service_name, region_name=region_name
)["Authorization"]
return result
def extract_service_name_from_auth_header(headers: Dict) -> Optional[str]:
try:
auth_header = headers.get("authorization", "")
credential_scope = auth_header.split(",")[0].split()[1]
_, _, _, service, _ = credential_scope.split("/")
return service
except Exception:
return
def patch_moto_request_handling():
# leave here to avoid import issues
from moto.core import utils as moto_utils
# make sure we properly handle/propagate "not implemented" errors
@patch(moto_utils.convert_to_flask_response.__call__)
def convert_to_flask_response_call(fn, *args, **kwargs):
try:
return fn(*args, **kwargs)
except NotImplementedError as e:
action = request.headers.get("X-Amz-Target")
action = action or f"{request.method} {urlparse(request.url).path}"
if action == "POST /":
# try to extract action from exception string
match = re.match(r"The ([a-zA-Z0-9_-]+) action has not been implemented", str(e))
if match:
action = snake_to_camel_case(match.group(1))
service = extract_service_name_from_auth_header(request.headers)
exception_message: str | None = e.args[0] if e.args else None
msg = exception_message or get_coverage_link_for_service(service, action)
response = requests_error_response(request.headers, msg, code=501)
if config.MOCK_UNIMPLEMENTED:
is_json = is_json_request(request.headers)
headers = {HEADER_CONTENT_TYPE: APPLICATION_JSON if is_json else APPLICATION_XML}
content = "{}" if is_json else "<Response />" # TODO: return proper mocked response
response = requests_response(content, headers=headers)
LOG.info(f"{msg}. Returning mocked response due to MOCK_UNIMPLEMENTED=1")
else:
LOG.info(msg)
# TODO: publish analytics event ...
return requests_to_flask_response(response)
if config.USE_SINGLE_REGION:
return
# make sure that we inherit THREAD_LOCAL request contexts to spawned sub-threads
@patch(FuncThread.__init__)
def thread_init(fn, self, *args, **kwargs):
self._req_context = get_request_context()
return fn(self, *args, **kwargs)
@patch(FuncThread.run)
def METHOD_NAME(fn, self, *args, **kwargs):
try:
if self._req_context:
THREAD_LOCAL.request_context = self._req_context
except AttributeError:
# sometimes there is a race condition where the previous patch has not been applied yet
pass
return fn(self, *args, **kwargs)
|
1,687 |
subnet resource id
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetWebAppSwiftVirtualNetworkConnectionResult',
'AwaitableGetWebAppSwiftVirtualNetworkConnectionResult',
'get_web_app_swift_virtual_network_connection',
'get_web_app_swift_virtual_network_connection_output',
]
@pulumi.output_type
class GetWebAppSwiftVirtualNetworkConnectionResult:
"""
Swift Virtual Network Contract. This is used to enable the new Swift way of doing virtual network integration.
"""
def __init__(__self__, id=None, kind=None, name=None, METHOD_NAME=None, swift_supported=None, system_data=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'subnet_resource_id' to be a str")
pulumi.set(__self__, "subnet_resource_id", METHOD_NAME)
if swift_supported and not isinstance(swift_supported, bool):
raise TypeError("Expected argument 'swift_supported' to be a bool")
pulumi.set(__self__, "swift_supported", swift_supported)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="subnetResourceId")
def METHOD_NAME(self) -> Optional[str]:
"""
The Virtual Network subnet's resource ID. This is the subnet that this Web App will join. This subnet must have a delegation to Microsoft.Web/serverFarms defined first.
"""
return pulumi.get(self, "subnet_resource_id")
@property
@pulumi.getter(name="swiftSupported")
def swift_supported(self) -> Optional[bool]:
"""
A flag that specifies if the scale unit this Web App is on supports Swift integration.
"""
return pulumi.get(self, "swift_supported")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system metadata relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetWebAppSwiftVirtualNetworkConnectionResult(GetWebAppSwiftVirtualNetworkConnectionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetWebAppSwiftVirtualNetworkConnectionResult(
id=self.id,
kind=self.kind,
name=self.name,
METHOD_NAME=self.METHOD_NAME,
swift_supported=self.swift_supported,
system_data=self.system_data,
type=self.type)
def get_web_app_swift_virtual_network_connection(name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebAppSwiftVirtualNetworkConnectionResult:
"""
Gets a Swift Virtual Network connection.
:param str name: Name of the app.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
__args__ = dict()
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:web/v20201001:getWebAppSwiftVirtualNetworkConnection', __args__, opts=opts, typ=GetWebAppSwiftVirtualNetworkConnectionResult).value
return AwaitableGetWebAppSwiftVirtualNetworkConnectionResult(
id=pulumi.get(__ret__, 'id'),
kind=pulumi.get(__ret__, 'kind'),
name=pulumi.get(__ret__, 'name'),
METHOD_NAME=pulumi.get(__ret__, 'subnet_resource_id'),
swift_supported=pulumi.get(__ret__, 'swift_supported'),
system_data=pulumi.get(__ret__, 'system_data'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_web_app_swift_virtual_network_connection)
def get_web_app_swift_virtual_network_connection_output(name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetWebAppSwiftVirtualNetworkConnectionResult]:
"""
Gets a Swift Virtual Network connection.
:param str name: Name of the app.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
...
|
1,688 |
path and query
|
import json
import re
from datetime import datetime
from os.path import join
from urllib.parse import parse_qs, urlsplit
from django.core.files.base import ContentFile
from django.core.files.storage import DefaultStorage
from django.core.management.base import BaseCommand
from django.test import Client
def METHOD_NAME(url):
split_url = urlsplit(url)
return split_url.path + "?" + split_url.query
def page_from_url(url):
page_values = parse_qs(urlsplit(url).query).get("page")
if page_values:
return int(page_values[0])
return 1
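# Illustrative examples (not part of the original module): for
# "https://example.com/api/people/?page=2" the first helper returns
# "/api/people/?page=2", and page_from_url() returns 2 (or 1 when no
# "page" query parameter is present).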
def page_filename(endpoint, page_number):
return "{}-{:06d}.json".format(endpoint, page_number)
def is_timestamped_dir(directory):
return re.search(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$", directory)
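# Example (illustrative): is_timestamped_dir("2023-01-01T12:00:00") matches,
# while directory names such as "latest" do not.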
class Command(BaseCommand):
help = "Cache the output of the persons and posts endpoints to a directory"
endpoints = ("people", "ballots")
def add_arguments(self, parser):
parser.add_argument(
"--hostname",
action="store",
default="candidates.democracyclub.org.uk",
help="Optional hostname if files are stored at a relative path",
)
parser.add_argument(
"--url-prefix",
help="Optional url_prefix for next and previous links",
)
parser.add_argument(
"--http",
help="Force URLs to use HTTP. Defaults to HTTPS",
action="store_true",
)
parser.add_argument(
"--page-size",
type=int,
help="How many results should be output per file (max 200)",
)
parser.add_argument(
"--prune",
action="store_true",
help=(
"Prune older timestamped directories (those over 12 hours "
"old, never deleting the latest successfully generated one "
"or any of the 4 most recent)"
),
)
def update_latest_page(self, output_directory, endpoint):
latest_page_location = join(output_directory, "latest")
file_name = join(latest_page_location, page_filename(endpoint, 1))
self.storage.save(file_name, ContentFile(self.first_page_json))
def prune(self):
all_dirs = []
for directory in self.storage.listdir(self.directory_path)[0]:
if is_timestamped_dir(directory):
all_dirs.append(directory)
# Make sure we always leave at least the last 4 directories
timestamped_directories_to_remove = sorted(all_dirs)[:-4]
for path in sorted(timestamped_directories_to_remove):
dir_path = join(self.directory_path, path)
for filename in self.storage.listdir(dir_path)[1]:
self.storage.delete(join(dir_path, filename))
def get_url_prefix(self, url_prefix=None):
"""
Use the url_prefix passed if there is one, otherwise fall back to the
base_url of the DefaultStorage class.
Raise a ValueError if the prefix isn't an absolute URL
"""
if not url_prefix:
url_prefix = self.storage.base_url
match = "https://" if self.secure else "http://"
if not url_prefix.startswith(match):
raise ValueError(
"URL prefix must start with {}. Try using --url_prefix.".format(
match
)
)
return url_prefix
def rewrite_link(self, endpoint, url):
if not url:
return None
page = page_from_url(url)
filename = page_filename(endpoint, page)
return "/".join([self.url_prefix, self.timestamp, filename])
def get(self, url):
kwargs = {"SERVER_NAME": self.hostname}
if self.secure:
kwargs["wsgi.url_scheme"] = "https"
kwargs["secure"] = True
return self.client.get(url, **kwargs)
def rewrite_next_and_previous_links(self, endpoint, data):
data["next"] = self.rewrite_link(endpoint, data["next"])
data["previous"] = self.rewrite_link(endpoint, data["previous"])
def get_api_results_to_directory(self, endpoint, json_directory, page_size):
url = "/api/next/{endpoint}/?page_size={page_size}&format=json".format(
page_size=page_size, endpoint=endpoint
)
while url:
page = page_from_url(url)
output_filename = join(
json_directory, page_filename(endpoint, page)
)
response = self.get(url)
if response.status_code != 200:
msg = "Unexpected response {0} from {1}"
raise Exception(msg.format(response.status_code, url))
data = json.loads(response.content.decode("utf-8"))
original_next_url = data["next"]
self.rewrite_next_and_previous_links(endpoint, data)
json_page = json.dumps(data, indent=4, sort_keys=True).encode(
"utf8"
)
if page == 1:
self.first_page_json = json_page
self.storage.save(output_filename, ContentFile(json_page))
# Now make sure the next URL works with the test client:
if original_next_url:
url = METHOD_NAME(original_next_url)
else:
url = None
def handle(self, *args, **options):
self.client = Client()
self.directory_path = "cached-api"
self.storage = DefaultStorage()
self.secure = not options.get("http", False)
self.hostname = options["hostname"]
self.url_prefix = self.get_url_prefix(options["url_prefix"])
self.timestamp = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
json_directory = join(self.directory_path, self.timestamp)
page_size = options["page_size"]
if not page_size:
page_size = 200
for endpoint in self.endpoints:
self.get_api_results_to_directory(
endpoint, json_directory, page_size
)
self.update_latest_page(self.directory_path, endpoint)
if options["prune"]:
self.prune()
|
1,689 |
test linear gradients 3
|
"""Test how gradients are drawn."""
from ..testing_utils import assert_no_logs
@assert_no_logs
def test_linear_gradients_1(assert_pixels):
assert_pixels('''
_____
_____
_____
BBBBB
BBBBB
RRRRR
RRRRR
RRRRR
RRRRR
''', '''<style>@page { size: 5px 9px; background: linear-gradient(
white, white 3px, blue 0, blue 5px, red 0, red
)''')
@assert_no_logs
def test_linear_gradients_2(assert_pixels):
assert_pixels('''
_____
_____
_____
BBBBB
BBBBB
RRRRR
RRRRR
RRRRR
RRRRR
''', '''<style>@page { size: 5px 9px; background: linear-gradient(
white 3px, blue 0, blue 5px, red 0
)''')
@assert_no_logs
def METHOD_NAME(assert_pixels):
assert_pixels('''
___BBrrrr
___BBrrrr
___BBrrrr
___BBrrrr
___BBrrrr
''', '''<style>@page { size: 9px 5px; background: linear-gradient(
to right, white 3px, blue 0, blue 5px, red 0
)''')
@assert_no_logs
def test_linear_gradients_4(assert_pixels):
assert_pixels('''
BBBBBBrrrr
BBBBBBrrrr
BBBBBBrrrr
BBBBBBrrrr
BBBBBBrrrr
''', '''<style>@page { size: 10px 5px; background: linear-gradient(
to right, blue 5px, blue 6px, red 6px, red 9px
)''')
@assert_no_logs
def test_linear_gradients_5(assert_pixels):
assert_pixels('''
rBrrrBrrrB
rBrrrBrrrB
rBrrrBrrrB
rBrrrBrrrB
rBrrrBrrrB
''', '''
<style>@page { size: 10px 5px; background: repeating-linear-gradient(
to right, blue 50%, blue 60%, red 60%, red 90%
)''')
@assert_no_logs
def test_linear_gradients_6(assert_pixels):
assert_pixels('''
BBBrrrrrr
BBBrrrrrr
BBBrrrrrr
BBBrrrrrr
BBBrrrrrr
''', '''<style>@page { size: 9px 5px; background: linear-gradient(
to right, blue 3px, blue 3px, red 3px, red 3px
)''')
@assert_no_logs
def test_linear_gradients_7(assert_pixels):
assert_pixels('''
hhhhhhhhh
hhhhhhhhh
hhhhhhhhh
hhhhhhhhh
hhhhhhhhh
''', '''<style>@page { size: 9px 5px; background:
repeating-linear-gradient(
to right, black 3px, black 3px, #800080 3px, #800080 3px
)''')
@assert_no_logs
def test_linear_gradients_8(assert_pixels):
assert_pixels('''
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
''', '''<style>@page { size: 9px 5px; background:
repeating-linear-gradient(to right, blue 3px)''')
@assert_no_logs
def test_linear_gradients_9(assert_pixels):
assert_pixels('''
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
''', '''<style>@page { size: 9px 5px; background:
repeating-linear-gradient(45deg, blue 3px)''')
@assert_no_logs
def test_linear_gradients_10(assert_pixels):
assert_pixels('''
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
''', '''<style>@page { size: 9px 5px; background: linear-gradient(
45deg, blue 3px, red 3px, red 3px, blue 3px
)''')
@assert_no_logs
def test_linear_gradients_11(assert_pixels):
assert_pixels('''
BBBrBBBBB
BBBrBBBBB
BBBrBBBBB
BBBrBBBBB
BBBrBBBBB
''', '''<style>@page { size: 9px 5px; background: linear-gradient(
to right, blue 3px, red 3px, red 4px, blue 4px
)''')
@assert_no_logs
def test_linear_gradients_12(assert_pixels):
assert_pixels('''
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
BBBBBBBBB
''', '''<style>@page { size: 9px 5px; background:
repeating-linear-gradient(to right, red 3px, blue 3px, blue 4px, red 4px
)''')
@assert_no_logs
def test_linear_gradients_13(assert_pixels):
assert_pixels('''
_____
_____
_____
SSSSS
SSSSS
RRRRR
RRRRR
RRRRR
RRRRR
''', '''<style>@page { size: 5px 9px; background: linear-gradient(
white, white 3px, rgba(255, 0, 0, 0.751) 0, rgba(255, 0, 0, 0.751) 5px,
red 0, red
)''')
@assert_no_logs
def test_radial_gradients_1(assert_pixels):
assert_pixels('''
BBBBBB
BBBBBB
BBBBBB
BBBBBB
BBBBBB
BBBBBB
''', '''<style>@page { size: 6px; background:
radial-gradient(red -30%, blue -10%)''')
@assert_no_logs
def test_radial_gradients_2(assert_pixels):
assert_pixels('''
RRRRRR
RRRRRR
RRRRRR
RRRRRR
RRRRRR
RRRRRR
''', '''<style>@page { size: 6px; background:
radial-gradient(red 110%, blue 130%)''')
@assert_no_logs
def test_radial_gradients_3(assert_pixels):
assert_pixels('''
BzzzzzzzzB
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzRRzzzz
zzzzRRzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
BzzzzzzzzB
''', '''<style>@page { size: 10px 16px; background:
radial-gradient(red 20%, blue 80%)''')
@assert_no_logs
def test_radial_gradients_4(assert_pixels):
assert_pixels('''
BzzzzzzzzB
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzRRzzzz
zzzzRRzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
BzzzzzzzzB
''', '''<style>@page { size: 10px 16px; background:
radial-gradient(red 50%, blue 50%)''')
@assert_no_logs
def test_radial_gradients_5(assert_pixels):
assert_pixels('''
SzzzzzzzzS
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzRRzzzz
zzzzRRzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
zzzzzzzzzz
SzzzzzzzzS
''', '''<style>@page { size: 10px 16px; background:
radial-gradient(red 50%, rgba(255, 0, 0, 0.751) 50%)''')
|
1,690 |
test list subtitles movie no imdb
|
# -*- coding: utf-8 -*-
import pytest
import os
from subliminal_patch.providers.argenteam import ArgenteamProvider
from subliminal_patch.providers.argenteam import ArgenteamSubtitle
from subliminal_patch.core import Episode
from subzero.language import Language
@pytest.mark.parametrize(
"imdb_id,expected_id", [("tt0028950", 62790), ("tt0054407", 102006)]
)
def test_search_ids_movie(imdb_id, expected_id):
with ArgenteamProvider() as provider:
ids = provider._search_ids(imdb_id)
assert ids[0] == expected_id
def test_search_ids_tv_show():
with ArgenteamProvider() as provider:
ids = provider._search_ids("tt0306414", season=1, episode=1)
assert ids[0] == 10075
def test_parse_subtitles_episode():
with ArgenteamProvider() as provider:
assert len(provider._parse_subtitles([10075])) > 1
def test_parse_subtitles_movie():
with ArgenteamProvider() as provider:
assert len(provider._parse_subtitles([61], is_episode=False)) > 3
def test_get_matches_episode(episodes):
episode = episodes["breaking_bad_s01e01"]
subtitle = ArgenteamSubtitle(
Language.fromalpha2("es"),
None,
"https://argenteam.net/subtitles/24002/Breaking.Bad.%282008%29.S01E01-Pilot.BluRay.x264.720p-REWARD",
"Breaking.Bad.(2008).S01E01-Pilot.BluRay.x264.720p-REWARD\nBluRay x264 720p",
{"series", "title", "season", "episode", "imdb_id"},
)
matches = subtitle.get_matches(episode)
assert matches == {
"title",
"season",
"episode",
"imdb_id",
"source",
"video_codec",
"resolution",
"edition",
"streaming_service",
"release_group",
"series",
"year",
}
def test_get_matches_movie(movies):
movie = movies["dune"]
subtitle = ArgenteamSubtitle(
Language.fromalpha2("es"),
None,
"https://argenteam.net/subtitles/86024/Dune.Part.One.%282021%29.WEB.H264.1080p-NAISU",
"WEB H264 1080p",
{"title", "year", "imdb_id"},
)
matches = subtitle.get_matches(movie)
assert matches == {
"title",
"year",
"imdb_id",
"source",
"resolution",
"edition",
"video_codec",
"streaming_service",
}
def test_list_subtitles_movie(movies):
item = movies["dune"]
with ArgenteamProvider() as provider:
subtitles = provider.list_subtitles(item, {Language("spa", "MX")})
for expected in (
"https://argenteam.net/subtitles/86023/Dune.Part.One.%282021%29.WEB.H264.720p-NAISU",
"https://argenteam.net/subtitles/86024/Dune.Part.One.%282021%29.WEB.H264.1080p-NAISU",
"https://argenteam.net/subtitles/86025/Dune.Part.One.%282021%29.WEB.x265.2160p-NAISU",
):
assert any(expected == sub.download_link for sub in subtitles)
def METHOD_NAME(movies):
item = movies["dune"]
item.imdb_id = None
with ArgenteamProvider() as provider:
assert not provider.list_subtitles(item, {Language("spa", "MX")})
def test_list_subtitles_movie_not_found(movies):
item = movies["dune"]
item.imdb_id = "tt29318321832"
with ArgenteamProvider() as provider:
assert not provider.list_subtitles(item, {Language("spa", "MX")})
def test_list_subtitles_episode(episodes):
item = episodes["breaking_bad_s01e01"]
with ArgenteamProvider() as provider:
subtitles = provider.list_subtitles(item, {Language("spa", "MX")})
for expected in (
"https://argenteam.net/subtitles/24002/Breaking.Bad.%282008%29.S01E01-Pilot.BluRay.x264.720p-REWARD",
"https://argenteam.net/subtitles/23940/Breaking.Bad.%282008%29.S01E01-Pilot.DVDRip.XviD-ORPHEUS",
):
assert any(expected == sub.download_link for sub in subtitles)
def test_list_subtitles_episode_no_imdb_id(episodes):
item = episodes["breaking_bad_s01e01"]
item.series_imdb_id = None
with ArgenteamProvider() as provider:
assert not provider.list_subtitles(item, {Language("spa", "MX")})
def test_list_subtitles_episode_not_found(episodes):
item = episodes["breaking_bad_s01e01"]
item.series_imdb_id = "tt29318321832"
with ArgenteamProvider() as provider:
assert not provider.list_subtitles(item, {Language("spa", "MX")})
def test_download_subtitle(episodes):
item = episodes["breaking_bad_s01e01"]
with ArgenteamProvider() as provider:
subtitles = provider.list_subtitles(item, {Language("spa", "MX")})
provider.download_subtitle(subtitles[0])
assert subtitles[0].is_valid()
|
1,691 |
pipeline
|
# Copyright (c) 2020-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import numpy as np
import nvidia.dali.fn as fn
import nvidia.dali.types as types
import random
from nvidia.dali import pipeline_def
from sequences_test_utils import ArgCb, video_suite_helper
from test_utils import RandomDataIterator
def dali_type_to_np(dtype):
if dtype == types.FLOAT:
return np.single
elif dtype == types.INT16:
return np.short
elif dtype == types.INT32:
return np.intc
elif dtype == types.UINT8:
return np.ubyte
else:
assert False
@pipeline_def()
def ColorTwistPipeline(data_iterator, is_input_float, inp_dtype, out_dtype):
imgs = fn.external_source(source=data_iterator)
o_dtype = dali_type_to_np(out_dtype)
# converting float inputs to integer outputs leads to binary images, since
# float input is in the -1 to 1 range in that case
if is_input_float and not np.issubdtype(o_dtype, np.floating):
imgs *= 255
H = fn.random.uniform(range=[-20, 20])
S = fn.random.uniform(range=[0, 2])
brightness = fn.random.uniform(range=[0, 2])
contrast = fn.random.uniform(range=[0, 2])
out_dtype_arg = out_dtype if out_dtype != inp_dtype else None
out_cpu, out_gpu = (
fn.color_twist(input, hue=H, saturation=S, brightness=brightness, contrast=contrast,
dtype=out_dtype_arg) for input in (imgs, imgs.gpu()))
return imgs, out_cpu, out_gpu, H, S, brightness, contrast
rgb2yiq = np.array([[.299, .587, .114],
[.596, -.274, -.321],
[.211, -.523, .311]])
yiq2rgb = np.linalg.inv(rgb2yiq)
def convert_sat(data, out_dtype):
clipped = data
if not np.issubdtype(out_dtype, np.floating):
max_range = np.iinfo(out_dtype).max
min_range = np.iinfo(out_dtype).min
clipped = np.clip(clipped, min_range, max_range)
clipped = np.round(clipped)
return clipped.astype(out_dtype)
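# Example (illustrative): convert_sat(np.array([300.6, -5.2]), np.uint8) clips to
# [0, 255] and rounds, giving array([255, 0], dtype=uint8); floating-point
# out_dtype values are passed through without clipping.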
def ref_color_twist(img, H, S, brightness, contrast, out_dtype):
inp_dtype = img.dtype
angle = math.radians(H)
s, c = math.sin(angle), math.cos(angle)
# Rotate the color components by angle and scale by S.
# The fun part is that it doesn't really matter that much which
hmat = np.array([[1, 0, 0],
[0, c * S, s * S],
[0, -s * S, c * S]])
m = np.matmul(yiq2rgb, np.matmul(hmat, rgb2yiq))
num_pixels = np.prod(img.shape[:-1])
pixels = img.reshape([num_pixels, img.shape[-1]])
pixels = np.matmul(pixels, m.transpose())
if np.issubdtype(inp_dtype, np.floating):
grey = 0.5
else:
grey = 128
pixels = ((pixels - grey) * contrast + grey) * brightness
img = pixels.reshape(img.shape)
return convert_sat(img, out_dtype)
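# Sanity check (illustrative, not part of the original test): with H=0, S=1,
# brightness=1 and contrast=1 the hue matrix is the identity, so
# ref_color_twist(img, 0, 1, 1, 1, img.dtype) returns the input unchanged
# (up to rounding for integer dtypes).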
def check(input, out_cpu, out_gpu, H, S, brightness, contrast, out_dtype):
ref = ref_color_twist(input, H, S, brightness, contrast, out_dtype)
if np.issubdtype(out_dtype, np.floating):
rel_err = 1e-3
abs_err = 1e-3
else:
rel_err = 1 / 512
# due to rounding error, integer out types can be off by 1
abs_err = 1
assert np.allclose(out_cpu, ref, rel_err, abs_err)
assert np.allclose(out_gpu, ref, rel_err, abs_err)
def check_ref(inp_dtype, out_dtype, has_3_dims):
batch_size = 32
n_iters = 8
shape = (128, 32, 3) if not has_3_dims else (random.randint(2, 5), 128, 32, 3)
inp_dtype = dali_type_to_np(inp_dtype)
ri1 = RandomDataIterator(batch_size, shape=shape, dtype=inp_dtype)
pipe = ColorTwistPipeline(seed=2139,
batch_size=batch_size,
num_threads=4,
device_id=0,
data_iterator=ri1,
is_input_float=np.issubdtype(inp_dtype, np.floating),
inp_dtype=inp_dtype,
out_dtype=out_dtype)
pipe.build()
for _ in range(n_iters):
inp, out_cpu, out_gpu, H, S, B, C = pipe.run()
out_gpu = out_gpu.as_cpu()
for i in range(batch_size):
h, s, b, c = H.at(i), S.at(i), B.at(i), C.at(i)
check(inp.at(i), out_cpu.at(i), out_gpu.at(i), h, s, b, c, dali_type_to_np(out_dtype))
def test_color_twist():
for inp_dtype in [types.FLOAT, types.INT16, types.UINT8]:
for out_dtype in [types.FLOAT, types.INT16, types.UINT8]:
has_3_dims = random.choice([False, True])
yield check_ref, inp_dtype, out_dtype, has_3_dims
def test_video():
def hue(sample_desc):
return np.float32(360 * sample_desc.rng.random())
def saturation(sample_desc):
return np.float32(sample_desc.rng.random())
def value(sample_desc):
return np.float32(sample_desc.rng.random())
def contrast(sample_desc):
return np.float32(2 * sample_desc.rng.random())
def brightness(sample_desc):
return np.float32(2 * sample_desc.rng.random())
video_test_cases = [
(fn.hue, {}, [ArgCb("hue", hue, True)]),
(fn.saturation, {}, [ArgCb("saturation", saturation, True)]),
(fn.hsv, {}, [
ArgCb("hue", hue, True),
ArgCb("saturation", saturation, True),
ArgCb("value", value, True)
]),
(fn.hsv, {}, [
ArgCb("hue", hue, False),
ArgCb("saturation", saturation, True),
ArgCb("value", value, False)
]),
(fn.color_twist, {}, [
ArgCb("brightness", brightness, True),
ArgCb("hue", hue, True),
ArgCb("saturation", saturation, True),
ArgCb("contrast", contrast, True),
]),
(fn.color_twist, {}, [ArgCb("brightness", brightness, True),
ArgCb("hue", hue, False)]),
]
yield from video_suite_helper(video_test_cases, test_channel_first=False)
def test_color_twist_default_dtype():
np_types = [types.FLOAT, types.INT32, types.INT16, types.UINT8] # Just some types
def impl(op, device, type):
@pipeline_def(batch_size=1, num_threads=3, device_id=0)
def METHOD_NAME():
data = fn.constant(idata=255, shape=(10, 10, 3), dtype=type, device=device)
return op(data)
pipe = METHOD_NAME()
pipe.build()
data, = pipe.run()
assert data[0].dtype == type, f"{data[0].dtype} != {type}"
for device in ['gpu', 'cpu']:
for type in np_types:
for op in [fn.hue]:
yield impl, op, device, type
|
1,692 |
test module repr with name
|
# Test the module type
import unittest
import weakref
from test.support import run_unittest, gc_collect
from test.script_helper import assert_python_ok
import sys
ModuleType = type(sys)
class FullLoader:
@classmethod
def module_repr(cls, m):
return "<module '{}' (crafted)>".format(m.__name__)
class BareLoader:
pass
class ModuleTests(unittest.TestCase):
def test_uninitialized(self):
# An uninitialized module has no __dict__ or __name__,
# and __doc__ is None
foo = ModuleType.__new__(ModuleType)
self.assertTrue(foo.__dict__ is None)
self.assertRaises(SystemError, dir, foo)
try:
s = foo.__name__
self.fail("__name__ = %s" % repr(s))
except AttributeError:
pass
self.assertEqual(foo.__doc__, ModuleType.__doc__)
def test_no_docstring(self):
# Regularly initialized module, no docstring
foo = ModuleType("foo")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, None)
self.assertIs(foo.__loader__, None)
self.assertIs(foo.__package__, None)
self.assertIs(foo.__spec__, None)
self.assertEqual(foo.__dict__, {"__name__": "foo", "__doc__": None,
"__loader__": None, "__package__": None,
"__spec__": None})
def test_ascii_docstring(self):
# ASCII docstring
foo = ModuleType("foo", "foodoc")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, "foodoc")
self.assertEqual(foo.__dict__,
{"__name__": "foo", "__doc__": "foodoc",
"__loader__": None, "__package__": None,
"__spec__": None})
def test_unicode_docstring(self):
# Unicode docstring
foo = ModuleType("foo", "foodoc\u1234")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, "foodoc\u1234")
self.assertEqual(foo.__dict__,
{"__name__": "foo", "__doc__": "foodoc\u1234",
"__loader__": None, "__package__": None,
"__spec__": None})
def test_reinit(self):
# Reinitialization should not replace the __dict__
foo = ModuleType("foo", "foodoc\u1234")
foo.bar = 42
d = foo.__dict__
foo.__init__("foo", "foodoc")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, "foodoc")
self.assertEqual(foo.bar, 42)
self.assertEqual(foo.__dict__,
{"__name__": "foo", "__doc__": "foodoc", "bar": 42,
"__loader__": None, "__package__": None, "__spec__": None})
self.assertTrue(foo.__dict__ is d)
def test_dont_clear_dict(self):
# See issue 7140.
def f():
foo = ModuleType("foo")
foo.bar = 4
return foo
gc_collect()
self.assertEqual(f().__dict__["bar"], 4)
def test_clear_dict_in_ref_cycle(self):
destroyed = []
m = ModuleType("foo")
m.destroyed = destroyed
s = """class A:
def __init__(self, l):
self.l = l
def __del__(self):
self.l.append(1)
a = A(destroyed)"""
exec(s, m.__dict__)
del m
gc_collect()
self.assertEqual(destroyed, [1])
def test_weakref(self):
m = ModuleType("foo")
wr = weakref.ref(m)
self.assertIs(wr(), m)
del m
gc_collect()
self.assertIs(wr(), None)
def test_module_repr_minimal(self):
# reprs when modules have no __file__, __name__, or __loader__
m = ModuleType('foo')
del m.__name__
self.assertEqual(repr(m), "<module '?'>")
def METHOD_NAME(self):
m = ModuleType('foo')
self.assertEqual(repr(m), "<module 'foo'>")
def test_module_repr_with_name_and_filename(self):
m = ModuleType('foo')
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
def test_module_repr_with_filename_only(self):
m = ModuleType('foo')
del m.__name__
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module '?' from '/tmp/foo.py'>")
def test_module_repr_with_loader_as_None(self):
m = ModuleType('foo')
assert m.__loader__ is None
self.assertEqual(repr(m), "<module 'foo'>")
def test_module_repr_with_bare_loader_but_no_name(self):
m = ModuleType('foo')
del m.__name__
# Yes, a class not an instance.
m.__loader__ = BareLoader
loader_repr = repr(BareLoader)
self.assertEqual(
repr(m), "<module '?' ({})>".format(loader_repr))
def test_module_repr_with_full_loader_but_no_name(self):
# m.__loader__.module_repr() will fail because the module has no
# m.__name__. This exception will get suppressed and instead the
# loader's repr will be used.
m = ModuleType('foo')
del m.__name__
# Yes, a class not an instance.
m.__loader__ = FullLoader
loader_repr = repr(FullLoader)
self.assertEqual(
repr(m), "<module '?' ({})>".format(loader_repr))
def test_module_repr_with_bare_loader(self):
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = BareLoader
module_repr = repr(BareLoader)
self.assertEqual(
repr(m), "<module 'foo' ({})>".format(module_repr))
def test_module_repr_with_full_loader(self):
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = FullLoader
self.assertEqual(
repr(m), "<module 'foo' (crafted)>")
def test_module_repr_with_bare_loader_and_filename(self):
# Because the loader has no module_repr(), use the file name.
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = BareLoader
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
def test_module_repr_with_full_loader_and_filename(self):
# Even though the module has an __file__, use __loader__.module_repr()
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = FullLoader
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module 'foo' (crafted)>")
def test_module_repr_builtin(self):
self.assertEqual(repr(sys), "<module 'sys' (built-in)>")
def test_module_repr_source(self):
r = repr(unittest)
starts_with = "<module 'unittest' from '"
ends_with = "__init__.py'>"
self.assertEqual(r[:len(starts_with)], starts_with,
'{!r} does not start with {!r}'.format(r, starts_with))
self.assertEqual(r[-len(ends_with):], ends_with,
'{!r} does not end with {!r}'.format(r, ends_with))
def test_module_finalization_at_shutdown(self):
# Module globals and builtins should still be available during shutdown
rc, out, err = assert_python_ok("-c", "from test import final_a")
self.assertFalse(err)
lines = out.splitlines()
self.assertEqual(set(lines), {
b"x = a",
b"x = b",
b"final_a.x = a",
b"final_b.x = b",
b"len = len",
b"shutil.rmtree = rmtree"})
# frozen and namespace module reprs are tested in importlib.
def test_main():
run_unittest(ModuleTests)
if __name__ == '__main__':
test_main()
|
1,693 |
clamp to origin
|
from PyQt5.QtCore import QObject, Qt
from PyQt5.QtGui import QColor, QCursor, QPainter, QPainterPath, QPixmap
from PyQt5.QtWidgets import QApplication, QGraphicsDropShadowEffect
from defconQt.tools.drawing import applyEffectToPixmap
_path = QPainterPath()
_path.moveTo(9, 7.3)
_path.lineTo(9, 24)
_path.lineTo(21, 12)
_path.lineTo(16.3, 12)
_path.lineTo(18.6, 6.6)
_path.lineTo(14.85, 5)
_path.lineTo(12.5, 10.7)
_path.closeSubpath()
path = QPainterPath()
path.moveTo(10, 9.75)
path.lineTo(12.8, 12.5)
path.lineTo(15.3, 6.5)
path.lineTo(17.2, 7.3)
path.lineTo(14.75, 13.1)
path.lineTo(18.5, 13.1)
path.lineTo(10, 21.5)
path.closeSubpath()
class BaseTool(QObject):
icon = QPainterPath()
name = QApplication.translate("BaseTool", "Tool")
shortcut = None
grabKeyboard = False
@property
def cursor(self):
# TODO: cache?
return self.makeCursor(_path, path, 9.5, 1)
def toolActivated(self):
pass
def toolDisabled(self):
pass
def drawingAttribute(self, attr, flags):
return None
def drawingColor(self, attr, flags):
return None
@property
def _font(self):
return self.parent().window().font_()
@property
def _glyph(self):
return self.parent().activeGlyph()
# helper functions
def METHOD_NAME(self, pos, origin):
deltaX = pos.x() - origin.x()
deltaY = pos.y() - origin.y()
# go into the first quadrant to simplify our study
aDeltaX = abs(deltaX)
aDeltaY = abs(deltaY)
# diagonal incr.
# if aDeltaY >= aDeltaX * 2:
# pos.setX(origin.x())
# elif aDeltaY > aDeltaX / 2:
# avg = (aDeltaX + aDeltaY) / 2
# pos.setX(origin.x() + copysign(avg, deltaX))
# pos.setY(origin.y() + copysign(avg, deltaY))
if aDeltaY >= aDeltaX:
pos.setX(origin.x())
else:
pos.setY(origin.y())
return pos
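# Example (illustrative): with origin (0, 0), a position of (10, 3) is clamped to
# (10, 0) because the horizontal delta dominates, while (3, 10) is clamped to (0, 10).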
def magnetPos(self, pos):
widget = self.parent()
mouseItem = widget.itemAt(pos)
if isinstance(mouseItem, tuple):
contour, index = mouseItem
point = contour[index]
pos.setX(point.x)
pos.setY(point.y)
# TODO: also clamp to (0, 0) and (glyph.width, 0), conditionally?
return pos
def makeCursor(self, whitePath, blackPath, x, y):
pixmap = QPixmap(24, 24)
pixmap.fill(Qt.transparent)
painter = QPainter()
painter.begin(pixmap)
painter.setRenderHint(QPainter.Antialiasing)
painter.translate(0, pixmap.height())
painter.scale(1, -1)
painter.fillPath(whitePath, Qt.white)
painter.end()
effect = QGraphicsDropShadowEffect()
effect.setColor(QColor.fromRgbF(0, 0, 0, 0.3))
effect.setBlurRadius(4)
effect.setOffset(0, 1)
pixmap = applyEffectToPixmap(pixmap, effect)
painter.begin(pixmap)
painter.setRenderHint(QPainter.Antialiasing)
painter.translate(0, pixmap.height())
painter.scale(1, -1)
painter.fillPath(blackPath, Qt.black)
painter.end()
return QCursor(pixmap, int(x), int(y))
# events
def contextMenuEvent(self, event):
pass
def keyPressEvent(self, event):
pass
def keyReleaseEvent(self, event):
pass
def mousePressEvent(self, event):
if event.button() == Qt.MidButton:
self._panOrigin = event.globalPos()
def mouseMoveEvent(self, event):
if hasattr(self, "_panOrigin"):
pos = event.globalPos()
self.parent().scrollBy(pos - self._panOrigin)
self._panOrigin = pos
def mouseReleaseEvent(self, event):
if hasattr(self, "_panOrigin"):
del self._panOrigin
def mouseDoubleClickEvent(self, event):
pass
# custom painting
def paintBackground(self, painter, index):
pass
def paint(self, painter, index):
pass
|
1,694 |
test compute gradient
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import unittest
import numpy as np
from sklearn import metrics
from fate_arch.session import computing_session as session
from federatedml.ensemble.basic_algorithms.decision_tree.tree_core.loss.cross_entropy import \
SigmoidBinaryCrossEntropyLoss
from federatedml.ensemble.basic_algorithms.decision_tree.tree_core.loss.cross_entropy import SoftmaxCrossEntropyLoss
from federatedml.util import consts
class TestSigmoidBinaryCrossEntropyLoss(unittest.TestCase):
def setUp(self):
session.init("test_cross_entropy")
self.sigmoid_loss = SigmoidBinaryCrossEntropyLoss()
self.y_list = [i % 2 for i in range(100)]
self.predict_list = [random.random() for i in range(100)]
self.y = session.parallelize(self.y_list, include_key=False, partition=16)
self.predict = session.parallelize(self.predict_list, include_key=False, partition=16)
def test_predict(self):
for i in range(1, 10):
np_v = 1.0 / (1.0 + np.exp(-1.0 / i))
self.assertTrue(np.fabs(self.sigmoid_loss.predict(1.0 / i) - np_v) < consts.FLOAT_ZERO)
def METHOD_NAME(self):
for i in range(10):
pred = random.random()
y = i % 2
grad = pred - y
self.assertTrue(np.fabs(self.sigmoid_loss.compute_grad(y, pred) - grad) < consts.FLOAT_ZERO)
def test_compute_hess(self):
for i in range(10):
pred = random.random()
y = i % 2
hess = pred * (1 - pred)
self.assertTrue(np.fabs(self.sigmoid_loss.compute_hess(y, pred) - hess) < consts.FLOAT_ZERO)
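# Worked example (illustrative): for y = 1 and pred = 0.7 the expected gradient is
# pred - y = -0.3 and the expected hessian is pred * (1 - pred) = 0.21, which is
# what the two tests above verify.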
def test_compute_loss(self):
sklearn_loss = metrics.log_loss(self.y_list, self.predict_list)
sigmoid_loss = self.sigmoid_loss.compute_loss(self.y, self.predict)
self.assertTrue(np.fabs(sigmoid_loss - sklearn_loss) < consts.FLOAT_ZERO)
def tearDown(self):
session.stop()
class TestSoftmaxCrossEntropyLoss(unittest.TestCase):
def setUp(self):
session.init("test_cross_entropy")
self.softmax_loss = SoftmaxCrossEntropyLoss()
self.y_list = [i % 5 for i in range(100)]
self.predict_list = [np.array([random.random() for i in range(5)]) for j in range(100)]
self.y = session.parallelize(self.y_list, include_key=False, partition=16)
self.predict = session.parallelize(self.predict_list, include_key=False, partition=16)
def test_predict(self):
for i in range(10):
list = [random.random() for j in range(5)]
pred_arr = np.asarray(list, dtype='float64')
mx = pred_arr.max()
predict = np.exp(pred_arr - mx) / sum(np.exp(pred_arr - mx))
softmaxloss_predict = self.softmax_loss.predict(pred_arr)
self.assertTrue((np.fabs(predict - softmaxloss_predict) < consts.FLOAT_ZERO).all())
def test_compute_grad(self):
for i in range(10):
pred = np.asarray([random.random() for j in range(5)], dtype="float64")
label = random.randint(0, 4)
softmaxloss_grad = self.softmax_loss.compute_grad(label, pred)
grad = pred.copy()
grad[label] -= 1
self.assertTrue((np.fabs(grad - softmaxloss_grad) < consts.FLOAT_ZERO).all())
def test_compute_hess(self):
for i in range(10):
pred = np.asarray([random.random() for j in range(5)], dtype='float64')
label = random.randint(0, 4)
softmaxloss_hess = self.softmax_loss.compute_hess(label, pred)
hess = 2 * pred * (1 - pred)
self.assertTrue((np.fabs(hess - softmaxloss_hess) < consts.FLOAT_ZERO).all())
def test_compute_loss(self):
softmax_loss = self.softmax_loss.compute_loss(self.y, self.predict)
loss = sum(-np.log(pred[yi]) for yi, pred in zip(self.y_list, self.predict_list)) / len(self.y_list)
self.assertTrue(np.fabs(softmax_loss - loss) < consts.FLOAT_ZERO)
def tearDown(self):
session.stop()
if __name__ == '__main__':
unittest.main()
|
1,695 |
is marked for disconnect
|
import time
import signal
import os
from ..common.trex_types import RC_OK, RC_ERR
from ..common.trex_req_resp_client import JsonRpcClient, BatchMessage, ErrNo as JsonRpcErrNo
class RRConnection(object):
'''
Manages a simple RR connection to the server
The connection state object
describes the state of the connection to the server,
which can be either fully disconnected, fully connected,
or marked for disconnection
'''
DISCONNECTED = 1
CONNECTED = 2
MARK_FOR_DISCONNECT = 3
def __init__ (self, ctx):
# hold pointer to context
self.ctx = ctx
self.sigint_on_conn_lost = False
# low level RPC layer
self.rpc = JsonRpcClient(ctx)
# init state
self.state = (self.DISCONNECTED, None)
def probe_server (self):
rpc = JsonRpcClient(self.ctx)
rpc.set_timeout_sec(self.rpc.get_timeout_sec())
try:
rpc.connect()
return rpc.transmit('get_version')
finally:
rpc.disconnect()
def disconnect (self):
'''
disconnect from both channels
sync and async
'''
try:
self.rpc.disconnect()
self.rpc.set_api_h(None)
finally:
self.state = (self.DISCONNECTED, None)
def connect (self):
'''
connect to the server (two channels)
'''
# first disconnect if already connected
if self.is_any_connected():
self.disconnect()
# connect
rc = self.__connect()
if not rc:
self.disconnect()
return rc
def barrier (self):
'''
executes a barrier
when it returns, an async barrier is guaranteed
'''
pass
def sync (self):
'''
fully sync the client with the server
must be called after all the config
was done
'''
pass
def mark_for_disconnect (self, cause):
'''
A thread-safe call:
any thread can mark the current connection
as no longer valid,
which will require the main thread to reconnect
'''
pass
def sigint_on_conn_lost_enable (self):
'''
when enabled, if connection
is lost a SIGINT will be sent
to the main thread
'''
self.sigint_on_conn_lost = True
def sigint_on_conn_lost_disable (self):
'''
disable SIGINT dispatching
on case of connection lost
'''
self.sigint_on_conn_lost = False
def is_alive (self):
'''
return True if any data has arrived
from the server in the last 3 seconds
'''
return True
def is_any_connected (self):
return ( self.rpc.is_connected() )
def is_connected (self):
return (self.state[0] == self.CONNECTED and self.rpc.is_connected())
def METHOD_NAME (self):
return self.state[0] == self.MARK_FOR_DISCONNECT
def get_disconnection_cause (self):
return self.state[1]
########## private ################
def __connect (self):
'''
connect to the server (two channels)
'''
# start with the sync channel
self.ctx.logger.pre_cmd("Connecting to RPC server on {0}:{1}".format(self.ctx.server, self.ctx.sync_port))
rc = self.rpc.connect()
if not rc:
return rc
# API sync V2
rc = self.rpc.transmit("api_sync_v2", params = self.ctx.api_ver)
self.ctx.logger.post_cmd(rc)
if not rc:
# api_sync_v2 is not present in v2.30 and older
if rc.errno() == JsonRpcErrNo.MethodNotSupported:
return RC_ERR('Mismatch between client and server versions')
return rc
# get the API_H and provide it to the RPC channel from now on
self.rpc.set_api_h(rc.data()['api_h'])
self.state = (self.CONNECTED, None)
return RC_OK()
|
1,696 |
get xs
|
from collections import OrderedDict, namedtuple
from math import sin, cos, pi, sqrt, atan2
Box = namedtuple('Box', 'x y dx dy') # corner and size of a 2D shape
Padding = namedtuple('Padding', 'x y')
def clip_angles(a1, a2):
"Return the angles such that a1 to a2 extend at maximum from -pi to pi"
EPSILON = 1e-8 # without it, p1 can be == p2 and svg arcs are not drawn
return max(-pi + EPSILON, a1), min(pi - EPSILON, a2)
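# Example (illustrative): clip_angles(-4, 4) returns approximately (-pi, pi),
# keeping annular sectors within a single revolution.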
def cartesian(point):
r, a = point
return r * cos(a), r * sin(a)
def summary(nodes, prop="name"):
"Return a list of names summarizing the given list of nodes"
return list(OrderedDict((first_value(node, prop), None) for node in nodes).keys())
def first_value(tree, prop):
"Return the value of the requested property for the first node that has it"
return next((node.props.get(prop) for node in tree.traverse('preorder')
if node.props.get(prop)), '')
def METHOD_NAME(box):
x, _, dx, _ = box
return x, x + dx
def get_ys(box):
_, y, _, dy = box
return y, y + dy
def intersects_box(b1, b2):
"Return True if the boxes b1 and b2 (of the same kind) intersect"
return (intersects_segment(METHOD_NAME(b1), METHOD_NAME(b2)) and
intersects_segment(get_ys(b1), get_ys(b2)))
def intersects_segment(s1, s2):
"Return True if the segments s1 and s2 intersect"
s1min, s1max = s1
s2min, s2max = s2
return s1min <= s2max and s2min <= s1max
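# Examples (illustrative): intersects_segment((0, 5), (4, 10)) is True because the
# ranges overlap on [4, 5]; intersects_segment((0, 1), (2, 3)) is False.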
def intersects_angles(rect, asec):
"Return True if any part of rect is contained within the angles of the asec"
return any(intersects_segment(get_ys(circumasec(r)), get_ys(asec))
for r in split_thru_negative_xaxis(rect))
# We divide rect in two if it passes thru the -x axis, because then its
# circumscribing asec goes from -pi to +pi and (wrongly) always intersects.
def split_thru_negative_xaxis(rect):
"Return a list of rectangles resulting from cutting the given one"
x, y, dx, dy = rect
if x >= 0 or y > 0 or y + dy < 0:
return [rect]
else:
EPSILON = 1e-8
return [Box(x, y, dx, -y-EPSILON), Box(x, EPSILON, dx, dy + y)]
def circumrect(asec):
"Return the rectangle that circumscribes the given annular sector"
if asec is None:
return None
rmin, amin, dr, da = asec
rmax, amax = rmin + dr, amin + da
amin, amax = clip_angles(amin, amax)
points = [(rmin, amin), (rmin, amax), (rmax, amin), (rmax, amax)]
xs = [r * cos(a) for r,a in points]
ys = [r * sin(a) for r,a in points]
xmin, ymin = min(xs), min(ys)
xmax, ymax = max(xs), max(ys)
if amin < -pi/2 < amax: # asec traverses the -y axis
ymin = -rmax
if amin < 0 < amax: # asec traverses the +x axis
xmax = rmax
if amin < pi/2 < amax: # asec traverses the +y axis
ymax = rmax
# NOTE: the annular sectors we consider never traverse the -x axis.
return Box(xmin, ymin, xmax - xmin, ymax - ymin)
def circumasec(rect):
"Return the annular sector that circumscribes the given rectangle"
if rect is None:
return None
x, y, dx, dy = rect
points = [(x, y), (x, y+dy), (x+dx, y), (x+dx, y+dy)]
radius2 = [x*x + y*y for x,y in points]
if x <= 0 and x+dx >= 0 and y <= 0 and y+dy >= 0:
return Box(0, -pi, sqrt(max(radius2)), 2*pi)
else:
angles = [atan2(y, x) for x,y in points]
rmin, amin = sqrt(min(radius2)), min(angles)
return Box(rmin, amin, sqrt(max(radius2)) - rmin, max(angles) - amin)
# Basic drawing elements.
def draw_nodebox(box, name='', properties=None,
node_id=None, searched_by=None, style=None):
properties = { k:v for k,v in (properties or {}).items() \
if not (k.startswith('_') or k == 'seq')}
return ['nodebox', box, name,
properties, node_id or [],
searched_by or [], style or {}]
def draw_outline(box, style=None):
return ['outline', box, style or {}]
def get_line_type(style):
types = ['solid', 'dotted', 'dashed']
if style.get('type'):
style['type'] = types[int(style['type'])]
else:
style['type'] = types[0]
return style
def draw_line(p1, p2, line_type='', parent_of=None, style=None):
style = get_line_type(style or {})
return ['line', p1, p2, line_type, parent_of or [], style]
def draw_arc(p1, p2, large=False, arc_type='', style=None):
style = get_line_type(style or {})
return ['arc', p1, p2, int(large), arc_type, style]
def draw_circle(center, radius, circle_type='', style=None, tooltip=None):
return ['circle', center, radius, circle_type, style or {}, tooltip or '']
def draw_ellipse(center, rx, ry, ellipse_type='', style=None, tooltip=None):
return ['ellipse', center, rx, ry, ellipse_type, style or {}, tooltip or '']
def draw_slice(center, r, a, da, slice_type='', style=None, tooltip=None):
return ['slice', (center, r, a, da), slice_type, style or {}, tooltip or '']
def draw_triangle(box, tip, triangle_type='', style=None, tooltip=None):
"""Returns array with all the information needed to draw a triangle
in front end.
:box: bounds triangle
:tip: defines tip orientation 'top', 'left' or 'right'.
:triangle_type: will label triangle in front end (class)
"""
return ['triangle', box, tip, triangle_type, style or {}, tooltip or '']
def draw_text(box, text, text_type='', rotation=0, anchor=None, style=None):
return ['text', box, text, text_type, rotation, anchor or "", style or {}]
def draw_rect(box, rect_type, style=None, tooltip=None):
return ['rect', box, rect_type, style or {}, tooltip or '']
def draw_rhombus(box, rhombus_type='', style=None, tooltip=None):
""" Create rhombus provided a bounding box """
# Rotate the box to provide a rhombus (points) to drawing engine
x, y, dx, dy = box
rhombus = ((x + dx / 2, y), # top
(x + dx, y + dy / 2), # right
(x + dx / 2, y + dy), # bottom
(x, y + dy / 2)) # left
return ['rhombus', rhombus, rhombus_type, style or {}, tooltip or '']
def draw_arrow(box, tip, orientation='right', arrow_type='',
style=None, tooltip=None):
""" Create arrow provided a bounding box """
x, y, dx, dy = box
if orientation == 'right':
arrow = ((x, y),
(x + dx - tip, y),
(x + dx, y + dy / 2),
(x + dx - tip, y + dy),
(x, y + dy))
    elif orientation == 'left':
        arrow = ((x, y + dy / 2),
                 (x + tip, y),
                 (x + dx, y),
                 (x + dx, y + dy),
                 (x + tip, y + dy))
    else:
        raise ValueError(f'unknown arrow orientation: {orientation!r}')
    return ['polygon', arrow, arrow_type, style or {}, tooltip or '']
def draw_array(box, a, tooltip=None):
return ['array', box, a, tooltip or '']
def draw_html(box, html, html_type='', style=None):
return ['html', box, html, html_type, style or {}]
def draw_img(box, img, img_type='', style=None):
return ['img', box, img, img_type, style or {}]
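# Usage sketch (illustrative only; Box is the (x, y, dx, dy) tuple used above, and
# the front end that consumes these command lists is out of scope here):
#   elements = [
#       draw_rect(Box(0, 0, 10, 5), 'node'),
#       draw_text(Box(1, 1, 8, 3), 'A', text_type='name'),
#       draw_line((0, 0), (10, 5), style={'type': 1}),  # get_line_type maps 1 -> 'dotted'
#   ]
# Every helper returns a plain list, so a whole drawing serializes directly to JSON.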
|
1,697 |
simulate
|
from campaign.campaign_main.campaign_7_2 import MAP
from module.campaign.campaign_base import CampaignBase
from module.config.config import AzurLaneConfig
from module.logger import logger
from module.map_detection.homography import Homography
from module.map_detection.utils import *
class Config:
pass
    # Universal configs to reduce errors
INTERNAL_LINES_HOUGHLINES_THRESHOLD = 40
EDGE_LINES_HOUGHLINES_THRESHOLD = 40
DETECTION_BACKEND = 'perspective'
INTERNAL_LINES_FIND_PEAKS_PARAMETERS = {
'height': (80, 255 - 24),
'width': (1.5, 10),
'prominence': 10,
'distance': 35,
}
EDGE_LINES_FIND_PEAKS_PARAMETERS = {
'height': (255 - 40, 255),
'prominence': 10,
'distance': 50,
'wlen': 1000
}
STORY_OPTION = -2
MAP_FOCUS_ENEMY_AFTER_BATTLE = True
MAP_HAS_SIREN = True
MAP_HAS_FLEET_STEP = True
IGNORE_LOW_EMOTION_WARN = False
MAP_GRID_CENTER_TOLERANCE = 0.2
MAP_SWIPE_MULTIPLY = (1.320, 1.009)
MAP_SWIPE_MULTIPLY_MINITOUCH = (1.276, 0.974)
cfg = AzurLaneConfig('alas', task='Alas').merge(Config())
cfg.DETECTION_BACKEND = 'perspective'
az = CampaignBase(cfg)
az.map = MAP
# az.device.disable_stuck_detection()
az.update()
hm = Homography(cfg)
# Load from a known homo_storage
# sto = ((10, 5), [(137.776, 83.461), (1250.155, 83.461), (18.123, 503.909), (1396.595, 503.909)])
# hm.load_homography(storage=sto)
# Or from screenshot
hm.load_homography(image=np.array(az.device.image))
class SwipeSimulate:
def __init__(self, swipe, simulate_count=4):
self.simulate_count = simulate_count
self.swipe = np.array(swipe, dtype=float)
self.swipe_base = self.cal_swipe_base()
logger.info(self.swipe_base)
def cal_swipe_base(self):
swipe_base = None
for loca, grid in az.view.grids.items():
offset = grid.screen2grid([az.config.SCREEN_CENTER])[0].astype(int)
points = grid.grid2screen(np.add([[0.5, 0], [-0.5, 0], [0, 0.5], [0, -0.5]], offset))
swipe_base = np.array([np.linalg.norm(points[0] - points[1]), np.linalg.norm(points[2] - points[3])])
break
if swipe_base is None:
logger.critical('Unable to get swipe_base')
exit(1)
else:
return swipe_base
@staticmethod
def normalise_offset(offset):
"""
Convert hm.homo_loca (range from 0 to 140),
to swipe difference (range from -70 to 70)
"""
if offset[0] > 70:
offset[0] -= 140
if offset[1] > 100:
offset[1] -= 140
return offset
def METHOD_NAME(self):
logger.hr(f'Swipe: {self.swipe}', level=1)
record = []
for n in range(self.simulate_count):
hm.detect(az.device.image)
# hm.draw()
init_offset = self.normalise_offset(hm.homo_loca)
az.device.swipe_vector(self.swipe)
az.device.sleep(0.3)
az.device.screenshot()
hm.detect(az.device.image)
offset = self.normalise_offset(hm.homo_loca)
record.append(offset - init_offset)
# fit = hm.fit_points(np.array(record), encourage=2)
fit = np.mean(record, axis=0)
# (170, 65)
multiply = np.round(np.abs(self.swipe) / ((np.abs(self.swipe) // (170, 130))) / self.swipe_base, 3)
logger.info(
f'[{n}/{self.simulate_count}] init_offset={init_offset}, offset={offset}, fit={fit}, multiply={multiply}')
fleet = az.get_fleet_show_index()
az.fleet_set(3 - fleet)
az.fleet_set(fleet)
# az.fleet_set(3)
# az.fleet_set(1)
az.ensure_no_info_bar()
self.multiply = multiply
self.swipe -= (fit[0], 0)
# self.swipe -= (0, fit[1])
self.show()
return abs(fit[0])
# return abs(fit[1])
def show(self):
print()
print(f'Last swipe: {self.swipe}')
print('Result to copy:')
print()
# MAP_SWIPE_MULTIPLY = 1.579
# MAP_SWIPE_MULTIPLY_MINITOUCH = 1.527
if az.config.Emulator_ControlMethod == 'minitouch':
multiply = np.round(self.multiply[0] / 1.572 * 1.626, 3)
minitouch = self.multiply[0]
else:
multiply = self.multiply[0]
minitouch = np.round(self.multiply[0] / 1.626 * 1.572, 3)
print(f' MAP_SWIPE_MULTIPLY = {str(multiply).ljust(5, "0")}')
print(f' MAP_SWIPE_MULTIPLY_MINITOUCH = {str(minitouch).ljust(5, "0")}')
print()
print()
print(f'Last swipe: {self.swipe}')
print('Result to copy:')
print()
# MAP_SWIPE_MULTIPLY = 1.579
# MAP_SWIPE_MULTIPLY_MINITOUCH = 1.527
        if az.config.Emulator_ControlMethod == 'minitouch':
            multiply = np.round(self.multiply[1] / 1.572 * 1.626, 3)
            minitouch = self.multiply[1]
        else:
            multiply = self.multiply[1]
            minitouch = np.round(self.multiply[1] / 1.626 * 1.572, 3)
print(f' MAP_SWIPE_MULTIPLY = {str(multiply).ljust(5, "0")}')
print(f' MAP_SWIPE_MULTIPLY_MINITOUCH = {str(minitouch).ljust(5, "0")}')
print()
def run(self):
while 1:
result = self.METHOD_NAME()
if result <= 1:
break
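# Usage sketch (illustrative; the swipe vectors are placeholders, not tuned values):
#   SwipeSimulate((400, 0)).run()   # fit the horizontal multiplier
#   SwipeSimulate((0, 400)).run()   # fit the vertical one, after swapping in the
#                                   # commented fit[1] lines in METHOD_NAME above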
if __name__ == '__main__':
"""
To fit MAP_SWIPE_MULTIPLY.
Before running this, move your fleet on map to be like this:
FL is current fleet, Fl is another fleet.
Camera should focus on current fleet (Double click switch over to refocus)
-- -- -- -- --
-- Fl -- FL --
-- -- -- -- --
After run, Result is ready to copy.
"""
    SwipeSimulate((400, 0)).run()
|
1,698 |
test expression table control
|
# Copyright 2023 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import inspect
import pandas as pd # type: ignore
from taipy.gui import Gui
def test_expression_text_control_str(gui: Gui, test_client, helpers):
gui._bind_var_val("x", "Hello World!")
md_string = "<|{x}|>"
expected_list = ["<Field", 'dataType="str"', 'defaultValue="Hello World!"', "value={tpec_TpExPr_x_TPMDL_0}"]
helpers.test_control_md(gui, md_string, expected_list)
def test_expression_text_control_int(gui: Gui, test_client, helpers):
gui._bind_var_val("x", 10)
md_string = "<|{x}|>"
expected_list = ["<Field", 'dataType="int"', 'defaultValue="10"', "value={tpec_TpExPr_x_TPMDL_0}"]
helpers.test_control_md(gui, md_string, expected_list)
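# A hypothetical companion test (not part of the original suite): it assumes the
# renderer reports Python floats with dataType="float", mirroring the str and int
# cases above; the expected strings below are an inference, not taken from taipy.
def test_expression_text_control_float(gui: Gui, test_client, helpers):
    gui._bind_var_val("x", 10.5)
    md_string = "<|{x}|>"
    expected_list = ["<Field", 'dataType="float"', 'defaultValue="10.5"', "value={tpec_TpExPr_x_TPMDL_0}"]
    helpers.test_control_md(gui, md_string, expected_list)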
def test_expression_text_control_1(gui: Gui, test_client, helpers):
gui._set_frame(inspect.currentframe())
gui._bind_var_val("x", 10)
gui._bind_var_val("y", 20)
md_string = "<|{x + y}|>"
expected_list = [
"<Field",
'dataType="int"',
'defaultValue="30"',
"value={tp_TpExPr_x_y_TPMDL_0_0}",
]
helpers.test_control_md(gui, md_string, expected_list)
def test_expression_text_control_2(gui: Gui, test_client, helpers):
gui._set_frame(inspect.currentframe())
gui._bind_var_val("x", 10)
gui._bind_var_val("y", 20)
md_string = "<|x + y = {x + y}|>"
expected_list = [
"<Field",
'dataType="str"',
'defaultValue="x + y = 30"',
"value={tp_TpExPr_x_y_x_y_TPMDL_0_0}",
]
helpers.test_control_md(gui, md_string, expected_list)
def test_expression_text_control_3(gui: Gui, test_client, helpers):
gui._set_frame(inspect.currentframe())
gui._bind_var_val("x", "Mickey Mouse")
gui._bind_var_val("y", "Donald Duck")
md_string = "<|Hello {x} and {y}|>"
expected_list = [
"<Field",
'dataType="str"',
'defaultValue="Hello Mickey Mouse and Donald Duck"',
"value={tp_TpExPr_Hello_x_and_y_TPMDL_0_0}",
]
helpers.test_control_md(gui, md_string, expected_list)
def test_expression_text_gt_operator(gui: Gui, test_client, helpers):
gui._set_frame(inspect.currentframe())
gui._bind_var_val("x", 0)
md_string = "<|{x > 0}|>"
expected_list = [
"<Field",
'dataType="bool"',
'defaultValue="false"',
"value={tp_TpExPr_x_0_TPMDL_0_0}",
]
helpers.test_control_md(gui, md_string, expected_list)
def test_expression_button_control(gui: Gui, test_client, helpers):
gui._bind_var_val("label", "A button label")
md_string = "<|button|label={label}|>"
expected_list = ["<Button", 'defaultLabel="A button label"', "label={tpec_TpExPr_label_TPMDL_0}"]
helpers.test_control_md(gui, md_string, expected_list)
def METHOD_NAME(gui: Gui, test_client, helpers):
gui._set_frame(inspect.currentframe())
gui._bind_var_val("pd", pd)
gui._bind_var_val("series_1", pd.Series(["a", "b", "c"], name="Letters"))
gui._bind_var_val("series_2", pd.Series([1, 2, 3], name="Numbers"))
md_string = "<|{pd.concat([series_1, series_2], axis=1)}|table|columns=Letters;Numbers|>"
expected_list = [
"<Table",
'defaultColumns="{"Letters": {"index": 0, "type": "object", "dfid": "Letters"}, "Numbers": {"index": 1, "type": "int", "dfid": "Numbers"}}"',
'updateVarName="_TpD_tp_TpExPr_pd_concat_series_1_series_2_axis_1_TPMDL_0_0"',
"data={_TpD_tp_TpExPr_pd_concat_series_1_series_2_axis_1_TPMDL_0_0}",
]
helpers.test_control_md(gui, md_string, expected_list)
assert isinstance(gui._get_data_scope().tp_TpExPr_pd_concat_series_1_series_2_axis_1_TPMDL_0_0, pd.DataFrame)
def test_lambda_expression_selector(gui: Gui, test_client, helpers):
gui._bind_var_val(
"lov",
[
{"id": "1", "name": "scenario 1"},
{"id": "3", "name": "scenario 3"},
{"id": "2", "name": "scenario 2"},
],
)
gui._bind_var_val("sel", {"id": "1", "name": "scenario 1"})
md_string = "<|{sel}|selector|lov={lov}|type=test|adapter={lambda elt: (elt['id'], elt['name'])}|>"
expected_list = [
"<Selector",
'defaultLov="[["1", "scenario 1"], ["3", "scenario 3"], ["2", "scenario 2"]]"',
'defaultValue="["1"]"',
'updateVars="lov=_TpL_tpec_TpExPr_lov_TPMDL_0"',
"lov={_TpL_tpec_TpExPr_lov_TPMDL_0}",
'updateVarName="_TpLv_tpec_TpExPr_sel_TPMDL_0"',
"value={_TpLv_tpec_TpExPr_sel_TPMDL_0}",
]
helpers.test_control_md(gui, md_string, expected_list)
|
1,699 |
set up
|
import unittest
from flood_forecast.transformer_xl.informer import Informer
from flood_forecast.transformer_xl.data_embedding import DataEmbedding
from flood_forecast.preprocessing.pytorch_loaders import TemporalLoader, TemporalTestLoader
from flood_forecast.temporal_decoding import decoding_function
import torch
class TestInformer(unittest.TestCase):
def METHOD_NAME(self):
self.informer = Informer(3, 3, 3, 20, 20, 20, factor=1)
self.kwargs = {
"file_path": "tests/test_data/keag_small.csv",
"forecast_history": 5,
"forecast_length": 1,
"target_col": ["cfs"],
"relevant_cols": ["cfs", "temp", "precip"],
"sort_column": "date",
"feature_params":
{
"datetime_params": {
"month": "numerical",
"day": "numerical",
"day_of_week": "numerical",
"hour": "numerical"
}
}
}
def test_informer(self):
        # Inputs: (batch_size, seq_len, n_time_series) for the value tensors and
        # (batch_size, seq_len, n_temporal_features) for the temporal feature tensors
result = self.informer(torch.rand(2, 20, 3), torch.rand(2, 20, 4), torch.rand(2, 20, 3), torch.rand(2, 20, 4))
self.assertEqual(len(result.shape), 3)
self.assertEqual(result.shape[0], 2)
self.assertEqual(result.shape[1], 20)
def test_data_embedding(self):
d = DataEmbedding(5, 128, data=5)
r = d(torch.rand(2, 10, 5), torch.rand(2, 10, 5))
self.assertTrue(hasattr(d.temporal_embedding, "month_embed"))
self.assertEqual(r.shape[2], 128)
def test_temporal_loader(self):
loa = TemporalLoader(["month", "day", "day_of_week", "hour"], self.kwargs)
result = loa[0]
self.assertEqual(len(result), 2)
# Test output has proper dimensions
# print(loa[0][0].shape)
self.assertEqual(result[0][0].shape[0], 5)
self.assertEqual(result[0][1].shape[1], 4)
self.assertEqual(result[0][0].shape[1], 3)
self.assertEqual(result[0][1].shape[0], 5)
# Test output right order
temporal_src_embd = result[0][1]
second = temporal_src_embd[2, :]
self.assertEqual(second[0], 5)
self.assertEqual(second[1], 1)
self.assertEqual(second[3], 3)
# Test data loading component
d = DataEmbedding(3, 128)
embedding = d(result[0][0].unsqueeze(0), temporal_src_embd.unsqueeze(0))
self.assertEqual(embedding.shape[2], 128)
i = Informer(3, 3, 3, 5, 5, out_len=4, factor=1)
r0 = result[0][0].unsqueeze(0)
r1 = result[0][1].unsqueeze(0)
r3 = result[1][1].unsqueeze(0)
r2 = result[1][0].unsqueeze(0)
res = i(r0, r1, r3, r2)
self.assertEqual(res.shape[1], 1)
self.assertEqual(r3[0, 0, 0].item(), 459)
def test_temporal_load(self):
loa = TemporalLoader(["month", "day", "day_of_week", "hour"], self.kwargs, 2)
data = loa[0]
self.assertEqual(data[1][1].shape[0], 3)
self.assertEqual(data[1][1][0, 0].item(), 449)
self.assertEqual(data[1][1][2, 0], 459)
def test_data_temporal_loader_init(self):
kwargs2 = self.kwargs.copy()
kwargs3 = {
"forecast_total": 336,
"df_path": "tests/test_data2/keag_small.csv",
"kwargs": kwargs2
}
d = TemporalTestLoader(["month", "day", "day_of_week", "hour"], kwargs3, 3)
src, trg, df, _ = d[0]
self.assertEqual(trg[0].shape[0], 339)
self.assertEqual(src[0].shape[0], 5)
self.assertEqual(len(df.index), 341)
self.assertEqual(trg[1][0, 0].item(), 445)
    def test_decoding_t(self):
src = torch.rand(20, 3)
trg = torch.rand(355, 3)
src1 = torch.rand(20, 4)
trg1 = torch.rand(355, 4)
d = decoding_function(self.informer, src, trg, 5, src1, trg1, 1, 20, 336, "cpu")
self.assertEqual(d.shape[0], 1)
self.assertEqual(d.shape[1], 336)
def test_decoding_2(self):
src = torch.rand(20, 3)
trg = torch.rand(355, 3)
src1 = torch.rand(20, 4)
trg1 = torch.rand(355, 4)
d = decoding_function(self.informer, src, trg, 5, src1, trg1, 1, 20, 336, "cpu")
self.assertEqual(d.shape[0], 1)
self.assertEqual(d.shape[1], 336)
self.assertNotEqual(d[0, 0, 0].item(), d[0, 1, 0].item())
self.assertNotAlmostEqual(d[0, 0, 0].item(), d[0, 330, 0].item())
self.assertNotAlmostEqual(d[0, 20, 0].item(), d[0, 333, 0].item())
self.assertNotAlmostEqual(d[0, 300, 0].item(), d[0, 334, 0].item())
self.assertNotAlmostEqual(d[0, 20, 0].item(), trg[20, 0].item())
self.assertNotAlmostEqual(d[0, 21, 0].item(), trg[21, 0].item())
def test_decoding_3(self):
informer_model2 = Informer(3, 3, 3, 48, 24, 12, factor=1)
src = torch.rand(1, 48, 3)
trg = torch.rand(1, 362, 3)
src1 = torch.rand(1, 48, 4)
trg1 = torch.rand(1, 362, 4)
d = decoding_function(informer_model2, src, trg, 12, src1, trg1, 1, 36, 336, "cpu")
self.assertEqual(d.shape[0], 1)
self.assertEqual(d.shape[1], 336)
    def test_t_loader2(self):
s_wargs = {
"file_path": "tests/test_data/keag_small.csv",
"forecast_history": 39,
"forecast_length": 2,
"target_col": ["cfs"],
"relevant_cols": ["cfs", "temp", "precip"],
"sort_column": "date",
"feature_params":
{
"datetime_params": {
"month": "numerical",
"day": "numerical",
"day_of_week": "numerical",
"hour": "numerical"
}
}
}
s_wargs["forecast_history"] = 39
t_load = TemporalLoader(["month", "day"], s_wargs, 30)
src, trg = t_load[0]
self.assertEqual(trg[1].shape[0], 32)
self.assertEqual(trg[0].shape[0], 32)
self.assertEqual(trg[1].shape[1], 5)
self.assertEqual(trg[0].shape[1], 2)
# this test makes sure the label_lens parameter works
print("Complete")
|