# Dataset columns: id (int64, 0-6k) | code (string, 4k-8k chars) | code_compressed (null)
# ---- sample 2000 ----
from rest_framework import serializers as ser
from addons.osfstorage.models import OsfStorageFile
from api.base.serializers import (
IDField,
JSONAPISerializer,
LinksField,
RelationshipField,
TypeField,
VersionedDateTimeField,
)
from api.base.utils import absolute_reverse
from api.files.serializers import get_file_download_link
from api.nodes.serializers import NodeSerializer
class MeetingSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'location',
])
id = IDField(source='endpoint', read_only=True)
type = TypeField()
name = ser.CharField(read_only=True)
location = ser.CharField(read_only=True)
start_date = VersionedDateTimeField(read_only=True)
end_date = VersionedDateTimeField(read_only=True)
info_url = ser.URLField(read_only=True)
logo_url = ser.URLField(read_only=True)
field_names = ser.DictField(read_only=True)
submissions_count = ser.SerializerMethodField()
active = ser.BooleanField(read_only=True)
type_one_submission_email = ser.SerializerMethodField()
type_two_submission_email = ser.SerializerMethodField()
is_accepting_type_one = ser.BooleanField(source='poster', read_only=True)
is_accepting_type_two = ser.BooleanField(source='talk', read_only=True)
submissions = RelationshipField(
related_view='meetings:meeting-submissions',
related_view_kwargs={'meeting_id': '<endpoint>'},
related_meta={'count': 'get_submissions_count'},
)
links = LinksField({
'self': 'get_absolute_url',
'html': 'get_absolute_html_url',
})
def format_submission_email(self, obj, submission_field):
if obj.active:
return '{}-{}@osf.io'.format(obj.endpoint, obj.field_names.get(submission_field))
return ''
def get_type_one_submission_email(self, obj):
return self.format_submission_email(obj, 'submission1')
def get_type_two_submission_email(self, obj):
return self.format_submission_email(obj, 'submission2')
def get_absolute_url(self, obj):
return absolute_reverse('meetings:meeting-detail', kwargs={'meeting_id': obj.endpoint})
def get_submissions_count(self, obj):
if getattr(obj, 'submissions_count', None):
return obj.submissions_count
else:
return obj.valid_submissions.count()
class Meta:
type_ = 'meetings'
class MeetingSubmissionSerializer(NodeSerializer):
filterable_fields = frozenset([
'title',
'meeting_category',
'author_name',
])
author_name = ser.SerializerMethodField()
download_count = ser.SerializerMethodField()
meeting_category = ser.SerializerMethodField()
author = RelationshipField(
related_view='users:user-detail',
related_view_kwargs={'user_id': 'get_author_id'},
read_only=True,
)
links = LinksField({
'self': 'get_absolute_url',
'html': 'get_absolute_html_url',
'download': 'get_download_link',
})
def get_author(self, obj):
contrib_queryset = obj.contributor_set.filter(visible=True).order_by('_order')
if contrib_queryset:
return contrib_queryset.first().user
return None
def get_author_id(self, obj):
# Author guid is annotated on queryset in ListView
if getattr(obj, 'author_id', None):
return obj.author_id
else:
author = self.get_author(obj)
return author._id if author else None
def get_author_name(self, obj):
"""
Returns the first bibliographic contributor's family_name if it exists.
        Otherwise, returns their fullname.
"""
if getattr(obj, 'author_name', None):
# Field is annotated on queryset in ListView for filtering purposes
return obj.author_name
else:
author = self.get_author(obj)
if author:
return author.family_name if author.family_name else author.fullname
return None
def get_meeting_category(self, obj):
"""
        Returns the node's meeting category, derived from its tags. If the first
        submission type tag exists on the node, return it; otherwise, return the
        second submission type tag as a default.
"""
if getattr(obj, 'meeting_category', None):
# Field is annotated on queryset in ListView for filtering purposes
return obj.meeting_category
else:
meeting = self.context['meeting']
submission1_name = meeting.field_names.get('submission1')
submission2_name = meeting.field_names.get('submission2')
submission_tags = obj.tags.values_list('name', flat=True)
return submission1_name if submission1_name in submission_tags else submission2_name
def get_download_count(self, obj):
"""
        Return the download count of the node's first osfstorage file.
"""
if getattr(obj, 'download_count', None):
return obj.download_count or 0
else:
submission_file = self.get_submission_file(obj)
return submission_file.get_download_count() if submission_file else None
def get_download_link(self, obj):
"""
        Returns a download link for the first osfstorage file on the node. If the
        node was created for a meeting, its first file is assumed to be the
        meeting submission.
"""
if getattr(obj, 'file_id', None):
submission_file = OsfStorageFile.objects.get(id=obj.file_id)
else:
submission_file = self.get_submission_file(obj)
if submission_file:
return get_file_download_link(submission_file)
return None
def get_submission_file(self, obj):
return obj.files.order_by('created').first()
def get_absolute_url(self, obj):
meeting_endpoint = self.context['meeting'].endpoint
return absolute_reverse(
'meetings:meeting-submission-detail',
kwargs={
'meeting_id': meeting_endpoint,
'submission_id': obj._id,
},
)
# Overrides BaseAPISerializer
def METHOD_NAME(self):
"""
        Since meeting submissions are actually nodes, we subclass the NodeSerializer,
        but only return the subset of fields that is relevant to meetings.
"""
fieldset = [
'date_created',
'title',
'author',
'author_name',
'meeting_category',
'download_count',
'submission_file',
]
for field_name in list(self.fields.keys()):
if field_name in ('id', 'links', 'type'):
# MUST return these fields
continue
if field_name not in fieldset:
self.fields.pop(field_name)
return super().METHOD_NAME()
class Meta:
type_ = 'meeting-submissions'
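# A minimal sketch (not part of the module above; the dummy class is
# hypothetical) of the submission-email rule implemented by
# MeetingSerializer.format_submission_email: while a meeting is active,
# addresses take the form '<endpoint>-<field name>@osf.io'.
#
# class _FakeMeeting:
#     endpoint = 'testconf2020'
#     active = True
#     field_names = {'submission1': 'poster', 'submission2': 'talk'}
#
# meeting = _FakeMeeting()
# '{}-{}@osf.io'.format(meeting.endpoint, meeting.field_names.get('submission1'))
# # -> 'testconf2020-poster@osf.io'; an inactive meeting yields ''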
# ---- sample 2001 ----
# -*- coding: utf-8 -*-
import numpy as np
import pytest
import pypsa
@pytest.fixture
def empty_network_5_buses():
# Set up empty network with 5 buses.
network = pypsa.Network()
n_buses = 5
for i in range(n_buses):
network.add("Bus", f"bus_{i}")
return network
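# A hedged sketch of the 'ac_dc_network' fixture used by the tests below; in
# the real test suite it is provided by conftest.py. This assumes the AC-DC
# meshed example network that ships with pypsa.
@pytest.fixture
def ac_dc_network():
    return pypsa.examples.ac_dc_meshed()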
def test_mremove(ac_dc_network):
"""
GIVEN the AC DC exemplary pypsa network.
WHEN two components of Generator are removed with mremove
THEN the generator dataframe and the time-dependent generator
dataframe should not contain the removed elements.
"""
network = ac_dc_network
generators = {"Manchester Wind", "Frankfurt Wind"}
network.mremove("Generator", generators)
assert not generators.issubset(network.generators.index)
assert not generators.issubset(network.generators_t.p_max_pu.columns)
def test_mremove_misspelled_component(ac_dc_network, caplog):
"""
GIVEN the AC DC exemplary pypsa network.
WHEN a misspelled component is removed with mremove
THEN the function should not change anything in the Line
component dataframe and an error should be logged.
"""
network = ac_dc_network
len_lines = len(network.lines.index)
network.mremove("Liness", ["0", "1"])
assert len_lines == len(network.lines.index)
assert caplog.records[-1].levelname == "ERROR"
def METHOD_NAME(empty_network_5_buses):
"""
GIVEN an empty PyPSA network with 5 buses.
WHEN multiple components of Load are added to the network with
madd and attribute p_set
THEN the corresponding load components should be in the index of
    the static load dataframe. Also, every entry in the p_set column
    should equal the assigned value of 3.
"""
buses = empty_network_5_buses.buses.index
# Add load components at every bus with attribute p_set.
load_names = "load_" + buses
empty_network_5_buses.madd(
"Load",
load_names,
bus=buses,
p_set=3,
)
assert load_names.equals(empty_network_5_buses.loads.index)
assert (empty_network_5_buses.loads.p_set == 3).all()
def test_madd_t(empty_network_5_buses):
"""
GIVEN an empty PyPSA network with 5 buses and 7 snapshots.
WHEN multiple components of Load are added to the network with
madd and attribute p_set
THEN the corresponding load components should be in the columns
of the time-dependent load_t dataframe. Also, the shape of the
    dataframe should be 7 snapshots x 5 buses.
"""
# Set up empty network with 5 buses and 7 snapshots.
snapshots = range(7)
empty_network_5_buses.set_snapshots(snapshots)
buses = empty_network_5_buses.buses.index
# Add load component at every bus with time-dependent attribute p_set.
load_names = "load_" + buses
empty_network_5_buses.madd(
"Load",
load_names,
bus=buses,
p_set=np.random.rand(len(snapshots), len(buses)),
)
assert load_names.equals(empty_network_5_buses.loads_t.p_set.columns)
assert empty_network_5_buses.loads_t.p_set.shape == (len(snapshots), len(buses))
def test_madd_misspelled_component(empty_network_5_buses, caplog):
"""
GIVEN an empty PyPSA network with 5 buses.
WHEN multiple components of a misspelled component are added
THEN the function should not change anything and an error should
be logged.
"""
misspelled_component = "Generatro"
empty_network_5_buses.madd(
misspelled_component,
["g_1", "g_2"],
bus=["bus_1", "bus_2"],
)
assert empty_network_5_buses.generators.empty
assert caplog.records[-1].levelname == "ERROR"
assert caplog.records[-1].message == (
f"Component class {misspelled_component} not found"
)
def test_madd_duplicated_index(empty_network_5_buses, caplog):
"""
GIVEN an empty PyPSA network with 5 buses.
WHEN adding generators with the same name
THEN the function should fail and an error should be logged.
"""
empty_network_5_buses.madd(
"Generator",
["g_1", "g_1"],
bus=["bus_1", "bus_2"],
)
assert caplog.records[-1].levelname == "ERROR"
assert caplog.records[-1].message == (
"Error, new components for Generator are not unique"
)
def test_madd_defaults(empty_network_5_buses):
"""
GIVEN an empty PyPSA network with 5 buses.
WHEN adding multiple components of Generator and Load with madd
THEN the defaults should be set correctly according to
n.component_attrs.
"""
gen_names = ["g_1", "g_2"]
empty_network_5_buses.madd(
"Generator",
gen_names,
bus=["bus_1", "bus_2"],
)
line_names = ["l_1", "l_2"]
empty_network_5_buses.madd(
"Load",
        load_names,
bus=["bus_1", "bus_2"],
)
assert empty_network_5_buses.generators.loc[gen_names[0], "control"] == (
empty_network_5_buses.component_attrs.Generator.loc["control", "default"]
)
assert empty_network_5_buses.loads.loc[line_names[0], "p_set"] == (
empty_network_5_buses.component_attrs.Load.loc["p_set", "default"]
)
def test_copy_default_behavior(ac_dc_network):
"""
GIVEN the AC DC exemplary pypsa network.
WHEN copying the network with timestamps
THEN the copied network should have the same generators, loads
and timestamps.
"""
snapshot = ac_dc_network.snapshots[2]
copied_network = ac_dc_network.copy()
loads = ac_dc_network.loads.index.tolist()
generators = ac_dc_network.generators.index.tolist()
copied_loads = copied_network.loads.index.tolist()
copied_generators = copied_network.generators.index.tolist()
assert loads == copied_loads
assert generators == copied_generators
assert not copied_network.snapshots.empty
assert snapshot in copied_network.snapshots
def test_copy_deep_copy_behavior(ac_dc_network):
"""
GIVEN the AC DC exemplary pypsa network.
WHEN copying the network and changing a component
THEN the original network should have not changed.
"""
copied_network = ac_dc_network.copy()
copied_network.loads.rename(index={"London": "Berlin"}, inplace=True)
assert ac_dc_network.loads.index[0] != copied_network.loads.index[0]
def test_copy_no_snapshot(ac_dc_network):
"""
GIVEN the AC DC exemplary pypsa network.
WHEN copying the network without snapshots
THEN the copied network should only have the current time index.
"""
snapshot = ac_dc_network.snapshots[2]
copied_network = ac_dc_network.copy(with_time=False, snapshots=snapshot)
assert copied_network.snapshots.size == 1
assert snapshot not in copied_network.snapshots
# ---- sample 2002 ----
################################################################################
# Creme is a free/open-source Customer Relationship Management software
# Copyright (C) 2009-2022 Hybird
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from __future__ import annotations
import logging
from datetime import datetime
from typing import Iterator
from django.conf import settings
from django.core.mail import EmailMessage, get_connection
from django.db.models.query_utils import Q
from django.db.transaction import atomic
from django.utils.timezone import now
from django.utils.translation import gettext as _
from ..models import CremeModel, DateReminder, Job, JobResult
logger = logging.getLogger(__name__)
FIRST_REMINDER = 1
class Reminder:
id: str = '' # Override with generate_id()
model: type[CremeModel] # Override with a CremeModel sub-class
def __init__(self):
pass
@staticmethod
def generate_id(app_name: str, name: str) -> str:
return f'reminder_{app_name}-{name}'
def get_emails(self, object) -> list[str]:
addresses = []
default_addr = getattr(settings, 'DEFAULT_USER_EMAIL', None)
if default_addr:
addresses.append(default_addr)
else:
logger.critical(
                'Reminder: the setting DEFAULT_USER_EMAIL has not been filled; '
                'no email will be sent.'
)
return addresses
def METHOD_NAME(self, object: CremeModel) -> str:
pass
def generate_email_body(self, object: CremeModel) -> str:
pass
def get_Q_filter(self) -> Q: # TODO: get_queryset instead ????
pass
def ok_for_continue(self) -> bool:
return True
def send_mails(self, instance: CremeModel, job: Job) -> bool:
body = self.generate_email_body(instance)
subject = self.METHOD_NAME(instance)
EMAIL_SENDER = settings.EMAIL_SENDER
messages = [
EmailMessage(subject, body, EMAIL_SENDER, [email])
for email in self.get_emails(instance)
]
try:
with get_connection() as connection:
connection.send_messages(messages)
except Exception as e:
logger.critical('Error while sending reminder emails (%s)', e)
JobResult.objects.create(
job=job,
messages=[
_('An error occurred while sending emails related to «{model}»').format(
model=self.model._meta.verbose_name,
),
_('Original error: {}').format(e),
],
)
return False
return True # Means 'OK'
def execute(self, job: Job) -> None:
if not self.ok_for_continue():
return
dt_now = now().replace(microsecond=0, second=0)
for instance in self.model.objects.filter(self.get_Q_filter()).exclude(reminded=True):
self.send_mails(instance, job)
with atomic():
DateReminder.objects.create(
date_of_remind=dt_now,
ident=FIRST_REMINDER,
object_of_reminder=instance,
)
instance.reminded = True
instance.save()
def next_wakeup(self, now_value: datetime) -> datetime | None:
"""Returns the next time when the job manager should wake up in order
to send the related emails.
@param now_value: datetime object representing 'now'.
@return None -> the job has not to be woken up.
A datetime instance -> the job should be woken up at this time.
If it's in the past, it means the job should be run immediately
(tip: you can simply return now_value).
"""
raise NotImplementedError
class ReminderRegistry:
class RegistrationError(Exception):
pass
def __init__(self):
self._reminders: dict[str, Reminder] = {}
def register(self, reminder_class: type[Reminder]) -> ReminderRegistry:
"""Register a class of Reminder.
@type reminder_class: Class "inheriting" <creme_core.core.reminder.Reminder>.
"""
reminders = self._reminders
reminder_id = reminder_class.id
if not reminder_id:
raise self.RegistrationError(
f"Reminder class with empty id: {reminder_class}",
)
if reminder_id in reminders:
raise self.RegistrationError(
f"Duplicated reminder's id or reminder registered twice: {reminder_id}"
)
reminders[reminder_id] = reminder_class()
return self
def unregister(self, reminder_class: type[Reminder]) -> None:
if self._reminders.pop(reminder_class.id, None) is None:
raise self.RegistrationError(
f'No reminder is registered with this ID: {reminder_class.id}'
)
def __iter__(self) -> Iterator[Reminder]:
return iter(self._reminders.values())
reminder_registry = ReminderRegistry()
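# A minimal sketch, assuming a hypothetical 'Alert' CremeModel with a
# 'trigger_date' field and a 'reminded' flag, of how a concrete Reminder is
# declared and registered (the overrides mirror the hooks defined above):
#
# class AlertReminder(Reminder):
#     id = Reminder.generate_id('my_app', 'alert')
#     model = Alert
#
#     def generate_email_body(self, object):
#         return _('The alert «{}» is about to expire.').format(object)
#
#     def get_Q_filter(self):
#         return Q(trigger_date__lte=now())
#
#     def next_wakeup(self, now_value):
#         return now_value
#
# reminder_registry.register(AlertReminder)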
# ---- sample 2003 ----
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdomain.endpoint import endpoint_data
class SaveBatchTaskForUpdatingContactInfoByNewContactRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Domain', '2018-01-29', 'SaveBatchTaskForUpdatingContactInfoByNewContact')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Country(self): # String
return self.get_query_params().get('Country')
def set_Country(self, Country): # String
self.add_query_param('Country', Country)
def get_City(self): # String
return self.get_query_params().get('City')
def set_City(self, City): # String
self.add_query_param('City', City)
def get_TransferOutProhibited(self): # Boolean
return self.get_query_params().get('TransferOutProhibited')
def set_TransferOutProhibited(self, TransferOutProhibited): # Boolean
self.add_query_param('TransferOutProhibited', TransferOutProhibited)
def get_ZhCity(self): # String
return self.get_query_params().get('ZhCity')
def set_ZhCity(self, ZhCity): # String
self.add_query_param('ZhCity', ZhCity)
def get_TelExt(self): # String
return self.get_query_params().get('TelExt')
def set_TelExt(self, TelExt): # String
self.add_query_param('TelExt', TelExt)
def get_Province(self): # String
return self.get_query_params().get('Province')
def set_Province(self, Province): # String
self.add_query_param('Province', Province)
def get_ZhRegistrantName(self): # String
return self.get_query_params().get('ZhRegistrantName')
def set_ZhRegistrantName(self, ZhRegistrantName): # String
self.add_query_param('ZhRegistrantName', ZhRegistrantName)
def get_PostalCode(self): # String
return self.get_query_params().get('PostalCode')
def set_PostalCode(self, PostalCode): # String
self.add_query_param('PostalCode', PostalCode)
def get_Lang(self): # String
return self.get_query_params().get('Lang')
def set_Lang(self, Lang): # String
self.add_query_param('Lang', Lang)
def get_Email(self): # String
return self.get_query_params().get('Email')
def set_Email(self, Email): # String
self.add_query_param('Email', Email)
def get_ZhRegistrantOrganization(self): # String
return self.get_query_params().get('ZhRegistrantOrganization')
def set_ZhRegistrantOrganization(self, ZhRegistrantOrganization): # String
self.add_query_param('ZhRegistrantOrganization', ZhRegistrantOrganization)
def get_Address(self): # String
return self.get_query_params().get('Address')
def set_Address(self, Address): # String
self.add_query_param('Address', Address)
def get_TelArea(self): # String
return self.get_query_params().get('TelArea')
def set_TelArea(self, TelArea): # String
self.add_query_param('TelArea', TelArea)
def get_ContactType(self): # String
return self.get_query_params().get('ContactType')
def set_ContactType(self, ContactType): # String
self.add_query_param('ContactType', ContactType)
def get_ZhAddress(self): # String
return self.get_query_params().get('ZhAddress')
def set_ZhAddress(self, ZhAddress): # String
self.add_query_param('ZhAddress', ZhAddress)
def get_RegistrantType(self): # String
return self.get_query_params().get('RegistrantType')
def set_RegistrantType(self, RegistrantType): # String
self.add_query_param('RegistrantType', RegistrantType)
def get_DomainNames(self): # RepeatList
return self.get_query_params().get('DomainName')
def METHOD_NAME(self, DomainName): # RepeatList
for depth1 in range(len(DomainName)):
self.add_query_param('DomainName.' + str(depth1 + 1), DomainName[depth1])
def get_Telephone(self): # String
return self.get_query_params().get('Telephone')
def set_Telephone(self, Telephone): # String
self.add_query_param('Telephone', Telephone)
def get_ZhProvince(self): # String
return self.get_query_params().get('ZhProvince')
def set_ZhProvince(self, ZhProvince): # String
self.add_query_param('ZhProvince', ZhProvince)
def get_RegistrantOrganization(self): # String
return self.get_query_params().get('RegistrantOrganization')
def set_RegistrantOrganization(self, RegistrantOrganization): # String
self.add_query_param('RegistrantOrganization', RegistrantOrganization)
def get_UserClientIp(self): # String
return self.get_query_params().get('UserClientIp')
def set_UserClientIp(self, UserClientIp): # String
self.add_query_param('UserClientIp', UserClientIp)
def get_RegistrantName(self): # String
return self.get_query_params().get('RegistrantName')
def set_RegistrantName(self, RegistrantName): # String
self.add_query_param('RegistrantName', RegistrantName)
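# A hedged usage sketch (credentials and values are placeholders; AcsClient
# and do_action_with_exception are the standard aliyunsdkcore entry points):
#
# from aliyunsdkcore.client import AcsClient
#
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = SaveBatchTaskForUpdatingContactInfoByNewContactRequest()
# request.set_RegistrantName('Jane Doe')
# request.set_Email('jane@example.com')
# request.METHOD_NAME(['example.com', 'example.org'])  # repeated DomainName.N params
# response = client.do_action_with_exception(request)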
# ---- sample 2004 ----
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class ModifyFlowLogAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'ModifyFlowLogAttribute','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_NetflowVersion(self): # String
return self.get_query_params().get('NetflowVersion')
def set_NetflowVersion(self, NetflowVersion): # String
self.add_query_param('NetflowVersion', NetflowVersion)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_InactiveAging(self): # Integer
return self.get_query_params().get('InactiveAging')
def set_InactiveAging(self, InactiveAging): # Integer
self.add_query_param('InactiveAging', InactiveAging)
def get_SlsRegionId(self): # String
return self.get_query_params().get('SlsRegionId')
def set_SlsRegionId(self, SlsRegionId): # String
self.add_query_param('SlsRegionId', SlsRegionId)
def get_ActiveAging(self): # Integer
return self.get_query_params().get('ActiveAging')
def set_ActiveAging(self, ActiveAging): # Integer
self.add_query_param('ActiveAging', ActiveAging)
def get_OutputType(self): # String
return self.get_query_params().get('OutputType')
def set_OutputType(self, OutputType): # String
self.add_query_param('OutputType', OutputType)
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_LogstoreName(self): # String
return self.get_query_params().get('LogstoreName')
def set_LogstoreName(self, LogstoreName): # String
self.add_query_param('LogstoreName', LogstoreName)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_NetflowServerPort(self): # Integer
return self.get_query_params().get('NetflowServerPort')
def set_NetflowServerPort(self, NetflowServerPort): # Integer
self.add_query_param('NetflowServerPort', NetflowServerPort)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def METHOD_NAME(self): # String
return self.get_query_params().get('NetflowServerIp')
def set_NetflowServerIp(self, NetflowServerIp): # String
self.add_query_param('NetflowServerIp', NetflowServerIp)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_FlowLogId(self): # String
return self.get_query_params().get('FlowLogId')
def set_FlowLogId(self, FlowLogId): # String
self.add_query_param('FlowLogId', FlowLogId)
# ---- sample 2005 ----
""" do book related things with other users """
from django.apps import apps
from django.db import models, IntegrityError, transaction
from django.db.models import Q
from bookwyrm.settings import DOMAIN
from .base_model import BookWyrmModel
from . import fields
from .relationship import UserBlocks
class Group(BookWyrmModel):
"""A group of users"""
name = fields.CharField(max_length=100)
user = fields.ForeignKey("User", on_delete=models.CASCADE)
description = fields.TextField(blank=True, null=True)
privacy = fields.PrivacyField()
def METHOD_NAME(self):
"""don't want the user to be in there in this case"""
return f"https://{DOMAIN}/group/{self.id}"
@classmethod
def followers_filter(cls, queryset, viewer):
"""Override filter for "followers" privacy level to allow non-following
group members to see the existence of group-curated lists"""
return queryset.exclude(
~Q( # user is not a group member
Q(user__followers=viewer) | Q(user=viewer) | Q(memberships__user=viewer)
),
privacy="followers", # and the status of the group is followers only
)
@classmethod
def direct_filter(cls, queryset, viewer):
"""Override filter for "direct" privacy level to allow group members
to see the existence of groups and group lists"""
return queryset.exclude(~Q(memberships__user=viewer), privacy="direct")
class GroupMember(models.Model):
"""Users who are members of a group"""
created_date = models.DateTimeField(auto_now_add=True)
updated_date = models.DateTimeField(auto_now=True)
group = models.ForeignKey(
"Group", on_delete=models.CASCADE, related_name="memberships"
)
user = models.ForeignKey(
"User", on_delete=models.CASCADE, related_name="memberships"
)
class Meta:
"""Users can only have one membership per group"""
constraints = [
models.UniqueConstraint(fields=["group", "user"], name="unique_membership")
]
def save(self, *args, **kwargs):
"""don't let a user invite someone who blocked them"""
# blocking in either direction is a no-go
if UserBlocks.objects.filter(
Q(
user_subject=self.group.user,
user_object=self.user,
)
| Q(
user_subject=self.user,
user_object=self.group.user,
)
).exists():
raise IntegrityError()
# accepts and requests are handled by the GroupMemberInvitation model
super().save(*args, **kwargs)
@classmethod
def from_request(cls, join_request):
"""converts a join request into a member relationship"""
# remove the invite
join_request.delete()
# make a group member
return cls.objects.create(
user=join_request.user,
group=join_request.group,
)
@classmethod
def remove(cls, owner, user):
"""remove a user from a group"""
memberships = cls.objects.filter(group__user=owner, user=user).all()
for member in memberships:
member.delete()
class GroupMemberInvitation(models.Model):
"""adding a user to a group requires manual confirmation"""
created_date = models.DateTimeField(auto_now_add=True)
group = models.ForeignKey(
"Group", on_delete=models.CASCADE, related_name="user_invitations"
)
user = models.ForeignKey(
"User", on_delete=models.CASCADE, related_name="group_invitations"
)
class Meta:
"""Users can only have one outstanding invitation per group"""
constraints = [
models.UniqueConstraint(fields=["group", "user"], name="unique_invitation")
]
def save(self, *args, **kwargs):
"""make sure the membership doesn't already exist"""
# if there's an invitation for a membership that already exists, accept it
# without changing the local database state
if GroupMember.objects.filter(user=self.user, group=self.group).exists():
self.accept()
return
# blocking in either direction is a no-go
if UserBlocks.objects.filter(
Q(
user_subject=self.group.user,
user_object=self.user,
)
| Q(
user_subject=self.user,
user_object=self.group.user,
)
).exists():
raise IntegrityError()
# make an invitation
super().save(*args, **kwargs)
@transaction.atomic
def accept(self):
"""turn this request into the real deal"""
GroupMember.from_request(self)
model = apps.get_model("bookwyrm.Notification", require_ready=True)
# tell the group owner
model.notify(
self.group.user,
self.user,
related_group=self.group,
notification_type=model.ACCEPT,
)
# let the other members know about it
for membership in self.group.memberships.all():
member = membership.user
if member not in (self.user, self.group.user):
model.notify(
member,
self.user,
related_group=self.group,
notification_type=model.JOIN,
)
def reject(self):
"""generate a Reject for this membership request"""
self.delete()
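# A hedged sketch of the invitation flow implemented above ('book_club' and
# 'mouse' are illustrative objects, not part of the module):
#
# invite = GroupMemberInvitation.objects.create(group=book_club, user=mouse)
# invite.accept()  # removes the invite, creates the GroupMember, notifies
# GroupMember.remove(book_club.user, mouse)  # later, remove the member again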
# ---- sample 2006 ----
from abc import abstractmethod
from typing import List, Iterator, Union
from docutils import nodes
from docutils.statemachine import ViewList, string2lines
from docutils.parsers.rst import Directive, directives
from conversion import transpile_py_to_r
def METHOD_NAME(app):
app.add_directive('pharmpy-execute', PharmpyExecute)
app.add_directive('pharmpy-code', PharmpyCode)
return {
'version': '0.1',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
def csv_option(s):
return [p.strip() for p in s.split(",")] if s else []
class RecursiveDirective(Directive):
def _convert_lines_to_nodes(self, lines: List[str]) -> List[nodes.Node]:
"""Turn an RST string into a node that can be used in the document.
See https://github.com/sphinx-doc/sphinx/issues/8039
"""
node = nodes.Element()
node.document = self.state.document
self.state.nested_parse(
ViewList(
string2lines('\n'.join(lines)),
source='[SnippetDirective]',
),
self.content_offset,
node,
)
return node.children
class PharmpyAbstractCodeDirective(RecursiveDirective):
option_spec = {
'linenos': directives.flag,
'lineno-start': directives.nonnegative_int,
'emphasize-lines': directives.unchanged_required,
}
def run(self):
return self._nodes()
def _nodes(self):
lines = self._lines()
return self._convert_lines_to_nodes(lines)
@abstractmethod
def _lines(self) -> List[str]:
"""Return lines for this directive"""
def _input(self):
return [
'.. tabs::',
*_indent(3, [
'',
'.. code-tab:: py',
*_indent(3, self._code_option_lines()),
'',
*_indent(3, self.content),
'',
'.. code-tab:: r R',
*_indent(3, self._code_option_lines()),
'',
*_indent(3, transpile_py_to_r(self.content)),
]),
]
def _code_option_lines(self):
        if 'emphasize-lines' in self.options:
            yield f':emphasize-lines: {self.options.get("emphasize-lines")}'
if 'linenos' in self.options:
yield ':linenos:'
        if 'lineno-start' in self.options:
            yield f':lineno-start: {self.options.get("lineno-start")}'
class PharmpyExecute(PharmpyAbstractCodeDirective):
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
has_content = True
option_spec = {
**PharmpyAbstractCodeDirective.option_spec,
'hide-code': directives.flag,
'hide-output': directives.flag,
'code-below': directives.flag,
'raises': csv_option,
'stderr': directives.flag,
}
def _lines(self) -> List[str]:
return [
f'.. container:: pharmpy-snippet{"" if "hide-output" in self.options else " with-output"}',
'',
*_indent(3, self._input_output_lines())
]
def _input_output_lines(self):
# NOTE self._output should always be returned here, even when
# `hide-output` is set, otherwise the code will not be executed.
if 'hide-code' in self.options:
return self._output()
if 'code-below' in self.options:
return [
*self._output(),
'',
*self._input(),
]
return [
*self._input(),
'',
*self._output(),
]
def _output(self):
return [
'.. jupyter-execute::',
*_indent(3, [
*self._jupyter_option_lines(),
'',
*self.content
]),
]
def _jupyter_option_lines(self):
yield ':hide-code:'
if 'hide-output' in self.options:
yield ':hide-output:'
        if 'raises' in self.options:
            yield f':raises: {",".join(self.options.get("raises"))}'
if 'stderr' in self.options:
yield ':stderr:'
class PharmpyCode(PharmpyAbstractCodeDirective):
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
has_content = True
option_spec = PharmpyAbstractCodeDirective.option_spec
def _lines(self) -> List[str]:
return [
'.. container:: pharmpy-snippet',
'',
*_indent(3, self._input())
]
def _indent(n: int, lines: Union[List[str],Iterator[str]]):
return map(lambda line: (' '*n + line) if line else line, lines)
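# A hedged usage sketch. The extension module (its import name is an
# assumption here) would be enabled in conf.py alongside the extensions that
# provide the emitted '.. jupyter-execute::' and '.. tabs::' directives:
#
#     extensions = ['pharmpy_snippets', 'jupyter_sphinx', 'sphinx_tabs.tabs']
#
# and used from an .rst file like:
#
#     .. pharmpy-execute::
#        :hide-output:
#
#        from pharmpy.modeling import read_model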
# ---- sample 2007 ----
import os
import pytest
from puppetboard import docker_settings
from importlib import reload as reload
@pytest.fixture(scope='function')
def cleanup_env(request):
for env_var in dir(docker_settings):
if (env_var.startswith('__') or env_var.startswith('_') or
env_var.islower()):
continue
if env_var in os.environ:
del os.environ[env_var]
reload(docker_settings)
return
def test_default_host_port(cleanup_env):
assert docker_settings.PUPPETDB_HOST == 'puppetdb'
assert docker_settings.PUPPETDB_PORT == 8080
def test_set_host_port(cleanup_env):
os.environ['PUPPETDB_HOST'] = 'puppetdb2'
os.environ['PUPPETDB_PORT'] = '9081'
reload(docker_settings)
assert docker_settings.PUPPETDB_HOST == 'puppetdb2'
assert docker_settings.PUPPETDB_PORT == 9081
def test_set_proto(cleanup_env):
os.environ['PUPPETDB_PROTO'] = 'https'
reload(docker_settings)
assert docker_settings.PUPPETDB_PROTO == 'https'
def test_cert_true_test(cleanup_env):
os.environ['PUPPETDB_SSL_VERIFY'] = 'True'
reload(docker_settings)
assert docker_settings.PUPPETDB_SSL_VERIFY is True
os.environ['PUPPETDB_SSL_VERIFY'] = 'true'
reload(docker_settings)
assert docker_settings.PUPPETDB_SSL_VERIFY is True
def test_cert_false_test(cleanup_env):
os.environ['PUPPETDB_SSL_VERIFY'] = 'False'
reload(docker_settings)
assert docker_settings.PUPPETDB_SSL_VERIFY is False
os.environ['PUPPETDB_SSL_VERIFY'] = 'false'
reload(docker_settings)
assert docker_settings.PUPPETDB_SSL_VERIFY is False
def test_cert_path(cleanup_env):
ca_file = '/usr/ssl/path/ca.pem'
os.environ['PUPPETDB_SSL_VERIFY'] = ca_file
reload(docker_settings)
assert docker_settings.PUPPETDB_SSL_VERIFY == ca_file
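# The next two tests assume docker_settings materialises the PUPPETDB_KEY
# value from the environment into a file under the system temp directory,
# base64-decoding it first when it is not plain PEM (second test below).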
def METHOD_NAME(cleanup_env):
import tempfile
cert_string = '-----BEGIN CERTIFICATE-----\nMIIFkjCCA3qgAwf'
os.environ['PUPPETDB_KEY'] = cert_string
reload(docker_settings)
assert docker_settings.PUPPETDB_KEY.startswith(tempfile.gettempdir())
with open(docker_settings.PUPPETDB_KEY) as test_cert_file:
assert test_cert_file.read() == '-----BEGIN CERTIFICATE-----\nMIIFkjCCA3qgAwf'
# Clean up the generated file
os.unlink(docker_settings.PUPPETDB_KEY)
def test_cert_to_file_base64(cleanup_env):
import tempfile
cert_string = 'LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUZrakNDQTNxZ0F3SUI='
os.environ['PUPPETDB_KEY'] = cert_string
reload(docker_settings)
assert docker_settings.PUPPETDB_KEY.startswith(tempfile.gettempdir())
with open(docker_settings.PUPPETDB_KEY) as test_cert_file:
assert test_cert_file.read() == '-----BEGIN CERTIFICATE-----\nMIIFkjCCA3qgAwIB'
# Clean up the generated file
os.unlink(docker_settings.PUPPETDB_KEY)
def validate_facts(facts):
assert isinstance(facts, list)
assert len(facts) > 0
    for fact in facts:
        assert isinstance(fact, tuple)
        assert len(fact) == 2
def test_inventory_facts_default(cleanup_env):
validate_facts(docker_settings.INVENTORY_FACTS)
def test_inventory_facts_custom(cleanup_env):
os.environ['INVENTORY_FACTS'] = "A, B, C, D"
reload(docker_settings)
validate_facts(docker_settings.INVENTORY_FACTS)
def test_inventory_fact_templates_default(cleanup_env):
assert isinstance(docker_settings.INVENTORY_FACT_TEMPLATES, dict)
assert len(docker_settings.INVENTORY_FACT_TEMPLATES) == 3
def test_inventory_fact_templates_custom(cleanup_env):
os.environ['INVENTORY_FACT_TEMPLATES'] = """{"os": "{{ fact_os_detection(value) }}"}"""
reload(docker_settings)
assert isinstance(docker_settings.INVENTORY_FACT_TEMPLATES, dict)
assert len(docker_settings.INVENTORY_FACT_TEMPLATES) == 1
def test_graph_facts_default(cleanup_env):
facts = docker_settings.GRAPH_FACTS
assert isinstance(facts, list)
assert 'puppetversion' in facts
def test_graph_facts_custom(cleanup_env):
os.environ['GRAPH_FACTS'] = "architecture, puppetversion, extra"
reload(docker_settings)
facts = docker_settings.GRAPH_FACTS
assert isinstance(facts, list)
assert len(facts) == 3
assert 'puppetversion' in facts
assert 'architecture' in facts
assert 'extra' in facts
def test_default_table_selector(cleanup_env):
assert [10, 20, 50, 100, 500] == docker_settings.TABLE_COUNT_SELECTOR
def test_env_table_selector(cleanup_env):
os.environ['TABLE_COUNT_SELECTOR'] = '5,15,25'
reload(docker_settings)
assert [5, 15, 25] == docker_settings.TABLE_COUNT_SELECTOR
def test_env_column_options(cleanup_env):
os.environ['DISPLAYED_METRICS'] = 'resources.total, events.failure'
reload(docker_settings)
assert ['resources.total',
'events.failure'] == docker_settings.DISPLAYED_METRICS
def test_enable_class_default(cleanup_env):
    assert docker_settings.ENABLE_CLASS is False
def test_enable_class_true(cleanup_env):
os.environ['ENABLE_CLASS'] = 'True'
reload(docker_settings)
assert docker_settings.ENABLE_CLASS is True
os.environ['ENABLE_CLASS'] = 'true'
reload(docker_settings)
assert docker_settings.ENABLE_CLASS is True
def test_enable_class_false(cleanup_env):
os.environ['ENABLE_CLASS'] = 'False'
reload(docker_settings)
assert docker_settings.ENABLE_CLASS is False
os.environ['ENABLE_CLASS'] = 'false'
reload(docker_settings)
assert docker_settings.ENABLE_CLASS is False
def test_cache_timeout_default(cleanup_env):
assert 3600 == docker_settings.CACHE_DEFAULT_TIMEOUT
def test_cache_type_default(cleanup_env):
assert 'SimpleCache' == docker_settings.CACHE_TYPE
def test_cache_memcached_servers(cleanup_env):
os.environ['CACHE_TYPE'] = 'MemcachedCache'
reload(docker_settings)
assert ['memcached:11211'] == docker_settings.CACHE_MEMCACHED_SERVERS
def test_class_events_status_columns_default(cleanup_env):
assert [('failure', 'Failure'),
('success', 'Success'),
('noop', 'Noop')] == docker_settings.CLASS_EVENTS_STATUS_COLUMNS
def test_scheduler_enabled_true(cleanup_env):
os.environ['SCHEDULER_ENABLED'] = 'True'
reload(docker_settings)
assert docker_settings.SCHEDULER_ENABLED is True
os.environ['SCHEDULER_ENABLED'] = 'true'
reload(docker_settings)
assert docker_settings.SCHEDULER_ENABLED is True
def test_scheduler_enabled_false(cleanup_env):
os.environ['SCHEDULER_ENABLED'] = 'False'
reload(docker_settings)
assert docker_settings.SCHEDULER_ENABLED is False
os.environ['SCHEDULER_ENABLED'] = 'false'
reload(docker_settings)
assert docker_settings.SCHEDULER_ENABLED is False
def test_scheduler_jobs_default(cleanup_env):
assert [{'func': 'puppetboard.schedulers.classes:build_async_cache',
'id': 'do_build_async_cache_1',
'seconds': 300,
'trigger': 'interval'}] == docker_settings.SCHEDULER_JOBS
def test_scheduler_jobs_custom(cleanup_env):
os.environ['SCHEDULER_JOBS'] = "id,do_build_async_cache_1,func,puppetboard.schedulers.classes:build_async_cache,trigger,interval,seconds,600"
reload(docker_settings)
jobs = docker_settings.SCHEDULER_JOBS
assert isinstance(jobs, list)
assert len(jobs) == 1
for job in jobs:
assert isinstance(job, dict)
assert len(job) == 4
assert 'id' in job
assert 'func' in job
assert 'trigger' in job
assert 'seconds' in job
assert 600 == job['seconds']
# ---- sample 2008 ----
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, color (GUICG13)
import GemRB
import CharOverview
import CommonTables
import GUICommon
import IDLUCommon
import Portrait
from GUIDefines import *
from ie_stats import IE_SEX, IE_CLASS
ColorTable = 0
HairTable = 0
SkinTable = 0
ColorWindow = 0
ColorPicker = 0
DoneButton = 0
ColorIndex = 0
PickedColor = 0
HairButton = 0
SkinButton = 0
MajorButton = 0
MinorButton = 0
Color1 = 0
Color2 = 0
Color3 = 0
Color4 = 0
PDollButton = 0
PDollResRef = 0
def RefreshPDoll():
global ColorWindow, PDollButton
global Color1, Color2, Color3, Color4, PDollResRef
PDollButton.SetFlags(IE_GUI_BUTTON_PLAYALWAYS|IE_GUI_BUTTON_CENTER_PICTURES, OP_OR)
PDollButton.SetBAM("", 0, 0, 0) # just hide or there is a tiny artifact
PDollButton.SetAnimation (None) # force reset
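	# a hedged note on the palette list below: the slots appear to follow the
	# IE colour order (metal, minor, major, skin, leather, armor, hair), so
	# Color4=minor, Color3=major, Color2=skin, Color1=hair; unused slots are 0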
PDollButton.SetAnimation (PDollResRef, 1, 8, [0, Color4, Color3, Color2, 0, 0, Color1, 0])
return
def OnLoad():
global ColorWindow, DoneButton, PDollButton
global HairTable, SkinTable, ColorTable
global HairButton, SkinButton, MajorButton, MinorButton
global Color1, Color2, Color3, Color4, PDollResRef
ColorWindow=GemRB.LoadWindow(13, "GUICG")
CharOverview.PositionCharGenWin(ColorWindow)
pc = GemRB.GetVar ("Slot")
Race = IDLUCommon.GetRace (pc)
RaceName = CommonTables.Races.GetRowName (Race)
HairTable = GemRB.LoadTable(CommonTables.Races.GetValue(RaceName, "HAIR"))
SkinTable = GemRB.LoadTable(CommonTables.Races.GetValue(RaceName, "SKIN"))
ColorTable = GemRB.LoadTable("clowncol")
#set these colors to some default
Gender = GemRB.GetPlayerStat (pc, IE_SEX)
Portrait.Init (Gender)
Portrait.Set (GemRB.GetPlayerPortrait (pc)["ResRef"])
PortraitName = Portrait.Name () # strips the last char like the table needs
PortraitTable = GemRB.LoadTable("pictures")
Color1 = PortraitTable.GetValue(PortraitName, "HAIR", GTV_INT)
Color2 = PortraitTable.GetValue(PortraitName, "SKIN", GTV_INT)
Color3 = PortraitTable.GetValue(PortraitName, "MAJOR", GTV_INT)
Color4 = PortraitTable.GetValue(PortraitName, "MINOR", GTV_INT)
PDollButton = ColorWindow.GetControl(1)
PDollButton.SetFlags(IE_GUI_BUTTON_PICTURE,OP_OR)
PDollButton.SetState(IE_GUI_BUTTON_LOCKED)
HairButton = ColorWindow.GetControl(2)
HairButton.SetFlags(IE_GUI_BUTTON_PICTURE,OP_OR)
HairButton.OnPress (HairPress)
HairButton.SetBAM("COLGRAD", 1, 0, Color1)
SkinButton = ColorWindow.GetControl(3)
SkinButton.SetFlags(IE_GUI_BUTTON_PICTURE,OP_OR)
SkinButton.OnPress (SkinPress)
SkinButton.SetBAM("COLGRAD", 1, 0, Color2)
MajorButton = ColorWindow.GetControl(5)
MajorButton.SetFlags(IE_GUI_BUTTON_PICTURE,OP_OR)
MajorButton.OnPress (MajorPress)
MajorButton.SetBAM("COLGRAD", 1, 0, Color3)
MinorButton = ColorWindow.GetControl(4)
MinorButton.SetFlags(IE_GUI_BUTTON_PICTURE,OP_OR)
MinorButton.OnPress (MinorPress)
MinorButton.SetBAM("COLGRAD", 1, 0, Color4)
BackButton = ColorWindow.GetControl(13)
BackButton.SetText(15416)
DoneButton = ColorWindow.GetControl(0)
DoneButton.SetText(11973)
DoneButton.MakeDefault()
DoneButton.OnPress (NextPress)
BackButton.OnPress (BackPress)
# calculate the paperdoll animation id from the race, class and gender
PDollTable = GemRB.LoadTable ("avatars")
table = GemRB.LoadTable ("avprefr")
RaceID = CommonTables.Races.GetValue (RaceName, "ID", GTV_INT)
# look up base race if needed
if RaceID > 1000:
RaceID = RaceID >> 16
Race = CommonTables.Races.FindValue ("ID", RaceID)
RaceName = CommonTables.Races.GetRowName (Race)
AnimID = 0x6000 + table.GetValue (RaceName, "RACE")
table = GemRB.LoadTable ("avprefc")
Class = GemRB.GetPlayerStat (pc, IE_CLASS)
ClassName = GUICommon.GetClassRowName (Class - 1, "index")
AnimID = AnimID + table.GetValue (ClassName, "PREFIX")
table = GemRB.LoadTable ("avprefg")
AnimID = AnimID + table.GetValue (Gender, 0)
PDollResRef = PDollTable.GetValue (hex(AnimID), "AT_1", GTV_STR)
if PDollResRef == "*":
print("ERROR, couldn't find the paperdoll! AnimID is", hex(AnimID))
print("Falling back to an elven paperdoll.")
PDollResRef = "CEMB1"
PDollResRef += "G11"
RefreshPDoll()
ColorWindow.Focus()
return
def RandomDonePress():
#should be better
GemRB.SetVar("Selected", GemRB.Roll(1,5,0) )
DonePress()
def DonePress():
global Color1, Color2, Color3, Color4, ColorWindow, ColorIndex, PickedColor, ColorPicker
if ColorPicker:
ColorPicker.Close ()
ColorWindow.Focus()
if ColorIndex==0:
PickedColor=HairTable.GetValue(GemRB.GetVar("Selected"),0)
Color1=PickedColor
HairButton.SetBAM("COLGRAD", 1, 0, Color1)
RefreshPDoll()
return
if ColorIndex==1:
PickedColor=SkinTable.GetValue(GemRB.GetVar("Selected"),0)
Color2=PickedColor
SkinButton.SetBAM("COLGRAD", 1, 0, Color2)
RefreshPDoll()
return
if ColorIndex==2:
PickedColor=ColorTable.GetValue(0, GemRB.GetVar("Selected"))
Color3=PickedColor
MajorButton.SetBAM("COLGRAD", 1, 0, Color3)
RefreshPDoll()
return
PickedColor=ColorTable.GetValue(1, GemRB.GetVar("Selected"))
Color4=PickedColor
MinorButton.SetBAM("COLGRAD", 1, 0, Color4)
RefreshPDoll()
return
def CancelPress():
global ColorPicker, ColorWindow
if ColorPicker:
ColorPicker.Close ()
ColorWindow.Focus()
def METHOD_NAME():
global ColorPicker, ColorIndex, PickedColor
ColorPicker=GemRB.LoadWindow(14)
GemRB.SetVar("Selected",-1)
for i in range(33):
Button = ColorPicker.GetControl(i)
Button.SetState(IE_GUI_BUTTON_DISABLED)
Button.SetFlags(IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
m = 33
if ColorIndex==0:
m=HairTable.GetRowCount()
t=HairTable
if ColorIndex==1:
m=SkinTable.GetRowCount()
t=SkinTable
for i in range(m):
if ColorIndex<2:
MyColor=t.GetValue(i,0)
else:
MyColor=ColorTable.GetValue(ColorIndex-2, i)
if MyColor == "*":
break
Button = ColorPicker.GetControl(i)
Button.SetBAM("COLGRAD", 2, 0, MyColor)
if PickedColor == MyColor:
GemRB.SetVar("Selected",i)
Button.SetState(IE_GUI_BUTTON_ENABLED)
Button.SetVarAssoc("Selected",i)
Button.OnPress (DonePress)
Button = ColorPicker.GetControl(33)
#default button
Button.SetVarAssoc("Selected", 0)
Button.OnPress (RandomDonePress)
Button.SetText("RND")
CancelButton = ColorPicker.GetControl(35)
CancelButton.SetText(13727)
CancelButton.OnPress (CancelPress)
CancelButton.MakeEscape()
ColorPicker.Focus()
return
def HairPress():
global ColorIndex, PickedColor
ColorIndex = 0
PickedColor = Color1
METHOD_NAME()
return
def SkinPress():
global ColorIndex, PickedColor
ColorIndex = 1
PickedColor = Color2
METHOD_NAME()
return
def MajorPress():
global ColorIndex, PickedColor
ColorIndex = 2
PickedColor = Color3
METHOD_NAME()
return
def MinorPress():
global ColorIndex, PickedColor
ColorIndex = 3
PickedColor = Color4
METHOD_NAME()
return
def BackPress():
if ColorWindow:
ColorWindow.Close ()
GemRB.SetNextScript("CharGen7")
return
def NextPress():
if ColorWindow:
ColorWindow.Close ()
GemRB.SetVar("Color1",Color1)
GemRB.SetVar("Color2",Color2)
GemRB.SetVar("Color3",Color3)
GemRB.SetVar("Color4",Color4)
GemRB.SetNextScript("CSound") #character sound
return
# ---- sample 2009 ----
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdyplsapi.endpoint import endpoint_data
class BindAxnRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dyplsapi', '2017-05-25', 'BindAxn')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_CallDisplayType(self): # Integer
return self.get_query_params().get('CallDisplayType')
def set_CallDisplayType(self, CallDisplayType): # Integer
self.add_query_param('CallDisplayType', CallDisplayType)
def get_CallTimeout(self): # Integer
return self.get_query_params().get('CallTimeout')
def set_CallTimeout(self, CallTimeout): # Integer
self.add_query_param('CallTimeout', CallTimeout)
def get_PhoneNoX(self): # String
return self.get_query_params().get('PhoneNoX')
def set_PhoneNoX(self, PhoneNoX): # String
self.add_query_param('PhoneNoX', PhoneNoX)
def get_RingConfig(self): # String
return self.get_query_params().get('RingConfig')
def set_RingConfig(self, RingConfig): # String
self.add_query_param('RingConfig', RingConfig)
def get_ASRStatus(self): # Boolean
return self.get_query_params().get('ASRStatus')
def set_ASRStatus(self, ASRStatus): # Boolean
self.add_query_param('ASRStatus', ASRStatus)
def get_PhoneNoB(self): # String
return self.get_query_params().get('PhoneNoB')
def set_PhoneNoB(self, PhoneNoB): # String
self.add_query_param('PhoneNoB', PhoneNoB)
def get_PhoneNoA(self): # String
return self.get_query_params().get('PhoneNoA')
def set_PhoneNoA(self, PhoneNoA): # String
self.add_query_param('PhoneNoA', PhoneNoA)
def get_ExpectCity(self): # String
return self.get_query_params().get('ExpectCity')
def set_ExpectCity(self, ExpectCity): # String
self.add_query_param('ExpectCity', ExpectCity)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_OutOrderId(self): # String
return self.get_query_params().get('OutOrderId')
def set_OutOrderId(self, OutOrderId): # String
self.add_query_param('OutOrderId', OutOrderId)
def get_PoolKey(self): # String
return self.get_query_params().get('PoolKey')
def set_PoolKey(self, PoolKey): # String
self.add_query_param('PoolKey', PoolKey)
def get_Expiration(self): # String
return self.get_query_params().get('Expiration')
def set_Expiration(self, Expiration): # String
self.add_query_param('Expiration', Expiration)
def get_IsRecordingEnabled(self): # Boolean
return self.get_query_params().get('IsRecordingEnabled')
def set_IsRecordingEnabled(self, IsRecordingEnabled): # Boolean
self.add_query_param('IsRecordingEnabled', IsRecordingEnabled)
def get_OutId(self): # String
return self.get_query_params().get('OutId')
def METHOD_NAME(self, OutId): # String
self.add_query_param('OutId', OutId)
def get_NoType(self): # String
return self.get_query_params().get('NoType')
def set_NoType(self, NoType): # String
self.add_query_param('NoType', NoType)
def get_ASRModelId(self): # String
return self.get_query_params().get('ASRModelId')
def set_ASRModelId(self, ASRModelId): # String
self.add_query_param('ASRModelId', ASRModelId)
def get_CallRestrict(self): # String
return self.get_query_params().get('CallRestrict')
def set_CallRestrict(self, CallRestrict): # String
self.add_query_param('CallRestrict', CallRestrict)
# ---- sample 2010 ----
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class AddTrafficMatchRuleToTrafficMarkingPolicyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'AddTrafficMatchRuleToTrafficMarkingPolicy')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_TrafficMarkingPolicyId(self): # String
return self.get_query_params().get('TrafficMarkingPolicyId')
def set_TrafficMarkingPolicyId(self, TrafficMarkingPolicyId): # String
self.add_query_param('TrafficMarkingPolicyId', TrafficMarkingPolicyId)
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_TrafficMatchRuless(self): # RepeatList
return self.get_query_params().get('TrafficMatchRules')
def set_TrafficMatchRuless(self, TrafficMatchRules): # RepeatList
for depth1 in range(len(TrafficMatchRules)):
if TrafficMatchRules[depth1].get('DstPortRange') is not None:
for depth2 in range(len(TrafficMatchRules[depth1].get('DstPortRange'))):
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.DstPortRange.' + str(depth2 + 1), TrafficMatchRules[depth1].get('DstPortRange')[depth2])
if TrafficMatchRules[depth1].get('MatchDscp') is not None:
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.MatchDscp', TrafficMatchRules[depth1].get('MatchDscp'))
if TrafficMatchRules[depth1].get('Protocol') is not None:
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.Protocol', TrafficMatchRules[depth1].get('Protocol'))
if TrafficMatchRules[depth1].get('TrafficMatchRuleDescription') is not None:
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.TrafficMatchRuleDescription', TrafficMatchRules[depth1].get('TrafficMatchRuleDescription'))
if TrafficMatchRules[depth1].get('SrcPortRange') is not None:
for depth2 in range(len(TrafficMatchRules[depth1].get('SrcPortRange'))):
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.SrcPortRange.' + str(depth2 + 1), TrafficMatchRules[depth1].get('SrcPortRange')[depth2])
if TrafficMatchRules[depth1].get('DstCidr') is not None:
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.DstCidr', TrafficMatchRules[depth1].get('DstCidr'))
if TrafficMatchRules[depth1].get('TrafficMatchRuleName') is not None:
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.TrafficMatchRuleName', TrafficMatchRules[depth1].get('TrafficMatchRuleName'))
if TrafficMatchRules[depth1].get('SrcCidr') is not None:
self.add_query_param('TrafficMatchRules.' + str(depth1 + 1) + '.SrcCidr', TrafficMatchRules[depth1].get('SrcCidr'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def METHOD_NAME(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
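# Minimal usage sketch (illustrative only; the policy ID and CIDRs below are
# hypothetical placeholders, not values from the SDK docs):
#
#     request = AddTrafficMatchRuleToTrafficMarkingPolicyRequest()
#     request.set_TrafficMarkingPolicyId('tm-xxxxxxxxxxxx')
#     request.set_TrafficMatchRuless([{
#         'Protocol': 'TCP',
#         'SrcCidr': '10.0.0.0/24',
#         'DstCidr': '10.0.1.0/24',
#         'DstPortRange': [80, 80],
#     }])
#     # The repeat-list setter flattens this into query params such as
#     # TrafficMatchRules.1.Protocol and TrafficMatchRules.1.DstPortRange.1.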
| null |
2,011 |
"""This file contains functions for loading data from disk
and performing some checks on them.
"""
from os import PathLike
from pathlib import Path
from typing import Dict, Tuple
import numpy as np
import pandas as pd
from nilearn.surface import Mesh
TSV_FIRST_COLUMN = "participant_id"
TSV_SECOND_COLUMN = "session_id"
def _read_and_check_tsv_file(tsv_file: PathLike) -> pd.DataFrame:
"""This function reads the TSV file provided and performs some basic checks.
Parameters
----------
tsv_file : PathLike
Path to the TSV file to open.
Returns
-------
tsv_data : pd.DataFrame
DataFrame obtained from the file.
"""
try:
return pd.read_csv(tsv_file, sep="\t").set_index(
[TSV_FIRST_COLUMN, TSV_SECOND_COLUMN]
)
except FileNotFoundError:
raise FileNotFoundError(f"File {tsv_file} does not exist.")
except KeyError:
raise ValueError(
f"The TSV data should have at least two columns: {TSV_FIRST_COLUMN} and {TSV_SECOND_COLUMN}"
)
def _get_t1_freesurfer_custom_file_template(base_dir: PathLike) -> str:
"""Returns a Template for the path to the desired surface file.
Parameters
----------
base_dir : PathLike
Base directory to search for the template.
Returns
-------
template_path : str
Path to the t1 freesurfer template.
"""
return str(base_dir) + (
"/%(subject)s/%(session)s/t1/freesurfer_cross_sectional/%(subject)s_%(session)s"
"/surf/%(hemi)s.thickness.fwhm%(fwhm)s.fsaverage.mgh"
)
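# Illustrative example of how the template above is filled in (the base
# directory, subject and session IDs are hypothetical):
#
#     template = _get_t1_freesurfer_custom_file_template("/caps/subjects")
#     template % {"subject": "sub-01", "session": "ses-M00",
#                 "hemi": "lh", "fwhm": 20}
#     # -> "/caps/subjects/sub-01/ses-M00/t1/freesurfer_cross_sectional/
#     #     sub-01_ses-M00/surf/lh.thickness.fwhm20.fsaverage.mgh"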
def METHOD_NAME(
input_dir: PathLike,
surface_file: str,
df: pd.DataFrame,
fwhm: float,
) -> np.ndarray:
"""This function builds the cortical thickness array.
Parameters
----------
input_dir : PathLike
Input directory.
surface_file : str
Template for the path to the surface file of interest.
df : pd.DataFrame
Subjects DataFrame.
fwhm : float
Smoothing parameter only used to retrieve the right surface file.
Returns
-------
thickness : np.ndarray
Cortical thickness. Hemispheres and subjects are stacked.
"""
from nibabel.freesurfer.mghformat import load
thickness = []
for idx, row in df.iterrows():
subject = row[TSV_FIRST_COLUMN]
session = row[TSV_SECOND_COLUMN]
parts = []
for hemi in ["lh", "rh"]:
query = {"subject": subject, "session": session, "fwhm": fwhm, "hemi": hemi}
parts.append(
load(str(Path(input_dir) / Path(surface_file % query))).get_fdata()
)
combined = np.vstack(parts)
thickness.append(combined.flatten())
thickness = np.vstack(thickness)
if thickness.shape[0] != len(df):
raise ValueError(
f"Unexpected shape for thickness array : {thickness.shape}. "
f"Expected {len(df)} rows."
)
return thickness
def _get_average_surface(fsaverage_path: PathLike) -> Tuple[Dict, Mesh]:
"""This function extracts the average surface and the average mesh
from the path to the fsaverage templates.
.. note::
Note that the average surface is returned as a dictionary
with 'coord' and 'tri' as keys, while the average mesh is
returned as a Nilearn Mesh object (basically a NamedTuple
with 'coordinates' and 'faces' attributes). The surface
isn't returned as a Nilearn Surface object for compatibility
with BrainStats.
.. warning::
There is an issue with faces having a value of 0 as index.
This is most likely a bug in BrainStat as MATLAB indexing
starts at 1 while Python starts at zero.
Parameters
----------
fsaverage_path : PathLike
Path to the fsaverage templates.
Returns
-------
average_surface : dict
Average surface as a dictionary for BrainStat compatibility.
average_mesh : nilearn.surface.Mesh
Average mesh as a Nilearn Mesh object.
"""
import copy
from nilearn.surface import Mesh, load_surf_mesh
meshes = [
load_surf_mesh(str(fsaverage_path / Path(f"{hemi}.pial")))
for hemi in ["lh", "rh"]
]
coordinates = np.vstack([mesh.coordinates for mesh in meshes])
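# Right-hemisphere face indices must be shifted by the number of
# left-hemisphere vertices so that they index into the stacked
# coordinate array built above.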
faces = np.vstack(
[meshes[0].faces, meshes[1].faces + meshes[0].coordinates.shape[0]]
)
average_mesh = Mesh(
coordinates=coordinates,
faces=copy.deepcopy(faces),
)
##################
# UGLY HACK !!! Need investigation
##################
# The following offset works around an error with negative
# values in bincount in BrainStat. Not sure, but might be a
# bug in BrainStat (MATLAB indexing starts at 1, Python at 0).
#
faces += 1
#################
average_surface = {
"coord": coordinates,
"tri": faces,
}
return average_surface, average_mesh
| null |
2,012 |
import ctypes
import io
import struct
import pytest
import env
from pybind11_tests import ConstructorStats
from pybind11_tests import buffers as m
np = pytest.importorskip("numpy")
if m.long_double_and_double_have_same_size:
# Determined by the compiler used to build the pybind11 tests
# (e.g. MSVC gets here, but MinGW might not).
np_float128 = None
np_complex256 = None
else:
# Determined by the compiler used to build numpy (e.g. MinGW).
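# getattr(np, *["float128"] * 2) is getattr(np, "float128", "float128"):
# it yields the dtype when numpy provides it and falls back to the plain
# string (detected and skipped in the tests below) when it does not.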
np_float128 = getattr(np, *["float128"] * 2)
np_complex256 = getattr(np, *["complex256"] * 2)
CPP_NAME_FORMAT_NP_DTYPE_TABLE = [
("PyObject *", "O", object),
("bool", "?", np.bool_),
("std::int8_t", "b", np.int8),
("std::uint8_t", "B", np.uint8),
("std::int16_t", "h", np.int16),
("std::uint16_t", "H", np.uint16),
("std::int32_t", "i", np.int32),
("std::uint32_t", "I", np.uint32),
("std::int64_t", "q", np.int64),
("std::uint64_t", "Q", np.uint64),
("float", "f", np.float32),
("double", "d", np.float64),
("long double", "g", np_float128),
("std::complex<float>", "Zf", np.complex64),
("std::complex<double>", "Zd", np.complex128),
("std::complex<long double>", "Zg", np_complex256),
]
CPP_NAME_FORMAT_TABLE = [
(cpp_name, format)
for cpp_name, format, np_dtype in CPP_NAME_FORMAT_NP_DTYPE_TABLE
if np_dtype is not None
]
CPP_NAME_NP_DTYPE_TABLE = [
(cpp_name, np_dtype) for cpp_name, _, np_dtype in CPP_NAME_FORMAT_NP_DTYPE_TABLE
]
@pytest.mark.parametrize(("cpp_name", "np_dtype"), CPP_NAME_NP_DTYPE_TABLE)
def test_format_descriptor_format_buffer_info_equiv(cpp_name, np_dtype):
if np_dtype is None:
pytest.skip(
f"cpp_name=`{cpp_name}`: `long double` and `double` have same size."
)
if isinstance(np_dtype, str):
pytest.skip(f"np.{np_dtype} does not exist.")
np_array = np.array([], dtype=np_dtype)
for other_cpp_name, expected_format in CPP_NAME_FORMAT_TABLE:
format, np_array_is_matching = m.format_descriptor_format_buffer_info_equiv(
other_cpp_name, np_array
)
assert format == expected_format
if other_cpp_name == cpp_name:
assert np_array_is_matching
else:
assert not np_array_is_matching
def METHOD_NAME():
with pytest.raises(RuntimeError) as excinfo:
m.Matrix(np.array([1, 2, 3])) # trying to assign a 1D array
assert str(excinfo.value) == "Incompatible buffer format!"
m3 = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
m4 = m.Matrix(m3)
for i in range(m4.rows()):
for j in range(m4.cols()):
assert m3[i, j] == m4[i, j]
cstats = ConstructorStats.get(m.Matrix)
assert cstats.alive() == 1
del m3, m4
assert cstats.alive() == 0
assert cstats.values() == ["2x3 matrix"]
assert cstats.copy_constructions == 0
# assert cstats.move_constructions >= 0 # Don't invoke any
assert cstats.copy_assignments == 0
assert cstats.move_assignments == 0
# https://foss.heptapod.net/pypy/pypy/-/issues/2444
# TODO: fix on recent PyPy
@pytest.mark.xfail(
env.PYPY, reason="PyPy 7.3.7 doesn't clear this anymore", strict=False
)
def test_to_python():
mat = m.Matrix(5, 4)
assert memoryview(mat).shape == (5, 4)
assert mat[2, 3] == 0
mat[2, 3] = 4.0
mat[3, 2] = 7.0
assert mat[2, 3] == 4
assert mat[3, 2] == 7
assert struct.unpack_from("f", mat, (3 * 4 + 2) * 4) == (7,)
assert struct.unpack_from("f", mat, (2 * 4 + 3) * 4) == (4,)
mat2 = np.array(mat, copy=False)
assert mat2.shape == (5, 4)
assert abs(mat2).sum() == 11
assert mat2[2, 3] == 4
assert mat2[3, 2] == 7
mat2[2, 3] = 5
assert mat2[2, 3] == 5
cstats = ConstructorStats.get(m.Matrix)
assert cstats.alive() == 1
del mat
pytest.gc_collect()
assert cstats.alive() == 1
del mat2 # holds a mat reference
pytest.gc_collect()
assert cstats.alive() == 0
assert cstats.values() == ["5x4 matrix"]
assert cstats.copy_constructions == 0
# assert cstats.move_constructions >= 0 # Don't invoke any
assert cstats.copy_assignments == 0
assert cstats.move_assignments == 0
def test_inherited_protocol():
"""SquareMatrix is derived from Matrix and inherits the buffer protocol"""
matrix = m.SquareMatrix(5)
assert memoryview(matrix).shape == (5, 5)
assert np.asarray(matrix).shape == (5, 5)
def test_pointer_to_member_fn():
for cls in [m.Buffer, m.ConstBuffer, m.DerivedBuffer]:
buf = cls()
buf.value = 0x12345678
value = struct.unpack("i", bytearray(buf))[0]
assert value == 0x12345678
def test_readonly_buffer():
buf = m.BufferReadOnly(0x64)
view = memoryview(buf)
assert view[0] == 0x64
assert view.readonly
with pytest.raises(TypeError):
view[0] = 0
def test_selective_readonly_buffer():
buf = m.BufferReadOnlySelect()
memoryview(buf)[0] = 0x64
assert buf.value == 0x64
io.BytesIO(b"A").readinto(buf)
assert buf.value == ord(b"A")
buf.readonly = True
with pytest.raises(TypeError):
memoryview(buf)[0] = 0
with pytest.raises(TypeError):
io.BytesIO(b"1").readinto(buf)
def test_ctypes_array_1d():
char1d = (ctypes.c_char * 10)()
int1d = (ctypes.c_int * 15)()
long1d = (ctypes.c_long * 7)()
for carray in (char1d, int1d, long1d):
info = m.get_buffer_info(carray)
assert info.itemsize == ctypes.sizeof(carray._type_)
assert info.size == len(carray)
assert info.ndim == 1
assert info.shape == [info.size]
assert info.strides == [info.itemsize]
assert not info.readonly
def test_ctypes_array_2d():
char2d = ((ctypes.c_char * 10) * 4)()
int2d = ((ctypes.c_int * 15) * 3)()
long2d = ((ctypes.c_long * 7) * 2)()
for carray in (char2d, int2d, long2d):
info = m.get_buffer_info(carray)
assert info.itemsize == ctypes.sizeof(carray[0]._type_)
assert info.size == len(carray) * len(carray[0])
assert info.ndim == 2
assert info.shape == [len(carray), len(carray[0])]
assert info.strides == [info.itemsize * len(carray[0]), info.itemsize]
assert not info.readonly
def test_ctypes_from_buffer():
test_pystr = b"0123456789"
for pyarray in (test_pystr, bytearray(test_pystr)):
pyinfo = m.get_buffer_info(pyarray)
if pyinfo.readonly:
cbytes = (ctypes.c_char * len(pyarray)).from_buffer_copy(pyarray)
cinfo = m.get_buffer_info(cbytes)
else:
cbytes = (ctypes.c_char * len(pyarray)).from_buffer(pyarray)
cinfo = m.get_buffer_info(cbytes)
assert cinfo.size == pyinfo.size
assert cinfo.ndim == pyinfo.ndim
assert cinfo.shape == pyinfo.shape
assert cinfo.strides == pyinfo.strides
assert not cinfo.readonly
| null |
2,013 |
# Copyright 2017,2018,2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from collections import OrderedDict
import os
import time
import nnabla as nn
import nnabla.functions as F
import nnabla.logger as logger
import nnabla.parametric_functions as PF
import nnabla.solvers as S
import nnabla.utils.save as save
import numpy as np
from six.moves import range
def categorical_error(pred, label):
"""
Compute categorical error given score vectors and labels as
numpy.ndarray.
"""
pred_label = pred.argmax(1)
return (pred_label != label.flat).mean()
def mnist_lenet_prediction(image, scope="reference", test=False):
"""
Construct LeNet for MNIST.
"""
with nn.parameter_scope(scope):
image /= 255.0
c1 = PF.convolution(image, 16, (5, 5), name='conv1')
c1 = F.relu(F.max_pooling(c1, (2, 2)))
c2 = PF.convolution(c1, 16, (5, 5), name='conv2')
c2 = F.relu(F.max_pooling(c2, (2, 2)))
c3 = F.relu(PF.affine(c2, 50, name='fc3'))
c4 = PF.affine(c3, 10, name='fc4')
return c4
def mnist_lenet_prediction_slim(image, scope="slim", rrate=0.75, test=False):
"""
Construct LeNet for MNIST.
"""
with nn.parameter_scope(scope):
image /= 255.0
c1 = PF.convolution(image, 16, (5, 5), name='conv1')
c1 = F.relu(F.max_pooling(c1, (2, 2)))
c2 = PF.convolution(c1, 16, (5, 5), name='conv2')
c2 = F.relu(F.max_pooling(c2, (2, 2)))
# SVD applied
inmaps = np.prod(c2.shape[1:]) # c * h * w
outmaps0 = 50 # original outmaps
outmaps1 = METHOD_NAME(inmaps, outmaps0, rrate)
d0 = F.relu(PF.affine(c2, outmaps1, name='fc-d0'))
d1 = F.relu(PF.affine(d0, outmaps0, name='fc-d1'))
c4 = PF.affine(d1, 10, name='fc4')
return c4
def METHOD_NAME(inmaps, outmaps, rrate):
maps = int(rrate * inmaps * outmaps / (inmaps + outmaps))
logger.info("###################")
logger.info(
"Num.Parameters is reduced {}x{} -> {}x{} + {}x{} by {}".format(
inmaps, outmaps, inmaps, maps, maps, outmaps, rrate))
logger.info("###################")
time.sleep(1)
return maps
def decompose_network_and_set_params(model_load_path,
reference, slim, rrate=0.75):
# Parameters loaded globally, but call here for consistency
nn.load_parameters(model_load_path)
# Decompose
with nn.parameter_scope(reference):
trained_params = nn.get_parameters()
# original parameter
W = trained_params["fc3/affine/W"].d
# original maps
inmaps = W.shape[0]
outmaps0 = W.shape[1]
# new maps, R < N*M / (N+M) * rrate
outmaps1 = METHOD_NAME(inmaps, outmaps0, rrate)
# singular value decomposition
U, s, V = np.linalg.svd(W, full_matrices=False)
S = np.diag(s)
SV = S.dot(V)
U_approx = U[:, :outmaps1]
SV_approx = SV[:outmaps1, :outmaps0]
# Set trained parameters and decomposed parameters
# set trained parameters
with nn.parameter_scope(slim):
slim_params = nn.get_parameters()
for n, v in trained_params.items():
if n not in slim_params:
continue
v_slim = slim_params[n]
v_slim.d = v.d
# set decomposed parameters and original bias
# a new bias is introduced due to decomposition
slim_params["fc-d0/affine/W"].d = U_approx
slim_params["fc-d1/affine/W"].d = SV_approx
b = trained_params["fc3/affine/b"]
slim_params["fc-d1/affine/b"].d = b.d
# Clear the parameters of the reference net
with nn.parameter_scope(reference):
nn.clear_parameters()
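# Sanity-check sketch for the decomposition above (illustrative only, using
# a random matrix rather than trained parameters):
#
#     W = np.random.randn(800, 50)
#     U, s, V = np.linalg.svd(W, full_matrices=False)
#     SV = np.diag(s).dot(V)
#     R = 30  # rank kept after truncation
#     W_approx = U[:, :R].dot(SV[:R, :])
#     # W_approx has the same shape as W; the two affine layers fc-d0 and
#     # fc-d1 jointly compute x.dot(U[:, :R]).dot(SV[:R, :]) ~= x.dot(W).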
| null |
2,014 |
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
"""
Project:
glideinwms
Purpose:
unit test for glideinwms/lib/timeConversion.py
Author:
Dennis Box, [email protected]
"""
import os
import time
import unittest
import hypothesis
import hypothesis.strategies as st
import xmlrunner
from glideinwms.lib.timeConversion import (
extractHuman,
extractISO8601_Local,
extractISO8601_UTC,
extractRFC2822_Local,
extractRFC2822_UTC,
extractSeconds,
get_time_in_format,
getHuman,
getISO8601_Local,
getISO8601_UTC,
getRFC2822_Local,
getRFC2822_UTC,
getSeconds,
getTZval,
)
# unittest_utils will handle putting the appropriate directories on the python
# path for us.
# from glideinwms.unittests.unittest_utils import runTest
#
# define these globally for convenience
#
now = 1518767040
now_dst = 1521186240
expected = str(now)
human = "Fri Feb 16 01:44:00 2018"
iso_utc = "2018-02-16T07:44:00Z"
iso_local = "2018-02-16T01:44:00-06:00"
iso_local_dst = "2018-03-16T01:44:00-06:00"
rfc_2822_utc = "Fri, 16 Feb 2018 07:44:00 +0000"
rfc_2822_local = "Fri, 16 Feb 2018 01:44:00 -0600"
tz = "US/Central"
# tz = 'CST+06CDT,M3.2.0,M11.1.0'
tzval = 21600
tzval_dst = 18000
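# Illustrative note: getTZval follows the time.timezone/time.altzone sign
# convention (seconds west of UTC), so US/Central gives 21600 (UTC-6, CST)
# in winter and 18000 (UTC-5, CDT) when DST is in effect.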
tz_wrong = "US/Eastern"
class TestTimeFunctions(unittest.TestCase):
def setUp(self):
os.environ["TZ"] = tz
time.tzset()
def test_get_seconds(self):
self.assertEqual(expected, getSeconds(now))
def test_extract_seconds(self):
self.assertEqual(now, extractSeconds(expected))
def test_get_human(self):
self.assertEqual(human, getHuman(now))
def test_extract_human(self):
os.environ["TZ"] = tz
time.tzset()
self.assertEqual(float(now), float(extractHuman(human)))
def test_get_is_o8601__ut_c(self):
self.assertEqual(iso_utc, getISO8601_UTC(now))
def test_extract_is_o8601__ut_c(self):
self.assertEqual(now, extractISO8601_UTC(iso_utc))
def test_get_is_o8601__local(self):
os.environ["TZ"] = tz
time.tzset()
self.assertEqual(iso_local, getISO8601_Local(now))
os.environ["TZ"] = tz_wrong
time.tzset()
self.assertNotEqual(iso_local, getISO8601_Local(now))
def test_extract_is_o8601__local(self):
os.environ["TZ"] = tz
time.tzset()
self.assertEqual(now, extractISO8601_Local(iso_local))
self.assertEqual(now_dst, extractISO8601_Local(iso_local_dst))
# use hypothesis to test hundreds of times between unix epoch and
# unix 4-byte time overflow that get and extract are symmetric
@hypothesis.given(st.floats(min_value=0, max_value=2147483647.0))
def test_ISO8601_Local__symmetric(self, flt_time):
t = int(flt_time)
tstr = getISO8601_Local(flt_time)
self.assertEqual(t, extractISO8601_Local(getISO8601_Local(flt_time)))
self.assertEqual(tstr, getISO8601_Local(extractISO8601_Local(tstr)))
def test_get_rf_c2822__ut_c(self):
self.assertEqual(rfc_2822_utc, getRFC2822_UTC(now))
def METHOD_NAME(self):
self.assertEqual(now, extractRFC2822_UTC(rfc_2822_utc))
def test_get_rf_c2822__local(self):
os.environ["TZ"] = tz
time.tzset()
self.assertEqual(rfc_2822_local, getRFC2822_Local(now))
def test_extract_rf_c2822__local(self):
self.assertEqual(now, extractRFC2822_Local(rfc_2822_local))
# use hypothesis to test hundreds of times between unix epoch and
# unix 4-byte time overflow that get and extract are symmetric
@hypothesis.given(st.floats(min_value=0, max_value=2147483647.0))
def test_rf_c2822_local_symmetric(self, flt_time):
t = int(flt_time)
tstr = getRFC2822_Local(flt_time)
self.assertEqual(t, extractRFC2822_Local(getRFC2822_Local(flt_time)))
self.assertEqual(tstr, getRFC2822_Local(extractRFC2822_Local(tstr)))
def test_get_time_in_format(self):
os.environ["TZ"] = tz
time.tzset()
time_format = "%a, %d %b %Y %H:%M:%S -0600"
self.assertEqual(rfc_2822_local, get_time_in_format(now, time_format))
def test_get_t_zval(self):
os.environ["TZ"] = tz
time.tzset()
self.assertNotEqual(getTZval(now), getTZval(now_dst))
self.assertEqual(tzval_dst, getTZval(now_dst))
self.assertEqual(tzval, getTZval(now))
os.environ["TZ"] = tz_wrong
time.tzset()
self.assertNotEqual(tzval_dst, getTZval(now_dst))
self.assertNotEqual(tzval, getTZval(now))
if __name__ == "__main__":
unittest.main(testRunner=xmlrunner.XMLTestRunner(output="unittests-reports"))
| null |
2,015 |
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause
"""
Tests that specifically test generator-based coroutines
"""
import re
import pytest
from common import _check_traceback
import cocotb
from cocotb.triggers import NullTrigger, Timer
# Tests relating to providing meaningful errors if we forget to use the
# yield keyword correctly to turn a function into a coroutine
@cocotb.test(expect_error=TypeError)
def test_not_a_coroutine(dut):
"""Example of a failing to use the yield keyword in a test"""
dut._log.warning("This test will fail because we don't yield anything")
@cocotb.coroutine
def function_not_a_coroutine():
"""If we don't yield, this isn't a coroutine"""
return "This should fail"
@cocotb.test()
def test_function_not_a_coroutine(dut):
"""Example of trying to yield a coroutine that isn't a coroutine"""
yield Timer(500)
try:
# failure should occur before we even try to yield or fork the coroutine
function_not_a_coroutine()
except TypeError as exc:
assert "isn't a valid coroutine" in str(exc)
else:
assert False
def normal_function(dut):
return True
@cocotb.test()
def test_function_not_decorated(dut):
try:
yield normal_function(dut)
except TypeError as exc:
assert "yielded" in str(exc)
assert "scheduler can't handle" in str(exc)
else:
assert False
@cocotb.test()
def test_function_not_decorated_start_soon(dut):
"""Example of trying to fork a coroutine that isn't a coroutine"""
yield Timer(500)
try:
cocotb.start_soon(normal_function(dut))
except TypeError as exc:
assert "isn't a coroutine" in str(exc)
else:
assert False
yield Timer(500)
@cocotb.coroutine
def example():
yield NullTrigger()
@cocotb.test()
def METHOD_NAME(dut):
"""Catch (and provide useful error) for attempts to fork coroutines
incorrectly"""
yield Timer(100)
try:
cocotb.start_soon(example)
except TypeError as exc:
assert "a coroutine that hasn't started" in str(exc)
else:
assert False
@cocotb.test(expect_fail=False)
def test_yield_list(dut):
"""Example of yielding on a list of triggers"""
yield [Timer(1000), Timer(2000)]
yield Timer(10_000)
@cocotb.coroutine
def erroring_coro():
yield Timer(100)
fail # noqa
@cocotb.test()
def test_coroutine_error(dut):
"""Error in a coroutine that we yield"""
with pytest.raises(NameError):
yield erroring_coro()
@cocotb.test()
def test_coroutine_return(dut):
"""Test that the Python 3.3 syntax for returning from generators works"""
@cocotb.coroutine
def return_it(x):
return x
# this makes `return_it` a coroutine
yield
ret = yield return_it(42)
assert ret == 42, "Return statement did not work"
@cocotb.test()
def test_immediate_coro(dut):
"""
Test that coroutines can return immediately
"""
@cocotb.coroutine
def immediate_value():
return 42
yield
@cocotb.coroutine
def immediate_exception():
raise ValueError
yield
assert (yield immediate_value()) == 42
try:
yield immediate_exception()
except ValueError:
pass
else:
assert False, "Exception was not raised"
@cocotb.test()
def test_exceptions_direct(dut):
"""Test exception propagation via a direct yield statement"""
@cocotb.coroutine
def raise_inner():
yield Timer(10)
raise ValueError("It is soon now")
@cocotb.coroutine
def raise_soon():
yield Timer(1)
yield raise_inner()
yield _check_traceback(
raise_soon(), ValueError, r".*in raise_soon.*in raise_inner", re.DOTALL
)
@cocotb.test()
def test_exceptions_forked(dut):
"""Test exception propagation via cocotb.fork"""
@cocotb.coroutine
def raise_inner():
yield Timer(10)
raise ValueError("It is soon now")
@cocotb.coroutine
def raise_soon():
yield Timer(1)
coro = cocotb.start_soon(raise_inner())
yield coro.join()
yield _check_traceback(
raise_soon(), ValueError, r".*in raise_soon.*in raise_inner", re.DOTALL
)
| null |
2,016 |
import os
import unittest
import numpy as np
from utils import ApproxComparisonTestCase
import meep as mp
class TestCavityArraySlice(ApproxComparisonTestCase):
data_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "data"))
expected_1d = np.load(os.path.join(data_dir, "cavity_arrayslice_1d.npy"))
expected_2d = np.load(os.path.join(data_dir, "cavity_arrayslice_2d.npy"))
def setUp(self):
r = 0.36
d = 1.4
sy = 6
pad = 2
dpml = 1
sx = (2 * (pad + dpml + 3)) + d - 1
cell = mp.Vector3(sx, sy, 0)
blk = mp.Block(
size=mp.Vector3(mp.inf, 1.2, mp.inf), material=mp.Medium(epsilon=13)
)
geometry = [blk]
geometry.extend(mp.Cylinder(r, center=mp.Vector3(d / 2 + i)) for i in range(3))
geometry.extend(mp.Cylinder(r, center=mp.Vector3(d / -2 - i)) for i in range(3))
sources = [mp.Source(mp.GaussianSource(0.25, fwidth=0.2), mp.Hz, mp.Vector3())]
self.sim = mp.Simulation(
cell_size=cell,
geometry=geometry,
sources=sources,
boundary_layers=[mp.PML(dpml)],
resolution=20,
)
self.x_min = -0.25 * sx
self.x_max = +0.25 * sx
self.y_min = -0.15 * sy
self.y_max = +0.15 * sy
self.size_1d = mp.Vector3(self.x_max - self.x_min)
self.center_1d = mp.Vector3((self.x_min + self.x_max) / 2)
self.size_2d = mp.Vector3(self.x_max - self.x_min, self.y_max - self.y_min)
self.center_2d = mp.Vector3(
(self.x_min + self.x_max) / 2, (self.y_min + self.y_max) / 2
)
def test_1d_slice(self):
self.sim.run(until_after_sources=0)
vol = mp.Volume(center=self.center_1d, size=self.size_1d)
hl_slice1d = self.sim.get_array(mp.Hz, vol)
tol = 1e-5 if mp.is_single_precision() else 1e-8
self.assertClose(self.expected_1d, hl_slice1d, epsilon=tol)
def test_2d_slice(self):
self.sim.run(until_after_sources=0)
vol = mp.Volume(center=self.center_2d, size=self.size_2d)
hl_slice2d = self.sim.get_array(mp.Hz, vol)
tol = 1e-5 if mp.is_single_precision() else 1e-8
self.assertClose(self.expected_2d, hl_slice2d, epsilon=tol)
def test_1d_slice_user_array(self):
self.sim.run(until_after_sources=0)
arr = np.zeros(
126, dtype=np.float32 if mp.is_single_precision() else np.float64
)
vol = mp.Volume(center=self.center_1d, size=self.size_1d)
self.sim.get_array(mp.Hz, vol, arr=arr)
tol = 1e-5 if mp.is_single_precision() else 1e-8
self.assertClose(self.expected_1d, arr, epsilon=tol)
def METHOD_NAME(self):
self.sim.run(until_after_sources=0)
arr = np.zeros(
(126, 38), dtype=np.float32 if mp.is_single_precision() else np.float64
)
vol = mp.Volume(center=self.center_2d, size=self.size_2d)
self.sim.get_array(mp.Hz, vol, arr=arr)
tol = 1e-5 if mp.is_single_precision() else 1e-8
self.assertClose(self.expected_2d, arr, epsilon=tol)
def test_illegal_user_array(self):
self.sim.run(until_after_sources=0)
with self.assertRaises(ValueError):
arr = np.zeros(128)
vol = mp.Volume(center=self.center_1d, size=self.size_1d)
self.sim.get_array(mp.Hz, vol, arr=arr)
with self.assertRaises(ValueError):
arr = np.zeros((126, 39))
vol = mp.Volume(center=self.center_2d, size=self.size_2d)
self.sim.get_array(mp.Hz, vol, arr=arr)
with self.assertRaises(ValueError):
arr = np.zeros((126, 38))
vol = mp.Volume(center=self.center_2d, size=self.size_2d)
self.sim.get_array(mp.Hz, vol, cmplx=True, arr=arr)
def test_1d_complex_slice(self):
self.sim.run(until_after_sources=0)
vol = mp.Volume(center=self.center_1d, size=self.size_1d)
hl_slice1d = self.sim.get_array(mp.Hz, vol, cmplx=True)
self.assertTrue(
hl_slice1d.dtype == np.complex64
if mp.is_single_precision()
else np.complex128
)
self.assertTrue(hl_slice1d.shape[0] == 126)
def test_2d_complex_slice(self):
self.sim.run(until_after_sources=0)
vol = mp.Volume(center=self.center_2d, size=self.size_2d)
hl_slice2d = self.sim.get_array(mp.Hz, vol, cmplx=True)
self.assertTrue(
hl_slice2d.dtype == np.complex64
if mp.is_single_precision()
else np.complex128
)
self.assertTrue(hl_slice2d.shape[0] == 126 and hl_slice2d.shape[1] == 38)
if __name__ == "__main__":
unittest.main()
| null |
2,017 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class SetLoadBalancerTCPListenerAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ens', '2017-11-10', 'SetLoadBalancerTCPListenerAttribute','ens')
self.set_method('POST')
def get_HealthCheckURI(self): # String
return self.get_query_params().get('HealthCheckURI')
def METHOD_NAME(self, HealthCheckURI): # String
self.add_query_param('HealthCheckURI', HealthCheckURI)
def get_EstablishedTimeout(self): # Integer
return self.get_query_params().get('EstablishedTimeout')
def set_EstablishedTimeout(self, EstablishedTimeout): # Integer
self.add_query_param('EstablishedTimeout', EstablishedTimeout)
def get_PersistenceTimeout(self): # Integer
return self.get_query_params().get('PersistenceTimeout')
def set_PersistenceTimeout(self, PersistenceTimeout): # Integer
self.add_query_param('PersistenceTimeout', PersistenceTimeout)
def get_HealthCheckDomain(self): # String
return self.get_query_params().get('HealthCheckDomain')
def set_HealthCheckDomain(self, HealthCheckDomain): # String
self.add_query_param('HealthCheckDomain', HealthCheckDomain)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_HealthCheckConnectTimeout(self): # Integer
return self.get_query_params().get('HealthCheckConnectTimeout')
def set_HealthCheckConnectTimeout(self, HealthCheckConnectTimeout): # Integer
self.add_query_param('HealthCheckConnectTimeout', HealthCheckConnectTimeout)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_EipTransmit(self): # String
return self.get_query_params().get('EipTransmit')
def set_EipTransmit(self, EipTransmit): # String
self.add_query_param('EipTransmit', EipTransmit)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_HealthCheckType(self): # String
return self.get_query_params().get('HealthCheckType')
def set_HealthCheckType(self, HealthCheckType): # String
self.add_query_param('HealthCheckType', HealthCheckType)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)
def get_HealthCheckHttpCode(self): # String
return self.get_query_params().get('HealthCheckHttpCode')
def set_HealthCheckHttpCode(self, HealthCheckHttpCode): # String
self.add_query_param('HealthCheckHttpCode', HealthCheckHttpCode)
| null |
2,018 |
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for view_builder."""
import functools
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow_datasets.core.dataset_builders import view_builder
def add_number(number: int, increment: int) -> int:
return number + increment
def is_even(number: int) -> bool:
return number % 2 == 0
_MNIST_TRANSFORMATIONS = [
tfds.transform.remove_feature(feature_name="image"),
tfds.transform.apply_filter(fn=is_even, input_feature="label"),
tfds.transform.apply_fn(
fn=functools.partial(add_number, increment=10),
input_feature="label",
output_feature="label_plus_10",
),
tfds.transform.apply_fn(
fn=functools.partial(add_number, increment=2),
input_feature="label",
output_feature="label_plus_2",
),
]
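# Worked example of the pipeline above: an input {"image": ..., "label": 4}
# loses its image, passes the even-label filter, and comes out as
# {"label": 4, "label_plus_10": 14, "label_plus_2": 6}; an odd label such
# as 3 is dropped by the filter entirely.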
_TRANSFORMED_MNIST_FEATURES = tfds.features.FeaturesDict({
"label": tfds.features.ClassLabel(num_classes=10),
"label_plus_10": tfds.features.Scalar(dtype=tf.int64),
"label_plus_2": tfds.features.Scalar(dtype=tf.int64),
})
def add_number_map_fn(
dataset: tf.data.Dataset,
increment: int,
input_name: str,
output_name: str,
) -> tf.data.Dataset:
def f(ex):
ex[output_name] = ex[input_name] + increment
return ex
return dataset.map(f)
def remove_feature_map_fn(
dataset: tf.data.Dataset,
feature_name: str,
) -> tf.data.Dataset:
def f(ex):
del ex[feature_name]
return ex
return dataset.map(f)
_MNIST_DATASET_TRANSFORMATIONS = [
functools.partial(
add_number_map_fn,
increment=10,
input_name="label",
output_name="label_plus_10",
),
functools.partial(
add_number_map_fn,
increment=2,
input_name="label",
output_name="label_plus_2",
),
functools.partial(remove_feature_map_fn, feature_name="image"),
]
class DummyMnistViewWithoutConfigs(view_builder.ViewBuilder):
VERSION = "1.0.0"
INPUT_DATASET = "dummy_mnist"
EX_TRANSFORMATIONS = _MNIST_TRANSFORMATIONS
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
features=_TRANSFORMED_MNIST_FEATURES,
description="A different view on Mnist.",
)
class DummyMnistViewWithConfigs(view_builder.ViewBuilder):
VERSION = "1.0.0"
BUILDER_CONFIGS = [
view_builder.ViewConfig(
name="add_stuff",
input_dataset="dummy_mnist",
ex_transformations=_MNIST_TRANSFORMATIONS,
)
]
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
features=_TRANSFORMED_MNIST_FEATURES,
description="A different view on Mnist.",
)
class BeamDummyMnistViewWithConfigs(view_builder.ViewBuilder):
VERSION = "1.0.0"
BUILDER_CONFIGS = [
view_builder.ViewConfig(
name="add_stuff",
input_dataset="dummy_mnist",
ex_transformations=_MNIST_TRANSFORMATIONS,
)
]
USE_BEAM = True
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
features=_TRANSFORMED_MNIST_FEATURES,
description="A different view on Mnist.",
)
class DummyMnistViewDatasetTransform(view_builder.ViewBuilder):
VERSION = "1.0.0"
BUILDER_CONFIGS = [
view_builder.ViewConfig(
name="add_stuff",
input_dataset="dummy_mnist",
ds_transformations=_MNIST_DATASET_TRANSFORMATIONS,
)
]
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
features=_TRANSFORMED_MNIST_FEATURES,
description="A different view on Mnist.",
)
def test_view_builder_with_configs_load():
with tfds.testing.tmp_dir() as data_dir:
tfds.testing.DummyMnist(data_dir=data_dir).download_and_prepare()
ds_train = tfds.load(
"dummy_mnist_view_with_configs", split="train", data_dir=data_dir
)
assert len(list(ds_train)) == 10
for example in ds_train:
assert example["label"] + 10 == example["label_plus_10"]
assert example["label"] + 2 == example["label_plus_2"]
def test_beam_view_builder_with_configs_load():
with tfds.testing.tmp_dir() as data_dir:
tfds.testing.DummyMnist(data_dir=data_dir).download_and_prepare()
ds_train = tfds.load(
"beam_dummy_mnist_view_with_configs", split="train", data_dir=data_dir
)
assert len(list(ds_train)) == 10
for example in ds_train:
assert example["label"] + 10 == example["label_plus_10"]
assert example["label"] + 2 == example["label_plus_2"]
def METHOD_NAME():
with tfds.testing.tmp_dir() as data_dir:
tfds.testing.DummyMnist(data_dir=data_dir).download_and_prepare()
ds_train = tfds.load(
"dummy_mnist_view_without_configs", split="train", data_dir=data_dir
)
assert len(list(ds_train)) == 10
for example in ds_train:
assert example["label"] + 10 == example["label_plus_10"]
assert example["label"] + 2 == example["label_plus_2"]
def test_view_builder_tf_dataset_with_configs_load():
with tfds.testing.tmp_dir() as data_dir:
tfds.testing.DummyMnist(data_dir=data_dir).download_and_prepare()
ds_train = tfds.load(
"dummy_mnist_view_dataset_transform", split="train", data_dir=data_dir
)
assert len(list(ds_train)) == 20
for example in ds_train:
assert example["label"] + 10 == example["label_plus_10"]
assert example["label"] + 2 == example["label_plus_2"]
| null |
2,019 |
import abc
import logging
from django.db import models
from django.utils import timezone
from osf.exceptions import ValidationValueError, ValidationTypeError
from osf.external.askismet import tasks as akismet_tasks
from osf.external.spam.tasks import check_resource_for_domains_postcommit, check_resource_with_spam_services
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from osf.utils.fields import ensure_str, NonNaiveDateTimeField
from website import settings
logger = logging.getLogger(__name__)
def _validate_reports(value, *args, **kwargs):
from osf.models import OSFUser
for key, val in value.items():
if not OSFUser.load(key):
raise ValidationValueError('Keys must be user IDs')
if not isinstance(val, dict):
raise ValidationTypeError('Values must be dictionaries')
if ('category' not in val or 'text' not in val or 'date' not in val or 'retracted' not in val):
raise ValidationValueError(
'Values must include `date`, `category`, '
'`text`, `retracted` keys'
)
class SpamStatus(object):
UNKNOWN = None
FLAGGED = 1
SPAM = 2
HAM = 4
class SpamMixin(models.Model):
"""Mixin to add to objects that can be marked as spam.
"""
class Meta:
abstract = True
# # Node fields that trigger an update to search on save
# SPAM_UPDATE_FIELDS = {
# 'spam_status',
# }
spam_status = models.IntegerField(default=SpamStatus.UNKNOWN, null=True, blank=True, db_index=True)
spam_pro_tip = models.CharField(default=None, null=True, blank=True, max_length=200)
# Data representing the original spam indication
# - author: author name
# - author_email: email of the author
# - content: data flagged
# - headers: request headers
# - Remote-Addr: ip address from request
# - User-Agent: user agent from request
# - Referer: referrer header from request ('Referer' is the misspelling the HTTP spec itself uses)
spam_data = DateTimeAwareJSONField(default=dict, blank=True)
date_last_reported = NonNaiveDateTimeField(default=None, null=True, blank=True, db_index=True)
# Reports is a dict of reports keyed on reporting user
# Each report is a dictionary including:
# - date: date reported
# - retracted: if a report has been retracted
# - category: What type of spam does the reporter believe this is
# - text: Comment on the comment
reports = DateTimeAwareJSONField(
default=dict, blank=True, validators=[_validate_reports]
)
def flag_spam(self):
# If ham and unedited then tell user that they should read it again
if self.spam_status == SpamStatus.UNKNOWN:
self.spam_status = SpamStatus.FLAGGED
def remove_flag(self, save=False):
if self.spam_status != SpamStatus.FLAGGED:
return
for report in self.reports.values():
if not report.get('retracted', True):
return
self.spam_status = SpamStatus.UNKNOWN
if save:
self.save()
@property
def is_spam(self):
return self.spam_status == SpamStatus.SPAM
@property
def is_spammy(self):
return self.spam_status in [SpamStatus.FLAGGED, SpamStatus.SPAM]
@property
def is_ham(self):
return self.spam_status == SpamStatus.HAM
@property
def is_hammy(self):
return self.is_ham or (
self.spam_status == SpamStatus.UNKNOWN and self.is_assumed_ham
)
@property
def is_assumed_ham(self):
"""If True, will automatically skip spam checks.
Override to set criteria for assumed ham.
"""
return False
def report_abuse(self, user, save=False, **kwargs):
"""Report object is spam or other abuse of OSF
:param user: User submitting report
:param save: Save changes
:param kwargs: Should include category and message
:raises ValueError: if user is reporting self
"""
if user == self.user:
raise ValueError('User cannot report self.')
self.flag_spam()
date = timezone.now()
report = {'date': date, 'retracted': False}
report.update(kwargs)
if 'text' not in report:
report['text'] = None
self.reports[user._id] = report
self.date_last_reported = report['date']
if save:
self.save()
def retract_report(self, user, save=False):
"""Retract last report by user
Only marks the last report as retracted because there could be
history in how the object is edited that requires a user
to flag or retract even if object is marked as HAM.
:param user: User retracting
:param save: Save changes
"""
if user._id in self.reports:
if not self.reports[user._id]['retracted']:
self.reports[user._id]['retracted'] = True
self.remove_flag()
else:
raise ValueError('User has not reported this content')
if save:
self.save()
def unspam(self, save=False):
self.spam_status = SpamStatus.UNKNOWN
if save:
self.save()
def METHOD_NAME(self, save=False, train_spam_services=True):
self.spam_status = SpamStatus.HAM
if save:
self.save()
if train_spam_services and self.spam_data:
akismet_tasks.submit_ham.apply_async(
kwargs=dict(
guid=self.guids.first()._id,
)
)
def confirm_spam(self, domains=None, save=True, train_spam_services=True):
if domains:
if 'domains' in self.spam_data:
self.spam_data['domains'].extend(domains)
self.spam_data['domains'] = list(set(self.spam_data['domains']))
else:
self.spam_data['domains'] = domains
elif train_spam_services and self.spam_data:
akismet_tasks.submit_spam.apply_async(
kwargs=dict(
guid=self.guids.first()._id,
)
)
self.spam_status = SpamStatus.SPAM
if save:
self.save()
@abc.abstractmethod
def check_spam(self, user, saved_fields, request, save=False):
"""Must return is_spam"""
pass
def do_check_spam(self, author, author_email, content, request_headers):
if self.is_hammy:
return
if self.is_spammy:
return
request_kwargs = {
'remote_addr': request_headers.get('Remote-Addr') or request_headers['Host'], # for local testing
'user_agent': request_headers.get('User-Agent'),
'referer': request_headers.get('Referer'),
}
request_kwargs.update(request_headers)
check_resource_for_domains_postcommit(
self.guids.first()._id,
content,
)
if settings.SPAM_SERVICES_ENABLED:
for key, value in request_kwargs.items():
request_kwargs[key] = ensure_str(value)
check_resource_with_spam_services(
self.guids.first()._id,
content,
author,
author_email,
request_kwargs,
)
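# Typical reporting flow for the mixin above (illustrative sketch; `node`
# and `reporter` are hypothetical objects mixing in SpamMixin / OSFUser):
#
#     node.report_abuse(reporter, category='spam', text='looks like spam',
#                       save=True)   # flags the object and records the report
#     node.retract_report(reporter, save=True)  # unflags once every report
#                                               # has been retracted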
| null |
2,020 |
# Drakkar-Software OctoBot-Tentacles
# Copyright (c) Drakkar-Software, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import decimal
import typing
import octobot_trading.exchanges as exchanges
import octobot_trading.exchanges.connectors.ccxt.constants as ccxt_constants
import octobot_trading.enums as trading_enums
import octobot_trading.errors
class Bitget(exchanges.RestExchange):
DESCRIPTION = ""
FIX_MARKET_STATUS = True
REMOVE_MARKET_STATUS_PRICE_LIMITS = True
@classmethod
def get_name(cls):
return 'bitget'
def get_adapter_class(self):
return BitgetCCXTAdapter
def get_additional_connector_config(self):
# Tell ccxt to use the amount as provided instead of computing it by
# multiplying it by price; that conversion is done here instead, since
# price should not be sent with market orders. Only used for buy market orders.
return {
ccxt_constants.CCXT_OPTIONS: {
"createMarketBuyOrderRequiresPrice": False # disable quote conversion
}
}
async def create_order(self, order_type: trading_enums.TraderOrderType, symbol: str, quantity: decimal.Decimal,
price: decimal.Decimal = None, stop_price: decimal.Decimal = None,
side: trading_enums.TradeOrderSide = None, current_price: decimal.Decimal = None,
reduce_only: bool = False, params: dict = None) -> typing.Optional[dict]:
if order_type is trading_enums.TraderOrderType.BUY_MARKET:
# on Bitget, market orders are in quote currency (YYY in XYZ/YYY)
used_price = price or current_price
if not used_price:
raise octobot_trading.errors.NotSupported(f"{self.get_name()} requires a price parameter to create "
f"market orders as quantity is in quote currency")
quantity = quantity * used_price
return await super().create_order(order_type, symbol, quantity,
price=price, stop_price=stop_price,
side=side, current_price=current_price,
reduce_only=reduce_only, params=params)
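# Illustrative example of the conversion above: a market buy of 0.5 BTC on
# BTC/USDT with current_price 30000 is sent to Bitget as quantity
# 0.5 * 30000 = 15000 (the quote amount), matching the exchange's
# quote-currency convention for market buy orders.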
class BitgetCCXTAdapter(exchanges.CCXTAdapter):
def METHOD_NAME(self, raw, **kwargs):
fixed = super().METHOD_NAME(raw, **kwargs)
try:
if fixed[trading_enums.ExchangeConstantsOrderColumns.TYPE.value] \
== trading_enums.TradeOrderType.MARKET.value and \
fixed[trading_enums.ExchangeConstantsOrderColumns.SIDE.value] \
== trading_enums.TradeOrderSide.BUY.value:
# convert amount to have the same units as every other exchange: use FILLED for accuracy
fixed[trading_enums.ExchangeConstantsOrderColumns.AMOUNT.value] = \
fixed[trading_enums.ExchangeConstantsOrderColumns.FILLED.value]
except KeyError:
pass
return fixed
def fix_trades(self, raw, **kwargs):
raw = super().fix_trades(raw, **kwargs)
for trade in raw:
# fees example for paid fees in USDT:
# {'code': 'USDT', 'cost': -0.015922}
fee = trade[trading_enums.ExchangeConstantsOrderColumns.FEE.value]
if trading_enums.FeePropertyColumns.CURRENCY.value not in fee:
fee[trading_enums.FeePropertyColumns.CURRENCY.value] = fee.get("code")
if fee[trading_enums.FeePropertyColumns.COST.value]:
fee[trading_enums.FeePropertyColumns.COST.value] *= -1
return raw
| null |
2,021 |
from typing import Any, Dict, List, Sequence, Union
PY3: Any
str_type = str
GRAPH_ATTRIBUTES: Any
EDGE_ATTRIBUTES: Any
NODE_ATTRIBUTES: Any
CLUSTER_ATTRIBUTES: Any
DEFAULT_PROGRAMS: Any
def is_windows() -> bool: ...
def is_anaconda() -> bool: ...
def get_executable_extension() -> str: ...
def graph_from_dot_data(s: str) -> List["Dot"]: ...
class Common:
def set_parent_graph(self, parent_graph: "Graph") -> None: ...
def get_parent_graph(self) -> "Graph": ...
def set(self, name: str, value: str) -> None: ...
def get(self, name: str) -> str: ...
def get_attributes(self) -> Dict[str, str]: ...
def set_sequence(self, seq: str) -> None: ...
def get_sequence(self) -> str: ...
def create_attribute_methods(self, obj_attributes: List[str]) -> None: ...
class Error(Exception):
value: Any
def __init__(self, value: str) -> None: ...
class InvocationException(Exception):
value: Any
def __init__(self, value: str) -> None: ...
class Node(Common):
obj_dict: Any
def __init__(self, name: str = ..., obj_dict: Any | None = ..., **attrs: str) -> None: ...
def set_name(self, node_name: str) -> None: ...
def get_name(self) -> str: ...
def get_port(self) -> str: ...
def add_style(self, style: str) -> None: ...
def to_string(self) -> str: ...
class Edge(Common):
obj_dict: Any
def __init__(
self,
src: str = ...,
dst: str = ...,
obj_dict: Any | None = ...,
**attrs: Dict[str, str],
) -> None: ...
def get_source(self) -> str: ...
def get_destination(self) -> str: ...
def __hash__(self) -> int: ...
def __eq__(self, edge: Any) -> bool: ...
def parse_node_ref(self, node_str: str) -> str: ...
def to_string(self) -> str: ...
class Graph(Common):
obj_dict: Any
def __init__(
self,
graph_name: str = ...,
obj_dict: Any | None = ...,
graph_type: str = ...,
strict: bool = ...,
suppress_disconnected: bool = ...,
simplify: bool = ...,
**attrs: Dict[str, str],
) -> None: ...
def get_graph_type(self) -> str: ...
def get_top_graph_type(self) -> str: ...
def set_graph_defaults(self, **attrs: Dict[str, str]) -> None: ...
def get_graph_defaults(self, **attrs: Dict[str, str]) -> Dict[str, str]: ...
def set_node_defaults(self, **attrs: Dict[str, str]) -> None: ...
def get_node_defaults(self, **attrs: Dict[str, str]) -> Dict[str, str]: ...
def set_edge_defaults(self, **attrs: Dict[str, str]) -> None: ...
def get_edge_defaults(self, **attrs: Dict[str, str]) -> Dict[str, str]: ...
def METHOD_NAME(self, simplify: bool) -> None: ...
def get_simplify(self) -> bool: ...
def set_type(self, graph_type: str) -> None: ...
def get_type(self) -> str: ...
def set_name(self, graph_name: str) -> None: ...
def get_name(self) -> str: ...
def set_strict(self, val: bool) -> None: ...
def get_strict(self, val: Any) -> bool: ...
def set_suppress_disconnected(self, val: bool) -> None: ...
def get_suppress_disconnected(self, val: Any) -> None: ...
def get_next_sequence_number(self) -> int: ...
def add_node(self, graph_node: Node) -> None: ...
def del_node(self, name: Union[str, Node], index: int | None = ...) -> bool: ...
def get_node(self, name: str) -> Node: ...
def get_nodes(self) -> List[Node]: ...
def get_node_list(self) -> List[Node]: ...
def add_edge(self, graph_edge: Edge) -> None: ...
def del_edge(
self,
src_or_list: Union[Sequence[Node], Node],
dst: str | int | None = ...,
index: int | None = ...,
) -> bool: ...
def get_edge(
self, src_or_list: Union[Sequence[Node], Node], dst: Any | None = ...
) -> List[Edge]: ...
def get_edges(self) -> List[Edge]: ...
def get_edge_list(self) -> List[Edge]: ...
def add_subgraph(self, sgraph: Union["Subgraph", "Cluster"]) -> None: ...
def get_subgraph(self, name: str) -> List["Subgraph"]: ...
def get_subgraphs(self) -> List["Subgraph"]: ...
def get_subgraph_list(self) -> List["Subgraph"]: ...
def set_parent_graph(self, parent_graph: "Graph") -> None: ...
def to_string(self) -> str: ...
class Subgraph(Graph):
def __init__(
self,
graph_name: str = ...,
obj_dict: Any | Dict[str, str] = ...,
suppress_disconnected: bool = ...,
simplify: bool = ...,
**attrs: Dict[str, str],
) -> None: ...
class Cluster(Graph):
def __init__(
self,
graph_name: str = ...,
obj_dict: Any | Dict[str, str] = ...,
suppress_disconnected: bool = ...,
simplify: bool = ...,
**attrs: Dict[str, str],
) -> None: ...
class Dot(Graph):
shape_files: Any
formats: Any
prog: str
def __init__(self, *argsl: Any, **argsd: Dict[str, str]): ...
def set_shape_files(self, file_paths: Union[str, Sequence[str]]) -> None: ...
def set_prog(self, prog: str) -> None: ...
def write(
self,
path: str,
prog: Any | str = ...,
format: str = ...,
encoding: Any | str = ...,
) -> bool: ...
def create(
self, prog: Any | str = ..., format: str = ..., encoding: Any | str = ...
) -> bytes: ...
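# Usage sketch for the API stubbed above (illustrative; assumes the real
# pydot package is installed):
#
#     graph = Dot(graph_type="digraph")
#     graph.add_node(Node("a", shape="box"))
#     graph.add_node(Node("b"))
#     graph.add_edge(Edge("a", "b", label="a->b"))
#     graph.write("graph.png", prog="dot", format="png")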
| null |
2,022 |
from typing import Tuple
import pytest
import torch
from pytorch_lightning import Trainer
from torch import Tensor, nn
from torch.utils.data import DataLoader, Dataset
from lightly.utils.benchmarking import KNNClassifier
class TestKNNClassifier:
def test(self) -> None:
# Define 4 training points from 4 classes.
train_features = torch.tensor(
[
[0.0, -1.0],
[0.0, 1.0],
[1.0, 0.0],
[1.0, 1.0],
]
)
train_targets = torch.tensor([0, 1, 2, 3])
train_dataset = _FeaturesDataset(features=train_features, targets=train_targets)
# Define 3 validation points.
# Their expected predicted labels are their closest training points in order.
val_features = torch.tensor(
[
[0.0, -0.4], # predicted_labels = [0, 1, 2, 3]
[0.6, 0.7], # predicted_labels = [3, 1, 2, 0]
[0.6, 0.3], # predicted_labels = [2, 3, 1, 0]
]
)
val_targets = torch.tensor([0, 0, 1])
val_dataset = _FeaturesDataset(features=val_features, targets=val_targets)
train_dataloader = DataLoader(train_dataset, batch_size=2)
val_dataloader = DataLoader(val_dataset, batch_size=2)
# Run KNN classifier.
model = nn.Identity()
classifier = KNNClassifier(model, num_classes=4, knn_k=3, topk=(1, 2, 3, 4))
trainer = Trainer(max_epochs=1, accelerator="cpu", devices=1)
trainer.fit(
model=classifier,
train_dataloaders=train_dataloader,
val_dataloaders=val_dataloader,
)
assert trainer.callback_metrics["val_top1"].item() == pytest.approx(1 / 3)
assert trainer.callback_metrics["val_top2"].item() == pytest.approx(1 / 3)
assert trainer.callback_metrics["val_top3"].item() == pytest.approx(2 / 3)
assert trainer.callback_metrics["val_top4"].item() == pytest.approx(3 / 3)
def test__cpu(self) -> None:
self._test__accelerator(accelerator="cpu", expected_device="cpu")
@pytest.mark.skipif(not torch.cuda.is_available(), reason="No cuda available")
def test__cuda(self) -> None:
self._test__accelerator(accelerator="gpu", expected_device="cuda")
def _test__accelerator(self, accelerator: str, expected_device: str) -> None:
torch.manual_seed(0)
model = nn.Linear(3, 2)
classifier = KNNClassifier(model, num_classes=10, knn_k=20)
trainer = Trainer(max_epochs=1, accelerator=accelerator, devices=1)
train_features = torch.randn(40, 3)
train_targets = torch.randint(0, 10, (40,))
train_dataset = _FeaturesDataset(features=train_features, targets=train_targets)
val_features = torch.randn(10, 3)
val_targets = torch.randint(0, 10, (10,))
val_dataset = _FeaturesDataset(features=val_features, targets=val_targets)
train_dataloader = DataLoader(train_dataset, batch_size=3)
val_dataloader = DataLoader(val_dataset, batch_size=3)
trainer.fit(
model=classifier,
train_dataloaders=train_dataloader,
val_dataloaders=val_dataloader,
)
assert trainer.callback_metrics["val_top1"].item() >= 0.0
assert (
trainer.callback_metrics["val_top5"].item()
>= trainer.callback_metrics["val_top1"].item()
)
assert trainer.callback_metrics["val_top5"].item() <= 1.0
assert classifier._train_features == []
assert classifier._train_targets == []
assert classifier._train_features_tensor is not None
assert classifier._train_targets_tensor is not None
assert classifier._train_features_tensor.shape == (2, 40)
assert classifier._train_targets_tensor.shape == (40,)
assert classifier._train_features_tensor.dtype == torch.float32
assert classifier._train_features_tensor.device.type == expected_device
assert classifier._train_targets_tensor.device.type == expected_device
def METHOD_NAME(self) -> None:
model = nn.Identity()
# Set feature_dtype to torch.int to test if classifier correctly changes dtype.
# We cannot test for torch.float16 because it is not supported on cpu.
classifier = KNNClassifier(
model, num_classes=10, knn_k=3, feature_dtype=torch.int
)
trainer = Trainer(max_epochs=1, accelerator="cpu", devices=1)
train_features = torch.randn(4, 3)
train_targets = torch.randint(0, 10, (4,))
train_dataset = _FeaturesDataset(features=train_features, targets=train_targets)
val_features = torch.randn(4, 3)
val_targets = torch.randint(0, 10, (4,))
val_dataset = _FeaturesDataset(features=val_features, targets=val_targets)
train_dataloader = DataLoader(train_dataset)
val_dataloader = DataLoader(val_dataset)
trainer.fit(
model=classifier,
train_dataloaders=train_dataloader,
val_dataloaders=val_dataloader,
)
assert classifier._train_features_tensor is not None
assert classifier._train_features_tensor.dtype == torch.int
class _FeaturesDataset(Dataset):
def __init__(self, features: Tensor, targets) -> None:
super().__init__()
self.features = features
self.targets = targets
def __getitem__(self, index: int) -> Tuple[Tensor, Tensor]:
return self.features[index], self.targets[index]
def __len__(self) -> int:
return len(self.features)
| null |
2,023 |
#!/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module which provides compatibility with older Python versions."""
__all__ = ["PY3", "int", "long", "xrange", "exec_", "callable", "lru_cache"]
import collections
import functools
import sys
try:
import __builtin__
except ImportError:
import builtins as __builtin__ # py3
PY3 = sys.version_info[0] == 3
if PY3:
int = int
long = int
xrange = range
unicode = str
basestring = str
exec_ = getattr(__builtin__, "exec")
else:
int = int
long = long
xrange = xrange
unicode = unicode
basestring = basestring
def exec_(code, globs=None, locs=None):
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
# removed in 3.0, reintroduced in 3.2
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
# --- stdlib additions
# py 3.2 functools.lru_cache
# Taken from: http://code.activestate.com/recipes/578078
# Credit: Raymond Hettinger
try:
from functools import lru_cache
except ImportError:
try:
from threading import RLock
except ImportError:
from dummy_threading import RLock
_CacheInfo = collections.namedtuple(
"CacheInfo", ["hits", "misses", "maxsize", "currsize"])
class _HashedSeq(list):
__slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
self[:] = tup
self.hashvalue = hash(tup)
def __hash__(self):
return self.hashvalue
def _make_key(args, kwds, typed,
kwd_mark=(object(), ),
fasttypes=set((int, str, frozenset, type(None))),
sorted=sorted, tuple=tuple, type=type, len=len):
key = args
if kwds:
sorted_items = sorted(kwds.items())
key += kwd_mark
for item in sorted_items:
key += item
if typed:
key += tuple(type(v) for v in args)
if kwds:
key += tuple(type(v) for k, v in sorted_items)
elif len(key) == 1 and type(key[0]) in fasttypes:
return key[0]
return _HashedSeq(key)
def lru_cache(maxsize=100, typed=False):
"""Least-recently-used cache decorator, see:
http://docs.python.org/3/library/functools.html#functools.lru_cache
"""
def METHOD_NAME(user_function):
cache = dict()
stats = [0, 0]
HITS, MISSES = 0, 1
make_key = _make_key
cache_get = cache.get
_len = len
lock = RLock()
root = []
root[:] = [root, root, None, None]
nonlocal_root = [root]
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3
if maxsize == 0:
def wrapper(*args, **kwds):
result = user_function(*args, **kwds)
stats[MISSES] += 1
return result
elif maxsize is None:
def wrapper(*args, **kwds):
key = make_key(args, kwds, typed)
result = cache_get(key, root)
if result is not root:
stats[HITS] += 1
return result
result = user_function(*args, **kwds)
cache[key] = result
stats[MISSES] += 1
return result
else:
def wrapper(*args, **kwds):
if kwds or typed:
key = make_key(args, kwds, typed)
else:
key = args
lock.acquire()
try:
link = cache_get(key)
if link is not None:
root, = nonlocal_root
link_prev, link_next, key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
stats[HITS] += 1
return result
finally:
lock.release()
result = user_function(*args, **kwds)
lock.acquire()
try:
root, = nonlocal_root
if key in cache:
pass
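                        # Cache is full: reuse the current root node to hold
                        # the new key/result, advance root to the next node,
                        # and evict the oldest entry that node was holding.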
elif _len(cache) >= maxsize:
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
root = nonlocal_root[0] = oldroot[NEXT]
oldkey = root[KEY]
root[KEY] = root[RESULT] = None
del cache[oldkey]
cache[key] = oldroot
else:
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
stats[MISSES] += 1
finally:
lock.release()
return result
def cache_info():
"""Report cache statistics"""
lock.acquire()
try:
return _CacheInfo(stats[HITS], stats[MISSES], maxsize,
len(cache))
finally:
lock.release()
def cache_clear():
"""Clear the cache and cache statistics"""
lock.acquire()
try:
cache.clear()
root = nonlocal_root[0]
root[:] = [root, root, None, None]
stats[:] = [0, 0]
finally:
lock.release()
wrapper.__wrapped__ = user_function
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return functools.update_wrapper(wrapper, user_function)
return METHOD_NAME
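    # Illustrative usage (our sketch, not part of the original module): the
    # backported decorator is used exactly like functools.lru_cache:
    #
    #     @lru_cache(maxsize=32)
    #     def fib(n):
    #         return n if n < 2 else fib(n - 1) + fib(n - 2)
    #
    #     fib(30)
    #     fib.cache_info()  # CacheInfo(hits=28, misses=31, maxsize=32, currsize=31)
    #     fib.cache_clear()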
| null |
2,024 |
from pyrokinetics.gk_code import GKOutputReaderGENE
from pyrokinetics.gk_code.gk_output import GKOutput
from pyrokinetics import template_dir
from pyrokinetics.normalisation import SimulationNormalisation as Normalisation
from pathlib import Path
import numpy as np
import pytest
import subprocess
import shutil
from .utils import array_similar
# TODO mock output tests, similar to GS2
@pytest.fixture(scope="module")
def gene_tmp_path(tmp_path_factory):
tmp_dir = tmp_path_factory.mktemp("test_gk_output_reader_gene")
return tmp_dir
@pytest.fixture
def reader():
return GKOutputReaderGENE()
@pytest.fixture
def gene_output_dir(gene_tmp_path):
mock_dir = gene_tmp_path / "mock_dir"
mock_dir.mkdir()
subprocess.run(
["cp", str(template_dir / "input.gene"), str(mock_dir / "parameters_0000")]
)
return mock_dir
@pytest.fixture
def gene_output_dir_missing_parameters(gene_tmp_path):
mock_dir = gene_tmp_path / "broken_mock_dir"
mock_dir.mkdir()
for f in [mock_dir / f for f in ["nrg_0000", "field_0000"]]:
with open(f, "w") as _:
pass
return mock_dir
@pytest.fixture
def empty_gene_dir(gene_tmp_path):
mock_dir = gene_tmp_path / "empty_dir"
mock_dir.mkdir()
return mock_dir
@pytest.fixture
def not_gene_file(gene_tmp_path):
mock_dir = gene_tmp_path / "nongene_dir"
mock_dir.mkdir()
filename = mock_dir / "hello_world.txt"
with open(filename, "w") as file:
file.write("hello world!")
return filename
def test_verify_gene_output(reader, gene_output_dir):
# Expect exception to be raised if this fails
reader.verify_file_type(gene_output_dir)
def test_verify_gene_missing_parameters(reader, gene_output_dir_missing_parameters):
with pytest.raises(Exception):
reader.verify_file_type(gene_output_dir_missing_parameters)
def test_verify_not_gene_dir(reader, empty_gene_dir):
with pytest.raises(Exception):
reader.verify_file_type(empty_gene_dir)
def test_verify_not_gene_file(reader, not_gene_file):
with pytest.raises(Exception):
reader.verify_file_type(not_gene_file)
@pytest.mark.parametrize(
"input_path",
[
Path("dir/to/parameters_0003"),
Path("dir/to/nrg_0017"),
Path("dir/to/input_file"),
Path("dir_0001/to_5102/parameters_0005"),
],
)
def test_infer_path_from_input_file_gene(input_path):
output_path = GKOutputReaderGENE.infer_path_from_input_file(input_path)
# If the last four chars are digits, expect to find "parameters_####".
# Otherwise, get the dir
last_4_chars = str(input_path)[-4:]
if last_4_chars.isdigit():
assert output_path == input_path.parent / f"parameters_{last_4_chars}"
else:
assert output_path == input_path.parent
# Golden answer tests
# Compares against results obtained using GKCode methods from commit 7d551eaa
# Update: Commit 9eae331 accounts for last time step (7d551eaa-2nd last step)
# Update: Commit 3974780 accounts for correct frequency sign
# Update: Commit d3da468c accounts for new gkoutput structure
# This data was gathered from templates/outputs/GENE_linear
reference_data_commit_hash = "d3da468c"
@pytest.fixture(scope="class")
def METHOD_NAME(request):
this_dir = Path(__file__).parent
cdf_path = (
this_dir
/ "golden_answers"
/ f"gene_linear_output_{reference_data_commit_hash}.netcdf4"
)
# ds = get_golden_answer_data(cdf_path)
request.cls.reference_data = GKOutput.from_netcdf(cdf_path)
@pytest.fixture(scope="class")
def golden_answer_data(request):
path = template_dir / "outputs" / "GENE_linear" / "parameters_0001"
norm = Normalisation("test_gk_output_gene")
request.cls.data = GKOutputReaderGENE().read_from_file(path, norm=norm)
@pytest.mark.usefixtures("golden_answer_reference_data", "golden_answer_data")
class TestGENEGoldenAnswers:
def test_coords(self):
"""
Ensure that all reference coords are present in data
"""
for c in self.reference_data.coords:
dtype = self.reference_data[c].dtype
if dtype == "float64" or dtype == "complex128":
assert array_similar(self.reference_data[c], self.data[c])
else:
assert np.array_equal(self.reference_data[c], self.data[c])
@pytest.mark.parametrize(
"var",
[
"phi",
"particle",
"momentum",
"heat",
"eigenvalues",
"eigenfunctions",
"growth_rate",
"mode_frequency",
],
)
def test_data_vars(self, var):
assert array_similar(self.reference_data[var], self.data[var])
@pytest.mark.parametrize(
"attr",
[
"linear",
"gk_code",
"input_file",
"attribute_units",
"title",
"growth_rate_tolerance",
],
)
def test_data_attrs(self, attr):
if isinstance(getattr(self.reference_data, attr), float):
assert np.isclose(
getattr(self.reference_data, attr), getattr(self.data, attr)
)
else:
assert getattr(self.reference_data, attr) == getattr(self.data, attr)
def test_gene_read_omega_file(tmp_path):
"""Can we read growth rate/frequency from `omega` text file"""
shutil.copytree(template_dir / "outputs/GENE_linear", tmp_path, dirs_exist_ok=True)
fields_file = tmp_path / "field_0001"
fields_file.unlink()
norm = Normalisation("test_gk_output_gene")
data = GKOutputReaderGENE().read_from_file(tmp_path / "parameters_0001", norm=norm)
assert np.allclose(
data["growth_rate"].isel(time=-1, ky=0, kx=0).data.magnitude, 1.848
)
assert np.allclose(
data["mode_frequency"].isel(time=-1, ky=0, kx=0).data.magnitude, 12.207
)
| null |
2,025 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkalb.endpoint import endpoint_data
class CreateServerGroupRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Alb', '2020-06-16', 'CreateServerGroup','alb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ServerGroupName(self): # String
return self.get_query_params().get('ServerGroupName')
def set_ServerGroupName(self, ServerGroupName): # String
self.add_query_param('ServerGroupName', ServerGroupName)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_HealthCheckConfig(self): # Struct
return self.get_query_params().get('HealthCheckConfig')
def set_HealthCheckConfig(self, HealthCheckConfig): # Struct
if HealthCheckConfig.get('HealthCheckCodes') is not None:
for index1, value1 in enumerate(HealthCheckConfig.get('HealthCheckCodes')):
self.add_query_param('HealthCheckConfig.HealthCheckCodes.' + str(index1 + 1), value1)
if HealthCheckConfig.get('HealthCheckEnabled') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckEnabled', HealthCheckConfig.get('HealthCheckEnabled'))
if HealthCheckConfig.get('HealthCheckTimeout') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckTimeout', HealthCheckConfig.get('HealthCheckTimeout'))
if HealthCheckConfig.get('HealthCheckMethod') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckMethod', HealthCheckConfig.get('HealthCheckMethod'))
if HealthCheckConfig.get('HealthCheckHost') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckHost', HealthCheckConfig.get('HealthCheckHost'))
if HealthCheckConfig.get('HealthCheckProtocol') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckProtocol', HealthCheckConfig.get('HealthCheckProtocol'))
if HealthCheckConfig.get('UnhealthyThreshold') is not None:
self.add_query_param('HealthCheckConfig.UnhealthyThreshold', HealthCheckConfig.get('UnhealthyThreshold'))
if HealthCheckConfig.get('HealthyThreshold') is not None:
self.add_query_param('HealthCheckConfig.HealthyThreshold', HealthCheckConfig.get('HealthyThreshold'))
if HealthCheckConfig.get('HealthCheckTcpFastCloseEnabled') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckTcpFastCloseEnabled', HealthCheckConfig.get('HealthCheckTcpFastCloseEnabled'))
if HealthCheckConfig.get('HealthCheckPath') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckPath', HealthCheckConfig.get('HealthCheckPath'))
if HealthCheckConfig.get('HealthCheckInterval') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckInterval', HealthCheckConfig.get('HealthCheckInterval'))
if HealthCheckConfig.get('HealthCheckHttpCodes') is not None:
for index1, value1 in enumerate(HealthCheckConfig.get('HealthCheckHttpCodes')):
self.add_query_param('HealthCheckConfig.HealthCheckHttpCodes.' + str(index1 + 1), value1)
if HealthCheckConfig.get('HealthCheckHttpVersion') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckHttpVersion', HealthCheckConfig.get('HealthCheckHttpVersion'))
if HealthCheckConfig.get('HealthCheckConnectPort') is not None:
self.add_query_param('HealthCheckConfig.HealthCheckConnectPort', HealthCheckConfig.get('HealthCheckConnectPort'))
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def METHOD_NAME(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Protocol(self): # String
return self.get_query_params().get('Protocol')
def set_Protocol(self, Protocol): # String
self.add_query_param('Protocol', Protocol)
def get_ServiceName(self): # String
return self.get_query_params().get('ServiceName')
def set_ServiceName(self, ServiceName): # String
self.add_query_param('ServiceName', ServiceName)
def get_StickySessionConfig(self): # Struct
return self.get_query_params().get('StickySessionConfig')
def set_StickySessionConfig(self, StickySessionConfig): # Struct
if StickySessionConfig.get('StickySessionEnabled') is not None:
self.add_query_param('StickySessionConfig.StickySessionEnabled', StickySessionConfig.get('StickySessionEnabled'))
if StickySessionConfig.get('Cookie') is not None:
self.add_query_param('StickySessionConfig.Cookie', StickySessionConfig.get('Cookie'))
if StickySessionConfig.get('CookieTimeout') is not None:
self.add_query_param('StickySessionConfig.CookieTimeout', StickySessionConfig.get('CookieTimeout'))
if StickySessionConfig.get('StickySessionType') is not None:
self.add_query_param('StickySessionConfig.StickySessionType', StickySessionConfig.get('StickySessionType'))
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ServerGroupType(self): # String
return self.get_query_params().get('ServerGroupType')
def set_ServerGroupType(self, ServerGroupType): # String
self.add_query_param('ServerGroupType', ServerGroupType)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_UchConfig(self): # Struct
return self.get_query_params().get('UchConfig')
def set_UchConfig(self, UchConfig): # Struct
if UchConfig.get('Type') is not None:
self.add_query_param('UchConfig.Type', UchConfig.get('Type'))
if UchConfig.get('Value') is not None:
self.add_query_param('UchConfig.Value', UchConfig.get('Value'))
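# Usage sketch (values are illustrative, not taken from the API reference):
# the Struct setters above flatten nested fields into dotted, 1-indexed
# query parameters.
#
#     request = CreateServerGroupRequest()
#     request.set_ServerGroupName('demo-group')
#     request.set_HealthCheckConfig({
#         'HealthCheckEnabled': True,
#         'HealthCheckCodes': ['http_2xx', 'http_3xx'],
#     })
#     # -> HealthCheckConfig.HealthCheckCodes.1=http_2xx
#     #    HealthCheckConfig.HealthCheckCodes.2=http_3xx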
| null |
2,026 |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import unittest
from tempfile import TemporaryDirectory
from unittest.mock import call, MagicMock
import torch
from reagent.core.tensorboardX import summary_writer_context, SummaryWriterContext
from reagent.test.base.horizon_test_base import HorizonTestBase
from torch.utils.tensorboard import SummaryWriter
class TestSummaryWriterContext(HorizonTestBase):
def METHOD_NAME(self) -> None:
self.assertIsNone(SummaryWriterContext.add_scalar("test", torch.ones(1)))
def test_with_none(self) -> None:
with summary_writer_context(None):
self.assertIsNone(SummaryWriterContext.add_scalar("test", torch.ones(1)))
def test_writing(self) -> None:
with TemporaryDirectory() as tmp_dir:
writer = SummaryWriter(tmp_dir)
writer.add_scalar = MagicMock()
with summary_writer_context(writer):
SummaryWriterContext.add_scalar("test", torch.ones(1))
writer.add_scalar.assert_called_once_with(
"test", torch.ones(1), global_step=0
)
def test_writing_stack(self) -> None:
with TemporaryDirectory() as tmp_dir1, TemporaryDirectory() as tmp_dir2:
writer1 = SummaryWriter(tmp_dir1)
writer1.add_scalar = MagicMock()
writer2 = SummaryWriter(tmp_dir2)
writer2.add_scalar = MagicMock()
with summary_writer_context(writer1):
with summary_writer_context(writer2):
SummaryWriterContext.add_scalar("test2", torch.ones(1))
SummaryWriterContext.add_scalar("test1", torch.zeros(1))
writer1.add_scalar.assert_called_once_with(
"test1", torch.zeros(1), global_step=0
)
writer2.add_scalar.assert_called_once_with(
"test2", torch.ones(1), global_step=0
)
def test_swallowing_exception(self) -> None:
with TemporaryDirectory() as tmp_dir:
writer = SummaryWriter(tmp_dir)
writer.add_scalar = MagicMock(side_effect=NotImplementedError("test"))
# pyre-fixme[16]: `SummaryWriter` has no attribute `exceptions_to_ignore`.
writer.exceptions_to_ignore = (NotImplementedError, KeyError)
with summary_writer_context(writer):
SummaryWriterContext.add_scalar("test", torch.ones(1))
def test_not_swallowing_exception(self) -> None:
with TemporaryDirectory() as tmp_dir:
writer = SummaryWriter(tmp_dir)
writer.add_scalar = MagicMock(side_effect=NotImplementedError("test"))
with self.assertRaisesRegex(
NotImplementedError, "test"
), summary_writer_context(writer):
SummaryWriterContext.add_scalar("test", torch.ones(1))
def test_swallowing_histogram_value_error(self) -> None:
with TemporaryDirectory() as tmp_dir:
writer = SummaryWriter(tmp_dir)
with summary_writer_context(writer):
SummaryWriterContext.add_histogram("bad_histogram", torch.ones(100, 1))
def test_global_step(self) -> None:
with TemporaryDirectory() as tmp_dir:
writer = SummaryWriter(tmp_dir)
writer.add_scalar = MagicMock()
with summary_writer_context(writer):
SummaryWriterContext.add_scalar("test", torch.ones(1))
SummaryWriterContext.increase_global_step()
SummaryWriterContext.add_scalar("test", torch.zeros(1))
writer.add_scalar.assert_has_calls(
[
call("test", torch.ones(1), global_step=0),
call("test", torch.zeros(1), global_step=1),
]
)
self.assertEqual(2, len(writer.add_scalar.mock_calls))
def test_add_custom_scalars(self) -> None:
with TemporaryDirectory() as tmp_dir:
writer = SummaryWriter(tmp_dir)
writer.add_custom_scalars = MagicMock()
with summary_writer_context(writer):
SummaryWriterContext.add_custom_scalars_multilinechart(
["a", "b"], category="cat", title="title"
)
with self.assertRaisesRegex(
AssertionError, "Title \\(title\\) is already in category \\(cat\\)"
):
SummaryWriterContext.add_custom_scalars_multilinechart(
["c", "d"], category="cat", title="title"
)
SummaryWriterContext.add_custom_scalars_multilinechart(
["e", "f"], category="cat", title="title2"
)
SummaryWriterContext.add_custom_scalars_multilinechart(
["g", "h"], category="cat2", title="title"
)
SummaryWriterContext.add_custom_scalars(writer)
writer.add_custom_scalars.assert_called_once_with(
{
"cat": {
"title": ["Multiline", ["a", "b"]],
"title2": ["Multiline", ["e", "f"]],
},
"cat2": {"title": ["Multiline", ["g", "h"]]},
}
)
| null |
2,027 |
import os
import unittest
from tempfile import NamedTemporaryFile
import numpy as np
from Orange.data import ContinuousVariable, DiscreteVariable, StringVariable, \
TimeVariable, Domain, Table
from Orange.data.io import TabReader, ExcelReader
from Orange.data.io_util import guess_data_type
from Orange.misc.collections import natural_sorted
class TestTableFilters(unittest.TestCase):
def test_guess_data_type_continuous(self):
# should be ContinuousVariable
valuemap, values, coltype = guess_data_type(list(range(1, 100)))
self.assertEqual(ContinuousVariable, coltype)
self.assertIsNone(valuemap)
np.testing.assert_array_equal(np.array(list(range(1, 100))), values)
valuemap, values, coltype = guess_data_type([1, 2, 3, 1, 2, 3])
self.assertEqual(ContinuousVariable, coltype)
self.assertIsNone(valuemap)
np.testing.assert_array_equal([1, 2, 3, 1, 2, 3], values)
valuemap, values, coltype = guess_data_type(
["1", "2", "3", "1", "2", "3"])
self.assertEqual(ContinuousVariable, coltype)
self.assertIsNone(valuemap)
np.testing.assert_array_equal([1, 2, 3, 1, 2, 3], values)
def test_guess_data_type_discrete(self):
# should be DiscreteVariable
valuemap, values, coltype = guess_data_type([1, 2, 1, 2])
self.assertEqual(DiscreteVariable, coltype)
self.assertEqual([1, 2], valuemap)
np.testing.assert_array_equal([1, 2, 1, 2], values)
valuemap, values, coltype = guess_data_type(["1", "2", "1", "2", "a"])
self.assertEqual(DiscreteVariable, coltype)
self.assertEqual(["1", "2", "a"], valuemap)
np.testing.assert_array_equal(['1', '2', '1', '2', 'a'], values)
# just below the threshold for string variable
in_values = list(map(lambda x: str(x) + "a", range(24))) + ["a"] * 76
valuemap, values, coltype = guess_data_type(in_values)
self.assertEqual(DiscreteVariable, coltype)
self.assertEqual(natural_sorted(set(in_values)), valuemap)
np.testing.assert_array_equal(in_values, values)
def test_guess_data_type_string(self):
# should be StringVariable
# too many different values for discrete
in_values = list(map(lambda x: str(x) + "a", range(90)))
valuemap, values, coltype = guess_data_type(in_values)
self.assertEqual(StringVariable, coltype)
self.assertIsNone(valuemap)
np.testing.assert_array_equal(in_values, values)
# more than len(values)**0.7
in_values = list(map(lambda x: str(x) + "a", range(25))) + ["a"] * 75
valuemap, values, coltype = guess_data_type(in_values)
self.assertEqual(StringVariable, coltype)
self.assertIsNone(valuemap)
np.testing.assert_array_equal(in_values, values)
        # more than 100 different values - exactly 101
        # this is a case where the len(values)**0.7 rule alone would vote for
        # DiscreteVariable
in_values = list(map(lambda x: str(x) + "a", range(100))) + ["a"] * 999
valuemap, values, coltype = guess_data_type(in_values)
self.assertEqual(StringVariable, coltype)
self.assertIsNone(valuemap)
np.testing.assert_array_equal(in_values, values)
def METHOD_NAME(self):
in_values = ["2019-10-10", "2019-10-10", "2019-10-10", "2019-10-01"]
valuemap, _, coltype = guess_data_type(in_values)
self.assertEqual(TimeVariable, coltype)
self.assertIsNone(valuemap)
in_values = ["2019-10-10T12:08:51", "2019-10-10T12:08:51",
"2019-10-10T12:08:51", "2019-10-01T12:08:51"]
valuemap, _, coltype = guess_data_type(in_values)
self.assertEqual(TimeVariable, coltype)
self.assertIsNone(valuemap)
in_values = ["2019-10-10 12:08:51", "2019-10-10 12:08:51",
"2019-10-10 12:08:51", "2019-10-01 12:08:51"]
valuemap, _, coltype = guess_data_type(in_values)
self.assertEqual(TimeVariable, coltype)
self.assertIsNone(valuemap)
in_values = ["2019-10-10 12:08", "2019-10-10 12:08",
"2019-10-10 12:08", "2019-10-01 12:08"]
valuemap, _, coltype = guess_data_type(in_values)
self.assertEqual(TimeVariable, coltype)
self.assertIsNone(valuemap)
def test_guess_data_type_values_order(self):
"""
Test if values are ordered naturally
"""
in_values = [
"something1", "something12", "something2", "something1",
"something20", "something1", "something2", "something12",
"something1", "something12"
]
res = ["something1", "something2", "something12", "something20"]
valuemap, _, coltype = guess_data_type(in_values)
self.assertEqual(DiscreteVariable, coltype)
self.assertListEqual(res, valuemap)
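    # Note (our reading of the heuristic exercised above, not a statement of
    # Orange's API): a column is guessed as discrete only while the number of
    # distinct values stays within both len(values) ** 0.7 and a hard cap of
    # 100; past either bound guess_data_type falls back to StringVariable.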
class TestWriters(unittest.TestCase):
def setUp(self):
self.domain = Domain([DiscreteVariable("a", values=tuple("xyz")),
ContinuousVariable("b", number_of_decimals=3)],
ContinuousVariable("c", number_of_decimals=0),
[StringVariable("d")])
self.data = Table.from_numpy(
self.domain,
np.array([[1, 0.5], [2, np.nan], [np.nan, 1.0625]]),
np.array([3, 1, 7]),
np.array(["foo bar baz".split()]).T
)
def test_write_tab(self):
with NamedTemporaryFile(suffix=".tab", delete=False) as f:
fname = f.name
try:
TabReader.write(fname, self.data)
with open(fname, encoding="utf-8") as f:
self.assertEqual(f.read().strip(), """
c\td\ta\tb
continuous\tstring\tx y z\tcontinuous
class\tmeta\t\t
3\tfoo\ty\t0.500
1\tbar\tz\t
7\tbaz\t\t1.06250""".strip())
finally:
os.remove(fname)
def test_roundtrip_xlsx(self):
with NamedTemporaryFile(suffix=".xlsx", delete=False) as f:
fname = f.name
try:
ExcelReader.write(fname, self.data)
data = ExcelReader(fname).read()
np.testing.assert_equal(data.X, self.data.X)
np.testing.assert_equal(data.Y, self.data.Y)
np.testing.assert_equal(data.metas, self.data.metas)
np.testing.assert_equal(data.domain, self.data.domain)
finally:
os.remove(fname)
if __name__ == "__main__":
unittest.main()
| null |
2,028 |
"""Implements a xonsh tracer."""
import importlib
import inspect
import linecache
import os
import re
import sys
import typing as tp
import xonsh.procs.pipelines as xpp
import xonsh.prompt.cwd as prompt
from xonsh.cli_utils import Annotated, Arg, ArgParserAlias
from xonsh.inspectors import find_file
from xonsh.lazyasd import LazyObject
from xonsh.lazyimps import pyghooks, pygments
from xonsh.platform import HAS_PYGMENTS
from xonsh.tools import DefaultNotGiven, normabspath, print_color, to_bool
terminal = LazyObject(
lambda: importlib.import_module("pygments.formatters.terminal"),
globals(),
"terminal",
)
class TracerType:
"""Represents a xonsh tracer object, which keeps track of all tracing
state. This is a singleton.
"""
_inst: tp.Optional["TracerType"] = None
valid_events = frozenset(["line", "call"])
def __new__(cls, *args, **kwargs):
if cls._inst is None:
cls._inst = super().__new__(cls, *args, **kwargs)
return cls._inst
def __init__(self):
self.prev_tracer = DefaultNotGiven
self.files = set()
self.usecolor = True
self.lexer = pyghooks.XonshLexer()
self.formatter = terminal.TerminalFormatter()
self._last = ("", -1) # filename, lineno tuple
def __del__(self):
for f in set(self.files):
self.stop(f)
def color_output(self, usecolor):
"""Specify whether or not the tracer output should be colored."""
# we have to use a function to set usecolor because of the way that
# lazyasd works. Namely, it cannot dispatch setattr to the target
# object without being unable to access its own __dict__. This makes
# setting an attr look like getting a function.
self.usecolor = usecolor
def start(self, filename):
"""Starts tracing a file."""
files = self.files
if len(files) == 0:
self.prev_tracer = sys.gettrace()
files.add(normabspath(filename))
sys.settrace(self.trace)
curr = inspect.currentframe()
for frame, fname, *_ in inspect.getouterframes(curr, context=0):
if normabspath(fname) in files:
frame.f_trace = self.trace
def stop(self, filename):
"""Stops tracing a file."""
filename = normabspath(filename)
self.files.discard(filename)
if len(self.files) == 0:
sys.settrace(self.prev_tracer)
curr = inspect.currentframe()
for frame, fname, *_ in inspect.getouterframes(curr, context=0):
if normabspath(fname) == filename:
frame.f_trace = self.prev_tracer
self.prev_tracer = DefaultNotGiven
def trace(self, frame, event, arg):
"""Implements a line tracing function."""
if event not in self.valid_events:
return self.trace
fname = find_file(frame)
if fname in self.files:
lineno = frame.f_lineno
curr = (fname, lineno)
if curr != self._last:
line = linecache.getline(fname, lineno).rstrip()
s = tracer_format_line(
fname,
lineno,
line,
color=self.usecolor,
lexer=self.lexer,
formatter=self.formatter,
)
print_color(s)
self._last = curr
return self.trace
def on_files(
self,
_args,
files: Annotated[tp.Iterable[str], Arg(nargs="*")] = ("__file__",),
):
"""begins tracing selected files.
Parameters
----------
_args
argv from alias parser
files
file paths to watch, use "__file__" (default) to select the current file.
"""
for f in files:
if f == "__file__":
f = _find_caller(_args)
if f is None:
continue
self.start(f)
def off_files(
self,
_args,
files: Annotated[tp.Iterable[str], Arg(nargs="*")] = ("__file__",),
):
"""removes selected files fom tracing.
Parameters
----------
files
file paths to stop watching, use ``__file__`` (default) to select the current file.
"""
for f in files:
if f == "__file__":
f = _find_caller(_args)
if f is None:
continue
self.stop(f)
def toggle_color(
self,
toggle: Annotated[bool, Arg(type=to_bool)] = False,
):
"""output color management for tracer
Parameters
----------
toggle
true/false, y/n, etc. to toggle color usage.
"""
self.color_output(toggle)
tracer = LazyObject(TracerType, globals(), "tracer")
COLORLESS_LINE = "{fname}:{lineno}:{line}"
COLOR_LINE = "{{PURPLE}}{fname}{{BLUE}}:" "{{GREEN}}{lineno}{{BLUE}}:" "{{RESET}}"
def tracer_format_line(fname, lineno, line, color=True, lexer=None, formatter=None):
"""Formats a trace line suitable for printing."""
fname = min(fname, prompt._replace_home(fname), os.path.relpath(fname), key=len)
if not color:
return COLORLESS_LINE.format(fname=fname, lineno=lineno, line=line)
cline = COLOR_LINE.format(fname=fname, lineno=lineno)
if not HAS_PYGMENTS:
return cline + line
# OK, so we have pygments
tokens = pyghooks.partial_color_tokenize(cline)
lexer = lexer or pyghooks.XonshLexer()
tokens += pygments.lex(line, lexer=lexer)
if tokens[-1][1] == "\n":
del tokens[-1]
elif tokens[-1][1].endswith("\n"):
tokens[-1] = (tokens[-1][0], tokens[-1][1].rstrip())
return tokens
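# Illustrative call (our sketch): with color disabled the formatted line is
# plain "fname:lineno:line"; the shortest of the absolute, ~-collapsed and
# relative spellings of fname is used.
#
#     tracer_format_line("script.xsh", 3, "echo hi", color=False)
#     # -> 'script.xsh:3:echo hi'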
#
# Command line interface
#
def _find_caller(args):
"""Somewhat hacky method of finding the __file__ based on the line executed."""
re_line = re.compile(r"[^;\s|&<>]+\s+" + r"\s+".join(args))
curr = inspect.currentframe()
for _, fname, lineno, _, lines, _ in inspect.getouterframes(curr, context=1)[3:]:
if lines is not None and re_line.search(lines[0]) is not None:
return fname
elif (
lineno == 1 and re_line.search(linecache.getline(fname, lineno)) is not None
):
# There is a bug in CPython such that getouterframes(curr, context=1)
            # will actually return the 2nd line in the code_context field, even
            # though the line number itself is correct. We manually fix that in
            # this branch.
return fname
else:
msg = (
"xonsh: warning: __file__ name could not be found. You may be "
"trying to trace interactively. Please pass in the file names "
"you want to trace explicitly."
)
print(msg, file=sys.stderr)
class TracerAlias(ArgParserAlias):
"""Tool for tracing xonsh code as it runs."""
def METHOD_NAME(self):
parser = self.create_parser(prog="trace")
parser.add_command(tracer.on_files, prog="on", aliases=["start", "add"])
parser.add_command(tracer.off_files, prog="off", aliases=["stop", "del", "rm"])
parser.add_command(tracer.toggle_color, prog="color", aliases=["ls"])
return parser
def __call__(self, *args, **kwargs):
spec = kwargs.get("spec")
usecolor = (
spec and (spec.captured not in xpp.STDOUT_CAPTURE_KINDS)
) and sys.stdout.isatty()
tracer.color_output(usecolor)
return super().__call__(*args, **kwargs)
tracermain = TracerAlias()
| null |
2,029 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcloudauth.endpoint import endpoint_data
class CompareFaceVerifyRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cloudauth', '2019-03-07', 'CompareFaceVerify')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ProductCode(self): # String
return self.get_body_params().get('ProductCode')
def set_ProductCode(self, ProductCode): # String
self.add_body_params('ProductCode', ProductCode)
def get_TargetCertifyId(self): # String
return self.get_body_params().get('TargetCertifyId')
def set_TargetCertifyId(self, TargetCertifyId): # String
self.add_body_params('TargetCertifyId', TargetCertifyId)
def METHOD_NAME(self): # String
return self.get_body_params().get('TargetFaceContrastPicture')
def set_TargetFaceContrastPicture(self, TargetFaceContrastPicture): # String
self.add_body_params('TargetFaceContrastPicture', TargetFaceContrastPicture)
def get_TargetOssBucketName(self): # String
return self.get_body_params().get('TargetOssBucketName')
def set_TargetOssBucketName(self, TargetOssBucketName): # String
self.add_body_params('TargetOssBucketName', TargetOssBucketName)
def get_OuterOrderNo(self): # String
return self.get_body_params().get('OuterOrderNo')
def set_OuterOrderNo(self, OuterOrderNo): # String
self.add_body_params('OuterOrderNo', OuterOrderNo)
def get_SourceFaceContrastPicture(self): # String
return self.get_body_params().get('SourceFaceContrastPicture')
def set_SourceFaceContrastPicture(self, SourceFaceContrastPicture): # String
self.add_body_params('SourceFaceContrastPicture', SourceFaceContrastPicture)
def get_SourceCertifyId(self): # String
return self.get_body_params().get('SourceCertifyId')
def set_SourceCertifyId(self, SourceCertifyId): # String
self.add_body_params('SourceCertifyId', SourceCertifyId)
def get_TargetFaceContrastPictureUrl(self): # String
return self.get_body_params().get('TargetFaceContrastPictureUrl')
def set_TargetFaceContrastPictureUrl(self, TargetFaceContrastPictureUrl): # String
self.add_body_params('TargetFaceContrastPictureUrl', TargetFaceContrastPictureUrl)
def get_SourceOssObjectName(self): # String
return self.get_body_params().get('SourceOssObjectName')
def set_SourceOssObjectName(self, SourceOssObjectName): # String
self.add_body_params('SourceOssObjectName', SourceOssObjectName)
def get_SourceOssBucketName(self): # String
return self.get_body_params().get('SourceOssBucketName')
def set_SourceOssBucketName(self, SourceOssBucketName): # String
self.add_body_params('SourceOssBucketName', SourceOssBucketName)
def get_TargetOssObjectName(self): # String
return self.get_body_params().get('TargetOssObjectName')
def set_TargetOssObjectName(self, TargetOssObjectName): # String
self.add_body_params('TargetOssObjectName', TargetOssObjectName)
def get_SceneId(self): # Long
return self.get_body_params().get('SceneId')
def set_SceneId(self, SceneId): # Long
self.add_body_params('SceneId', SceneId)
def get_SourceFaceContrastPictureUrl(self): # String
return self.get_body_params().get('SourceFaceContrastPictureUrl')
def set_SourceFaceContrastPictureUrl(self, SourceFaceContrastPictureUrl): # String
self.add_body_params('SourceFaceContrastPictureUrl', SourceFaceContrastPictureUrl)
def get_Crop(self): # String
return self.get_body_params().get('Crop')
def set_Crop(self, Crop): # String
self.add_body_params('Crop', Crop)
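# Usage sketch (values are illustrative): unlike the query-parameter requests
# elsewhere in this SDK, this request carries its fields in the POST body.
#
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = CompareFaceVerifyRequest()
#     request.set_SourceFaceContrastPicture(source_picture_b64)
#     request.set_TargetFaceContrastPicture(target_picture_b64)
#     response = client.do_action_with_exception(request)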
| null |
2,030 |
"""
Validate that our settings functions work
"""
import importlib
import sys
from unittest import mock
from ddt import ddt, data
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
import semantic_version
from search.base import MockedESTestCase
REQUIRED_SETTINGS = {
'MAILGUN_URL': 'http://fake.mailgun.url',
'MAILGUN_KEY': 'fake_mailgun_key',
'OPENSEARCH_INDEX': 'some_index',
'OPEN_DISCUSSIONS_SITE_KEY': 'mm_test',
}
@ddt
class TestSettings(MockedESTestCase):
"""Validate that settings work as expected."""
def reload_settings(self):
"""
Reload settings module with cleanup to restore it.
Returns:
dict: dictionary of the newly reloaded settings ``vars``
"""
importlib.reload(sys.modules['micromasters.settings'])
# Restore settings to original settings after test
self.addCleanup(importlib.reload, sys.modules['micromasters.settings'])
return vars(sys.modules['micromasters.settings'])
def test_s3_settings(self):
"""Verify that we enable and configure S3 with a variable"""
# Unset, we don't do S3
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_USE_S3': 'False'
}, clear=True):
settings_vars = self.reload_settings()
self.assertNotEqual(
settings_vars.get('DEFAULT_FILE_STORAGE'),
'storages.backends.s3boto3.S3Boto3Storage'
)
with self.assertRaises(ImproperlyConfigured):
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_USE_S3': 'True',
}, clear=True):
self.reload_settings()
# Verify it all works with it enabled and configured 'properly'
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_USE_S3': 'True',
'AWS_ACCESS_KEY_ID': '1',
'AWS_SECRET_ACCESS_KEY': '2',
'AWS_STORAGE_BUCKET_NAME': '3',
}, clear=True):
settings_vars = self.reload_settings()
self.assertEqual(
settings_vars.get('DEFAULT_FILE_STORAGE'),
'storages.backends.s3boto3.S3Boto3Storage'
)
def test_admin_settings(self):
"""Verify that we configure email with environment variable"""
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_ADMIN_EMAIL': ''
}, clear=True):
settings_vars = self.reload_settings()
self.assertFalse(settings_vars.get('ADMINS', False))
test_admin_email = '[email protected]'
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_ADMIN_EMAIL': test_admin_email,
}, clear=True):
settings_vars = self.reload_settings()
self.assertEqual(
(('Admins', test_admin_email),),
settings_vars['ADMINS']
)
# Manually set ADMIN to our test setting and verify e-mail
# goes where we expect
settings.ADMINS = (('Admins', test_admin_email),)
mail.mail_admins('Test', 'message')
self.assertIn(test_admin_email, mail.outbox[0].to)
def METHOD_NAME(self):
"""Verify that we can enable/disable database SSL with a var"""
# Check default state is SSL on
with mock.patch.dict('os.environ', REQUIRED_SETTINGS, clear=True):
settings_vars = self.reload_settings()
self.assertEqual(
settings_vars['DATABASES']['default']['OPTIONS'],
{'sslmode': 'require'}
)
# Check enabling the setting explicitly
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_DB_DISABLE_SSL': 'True'
}, clear=True):
settings_vars = self.reload_settings()
self.assertEqual(
settings_vars['DATABASES']['default']['OPTIONS'],
{}
)
        # Setting the flag to False should leave SSL required
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'MICROMASTERS_DB_DISABLE_SSL': 'False'
}, clear=True):
settings_vars = self.reload_settings()
self.assertEqual(
settings_vars['DATABASES']['default']['OPTIONS'],
{'sslmode': 'require'}
)
@data(*REQUIRED_SETTINGS.keys())
def test_required(self, missing_param):
"""An ImproperlyConfigured exception should be raised for each param missing here"""
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
missing_param: '',
}, clear=True), self.assertRaises(ImproperlyConfigured):
self.reload_settings()
def test_opensearch_index_pr_build(self):
"""For PR builds we will use the heroku app name instead of the given OPENSEARCH_INDEX"""
index_name = 'heroku_app_name_as_index'
with mock.patch.dict('os.environ', {
**REQUIRED_SETTINGS,
'HEROKU_APP_NAME': index_name,
'HEROKU_PARENT_APP_NAME': 'some_name',
}):
settings_vars = self.reload_settings()
assert settings_vars['OPENSEARCH_INDEX'] == index_name
@staticmethod
def test_semantic_version():
"""
        Verify that the version string is semver-compatible.
"""
semantic_version.Version(settings.VERSION)
| null |
2,031 |
from flask import current_app, request, url_for
from .const import PERMISSION_PREFIX
def app_template_filter(filter_name=""):
def wrap(f):
if not hasattr(f, "_filter"):
f._filter = filter_name
return f
return wrap
class TemplateFilters(object):
security_manager = None
def __init__(self, app, security_manager):
self.security_manager = security_manager
for attr_name in dir(self):
if hasattr(getattr(self, attr_name), "_filter"):
attr = getattr(self, attr_name)
app.jinja_env.filters[attr._filter] = attr
@app_template_filter("get_actions_on_list")
def get_actions_on_list(self, actions, modelview_name):
res_actions = dict()
for action_key in actions:
action = actions[action_key]
if self.is_item_visible(action.name, modelview_name) and action.multiple:
res_actions[action_key] = action
return res_actions
@app_template_filter("get_actions_on_show")
def get_actions_on_show(self, actions, modelview_name):
res_actions = dict()
for action_key in actions:
action = actions[action_key]
if self.is_item_visible(action.name, modelview_name) and action.single:
res_actions[action_key] = action
return res_actions
@app_template_filter("safe_url_for")
def safe_url_for(self, endpoint, **values):
try:
return url_for(endpoint, **values)
except Exception:
return None
@app_template_filter("link_order")
def link_order_filter(self, column, modelview_name):
"""
Arguments are passed like:
_oc_<VIEW_NAME>=<COL_NAME>&_od_<VIEW_NAME>='asc'|'desc'
"""
new_args = request.view_args.copy()
args = request.args.copy()
if ("_oc_" + modelview_name) in args:
args["_oc_" + modelview_name] = column
if args.get("_od_" + modelview_name) == "asc":
args["_od_" + modelview_name] = "desc"
else:
args["_od_" + modelview_name] = "asc"
else:
args["_oc_" + modelview_name] = column
args["_od_" + modelview_name] = "asc"
return url_for(
request.endpoint,
**dict(list(new_args.items()) + list(args.to_dict().items()))
)
@app_template_filter("link_page")
def link_page_filter(self, page, modelview_name):
"""
Arguments are passed like: page_<VIEW_NAME>=<PAGE_NUMBER>
"""
new_args = request.view_args.copy()
args = request.args.copy()
args["page_" + modelview_name] = page
return url_for(
request.endpoint,
**dict(list(new_args.items()) + list(args.to_dict().items()))
)
@app_template_filter("link_page_size")
def link_page_size_filter(self, page_size, modelview_name):
"""
Arguments are passed like: psize_<VIEW_NAME>=<PAGE_NUMBER>
"""
new_args = request.view_args.copy()
args = request.args.copy()
args["psize_" + modelview_name] = page_size
return url_for(
request.endpoint,
**dict(list(new_args.items()) + list(args.to_dict().items()))
)
@app_template_filter("get_link_next")
def METHOD_NAME(self, s):
return request.args.get("next")
@app_template_filter("get_link_back")
def get_link_back_filter(self, request):
return request.args.get("next") or request.referrer
# TODO improve this
@app_template_filter("set_link_filters")
def set_link_filters_filter(self, path, filters):
lnkstr = path
for flt, value in filters.get_filters_values():
if flt.is_related_view:
if "?" in lnkstr:
lnkstr = lnkstr + "&_flt_0_" + flt.column_name + "=" + str(value)
else:
lnkstr = lnkstr + "?_flt_0_" + flt.column_name + "=" + str(value)
return lnkstr
@app_template_filter("get_link_order")
def get_link_order_filter(self, column, modelview_name):
if request.args.get("_oc_" + modelview_name) == column:
if request.args.get("_od_" + modelview_name) == "asc":
return 2
else:
return 1
else:
return 0
@app_template_filter("get_attr")
def get_attr_filter(self, obj, item):
return getattr(obj, item)
@app_template_filter("is_menu_visible")
def is_menu_visible(self, item):
return self.security_manager.has_access("menu_access", item.name)
@staticmethod
def find_views_by_name(view_name):
for view in current_app.appbuilder.baseviews:
if view.__class__.__name__ == view_name:
return view
@app_template_filter("is_item_visible")
def is_item_visible(self, permission: str, item: str) -> bool:
"""
        Check if an item is visible on the template.
        This changed with the permission mapping feature;
        it is a best effort to deliver the feature
        without breaking compatibility.
permission is:
- 'can_' + <METHOD_NAME>: On normal routes
- <METHOD_NAME>: when it's an action
"""
_view = self.find_views_by_name(item)
item = _view.class_permission_name
if PERMISSION_PREFIX in permission:
method = permission.split(PERMISSION_PREFIX)[1]
else:
if hasattr(_view, "actions") and _view.actions.get(permission):
permission_name = _view.get_action_permission_name(permission)
if permission_name not in _view.base_permissions:
return False
return self.security_manager.has_access(permission_name, item)
else:
method = permission
permission_name = PERMISSION_PREFIX + _view.get_method_permission(method)
if permission_name not in _view.base_permissions:
return False
return self.security_manager.has_access(permission_name, item)
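# Registration sketch (illustrative; Flask-AppBuilder performs this wiring
# itself during app initialization): instantiating the class registers every
# method marked with @app_template_filter as a Jinja filter on the app.
#
#     filters = TemplateFilters(app, appbuilder.sm)
#     # in a template: {{ 2 | link_page(modelview_name) }}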
| null |
2,032 |
#!/usr/bin/env python3
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import logging
import os
import pathlib
import tensorflow as tf
import numpy as np
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
import dataloading.feature_spec
from dataloading.dataloader import create_input_pipelines
LOGGER = logging.getLogger("run_performance_on_triton")
def METHOD_NAME(batch_sizes, dataset_path, dataset_type, result_path, feature_spec,
total_benchmark_samples, fused_embedding):
input_data = {}
for batch_size in batch_sizes:
filename = os.path.join(result_path, str(batch_size) + ".json")
print("generating input data: ", filename)
shapes = create_input_data_hps_batch(batch_size=batch_size, dst_path=filename, dataset_path=dataset_path,
dataset_type=dataset_type, feature_spec=feature_spec,
total_benchmark_samples=total_benchmark_samples,
fused_embedding=fused_embedding)
input_data[batch_size] = (filename, shapes)
return input_data
def create_input_data_hps_batch(batch_size, dst_path, dataset_path, dataset_type, feature_spec,
total_benchmark_samples, fused_embedding):
fspec = dataloading.feature_spec.FeatureSpec.from_yaml(
os.path.join(dataset_path, feature_spec)
)
num_tables = len(fspec.get_categorical_sizes())
table_ids = list(range(num_tables))
_, dataloader = create_input_pipelines(dataset_type=dataset_type, dataset_path=dataset_path,
train_batch_size=batch_size, test_batch_size=batch_size,
table_ids=table_ids, feature_spec=feature_spec, rank=0, world_size=1)
generated = 0
batches = []
categorical_cardinalities = fspec.get_categorical_sizes()
categorical_cardinalities = np.roll(np.cumsum(np.array(categorical_cardinalities)), 1)
categorical_cardinalities[0] = 0
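    # Worked example of the offset step above: per-table cardinalities
    # [10, 20, 30] -> cumsum [10, 30, 60] -> roll [60, 10, 30] -> zero the
    # first slot -> offsets [0, 10, 30], so category j of table i maps to the
    # unique fused-embedding id offsets[i] + j.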
for batch in dataloader.op():
features, labels = batch
numerical_features, cat_features = features
cat_features = tf.concat(cat_features, axis=1).numpy().astype(np.int32)
cat_features = np.add(cat_features, categorical_cardinalities).flatten()
numerical_features = numerical_features.numpy().astype(np.float32).flatten()
batch = {
"categorical_features": cat_features.tolist(),
"numerical_features": numerical_features.tolist(),
}
batches.append(batch)
generated += batch_size
if generated >= total_benchmark_samples:
break
with open(dst_path, "w") as f:
json.dump(obj={"data": batches}, fp=f, indent=4)
shapes = [
f"categorical_features:{cat_features.shape[0]}",
f"numerical_features:{numerical_features.shape[0]}",
]
return shapes
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--result-path",
type=pathlib.Path,
required=True,
help="Path where processed data is stored.",
)
parser.add_argument(
"--fused-embedding",
action="store_true",
help="Use the fused embedding API for HPS",
)
parser.add_argument(
"--batch-sizes",
type=int,
default=[256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536],
help="List of batch sizes to test.",
nargs="*",
)
parser.add_argument(
"-v",
"--verbose",
help="Verbose logs",
action="store_true",
default=False,
)
parser.add_argument(
"--dataset_path", default=None, required=True, help="Path to dataset directory"
)
parser.add_argument(
"--feature_spec",
default="feature_spec.yaml",
help="Name of the feature spec file in the dataset directory",
)
parser.add_argument(
"--dataset_type",
default="tf_raw",
choices=["tf_raw", "synthetic", "split_tfrecords"],
help="The type of the dataset to use",
)
parser.add_argument(
"--num-benchmark-samples",
default=2**18,
type=int,
help="The type of the dataset to use",
)
args = parser.parse_args()
log_level = logging.INFO if not args.verbose else logging.DEBUG
log_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
logging.basicConfig(level=log_level, format=log_format)
input_data = METHOD_NAME(batch_sizes=args.batch_sizes, dataset_path=args.dataset_path, result_path=args.result_path,
dataset_type=args.dataset_type, feature_spec=args.feature_spec,
total_benchmark_samples=args.num_benchmark_samples,
fused_embedding=args.fused_embedding)
if __name__ == "__main__":
main()
| null |
2,033 |
"""Test the execution engines."""
import copy
import os
import amici
import benchmark_models_petab as models
import cloudpickle as pickle
import numpy as np
import pypesto
import pypesto.optimize
import pypesto.petab
from ..util import rosen_for_sensi
def test_basic():
for engine in [
pypesto.engine.SingleCoreEngine(),
pypesto.engine.MultiProcessEngine(n_procs=2),
pypesto.engine.MultiProcessEngine(n_procs=2, method="spawn"),
pypesto.engine.MultiProcessEngine(n_procs=2, method="fork"),
pypesto.engine.MultiProcessEngine(n_procs=2, method="forkserver"),
pypesto.engine.MultiThreadEngine(n_threads=4),
]:
_test_basic(engine)
def _test_basic(engine):
# set up problem
objective = rosen_for_sensi(max_sensi_order=2)['obj']
lb = 0 * np.ones((1, 2))
ub = 1 * np.ones((1, 2))
problem = pypesto.Problem(objective, lb, ub)
optimizer = pypesto.optimize.ScipyOptimizer(options={'maxiter': 10})
result = pypesto.optimize.minimize(
problem=problem,
n_starts=5,
engine=engine,
optimizer=optimizer,
progress_bar=False,
)
assert len(result.optimize_result) == 5
def test_petab():
for engine in [
pypesto.engine.SingleCoreEngine(),
pypesto.engine.MultiProcessEngine(n_procs=2),
pypesto.engine.MultiProcessEngine(n_procs=2, method="spawn"),
pypesto.engine.MultiProcessEngine(n_procs=2, method="fork"),
pypesto.engine.MultiProcessEngine(n_procs=2, method="forkserver"),
pypesto.engine.MultiThreadEngine(n_threads=4),
]:
_test_petab(engine)
def _test_petab(engine):
petab_importer = pypesto.petab.PetabImporter.from_yaml(
os.path.join(
models.MODELS_DIR, "Zheng_PNAS2012", "Zheng_PNAS2012.yaml"
)
)
objective = petab_importer.create_objective()
problem = petab_importer.create_problem(objective)
optimizer = pypesto.optimize.ScipyOptimizer(options={'maxiter': 10})
result = pypesto.optimize.minimize(
problem=problem,
n_starts=3,
engine=engine,
optimizer=optimizer,
progress_bar=False,
)
assert len(result.optimize_result) == 3
def test_deepcopy_objective():
"""Test copying objectives (needed for MultiProcessEngine)."""
petab_importer = pypesto.petab.PetabImporter.from_yaml(
os.path.join(
models.MODELS_DIR, "Zheng_PNAS2012", "Zheng_PNAS2012.yaml"
)
)
objective = petab_importer.create_objective()
objective.amici_solver.setSensitivityMethod(
amici.SensitivityMethod_adjoint
)
objective2 = copy.deepcopy(objective)
# test some properties
assert (
objective.amici_model.getParameterIds()
== objective2.amici_model.getParameterIds()
)
assert (
objective.amici_solver.getSensitivityOrder()
== objective2.amici_solver.getSensitivityOrder()
)
assert (
objective.amici_solver.getSensitivityMethod()
== objective2.amici_solver.getSensitivityMethod()
)
assert len(objective.edatas) == len(objective2.edatas)
assert objective.amici_model is not objective2.amici_model
assert objective.amici_solver is not objective2.amici_solver
assert objective.steadystate_guesses is not objective2.steadystate_guesses
def METHOD_NAME():
"""Test serializing objectives (needed for MultiThreadEngine)."""
petab_importer = pypesto.petab.PetabImporter.from_yaml(
os.path.join(
models.MODELS_DIR, "Zheng_PNAS2012", "Zheng_PNAS2012.yaml"
)
)
objective = petab_importer.create_objective()
objective.amici_solver.setSensitivityMethod(
amici.SensitivityMethod_adjoint
)
objective2 = pickle.loads(pickle.dumps(objective))
# test some properties
assert (
objective.amici_model.getParameterIds()
== objective2.amici_model.getParameterIds()
)
assert (
objective.amici_solver.getSensitivityOrder()
== objective2.amici_solver.getSensitivityOrder()
)
assert (
objective.amici_solver.getSensitivityMethod()
== objective2.amici_solver.getSensitivityMethod()
)
assert len(objective.edatas) == len(objective2.edatas)
| null |
2,034 |
##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
"""Unit test for BlindDataHolder binding"""
import os
import unittest
import IECore
class TestBlindDataHolder(unittest.TestCase):
def testConstructors(self):
"""Test BlindDataHolder constructors"""
b = IECore.BlindDataHolder()
c = IECore.CompoundData()
c["floatData"] = IECore.FloatData(3.0)
b = IECore.BlindDataHolder(c)
self.assertEqual( b.typeName(), "BlindDataHolder" )
self.assertFalse( IECore.Object.isAbstractType( "BlindDataHolder") )
def testBlindData(self):
"""Test BlindDataHolder blindData"""
b = IECore.BlindDataHolder()
b.blindData()["floatData"] = IECore.FloatData(1.0)
b.blindData()["intData"] = IECore.IntData(-5)
self.assertEqual( b.blindData()["floatData"].value, 1.0 )
self.assertEqual( b.blindData()["intData"].value, -5 )
self.assertEqual( len(b.blindData()), 2 )
def testComparison(self):
# test the empty case (where it doesn't allocate the compound data)
a = IECore.BlindDataHolder( )
b = IECore.BlindDataHolder( IECore.CompoundData() )
c = IECore.BlindDataHolder( )
c.blindData()
self.assertEqual( a, a )
        self.assertEqual( b, b )
self.assertEqual( a, b )
self.assertEqual( b, a )
self.assertEqual( a, c )
self.assertEqual( c, a )
self.assertEqual( b, c )
self.assertEqual( c, b )
c.blindData()['a'] = IECore.IntData(10)
self.assertNotEqual( a, c )
self.assertNotEqual( c, a )
self.assertNotEqual( b, c )
self.assertNotEqual( c, b )
def testLoadSave(self):
"""Test BlindDataHolder load/save"""
iface = IECore.IndexedIO.create( os.path.join( "test", "BlindDataHolder.fio" ), IECore.IndexedIO.OpenMode.Write )
# first simple test: saving with some blind data
b1 = IECore.BlindDataHolder()
b1.blindData()["floatData"] = IECore.FloatData(1.0)
b1.blindData()["intData"] = IECore.IntData(-5)
b1.save( iface, "test" )
b2 = IECore.Object.load( iface, "test" )
self.assertEqual( b1, b2 )
# should have written a "blindData" entry into the indexed io hierarchy
self.assertTrue( isinstance( iface.directory( ["test","data","BlindDataHolder", "data", "blindData"], IECore.IndexedIO.MissingBehaviour.NullIfMissing ), IECore.IndexedIO ) )
# second test: overriding with no blind data
b1 = IECore.BlindDataHolder()
b1.save( iface, "test" )
b1 = IECore.Object.load( iface, "test" )
self.assertEqual( b1, IECore.BlindDataHolder() )
# "blindData" entry should be excluded from the IndexedIO hierarchy
self.assertEqual( iface.directory( ["test","data","BlindDataHolder"], IECore.IndexedIO.MissingBehaviour.NullIfMissing ), None )
def METHOD_NAME( self ) :
b1 = IECore.BlindDataHolder()
b2 = IECore.BlindDataHolder()
self.assertEqual( b1.hash(), b2.hash() )
b2.blindData()["a"] = IECore.FloatData( 1 )
self.assertNotEqual( b1.hash(), b2.hash() )
def tearDown(self):
if os.path.isfile( os.path.join( ".", "test", "BlindDataHolder.fio" ) ) :
os.remove( os.path.join( ".", "test", "BlindDataHolder.fio" ) )
if __name__ == "__main__":
unittest.main()
| null |
2,035 |
import shutil
from dataclasses import replace
import pytest
from pharmpy.internals.fs.cwd import chdir
from pharmpy.modeling import remove_covariance_step, transform_blq
from pharmpy.tools import read_modelfit_results
from pharmpy.tools.ruvsearch.results import psn_resmod_results
from pharmpy.tools.ruvsearch.tool import _create_dataset, create_workflow, validate_input
from pharmpy.workflows import Workflow
def test_filter_dataset(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem/pheno_pd.mod')
res = read_modelfit_results(testdata / 'nonmem/pheno_pd.mod')
model = model.replace(modelfit_results=res)
cwres = _create_dataset(model, dv=2)
expected_cwres = [-1.15490, 0.95703, -0.85365, 0.42327]
assert cwres['DV'].tolist() == expected_cwres
def test_resmod_results(testdata):
res = psn_resmod_results(testdata / 'psn' / 'resmod_dir1')
assert list(res.cwres_models['dOFV']) == [
-1.31,
-3.34,
-13.91,
-18.54,
-8.03,
-4.20,
-0.25,
-1.17,
-0.00,
-0.09,
-2.53,
-3.12,
-3.60,
-25.62,
-7.66,
-0.03,
-5.53,
]
def test_resmod_results_dvid(testdata):
res = psn_resmod_results(testdata / 'psn' / 'resmod_dir2')
df = res.cwres_models
assert df['dOFV'].loc[1, '1', 'autocorrelation'] == -0.74
assert df['dOFV'].loc[1, 'sum', 'tdist'] == -35.98
def test_create_workflow():
assert isinstance(create_workflow(), Workflow)
def test_create_workflow_with_model(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
model = remove_covariance_step(model)
assert isinstance(create_workflow(model=model), Workflow)
def METHOD_NAME():
validate_input()
def test_validate_input_with_model(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
res = read_modelfit_results(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
model = model.replace(modelfit_results=res)
model = remove_covariance_step(model)
validate_input(model=model)
def test_create_dataset(load_model_for_test, testdata, tmp_path):
model = load_model_for_test(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
res = read_modelfit_results(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
model = model.replace(modelfit_results=res)
df = _create_dataset(model, dv=None)
assert len(df) == 1006
assert (df['DV'] != 0).all()
with chdir(tmp_path):
for path in (testdata / 'nonmem' / 'ruvsearch').glob('mox3.*'):
shutil.copy2(path, tmp_path)
shutil.copy2(testdata / 'nonmem' / 'ruvsearch' / 'moxo_simulated_resmod.csv', tmp_path)
shutil.copy2(testdata / 'nonmem' / 'ruvsearch' / 'mytab', tmp_path)
# Introduce 0 in CWRES to mimic rows BLQ
with open('mytab') as f:
mytab_new = f.read().replace('-2.4366E+00', '0.0000E+00')
with open('mytab', 'w') as f:
f.write(mytab_new)
model = load_model_for_test('mox3.mod')
res = read_modelfit_results('mox3.mod')
model = model.replace(modelfit_results=res)
model = transform_blq(model, method='m3', lloq=0.05)
df = _create_dataset(model, dv=None)
assert len(df) == 1005
assert (df['DV'] != 0).all()
@pytest.mark.parametrize(
('model_path', 'arguments', 'exception', 'match'),
[
(
None,
dict(groups=3.1415),
TypeError,
'Invalid `groups`',
),
(
None,
dict(groups=0),
ValueError,
'Invalid `groups`',
),
(
None,
dict(p_value='x'),
TypeError,
'Invalid `p_value`',
),
(
None,
dict(p_value=1.01),
ValueError,
'Invalid `p_value`',
),
(
None,
dict(skip='ABC'),
TypeError,
'Invalid `skip`',
),
(
None,
dict(skip=1),
TypeError,
'Invalid `skip`',
),
(
None,
dict(skip=['IIV_on_RUV', 'power', 'time', 0]),
TypeError,
'Invalid `skip`',
),
(
None,
dict(skip=['IIV_on_RUV', 'power', 'time']),
ValueError,
'Invalid `skip`',
),
(
None,
dict(model=1),
TypeError,
'Invalid `model`',
),
],
)
def test_validate_input_raises(
load_model_for_test,
testdata,
model_path,
arguments,
exception,
match,
):
model = load_model_for_test(testdata.joinpath(*model_path)) if model_path else None
kwargs = {'model': model, **arguments}
with pytest.raises(exception, match=match):
validate_input(**kwargs)
def test_validate_input_raises_modelfit_results(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem' / 'pheno.mod')
model = model.replace(modelfit_results=None)
with pytest.raises(ValueError, match="missing modelfit results"):
validate_input(model=model)
def test_validate_input_raises_cwres(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
res = read_modelfit_results(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
model = remove_covariance_step(model)
modelfit_results = replace(res, residuals=res.residuals.drop(columns=['CWRES']))
model = model.replace(modelfit_results=modelfit_results)
with pytest.raises(ValueError, match="CWRES"):
validate_input(model=model)
def test_validate_input_raises_cipredi(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
res = read_modelfit_results(testdata / 'nonmem' / 'ruvsearch' / 'mox3.mod')
model = remove_covariance_step(model)
modelfit_results = replace(res, predictions=res.predictions.drop(columns=['CIPREDI']))
model = model.replace(modelfit_results=modelfit_results)
with pytest.raises(ValueError, match="IPRED"):
validate_input(model=model)
def test_validate_input_raises_ipred(load_model_for_test, testdata):
model = load_model_for_test(testdata / 'nonmem' / 'pheno_real.mod')
res = read_modelfit_results(testdata / 'nonmem' / 'pheno_real.mod')
model = remove_covariance_step(model)
modelfit_results = replace(res, predictions=res.predictions.drop(columns=['IPRED']))
model = model.replace(modelfit_results=modelfit_results)
with pytest.raises(ValueError, match="IPRED"):
validate_input(model=model)
| null |
2,036 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdataworks_public.endpoint import endpoint_data
class UpdateDataServiceApiRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'dataworks-public', '2020-05-18', 'UpdateDataServiceApi')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ScriptDetails(self): # String
return self.get_body_params().get('ScriptDetails')
def set_ScriptDetails(self, ScriptDetails): # String
self.add_body_params('ScriptDetails', ScriptDetails)
def get_RequestMethod(self): # Integer
return self.get_body_params().get('RequestMethod')
def set_RequestMethod(self, RequestMethod): # Integer
self.add_body_params('RequestMethod', RequestMethod)
def get_ApiDescription(self): # String
return self.get_body_params().get('ApiDescription')
def set_ApiDescription(self, ApiDescription): # String
self.add_body_params('ApiDescription', ApiDescription)
def get_Timeout(self): # Integer
return self.get_body_params().get('Timeout')
def set_Timeout(self, Timeout): # Integer
self.add_body_params('Timeout', Timeout)
def get_ResourceGroupId(self): # Long
return self.get_body_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # Long
self.add_body_params('ResourceGroupId', ResourceGroupId)
def get_TenantId(self): # Long
return self.get_body_params().get('TenantId')
def set_TenantId(self, TenantId): # Long
self.add_body_params('TenantId', TenantId)
def get_Protocols(self): # String
return self.get_body_params().get('Protocols')
def set_Protocols(self, Protocols): # String
self.add_body_params('Protocols', Protocols)
def get_ProjectId(self): # Long
return self.get_body_params().get('ProjectId')
def set_ProjectId(self, ProjectId): # Long
self.add_body_params('ProjectId', ProjectId)
def get_ResponseContentType(self): # Integer
return self.get_body_params().get('ResponseContentType')
def set_ResponseContentType(self, ResponseContentType): # Integer
self.add_body_params('ResponseContentType', ResponseContentType)
def get_ApiPath(self): # String
return self.get_body_params().get('ApiPath')
def set_ApiPath(self, ApiPath): # String
self.add_body_params('ApiPath', ApiPath)
def get_WizardDetails(self): # String
return self.get_body_params().get('WizardDetails')
def set_WizardDetails(self, WizardDetails): # String
self.add_body_params('WizardDetails', WizardDetails)
def get_VisibleRange(self): # Integer
return self.get_body_params().get('VisibleRange')
def METHOD_NAME(self, VisibleRange): # Integer
self.add_body_params('VisibleRange', VisibleRange)
def get_RegistrationDetails(self): # String
return self.get_body_params().get('RegistrationDetails')
def set_RegistrationDetails(self, RegistrationDetails): # String
self.add_body_params('RegistrationDetails', RegistrationDetails)
def get_ApiId(self): # Long
return self.get_body_params().get('ApiId')
def set_ApiId(self, ApiId): # Long
self.add_body_params('ApiId', ApiId)
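# --- Illustrative usage sketch (not part of the original SDK file) ---
# Sending this request goes through aliyunsdkcore's AcsClient; the
# credentials, region, and all id values below are placeholder assumptions.
if __name__ == '__main__':
    from aliyunsdkcore.client import AcsClient
    client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    request = UpdateDataServiceApiRequest()
    request.set_TenantId(12345)    # placeholder tenant id
    request.set_ProjectId(67890)   # placeholder project id
    request.set_ApiId(111)         # placeholder API id
    request.set_ApiDescription('Updated via SDK sketch')
    print(client.do_action_with_exception(request))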
| null |
2,037 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class UpdateNetworkAclEntriesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'UpdateNetworkAclEntries','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_EgressAclEntriess(self): # RepeatList
return self.get_query_params().get('EgressAclEntries')
def set_EgressAclEntriess(self, EgressAclEntries): # RepeatList
for depth1 in range(len(EgressAclEntries)):
if EgressAclEntries[depth1].get('NetworkAclEntryId') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.NetworkAclEntryId', EgressAclEntries[depth1].get('NetworkAclEntryId'))
if EgressAclEntries[depth1].get('EntryType') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.EntryType', EgressAclEntries[depth1].get('EntryType'))
if EgressAclEntries[depth1].get('NetworkAclEntryName') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.NetworkAclEntryName', EgressAclEntries[depth1].get('NetworkAclEntryName'))
if EgressAclEntries[depth1].get('Policy') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.Policy', EgressAclEntries[depth1].get('Policy'))
if EgressAclEntries[depth1].get('Description') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.Description', EgressAclEntries[depth1].get('Description'))
if EgressAclEntries[depth1].get('Protocol') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.Protocol', EgressAclEntries[depth1].get('Protocol'))
if EgressAclEntries[depth1].get('DestinationCidrIp') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.DestinationCidrIp', EgressAclEntries[depth1].get('DestinationCidrIp'))
if EgressAclEntries[depth1].get('Port') is not None:
self.add_query_param('EgressAclEntries.' + str(depth1 + 1) + '.Port', EgressAclEntries[depth1].get('Port'))
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_NetworkAclId(self): # String
return self.get_query_params().get('NetworkAclId')
def set_NetworkAclId(self, NetworkAclId): # String
self.add_query_param('NetworkAclId', NetworkAclId)
def get_UpdateIngressAclEntries(self): # Boolean
return self.get_query_params().get('UpdateIngressAclEntries')
def set_UpdateIngressAclEntries(self, UpdateIngressAclEntries): # Boolean
self.add_query_param('UpdateIngressAclEntries', UpdateIngressAclEntries)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_UpdateEgressAclEntries(self): # Boolean
return self.get_query_params().get('UpdateEgressAclEntries')
def METHOD_NAME(self, UpdateEgressAclEntries): # Boolean
self.add_query_param('UpdateEgressAclEntries', UpdateEgressAclEntries)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_IngressAclEntriess(self): # RepeatList
return self.get_query_params().get('IngressAclEntries')
def set_IngressAclEntriess(self, IngressAclEntries): # RepeatList
for depth1 in range(len(IngressAclEntries)):
if IngressAclEntries[depth1].get('NetworkAclEntryId') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.NetworkAclEntryId', IngressAclEntries[depth1].get('NetworkAclEntryId'))
if IngressAclEntries[depth1].get('EntryType') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.EntryType', IngressAclEntries[depth1].get('EntryType'))
if IngressAclEntries[depth1].get('NetworkAclEntryName') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.NetworkAclEntryName', IngressAclEntries[depth1].get('NetworkAclEntryName'))
if IngressAclEntries[depth1].get('Policy') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.Policy', IngressAclEntries[depth1].get('Policy'))
if IngressAclEntries[depth1].get('SourceCidrIp') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.SourceCidrIp', IngressAclEntries[depth1].get('SourceCidrIp'))
if IngressAclEntries[depth1].get('Description') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.Description', IngressAclEntries[depth1].get('Description'))
if IngressAclEntries[depth1].get('Protocol') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.Protocol', IngressAclEntries[depth1].get('Protocol'))
if IngressAclEntries[depth1].get('Port') is not None:
self.add_query_param('IngressAclEntries.' + str(depth1 + 1) + '.Port', IngressAclEntries[depth1].get('Port'))
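# Illustrative sketch (not part of the original SDK file): the RepeatList
# setters above flatten a list of dicts into 1-indexed query parameters.
# The ACL id, CIDR, and port values below are placeholder assumptions.
if __name__ == '__main__':
    demo = UpdateNetworkAclEntriesRequest()
    demo.set_NetworkAclId('nacl-placeholder')
    demo.set_IngressAclEntriess([
        {'Policy': 'accept', 'Protocol': 'tcp',
         'SourceCidrIp': '10.0.0.0/24', 'Port': '22/22'},
    ])
    # Expect keys such as 'IngressAclEntries.1.Policy' and
    # 'IngressAclEntries.1.SourceCidrIp' among the query parameters.
    print(demo.get_query_params())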
| null |
2,038 |
# Copyright (c) 2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Tests for testtools.monkey."""
from testtools import TestCase
from testtools.matchers import MatchesException, Raises
from testtools.monkey import MonkeyPatcher, patch
class TestObj:
def __init__(self):
self.foo = 'foo value'
self.bar = 'bar value'
self.baz = 'baz value'
class MonkeyPatcherTest(TestCase):
"""
Tests for 'MonkeyPatcher' monkey-patching class.
"""
def setUp(self):
super(MonkeyPatcherTest, self).setUp()
self.test_object = TestObj()
self.original_object = TestObj()
self.monkey_patcher = MonkeyPatcher()
def test_empty(self):
# A monkey patcher without patches doesn't change a thing.
self.monkey_patcher.patch()
# We can't assert that all state is unchanged, but at least we can
# check our test object.
self.assertEquals(self.original_object.foo, self.test_object.foo)
self.assertEquals(self.original_object.bar, self.test_object.bar)
self.assertEquals(self.original_object.baz, self.test_object.baz)
def test_construct_with_patches(self):
# Constructing a 'MonkeyPatcher' with patches adds all of the given
# patches to the patch list.
patcher = MonkeyPatcher((self.test_object, 'foo', 'haha'),
(self.test_object, 'bar', 'hehe'))
patcher.patch()
self.assertEquals('haha', self.test_object.foo)
self.assertEquals('hehe', self.test_object.bar)
self.assertEquals(self.original_object.baz, self.test_object.baz)
def test_patch_existing(self):
# Patching an attribute that exists sets it to the value defined in the
# patch.
self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
self.monkey_patcher.patch()
self.assertEquals(self.test_object.foo, 'haha')
def test_patch_non_existing(self):
# Patching a non-existing attribute sets it to the value defined in
# the patch.
self.monkey_patcher.add_patch(self.test_object, 'doesntexist', 'value')
self.monkey_patcher.patch()
self.assertEquals(self.test_object.doesntexist, 'value')
def test_restore_non_existing(self):
# Restoring a value that didn't exist before the patch deletes the
# value.
self.monkey_patcher.add_patch(self.test_object, 'doesntexist', 'value')
self.monkey_patcher.patch()
self.monkey_patcher.restore()
marker = object()
self.assertIs(marker, getattr(self.test_object, 'doesntexist', marker))
def test_patch_already_patched(self):
# Adding a patch for an object and attribute that already have a patch
# overrides the existing patch.
self.monkey_patcher.add_patch(self.test_object, 'foo', 'blah')
self.monkey_patcher.add_patch(self.test_object, 'foo', 'BLAH')
self.monkey_patcher.patch()
self.assertEquals(self.test_object.foo, 'BLAH')
self.monkey_patcher.restore()
self.assertEquals(self.test_object.foo, self.original_object.foo)
def test_restore_twice_is_a_no_op(self):
# Restoring an already-restored monkey patch is a no-op.
self.monkey_patcher.add_patch(self.test_object, 'foo', 'blah')
self.monkey_patcher.patch()
self.monkey_patcher.restore()
self.assertEquals(self.test_object.foo, self.original_object.foo)
self.monkey_patcher.restore()
self.assertEquals(self.test_object.foo, self.original_object.foo)
def METHOD_NAME(self):
# run_with_patches runs the given callable, passing in all arguments
# and keyword arguments, and returns the return value of the callable.
log = []
def f(a, b, c=None):
log.append((a, b, c))
return 'foo'
result = self.monkey_patcher.run_with_patches(f, 1, 2, c=10)
self.assertEquals('foo', result)
self.assertEquals([(1, 2, 10)], log)
def test_repeated_run_with_patches(self):
# We can call the same function with run_with_patches more than
# once. All patches apply for each call.
def f():
return (self.test_object.foo, self.test_object.bar,
self.test_object.baz)
self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
result = self.monkey_patcher.run_with_patches(f)
self.assertEquals(
('haha', self.original_object.bar, self.original_object.baz),
result)
result = self.monkey_patcher.run_with_patches(f)
self.assertEquals(
('haha', self.original_object.bar, self.original_object.baz),
result)
def test_run_with_patches_restores(self):
# run_with_patches restores the original values after the function has
# executed.
self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
self.assertEquals(self.original_object.foo, self.test_object.foo)
self.monkey_patcher.run_with_patches(lambda: None)
self.assertEquals(self.original_object.foo, self.test_object.foo)
def test_run_with_patches_restores_on_exception(self):
# run_with_patches restores the original values even when the function
# raises an exception.
def _():
self.assertEquals(self.test_object.foo, 'haha')
self.assertEquals(self.test_object.bar, 'blahblah')
raise RuntimeError("Something went wrong!")
self.monkey_patcher.add_patch(self.test_object, 'foo', 'haha')
self.monkey_patcher.add_patch(self.test_object, 'bar', 'blahblah')
self.assertThat(lambda:self.monkey_patcher.run_with_patches(_),
Raises(MatchesException(RuntimeError("Something went wrong!"))))
self.assertEquals(self.test_object.foo, self.original_object.foo)
self.assertEquals(self.test_object.bar, self.original_object.bar)
class TestPatchHelper(TestCase):
def test_patch_patches(self):
# patch(obj, name, value) sets obj.name to value.
test_object = TestObj()
patch(test_object, 'foo', 42)
self.assertEqual(42, test_object.foo)
def test_patch_returns_cleanup(self):
# patch(obj, name, value) returns a nullary callable that restores obj
# to its original state when run.
test_object = TestObj()
original = test_object.foo
cleanup = patch(test_object, 'foo', 42)
cleanup()
self.assertEqual(original, test_object.foo)
def test_suite():
from unittest import TestLoader
return TestLoader().loadTestsFromName(__name__)
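# A minimal standalone sketch (not part of the original test module):
# patch an attribute, run a callable under the patch, and verify that the
# original value is restored afterwards.
if __name__ == '__main__':
    obj = TestObj()
    patcher = MonkeyPatcher((obj, 'foo', 'patched'))
    assert patcher.run_with_patches(lambda: obj.foo) == 'patched'
    assert obj.foo == 'foo value'  # original value restored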
| null |
2,039 |
#!/usr/bin/env python3
# Copyright (c) 2018-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the deriveaddresses rpc call."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import assert_equal, assert_raises_rpc_error
class DeriveaddressesTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def METHOD_NAME(self):
assert_raises_rpc_error(-5, "Missing checksum", self.nodes[0].deriveaddresses, "a")
descriptor = "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)#t6wfjs64"
address = "ert1qjqmxmkpmxt80xz4y3746zgt0q3u3ferrfpgxn5"
assert_equal(self.nodes[0].deriveaddresses(descriptor), [address])
descriptor = descriptor[:-9]
assert_raises_rpc_error(-5, "Missing checksum", self.nodes[0].deriveaddresses, descriptor)
descriptor_pubkey = "wpkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)#s9ga3alw"
address = "ert1qjqmxmkpmxt80xz4y3746zgt0q3u3ferrfpgxn5"
assert_equal(self.nodes[0].deriveaddresses(descriptor_pubkey), [address])
ranged_descriptor = "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)#kft60nuy"
assert_equal(self.nodes[0].deriveaddresses(ranged_descriptor, [1, 2]), ["ert1qhku5rq7jz8ulufe2y6fkcpnlvpsta7rqdpq5ny", "ert1qpgptk2gvshyl0s9lqshsmx932l9ccsv2zq7jrq"])
assert_equal(self.nodes[0].deriveaddresses(ranged_descriptor, 2), [address, "ert1qhku5rq7jz8ulufe2y6fkcpnlvpsta7rqdpq5ny", "ert1qpgptk2gvshyl0s9lqshsmx932l9ccsv2zq7jrq"])
assert_raises_rpc_error(-8, "Range should not be specified for an un-ranged descriptor", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"), [0, 2])
assert_raises_rpc_error(-8, "Range must be specified for a ranged descriptor", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"))
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), 10000000000)
assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [1000000000, 2000000000])
assert_raises_rpc_error(-8, "Range specified as [begin,end] must not have begin after end", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [2, 0])
assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [-1, 0])
combo_descriptor = descsum_create("combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)")
assert_equal(self.nodes[0].deriveaddresses(combo_descriptor), ["2dnaGtwYgBhXYQGTArxKKapi52Mkf3KTQhb", "2dnaGtwYgBhXYQGTArxKKapi52Mkf3KTQhb", address, "XY2Fo8bxL1EViXjWrZ5iZrb5thmfPvWJxw"])
hardened_without_privkey_descriptor = descsum_create("wpkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1'/1/0)")
assert_raises_rpc_error(-5, "Cannot derive script without private keys", self.nodes[0].deriveaddresses, hardened_without_privkey_descriptor)
bare_multisig_descriptor = descsum_create("multi(1,tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0,tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)")
assert_raises_rpc_error(-5, "Descriptor does not have a corresponding address", self.nodes[0].deriveaddresses, bare_multisig_descriptor)
if __name__ == '__main__':
DeriveaddressesTest().main()
| null |
2,040 |
from django.urls import reverse
from parameterized import parameterized
from creme.creme_core.auth.entity_credentials import EntityCredentials
from creme.creme_core.models import SetCredentials
from creme.reports.report_chart_registry import (
ReportPieChart,
report_chart_registry,
)
from creme.reports.views import graph as graph_views
from .base import BaseReportsTestCase, skipIfCustomReport
@skipIfCustomReport
class GraphFetchSettingsTestCase(BaseReportsTestCase):
@classmethod
def setUpClass(cls):
super(GraphFetchSettingsTestCase, cls).setUpClass()
# TODO : Use a fake registry instead.
report_chart_registry.register(
ReportPieChart(name='fakepie', label='Fake Pie')
)
def test_update_settings__missing_id(self):
self.login_as_root()
self.assertPOST404(
path=reverse('reports__update_graph_fetch_settings', args=(99999,)),
data={'chart': 'fakepie'},
)
def METHOD_NAME(self):
"""Edition on reports is needed to update the settings."""
user = self.login_as_standard(allowed_apps=['reports'])
SetCredentials.objects.create(
role=user.role,
value=EntityCredentials.VIEW, # EntityCredentials.CHANGE
set_type=SetCredentials.ESET_OWN,
)
graph = self._create_documents_rgraph(user=self.get_root_user())
self.assertEqual(graph.asc, True)
self.assertEqual(graph.chart, None)
with self.assertLogs(graph_views.logger, level='WARNING') as logs:
response = self.assertPOST200(
path=reverse('reports__update_graph_fetch_settings', args=(graph.pk,)),
data={
"sort": "DESC",
"chart": 'fakepie',
}
)
self.assertJSONEqual(response.content, {
"sort": "ASC",
"chart": None
})
self.assertEqual([
f'WARNING:creme.reports.views.graph:The ReportGraph id="{graph.id}" '
'cannot be edited, so the settings are not saved.'
], logs.output)
@parameterized.expand([
({}, 'Chart value is missing'),
({"sort": "ASC"}, 'Chart value is missing'),
({"chart": "unknown", "sort": "ASC"}, (
'Chart value must be in '
f'{[c[0] for c in report_chart_registry] + ["fakepie"]} '
'(value=unknown)'
)),
({"chart": "fakepie", "sort": "unknown"}, (
'Order value must be ASC or DESC (value=unknown)'
)),
])
def test_update_settings__invalid_argument(self, data, expected):
user = self.login_as_root_and_get()
graph = self._create_documents_rgraph(user=user)
response = self.assertPOST(
400,
path=reverse('reports__update_graph_fetch_settings', args=(graph.pk,)),
data=data,
)
self.assertEqual(response.content.decode(), expected)
def test_update_settings(self):
user = self.login_as_root_and_get()
graph = self._create_documents_rgraph(user=user)
self.assertEqual(graph.asc, True)
self.assertEqual(graph.chart, None)
response = self.assertPOST200(
path=reverse('reports__update_graph_fetch_settings', args=(graph.pk,)),
data={
"sort": "DESC",
"chart": 'fakepie',
}
)
self.assertJSONEqual(response.content, {"sort": "DESC", "chart": "fakepie"})
graph.refresh_from_db()
self.assertEqual(graph.asc, False)
self.assertEqual(graph.chart, 'fakepie')
def test_update_instance_settings__missing_id(self):
user = self.login_as_root_and_get()
self.assertPOST404(
path=reverse('reports__update_graph_fetch_settings_for_instance', args=(99999, 88888)),
data={
"chart": "fakepie",
}
)
graph = self._create_documents_rgraph(user=user)
config = self._create_graph_instance_brick(graph)
self.assertPOST404(
path=reverse(
'reports__update_graph_fetch_settings_for_instance', args=(99999, graph.pk)
),
data={
"chart": "fakepie",
}
)
self.assertPOST404(
path=reverse(
'reports__update_graph_fetch_settings_for_instance', args=(config.pk, 888888)
),
data={
"chart": "fakepie",
}
)
@parameterized.expand([
({}, 'Chart value is missing'),
({"sort": "ASC"}, 'Chart value is missing'),
({"chart": "unknown", "sort": "ASC"}, (
'Chart value must be in '
f'{[c[0] for c in report_chart_registry] + ["fakepie"]} '
'(value=unknown)'
)),
({"chart": "fakepie", "sort": "unknown"}, (
'Order value must be ASC or DESC (value=unknown)'
)),
])
def test_update_instance_settings__invalid_argument(self, data, expected):
user = self.login_as_root_and_get()
graph = self._create_documents_rgraph(user=user)
config = self._create_graph_instance_brick(graph)
response = self.assertPOST(
400,
path=reverse(
'reports__update_graph_fetch_settings_for_instance', args=(config.pk, graph.pk)
),
data=data
)
self.assertEqual(response.content.decode(), expected)
def test_update_instance_settings(self):
user = self.login_as_root_and_get()
graph = self._create_documents_rgraph(user=user)
config = self._create_graph_instance_brick(graph)
self.assertEqual(graph.asc, True)
self.assertEqual(graph.chart, None)
response = self.assertPOST200(
path=reverse(
'reports__update_graph_fetch_settings_for_instance', args=(config.pk, graph.pk,)
),
data={
"sort": "DESC",
"chart": 'fakepie',
},
)
self.assertJSONEqual(response.content, {"sort": "DESC", "chart": "fakepie"})
graph.refresh_from_db()
self.assertEqual(graph.asc, False)
self.assertEqual(graph.chart, 'fakepie')
| null |
2,041 |
# Copyright (c) ZenML GmbH 2023. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Base class for code repositories."""
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type
from uuid import UUID
from zenml.config.secret_reference_mixin import SecretReferenceMixin
from zenml.logger import get_logger
from zenml.models.code_repository_models import CodeRepositoryResponseModel
from zenml.utils import source_utils
if TYPE_CHECKING:
from zenml.code_repositories import LocalRepositoryContext
logger = get_logger(__name__)
class BaseCodeRepositoryConfig(SecretReferenceMixin, ABC):
"""Base config for code repositories."""
class BaseCodeRepository(ABC):
"""Base class for code repositories.
Code repositories are used to connect to a remote code repository and
store information about the repository, such as the URL, the owner,
the repository name, and the host. They also provide methods to
download files from the repository when a pipeline is run remotely.
"""
def __init__(
self,
METHOD_NAME: UUID,
config: Dict[str, Any],
) -> None:
"""Initializes a code repository.
Args:
id: The ID of the code repository.
config: The config of the code repository.
"""
self._id = METHOD_NAME
self._config = config
self.login()
@property
def config(self) -> "BaseCodeRepositoryConfig":
"""Config class for Code Repository.
Returns:
The config class.
"""
return BaseCodeRepositoryConfig(**self._config)
@classmethod
def from_model(
cls, model: CodeRepositoryResponseModel
) -> "BaseCodeRepository":
"""Loads a code repository from a model.
Args:
model: The CodeRepositoryResponseModel to load from.
Returns:
The loaded code repository object.
"""
class_: Type[
BaseCodeRepository
] = source_utils.load_and_validate_class(
source=model.source, expected_class=BaseCodeRepository
)
return class_(METHOD_NAME=model.METHOD_NAME, config=model.config)
@property
def METHOD_NAME(self) -> UUID:
"""ID of the code repository.
Returns:
The ID of the code repository.
"""
return self._id
@property
def requirements(self) -> Set[str]:
"""Set of PyPI requirements for the repository.
Returns:
A set of PyPI requirements for the repository.
"""
from zenml.integrations.utils import get_requirements_for_module
return set(get_requirements_for_module(self.__module__))
@abstractmethod
def login(self) -> None:
"""Logs into the code repository.
This method is called when the code repository is initialized.
It should be used to authenticate with the code repository.
Raises:
RuntimeError: If the login fails.
"""
pass
@abstractmethod
def download_files(
self, commit: str, directory: str, repo_sub_directory: Optional[str]
) -> None:
"""Downloads files from the code repository to a local directory.
Args:
commit: The commit hash to download files from.
directory: The directory to download files to.
repo_sub_directory: The subdirectory in the repository to
download files from.
Raises:
RuntimeError: If the download fails.
"""
pass
@abstractmethod
def get_local_context(
self, path: str
) -> Optional["LocalRepositoryContext"]:
"""Gets a local repository context from a path.
Args:
path: The path to the local repository.
Returns:
The local repository context object.
"""
pass
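# A minimal concrete subclass sketch (not part of the original module); the
# no-op login/download behaviour below is an assumption for illustration.
class _SketchCodeRepository(BaseCodeRepository):
    """Toy code repository that authenticates and downloads nothing."""

    def login(self) -> None:
        # A real implementation would authenticate here (e.g. with a token
        # taken from self.config); this sketch only parses the config.
        _ = self.config

    def download_files(
        self, commit: str, directory: str, repo_sub_directory: Optional[str]
    ) -> None:
        # A real implementation would fetch `commit` into `directory`.
        pass

    def get_local_context(
        self, path: str
    ) -> Optional["LocalRepositoryContext"]:
        return None
# Usage sketch: constructing it with a uuid4() and an empty config dict
# runs login() immediately, like any registered code repository.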
| null |
2,042 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkr_kvstore.endpoint import endpoint_data
class ModifyInstanceSpecRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'R-kvstore', '2015-01-01', 'ModifyInstanceSpec','redisa')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_CouponNo(self): # String
return self.get_query_params().get('CouponNo')
def set_CouponNo(self, CouponNo): # String
self.add_query_param('CouponNo', CouponNo)
def METHOD_NAME(self): # String
return self.get_query_params().get('InstanceClass')
def set_InstanceClass(self, InstanceClass): # String
self.add_query_param('InstanceClass', InstanceClass)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_EffectiveTime(self): # String
return self.get_query_params().get('EffectiveTime')
def set_EffectiveTime(self, EffectiveTime): # String
self.add_query_param('EffectiveTime', EffectiveTime)
def get_SourceBiz(self): # String
return self.get_query_params().get('SourceBiz')
def set_SourceBiz(self, SourceBiz): # String
self.add_query_param('SourceBiz', SourceBiz)
def get_BusinessInfo(self): # String
return self.get_query_params().get('BusinessInfo')
def set_BusinessInfo(self, BusinessInfo): # String
self.add_query_param('BusinessInfo', BusinessInfo)
def get_ShardCount(self): # Integer
return self.get_query_params().get('ShardCount')
def set_ShardCount(self, ShardCount): # Integer
self.add_query_param('ShardCount', ShardCount)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_MajorVersion(self): # String
return self.get_query_params().get('MajorVersion')
def set_MajorVersion(self, MajorVersion): # String
self.add_query_param('MajorVersion', MajorVersion)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_ReadOnlyCount(self): # Integer
return self.get_query_params().get('ReadOnlyCount')
def set_ReadOnlyCount(self, ReadOnlyCount): # Integer
self.add_query_param('ReadOnlyCount', ReadOnlyCount)
def get_ForceUpgrade(self): # Boolean
return self.get_query_params().get('ForceUpgrade')
def set_ForceUpgrade(self, ForceUpgrade): # Boolean
self.add_query_param('ForceUpgrade', ForceUpgrade)
def get_OrderType(self): # String
return self.get_query_params().get('OrderType')
def set_OrderType(self, OrderType): # String
self.add_query_param('OrderType', OrderType)
| null |
2,043 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkess.endpoint import endpoint_data
class EnableScalingGroupRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ess', '2014-08-28', 'EnableScalingGroup','ess')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ScalingGroupId(self): # String
return self.get_query_params().get('ScalingGroupId')
def set_ScalingGroupId(self, ScalingGroupId): # String
self.add_query_param('ScalingGroupId', ScalingGroupId)
def get_ActiveScalingConfigurationId(self): # String
return self.get_query_params().get('ActiveScalingConfigurationId')
def set_ActiveScalingConfigurationId(self, ActiveScalingConfigurationId): # String
self.add_query_param('ActiveScalingConfigurationId', ActiveScalingConfigurationId)
def get_LaunchTemplateId(self): # String
return self.get_query_params().get('LaunchTemplateId')
def set_LaunchTemplateId(self, LaunchTemplateId): # String
self.add_query_param('LaunchTemplateId', LaunchTemplateId)
def METHOD_NAME(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_LaunchTemplateOverrides(self): # RepeatList
return self.get_query_params().get('LaunchTemplateOverride')
def set_LaunchTemplateOverrides(self, LaunchTemplateOverride): # RepeatList
for depth1 in range(len(LaunchTemplateOverride)):
if LaunchTemplateOverride[depth1].get('WeightedCapacity') is not None:
self.add_query_param('LaunchTemplateOverride.' + str(depth1 + 1) + '.WeightedCapacity', LaunchTemplateOverride[depth1].get('WeightedCapacity'))
if LaunchTemplateOverride[depth1].get('InstanceType') is not None:
self.add_query_param('LaunchTemplateOverride.' + str(depth1 + 1) + '.InstanceType', LaunchTemplateOverride[depth1].get('InstanceType'))
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LaunchTemplateVersion(self): # String
return self.get_query_params().get('LaunchTemplateVersion')
def set_LaunchTemplateVersion(self, LaunchTemplateVersion): # String
self.add_query_param('LaunchTemplateVersion', LaunchTemplateVersion)
def get_InstanceIds(self): # RepeatList
return self.get_query_params().get('InstanceId')
def set_InstanceIds(self, InstanceId): # RepeatList
for depth1 in range(len(InstanceId)):
self.add_query_param('InstanceId.' + str(depth1 + 1), InstanceId[depth1])
def get_LoadBalancerWeights(self): # RepeatList
return self.get_query_params().get('LoadBalancerWeight')
def set_LoadBalancerWeights(self, LoadBalancerWeight): # RepeatList
for depth1 in range(len(LoadBalancerWeight)):
self.add_query_param('LoadBalancerWeight.' + str(depth1 + 1), LoadBalancerWeight[depth1])
| null |
2,044 |
"""Miscellaneous utilities for the HTML writer."""
import warnings
from typing import (Any, Dict, Generic, Iterable, Iterator, List, Mapping,
Optional, MutableMapping, Tuple, TypeVar, Union, Sequence)
from pydoctor import epydoc2stan
import collections.abc
from pydoctor import model
from twisted.web.template import Tag
class DocGetter:
"""L{epydoc2stan} bridge."""
def get(self, ob: model.Documentable, summary: bool = False) -> Tag:
if summary:
return epydoc2stan.format_summary(ob)
else:
return epydoc2stan.format_docstring(ob)
def get_type(self, ob: model.Documentable) -> Optional[Tag]:
return epydoc2stan.type2stan(ob)
def get_toc(self, ob: model.Documentable) -> Optional[Tag]:
return epydoc2stan.format_toc(ob)
def srclink(o: model.Documentable) -> Optional[str]:
"""
Get the object's source code URL, e.g. hosted on GitHub.
"""
return o.sourceHref
def css_class(o: model.Documentable) -> str:
"""
A short, lower case description for use as a CSS class in HTML.
Includes the kind and privacy.
"""
kind = o.kind
assert kind is not None # if kind is None, object is invisible
class_ = epydoc2stan.format_kind(kind).lower().replace(' ', '')
if o.privacyClass is model.PrivacyClass.PRIVATE:
class_ += ' private'
return class_
def overriding_subclasses(
classobj: model.Class,
name: str,
firstcall: bool = True
) -> Iterator[model.Class]:
"""
Helper function to retrieve the subclasses that override the given name from the parent class object.
"""
if not firstcall and name in classobj.contents:
yield classobj
else:
for subclass in classobj.subclasses:
if subclass.isVisible:
yield from overriding_subclasses(subclass, name, firstcall=False)
def nested_bases(classobj: model.Class) -> Iterator[Tuple[model.Class, ...]]:
"""
Helper function to retrieve the complete list of base class chains (represented by tuples) for a given Class.
A chain of classes is used to compute the member inheritance from the first element to the last element of the chain.
The first yielded chain only contains the Class itself.
Then for each of the super-classes:
- the next yielded chain contains the super class and the class itself,
- the next yielded chain contains the super-super class, the super class and the class itself, etc...
"""
_mro = classobj.mro()
for i, _ in enumerate(_mro):
yield tuple(reversed(_mro[:(i+1)]))
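# Illustrative note (not from the original module): for a class whose MRO is
# [C, B, A], nested_bases yields (C,), then (B, C), then (A, B, C) -- each
# chain is a reversed prefix of the MRO, always ending at the class itself.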
def unmasked_attrs(baselist: Sequence[model.Class]) -> Sequence[model.Documentable]:
"""
Helper function to retrieve the list of inherited children given a base class chain (as yielded by L{nested_bases}).
The returned members are inherited from the Class listed first in the chain to the Class listed last: they are not overridden in between.
"""
maybe_masking = {
o.name
for b in baselist[1:]
for o in b.contents.values()
}
return [o for o in baselist[0].contents.values()
if o.isVisible and o.name not in maybe_masking]
def objects_order(o: model.Documentable) -> Tuple[int, int, str]:
"""
Function to use as the value of standard library's L{sorted} function C{key} argument
such that the objects are sorted by: Privacy, Kind and Name.
Example::
children = sorted((o for o in ob.contents.values() if o.isVisible),
key=objects_order)
"""
return (-o.privacyClass.value, -o.kind.value if o.kind else 0, o.fullName().lower())
def class_members(cls: model.Class) -> List[Tuple[Tuple[model.Class, ...], Sequence[model.Documentable]]]:
"""
Returns the members as well as the inherited members of a class.
@returns: A list of tuples: C{(inherited_via: Tuple[model.Class, ...], attributes: Sequence[model.Documentable])}.
"""
baselists = []
for baselist in nested_bases(cls):
attrs = unmasked_attrs(baselist)
if attrs:
baselists.append((baselist, attrs))
return baselists
def inherited_members(cls: model.Class) -> List[model.Documentable]:
"""
Returns only the inherited members of a class, as a plain list.
"""
children : List[model.Documentable] = []
for inherited_via,attrs in class_members(cls):
if len(inherited_via)>1:
children.extend(attrs)
return children
def METHOD_NAME(filename: str) -> None:
"""Deprecated: can be removed once Twisted stops patching this."""
warnings.warn("pydoctor.templatewriter.util.templatefile() "
"is deprecated and returns None. It will be remove in future versions. "
"Please use the templating system.")
return None
_VT = TypeVar('_VT')
# Credits: psf/requests see https://github.com/psf/requests/blob/main/AUTHORS.rst
class CaseInsensitiveDict(MutableMapping[str, _VT], Generic[_VT]):
"""A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive::
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['aCCEPT'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
If the constructor, ``.update``, or equality comparison
operations are given keys that have equal ``.lower()``s, the
behavior is undefined.
"""
def __init__(self, data: Optional[Union[Mapping[str, _VT], Iterable[Tuple[str, _VT]]]] = None, **kwargs: Any) -> None:
self._store: Dict[str, Tuple[str, _VT]] = collections.OrderedDict()
if data is None:
data = {}
self.update(data, **kwargs)
def __setitem__(self, key: str, value: _VT) -> None:
# Use the lowercased key for lookups, but store the actual
# key alongside the value.
self._store[key.lower()] = (key, value)
def __getitem__(self, key: str) -> _VT:
return self._store[key.lower()][1]
def __delitem__(self, key: str) -> None:
del self._store[key.lower()]
def __iter__(self) -> Iterator[str]:
return (casedkey for casedkey, mappedvalue in self._store.values())
def __len__(self) -> int:
return len(self._store)
def lower_items(self) -> Iterator[Tuple[str, _VT]]:
"""Like iteritems(), but with all lowercase keys."""
return (
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other: Any) -> bool:
if isinstance(other, collections.abc.Mapping):
other = CaseInsensitiveDict(other)
# Compare insensitively
return dict(self.lower_items()) == dict(other.lower_items())
else:
return NotImplemented
# Copy is required
def copy(self) -> 'CaseInsensitiveDict[_VT]':
return CaseInsensitiveDict(self._store.values())
def __repr__(self) -> str:
return str(dict(self.items()))
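# A minimal usage sketch (not part of the original module), exercising the
# case-insensitive behaviour documented in the class docstring:
if __name__ == '__main__':
    cid: CaseInsensitiveDict[str] = CaseInsensitiveDict()
    cid['Accept'] = 'application/json'
    assert cid['aCCEPT'] == 'application/json'  # lookup ignores case
    assert list(cid) == ['Accept']              # last-set casing is kept
    cid['ACCEPT'] = 'text/html'
    assert list(cid) == ['ACCEPT']              # casing follows latest set
    print(dict(cid.lower_items()))              # {'accept': 'text/html'}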
| null |
2,045 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class CreateOTAFirmwareRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'CreateOTAFirmware')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SignMethod(self):
return self.get_query_params().get('SignMethod')
def METHOD_NAME(self,SignMethod):
self.add_query_param('SignMethod',SignMethod)
def get_MultiFiless(self):
return self.get_query_params().get('MultiFiles')
def set_MultiFiless(self, MultiFiless):
for depth1 in range(len(MultiFiless)):
if MultiFiless[depth1].get('Size') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.Size', MultiFiless[depth1].get('Size'))
if MultiFiless[depth1].get('Name') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.Name', MultiFiless[depth1].get('Name'))
if MultiFiless[depth1].get('SignValue') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.SignValue', MultiFiless[depth1].get('SignValue'))
if MultiFiless[depth1].get('FileMd5') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.FileMd5', MultiFiless[depth1].get('FileMd5'))
if MultiFiless[depth1].get('Url') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.Url', MultiFiless[depth1].get('Url'))
def get_NeedToVerify(self):
return self.get_query_params().get('NeedToVerify')
def set_NeedToVerify(self,NeedToVerify):
self.add_query_param('NeedToVerify',NeedToVerify)
def get_Type(self):
return self.get_query_params().get('Type')
def set_Type(self,Type):
self.add_query_param('Type',Type)
def get_FirmwareUrl(self):
return self.get_query_params().get('FirmwareUrl')
def set_FirmwareUrl(self,FirmwareUrl):
self.add_query_param('FirmwareUrl',FirmwareUrl)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_FirmwareDesc(self):
return self.get_query_params().get('FirmwareDesc')
def set_FirmwareDesc(self,FirmwareDesc):
self.add_query_param('FirmwareDesc',FirmwareDesc)
def get_ModuleName(self):
return self.get_query_params().get('ModuleName')
def set_ModuleName(self,ModuleName):
self.add_query_param('ModuleName',ModuleName)
def get_FirmwareSign(self):
return self.get_query_params().get('FirmwareSign')
def set_FirmwareSign(self,FirmwareSign):
self.add_query_param('FirmwareSign',FirmwareSign)
def get_FirmwareSize(self):
return self.get_query_params().get('FirmwareSize')
def set_FirmwareSize(self,FirmwareSize):
self.add_query_param('FirmwareSize',FirmwareSize)
def get_FirmwareName(self):
return self.get_query_params().get('FirmwareName')
def set_FirmwareName(self,FirmwareName):
self.add_query_param('FirmwareName',FirmwareName)
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey)
def get_SrcVersion(self):
return self.get_query_params().get('SrcVersion')
def set_SrcVersion(self,SrcVersion):
self.add_query_param('SrcVersion',SrcVersion)
def get_Udi(self):
return self.get_query_params().get('Udi')
def set_Udi(self,Udi):
self.add_query_param('Udi',Udi)
def get_DestVersion(self):
return self.get_query_params().get('DestVersion')
def set_DestVersion(self,DestVersion):
self.add_query_param('DestVersion',DestVersion)
| null |
2,046 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class ListMediaNameRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'ListMediaName')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_OriginSiteUserId(self): # String
return self.get_query_params().get('OriginSiteUserId')
def set_OriginSiteUserId(self, OriginSiteUserId): # String
self.add_query_param('OriginSiteUserId', OriginSiteUserId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MediaName(self): # String
return self.get_query_params().get('MediaName')
def set_MediaName(self, MediaName): # String
self.add_query_param('MediaName', MediaName)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_TenantId(self): # String
return self.get_query_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_query_param('TenantId', TenantId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AccessStatus(self): # String
return self.get_query_params().get('AccessStatus')
def set_AccessStatus(self, AccessStatus): # String
self.add_query_param('AccessStatus', AccessStatus)
def get_FirstScene(self): # String
return self.get_query_params().get('FirstScene')
def set_FirstScene(self, FirstScene): # String
self.add_query_param('FirstScene', FirstScene)
def get_EndCreateTime(self): # Long
return self.get_query_params().get('EndCreateTime')
def set_EndCreateTime(self, EndCreateTime): # Long
self.add_query_param('EndCreateTime', EndCreateTime)
    def get_Business(self): # String
return self.get_query_params().get('Business')
def set_Business(self, Business): # String
self.add_query_param('Business', Business)
def get_Os(self): # String
return self.get_query_params().get('Os')
def set_Os(self, Os): # String
self.add_query_param('Os', Os)
def get_MediaStatus(self): # String
return self.get_query_params().get('MediaStatus')
def set_MediaStatus(self, MediaStatus): # String
self.add_query_param('MediaStatus', MediaStatus)
def get_Environment(self): # String
return self.get_query_params().get('Environment')
def set_Environment(self, Environment): # String
self.add_query_param('Environment', Environment)
def get_StartCreateTime(self): # Long
return self.get_query_params().get('StartCreateTime')
def set_StartCreateTime(self, StartCreateTime): # Long
self.add_query_param('StartCreateTime', StartCreateTime)
def get_UserSite(self): # String
return self.get_query_params().get('UserSite')
def set_UserSite(self, UserSite): # String
self.add_query_param('UserSite', UserSite)
def get_SecondScene(self): # String
return self.get_query_params().get('SecondScene')
def set_SecondScene(self, SecondScene): # String
self.add_query_param('SecondScene', SecondScene)
def get_MediaType(self): # String
return self.get_query_params().get('MediaType')
def set_MediaType(self, MediaType): # String
self.add_query_param('MediaType', MediaType)
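# --- Hedged usage sketch (appended illustration; not part of the generated SDK file) ---
# Minimal outline of driving this request through aliyunsdkcore's AcsClient.
# The credentials, region, and MediaStatus value below are hypothetical placeholders.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = ListMediaNameRequest()
    request.set_PageNumber(1)         # Integer paging parameters
    request.set_PageSize(20)
    request.set_MediaStatus("valid")  # hypothetical status value
    # Sends the POST and returns the raw JSON response body.
    print(client.do_action_with_exception(request))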
| null |
2,047 |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import logging
import numpy as np
import reagent.core.types as rlt
import torch
import torch.nn as nn
from reagent.core.dataclasses import field
from reagent.model_utils.seq2slate_utils import Seq2SlateMode
from reagent.models.seq2slate import Seq2SlateTransformerNet
from reagent.optimizer.union import Optimizer__Union
from reagent.training.reagent_lightning_module import ReAgentLightningModule
from sklearn.metrics import (
average_precision_score,
dcg_score,
ndcg_score,
roc_auc_score,
)
logger = logging.getLogger(__name__)
class Seq2SlatePairwiseAttnTrainer(ReAgentLightningModule):
"""
Seq2Slate without a decoder learned in a supervised learning fashion (
https://arxiv.org/pdf/1904.06813.pdf )
"""
def __init__(
self,
seq2slate_net: Seq2SlateTransformerNet,
slate_size: int,
calc_cpe: bool,
policy_optimizer: Optimizer__Union = field( # noqa: B008
default_factory=Optimizer__Union.default
),
) -> None:
super().__init__()
self.seq2slate_net = seq2slate_net
self.slate_size = slate_size
self.calc_cpe = calc_cpe
self.policy_optimizer = policy_optimizer
self.log_softmax = nn.LogSoftmax(dim=1)
self.kl_loss = nn.KLDivLoss(reduction="batchmean")
def configure_optimizers(self):
optimizers = []
optimizers.append(
self.policy_optimizer.make_optimizer_scheduler(
self.seq2slate_net.parameters()
)
)
return optimizers
def train_step_gen(
self, training_batch: rlt.PreprocessedRankingInput, batch_idx: int
):
assert type(training_batch) is rlt.PreprocessedRankingInput
# shape: batch_size, tgt_seq_len
encoder_scores = self.seq2slate_net(
training_batch, mode=Seq2SlateMode.ENCODER_SCORE_MODE
).encoder_scores
assert encoder_scores.requires_grad
loss = self.kl_loss(
self.log_softmax(encoder_scores), training_batch.position_reward
)
detached_loss = loss.detach().cpu()
self.reporter.log(train_cross_entropy_loss=detached_loss)
yield loss
# pyre-ignore inconsistent override because lightning doesn't use types
    def validation_step(self, batch: rlt.PreprocessedRankingInput, batch_idx: int):
# pyre-fixme[16]: `Optional` has no attribute `shape`.
batch_size = batch.position_reward.shape[0]
# shape: batch_size, tgt_seq_len
encoder_scores = self.seq2slate_net(
batch, mode=Seq2SlateMode.ENCODER_SCORE_MODE
).encoder_scores
assert (
encoder_scores.shape[1] == batch.position_reward.shape[1] == self.slate_size
)
ce_loss = self.kl_loss(
self.log_softmax(encoder_scores), batch.position_reward
).item()
if not self.calc_cpe:
self.reporter.log(eval_cross_entropy_loss=ce_loss)
return
# shape: batch_size, tgt_seq_len
ranking_output = self.seq2slate_net(
batch, mode=Seq2SlateMode.RANK_MODE, greedy=True
)
# pyre-fixme[16]: `int` has no attribute `cpu`.
ranked_idx = (ranking_output.ranked_tgt_out_idx - 2).cpu().numpy()
# pyre-fixme[58]: `-` is not supported for operand types
# `Optional[torch.Tensor]` and `int`.
logged_idx = (batch.tgt_out_idx - 2).cpu().numpy()
score_bar = np.arange(self.slate_size, 0, -1)
batch_dcg = []
batch_ndcg = []
batch_mean_ap = []
batch_auc = []
batch_base_dcg = []
batch_base_ndcg = []
batch_base_map = []
batch_base_auc = []
for i in range(batch_size):
# no positive label in the slate or slate labels are all positive
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if (not torch.any(batch.position_reward[i].bool())) or (
torch.all(batch.position_reward[i].bool())
):
continue
ranked_scores = np.zeros(self.slate_size)
ranked_scores[ranked_idx[i]] = score_bar
truth_scores = np.zeros(self.slate_size)
truth_scores[logged_idx[i]] = batch.position_reward[i].cpu().numpy()
base_scores = np.zeros(self.slate_size)
base_scores[logged_idx[i]] = score_bar
# average_precision_score accepts 1D arrays
# dcg & ndcg accepts 2D arrays
batch_mean_ap.append(average_precision_score(truth_scores, ranked_scores))
batch_base_map.append(average_precision_score(truth_scores, base_scores))
batch_auc.append(roc_auc_score(truth_scores, ranked_scores))
batch_base_auc.append(roc_auc_score(truth_scores, base_scores))
ranked_scores = np.expand_dims(ranked_scores, axis=0)
truth_scores = np.expand_dims(truth_scores, axis=0)
base_scores = np.expand_dims(base_scores, axis=0)
batch_dcg.append(dcg_score(truth_scores, ranked_scores))
batch_ndcg.append(ndcg_score(truth_scores, ranked_scores))
batch_base_dcg.append(dcg_score(truth_scores, base_scores))
batch_base_ndcg.append(ndcg_score(truth_scores, base_scores))
self.reporter.log(
eval_cross_entropy_loss=ce_loss,
eval_dcg=torch.mean(torch.tensor(batch_dcg)).reshape(1),
eval_ndcg=torch.mean(torch.tensor(batch_ndcg)).reshape(1),
eval_mean_ap=torch.mean(torch.tensor(batch_mean_ap)).reshape(1),
eval_auc=torch.mean(torch.tensor(batch_auc)).reshape(1),
eval_base_dcg=torch.mean(torch.tensor(batch_base_dcg)).reshape(1),
eval_base_ndcg=torch.mean(torch.tensor(batch_base_ndcg)).reshape(1),
eval_base_map=torch.mean(torch.tensor(batch_base_map)).reshape(1),
eval_base_auc=torch.mean(torch.tensor(batch_base_auc)).reshape(1),
)
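# --- Hedged illustration (appended sketch; not part of the trainer) ---
# Standalone demo of the score construction above: average_precision_score
# takes 1D arrays, while dcg_score/ndcg_score expect 2D arrays, hence the
# expand_dims calls in validation_step.
if __name__ == "__main__":
    slate_size = 3
    score_bar = np.arange(slate_size, 0, -1)  # [3, 2, 1]
    ranked_scores = np.zeros(slate_size)
    ranked_scores[[2, 0, 1]] = score_bar      # model ranks item 2 first, then 0, then 1
    truth_scores = np.zeros(slate_size)
    truth_scores[0] = 1.0                     # only item 0 received a positive reward
    print(average_precision_score(truth_scores, ranked_scores))       # 1D inputs
    print(ndcg_score(truth_scores[None, :], ranked_scores[None, :]))  # 2D inputs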
| null |
2,048 |
# Drakkar-Software OctoBot-Tentacles
# Copyright (c) Drakkar-Software, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import threading
# comment imports to remove twitter from dependencies when tentacle is disabled
# import twitter
import octobot_services.channel as services_channel
import octobot_services.constants as services_constants
import octobot_services.service_feeds as service_feeds
import tentacles.Services.Services_bases as Services_bases
# disable inheritance to disable tentacle visibility. Disabled as starting from feb 9 2023, API is now paid only
# class TwitterServiceFeedChannel(services_channel.AbstractServiceFeedChannel):
class TwitterServiceFeedChannel:
pass
# disable inheritance to disable tentacle visibility. Disabled as starting from feb 9 2023, API is now paid only
# class TwitterServiceFeed(service_feeds.AbstractServiceFeed, threading.Thread):
class TwitterServiceFeed:
FEED_CHANNEL = TwitterServiceFeedChannel
REQUIRED_SERVICES = [Services_bases.TwitterService]
def __init__(self, config, main_async_loop, bot_id):
super().__init__(config, main_async_loop, bot_id)
threading.Thread.__init__(self, name=self.get_name())
self.user_ids = []
self.hashtags = []
self.counter = 0
async def _inner_start(self) -> bool:
threading.Thread.start(self)
return True
# merge new config into existing config
def update_feed_config(self, config):
if services_constants.CONFIG_TWITTERS_ACCOUNTS in self.feed_config:
self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS] = {
**self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS],
**config[services_constants.CONFIG_TWITTERS_ACCOUNTS]}
else:
self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS] = config[
services_constants.CONFIG_TWITTERS_ACCOUNTS]
if services_constants.CONFIG_TWITTERS_HASHTAGS in self.feed_config:
self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS] = {
**self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS],
**config[services_constants.CONFIG_TWITTERS_HASHTAGS]}
else:
self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS] = config[
services_constants.CONFIG_TWITTERS_HASHTAGS]
    def _init_user_ids(self):
tempo_added_accounts = []
for symbol in self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS]:
for account in self.feed_config[services_constants.CONFIG_TWITTERS_ACCOUNTS][symbol]:
if account not in tempo_added_accounts:
tempo_added_accounts.append(account)
try:
self.user_ids.append(str(self.services[0].get_user_id(account)))
except twitter.TwitterError as e:
self.logger.error(account + " : " + str(e))
def _init_hashtags(self):
for symbol in self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS]:
for hashtag in self.feed_config[services_constants.CONFIG_TWITTERS_HASHTAGS][symbol]:
if hashtag not in self.hashtags:
self.hashtags.append(hashtag)
def _initialize(self):
if not self.user_ids:
            self._init_user_ids()
if not self.hashtags:
self._init_hashtags()
def _something_to_watch(self):
return (services_constants.CONFIG_TWITTERS_HASHTAGS in self.feed_config and self.feed_config[
services_constants.CONFIG_TWITTERS_HASHTAGS]) \
or (services_constants.CONFIG_TWITTERS_ACCOUNTS in self.feed_config and self.feed_config[
services_constants.CONFIG_TWITTERS_ACCOUNTS])
async def _start_listener(self):
for tweet in self.services[0].get_endpoint().GetStreamFilter(follow=self.user_ids,
track=self.hashtags,
stall_warnings=True):
self.counter += 1
string_tweet = self.services[0].get_tweet_text(tweet)
if string_tweet:
tweet_desc = str(tweet).lower()
self._notify_consumers(
{
services_constants.FEED_METADATA: tweet_desc,
services_constants.CONFIG_TWEET: tweet,
services_constants.CONFIG_TWEET_DESCRIPTION: string_tweet.lower()
}
)
async def _start_service_feed(self):
while not self.should_stop:
try:
await self._start_listener()
except twitter.error.TwitterError as e:
self.logger.exception(e, True, f"Error when receiving Twitter feed: {e.message} ({e})")
self.should_stop = True
except Exception as e:
self.logger.exception(e, True, f"Error when receiving Twitter feed: ({e})")
self.should_stop = True
return False
| null |
2,049 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdateTaskDetailRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'devops-rdc', '2020-03-03', 'UpdateTaskDetail')
self.set_method('POST')
def get_Note(self): # String
return self.get_body_params().get('Note')
def set_Note(self, Note): # String
self.add_body_params('Note', Note)
def get_ExecutorId(self): # String
return self.get_body_params().get('ExecutorId')
def set_ExecutorId(self, ExecutorId): # String
self.add_body_params('ExecutorId', ExecutorId)
def get_StartDate(self): # String
return self.get_body_params().get('StartDate')
def set_StartDate(self, StartDate): # String
self.add_body_params('StartDate', StartDate)
def get_DelInvolvers(self): # String
return self.get_body_params().get('DelInvolvers')
def set_DelInvolvers(self, DelInvolvers): # String
self.add_body_params('DelInvolvers', DelInvolvers)
def get_Content(self): # String
return self.get_body_params().get('Content')
def set_Content(self, Content): # String
self.add_body_params('Content', Content)
def get_SprintId(self): # String
return self.get_body_params().get('SprintId')
def set_SprintId(self, SprintId): # String
self.add_body_params('SprintId', SprintId)
def get_CustomFieldId(self): # String
return self.get_body_params().get('CustomFieldId')
def set_CustomFieldId(self, CustomFieldId): # String
self.add_body_params('CustomFieldId', CustomFieldId)
    def get_ProjectId(self): # String
return self.get_body_params().get('ProjectId')
def set_ProjectId(self, ProjectId): # String
self.add_body_params('ProjectId', ProjectId)
def get_TaskId(self): # String
return self.get_body_params().get('TaskId')
def set_TaskId(self, TaskId): # String
self.add_body_params('TaskId', TaskId)
def get_TaskFlowStatusId(self): # String
return self.get_body_params().get('TaskFlowStatusId')
def set_TaskFlowStatusId(self, TaskFlowStatusId): # String
self.add_body_params('TaskFlowStatusId', TaskFlowStatusId)
def get_TagIds(self): # String
return self.get_body_params().get('TagIds')
def set_TagIds(self, TagIds): # String
self.add_body_params('TagIds', TagIds)
def get_AddInvolvers(self): # String
return self.get_body_params().get('AddInvolvers')
def set_AddInvolvers(self, AddInvolvers): # String
self.add_body_params('AddInvolvers', AddInvolvers)
def get_Priority(self): # Long
return self.get_body_params().get('Priority')
def set_Priority(self, Priority): # Long
self.add_body_params('Priority', Priority)
def get_OrgId(self): # String
return self.get_body_params().get('OrgId')
def set_OrgId(self, OrgId): # String
self.add_body_params('OrgId', OrgId)
def get_DueDate(self): # String
return self.get_body_params().get('DueDate')
def set_DueDate(self, DueDate): # String
self.add_body_params('DueDate', DueDate)
def get_WorkTimes(self): # Long
return self.get_body_params().get('WorkTimes')
def set_WorkTimes(self, WorkTimes): # Long
self.add_body_params('WorkTimes', WorkTimes)
def get_StoryPoint(self): # String
return self.get_body_params().get('StoryPoint')
def set_StoryPoint(self, StoryPoint): # String
self.add_body_params('StoryPoint', StoryPoint)
def get_CustomFieldValues(self): # String
return self.get_body_params().get('CustomFieldValues')
def set_CustomFieldValues(self, CustomFieldValues): # String
self.add_body_params('CustomFieldValues', CustomFieldValues)
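# --- Hedged usage sketch (appended illustration; not part of the generated SDK file) ---
# Unlike the query-parameter requests elsewhere in this SDK, this class writes
# every setter into the POST body (get_body_params). All ids below are hypothetical.
if __name__ == "__main__":
    request = UpdateTaskDetailRequest()
    request.set_OrgId("<org-id>")
    request.set_ProjectId("<project-id>")
    request.set_TaskId("<task-id>")
    request.set_Content("Updated task title")
    print(request.get_body_params())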
| null |
2,050 |
# -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Simon Dodsley <[email protected]>,2017
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
HAS_PURESTORAGE = True
try:
from purestorage import purestorage
except ImportError:
HAS_PURESTORAGE = False
HAS_PYPURECLIENT = True
try:
from pypureclient import flasharray
except ImportError:
HAS_PYPURECLIENT = False
from os import environ
import platform
VERSION = 1.4
USER_AGENT_BASE = "Ansible"
def get_system(module):
"""Return System Object or Fail"""
user_agent = "%(base)s %(class)s/%(version)s (%(platform)s)" % {
"base": USER_AGENT_BASE,
"class": __name__,
"version": VERSION,
"platform": platform.platform(),
}
array_name = module.params["fa_url"]
api = module.params["api_token"]
if HAS_PURESTORAGE:
if array_name and api:
system = purestorage.FlashArray(
array_name, api_token=api, user_agent=user_agent, verify_https=False
)
elif environ.get("PUREFA_URL") and environ.get("PUREFA_API"):
system = purestorage.FlashArray(
environ.get("PUREFA_URL"),
api_token=(environ.get("PUREFA_API")),
user_agent=user_agent,
verify_https=False,
)
else:
module.fail_json(
msg="You must set PUREFA_URL and PUREFA_API environment variables "
"or the fa_url and api_token module arguments"
)
try:
system.get()
except Exception:
module.fail_json(
msg="Pure Storage FlashArray authentication failed. Check your credentials"
)
else:
module.fail_json(msg="purestorage SDK is not installed.")
return system
def get_array(module):
"""Return System Object or Fail"""
user_agent = "%(base)s %(class)s/%(version)s (%(platform)s)" % {
"base": USER_AGENT_BASE,
"class": __name__,
"version": VERSION,
"platform": platform.platform(),
}
array_name = module.params["fa_url"]
api = module.params["api_token"]
if HAS_PYPURECLIENT:
if array_name and api:
system = flasharray.Client(
target=array_name,
api_token=api,
user_agent=user_agent,
)
elif environ.get("PUREFA_URL") and environ.get("PUREFA_API"):
system = flasharray.Client(
target=(environ.get("PUREFA_URL")),
api_token=(environ.get("PUREFA_API")),
user_agent=user_agent,
)
else:
module.fail_json(
msg="You must set PUREFA_URL and PUREFA_API environment variables "
"or the fa_url and api_token module arguments"
)
try:
system.get_hardware()
except Exception:
module.fail_json(
msg="Pure Storage FlashArray authentication failed. Check your credentials"
)
else:
module.fail_json(msg="py-pure-client and/or requests are not installed.")
return system
def purefa_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
fa_url=dict(),
api_token=dict(no_log=True),
)
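# --- Hedged usage sketch (appended illustration; not a shipped module) ---
# Skeleton of how an Ansible module would consume these helpers: build the
# shared argument spec, then obtain either the 1.x (get_system) or REST 2.x
# (get_array) client. The module body is a hypothetical outline.
if __name__ == "__main__":
    from ansible.module_utils.basic import AnsibleModule

    module = AnsibleModule(argument_spec=purefa_argument_spec(),
                           supports_check_mode=True)
    array = get_array(module)  # fails the module cleanly if auth or the SDK is missing
    module.exit_json(changed=False)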
| null |
2,051 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class CopySnapshotRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'CopySnapshot','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_SnapshotId(self): # String
return self.get_query_params().get('SnapshotId')
def set_SnapshotId(self, SnapshotId): # String
self.add_query_param('SnapshotId', SnapshotId)
def get_DestinationRegionId(self): # String
return self.get_query_params().get('DestinationRegionId')
def set_DestinationRegionId(self, DestinationRegionId): # String
self.add_query_param('DestinationRegionId', DestinationRegionId)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_Arns(self): # RepeatList
return self.get_query_params().get('Arn')
def set_Arns(self, Arn): # RepeatList
for depth1 in range(len(Arn)):
if Arn[depth1].get('RoleType') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.RoleType', Arn[depth1].get('RoleType'))
if Arn[depth1].get('Rolearn') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.Rolearn', Arn[depth1].get('Rolearn'))
if Arn[depth1].get('AssumeRoleFor') is not None:
self.add_query_param('Arn.' + str(depth1 + 1) + '.AssumeRoleFor', Arn[depth1].get('AssumeRoleFor'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_DestinationSnapshotName(self): # String
return self.get_query_params().get('DestinationSnapshotName')
def set_DestinationSnapshotName(self, DestinationSnapshotName): # String
self.add_query_param('DestinationSnapshotName', DestinationSnapshotName)
def get_DestinationSnapshotDescription(self): # String
return self.get_query_params().get('DestinationSnapshotDescription')
def set_DestinationSnapshotDescription(self, DestinationSnapshotDescription): # String
self.add_query_param('DestinationSnapshotDescription', DestinationSnapshotDescription)
def get_Encrypted(self): # Boolean
return self.get_query_params().get('Encrypted')
def set_Encrypted(self, Encrypted): # Boolean
self.add_query_param('Encrypted', Encrypted)
def get_RetentionDays(self): # Integer
return self.get_query_params().get('RetentionDays')
def set_RetentionDays(self, RetentionDays): # Integer
self.add_query_param('RetentionDays', RetentionDays)
def get_KMSKeyId(self): # String
return self.get_query_params().get('KMSKeyId')
    def set_KMSKeyId(self, KMSKeyId): # String
self.add_query_param('KMSKeyId', KMSKeyId)
def get_DestinationStorageLocationArn(self): # String
return self.get_query_params().get('DestinationStorageLocationArn')
def set_DestinationStorageLocationArn(self, DestinationStorageLocationArn): # String
self.add_query_param('DestinationStorageLocationArn', DestinationStorageLocationArn)
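# --- Hedged usage sketch (appended illustration; not part of the generated SDK file) ---
# Shows how the RepeatList setter above flattens a list of dicts into indexed
# query parameters (Tag.1.Key, Tag.1.Value, Tag.2.Key, ...). Values are hypothetical.
if __name__ == "__main__":
    request = CopySnapshotRequest()
    request.set_SnapshotId("s-example")
    request.set_DestinationRegionId("cn-shanghai")
    request.set_Tags([{"Key": "env", "Value": "dev"},
                      {"Key": "team", "Value": "infra"}])
    print(request.get_query_params())  # includes Tag.1.Key / Tag.1.Value / Tag.2.*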
| null |
2,052 |
##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import unittest
import threading
import time
import weakref
import IECore
class TestMessageHandler( unittest.TestCase ) :
    def testAbbreviation( self ) :
self.assertEqual( IECore.Msg, IECore.MessageHandler )
self.assertTrue( IECore.Msg is IECore.MessageHandler )
def testStack( self ) :
for i in range( 1, 10 ) :
m = IECore.NullMessageHandler()
with m :
self.assertTrue( m.isSame( IECore.MessageHandler.currentHandler() ) )
m1 = IECore.NullMessageHandler()
m2 = IECore.NullMessageHandler()
self.assertTrue( IECore.MessageHandler.currentHandler().isSame( IECore.MessageHandler.getDefaultHandler() ) )
with m1 :
self.assertTrue( IECore.MessageHandler.currentHandler().isSame( m1 ) )
with m2 :
self.assertTrue( IECore.MessageHandler.currentHandler().isSame( m2 ) )
self.assertTrue( IECore.MessageHandler.currentHandler().isSame( m1 ) )
self.assertTrue( IECore.MessageHandler.currentHandler().isSame( IECore.MessageHandler.getDefaultHandler() ) )
def testLevelStringConversion( self ) :
ll = [
(IECore.MessageHandler.Level.Error, "ERROR"),
(IECore.MessageHandler.Level.Warning, "WARNING"),
(IECore.MessageHandler.Level.Info, "INFO"),
(IECore.MessageHandler.Level.Debug, "DEBUG"),
(IECore.MessageHandler.Level.Invalid, "INVALID"),
]
for l, s in ll :
self.assertEqual( IECore.MessageHandler.levelAsString( l ), s )
self.assertEqual( IECore.MessageHandler.stringAsLevel( s ), l )
self.assertEqual( IECore.MessageHandler.stringAsLevel( s.lower() ), l )
def testOutput( self ) :
with IECore.NullMessageHandler() :
IECore.MessageHandler.output( IECore.Msg.Level.Debug, "message handler test", "ignore me" )
IECore.MessageHandler.output( IECore.Msg.Level.Info, "message handler test", "and me" )
IECore.MessageHandler.output( IECore.Msg.Level.Warning, "message handler test", "and me" )
IECore.MessageHandler.output( IECore.Msg.Level.Error, "message handler test", "and me" )
IECore.msg( IECore.Msg.Level.Error, "message handler test", "and me" )
def testOStreamHandler( self ) :
IECore.OStreamMessageHandler.cErrHandler()
IECore.OStreamMessageHandler.cOutHandler()
def testCompoundHandler( self ) :
h = IECore.CompoundMessageHandler()
h.addHandler( IECore.OStreamMessageHandler.cErrHandler() )
h.addHandler( IECore.OStreamMessageHandler.cOutHandler() )
h.removeHandler( IECore.OStreamMessageHandler.cErrHandler() )
h.removeHandler( IECore.OStreamMessageHandler.cOutHandler() )
def testLevelFilteredMessageHandler( self ):
with IECore.LevelFilteredMessageHandler( IECore.NullMessageHandler(), IECore.Msg.Level.Info ) :
IECore.MessageHandler.output( IECore.Msg.Level.Debug, "message handler test", "ignore me" )
IECore.MessageHandler.output( IECore.Msg.Level.Info, "message handler test", "and me" )
IECore.MessageHandler.output( IECore.Msg.Level.Warning, "message handler test", "and me" )
IECore.MessageHandler.output( IECore.Msg.Level.Error, "message handler test", "and me" )
class Derived( IECore.MessageHandler ):
def __init__( self ):
IECore.MessageHandler.__init__( self )
self.lastMessage = IECore.StringData("")
self.lastContext = IECore.StringData("")
self.lastLevel = IECore.IntData(0)
def handle( self, level, context, msg ):
self.lastLevel.value = level
self.lastContext.value = context
self.lastMessage.value = msg
def testSubclassing( self ):
myHandler = self.Derived()
with myHandler :
IECore.MessageHandler.output( IECore.Msg.Level.Info, "context", "message" )
self.assertEqual( myHandler.lastLevel.value, IECore.Msg.Level.Info )
self.assertEqual( myHandler.lastContext.value, "context" )
self.assertEqual( myHandler.lastMessage.value, "message" )
def testContextManager( self ) :
currentHandler = IECore.MessageHandler.currentHandler()
myHandler = self.Derived()
with myHandler :
IECore.MessageHandler.output( IECore.Msg.Level.Info, "context", "message" )
self.assertTrue( currentHandler.isSame( IECore.MessageHandler.currentHandler() ) )
self.assertEqual( myHandler.lastLevel.value, IECore.Msg.Level.Info )
self.assertEqual( myHandler.lastContext.value, "context" )
self.assertEqual( myHandler.lastMessage.value, "message" )
def testIsRefCounted( self ) :
self.assertTrue( issubclass( IECore.MessageHandler, IECore.RefCounted ) )
def testDefaultHandler( self ) :
self.assertTrue( isinstance( IECore.MessageHandler.currentHandler(), IECore.LevelFilteredMessageHandler ) )
def testSetLogLevel( self ) :
oldLevel = IECore.MessageHandler.currentHandler().getLevel()
if oldLevel==IECore.MessageHandler.Level.Info :
newLevel = IECore.MessageHandler.Level.Warning
else :
newLevel = IECore.MessageHandler.Level.Info
IECore.setLogLevel( newLevel )
self.assertEqual( IECore.MessageHandler.currentHandler().getLevel(), newLevel )
IECore.setLogLevel( oldLevel )
self.assertEqual( IECore.MessageHandler.currentHandler().getLevel(), oldLevel )
def testContextManagerReturnValue( self ) :
mh = self.Derived()
with mh as mh2 :
pass
self.assertTrue( mh is mh2 )
def testThreading( self ) :
def f( handler ) :
with handler :
for i in range( 0, 100 ) :
IECore.msg( IECore.Msg.Level.Info, "test", str( i ) )
time.sleep( 0.0001 ) # encourage python to switch threads
handlers = []
threads = []
for i in range( 0, 100 ) :
handler = IECore.CapturingMessageHandler()
thread = threading.Thread( target = f, args = [ handler ] )
threads.append( thread )
handlers.append( handler )
thread.start()
for thread in threads :
thread.join()
for handler in handlers :
self.assertEqual( len( handler.messages ), 100 )
for i, m in enumerate( handler.messages ) :
self.assertEqual( str( i ), m.message )
def testLifetime( self ) :
m = IECore.NullMessageHandler()
w = weakref.ref( m )
with m :
pass
del m
self.assertEqual( w(), None )
if __name__ == "__main__":
unittest.main()
| null |
2,053 |
# Copyright (c) 2015 - 2023, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
''' Describes the best known configuration for Quantum Espresso.
'''
import os
import glob
import distutils.dir_util
from .. import apps
# Valid pool counts depend on how many k-points are in the input problem.
# There must be at least enough pools to distribute the k-points across them.
POOL_COUNTS_BY_NAME = {
'qef-benchmarks/AUSURF112': 2, # 2 k-points
'es-benchmarks/Si63Ge-scf': 4, # 4 k-points
'es-benchmarks/Si63Ge-vc-relax': 10, # 10 k-points
'es-benchmarks/CsI_3264nmm.cif/12': 293, # 293 k-points
'es-benchmarks/CsI_3264nmm.cif/27': 89, # 89 k-points
'es-benchmarks/CsI_3264nmm.cif/48': 220, # 220 k-points
'es-benchmarks/ZrSi_6542mnm.cif/6': 352, # 352 k-points
'es-benchmarks/ZrSi_6542mnm.cif/12': 190, # 190 k-points
'es-benchmarks/ZrSi_6542mnm.cif/27': 92, # 92 k-points
'es-benchmarks/ZrSi_6542mnm.cif/36': 76, # 76 k-points
'es-benchmarks/Na2O_4242nmnm.cif/8': 1756, # 1756 k-points
'es-benchmarks/Fe_Graphene-scf': 2, # 1 k-point. DGEMM phase fails from bad inputs with pools=1
}
def setup_run_args(parser):
""" Add common arguments for all run scripts:
--benchmark-name
"""
parser.add_argument('--benchmark-name',
help='Specify which input to give to Quantum Espresso.',
choices=list(POOL_COUNTS_BY_NAME))
def create_appconf(mach, args):
return QuantumEspressoAppConf(args.node_count)
class QuantumEspressoAppConf(apps.AppConf):
@staticmethod
def name():
return 'qe'
def __init__(self, node_count, input_name=None):
benchmark_dir = os.path.dirname(os.path.abspath(__file__))
self._bin_path = os.path.join(benchmark_dir, 'q-e-qe-6.6', 'bin')
self._node_count = node_count
self._ranks_per_node = 20 if node_count == 1 else 10
self._cpus_per_rank = 2 if node_count == 1 else 4
if input_name is None:
input_name = 'qef-benchmarks/AUSURF112'
self._input_name = input_name.replace('/', '-')
self._input_dir = os.path.join(benchmark_dir, input_name)
self._pool_count = POOL_COUNTS_BY_NAME[input_name]
total_ranks = self._node_count * self._ranks_per_node
self._ranks_per_pool = total_ranks // self._pool_count
while self._ranks_per_pool == 0 and self._cpus_per_rank > 1:
            print('Warning: Problem {} cannot spread {} ranks across {} '
                  'pools. Reducing threads per rank and increasing ranks '
                  'per node.'.format(input_name, total_ranks, self._pool_count))
self._ranks_per_node = self._ranks_per_node * 2
self._cpus_per_rank = self._cpus_per_rank // 2
total_ranks = self._node_count * self._ranks_per_node
self._ranks_per_pool = total_ranks // self._pool_count
if self._ranks_per_pool == 0:
raise ValueError('Problem {} is too large for {} ranks. Need at least {} ranks.'.format(
                input_name, total_ranks, self._pool_count))
self._thread_group_count = total_ranks // (self._pool_count * 2)
if self._thread_group_count % 2 == 0:
            # We prefer thread group count == ranks / (2*pools), but it needs
            # to be possible to distribute across the thread groups, so only
            # do the final division by 2 if the result is a whole number.
self._thread_group_count = self._thread_group_count // 2
if self._thread_group_count == 0:
self._thread_group_count = self._ranks_per_node
def get_rank_per_node(self):
return self._ranks_per_node
def get_cpu_per_rank(self):
return self._cpus_per_rank
    def trial_setup(self, run_id, output_dir):
# Unlike shutil, this copies the contents of the source dir, without
# the source dir itself
distutils.dir_util.copy_tree(self._input_dir, output_dir)
input_files = glob.glob(os.path.join(output_dir, '*.in'))
if len(input_files) != 1:
            raise ValueError('Expected exactly 1 *.in file present in {}. '
                             'Discovered {} files'.format(
                                 self._input_dir, len(input_files)))
for input_file in input_files:
os.rename(input_file, os.path.join(output_dir, self._input_name))
def get_bash_exec_path(self):
return os.path.join(self._bin_path, 'pw.x')
def get_bash_exec_args(self):
# See https://xconfigure.readthedocs.io/en/latest/qe/ for config details
return ['-i', self._input_name,
'-npool', str(self._pool_count),
# QE will internally round ndiag down to a square
'-ndiag', str(self._ranks_per_pool),
'-ntg', str(self._thread_group_count)]
def get_custom_geopm_args(self):
return ['--geopm-ctl=application',
'--geopm-hyperthreads-disable',
]
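# --- Hedged worked example (comments only) ---
# For the default AUSURF112 input (2 pools) on 2 nodes, the arithmetic above gives:
#   ranks_per_node = 10, cpus_per_rank = 4, total_ranks = 20
#   ranks_per_pool = 20 // 2 = 10 (so -ndiag 10)
#   thread_group_count = 20 // (2 * 2) = 5; 5 is odd, so it is not halved (-ntg 5)
# yielding: pw.x -i qef-benchmarks-AUSURF112 -npool 2 -ndiag 10 -ntg 5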
| null |
2,054 |
#!/usr/bin/env python3
"""Create Release note from Github Issues and Pull Requests for a given version
Example:
$ python3 bin/release/get_release_note.py 1.7.0
"""
import datetime
import json
import sys
from typing import List, Optional
import requests
from bs4 import BeautifulSoup
hurl_repo_url = "https://github.com/Orange-OpenSource/hurl"
class Pull:
    def __init__(
        self,
        url: str,
        description: str,
        tags: Optional[List[str]] = None,
        issues: Optional[List[int]] = None,
    ):
        # Avoid mutable default arguments: a shared default list would leak
        # tags and issue numbers between Pull instances.
        if tags is None:
            tags = []
        if issues is None:
            issues = []
self.url = url
self.description = description
self.tags = tags
self.issues = issues
def __repr__(self):
return 'Pull("%s", "%s","%s", %s)' % (
self.url,
self.description,
str(self.tags),
str(self.issues),
)
def __eq__(self, other):
"""Overrides the default implementation"""
if isinstance(other, Pull):
if self.url != other.url:
return False
if self.description != other.description:
return False
if self.tags != other.tags:
return False
if self.issues != other.issues:
return False
return True
return False
class Issue:
def __init__(self, number: int, tags: List[str], author: str, pulls: List[Pull]):
self.number = number
self.tags = tags
self.author = author
self.pulls = pulls
def __repr__(self):
return (
'Issue(\n number=%s,\n tag=["%s"],\n author="%s",\n pulls=[%s]\n)'
% (
self.number,
",".join(['"%s"' % t for t in self.tags]),
self.author,
",".join([str(p) for p in self.pulls]),
)
)
def release_note(milestone: str) -> str:
"""return markdown release note for the given milestone"""
date = datetime.datetime.now()
milestone_number = get_milestone(milestone)
issues = get_issues(milestone_number)
pulls = pulls_from_issues(issues)
authors = [
author
        for author in get_authors(issues)
if author not in ["jcamiel", "lepapareil", "fabricereix"]
]
return generate_md(milestone, date, pulls, authors)
def pulls_from_issues(issues: List[Issue]) -> List[Pull]:
"""return list of pulls from list of issues"""
pulls: dict[str, Pull] = {}
for issue in issues:
for pull in issue.pulls:
if pull.url in pulls:
saved_pull = pulls[pull.url]
for tag in issue.tags:
if tag not in saved_pull.tags:
saved_pull.tags.append(tag)
saved_pull.issues.append(issue.number)
else:
if pull.url.startswith("/Orange-OpenSource/hurl"):
pull.tags = issue.tags
pull.issues.append(issue.number)
pulls[pull.url] = pull
return list(pulls.values())
def get_issues(milestone_number: int) -> List[Issue]:
"""Return issues for the given milestone and tags"""
path = "/issues?milestone=%s&state=all&per_page=100" % milestone_number
response = github_get(path)
issues = []
for issue_json in json.loads(response):
if "pull_request" in issue_json:
continue
number = issue_json["number"]
tags = []
if "labels" in issue_json:
labels = issue_json["labels"]
tags = [label["name"] for label in labels]
author = issue_json["user"]["login"]
pulls = get_linked_pulls(number)
issue = Issue(number, tags, author, pulls)
issues.append(issue)
return issues
def get_linked_pulls(issue_number) -> List[Pull]:
"""return linked pull request for a given issue"""
    # Web-scrape the issue page because the API does not provide the
    # relationship between issues and pull requests
url = "https://github.com/Orange-OpenSource/hurl/issues/%d" % issue_number
sys.stderr.write("* GET %s\n" % url)
r = requests.get(url)
html = r.text
pulls = webscrapping_linked_pulls(html)
return pulls
def webscrapping_linked_pulls(html) -> List[Pull]:
soup = BeautifulSoup(html, "html.parser")
links = soup.select("development-menu a")
pulls = []
for link in links:
url = link["href"]
if url == "/Orange-OpenSource/hurl":
continue
description = "".join(link.getText()).strip()
pull = Pull(url, description)
pulls.append(pull)
return pulls
def get_authors(issues: List[Issue]) -> List[str]:
"""return list of unique authors from a list of issues"""
authors = []
for issue in issues:
author = issue.author
if author not in authors:
authors.append(author)
return authors
def generate_md(
milestone: str, date: datetime.datetime, pulls: List[Pull], authors: List[str]
) -> str:
"""Generate Markdown"""
s = "[%s (%s)](%s)" % (
milestone,
date.strftime("%Y-%m-%d"),
hurl_repo_url + "/blob/master/CHANGELOG.md#" + milestone,
)
s += "\n========================================================================================================================"
s += "\n\nThanks to"
for author in authors:
s += "\n[@%s](https://github.com/%s)," % (author, author)
categories = {"enhancement": "Enhancements", "bug": "Bugs Fixed"}
for category in categories:
category_pulls = [pull for pull in pulls if category in pull.tags]
if len(category_pulls) > 0:
s += "\n\n\n" + categories[category] + ":" + "\n\n"
for pull in category_pulls:
issues = " ".join(
"[#%s](%s/issues/%s)" % (issue, hurl_repo_url, issue)
for issue in pull.issues
)
s += "* %s %s\n" % (pull.description, issues)
s += "\n"
return s
def get_milestone(title: str) -> int:
"""Return milestone number"""
path = "/milestones?state=all"
response = github_get(path)
for milestone in json.loads(response):
if milestone["title"] == title:
return milestone["number"]
return -1
def github_get(path: str) -> str:
"""Execute an HTTP GET with request"""
github_api_url = "https://api.github.com/repos/Orange-OpenSource/hurl"
url = github_api_url + path
sys.stderr.write("* GET %s\n" % url)
r = requests.get(
url,
# headers={"authorization": "Bearer " + github_api_token} # increase rate limit
)
if r.status_code != 200:
raise Exception("HTTP Error %s - %s" % (r.status_code, r.text))
return r.text
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: bin/release/get_release_note.py <VERSION>")
sys.exit(1)
version = sys.argv[1]
print(release_note(version))
| null |
2,055 |
import pytest
from unittest.mock import (
Mock,
)
from web3.providers.eth_tester.middleware import (
async_default_transaction_fields_middleware,
default_transaction_fields_middleware,
)
from web3.types import (
BlockData,
)
SAMPLE_ADDRESS_LIST = [
"0x0000000000000000000000000000000000000001",
"0x0000000000000000000000000000000000000002",
"0x0000000000000000000000000000000000000003",
]
SAMPLE_ADDRESS = "0x0000000000000000000000000000000000000004"
@pytest.mark.parametrize("block_number", {0, "0x0", "earliest"})
def test_get_transaction_count_formatters(w3, block_number):
tx_counts = w3.eth.get_transaction_count(w3.eth.accounts[-1], block_number)
assert tx_counts == 0
def test_get_block_formatters(w3):
all_block_keys = BlockData.__annotations__.keys()
all_non_poa_block_keys = set(
[k for k in all_block_keys if k != "proofOfAuthorityData"]
)
latest_block = w3.eth.get_block("latest")
latest_block_keys = set(latest_block.keys())
assert all_non_poa_block_keys == latest_block_keys
@pytest.mark.parametrize(
"w3_accounts, w3_coinbase, method, from_field_added, from_field_value",
(
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "eth_call", True, SAMPLE_ADDRESS),
(
SAMPLE_ADDRESS_LIST,
SAMPLE_ADDRESS,
"eth_estimateGas",
True,
SAMPLE_ADDRESS,
),
(
SAMPLE_ADDRESS_LIST,
SAMPLE_ADDRESS,
"eth_sendTransaction",
True,
SAMPLE_ADDRESS,
),
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "eth_gasPrice", False, None),
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "eth_blockNumber", False, None),
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "meow", False, None),
(SAMPLE_ADDRESS_LIST, None, "eth_call", True, SAMPLE_ADDRESS_LIST[0]),
(SAMPLE_ADDRESS_LIST, None, "eth_estimateGas", True, SAMPLE_ADDRESS_LIST[0]),
(
SAMPLE_ADDRESS_LIST,
None,
"eth_sendTransaction",
True,
SAMPLE_ADDRESS_LIST[0],
),
(SAMPLE_ADDRESS_LIST, None, "eth_gasPrice", False, None),
(SAMPLE_ADDRESS_LIST, None, "eth_blockNumber", False, None),
(SAMPLE_ADDRESS_LIST, None, "meow", False, None),
(None, SAMPLE_ADDRESS, "eth_call", True, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_estimateGas", True, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_sendTransaction", True, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_gasPrice", False, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_blockNumber", False, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "meow", False, SAMPLE_ADDRESS),
(None, None, "eth_call", True, None),
(None, None, "eth_estimateGas", True, None),
(None, None, "eth_sendTransaction", True, None),
(None, None, "eth_gasPrice", False, None),
(None, None, "eth_blockNumber", False, None),
(None, None, "meow", False, None),
),
)
def test_default_transaction_fields_middleware(
w3_accounts, w3_coinbase, method, from_field_added, from_field_value
):
def mock_request(_method, params):
return params
mock_w3 = Mock()
mock_w3.eth.accounts = w3_accounts
mock_w3.eth.coinbase = w3_coinbase
middleware = default_transaction_fields_middleware(mock_request, mock_w3)
base_params = {"chainId": 5}
filled_transaction = middleware(method, [base_params])
filled_params = filled_transaction[0]
assert ("from" in filled_params.keys()) == from_field_added
if "from" in filled_params.keys():
assert filled_params["from"] == from_field_value
filled_transaction[0].pop("from", None)
assert filled_transaction[0] == base_params
# -- async -- #
@pytest.mark.parametrize(
"w3_accounts, w3_coinbase, method, from_field_added, from_field_value",
(
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "eth_call", True, SAMPLE_ADDRESS),
(
SAMPLE_ADDRESS_LIST,
SAMPLE_ADDRESS,
"eth_estimateGas",
True,
SAMPLE_ADDRESS,
),
(
SAMPLE_ADDRESS_LIST,
SAMPLE_ADDRESS,
"eth_sendTransaction",
True,
SAMPLE_ADDRESS,
),
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "eth_gasPrice", False, None),
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "eth_blockNumber", False, None),
(SAMPLE_ADDRESS_LIST, SAMPLE_ADDRESS, "meow", False, None),
(SAMPLE_ADDRESS_LIST, None, "eth_call", True, SAMPLE_ADDRESS_LIST[0]),
(SAMPLE_ADDRESS_LIST, None, "eth_estimateGas", True, SAMPLE_ADDRESS_LIST[0]),
(
SAMPLE_ADDRESS_LIST,
None,
"eth_sendTransaction",
True,
SAMPLE_ADDRESS_LIST[0],
),
(SAMPLE_ADDRESS_LIST, None, "eth_gasPrice", False, None),
(SAMPLE_ADDRESS_LIST, None, "eth_blockNumber", False, None),
(SAMPLE_ADDRESS_LIST, None, "meow", False, None),
(None, SAMPLE_ADDRESS, "eth_call", True, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_estimateGas", True, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_sendTransaction", True, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_gasPrice", False, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "eth_blockNumber", False, SAMPLE_ADDRESS),
(None, SAMPLE_ADDRESS, "meow", False, SAMPLE_ADDRESS),
(None, None, "eth_call", True, None),
(None, None, "eth_estimateGas", True, None),
(None, None, "eth_sendTransaction", True, None),
(None, None, "eth_gasPrice", False, None),
(None, None, "eth_blockNumber", False, None),
(None, None, "meow", False, None),
),
)
@pytest.mark.asyncio
async def test_async_default_transaction_fields_middleware(
w3_accounts,
w3_coinbase,
method,
from_field_added,
from_field_value,
):
async def mock_request(_method, params):
return params
async def mock_async_accounts():
return w3_accounts
    async def mock_async_coinbase():
return w3_coinbase
mock_w3 = Mock()
mock_w3.eth.accounts = mock_async_accounts()
    mock_w3.eth.coinbase = mock_async_coinbase()
middleware = await async_default_transaction_fields_middleware(
mock_request, mock_w3
)
base_params = {"chainId": 5}
filled_transaction = await middleware(method, [base_params])
filled_params = filled_transaction[0]
assert ("from" in filled_params.keys()) == from_field_added
if "from" in filled_params.keys():
assert filled_params["from"] == from_field_value
filled_transaction[0].pop("from", None)
assert filled_transaction[0] == base_params
# clean up
mock_w3.eth.accounts.close()
mock_w3.eth.coinbase.close()
| null |
2,056 |
from shared.permissions.policy_engine.policy_engine import PermissionResult, PermissionResultObjectSet
from shared.permissions.policy_engine.base_policy_enforcer import BasePolicyEnforcer
from shared.permissions.policy_engine.policy_engine import PolicyEngine
from shared.database.permissions.roles import Role, RoleMemberObject, ValidObjectTypes
from shared.database.source_control.file_perms import FileDefaultRoles, FileRolesPermissions, FilePermissions
from shared.database.source_control.dataset_perms import DatasetPermissions, DatasetDefaultRoles
from shared.database.auth.member import Member
from shared.database.source_control.file import File
from shared.permissions.policy_engine.dataset_policy_enforcer import DatasetPolicyEnforcer
from shared.database.project_perms import ProjectDefaultRoles
from sqlalchemy.orm.session import Session
from sqlalchemy import or_
from typing import List
from enum import Enum
class FilePolicyEnforcer(BasePolicyEnforcer):
def __init__(self, session: Session, project: 'Project', policy_engine: PolicyEngine):
super().__init__(session = session, project = project, policy_engine = policy_engine)
self.session = session
self.project = project
self.policy_engine = policy_engine
def list_default_roles(self, member_id: int, object_id: int) -> List[str]:
role_names = []
for elm in list(FileDefaultRoles):
role_names.append(elm.value)
role_member_objects = self.session.query(RoleMemberObject).filter(
RoleMemberObject.default_role_name.in_(role_names),
RoleMemberObject.member_id == member_id,
RoleMemberObject.object_id == object_id
)
result = [elm.default_role_name for elm in role_member_objects]
return result
def __has_perm_from_dataset(self, member_id: int, object_id: int, perm: Enum) -> PermissionResult:
dataset_id_list = File.get_directories_ids(session = self.session, file_id = object_id)
ds_policy_enforcer = DatasetPolicyEnforcer(session = self.session, project = self.project,
policy_engine = self.policy_engine)
ds_perm = DatasetPermissions.dataset_edit
if perm == FilePermissions.file_view:
ds_perm = DatasetPermissions.dataset_view
elif perm == FilePermissions.file_edit:
ds_perm = DatasetPermissions.dataset_edit
elif perm == FilePermissions.file_delete:
ds_perm = DatasetPermissions.dataset_delete
perm_result: PermissionResult = ds_policy_enforcer.has_perm_for_at_least_one(
member_id = member_id,
object_type = ValidObjectTypes.dataset.name,
object_id_list = dataset_id_list,
perm = ds_perm,
)
return perm_result
def has_perm(self, member_id: int, object_type: str, object_id: int, perm: Enum) -> PermissionResult:
# Check Default Permissions
default_roles = self.list_default_roles(member_id = member_id, object_id = object_id)
for role in default_roles:
if FileRolesPermissions.get(role) is not None:
perms_list = FileRolesPermissions.get(role)
if perm.value in perms_list:
result = PermissionResult(
allowed = True,
member_id = member_id,
object_type = object_type,
object_id = object_id
)
return result
# Check Project Permissions
member = Member.get_by_id(session = self.session, member_id = member_id)
allowed_project_roles = [ProjectDefaultRoles.viewer.value,
ProjectDefaultRoles.editor.value,
ProjectDefaultRoles.annotator.value,
ProjectDefaultRoles.admin.value]
if perm != FilePermissions.file_view:
allowed_project_roles = [ProjectDefaultRoles.editor.value,
ProjectDefaultRoles.annotator.value,
ProjectDefaultRoles.admin.value]
perm_result: PermissionResult = self.policy_engine.member_has_any_project_role(member = member,
project_id = self.project.id,
roles = allowed_project_roles)
if perm_result.allowed:
return perm_result
# Check Dataset Permissions
perm_result: PermissionResult = self.__has_perm_from_dataset(
member_id = member_id,
object_id = object_id,
perm = perm
)
if perm_result.allowed:
return perm_result
# Custom Roles checking
perm_result = super().has_perm(member_id = member_id,
object_id = object_id,
object_type = object_type,
perm = perm)
return perm_result
    def get_roles_with_perm(self, perm: Enum) -> List[str]:
result = []
for role_name, perms_list in FileRolesPermissions.items():
if perm.value in perms_list:
result.append(role_name)
return result
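# --- Hedged usage sketch (comments only; session, project, and ids are hypothetical) ---
# enforcer = FilePolicyEnforcer(session=session, project=project,
#                               policy_engine=policy_engine)
# result = enforcer.has_perm(member_id=member.id,
#                            object_type="file",
#                            object_id=file.id,
#                            perm=FilePermissions.file_view)
# if result.allowed:
#     ...  # serve the file; otherwise fall through to a 403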
| null |
2,057 |
# coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictBool, StrictStr, conint
from lightly.openapi_generated.swagger_client.models.docker_worker_config_v2_docker_object_level import DockerWorkerConfigV2DockerObjectLevel
from lightly.openapi_generated.swagger_client.models.docker_worker_config_v2_docker_stopping_condition import DockerWorkerConfigV2DockerStoppingCondition
from lightly.openapi_generated.swagger_client.models.docker_worker_config_v3_docker_corruptness_check import DockerWorkerConfigV3DockerCorruptnessCheck
from lightly.openapi_generated.swagger_client.models.docker_worker_config_v3_docker_datasource import DockerWorkerConfigV3DockerDatasource
from lightly.openapi_generated.swagger_client.models.lightly_docker_selection_method import LightlyDockerSelectionMethod
class DockerWorkerConfigV2Docker(BaseModel):
"""
docker run configurations, keys should match the structure of https://github.com/lightly-ai/lightly-core/blob/develop/onprem-docker/lightly_worker/src/lightly_worker/resources/docker/docker.yaml
"""
checkpoint: Optional[StrictStr] = None
corruptness_check: Optional[DockerWorkerConfigV3DockerCorruptnessCheck] = Field(None, alias="corruptnessCheck")
datasource: Optional[DockerWorkerConfigV3DockerDatasource] = None
embeddings: Optional[StrictStr] = None
enable_training: Optional[StrictBool] = Field(None, alias="enableTraining")
method: Optional[LightlyDockerSelectionMethod] = None
normalize_embeddings: Optional[StrictBool] = Field(None, alias="normalizeEmbeddings")
output_image_format: Optional[StrictStr] = Field(None, alias="outputImageFormat")
object_level: Optional[DockerWorkerConfigV2DockerObjectLevel] = Field(None, alias="objectLevel")
pretagging: Optional[StrictBool] = None
pretagging_upload: Optional[StrictBool] = Field(None, alias="pretaggingUpload")
relevant_filenames_file: Optional[StrictStr] = Field(None, alias="relevantFilenamesFile")
selected_sequence_length: Optional[conint(strict=True, ge=1)] = Field(None, alias="selectedSequenceLength")
stopping_condition: Optional[DockerWorkerConfigV2DockerStoppingCondition] = Field(None, alias="stoppingCondition")
upload_report: Optional[StrictBool] = Field(None, alias="uploadReport")
__properties = ["checkpoint", "corruptnessCheck", "datasource", "embeddings", "enableTraining", "method", "normalizeEmbeddings", "outputImageFormat", "objectLevel", "pretagging", "pretaggingUpload", "relevantFilenamesFile", "selectedSequenceLength", "stoppingCondition", "uploadReport"]
class Config:
"""Pydantic configuration"""
allow_population_by_field_name = True
validate_assignment = True
use_enum_values = True
extra = Extra.forbid
def to_str(self, by_alias: bool = False) -> str:
"""Returns the string representation of the model"""
return pprint.pformat(self.dict(by_alias=by_alias))
def to_json(self, by_alias: bool = False) -> str:
"""Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
@classmethod
def from_json(cls, json_str: str) -> DockerWorkerConfigV2Docker:
"""Create an instance of DockerWorkerConfigV2Docker from a JSON string"""
return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
"""Returns the dictionary representation of the model"""
_dict = self.dict(by_alias=by_alias,
exclude={
},
exclude_none=True)
# override the default output from pydantic by calling `to_dict()` of corruptness_check
if self.corruptness_check:
            _dict['corruptnessCheck' if by_alias else 'corruptness_check'] = self.corruptness_check.to_dict(by_alias=by_alias)
# override the default output from pydantic by calling `to_dict()` of datasource
if self.datasource:
            _dict['datasource'] = self.datasource.to_dict(by_alias=by_alias)
# override the default output from pydantic by calling `to_dict()` of object_level
if self.object_level:
            _dict['objectLevel' if by_alias else 'object_level'] = self.object_level.to_dict(by_alias=by_alias)
# override the default output from pydantic by calling `to_dict()` of stopping_condition
if self.stopping_condition:
            _dict['stoppingCondition' if by_alias else 'stopping_condition'] = self.stopping_condition.to_dict(by_alias=by_alias)
return _dict
@classmethod
def from_dict(cls, obj: dict) -> DockerWorkerConfigV2Docker:
"""Create an instance of DockerWorkerConfigV2Docker from a dict"""
if obj is None:
return None
if not isinstance(obj, dict):
return DockerWorkerConfigV2Docker.parse_obj(obj)
# raise errors for additional fields in the input
for _key in obj.keys():
if _key not in cls.__properties:
raise ValueError("Error due to additional fields (not defined in DockerWorkerConfigV2Docker) in the input: " + str(obj))
_obj = DockerWorkerConfigV2Docker.parse_obj({
"checkpoint": obj.get("checkpoint"),
"corruptness_check": DockerWorkerConfigV3DockerCorruptnessCheck.from_dict(obj.get("corruptnessCheck")) if obj.get("corruptnessCheck") is not None else None,
"datasource": DockerWorkerConfigV3DockerDatasource.from_dict(obj.get("datasource")) if obj.get("datasource") is not None else None,
"embeddings": obj.get("embeddings"),
"enable_training": obj.get("enableTraining"),
"method": obj.get("method"),
"normalize_embeddings": obj.get("normalizeEmbeddings"),
"output_image_format": obj.get("outputImageFormat"),
"object_level": DockerWorkerConfigV2DockerObjectLevel.from_dict(obj.get("objectLevel")) if obj.get("objectLevel") is not None else None,
"pretagging": obj.get("pretagging"),
"pretagging_upload": obj.get("pretaggingUpload"),
"relevant_filenames_file": obj.get("relevantFilenamesFile"),
"selected_sequence_length": obj.get("selectedSequenceLength"),
"stopping_condition": DockerWorkerConfigV2DockerStoppingCondition.from_dict(obj.get("stoppingCondition")) if obj.get("stoppingCondition") is not None else None,
"upload_report": obj.get("uploadReport")
})
return _obj
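# ---- usage sketch (editorial addition) ----
# A minimal, hedged round-trip through the serialization helpers above. The
# masked METHOD_NAME is this class's dict-serialization method, and the sample
# field values below are illustrative assumptions, not fixture data.
if __name__ == "__main__":
    _cfg = DockerWorkerConfigV2Docker.from_json('{"enableTraining": true, "pretagging": false}')
    print(_cfg.to_str(by_alias=True))   # pretty-printed camelCase dict
    print(_cfg.to_json(by_alias=True))  # camelCase JSON with None fields dropped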
| null |
2,058 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdateMonitorRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CDRS', '2020-11-01', 'UpdateMonitor')
self.set_method('POST')
def get_CorpId(self):
return self.get_body_params().get('CorpId')
def set_CorpId(self,CorpId):
self.add_body_params('CorpId', CorpId)
def get_Description(self):
return self.get_body_params().get('Description')
def set_Description(self,Description):
self.add_body_params('Description', Description)
def get_RuleName(self):
return self.get_body_params().get('RuleName')
def set_RuleName(self,RuleName):
self.add_body_params('RuleName', RuleName)
def get_PicOperateType(self):
return self.get_body_params().get('PicOperateType')
def set_PicOperateType(self,PicOperateType):
self.add_body_params('PicOperateType', PicOperateType)
def get_AttributeName(self):
return self.get_body_params().get('AttributeName')
def set_AttributeName(self,AttributeName):
self.add_body_params('AttributeName', AttributeName)
def get_AttributeOperateType(self):
return self.get_body_params().get('AttributeOperateType')
def set_AttributeOperateType(self,AttributeOperateType):
self.add_body_params('AttributeOperateType', AttributeOperateType)
def get_RuleExpression(self):
return self.get_body_params().get('RuleExpression')
def METHOD_NAME(self,RuleExpression):
self.add_body_params('RuleExpression', RuleExpression)
def get_NotifierTimeOut(self):
return self.get_body_params().get('NotifierTimeOut')
def set_NotifierTimeOut(self,NotifierTimeOut):
self.add_body_params('NotifierTimeOut', NotifierTimeOut)
def get_TaskId(self):
return self.get_body_params().get('TaskId')
def set_TaskId(self,TaskId):
self.add_body_params('TaskId', TaskId)
def get_DeviceOperateType(self):
return self.get_body_params().get('DeviceOperateType')
def set_DeviceOperateType(self,DeviceOperateType):
self.add_body_params('DeviceOperateType', DeviceOperateType)
def get_PicList(self):
return self.get_body_params().get('PicList')
def set_PicList(self,PicList):
self.add_body_params('PicList', PicList)
def get_AttributeValueList(self):
return self.get_body_params().get('AttributeValueList')
def set_AttributeValueList(self,AttributeValueList):
self.add_body_params('AttributeValueList', AttributeValueList)
def get_NotifierAppSecret(self):
return self.get_body_params().get('NotifierAppSecret')
def set_NotifierAppSecret(self,NotifierAppSecret):
self.add_body_params('NotifierAppSecret', NotifierAppSecret)
def get_NotifierExtendValues(self):
return self.get_body_params().get('NotifierExtendValues')
def set_NotifierExtendValues(self,NotifierExtendValues):
self.add_body_params('NotifierExtendValues', NotifierExtendValues)
def get_DeviceList(self):
return self.get_body_params().get('DeviceList')
def set_DeviceList(self,DeviceList):
self.add_body_params('DeviceList', DeviceList)
def get_NotifierUrl(self):
return self.get_body_params().get('NotifierUrl')
def set_NotifierUrl(self,NotifierUrl):
self.add_body_params('NotifierUrl', NotifierUrl)
def get_NotifierType(self):
return self.get_body_params().get('NotifierType')
def set_NotifierType(self,NotifierType):
self.add_body_params('NotifierType', NotifierType)
def get_PicExtendList(self):
return self.get_body_params().get('PicExtendList')
def set_PicExtendList(self,PicExtendList):
self.add_body_params('PicExtendList', PicExtendList)
def get_AlgorithmVendor(self):
return self.get_body_params().get('AlgorithmVendor')
def set_AlgorithmVendor(self,AlgorithmVendor):
self.add_body_params('AlgorithmVendor', AlgorithmVendor
| null |
2,059 |
from ..base.twilltestcase import (
common,
ShedTwillTestCase,
)
column_repository_name = "column_maker_0080"
column_repository_description = "Add column"
column_repository_long_description = "Compute an expression on every row"
convert_repository_name = "convert_chars_0080"
convert_repository_description = "Convert delimiters"
convert_repository_long_description = "Convert delimiters to tab"
category_name = "Test 0080 Advanced Circular Dependencies"
category_description = "Test circular dependency features"
class TestRepositoryCircularDependencies(ShedTwillTestCase):
"""Verify that the code correctly handles circular dependencies."""
def METHOD_NAME(self):
"""Create necessary user accounts."""
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
self.login(email=common.admin_email, username=common.admin_username)
def test_0005_create_column_repository(self):
"""Create and populate the column_maker repository."""
category = self.create_category(name=category_name, description=category_description)
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
repository = self.get_or_create_repository(
name=column_repository_name,
description=column_repository_description,
long_description=column_repository_long_description,
owner=common.test_user_1_name,
category=category,
strings_displayed=[],
)
self.upload_file(
repository,
filename="column_maker/column_maker.tar",
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message="Uploaded column_maker tarball.",
strings_displayed=[],
strings_not_displayed=[],
)
    def test_0010_create_convert_repository(self):
"""Create and populate the convert_chars repository."""
self.login(email=common.admin_email, username=common.admin_username)
category = self.create_category(name=category_name, description=category_description)
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
repository = self.get_or_create_repository(
name=convert_repository_name,
description=convert_repository_description,
long_description=convert_repository_long_description,
owner=common.test_user_1_name,
category=category,
strings_displayed=[],
)
self.upload_file(
repository,
filename="convert_chars/convert_chars.tar",
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message="Uploaded convert_chars tarball.",
strings_displayed=[],
strings_not_displayed=[],
)
def test_0020_create_repository_dependencies(self):
"""Upload a repository_dependencies.xml file that specifies the current revision of convert_chars_0080 to the column_maker_0080 repository."""
convert_repository = self._get_repository_by_name_and_owner(convert_repository_name, common.test_user_1_name)
column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name)
repository_dependencies_path = self.generate_temp_path("test_0080", additional_paths=["convert"])
repository_tuple = (
self.url,
convert_repository.name,
convert_repository.owner,
self.get_repository_tip(convert_repository),
)
self.create_repository_dependency(
repository=column_repository, repository_tuples=[repository_tuple], filepath=repository_dependencies_path
)
    def test_0025_create_dependency_on_column_maker(self):
        """Upload a repository_dependencies.xml file that specifies the current revision of column_maker_0080 to the convert_chars_0080 repository."""
convert_repository = self._get_repository_by_name_and_owner(convert_repository_name, common.test_user_1_name)
column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name)
repository_dependencies_path = self.generate_temp_path("test_0080", additional_paths=["convert"])
repository_tuple = (
self.url,
column_repository.name,
column_repository.owner,
self.get_repository_tip(column_repository),
)
self.create_repository_dependency(
repository=convert_repository, repository_tuples=[repository_tuple], filepath=repository_dependencies_path
)
def test_0030_verify_repository_dependencies(self):
"""Verify that each repository can depend on the other without causing an infinite loop."""
convert_repository = self._get_repository_by_name_and_owner(convert_repository_name, common.test_user_1_name)
column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name)
self.check_repository_dependency(
convert_repository, column_repository, self.get_repository_tip(column_repository)
)
self.check_repository_dependency(
column_repository, convert_repository, self.get_repository_tip(convert_repository)
)
def test_0035_verify_repository_metadata(self):
"""Verify that resetting the metadata does not change it."""
column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name)
convert_repository = self._get_repository_by_name_and_owner(convert_repository_name, common.test_user_1_name)
for repository in [column_repository, convert_repository]:
self.verify_unchanged_repository_metadata(repository)
| null |
2,060 |
# Copyright 2019 The KerasTuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
from keras_tuner import errors
from keras_tuner.api_export import keras_tuner_export
@keras_tuner_export("keras_tuner.HyperModel")
class HyperModel:
"""Defines a search space of models.
A search space is a collection of models. The `build` function will build
one of the models from the space using the given `HyperParameters` object.
Users should subclass the `HyperModel` class to define their search spaces
by overriding `build()`, which creates and returns the Keras model.
Optionally, you may also override `fit()` to customize the training process
of the model.
Examples:
In `build()`, you can create the model using the hyperparameters.
```python
class MyHyperModel(kt.HyperModel):
def build(self, hp):
model = keras.Sequential()
model.add(keras.layers.Dense(
hp.Choice('units', [8, 16, 32]),
activation='relu'))
model.add(keras.layers.Dense(1, activation='relu'))
model.compile(loss='mse')
return model
```
When overriding `HyperModel.fit()`, if you use `model.fit()` to train your
model, which returns the training history, you can return it directly. You
may use `hp` to specify any hyperparameters to tune.
```python
class MyHyperModel(kt.HyperModel):
def build(self, hp):
...
def fit(self, hp, model, *args, **kwargs):
return model.fit(
*args,
epochs=hp.Int("epochs", 5, 20),
**kwargs)
```
If you have a customized training process, you can return the objective
value as a float.
If you want to keep track of more metrics, you can return a dictionary of
the metrics to track.
```python
class MyHyperModel(kt.HyperModel):
def build(self, hp):
...
def fit(self, hp, model, *args, **kwargs):
...
return {
"loss": loss,
"val_loss": val_loss,
"val_accuracy": val_accuracy
}
```
Args:
name: Optional string, the name of this HyperModel.
tunable: Boolean, whether the hyperparameters defined in this
hypermodel should be added to search space. If `False`, either the
search space for these parameters must be defined in advance, or
the default values will be used. Defaults to True.
"""
def __init__(self, name=None, tunable=True):
self.name = name
self.tunable = tunable
self._build = self.METHOD_NAME
self.METHOD_NAME = self._build_wrapper
def METHOD_NAME(self, hp):
"""Builds a model.
Args:
hp: A `HyperParameters` instance.
Returns:
A model instance.
"""
raise NotImplementedError
def _build_wrapper(self, hp, *args, **kwargs):
if not self.tunable:
# Copy `HyperParameters` object so that new entries are not added
# to the search space.
hp = hp.copy()
return self._build(hp, *args, **kwargs)
def declare_hyperparameters(self, hp):
pass
def fit(self, hp, model, *args, **kwargs):
"""Train the model.
Args:
hp: HyperParameters.
model: `keras.Model` built in the `build()` function.
**kwargs: All arguments passed to `Tuner.search()` are in the
`kwargs` here. It always contains a `callbacks` argument, which
is a list of default Keras callback functions for model
checkpointing, tensorboard configuration, and other tuning
utilities. If `callbacks` is passed by the user from
`Tuner.search()`, these default callbacks will be appended to
the user provided list.
Returns:
A `History` object, which is the return value of `model.fit()`, a
dictionary, or a float.
            If a dictionary is returned, it should map metric names to metric
            values, and the keys must include the `objective` name.
            If a float is returned, it should be the `objective` value.
"""
return model.fit(*args, **kwargs)
class DefaultHyperModel(HyperModel):
"""Produces HyperModel from a model building function."""
def __init__(self, METHOD_NAME, name=None, tunable=True):
super().__init__(name=name)
self.METHOD_NAME = METHOD_NAME
def get_hypermodel(hypermodel):
"""Gets a HyperModel from a HyperModel or callable."""
if hypermodel is None:
return None
if isinstance(hypermodel, HyperModel):
return hypermodel
if not callable(hypermodel):
raise errors.FatalValueError(
"The `hypermodel` argument should be either "
"a callable with signature `build(hp)` returning a model, "
"or an instance of `HyperModel`."
)
return DefaultHyperModel(hypermodel)
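# ---- usage sketch (editorial addition) ----
# Shows how a plain build function is adapted by get_hypermodel(); the layer
# sizes are illustrative assumptions. DefaultHyperModel stores the callable
# under the masked METHOD_NAME attribute, mirroring HyperModel's build method.
if __name__ == "__main__":
    import keras

    def build_model(hp):
        model = keras.Sequential([
            keras.layers.Dense(hp.Choice("units", [8, 16]), activation="relu"),
            keras.layers.Dense(1),
        ])
        model.compile(loss="mse")
        return model

    hypermodel = get_hypermodel(build_model)
    assert isinstance(hypermodel, DefaultHyperModel)
    assert get_hypermodel(hypermodel) is hypermodel  # HyperModel instances pass through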
| null |
2,061 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkslb.endpoint import endpoint_data
class SetLoadBalancerUDPListenerAttributeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Slb', '2014-05-15', 'SetLoadBalancerUDPListenerAttribute','slb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_AclStatus(self): # String
return self.get_query_params().get('AclStatus')
def set_AclStatus(self, AclStatus): # String
self.add_query_param('AclStatus', AclStatus)
def get_AclType(self): # String
return self.get_query_params().get('AclType')
def set_AclType(self, AclType): # String
self.add_query_param('AclType', AclType)
def get_MasterSlaveServerGroup(self): # String
return self.get_query_params().get('MasterSlaveServerGroup')
def set_MasterSlaveServerGroup(self, MasterSlaveServerGroup): # String
self.add_query_param('MasterSlaveServerGroup', MasterSlaveServerGroup)
def get_VServerGroupId(self): # String
return self.get_query_params().get('VServerGroupId')
def set_VServerGroupId(self, VServerGroupId): # String
self.add_query_param('VServerGroupId', VServerGroupId)
def get_AclId(self): # String
return self.get_query_params().get('AclId')
def set_AclId(self, AclId): # String
self.add_query_param('AclId', AclId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_MasterSlaveServerGroupId(self): # String
return self.get_query_params().get('MasterSlaveServerGroupId')
def set_MasterSlaveServerGroupId(self, MasterSlaveServerGroupId): # String
self.add_query_param('MasterSlaveServerGroupId', MasterSlaveServerGroupId)
def get_healthCheckReq(self): # String
return self.get_query_params().get('healthCheckReq')
def set_healthCheckReq(self, healthCheckReq): # String
self.add_query_param('healthCheckReq', healthCheckReq)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_healthCheckExp(self): # String
return self.get_query_params().get('healthCheckExp')
def set_healthCheckExp(self, healthCheckExp): # String
self.add_query_param('healthCheckExp', healthCheckExp)
def get_ProxyProtocolV2Enabled(self): # Boolean
return self.get_query_params().get('ProxyProtocolV2Enabled')
def set_ProxyProtocolV2Enabled(self, ProxyProtocolV2Enabled): # Boolean
self.add_query_param('ProxyProtocolV2Enabled', ProxyProtocolV2Enabled)
def get_HealthCheckSwitch(self): # String
return self.get_query_params().get('HealthCheckSwitch')
def set_HealthCheckSwitch(self, HealthCheckSwitch): # String
self.add_query_param('HealthCheckSwitch', HealthCheckSwitch)
def get_HealthCheckConnectTimeout(self): # Integer
return self.get_query_params().get('HealthCheckConnectTimeout')
def set_HealthCheckConnectTimeout(self, HealthCheckConnectTimeout): # Integer
self.add_query_param('HealthCheckConnectTimeout', HealthCheckConnectTimeout)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_UnhealthyThreshold(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Integer
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Integer
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)
def get_VServerGroup(self): # String
return self.get_query_params().get('VServerGroup')
def set_VServerGroup(self, VServerGroup): # String
self.add_query_param('VServerGroup', VServerGroup)
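# ---- usage sketch (editorial addition) ----
# A hedged example of tuning the UDP health check; every identifier and value
# below is a placeholder. The masked METHOD_NAME above is the ListenerPort getter.
if __name__ == "__main__":
    request = SetLoadBalancerUDPListenerAttributeRequest()
    request.set_LoadBalancerId("lb-example-id")
    request.set_ListenerPort(53)
    request.set_HealthCheckSwitch("on")
    request.set_HealthCheckInterval(5)
    request.set_HealthyThreshold(3)
    request.set_UnhealthyThreshold(3)
    print(request.get_query_params())  # flattened query parameters to be signed and sent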
| null |
2,062 |
import os
import tempfile
from galaxy.tool_util.parser import get_tool_source
from galaxy.util.compression_utils import CompressedFile
from galaxy.util.resources import resource_path
from galaxy_test.base import api_asserts
from tool_shed.test.base.populators import repo_tars
from ..base.api import ShedApiTestCase
COLUMN_MAKER_PATH = resource_path(__package__, "../test_data/column_maker/column_maker.tar")
class TestShedRepositoriesApi(ShedApiTestCase):
def test_create(self):
populator = self.populator
category_id = populator.new_category(prefix="testcreate").id
repos_by_category = populator.repositories_by_category(category_id)
repos = repos_by_category.repositories
assert len(repos) == 0
populator.new_repository(category_id)
repos_by_category = populator.repositories_by_category(category_id)
repos = repos_by_category.repositories
assert len(repos) == 1
def test_update_repository(self):
populator = self.populator
prefix = "testupdate"
category_id = populator.new_category(prefix=prefix).id
repository = populator.new_repository(category_id, prefix=prefix)
repository_id = repository.id
repository_update = populator.upload_revision(
repository_id,
COLUMN_MAKER_PATH,
)
assert repository_update.is_ok
# used by getRepository in TS client.
def test_metadata_simple(self):
populator = self.populator
repository = populator.setup_column_maker_repo(prefix="repoformetadata")
repository_metadata = populator.get_metadata(repository)
metadata_for_revisions = repository_metadata.__root__
assert len(metadata_for_revisions) == 1
only_key = list(metadata_for_revisions.keys())[0]
assert only_key.startswith("0:")
only_revision = list(metadata_for_revisions.values())[0]
assert only_revision
assert only_revision.downloadable
assert not only_revision.malicious
def test_index_simple(self):
populator = self.populator
repo = populator.setup_column_maker_repo(prefix="repoforindex")
repository_id = repo.id
show_response = self.api_interactor.get(f"repositories/{repository_id}")
index_response = self.api_interactor.get("repositories")
api_asserts.assert_status_code_is_ok(show_response)
api_asserts.assert_status_code_is_ok(index_response)
repository_ids = [r["id"] for r in index_response.json()]
assert repository_id in repository_ids
repository = self.populator.get_repository_for(repo.owner, repo.name)
assert repository.owner == repo.owner
assert repository.name == repo.name
def test_install_info(self):
        # Actually installing requires a whole Galaxy setup and the install manager,
        # but we can test that the response validates against the future-facing
        # InstallInfo pydantic models.
populator = self.populator
repo = populator.setup_column_maker_and_get_metadata(prefix="repoforinstallinfo")
populator.get_install_info(repo)
def test_get_ordered_installable_revisions(self):
# Used in ephemeris...
populator = self.populator
repository = populator.setup_column_maker_repo(prefix="repoforindex")
assert repository.owner
assert repository.name
revisions = populator.get_ordered_installable_revisions(repository.owner, repository.name)
assert len(revisions.__root__) == 1
def test_reset_on_repository(self):
populator = self.populator
repository = populator.setup_column_maker_repo(prefix="repoforreseta")
assert repository.owner
assert repository.name
revisions = populator.get_ordered_installable_revisions(repository.owner, repository.name)
assert len(revisions.__root__) == 1
metadata_response = populator.reset_metadata(repository)
assert metadata_response.start_time
assert metadata_response.stop_time
assert metadata_response.status == "ok"
assert len(metadata_response.repository_status) == 1
revisions = populator.get_ordered_installable_revisions(repository.owner, repository.name)
assert len(revisions.__root__) == 1
def test_repository_search(self):
populator = self.populator
repository = populator.setup_column_maker_repo(prefix="repoforreposearch")
populator.reindex()
results = populator.repo_search_query("repoforreposearch")
assert len(results.hits) == 1
first_hit = results.hits[0]
assert first_hit.repository.name == repository.name
assert first_hit.repository.times_downloaded == 0
def test_repo_tars(self):
for index, repo_path in enumerate(repo_tars("column_maker")):
path = CompressedFile(repo_path).extract(tempfile.mkdtemp())
tool_xml_path = os.path.join(path, "column_maker.xml")
tool_source = get_tool_source(config_file=tool_xml_path)
tool_version = tool_source.parse_version()
if index == 0:
assert tool_version == "1.1.0"
elif index == 1:
assert tool_version == "1.2.0"
elif index == 2:
assert tool_version == "1.3.0"
else:
raise AssertionError("Wrong number of repo tars returned...")
def test_reset_on_simple_repository(self):
populator = self.populator
repository = populator.setup_test_data_repo("column_maker")
populator.assert_has_n_installable_revisions(repository, 3)
response = self.api_interactor.post(
"repositories/reset_metadata_on_repository", data={"repository_id": repository.id}
)
api_asserts.assert_status_code_is_ok(response)
populator.assert_has_n_installable_revisions(repository, 3)
def test_reset_with_uninstallable_revisions(self):
populator = self.populator
# setup a repository with 4 revisions but only 3 installable ones due to no version change in a tool
repository = populator.setup_test_data_repo("column_maker_with_download_gaps")
populator.assert_has_n_installable_revisions(repository, 3)
response = self.api_interactor.post(
"repositories/reset_metadata_on_repository", data={"repository_id": repository.id}
)
api_asserts.assert_status_code_is_ok(response)
populator.assert_has_n_installable_revisions(repository, 3)
def METHOD_NAME(self):
populator = self.populator
repository = populator.setup_test_data_repo("column_maker_with_download_gaps")
populator.assert_has_n_installable_revisions(repository, 3)
        # Resetting one at a time or resetting everything via the web controllers works...
        # Resetting all at once via the API does not work - it breaks the repository.
response = self.api_interactor.post(
"repositories/reset_metadata_on_repositories",
data={"payload": "can not be empty because bug in controller"},
)
api_asserts.assert_status_code_is_ok(response)
populator.assert_has_n_installable_revisions(repository, 3)
| null |
2,063 |
# coding=utf-8
# Copyright 2023 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tree utils.
On the surface, the structures encoded in PENMAN notation form a tree;
only by resolving repeated node indexes (variables) as reentrancies
does the actual graph become accessible.
"""
from typing import Tuple, List, Any
# Node indexes.
Variable = str
# Relation/attribute names.
Role = str
# Node context branch.
Branch = Tuple[Role, Any]
# Node contexts for constructing a tree structure.
Node = Tuple[Variable, List[Branch]]
class Tree(object):
"""Represents a tree example."""
def __init__(self, node: Node):
self.node = node
def nodes(self) -> List[Node]:
"""Returns the nodes in the tree as a flat list."""
return METHOD_NAME(self.node)
def format(self, indent: int = 2, new_line: bool = True):
"""Formats the tree structure into a PENMAN string.
Example:
>>> tree = Tree(
'x0', [('/', 'unknown'), (':ARG', ('x1', [('/', '_look_v_up')]))])
>>> print(tree.format())
(x0 / unknown
:ARG (x1 / _look_v_up))
Args:
indent: How to indent formatted strings.
new_line: Whether to have new line for each node context.
Returns:
The PENMAN-serialized string of the tree.
"""
if indent < 0 or not new_line: indent = 0
node_indexes = set([idx for idx, _ in self.nodes()])
parts = [_format_node(self.node, indent, new_line, 0, node_indexes)]
if new_line:
return '\n'.join(parts)
else:
return ' '.join(parts)
def is_atomic(x) -> bool:
"""Returns ``True`` if *x* is a valid atomic value.
Examples:
>>> is_atomic('a')
True
>>> is_atomic(None)
True
>>> is_atomic(3.14)
True
>>> is_atomic(('a', [('/', 'alpha')]))
False
  Args:
    x: The value to check.
  Returns:
    True if *x* is a valid atomic value.
"""
return x is None or isinstance(x, (str, int, float))
def METHOD_NAME(node: Node) -> List[Node]:
idx, contexts = node
ns = [] if idx is None else [node]
for _, target in contexts:
# If target is not atomic, assume it's a valid tree node
if not is_atomic(target):
ns.extend(METHOD_NAME(target))
return ns
def _format_node(node: Node,
indent: int,
new_line: bool,
column: int,
node_indexes) -> str:
"""Formats node into a PENMAN string."""
idx, contexts = node
if not idx:
return '()' # Empty node.
if not contexts:
return f'({idx!s})' # Index-only node.
# Determines appropriate joiner based on value of indent.
column += indent
joiner = '\n' + ' ' * column if new_line else ' '
# Formats the contexts and join them.
# If index is non-empty, all initial attributes are compactly
# joined on the same line, otherwise they use joiner.
parts: List[str] = []
compact = bool(node_indexes)
for context in contexts:
target = context[1]
if compact and (not is_atomic(target) or target in node_indexes):
compact = False
if parts:
parts = [' '.join(parts)]
parts.append(
_format_context(context, indent, new_line, column, node_indexes))
# Checks if all contexts can be compactly written.
if compact:
parts = [' '.join(parts)]
return f'({idx!s} {joiner.join(parts)})'
def _format_context(context, indent, new_line, column, node_indexes):
"""Formats node context into a PENMAN string."""
role, target = context
if role != '/' and not role.startswith(':'):
role = ':' + role
sep = ' '
if not target:
target = sep = ''
elif not is_atomic(target):
target = _format_node(target, indent, new_line, column, node_indexes)
return f'{role}{sep}{target!s}'
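# ---- usage sketch (editorial addition) ----
# Reproduces the Tree.format() docstring example end to end. The masked
# METHOD_NAME is the node-flattening helper above, so both nodes() and
# format() work against this module as written.
if __name__ == "__main__":
  tree = Tree(('x0', [('/', 'unknown'), (':ARG', ('x1', [('/', '_look_v_up')]))]))
  print(tree.format())
  # (x0 / unknown
  #   :ARG (x1 / _look_v_up))
  print([idx for idx, _ in tree.nodes()])  # ['x0', 'x1']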
| null |
2,064 |
import time
import pytest
import datetime
from api.base.settings.defaults import API_BASE, DEFAULT_ES_NULL_VALUE
from osf_tests.factories import (
InstitutionFactory,
AuthUserFactory,
)
from osf.metrics import UserInstitutionProjectCounts
@pytest.mark.es
@pytest.mark.django_db
class TestInstitutionDepartmentList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user(self):
return AuthUserFactory()
@pytest.fixture()
def user2(self):
return AuthUserFactory()
@pytest.fixture()
def user3(self):
return AuthUserFactory()
@pytest.fixture()
def user4(self):
return AuthUserFactory()
@pytest.fixture()
def populate_counts(self, user, user2, user3, user4, admin, institution):
# This represents a Department that had a user, but no longer has any users, so does not appear in results.
UserInstitutionProjectCounts.record(
user_id=user._id,
institution_id=institution._id,
department='Old Department',
public_project_count=1,
private_project_count=1,
timestamp=datetime.date(2017, 2, 4)
).save()
# The user has left the department
UserInstitutionProjectCounts.record(
user_id=user._id,
institution_id=institution._id,
department='New Department',
public_project_count=1,
private_project_count=1,
).save()
# A second user entered the department
UserInstitutionProjectCounts.record(
user_id=user2._id,
institution_id=institution._id,
department='New Department',
public_project_count=1,
private_project_count=1
).save()
# A new department with a single user to test sorting
UserInstitutionProjectCounts.record(
user_id=user3._id,
institution_id=institution._id,
department='Smaller Department',
public_project_count=1,
private_project_count=1
).save()
# A user with no department
UserInstitutionProjectCounts.record(
user_id=user4._id,
institution_id=institution._id,
public_project_count=1,
private_project_count=1
).save()
time.sleep(5) # ES is slow
@pytest.fixture()
def admin(self, institution):
user = AuthUserFactory()
group = institution.get_group('institutional_admins')
group.user_set.add(user)
group.save()
return user
@pytest.fixture()
def METHOD_NAME(self, institution):
return f'/{API_BASE}institutions/{institution._id}/metrics/departments/'
def test_auth(self, app, METHOD_NAME, user, admin):
resp = app.get(METHOD_NAME, expect_errors=True)
assert resp.status_code == 401
resp = app.get(METHOD_NAME, auth=user.auth, expect_errors=True)
assert resp.status_code == 403
resp = app.get(METHOD_NAME, auth=admin.auth)
assert resp.status_code == 200
assert resp.json['data'] == []
def test_get(self, app, METHOD_NAME, admin, institution, populate_counts):
resp = app.get(METHOD_NAME, auth=admin.auth)
assert resp.json['data'] == [{
'id': f'{institution._id}-New-Department',
'type': 'institution-departments',
'attributes': {
'name': 'New Department',
'number_of_users': 2
},
'links': {'self': f'http://localhost:8000/v2/institutions/{institution._id}/metrics/departments/'}
}, {
'id': f'{institution._id}-Smaller-Department',
'type': 'institution-departments',
'attributes': {
'name': 'Smaller Department',
'number_of_users': 1
},
'links': {'self': f'http://localhost:8000/v2/institutions/{institution._id}/metrics/departments/'}
}, {
'id': f'{institution._id}-{DEFAULT_ES_NULL_VALUE}',
'type': 'institution-departments',
'attributes': {
'name': DEFAULT_ES_NULL_VALUE,
'number_of_users': 1
},
'links': {'self': f'http://localhost:8000/v2/institutions/{institution._id}/metrics/departments/'}
}]
# Tests CSV Export
headers = {
'accept': 'text/csv'
}
resp = app.get(METHOD_NAME, auth=admin.auth, headers=headers)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/csv; charset=utf-8'
response_body = resp.text
rows = response_body.split('\r\n')
header_row = rows[0].split(',')
new_department_row = rows[1].split(',')
smaller_department_row = rows[2].split(',')
na_row = rows[3].split(',')
assert header_row == ['id', 'name', 'number_of_users', 'type']
assert new_department_row == [f'{institution._id}-New-Department', 'New Department', '2', 'institution-departments']
assert smaller_department_row == [f'{institution._id}-Smaller-Department', 'Smaller Department', '1', 'institution-departments']
assert na_row == [f'{institution._id}-N/A', 'N/A', '1', 'institution-departments']
def test_pagination(self, app, METHOD_NAME, admin, institution, populate_counts):
resp = app.get(f'{METHOD_NAME}?filter[name]=New Department', auth=admin.auth)
assert resp.json['data'] == [{
'id': '{}-{}'.format(institution._id, 'New-Department'),
'type': 'institution-departments',
'attributes': {
'name': 'New Department',
'number_of_users': 2
},
'links': {'self': f'http://localhost:8000/v2/institutions/{institution._id}/metrics/departments/'}
}]
resp = app.get(f'{METHOD_NAME}?page[size]=2', auth=admin.auth)
assert len(resp.json['data']) == 2
assert resp.json['links']['meta']['per_page'] == 2
assert resp.json['links']['meta']['total'] == 3
resp = app.get(f'{METHOD_NAME}?page[size]=2&page=2', auth=admin.auth)
assert len(resp.json['data']) == 1
assert resp.json['links']['meta']['per_page'] == 2
assert resp.json['links']['meta']['total'] == 3
| null |
2,065 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class CreateOTAFirmwareRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'CreateOTAFirmware')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SignMethod(self):
return self.get_query_params().get('SignMethod')
def set_SignMethod(self,SignMethod):
self.add_query_param('SignMethod',SignMethod)
def get_MultiFiless(self):
return self.get_query_params().get('MultiFiles')
def set_MultiFiless(self, MultiFiless):
for depth1 in range(len(MultiFiless)):
if MultiFiless[depth1].get('Size') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.Size', MultiFiless[depth1].get('Size'))
if MultiFiless[depth1].get('Name') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.Name', MultiFiless[depth1].get('Name'))
if MultiFiless[depth1].get('SignValue') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.SignValue', MultiFiless[depth1].get('SignValue'))
if MultiFiless[depth1].get('FileMd5') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.FileMd5', MultiFiless[depth1].get('FileMd5'))
if MultiFiless[depth1].get('Url') is not None:
self.add_query_param('MultiFiles.' + str(depth1 + 1) + '.Url', MultiFiless[depth1].get('Url'))
def get_NeedToVerify(self):
return self.get_query_params().get('NeedToVerify')
def set_NeedToVerify(self,NeedToVerify):
self.add_query_param('NeedToVerify',NeedToVerify)
def get_Type(self):
return self.get_query_params().get('Type')
def METHOD_NAME(self,Type):
self.add_query_param('Type',Type)
def get_FirmwareUrl(self):
return self.get_query_params().get('FirmwareUrl')
def set_FirmwareUrl(self,FirmwareUrl):
self.add_query_param('FirmwareUrl',FirmwareUrl)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_FirmwareDesc(self):
return self.get_query_params().get('FirmwareDesc')
def set_FirmwareDesc(self,FirmwareDesc):
self.add_query_param('FirmwareDesc',FirmwareDesc)
def get_ModuleName(self):
return self.get_query_params().get('ModuleName')
def set_ModuleName(self,ModuleName):
self.add_query_param('ModuleName',ModuleName)
def get_FirmwareSign(self):
return self.get_query_params().get('FirmwareSign')
def set_FirmwareSign(self,FirmwareSign):
self.add_query_param('FirmwareSign',FirmwareSign)
def get_FirmwareSize(self):
return self.get_query_params().get('FirmwareSize')
def set_FirmwareSize(self,FirmwareSize):
self.add_query_param('FirmwareSize',FirmwareSize)
def get_FirmwareName(self):
return self.get_query_params().get('FirmwareName')
def set_FirmwareName(self,FirmwareName):
self.add_query_param('FirmwareName',FirmwareName)
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey)
def get_SrcVersion(self):
return self.get_query_params().get('SrcVersion')
def set_SrcVersion(self,SrcVersion):
self.add_query_param('SrcVersion',SrcVersion)
def get_Udi(self):
return self.get_query_params().get('Udi')
def set_Udi(self,Udi):
self.add_query_param('Udi',Udi)
def get_DestVersion(self):
return self.get_query_params().get('DestVersion')
def set_DestVersion(self,DestVersion):
        self.add_query_param('DestVersion',DestVersion)
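# ---- usage sketch (editorial addition) ----
# Demonstrates how the repeated MultiFiles list is flattened into indexed
# query parameters (MultiFiles.1.Name, MultiFiles.2.Url, ...); the file
# metadata is illustrative. The masked METHOD_NAME above is the Type setter.
if __name__ == "__main__":
    request = CreateOTAFirmwareRequest()
    request.set_ProductKey("example-product-key")
    request.set_FirmwareName("example-firmware")
    request.set_MultiFiless([
        {"Name": "app.bin", "Size": 1024, "Url": "https://example.com/app.bin"},
        {"Name": "boot.bin", "Size": 512, "Url": "https://example.com/boot.bin"},
    ])
    print(request.get_query_params())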
| null |
2,066 |
"""
Views related to OAuth2 platform applications. Intended for OSF internal use only
"""
from django.db.models import Q
from rest_framework.exceptions import APIException
from rest_framework import generics
from rest_framework import permissions as drf_permissions
from api.base.renderers import JSONAPIRenderer, JSONRendererWithESISupport
from framework.auth import cas
from framework.auth.oauth_scopes import CoreScopes
from osf.models import ApiOAuth2Application
from api.base.filters import ListFilterMixin
from api.base.utils import get_object_or_error
from api.base.views import JSONAPIBaseView, DeprecatedView
from api.base import permissions as base_permissions
from api.applications.serializers import ApiOAuth2ApplicationSerializer, ApiOAuth2ApplicationDetailSerializer, ApiOAuth2ApplicationResetSerializer
class ApplicationMixin(object):
"""Mixin with convenience methods for retrieving the current application based on the
current URL. By default, fetches the current application based on the client_id kwarg.
"""
def get_app(self):
app = get_object_or_error(ApiOAuth2Application, Q(client_id=self.kwargs['client_id'], is_active=True), self.request)
self.check_object_permissions(self.request, app)
return app
class ApplicationList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin):
"""
Get a list of API applications (eg OAuth2) that the user has registered
"""
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.OwnerOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.APPLICATIONS_READ]
required_write_scopes = [CoreScopes.APPLICATIONS_WRITE]
serializer_class = ApiOAuth2ApplicationSerializer
view_category = 'applications'
view_name = 'application-list'
renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ] # Hide from web-browsable API tool
ordering = ('-created',)
def get_default_queryset(self):
return ApiOAuth2Application.objects.filter(owner=self.request.user, is_active=True)
# overrides ListAPIView
def METHOD_NAME(self):
return self.get_queryset_from_request()
def perform_create(self, serializer):
"""Add user to the created object"""
serializer.validated_data['owner'] = self.request.user
serializer.save()
class ApplicationDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, ApplicationMixin):
"""
Get information about a specific API application (eg OAuth2) that the user has registered
Should not return information if the application belongs to a different user
"""
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.OwnerOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.APPLICATIONS_READ]
required_write_scopes = [CoreScopes.APPLICATIONS_WRITE]
serializer_class = ApiOAuth2ApplicationDetailSerializer
view_category = 'applications'
view_name = 'application-detail'
renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ] # Hide from web-browsable API tool
def get_object(self):
return self.get_app()
# overrides DestroyAPIView
def perform_destroy(self, instance):
"""Instance is not actually deleted from DB- just flagged as inactive, which hides it from list views"""
obj = self.get_object()
try:
obj.deactivate(save=True)
except cas.CasHTTPError:
raise APIException('Could not revoke application auth tokens; please try again later')
def perform_update(self, serializer):
"""Necessary to prevent owner field from being blanked on updates"""
serializer.validated_data['owner'] = self.request.user
# TODO: Write code to transfer ownership
serializer.save(owner=self.request.user)
class ApplicationReset(DeprecatedView, generics.CreateAPIView, ApplicationMixin):
"""
Resets client secret of a specific API application (eg OAuth2) that the user has registered
Should not perform update or return information if the application belongs to a different user
"""
max_version = '2.14'
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.OwnerOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.APPLICATIONS_READ]
required_write_scopes = [CoreScopes.APPLICATIONS_WRITE]
serializer_class = ApiOAuth2ApplicationResetSerializer
renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ] # Hide from web-browsable API tool
view_category = 'applications'
view_name = 'application-reset'
def get_object(self):
return self.get_app()
def perform_create(self, serializer):
"""Resets the application client secret, revokes all tokens"""
app = self.get_object()
app.reset_secret(save=True)
app.reload()
serializer.validated_data['client_secret'] = app.client_secret
| null |
2,067 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkslb.endpoint import endpoint_data
class DescribeLoadBalancersRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Slb', '2014-05-15', 'DescribeLoadBalancers','slb')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_NetworkType(self): # String
return self.get_query_params().get('NetworkType')
def set_NetworkType(self, NetworkType): # String
self.add_query_param('NetworkType', NetworkType)
def get_AddressIPVersion(self): # String
return self.get_query_params().get('AddressIPVersion')
def set_AddressIPVersion(self, AddressIPVersion): # String
self.add_query_param('AddressIPVersion', AddressIPVersion)
def get_MasterZoneId(self): # String
return self.get_query_params().get('MasterZoneId')
def set_MasterZoneId(self, MasterZoneId): # String
self.add_query_param('MasterZoneId', MasterZoneId)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_LoadBalancerName(self): # String
return self.get_query_params().get('LoadBalancerName')
def set_LoadBalancerName(self, LoadBalancerName): # String
self.add_query_param('LoadBalancerName', LoadBalancerName)
def get_SlaveZoneId(self): # String
return self.get_query_params().get('SlaveZoneId')
def set_SlaveZoneId(self, SlaveZoneId): # String
self.add_query_param('SlaveZoneId', SlaveZoneId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def METHOD_NAME(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ServerId(self): # String
return self.get_query_params().get('ServerId')
def set_ServerId(self, ServerId): # String
self.add_query_param('ServerId', ServerId)
def get_Tags(self): # String
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # String
self.add_query_param('Tags', Tags)
def get_ServerIntranetAddress(self): # String
return self.get_query_params().get('ServerIntranetAddress')
def set_ServerIntranetAddress(self, ServerIntranetAddress): # String
self.add_query_param('ServerIntranetAddress', ServerIntranetAddress)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_InternetChargeType(self): # String
return self.get_query_params().get('InternetChargeType')
def set_InternetChargeType(self, InternetChargeType): # String
self.add_query_param('InternetChargeType', InternetChargeType)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AddressType(self): # String
return self.get_query_params().get('AddressType')
def set_AddressType(self, AddressType): # String
self.add_query_param('AddressType', AddressType)
def get_Address(self): # String
return self.get_query_params().get('Address')
def set_Address(self, Address): # String
self.add_query_param('Address', Address)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_LoadBalancerStatus(self): # String
return self.get_query_params().get('LoadBalancerStatus')
def set_LoadBalancerStatus(self, LoadBalancerStatus): # String
self.add_query_param('LoadBalancerStatus', LoadBalancerStatus)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_PayType(self): # String
return self.get_query_params().get('PayType')
def set_PayType(self, PayType): # String
self.add_query_param('PayType', PayType)
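# ---- usage sketch (editorial addition) ----
# A hedged pagination loop over the load balancer list. Credentials, region,
# and the assumed response shape (LoadBalancers.LoadBalancer, TotalCount) are
# placeholders typical of this SDK family, not verified fixtures.
if __name__ == "__main__":
    import json
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    page, page_size = 1, 50
    while True:
        request = DescribeLoadBalancersRequest()
        request.set_PageNumber(page)
        request.set_PageSize(page_size)
        body = json.loads(client.do_action_with_exception(request))
        for lb in body.get("LoadBalancers", {}).get("LoadBalancer", []):
            print(lb.get("LoadBalancerId"), lb.get("LoadBalancerName"))
        if page * page_size >= body.get("TotalCount", 0):
            break
        page += 1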
| null |
2,068 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkess.endpoint import endpoint_data
class CreateAlarmRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ess', '2014-08-28', 'CreateAlarm','ess')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_MetricType(self): # String
return self.get_query_params().get('MetricType')
def METHOD_NAME(self, MetricType): # String
self.add_query_param('MetricType', MetricType)
def get_ScalingGroupId(self): # String
return self.get_query_params().get('ScalingGroupId')
def set_ScalingGroupId(self, ScalingGroupId): # String
self.add_query_param('ScalingGroupId', ScalingGroupId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_ExpressionsLogicOperator(self): # String
return self.get_query_params().get('ExpressionsLogicOperator')
def set_ExpressionsLogicOperator(self, ExpressionsLogicOperator): # String
self.add_query_param('ExpressionsLogicOperator', ExpressionsLogicOperator)
def get_AlarmActions(self): # RepeatList
return self.get_query_params().get('AlarmAction')
def set_AlarmActions(self, AlarmAction): # RepeatList
for depth1 in range(len(AlarmAction)):
self.add_query_param('AlarmAction.' + str(depth1 + 1), AlarmAction[depth1])
def get_Threshold(self): # Float
return self.get_query_params().get('Threshold')
def set_Threshold(self, Threshold): # Float
self.add_query_param('Threshold', Threshold)
def get_Effective(self): # String
return self.get_query_params().get('Effective')
def set_Effective(self, Effective): # String
self.add_query_param('Effective', Effective)
def get_EvaluationCount(self): # Integer
return self.get_query_params().get('EvaluationCount')
def set_EvaluationCount(self, EvaluationCount): # Integer
self.add_query_param('EvaluationCount', EvaluationCount)
def get_MetricName(self): # String
return self.get_query_params().get('MetricName')
def set_MetricName(self, MetricName): # String
self.add_query_param('MetricName', MetricName)
def get_Dimensions(self): # RepeatList
return self.get_query_params().get('Dimension')
def set_Dimensions(self, Dimension): # RepeatList
for depth1 in range(len(Dimension)):
if Dimension[depth1].get('DimensionValue') is not None:
self.add_query_param('Dimension.' + str(depth1 + 1) + '.DimensionValue', Dimension[depth1].get('DimensionValue'))
if Dimension[depth1].get('DimensionKey') is not None:
self.add_query_param('Dimension.' + str(depth1 + 1) + '.DimensionKey', Dimension[depth1].get('DimensionKey'))
def get_Period(self): # Integer
return self.get_query_params().get('Period')
def set_Period(self, Period): # Integer
self.add_query_param('Period', Period)
def get_Expressions(self): # RepeatList
return self.get_query_params().get('Expression')
def set_Expressions(self, Expression): # RepeatList
for depth1 in range(len(Expression)):
if Expression[depth1].get('Period') is not None:
self.add_query_param('Expression.' + str(depth1 + 1) + '.Period', Expression[depth1].get('Period'))
if Expression[depth1].get('Threshold') is not None:
self.add_query_param('Expression.' + str(depth1 + 1) + '.Threshold', Expression[depth1].get('Threshold'))
if Expression[depth1].get('MetricName') is not None:
self.add_query_param('Expression.' + str(depth1 + 1) + '.MetricName', Expression[depth1].get('MetricName'))
if Expression[depth1].get('ComparisonOperator') is not None:
self.add_query_param('Expression.' + str(depth1 + 1) + '.ComparisonOperator', Expression[depth1].get('ComparisonOperator'))
if Expression[depth1].get('Statistics') is not None:
self.add_query_param('Expression.' + str(depth1 + 1) + '.Statistics', Expression[depth1].get('Statistics'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_GroupId(self): # Integer
return self.get_query_params().get('GroupId')
def set_GroupId(self, GroupId): # Integer
self.add_query_param('GroupId', GroupId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_ComparisonOperator(self): # String
return self.get_query_params().get('ComparisonOperator')
def set_ComparisonOperator(self, ComparisonOperator): # String
self.add_query_param('ComparisonOperator', ComparisonOperator)
def get_Statistics(self): # String
return self.get_query_params().get('Statistics')
def set_Statistics(self, Statistics): # String
self.add_query_param('Statistics', Statistics)
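# Hedged usage sketch (illustrative values; `request` stands for an instance of
# the request class above): the RepeatList setters flatten lists of dicts into
# indexed query parameters such as 'Dimension.1.DimensionKey'.
#
# request.set_Dimensions([
#     {'DimensionKey': 'instanceId', 'DimensionValue': 'i-0xexample'},
# ])
# request.set_Expressions([
#     {'MetricName': 'cpu_total', 'Period': 60, 'Threshold': '90',
#      'ComparisonOperator': 'GreaterThanThreshold', 'Statistics': 'Average'},
# ])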
| null |
2,069 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdklive.endpoint import endpoint_data
class AddCustomLiveStreamTranscodeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'live', '2016-11-01', 'AddCustomLiveStreamTranscode','live')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResWithSource(self): # String
return self.get_query_params().get('ResWithSource')
def set_ResWithSource(self, ResWithSource): # String
self.add_query_param('ResWithSource', ResWithSource)
def get_Gop(self): # String
return self.get_query_params().get('Gop')
def set_Gop(self, Gop): # String
self.add_query_param('Gop', Gop)
def get_AudioCodec(self): # String
return self.get_query_params().get('AudioCodec')
def set_AudioCodec(self, AudioCodec): # String
self.add_query_param('AudioCodec', AudioCodec)
def get_KmsUID(self): # String
return self.get_query_params().get('KmsUID')
def set_KmsUID(self, KmsUID): # String
self.add_query_param('KmsUID', KmsUID)
def get_Height(self): # Integer
return self.get_query_params().get('Height')
def set_Height(self, Height): # Integer
self.add_query_param('Height', Height)
def get_App(self): # String
return self.get_query_params().get('App')
def set_App(self, App): # String
self.add_query_param('App', App)
def get_Profile(self): # Integer
return self.get_query_params().get('Profile')
def set_Profile(self, Profile): # Integer
self.add_query_param('Profile', Profile)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ExtWithSource(self): # String
return self.get_query_params().get('ExtWithSource')
def set_ExtWithSource(self, ExtWithSource): # String
self.add_query_param('ExtWithSource', ExtWithSource)
def get_BitrateWithSource(self): # String
return self.get_query_params().get('BitrateWithSource')
def set_BitrateWithSource(self, BitrateWithSource): # String
self.add_query_param('BitrateWithSource', BitrateWithSource)
def get_Domain(self): # String
return self.get_query_params().get('Domain')
def set_Domain(self, Domain): # String
self.add_query_param('Domain', Domain)
def get_Template(self): # String
return self.get_query_params().get('Template')
def set_Template(self, Template): # String
self.add_query_param('Template', Template)
	def get_Lazy(self): # String
return self.get_query_params().get('Lazy')
def set_Lazy(self, Lazy): # String
self.add_query_param('Lazy', Lazy)
def get_KmsKeyExpireInterval(self): # String
return self.get_query_params().get('KmsKeyExpireInterval')
def set_KmsKeyExpireInterval(self, KmsKeyExpireInterval): # String
self.add_query_param('KmsKeyExpireInterval', KmsKeyExpireInterval)
def get_TemplateType(self): # String
return self.get_query_params().get('TemplateType')
def set_TemplateType(self, TemplateType): # String
self.add_query_param('TemplateType', TemplateType)
def get_AudioProfile(self): # String
return self.get_query_params().get('AudioProfile')
def set_AudioProfile(self, AudioProfile): # String
self.add_query_param('AudioProfile', AudioProfile)
def get_EncryptParameters(self): # String
return self.get_query_params().get('EncryptParameters')
def set_EncryptParameters(self, EncryptParameters): # String
self.add_query_param('EncryptParameters', EncryptParameters)
def get_AudioChannelNum(self): # Integer
return self.get_query_params().get('AudioChannelNum')
def set_AudioChannelNum(self, AudioChannelNum): # Integer
self.add_query_param('AudioChannelNum', AudioChannelNum)
def get_FPS(self): # Integer
return self.get_query_params().get('FPS')
def set_FPS(self, FPS): # Integer
self.add_query_param('FPS', FPS)
def get_AudioRate(self): # Integer
return self.get_query_params().get('AudioRate')
def set_AudioRate(self, AudioRate): # Integer
self.add_query_param('AudioRate', AudioRate)
def get_FpsWithSource(self): # String
return self.get_query_params().get('FpsWithSource')
def set_FpsWithSource(self, FpsWithSource): # String
self.add_query_param('FpsWithSource', FpsWithSource)
def get_AudioBitrate(self): # Integer
return self.get_query_params().get('AudioBitrate')
def set_AudioBitrate(self, AudioBitrate): # Integer
self.add_query_param('AudioBitrate', AudioBitrate)
def get_Width(self): # Integer
return self.get_query_params().get('Width')
def set_Width(self, Width): # Integer
self.add_query_param('Width', Width)
def get_VideoBitrate(self): # Integer
return self.get_query_params().get('VideoBitrate')
def set_VideoBitrate(self, VideoBitrate): # Integer
self.add_query_param('VideoBitrate', VideoBitrate)
def get_KmsKeyID(self): # String
return self.get_query_params().get('KmsKeyID')
def set_KmsKeyID(self, KmsKeyID): # String
self.add_query_param('KmsKeyID', KmsKeyID)
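# Hedged usage sketch (assumes valid credentials and region; values illustrative):
#
# from aliyunsdkcore.client import AcsClient
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')
# request = AddCustomLiveStreamTranscodeRequest()
# request.set_Domain('example.com')
# request.set_App('live')
# request.set_Template('custom-hd')
# response = client.do_action_with_exception(request)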
| null |
2,070 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class PurchaseStorageCapacityUnitRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'PurchaseStorageCapacityUnit','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
	def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_StartTime(self): # String
return self.get_query_params().get('StartTime')
def set_StartTime(self, StartTime): # String
self.add_query_param('StartTime', StartTime)
def get_Capacity(self): # Integer
return self.get_query_params().get('Capacity')
def set_Capacity(self, Capacity): # Integer
self.add_query_param('Capacity', Capacity)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_Period(self): # Integer
return self.get_query_params().get('Period')
def set_Period(self, Period): # Integer
self.add_query_param('Period', Period)
def get_Amount(self): # Integer
return self.get_query_params().get('Amount')
def set_Amount(self, Amount): # Integer
self.add_query_param('Amount', Amount)
def get_FromApp(self): # String
return self.get_query_params().get('FromApp')
def set_FromApp(self, FromApp): # String
self.add_query_param('FromApp', FromApp)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_PeriodUnit(self): # String
return self.get_query_params().get('PeriodUnit')
def set_PeriodUnit(self, PeriodUnit): # String
self.add_query_param('PeriodUnit', PeriodUnit)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
| null |
2,071 |
"""Process a model selection :class:`ModelProblem` after calibration."""
from pathlib import Path
from typing import List, Optional
import matplotlib.pyplot as plt
import numpy as np
from petab_select.constants import ESTIMATE, TYPE_PATH, Criterion
from .. import store, visualize
from ..C import TYPE_POSTPROCESSOR
from .model_problem import ModelProblem
def multi_postprocessor(
    problem: ModelProblem,
    postprocessors: Optional[List[TYPE_POSTPROCESSOR]] = None,
):
"""Combine multiple postprocessors into a single postprocessor.
See `save_postprocessor` for usage hints.
Parameters
----------
problem:
A model selection :class:`ModelProblem` that has been optimized.
postprocessors:
A list of postprocessors, which will be sequentially applied to the
optimized model `problem`.
    """
for postprocessor in postprocessors:
postprocessor(problem)
def waterfall_plot_postprocessor(
problem: ModelProblem,
output_path: TYPE_PATH = ".",
):
"""Produce a waterfall plot.
See `save_postprocessor` for usage hints and argument documentation.
"""
visualize.waterfall(problem.minimize_result)
plot_output_path = Path(output_path) / (problem.model.model_hash + ".png")
plt.savefig(str(plot_output_path))
def save_postprocessor(
problem: ModelProblem,
output_path: TYPE_PATH = ".",
use_model_hash: bool = False,
):
"""Save the parameter estimation result.
When used, first set the output folder for results, e.g. with
`functools.partial`. This is because postprocessors should take only a
single parameter: an optimized model.
.. code-block:: python
from functools import partial
output_path = 'results'
pp = partial(save_postprocessor, output_path=output_path)
selector = pypesto.select.ModelSelector(
problem=problem,
model_postprocessor=pp,
)
Parameters
----------
problem:
A model selection :class:`ModelProblem` that has been optimized.
output_path:
The location where output will be stored.
use_model_hash:
Whether the filename should use the model hash. Defaults to `False`,
in which case the model ID is used instead.
"""
stem = problem.model.model_id
if use_model_hash:
stem = problem.model.get_hash()
store.write_result(
problem.minimize_result,
Path(output_path) / (stem + ".hdf5"),
)
def model_id_binary_postprocessor(problem: ModelProblem):
"""Change a PEtab Select model ID to a binary string.
Changes the model ID in-place to be a string like `M_ijk`, where
`i`, `j`, `k`, etc. are `1` if the parameter in that position is estimated,
or `0` if the parameter is fixed.
To ensure that other postprocessors (e.g. `report_postprocessor`) use this
new model ID, when in use with a `multi_postprocessor`, ensure this is
before the other postprocessors in the `postprocessors` argument of
`multi_postprocessor`.
Parameters
----------
problem:
A model selection :class:`ModelProblem` that has been optimized.
"""
model_id = "M_"
for parameter_value in problem.model.parameters.values():
model_id += "1" if parameter_value == ESTIMATE else "0"
problem.model.model_id = model_id
def report_postprocessor(
problem: ModelProblem,
output_filepath: TYPE_PATH,
    criteria: Optional[List[Criterion]] = None,
):
"""Create a TSV table of model selection results.
Parameters
----------
problem:
A model selection :class:`ModelProblem` that has been optimized.
output_filepath:
The file path where the report will be saved.
criteria:
The criteria that will be in the report. Defaults to nllh, AIC, AICc,
and BIC.
"""
output_filepath = Path(output_filepath)
write_header = False
# Only write the header if the file doesn't yet exist or is empty.
if not output_filepath.exists() or output_filepath.stat().st_size == 0:
write_header = True
if criteria is None:
criteria = [
Criterion.NLLH,
Criterion.AIC,
Criterion.AICC,
Criterion.BIC,
]
start_optimization_times = problem.minimize_result.optimize_result.time
header = []
row = []
header.append('model_id')
row.append(problem.model.model_id)
header.append('total_time')
row.append(str(sum(start_optimization_times)))
for criterion in criteria:
header.append(criterion.value)
row.append(str(problem.model.get_criterion(criterion)))
# Arbitrary convergence criterion
header.append('n_converged')
row.append(
str(
(
np.array(problem.minimize_result.optimize_result.fval)
< (problem.minimize_result.optimize_result.list[0].fval + 0.1)
).sum()
)
)
for start_index, start_optimization_time in enumerate(
start_optimization_times
):
header.append(f'start_time_{start_index}')
row.append(str(start_optimization_time))
with open(output_filepath, 'a+') as f:
if write_header:
f.write('\t'.join(header) + '\n')
f.write('\t'.join(row) + '\n')
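# Minimal composition sketch, assuming an existing 'output' directory; it wires
# the postprocessors above into a single callable via `functools.partial`:
#
# from functools import partial
# postprocessor = partial(
#     multi_postprocessor,
#     postprocessors=[
#         model_id_binary_postprocessor,
#         partial(waterfall_plot_postprocessor, output_path='output'),
#         partial(save_postprocessor, output_path='output'),
#         partial(report_postprocessor, output_filepath='output/report.tsv'),
#     ],
# )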
| null |
2,072 |
from functools import partial
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import Paginator
from django.template import Context, Template
from django.utils.translation import gettext as _
from creme.creme_core.core.paginator import FlowPaginator
from creme.creme_core.models import (
FakeMailingList,
FakeOrganisation,
HeaderFilter,
)
from creme.creme_core.models.header_filter import HeaderFilterList
from creme.creme_core.templatetags.creme_listview import (
listview_header_filters,
)
from ..base import CremeTestCase
# TODO: write complete tests for EntityCells
# TODO: to be completed
class CremeListViewTagsTestCase(CremeTestCase):
def test_listview_pager_slow(self):
user = self.get_root_user()
for i in range(1, 20):
FakeOrganisation.objects.create(user=user, name=f'A{i}')
paginator = Paginator(FakeOrganisation.objects.all(), 5)
with self.assertNoException():
template = Template(
r'{% load creme_listview %}'
r'{% listview_pager page %}'
)
rendered = template.render(Context({'page': paginator.page(1)}))
self.assertInHTML(
f'<a class="pager-link is-disabled pager-link-previous" href="" '
f'title="" >{_("Previous page")}</a>',
rendered,
)
self.assertInHTML(
'<span class="pager-link is-disabled pager-link-current">1</span>',
rendered,
)
self.assertInHTML(
'<a class="pager-link pager-link-next" href="" title="{help}" '
'data-page="2">{label}</a>'.format(
help=_('To page {}').format(2),
label=_('Next page'),
),
rendered,
)
def test_listview_pager_fast(self):
user = self.get_root_user()
for i in range(1, 20):
FakeOrganisation.objects.create(user=user, name=f'A{i}')
paginator = FlowPaginator(
queryset=FakeOrganisation.objects.all(),
key='name', per_page=5, count=20,
)
with self.assertNoException():
template = Template(
r'{% load creme_listview %}'
r'{% listview_pager page %}'
)
rendered = template.render(Context({
'page': paginator.page({
'type': 'first',
})
}))
self.assertInHTML(
'<a class="pager-link is-disabled pager-link-first" href="" '
'title="{label}" >{label}</a>'.format(
label=_('First page'),
),
rendered
)
self.assertInHTML(
'<a class="pager-link is-disabled pager-link-previous" href="" '
'title="{label}" >{label}</a>'.format(
label=_('Previous page'),
),
rendered,
)
def test_listview_header_filters01(self):
user = self.get_root_user()
ctype = ContentType.objects.get_for_model(FakeMailingList)
self.assertFalse(HeaderFilter.objects.filter(entity_type=ctype).first())
hf = HeaderFilter.objects.create_if_needed(
pk='test_hf-ml01', name='View', model=FakeMailingList,
)
hfilters = HeaderFilterList(
content_type=ctype,
user=user,
)
hfilters.select_by_id(hf.id)
ctxt = listview_header_filters(
model=FakeMailingList,
user=user,
hfilters=hfilters,
show_buttons=True,
)
self.assertIsInstance(ctxt, dict)
self.assertIs(ctxt.get('model'), FakeMailingList)
self.assertIs(ctxt.get('show_buttons'), True)
self.assertIs(ctxt.get('can_edit'), True)
self.assertIs(ctxt.get('can_delete'), False)
self.assertEqual(ctxt.get('selected'), hf)
self.assertEqual([hf], ctxt.get('global_header_filters'))
self.assertFalse([*ctxt.get('my_header_filters')])
self.assertFalse([*ctxt.get('other_header_filters')])
    def test_listview_header_filters02(self):
user = self.get_root_user()
other_user = self.create_user()
ctype = ContentType.objects.get_for_model(FakeMailingList)
self.assertFalse(HeaderFilter.objects.filter(entity_type=ctype).first())
create_hf = partial(HeaderFilter.objects.create_if_needed, model=FakeMailingList)
hf01 = create_hf(pk='test_hf-ml01', name='View')
hf02 = create_hf(pk='test_hf-ml02', name='My view', user=user, is_custom=True)
hf03 = create_hf(pk='test_hf-ml03', name='Other view', user=other_user, is_custom=True)
hfilters = HeaderFilterList(
content_type=ctype,
user=user,
)
hfilters.select_by_id(hf02.id)
ctxt = listview_header_filters(
model=FakeMailingList,
user=user,
hfilters=hfilters,
show_buttons=False,
)
self.assertIs(ctxt.get('show_buttons'), False)
self.assertIs(ctxt.get('can_edit'), True)
self.assertIs(ctxt.get('can_delete'), True)
self.assertEqual(ctxt.get('selected'), hf02)
self.assertEqual([hf01], ctxt.get('global_header_filters'))
self.assertEqual([hf02], ctxt.get('my_header_filters'))
self.assertEqual([(other_user, [hf03])], ctxt.get('other_header_filters'))
| null |
2,073 |
from typing import List, Dict, Optional
from boa3.internal.neo.vm.VMCode import VMCode
class VMCodeMap:
def __init__(self):
self._vm_code_list: List[VMCode] = []
self._vm_code_addresses: List[int] = []
        # optimization: avoids iterating over every code when searching for jump targets
self._vm_code_with_target: List[VMCode] = []
def __len__(self) -> int:
return self._vm_code_list.__len__()
    def clear(self):
        self._vm_code_addresses.clear()
        self._vm_code_list.clear()
        self._vm_code_with_target.clear()
def get_code_map(self) -> Dict[int, VMCode]:
size = len(self)
return {self._vm_code_addresses[index]: self._vm_code_list[index] for index in range(size)}
def get_code_list(self) -> List[VMCode]:
return self._vm_code_list
def get_code_with_target_list(self) -> List[VMCode]:
return self._vm_code_with_target
def get_bytecode_size(self) -> int:
if len(self) < 1:
return 0
return self._vm_code_addresses[-1] + self._vm_code_list[-1].size
def insert_code(self, vm_code: VMCode, has_target: bool = False):
if vm_code not in self._vm_code_list:
self._vm_code_addresses.append(self.get_bytecode_size())
self._vm_code_list.append(vm_code)
if has_target:
self._vm_code_with_target.append(vm_code)
    def get_code(self, address: int) -> Optional[VMCode]:
try:
index = self._vm_code_addresses.index(address)
except ValueError:
            # the address is not in the list
if address >= self.get_bytecode_size():
# the address is not in the bytecode
return None
            # if the address is not the start of an instruction, gets the last instruction before the given address
code_address = 0
for addr in self._vm_code_addresses:
if addr > address:
break
code_address = addr
index = self._vm_code_addresses.index(code_address)
return self._vm_code_list[index]
def get_start_address(self, vm_code: VMCode) -> int:
try:
index = self._vm_code_list.index(vm_code)
return self._vm_code_addresses[index]
except ValueError:
return 0
def get_end_address(self, vm_code: VMCode) -> int:
try:
index = self._vm_code_list.index(vm_code) + 1
if index == len(self._vm_code_list):
return self.get_bytecode_size()
else:
return self._vm_code_addresses[index] - 1
except ValueError:
return 0
def get_addresses(self, start_address: int, end_address: int) -> List[int]:
if start_address > end_address:
start_address, end_address = end_address, start_address
addresses = []
for address in range(start_address, end_address + 1):
if address in self._vm_code_addresses:
addresses.append(address)
return addresses
def get_addresses_from_codes(self, codes: List[VMCode]) -> List[int]:
if len(codes) < 1:
return []
addresses = []
for vm_code in codes:
try:
index = self._vm_code_list.index(vm_code)
addresses.append(self._vm_code_addresses[index])
except ValueError:
continue
return addresses
def get_opcodes(self, addresses: List[int]) -> List[VMCode]:
codes = []
for address in sorted(addresses):
try:
index = self._vm_code_addresses.index(address)
codes.append(self._vm_code_list[index])
except ValueError:
# address not in list
continue
return codes
def update_addresses(self, start_address: int = 0):
next_address = -1
final_size = len(self._vm_code_list)
if len(self._vm_code_addresses) > final_size:
self._vm_code_addresses = self._vm_code_addresses[:final_size]
for index in range(final_size):
address = self._vm_code_addresses[index]
if address >= start_address:
if next_address < 0:
if index > 0:
new_address = self._vm_code_addresses[index - 1]
next_address = new_address + self._vm_code_list[index - 1].size
else:
next_address = 0
if next_address != address:
if index < len(self._vm_code_addresses):
self._vm_code_addresses[index] = next_address
else:
self._vm_code_addresses.append(next_address)
next_address += self._vm_code_list[index].size
def move_to_end(self, first_code_address: int, last_code_address: int) -> Optional[int]:
if last_code_address < first_code_address:
return
if (len(self._vm_code_addresses) > 0 and
last_code_address == self._vm_code_addresses[-1]):
            # there's nothing to change if it's moving all the codes
return
first_index = -1
last_index = 0
for index, address in enumerate(self._vm_code_addresses):
if first_code_address <= address and first_index < 0:
first_index = index
elif address > last_code_address:
last_index = index
break
if first_index >= 0:
# if the first index was not set, there's nothing to move
if last_index < first_index:
last_index = len(self._vm_code_addresses)
self._vm_code_list[first_index:] = (self._vm_code_list[last_index:] +
self._vm_code_list[first_index:last_index])
self.update_addresses(first_code_address)
index = self.get_bytecode_size()
return index
def remove_opcodes_by_addresses(self, addresses: List[int]):
was_changed = False
# reversed so we only need to update addresses once after all are removed
for code_address in sorted(addresses, reverse=True):
try:
index = self._vm_code_addresses.index(code_address)
code = self._vm_code_list.pop(index)
was_changed = True
self._vm_code_with_target.remove(code)
except ValueError:
# don't stop the loop if an address is not found
continue
if was_changed:
self.update_addresses(min(addresses))
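# Minimal usage sketch (assumes concrete VMCode objects exposing a `size`
# attribute, which is all the map relies on for address bookkeeping;
# `generated_codes` and `jump_codes` are placeholder names):
#
# code_map = VMCodeMap()
# for code in generated_codes:   # VMCode instances in emission order
#     code_map.insert_code(code, has_target=code in jump_codes)
# first = code_map.get_code(0)   # instruction starting at address 0
# end = code_map.get_bytecode_size()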
| null |
2,074 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class CreateVirtualBorderRouterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'CreateVirtualBorderRouter','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_CircuitCode(self): # String
return self.get_query_params().get('CircuitCode')
def set_CircuitCode(self, CircuitCode): # String
self.add_query_param('CircuitCode', CircuitCode)
def get_VlanId(self): # Integer
return self.get_query_params().get('VlanId')
def set_VlanId(self, VlanId): # Integer
self.add_query_param('VlanId', VlanId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def METHOD_NAME(self): # String
return self.get_query_params().get('PeerGatewayIp')
def set_PeerGatewayIp(self, PeerGatewayIp): # String
self.add_query_param('PeerGatewayIp', PeerGatewayIp)
def get_PeeringSubnetMask(self): # String
return self.get_query_params().get('PeeringSubnetMask')
def set_PeeringSubnetMask(self, PeeringSubnetMask): # String
self.add_query_param('PeeringSubnetMask', PeeringSubnetMask)
def get_LocalGatewayIp(self): # String
return self.get_query_params().get('LocalGatewayIp')
def set_LocalGatewayIp(self, LocalGatewayIp): # String
self.add_query_param('LocalGatewayIp', LocalGatewayIp)
def get_UserCidr(self): # String
return self.get_query_params().get('UserCidr')
def set_UserCidr(self, UserCidr): # String
self.add_query_param('UserCidr', UserCidr)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_PhysicalConnectionId(self): # String
return self.get_query_params().get('PhysicalConnectionId')
def set_PhysicalConnectionId(self, PhysicalConnectionId): # String
self.add_query_param('PhysicalConnectionId', PhysicalConnectionId)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_VbrOwnerId(self): # Long
return self.get_query_params().get('VbrOwnerId')
def set_VbrOwnerId(self, VbrOwnerId): # Long
self.add_query_param('VbrOwnerId', VbrOwnerId)
| null |
2,075 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Facilities to handle file streaming
------------------------------------
"""
import os
import stat
import time
import re
import errno
from gluon.http import HTTP
from gluon.utils import unlocalised_http_header_date
from gluon.contenttype import contenttype
from gluon._compat import PY2
regex_start_range = re.compile(r'\d+(?=\-)')
regex_stop_range = re.compile(r'(?<=\-)\d+')
DEFAULT_CHUNK_SIZE = 64 * 1024
def streamer(stream, chunk_size=DEFAULT_CHUNK_SIZE, bytes=None, callback=None):
try:
offset = 0
while bytes is None or offset < bytes:
            if bytes is not None and bytes - offset < chunk_size:
chunk_size = bytes - offset
data = stream.read(chunk_size)
length = len(data)
if not length:
break
else:
yield data
if length < chunk_size:
break
offset += length
finally:
stream.close()
if callback:
callback()
def stream_file_or_304_or_206(
static_file,
chunk_size=DEFAULT_CHUNK_SIZE,
request=None,
headers={},
status=200,
error_message=None
):
# FIX THIS
# if error_message is None:
# error_message = rewrite.THREAD_LOCAL.routes.error_message % 'invalid request'
try:
if PY2:
open_f = file # this makes no sense but without it GAE cannot open files
else:
open_f = open
fp = open_f(static_file,'rb')
except IOError as e:
if e.errno == errno.EISDIR:
raise HTTP(403, error_message, web2py_error='file is a directory')
elif e.errno == errno.EACCES:
raise HTTP(403, error_message, web2py_error='inaccessible file')
else:
raise HTTP(404, error_message, web2py_error='invalid file')
else:
fp.close()
stat_file = os.stat(static_file)
fsize = stat_file[stat.ST_SIZE]
modified = stat_file[stat.ST_MTIME]
mtime = unlocalised_http_header_date(time.gmtime(modified))
headers.setdefault('Content-Type', contenttype(static_file))
headers.setdefault('Last-Modified', mtime)
headers.setdefault('Pragma', 'cache')
headers.setdefault('Cache-Control', 'private')
    # if this is a normal response and not a response to an error page
if status == 200:
if request and request.env.http_if_modified_since == mtime:
raise HTTP(304, **{'Content-Type': headers['Content-Type']})
elif request and request.env.http_range:
start_items = regex_start_range.findall(request.env.http_range)
if not start_items:
start_items = [0]
stop_items = regex_stop_range.findall(request.env.http_range)
if not stop_items or int(stop_items[0]) > fsize - 1:
stop_items = [fsize - 1]
part = (int(start_items[0]), int(stop_items[0]), fsize)
bytes = part[1] - part[0] + 1
try:
stream = open(static_file, 'rb')
except IOError as e:
if e.errno in (errno.EISDIR, errno.EACCES):
raise HTTP(403)
else:
raise HTTP(404)
stream.seek(part[0])
headers['Content-Range'] = 'bytes %i-%i/%i' % part
headers['Content-Length'] = '%i' % bytes
status = 206
# in all the other cases (not 304, not 206, but 200 or error page)
if status != 206:
enc = request.env.http_accept_encoding
        if enc and 'gzip' in enc and 'Content-Encoding' not in headers:
gzipped = static_file + '.gz'
if os.path.isfile(gzipped) and os.path.getmtime(gzipped) >= modified:
static_file = gzipped
fsize = os.path.getsize(gzipped)
headers['Content-Encoding'] = 'gzip'
headers['Vary'] = 'Accept-Encoding'
try:
stream = open(static_file, 'rb')
except IOError as e:
# this better not happen when returning an error page ;-)
if e.errno in (errno.EISDIR, errno.EACCES):
raise HTTP(403)
else:
raise HTTP(404)
headers['Content-Length'] = fsize
bytes = None
if request and request.env.web2py_use_wsgi_file_wrapper:
wrapped = request.env.wsgi_file_wrapper(stream, chunk_size)
else:
        wrapped = streamer(stream, chunk_size=chunk_size, bytes=bytes)
raise HTTP(status, wrapped, **headers)
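# Hedged usage sketch (typical web2py controller code; the path is illustrative
# and `request` is the controller-injected global; the raised HTTP becomes the
# response):
#
# def download():
#     return stream_file_or_304_or_206(
#         os.path.join(request.folder, 'static', 'report.pdf'),
#         request=request,
#     )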
| null |
2,076 |
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pathlib
import click
import numpy as np
from neu.comm import CommunicatorWrapper
from neu.datasets import _get_sliced_data_source
from nnabla import logger
from nnabla.utils.data_iterator import data_iterator
from nnabla_diffusion.config import DatasetConfig
from .common import SimpleDatasource
# ImagenetDataIterator uses label_wordnetid.csv, label_words.csv, and validation_data_label.txt
DEFAULT_RESOURCE_DIR = os.path.join(os.path.dirname(__file__), # nnabla-examples/diffusion-models/dataset
"..", # nnabla-examples/diffusion-models
"..", # nnabla-examples
"image-classification/imagenet" # nnabla-examples/image-classification/imagenet
)
def _info(msg):
prefix = "[ImagenetDataIterator]"
logger.info(f"{prefix} {msg}")
def ImagenetDataIterator(conf: DatasetConfig,
comm: CommunicatorWrapper = None,
rng=None,
resource_dir=DEFAULT_RESOURCE_DIR):
# todo: use image-classification/imagenet utils
if not os.path.exists(conf.dataset_root_dir):
raise ValueError(f"[ImagenetDataIterator] '{conf.dataset_root_dir}' is not found. "
"Please make sure that you specify the correct directory path.")
# extract label id
label_wordnetid_csv = os.path.join(resource_dir, "label_wordnetid.csv")
_info(f"load label_wordnetid from {label_wordnetid_csv}.")
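    # Each line of the CSV is assumed to be "<integer label>,<wordnet id>",
    # e.g. (illustrative values):
    #   449,n01440764
    #   450,n01443537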
dname2label = {}
with open(label_wordnetid_csv, "r") as fp:
for l in fp:
label, dname = l.rstrip().split(",")
dname2label[dname] = int(label)
# get all files
if conf.train:
# ilsvrcYYYY/train/{label id}/*.JPEG
root_dir = pathlib.Path(os.path.join(conf.dataset_root_dir, "train"))
_info(f"load train data and label from {root_dir}.")
raw_paths = sorted(root_dir.rglob('*.JPEG'))
paths = []
labels = []
for path in raw_paths:
# Have to change pathlib.Path to string to avoid imread error
paths.append(str(path))
# Extract label
name = path.name
dname = name.split("_")[0]
label = dname2label[dname]
labels.append(label)
else:
        # ilsvrcYYYY/val/*.JPEG
root_dir = os.path.join(conf.dataset_root_dir, "val")
_info(f"load validation data from {root_dir}.")
raise NotImplementedError("val is not supported now.")
ds = SimpleDatasource(conf,
img_paths=paths,
labels=labels,
rng=rng)
_info(f"Loaded imagenet dataset. # of images: {ds.size}.")
ds = _get_sliced_data_source(ds, comm, conf.shuffle_dataset)
return data_iterator(ds,
conf.batch_size,
with_memory_cache=False,
use_thread=True,
with_file_cache=False)
def test_data_iterator(di, output_dir, comm=None, num_iters=100):
from neu.reporter import KVReporter
from nnabla.utils.image_utils import imsave
reporter = KVReporter(comm=comm)
os.makedirs(output_dir, exist_ok=True)
for itr in range(num_iters):
data, label = di.next()
reporter.kv_mean("mean", data.mean())
reporter.kv_mean("std", data.std())
reporter.kv_mean("max", data.max())
reporter.kv_mean("min", data.min())
imsave(os.path.join(
output_dir, f"{itr}.png"), data, channel_first=True)
reporter.dump()
@click.command()
@click.option("--imagenet_base_dir", default=None)
def main(imagenet_base_dir):
from neu.misc import init_nnabla
comm = init_nnabla(ext_name="cpu", device_id=0, type_config="float")
if imagenet_base_dir is not None and os.path.exists(imagenet_base_dir):
logger.info("Test imagenet data iterator.")
di = ImagenetDataIterator(2, imagenet_base_dir, comm=comm)
        test_data_iterator(di, "./tmp/imagenet", comm)
if __name__ == "__main__":
main()
__all__ = ["ImagenetDataIterator"]
| null |
2,077 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class CreateVirtualBorderRouterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'CreateVirtualBorderRouter','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_CircuitCode(self): # String
return self.get_query_params().get('CircuitCode')
def set_CircuitCode(self, CircuitCode): # String
self.add_query_param('CircuitCode', CircuitCode)
def get_VlanId(self): # Integer
return self.get_query_params().get('VlanId')
def set_VlanId(self, VlanId): # Integer
self.add_query_param('VlanId', VlanId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_PeerGatewayIp(self): # String
return self.get_query_params().get('PeerGatewayIp')
def set_PeerGatewayIp(self, PeerGatewayIp): # String
self.add_query_param('PeerGatewayIp', PeerGatewayIp)
def get_PeeringSubnetMask(self): # String
return self.get_query_params().get('PeeringSubnetMask')
def set_PeeringSubnetMask(self, PeeringSubnetMask): # String
self.add_query_param('PeeringSubnetMask', PeeringSubnetMask)
def get_LocalGatewayIp(self): # String
return self.get_query_params().get('LocalGatewayIp')
def set_LocalGatewayIp(self, LocalGatewayIp): # String
self.add_query_param('LocalGatewayIp', LocalGatewayIp)
def get_UserCidr(self): # String
return self.get_query_params().get('UserCidr')
def set_UserCidr(self, UserCidr): # String
self.add_query_param('UserCidr', UserCidr)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def METHOD_NAME(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_PhysicalConnectionId(self): # String
return self.get_query_params().get('PhysicalConnectionId')
def set_PhysicalConnectionId(self, PhysicalConnectionId): # String
self.add_query_param('PhysicalConnectionId', PhysicalConnectionId)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_VbrOwnerId(self): # Long
return self.get_query_params().get('VbrOwnerId')
def set_VbrOwnerId(self, VbrOwnerId): # Long
self.add_query_param('VbrOwnerId', VbrOwnerId)
| null |
2,078 |
import logging
import tempfile
import unittest
from pathlib import Path
from processors import areatree
from processors.areatree.process import (
_areatree_lines,
_extract_building_prefix,
_extract_id_and_type,
_extract_names,
_split_line,
)
class AreatreeExtractNames(unittest.TestCase):
def test_extract_names_with_short_name(self) -> None:
"""If there is a short name, it is returned as well"""
names = ["Mathematics Informatics", "mi"]
expected_output = {"name": "Mathematics Informatics", "short_name": "mi"}
self.assertEqual(_extract_names(names), expected_output)
def test_extract_names_without_short_name(self) -> None:
"""If there is no short name, only the name is returned"""
names = ["Mathematics Informatics"]
expected_output = {"name": "Mathematics Informatics"}
self.assertEqual(_extract_names(names), expected_output)
    def test_extract_names_with_long_short_name(self) -> None:
"""If the short name is longer than 20 chars, a warning is raised"""
names = ["Mechanical Engineering", "ThisIsAVeryLongNameForAShortName"]
expected_output = {"name": "Mechanical Engineering", "short_name": "ThisIsAVeryLongNameForAShortName"}
with self.assertLogs(level=logging.WARNING) as recorded_logs:
self.assertEqual(_extract_names(names), expected_output)
self.assertIn(
"'ThisIsAVeryLongNameForAShortName' is very long for a short name (>20 chars)",
recorded_logs.output[0],
)
def test_extract_names_with_extra_names(self) -> None:
"""If there are more than two names, an error is raised"""
names = ["Name1", "Name2", "Name3"]
with self.assertRaises(RuntimeError):
_extract_names(names)
with self.assertRaises(IndexError):
_extract_names([])
class AreatreeExtractBuildingPrefix(unittest.TestCase):
def test_dash_separator(self) -> None:
"""If the building id is separated by a dash, it is returned as a string"""
expected_result = {"b_prefix": "b1-b2-b3"}
self.assertEqual(_extract_building_prefix("b1-b2-b3"), expected_result)
def test_areatree_uncertain(self) -> None:
"""If the building id starts with a dash, it is marked as uncertain"""
expected_result = {"data_quality": {"areatree_uncertain": True}, "b_prefix": "b1-b2"}
self.assertEqual(_extract_building_prefix("-b1-b2"), expected_result)
def test_comma_separator(self) -> None:
"""If the building id is separated by a comma, it is split into a list"""
expected_result = {"b_prefix": ["b1", "b2", "b3"]}
self.assertEqual(_extract_building_prefix("b1,b2,b3"), expected_result)
def test_empty(self) -> None:
"""If the building id is empty, an empty dict is returned"""
self.assertEqual(_extract_building_prefix(""), {})
def test_building_ids_without_separator(self) -> None:
"""If the building id is not separated by a dash or comma, it is returned as is"""
expected_result = {"b_prefix": "b1"}
self.assertEqual(_extract_building_prefix("b1"), expected_result)
class AreatreeExtractIdAndType(unittest.TestCase):
def test_specified_type(self) -> None:
"""If the type is specified, it is returned"""
expected = {"id": "abc", "type": "building"}
self.assertEqual(_extract_id_and_type("abc[building]", None), expected)
self.assertEqual(_extract_id_and_type("abc[building]", "cdf"), expected)
def test_comma(self) -> None:
"""If the id is inferable from the line, it is returned"""
expected = {"id": "123", "visible_id": "visible_id", "type": "area"}
self.assertEqual(_extract_id_and_type("123,visible_id", None), expected)
self.assertEqual(_extract_id_and_type("123,visible_id", "cdf"), expected)
def test_single_id(self) -> None:
"""If the id is inferable from the line, it is returned"""
expected = {"id": "xyz", "type": "building"}
self.assertEqual(_extract_id_and_type("xyz", "xyz"), expected)
def test_id_not_inferable(self) -> None:
"""If the id is not inferable from the line, an error is raised"""
with self.assertRaises(RuntimeError):
_extract_id_and_type("", ["b_prefix1", "b_prefix2"])
with self.assertRaises(RuntimeError):
_extract_id_and_type("123,visible_id,extra_id", ["b_prefix1", "b_prefix2"])
with self.assertRaises(RuntimeError):
_extract_id_and_type("123,visible_id,extra_id", None)
class AreatreeLinesTestCase(unittest.TestCase):
def test_empty_file(self) -> None:
"""Empty file returns empty list"""
with tempfile.NamedTemporaryFile() as file:
areatree.process.AREATREE_FILE = Path(file.name)
self.assertEqual(list(_areatree_lines()), [])
def test_comment_lines(self) -> None:
"""Comment lines are removed"""
with tempfile.NamedTemporaryFile(mode="w+") as file:
areatree.process.AREATREE_FILE = Path(file.name)
file.write("line1\n")
file.write("\n") # Empty line
file.write("# Comment line\n")
file.write("line2\n")
file.flush()
self.assertEqual(list(_areatree_lines()), ["line1", "line2"])
def test_inline_comments(self) -> None:
"""Inline comments are removed"""
with tempfile.NamedTemporaryFile(mode="w+") as file:
areatree.process.AREATREE_FILE = Path(file.name)
file.write("line1#comment1\n")
file.write("line2#comment2 # comment 3\n")
file.flush()
self.assertEqual(list(_areatree_lines()), ["line1", "line2"])
def test_file_preserves_indentation(self) -> None:
"""Indentation is preserved"""
with tempfile.NamedTemporaryFile(mode="w+") as file:
areatree.process.AREATREE_FILE = Path(file.name)
file.write(" line1 \n")
file.write(" line2\n")
file.write("line3")
file.flush()
self.assertEqual(list(_areatree_lines()), [" line1", " line2", "line3"])
class SplitLineTestCase(unittest.TestCase):
def test_valid_line(self) -> None:
"""Valid lines are split correctly"""
self.assertEqual(_split_line("1:Building A:123,456"), ("1", "Building A", "123,456"))
def test_invalid_line_missing_parts(self) -> None:
"""Missing parts are not allowed"""
with self.assertRaises(RuntimeError):
_split_line("1:Building A")
def test_invalid_line_extra_parts(self) -> None:
"""Extra parts are not allowed"""
with self.assertRaises(RuntimeError):
_split_line("1:Building A:123,456:extra_part")
if __name__ == "__main__":
unittest.main()
| null |
2,079 |
from typing import (
Any,
Dict,
List,
Optional,
)
from typing_extensions import (
Literal,
Protocol,
)
Impact = Literal["minor", "moderate", "serious", "critical"]
# Axe tests we want to actively pass in Galaxy; the next list controls those tests.
BASELINE_AXE_PASSING_IDS = [
"aria-roles",
]
# check all violations of this level as a baseline 'impact'...
BASELINE_VIOLATION_FILTER: Impact = "critical"
FORMS_VIOLATIONS = ["duplicate-id-aria"] # https://github.com/galaxyproject/galaxy/issues/16188
# unless they are in this list...
KNOWN_VIOLATIONS = FORMS_VIOLATIONS + [
"aria-required-attr",
"aria-required-children",
"aria-required-parent",
"image-alt", # test_workflow_editor.py::TestWorkflowEditor::test_existing_connections
"label",
"button-name",
"select-name",
]
# Over time we hope known violations grows smaller until the violation
# filter can be lowered. Next level would be "serious".
# xref https://github.com/galaxyproject/galaxy/issues/16185
class AxeResult:
def __init__(self, json: Dict[str, Any]):
self._json = json
@property
def id(self) -> str:
return self._json["id"]
@property
def description(self) -> str:
return self._json["description"]
@property
def impact(self) -> Optional[Impact]:
return self._json["impact"]
@property
def nodes(self):
return self._json["nodes"]
def is_impact_at_least(self, impact: Impact) -> bool:
range_of_impacts = []
if impact == "minor":
range_of_impacts = ["minor", "moderate", "serious", "critical"]
elif impact == "moderate":
range_of_impacts = ["moderate", "serious", "critical"]
elif impact == "serious":
range_of_impacts = ["serious", "critical"]
elif impact == "critical":
range_of_impacts = ["critical"]
return self.impact in range_of_impacts
class Violation(AxeResult):
@property
    def message(self) -> str:
nodes = self.nodes
nodes_str = ", ".join([n["html"] for n in nodes])
return f"AXE accessibility test violation found [{self.id}] with impact {self.impact}: {self.description}. Problem found in {nodes_str}."
class AxeResults(Protocol):
def assert_passes(self, id: str) -> None:
""""""
def assert_does_not_violate(self, id: str) -> None:
""""""
def violations(self) -> List[Violation]:
""""""
# these next two could be refactored into a mixin...
def violations_with_impact_of_at_least(self, impact: Impact) -> List[Violation]:
""""""
def assert_no_violations_with_impact_of_at_least(
self, impact: Impact, excludes: Optional[List[str]] = None
) -> None:
""""""
class RealAxeResults(AxeResults):
def __init__(self, json: Dict[str, Any]):
self._json = json
def assert_passes(self, id: str) -> None:
passing_results = self._json["passes"]
result = _check_list_for_id(passing_results, id)
assert result
def assert_does_not_violate(self, id: str) -> None:
violations = self._json["violations"]
result = _check_list_for_id(violations, id)
if result:
violation = Violation(result)
            raise AssertionError(violation.message)
def violations(self) -> List[Violation]:
violations = self._json["violations"]
return [Violation(v) for v in violations]
def violations_with_impact_of_at_least(self, impact: Impact) -> List[Violation]:
return [v for v in self.violations() if v.is_impact_at_least(impact)]
def assert_no_violations_with_impact_of_at_least(
self, impact: Impact, excludes: Optional[List[str]] = None
) -> None:
excludes = excludes or []
violations = self.violations_with_impact_of_at_least(impact)
filtered_violations = [v for v in violations if v.id not in excludes]
if filtered_violations:
            raise AssertionError(filtered_violations[0].message)
class NullAxeResults(AxeResults):
"""All assertions just pass because we're skipping Axe evaluation."""
def assert_passes(self, id: str) -> None:
pass
def assert_does_not_violate(self, id: str) -> None:
pass
def violations(self) -> List[Violation]:
return []
# these next two could be refactored into a mixin...
def violations_with_impact_of_at_least(self, impact: Impact) -> List[Violation]:
return []
def assert_no_violations_with_impact_of_at_least(
self, impact: Impact, excludes: Optional[List[str]] = None
) -> None:
pass
def assert_baseline_accessible(axe_results: AxeResults) -> None:
for passing_id in BASELINE_AXE_PASSING_IDS:
axe_results.assert_passes(passing_id)
for violation in axe_results.violations_with_impact_of_at_least(BASELINE_VIOLATION_FILTER):
violation_id = violation.id
if violation_id not in KNOWN_VIOLATIONS:
            raise AssertionError(violation.message)
def _check_list_for_id(result_list: List[Dict[str, Any]], id) -> Optional[Dict[str, Any]]:
for result in result_list:
if result.get("id") == id:
return result
return None
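# Hedged usage sketch (`axe_results_json` stands for the JSON emitted by an
# axe-core run in the browser, e.g. via a Selenium helper):
#
# results = RealAxeResults(axe_results_json)
# results.assert_passes("aria-roles")
# results.assert_no_violations_with_impact_of_at_least(
#     "critical", excludes=KNOWN_VIOLATIONS
# )
# assert_baseline_accessible(results)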
| null |
2,080 |
# Copyright 2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nnabla as nn
import nnabla.functions as F
import nnabla.initializer as I
import numpy as np
from .ops import upsample_conv_2d, downsample_2d, weight_init_fn
def mapping_network(z, outmaps=512, num_layers=8, net_scope='G_mapping/Dense'):
lrmul = 0.01
runtime_coef = 0.00044194172
out = z
for i in range(num_layers):
with nn.parameter_scope(f'{net_scope}{i}'):
W, bias = weight_init_fn(
shape=(out.shape[1], outmaps), lrmul=lrmul)
out = F.affine(out, W*runtime_coef, bias*lrmul)
out = F.mul_scalar(F.leaky_relu(
out, alpha=0.2, inplace=False), np.sqrt(2), inplace=False)
return out
def styled_conv_block(conv_input, w, noise=None, res=4, inmaps=512, outmaps=512, kernel_size=3,
pad_size=1, demodulate=True, namescope="Conv", up=False, act=F.leaky_relu):
"""
    Style-modulated conv block (optional upsampling and noise) for the Generator
"""
batch_size = conv_input.shape[0]
with nn.parameter_scope(f'G_synthesis/{res}x{res}/{namescope}'):
W, bias = weight_init_fn(shape=(w.shape[1], inmaps))
runtime_coef = (1. / np.sqrt(512)).astype(np.float32)
style = F.affine(w, W*runtime_coef, bias) + 1.0
runtime_coef_for_conv = (
1/np.sqrt(np.prod([inmaps, kernel_size, kernel_size]))).astype(np.float32)
if up:
init_function = weight_init_fn(
shape=(inmaps, outmaps, kernel_size, kernel_size), return_init=True)
conv_weight = nn.parameter.get_parameter_or_create(name=f'G_synthesis/{res}x{res}/{namescope}/conv/W',
shape=(inmaps, outmaps, kernel_size, kernel_size), initializer=init_function)
else:
init_function = weight_init_fn(
shape=(outmaps, inmaps, kernel_size, kernel_size), return_init=True)
conv_weight = nn.parameter.get_parameter_or_create(name=f'G_synthesis/{res}x{res}/{namescope}/conv/W',
shape=(outmaps, inmaps, kernel_size, kernel_size), initializer=init_function)
conv_weight = F.mul_scalar(conv_weight, runtime_coef_for_conv)
if up:
scale = F.reshape(
style, (style.shape[0], style.shape[1], 1, 1, 1), inplace=False)
else:
scale = F.reshape(
style, (style.shape[0], 1, style.shape[1], 1, 1), inplace=False)
mod_w = F.mul2(F.reshape(conv_weight, (1,) +
conv_weight.shape, inplace=False), scale)
if demodulate:
if up:
denom_w = F.pow_scalar(F.sum(F.pow_scalar(mod_w, 2.), axis=[
1, 3, 4], keepdims=True) + 1e-8, 0.5)
else:
denom_w = F.pow_scalar(F.sum(F.pow_scalar(mod_w, 2.), axis=[
2, 3, 4], keepdims=True) + 1e-8, 0.5)
demod_w = F.div2(mod_w, denom_w)
else:
demod_w = mod_w
conv_input = F.reshape(
conv_input, (1, -1, conv_input.shape[2], conv_input.shape[3]), inplace=False)
demod_w = F.reshape(
demod_w, (-1, demod_w.shape[2], demod_w.shape[3], demod_w.shape[4]), inplace=False)
if up:
k = [1, 3, 3, 1]
conv_out = upsample_conv_2d(
conv_input, demod_w, k, factor=2, gain=1, group=batch_size)
else:
conv_out = F.convolution(conv_input, demod_w, pad=(
pad_size, pad_size), group=batch_size)
conv_out = F.reshape(
conv_out, (batch_size, -1, conv_out.shape[2], conv_out.shape[3]), inplace=False)
if noise is not None:
noise_coeff = nn.parameter.get_parameter_or_create(
name=f'G_synthesis/{res}x{res}/{namescope}/noise_strength', shape=())
conv_out = F.add2(conv_out, noise*F.reshape(noise_coeff, (1, 1, 1, 1)))
else:
conv_out = conv_out
bias = nn.parameter.get_parameter_or_create(name=f'G_synthesis/{res}x{res}/{namescope}/conv/b', shape=(
outmaps,), initializer=np.random.randn(outmaps,).astype(np.float32))
conv_out = F.add2(conv_out, F.reshape(
bias, (1, outmaps, 1, 1), inplace=False))
if act == F.leaky_relu:
conv_out = F.mul_scalar(F.leaky_relu(
conv_out, alpha=0.2, inplace=False), np.sqrt(2), inplace=False)
else:
conv_out = act(conv_out)
return conv_out
def conv_layer(conv_input, inmaps, outmaps, kernel_size, downsample=False,
bias=True, act=F.leaky_relu, name_scope='Conv'):
"""
Conv layer for the residual block of the discriminator
"""
if downsample:
k = [1, 3, 3, 1]
out = downsample_2d(conv_input, k, factor=2,
gain=1, kernel_size=kernel_size)
stride = 2
pad = 0
else:
stride = 1
pad = kernel_size//2
out = conv_input
init_function = weight_init_fn(
shape=(outmaps, inmaps, kernel_size, kernel_size), return_init=True)
scale = 1/np.sqrt(inmaps*kernel_size**2)
conv_weight = nn.parameter.get_parameter_or_create(name=f'{name_scope}/W', initializer=init_function,
shape=(outmaps, inmaps, kernel_size, kernel_size))
if bias:
conv_bias = nn.parameter.get_parameter_or_create(
name=f'{name_scope}/b', shape=(outmaps,))
else:
conv_bias = None
out = F.convolution(out, conv_weight*scale, bias=conv_bias,
stride=(stride, stride), pad=(pad, pad))
if act == F.leaky_relu:
out = F.mul_scalar(F.leaky_relu(
out, alpha=0.2, inplace=False), np.sqrt(2), inplace=False)
else:
out = act(out)
return out
def res_block(res_input, res, inmaps, outmaps, block_scope='res_block'):
"""
Residual block for Discriminator
"""
name_scope = f'Discriminator/{block_scope}_{res}x{res}'
out = conv_layer(res_input, inmaps, inmaps, kernel_size=3,
name_scope=f'{name_scope}/Conv1')
out = conv_layer(out, inmaps, outmaps, kernel_size=3,
downsample=True, name_scope=f'{name_scope}/Conv2')
skip = conv_layer(res_input, inmaps, outmaps, kernel_size=1, downsample=True,
bias=False, act=F.identity, name_scope=f'{name_scope}/ConvSkip')
out = F.mul_scalar(F.add2(out, skip), 1 /
np.sqrt(2).astype(np.float32), inplace=False)
return out
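# Minimal shape-check sketch (assumes an nnabla context; shapes follow the
# defaults above):
#
# z = nn.Variable((4, 512))
# w = mapping_network(z)                            # -> (4, 512) latents in W
# x = nn.Variable((4, 64, 8, 8))
# y = res_block(x, res=8, inmaps=64, outmaps=128)   # -> (4, 128, 4, 4)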
| null |
2,081 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class CreateCustomRoutingEndpointGroupsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'CreateCustomRoutingEndpointGroups','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def METHOD_NAME(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_EndpointGroupConfigurationss(self): # RepeatList
return self.get_query_params().get('EndpointGroupConfigurations')
def set_EndpointGroupConfigurationss(self, EndpointGroupConfigurations): # RepeatList
for depth1 in range(len(EndpointGroupConfigurations)):
if EndpointGroupConfigurations[depth1].get('EndpointGroupRegion') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointGroupRegion', EndpointGroupConfigurations[depth1].get('EndpointGroupRegion'))
if EndpointGroupConfigurations[depth1].get('Name') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.Name', EndpointGroupConfigurations[depth1].get('Name'))
if EndpointGroupConfigurations[depth1].get('Description') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.Description', EndpointGroupConfigurations[depth1].get('Description'))
if EndpointGroupConfigurations[depth1].get('DestinationConfigurations') is not None:
for depth2 in range(len(EndpointGroupConfigurations[depth1].get('DestinationConfigurations'))):
if EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('Protocols') is not None:
for depth3 in range(len(EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('Protocols'))):
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.DestinationConfigurations.' + str(depth2 + 1) + '.Protocols.' + str(depth3 + 1), EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('Protocols')[depth3])
if EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('FromPort') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.DestinationConfigurations.' + str(depth2 + 1) + '.FromPort', EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('FromPort'))
if EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('ToPort') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.DestinationConfigurations.' + str(depth2 + 1) + '.ToPort', EndpointGroupConfigurations[depth1].get('DestinationConfigurations')[depth2].get('ToPort'))
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations') is not None:
for depth2 in range(len(EndpointGroupConfigurations[depth1].get('EndpointConfigurations'))):
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('Type') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointConfigurations.' + str(depth2 + 1) + '.Type', EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('Type'))
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('Endpoint') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointConfigurations.' + str(depth2 + 1) + '.Endpoint', EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('Endpoint'))
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('TrafficToEndpointPolicy') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointConfigurations.' + str(depth2 + 1) + '.TrafficToEndpointPolicy', EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('TrafficToEndpointPolicy'))
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations') is not None:
for depth3 in range(len(EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations'))):
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('Address') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointConfigurations.' + str(depth2 + 1) + '.PolicyConfigurations.' + str(depth3 + 1) + '.Address', EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('Address'))
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('PortRanges') is not None:
for depth4 in range(len(EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('PortRanges'))):
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('PortRanges')[depth4].get('FromPort') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointConfigurations.' + str(depth2 + 1) + '.PolicyConfigurations.' + str(depth3 + 1) + '.PortRanges.' + str(depth4 + 1) + '.FromPort', EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('PortRanges')[depth4].get('FromPort'))
if EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('PortRanges')[depth4].get('ToPort') is not None:
self.add_query_param('EndpointGroupConfigurations.' + str(depth1 + 1) + '.EndpointConfigurations.' + str(depth2 + 1) + '.PolicyConfigurations.' + str(depth3 + 1) + '.PortRanges.' + str(depth4 + 1) + '.ToPort', EndpointGroupConfigurations[depth1].get('EndpointConfigurations')[depth2].get('PolicyConfigurations')[depth3].get('PortRanges')[depth4].get('ToPort'))
def get_ListenerId(self): # String
return self.get_query_params().get('ListenerId')
def set_ListenerId(self, ListenerId): # String
self.add_query_param('ListenerId', ListenerId)
def get_AcceleratorId(self): # String
return self.get_query_params().get('AcceleratorId')
def set_AcceleratorId(self, AcceleratorId): # String
self.add_query_param('AcceleratorId', AcceleratorId)
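# --- Usage sketch (illustrative; not part of the generated SDK file) ---
# How such an RpcRequest is typically dispatched with the standard
# aliyun-python-sdk-core client. The credentials, region, and IDs below
# are placeholders.
#
# from aliyunsdkcore.client import AcsClient
#
# client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
# request = CreateCustomRoutingEndpointGroupsRequest()
# request.set_AcceleratorId('ga-xxxxxxxx')
# request.set_ListenerId('lsr-xxxxxxxx')
# request.set_EndpointGroupConfigurationss([
#     {'EndpointGroupRegion': 'cn-hangzhou', 'Name': 'example-group'},
# ])
# response = client.do_action_with_exception(request)  # raw JSON bytes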
| null |
2,082 |
import json
from contextlib import closing
from typing import Callable, Tuple
from ...utils.cli import EOF, ClientRunnerFunc
from ..conftest import User
def test_add_user(run: ClientRunnerFunc, users: Tuple[User, ...]):
"""
Testcase for user addition.
"""
print("[ Add user ]")
# Add users
for i, user in enumerate(users):
add_arguments = [
"--output=json",
"admin",
"user",
"add",
"-u",
user.username,
"-n",
user.full_name,
"-r",
user.role,
"-s",
user.status,
"default",
user.email,
user.password,
]
if user.need_password_change:
add_arguments.append("--need-password-change")
with closing(run(add_arguments)) as p:
p.expect(EOF)
response = json.loads(p.before.decode())
assert response.get("ok") is True, f"Account creation failed: Account#{i+1}"
# Check if user is added
with closing(run(["--output=json", "admin", "user", "list"])) as p:
p.expect(EOF)
decoded = p.before.decode()
loaded = json.loads(decoded)
user_list = loaded.get("items")
assert isinstance(user_list, list), "Expected user list"
added_users = tuple(get_user_from_list(user_list, user.username) for user in users)
for i, (added_user, user) in enumerate(zip(added_users, users)):
assert bool(added_user), f"Added account doesn't exist: Account#{i+1}"
assert added_user.get("email") == user.email, f"E-mail mismatch: Account#{i+1}"
assert added_user.get("full_name") == user.full_name, f"Full name mismatch: Account#{i+1}"
assert added_user.get("status") == user.status, f"User status mismatch: Account#{i+1}"
assert added_user.get("role") == user.role, f"Role mismatch: Account#{i+1}"
assert (
added_user.get("need_password_change") is user.need_password_change
), f"Password change status mismatch: Account#{i+1}"
def METHOD_NAME(
run: ClientRunnerFunc,
users: Tuple[User, ...],
gen_username: Callable[[], str],
gen_fullname: Callable[[], str],
):
"""
Run this testcase after test_add_user.
Testcase for user update.
TODO: User update with roles is not fully covered yet.
"""
print("[ Update user ]")
# updated_users = [user.copy() for user in users]
updated_users = (
User(
username=gen_username(),
full_name=gen_fullname(),
email=user.email,
password=user.password,
role=["user", "admin", "monitor"][i % 3],
status=["inactive", "active", "active"][i % 3],
domain_name="default",
need_password_change=[False, True, False][i % 3],
)
for i, user in enumerate(users)
)
# Update user
for updated_user, user in zip(updated_users, users):
update_arguments = [
"--output=json",
"admin",
"user",
"update",
"-u",
updated_user.username,
"-n",
updated_user.full_name,
"-s",
updated_user.status,
"-r",
updated_user.role,
"-d",
updated_user.domain_name,
user.email,
]
if updated_user.need_password_change:
update_arguments.append("--need-password-change")
with closing(run(update_arguments)) as p:
p.expect(EOF)
# Check if user is updated correctly
with closing(run(["--output=json", "admin", "user", "list"])) as p:
p.expect(EOF)
after_update_decoded = p.before.decode()
after_update_loaded = json.loads(after_update_decoded)
updated_user_list = after_update_loaded.get("items")
assert isinstance(updated_user_list, list), "Expected user list"
for i, updated_user in enumerate(updated_users):
user_dict: dict = get_user_from_list(updated_user_list, updated_user.username)
assert bool(user_dict), f"Account not found - Account#{i+1}"
assert (
user_dict.get("full_name") == updated_user.full_name
), f"Full name mismatch: Account#{i+1}"
assert (
user_dict.get("status") == updated_user.status
), f"User status mismatch: Account#{i+1}"
assert user_dict.get("role") == updated_user.role, f"Role mismatch: Account#{i+1}"
assert (
user_dict.get("need_password_change") is updated_user.need_password_change
), f"Password change status mismatch: Account#{i+1}"
assert (
user_dict.get("domain_name") == updated_user.domain_name
), f"Domain mismatch: Account#{i+1}"
def test_delete_user(run: ClientRunnerFunc, users: Tuple[User, ...]):
"""
!!Run this testcase after running test_add_user
Testcase for user deletion.
"""
print("[ Delete user ]")
for i, fake_user in enumerate(users):
with closing(run(["--output=json", "admin", "user", "purge", fake_user.email])) as p:
p.sendline("y")
p.expect(EOF)
before = p.before.decode()
response = json.loads(before[before.index("{") :])
assert response.get("ok") is True, f"Account deletion failed: Account#{i+1}"
def test_list_user(run: ClientRunnerFunc):
"""
Testcase for user listing.
"""
with closing(run(["--output=json", "admin", "user", "list"])) as p:
p.expect(EOF)
decoded = p.before.decode()
loaded = json.loads(decoded)
user_list = loaded.get("items")
assert isinstance(user_list, list)
def get_user_from_list(users: list, username: str) -> dict:
for user in users:
if user.get("username") == username:
return user
return {}
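# --- Test-run note (illustrative) ---
# The docstrings above make these testcases order-dependent
# (add -> update -> delete), so they are meant to run in file order, e.g.
# (the path is hypothetical):
#
#   pytest tests/admin/test_user.py -s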
| null |
2,083 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class SendFileRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'SendFile','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def METHOD_NAME(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_Timeout(self): # Long
return self.get_query_params().get('Timeout')
def set_Timeout(self, Timeout): # Long
self.add_query_param('Timeout', Timeout)
def get_Content(self): # String
return self.get_query_params().get('Content')
def set_Content(self, Content): # String
self.add_query_param('Content', Content)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_FileOwner(self): # String
return self.get_query_params().get('FileOwner')
def set_FileOwner(self, FileOwner): # String
self.add_query_param('FileOwner', FileOwner)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_Overwrite(self): # Boolean
return self.get_query_params().get('Overwrite')
def set_Overwrite(self, Overwrite): # Boolean
self.add_query_param('Overwrite', Overwrite)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_FileMode(self): # String
return self.get_query_params().get('FileMode')
def set_FileMode(self, FileMode): # String
self.add_query_param('FileMode', FileMode)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_ContentType(self): # String
return self.get_query_params().get('ContentType')
def set_ContentType(self, ContentType): # String
self.add_query_param('ContentType', ContentType)
def get_InstanceIds(self): # RepeatList
return self.get_query_params().get('InstanceId')
def set_InstanceIds(self, InstanceId): # RepeatList
for depth1 in range(len(InstanceId)):
self.add_query_param('InstanceId.' + str(depth1 + 1), InstanceId[depth1])
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_FileGroup(self): # String
return self.get_query_params().get('FileGroup')
def set_FileGroup(self, FileGroup): # String
self.add_query_param('FileGroup', FileGroup)
def get_TargetDir(self): # String
return self.get_query_params().get('TargetDir')
def set_TargetDir(self, TargetDir): # String
self.add_query_param('TargetDir', TargetDir)
| null |
2,084 |
# Copyright 2019 Camptocamp (http://www.camptocamp.com).
# @author Simone Orsi <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
# pylint: disable=method-required-super, consider-merging-classes-inherited
from odoo import _
from odoo.addons.component.core import AbstractComponent
# TODO: refactor out to a component event the handling of notifications
class PartnerServiceMixin(AbstractComponent):
_name = "shopinvader.partner.service.mixin"
@property
def access_info(self):
with self.shopinvader_backend.work_on(
"res.partner",
partner=self.partner,
partner_user=self.partner_user,
invader_partner=self.invader_partner,
invader_partner_user=self.invader_partner_user,
service_work=self.work,
) as work:
return work.component(usage="access.info")
def _post_create(self, partner):
self._notify_partner(partner, "create")
self._notify_salesman(partner, "create")
def _post_update(self, partner):
self._notify_partner(partner, "update")
self._notify_salesman(partner, "update")
def _notify_salesman(self, partner, mode):
backend_policy = self.shopinvader_backend["salesman_notify_" + mode]
needed = self._notify_salesman_needed(backend_policy, partner, mode)
if needed:
self.env["mail.activity"].sudo().create(
self._notify_salesman_values(partner, mode)
)
def _notify_salesman_values(self, partner, mode):
# TODO: mode is not translated
msg = _("{addr_type} {mode} '{name}' needs review").format(
addr_type=partner.addr_type_display(), name=partner.name, mode=mode
)
return {
"res_model_id": self.env.ref("base.model_res_partner").id,
"res_id": self._notify_salesman_recipient(partner, mode).id,
"user_id": self._get_salesman(partner).id,
"activity_type_id": self.env.ref(
"shopinvader.mail_activity_review_customer"
).id,
"summary": msg,
}
def _get_salesman(self, partner):
"""Retrieve salesman for the partner up to its hierarchy."""
user = partner.user_id
while not user and partner.parent_id:
partner = partner.parent_id
user = partner.user_id
return user or self.env.user
def _notify_partner(self, partner, mode):
notif_type = self._notify_partner_type(partner, mode)
recipient = self.METHOD_NAME(partner, mode)
if notif_type and recipient:
self.shopinvader_backend._send_notification(notif_type, recipient)
    # HACK: these methods were supposed to be overridden in specific services
# BUT the `customer` service has no `update` endpoint,
# it relies on `addresses` endpoint for updates, hence
# we are forced to discriminate on address type all in the same place.
def METHOD_NAME(self, partner, mode):
handler = getattr(
self, "_notify_partner_recipient_" + partner.address_type, None
)
if handler:
return handler(partner, mode)
return partner
def _notify_partner_recipient_address(self, partner, mode):
# notify on the owner of the address
# Safe default to given partner in case we are updating the profile
# which is done w/ the addresses endpoint anyway.
return partner.parent_id if partner.parent_id else partner
def _notify_partner_type(self, partner, mode):
handler = getattr(self, "_notify_partner_type_" + partner.address_type, None)
if handler:
return handler(partner, mode)
return partner
def _notify_partner_type_profile(self, partner, mode):
notif = None
if mode == "create":
notif = "new_customer_welcome"
elif mode == "update":
notif = "customer_updated"
return notif
def _notify_partner_type_address(self, partner, mode):
notif = None
if mode == "create":
notif = "address_created"
elif mode == "update":
notif = "address_updated"
return notif
def _notify_salesman_recipient(self, partner, mode):
handler = getattr(
self, "_notify_salesman_recipient_" + partner.address_type, None
)
if handler:
return handler(partner, mode)
return partner
def _notify_salesman_recipient_address(self, partner, mode):
# notify on the owner of the address
# Safe default to given partner in case we are updating the profile
# which is done w/ the addresses endpoint anyway.
return partner.parent_id if partner.parent_id else partner
def _notify_salesman_needed(self, backend_policy, partner, mode):
handler = getattr(self, "_notify_salesman_needed_" + partner.address_type, None)
if handler:
return handler(backend_policy, partner, mode)
return partner
def _notify_salesman_needed_address(self, backend_policy, partner, mode):
return backend_policy in ("all", "address")
def _notify_salesman_needed_profile(self, backend_policy, partner, mode):
if backend_policy in ("all", "company_and_user"):
return True
elif backend_policy == "company" and partner.is_company:
return True
elif backend_policy == "user" and not partner.is_company:
return True
return False
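# --- Pattern sketch (illustrative; not part of the original module) ---
# The mixin above repeatedly resolves handlers with
# getattr(self, prefix + partner.address_type, None). A self-contained,
# hypothetical illustration of that dispatch pattern:
class _DispatchSketch:
    """Hypothetical stand-in showing the getattr-based handler lookup."""

    def notify(self, kind, payload):
        # Fall back to None when no type-specific handler exists,
        # mirroring the mixin's `if handler:` guards.
        handler = getattr(self, "_notify_" + kind, None)
        return handler(payload) if handler else None

    def _notify_profile(self, payload):
        return ("profile", payload)

    def _notify_address(self, payload):
        return ("address", payload)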
| null |
2,085 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkarms.endpoint import endpoint_data
class CreateOrUpdateAlertRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ARMS', '2019-08-08', 'CreateOrUpdateAlertRule','arms')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AlertGroup(self): # Long
return self.get_body_params().get('AlertGroup')
def set_AlertGroup(self, AlertGroup): # Long
self.add_body_params('AlertGroup', AlertGroup)
def get_AlertName(self): # String
return self.get_body_params().get('AlertName')
def set_AlertName(self, AlertName): # String
self.add_body_params('AlertName', AlertName)
def get_AlertStatus(self): # String
return self.get_body_params().get('AlertStatus')
def set_AlertStatus(self, AlertStatus): # String
self.add_body_params('AlertStatus', AlertStatus)
def get_Annotations(self): # String
return self.get_body_params().get('Annotations')
def set_Annotations(self, Annotations): # String
self.add_body_params('Annotations', Annotations)
def get_Duration(self): # Long
return self.get_body_params().get('Duration')
def set_Duration(self, Duration): # Long
self.add_body_params('Duration', Duration)
def METHOD_NAME(self): # String
return self.get_body_params().get('MetricsKey')
def set_MetricsKey(self, MetricsKey): # String
self.add_body_params('MetricsKey', MetricsKey)
def get_AlertRuleContent(self): # String
return self.get_body_params().get('AlertRuleContent')
def set_AlertRuleContent(self, AlertRuleContent): # String
self.add_body_params('AlertRuleContent', AlertRuleContent)
def get_PromQL(self): # String
return self.get_body_params().get('PromQL')
def set_PromQL(self, PromQL): # String
self.add_body_params('PromQL', PromQL)
def get_Level(self): # String
return self.get_body_params().get('Level')
def set_Level(self, Level): # String
self.add_body_params('Level', Level)
def get_AutoAddNewApplication(self): # Boolean
return self.get_body_params().get('AutoAddNewApplication')
def set_AutoAddNewApplication(self, AutoAddNewApplication): # Boolean
self.add_body_params('AutoAddNewApplication', AutoAddNewApplication)
def get_Filters(self): # String
return self.get_body_params().get('Filters')
def set_Filters(self, Filters): # String
self.add_body_params('Filters', Filters)
def get_ClusterId(self): # String
return self.get_body_params().get('ClusterId')
def set_ClusterId(self, ClusterId): # String
self.add_body_params('ClusterId', ClusterId)
def get_Message(self): # String
return self.get_body_params().get('Message')
def set_Message(self, Message): # String
self.add_body_params('Message', Message)
def get_NotifyStrategy(self): # String
return self.get_body_params().get('NotifyStrategy')
def set_NotifyStrategy(self, NotifyStrategy): # String
self.add_body_params('NotifyStrategy', NotifyStrategy)
def get_Labels(self): # String
return self.get_body_params().get('Labels')
def set_Labels(self, Labels): # String
self.add_body_params('Labels', Labels)
def get_Tagss(self): # RepeatList
return self.get_body_params().get('Tags')
def set_Tagss(self, Tags): # RepeatList
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_body_params('Tags.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_body_params('Tags.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def get_AlertType(self): # String
return self.get_body_params().get('AlertType')
def set_AlertType(self, AlertType): # String
self.add_body_params('AlertType', AlertType)
def get_AlertCheckType(self): # String
return self.get_body_params().get('AlertCheckType')
def set_AlertCheckType(self, AlertCheckType): # String
self.add_body_params('AlertCheckType', AlertCheckType)
def get_MetricsType(self): # String
return self.get_body_params().get('MetricsType')
def set_MetricsType(self, MetricsType): # String
self.add_body_params('MetricsType', MetricsType)
def get_AlertId(self): # Long
return self.get_body_params().get('AlertId')
def set_AlertId(self, AlertId): # Long
self.add_body_params('AlertId', AlertId)
def get_Pids(self): # String
return self.get_body_params().get('Pids')
def set_Pids(self, Pids): # String
self.add_body_params('Pids', Pids)
| null |
2,086 |
# OPENCORE - ADD
from functools import wraps
from shared.database.user import User
from shared.database.project import Project
from shared.database.auth.api import Auth_api
from shared.database.task.job.job import Job
from shared.database.task.job.user_to_job import User_To_Job
from werkzeug.exceptions import Forbidden, Unauthorized
from shared.helpers.permissions import getUserID
from shared.helpers.permissions import LoggedIn
from shared.helpers.permissions import defaultRedirect
from shared.helpers import sessionMaker
from flask import request
from shared.permissions.project_permissions import Project_permissions
from shared.permissions.api_permissions import API_Permissions
from shared.permissions.user_permissions import User_Permissions
class Job_permissions():
@staticmethod
def by_job_id(project_role_list = ["Editor", "admin"],
apis_project_list = [],
apis_user_list = [],
mode = "builder"):
"""
mode == "builder":
project_role_list required
"""
if not isinstance(project_role_list, list): project_role_list = [project_role_list]
def wrapper(func):
@wraps(func)
def inner(*args, **kwds):
job_id = kwds.get('job_id', None)
task_template_id = kwds.get('task_template_id', None)
if (job_id is None or job_id == "null" or job_id == "undefined") and task_template_id is None:
raise Forbidden("job_id is invalid")
if task_template_id is not None:
job_id = task_template_id
with sessionMaker.session_scope() as session:
# Permissions cascading from project
project_string_id = get_project_string_from_job_id(session, job_id)
# API
if request.authorization is not None:
result = API_Permissions.by_project(session = session,
project_string_id = project_string_id,
Roles = project_role_list)
if result is True:
return func(*args, **kwds)
else:
raise Forbidden("API access invalid")
# TODO do we need to validate user has applicable mode?
# ie they pass mode builder but are trainer?
# Basics should fail on project level check anyway here...
# User
# TODO move login stuff into the general User_Permissions
if LoggedIn() != True:
raise Unauthorized("Login again.")
user = session.query(User).filter(User.id == getUserID(session = session)).first()
if user is None:
raise Unauthorized("Login again.")
# Want to use the builder API permissions instead of
# flags since a user may be testing this as a builder
# TODO deprecate 'mode' flag or have it as something else
# like "builder_only" or something
# One downside of doing it this way is it means
# that we need to be careful with
# project_role_list list...
if user.api_enabled_builder is True:
result = Project_permissions.check_permissions(
session = session,
project_string_id = project_string_id,
Roles = project_role_list,
apis_project_list = apis_project_list,
apis_user_list = apis_user_list)
if result is True:
return func(*args, **kwds)
else:
raise Forbidden("Project access invalid")
if user.api_enabled_trainer is True:
# TODO refactor into function
# TODO handle "info" case of a trainer not yet
# on a job seeing basic stuff on active jobs...
# We allow trainers to see
# Basic info before they apply
# as long as job is active...
# if job.status != "active":
# raise Forbidden("No access.")
User_Permissions.general(user = user,
apis_user_list = apis_user_list)
user_to_job = User_To_Job.get_single_by_ids(session = session,
user_id = user.id,
job_id = job_id)
# TODO other status checking on this...
if user_to_job is None:
raise Forbidden("No access to this job. Please apply first.")
# Success case for trainer
return func(*args, **kwds)
raise Forbidden("No access.")
return inner
return wrapper
@staticmethod
def check_job_after_project_already_valid(
job,
project):
if job is None:
raise Forbidden("No job")
if job.project_id != project.id:
raise Forbidden("Permission")
def get_project_string_from_job_id(session,
job_id):
job = Job.get_by_id(session, job_id)
if job is None:
raise Forbidden
if job.project is None:
raise Forbidden
project_string_id = job.project.project_string_id
return project_string_id
def METHOD_NAME():
pass
# TODO move if statement stuff to here
def check_roles(Roles, Permissions):
    for role in Roles:
        if role in Permissions:
            return True
    return False
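# --- Usage sketch (illustrative; not part of the original module) ---
# How the decorator above is typically attached to a Flask view. The app
# object, route path, and view body are hypothetical.
#
# from flask import Flask, jsonify
#
# app = Flask(__name__)
#
# @app.route('/api/v1/job/<int:job_id>/info')
# @Job_permissions.by_job_id(project_role_list=["Editor", "admin"])
# def job_info(job_id):
#     return jsonify({"job_id": job_id})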
| null |
2,087 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License
#
"""Entity implementing the glue between the policy engine and the rest of the system."""
import traceback
from typing import TYPE_CHECKING
from .policy_local import PolicyLocal
from ..dispatch import LogAdapter, LOG_INFO, LOG_DEBUG, LOG_ERROR, LOG_WARNING
if TYPE_CHECKING:
from ..management.agent import Agent
class PolicyManager:
"""
"""
def __init__(self, agent: 'Agent') -> None:
"""
"""
self._agent = agent
self._policy_local = PolicyLocal(self)
self.log_adapter = LogAdapter("POLICY")
self._use_hostname_patterns = False
def log(self, level, text):
info = traceback.extract_stack(limit=2)[0] # Caller frame info
self.log_adapter.log(level, text, info[0], info[1])
def METHOD_NAME(self, level: int, text: str) -> None:
info = traceback.extract_stack(limit=3)[0] # Caller's caller frame info
self.log_adapter.log(level, text, info[0], info[1])
def log_debug(self, text):
self.METHOD_NAME(LOG_DEBUG, text)
def log_info(self, text: str) -> None:
self.METHOD_NAME(LOG_INFO, text)
def log_error(self, text):
self.METHOD_NAME(LOG_ERROR, text)
def log_warning(self, text):
self.METHOD_NAME(LOG_WARNING, text)
def get_agent(self):
return self._agent
def get_use_hostname_patterns(self):
return self._use_hostname_patterns
def set_use_hostname_patterns(self, v: bool) -> None:
self._use_hostname_patterns = v
self._policy_local.use_hostname_patterns = v
#
# Management interface to create a ruleset
#
def create_ruleset(self, attributes):
"""
Create named policy ruleset
@param[in] attributes: from config
"""
self._policy_local.create_ruleset(attributes)
#
# Management interface to delete a ruleset
#
def delete_ruleset(self, id):
"""
Delete named policy ruleset
@param[in] id: ruleset name
"""
self._policy_local.policy_delete(id)
#
# Management interface to update a ruleset
#
def update_ruleset(self, attributes):
"""
Update named policy ruleset
        @param[in] attributes: from config
"""
self._policy_local.create_ruleset(attributes)
#
# Management interface to set the default vhost
#
def set_default_vhost(self, name: str) -> None:
"""
Set default application
@param name:
@return:
"""
self._policy_local.set_default_vhost(name)
#
# Runtime query interface
#
def lookup_vhost_alias(self, vhost_in):
"""
Resolve given vhost name to vhost settings name.
If the incoming name is a vhost hostname then return the same name.
If the incoming name is a vhost alias hostname then return the containing vhost name.
If a default vhost is defined then return its name.
:param vhost_in: vhost name to test
:return: name of policy settings vhost to be applied. Or blank if not defined.
"""
return self._policy_local.lookup_vhost_alias(vhost_in)
def lookup_user(self, user, rhost, vhost, conn_name, conn_id):
"""
Lookup function called from C.
Determine if a user on host accessing app through AMQP Open is allowed
according to the policy access rules.
If allowed then return the policy settings name
@param[in] user connection authId
@param[in] rhost connection remote host numeric IP address as string
@param[in] vhost application user is accessing
@param[in] conn_name connection name for accounting purposes
@param[in] conn_id internal connection id
@return settings user-group name if allowed; "" if not allowed
"""
return self._policy_local.lookup_user(user, rhost, vhost, conn_name, conn_id)
def lookup_settings(self, vhost, name, upolicy):
"""
Given a settings name, return the aggregated policy blob.
@param[in] vhost: vhost user is accessing
@param[in] name: user group name
@param[out] upolicy: map that receives the settings
@return settings were retrieved or not
"""
return self._policy_local.lookup_settings(vhost, name, upolicy)
def close_connection(self, conn_id):
"""
        The connection identified is closing. Remove it from the connection
        accounting tables.
        @param conn_id: internal connection id
@return: none
"""
self._policy_local.close_connection(conn_id)
def set_max_message_size(self, size: int) -> None:
"""
Policy has set global maxMessageSize.
:param size:
:return: none
"""
self._policy_local.set_max_message_size(size)
#
#
#
def policy_lookup_vhost_alias(mgr, vhost):
"""
Look up a vhost in the policy database
Called by C code
@param mgr: policy_manager
@param vhost: Incoming vhost from an AMQP Open
@return: name of policy settings vhost to be applied or blank if lookup failed.
"""
return mgr.lookup_vhost_alias(vhost)
#
#
#
def policy_lookup_user(mgr, user, rhost, vhost, conn_name, conn_id):
"""
Look up a user in the policy database
Called by C code
@param mgr:
@param user:
@param rhost:
@param vhost:
@param conn_name:
@return:
"""
return mgr.lookup_user(user, rhost, vhost, conn_name, conn_id)
#
#
#
def policy_close_connection(mgr, conn_id):
"""
Close the connection.
Called by C code
@param mgr:
@param conn_id:
@return:
"""
mgr.close_connection(conn_id)
#
#
#
def policy_lookup_settings(mgr, vhost, name, upolicy):
"""
Return settings for <vhost, usergroup> in upolicy map
@param mgr:
@param vhost:
@param name:
@param upolicy:
@return:
"""
return mgr.lookup_settings(vhost, name, upolicy)
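# --- Usage sketch (illustrative; not part of the original module) ---
# The module-level helpers above are thin shims the router's C core calls
# with a PolicyManager instance. A hedged Python-side exercise of the same
# entry points; the agent and all values are placeholders.
#
# mgr = PolicyManager(agent=None)  # a real management Agent is normally given
# mgr.set_max_message_size(65536)
# settings_vhost = policy_lookup_vhost_alias(mgr, 'example.com')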
| null |
2,088 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkr_kvstore.endpoint import endpoint_data
class DescribeInstancesOverviewRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'R-kvstore', '2015-01-01', 'DescribeInstancesOverview','redisa')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_SearchKey(self): # String
return self.get_query_params().get('SearchKey')
def set_SearchKey(self, SearchKey): # String
self.add_query_param('SearchKey', SearchKey)
def get_NetworkType(self): # String
return self.get_query_params().get('NetworkType')
def set_NetworkType(self, NetworkType): # String
self.add_query_param('NetworkType', NetworkType)
def get_EngineVersion(self): # String
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self, EngineVersion): # String
self.add_query_param('EngineVersion', EngineVersion)
def get_InstanceClass(self): # String
return self.get_query_params().get('InstanceClass')
def set_InstanceClass(self, InstanceClass): # String
self.add_query_param('InstanceClass', InstanceClass)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_InstanceType(self): # String
return self.get_query_params().get('InstanceType')
def set_InstanceType(self, InstanceType): # String
self.add_query_param('InstanceType', InstanceType)
def get_EditionType(self): # String
return self.get_query_params().get('EditionType')
def set_EditionType(self, EditionType): # String
self.add_query_param('EditionType', EditionType)
def get_InstanceStatus(self): # String
return self.get_query_params().get('InstanceStatus')
def set_InstanceStatus(self, InstanceStatus): # String
self.add_query_param('InstanceStatus', InstanceStatus)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_PrivateIp(self): # String
return self.get_query_params().get('PrivateIp')
def set_PrivateIp(self, PrivateIp): # String
self.add_query_param('PrivateIp', PrivateIp)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_VSwitchId(self): # String
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self, VSwitchId): # String
self.add_query_param('VSwitchId', VSwitchId)
def get_InstanceIds(self): # String
return self.get_query_params().get('InstanceIds')
def set_InstanceIds(self, InstanceIds): # String
self.add_query_param('InstanceIds', InstanceIds)
def get_ArchitectureType(self): # String
return self.get_query_params().get('ArchitectureType')
def set_ArchitectureType(self, ArchitectureType): # String
self.add_query_param('ArchitectureType', ArchitectureType)
def get_VpcId(self): # String
return self.get_query_params().get('VpcId')
def set_VpcId(self, VpcId): # String
self.add_query_param('VpcId', VpcId)
def get_ZoneId(self): # String
return self.get_query_params().get('ZoneId')
def set_ZoneId(self, ZoneId): # String
self.add_query_param('ZoneId', ZoneId)
def METHOD_NAME(self): # String
return self.get_query_params().get('ChargeType')
def set_ChargeType(self, ChargeType): # String
self.add_query_param('ChargeType', ChargeType)
| null |
2,089 |
#!/usr/bin/env python
"""
Create the imppy.sh script using the passed parameters.
"""
import tools
from optparse import OptionParser
import os.path
import os
import platform
import stat
def uniq(ls):
"""Return the list with duplicate items removed"""
seen = set()
for obj in ls:
if obj not in seen:
seen.add(obj)
yield obj
class FileGenerator(object):
body = ["@LDPATH@", "", "@PYTHONPATH@", "",
"# Where to find data for the various modules",
"@IMP_DATA@", "",
"# Extra places to look for imp modules",
"@IMP_EXAMPLE_DATA@", "",
"# Location of binaries (for wine builds, which don't get PATH)",
"@IMP_BIN_DIR@", "",
"@PATH@", "", "@PRECOMMAND@", "", "@TMPDIR@"]
def __init__(self, options):
self.options = options
def native_paths(self, paths, also_with_suffix=False):
"""Convert cmake-provided paths into native paths"""
ret = [tools.from_cmake_path(x) for x in paths]
if self.options.suffix and also_with_suffix:
ret += [os.path.join(tools.from_cmake_path(x),
self.options.suffix) for x in paths]
return ret
def METHOD_NAME(self, reldir):
"""Get an absolute path to a binary directory"""
if self.options.suffix:
reldir = os.path.join(reldir, self.options.suffix)
return os.path.abspath(reldir)
def get_path(self):
modbin = [os.path.abspath(x) for x in tools.get_glob(["module_bin/*"])]
if self.options.suffix:
modbin += [os.path.join(x, self.options.suffix) for x in modbin]
return (modbin + [self.METHOD_NAME("bin")]
+ self.native_paths(self.options.path, True))
def write_file(self):
pypathsep = ";" if self.options.python_pathsep == 'w32' else os.pathsep
outfile = self.options.output
pythonpath = self.native_paths(self.options.python_path, True)
ldpath = self.native_paths(self.options.ld_path)
precommand = self.options.precommand
path = self.get_path()
externdata = self.native_paths(self.options.external_data)
libdir = self.METHOD_NAME("lib")
bindir = self.METHOD_NAME("bin")
datadir = os.path.abspath("data")
exampledir = os.path.abspath(os.path.join("doc", "examples"))
tmpdir = os.path.abspath("tmp")
if platform.system() == 'Linux':
varname = "LD_LIBRARY_PATH"
elif platform.system() == 'Darwin':
varname = "DYLD_LIBRARY_PATH"
else:
varname = None
lines = {"@LDPATH@": (varname, os.pathsep.join([libdir] + ldpath),
True, True),
"@PYTHONPATH@": ("PYTHONPATH",
pypathsep.join(
[libdir] + pythonpath), True, True),
"@IMP_BIN_DIR@": ("IMP_BIN_DIR", bindir, True, False),
"@PATH@":
("PATH", os.pathsep.join(uniq([bindir] + path)), True, True),
"@PRECOMMAND@": ("precommand", precommand, False, False),
"@IMP_DATA@": ("IMP_DATA", ":".join([datadir] + externdata),
True, False),
"@IMP_EXAMPLE_DATA@": ("IMP_EXAMPLE_DATA",
os.pathsep.join([exampledir]),
True, False),
"@TMPDIR@": ("IMP_TMP_DIR", tmpdir, True, False)}
contents = []
for line in self.template:
if line in lines:
val = lines[line]
if val[0] and len(val[1]) > 0:
# ick
if self.options.propagate == "no" or not val[3]:
contents.extend(self.set_variable(val[0], val[1],
val[2]))
else:
if 'PYTHONPATH' in val[0]:
sep = pypathsep
else:
sep = os.pathsep
contents.extend(self.set_variable_propagate(
val[0], val[1], val[2], sep))
else:
contents.append(line)
tools.rewrite(outfile, "\n".join(contents), verbose=False)
os.chmod(outfile, stat.S_IRWXU)
class ShellScriptFileGenerator(FileGenerator):
template = ["#!/usr/bin/env sh", "", ""] + FileGenerator.body \
+ ["", "", "mkdir -p \"${IMP_TMP_DIR}\"", "",
"exec ${precommand} \"$@\""]
def _internal_set(self, setstr, varname, export):
if export:
return [setstr, "export " + varname]
else:
return [setstr]
def set_variable(self, varname, value, export):
return (
self._internal_set(varname + '="' + value + '"', varname, export)
)
def set_variable_propagate(self, varname, value, export, sep):
return self._internal_set(
varname + '="' + value + '%s$%s"' % (sep, varname),
varname, export)
class BatchFileGenerator(FileGenerator):
template = [x for x in FileGenerator.body if not x.startswith('#')] \
+ ["", 'mkdir "%IMP_TMP_DIR%"']
def set_variable(self, varname, value, export):
return ['set %s=%s' % (varname, value)]
def set_variable_propagate(self, varname, value, export, sep):
return ['set %s=%s%s%%%s%%' % (varname, value, sep, varname)]
def get_path(self):
# Windows looks for libraries in PATH, not LD_LIBRARY_PATH
return FileGenerator.get_path(self) \
+ self.native_paths(self.options.ld_path, True)
parser = OptionParser()
parser.add_option("-p", "--python_path", dest="python_path", default=[],
action="append", help="PYTHONPATH.")
parser.add_option("-l", "--ld_path", dest="ld_path", default=[],
action="append", help="LD_LIB_PATH.")
parser.add_option("-c", "--precommand", dest="precommand", default="",
help="Command to run before all executables.")
parser.add_option("-P", "--path", dest="path", default=[],
action="append", help="The PATH.")
parser.add_option("--python_pathsep", default="",
help="The Python path separator style "
"to use ('w32' or empty)")
parser.add_option("-d", "--external_data", dest="external_data", default=[],
action="append", help="External data.")
parser.add_option("-e", "--propagate", dest="propagate", default="no",
help="Whether to pass the relevant environment variables "
"through.")
parser.add_option("-o", "--output", dest="output", default="imppy.sh",
help="Name of the file to produce.")
parser.add_option("--suffix", default="",
help="Subdirectory to suffix to binary directories")
def main():
(options, args) = parser.parse_args()
if options.output.endswith('.bat'):
gen = BatchFileGenerator(options)
else:
gen = ShellScriptFileGenerator(options)
gen.write_file()
if __name__ == '__main__':
main()
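# --- Invocation sketch (illustrative) ---
# This generator is normally driven by the build system; a hand-run example
# with placeholder paths (the script filename itself is hypothetical):
#
#   python make_imppy.py -p /build/lib -l /build/lib \
#       -P /build/bin -o imppy.sh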
| null |
2,090 |
import json
import sys
import time
from pathlib import Path
import appdirs
import click
from tabulate import tabulate
from ai.backend.cli.interaction import ask_yn
from ai.backend.cli.types import ExitCode
from ..pretty import print_done, print_error, print_fail, print_info, print_wait
from ..session.lifecycle import Session
from . import admin
@admin.group()
def manager():
"""Set of manager control operations."""
@manager.command()
def METHOD_NAME():
"""Show the manager's current status."""
try:
with Session() as session:
resp = session.Manager.METHOD_NAME()
print(
tabulate(
[("Status", "Active Sessions"), (resp["status"], resp["active_sessions"])],
headers="firstrow",
)
)
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@manager.command()
@click.option(
"--wait",
is_flag=True,
help="Hold up freezing the manager until there are no running sessions in the manager.",
)
@click.option(
"--force-kill",
is_flag=True,
help="Kill all running sessions immediately and freeze the manager.",
)
def freeze(wait, force_kill):
"""Freeze manager."""
if wait and force_kill:
print(
"You cannot use both --wait and --force-kill options at the same time.",
file=sys.stderr,
)
return
try:
with Session() as session:
if wait:
while True:
resp = session.Manager.METHOD_NAME()
active_sessions_num = resp["active_sessions"]
if active_sessions_num == 0:
break
print_wait(
"Waiting for all sessions terminated... ({0} left)".format(
active_sessions_num
)
)
time.sleep(3)
print_done("All sessions are terminated.")
if force_kill:
print_wait("Killing all sessions...")
session.Manager.freeze(force_kill=force_kill)
if force_kill:
print_done("All sessions are killed.")
print("Manager is successfully frozen.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@manager.command()
def unfreeze():
"""Unfreeze manager."""
try:
with Session() as session:
session.Manager.unfreeze()
print("Manager is successfully unfrozen.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@admin.group()
def announcement():
"""Global announcement related commands"""
@announcement.command()
def get():
"""Get current announcement."""
try:
with Session() as session:
result = session.Manager.get_announcement()
if result.get("enabled", False):
msg = result.get("message")
print(msg)
else:
print("No announcements.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@announcement.command()
@click.option("-m", "--message", default=None, type=click.STRING)
def update(message):
"""
Post new announcement.
MESSAGE: Announcement message.
"""
try:
with Session() as session:
if message is None:
message = click.edit(
"<!-- Use Markdown format to edit the announcement message -->",
)
if message is None:
print_info("Cancelled")
sys.exit(ExitCode.FAILURE)
session.Manager.update_announcement(enabled=True, message=message)
print_done("Posted new announcement.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@announcement.command()
def delete():
"""Delete current announcement."""
if not ask_yn():
print_info("Cancelled.")
sys.exit(ExitCode.FAILURE)
try:
with Session() as session:
session.Manager.update_announcement(enabled=False)
print_done("Deleted announcement.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@announcement.command()
def dismiss():
"""Do not show the same announcement again."""
if not ask_yn():
print_info("Cancelled.")
sys.exit(ExitCode.FAILURE)
try:
local_state_path = Path(appdirs.user_state_dir("backend.ai", "Lablup"))
with open(local_state_path / "announcement.json", "rb") as f:
state = json.load(f)
state["dismissed"] = True
with open(local_state_path / "announcement.json", "w") as f:
json.dump(state, f)
print_done("Dismissed the last shown announcement.")
except (IOError, json.JSONDecodeError):
print_fail("No announcements seen yet.")
sys.exit(ExitCode.FAILURE)
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@manager.group()
def scheduler():
"""
The scheduler operation command group.
"""
pass
@scheduler.command()
@click.argument("agent_ids", nargs=-1)
def include_agents(agent_ids):
"""
Include agents in scheduling, meaning that the given agents
will be considered to be ready for creating new session containers.
"""
try:
with Session() as session:
session.Manager.scheduler_op("include-agents", agent_ids)
print_done("The given agents now accepts new sessions.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
@scheduler.command()
@click.argument("agent_ids", nargs=-1)
def exclude_agents(agent_ids):
"""
Exclude agents from scheduling, meaning that the given agents
will no longer start new sessions unless they are "included" again,
regardless of their restarts and rejoining events.
"""
try:
with Session() as session:
session.Manager.scheduler_op("exclude-agents", agent_ids)
print_done("The given agents will no longer start new sessions.")
except Exception as e:
print_error(e)
sys.exit(ExitCode.FAILURE)
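# --- CLI sketch (illustrative) ---
# These click groups are reached through the Backend.AI client CLI; a few
# of the unmasked commands above, with invocation paths assumed from the
# decorators:
#
#   backend.ai admin manager freeze --wait
#   backend.ai admin manager unfreeze
#   backend.ai admin announcement get
#   backend.ai admin manager scheduler exclude-agents i-agent01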
| null |
2,091 |
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test codes functionality."""
import os
from typing import Type
from google.protobuf import message
from absl.testing import absltest
from absl.testing import parameterized
from proto.google.fhir.proto.stu3 import codes_pb2
from proto.google.fhir.proto.stu3 import datatypes_pb2
from google.fhir.core import codes
from google.fhir.core import fhir_errors
from google.fhir.core.testing import testdata_utils
_CODES_DIR = os.path.join('testdata', 'stu3', 'codes')
class CodesTest(parameterized.TestCase):
"""Tests functionality provided by the codes module."""
def testGetCodeAsString_withStringValueType(self):
"""Tests get_code_as_string with a string value-field type."""
code = datatypes_pb2.Code(value='foo')
self.assertEqual('foo', codes.get_code_as_string(code))
def testGetCodeAsString_withEnumValueType(self):
"""Tests get_code_as_string with an enum value-field type."""
code = codes_pb2.AdministrativeGenderCode(
value=codes_pb2.AdministrativeGenderCode.Value.FEMALE)
self.assertEqual('female', codes.get_code_as_string(code))
def testGetCodeAsString_withInvalidType(self):
"""Tests get_code_as_string with an invalid value-field type."""
not_a_code = datatypes_pb2.String(value='foo')
with self.assertRaises(ValueError) as ve:
_ = codes.get_code_as_string(not_a_code)
self.assertIsInstance(ve.exception, ValueError)
def testEnumValueDescriptorToCodeString(self):
"""Tests enum_value_descriptor_to_code_string functionality."""
female_value_descriptor = (
codes_pb2.AdministrativeGenderCode.Value.DESCRIPTOR.values_by_number[
codes_pb2.AdministrativeGenderCode.FEMALE])
self.assertEqual(
'female',
codes.enum_value_descriptor_to_code_string(female_value_descriptor))
def METHOD_NAME(self):
"""Tests code_string_to_enum_value_descriptor functionality."""
enum_descriptor = codes_pb2.AssertionOperatorTypeCode.Value.DESCRIPTOR
enum_value_descriptor = enum_descriptor.values_by_name['GREATERTHAN']
result = codes.code_string_to_enum_value_descriptor('greaterthan',
enum_descriptor)
self.assertEqual(result.name, enum_value_descriptor.name)
def testCodeStringToEnumValueDescriptor_withInvalidCodeString(self):
"""Tests code_string_to_enum_value_descriptor error handling."""
enum_descriptor = codes_pb2.AssertionOperatorTypeCode.Value.DESCRIPTOR
with self.assertRaises(fhir_errors.InvalidFhirError) as fe:
_ = codes.code_string_to_enum_value_descriptor('InvalidCode!',
enum_descriptor)
self.assertIsInstance(fe.exception, fhir_errors.InvalidFhirError)
def testCopyCode_fromTypedToGeneric(self):
"""Tests copy_code from a generic to typed Code."""
typed_code = codes_pb2.AdministrativeGenderCode(
value=codes_pb2.AdministrativeGenderCode.Value.FEMALE)
generic_code = datatypes_pb2.Code()
codes.copy_code(typed_code, generic_code)
self.assertEqual('female', generic_code.value)
def testCopyCode_fromGenericToTyped(self):
"""Tests copy_code from a typed to a generic Code."""
typed_code = codes_pb2.AdministrativeGenderCode()
generic_code = datatypes_pb2.Code(value='female')
codes.copy_code(generic_code, typed_code)
self.assertEqual(codes_pb2.AdministrativeGenderCode.Value.FEMALE,
typed_code.value)
def testCopyCode_fromGenericToGeneric(self):
"""Tests copy_code form a generic to a generic Code."""
source = datatypes_pb2.Code(value='female')
target = datatypes_pb2.Code()
codes.copy_code(source, target)
self.assertEqual('female', target.value)
def testCopyCode_fromTypedToTyped(self):
"""Tests copy_code from a typed to a typed Code."""
source = codes_pb2.AdministrativeGenderCode(
value=codes_pb2.AdministrativeGenderCode.Value.FEMALE)
target = codes_pb2.AdministrativeGenderCode()
codes.copy_code(source, target)
self.assertEqual(codes_pb2.AdministrativeGenderCode.Value.FEMALE,
target.value)
@parameterized.named_parameters(
('_withUsCoreOmb1', 'uscore_omb_1'),
('_withUsCoreOmb2', 'uscore_omb_2'),
)
def testCopyCoding_fromGenericToGeneric(self, name: str):
"""Tests copy_coding from a generic Coding to a generic Coding."""
source = self._coding_from_file(name + '_raw.prototxt',
datatypes_pb2.Coding)
target = datatypes_pb2.Coding()
codes.copy_coding(source, target)
self.assertEqual(source, target)
def _coding_from_file(self, name: str,
coding_cls: Type[message.Message]) -> message.Message:
"""Reads data from the CODES_DIR/name into an instance of coding_cls."""
return testdata_utils.read_protos(
os.path.join(_CODES_DIR, name), coding_cls)[0]
if __name__ == '__main__':
absltest.main()
| null |
2,092 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data
class CreateRouterInterfaceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'CreateRouterInterface','vpc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AccessPointId(self): # String
return self.get_query_params().get('AccessPointId')
def set_AccessPointId(self, AccessPointId): # String
self.add_query_param('AccessPointId', AccessPointId)
def get_OppositeAccessPointId(self): # String
return self.get_query_params().get('OppositeAccessPointId')
def set_OppositeAccessPointId(self, OppositeAccessPointId): # String
self.add_query_param('OppositeAccessPointId', OppositeAccessPointId)
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Spec(self): # String
return self.get_query_params().get('Spec')
def set_Spec(self, Spec): # String
self.add_query_param('Spec', Spec)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Period(self): # Integer
return self.get_query_params().get('Period')
def set_Period(self, Period): # Integer
self.add_query_param('Period', Period)
def get_OppositeRegionId(self): # String
return self.get_query_params().get('OppositeRegionId')
def set_OppositeRegionId(self, OppositeRegionId): # String
self.add_query_param('OppositeRegionId', OppositeRegionId)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_OppositeInterfaceOwnerId(self): # String
return self.get_query_params().get('OppositeInterfaceOwnerId')
def set_OppositeInterfaceOwnerId(self, OppositeInterfaceOwnerId): # String
self.add_query_param('OppositeInterfaceOwnerId', OppositeInterfaceOwnerId)
def get_Tagss(self): # RepeatList
return self.get_query_params().get('Tags')
def set_Tagss(self, Tags): # RepeatList
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tags.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tags.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def METHOD_NAME(self): # Boolean
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # Boolean
self.add_query_param('AutoRenew', AutoRenew)
def get_OppositeRouterType(self): # String
return self.get_query_params().get('OppositeRouterType')
def set_OppositeRouterType(self, OppositeRouterType): # String
self.add_query_param('OppositeRouterType', OppositeRouterType)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_PricingCycle(self): # String
return self.get_query_params().get('PricingCycle')
def set_PricingCycle(self, PricingCycle): # String
self.add_query_param('PricingCycle', PricingCycle)
def get_OppositeRouterId(self): # String
return self.get_query_params().get('OppositeRouterId')
def set_OppositeRouterId(self, OppositeRouterId): # String
self.add_query_param('OppositeRouterId', OppositeRouterId)
def get_Role(self): # String
return self.get_query_params().get('Role')
def set_Role(self, Role): # String
self.add_query_param('Role', Role)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_HealthCheckTargetIp(self): # String
return self.get_query_params().get('HealthCheckTargetIp')
def set_HealthCheckTargetIp(self, HealthCheckTargetIp): # String
self.add_query_param('HealthCheckTargetIp', HealthCheckTargetIp)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_FastLinkMode(self): # Boolean
return self.get_query_params().get('FastLinkMode')
def set_FastLinkMode(self, FastLinkMode): # Boolean
self.add_query_param('FastLinkMode', FastLinkMode)
def get_OppositeInterfaceId(self): # String
return self.get_query_params().get('OppositeInterfaceId')
def set_OppositeInterfaceId(self, OppositeInterfaceId): # String
self.add_query_param('OppositeInterfaceId', OppositeInterfaceId)
def get_InstanceChargeType(self): # String
return self.get_query_params().get('InstanceChargeType')
def set_InstanceChargeType(self, InstanceChargeType): # String
self.add_query_param('InstanceChargeType', InstanceChargeType)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_RouterType(self): # String
return self.get_query_params().get('RouterType')
def set_RouterType(self, RouterType): # String
self.add_query_param('RouterType', RouterType)
def get_HealthCheckSourceIp(self): # String
return self.get_query_params().get('HealthCheckSourceIp')
def set_HealthCheckSourceIp(self, HealthCheckSourceIp): # String
self.add_query_param('HealthCheckSourceIp', HealthCheckSourceIp)
def get_RouterId(self): # String
return self.get_query_params().get('RouterId')
def set_RouterId(self, RouterId): # String
self.add_query_param('RouterId', RouterId)
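# A hedged usage sketch (not part of the generated SDK source): credentials,
# region, and every parameter value below are placeholders.
#
#   from aliyunsdkcore.client import AcsClient
#
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = CreateRouterInterfaceRequest()
#   request.set_RouterType('VRouter')
#   request.set_RouterId('vrt-xxxxxxxxxxxx')
#   request.set_Role('InitiatingSide')
#   request.set_Spec('Large.2')
#   request.set_OppositeRegionId('cn-beijing')
#   request.set_Tagss([{'Key': 'env', 'Value': 'prod'}])  # note the generated double-s name
#   response = client.do_action_with_exception(request)   # raw JSON bytes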
| null |
2,093 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class ModifySagWanRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'ModifySagWan','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def METHOD_NAME(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ISP(self): # String
return self.get_query_params().get('ISP')
def set_ISP(self, ISP): # String
self.add_query_param('ISP', ISP)
def get_Password(self): # String
return self.get_query_params().get('Password')
def set_Password(self, Password): # String
self.add_query_param('Password', Password)
def get_Mask(self): # String
return self.get_query_params().get('Mask')
def set_Mask(self, Mask): # String
self.add_query_param('Mask', Mask)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_Bandwidth(self): # Integer
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self, Bandwidth): # Integer
self.add_query_param('Bandwidth', Bandwidth)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_IP(self): # String
return self.get_query_params().get('IP')
def set_IP(self, IP): # String
self.add_query_param('IP', IP)
def get_Weight(self): # Integer
return self.get_query_params().get('Weight')
def set_Weight(self, Weight): # Integer
self.add_query_param('Weight', Weight)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_IPType(self): # String
return self.get_query_params().get('IPType')
def set_IPType(self, IPType): # String
self.add_query_param('IPType', IPType)
def get_Priority(self): # Integer
return self.get_query_params().get('Priority')
def set_Priority(self, Priority): # Integer
self.add_query_param('Priority', Priority)
def get_SmartAGId(self): # String
return self.get_query_params().get('SmartAGId')
def set_SmartAGId(self, SmartAGId): # String
self.add_query_param('SmartAGId', SmartAGId)
def get_SmartAGSn(self): # String
return self.get_query_params().get('SmartAGSn')
def set_SmartAGSn(self, SmartAGSn): # String
self.add_query_param('SmartAGSn', SmartAGSn)
def get_PortName(self): # String
return self.get_query_params().get('PortName')
def set_PortName(self, PortName): # String
self.add_query_param('PortName', PortName)
def get_Gateway(self): # String
return self.get_query_params().get('Gateway')
def set_Gateway(self, Gateway): # String
self.add_query_param('Gateway', Gateway)
def get_Username(self): # String
return self.get_query_params().get('Username')
def set_Username(self, Username): # String
self.add_query_param('Username', Username)
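# A hedged sketch of switching a WAN port to a static-IP configuration; all
# identifiers and values are placeholders, and the valid IPType values should
# be confirmed against the SmartAG API reference.
#
#   # given an AcsClient instance `client` (see aliyunsdkcore docs):
#   request = ModifySagWanRequest()
#   request.set_SmartAGId('sag-xxxxxxxxxxxx')
#   request.set_PortName('<wan-port-name>')
#   request.set_IPType('static')          # assumed mode name
#   request.set_IP('203.0.113.10')
#   request.set_Mask('255.255.255.0')
#   request.set_Gateway('203.0.113.1')
#   response = client.do_action_with_exception(request)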
| null |
2,094 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import math as ma
from typing import Optional
import torch
from overrides import overrides
from torch import Tensor, nn
from torch.optim.optimizer import Optimizer
from archai.common import ml_utils
from archai.common.common import get_conf
from archai.common.config import Config
from archai.supergraph.algos.xnas.xnas_op import XnasOp
from archai.supergraph.datasets import data
from archai.supergraph.nas.arch_trainer import ArchTrainer
from archai.supergraph.nas.model import Model
from archai.supergraph.nas.model_desc import CellType
from archai.supergraph.utils.checkpoint import CheckPoint
class XnasArchTrainer(ArchTrainer):
def __init__(self, conf_train: Config, model: Model,
checkpoint: Optional[CheckPoint]) -> None:
super().__init__(conf_train, model, checkpoint)
self._conf_w_lossfn = conf_train['lossfn']
@overrides
def create_optimizer(self, conf_optim: Config, params) -> Optimizer:
# return optim that only operates on w, not alphas
return ml_utils.create_optimizer(conf_optim,
self.model.nonarch_params(recurse=True))
@overrides
def pre_fit(self, data_loaders:data.DataLoaders) -> None:
super().pre_fit(data_loaders)
        # optimizers and schedulers need to be recreated for each fit call,
        # as they hold state
assert data_loaders.val_dl is not None
conf = get_conf()
self._train_batch = conf['nas']['search']['loader']['train_batch']
num_val_examples = len(data_loaders.val_dl) * self._train_batch
num_cells = conf['nas']['search']['model_desc']['n_cells']
num_reduction_cells = conf['nas']['search']['model_desc']['n_reductions']
num_normal_cells = num_cells - num_reduction_cells
num_primitives = len(XnasOp.PRIMITIVES)
assert num_cells > 0
assert num_reduction_cells > 0
assert num_normal_cells > 0
assert num_primitives > 0
self._normal_cell_effective_t = num_val_examples * self._epochs * num_normal_cells
self._reduction_cell_effective_t = num_val_examples * \
self._epochs * num_reduction_cells
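        # Exponentiated-gradient learning rate from the XNAS analysis:
        # eta = sqrt(2*ln(k) / (T*G^2)), where k is the number of primitives,
        # T is the effective number of alpha updates, and G bounds the
        # gradient norm (approximated here by the grad-clip value). Note that
        # num_val_examples above is an estimate: len(val_dl) counts batches,
        # so it is multiplied by the training batch size.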
self._normal_cell_lr = ma.sqrt(2 * ma.log(num_primitives) / (
self._normal_cell_effective_t * self._grad_clip * self._grad_clip))
self._reduction_cell_lr = ma.sqrt(2 * ma.log(num_primitives) / (
self._reduction_cell_effective_t * self._grad_clip * self._grad_clip))
self._xnas_optim = _XnasOptimizer(self._normal_cell_lr, self._reduction_cell_lr, self._normal_cell_effective_t,
self._reduction_cell_effective_t, self._train_batch, self._grad_clip,
self._multi_optim, self._apex, self.model)
@overrides
def post_fit(self, data_loaders:data.DataLoaders) -> None:
# delete state we created in pre_fit
del self._xnas_optim
return super().post_fit(data_loaders)
@overrides
def pre_epoch(self, data_loaders:data.DataLoaders)->None:
super().pre_epoch(data_loaders)
# prep val set to train alphas
assert data_loaders.val_dl is not None
self._val_dl = data_loaders.val_dl
self._valid_iter = iter(data_loaders.val_dl) # type: ignore
@overrides
def post_epoch(self, data_loaders:data.DataLoaders)->None:
del self._val_dl
del self._valid_iter # clean up
super().post_epoch(data_loaders)
@overrides
def pre_step(self, x: Tensor, y: Tensor) -> None:
super().pre_step(x, y)
# reset val loader if we exhausted it
try:
x_val, y_val = next(self._valid_iter)
except StopIteration:
# reinit iterator
self._valid_iter = iter(self._val_dl)
x_val, y_val = next(self._valid_iter)
x_val, y_val = x_val.to(self.get_device()), y_val.to(
self.get_device(), non_blocking=True)
# update alphas
self._xnas_optim.step(x, y, x_val, y_val)
@overrides
def METHOD_NAME(self, checkpoint: CheckPoint) -> None:
super().METHOD_NAME(checkpoint)
class _XnasOptimizer:
def __init__(self, ncell_lr: float, rcell_lr: float,
ncell_effective_t: float, rcell_effective_t: float, train_batch: int,
grad_clip: float, optim, apex, model: Model) -> None:
self._ncell_lr = ncell_lr
self._rcell_lr = rcell_lr
self._ncell_effective_t = ncell_effective_t
self._rcell_effective_t = rcell_effective_t
self._train_batch = train_batch
self._grad_clip = grad_clip
self._optim = optim
self._apex = apex
self._lossfn = nn.CrossEntropyLoss()
# to keep track of where we are in effective updates
self._t_rcell = 0
self._t_ncell = 0
self._model = model # main model with respect to w and alpha
@staticmethod
def _get_loss(model, lossfn, x, y):
logits, *_ = model(x) # might also return aux tower logits
return lossfn(logits, y)
def step(self, x_train: Tensor, y_train: Tensor, x_valid: Tensor, y_valid: Tensor) -> None:
# put model in train mode just to be safe
self._model.train()
        # The XNAS authors told Liam Li et al. that the updates are made per
        # data point rather than per batch. While nn.CrossEntropyLoss can
        # return per-data-point losses via the reduction='none' option,
        # loss.backward() can only deal with scalar losses. So for now we
        # process one data point at a time to see if that runs reasonably
        # fast. If not, the next thing to try is to get all per-data-point
        # losses at once and then call loss[i].backward() before updating
        # alphas.
batch_size = x_valid.shape[0]
for i in range(batch_size):
x = torch.unsqueeze(x_valid[i,:], 0)
y = torch.unsqueeze(y_valid[i], 0)
# zero out gradients for safety
self._optim.zero_grad()
# put model through val data
loss = self._get_loss(self._model, self._lossfn, x, y)
# compute gradients
loss.backward()
# do grad clip
self._apex.clip_grad(self._grad_clip, self._model, self._optim)
# for each op in the model update alphas
for cell in self._model.cells:
if cell.desc.cell_type == CellType.Reduction:
lr = self._rcell_lr
T = self._rcell_effective_t
self._t_rcell += 1
t = self._t_rcell
elif cell.desc.cell_type == CellType.Regular:
lr = self._ncell_lr
T = self._ncell_effective_t
self._t_ncell += 1
t = self._t_ncell
else:
raise NotImplementedError
                # BUG: t needs to be corrected
for op in cell.ops():
op.update_alphas(lr, t, T, self._grad_clip)
| null |
2,095 |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2022 Valory AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Tests for aea-test-autonomy plugin base test classes for agent e2e tests."""
from typing import Type, Union
from unittest import mock
import click
import pytest
from aea.test_tools.utils import as_context
from aea_test_autonomy.base_test_classes.agents import (
BaseTestEnd2End,
BaseTestEnd2EndExecution,
)
from aea_test_autonomy.docker.tendermint import FlaskTendermintDockerImage
class BaseTest:
"""BaseTest"""
test_cls: Type[BaseTestEnd2End]
setup_class_called: bool
def setup(self) -> None:
"""Setup test"""
self.setup_class_called = False
def teardown(self) -> None:
"""Teardown test"""
if self.setup_class_called:
self.test_cls.teardown_class()
def setup_test(self) -> Union[BaseTestEnd2End, BaseTestEnd2EndExecution]:
"""Setup test"""
self.test_cls.setup_class()
self.setup_class_called = True
test_instance = self.test_cls() # type: ignore
return test_instance
def test_setup_cls(self) -> None:
"""Test setup_class and setup"""
test_instance = self.setup_test()
assert isinstance(test_instance, self.test_cls)
class TestBaseTestEnd2End(BaseTest):
"""TestBaseTestEnd2End"""
test_cls = BaseTestEnd2End
def test_default_test_class(self) -> None:
"""Test default class attributes, prior to setup_class"""
# BaseTestEnd2End overrides of BaseAEATestCase
assert self.test_cls.capture_log is True
assert self.test_cls.cli_log_options == ["-v", "DEBUG"]
# default values likely to change
assert self.test_cls.happy_path == ()
assert self.test_cls.strict_check_strings == ()
def test_defaults_test_instance(self) -> None:
"""Test defaults, after setup_class"""
assert self.test_cls.agents == set()
assert self.test_cls.subprocesses == []
# no setup -> no tests needed for setup
assert not hasattr(self.test_cls, "setup")
class TestBaseTestEnd2EndExecution(BaseTest):
"""TestBaseTestEnd2EndExecution"""
test_cls = BaseTestEnd2EndExecution
@staticmethod
def set_mocked_flask_tendermint_image(
test_instance: BaseTestEnd2EndExecution, nb_nodes: int
) -> None:
"""Mocked FlaskTendermintDockerImage""" # autouse fixture sets this
# pylint: disable=protected-access
tendermint_image = FlaskTendermintDockerImage(mock.Mock())
FlaskTendermintDockerImage._extra_hosts = {}
tendermint_image.nb_nodes = nb_nodes
test_instance._tendermint_image = tendermint_image
def test_test_run_without_agents(self) -> None:
"""Test test_run without agents"""
nb_nodes = 0
test_instance = self.setup_test()
self.set_mocked_flask_tendermint_image(test_instance, nb_nodes)
test_instance.test_run(nb_nodes=nb_nodes)
def test_test_run_incorrect_agent_package(self) -> None:
"""Test test_run with one agent"""
nb_nodes = 1
test_instance = self.setup_test()
attribute = "agent_package"
with pytest.raises(AttributeError, match=f"has no attribute '{attribute}'"):
test_instance.test_run(nb_nodes)
setattr(test_instance, attribute, "")
with pytest.raises(click.exceptions.BadParameter):
test_instance.test_run(nb_nodes)
non_existent = "author/package"
expected = f'Item "{non_existent}" not found in source folder'
setattr(test_instance, attribute, non_existent)
with pytest.raises(click.exceptions.ClickException, match=expected):
test_instance.test_run(nb_nodes)
wrong_version = "valory/hello_world:0.0.0"
expected = "Wrong agent version in public ID: specified 0.0.0, found"
setattr(test_instance, attribute, wrong_version)
with pytest.raises(click.exceptions.ClickException, match=expected):
test_instance.test_run(nb_nodes)
def test_test_run_incorrect_skill_package(self) -> None:
"""Test incorrect skill package"""
nb_nodes = 1
test_instance = self.setup_test()
self.set_mocked_flask_tendermint_image(test_instance, nb_nodes)
test_instance.agent_package = "valory/hello_world:0.1.0"
attribute = "skill_package"
with pytest.raises(AttributeError, match=f"has no attribute '{attribute}'"):
test_instance.test_run(nb_nodes)
for item in ("", "author/package", "valory/hello_world:0.0.0"):
setattr(test_instance, attribute, item) # same for "author/package"
expected = 'Item "agent_00000" already exists in target folder "."'
with pytest.raises(click.exceptions.ClickException, match=expected):
test_instance.test_run(nb_nodes)
def METHOD_NAME(self) -> None:
"""Test test_run"""
nb_nodes = 1
test_instance = self.setup_test()
self.set_mocked_flask_tendermint_image(test_instance, nb_nodes)
test_instance.wait_to_finish = mock.Mock()
test_instance.agent_package = "valory/hello_world:0.1.0"
test_instance.skill_package = "valory/hello_world_abci:0.1.0"
mocked_missing_from_output = as_context(
mock.patch.object(
test_instance.__class__.__mro__[1], "missing_from_output"
),
mock.patch.object(
test_instance, "missing_from_output", return_value=("", "")
),
)
with mock.patch.object(test_instance, "run_agent") as mocked_run_agent:
with mock.patch.object(
test_instance, "health_check"
) as mocked_health_check:
with mocked_missing_from_output:
test_instance.test_run(nb_nodes)
mocked_run_agent.assert_called_once()
mocked_health_check.assert_called_once()
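# These classes are collected by pytest directly; a minimal invocation sketch
# (the module path is hypothetical):
#
#   pytest tests/test_base_test_classes/test_agents.py -k "TestBaseTestEnd2EndExecution" -x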
| null |
2,096 |
# coding: utf-8
"""
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""
from __future__ import annotations
from petstore_api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
from petstore_api.shared_imports.server_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
AdditionalProperties: typing_extensions.TypeAlias = schemas.NotAnyTypeSchema
class VersionEnums:
@schemas.classproperty
def V1(cls) -> typing.Literal["v1"]:
return Version.validate("v1")
@schemas.classproperty
def METHOD_NAME(cls) -> typing.Literal["v2"]:
return Version.validate("v2")
@dataclasses.dataclass(frozen=True)
class Version(
schemas.Schema
):
types: typing.FrozenSet[typing.Type] = frozenset({
str,
})
default: typing.Literal["v1"] = "v1"
enum_value_to_name: typing.Mapping[typing.Union[int, float, str, schemas.Bool, None], str] = dataclasses.field(
default_factory=lambda: {
"v1": "V1",
"v2": "V2",
}
)
enums = VersionEnums
@typing.overload
@classmethod
def validate(
cls,
arg: typing.Literal["v1"],
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> typing.Literal["v1"]: ...
@typing.overload
@classmethod
def validate(
cls,
arg: typing.Literal["v2"],
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> typing.Literal["v2"]: ...
@typing.overload
@classmethod
def validate(
cls,
arg: str,
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> typing.Literal["v1","v2",]: ...
@classmethod
def validate(
cls,
arg,
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> typing.Literal[
"v1",
"v2",
]:
validated_arg = super().validate_base(
arg,
configuration=configuration,
)
return typing.cast(typing.Literal[
"v1",
"v2",
],
validated_arg
)
Properties = typing.TypedDict(
'Properties',
{
"version": typing.Type[Version],
}
)
class VariablesDict(schemas.immutabledict[str, str]):
__required_keys__: typing.FrozenSet[str] = frozenset({
"version",
})
__optional_keys__: typing.FrozenSet[str] = frozenset({
})
def __new__(
cls,
*,
version: typing.Literal[
"v1",
"v2"
],
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
):
arg_: typing.Dict[str, typing.Any] = {
"version": version,
}
used_arg_ = typing.cast(VariablesDictInput, arg_)
return Variables.validate(used_arg_, configuration=configuration_)
@staticmethod
def from_dict_(
arg: typing.Union[
VariablesDictInput,
VariablesDict
],
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> VariablesDict:
return Variables.validate(arg, configuration=configuration)
@property
def version(self) -> typing.Literal["v1", "v2"]:
return typing.cast(
typing.Literal["v1", "v2"],
self.__getitem__("version")
)
VariablesDictInput = typing.TypedDict(
'VariablesDictInput',
{
"version": typing.Literal[
"v1",
"v2"
],
}
)
@dataclasses.dataclass(frozen=True)
class Variables(
schemas.Schema[VariablesDict, tuple]
):
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
required: typing.FrozenSet[str] = frozenset({
"version",
})
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
additional_properties: typing.Type[AdditionalProperties] = dataclasses.field(default_factory=lambda: AdditionalProperties) # type: ignore
type_to_output_cls: typing.Mapping[
typing.Type,
typing.Type
] = dataclasses.field(
default_factory=lambda: {
schemas.immutabledict: VariablesDict
}
)
@classmethod
def validate(
cls,
arg: typing.Union[
VariablesDictInput,
VariablesDict,
],
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
) -> VariablesDict:
return super().validate_base(
arg,
configuration=configuration,
)
@dataclasses.dataclass
class Server1(server.ServerWithVariables):
variables: VariablesDict = dataclasses.field(
default_factory=lambda: Variables.validate({
"version": Version.default,
})
)
variables_schema: typing.Type[Variables] = Variables
_url: str = "https://petstore.swagger.io/{version}"
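# A hedged usage sketch: Server1 resolves the templated URL from its validated
# variables. The .url attribute is an assumption about the imported
# server.ServerWithVariables base class, whose definition is not shown here.
#
#   s_default = Server1()                                  # version defaults to "v1"
#   s_v2 = Server1(variables=VariablesDict(version="v2"))  # validated override
#   # expected substitution into "https://petstore.swagger.io/{version}":
#   # s_v2.url -> "https://petstore.swagger.io/v2"         (assumed attribute)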
| null |
2,097 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class ModifyNodeSpecRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Dds', '2015-12-01', 'ModifyNodeSpec','dds')
self.set_method('POST')
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_ReadonlyReplicas(self): # Integer
return self.get_query_params().get('ReadonlyReplicas')
def set_ReadonlyReplicas(self, ReadonlyReplicas): # Integer
self.add_query_param('ReadonlyReplicas', ReadonlyReplicas)
def get_CouponNo(self): # String
return self.get_query_params().get('CouponNo')
def set_CouponNo(self, CouponNo): # String
self.add_query_param('CouponNo', CouponNo)
def get_NodeClass(self): # String
return self.get_query_params().get('NodeClass')
def set_NodeClass(self, NodeClass): # String
self.add_query_param('NodeClass', NodeClass)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_EffectiveTime(self): # String
return self.get_query_params().get('EffectiveTime')
def set_EffectiveTime(self, EffectiveTime): # String
self.add_query_param('EffectiveTime', EffectiveTime)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_SwitchTime(self): # String
return self.get_query_params().get('SwitchTime')
def set_SwitchTime(self, SwitchTime): # String
self.add_query_param('SwitchTime', SwitchTime)
def get_NodeId(self): # String
return self.get_query_params().get('NodeId')
def set_NodeId(self, NodeId): # String
self.add_query_param('NodeId', NodeId)
def get_BusinessInfo(self): # String
return self.get_query_params().get('BusinessInfo')
def set_BusinessInfo(self, BusinessInfo): # String
self.add_query_param('BusinessInfo', BusinessInfo)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def get_FromApp(self): # String
return self.get_query_params().get('FromApp')
def set_FromApp(self, FromApp): # String
self.add_query_param('FromApp', FromApp)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_NodeStorage(self): # Integer
return self.get_query_params().get('NodeStorage')
def set_NodeStorage(self, NodeStorage): # Integer
self.add_query_param('NodeStorage', NodeStorage)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_OrderType(self): # String
return self.get_query_params().get('OrderType')
def METHOD_NAME(self, OrderType): # String
self.add_query_param('OrderType', OrderType)
| null |
2,098 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
from typing import Iterator
import torch
from torch import Tensor, autograd, nn
from torch.nn.modules.loss import _Loss
from torch.optim.optimizer import Optimizer
from archai.common import ml_utils
from archai.common.config import Config
from archai.common.utils import zip_eq
from archai.supergraph.nas.model import Model
def _flatten_concate(xs):
"""
flatten all tensor from [d1,d2,...dn] to [d]
and then concat all [d_1] to [d_1+d_2+d_3+...]
:param xs:
:return:
"""
return torch.cat([x.view(-1) for x in xs])
def _get_alphas(model:Model)->Iterator[nn.Parameter]:
return model.all_owned().param_by_kind('alphas')
def _get_loss(model:Model, lossfn, x, y):
logits, *_ = model(x) # might also return aux tower logits
return lossfn(logits, y)
class BilevelOptimizer:
def __init__(self, conf_alpha_optim:Config, w_momentum: float, w_decay: float,
model: Model, lossfn: _Loss) -> None:
self._w_momentum = w_momentum # momentum for w
self._w_weight_decay = w_decay # weight decay for w
self._lossfn = lossfn
self._model = model # main model with respect to w and alpha
self._alphas = list(_get_alphas(self._model))
# this is the optimizer to optimize alphas parameter
self._alpha_optim = ml_utils.create_optimizer(conf_alpha_optim, self._alphas)
def state_dict(self)->dict:
return {
'alpha_optim': self._alpha_optim.state_dict()
}
def METHOD_NAME(self, state_dict)->None:
self._alpha_optim.METHOD_NAME(state_dict['alpha_optim'])
def _unrolled_model(self, x, y, lr: float, main_optim: Optimizer)->Model:
# TODO: should this loss be stored for later use?
loss = _get_loss(self._model, self._lossfn, x, y)
params = _flatten_concate(self._model.parameters()).detach()
        try:
            moment = _flatten_concate(main_optim.state[v]['momentum_buffer'] for v in self._model.parameters())
            moment.mul_(self._w_momentum)
        except KeyError:
            # no momentum buffer yet (e.g. before the first optimizer step)
            moment = torch.zeros_like(params)
# flatten all gradients
grads = _flatten_concate(autograd.grad(loss, self._model.parameters())).data
# indeed, here we implement a simple SGD with momentum and weight decay
# theta = theta - eta * (moment + weight decay + dtheta)
        params = params - lr * (moment + grads + self._w_weight_decay * params)
# construct a new model
return self._params2model(params)
def _params2model(self, params)->Model:
"""
construct a new model with initialized weight from params
it use .state_dict() and load_state_dict() instead of
.parameters() + fill_()
:params: flatten weights, need to reshape to original shape
:return:
"""
params_d, offset = {}, 0
for k, v in self._model.named_parameters():
v_length = v.numel()
# restore params[] value to original shape
params_d[k] = params[offset: offset + v_length].view(v.size())
offset += v_length
assert offset == len(params)
model_new = copy.deepcopy(self._model)
model_dict = self._model.state_dict()
model_dict.update(params_d)
model_new.METHOD_NAME(model_dict)
return model_new.cuda()
def step(self, x_train: Tensor, y_train: Tensor, x_valid: Tensor, y_valid: Tensor,
main_optim: Optimizer) -> None:
        # TODO: unlike the DARTS paper, we get lr from the optimizer instead of the scheduler
lr = main_optim.param_groups[0]['lr']
self._alpha_optim.zero_grad()
# compute the gradient and write it into tensor.grad
# instead of generated by loss.backward()
self._backward_bilevel(x_train, y_train, x_valid, y_valid,
lr, main_optim)
# at this point we should have model with updated gradients for w and alpha
self._alpha_optim.step()
def _backward_bilevel(self, x_train, y_train, x_valid, y_valid, lr, main_optim):
""" Compute unrolled loss and backward its gradients """
# update vmodel with w', but leave alphas as-is
# w' = w - lr * grad
unrolled_model = self._unrolled_model(x_train, y_train, lr, main_optim)
        # compute loss on the validation set for the model with w',
        # then backprop so that .grad is populated for both alphas and w'
vloss = _get_loss(unrolled_model, self._lossfn, x_valid, y_valid)
vloss.backward()
dalpha = [v.grad for v in _get_alphas(unrolled_model)]
dparams = [v.grad.data for v in unrolled_model.parameters()]
hessian = self._hessian_vector_product(dparams, x_train, y_train)
        # the dalpha we have is from the unrolled model, so we need to
        # transfer those grads back to our main model
        # final gradient = dalpha - xi*hessian
        # TODO: currently the alphas lr is the same as the w lr
with torch.no_grad():
for alpha, da, h in zip_eq(self._alphas, dalpha, hessian):
alpha.grad = da - lr*h
# now that model has both w and alpha grads,
# we can run main_optim.step() to update the param values
def _hessian_vector_product(self, dw, x, y, epsilon_unit=1e-2):
"""
Implements equation 8
dw = dw` {L_val(w`, alpha)}
w+ = w + eps * dw
w- = w - eps * dw
hessian = (dalpha {L_trn(w+, alpha)} -dalpha {L_trn(w-, alpha)})/(2*eps)
eps = 0.01 / ||dw||
"""
"""scale epsilon with grad magnitude. The dw
is a multiplier on RHS of eq 8. So this scalling is essential
in making sure that finite differences approximation is not way off
Below, we flatten each w, concate all and then take norm"""
# TODO: is cat along dim 0 correct?
dw_norm = torch.cat([w.view(-1) for w in dw]).norm()
epsilon = epsilon_unit / dw_norm
# w+ = w + epsilon * grad(w')
with torch.no_grad():
for p, v in zip_eq(self._model.parameters(), dw):
p += epsilon * v
# Now that we have model with w+, we need to compute grads wrt alphas
# This loss needs to be on train set, not validation set
loss = _get_loss(self._model, self._lossfn, x, y)
dalpha_plus = autograd.grad(
loss, self._alphas) # dalpha{L_trn(w+)}
# get model with w- and then compute grads wrt alphas
# w- = w - eps*dw`
with torch.no_grad():
for p, v in zip_eq(self._model.parameters(), dw):
                # we had already added dw above, so subtracting twice gives w-
p -= 2. * epsilon * v
# similarly get dalpha_minus
loss = _get_loss(self._model, self._lossfn, x, y)
dalpha_minus = autograd.grad(loss, self._alphas)
# reset back params to original values by adding dw
with torch.no_grad():
for p, v in zip_eq(self._model.parameters(), dw):
p += epsilon * v
# apply eq 8, final difference to compute hessian
h = [(p - m) / (2. * epsilon)
for p, m in zip_eq(dalpha_plus, dalpha_minus)]
return h
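# A hedged sketch of how this optimizer is typically driven; the search-loop
# wiring below is illustrative, not the actual Archai trainer code.
#
#   lossfn = nn.CrossEntropyLoss()
#   bilevel = BilevelOptimizer(conf_alpha_optim, w_momentum=0.9, w_decay=3e-4,
#                              model=model, lossfn=lossfn)
#   for (x_trn, y_trn), (x_val, y_val) in zip(train_dl, valid_dl):
#       bilevel.step(x_trn, y_trn, x_val, y_val, main_optim)  # update alphas
#       main_optim.zero_grad()
#       _get_loss(model, lossfn, x_trn, y_trn).backward()     # then update w
#       main_optim.step()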
| null |
2,099 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data
class ListTransitRouterRouteTablesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'ListTransitRouterRouteTables')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_TransitRouterRouteTableNamess(self): # RepeatList
return self.get_query_params().get('TransitRouterRouteTableNames')
def set_TransitRouterRouteTableNamess(self, TransitRouterRouteTableNames): # RepeatList
for depth1 in range(len(TransitRouterRouteTableNames)):
self.add_query_param('TransitRouterRouteTableNames.' + str(depth1 + 1), TransitRouterRouteTableNames[depth1])
def get_RouteTableOptions(self): # Struct
return self.get_query_params().get('RouteTableOptions')
def METHOD_NAME(self, RouteTableOptions): # Struct
if RouteTableOptions.get('MultiRegionECMP') is not None:
self.add_query_param('RouteTableOptions.MultiRegionECMP', RouteTableOptions.get('MultiRegionECMP'))
def get_TransitRouterRouteTableType(self): # String
return self.get_query_params().get('TransitRouterRouteTableType')
def set_TransitRouterRouteTableType(self, TransitRouterRouteTableType): # String
self.add_query_param('TransitRouterRouteTableType', TransitRouterRouteTableType)
def get_TransitRouterRouteTableStatus(self): # String
return self.get_query_params().get('TransitRouterRouteTableStatus')
def set_TransitRouterRouteTableStatus(self, TransitRouterRouteTableStatus): # String
self.add_query_param('TransitRouterRouteTableStatus', TransitRouterRouteTableStatus)
def get_TransitRouterRouteTableIdss(self): # RepeatList
return self.get_query_params().get('TransitRouterRouteTableIds')
def set_TransitRouterRouteTableIdss(self, TransitRouterRouteTableIds): # RepeatList
for depth1 in range(len(TransitRouterRouteTableIds)):
self.add_query_param('TransitRouterRouteTableIds.' + str(depth1 + 1), TransitRouterRouteTableIds[depth1])
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_TransitRouterId(self): # String
return self.get_query_params().get('TransitRouterId')
def set_TransitRouterId(self, TransitRouterId): # String
self.add_query_param('TransitRouterId', TransitRouterId)
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
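# A hedged paging sketch: MaxResults bounds each page and NextToken resumes
# from the previous response. The response field names are assumptions based
# on the usual CBN JSON shape.
#
#   import json
#
#   # given an AcsClient instance `client` (see aliyunsdkcore docs):
#   request = ListTransitRouterRouteTablesRequest()
#   request.set_TransitRouterId('tr-xxxxxxxxxxxx')  # placeholder id
#   request.set_MaxResults(20)
#   tables = []
#   while True:
#       body = json.loads(client.do_action_with_exception(request))
#       tables.extend(body.get('TransitRouterRouteTables', []))  # assumed key
#       next_token = body.get('NextToken')
#       if not next_token:
#           break
#       request.set_NextToken(next_token)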
| null |