id (int64, 0–300k) | label (string, lengths 1–74, ⌀ allowed) | text (string, lengths 4k–8k) |
---|---|---|
1,300 |
on results
|
from android.runnable import run_on_ui_thread
from jnius import autoclass
from jnius import java_method
from jnius import PythonJavaClass
from plyer.facades import STT
from plyer.platforms.android import activity
ArrayList = autoclass('java.util.ArrayList')
Bundle = autoclass('android.os.Bundle')
Context = autoclass('android.content.Context')
Intent = autoclass('android.content.Intent')
RecognizerIntent = autoclass('android.speech.RecognizerIntent')
RecognitionListener = autoclass('android.speech.RecognitionListener')
SpeechRecognizer = autoclass('android.speech.SpeechRecognizer')
SpeechResults = SpeechRecognizer.RESULTS_RECOGNITION
class SpeechListener(PythonJavaClass):
__javainterfaces__ = ['android/speech/RecognitionListener']
# class-level defaults, because PythonJavaClass otherwise fails
# to see these attributes later in the getters and setters
_error_callback = None
_result_callback = None
_partial_result_callback = None
_volume_callback = None
def __init__(self):
super().__init__()
# overwrite class variables in the object
self._error_callback = None
self._result_callback = None
self._partial_result_callback = None
self._volume_callback = None
# error handling
@property
def error_callback(self):
return self._error_callback
@error_callback.setter
def error_callback(self, callback):
'''
Set the error callback. It is called when an error occurs.
:param callback: function with one parameter for error message
'''
self._error_callback = callback
# result handling
@property
def result_callback(self):
return self._result_callback
@result_callback.setter
def result_callback(self, callback):
'''
Set result callback. It is called when results are received.
:param callback: function with one parameter for lists of strings
'''
self._result_callback = callback
@property
def partial_result_callback(self):
return self._partial_result_callback
@partial_result_callback.setter
def partial_result_callback(self, callback):
'''
Set partial result callback. It is called when partial results are
received while the listener is still in listening mode.
:param callback: function with one parameter for lists of strings
'''
self._partial_result_callback = callback
# voice changes handling
@property
def volume_callback(self):
return self._volume_callback
@volume_callback.setter
def volume_callback(self, callback):
'''
Set the voice volume callback.
It is called when the loudness of the voice changes.
:param callback: function with one parameter for volume RMS dB (float).
'''
self._volume_callback = callback
# Implementation of the Java RecognitionListener interface
@java_method('()V')
def onBeginningOfSpeech(self):
pass
@java_method('([B)V')
def onBufferReceived(self, buffer):
pass
@java_method('()V')
def onEndOfSpeech(self):
pass
@java_method('(I)V')
def onError(self, error):
msg = ''
if error == SpeechRecognizer.ERROR_AUDIO:
msg = 'audio'
if error == SpeechRecognizer.ERROR_CLIENT:
msg = 'client'
if error == SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
msg = 'insufficient_permissions'
if error == SpeechRecognizer.ERROR_NETWORK:
msg = 'network'
if error == SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
msg = 'network_timeout'
if error == SpeechRecognizer.ERROR_NO_MATCH:
msg = 'no_match'
if error == SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
msg = 'recognizer_busy'
if error == SpeechRecognizer.ERROR_SERVER:
msg = 'server'
if error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
msg = 'speech_timeout'
if msg and self.error_callback:
self.error_callback('error:' + msg)
@java_method('(ILandroid/os/Bundle;)V')
def onEvent(self, event_type, params):
pass
@java_method('(Landroid/os/Bundle;)V')
def onPartialResults(self, results):
texts = []
matches = results.getStringArrayList(SpeechResults)
for match in matches.toArray():
if isinstance(match, bytes):
match = match.decode('utf-8')
texts.append(match)
if texts and self.partial_result_callback:
self.partial_result_callback(texts)
@java_method('(Landroid/os/Bundle;)V')
def onReadyForSpeech(self, params):
pass
@java_method('(Landroid/os/Bundle;)V')
def METHOD_NAME(self, results):
texts = []
matches = results.getStringArrayList(SpeechResults)
for match in matches.toArray():
if isinstance(match, bytes):
match = match.decode('utf-8')
texts.append(match)
if texts and self.result_callback:
self.result_callback(texts)
@java_method('(F)V')
def onRmsChanged(self, rmsdB):
if self.volume_callback:
self.volume_callback(rmsdB)
class AndroidSpeech(STT):
'''
Android Speech Implementation.
Android class `SpeechRecognizer`'s listening deactivates automatically.
Class methods `_on_error()` and `_on_result()` act as the listeners. You can
find the documentation here:
https://developer.android.com/reference/android/speech/RecognitionListener
'''
def _on_error(self, msg):
self.errors.append(msg)
self.stop()
def _on_result(self, messages):
self.results.extend(messages)
self.stop()
def _on_partial(self, messages):
self.partial_results.extend(messages)
@run_on_ui_thread
def _start(self):
intent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
intent.putExtra(
RecognizerIntent.EXTRA_CALLING_PACKAGE,
activity.getPackageName()
)
# language preferences
intent.putExtra(
RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, self.language
)
intent.putExtra(
RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH
)
# results settings
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1000)
intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, True)
if self.prefer_offline:
intent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, True)
# listener and callbacks
listener = SpeechListener()
listener.error_callback = self._on_error
listener.result_callback = self._on_result
listener.partial_result_callback = self._on_partial
# create recognizer and start
self.speech = SpeechRecognizer.createSpeechRecognizer(activity)
self.speech.setRecognitionListener(listener)
self.speech.startListening(intent)
@run_on_ui_thread
def _stop(self):
if not self.speech:
return
# stop listening
self.speech.stopListening()
# free object
self.speech.destroy()
self.speech = None
def _exist(self):
return bool(
SpeechRecognizer.isRecognitionAvailable(activity)
)
def instance():
return AndroidSpeech()
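# A minimal usage sketch (assumptions: the plyer STT facade exposes start(),
# stop() and exist() wrappers around _start()/_stop()/_exist(), and the app
# already holds the Android RECORD_AUDIO permission):
#
#     stt = instance()
#     if stt.exist():              # a recognition service is available
#         stt.language = 'en-US'
#         stt.start()              # final text is appended to stt.results
#         ...
#         stt.stop()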
|
1,301 |
degree
|
# sage.doctest: optional - sage.rings.finite_rings (because all doctests use finite fields)
"""
Places of function fields: rational
"""
# ****************************************************************************
# Copyright (C) 2016-2022 Kwankyu Lee <[email protected]>
# 2019 Brent Baccala
# 2021 Jonathan Kliem
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
# ****************************************************************************
from .place import FunctionFieldPlace
class FunctionFieldPlace_rational(FunctionFieldPlace):
"""
Places of rational function fields.
"""
def METHOD_NAME(self):
"""
Return the degree of the place.
EXAMPLES::
sage: F.<x> = FunctionField(GF(2))
sage: O = F.maximal_order()
sage: i = O.ideal(x^2 + x + 1)
sage: p = i.place()
sage: p.degree()
2
"""
if self.is_infinite_place():
return 1
else:
return self._prime.gen().numerator().METHOD_NAME()
def is_infinite_place(self):
"""
Return ``True`` if the place is at infinity.
EXAMPLES::
sage: F.<x> = FunctionField(GF(2))
sage: F.places()
[Place (1/x), Place (x), Place (x + 1)]
sage: [p.is_infinite_place() for p in F.places()]
[True, False, False]
"""
F = self.function_field()
return self.prime_ideal().ring() == F.maximal_order_infinite()
def local_uniformizer(self):
"""
Return a local uniformizer of the place.
EXAMPLES::
sage: F.<x> = FunctionField(GF(2))
sage: F.places()
[Place (1/x), Place (x), Place (x + 1)]
sage: [p.local_uniformizer() for p in F.places()]
[1/x, x, x + 1]
"""
return self.prime_ideal().gen()
def residue_field(self, name=None):
"""
Return the residue field of the place.
EXAMPLES::
sage: F.<x> = FunctionField(GF(2))
sage: O = F.maximal_order()
sage: p = O.ideal(x^2 + x + 1).place()
sage: k, fr_k, to_k = p.residue_field() # needs sage.rings.function_field
sage: k # needs sage.rings.function_field
Finite Field in z2 of size 2^2
sage: fr_k # needs sage.rings.function_field
Ring morphism:
From: Finite Field in z2 of size 2^2
To: Valuation ring at Place (x^2 + x + 1)
sage: to_k # needs sage.rings.function_field
Ring morphism:
From: Valuation ring at Place (x^2 + x + 1)
To: Finite Field in z2 of size 2^2
"""
return self.valuation_ring().residue_field(name=name)
def _residue_field(self, name=None):
"""
Return the residue field of the place along with the maps from
and to it.
INPUT:
- ``name`` -- string; name of the generator of the residue field
EXAMPLES::
sage: F.<x> = FunctionField(GF(2))
sage: O = F.maximal_order()
sage: i = O.ideal(x^2 + x + 1)
sage: p = i.place()
sage: R, fr, to = p._residue_field()
sage: R
Finite Field in z2 of size 2^2
sage: [fr(e) for e in R.list()]
[0, x, x + 1, 1]
sage: to(x*(x+1)) == to(x) * to(x+1)
True
"""
F = self.function_field()
prime = self.prime_ideal()
if self.is_infinite_place():
K = F.constant_base_field()
def from_K(e):
return F(e)
def to_K(f):
n = f.numerator()
d = f.denominator()
n_deg = n.METHOD_NAME()
d_deg = d.METHOD_NAME()
if n_deg < d_deg:
return K(0)
elif n_deg == d_deg:
return n.lc() / d.lc()
else:
raise TypeError("not in the valuation ring")
else:
O = F.maximal_order()
K, from_K, _to_K = O._residue_field(prime, name=name)
def to_K(f):
if f in O: # f.denominator() is 1
return _to_K(f.numerator())
else:
d = F(f.denominator())
n = d * f
nv = prime.valuation(O.ideal(n))
dv = prime.valuation(O.ideal(d))
if nv > dv:
return K(0)
elif dv > nv:
raise TypeError("not in the valuation ring")
s = ~prime.gen()
rd = d * s**dv # in O but not in prime
rn = n * s**nv # in O but not in prime
return to_K(rn) / to_K(rd)
return K, from_K, to_K
def valuation_ring(self):
"""
Return the valuation ring at the place.
EXAMPLES::
sage: K.<x> = FunctionField(GF(2)); _.<Y> = K[]
sage: L.<y> = K.extension(Y^2 + Y + x + 1/x) # needs sage.rings.function_field
sage: p = L.places_finite()[0] # needs sage.rings.function_field
sage: p.valuation_ring() # needs sage.rings.function_field
Valuation ring at Place (x, x*y)
"""
from .valuation_ring import FunctionFieldValuationRing
return FunctionFieldValuationRing(self.function_field(), self)
|
1,302 |
list
|
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._operations import build_list_request
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.attestation.aio.AttestationManagementClient`'s
:attr:`operations` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = METHOD_NAME(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace_async
async def METHOD_NAME(self, **kwargs: Any) -> _models.OperationList:
"""Lists all of the available Azure attestation operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationList or the result of cls(response)
:rtype: ~azure.mgmt.attestation.models.OperationList
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2020-10-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationList]
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("OperationList", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
METHOD_NAME.metadata = {"url": "/providers/Microsoft.Attestation/operations"} # type: ignore
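# A minimal usage sketch (assumptions: azure-identity is available and the
# OperationList model exposes its entries via `.value`; the masked coroutine
# above is the client's `operations.list()` call):
#
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.attestation.aio import AttestationManagementClient
#
#     async def show_operations(subscription_id: str):
#         async with AttestationManagementClient(DefaultAzureCredential(), subscription_id) as client:
#             ops = await client.operations.list()
#             for op in ops.value:
#                 print(op.name)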
|
1,303 |
new function
|
import copy
import inspect
import logging
import functools
from typing import Dict, Set
from itertools import takewhile
from weakref import WeakKeyDictionary, ref
from inspect import isgenerator
logger = logging.getLogger(__name__)
class EventsGatherMetaclass(type):
"""
Metaclass that is used for Eventful to gather events that classes declare to
publish.
"""
def __new__(cls, name, parents, d):
eventful_sub = super(EventsGatherMetaclass, cls).__new__(cls, name, parents, d)
bases = inspect.getmro(parents[0])
if name == "Eventful":
return eventful_sub
subclasses = takewhile(lambda c: c is not Eventful, bases)
relevant_classes = [eventful_sub] + list(subclasses)
# Add a class that defines '_published_events' classmethod to a dict for
# later lookup. Aggregate the events of all subclasses.
relevant_events = set()
for sub in relevant_classes:
# Not using hasattr() here because we only want classes where it's explicitly
# defined.
if "_published_events" in sub.__dict__:
relevant_events.update(sub._published_events)
Eventful.__all_events__[eventful_sub] = relevant_events
return eventful_sub
class Eventful(object, metaclass=EventsGatherMetaclass):
"""
Abstract class for objects emitting and receiving events
An eventful object can:
- publish an event with arbitrary arguments to its subscribers
- let foreign objects subscribe their methods to events emitted here
- forward events to/from other eventful objects
Any time an Eventful object is deserialized:
- All previous subscriptions need to be resubscribed
- All objects that would previously receive forwarded events need to be reconnected
"""
# Maps an Eventful subclass with a set of all the events it publishes.
__all_events__: Dict["Eventful", Set[str]] = dict()
# Set of subscribed events - used as an optimization to only publish events that someone subscribes to
__sub_events__: Set[str] = set()
# Set in subclass to advertise the events it plans to publish
_published_events: Set[str] = set()
# Event names prefixes
prefixes = ("will_", "did_", "on_")
@classmethod
def all_events(cls):
"""
Return all events that all subclasses have so far registered to publish.
"""
all_evts = set()
for cls, evts in cls.__all_events__.items():
all_evts.update(evts)
return all_evts
@staticmethod
def will_did(name, can_raise=False):
"""Pre/pos emiting signal"""
def deco(func):
@functools.wraps(func)
def METHOD_NAME(self, *args, **kw):
self._publish(f"will_{name}", *args, can_raise=can_raise, **kw)
result = func(self, *args, **kw)
self._publish(f"did_{name}", result, can_raise=can_raise)
return result
return METHOD_NAME
return deco
def __init__(self, *args, **kwargs):
# A dictionary from "event name" -> callback methods
# Note that several methods can be associated with the same object
self._signals = dict()
# a set of sink eventful objects (see forward_events_from())
self._forwards = WeakKeyDictionary()
super().__init__()
def __setstate__(self, state):
"""It wont get serialized by design, user is responsible to reconnect"""
self._signals = dict()
self._forwards = WeakKeyDictionary()
return True
def __getstate__(self):
return {}
def _unref(self, robj):
# this is called when an object that has subscribed to events emitted
# here has recently been garbage collected
# This simply removes all callback methods associated with that object
# If an event name ends up with no callbacks at all, its entry is deleted as well
remove = set()
for name, bucket in self._signals.items():
if robj in bucket:
del bucket[robj]
if len(bucket) == 0:
remove.add(name)
for name in remove:
del self._signals[name]
def _get_signal_bucket(self, name):
# Each event name has a bucket of callback methods
# A bucket is a dictionary obj -> set(method1, method2...)
return self._signals.setdefault(name, dict())
def _check_event(self, _name):
basename = _name
for prefix in self.prefixes:
if _name.startswith(prefix):
basename = _name[len(prefix) :]
cls = self.__class__
if basename not in cls.__all_events__[cls]:
logger.warning("Event '%s' not pre-declared. (self: %s)", _name, repr(self))
# Wrapper for _publish_impl that also makes sure the event is published from
# a class that supports it.
# The underscore _name is to avoid naming collisions with callback params
def _publish(self, _name, *args, can_raise=True, **kwargs):
# only publish if there is at least one subscriber
try:
if _name in self.__sub_events__:
self._check_event(_name)
self._publish_impl(_name, *args, **kwargs)
except Exception as e:
logger.warning("Exception raised in callback: %s", e)
if can_raise:
raise
# Separate from _publish since the recursive method call to forward an event
# shouldn't check the event.
def _publish_impl(self, _name, *args, **kwargs):
bucket = self._get_signal_bucket(_name)
for robj, methods in bucket.items():
for callback in methods:
# Need to clone any iterable args, otherwise the first usage will drain it.
# If the generator isn't available on `self`, give up and return it anyway.
new_args = (
(arg if not isgenerator(arg) else getattr(self, arg.__name__, arg))
for arg in args
)
callback(robj(), *new_args, **kwargs)
# The include_source flag indicates to prepend the source of the event in
# the callback signature. This is set on forward_events_from/to
items = tuple(self._forwards.items())
for sink, include_source in items:
if include_source:
sink._publish_impl(_name, self, *args, **kwargs)
else:
sink._publish_impl(_name, *args, **kwargs)
def subscribe(self, name, method):
assert inspect.ismethod(method), f"{method.__class__.__name__} is not a method"
obj, callback = method.__self__, method.__func__
bucket = self._get_signal_bucket(name)
robj = ref(obj, self._unref) # see unref() for explanation
bucket.setdefault(robj, set()).add(callback)
self.__sub_events__.add(name)
def forward_events_from(self, source: "Eventful", include_source: bool = False) -> None:
assert isinstance(source, Eventful), f"{source.__class__.__name__} is not Eventful"
source.forward_events_to(self, include_source=include_source)
def forward_events_to(self, sink: "Eventful", include_source: bool = False) -> None:
"""This forwards signal to sink"""
assert isinstance(sink, Eventful), f"{sink.__class__.__name__} is not Eventful"
self._forwards[sink] = include_source
def copy_eventful_state(self, new_object: "Eventful"):
new_object._forwards = copy.copy(self._forwards)
new_object._signals = copy.copy(self._signals)
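# A minimal usage sketch (hypothetical Worker/Watcher classes; the wrapper
# returned by Eventful.will_did publishes "will_<name>"/"did_<name>" around the
# wrapped call, and only events with at least one subscriber are delivered):
#
#     class Worker(Eventful):
#         _published_events = {"run"}
#
#         @Eventful.will_did("run")
#         def run(self, data):
#             return len(data)
#
#     class Watcher:
#         def on_did_run(self, result):
#             print("run finished:", result)
#
#     worker, watcher = Worker(), Watcher()
#     worker.subscribe("did_run", watcher.on_did_run)
#     worker.run([1, 2, 3])   # watcher.on_did_run receives 3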
|
1,304 |
previous page
|
from __future__ import annotations
import asyncio
import typing as T
import discord
from utils.default import split_list
from .Context import Context
from .views import QuotientView
class PageLine(T.NamedTuple):
line: T.Optional[str] = None
image: T.Optional[str] = None
# embed: T.Optional[discord.Embed] = None
class QuoPages:
def __init__(
self,
ctx: Context,
*,
per_page=10,
timeout=60.0,
title=None,
show_page_count=True,
embed_color=0x00FFB3,
compact=False,
):
self.ctx = ctx
self.per_page = per_page
self.timeout = timeout
self.title = title or None
self.show_page_count = show_page_count
self.enteries: T.List[PageLine] = []
self.items: T.List[discord.ui.Item] = []
self.embed_color = embed_color
self.compact = compact
self.pages: T.List[PageLine] = []
self.cur_page = 1
def add_line(self, line: PageLine):
self.enteries.append(line)
@property
def embed(self) -> discord.Embed:
_p = self.pages[self.cur_page - 1]
_e = discord.Embed(color=self.embed_color, title=self.title)
_e.description = _p.line
if _p.image:
_e.set_image(url=_p.image)
if self.show_page_count:
_e.set_footer(text=f"Page {self.pages.index(_p) + 1} of {len(self.pages)}")
return _e
@property
def current_page(self):
...
async def paginate(self):
if self.per_page <= 1:
self.pages = self.enteries
else:
for _ in split_list(self.enteries, self.per_page):
_: T.List[PageLine]
self.pages.append(PageLine("".join(ent.line for ent in _), _[0].image))
view = QuoPageView(
self.ctx,
pages=self.pages,
items=self.items,
embed=self.embed,
show_count=self.show_page_count,
need_skip=self.compact,
)
if len(self.pages) <= 1:
view.message = await self.ctx.send(embed=self.embed)
return
view.message = await self.ctx.send(embed=self.embed, view=view)
class QuoPageView(QuotientView):
def __init__(
self,
ctx: Context,
*,
pages: T.List[PageLine],
items: T.Optional[T.List[discord.ui.Item]] = None,
embed: discord.Embed,
show_count: bool,
need_skip: bool,
):
super().__init__(ctx, timeout=40)
self.pages = pages
self.items = items
self.current_page = 1
self.embed = embed
self.show_count = show_count
self.need_skip = need_skip
self.__input_lock = asyncio.Lock()
self.clear_items()
self.fill_items()
def fill_items(self) -> None:
for item in self.items: # Not sure about these items' positions
self.add_item(item)
self.add_item(self.first_page)
self.add_item(self.METHOD_NAME)
if self.need_skip:
self.add_item(self.skip_page)
self.add_item(self.next_page)
self.add_item(self.last_page)
def update_embed(self):
if self.show_count:
self.embed.set_footer(text=f"Page {self.current_page} of {len(self.pages)}")
self.embed.description = self.pages[self.current_page - 1].line
if self.pages[self.current_page - 1].image:
self.embed.set_image(url=self.pages[self.current_page - 1].image)
@discord.ui.button(
style=discord.ButtonStyle.green,
custom_id="first",
emoji="<:double_left:878668594530099220>",
)
async def first_page(self, interaction: discord.Interaction, button: discord.Button):
self.current_page = 1
self.update_embed()
await interaction.response.edit_message(embed=self.embed, view=self)
@discord.ui.button(style=discord.ButtonStyle.green, custom_id="previous", emoji="<:left:878668491660623872>")
async def METHOD_NAME(self, interaction: discord.Interaction, button: discord.Button):
if self.current_page == 1:
return
self.current_page -= 1
self.update_embed()
await interaction.response.edit_message(embed=self.embed, view=self)
@discord.ui.button(style=discord.ButtonStyle.green, custom_id="skipto", label="Skip to page ...")
async def skip_page(self, interaction: discord.Interaction, button: discord.Button):
if self.__input_lock.locked():
return await interaction.response.send_message("Already waiting for your response...", ephemeral=True)
if self.message is None:
return
async with self.__input_lock:
channel = self.message.channel
author_id = interaction.user and interaction.user.id
await interaction.response.send_message("Please enter the page number you want to skip to.", ephemeral=True)
def _msg_check(m: discord.Message) -> bool:
return m.author.id == author_id and channel == m.channel and m.content.isdigit()
try:
msg = await self.ctx.bot.wait_for("message", check=_msg_check, timeout=30.0)
except asyncio.TimeoutError:
await interaction.followup.send("Took too long.", ephemeral=True)
await asyncio.sleep(5)
else:
page = int(msg.content)
await msg.delete()
if page > len(self.pages):
await interaction.followup.send("Page number too high.", ephemeral=True)
return
self.current_page = page
self.update_embed()
if interaction.response.is_done():
if self.message:
await self.message.edit(embed=self.embed, view=self)
else:
await interaction.response.edit_message(embed=self.embed, view=self)
@discord.ui.button(style=discord.ButtonStyle.green, custom_id="next", emoji="<:right:878668370331983913>")
async def next_page(self, interaction: discord.Interaction, button: discord.Button):
if self.current_page == len(self.pages):
return
self.current_page += 1
self.update_embed()
await interaction.response.edit_message(embed=self.embed, view=self)
@discord.ui.button(
style=discord.ButtonStyle.green,
custom_id="last",
emoji="<:double_right:878668437193359392>",
)
async def last_page(self, interaction: discord.Interaction, button: discord.Button):
self.current_page = len(self.pages)
self.update_embed()
await interaction.response.edit_message(embed=self.embed, view=self)
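# A minimal usage sketch (assumptions: called from an async command handler that
# already has a Context `ctx`, and `rows` is an iterable of display strings):
#
#     pages = QuoPages(ctx, per_page=5, title="Leaderboard")
#     for row in rows:
#         pages.add_line(PageLine(line=f"{row}\n"))
#     await pages.paginate()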
|
1,305 |
test hybridconnetion curd
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
import time
import azure.mgmt.relay.models
from azure.mgmt.relay.models import RelayNamespace, Sku, AccessRights
from devtools_testutils import AzureMgmtRecordedTestCase, ResourceGroupPreparer, recorded_by_proxy
class TestMgmtHybridConnection(AzureMgmtRecordedTestCase):
def setup_method(self, method):
self.relay_client = self.create_mgmt_client(
azure.mgmt.relay.RelayAPI
)
@ResourceGroupPreparer()
@recorded_by_proxy
def METHOD_NAME(self, resource_group, location):
resource_group_name = resource_group.name
#Create a Namespace
namespace_name = "testingpythontestcasenamespacehybridconnection"
namespaceparameter = RelayNamespace(location=location, tags={'tag1': 'value1', 'tag2': 'value2'}, sku=Sku(tier="standard"))
creatednamespace = self.relay_client.namespaces.begin_create_or_update(resource_group_name, namespace_name, namespaceparameter).result()
assert creatednamespace.name == namespace_name
#
# # Get created Namespace
#
getnamespaceresponse = self.relay_client.namespaces.get(resource_group_name, namespace_name)
assert getnamespaceresponse.name == namespace_name
# Create a HybridConnection
hybridconnection_name = "testingpythontestcasehybridconnection"
createdhybridconnectionresponse = self.relay_client.hybrid_connections.create_or_update(resource_group_name, namespace_name, hybridconnection_name, {
"requires_client_authorization": True,
"user_metadata": "User data for HybridConnection"
})
assert createdhybridconnectionresponse.name == hybridconnection_name
assert createdhybridconnectionresponse.requires_client_authorization == True
#Get the created Hybridconnection
gethybridconnectionresponse = self.relay_client.hybrid_connections.get(resource_group_name, namespace_name, hybridconnection_name)
assert gethybridconnectionresponse.name == hybridconnection_name
assert gethybridconnectionresponse.user_metadata == "User data for HybridConnection"
#Get the List of Hybridconnection by namespaces
getlistbynamespacehybridconnectionresponse = list(self.relay_client.hybrid_connections.list_by_namespace(resource_group_name, namespace_name))
assert len(getlistbynamespacehybridconnectionresponse) > 0
updatehybridconnectionresponse = self.relay_client.hybrid_connections.create_or_update(resource_group_name, namespace_name, hybridconnection_name, {
"user_metadata": "User data for HybridConnection updated"
})
assert updatehybridconnectionresponse.name == hybridconnection_name
assert updatehybridconnectionresponse.requires_client_authorization == True
assert updatehybridconnectionresponse.user_metadata == "User data for HybridConnection updated"
# Create a new authorizationrule
authoRule_name = "testingauthrulepy"
createhybridconnectionauthorule = self.relay_client.hybrid_connections.create_or_update_authorization_rule(resource_group_name, namespace_name, hybridconnection_name, authoRule_name,{
"rights": [AccessRights('Send'),AccessRights('Listen')]
})
assert createhybridconnectionauthorule.name == authoRule_name, "Authorization rule name not as created - create_or_update_authorization_rule"
assert len(createhybridconnectionauthorule.rights) == 2
# Get the created authorizationrule
gethybridconnectionauthorule = self.relay_client.hybrid_connections.get_authorization_rule(resource_group_name, namespace_name, hybridconnection_name, authoRule_name)
assert gethybridconnectionauthorule.name == authoRule_name, "Authorization rule name not as passed as parameter - get_authorization_rule"
assert len(gethybridconnectionauthorule.rights) == 2, "Access rights mismatch as created - get_authorization_rule"
# update the rights of the authorization rule
gethybridconnectionauthorule.rights.append('Manage')
updatehybridconnectionauthorule = self.relay_client.hybrid_connections.create_or_update_authorization_rule(resource_group_name, namespace_name, hybridconnection_name, authoRule_name, gethybridconnectionauthorule)
assert updatehybridconnectionauthorule.name == authoRule_name, "Authorization rule name not as passed as parameter for update call - create_or_update_authorization_rule"
assert len(updatehybridconnectionauthorule.rights) == 3, "Access rights mismatch as updated - create_or_update_authorization_rule"
# list all the authorization rules for the given namespace
hybridconnectionauthorulelist = list(self.relay_client.hybrid_connections.list_authorization_rules(resource_group_name, namespace_name, hybridconnection_name))
assert len(hybridconnectionauthorulelist) == 1, "number of authorization rule mismatch with the created + default = 2 - list_authorization_rules"
#List keys for the authorization rule
listkeysauthorizationrule = self.relay_client.hybrid_connections.list_keys(resource_group_name, namespace_name, hybridconnection_name, authoRule_name)
assert listkeysauthorizationrule is not None
# regenerate Keys for authorizationrule - Primary
regenratePrimarykeyauthorizationrule = self.relay_client.hybrid_connections.regenerate_keys(resource_group_name, namespace_name, hybridconnection_name, authoRule_name, {
"key_type": 'PrimaryKey'
})
assert listkeysauthorizationrule.primary_key !=regenratePrimarykeyauthorizationrule.primary_key
# regenerate Keys for authorizationrule - Secondary
regenrateSecondarykeyauthorizationrule = self.relay_client.hybrid_connections.regenerate_keys(resource_group_name,namespace_name, hybridconnection_name, authoRule_name, {
"key_type": 'SecondaryKey'
})
assert listkeysauthorizationrule.secondary_key != regenrateSecondarykeyauthorizationrule.secondary_key
# delete the authorizationrule
self.relay_client.hybrid_connections.delete_authorization_rule(resource_group_name, namespace_name, hybridconnection_name, authoRule_name)
# Delete the created HybridConnection
gethybridconnectionresponse = self.relay_client.hybrid_connections.delete(resource_group_name, namespace_name, hybridconnection_name)
# Delete the created namespace
self.relay_client.namespaces.begin_delete(resource_group_name, namespace_name).result()
# ------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
1,306 |
test proto2 file shelf
|
import unittest
import shelve
import glob
from test import support
from collections.abc import MutableMapping
from test.test_dbm import dbm_iterator
def L1(s):
return s.decode("latin-1")
class byteskeydict(MutableMapping):
"Mapping that supports bytes keys"
def __init__(self):
self.d = {}
def __getitem__(self, key):
return self.d[L1(key)]
def __setitem__(self, key, value):
self.d[L1(key)] = value
def __delitem__(self, key):
del self.d[L1(key)]
def __len__(self):
return len(self.d)
def iterkeys(self):
for k in self.d.keys():
yield k.encode("latin-1")
__iter__ = iterkeys
def keys(self):
return list(self.iterkeys())
def copy(self):
return byteskeydict(self.d)
class TestCase(unittest.TestCase):
fn = "shelftemp.db"
def tearDown(self):
for f in glob.glob(self.fn+"*"):
support.unlink(f)
def test_close(self):
d1 = {}
s = shelve.Shelf(d1, protocol=2, writeback=False)
s['key1'] = [1,2,3,4]
self.assertEqual(s['key1'], [1,2,3,4])
self.assertEqual(len(s), 1)
s.close()
self.assertRaises(ValueError, len, s)
try:
s['key1']
except ValueError:
pass
else:
self.fail('Closed shelf should not find a key')
def test_ascii_file_shelf(self):
s = shelve.open(self.fn, protocol=0)
try:
s['key1'] = (1,2,3,4)
self.assertEqual(s['key1'], (1,2,3,4))
finally:
s.close()
def test_binary_file_shelf(self):
s = shelve.open(self.fn, protocol=1)
try:
s['key1'] = (1,2,3,4)
self.assertEqual(s['key1'], (1,2,3,4))
finally:
s.close()
def METHOD_NAME(self):
s = shelve.open(self.fn, protocol=2)
try:
s['key1'] = (1,2,3,4)
self.assertEqual(s['key1'], (1,2,3,4))
finally:
s.close()
def test_in_memory_shelf(self):
d1 = byteskeydict()
s = shelve.Shelf(d1, protocol=0)
s['key1'] = (1,2,3,4)
self.assertEqual(s['key1'], (1,2,3,4))
s.close()
d2 = byteskeydict()
s = shelve.Shelf(d2, protocol=1)
s['key1'] = (1,2,3,4)
self.assertEqual(s['key1'], (1,2,3,4))
s.close()
self.assertEqual(len(d1), 1)
self.assertEqual(len(d2), 1)
self.assertNotEqual(d1.items(), d2.items())
def test_mutable_entry(self):
d1 = byteskeydict()
s = shelve.Shelf(d1, protocol=2, writeback=False)
s['key1'] = [1,2,3,4]
self.assertEqual(s['key1'], [1,2,3,4])
s['key1'].append(5)
self.assertEqual(s['key1'], [1,2,3,4])
s.close()
d2 = byteskeydict()
s = shelve.Shelf(d2, protocol=2, writeback=True)
s['key1'] = [1,2,3,4]
self.assertEqual(s['key1'], [1,2,3,4])
s['key1'].append(5)
self.assertEqual(s['key1'], [1,2,3,4,5])
s.close()
self.assertEqual(len(d1), 1)
self.assertEqual(len(d2), 1)
def test_keyencoding(self):
d = {}
key = 'Pöp'
# the default keyencoding is utf-8
shelve.Shelf(d)[key] = [1]
self.assertIn(key.encode('utf-8'), d)
# but a different one can be given
shelve.Shelf(d, keyencoding='latin-1')[key] = [1]
self.assertIn(key.encode('latin-1'), d)
# with all consequences
s = shelve.Shelf(d, keyencoding='ascii')
self.assertRaises(UnicodeEncodeError, s.__setitem__, key, [1])
def test_writeback_also_writes_immediately(self):
# Issue 5754
d = {}
key = 'key'
encodedkey = key.encode('utf-8')
s = shelve.Shelf(d, writeback=True)
s[key] = [1]
p1 = d[encodedkey] # Will give a KeyError if backing store not updated
s['key'].append(2)
s.close()
p2 = d[encodedkey]
self.assertNotEqual(p1, p2) # Write creates new object in store
def test_with(self):
d1 = {}
with shelve.Shelf(d1, protocol=2, writeback=False) as s:
s['key1'] = [1,2,3,4]
self.assertEqual(s['key1'], [1,2,3,4])
self.assertEqual(len(s), 1)
self.assertRaises(ValueError, len, s)
try:
s['key1']
except ValueError:
pass
else:
self.fail('Closed shelf should not find a key')
from test import mapping_tests
class TestShelveBase(mapping_tests.BasicTestMappingProtocol):
fn = "shelftemp.db"
counter = 0
def __init__(self, *args, **kw):
self._db = []
mapping_tests.BasicTestMappingProtocol.__init__(self, *args, **kw)
type2test = shelve.Shelf
def _reference(self):
return {"key1":"value1", "key2":2, "key3":(1,2,3)}
def _empty_mapping(self):
if self._in_mem:
x= shelve.Shelf(byteskeydict(), **self._args)
else:
self.counter+=1
x= shelve.open(self.fn+str(self.counter), **self._args)
self._db.append(x)
return x
def tearDown(self):
for db in self._db:
db.close()
self._db = []
if not self._in_mem:
for f in glob.glob(self.fn+"*"):
support.unlink(f)
class TestAsciiFileShelve(TestShelveBase):
_args={'protocol':0}
_in_mem = False
class TestBinaryFileShelve(TestShelveBase):
_args={'protocol':1}
_in_mem = False
class TestProto2FileShelve(TestShelveBase):
_args={'protocol':2}
_in_mem = False
class TestAsciiMemShelve(TestShelveBase):
_args={'protocol':0}
_in_mem = True
class TestBinaryMemShelve(TestShelveBase):
_args={'protocol':1}
_in_mem = True
class TestProto2MemShelve(TestShelveBase):
_args={'protocol':2}
_in_mem = True
def test_main():
for module in dbm_iterator():
support.run_unittest(
TestAsciiFileShelve,
TestBinaryFileShelve,
TestProto2FileShelve,
TestAsciiMemShelve,
TestBinaryMemShelve,
TestProto2MemShelve,
TestCase
)
if __name__ == "__main__":
test_main()
|
1,307 |
report error
|
import json
import os
import pickle
from datetime import date, datetime
from logging import Logger
from typing import NamedTuple, Optional
# Delay saving as JSON for a few CumulusCI releases because
# people might downgrade a release and then their
# CCI can't parse their JSON orgfiles
#
# Thus we roll out the ability to parse JSON configs a bit
# ahead of the write behaviour.
SHOULD_SAVE_AS_JSON = os.environ.get("SHOULD_SAVE_AS_JSON", "False") != "False"
def load_config_from_json_or_pickle(b: bytes) -> dict:
"""Input should be plaintext JSON or Pickle"""
assert isinstance(b, bytes)
try:
data = try_load_config_from_json_or_pickle(b)
except pickle.PickleError as e:
# we use ValueError because Pickle and Crypto both do too
raise ValueError(str(e)) from e
return data
class JSONSerializer(NamedTuple):
type: str
to_json: callable
from_json: callable
@property
def name(self):
return self.type.__name__
# make sure that datetime comes before date
string_serializers = [
JSONSerializer(
datetime,
to_json=lambda x: x.isoformat(),
from_json=lambda x: datetime.fromisoformat(x),
),
JSONSerializer(
date, to_json=lambda x: x.isoformat(), from_json=lambda x: date.fromisoformat(x)
),
JSONSerializer(
bytes,
to_json=lambda x: x.decode("unicode_escape"),
from_json=lambda x: x.encode("unicode_escape"),
),
]
def encode_value(x):
"""Encode a value that JSON does not support natively"""
for serializer in string_serializers:
if isinstance(x, serializer.type):
return {"$type": serializer.name, "$value": serializer.to_json(x)}
raise TypeError(type(x)) # pragma: no cover
def decode_dict(x: dict):
"""Decode a dict from JSON"""
assert isinstance(x, dict)
if "$type" in x:
return decode_typed_value(x)
else:
return x
def decode_typed_value(x: dict):
"""Decode a value that JSON does not support natively"""
for serializer in string_serializers:
if x["$type"] == serializer.name:
return serializer.from_json(x["$value"])
raise TypeError(f"Unknown $type: {x['$type']}") # pragma: no cover
def try_load_config_from_json_or_pickle(data: bytes) -> dict:
"""Load JSON or Pickle into a dict"""
try:
config = json.loads(data.decode("utf-8"), object_hook=decode_dict)
# remove this debugging tool after transition
config["serialization_format"] = "json"
return config
except ValueError as e1:
try:
# first byte in a Pickle must be part of
# OPCODE Proto == \x80 == 128
# https://github.com/python/cpython/blob/1b293b60067f6f4a95984d064ce0f6b6d34c1216/Lib/pickletools.py#L2124
if data[0] != 128:
raise ValueError("Decryption error")
config = pickle.loads(data, encoding="bytes")
# remove this debugging tool after transition
config["serialization_format"] = "pickle"
return config
except pickle.UnpicklingError as e2:
raise ValueError(f"{e1}\n{e2}")
def METHOD_NAME(msg: str, e: Exception, logger: Logger):
logger.error(
"\n".join(
(
msg,
str(e),
"Please report it to the CumulusCI team.",
"For now this is just a warning. By January 2023 it may become a real error.",
"When you report it to the CumulusCI team, they will investigate",
"whether the error is in your config or in CumulusCI",
)
)
)
def check_round_trip(data: dict, logger: Logger) -> Optional[bytes]:
"""Return JSON bytes if possible, else None"""
try:
as_json_text = json.dumps(data, default=encode_value).encode("utf-8")
except Exception as e:
METHOD_NAME("CumulusCI found an unusual datatype in your config:", e, logger)
return None
try:
test_load = load_config_from_json_or_pickle(as_json_text)
assert _simplify_config(test_load) == _simplify_config(
data
), f"JSON did not round-trip-cleanly {test_load}, {data}"
except Exception as e: # pragma: no cover
METHOD_NAME("CumulusCI found a problem saving your config:", e, logger)
return None
assert isinstance(as_json_text, bytes)
return as_json_text
def _simplify_config(config: dict):
return {k: v for k, v in config.items() if k != "serialization_format"}
def serialize_config_to_json_or_pickle(config: dict, logger: Logger) -> bytes:
"""Serialize a dict to JSON if possible or Pickle otherwise"""
as_json_text = check_round_trip(config, logger)
if as_json_text and SHOULD_SAVE_AS_JSON:
assert isinstance(as_json_text, bytes)
return as_json_text
else:
return pickle.dumps(config, protocol=2)
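# A minimal round-trip sketch (assumption: a plain standard-library logger is
# acceptable here; whether JSON or pickle bytes come back depends on
# SHOULD_SAVE_AS_JSON, but both load back through the same entry point):
#
#     import logging
#     log = logging.getLogger("cumulusci.config")
#     config = {"created": datetime.now(), "name": "dev"}
#     raw = serialize_config_to_json_or_pickle(config, log)
#     restored = load_config_from_json_or_pickle(raw)
#     assert restored["created"] == config["created"]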
|
1,308 |
run config
|
"""
Run PyTorch cpu benchmarking.
"""
import argparse
import os
import numpy
from typing import List, Dict, Optional
from pathlib import Path
from cpu_utils import add_path, REPO_PATH, validate, parse_str_to_list, list_metrics, get_output_dir, get_output_json, dump_output
with add_path(str(REPO_PATH)):
from torchbenchmark.util.experiment.instantiator import (list_models, load_model, load_model_isolated, TorchBenchModelConfig,
list_devices, list_tests)
from torchbenchmark.util.experiment.metrics import TorchBenchModelMetrics, get_model_test_metrics
BM_NAME = 'cpu'
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
def result_to_output_metrics(metrics: List[str], metrics_res: TorchBenchModelMetrics) -> Dict[str, float]:
result_metrics = {}
if metrics_res:
if "latencies" in metrics and metrics_res.latencies:
latency_metric = "latency"
median_latency = numpy.median(metrics_res.latencies)
assert median_latency, f"Run failed for metric {latency_metric}"
result_metrics[latency_metric] = median_latency
if "throughputs" in metrics and metrics_res.throughputs:
throughput_metric = "throughput"
median_throughput = numpy.median(metrics_res.throughputs)
assert median_throughput, f"Run failed for metric {throughput_metric}"
result_metrics[throughput_metric] = median_throughput
if "cpu_peak_mem" in metrics and metrics_res.cpu_peak_mem:
cpu_peak_mem = "cpu_peak_mem"
result_metrics[cpu_peak_mem] = metrics_res.cpu_peak_mem
return result_metrics
def dump_result_to_json(metrics, output_dir):
result = get_output_json(BM_NAME, metrics)
dump_output(BM_NAME, result, output_dir)
def METHOD_NAME(config: TorchBenchModelConfig, metrics: List[str], dryrun: bool=False) -> Optional[TorchBenchModelMetrics]:
"""This function only handles NotImplementedError, all other errors will fail."""
print(f"Running {config} ...", end='')
if dryrun:
return None
# We do not allow RuntimeError in this test
try:
if "cpu_peak_mem" in metrics:
# load the model instance within separate subprocess
model = load_model_isolated(config)
else:
# load the model instance within current process
model = load_model(config)
# get the model test metrics
result: TorchBenchModelMetrics = get_model_test_metrics(model, metrics=metrics)
except NotImplementedError as e:
print(" [NotImplemented]")
return None
print(" [Done]")
return result
def run(args: List[str], extra_args: List[str]):
device = validate(args.device, list_devices())
test = validate(args.test, list_tests())
model = validate(args.model, list_models())
metrics = validate(parse_str_to_list(args.metrics), list_metrics())
config = TorchBenchModelConfig(
name=model,
device=device,
test=test,
batch_size=args.batch_size,
extra_args=extra_args,
extra_env=None)
try:
metrics_res = METHOD_NAME(config, metrics, dryrun=args.dryrun)
except KeyboardInterrupt:
print("User keyboard interrupted!")
if not args.dryrun:
args.output = args.output if args.output else get_output_dir(BM_NAME)
target_dir = Path(args.output).joinpath(f"{config.name}-{config.test}")
target_dir.mkdir(exist_ok=True, parents=True)
metrics_dict = result_to_output_metrics(metrics, metrics_res)
dump_result_to_json(metrics_dict, target_dir)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--device", "-d", default="cpu", help="Devices to run.")
parser.add_argument("--test", "-t", default="eval", help="Tests to run.")
parser.add_argument("--model", "-m", default=None, type=str, help="Only run the specifice model.")
parser.add_argument("--batch-size", "-b", default=None, type=int, help="Run the specifice batch size.")
parser.add_argument("--output", "-o", default=None, help="Output dir.")
parser.add_argument("--metrics", default="latencies", help="Benchmark metrics, split by comma.")
parser.add_argument("--dryrun", action="store_true", help="Dryrun the command.")
args, extra_args = parser.parse_known_args()
run(args, extra_args)
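# An example invocation sketch (the script filename and model name are
# illustrative; the flags are the ones defined by the argparse setup above):
#
#     python run.py --device cpu --test eval --model resnet50 \
#         --metrics latencies,cpu_peak_mem --output ./results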
|
1,309 |
generate input
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('../')
from auto_scan_test import AutoScanTest, IgnoreReasons
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import unittest
from functools import partial
import hypothesis
from hypothesis import given, settings, seed, example, assume
import hypothesis.strategies as st
import numpy as np
class TestShapeOp(AutoScanTest):
def __init__(self, *args, **kwargs):
AutoScanTest.__init__(self, *args, **kwargs)
self.enable_testing_on_place(
TargetType.Host, [PrecisionType.Any],
DataLayoutType.NCHW,
thread=[1, 4])
# opencl_places = [
# Place(TargetType.OpenCL, PrecisionType.FP16,
# DataLayoutType.ImageDefault), Place(
# TargetType.OpenCL, PrecisionType.FP16,
# DataLayoutType.ImageFolder),
# Place(TargetType.OpenCL, PrecisionType.FP32, DataLayoutType.NCHW),
# Place(TargetType.OpenCL, PrecisionType.Any,
# DataLayoutType.ImageDefault), Place(
# TargetType.OpenCL, PrecisionType.Any,
# DataLayoutType.ImageFolder),
# Place(TargetType.OpenCL, PrecisionType.Any, DataLayoutType.NCHW),
# Place(TargetType.Host, PrecisionType.FP32)
# ]
# self.enable_testing_on_place(places=opencl_places)
self.enable_testing_on_place(TargetType.NNAdapter, PrecisionType.FP32)
self.enable_devices_on_nnadapter(device_names=[
"cambricon_mlu", "intel_openvino", "kunlunxin_xtcl"
])
def is_program_valid(self,
program_config: ProgramConfig,
predictor_config: CxxConfig) -> bool:
return True
def sample_program_configs(self, draw):
in_shape = draw(
st.lists(
st.integers(
min_value=1, max_value=64), min_size=1, max_size=4))
in_shape = draw(st.sampled_from([in_shape, []]))
input_type = draw(st.sampled_from(["float32", "int32", "int64"]))
def METHOD_NAME(*args, **kwargs):
if input_type == "float32":
return np.random.normal(1.0, 6.0, in_shape).astype(np.float32)
elif input_type == "int32":
return np.random.normal(1.0, 6.0, in_shape).astype(np.int32)
elif input_type == "int64":
return np.random.normal(1.0, 6.0, in_shape).astype(np.int64)
ops_config = OpConfig(
type="shape",
inputs={"Input": ["input_data"]},
outputs={"Out": ["output_data"]},
attrs={})
program_config = ProgramConfig(
ops=[ops_config],
weights={},
inputs={
"input_data": TensorConfig(
data_gen=partial(METHOD_NAME), lod=[[0, 2]])
},
outputs=["output_data"])
return program_config
def sample_predictor_configs(self):
return self.get_predictor_configs(), ["shape"], (1e-5, 1e-5)
def add_ignore_pass_case(self):
def teller1(program_config, predictor_config):
if predictor_config.target() == TargetType.OpenCL:
return True
self.add_ignore_check_case(
teller1, IgnoreReasons.ACCURACY_ERROR,
"The op output has diff in a specific case. We need to fix it as soon as possible."
)
def _teller2(program_config, predictor_config):
target_type = predictor_config.target()
in_shape = list(program_config.inputs["input_data"].shape)
if (target_type not in [TargetType.X86, TargetType.ARM]
) and len(in_shape) == 0:
return True
self.add_ignore_check_case(
_teller2, IgnoreReasons.PADDLELITE_NOT_SUPPORT,
"Only test 0D-tensor on CPU(X86/ARM/Host) now.")
def test(self, *args, **kwargs):
self.run_and_statis(quant=False, max_examples=100)
if __name__ == "__main__":
unittest.main(argv=[''])
|
1,310 |
session scope
|
from typing import List, Dict, Any, Optional, TYPE_CHECKING
import time
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import DatabaseError
from ..errors import AngrCorruptDBError, AngrIncompatibleDBError, AngrDBError
from ..project import Project
from .models import Base, DbInformation
from .serializers import LoaderSerializer, KnowledgeBaseSerializer
if TYPE_CHECKING:
from angr.knowledge_base import KnowledgeBase
class AngrDB:
"""
AngrDB provides a storage solution for an angr project, its knowledge bases, and some other types of data. It is
designed to use an SQL-based database as the storage backend.
"""
ALL_TABLES = [
"objects",
]
VERSION = 1
def __init__(self, project=None):
self.project = project
self.config = {}
@staticmethod
@contextmanager
def open_db(db_str="sqlite:///:memory:"):
try:
engine = create_engine(db_str)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
yield Session
except DatabaseError:
raise AngrCorruptDBError("The target file may not be an angr database or it is corrupted.")
except Exception as ex:
raise AngrDBError(str(ex))
@staticmethod
@contextmanager
def METHOD_NAME(Session):
session = Session()
try:
yield session
session.commit()
except Exception:
session.rollback()
raise
finally:
session.close()
@staticmethod
def save_info(session, key, value):
"""
Save an information entry to the database.
:param session:
:param key:
:param value:
:return:
"""
db_info = session.query(DbInformation).filter_by(key=key).scalar()
if db_info is not None:
db_info.value = value
else:
db_info = DbInformation(key=key, value=value)
session.add(db_info)
@staticmethod
def get_info(session, key):
"""
Get an information entry from the database.
:param session:
:param key:
:return:
"""
db_info = session.query(DbInformation).filter_by(key=key).scalar()
if db_info is None:
return None
return db_info.value
def update_dbinfo(self, session, extra_info: Optional[Dict[str, str]] = None):
"""
Update the information in database.
:param session:
:return:
"""
self.save_info(session, "version", str(self.VERSION))
self.save_info(session, "saved_at", str(int(time.time())))
if extra_info:
for key, value in extra_info.items():
self.save_info(session, str(key), str(value))
def get_dbinfo(self, session, extra_info: Optional[Dict[str, str]] = None):
"""
Get database information.
:param session:
:return: A dict of information entries.
"""
d = {}
# version
version = self.get_info(session, "version")
if version is not None:
version = int(version)
d["version"] = version
# saved_at
saved_at = self.get_info(session, "saved_at")
if saved_at is not None:
saved_at = int(saved_at)
d["saved_at"] = saved_at
if extra_info is not None:
# store *everything* into the dict
for entry in session.query(DbInformation):
extra_info[entry.key] = entry.value
return d
def db_compatible(self, version):
"""
Checks if the given database version is compatible with the current AngrDB class.
:param int version: The version of the database.
:return: True if compatible, False otherwise.
:rtype: bool
"""
return version == self.VERSION
def dump(self, db_path, kbs: Optional[List["KnowledgeBase"]] = None, extra_info: Optional[Dict[str, Any]] = None):
db_str = "sqlite:///%s" % db_path
with self.open_db(db_str) as Session:
with self.METHOD_NAME(Session) as session:
# Dump the loader
LoaderSerializer.dump(session, self.project.loader)
# Dump the knowledge base
if kbs is None:
kbs = [self.project.kb]
for kb in kbs:
KnowledgeBaseSerializer.dump(session, kb)
# Update the information
self.update_dbinfo(session, extra_info=extra_info)
def load(
self,
db_path: str,
kb_names: Optional[List[str]] = None,
other_kbs: Optional[Dict[str, "KnowledgeBase"]] = None,
extra_info: Optional[Dict[str, Any]] = None,
):
db_str = "sqlite:///%s" % db_path
with self.open_db(db_str) as Session:
with self.METHOD_NAME(Session) as session:
# Compatibility check
dbinfo = self.get_dbinfo(session, extra_info=extra_info)
if not self.db_compatible(dbinfo.get("version", None)):
raise AngrIncompatibleDBError(
"Version %s is incompatible with the current version of angr." % dbinfo.get("version", None)
)
# Load the loader
loader = LoaderSerializer.load(session)
# Create the project
proj = Project(loader)
if kb_names is None:
kb_names = ["global"]
if len(kb_names) != 1 or kb_names[0] != "global":
if other_kbs is None:
raise ValueError(
'You must provide a dict via "other_kbs" to collect angr KnowledgeBases '
"that are not the global one."
)
# Load knowledgebases
for kb_name in kb_names:
kb = KnowledgeBaseSerializer.load(session, proj, kb_name)
if kb is not None:
if kb_name == "global":
proj.kb = kb
else:
other_kbs[kb_name] = kb
return proj
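# A minimal usage sketch (assumption: `proj` is an existing angr Project; the
# masked helper above is the session-scope context manager used by both dump()
# and load()):
#
#     db = AngrDB(project=proj)
#     db.dump("/tmp/project.adb", extra_info={"note": "analysis snapshot"})
#     restored_proj = AngrDB().load("/tmp/project.adb")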
|
1,311 |
test go application command line arguments change
|
import re
from unit.applications.lang.go import ApplicationGo
prerequisites = {'modules': {'go': 'all'}}
client = ApplicationGo()
def test_go_application_variables(date_to_sec_epoch, sec_epoch):
client.load('variables')
body = 'Test body string.'
resp = client.post(
headers={
'Host': 'localhost',
'Content-Type': 'text/html',
'Custom-Header': 'blah',
'Connection': 'close',
},
body=body,
)
assert resp['status'] == 200, 'status'
headers = resp['headers']
header_server = headers.pop('Server')
assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
date = headers.pop('Date')
assert date[-4:] == ' GMT', 'date header timezone'
assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
assert headers == {
'Content-Length': str(len(body)),
'Content-Type': 'text/html',
'Request-Method': 'POST',
'Request-Uri': '/',
'Http-Host': 'localhost',
'Server-Protocol': 'HTTP/1.1',
'Server-Protocol-Major': '1',
'Server-Protocol-Minor': '1',
'Custom-Header': 'blah',
'Connection': 'close',
}, 'headers'
assert resp['body'] == body, 'body'
def test_go_application_get_variables():
client.load('get_variables')
resp = client.get(url='/?var1=val1&var2=&var3')
assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
def test_go_application_post_variables():
client.load('post_variables')
resp = client.post(
headers={
'Host': 'localhost',
'Content-Type': 'application/x-www-form-urlencoded',
'Connection': 'close',
},
body='var1=val1&var2=&var3',
)
assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
assert resp['headers']['X-Var-3'] == '', 'POST variables 3'
def test_go_application_404():
client.load('404')
resp = client.get()
assert resp['status'] == 404, '404 status'
assert re.search(r'<title>404 Not Found</title>', resp['body']), '404 body'
def test_go_keepalive_body():
client.load('mirror')
assert client.get()['status'] == 200, 'init'
body = '0123456789' * 500
(resp, sock) = client.post(
headers={
'Host': 'localhost',
'Connection': 'keep-alive',
},
start=True,
body=body,
read_timeout=1,
)
assert resp['body'] == body, 'keep-alive 1'
body = '0123456789'
resp = client.post(sock=sock, body=body)
assert resp['body'] == body, 'keep-alive 2'
def test_go_application_cookies():
client.load('cookies')
resp = client.get(
headers={
'Host': 'localhost',
'Cookie': 'var1=val1; var2=val2',
'Connection': 'close',
}
)
assert resp['headers']['X-Cookie-1'] == 'val1', 'cookie 1'
assert resp['headers']['X-Cookie-2'] == 'val2', 'cookie 2'
def test_go_application_command_line_arguments_type():
client.load('command_line_arguments')
assert 'error' in client.conf(
'' "a b c", 'applications/command_line_arguments/arguments'
), 'arguments type'
def test_go_application_command_line_arguments_0():
client.load('command_line_arguments')
assert client.get()['headers']['X-Arg-0'] == client.conf_get(
'applications/command_line_arguments/executable'
), 'argument 0'
def test_go_application_command_line_arguments():
client.load('command_line_arguments')
arg1 = '--cc=gcc-7.2.0'
arg2 = "--cc-opt='-O0 -DNXT_DEBUG_MEMORY=1 -fsanitize=address'"
arg3 = '--debug'
assert 'success' in client.conf(
f'["{arg1}", "{arg2}", "{arg3}"]',
'applications/command_line_arguments/arguments',
)
assert client.get()['body'] == f'{arg1},{arg2},{arg3}', 'arguments'
def METHOD_NAME():
client.load('command_line_arguments')
args_path = 'applications/command_line_arguments/arguments'
assert 'success' in client.conf('["0", "a", "$", ""]', args_path)
assert client.get()['body'] == '0,a,$,', 'arguments'
assert 'success' in client.conf('["-1", "b", "%"]', args_path)
assert client.get()['body'] == '-1,b,%', 'arguments change'
assert 'success' in client.conf('[]', args_path)
assert client.get()['headers']['Content-Length'] == '0', 'arguments empty'
|
1,312 |
list plugins
|
"""
This module is used to manage Wordpress installations
:depends: wp binary from http://wp-cli.org/
"""
# Import Python Modules
import collections
# Import Salt Modules
import salt.utils.path
Plugin = collections.namedtuple("Plugin", "name status update version")
def __virtual__():
if salt.utils.path.which("wp"):
return True
return (False, "Missing dependency: wp")
def _get_plugins(stuff):
return Plugin(*stuff)
def METHOD_NAME(path, user):
"""
List plugins in an installed wordpress path
path
path to wordpress install location
user
user to run the command as
CLI Example:
.. code-block:: bash
salt '*' wordpress.list_plugins /var/www/html apache
"""
ret = []
resp = __salt__["cmd.shell"]("wp --path={} plugin list".format(path), runas=user)
for line in resp.split("\n")[1:]:
ret.append(line.split("\t"))
return [dict(plugin._asdict()) for plugin in map(_get_plugins, ret)]
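# Illustrative sketch only: assuming the tab-separated rows that the parsing above
# expects (the plugin values are hypothetical, not taken from a real install), each
# split row maps onto the Plugin namedtuple, e.g.
#   ["akismet", "active", "none", "5.2"] -> Plugin(name="akismet", status="active",
#                                                  update="none", version="5.2")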
def show_plugin(name, path, user):
"""
Show a plugin in a wordpress install and check if it is installed
name
Wordpress plugin name
path
path to wordpress install location
user
user to run the command as
CLI Example:
.. code-block:: bash
salt '*' wordpress.show_plugin HyperDB /var/www/html apache
"""
ret = {"name": name}
resp = __salt__["cmd.shell"](
"wp --path={} plugin status {}".format(path, name), runas=user
).split("\n")
for line in resp:
if "Status" in line:
ret["status"] = line.split(" ")[-1].lower()
elif "Version" in line:
ret["version"] = line.split(" ")[-1].lower()
return ret
def activate(name, path, user):
"""
Activate a wordpress plugin
name
Wordpress plugin name
path
path to wordpress install location
user
user to run the command as
CLI Example:
.. code-block:: bash
salt '*' wordpress.activate HyperDB /var/www/html apache
"""
check = show_plugin(name, path, user)
if check["status"] == "active":
# already active
return None
resp = __salt__["cmd.shell"](
"wp --path={} plugin activate {}".format(path, name), runas=user
)
if "Success" in resp:
return True
elif show_plugin(name, path, user)["status"] == "active":
return True
return False
def deactivate(name, path, user):
"""
Deactivate a wordpress plugin
name
Wordpress plugin name
path
path to wordpress install location
user
user to run the command as
CLI Example:
.. code-block:: bash
salt '*' wordpress.deactivate HyperDB /var/www/html apache
"""
check = show_plugin(name, path, user)
if check["status"] == "inactive":
# already inactive
return None
resp = __salt__["cmd.shell"](
"wp --path={} plugin deactivate {}".format(path, name), runas=user
)
if "Success" in resp:
return True
elif show_plugin(name, path, user)["status"] == "inactive":
return True
return False
def is_installed(path, user=None):
"""
Check if wordpress is installed and setup
path
path to wordpress install location
user
user to run the command as
CLI Example:
.. code-block:: bash
salt '*' wordpress.is_installed /var/www/html apache
"""
retcode = __salt__["cmd.retcode"](
"wp --path={} core is-installed".format(path), runas=user
)
if retcode == 0:
return True
return False
def install(path, user, admin_user, admin_password, admin_email, title, url):
"""
Run the initial setup functions for a wordpress install
path
path to wordpress install location
user
user to run the command as
admin_user
Username for the Administrative user for the wordpress install
admin_password
Initial Password for the Administrative user for the wordpress install
admin_email
Email for the Administrative user for the wordpress install
title
Title of the wordpress website for the wordpress install
url
Url for the wordpress install
CLI Example:
.. code-block:: bash
salt '*' wordpress.install /var/www/html apache dwallace password123 \
[email protected] "Daniel's Awesome Blog" https://blog.dwallace.com
"""
retcode = __salt__["cmd.retcode"](
'wp --path={} core install --title="{}" --admin_user={} '
"--admin_password='{}' --admin_email={} --url={}".format(
path, title, admin_user, admin_password, admin_email, url
),
runas=user,
)
if retcode == 0:
return True
return False
|
1,313 |
http post
|
from typing import List, Optional, Union
from urllib.error import HTTPError, URLError
import requests
from robusta.api import (
ActionParams,
ExecutionBaseEvent,
FileBlock,
Finding,
FindingSource,
FindingType,
MarkdownBlock,
action,
)
class HTTP_GET(ActionParams):
"""
:var url: In cluster target url.
:var get_response: (optional) (Default: False) Send results to sink.
:var params: (optional) Dictionary, list of tuples or bytes to send
in the query string
"""
url: str
get_response: Optional[bool] = False
params: Optional[dict] = None
@action
def http_get(event: ExecutionBaseEvent, action_params: HTTP_GET):
"""
Run an http GET against a url, from within the cluster. Optionally, send the response as a finding.
"""
function_name = "http_get"
# https://docs.robusta.dev/master/extending/actions/findings-api.html
finding = Finding(
title=f"{action_params.url} status check",
source=FindingSource.MANUAL,
aggregation_key=function_name,
finding_type=FindingType.REPORT,
failure=False,
)
try:
result = requests.get(action_params.url, params=action_params.params)
if action_params.get_response:
finding.title = f"Response received from {action_params.url} "
finding.add_enrichment(
[
FileBlock("Response.txt: ", result.text.encode()),
]
)
event.add_finding(finding)
except HTTPError as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Status Code*\n```\n{e.code}\n```"),
]
)
event.add_finding(finding)
except URLError as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Reason*\n```\n{e.reason}\n```"),
]
)
event.add_finding(finding)
except Exception as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Error*\n```\n{e}\n```"),
]
)
event.add_finding(finding)
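# Hedged usage sketch: one way to wire http_get into a Robusta playbook, shown here
# as commented-out YAML. The trigger name, URL and overall structure are illustrative
# assumptions, not taken from the module above.
#
# customPlaybooks:
# - triggers:
#   - on_deployment_update: {}
#   actions:
#   - http_get:
#       url: "http://my-service.default.svc.cluster.local:8080/health"
#       get_response: true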
class HTTP_POST(ActionParams):
"""
:var url: In cluster target url.
:var get_response: (optional) (Default: False) Send results to sink.
:var data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of request.
"""
url: str
data: Union[dict, str, bytes, List[tuple]] = None # type: ignore
headers: dict = None
get_response: Optional[bool] = False
@action
def METHOD_NAME(event: ExecutionBaseEvent, action_params: HTTP_POST):
"""
Run an http POST against a url, from within the cluster. Optionally, send the response as a finding.
"""
function_name = "http_post"
# https://docs.robusta.dev/master/extending/actions/findings-api.html
finding = Finding(
title=f"{action_params.url} status check",
source=FindingSource.MANUAL,
aggregation_key=function_name,
finding_type=FindingType.REPORT,
failure=False,
)
try:
result = requests.post(action_params.url, data=action_params.data, headers=action_params.headers)
if action_params.get_response:
finding.title = f"Response received from {action_params.url} "
finding.add_enrichment(
[
FileBlock("Response.txt: ", result.text.encode()),
]
)
event.add_finding(finding)
except HTTPError as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Status Code*\n```\n{e.code}\n```"),
]
)
event.add_finding(finding)
except URLError as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Reason*\n```\n{e.reason}\n```"),
]
)
event.add_finding(finding)
except Exception as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Error*\n```\n{e}\n```"),
]
)
event.add_finding(finding)
class HTTP_PUT(ActionParams):
"""
:var url: In cluster target url.
:var get_response: (optional) (Default: False) Send results to sink.
:var data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of request.
"""
url: str
data: Union[dict, str, bytes, List[tuple]] = None # type: ignore
headers: dict = None
get_response: Optional[bool] = False
@action
def http_put(event: ExecutionBaseEvent, action_params: HTTP_PUT):
"""
Run an http PUT against a url, from within the cluster. Optionally, send the response as a finding.
"""
function_name = "http_put"
# https://docs.robusta.dev/master/extending/actions/findings-api.html
finding = Finding(
title=f"{action_params.url} status check",
source=FindingSource.MANUAL,
aggregation_key=function_name,
finding_type=FindingType.REPORT,
failure=False,
)
try:
result = requests.put(action_params.url, data=action_params.data, headers=action_params.headers)
if action_params.get_response:
finding.title = f"Response received from {action_params.url} "
finding.add_enrichment(
[
FileBlock("Response.txt: ", result.text.encode()),
]
)
event.add_finding(finding)
except HTTPError as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Status Code*\n```\n{e.code}\n```"),
]
)
event.add_finding(finding)
except URLError as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Reason*\n```\n{e.reason}\n```"),
]
)
event.add_finding(finding)
except Exception as e:
finding.title = f"{action_params.url} is un-reachable"
finding.add_enrichment(
[
MarkdownBlock(f"*Error*\n```\n{e}\n```"),
]
)
event.add_finding(finding)
|
1,314 |
show help
|
#!/usr/bin/env python
#
# Copyright (C) 2017 Inventec, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Usage: %(scriptName)s [options] command object
options:
-h | --help : this help message
-d | --debug : run with debug mode
-f | --force : ignore error during installation or clean
command:
install : install drivers and generate related sysfs nodes
clean : uninstall drivers and remove related sysfs nodes
"""
import os
import commands
import sys, getopt
import logging
import re
import time
from collections import namedtuple
DEBUG = False
args = []
FORCE = 0
i2c_prefix = '/sys/bus/i2c/devices/'
if DEBUG == True:
print sys.argv[0]
print 'ARGV :', sys.argv[1:]
def main():
global DEBUG
global args
global FORCE
if len(sys.argv)<2:
METHOD_NAME()
options, args = getopt.getopt(sys.argv[1:], 'hdf', ['help',
'debug',
'force',
])
if DEBUG == True:
print options
print args
print len(sys.argv)
for opt, arg in options:
if opt in ('-h', '--help'):
METHOD_NAME()
elif opt in ('-d', '--debug'):
DEBUG = True
logging.basicConfig(level=logging.INFO)
elif opt in ('-f', '--force'):
FORCE = 1
else:
logging.info('no option')
for arg in args:
if arg == 'install':
install()
elif arg == 'clean':
uninstall()
else:
METHOD_NAME()
return 0
def METHOD_NAME():
print __doc__ % {'scriptName' : sys.argv[0].split("/")[-1]}
sys.exit(0)
def show_log(txt):
if DEBUG == True:
print "[D7264]"+txt
return
def exec_cmd(cmd, show):
logging.info('Run :'+cmd)
status, output = commands.getstatusoutput(cmd)
show_log (cmd +"with result:" + str(status))
show_log (" output:"+output)
if status:
logging.info('Failed :'+cmd)
if show:
print('Failed :'+cmd)
return status, output
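# For reference, a hypothetical call to the helper above:
#   status, output = exec_cmd("ls /sys/bus/i2c/devices/", 1)
# returns the shell exit status and captured output; on a non-zero status the
# failure is logged and, because show is truthy, also printed.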
instantiate =[
#'echo pca9545 0x70> /sys/bus/i2c/devices/i2c-0/new_device',
#'echo pca9548 0x72> /sys/bus/i2c/devices/i2c-1/new_device',
#'echo pca9548 0x72> /sys/bus/i2c/devices/i2c-2/new_device',
#'echo pca9548 0x72> /sys/bus/i2c/devices/i2c-3/new_device',
#'echo pca9548 0x72> /sys/bus/i2c/devices/i2c-4/new_device',
#'echo inv_psoc 0x66> /sys/bus/i2c/devices/i2c-5/new_device',
#'echo inv_cpld 0x55> /sys/bus/i2c/devices/i2c-5/new_device',
'echo inv_eeprom 0x53> /sys/bus/i2c/devices/i2c-0/new_device']
drivers =[
'gpio-ich',
'lpc_ich',
'i2c-i801',
'i2c-mux',
'i2c-mux-pca954x',
'i2c-dev',
'inv_eeprom',
'inv_platform',
'inv_psoc',
'inv_cpld',
'inv_pthread',
'swps']
def system_install():
global FORCE
#remove default drivers to avoid modprobe order conflicts
status, output = exec_cmd("rmmod i2c_ismt ", 1)
status, output = exec_cmd("rmmod i2c-i801 ", 1)
#install drivers
for i in range(0,len(drivers)):
status, output = exec_cmd("modprobe "+drivers[i], 1)
if status:
print output
if FORCE == 0:
return status
#instantiate devices
for i in range(0,len(instantiate)):
time.sleep(1)
status, output = exec_cmd(instantiate[i], 1)
if status:
print output
if FORCE == 0:
return status
for i in range(10,17):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-2/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(18,25):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-3/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(26,33):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-4/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(34,41):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-5/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(42,49):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-6/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(50,57):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-7/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(58,65):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-8/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
for i in range(66,73):
status, output =exec_cmd("echo sff8436 0x50 > /sys/bus/i2c/devices/i2c-0/i2c-9/i2c-"+str(i)+"/new_device", 1)
if status:
print output
if FORCE == 0:
return status
return
def system_ready():
if not device_found():
return False
return True
def install():
if not device_found():
print "No device, installing...."
status = system_install()
if status:
if FORCE == 0:
return status
else:
print " D7264 devices detected...."
return
def uninstall():
global FORCE
#uninstall drivers
for i in range(len(drivers)-1,-1,-1):
status, output = exec_cmd("rmmod "+drivers[i], 1)
if status:
print output
if FORCE == 0:
return status
return
def device_found():
ret1, log = exec_cmd("ls "+i2c_prefix+"*0072", 0)
ret2, log = exec_cmd("ls "+i2c_prefix+"i2c-2", 0)
return not(ret1 or ret2)
if __name__ == "__main__":
main()
|
1,315 |
test call within randomstate
|
import sys
from numpy.testing import (
assert_, assert_array_equal, assert_raises,
)
from numpy import random
import numpy as np
class TestRegression:
def test_VonMises_range(self):
# Make sure generated random variables are in [-pi, pi].
# Regression test for ticket #986.
for mu in np.linspace(-7., 7., 5):
r = random.mtrand.vonmises(mu, 1, 50)
assert_(np.all(r > -np.pi) and np.all(r <= np.pi))
def test_hypergeometric_range(self):
# Test for ticket #921
assert_(np.all(np.random.hypergeometric(3, 18, 11, size=10) < 4))
assert_(np.all(np.random.hypergeometric(18, 3, 11, size=10) > 0))
# Test for ticket #5623
args = [
(2**20 - 2, 2**20 - 2, 2**20 - 2), # Check for 32-bit systems
]
is_64bits = sys.maxsize > 2**32
if is_64bits and sys.platform != 'win32':
# Check for 64-bit systems
args.append((2**40 - 2, 2**40 - 2, 2**40 - 2))
for arg in args:
assert_(np.random.hypergeometric(*arg) > 0)
def test_logseries_convergence(self):
# Test for ticket #923
N = 1000
np.random.seed(0)
rvsn = np.random.logseries(0.8, size=N)
# these two frequency counts should be close to theoretical
# numbers with this large sample
# theoretical large N result is 0.49706795
freq = np.sum(rvsn == 1) / N
msg = f'Frequency was {freq:f}, should be > 0.45'
assert_(freq > 0.45, msg)
# theoretical large N result is 0.19882718
freq = np.sum(rvsn == 2) / N
msg = f'Frequency was {freq:f}, should be < 0.23'
assert_(freq < 0.23, msg)
def test_shuffle_mixed_dimension(self):
# Test for trac ticket #2074
for t in [[1, 2, 3, None],
[(1, 1), (2, 2), (3, 3), None],
[1, (2, 2), (3, 3), None],
[(1, 1), 2, 3, None]]:
np.random.seed(12345)
shuffled = list(t)
random.shuffle(shuffled)
expected = np.array([t[0], t[3], t[1], t[2]], dtype=object)
assert_array_equal(np.array(shuffled, dtype=object), expected)
def METHOD_NAME(self):
# Check that custom RandomState does not call into global state
m = np.random.RandomState()
res = np.array([0, 8, 7, 2, 1, 9, 4, 7, 0, 3])
for i in range(3):
np.random.seed(i)
m.seed(4321)
# If m.state is not honored, the result will change
assert_array_equal(m.choice(10, size=10, p=np.ones(10)/10.), res)
def test_multivariate_normal_size_types(self):
# Test for multivariate_normal issue with 'size' argument.
# Check that the multivariate_normal size argument can be a
# numpy integer.
np.random.multivariate_normal([0], [[0]], size=1)
np.random.multivariate_normal([0], [[0]], size=np.int_(1))
np.random.multivariate_normal([0], [[0]], size=np.int64(1))
def test_beta_small_parameters(self):
# Test that beta with small a and b parameters does not produce
# NaNs due to roundoff errors causing 0 / 0, gh-5851
np.random.seed(1234567890)
x = np.random.beta(0.0001, 0.0001, size=100)
assert_(not np.any(np.isnan(x)), 'Nans in np.random.beta')
def test_choice_sum_of_probs_tolerance(self):
# The sum of probs should be 1.0 with some tolerance.
# For low precision dtypes the tolerance was too tight.
# See numpy github issue 6123.
np.random.seed(1234)
a = [1, 2, 3]
counts = [4, 4, 2]
for dt in np.float16, np.float32, np.float64:
probs = np.array(counts, dtype=dt) / sum(counts)
c = np.random.choice(a, p=probs)
assert_(c in a)
assert_raises(ValueError, np.random.choice, a, p=probs*0.9)
def test_shuffle_of_array_of_different_length_strings(self):
# Test that permuting an array of different length strings
# will not cause a segfault on garbage collection
# Tests gh-7710
np.random.seed(1234)
a = np.array(['a', 'a' * 1000])
for _ in range(100):
np.random.shuffle(a)
# Force Garbage Collection - should not segfault.
import gc
gc.collect()
def test_shuffle_of_array_of_objects(self):
# Test that permuting an array of objects will not cause
# a segfault on garbage collection.
# See gh-7719
np.random.seed(1234)
a = np.array([np.arange(1), np.arange(4)], dtype=object)
for _ in range(1000):
np.random.shuffle(a)
# Force Garbage Collection - should not segfault.
import gc
gc.collect()
def test_permutation_subclass(self):
class N(np.ndarray):
pass
np.random.seed(1)
orig = np.arange(3).view(N)
perm = np.random.permutation(orig)
assert_array_equal(perm, np.array([0, 2, 1]))
assert_array_equal(orig, np.arange(3).view(N))
class M:
a = np.arange(5)
def __array__(self):
return self.a
np.random.seed(1)
m = M()
perm = np.random.permutation(m)
assert_array_equal(perm, np.array([2, 1, 4, 0, 3]))
assert_array_equal(m.__array__(), np.arange(5))
|
1,316 |
read sites file
|
# -*- coding: utf-8 -*-
"""
===================
Winglink Tools
===================
* module to deal with WingLink output files
Created on Mon Aug 26 17:44:20 2013
@author: jpeacock-pr
"""
#==============================================================================
import numpy as np
#==============================================================================
#==============================================================================
# read an out file
#==============================================================================
def read_out_file(out_fn, ncol=5):
"""
read .out file from winglink
Arguments:
-----------
**out_fn** : full path to .out file from winglink
Returns:
---------
**dx, dy, dz** : np.ndarrays
cell nodes in x, y, z directions
(note x is to the East here and
y is to the north in meters.)
"""
wl_ofid = file(out_fn, 'r')
raw_data = wl_ofid.read().strip().split()
nx = int(raw_data[0])
ny = int(raw_data[1])
nz = int(raw_data[2])
dx = np.zeros(nx)
dy = np.zeros(ny)
dz = np.zeros(nz)
for xx in range(nx):
dx[xx] = raw_data[xx + ncol]
for yy in range(ny):
dy[yy] = raw_data[yy + ncol + nx]
for zz in range(nz):
dz[zz] = raw_data[zz + ncol + nx + ny]
return dx, dy, dz
#==============================================================================
# read a sites file
#==============================================================================
def METHOD_NAME(sites_fn):
"""
read the sites file output from winglink
Arguments:
-----------
**sites_fn** : string
full path to the sites file output by winglink
Returns:
----------
**slst** : list of dictionaries for each station.
Keys include:
* station = station name
* dx = number of blocks from center of grid in
East-West direction
* dy = number of blocks from center of grid in
North-South direction
* dz = number of blocks from center of grid
vertically
* number = block number in the grid
**site_list** : list of station names
"""
sfid = file(sites_fn, 'r')
slines = sfid.readlines()
slst = []
site_list = []
for ss in slines:
sdict = {}
sline = ss.strip().split()
sdict['station'] = sline[0][0:-4]
sdict['dx'] = int(sline[1]) - 1
sdict['dy'] = int(sline[2]) - 1
sdict['dz'] = int(sline[3]) - 1
sdict['something'] = int(sline[4])
sdict['number'] = int(sline[5])
slst.append(sdict)
site_list.append(sline[0][0:-4])
return slst, site_list
#==============================================================================
# get station locations from sites file
#==============================================================================
def get_station_locations(sites_fn, out_fn, ncol=5):
"""
get the x (E-W) and y (N-S) position of each station and place it in the middle of its
cell for a 3D model.
Arguments:
-----------
**sites_fn** : string
full path to sites file output from winglink
**out_fn** : string
full path to .out file output from winglink
**ncol** : int
number of columns the data is in
*default* is 5
Returns:
---------
**xarr** : np.ndarray()
array of relative distance for each station from center of
the grid. Note this is E-W direction
**yarr** : np.ndarray()
array of relative distance for each station from center of
the grid. Note this is N-S direction
"""
slst, sitelst = METHOD_NAME(sites_fn)
dx, dy, dz = read_out_file(out_fn, ncol=ncol)
ns = len(slst)
nxh = len(dx) / 2
nyh = len(dy) / 2
xarr = np.zeros(ns)
yarr = np.zeros(ns)
for ii, sdict in enumerate(slst):
xx = sdict['dx']
yy = sdict['dy']
if xx < nxh:
xarr[ii] = dx[xx:nxh].sum() - dx[xx] / 2
else:
xarr[ii] = dx[nxh:xx].sum() + dx[xx] / 2
if yy < nyh:
yarr[ii] = -1 * (dy[yy:nyh].sum() - dy[yy] / 2)
else:
yarr[ii] = -1 * (dy[nyh:yy].sum() + dy[yy] / 2)
return xarr, yarr
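# Hedged usage sketch (the paths are hypothetical; note the module uses the
# Python 2 file() builtin, so it must run under Python 2):
#   dx, dy, dz = read_out_file("/path/to/model.out")
#   xarr, yarr = get_station_locations("/path/to/sites", "/path/to/model.out")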
|
1,317 |
test reduce slice prod
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.reduce_slice_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.reduce_slice_ops.python.ops import reduce_slice_ops
from tensorflow.python.framework.test_util import TensorFlowTestCase
from tensorflow.python.platform import googletest
class ReduceSliceTest(TensorFlowTestCase):
def testReduceSliceSum1D(self):
x = np.array([1, 40, 700], dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.array([1, 741, 40, 740, 41], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
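# Reading of the case above: each [start, end) row of `indices` selects x[start:end]
# along axis 0 and sums it, e.g. the row [0, 3] gives 1 + 40 + 700 = 741; the 1-D
# indices test further below shows that an empty range sums to 0.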
def testReduceSliceSum2D(self):
x = np.array([[1, 2, 3], [40, 50, 60], [700, 800, 900]], dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.array([[1, 2, 3], [741, 852, 963], [40, 50, 60],
[740, 850, 960], [41, 52, 63]], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceSum3D(self):
x = np.array([[[1, 2], [3, 4]], [[50, 60], [70, 80]],
[[600, 700], [800, 900]]], dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.array([[[1, 2], [3, 4]],
[[651, 762], [873, 984]],
[[50, 60], [70, 80]],
[[650, 760], [870, 980]],
[[51, 62], [73, 84]]], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceSumAxis1(self):
x = np.transpose(np.array([[1, 2, 3], [40, 50, 60],
[700, 800, 900]], dtype=np.int32))
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.transpose(np.array([[1, 2, 3],
[741, 852, 963],
[40, 50, 60],
[740, 850, 960],
[41, 52, 63]], dtype=np.int32))
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 1).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceSum1DIndices(self):
x = np.array([[1, 2, 3], [40, 50, 60], [700, 800, 900],
[1000, 2000, 3000], [40000, 50000, 60000]], dtype=np.int32)
indices = np.array([0, 0, 2, 5], dtype=np.int32)
result = np.array([[0, 0, 0], [41, 52, 63],
[41700, 52800, 63900]], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def METHOD_NAME(self):
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.array([[1, 2, 3], [28, 80, 162], [4, 5, 6],
[28, 40, 54], [4, 10, 18]], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_prod(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceMax(self):
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.array([[1, 2, 3], [7, 8, 9], [4, 5, 6],
[7, 8, 9], [4, 5, 6]], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_max(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceMin(self):
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.array([[1, 2, 3], [1, 2, 3], [4, 5, 6],
[4, 5, 6], [1, 2, 3]], dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_min(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceEmptyDataRows(self):
x = np.empty((0, 1, 2, 3, 4, 5, 6), dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.zeros((5, 1, 2, 3, 4, 5, 6), dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceEmptyDataCols(self):
x = np.empty((100, 0, 2, 3, 4, 5, 6), dtype=np.int32)
indices = np.array([[0, 1], [0, 3], [1, 2], [1, 3], [0, 2]], dtype=np.int32)
result = np.empty((5, 0, 2, 3, 4, 5, 6), dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceEmptyIndicesRows(self):
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int32)
indices = np.empty((0, 2), dtype=np.int32)
result = np.empty((0, 3), dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceEmpty0Indices1D(self):
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int32)
indices = np.empty((0,), dtype=np.int32)
result = np.empty((0, 3), dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
def testReduceSliceEmpty1Indices1D(self):
x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int32)
indices = np.array([0], dtype=np.int32)
result = np.empty((0, 3), dtype=np.int32)
with self.test_session(use_gpu=True):
y_tf = reduce_slice_ops.reduce_slice_sum(x, indices, 0).eval()
self.assertAllEqual(y_tf, result)
if __name__ == "__main__":
googletest.main()
|
1,318 |
handler
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"vm extension image list-names",
)
class ListNames(AAZCommand):
"""List the names of available extensions.
:example: Find Docker extensions by publisher and location.
az vm extension image list-names --publisher Microsoft.Azure.Extensions -l westus --query "[?starts_with(name, 'Docker')]"
:example: Find CustomScript extensions by publisher and location.
az vm extension image list-names --publisher Microsoft.Azure.Extensions -l westus --query "[?starts_with(name, 'Custom')]"
"""
_aaz_info = {
"version": "2022-11-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/providers/microsoft.compute/locations/{}/publishers/{}/artifacttypes/vmextension/types", "2022-11-01"],
]
}
def METHOD_NAME(self, command_args):
super().METHOD_NAME(command_args)
self._execute_operations()
return self._output()
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.location = AAZResourceLocationArg(
help="Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.",
required=True,
id_part="name",
)
_args_schema.publisher_name = AAZStrArg(
options=["-p", "--publisher", "--publisher-name"],
help="Image publisher name.",
required=True,
id_part="child_name_1",
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.VirtualMachineExtensionImagesListTypes(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
return result
class VirtualMachineExtensionImagesListTypes(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmextension/types",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"location", self.ctx.args.location,
required=True,
),
**self.serialize_url_param(
"publisherName", self.ctx.args.publisher_name,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2022-11-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZListType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.Element = AAZObjectType()
_element = cls._schema_on_200.Element
_element.id = AAZStrType(
flags={"read_only": True},
)
_element.location = AAZStrType(
flags={"required": True},
)
_element.name = AAZStrType(
flags={"required": True, "read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.tags = AAZDictType()
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = cls._schema_on_200.Element.properties
properties.compute_role = AAZStrType(
serialized_name="computeRole",
flags={"required": True},
)
properties.handler_schema = AAZStrType(
serialized_name="handlerSchema",
flags={"required": True},
)
properties.operating_system = AAZStrType(
serialized_name="operatingSystem",
flags={"required": True},
)
properties.supports_multiple_extensions = AAZBoolType(
serialized_name="supportsMultipleExtensions",
)
properties.vm_scale_set_enabled = AAZBoolType(
serialized_name="vmScaleSetEnabled",
)
tags = cls._schema_on_200.Element.tags
tags.Element = AAZStrType()
return cls._schema_on_200
class _ListNamesHelper:
"""Helper class for ListNames"""
__all__ = ["ListNames"]
|
1,319 |
test breakable scheduler time restrictions
|
from datetime import datetime, timedelta
import pytest
import pytz
from flexmeasures.data.models.time_series import Sensor
from flexmeasures.data.models.planning.process import ProcessScheduler
tz = pytz.timezone("Europe/Amsterdam")
start = tz.localize(datetime(2015, 1, 2))
end = tz.localize(datetime(2015, 1, 3))
resolution = timedelta(hours=1)
@pytest.mark.parametrize(
"process_type, optimal_start",
[("INFLEXIBLE", datetime(2015, 1, 2, 0)), ("SHIFTABLE", datetime(2015, 1, 2, 8))],
)
def test_process_scheduler(add_battery_assets, process, process_type, optimal_start):
"""
Test scheduling a process of 4kW of power that lasts 4h using the ProcessScheduler
without time restrictions.
"""
# get the sensors from the database
epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none()
flex_model = {
"duration": "PT4H",
"process-type": process_type,
"power": 4,
}
flex_context = {
"consumption-price-sensor": epex_da.id,
}
scheduler = ProcessScheduler(
process,
start,
end,
resolution,
flex_model=flex_model,
flex_context=flex_context,
)
schedule = scheduler.compute()
optimal_start = tz.localize(optimal_start)
mask = (optimal_start <= schedule.index) & (
schedule.index < optimal_start + timedelta(hours=4)
)
assert (schedule[mask] == 4).all()
assert (schedule[~mask] == 0).all()
@pytest.mark.parametrize(
"process_type, optimal_start",
[("INFLEXIBLE", datetime(2015, 1, 2, 0)), ("SHIFTABLE", datetime(2015, 1, 2, 8))],
)
def test_duration_exceeds_planning_window(
add_battery_assets, process, process_type, optimal_start
):
"""
Test scheduling a process that lasts longer than the planning window.
"""
# get the sensors from the database
epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none()
flex_model = {
"duration": "PT48H",
"process-type": process_type,
"power": 4,
}
flex_context = {
"consumption-price-sensor": epex_da.id,
}
scheduler = ProcessScheduler(
process,
start,
end,
resolution,
flex_model=flex_model,
flex_context=flex_context,
)
schedule = scheduler.compute()
optimal_start = tz.localize(optimal_start)
assert (schedule == 4).all()
def test_process_scheduler_time_restrictions(add_battery_assets, process):
"""
Test ProcessScheduler with a time restriction consisting of a 2h block starting
at 8am. The resulting schedule avoids the 8am-10am period and schedules the process in a valid period.
"""
# get the sensors from the database
epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none()
# time parameters
flex_model = {
"duration": "PT4H",
"process-type": "SHIFTABLE",
"power": 4,
"time-restrictions": [
{"start": "2015-01-02T08:00:00+01:00", "duration": "PT2H"}
],
}
flex_context = {
"consumption-price-sensor": epex_da.id,
}
scheduler = ProcessScheduler(
process,
start,
end,
resolution,
flex_model=flex_model,
flex_context=flex_context,
)
schedule = scheduler.compute()
optimal_start = tz.localize(datetime(2015, 1, 2, 10))
mask = (optimal_start <= schedule.index) & (
schedule.index < optimal_start + timedelta(hours=4)
)
assert (schedule[mask] == 4).all()
assert (schedule[~mask] == 0).all()
# check that the time restrictions are fulfilled
time_restrictions = scheduler.flex_model["time_restrictions"]
time_restrictions = time_restrictions.tz_convert(tz)
assert (schedule[time_restrictions] == 0).all()
def METHOD_NAME(add_battery_assets, process):
"""
Test the BREAKABLE process-type of ProcessScheduler by introducing four 1-hour restrictions
spaced 1 hour apart. The equivalent mask would be the following: [0,...,0,1,0,1,0,1,0,1,0, ...,0].
Because the best prices fall between 9am and 4pm, this makes the schedule choose the time periods
between the time restrictions.
"""
# get the sensors from the database
epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none()
# time parameters
flex_model = {
"duration": "PT4H",
"process-type": "BREAKABLE",
"power": 4,
"time-restrictions": [
{"start": "2015-01-02T09:00:00+01:00", "duration": "PT1H"},
{"start": "2015-01-02T11:00:00+01:00", "duration": "PT1H"},
{"start": "2015-01-02T13:00:00+01:00", "duration": "PT1H"},
{"start": "2015-01-02T15:00:00+01:00", "duration": "PT1H"},
],
}
flex_context = {
"consumption-price-sensor": epex_da.id,
}
scheduler = ProcessScheduler(
process,
start,
end,
resolution,
flex_model=flex_model,
flex_context=flex_context,
)
schedule = scheduler.compute()
expected_schedule = [0] * 8 + [4, 0, 4, 0, 4, 0, 4, 0] + [0] * 8
assert (schedule == expected_schedule).all()
# check that the time restrictions are fulfilled
time_restrictions = scheduler.flex_model["time_restrictions"]
time_restrictions = time_restrictions.tz_convert(tz)
assert (schedule[time_restrictions] == 0).all()
@pytest.mark.parametrize(
"process_type, time_restrictions",
[
("BREAKABLE", [{"start": "2015-01-02T00:00:00+01:00", "duration": "PT24H"}]),
("INFLEXIBLE", [{"start": "2015-01-02T03:00:00+01:00", "duration": "PT21H"}]),
("SHIFTABLE", [{"start": "2015-01-02T03:00:00+01:00", "duration": "PT21H"}]),
],
)
def test_impossible_schedules(
add_battery_assets, process, process_type, time_restrictions
):
"""
Test schedules with time restrictions that make a 4h block not fit anytime during the
planned window.
"""
# get the sensors from the database
epex_da = Sensor.query.filter(Sensor.name == "epex_da").one_or_none()
flex_model = {
"duration": "PT4H",
"process-type": process_type,
"power": 4,
"time-restrictions": time_restrictions,
}
flex_context = {
"consumption-price-sensor": epex_da.id,
}
scheduler = ProcessScheduler(
process,
start,
end,
resolution,
flex_model=flex_model,
flex_context=flex_context,
)
with pytest.raises(ValueError):
scheduler.compute()
|
1,320 |
product id
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetWorkspaceTagProductLinkResult',
'AwaitableGetWorkspaceTagProductLinkResult',
'get_workspace_tag_product_link',
'get_workspace_tag_product_link_output',
]
@pulumi.output_type
class GetWorkspaceTagProductLinkResult:
"""
Tag-product link details.
"""
def __init__(__self__, id=None, name=None, METHOD_NAME=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'product_id' to be a str")
pulumi.set(__self__, "product_id", METHOD_NAME)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="productId")
def METHOD_NAME(self) -> str:
"""
Full resource Id of a product.
"""
return pulumi.get(self, "product_id")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetWorkspaceTagProductLinkResult(GetWorkspaceTagProductLinkResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetWorkspaceTagProductLinkResult(
id=self.id,
name=self.name,
METHOD_NAME=self.METHOD_NAME,
type=self.type)
def get_workspace_tag_product_link(product_link_id: Optional[str] = None,
resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
tag_id: Optional[str] = None,
workspace_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkspaceTagProductLinkResult:
"""
Gets the product link for the tag.
Azure REST API version: 2022-09-01-preview.
:param str product_link_id: Tag-product link identifier. Must be unique in the current API Management service instance.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str service_name: The name of the API Management service.
:param str tag_id: Tag identifier. Must be unique in the current API Management service instance.
:param str workspace_id: Workspace identifier. Must be unique in the current API Management service instance.
"""
__args__ = dict()
__args__['productLinkId'] = product_link_id
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
__args__['tagId'] = tag_id
__args__['workspaceId'] = workspace_id
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:apimanagement:getWorkspaceTagProductLink', __args__, opts=opts, typ=GetWorkspaceTagProductLinkResult).value
return AwaitableGetWorkspaceTagProductLinkResult(
id=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
METHOD_NAME=pulumi.get(__ret__, 'product_id'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_workspace_tag_product_link)
def get_workspace_tag_product_link_output(product_link_id: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
tag_id: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetWorkspaceTagProductLinkResult]:
"""
Gets the product link for the tag.
Azure REST API version: 2022-09-01-preview.
:param str product_link_id: Tag-product link identifier. Must be unique in the current API Management service instance.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str service_name: The name of the API Management service.
:param str tag_id: Tag identifier. Must be unique in the current API Management service instance.
:param str workspace_id: Workspace identifier. Must be unique in the current API Management service instance.
"""
...
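# Hedged usage sketch (all resource names below are hypothetical):
#   link = get_workspace_tag_product_link(
#       product_link_id="my-link",
#       resource_group_name="my-rg",
#       service_name="my-apim",
#       tag_id="my-tag",
#       workspace_id="my-workspace",
#   )
#   pulumi.export("link_id", link.id)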
|
1,321 |
on response
|
"""
$description Russian live-streaming and video hosting social platform.
$url vk.com
$url vk.ru
$type live, vod
$metadata id
$metadata author
$metadata title
"""
import logging
import re
from hashlib import md5
from urllib.parse import parse_qsl, unquote, urlparse, urlunparse
from streamlink.exceptions import NoStreamsError
from streamlink.plugin import Plugin, PluginError, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.dash import DASHStream
from streamlink.stream.hls import HLSStream
from streamlink.utils.url import update_qsd
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r"https?://(?:\w+\.)?vk\.(?:com|ru)/videos?(?:\?z=video)?(?P<video_id>-?\d+_\d+)",
))
@pluginmatcher(re.compile(
r"https?://(\w+\.)?vk\.(?:com|ru)/.+",
))
class VK(Plugin):
API_URL = "https://vk.com/al_video.php"
HASH_COOKIE = "hash429"
def _get_cookies(self):
def METHOD_NAME(res, **kwargs):
if res.headers.get("x-waf-redirect") == "1":
if not res.headers.get("X-WAF-Backend-Status"):
log.debug("Getting WAF cookie")
cookie = res.cookies.get(self.HASH_COOKIE)
key = md5(cookie.encode("utf-8")).hexdigest()
res.headers["Location"] = update_qsd(res.headers["Location"], qsd={"key": key})
return res
elif res.headers.get("X-WAF-Backend-Status") == "challenge_success":
self.session.http.cookies.update(res.cookies)
return res
url = urlunparse(urlparse(self.url)._replace(path="", query="", fragment=""))
self.session.http.get(url, hooks={"response": METHOD_NAME})
def _has_video_id(self):
return any(self.matches[:-1])
def follow_vk_redirect(self):
if self._has_video_id():
return
try:
parsed_url = urlparse(self.url)
true_path = next(unquote(v).split("/")[0] for k, v in parse_qsl(parsed_url.query) if k == "z" and len(v) > 0)
self.url = f"{parsed_url.scheme}://{parsed_url.netloc}/{true_path}"
if self._has_video_id():
return
except StopIteration:
pass
try:
self.url = self.session.http.get(self.url, schema=validate.Schema(
validate.parse_html(),
validate.xml_xpath_string(".//head/meta[@property='og:url'][@content]/@content"),
str,
))
except PluginError:
pass
if self._has_video_id():
return
raise NoStreamsError
def _get_streams(self):
self._get_cookies()
self.follow_vk_redirect()
video_id = self.match.group("video_id")
if not video_id:
return
log.debug(f"Video ID: {video_id}")
try:
data = self.session.http.post(
self.API_URL,
params={"act": "show"},
data={"act": "show", "al": "1", "video": video_id},
headers={"Referer": self.url},
schema=validate.Schema(
validate.transform(lambda text: re.sub(r"^\s*<!--\s*", "", text)),
validate.parse_json(),
{"payload": list},
validate.get(("payload", -1)),
list,
validate.get(-1),
{"player": {"params": [dict]}},
validate.get(("player", "params", 0)),
{
validate.optional("hls"): validate.url(),
validate.optional("manifest"): validate.startswith("<?xml"),
validate.optional("md_author"): validate.any(str, None),
validate.optional("md_title"): validate.any(str, None),
},
),
)
except PluginError:
log.error("Could not parse API response")
return
self.id = video_id
self.author = data.get("md_author")
self.title = data.get("md_title")
hls = data.get("hls")
if hls:
return HLSStream.parse_variant_playlist(self.session, hls)
dash_manifest = data.get("manifest")
if dash_manifest:
return DASHStream.parse_manifest(self.session, dash_manifest)
__plugin__ = VK
|
1,322 |
prepare contract value
|
# Copyright 2017 LasLabs Inc.
# Copyright 2018 ACSONE SA/NV.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
class SaleOrder(models.Model):
_inherit = "sale.order"
is_contract = fields.Boolean(string="Is a contract", compute="_compute_is_contract")
contract_count = fields.Integer(compute="_compute_contract_count")
need_contract_creation = fields.Boolean(compute="_compute_need_contract_creation")
@api.constrains("state")
def _check_contact_is_not_terminated(self):
for rec in self:
if rec.state not in (
"sale",
"done",
"cancel",
) and rec.order_line.filtered("contract_id.is_terminated"):
raise ValidationError(
_("You can't upsell or downsell a terminated contract")
)
def _get_line_to_create_contract(self):
"""
Override this method to define additional filter criteria for the lines for which we create a contract
:return: line to create contract
"""
self.ensure_one()
line_to_create_contract = self.order_line.filtered(
lambda r: not r.contract_id and r.is_contract
)
return line_to_create_contract
def _get_line_to_update_contract(self):
"""
Override this method to define additional filter criteria for the lines for which we update a contract
:return: line to update contract
"""
self.ensure_one()
line_to_update_contract = self.order_line.filtered(
lambda r: r.contract_id
and r.is_contract
and r not in r.contract_id.contract_line_ids.mapped("sale_order_line_id")
)
return line_to_update_contract
@api.depends("order_line.contract_id", "state")
def _compute_need_contract_creation(self):
self.update({"need_contract_creation": False})
for rec in self:
if rec.state in ("sale", "done"):
line_to_create_contract = rec._get_line_to_create_contract()
line_to_update_contract = rec._get_line_to_update_contract()
if line_to_create_contract or line_to_update_contract:
rec.need_contract_creation = True
@api.depends("order_line")
def _compute_is_contract(self):
self.is_contract = any(self.order_line.mapped("is_contract"))
def METHOD_NAME(self, contract_template):
self.ensure_one()
return {
"name": "{template_name}: {sale_name}".format(
template_name=contract_template.name, sale_name=self.name
),
"partner_id": self.partner_id.id,
"company_id": self.company_id.id,
"contract_template_id": contract_template.id,
"user_id": self.user_id.id,
"payment_term_id": self.payment_term_id.id,
"fiscal_position_id": self.fiscal_position_id.id,
"invoice_partner_id": self.partner_invoice_id.id,
"line_recurrence": self.partner_invoice_id.id,
}
def action_create_contract(self):
contract_model = self.env["contract.contract"]
contracts = []
for rec in self.filtered("is_contract"):
line_to_create_contract = rec._get_line_to_create_contract()
line_to_update_contract = rec._get_line_to_update_contract()
contract_templates = self.env["contract.template"]
for order_line in line_to_create_contract:
contract_template = order_line.product_id.with_company(
rec.company_id
).property_contract_template_id
if not contract_template:
raise ValidationError(
_(
"You must specify a contract "
"template for '{}' product in '{}' company."
).format(order_line.product_id.name, rec.company_id.name)
)
contract_templates |= contract_template
for contract_template in contract_templates:
order_lines = line_to_create_contract.filtered(
lambda r, template=contract_template: r.product_id.with_company(
r.order_id.company_id
).property_contract_template_id
== template
)
contract = contract_model.create(
rec.METHOD_NAME(contract_template)
)
contracts.append(contract)
contract._onchange_contract_template_id()
contract._onchange_contract_type()
order_lines.create_contract_line(contract)
order_lines.write({"contract_id": contract.id})
for line in line_to_update_contract:
line.create_contract_line(line.contract_id)
return contract_model.browse(contracts)
def action_confirm(self):
"""If we have a contract in the order, set it up"""
self.filtered(
lambda order: (order.company_id.create_contract_at_sale_order_confirmation)
).action_create_contract()
return super(SaleOrder, self).action_confirm()
@api.depends("order_line")
def _compute_contract_count(self):
for rec in self:
rec.contract_count = len(rec.order_line.mapped("contract_id"))
def action_show_contracts(self):
self.ensure_one()
action = self.env["ir.actions.act_window"]._for_xml_id(
"contract.action_customer_contract"
)
contracts = (
self.env["contract.line"]
.search([("sale_order_line_id", "in", self.order_line.ids)])
.mapped("contract_id")
)
action["domain"] = [
("contract_line_ids.sale_order_line_id", "in", self.order_line.ids)
]
if len(contracts) == 1:
# If there is only one contract, open it directly
action.update(
{
"res_id": contracts.id,
"view_mode": "form",
"views": filter(lambda view: view[1] == "form", action["views"]),
}
)
return action
|
1,323 |
autoplan
|
""" an autoplanning solver which takes in courses and spits a plan """
from pprint import pprint
from typing import Tuple
from ortools.sat.python import cp_model # type: ignore
from algorithms.objects.course import Course
from algorithms.objects.user import User
from server.routers.model import CONDITIONS
# Inspired by AbdallahS's code here: https://github.com/AbdallahS/planner
# with help from Martin and MJ :)
def terms_between(start: Tuple[int, int], end: Tuple[int, int]):
return (end[0] - start[0]) * 4 + end[1] - start[1]
def map_var_to_course(courses: list[Course], var: cp_model.IntVar):
return [course for course in courses if course.name == var.Name()][0]
def map_course_to_var(course: Course, variables: list[cp_model.IntVar]):
return [variable for variable in variables if course.name == variable.Name()][0]
def convert_to_term_year(number: int, start: Tuple[int, int]):
return (number // 4 + start[0], number % 4 + start[1])
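# Worked example of the term arithmetic above (values follow directly from the two
# functions): terms_between((2020, 0), (2021, 1)) == (2021 - 2020) * 4 + 1 - 0 == 5,
# and convert_to_term_year(5, (2020, 0)) == (2021, 1).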
def METHOD_NAME(courses: list[Course], user: User, start: Tuple[int, int], end: Tuple[int, int], uoc_max: list[int]) -> list[Tuple[str, Tuple[int, int]]]:
"""
given a list of courses, we will fill our terms in a valid ordering.
we will enforce that:
- the course must be offered in that term
- duplicate courses (usually only multiterm courses) must be taken consecutively
- the UOC max for each term is adhered to (no overloading, and the user should be allowed to manipulate this)
- the prerequisites are respected.
"""
# TODO: add a way to lock in courses
model = cp_model.CpModel()
# 1. enforces terms
variables = [model.NewIntVarFromDomain(cp_model.Domain.FromIntervals(course.term_domain(start, end)), course.name) for course in courses]
# 2. if any courses are named the same, then they must be taken consecutively
possible_course_dupes = [course.name for course in courses if not course.locked]
duplicate_courses = set(c for c in possible_course_dupes if possible_course_dupes.count(c) > 1)
for dupe in duplicate_courses:
matched_courses = [variable for variable in variables if variable.Name() == dupe]
for match, next_match in zip(matched_courses, matched_courses[1:]):
model.Add(match + 1 == next_match)
# 3. set max UOC for a term
for index, m in enumerate(uoc_max):
boolean_indexes = []
for v in variables:
# b is a 'channeling constraint'. This is done to fill the reservoir only *if* the course is in that given term
# https://developers.google.com/optimization/cp/channeling
b = model.NewBoolVar('hi')
model.Add(v == index).OnlyEnforceIf(b)
model.Add(v != index).OnlyEnforceIf(b.Not())
boolean_indexes.append(b)
# if the course is in term 'index', only allow 0 to m UOC to exist in that term.
model.AddReservoirConstraintWithActive(
variables,
list(map_var_to_course(courses, var).uoc for var in variables), # this fills the reservoir by UoC units if active
boolean_indexes, # a course is only active if in term 'index'
0,
m # uoc_max
)
# 4. enforce prereqs, only if not locked by user
for course in courses:
if course.locked:
continue
        # it is the responsibility of the condition class to generate the prereq model.
course.condition.condition_to_model(
model,
user,
list((variable, map_var_to_course(courses, variable)) for variable in variables),
map_course_to_var(course, variables)
)
solver = cp_model.CpSolver()
status: int = solver.Solve(model)
    if status in (cp_model.MODEL_INVALID, cp_model.INFEASIBLE):
raise Exception(f'your courses are impossible to put in these terms! Error code: {status}')
else:
return [(v.Name(), convert_to_term_year(solver.Value(v), start)) for v in variables]
if __name__ == '__main__':
pprint(METHOD_NAME(
[
Course("MATH1141", CONDITIONS["MATH1141"], 65, 6, {2020: [1, 3], 2021: [1, 3], 2022: [1, 3]}),
Course("MATH1081", CONDITIONS["MATH1081"], 65, 6, {2020: [1, 2, 3], 2021: [1, 2, 3], 2022: [1, 2, 3]}),
Course("COMP1511", CONDITIONS["COMP1511"], 65, 6, {2020: [1, 2, 3], 2021: [1, 2, 3], 2022: [1, 2, 3]}),
Course("COMP2521", CONDITIONS["COMP2521"], 65, 6, {2020: [2, 3], 2021: [2, 3], 2022: [2, 3]}),
Course("COMP2041", CONDITIONS["COMP2041"], 65, 6, {2020: [2], 2021: [2], 2022: [2]}),
Course("COMP1531", CONDITIONS["COMP1531"], 65, 6, {2020: [1, 3], 2021: [1, 3], 2022: [1, 3]}),
Course("COMP1521", CONDITIONS["COMP1521"], 65, 6, {2020: [1, 2], 2021: [1, 2], 2022: [1, 2]}),
Course("ENGG2600", CONDITIONS["ENGG2600"], 65, 2, {2020: [1, 2, 3], 2021: [1, 2, 3], 2022: [1, 2, 3]}),
Course("ENGG2600", CONDITIONS["ENGG2600"], 65, 2, {2020: [1, 2, 3], 2021: [1, 2, 3], 2022: [1, 2, 3]}),
Course("ENGG2600", CONDITIONS["ENGG2600"], 65, 2, {2020: [1, 2, 3], 2021: [1, 2, 3], 2022: [1, 2, 3]}),
Course("COMP2511", CONDITIONS["COMP2511"], 65, 6, {2020: [2, 3], 2021: [2, 3], 2022: [2, 3]}),
Course("MATH1241", CONDITIONS["MATH1141"], 65, 6, {2020: [2, 3], 2021: [2, 3], 2022: [2, 3]}),
Course("MATH3411", CONDITIONS["MATH3411"], 65, 6, {2020: [3], 2021: [3], 2022: [3]}),
Course("COMP3411", CONDITIONS["COMP3411"], 65, 6, {2020: [3], 2021: [0], 2022: [0]}),
Course("COMP6841", CONDITIONS["COMP6841"], 65, 6, {2020: [1], 2021: [1], 2022: [1]}),
Course("COMP3231", CONDITIONS["COMP3231"], 65, 6, {2020: [1], 2021: [1], 2022: [1]}),
Course("COMP3141", CONDITIONS["COMP3141"], 65, 6, {2020: [2], 2021: [2], 2022: [2]}),
Course("COMP3121", CONDITIONS["COMP3121"], 65, 6, {2020: [2, 3], 2021: [2, 3], 2022: [2, 3]}),
Course("COMP3131", CONDITIONS["COMP3131"], 65, 6, {2020: [1], 2021: [1], 2022: [1]}),
Course("COMP4141", CONDITIONS["COMP4141"], 65, 6, {2020: [1], 2021: [1], 2022: [1]}),
Course("COMP3311", CONDITIONS["COMP3311"], 65, 6, {2020: [1, 2, 3], 2021: [1, 2, 3], 2022: [1, 2, 3]}),
Course("ARTS1360", CONDITIONS["ARTS1360"], 65, 6, {2020: [2], 2021: [2], 2022: [2]}),
],
User({
"program": "3778",
"specialisations" : ["COMPA1"],
"courses": {}
}),
(2020, 0),
(2023, 3),
[12, 20, 20, 20, 12, 20, 20, 20, 10, 20, 20, 20]
))
|
1,324 |
bzip frames
|
#!/usr/bin/env python3
import os
import sys
import bz2
import numpy as np
import pyopencl as cl # install with `PYOPENCL_CL_PRETEND_VERSION=2.0 pip install pyopencl`
from system.hardware import PC, TICI
from common.basedir import BASEDIR
from selfdrive.test.openpilotci import BASE_URL, get_url
from system.version import get_commit
from system.camerad.snapshot.snapshot import yuv_to_rgb
from tools.lib.logreader import LogReader
from tools.lib.filereader import FileReader
TEST_ROUTE = "8345e3b82948d454|2022-05-04--13-45-33"
SEGMENT = 0
FRAME_WIDTH = 1928
FRAME_HEIGHT = 1208
FRAME_STRIDE = 2896
UV_WIDTH = FRAME_WIDTH // 2
UV_HEIGHT = FRAME_HEIGHT // 2
UV_SIZE = UV_WIDTH * UV_HEIGHT
def get_frame_fn(ref_commit, test_route, tici=True):
return f"{test_route}_debayer{'_tici' if tici else ''}_{ref_commit}.bz2"
def METHOD_NAME(frames):
data = bytes()
for y, u, v in frames:
data += y.tobytes()
data += u.tobytes()
data += v.tobytes()
return bz2.compress(data)
def unbzip_frames(url):
with FileReader(url) as f:
dat = f.read()
data = bz2.decompress(dat)
res = []
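  # each frame is packed as a full-size Y plane followed by the half-size U and V planes,
  # matching the layout produced by the bzip helper above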
for y_start in range(0, len(data), FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2):
u_start = y_start + FRAME_WIDTH * FRAME_HEIGHT
v_start = u_start + UV_SIZE
y = np.frombuffer(data[y_start: u_start], dtype=np.uint8).reshape((FRAME_HEIGHT, FRAME_WIDTH))
u = np.frombuffer(data[u_start: v_start], dtype=np.uint8).reshape((UV_HEIGHT, UV_WIDTH))
v = np.frombuffer(data[v_start: v_start + UV_SIZE], dtype=np.uint8).reshape((UV_HEIGHT, UV_WIDTH))
res.append((y, u, v))
return res
def init_kernels(frame_offset=0):
ctx = cl.create_some_context(interactive=False)
with open(os.path.join(BASEDIR, 'system/camerad/cameras/real_debayer.cl')) as f:
build_args = ' -cl-fast-relaxed-math -cl-denorms-are-zero -cl-single-precision-constant' + \
f' -DFRAME_STRIDE={FRAME_STRIDE} -DRGB_WIDTH={FRAME_WIDTH} -DRGB_HEIGHT={FRAME_HEIGHT} -DFRAME_OFFSET={frame_offset} -DCAM_NUM=0'
if PC:
build_args += ' -DHALF_AS_FLOAT=1 -cl-std=CL2.0'
debayer_prg = cl.Program(ctx, f.read()).build(options=build_args)
return ctx, debayer_prg
def debayer_frame(ctx, debayer_prg, data, rgb=False):
q = cl.CommandQueue(ctx)
yuv_buff = np.empty(FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2, dtype=np.uint8)
cam_g = cl.Buffer(ctx, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=data)
yuv_g = cl.Buffer(ctx, cl.mem_flags.WRITE_ONLY, FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2)
local_worksize = (20, 20) if TICI else (4, 4)
ev1 = debayer_prg.debayer10(q, (UV_WIDTH, UV_HEIGHT), local_worksize, cam_g, yuv_g)
cl.enqueue_copy(q, yuv_buff, yuv_g, wait_for=[ev1]).wait()
cl.enqueue_barrier(q)
y = yuv_buff[:FRAME_WIDTH*FRAME_HEIGHT].reshape((FRAME_HEIGHT, FRAME_WIDTH))
u = yuv_buff[FRAME_WIDTH*FRAME_HEIGHT:FRAME_WIDTH*FRAME_HEIGHT+UV_SIZE].reshape((UV_HEIGHT, UV_WIDTH))
v = yuv_buff[FRAME_WIDTH*FRAME_HEIGHT+UV_SIZE:].reshape((UV_HEIGHT, UV_WIDTH))
if rgb:
return yuv_to_rgb(y, u, v)
else:
return y, u, v
def debayer_replay(lr):
ctx, debayer_prg = init_kernels()
frames = []
for m in lr:
if m.which() == 'roadCameraState':
cs = m.roadCameraState
if cs.image:
data = np.frombuffer(cs.image, dtype=np.uint8)
img = debayer_frame(ctx, debayer_prg, data)
frames.append(img)
return frames
if __name__ == "__main__":
update = "--update" in sys.argv
replay_dir = os.path.dirname(os.path.abspath(__file__))
ref_commit_fn = os.path.join(replay_dir, "debayer_replay_ref_commit")
# load logs
lr = list(LogReader(get_url(TEST_ROUTE, SEGMENT)))
# run replay
frames = debayer_replay(lr)
# get diff
failed = False
diff = ''
yuv_i = ['y', 'u', 'v']
if not update:
with open(ref_commit_fn) as f:
ref_commit = f.read().strip()
frame_fn = get_frame_fn(ref_commit, TEST_ROUTE, tici=TICI)
try:
cmp_frames = unbzip_frames(BASE_URL + frame_fn)
if len(frames) != len(cmp_frames):
failed = True
        diff += 'number of frames not equal\n'
for i, (frame, cmp_frame) in enumerate(zip(frames, cmp_frames)):
for j in range(3):
fr = frame[j]
cmp_f = cmp_frame[j]
if fr.shape != cmp_f.shape:
failed = True
diff += f'frame shapes not equal for ({i}, {yuv_i[j]})\n'
diff += f'{ref_commit}: {cmp_f.shape}\n'
diff += f'HEAD: {fr.shape}\n'
elif not np.array_equal(fr, cmp_f):
failed = True
if np.allclose(fr, cmp_f, atol=1):
diff += f'frames not equal for ({i}, {yuv_i[j]}), but are all close\n'
else:
diff += f'frames not equal for ({i}, {yuv_i[j]})\n'
frame_diff = np.abs(np.subtract(fr, cmp_f))
diff_len = len(np.nonzero(frame_diff)[0])
if diff_len > 10000:
              diff += f'different at a large number of pixels ({diff_len})\n'
else:
diff += 'different at (frame, yuv, pixel, ref, HEAD):\n'
for k in zip(*np.nonzero(frame_diff)):
diff += f'{i}, {yuv_i[j]}, {k}, {cmp_f[k]}, {fr[k]}\n'
if failed:
print(diff)
with open("debayer_diff.txt", "w") as f:
f.write(diff)
except Exception as e:
print(str(e))
failed = True
# upload new refs
if update or (failed and TICI):
from selfdrive.test.openpilotci import upload_file
print("Uploading new refs")
frames_bzip = METHOD_NAME(frames)
new_commit = get_commit()
frame_fn = os.path.join(replay_dir, get_frame_fn(new_commit, TEST_ROUTE, tici=TICI))
with open(frame_fn, "wb") as f2:
f2.write(frames_bzip)
try:
upload_file(frame_fn, os.path.basename(frame_fn))
except Exception as e:
print("failed to upload", e)
if update:
with open(ref_commit_fn, 'w') as f:
f.write(str(new_commit))
print("\nNew ref commit: ", new_commit)
sys.exit(int(failed))
|
1,325 |
set goal
|
import time
import pybullet as pb
import pybullet_data
import rospy
from soccer_common.transformation import Transformation
from soccer_pycontrol.links import Links
from soccer_pycontrol.ramp import Ramp
from soccer_pycontrol.soccerbot import Soccerbot
class Navigator:
"""
    The 2D Navigator class has a running loop that reads commands from the user and outputs actions to the soccerbot
    class.
    Doesn't require ROS and is used for unit tests. All functions called here should be related to the pybullet simulation.
"""
PYBULLET_STEP = rospy.get_param("control_frequency", 0.01)
def __init__(self, real_time=False, display=True, useCalibration=True):
"""
Initialize the Navigator
:param display: Whether or not to show the pybullet visualization, turned off for quick unit tests
:param useCalibration: Whether or not to use movement calibration files located in config/robot_model.yaml, which adjusts the calibration to the movement given
"""
self.display = display
self.real_time = real_time
assert pb.isConnected() == 0
if display:
self.client_id = pb.connect(pb.GUI)
else:
self.client_id = pb.connect(pb.DIRECT)
pb.setAdditionalSearchPath(pybullet_data.getDataPath()) # optionally
pb.resetDebugVisualizerCamera(cameraDistance=1.0, cameraYaw=90, cameraPitch=0, cameraTargetPosition=[0, 0, 0.25])
pb.setGravity(0, 0, -9.81)
pb.configureDebugVisualizer(pb.COV_ENABLE_GUI, 0)
self.soccerbot = Soccerbot(Transformation(), useFixedBase=False, useCalibration=useCalibration)
self.ramp = Ramp("plane.urdf", (0, 0, 0), (0, 0, 0), lateralFriction=0.9, spinningFriction=0.9, rollingFriction=0.0)
self.terminate_walk = False
self.prepare_walk_time = rospy.get_param("prepare_walk_time", 2)
self.t = 0
def close(self):
if pb.isConnected(self.client_id):
pb.disconnect(self.client_id)
assert pb.isConnected() == 0
def ready(self) -> None:
"""
Puts the robot into a ready pose to begin walking
"""
self.soccerbot.ready()
def setPose(self, pose: Transformation) -> None:
"""
        Relocate the robot to the given pose
:param pose: 3D pose of the robot
"""
self.soccerbot.setPose(pose)
def getPose(self):
"""
Get the 3D pose of the robot
:return: The 3D pose of the robot
"""
[position, quaternion] = pb.getLinkState(self.soccerbot.body, linkIndex=Links.LEFT_LEG_6)[4:6]
return Transformation(position=position, quaternion=quaternion).pos_theta
def METHOD_NAME(self, goal: Transformation) -> None:
"""
Set the goal of the robot, will create the path to the goal that will be executed in the run() loop
:param goal: The 3D location goal for the robot
"""
self.soccerbot.createPathToGoal(goal)
def wait(self, steps) -> None:
"""
Make the robot wait for a few steps
        :param steps: Number of simulation steps to wait; each step lasts Navigator.PYBULLET_STEP (usually 0.01 s)
"""
for i in range(steps):
if self.real_time:
time.sleep(Navigator.PYBULLET_STEP)
pb.stepSimulation()
def run(self, single_trajectory=False) -> bool:
"""
The main run loop for the navigator, executes goals given through setGoal and then stops
:param single_trajectory: If set to true, then the software will exit after a single trajectory is completed
:return: True if the robot succeeds navigating to the goal, False if it doesn't reach the goal and falls
"""
logging_id = pb.startStateLogging(pb.STATE_LOGGING_GENERIC_ROBOT, "/tmp/simulation_record.bullet", physicsClientId=self.client_id)
if self.soccerbot.robot_path.duration() == 0:
pb.stopStateLogging(logging_id)
return True
self.t = -self.prepare_walk_time
stable_count = 20
self.soccerbot.reset_imus()
self.soccerbot.reset_roll_feedback_parameters()
while self.t <= self.soccerbot.robot_path.duration():
if self.t < 0:
pitch = self.soccerbot.apply_imu_feedback_standing(self.soccerbot.get_imu())
if abs(pitch - self.soccerbot.standing_pid.setpoint) < 0.025:
stable_count = stable_count - 1
if stable_count == 0:
                        self.t = 0  # start walking once the robot has been stable long enough
else:
stable_count = 5
else:
if self.soccerbot.current_step_time <= self.t <= self.soccerbot.robot_path.duration():
imu = self.soccerbot.get_imu()
t_offset = self.soccerbot.apply_phase_difference_roll_feedback(self.t, imu)
self.soccerbot.stepPath(t_offset)
self.soccerbot.apply_imu_feedback(imu)
self.soccerbot.current_step_time = self.soccerbot.current_step_time + self.soccerbot.robot_path.step_precision
            angle_threshold = 1.25  # in radians
[roll, pitch, yaw] = self.soccerbot.get_imu().orientation_euler
if pitch > angle_threshold:
print("Fallen Back")
pb.stopStateLogging(logging_id)
return False
elif pitch < -angle_threshold:
print("Fallen Front")
pb.stopStateLogging(logging_id)
return False
pb.setJointMotorControlArray(
bodyIndex=self.soccerbot.body,
controlMode=pb.POSITION_CONTROL,
jointIndices=list(range(0, 18, 1)),
targetPositions=self.soccerbot.get_angles(),
)
pb.stepSimulation()
self.t = self.t + Navigator.PYBULLET_STEP
if self.real_time:
time.sleep(Navigator.PYBULLET_STEP)
pb.stopStateLogging(logging_id)
return True
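# Minimal usage sketch (a rough illustration only; the goal pose construction below is an
# assumption based on how Transformation is used elsewhere in this file):
#   walker = Navigator(display=False)
#   walker.ready()
#   walker.wait(100)
#   walker.METHOD_NAME(Transformation(position=[1.0, 0.0, 0.0]))  # set the goal
#   walker.run(single_trajectory=True)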
|
1,326 |
test init with duplicate gid
|
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import sys
import unittest
from antlir.compiler.items.group import (
GROUP_FILE_PATH,
GroupFile,
GroupFileLine,
GroupItem,
)
from antlir.compiler.items.tests.common import BaseItemTestCase
from antlir.compiler.requires_provides import ProvidesGroup, RequireFile
from antlir.fs_utils import Path
from antlir.subvol_utils import TempSubvolumes
_SAMPLE_ETC_GROUP = """root:x:0:
bin:x:1:
daemon:x:2:
sys:x:3:
adm:x:4:
tty:x:5:
disk:x:6:
lp:x:7:
mem:x:8:
kmem:x:9:
wheel:x:10:
cdrom:x:11:
mail:x:12:
man:x:15:
dialout:x:18:
floppy:x:19:
games:x:20:
tape:x:33:
video:x:39:
ftp:x:50:
lock:x:54:
audio:x:63:
users:x:100:
nobody:x:65534:
dbus:x:81:
utmp:x:22:
utempter:x:35:
input:x:999:
kvm:x:36:
render:x:998:
systemd-journal:x:190:
systemd-coredump:x:997:
systemd-network:x:192:
systemd-resolve:x:193:
systemd-timesync:x:996:
tss:x:59:
unbound:x:995:
sshd:x:74:
"""
def augment_group_file(contents: str, groupname: str, gid: int) -> str:
return contents.strip() + "\n" + groupname + ":x:" + str(gid) + ":\n"
class GroupItemTest(BaseItemTestCase):
"Tests GroupItem"
def test_group_item(self) -> None:
self._check_item(
GroupItem(from_target="t", name="foo"),
{ProvidesGroup("foo")},
{RequireFile(path=Path("/etc/group"))},
)
def test_build(self) -> None:
with TempSubvolumes(Path(sys.argv[0])) as ts:
sv = ts.create("root")
sv.run_as_root(["mkdir", sv.path("/etc")]).check_returncode()
sv.overwrite_path_as_root(GROUP_FILE_PATH, _SAMPLE_ETC_GROUP)
GroupItem(from_target="t", name="foo").build(sv)
self.assertEqual(
augment_group_file(_SAMPLE_ETC_GROUP, "foo", 1000),
sv.path("/etc/group").read_text(),
)
def test_build_twice(self) -> None:
with TempSubvolumes(Path(sys.argv[0])) as ts:
sv = ts.create("root")
sv.run_as_root(["mkdir", sv.path("/etc")]).check_returncode()
sv.overwrite_path_as_root(GROUP_FILE_PATH, _SAMPLE_ETC_GROUP)
GroupItem(from_target="t", name="foo").build(sv)
GroupItem(from_target="t", name="bar").build(sv)
self.assertEqual(
augment_group_file(
augment_group_file(_SAMPLE_ETC_GROUP, "foo", 1000),
"bar",
1001,
),
sv.path("/etc/group").read_text(),
)
def test_build_with_gid(self) -> None:
with TempSubvolumes(Path(sys.argv[0])) as ts:
sv = ts.create("root")
sv.run_as_root(["mkdir", sv.path("/etc")]).check_returncode()
sv.overwrite_path_as_root(GROUP_FILE_PATH, _SAMPLE_ETC_GROUP)
GroupItem(from_target="t", name="foo", id=2000).build(sv)
self.assertEqual(
augment_group_file(_SAMPLE_ETC_GROUP, "foo", 2000),
sv.path("/etc/group").read_text(),
)
class GroupFileTest(unittest.TestCase):
def test_init(self) -> None:
gf = GroupFile("root:x:0:td-agent\nbin:x:1:a,b\n\ndaemon:x:2:\n\n")
self.assertEqual(
[
GroupFileLine(name="root", id=0, members=["td-agent"]),
GroupFileLine(name="bin", id=1, members=["a", "b"]),
GroupFileLine(name="daemon", id=2, members=[]),
],
list(gf.lines.values()),
)
def test_init_with_bad_line(self) -> None:
with self.assertRaisesRegex(RuntimeError, r"^Invalid line in group file"):
GroupFile("root:0\n")
def METHOD_NAME(self) -> None:
with self.assertRaisesRegex(RuntimeError, r"^Duplicate GID in group file"):
GroupFile("root:x:42:\nbin:x:42:")
def test_init_with_duplicate_groupname(self) -> None:
with self.assertRaisesRegex(
RuntimeError, r"^Duplicate groupname in group file"
):
GroupFile("root:x:1:\nroot:x:2:")
def test_add(self) -> None:
gf = GroupFile()
gf.add("group1", 1)
self.assertEqual(
[GroupFileLine(name="group1", id=1, members=[])],
list(gf.lines.values()),
)
gf.add("group2", 2)
gf.add("group3", 3)
self.assertEqual(
[
GroupFileLine(name="group1", id=1, members=[]),
GroupFileLine(name="group2", id=2, members=[]),
GroupFileLine(name="group3", id=3, members=[]),
],
list(gf.lines.values()),
)
with self.assertRaises(ValueError):
gf.add("anothergroup2", 2)
def test_next_group_id(self) -> None:
gf = GroupFile()
gf.add("a", 1)
self.assertEqual(1000, gf.next_group_id())
gf.add("b", 999)
self.assertEqual(1000, gf.next_group_id())
gf.add("c", 1000)
self.assertEqual(1001, gf.next_group_id())
gf.add("d", 30000)
self.assertEqual(30001, gf.next_group_id())
gf.add("e", 65534)
self.assertEqual(30001, gf.next_group_id())
def test_join(self) -> None:
gf = GroupFile()
with self.assertRaisesRegex(ValueError, r"^a not found"):
gf.join("a", "me")
gf.add("a", 1)
self.assertEqual(gf.lines[1], GroupFileLine(name="a", id=1, members=[]))
gf.join("a", "me")
self.assertEqual(gf.lines[1], GroupFileLine(name="a", id=1, members=["me"]))
gf.join("a", "you")
self.assertEqual(
gf.lines[1], GroupFileLine(name="a", id=1, members=["me", "you"])
)
def test_str(self) -> None:
gf = GroupFile()
gf.add("a", 1)
gf.add("b", 1000)
gf.join("b", "me")
gf.join("b", "you")
gf.add("c", 10000)
gf.join("c", "me")
self.assertEqual("a:x:1:\nb:x:1000:me,you\nc:x:10000:me\n", str(gf))
def test_add_duplicate_name(self) -> None:
gf = GroupFile()
gf.add("a", 1)
with self.assertRaisesRegex(ValueError, r"^group a already exists"):
gf.add("a", 2)
def test_provides(self) -> None:
gf = GroupFile("root:x:0:td-agent\nbin:x:1:a,b\n\ndaemon:x:2:\n\n")
self.assertEqual(
{
ProvidesGroup("root"),
ProvidesGroup("bin"),
ProvidesGroup("daemon"),
},
set(gf.provides()),
)
def test_get_gid(self) -> None:
gf = GroupFile()
gf.add("root", 0)
gf.add("a", 1)
gf.add("b", 2)
self.assertEqual(gf.gid("root"), 0)
self.assertEqual(gf.gid("a"), 1)
self.assertIsNone(gf.gid("nope"))
|
1,327 |
filter
|
import numpy as np
import pytest
import aesara
import aesara.graph.op as op
import aesara.tensor as at
from aesara import shared
from aesara.configdefaults import config
from aesara.graph.basic import Apply, Variable
from aesara.graph.op import Op
from aesara.graph.type import Type
from aesara.graph.utils import TestValueError
from aesara.link.c.type import Generic
from aesara.tensor.math import log
from aesara.tensor.type import dmatrix, dscalar, dvector, vector
def as_variable(x):
assert isinstance(x, Variable)
return x
class MyType(Type):
def __init__(self, thingy):
self.thingy = thingy
def __eq__(self, other):
return type(other) == type(self) and other.thingy == self.thingy
def __str__(self):
return str(self.thingy)
def __repr__(self):
return str(self.thingy)
def METHOD_NAME(self, x, strict=False, allow_downcast=None):
# Dummy filter: we want this type to represent strings that
# start with `self.thingy`.
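        # e.g. with thingy == "ab", "abc" passes through unchanged, "xyz" raises
        # ValueError, and a non-string raises TypeError.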
if not isinstance(x, str):
raise TypeError("Invalid type")
if not x.startswith(self.thingy):
raise ValueError("Invalid value")
return x
# Added to make those tests pass in DebugMode
@staticmethod
def may_share_memory(a, b):
        # As this represents a string and strings are immutable, they
        # never share memory in the DebugMode sense. This is needed because
        # Python reuses strings internally.
return False
class MyOp(Op):
__props__ = ()
def make_node(self, *inputs):
inputs = list(map(as_variable, inputs))
for input in inputs:
if not isinstance(input.type, MyType):
raise Exception("Error 1")
outputs = [MyType(sum(input.type.thingy for input in inputs))()]
return Apply(self, inputs, outputs)
def perform(self, *args, **kwargs):
raise NotImplementedError("No Python implementation available.")
MyOp = MyOp()
class NoInputOp(Op):
"""An Op to test the corner-case of an Op with no input."""
__props__ = ()
def make_node(self):
return Apply(self, [], [MyType("test")()])
def perform(self, node, inputs, output_storage):
output_storage[0][0] = "test Op no input"
class TestOp:
# Sanity tests
def test_sanity_0(self):
r1, r2 = MyType(1)(), MyType(2)()
node = MyOp.make_node(r1, r2)
# Are the inputs what I provided?
assert [x for x in node.inputs] == [r1, r2]
# Are the outputs what I expect?
assert [x.type for x in node.outputs] == [MyType(3)]
assert node.outputs[0].owner is node and node.outputs[0].index == 0
# validate
def test_validate(self):
try:
MyOp(Generic()(), MyType(1)()) # MyOp requires MyType instances
raise Exception("Expected an exception")
except Exception as e:
if str(e) != "Error 1":
raise
def test_op_no_input(self):
x = NoInputOp()()
f = aesara.function([], x)
rval = f()
assert rval == "test Op no input"
class TestMakeThunk:
def test_no_make_node(self):
class DoubleOp(Op):
"""An Op without make_node"""
__props__ = ()
itypes = [dmatrix]
otypes = [dmatrix]
def perform(self, node, inputs, outputs):
inp = inputs[0]
output = outputs[0]
output[0] = inp * 2
x_input = dmatrix("x_input")
f = aesara.function([x_input], DoubleOp()(x_input))
inp = np.random.random((5, 4))
out = f(inp)
assert np.allclose(inp * 2, out)
def test_test_value_python_objects():
for x in ([0, 1, 2], 0, 0.5, 1):
assert np.all(op.get_test_value(x) == x)
def test_test_value_ndarray():
x = np.zeros((5, 5))
v = op.get_test_value(x)
assert np.all(v == x)
def test_test_value_constant():
x = at.as_tensor_variable(np.zeros((5, 5)))
v = op.get_test_value(x)
assert np.all(v == np.zeros((5, 5)))
def test_test_value_shared():
x = shared(np.zeros((5, 5)))
v = op.get_test_value(x)
assert np.all(v == np.zeros((5, 5)))
@config.change_flags(compute_test_value="raise")
def test_test_value_op():
x = log(np.ones((5, 5)))
v = op.get_test_value(x)
assert np.allclose(v, np.zeros((5, 5)))
@config.change_flags(compute_test_value="off")
def test_get_test_values_no_debugger():
"""Tests that `get_test_values` returns `[]` when debugger is off."""
x = vector()
assert op.get_test_values(x) == []
@config.change_flags(compute_test_value="ignore")
def test_get_test_values_ignore():
"""Tests that `get_test_values` returns `[]` when debugger is set to "ignore" and some values are missing."""
x = vector()
assert op.get_test_values(x) == []
def test_get_test_values_success():
"""Tests that `get_test_values` returns values when available (and the debugger is on)."""
for mode in ["ignore", "warn", "raise"]:
with config.change_flags(compute_test_value=mode):
x = vector()
x.tag.test_value = np.zeros((4,), dtype=config.floatX)
y = np.zeros((5, 5))
iters = 0
for x_val, y_val in op.get_test_values(x, y):
assert x_val.shape == (4,)
assert y_val.shape == (5, 5)
iters += 1
assert iters == 1
@config.change_flags(compute_test_value="raise")
def test_get_test_values_exc():
"""Tests that `get_test_values` raises an exception when debugger is set to raise and a value is missing."""
with pytest.raises(TestValueError):
x = vector()
assert op.get_test_values(x) == []
def test_op_invalid_input_types():
class TestOp(aesara.graph.op.Op):
itypes = [dvector, dvector, dvector]
otypes = [dvector]
def perform(self, node, inputs, outputs):
pass
msg = r"^Invalid input types for Op.*"
with pytest.raises(TypeError, match=msg):
TestOp()(dvector(), dscalar(), dvector())
def test_op_input_broadcastable():
# Test that we can create an op with a broadcastable subtype as input
class SomeOp(aesara.tensor.Op):
itypes = [at.dvector]
otypes = [at.dvector]
def perform(self, *_):
raise NotImplementedError()
x = at.TensorType(dtype="float64", shape=(1,))("x")
assert SomeOp()(x).type == at.dvector
|
1,328 |
database consistent
|
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
from typing import (
List, Dict
)
from kiwi.api_helper import decommissioned
from kiwi.command import command_call_type
from kiwi.system.root_bind import RootBind
from kiwi.repository.base import RepositoryBase
class PackageManagerBase:
"""
**Implements base class for Package Management**
:param object repository: instance of :class:`Repository`
:param str root_dir: root directory path name
:param list package_requests: list of packages to install or delete
:param list collection_requests: list of collections to install
:param list product_requests: list of products to install
"""
def __init__(
self, repository: RepositoryBase, custom_args: List = [],
release_version: str = ''
) -> None:
self.repository = repository
self.root_dir = repository.root_dir
self.package_requests: List[str] = []
self.collection_requests: List[str] = []
self.product_requests: List[str] = []
self.exclude_requests: List[str] = []
self.release_version = release_version or '0'
self.post_init(custom_args or [])
def post_init(self, custom_args: List = []) -> None:
"""
Post initialization method
Implementation in specialized package manager class
:param list custom_args: unused
"""
pass
def request_package(self, name: str) -> None:
"""
Queue a package request
Implementation in specialized package manager class
:param str name: unused
"""
raise NotImplementedError
def request_collection(self, name: str) -> None:
"""
Queue a package collection
Implementation in specialized package manager class
:param str name: unused
"""
raise NotImplementedError
def request_product(self, name: str) -> None:
"""
Queue a product request
Implementation in specialized package manager class
:param str name: unused
"""
raise NotImplementedError
@decommissioned
def request_package_lock(self, name: str) -> None:
pass # pragma: no cover
def request_package_exclusion(self, name: str) -> None:
"""
Queue a package exclusion(skip) request
Implementation in specialized package manager class
:param str name: unused
"""
raise NotImplementedError
def setup_repository_modules(
self, collection_modules: Dict[str, List[str]]
) -> None:
"""
Setup repository modules and streams
Implementation in specialized package manager class
:param dict collection_modules: unused
"""
raise NotImplementedError
def process_install_requests_bootstrap(
self, root_bind: RootBind = None, bootstrap_package: str = None
) -> command_call_type:
"""
Process package install requests for bootstrap phase (no chroot)
Implementation in specialized package manager class
"""
raise NotImplementedError
def process_install_requests(self) -> command_call_type:
"""
Process package install requests for image phase (chroot)
Implementation in specialized package manager class
"""
raise NotImplementedError
def process_delete_requests(self, force: bool = False) -> command_call_type:
"""
Process package delete requests (chroot)
Implementation in specialized package manager class
:param bool force: unused
"""
raise NotImplementedError
def update(self) -> command_call_type:
"""
Process package update requests (chroot)
Implementation in specialized package manager class
"""
raise NotImplementedError
def process_only_required(self) -> None:
"""
Setup package processing only for required packages
Implementation in specialized package manager class
"""
raise NotImplementedError
def process_plus_recommended(self) -> None:
"""
Setup package processing to also include recommended dependencies
Implementation in specialized package manager class
"""
raise NotImplementedError
def match_package_installed(
self, package_name: str, package_manager_output: str
) -> bool:
"""
Match expression to indicate a package has been installed
Implementation in specialized package manager class
:param str package_name: unused
:param str package_manager_output: unused
:return: True|False
:rtype: bool
"""
raise NotImplementedError
def match_package_deleted(
self, package_name: str, package_manager_output: str
) -> bool:
"""
Match expression to indicate a package has been deleted
Implementation in specialized package manager class
:param str package_name: unused
:param str package_manager_output: unused
:return: True|False
:rtype: bool
"""
raise NotImplementedError
@decommissioned
def METHOD_NAME(self) -> None:
pass # pragma: no cover
@decommissioned
def dump_reload_package_database(self, version: int = 45) -> None:
pass # pragma: no cover
def post_process_install_requests_bootstrap(
self, root_bind: RootBind = None, delta_root: bool = False
) -> None:
"""
Process extra code required after bootstrapping
Implementation in specialized package manager class
"""
pass
def post_process_delete_requests(
self, root_bind: RootBind = None
) -> None:
"""
Process extra code required after deleting packages
Implementation in specialized package manager class
"""
pass
@staticmethod
def has_failed(returncode: int) -> bool:
"""
Evaluate given result return code
Any returncode != 0 is considered an error unless
overwritten in specialized package manager class
:param int returncode: return code number
:return: True|False
:rtype: boolean
"""
        return returncode != 0
def get_error_details(self) -> str:
"""
Provide further error details
In case the package manager call failed this
method will return package manager specific error
information if there is any
:return: further error data as str or empty str
:rtype: str
"""
return ''
def clean_leftovers(self) -> None:
"""
Cleans package manager related data not needed in the
resulting image such as custom macros
Implementation in specialized package manager class
"""
pass
def cleanup_requests(self) -> None:
"""
Cleanup request queues
"""
del self.package_requests[:]
del self.collection_requests[:]
del self.product_requests[:]
del self.exclude_requests[:]
|
1,329 |
get geometry output
|
# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Provides custom functions over command-line arguments.
"""
from typing import Optional, List, Dict, Any, Sequence, Tuple
import argparse
import logging
from functools import reduce
from pathlib import Path
from nominatim.errors import UsageError
from nominatim.config import Configuration
from nominatim.typing import Protocol
import nominatim.api as napi
LOG = logging.getLogger()
class Subcommand(Protocol):
"""
Interface to be implemented by classes implementing a CLI subcommand.
"""
def add_args(self, parser: argparse.ArgumentParser) -> None:
"""
Fill the given parser for the subcommand with the appropriate
parameters.
"""
def run(self, args: 'NominatimArgs') -> int:
"""
Run the subcommand with the given parsed arguments.
"""
class NominatimArgs:
""" Customized namespace class for the nominatim command line tool
to receive the command-line arguments.
"""
# Basic environment set by root program.
config: Configuration
project_dir: Path
# Global switches
version: bool
subcommand: Optional[str]
command: Subcommand
# Shared parameters
osm2pgsql_cache: Optional[int]
socket_timeout: int
# Arguments added to all subcommands.
verbose: int
threads: Optional[int]
# Arguments to 'add-data'
file: Optional[str]
diff: Optional[str]
node: Optional[int]
way: Optional[int]
relation: Optional[int]
tiger_data: Optional[str]
use_main_api: bool
# Arguments to 'admin'
warm: bool
check_database: bool
migrate: bool
collect_os_info: bool
analyse_indexing: bool
target: Optional[str]
osm_id: Optional[str]
place_id: Optional[int]
# Arguments to 'import'
osm_file: List[str]
continue_at: Optional[str]
reverse_only: bool
no_partitions: bool
no_updates: bool
offline: bool
ignore_errors: bool
index_noanalyse: bool
# Arguments to 'index'
boundaries_only: bool
no_boundaries: bool
minrank: int
maxrank: int
# Arguments to 'export'
output_type: str
output_format: str
output_all_postcodes: bool
language: Optional[str]
restrict_to_country: Optional[str]
# Arguments to 'refresh'
postcodes: bool
word_tokens: bool
word_counts: bool
address_levels: bool
functions: bool
wiki_data: bool
secondary_importance: bool
importance: bool
website: bool
diffs: bool
enable_debug_statements: bool
data_object: Sequence[Tuple[str, int]]
data_area: Sequence[Tuple[str, int]]
# Arguments to 'replication'
init: bool
update_functions: bool
check_for_updates: bool
once: bool
catch_up: bool
do_index: bool
# Arguments to 'serve'
server: str
engine: str
    # Arguments to 'special-phrases'
import_from_wiki: bool
import_from_csv: Optional[str]
no_replace: bool
# Arguments to all query functions
format: str
addressdetails: bool
extratags: bool
namedetails: bool
lang: Optional[str]
polygon_output: Optional[str]
polygon_threshold: Optional[float]
# Arguments to 'search'
query: Optional[str]
amenity: Optional[str]
street: Optional[str]
city: Optional[str]
county: Optional[str]
state: Optional[str]
country: Optional[str]
postalcode: Optional[str]
countrycodes: Optional[str]
exclude_place_ids: Optional[str]
limit: int
viewbox: Optional[str]
bounded: bool
dedupe: bool
# Arguments to 'reverse'
lat: float
lon: float
zoom: Optional[int]
layers: Optional[Sequence[str]]
# Arguments to 'lookup'
ids: Sequence[str]
# Arguments to 'details'
object_class: Optional[str]
linkedplaces: bool
hierarchy: bool
keywords: bool
polygon_geojson: bool
group_hierarchy: bool
def osm2pgsql_options(self, default_cache: int,
default_threads: int) -> Dict[str, Any]:
""" Return the standard osm2pgsql options that can be derived
from the command line arguments. The resulting dict can be
further customized and then used in `run_osm2pgsql()`.
"""
return dict(osm2pgsql=self.config.OSM2PGSQL_BINARY or self.config.lib_dir.osm2pgsql,
osm2pgsql_cache=self.osm2pgsql_cache or default_cache,
osm2pgsql_style=self.config.get_import_style_file(),
osm2pgsql_style_path=self.config.config_dir,
threads=self.threads or default_threads,
dsn=self.config.get_libpq_dsn(),
flatnode_file=str(self.config.get_path('FLATNODE_FILE') or ''),
tablespaces=dict(slim_data=self.config.TABLESPACE_OSM_DATA,
slim_index=self.config.TABLESPACE_OSM_INDEX,
main_data=self.config.TABLESPACE_PLACE_DATA,
main_index=self.config.TABLESPACE_PLACE_INDEX
)
)
def get_osm_file_list(self) -> Optional[List[Path]]:
""" Return the --osm-file argument as a list of Paths or None
if no argument was given. The function also checks if the files
exist and raises a UsageError if one cannot be found.
"""
if not self.osm_file:
return None
files = [Path(f) for f in self.osm_file]
for fname in files:
if not fname.is_file():
LOG.fatal("OSM file '%s' does not exist.", fname)
raise UsageError('Cannot access file.')
return files
def METHOD_NAME(self) -> napi.GeometryFormat:
""" Get the requested geometry output format in a API-compatible
format.
"""
if not self.polygon_output:
return napi.GeometryFormat.NONE
if self.polygon_output == 'geojson':
return napi.GeometryFormat.GEOJSON
if self.polygon_output == 'kml':
return napi.GeometryFormat.KML
if self.polygon_output == 'svg':
return napi.GeometryFormat.SVG
if self.polygon_output == 'text':
return napi.GeometryFormat.TEXT
try:
return napi.GeometryFormat[self.polygon_output.upper()]
except KeyError as exp:
raise UsageError(f"Unknown polygon output format '{self.polygon_output}'.") from exp
def get_locales(self, default: Optional[str]) -> napi.Locales:
""" Get the locales from the language parameter.
"""
if self.lang:
return napi.Locales.from_accept_languages(self.lang)
if default:
return napi.Locales.from_accept_languages(default)
return napi.Locales()
def get_layers(self, default: napi.DataLayer) -> Optional[napi.DataLayer]:
""" Get the list of selected layers as a DataLayer enum.
"""
if not self.layers:
return default
return reduce(napi.DataLayer.__or__,
(napi.DataLayer[s.upper()] for s in self.layers))
|
1,330 |
mime types
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
from core.load_modules import load_all_modules, load_all_profiles
from core.load_modules import load_all_graphs
from core.alert import messages
from flask import abort
from config import nettacker_paths
def structure(status="", msg=""):
"""
basic JSON message structure
Args:
status: status (ok, failed)
msg: the message content
Returns:
a JSON message
"""
return {
"status": status,
"msg": msg
}
def get_value(flask_request, key):
"""
    get a value from GET, POST or COOKIES
Args:
flask_request: the flask request
key: the value name to find
Returns:
the value content if found otherwise None
"""
return dict(
flask_request.args
).get(key) or dict(
flask_request.form
).get(key) or dict(
flask_request.cookies
).get(key) or ""
def METHOD_NAME():
"""
contains all mime types for HTTP request
Returns:
all mime types in json
"""
return {
".aac": "audio/aac",
".abw": "application/x-abiword",
".arc": "application/octet-stream",
".avi": "video/x-msvideo",
".azw": "application/vnd.amazon.ebook",
".bin": "application/octet-stream",
".bz": "application/x-bzip",
".bz2": "application/x-bzip2",
".csh": "application/x-csh",
".css": "text/css",
".csv": "text/csv",
".doc": "application/msword",
".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
".eot": "application/vnd.ms-fontobject",
".epub": "application/epub+zip",
".gif": "image/gif",
".htm": ".htm",
".html": "text/html",
".ico": "image/x-icon",
".ics": "text/calendar",
".jar": "application/java-archive",
".jpeg": ".jpeg",
".jpg": "image/jpeg",
".js": "application/javascript",
".json": "application/json",
".mid": ".mid",
".midi": "audio/midi",
".mpeg": "video/mpeg",
".mpkg": "application/vnd.apple.installer+xml",
".odp": "application/vnd.oasis.opendocument.presentation",
".ods": "application/vnd.oasis.opendocument.spreadsheet",
".odt": "application/vnd.oasis.opendocument.text",
".oga": "audio/ogg",
".ogv": "video/ogg",
".ogx": "application/ogg",
".otf": "font/otf",
".png": "image/png",
".pdf": "application/pdf",
".ppt": "application/vnd.ms-powerpoint",
".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
".rar": "application/x-rar-compressed",
".rtf": "application/rtf",
".sh": "application/x-sh",
".svg": "image/svg+xml",
".swf": "application/x-shockwave-flash",
".tar": "application/x-tar",
".tif": ".tif",
".tiff": "image/tiff",
".ts": "application/typescript",
".ttf": "font/ttf",
".txt": "text/plain",
".vsd": "application/vnd.visio",
".wav": "audio/x-wav",
".weba": "audio/webm",
".webm": "video/webm",
".webp": "image/webp",
".woff": "font/woff",
".woff2": "font/woff2",
".xhtml": "application/xhtml+xml",
".xls": "application/vnd.ms-excel",
".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
".xml": "application/xml",
".xul": "application/vnd.mozilla.xul+xml",
".zip": "application/zip",
".3gp": "video/3gpp",
"audio/3gpp": "video",
".3g2": "video/3gpp2",
"audio/3gpp2": "video",
".7z": "application/x-7z-compressed"
}
def get_file(filename):
"""
open the requested file in HTTP requests
Args:
filename: path and the filename
Returns:
content of the file or abort(404)
"""
if not os.path.normpath(filename).startswith(nettacker_paths()["web_static_files_path"]):
abort(404)
try:
return open(filename, "rb").read()
except ValueError:
abort(404)
except IOError:
abort(404)
def api_key_is_valid(app, flask_request):
"""
check the validity of API key
Args:
app: the flask app
flask_request: the flask request
Returns:
200 HTTP code if it's valid otherwise 401 error
"""
if app.config["OWASP_NETTACKER_CONFIG"]["api_access_key"] != get_value(flask_request, "key"):
abort(401, messages("API_invalid"))
return
def languages_to_country():
"""
define list of languages with country flag for API
Returns:
HTML code for each language with its country flag
"""
from core.load_modules import load_all_languages
languages = load_all_languages()
res = ""
flags = {
"el": "gr",
"fr": "fr",
"en": "us",
"nl": "nl",
"ps": "ps",
"tr": "tr",
"de": "de",
"ko": "kr",
"it": "it",
"ja": "jp",
"fa": "ir",
"hy": "am",
"ar": "sa",
"zh-cn": "cn",
"vi": "vi",
"ru": "ru",
"hi": "in",
"ur": "pk",
"id": "id",
"es": "es",
"iw": "il",
"pt-br": "br",
"bn": "in"
}
for language in languages:
res += """<option {2} id="{0}" data-content='<span class="flag-icon flag-icon-{1}"
value="{0}"></span> {0}'></option>""".format(
language,
flags[language],
"selected" if language == "en" else ""
)
return res
def graphs():
"""
all available graphs for API
Returns:
HTML content or available graphs
"""
res = """<label><input id="" type="radio" name="graph_name" value="" class="radio"><a
class="label label-default">None</a></label> """
for graph in load_all_graphs():
res += """<label><input id="{0}" type="radio" name="graph_name" value="{0}" class="radio"><a
class="label label-default">{0}</a></label> """.format(graph)
return res
def profiles():
"""
all available profiles for API
Returns:
HTML content or available profiles
"""
res = ""
for profile in sorted(load_all_profiles().keys()):
label = "success" if (
profile == "scan"
) else "warning" if (
profile == "brute"
) else "danger" if (
profile == "vulnerability"
) else "default"
res += """<label><input id="{0}" type="checkbox" class="checkbox checkbox-{0}"><a class="label
label-{1}">{0}</a></label> """.format(profile, label)
return res
def scan_methods():
"""
all available modules for API
Returns:
HTML content or available modules
"""
methods = load_all_modules()
methods.pop("all")
res = ""
for sm in methods.keys():
label = "success" if sm.endswith(
"_scan"
) else "warning" if sm.endswith(
"_brute"
) else "danger" if sm.endswith(
"_vuln"
) else "default"
profile = "scan" if sm.endswith(
"_scan"
) else "brute" if sm.endswith(
"_brute"
) else "vuln" if sm.endswith(
"_vuln"
) else "default"
res += """<label><input id="{0}" type="checkbox" class="checkbox checkbox-{2}-module">
<a class="label label-{1}">{0}</a></label> """.format(sm, label, profile)
return res
|
1,331 |
create weight proxy
|
import operator
from abc import ABC, abstractmethod
import torch
import torch.nn.functional as F
class BiasAdditionModule(ABC):
"""
    This class is used to construct the restructured computation graph for a
    call_module node with bias addition inside.
"""
def __init__(self, tracer, target, args, kwargs, substitute_func):
self.tracer = tracer
self.target = target
self.args = args
self.kwargs = kwargs
self.substitute_func = substitute_func
self.weight_proxy = self.METHOD_NAME()
self.bias_proxy = self._create_bias_proxy()
def METHOD_NAME(self):
"""
        Create the weight proxy; the node created by this proxy contains the module weight.
        Note: this function will be invoked during module initialization;
        you should never call this function directly.
"""
weight_node_kind = 'get_attr'
weight_node_target = self.target + '.weight'
weight_proxy = self.tracer.create_proxy(weight_node_kind, weight_node_target, (), {})
return weight_proxy
def _create_bias_proxy(self):
"""
        Create the bias proxy; the node created by this proxy contains the module bias.
        Note: this function will be invoked during module initialization;
        you should never call this function directly.
"""
bias_node_kind = 'get_attr'
bias_node_target = self.target + '.bias'
bias_proxy = self.tracer.create_proxy(bias_node_kind, bias_node_target, (), {})
return bias_proxy
@abstractmethod
def extract_kwargs_from_mod(self):
"""
This method is used to extract the kwargs for non-bias computation.
For example:
        The kwargs for the conv2d module are {} because attributes like 'padding' or 'groups' are
        handled during module initialization. However, we need to pass those attributes as kwargs
        to F.conv2d.
"""
pass
def create_non_bias_func_proxy(self, input_proxy=None):
"""
This method is used to create the non_bias_func proxy, the node created by this proxy will
compute the main computation, such as convolution, with bias option banned.
"""
node_kind = 'call_function'
node_target = self.substitute_func
if input_proxy is None:
input_proxy = self.args[0]
node_args = (input_proxy, self.weight_proxy)
node_kwargs = self.extract_kwargs_from_mod()
non_bias_func_proxy = self.tracer.create_proxy(node_kind, node_target, node_args, node_kwargs)
return non_bias_func_proxy
def create_bias_addition_proxy(self, non_bias_func_proxy, bias_proxy):
"""
This method is used to create the bias_addition_proxy, the node created by this proxy will
compute the sum of non_bias_func result and bias with some reshape operation if needed.
"""
bias_add_node_kind = 'call_function'
bias_add_node_target = operator.add
bias_add_args = (non_bias_func_proxy, bias_proxy)
bias_add_proxy = self.tracer.create_proxy(bias_add_node_kind, bias_add_node_target, tuple(bias_add_args), {})
return bias_add_proxy
@abstractmethod
def generate(self):
"""
        This method is used to construct the whole restructured computation graph for a call_module node with bias
        addition inside.
        A whole restructured computation graph will contain a weight node, a bias node, a bias-free computation node,
        a bias reshape node if needed and a bias addition node.
Use Conv2d module as an example:
The origin node is:
%conv: call_module[target=conv](args = (%x,), kwargs = {})
Restructured graph is:
%conv_weight : [#users=1] = get_attr[target=conv.weight]
%conv_bias : [#users=1] = get_attr[target=conv.bias]
%conv2d : [#users=1] = call_function[target=torch.conv2d](args = (%x, %conv_weight), kwargs = {})
%view : [#users=1] = call_method[target=view](args = (%conv_bias, [1, -1, 1, 1]), kwargs = {})
%add : [#users=1] = call_function[target=operator.add](args = (%conv2d, %view), kwargs = {})
"""
pass
module_to_func_dict = {
torch.nn.Linear: F.linear,
torch.nn.Conv1d: F.conv1d,
torch.nn.Conv2d: F.conv2d,
torch.nn.Conv3d: F.conv3d,
}
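# A minimal sketch (not part of the original file) of how a concrete subclass could look
# for torch.nn.Linear; the class name and the empty kwargs dict are assumptions.
class LinearBiasAdditionModule(BiasAdditionModule):
    def extract_kwargs_from_mod(self):
        # F.linear only takes input, weight and bias, so no extra kwargs are required.
        return {}
    def generate(self):
        # main computation without bias, followed by an explicit bias addition
        non_bias_linear_proxy = self.create_non_bias_func_proxy()
        return self.create_bias_addition_proxy(non_bias_linear_proxy, self.bias_proxy)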
|
1,332 |
test with refcounts
|
import os, sys, unittest, getopt, time
use_resources = []
import ctypes
ctypes_symbols = dir(ctypes)
def need_symbol(name):
return unittest.skipUnless(name in ctypes_symbols,
'{!r} is required'.format(name))
class ResourceDenied(unittest.SkipTest):
"""Test skipped because it requested a disallowed resource.
This is raised when a test calls requires() for a resource that
    has not been enabled. Resources are defined by test modules.
"""
def is_resource_enabled(resource):
"""Test whether a resource is enabled.
If the caller's module is __main__ then automatically return True."""
if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
return True
result = use_resources is not None and \
(resource in use_resources or "*" in use_resources)
if not result:
_unavail[resource] = None
return result
_unavail = {}
def requires(resource, msg=None):
"""Raise ResourceDenied if the specified resource is not available.
If the caller's module is __main__ then automatically return True."""
# see if the caller's module is __main__ - if so, treat as if
# the resource was set
if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
return
if not is_resource_enabled(resource):
if msg is None:
msg = "Use of the `%s' resource not enabled" % resource
raise ResourceDenied(msg)
def find_package_modules(package, mask):
import fnmatch
if (hasattr(package, "__loader__") and
hasattr(package.__loader__, '_files')):
path = package.__name__.replace(".", os.path.sep)
mask = os.path.join(path, mask)
for fnm in package.__loader__._files.iterkeys():
if fnmatch.fnmatchcase(fnm, mask):
yield os.path.splitext(fnm)[0].replace(os.path.sep, ".")
else:
path = package.__path__[0]
for fnm in os.listdir(path):
if fnmatch.fnmatchcase(fnm, mask):
yield "%s.%s" % (package.__name__, os.path.splitext(fnm)[0])
def get_tests(package, mask, verbosity, exclude=()):
"""Return a list of skipped test modules, and a list of test cases."""
tests = []
skipped = []
for modname in find_package_modules(package, mask):
if modname.split(".")[-1] in exclude:
skipped.append(modname)
if verbosity > 1:
print >> sys.stderr, "Skipped %s: excluded" % modname
continue
try:
mod = __import__(modname, globals(), locals(), ['*'])
except (ResourceDenied, unittest.SkipTest) as detail:
skipped.append(modname)
if verbosity > 1:
print >> sys.stderr, "Skipped %s: %s" % (modname, detail)
continue
for name in dir(mod):
if name.startswith("_"):
continue
o = getattr(mod, name)
if type(o) is type(unittest.TestCase) and issubclass(o, unittest.TestCase):
tests.append(o)
return skipped, tests
def usage():
print __doc__
return 1
def METHOD_NAME(runner, verbosity, testcase):
"""Run testcase several times, tracking reference counts."""
import gc
import ctypes
ptc = ctypes._pointer_type_cache.copy()
cfc = ctypes._c_functype_cache.copy()
wfc = ctypes._win_functype_cache.copy()
# when searching for refcount leaks, we have to manually reset any
# caches that ctypes has.
def cleanup():
ctypes._pointer_type_cache = ptc.copy()
ctypes._c_functype_cache = cfc.copy()
ctypes._win_functype_cache = wfc.copy()
gc.collect()
test = unittest.makeSuite(testcase)
for i in range(5):
rc = sys.gettotalrefcount()
runner.run(test)
cleanup()
COUNT = 5
refcounts = [None] * COUNT
for i in range(COUNT):
rc = sys.gettotalrefcount()
runner.run(test)
cleanup()
refcounts[i] = sys.gettotalrefcount() - rc
if filter(None, refcounts):
print "%s leaks:\n\t" % testcase, refcounts
elif verbosity:
print "%s: ok." % testcase
class TestRunner(unittest.TextTestRunner):
def run(self, test, skipped):
"Run the given test case or test suite."
# Same as unittest.TextTestRunner.run, except that it reports
# skipped tests.
result = self._makeResult()
startTime = time.time()
test(result)
stopTime = time.time()
timeTaken = stopTime - startTime
result.printErrors()
self.stream.writeln(result.separator2)
run = result.testsRun
if _unavail: #skipped:
requested = _unavail.keys()
requested.sort()
self.stream.writeln("Ran %d test%s in %.3fs (%s module%s skipped)" %
(run, run != 1 and "s" or "", timeTaken,
len(skipped),
len(skipped) != 1 and "s" or ""))
self.stream.writeln("Unavailable resources: %s" % ", ".join(requested))
else:
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
if not result.wasSuccessful():
self.stream.write("FAILED (")
failed, errored = map(len, (result.failures, result.errors))
if failed:
self.stream.write("failures=%d" % failed)
if errored:
if failed: self.stream.write(", ")
self.stream.write("errors=%d" % errored)
self.stream.writeln(")")
else:
self.stream.writeln("OK")
return result
def main(*packages):
try:
opts, args = getopt.getopt(sys.argv[1:], "rqvu:x:")
except getopt.error:
return usage()
verbosity = 1
search_leaks = False
exclude = []
for flag, value in opts:
if flag == "-q":
verbosity -= 1
elif flag == "-v":
verbosity += 1
elif flag == "-r":
try:
sys.gettotalrefcount
except AttributeError:
print >> sys.stderr, "-r flag requires Python debug build"
return -1
search_leaks = True
elif flag == "-u":
use_resources.extend(value.split(","))
elif flag == "-x":
exclude.extend(value.split(","))
mask = "test_*.py"
if args:
mask = args[0]
for package in packages:
run_tests(package, mask, verbosity, search_leaks, exclude)
def run_tests(package, mask, verbosity, search_leaks, exclude):
skipped, testcases = get_tests(package, mask, verbosity, exclude)
runner = TestRunner(verbosity=verbosity)
suites = [unittest.makeSuite(o) for o in testcases]
suite = unittest.TestSuite(suites)
result = runner.run(suite, skipped)
if search_leaks:
# hunt for refcount leaks
runner = BasicTestRunner()
for t in testcases:
METHOD_NAME(runner, verbosity, t)
return bool(result.errors)
class BasicTestRunner:
def run(self, test):
result = unittest.TestResult()
test(result)
return result
|
1,333 |
get assessment output
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetAssessmentResult',
'AwaitableGetAssessmentResult',
'get_assessment',
'get_assessment_output',
]
@pulumi.output_type
class GetAssessmentResult:
"""
An assessment created for a group in the Migration project.
"""
def __init__(__self__, e_tag=None, id=None, name=None, properties=None, type=None):
if e_tag and not isinstance(e_tag, str):
raise TypeError("Expected argument 'e_tag' to be a str")
pulumi.set(__self__, "e_tag", e_tag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="eTag")
def e_tag(self) -> Optional[str]:
"""
For optimistic concurrency control.
"""
return pulumi.get(self, "e_tag")
@property
@pulumi.getter
def id(self) -> str:
"""
Path reference to this assessment. /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Migrate/assessmentProjects/{projectName}/groups/{groupName}/assessment/{assessmentName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Unique name of an assessment.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.AssessmentPropertiesResponse':
"""
Properties of the assessment.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of the object = [Microsoft.Migrate/assessmentProjects/groups/assessments].
"""
return pulumi.get(self, "type")
class AwaitableGetAssessmentResult(GetAssessmentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAssessmentResult(
e_tag=self.e_tag,
id=self.id,
name=self.name,
properties=self.properties,
type=self.type)
def get_assessment(assessment_name: Optional[str] = None,
group_name: Optional[str] = None,
project_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAssessmentResult:
"""
Get an existing assessment with the specified name. Returns a json object of type 'assessment' as specified in Models section.
Azure REST API version: 2019-10-01.
:param str assessment_name: Unique name of an assessment within a project.
:param str group_name: Unique name of a group within a project.
:param str project_name: Name of the Azure Migrate project.
:param str resource_group_name: Name of the Azure Resource Group that project is part of.
"""
__args__ = dict()
__args__['assessmentName'] = assessment_name
__args__['groupName'] = group_name
__args__['projectName'] = project_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:migrate:getAssessment', __args__, opts=opts, typ=GetAssessmentResult).value
return AwaitableGetAssessmentResult(
e_tag=pulumi.get(__ret__, 'e_tag'),
id=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
properties=pulumi.get(__ret__, 'properties'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_assessment)
def METHOD_NAME(assessment_name: Optional[pulumi.Input[str]] = None,
group_name: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAssessmentResult]:
"""
Get an existing assessment with the specified name. Returns a json object of type 'assessment' as specified in Models section.
Azure REST API version: 2019-10-01.
:param str assessment_name: Unique name of an assessment within a project.
:param str group_name: Unique name of a group within a project.
:param str project_name: Name of the Azure Migrate project.
:param str resource_group_name: Name of the Azure Resource Group that project is part of.
"""
...
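# --- Illustrative usage (not part of the generated code) ---
# A minimal, hedged sketch of calling get_assessment; the resource names below
# ("my-rg", "my-project", "my-group", "my-assessment") are placeholders chosen
# for illustration only.
# result = get_assessment(
#     resource_group_name="my-rg",
#     project_name="my-project",
#     group_name="my-group",
#     assessment_name="my-assessment")
# pulumi.export("assessmentId", result.id)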
|
1,334 |
print api input
|
# Copyright (c) <2021> Side Effects Software Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. The name of Side Effects Software may not be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY SIDE EFFECTS SOFTWARE "AS IS" AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
# NO EVENT SHALL SIDE EFFECTS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" An example script that uses the API and
HoudiniEngineV2.asyncprocessor.ProcessHDA. ProcessHDA is configured with the
asset to instantiate, as well as 2 inputs: a geometry input (a cube) and a
curve input (a helix).
ProcessHDA is then activated, upon which the asset will be instantiated,
inputs set, and cooked. The ProcessHDA class's on_post_processing() function is
overridden to fetch the input structure and log it. The other state/phase
functions (on_pre_instantiation(), on_post_instantiation(), etc.) are overridden
to simply log the function name, in order to observe progress in the log.
"""
import math
import unreal
from HoudiniEngineV2.asyncprocessor import ProcessHDA
_g_processor = None
class ProcessHDAExample(ProcessHDA):
@staticmethod
def METHOD_NAME(in_input):
print('\t\tInput type: {0}'.format(in_input.__class__))
print('\t\tbKeepWorldTransform: {0}'.format(in_input.keep_world_transform))
print('\t\tbImportAsReference: {0}'.format(in_input.import_as_reference))
if isinstance(in_input, unreal.HoudiniPublicAPIGeoInput):
print('\t\tbPackBeforeMerge: {0}'.format(in_input.pack_before_merge))
print('\t\tbExportLODs: {0}'.format(in_input.export_lo_ds))
print('\t\tbExportSockets: {0}'.format(in_input.export_sockets))
print('\t\tbExportColliders: {0}'.format(in_input.export_colliders))
elif isinstance(in_input, unreal.HoudiniPublicAPICurveInput):
print('\t\tbCookOnCurveChanged: {0}'.format(in_input.cook_on_curve_changed))
print('\t\tbAddRotAndScaleAttributesOnCurves: {0}'.format(in_input.add_rot_and_scale_attributes_on_curves))
input_objects = in_input.get_input_objects()
if not input_objects:
print('\t\tEmpty input!')
else:
print('\t\tNumber of objects in input: {0}'.format(len(input_objects)))
for idx, input_object in enumerate(input_objects):
print('\t\t\tInput object #{0}: {1}'.format(idx, input_object))
if hasattr(in_input, 'supports_transform_offset') and in_input.supports_transform_offset():
print('\t\t\tObject Transform Offset: {0}'.format(in_input.get_input_object_transform_offset(idx)))
if isinstance(input_object, unreal.HoudiniPublicAPICurveInputObject):
print('\t\t\tbClosed: {0}'.format(input_object.is_closed()))
print('\t\t\tCurveMethod: {0}'.format(input_object.get_curve_method()))
print('\t\t\tCurveType: {0}'.format(input_object.get_curve_type()))
print('\t\t\tReversed: {0}'.format(input_object.is_reversed()))
print('\t\t\tCurvePoints: {0}'.format(input_object.get_curve_points()))
def on_failure(self):
print('on_failure')
global _g_processor
_g_processor = None
def on_complete(self):
print('on_complete')
global _g_processor
_g_processor = None
def on_pre_instantiation(self):
print('on_pre_instantiation')
def on_post_instantiation(self):
print('on_post_instantiation')
def on_post_auto_cook(self, cook_success):
print('on_post_auto_cook, success = {0}'.format(cook_success))
def on_pre_process(self):
print('on_pre_process')
def on_post_processing(self):
print('on_post_processing')
# Fetch inputs, iterate over it and log
node_inputs = self.asset_wrapper.get_inputs_at_indices()
parm_inputs = self.asset_wrapper.get_input_parameters()
if not node_inputs:
print('No node inputs found!')
else:
print('Number of node inputs: {0}'.format(len(node_inputs)))
for input_index, input_wrapper in node_inputs.items():
print('\tInput index: {0}'.format(input_index))
self.METHOD_NAME(input_wrapper)
if not parm_inputs:
print('No parameter inputs found!')
else:
print('Number of parameter inputs: {0}'.format(len(parm_inputs)))
for parm_name, input_wrapper in parm_inputs.items():
print('\tInput parameter name: {0}'.format(parm_name))
self.METHOD_NAME(input_wrapper)
def on_post_auto_bake(self, bake_success):
print('on_post_auto_bake, success = {0}'.format(bake_success))
def get_test_hda_path():
return '/HoudiniEngine/Examples/hda/copy_to_curve_1_0.copy_to_curve_1_0'
def get_test_hda():
return unreal.load_object(None, get_test_hda_path())
def get_geo_asset_path():
return '/Engine/BasicShapes/Cube.Cube'
def get_geo_asset():
return unreal.load_object(None, get_geo_asset_path())
def build_inputs():
print('build_inputs')
# get the API singleton
houdini_api = unreal.HoudiniPublicAPIBlueprintLib.get_api()
node_inputs = {}
# Create a geo input
geo_input = houdini_api.create_empty_input(unreal.HoudiniPublicAPIGeoInput)
# Set the input objects/assets for this input
geo_object = get_geo_asset()
geo_input.set_input_objects((geo_object, ))
# store the input data to the HDA as node input 0
node_inputs[0] = geo_input
# Create a curve input
curve_input = houdini_api.create_empty_input(unreal.HoudiniPublicAPICurveInput)
# Create a curve wrapper/helper
curve_object = unreal.HoudiniPublicAPICurveInputObject(curve_input)
# Make it a Nurbs curve
curve_object.set_curve_type(unreal.HoudiniPublicAPICurveType.NURBS)
# Set the points of the curve, for this example we create a helix
# consisting of 100 points
curve_points = []
for i in range(100):
t = i / 20.0 * math.pi * 2.0
x = 100.0 * math.cos(t)
y = 100.0 * math.sin(t)
z = i
curve_points.append(unreal.Transform([x, y, z], [0, 0, 0], [1, 1, 1]))
curve_object.set_curve_points(curve_points)
# Set the curve wrapper as an input object
curve_input.set_input_objects((curve_object, ))
# Store the input data to the HDA as node input 1
node_inputs[1] = curve_input
return node_inputs
def run():
# Create the processor with preconfigured inputs
global _g_processor
_g_processor = ProcessHDAExample(
get_test_hda(), node_inputs=build_inputs())
# Activate the processor; this will start instantiation and then cook
if not _g_processor.activate():
unreal.log_warning('Activation failed.')
else:
unreal.log('Activated!')
if __name__ == '__main__':
run()
|
1,335 |
region 2d to vector 3d
|
# SPDX-FileCopyrightText: 2011-2023 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later
__all__ = (
"region_2d_to_vector_3d",
"region_2d_to_origin_3d",
"region_2d_to_location_3d",
"location_3d_to_region_2d",
)
def METHOD_NAME(region, rv3d, coord):
"""
Return a direction vector from the viewport at the specific 2d region
coordinate.
:arg region: region of the 3D viewport, typically bpy.context.region.
:type region: :class:`bpy.types.Region`
:arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
:type rv3d: :class:`bpy.types.RegionView3D`
:arg coord: 2d coordinates relative to the region:
(event.mouse_region_x, event.mouse_region_y) for example.
:type coord: 2d vector
:return: normalized 3d vector.
:rtype: :class:`mathutils.Vector`
"""
from mathutils import Vector
viewinv = rv3d.view_matrix.inverted()
if rv3d.is_perspective:
persinv = rv3d.perspective_matrix.inverted()
out = Vector((
(2.0 * coord[0] / region.width) - 1.0,
(2.0 * coord[1] / region.height) - 1.0,
-0.5
))
w = out.dot(persinv[3].xyz) + persinv[3][3]
view_vector = ((persinv @ out) / w) - viewinv.translation
else:
view_vector = -viewinv.col[2].xyz
view_vector.normalize()
return view_vector
def region_2d_to_origin_3d(region, rv3d, coord, *, clamp=None):
"""
Return the 3d view origin from the region relative 2d coords.
.. note::
Orthographic views have a less obvious origin,
the far clip is used to define the viewport near/far extents.
Since far clip can be a very large value,
the result may suffer from numeric precision issues.
To avoid this problem, you can optionally clamp the far clip to a
smaller value based on the data you're operating on.
:arg region: region of the 3D viewport, typically bpy.context.region.
:type region: :class:`bpy.types.Region`
:arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
:type rv3d: :class:`bpy.types.RegionView3D`
:arg coord: 2d coordinates relative to the region;
(event.mouse_region_x, event.mouse_region_y) for example.
:type coord: 2d vector
:arg clamp: Clamp the maximum far-clip value used.
(negative value will move the offset away from the view_location)
:type clamp: float or None
:return: The origin of the viewpoint in 3d space.
:rtype: :class:`mathutils.Vector`
"""
viewinv = rv3d.view_matrix.inverted()
if rv3d.is_perspective:
origin_start = viewinv.translation.copy()
else:
persmat = rv3d.perspective_matrix.copy()
dx = (2.0 * coord[0] / region.width) - 1.0
dy = (2.0 * coord[1] / region.height) - 1.0
persinv = persmat.inverted()
origin_start = (
(persinv.col[0].xyz * dx) +
(persinv.col[1].xyz * dy) +
persinv.translation
)
if clamp != 0.0:
if rv3d.view_perspective != 'CAMERA':
# this value is scaled to the far clip already
origin_offset = persinv.col[2].xyz
if clamp is not None:
if clamp < 0.0:
origin_offset.negate()
clamp = -clamp
if origin_offset.length > clamp:
origin_offset.length = clamp
origin_start -= origin_offset
return origin_start
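# A minimal sketch (not part of the module API): the two helpers above are
# commonly combined to build a picking ray from a mouse event. "region", "rv3d"
# and "coord" are assumed to come from an operator's context/event, as in the
# docstrings above.
def _example_mouse_ray(region, rv3d, coord):
    """Return (origin, direction) of the view ray under the 2d coordinate."""
    direction = METHOD_NAME(region, rv3d, coord)
    origin = region_2d_to_origin_3d(region, rv3d, coord)
    return origin, direction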
def region_2d_to_location_3d(region, rv3d, coord, depth_location):
"""
Return a 3d location from the region relative 2d coords, aligned with
*depth_location*.
:arg region: region of the 3D viewport, typically bpy.context.region.
:type region: :class:`bpy.types.Region`
:arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
:type rv3d: :class:`bpy.types.RegionView3D`
:arg coord: 2d coordinates relative to the region;
(event.mouse_region_x, event.mouse_region_y) for example.
:type coord: 2d vector
:arg depth_location: the returned vector's depth is aligned with this since
there is no defined depth with a 2d region input.
:type depth_location: 3d vector
:return: 3d location aligned with *depth_location*.
:rtype: :class:`mathutils.Vector`
"""
from mathutils import Vector
coord_vec = METHOD_NAME(region, rv3d, coord)
depth_location = Vector(depth_location)
origin_start = region_2d_to_origin_3d(region, rv3d, coord)
origin_end = origin_start + coord_vec
if rv3d.is_perspective:
from mathutils.geometry import intersect_line_plane
viewinv = rv3d.view_matrix.inverted()
view_vec = viewinv.col[2].copy()
return intersect_line_plane(
origin_start,
origin_end,
depth_location,
view_vec, 1,
)
else:
from mathutils.geometry import intersect_point_line
return intersect_point_line(
depth_location,
origin_start,
origin_end,
)[0]
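# A minimal sketch showing a typical use of region_2d_to_location_3d: move an
# object to the point under the mouse while keeping its current depth. "obj" is
# an assumed Blender object; this is an illustration, not part of the module.
def _example_move_under_mouse(region, rv3d, coord, obj):
    obj.location = region_2d_to_location_3d(region, rv3d, coord, obj.location)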
def location_3d_to_region_2d(region, rv3d, coord, *, default=None):
"""
Return the *region* relative 2d location of a 3d position.
:arg region: region of the 3D viewport, typically bpy.context.region.
:type region: :class:`bpy.types.Region`
:arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
:type rv3d: :class:`bpy.types.RegionView3D`
:arg coord: 3d world-space location.
:type coord: 3d vector
:arg default: Return this value if ``coord``
is behind the origin of a perspective view.
:return: 2d location
:rtype: :class:`mathutils.Vector` or ``default`` argument.
"""
from mathutils import Vector
prj = rv3d.perspective_matrix @ Vector((coord[0], coord[1], coord[2], 1.0))
if prj.w > 0.0:
width_half = region.width / 2.0
height_half = region.height / 2.0
return Vector((
width_half + width_half * (prj.x / prj.w),
height_half + height_half * (prj.y / prj.w),
))
else:
return default
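# A minimal sketch of projecting a world-space point back to region pixels,
# e.g. to draw a 2d label over a 3d object; returns the default (None here)
# when the point is behind a perspective view, as documented above.
# Illustration only.
def _example_label_position(region, rv3d, world_co):
    return location_3d_to_region_2d(region, rv3d, world_co, default=None)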
|
1,336 |
get error description
|
# Name: sfp.py, version: 1.0
#
# Description: Module contains the definitions of SFP related APIs
# for Nokia IXS 7215 platform.
#
# Copyright (c) 2023, Nokia
# All rights reserved.
#
try:
import os
from sonic_platform_base.sonic_xcvr.sfp_optoe_base import SfpOptoeBase
from sonic_py_common.logger import Logger
from sonic_py_common import device_info
from sonic_py_common.general import getstatusoutput_noshell
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
import subprocess as cmd
COPPER_TYPE = "COPPER"
SFP_TYPE = "SFP"
# SFP PORT numbers
SFP_PORT_START = 49
SFP_PORT_END = 52
CPLD_DIR = "/sys/bus/i2c/devices/0-0041/"
logger = Logger()
class Sfp(SfpOptoeBase):
"""
Nokia IXR-7215 Platform-specific Sfp refactor class
"""
instances = []
# Paths
PLATFORM_ROOT_PATH = "/usr/share/sonic/device"
PMON_HWSKU_PATH = "/usr/share/sonic/hwsku"
HOST_CHK_CMD = "docker > /dev/null 2>&1"
PLATFORM = "armhf-nokia_ixs7215_52x-r0"
HWSKU = "Nokia-7215"
port_to_i2c_mapping = 0
# def __init__(self, index, sfp_type, stub):
def __init__(self, index, sfp_type, eeprom_path, port_i2c_map):
SfpOptoeBase.__init__(self)
self.index = index
self.port_num = index
self.sfp_type = sfp_type
self.eeprom_path = eeprom_path
self.port_to_i2c_mapping = port_i2c_map
self.name = sfp_type + str(index-1)
self.port_name = sfp_type + str(index)
self.port_to_eeprom_mapping = {}
self.port_to_eeprom_mapping[index] = eeprom_path
self._version_info = device_info.get_sonic_version_info()
self.lastPresence = False
logger.log_debug("Sfp __init__ index {} setting name to {} and eeprom_path to {}".format(index, self.name, self.eeprom_path))
Sfp.instances.append(self)
def _read_sysfs_file(self, sysfs_file):
# On successful read, returns the value read from the given sysfs file;
# on failure, returns 'ERR'
rv = 'ERR'
if (not os.path.isfile(sysfs_file)):
return rv
try:
with open(sysfs_file, 'r') as fd:
rv = fd.read()
except Exception as e:
rv = 'ERR'
rv = rv.rstrip('\r\n')
rv = rv.lstrip(" ")
return rv
def get_eeprom_path(self):
return self.eeprom_path
def get_presence(self):
"""
Retrieves the presence
Returns:
bool: True if is present, False if not
"""
if self.sfp_type == COPPER_TYPE:
return False
sfpstatus = self._read_sysfs_file(CPLD_DIR+"sfp{}_present".format(self.index))
if sfpstatus == '1':
return True
return False
def get_name(self):
"""
Retrieves the name of the device
Returns:
string: The name of the device
"""
return self.name
def get_position_in_parent(self):
"""
Retrieves 1-based relative physical position in parent device.
Returns:
integer: The 1-based relative physical position in parent device or
-1 if cannot determine the position
"""
return -1
def is_replaceable(self):
"""
Indicate whether this device is replaceable.
Returns:
bool: True if it is replaceable.
"""
if self.sfp_type == "SFP":
return True
else:
return False
def _get_error_code(self):
"""
Get error code of the SFP module
Returns:
The error code
"""
raise NotImplementedError
def METHOD_NAME(self):
"""
Get error description
Args:
error_code: The error code returned by _get_error_code
Returns:
The error description
"""
if not self.get_presence():
error_description = self.SFP_STATUS_UNPLUGGED
else:
error_description = self.SFP_STATUS_OK
return error_description
def get_reset_status(self):
"""
Retrieves the reset status of SFP
Returns:
A Boolean, True if reset enabled, False if disabled
"""
if self.sfp_type == COPPER_TYPE:
return False
if self.sfp_type == SFP_TYPE:
return False
def get_status(self):
"""
Retrieves the operational status of the device
"""
reset = self.get_reset_status()
if reset is True:
status = False
else:
status = True
return status
def reset(self):
"""
Reset SFP.
Returns:
A boolean, True if successful, False if not
"""
# RJ45 and SFP ports not resettable
return False
def set_lpmode(self, lpmode):
"""
Sets the lpmode (low power mode) of SFP
Args:
lpmode: A Boolean, True to enable lpmode, False to disable it
Note : lpmode can be overridden by set_power_override
Returns:
A boolean, True if lpmode is set successfully, False if not
"""
if self.sfp_type == COPPER_TYPE:
return False
if self.sfp_type == SFP_TYPE:
return False
def get_lpmode(self):
"""
Retrieves the lpmode (low power mode) status of this SFP
Returns:
A Boolean, True if lpmode is enabled, False if disabled
"""
if self.sfp_type == COPPER_TYPE:
return False
if self.sfp_type == SFP_TYPE:
return False
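# A hedged usage sketch: construct an Sfp for one of the optical front-panel
# ports and query it. The eeprom sysfs path and i2c index below are
# assumptions chosen for illustration only.
# sfp = Sfp(SFP_PORT_START, SFP_TYPE, "/sys/bus/i2c/devices/3-0050/eeprom", 3)
# if sfp.get_presence():
#     print(sfp.get_name(), sfp.METHOD_NAME())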
|
1,337 |
check run preconditions
|
import uuid
import os
from datetime import datetime
from django.db import transaction
from api.management.data_script import OperationalDataScript
from api.models.CompliancePeriod import CompliancePeriod
from api.models.Organization import Organization
from api.models.OrganizationActionsType import OrganizationActionsType
from api.models.OrganizationBalance import OrganizationBalance
from api.models.OrganizationStatus import OrganizationStatus
from api.models.OrganizationType import OrganizationType
from api.models.Role import Role
from api.models.User import User
from api.models.UserRole import UserRole
class LoadFTData(OperationalDataScript):
comment = 'Load BDD functional test users'
is_revertable = False
_usernames = ['bdd-fuelsupplier1',
'bdd-fuelsupplier2',
'bdd-analyst',
'bdd-director',
'bdd-fuelsupplier1admin',
'bdd-admin']
_orgs = ['TFRS Fantastic Fuels', 'TFRS IMBeing Green']
def METHOD_NAME(self):
for username in self._usernames:
if User.objects.filter(username=username).exists():
print('Found an existing user {}'.format(username))
return False
for org in self._orgs:
if Organization.objects.filter(name=org).exists():
print('Found an existing organization {}'.format(org))
return False
return True
@transaction.atomic
def run(self):
Organization(name=self._orgs[0],
actions_type=OrganizationActionsType.objects.get_by_natural_key("Buy And Sell"),
type=OrganizationType.objects.get_by_natural_key("Part3FuelSupplier"),
status=OrganizationStatus.objects.get_by_natural_key('Active'), id=2).save()
Organization(name=self._orgs[1],
actions_type=OrganizationActionsType.objects.get_by_natural_key("Buy And Sell"),
type=OrganizationType.objects.get_by_natural_key("Part3FuelSupplier"),
status=OrganizationStatus.objects.get_by_natural_key('Active'), id=3).save()
OrganizationBalance(organization=Organization.objects.get_by_natural_key(self._orgs[0]), credit_trade=None,
validated_credits=1000, effective_date=datetime.today().strftime('%Y-%m-%d')).save()
OrganizationBalance(organization=Organization.objects.get_by_natural_key(self._orgs[1]), credit_trade=None,
validated_credits=1000, effective_date=datetime.today().strftime('%Y-%m-%d')).save()
User( is_superuser='f', username='bdd-fuelsupplier1',
email='[email protected]', first_name='fuelsupplier1', last_name='bdd',
is_staff='f', is_active='t', display_name='bdd-fuelsupplier1',
organization=Organization.objects.get_by_natural_key(self._orgs[0])).save()
User( is_superuser='f', username='bdd-fuelsupplier2',
email='[email protected]', first_name='fuelsupplier2', last_name='bdd',
is_staff='f', is_active='t', display_name='bdd-fuelsupplier2',
organization=Organization.objects.get_by_natural_key(self._orgs[1])).save()
User( is_superuser='f', username='bdd-analyst',
email='[email protected]', first_name='analyst', last_name='bdd',
is_staff='f', is_active='t', display_name='bdd-analyst',
organization=Organization.objects.get_by_natural_key("Government of British Columbia")).save()
User( is_superuser='f', username='bdd-director',
email='[email protected]', first_name='director', last_name='bdd',
is_staff='f', is_active='t', display_name='bdd-director',
organization=Organization.objects.get_by_natural_key("Government of British Columbia")).save()
User( is_superuser='f', username='bdd-fuelsupplier1admin',
email='[email protected]', first_name='fuelsupplier1admin', last_name='bdd',
is_staff='f', is_active='t', display_name='bdd-fuelsupplier1admin',
organization=Organization.objects.get_by_natural_key(self._orgs[0])).save()
User( is_superuser='f', username='bdd-admin',
email='[email protected]', first_name='admin', last_name='bdd',
is_staff='f', is_active='t', display_name='bdd-admin',
organization=Organization.objects.get_by_natural_key("Government of British Columbia")).save()
UserRole(user=User.objects.get(username='bdd-fuelsupplier1'), role=Role.objects.get_by_natural_key('FSManager')).save()
UserRole(user=User.objects.get(username='bdd-fuelsupplier2'), role=Role.objects.get_by_natural_key('FSManager')).save()
UserRole(user=User.objects.get(username='bdd-analyst'), role=Role.objects.get_by_natural_key('Admin')).save()
UserRole(user=User.objects.get(username='bdd-analyst'), role=Role.objects.get_by_natural_key('GovUser')).save()
UserRole(user=User.objects.get(username='bdd-director'), role=Role.objects.get_by_natural_key('GovDirector')).save()
UserRole(user=User.objects.get(username='bdd-fuelsupplier1admin'), role=Role.objects.get_by_natural_key('FSUser')).save()
UserRole(user=User.objects.get(username='bdd-fuelsupplier1admin'), role=Role.objects.get_by_natural_key('FSManager')).save()
UserRole(user=User.objects.get(username='bdd-fuelsupplier1admin'), role=Role.objects.get_by_natural_key('FSAdmin')).save()
UserRole(user=User.objects.get(username='bdd-admin'), role=Role.objects.get_by_natural_key('Admin')).save()
UserRole(user=User.objects.get(username='bdd-admin'), role=Role.objects.get_by_natural_key('GovUser')).save()
UserRole(user=User.objects.get(username='bdd-admin'), role=Role.objects.get_by_natural_key('GovDirector')).save()
script_class = LoadFTData
|
1,338 |
test client with url ends with table
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import time
import pytest
from devtools_testutils import AzureRecordedTestCase, recorded_by_proxy
from azure.data.tables import (
TableServiceClient,
TableAnalyticsLogging,
TableMetrics,
TableRetentionPolicy,
TableCorsRule,
)
from azure.core.exceptions import ResourceNotFoundError, HttpResponseError
from _shared.testcase import TableTestCase
from preparers import tables_decorator
# ------------------------------------------------------------------------------
class TestTableServiceProperties(AzureRecordedTestCase, TableTestCase):
@tables_decorator
@recorded_by_proxy
def test_table_service_properties(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
url = self.account_url(tables_storage_account_name, "table")
tsc = TableServiceClient(url, credential=tables_primary_storage_account_key)
# Act
resp = tsc.set_service_properties(
analytics_logging=TableAnalyticsLogging(),
hour_metrics=TableMetrics(),
minute_metrics=TableMetrics(),
cors=list(),
)
# Assert
assert resp is None
if self.is_live:
time.sleep(45)
self._assert_properties_default(tsc.get_service_properties())
# --Test cases per feature ---------------------------------------
@tables_decorator
@recorded_by_proxy
def test_set_logging(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
url = self.account_url(tables_storage_account_name, "table")
tsc = TableServiceClient(url, credential=tables_primary_storage_account_key)
logging = TableAnalyticsLogging(
read=True, write=True, delete=True, retention_policy=TableRetentionPolicy(enabled=True, days=5)
)
# Act
tsc.set_service_properties(analytics_logging=logging)
# Assert
if self.is_live:
time.sleep(45)
received_props = tsc.get_service_properties()
self._assert_logging_equal(received_props["analytics_logging"], logging)
@tables_decorator
@recorded_by_proxy
def test_set_hour_metrics(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
url = self.account_url(tables_storage_account_name, "table")
tsc = TableServiceClient(url, credential=tables_primary_storage_account_key)
hour_metrics = TableMetrics(
enabled=True, include_apis=True, retention_policy=TableRetentionPolicy(enabled=True, days=5)
)
# Act
tsc.set_service_properties(hour_metrics=hour_metrics)
# Assert
if self.is_live:
time.sleep(45)
received_props = tsc.get_service_properties()
self._assert_metrics_equal(received_props["hour_metrics"], hour_metrics)
@tables_decorator
@recorded_by_proxy
def test_set_minute_metrics(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
url = self.account_url(tables_storage_account_name, "table")
tsc = TableServiceClient(url, credential=tables_primary_storage_account_key)
minute_metrics = TableMetrics(
enabled=True, include_apis=True, retention_policy=TableRetentionPolicy(enabled=True, days=5)
)
# Act
tsc.set_service_properties(minute_metrics=minute_metrics)
# Assert
if self.is_live:
time.sleep(45)
received_props = tsc.get_service_properties()
self._assert_metrics_equal(received_props["minute_metrics"], minute_metrics)
@tables_decorator
@recorded_by_proxy
def test_set_cors(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
url = self.account_url(tables_storage_account_name, "table")
tsc = TableServiceClient(url, credential=tables_primary_storage_account_key)
cors_rule1 = TableCorsRule(["www.xyz.com"], ["GET"])
allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"]
allowed_methods = ["GET", "PUT"]
max_age_in_seconds = 500
exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
cors_rule2 = TableCorsRule(allowed_origins, allowed_methods)
cors_rule2.max_age_in_seconds = max_age_in_seconds
cors_rule2.exposed_headers = exposed_headers
cors_rule2.allowed_headers = allowed_headers
cors = [cors_rule1, cors_rule2]
# Act
tsc.set_service_properties(cors=cors)
# Assert
if self.is_live:
time.sleep(45)
received_props = tsc.get_service_properties()
self._assert_cors_equal(received_props["cors"], cors)
# --Test cases for errors ---------------------------------------
@tables_decorator
@recorded_by_proxy
def test_too_many_cors_rules(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
tsc = TableServiceClient(
self.account_url(tables_storage_account_name, "table"), credential=tables_primary_storage_account_key
)
cors = []
for i in range(0, 6):
cors.append(TableCorsRule(["www.xyz.com"], ["GET"]))
# Assert
pytest.raises(HttpResponseError, tsc.set_service_properties, cors=cors)
@tables_decorator
@recorded_by_proxy
def test_retention_too_long(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
tsc = TableServiceClient(
self.account_url(tables_storage_account_name, "table"), credential=tables_primary_storage_account_key
)
minute_metrics = TableMetrics(
enabled=True, include_apis=True, retention_policy=TableRetentionPolicy(enabled=True, days=366)
)
# Assert
pytest.raises(HttpResponseError, tsc.set_service_properties, minute_metrics=minute_metrics)
@tables_decorator
@recorded_by_proxy
def METHOD_NAME(
self, tables_storage_account_name, tables_primary_storage_account_key
):
url = self.account_url(tables_storage_account_name, "table")
table_name = self.get_resource_name("mytable")
invalid_url = url + "/" + table_name
tsc = TableServiceClient(invalid_url, credential=tables_primary_storage_account_key)
with pytest.raises(ResourceNotFoundError) as exc:
tsc.create_table(table_name)
assert ("table specified does not exist") in str(exc.value)
assert ("Please check your account URL.") in str(exc.value)
with pytest.raises(ResourceNotFoundError) as exc:
tsc.create_table_if_not_exists(table_name)
assert ("table specified does not exist") in str(exc.value)
assert ("Please check your account URL.") in str(exc.value)
with pytest.raises(HttpResponseError) as exc:
tsc.set_service_properties(analytics_logging=TableAnalyticsLogging(write=True))
assert ("URI is invalid") in str(exc.value)
assert ("Please check your account URL.") in str(exc.value)
with pytest.raises(HttpResponseError) as exc:
tsc.get_service_properties()
assert ("URI is invalid") in str(exc.value)
assert ("Please check your account URL.") in str(exc.value)
tsc.delete_table(table_name)
class TestTableUnitTest(TableTestCase):
def test_retention_no_days(self):
# Assert
pytest.raises(ValueError, TableRetentionPolicy, enabled=True)
|
1,339 |
load elements
|
#!/usr/bin/python3 -u
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import logging
import os
import sys
from argparse import ArgumentParser
from subprocess import PIPE, run
from typing import Optional, Tuple
from reportclient import (_, RETURN_OK, RETURN_FAILURE)
import report
FILENAME_DUPHASH = "duphash"
FILENAME_OSINFO = "os_info"
OSINFO_BUGZILLA_PRODUCT = "REDHAT_BUGZILLA_PRODUCT="
OSINFO_BUGZILLA_PRODUCT_VERSION = "REDHAT_BUGZILLA_PRODUCT_VERSION="
logging.basicConfig(format="%(name)s [%(levelname)s] %(message)s",
level=logging.WARNING)
logger = logging.getLogger("abrt-action-find-bodhi-update")
def METHOD_NAME(dump_dir_path: str) -> Tuple[str, Optional[str], Optional[str]]:
dump_dir = report.dd_opendir(dump_dir_path, report.DD_OPEN_READONLY)
if not dump_dir:
logger.error(_("Cannot open problem directory '%s'"), dump_dir_path)
sys.exit(RETURN_FAILURE)
dd_load_flags = (report.DD_LOAD_TEXT_RETURN_NULL_ON_FAILURE |
report.DD_FAIL_QUIETLY_ENOENT)
duphash = dump_dir.load_text(FILENAME_DUPHASH, dd_load_flags)
if not duphash:
logger.error(_("Problem directory is missing file '%s'"), FILENAME_DUPHASH)
dump_dir.close()
sys.exit(RETURN_FAILURE)
os_info = dump_dir.load_text(FILENAME_OSINFO, dd_load_flags)
dump_dir.close()
if not os_info:
logger.error(_("Problem directory is missing file '%s'"), FILENAME_OSINFO)
sys.exit(RETURN_FAILURE)
# get Bugzilla Product and Version from os_info
product: Optional[str] = os.getenv("Bugzilla_Product")
version: Optional[str] = None
for line in os_info.split("\n"):
if not product and OSINFO_BUGZILLA_PRODUCT in line:
product = parse_os_release_line(line)
if OSINFO_BUGZILLA_PRODUCT_VERSION in line:
version = parse_os_release_line(line)
return duphash, product, version
def parse_os_release_line(line: str) -> str:
"""Parse key-value line and returns value"""
return line.split("=")[1].strip().strip('"')
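# Illustration of the helper above (the input line is a hypothetical example):
# >>> parse_os_release_line('REDHAT_BUGZILLA_PRODUCT="Fedora"')
# 'Fedora'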
if __name__ == "__main__":
arg_parser = ArgumentParser(
description="Find Bodhi updates based on ABRT's problem directory. "
"This tool reads duphash file in problem directory and "
"searches for new updates according to the crash data.",
epilog="environment variables: *Bugzilla_Product* -- Product bug "
"field value. Useful if you need a product different from "
"the one in PROBLEM_DIR/os_info.")
arg_parser.add_argument("-d", "--problem-dir",
type=str, default=".",
help="Path to problem directory")
arg_parser.add_argument("-b", "--without-bodhi",
action="store_true", dest="without_bodhi", default=False,
help="Run without abrt-bodhi. Prints only Bugzilla bug id of duplicate "
"if it exists.")
arg_parser.add_argument("-v", "--verbose",
action="count", dest="verbose", default=0,
help="Be verbose")
args = arg_parser.parse_args()
try:
dump_dir_path = os.path.abspath(args.problem_dir)
except FileNotFoundError as ex:
logging.error(_("Problem directory error: %s"), ex)
sys.exit(RETURN_FAILURE)
ABRT_VERBOSE = os.getenv("ABRT_VERBOSE")
if ABRT_VERBOSE:
try:
args.verbose = int(ABRT_VERBOSE)
except ValueError:
pass
if args.verbose > 0:
logger.setLevel(logging.INFO)
duphash, product, version = METHOD_NAME(dump_dir_path)
if product:
logger.info(_("Using product %s."), product)
else:
logger.info(_("Using product '%s' from /etc/os-release."), OSINFO_BUGZILLA_PRODUCT)
if version:
logger.info(_("Using product version %s."), version)
# Find bugzilla bug with abrt_hash: == $duphash_content and product ==
# $product, if OSINFO_BUGZILLA_PRODUCT from crash's os_info doesn't exist,
# the OSINFO_BUGZILLA_PRODUCT from /etc/os-release is used
cmdline = ["reporter-bugzilla", "-h", duphash]
if product:
cmdline.extend(["-p", product])
proc = run(cmdline, stdout=PIPE, check=False, encoding="utf-8")
if proc.returncode:
logger.error(_("Search for duplicate bugs failed: %s"), proc.stderr)
sys.exit(RETURN_FAILURE)
bug_id = proc.stdout.strip()
if bug_id:
logger.warning(_("Duplicate Bugzilla bug '#%s' was found"), bug_id)
else:
logger.info(_("There is no Bugzilla bug with 'abrt_hash:%s'"), duphash)
sys.exit(RETURN_OK)
if version and "rawhide" in version.lower():
logger.error(_("abrt-bodhi does not support product version 'Rawhide'"))
sys.exit(RETURN_OK)
if not args.without_bodhi:
cmdline = ["abrt-bodhi", "-b", bug_id, "-d", dump_dir_path]
if version:
cmdline.extend(["-r", version])
run(cmdline, check=False)
sys.exit(RETURN_OK)
|
1,340 |
get plan original
|
## @package predictor_py_utils
# Module caffe2.python.predictor.predictor_py_utils
from caffe2.python import core, scope
def create_predict_net(predictor_export_meta):
"""
Return the input prediction net.
"""
# Construct a new net to clear the existing settings.
net = core.Net(predictor_export_meta.predict_net.name or "predict")
net.Proto().op.extend(predictor_export_meta.predict_net.op)
net.Proto().partition_info.extend(predictor_export_meta.predict_net.partition_info)
net.Proto().external_input.extend(
predictor_export_meta.inputs + predictor_export_meta.parameters
)
net.Proto().external_output.extend(predictor_export_meta.outputs)
net.Proto().arg.extend(predictor_export_meta.predict_net.arg)
if predictor_export_meta.net_type is not None:
net.Proto().type = predictor_export_meta.net_type
if predictor_export_meta.num_workers is not None:
net.Proto().num_workers = predictor_export_meta.num_workers
return net.Proto()
def create_predict_init_net(ws, predictor_export_meta):
"""
Return an initialization net that zero-fill all the input and
output blobs, using the shapes from the provided workspace. This is
necessary as there is no shape inference functionality in Caffe2.
"""
net = core.Net("predict-init")
def zero_fill(blob):
shape = predictor_export_meta.shapes.get(blob)
if shape is None:
if blob not in ws.blobs:
raise Exception(
"{} not in workspace but needed for shape: {}".format(
blob, ws.blobs
)
)
shape = ws.blobs[blob].fetch().shape
# Explicitly null-out the scope so users (e.g. PredictorGPU)
# can control (at a Net-global level) the DeviceOption of
# these filling operators.
with scope.EmptyDeviceScope():
net.ConstantFill([], blob, shape=shape, value=0.0)
external_blobs = predictor_export_meta.inputs + predictor_export_meta.outputs
for blob in external_blobs:
zero_fill(blob)
net.Proto().external_input.extend(external_blobs)
if predictor_export_meta.extra_init_net:
net.AppendNet(predictor_export_meta.extra_init_net)
# Add the model_id in the predict_net to the init_net
AddModelIdArg(predictor_export_meta, net.Proto())
return net.Proto()
def get_comp_name(string, name):
if name:
return string + "_" + name
return string
def to_first_match_dict(kv_list):
"""
Construct dict from kv_list
"""
d = {}
for item in kv_list:
if item.key not in d:
d[item.key] = item.value
return d
def _ProtoMapGet(field, key):
"""
Given the key, get the value of the repeated field.
Helper function used by protobuf since it doesn't have map construct
"""
for v in field:
if v.key == key:
return v.value
return None
def GetPlan(meta_net_def, key):
return _ProtoMapGet(meta_net_def.plans, key)
def METHOD_NAME(meta_net_def, key):
return _ProtoMapGet(meta_net_def.plans, key)
def GetBlobs(meta_net_def, key):
blobs = _ProtoMapGet(meta_net_def.blobs, key)
if blobs is None:
return []
return blobs
def GetBlobsByTypePrefix(meta_net_def, blob_type_prefix):
blob_map = {}
for b in meta_net_def.blobs:
if b.key.startswith(blob_type_prefix):
for blob in b.value:
if blob not in blob_map:
blob_map[blob] = len(blob_map)
return sorted(blob_map, key=lambda blob: blob_map[blob])
def GetNet(meta_net_def, key):
return _ProtoMapGet(meta_net_def.nets, key)
def GetNetOriginal(meta_net_def, key):
return _ProtoMapGet(meta_net_def.nets, key)
def GetApplicationSpecificInfo(meta_net_def, key):
return _ProtoMapGet(meta_net_def.applicationSpecificInfo, key)
def GetApplicationSpecificInfoDict(meta_net_def):
return to_first_match_dict(meta_net_def.applicationSpecificInfo)
def AddBlobs(meta_net_def, blob_name, blob_def):
blobs = _ProtoMapGet(meta_net_def.blobs, blob_name)
if blobs is None:
blobs = meta_net_def.blobs.add()
blobs.key = blob_name
blobs = blobs.value
for blob in blob_def:
blobs.append(blob)
def ReplaceBlobs(meta_net_def, blob_name, blob_def):
blobs = _ProtoMapGet(meta_net_def.blobs, blob_name)
assert blobs is not None, "The blob_name:{} does not exist".format(blob_name)
del blobs[:]
for blob in blob_def:
blobs.append(blob)
def AddPlan(meta_net_def, plan_name, plan_def):
meta_net_def.plans.add(key=plan_name, value=plan_def)
def AddNet(meta_net_def, net_name, net_def):
meta_net_def.nets.add(key=net_name, value=net_def)
def SetBlobsOrder(meta_net_def, blobs_order):
for blob in blobs_order:
meta_net_def.blobsOrder.append(blob)
def SetPreLoadBlobs(meta_net_def, pre_load_blobs):
for blob in pre_load_blobs:
meta_net_def.preLoadBlobs.append(blob)
def SetRequestOnlyEmbeddings(meta_net_def, request_only_embeddings):
for blob in request_only_embeddings:
meta_net_def.requestOnlyEmbeddings.append(blob)
def GetBlobsOrder(meta_net_def):
return meta_net_def.blobsOrder
def SetTensorBoundShapes(meta_net_def, tensor_bound_shapes):
meta_net_def.tensorBoundShapes.CopyFrom(tensor_bound_shapes)
def SetAOTConfig(meta_net_def, aot_config):
meta_net_def.aotConfig.CopyFrom(aot_config)
def GetArgumentByName(net_def, arg_name):
for arg in net_def.arg:
if arg.name == arg_name:
return arg
return None
def AddModelIdArg(meta_net_def, net_def):
"""Takes the model_id from the predict_net of meta_net_def (if it is
populated) and adds it to the net_def passed in. This is intended to be
called on init_nets, as their model_id is not populated by default, but
should be the same as that of the predict_net
"""
# Get model_id from the predict_net, assuming it's an integer
model_id = GetArgumentByName(meta_net_def.predict_net, "model_id")
if model_id is None:
return
model_id = model_id.i
# If there's another model_id on the net, replace it with the new one
old_id = GetArgumentByName(net_def, "model_id")
if old_id is not None:
old_id.i = model_id
return
# Add as an integer argument, this is also assumed above
arg = net_def.arg.add()
arg.name = "model_id"
arg.i = model_id
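# A hedged usage sketch of the map-style helpers above; "meta_net_def" is
# assumed to be a MetaNetDef protobuf produced by the predictor exporter, and
# "my_net" is a hypothetical net name. Illustration only.
# net_def = core.Net("my_net").Proto()
# AddNet(meta_net_def, "my_net", net_def)
# fetched = GetNet(meta_net_def, "my_net")  # returns the stored NetDef, or None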
|
1,341 |
tag
|
import os
import re
import subprocess
import sys
from contextlib import contextmanager
FORBIDDEN_CODECOV_FLAG_CHARS = re.compile(r'[^\w\.\-]')
class GoModule:
"""
A Go module abstraction.
independent specifies whether this modules is supposed to exist independently of the datadog-agent module.
If True, a check will run to ensure this is true.
"""
def __init__(
self,
path,
targets=None,
condition=lambda: True,
should_tag=True,
importable=True,
independent=False,
lint_targets=None,
):
self.path = path
self.targets = targets if targets else ["."]
self.lint_targets = lint_targets if lint_targets else self.targets
self.condition = condition
self.should_tag = should_tag
# HACK: Workaround for modules that can be tested, but not imported (eg. gohai), because
# they define a main package
# A better solution would be to automatically detect if a module contains a main package,
# at the cost of spending some time parsing the module.
self.importable = importable
self.independent = independent
self._dependencies = None
def __version(self, agent_version):
"""Return the module version for a given Agent version.
>>> mods = [GoModule("."), GoModule("pkg/util/log")]
>>> [mod.__version("7.27.0") for mod in mods]
["v7.27.0", "v0.27.0"]
"""
if self.path == ".":
return "v" + agent_version
return "v0" + agent_version[1:]
def __compute_dependencies(self):
"""
Computes the list of github.com/DataDog/datadog-agent/ dependencies of the module.
"""
prefix = "github.com/DataDog/datadog-agent/"
base_path = os.getcwd()
mod_parser_path = os.path.join(base_path, "internal", "tools", "modparser")
if not os.path.isdir(mod_parser_path):
raise Exception(f"Cannot find go.mod parser in {mod_parser_path}")
try:
output = subprocess.check_output(
["go", "run", ".", "-path", os.path.join(base_path, self.path), "-prefix", prefix],
cwd=mod_parser_path,
).decode("utf-8")
except subprocess.CalledProcessError as e:
print(f"Error while calling go.mod parser: {e.output}")
raise e
# Remove github.com/DataDog/datadog-agent/ from each line
return [line[len(prefix) :] for line in output.strip().splitlines()]
# FIXME: Change when Agent 6 and Agent 7 releases are decoupled
def METHOD_NAME(self, agent_version):
"""Return the module tag name for a given Agent version.
>>> mods = [GoModule("."), GoModule("pkg/util/log")]
>>> [mod.tag("7.27.0") for mod in mods]
[["6.27.0", "7.27.0"], ["pkg/util/log/v0.27.0"]]
"""
if self.path == ".":
return ["6" + agent_version[1:], "7" + agent_version[1:]]
return [f"{self.path}/{self.__version(agent_version)}"]
def codecov_path(self):
"""Return the path of the Go module, normalized to satisfy Codecov
restrictions on flags.
https://docs.codecov.com/docs/flags
"""
if self.path == ".":
return "main"
return re.sub(FORBIDDEN_CODECOV_FLAG_CHARS, '_', self.path)
def full_path(self):
"""Return the absolute path of the Go module."""
return os.path.abspath(self.path)
def go_mod_path(self):
"""Return the absolute path of the Go module go.mod file."""
return self.full_path() + "/go.mod"
@property
def dependencies(self):
if not self._dependencies:
self._dependencies = self.__compute_dependencies()
return self._dependencies
@property
def import_path(self):
"""Return the Go import path of the Go module
>>> mods = [GoModule("."), GoModule("pkg/util/log")]
>>> [mod.import_path for mod in mods]
["github.com/DataDog/datadog-agent", "github.com/DataDog/datadog-agent/pkg/util/log"]
"""
path = "github.com/DataDog/datadog-agent"
if self.path != ".":
path += "/" + self.path
return path
def dependency_path(self, agent_version):
"""Return the versioned dependency path of the Go module
>>> mods = [GoModule("."), GoModule("pkg/util/log")]
>>> [mod.dependency_path("7.27.0") for mod in mods]
["github.com/DataDog/[email protected]", "github.com/DataDog/datadog-agent/pkg/util/[email protected]"]
"""
return f"{self.import_path}@{self.__version(agent_version)}"
DEFAULT_MODULES = {
".": GoModule(
".",
targets=["./pkg", "./cmd", "./comp"],
),
"internal/tools": GoModule("internal/tools", condition=lambda: False, should_tag=False),
"internal/tools/proto": GoModule("internal/tools/proto", condition=lambda: False, should_tag=False),
"internal/tools/modparser": GoModule("internal/tools/modparser", condition=lambda: False, should_tag=False),
"test/e2e/containers/otlp_sender": GoModule(
"test/e2e/containers/otlp_sender", condition=lambda: False, should_tag=False
),
"test/new-e2e": GoModule(
"test/new-e2e",
independent=True,
targets=["./pkg/runner", "./pkg/utils/e2e/client"],
lint_targets=["."],
),
"test/fakeintake": GoModule("test/fakeintake", independent=True),
"pkg/obfuscate": GoModule("pkg/obfuscate", independent=True),
"pkg/gohai": GoModule("pkg/gohai", independent=True, importable=False),
"pkg/proto": GoModule("pkg/proto", independent=True),
"pkg/trace": GoModule("pkg/trace", independent=True),
"pkg/security/secl": GoModule("pkg/security/secl", independent=True),
"pkg/remoteconfig/state": GoModule("pkg/remoteconfig/state", independent=True),
"pkg/util/cgroups": GoModule("pkg/util/cgroups", independent=True, condition=lambda: sys.platform == "linux"),
"pkg/util/log": GoModule("pkg/util/log", independent=True),
"pkg/util/pointer": GoModule("pkg/util/pointer", independent=True),
"pkg/util/scrubber": GoModule("pkg/util/scrubber", independent=True),
}
MAIN_TEMPLATE = """package main
import (
{imports}
)
func main() {{}}
"""
PACKAGE_TEMPLATE = ' _ "{}"'
@contextmanager
def generate_dummy_package(ctx, folder):
"""
Return a generator-iterator when called.
Allows us to wrap this function with a "with" statement to delete the created dummy package afterwards.
"""
try:
import_paths = []
for mod in DEFAULT_MODULES.values():
if mod.path != "." and mod.condition() and mod.importable:
import_paths.append(mod.import_path)
os.mkdir(folder)
with ctx.cd(folder):
print("Creating dummy 'main.go' file... ", end="")
with open(os.path.join(ctx.cwd, 'main.go'), 'w') as main_file:
main_file.write(
MAIN_TEMPLATE.format(imports="\n".join(PACKAGE_TEMPLATE.format(path) for path in import_paths))
)
print("Done")
ctx.run("go mod init example.com/testmodule")
for mod in DEFAULT_MODULES.values():
if mod.path != ".":
ctx.run(f"go mod edit -require={mod.dependency_path('0.0.0')}")
ctx.run(f"go mod edit -replace {mod.import_path}=../{mod.path}")
# yield folder waiting for a "with" block to be executed (https://docs.python.org/3/library/contextlib.html)
yield folder
# the generator is then resumed here after the "with" block is exited
finally:
# delete test_folder to avoid FileExistsError while running this task again
ctx.run(f"rm -rf ./{folder}")
|
1,342 |
unwrap
|
import sys
from collections.abc import Callable, Iterable, Mapping, Sequence
from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload
from typing_extensions import Literal, TypeAlias
if sys.version_info >= (3, 9):
from types import GenericAlias
__all__ = [
"urlparse",
"urlunparse",
"urljoin",
"urldefrag",
"urlsplit",
"urlunsplit",
"urlencode",
"parse_qs",
"parse_qsl",
"quote",
"quote_plus",
"quote_from_bytes",
"unquote",
"unquote_plus",
"unquote_to_bytes",
"DefragResult",
"ParseResult",
"SplitResult",
"DefragResultBytes",
"ParseResultBytes",
"SplitResultBytes",
]
uses_relative: list[str]
uses_netloc: list[str]
uses_params: list[str]
non_hierarchical: list[str]
uses_query: list[str]
uses_fragment: list[str]
scheme_chars: str
if sys.version_info < (3, 11):
MAX_CACHE_SIZE: int
class _ResultMixinStr:
def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ...
class _ResultMixinBytes:
def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ...
class _NetlocResultMixinBase(Generic[AnyStr]):
@property
def username(self) -> AnyStr | None: ...
@property
def password(self) -> AnyStr | None: ...
@property
def hostname(self) -> AnyStr | None: ...
@property
def port(self) -> int | None: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ...
class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ...
class _DefragResultBase(NamedTuple, Generic[AnyStr]):
url: AnyStr
fragment: AnyStr
class _SplitResultBase(NamedTuple, Generic[AnyStr]):
scheme: AnyStr
netloc: AnyStr
path: AnyStr
query: AnyStr
fragment: AnyStr
class _ParseResultBase(NamedTuple, Generic[AnyStr]):
scheme: AnyStr
netloc: AnyStr
path: AnyStr
params: AnyStr
query: AnyStr
fragment: AnyStr
# Structured result objects for string data
class DefragResult(_DefragResultBase[str], _ResultMixinStr):
def geturl(self) -> str: ...
class SplitResult(_SplitResultBase[str], _NetlocResultMixinStr):
def geturl(self) -> str: ...
class ParseResult(_ParseResultBase[str], _NetlocResultMixinStr):
def geturl(self) -> str: ...
# Structured result objects for bytes data
class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes):
def geturl(self) -> bytes: ...
class SplitResultBytes(_SplitResultBase[bytes], _NetlocResultMixinBytes):
def geturl(self) -> bytes: ...
class ParseResultBytes(_ParseResultBase[bytes], _NetlocResultMixinBytes):
def geturl(self) -> bytes: ...
def parse_qs(
qs: AnyStr | None,
keep_blank_values: bool = False,
strict_parsing: bool = False,
encoding: str = "utf-8",
errors: str = "replace",
max_num_fields: int | None = None,
separator: str = "&",
) -> dict[AnyStr, list[AnyStr]]: ...
def parse_qsl(
qs: AnyStr | None,
keep_blank_values: bool = False,
strict_parsing: bool = False,
encoding: str = "utf-8",
errors: str = "replace",
max_num_fields: int | None = None,
separator: str = "&",
) -> list[tuple[AnyStr, AnyStr]]: ...
@overload
def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ...
@overload
def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ...
def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ...
@overload
def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ...
@overload
def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ...
if sys.version_info >= (3, 9):
def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ...
else:
def unquote(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ...
def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ...
def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ...
@overload
def urldefrag(url: str) -> DefragResult: ...
@overload
def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ...
_Q = TypeVar("_Q", bound=str | Iterable[int])
_QueryType: TypeAlias = (
Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]]
)
@overload
def urlencode(
query: _QueryType,
doseq: bool = False,
safe: str = "",
encoding: str | None = None,
errors: str | None = None,
quote_via: Callable[[AnyStr, str, str, str], str] = ...,
) -> str: ...
@overload
def urlencode(
query: _QueryType,
doseq: bool,
safe: _Q,
encoding: str | None = None,
errors: str | None = None,
quote_via: Callable[[AnyStr, _Q, str, str], str] = ...,
) -> str: ...
@overload
def urlencode(
query: _QueryType,
doseq: bool = False,
*,
safe: _Q,
encoding: str | None = None,
errors: str | None = None,
quote_via: Callable[[AnyStr, _Q, str, str], str] = ...,
) -> str: ...
def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ...
@overload
def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ...
@overload
def urlparse(
url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True
) -> ParseResultBytes: ...
@overload
def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ...
if sys.version_info >= (3, 11):
@overload
def urlsplit(
url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True
) -> SplitResultBytes: ...
else:
@overload
def urlsplit(
url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True
) -> SplitResultBytes: ...
# Requires an iterable of length 6
@overload
def urlunparse(components: Iterable[None]) -> Literal[b""]: ...
@overload
def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ...
# Requires an iterable of length 5
@overload
def urlunsplit(components: Iterable[None]) -> Literal[b""]: ...
@overload
def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ...
def METHOD_NAME(url: str) -> str: ...
|
1,343 |
init
|
from mxcubecore import HardwareRepository as HWR
from mxcubecore.BaseHardwareObjects import Device
import logging
class ALBAFrontLight(Device):
def __init__(self, *args):
Device.__init__(self, *args)
self.limits = [None, None]
self.state = None
self.register_state = None
self.current_level = None
self.memorized_level = None
self.previous_level = None
self.default_off_threshold = 0.01
self.off_threshold = None
def METHOD_NAME(self):
self.level_channel = self.get_channel_object("light_level")
self.state_channel = self.get_channel_object("state")
threshold = self.get_property("off_threshold")
if threshold is not None:
try:
self.off_threshold = float(threshold)
except Exception:
self.off_threshold = self.default_off_threshold
logging.getLogger("HWR").info(
"OFF Threshold for front light is not valid. Using %s"
% self.off_threshold
)
limits = self.get_property("limits")
if limits is not None:
lims = limits.split(",")
if len(lims) == 2:
self.limits = map(float, lims)
self.level_channel.connect_signal("update", self.level_changed)
self.state_channel.connect_signal("update", self.register_state_changed)
def is_ready(self):
return True
def level_changed(self, value):
self.current_level = value
self.update_current_state()
self.emit("levelChanged", self.current_level)
def register_state_changed(self, value):
self.register_state = str(value).lower()
self.update_current_state()
def update_current_state(self):
if self.register_state == "on":
if (
self.off_threshold is not None
and self.current_level < 0.9 * self.off_threshold
):
newstate = "off"
else:
newstate = "on"
elif self.register_state == "off":
newstate = "off"
else:
newstate = "fault"
if newstate != self.state:
if newstate == "off":
self.memorized_level = self.previous_level
self.state = newstate
self.emit("stateChanged", self.state)
self.previous_level = self.current_level
def get_limits(self):
return self.limits
def get_state(self):
self.register_state = str(self.state_channel.get_value()).lower()
self.update_current_state()
return self.state
def getUserName(self):
return self.username
def getLevel(self):
self.current_level = self.level_channel.get_value()
return self.current_level
def setLevel(self, level):
logging.getLogger("HWR").debug(
"Setting level in %s to %s" % (self.username, level)
)
self.level_channel.set_value(float(level))
def setOn(self):
logging.getLogger("HWR").debug("Setting front light on")
if self.memorized_level is not None:
if self.memorized_level < self.off_threshold:
value = self.off_threshold
else:
value = self.memorized_level
logging.getLogger("HWR").debug(" setting value to")
self.level_channel.set_value(value)
else:
self.level_channel.set_value(self.off_threshold)
def setOff(self):
logging.getLogger("HWR").debug("Setting front light off")
self.level_channel.set_value(0.0)
def test():
hwr = HWR.get_hardware_repository()
hwr.connect()
light = hwr.get_hardware_object("/frontlight")
print('\nLight control for "%s"\n' % light.getUserName())
print(" Level limits are:", light.get_limits())
print(" Current level is:", light.getLevel())
print(" Current state is:", light.get_state())
if __name__ == "__main__":
test()
|
1,344 |
alca harvesting
|
#!/usr/bin/env python3
"""
_Scenario_
Standard cmsRun Process building interface used for data processing
for a particular data scenario.
A scenario is a macro-data-taking setting such as cosmic running,
beam halo running, or particular validation tests.
This class defines the interfaces used by the Tier 0 and Tier 1
processing to wrap calls to ConfigBuilder in order to retrieve all the
configurations for the various types of job
"""
import FWCore.ParameterSet.Config as cms
from Configuration.DataProcessing.Merge import mergeProcess
from Configuration.DataProcessing.Repack import repackProcess
#central import, will be used by all daughter classes anyways
from Configuration.Applications.ConfigBuilder import ConfigBuilder,Options,defaultOptions
class Scenario(object):
"""
_Scenario_
"""
def __init__(self):
self.eras=cms.Modifier()
def promptReco(self, globalTag, **options):
"""
_installPromptReco_
given a skeleton process object and references
to the output modules for the products it produces,
install the standard reco sequences and event content for this
scenario
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for promptReco"
raise NotImplementedError(msg)
def expressProcessing(self, globalTag, **options):
"""
_expressProcessing_
Build an express processing configuration for this scenario.
Express processing runs conversion, reco and alca reco on each
streamer file in the express stream and writes out RAW, RECO and
a combined ALCA file that gets mergepacked in a later step
writeTiers is list of tiers to write out, not including ALCA
datasets is the list of datasets to split into for each tier
written out. Should always be one dataset
alcaDataset - if set, this means the combined Alca file is written
out with no dataset splitting, it gets assigned straight to the dataset
provided
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for expressProcessing"
raise NotImplementedError(msg)
def visualizationProcessing(self, globalTag, **options):
"""
_visualizationProcessing_
Build a configuration for the visualization processing for this scenario.
Visualization processing runs unpacking, and reco on
streamer files and it is equipped to run on the online cluster
and writes RECO or FEVT files,
writeTiers is list of tiers to write out.
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for visualizationProcessing"
raise NotImplementedError(msg)
def alcaSkim(self, skims, **options):
"""
_alcaSkim_
Given a skeleton process install the skim splitting for given skims
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for alcaSkim"
raise NotImplementedError(msg)
def alcaReco(self, *skims, **options):
"""
_alcaReco_
Given a skeleton process install the skim production for given skims
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for alcaReco"
raise NotImplementedError(msg)
def dqmHarvesting(self, datasetName, runNumber, globalTag, **options):
"""
_dqmHarvesting_
build a DQM Harvesting configuration
Arguments:
datasetName - aka workflow name for DQMServer, this is the name of the
dataset containing the harvested run
runNumber - The run being harvested
globalTag - The global tag being used
inputFiles - The list of LFNs being harvested
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for dqmHarvesting"
raise NotImplementedError(msg)
def METHOD_NAME(self, globalTag, datasetName, **options):
"""
_alcaHarvesting_
build an AlCa Harvesting configuration
Arguments:
globalTag - The global tag being used
inputFiles - The list of LFNs being harvested
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for alcaHarvesting"
raise NotImplementedError(msg)
def skimming(self, skims, globalTag, **options):
"""
_skimming_
Given a process install the sequences for Tier 1 skimming
and the appropriate output modules
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
msg += "Does not contain an implementation for skimming"
raise NotImplementedError(msg)
def merge(self, *inputFiles, **options):
"""
_merge_
builds a merge configuration
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
return mergeProcess(*inputFiles, **options)
def repack(self, **options):
"""
_repack_
builds a repack configuration
"""
msg = "Scenario Implementation %s\n" % self.__class__.__name__
return repackProcess(**options)
#
# helper methods
#
def dropOutputModule(self, processRef, moduleName):
"""
_dropOutputModule_
Util to prune an unwanted output module
"""
del processRef._Process__outputmodules[moduleName]
return
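# --- Hedged usage sketch (editor addition, not part of the original module) ---
# It illustrates the contract the Tier 0/Tier 1 callers rely on: every workflow
# method on the base Scenario raises NotImplementedError until a daughter
# scenario overrides it, while merge()/repack() already delegate to
# mergeProcess/repackProcess. The global tag string below is a made-up example.
def _demo_scenario_contract():
    scenario = Scenario()
    try:
        scenario.promptReco("EXAMPLE_GLOBALTAG::All")
    except NotImplementedError as err:
        print("promptReco must be provided by a concrete scenario:")
        print(err)
if __name__ == "__main__":
    _demo_scenario_contract()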
|
1,345 |
init files visit
|
# ===============================================================================
# NAME: InstanceTopologyHTMLVisitor.py
#
# DESCRIPTION: A visitor responsible for the generation of HTML tables
# of command IDs, etc.
#
# AUTHOR: reder
# EMAIL: [email protected]
# DATE CREATED : Sep. 13, 2016
#
# Copyright 2016, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
# ===============================================================================
#
# Python standard modules
#
import logging
import os
import sys
from fprime_ac.generators import formatters
# from fprime_ac.utils import DiffAndRename
from fprime_ac.generators.visitors import AbstractVisitor
from fprime_ac.models import ModelParser
#
# Python extension modules and custom interfaces
#
# from Cheetah import Template
# from fprime_ac.utils import version
from fprime_ac.utils import ConfigManager
#
# Import precompiled templates here
#
try:
from fprime_ac.generators.templates.html import HtmlCmdTablePage
except ImportError:
print("ERROR: must generate python templates first.")
sys.exit(-1)
#
# Universal globals used within module go here.
# (DO NOT USE MANY!)
#
# Global logger init. below.
PRINT = logging.getLogger("output")
DEBUG = logging.getLogger("debug")
#
# Module class or classes go here.
class InstanceTopologyCmdHTMLVisitor(AbstractVisitor.AbstractVisitor):
"""
A visitor class responsible for generating HTML command tables for the
component instances of a topology.
"""
__instance = None
__config = None
__fp_dict = None
__form = None
__form_comment = None
__model_parser = None
def __init__(self):
"""
Constructor.
"""
super().__init__()
# self.initBase(self, "HTMLCmdTable")
self.__config = ConfigManager.ConfigManager.getInstance()
self.__form = formatters.Formatters()
self.__form_comment = formatters.CommentFormatters()
self.__model_parser = ModelParser.ModelParser.getInstance()
self.__cmd_dir = "commands"
DEBUG.info("InstanceTopologyCmdHTMLVisitor: Instanced.")
self.bodytext = ""
self.prototypetext = ""
self.__fp_dict = (
dict()
) # dictionary of instance name keyword to file handle pointer
def _writeTmpl(self, instance, c, visit_str):
"""
Wrapper to write a template to the file descriptor of the given instance.
"""
DEBUG.debug("InstanceTopologyCmdHTMLVisitor:%s" % visit_str)
DEBUG.debug("===================================")
DEBUG.debug(c)
self.__fp_dict[instance].writelines(c.__str__())
DEBUG.debug("===================================")
def METHOD_NAME(self, obj):
"""
Defined to generate files for generated code products.
@param obj: the instance of the model to visit.
"""
# Check for command dir here and if none create it but always switch into it
if not os.path.exists(self.__cmd_dir):
os.mkdir(self.__cmd_dir)
os.chdir(self.__cmd_dir)
# Iterate over types
for k in list(obj.get_base_id_dict().keys()):
tlist = obj.get_base_id_dict()[k]
# print "Type: %s\n" % k,
# Iterate over instances and get name
# Open file if commands exist if not do nothing
for t in tlist:
# print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
name = t[0]
cmd_list = t[3].get_comp_xml().get_commands()
if len(cmd_list) > 0:
filename = "%s_commands.html" % t[0]
# Open file for writing here...
DEBUG.info("Open file: %s" % filename)
try:
self.__fp_dict[name] = open(filename, "w")
DEBUG.info("Completed")
except OSError:
PRINT.info("Could not open %s file." % filename)
sys.exit(-1)
DEBUG.info(
"Generating HTML Command Table for %s:%s component instance..."
% (t[0], k)
)
os.chdir("..")
def startSourceFilesVisit(self, obj):
"""
Defined to generate starting static code within files.
"""
def includes1Visit(self, obj):
"""
Defined to generate includes within a file.
Usually used for the base classes but also for Port types
@param obj: the instance of the concrete element to operate on.
"""
def includes2Visit(self, obj):
"""
Defined to generate internal includes within a file.
Usually used for data type includes and system includes.
@param obj: the instance of the concrete element to operate on.
"""
def namespaceVisit(self, obj):
"""
Defined to generate namespace code within a file.
Also any pre-condition code is generated.
@param obj: the instance of the concrete element to operate on.
"""
def publicVisit(self, obj):
"""
Defined to generate public stuff within a class.
@param obj: the instance of the concrete element to operate on.
"""
# os.chdir(self.__cmd_dir)
c = HtmlCmdTablePage.HtmlCmdTablePage()
for k in list(obj.get_base_id_dict().keys()):
tlist = obj.get_base_id_dict()[k]
# print "Type: %s\n" % k,
c.has_commands = True
for t in tlist:
# print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
cmd_list = t[3].get_comp_xml().get_commands()
cobj = t[3].get_comp_xml()
c.name = "{}:{}".format(t[0], k)
if len(cmd_list) > 0:
c.has_commands = True
c.base_id = t[1]
c.commands = self.__model_parser.getCommandsList(cobj)
c.command_args = self.__model_parser.getCommandArgsDict(cobj)
self._writeTmpl(t[0], c, "InstanceTopologyHTML_Visitor")
else:
c.has_commands = False
# for cmd in cmd_list:
# print "\t\t Command: %s, opcode: %s" % (cmd.get_mnemonic(), cmd.get_opcodes())
def protectedVisit(self, obj):
"""
Defined to generate protected stuff within a class.
@param obj: the instance of the concrete element to operate on.
"""
def privateVisit(self, obj):
"""
Defined to generate private stuff within a class.
@param obj: the instance of the concrete element to operate on.
"""
def finishSourceFilesVisit(self, obj):
"""
Defined to generate ending static code within files.
"""
for fp in list(self.__fp_dict.keys()):
self.__fp_dict[fp].close()
PRINT.info("Completed generating HTML command tables...")
|
1,346 |
test bounding rect from offset at word2
|
#tests/unit/contentRecog/test_contentRecog.py
#A part of NonVisual Desktop Access (NVDA)
#This file is covered by the GNU General Public License.
#See the file COPYING for more details.
#Copyright (C) 2017 NV Access Limited
"""Unit tests for the contentRecog module.
"""
import unittest
import contentRecog
import textInfos
from locationHelper import RectLTWH
class TestRecogImageInfo(unittest.TestCase):
def test_noOffsetNoResize(self):
info = contentRecog.RecogImageInfo(0, 0, 1000, 2000, 1)
self.assertEqual(info.recogWidth, 1000)
self.assertEqual(info.recogHeight, 2000)
self.assertEqual(info.convertXToScreen(100), 100)
self.assertEqual(info.convertYToScreen(200), 200)
self.assertEqual(info.convertWidthToScreen(100), 100)
self.assertEqual(info.convertHeightToScreen(200), 200)
def test_withOffsetNoResize(self):
info = contentRecog.RecogImageInfo(10, 20, 1000, 2000, 1)
self.assertEqual(info.recogWidth, 1000)
self.assertEqual(info.recogHeight, 2000)
self.assertEqual(info.convertXToScreen(100), 110)
self.assertEqual(info.convertYToScreen(200), 220)
self.assertEqual(info.convertWidthToScreen(100), 100)
self.assertEqual(info.convertHeightToScreen(200), 200)
def test_noOffsetWithResize(self):
info = contentRecog.RecogImageInfo(0, 0, 1000, 2000, 2)
self.assertEqual(info.recogWidth, 2000)
self.assertEqual(info.recogHeight, 4000)
self.assertEqual(info.convertXToScreen(200), 100)
self.assertEqual(info.convertYToScreen(400), 200)
self.assertEqual(info.convertWidthToScreen(200), 100)
self.assertEqual(info.convertHeightToScreen(400), 200)
def test_withOffsetWithResize(self):
info = contentRecog.RecogImageInfo(10, 20, 1000, 2000, 2)
self.assertEqual(info.recogWidth, 2000)
self.assertEqual(info.recogHeight, 4000)
self.assertEqual(info.convertXToScreen(200), 110)
self.assertEqual(info.convertYToScreen(400), 220)
self.assertEqual(info.convertWidthToScreen(200), 100)
self.assertEqual(info.convertHeightToScreen(400), 200)
class FakeNVDAObject(object):
pass
class TestLinesWordsResult(unittest.TestCase):
"""Tests that contentRecog.LinesWordsResult and contentRecog.LwrTextInfo
correctly parse and process the JSON from a recognizer.
"""
DATA = [
[
{"x": 100, "y": 200, "width": 10, "height": 20, "text": "word1"},
{"x": 110, "y": 200, "width": 10, "height": 20, "text": "word2"}
],
[
{"x": 100, "y": 220, "width": 10, "height": 20, "text": "word3"},
{"x": 110, "y": 220, "width": 10, "height": 20, "text": "word4"}
]
]
TOP = 0
BOTTOM = 23
WORD1_OFFSETS = (0, 6)
WORD1_SECOND = 1
WORD1_LAST = 5
WORD1_RECT = RectLTWH(100, 200, 10, 20)
WORD2_START = 6
WORD2_OFFSETS = (6, 12)
WORD2_RECT = RectLTWH(110, 200, 10, 20)
WORD3_OFFSETS = (12, 18)
WORD3_START = 12
WORD3_RECT = RectLTWH(100, 220, 10, 20)
WORD4_OFFSETS = (18, 24)
WORD4_RECT = RectLTWH(110, 220, 10, 20)
LINE1_OFFSETS = (0, 12)
LINE1_SECOND = 1
LINE1_LAST = 11
LINE2_OFFSETS = (12, 24)
LINE2_START = 12
def setUp(self):
info = contentRecog.RecogImageInfo(0, 0, 1000, 2000, 1)
self.result = contentRecog.LinesWordsResult(self.DATA, info)
self.fakeObj = FakeNVDAObject()
self.textInfo = self.result.makeTextInfo(self.fakeObj, textInfos.POSITION_FIRST)
def test_text(self):
self.assertEqual(self.result.text, "word1 word2\nword3 word4\n")
def test_textLen(self):
self.assertEqual(self.result.textLen, len(self.result.text))
def test_wordOffsetsAtTop(self):
actual = self.textInfo._getWordOffsets(self.TOP)
self.assertEqual(actual, self.WORD1_OFFSETS)
def test_wordOffsetsAtWord1SecondChar(self):
actual = self.textInfo._getWordOffsets(self.WORD1_SECOND)
self.assertEqual(actual, self.WORD1_OFFSETS)
def test_wordOffsetsAtWord1LastChar(self):
actual = self.textInfo._getWordOffsets(self.WORD1_LAST)
self.assertEqual(actual, self.WORD1_OFFSETS)
def test_wordOffsetsAtWord2Start(self):
actual = self.textInfo._getWordOffsets(self.WORD2_START)
self.assertEqual(actual, self.WORD2_OFFSETS)
def test_wordOffsetsAtLine2Start(self):
actual = self.textInfo._getWordOffsets(self.LINE2_START)
self.assertEqual(actual, self.WORD3_OFFSETS)
def test_wordOffsetsAtBottom(self):
actual = self.textInfo._getWordOffsets(self.BOTTOM)
self.assertEqual(actual, self.WORD4_OFFSETS)
def test_lineOffsetsAtTop(self):
actual = self.textInfo._getLineOffsets(self.TOP)
self.assertEqual(actual, self.LINE1_OFFSETS)
def test_lineOffsetsAtLine1SecondChar(self):
actual = self.textInfo._getLineOffsets(self.LINE1_SECOND)
self.assertEqual(actual, self.LINE1_OFFSETS)
def test_lineOffsetsAtLine1LastChar(self):
actual = self.textInfo._getLineOffsets(self.LINE1_LAST)
self.assertEqual(actual, self.LINE1_OFFSETS)
def test_lineOffsetsAtLine2Start(self):
actual = self.textInfo._getLineOffsets(self.LINE2_START)
self.assertEqual(actual, self.LINE2_OFFSETS)
def test_lineOffsetsAtBottom(self):
actual = self.textInfo._getLineOffsets(self.BOTTOM)
self.assertEqual(actual, self.LINE2_OFFSETS)
def test_boundingRectFromOffsetAtTop(self):
actual = self.textInfo._getBoundingRectFromOffset(self.TOP)
self.assertEqual(actual, self.WORD1_RECT)
def test_boundingRectFromOffsetAtWord1SecondChar(self):
actual = self.textInfo._getBoundingRectFromOffset(self.WORD1_SECOND)
self.assertEqual(actual, self.WORD1_RECT)
def test_boundingRectFromOffsetAtWord1LastChar(self):
actual = self.textInfo._getBoundingRectFromOffset(self.WORD1_LAST)
self.assertEqual(actual, self.WORD1_RECT)
def METHOD_NAME(self):
actual = self.textInfo._getBoundingRectFromOffset(self.WORD2_START)
self.assertEqual(actual, self.WORD2_RECT)
def test_boundingRectFromOffsetAtLine2Start(self):
actual = self.textInfo._getBoundingRectFromOffset(self.LINE2_START)
self.assertEqual(actual, self.WORD3_RECT)
def test_boundingRectFromOffsetAtBottom(self):
actual = self.textInfo._getBoundingRectFromOffset(self.BOTTOM)
self.assertEqual(actual, self.WORD4_RECT)
def test_copyTextInfo(self):
copy = self.textInfo.copy()
self.assertEqual(copy, self.textInfo)
|
1,347 |
test access with list
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for pyvo.dal.datalink
"""
from functools import partial
import pytest
import pyvo as vo
from pyvo.dal.adhoc import DatalinkResults
from pyvo.utils import vocabularies
from astropy.utils.data import get_pkg_data_contents, get_pkg_data_filename
get_pkg_data_contents = partial(
get_pkg_data_contents, package=__package__, encoding='binary')
@pytest.fixture()
def ssa_datalink(mocker):
def callback(request, context):
return get_pkg_data_contents('data/datalink/datalink-ssa.xml')
with mocker.register_uri(
'GET', 'http://example.com/ssa_datalink', content=callback
) as matcher:
yield matcher
@pytest.fixture()
def datalink(mocker):
def callback(request, context):
return get_pkg_data_contents('data/datalink/datalink.xml')
with mocker.register_uri(
'POST', 'http://example.com/datalink', content=callback
) as matcher:
yield matcher
@pytest.fixture()
def obscore_datalink(mocker):
def callback(request, context):
return get_pkg_data_contents('data/datalink/datalink-obscore.xml')
with mocker.register_uri(
'GET', 'http://example.com/obscore', content=callback
) as matcher:
yield matcher
@pytest.fixture()
def res_datalink(mocker):
first_batch = True
def callback(request, context):
nonlocal first_batch
if first_batch:
first_batch = False
return get_pkg_data_contents('data/datalink/cutout1.xml')
else:
return get_pkg_data_contents('data/datalink/cutout2.xml')
with mocker.register_uri(
'POST', 'https://example.com/obscore-datalink', content=callback
) as matcher:
yield matcher
@pytest.fixture()
def proc(mocker):
def callback(request, context):
return get_pkg_data_contents('data/datalink/proc.xml')
with mocker.register_uri(
'GET', 'http://example.com/proc', content=callback
) as matcher:
yield matcher
@pytest.fixture()
def datalink_vocabulary(mocker):
# astropy download_file (which get_vocabulary uses) does not use
# requests, so we can't mock it the way we mock the others. We
# replace the entire function for the duration of the test instead.
dl_voc_uri = 'http://www.ivoa.net/rdf/datalink/core'
def fake_download_file(src_url, *args, **kwargs):
assert src_url == dl_voc_uri
return get_pkg_data_filename('data/datalink/datalink.desise')
real_download_file = vocabularies.download_file
try:
vocabularies.download_file = fake_download_file
yield
finally:
vocabularies.download_file = real_download_file
@pytest.mark.usefixtures('ssa_datalink', 'datalink')
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W27")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W06")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W48")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.E02")
def test_datalink():
results = vo.spectrumsearch(
'http://example.com/ssa_datalink', (30, 30))
datalinks = next(results.iter_datalinks())
row = datalinks[0]
assert row.semantics == "#progenitor"
row = datalinks[1]
assert row.semantics == "#proc"
row = datalinks[2]
assert row.semantics == "#this"
row = datalinks[3]
assert row.semantics == "#preview"
@pytest.mark.usefixtures('obscore_datalink', 'res_datalink')
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W27")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W06")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W48")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.E02")
def test_datalink_batch():
results = vo.dal.imagesearch(
'http://example.com/obscore', (30, 30))
assert len([_ for _ in results.iter_datalinks()]) == 3
@pytest.mark.usefixtures('proc', 'datalink_vocabulary')
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W27")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W06")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.W48")
@pytest.mark.filterwarnings("ignore::astropy.io.votable.exceptions.E02")
class TestSemanticsRetrieval:
def test_access_with_string(self):
datalinks = DatalinkResults.from_result_url('http://example.com/proc')
res = [r["access_url"] for r in datalinks.bysemantics("#this")]
assert len(res) == 1
assert res[0].endswith("eq010000ms/20100927.comb_avg.0001.fits.fz")
def METHOD_NAME(self):
datalinks = DatalinkResults.from_result_url('http://example.com/proc')
res = [r["access_url"]
for r in datalinks.bysemantics(["#this", "#preview-image"])]
assert len(res) == 2
assert res[0].endswith("eq010000ms/20100927.comb_avg.0001.fits.fz")
assert res[1].endswith("20100927.comb_avg.0001.fits.fz?preview=True")
def test_access_with_expansion(self):
datalinks = DatalinkResults.from_result_url('http://example.com/proc')
res = [r["access_url"]
for r in datalinks.bysemantics(["#this", "#preview"])]
assert len(res) == 3
assert res[0].endswith("eq010000ms/20100927.comb_avg.0001.fits.fz")
assert res[1].endswith("20100927.comb_avg.0001.fits.fz?preview=True")
assert res[2].endswith("http://dc.zah.uni-heidelberg.de/wider.dat")
def test_access_without_expansion(self):
datalinks = DatalinkResults.from_result_url('http://example.com/proc')
res = [r["access_url"] for r in datalinks.bysemantics(
["#this", "#preview"], include_narrower=False)]
assert len(res) == 2
assert res[0].endswith("eq010000ms/20100927.comb_avg.0001.fits.fz")
assert res[1].endswith("http://dc.zah.uni-heidelberg.de/wider.dat")
def test_with_full_url(self):
datalinks = DatalinkResults.from_result_url('http://example.com/proc')
res = [r["access_url"]
for r in datalinks.bysemantics("urn:example:rdf/dlext#oracle")]
assert len(res) == 1
assert res[0].endswith("when-will-it-be-back")
def test_all_mixed(self):
datalinks = DatalinkResults.from_result_url('http://example.com/proc')
res = [r["access_url"]
for r in datalinks.bysemantics([
"urn:example:rdf/dlext#oracle",
'http://www.ivoa.net/rdf/datalink/core#preview',
'#this',
'non-existing-term'])]
assert len(res) == 4
assert res[0].endswith("eq010000ms/20100927.comb_avg.0001.fits.fz")
assert res[1].endswith("comb_avg.0001.fits.fz?preview=True")
assert res[2].endswith("http://dc.zah.uni-heidelberg.de/wider.dat")
assert res[3].endswith("when-will-it-be-back")
|
1,348 |
plot results
|
"""
========================================================================
Concentration Prior Type Analysis of Variation Bayesian Gaussian Mixture
========================================================================
This example plots the ellipsoids obtained from a toy dataset (mixture of three
Gaussians) fitted by the ``BayesianGaussianMixture`` class models with a
Dirichlet distribution prior
(``weight_concentration_prior_type='dirichlet_distribution'``) and a Dirichlet
process prior (``weight_concentration_prior_type='dirichlet_process'``). On
each figure, we plot the results for three different values of the weight
concentration prior.
The ``BayesianGaussianMixture`` class can adapt its number of mixture
components automatically. The parameter ``weight_concentration_prior`` has a
direct link with the resulting number of components with non-zero weights.
Specifying a low value for the concentration prior will make the model put most
of the weight on a few components and set the remaining components' weights very
close to zero. High values of the concentration prior will allow a larger number
of components to be active in the mixture.
The Dirichlet process prior makes it possible to define an infinite number of
components and automatically selects the correct number of components: it
activates a component only if it is necessary.
On the contrary, the classical finite mixture model with a Dirichlet
distribution prior will favor more uniformly weighted components and therefore
tends to divide natural clusters into unnecessary sub-components.
"""
# Author: Thierry Guillemot <[email protected]>
# License: BSD 3 clause
import matplotlib as mpl
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
from sklearn.mixture import BayesianGaussianMixture
def plot_ellipses(ax, weights, means, covars):
for n in range(means.shape[0]):
eig_vals, eig_vecs = np.linalg.eigh(covars[n])
unit_eig_vec = eig_vecs[0] / np.linalg.norm(eig_vecs[0])
angle = np.arctan2(unit_eig_vec[1], unit_eig_vec[0])
# Ellipse needs degrees
angle = 180 * angle / np.pi
# convert eigenvalues (variances) into full ellipse axis lengths (2 * sqrt(2) * sigma)
eig_vals = 2 * np.sqrt(2) * np.sqrt(eig_vals)
ell = mpl.patches.Ellipse(
means[n], eig_vals[0], eig_vals[1], angle=180 + angle, edgecolor="black"
)
ell.set_clip_box(ax.bbox)
ell.set_alpha(weights[n])
ell.set_facecolor("#56B4E9")
ax.add_artist(ell)
def METHOD_NAME(ax1, ax2, estimator, X, y, title, plot_title=False):
ax1.set_title(title)
ax1.scatter(X[:, 0], X[:, 1], s=5, marker="o", color=colors[y], alpha=0.8)
ax1.set_xlim(-2.0, 2.0)
ax1.set_ylim(-3.0, 3.0)
ax1.set_xticks(())
ax1.set_yticks(())
plot_ellipses(ax1, estimator.weights_, estimator.means_, estimator.covariances_)
ax2.get_xaxis().set_tick_params(direction="out")
ax2.yaxis.grid(True, alpha=0.7)
for k, w in enumerate(estimator.weights_):
ax2.bar(
k,
w,
width=0.9,
color="#56B4E9",
zorder=3,
align="center",
edgecolor="black",
)
ax2.text(k, w + 0.007, "%.1f%%" % (w * 100.0), horizontalalignment="center")
ax2.set_xlim(-0.6, 2 * n_components - 0.4)
ax2.set_ylim(0.0, 1.1)
ax2.tick_params(axis="y", which="both", left=False, right=False, labelleft=False)
ax2.tick_params(axis="x", which="both", top=False)
if plot_title:
ax1.set_ylabel("Estimated Mixtures")
ax2.set_ylabel("Weight of each component")
# Parameters of the dataset
random_state, n_components, n_features = 2, 3, 2
colors = np.array(["#0072B2", "#F0E442", "#D55E00"])
covars = np.array(
[[[0.7, 0.0], [0.0, 0.1]], [[0.5, 0.0], [0.0, 0.1]], [[0.5, 0.0], [0.0, 0.1]]]
)
samples = np.array([200, 500, 200])
means = np.array([[0.0, -0.70], [0.0, 0.0], [0.0, 0.70]])
# mean_precision_prior= 0.8 to minimize the influence of the prior
estimators = [
(
"Finite mixture with a Dirichlet distribution\nprior and " r"$\gamma_0=$",
BayesianGaussianMixture(
weight_concentration_prior_type="dirichlet_distribution",
n_components=2 * n_components,
reg_covar=0,
init_params="random",
max_iter=1500,
mean_precision_prior=0.8,
random_state=random_state,
),
[0.001, 1, 1000],
),
(
"Infinite mixture with a Dirichlet process\n prior and" r"$\gamma_0=$",
BayesianGaussianMixture(
weight_concentration_prior_type="dirichlet_process",
n_components=2 * n_components,
reg_covar=0,
init_params="random",
max_iter=1500,
mean_precision_prior=0.8,
random_state=random_state,
),
[1, 1000, 100000],
),
]
# Generate data
rng = np.random.RandomState(random_state)
X = np.vstack(
[
rng.multivariate_normal(means[j], covars[j], samples[j])
for j in range(n_components)
]
)
y = np.concatenate([np.full(samples[j], j, dtype=int) for j in range(n_components)])
# Plot results in two different figures
for title, estimator, concentrations_prior in estimators:
plt.figure(figsize=(4.7 * 3, 8))
plt.subplots_adjust(
bottom=0.04, top=0.90, hspace=0.05, wspace=0.05, left=0.03, right=0.99
)
gs = gridspec.GridSpec(3, len(concentrations_prior))
for k, concentration in enumerate(concentrations_prior):
estimator.weight_concentration_prior = concentration
estimator.fit(X)
METHOD_NAME(
plt.subplot(gs[0:2, k]),
plt.subplot(gs[2, k]),
estimator,
X,
y,
r"%s$%.1e$" % (title, concentration),
plot_title=k == 0,
)
plt.show()
|
1,349 |
test categorical is not constant
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multinomial generation ops in the XLA JIT compiler."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import stateless_random_ops
from tensorflow.python.platform import googletest
# TODO(srvasude): Merge this with
# third_party/tensorflow/python/kernel_tests/random/multinomial_op_test.py.
class CategoricalTest(xla_test.XLATestCase):
"""Test cases for random-number generating operators."""
def output_dtypes(self):
return set(self.int_types).intersection([np.int32, np.int64])
def _chi2(self, expected, actual):
"""Returns Chi2 GOF statistic."""
actual = np.asarray(actual)
expected = np.asarray(expected)
diff = actual - expected
chi2 = np.sum(diff * diff / expected)
return chi2
def _do_sampling(self, logits, num_samples):
"""Categorical samples from given input.
Args:
logits: Numpy ndarray of shape [batch_size, num_classes].
num_samples: Int; number of samples to draw.
Returns:
Frequencies from sampled classes; shape [batch_size, num_classes].
"""
with self.session(), self.test_scope():
random_seed.set_random_seed(1618)
op = random_ops.multinomial(logits, num_samples,
output_dtype=dtypes.int32)
d = self.evaluate(op)
batch_size, num_classes = logits.shape
freqs_mat = []
for i in range(batch_size):
cnts = dict(collections.Counter(d[i, :]))
# Require that drawn class labels are in range.
self.assertLess(max(cnts.keys()), num_classes)
self.assertGreaterEqual(min(cnts.keys()), 0)
freqs = [(cnts[k] * 1. / num_samples if k in cnts else 0)
for k in range(num_classes)]
freqs_mat.append(freqs)
return freqs_mat
def _testRngIsNotConstant(self, rng, dtype, output_dtype):
# Tests that 'rng' does not always return the same value.
with self.session():
with self.test_scope():
x = rng(dtype, output_dtype)
# The random-number generator, if working correctly, should produce the
# same output multiple times with low probability.
y = self.evaluate(x)
z = self.evaluate(x)
w = self.evaluate(x)
# We use exact equality here. If the random-number generator is producing
# deterministic output, all three outputs will be bitwise identical.
self.assertTrue((not np.array_equal(y, z)) or
(not np.array_equal(z, w)) or
(not np.array_equal(y, w)))
def METHOD_NAME(self):
def rng(dtype, output_dtype):
return random_ops.multinomial(np.array([[1., 1., 1.]], dtype=dtype), 10,
output_dtype=output_dtype)
dtype = np.float32
for output_dtype in self.output_dtypes():
self._testRngIsNotConstant(rng, dtype, output_dtype)
def testCategoricalIsInRange(self):
for dtype in self.float_types:
for output_dtype in self.output_dtypes():
with self.session():
with self.test_scope():
x = random_ops.multinomial(
array_ops.ones(shape=[1, 20], dtype=dtype), 1000,
output_dtype=output_dtype)
y = self.evaluate(x)
self.assertTrue((y >= 0).sum() == 1000)
self.assertTrue((y < 20).sum() == 1000)
def testSamplingCorrectness(self):
np.random.seed(1618) # Make it reproducible.
num_samples = 40000
rand_probs = np.random.dirichlet([1., 1., 2., 3.])
rand_probs2 = np.random.dirichlet([1., 4., 5.], size=3) # batched
for probs in [[.5, .5], [.85, .05, .1], rand_probs, rand_probs2]:
probs = np.asarray(probs)
if len(probs.shape) == 1:
probs = probs.reshape(1, probs.size) # singleton batch
logits = np.log(probs).astype(np.float32)
freqs = self._do_sampling(logits, num_samples)
# the test here is similar to
# python/kernel_tests/random/multinomial_op_test.py
# Note that df >= 1 in all these cases. Choosing a cutoff of 1e-3
# corresponds to an alpha value of 2.5% for df = 1, and smaller for larger
# df.
chi2 = self._chi2(probs, freqs)
self.assertLess(chi2, 1e-3)
def testStatelessMultinomialIsInRange(self):
for dtype in self.float_types.intersection(
[dtypes.float32, dtypes.bfloat16]):
for output_dtype in self.output_dtypes():
with self.session() as sess:
with self.test_scope():
seed_t = array_ops.placeholder(dtypes.int32, shape=[2])
x = stateless_random_ops.stateless_multinomial(
array_ops.ones(shape=[1, 20], dtype=dtype),
1000,
seed_t,
output_dtype=output_dtype)
y = sess.run(x, {seed_t: [0x12345678, 0xabcdef12]})
self.assertTrue((y >= 0).sum() == 1000)
self.assertTrue((y < 20).sum() == 1000)
def testDeterminismMultinomial(self):
# Stateless values should be equal iff the seeds are equal (roughly)
num_samples = 10
with self.session(), self.test_scope():
seed_t = array_ops.placeholder(dtypes.int32, shape=[2])
seeds = [(x, y) for x in range(5) for y in range(5)] * 3
for logits in ([[0.1, 0.25, 0.5, 0.15]], [[0.5, 0.5], [0.8, 0.2],
[0.25, 0.75]]):
pure = stateless_random_ops.stateless_multinomial(
logits, num_samples, seed=seed_t)
values = [(seed, pure.eval(feed_dict={seed_t: seed})) for seed in seeds]
for s0, v0 in values:
for s1, v1 in values:
self.assertEqual(s0 == s1, np.all(v0 == v1))
def testEmpty(self):
with self.session():
with self.test_scope():
x = random_ops.multinomial(
array_ops.zeros([42, 40]), 0, output_dtype=dtypes.int32)
y = self.evaluate(x)
self.assertEqual(y.shape, (42, 0))
def testEmptyStateless(self):
with self.session() as sess:
with self.test_scope():
seed_t = array_ops.placeholder(dtypes.int32, shape=[2])
x = stateless_random_ops.stateless_multinomial(
array_ops.zeros([42, 40]),
0,
seed=seed_t,
output_dtype=dtypes.int32)
y = sess.run(x, {seed_t: [0x12345678, 0xabcdef1]})
self.assertEqual(y.shape, (42, 0))
if __name__ == '__main__':
googletest.main()
|
1,350 |
test app
|
import sys
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy.test import helper
class WSGIGraftTests(helper.CPWebCase):
@staticmethod
def setup_server():
def METHOD_NAME(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
output = ['Hello, world!\n',
'This is a wsgi app running within CherryPy!\n\n']
keys = list(environ.keys())
keys.sort()
for k in keys:
output.append('%s: %s\n' % (k, environ[k]))
return [ntob(x, 'utf-8') for x in output]
def test_empty_string_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [
b'Hello', b'', b' ', b'', b'world',
]
class WSGIResponse(object):
def __init__(self, appresults):
self.appresults = appresults
self.iter = iter(appresults)
def __iter__(self):
return self
if sys.version_info >= (3, 0):
def __next__(self):
return next(self.iter)
else:
def next(self):
return self.iter.next()
def close(self):
if hasattr(self.appresults, 'close'):
self.appresults.close()
class ReversingMiddleware(object):
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
results = self.app(environ, start_response)
class Reverser(WSGIResponse):
if sys.version_info >= (3, 0):
def __next__(this):
line = list(next(this.iter))
line.reverse()
return bytes(line)
else:
def next(this):
line = list(this.iter.next())
line.reverse()
return ''.join(line)
return Reverser(results)
class Root:
@cherrypy.expose
def index(self):
return ntob("I'm a regular CherryPy page handler!")
cherrypy.tree.mount(Root())
cherrypy.tree.graft(METHOD_NAME, '/hosted/app1')
cherrypy.tree.graft(test_empty_string_app, '/hosted/app3')
# Set script_name explicitly to None to signal CP that it should
# be pulled from the WSGI environ each time.
app = cherrypy.Application(Root(), script_name=None)
cherrypy.tree.graft(ReversingMiddleware(app), '/hosted/app2')
wsgi_output = '''Hello, world!
This is a wsgi app running within CherryPy!'''
def test_01_standard_app(self):
self.getPage('/')
self.assertBody("I'm a regular CherryPy page handler!")
def test_04_pure_wsgi(self):
if not cherrypy.server.using_wsgi:
return self.skip('skipped (not using WSGI)... ')
self.getPage('/hosted/app1')
self.assertHeader('Content-Type', 'text/plain')
self.assertInBody(self.wsgi_output)
def test_05_wrapped_cp_app(self):
if not cherrypy.server.using_wsgi:
return self.skip('skipped (not using WSGI)... ')
self.getPage('/hosted/app2/')
body = list("I'm a regular CherryPy page handler!")
body.reverse()
body = ''.join(body)
self.assertInBody(body)
def test_06_empty_string_app(self):
if not cherrypy.server.using_wsgi:
return self.skip('skipped (not using WSGI)... ')
self.getPage('/hosted/app3')
self.assertHeader('Content-Type', 'text/plain')
self.assertInBody('Hello world')
|
1,351 |
test core invalid
|
#
# Copyright (C) 2010 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Unit tests for descrparser module."""
import unittest
from nav.ipdevpoll import descrparsers
class TestNtnuConvention(object):
sysname = 'foo-sw'
def test_lan(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'lan,math,staff')
assert d is not None
assert d['org'] == 'math'
assert d['usage'] == 'staff'
assert d['netident'] == 'math,staff'
def test_lan_with_comment_and_vlan(self):
d = descrparsers.parse_ntnu_convention(
self.sysname, 'lan,physics,students,campus_dragv,340'
)
assert d is not None
assert d['org'] == 'physics'
assert d['usage'] == 'students'
assert d['comment'] == 'campus_dragv'
assert d['netident'] == 'physics,students,campus_dragv'
assert d['vlan'] == 340
def test_lan_with_numbered_usage_and_comment(self):
d = descrparsers.parse_ntnu_convention(
self.sysname, 'lan,math,staff12,campus_lade'
)
assert d is not None
assert d['org'] == 'math'
assert d['usage'] == 'staff'
assert d['n'] == 12
assert d['netident'] == 'math,staff12,campus_lade'
assert d['comment'] == 'campus_lade'
def test_lan_with_spaces(self):
d = descrparsers.parse_ntnu_convention(
self.sysname, 'lan ,physics,students, campus_dragv, 340'
)
assert d is not None
assert d['org'] == 'physics'
assert d['usage'] == 'students'
assert d['comment'] == 'campus_dragv'
assert d['netident'] == 'physics,students,campus_dragv'
assert d['vlan'] == 340
def test_lan_invalid(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'lan,foo')
assert d is None
def test_link(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'link,mts-gw')
assert d is not None
assert d['to_router'] == 'mts-gw'
def test_link_with_comment_and_vlan(self):
d = descrparsers.parse_ntnu_convention(
self.sysname, 'link,moholt-gw,Tn_20022350,923'
)
assert d['to_router'] == 'moholt-gw'
assert d['comment'] == 'Tn_20022350'
assert d['netident'] == '%s,%s' % (self.sysname, 'moholt-gw')
assert d['vlan'] == 923
def test_core(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'core,it,wlan')
assert d is not None
assert d['org'] == 'it'
assert d['usage'] == 'wlan'
assert d['netident'] == 'it,wlan'
def test_core_with_comment_and_vlan(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'core,it,fddi,manring,180')
assert d is not None
assert d['org'] == 'it'
assert d['usage'] == 'fddi'
assert d['comment'] == 'manring'
assert d['netident'] == 'it,fddi,manring'
assert d['vlan'] == 180
def METHOD_NAME(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'core,foo')
assert d is None
def test_elink(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'elink,trd-gw,uninett')
assert d is not None
assert d['to_router'] == 'trd-gw'
assert d['to_org'] == 'uninett'
assert d['netident'] == '%s,%s' % (self.sysname, 'trd-gw')
def test_elink_with_empty_comment(self):
d = descrparsers.parse_ntnu_convention(
self.sysname, 'elink,sintef-gw,sintef,,902'
)
assert d is not None
assert d['to_router'] == 'sintef-gw'
assert d['to_org'] == 'sintef'
assert not d['comment']
assert d['netident'] == '%s,%s' % (self.sysname, 'sintef-gw')
assert d['vlan'] == 902
def test_invalid(self):
d = descrparsers.parse_ntnu_convention(self.sysname, 'foobar,bar,baz')
assert d is None
class TestUninettConvention(object):
def test_simple(self):
d = descrparsers.parse_uninett_convention(
'foo-sw', 'lokal link, uninett-gw.teknobyen-gw2'
)
assert d['comment'] == 'lokal link'
assert d['netident'] == 'uninett-gw.teknobyen-gw2'
def test_invalid(self):
d = descrparsers.parse_uninett_convention('foo-sw', 'KX182')
assert d is None
|
1,352 |
head204
|
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ..._vendor import _convert_request
from ...operations._http_success_operations import build_head200_request, build_head204_request, build_head404_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class HttpSuccessOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.sample.aio.AutoRestHeadTestService`'s
:attr:`http_success` attribute.
"""
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace_async
async def head200(self, **kwargs: Any) -> bool:
"""Return 200 status code if successful.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_head200_request(
template_url=self.head200.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299
head200.metadata = {"url": "/http/success/200"}
@distributed_trace_async
async def METHOD_NAME(self, **kwargs: Any) -> bool:
"""Return 204 status code if successful.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_head204_request(
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299
METHOD_NAME.metadata = {"url": "/http/success/204"}
@distributed_trace_async
async def head404(self, **kwargs: Any) -> bool:
"""Return 404 status code if successful.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_head404_request(
template_url=self.head404.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299
head404.metadata = {"url": "/http/success/404"}
|
1,353 |
test single
|
from bsb import topology
from bsb.config import Configuration
from bsb.exceptions import *
import unittest, numpy as np
from bsb.unittest import get_data_path
def single_layer():
c = topology.Layer(thickness=150, stack_index=0)
r = topology.Stack(children=[c])
topology.create_topology([r], np.array([0, 0, 0]), np.array([100, 100, 100]))
return r, c
class TestCreateTopology(unittest.TestCase):
def METHOD_NAME(self):
r = topology.Region(name="R", children=[])
t = topology.create_topology([r], np.array([0, 0, 0]), np.array([100, 100, 100]))
self.assertEqual(r, t, "Topology with 1 root region should be the region itself")
def test_unrelated(self):
r = topology.Region(children=[])
rb = topology.Region(children=[])
t = topology.create_topology([r, rb], np.array([0, 0, 0]), np.array([1, 1, 1]))
self.assertNotEqual(r, t, "Topology with multiple roots should be encapsulated")
def test_2gen(self):
r = topology.Region(children=[])
r2 = topology.Region(children=[r])
t = topology.create_topology([r2, r], np.array([0, 0, 0]), np.array([1, 1, 1]))
self.assertEqual(r2, t, "Dependency interpreted as additional root")
def test_3gen(self):
r = topology.Region(children=[])
r2 = topology.Region(children=[r])
r3 = topology.Region(children=[r2])
t = topology.create_topology(
[r3, r2, r], np.array([0, 0, 0]), np.array([100, 100, 100])
)
self.assertEqual(r3, t, "Recursive dependency interpreted as additional root")
class TestTopology(unittest.TestCase):
def test_stack(self):
c = topology.Layer(name="c", thickness=150, stack_index=0)
c2 = topology.Layer(name="c2", thickness=150, stack_index=1)
r = topology.Stack(name="mystack", children=[c, c2])
topology.create_topology([r], [0, 0, 0], [100, 100, 100])
self.assertEqual(0, c.data.y)
self.assertEqual(150, c2.data.height)
self.assertEqual(100, c.data.width)
self.assertEqual(100, c2.data.width)
self.assertEqual(300, r.data.height)
def test_partition_chunking(self):
r, l = single_layer()
cs = np.array([100, 100, 100])
# Test 100x150x100 layer producing 2 100x100x100 chunks on top of each other
self.assertEqual([[0, 0, 0], [0, 1, 0]], l.to_chunks(cs).tolist())
# Test translation by whole chunk
l.data.x += cs[0]
self.assertEqual([[1, 0, 0], [1, 1, 0]], l.to_chunks(cs).tolist())
# Translate less than a chunk so that we overflow into an extra layer of x chunks
l.data.x += 1
self.assertEqual(
[[1, 0, 0], [1, 1, 0], [2, 0, 0], [2, 1, 0]], l.to_chunks(cs).tolist()
)
class TestAllenVoxels(unittest.TestCase):
def test_val(self):
cfg = Configuration.default(
region=dict(br=dict(children=["a"])),
partitions=dict(a=dict(type="allen", struct_name="VAL")),
)
part = cfg.partitions.a
vs = part.voxelset
self.assertEqual(52314, len(vs), "VAL is that many voxels")
self.assertEqual(52314 * 25**3, part.volume(), "VAL occupies this much space")
self.assertTrue(
np.allclose([(5975, 3550, 3950), (7125, 5100, 7475)], vs.bounds),
"VAL has those bounds",
)
not_impl = "We don't support transforming voxel partitions yet. Contribute it!"
for t in ("translate", "scale", "rotate"):
with self.subTest(transform=t):
transform = getattr(part, t)
with self.assertRaises(LayoutError, msg=not_impl):
transform(0)
def test_mask_nrrd(self):
cfg = Configuration.default(
region=dict(br=dict(children=["a"])),
partitions=dict(
a=dict(
type="allen",
mask_source=get_data_path("orientations", "toy_annotations.nrrd"),
struct_id=10690,
)
),
)
part = cfg.partitions.a
vs = part.voxelset
self.assertEqual(24, len(vs), "Region has that many voxels")
self.assertEqual(24 * 25**3, part.volume(), "Region occupies this much space")
|
1,354 |
test byte
|
# -*- coding: utf-8 -*-
from datetime import date
from datetime import datetime
import six
from mock import patch
from bravado_core.formatter import SwaggerFormat
from bravado_core.formatter import to_python
from bravado_core.spec import Spec
if not six.PY2:
long = int
def test_none(minimal_swagger_spec):
string_spec = {'type': 'string', 'format': 'date'}
assert to_python(minimal_swagger_spec, string_spec, None) is None
def test_no_format_returns_value(minimal_swagger_spec):
string_spec = {'type': 'string'}
assert 'boo' == to_python(minimal_swagger_spec, string_spec, 'boo')
def test_date(minimal_swagger_spec):
string_spec = {'type': 'string', 'format': 'date'}
assert date(2015, 4, 1) == to_python(
minimal_swagger_spec, string_spec, '2015-04-01',
)
def test_datetime(minimal_swagger_spec):
string_spec = {'type': 'string', 'format': 'date-time'}
result = to_python(
minimal_swagger_spec, string_spec, '2015-03-22T13:19:54',
)
assert datetime(2015, 3, 22, 13, 19, 54) == result
@patch('bravado_core.spec.warnings.warn')
def test_no_registered_format_returns_value_as_is_and_issues_warning(mock_warn, minimal_swagger_spec):
string_spec = {'type': 'string', 'format': 'bar'}
assert 'baz' == to_python(minimal_swagger_spec, string_spec, 'baz')
assert mock_warn.call_count == 1
def test_int64_long(minimal_swagger_spec):
integer_spec = {'type': 'integer', 'format': 'int64'}
result = to_python(minimal_swagger_spec, integer_spec, long(999))
assert long(999) == result
def test_int64_int(minimal_swagger_spec):
integer_spec = {'type': 'integer', 'format': 'int64'}
result = to_python(minimal_swagger_spec, integer_spec, 999)
assert long(999) == result
assert isinstance(result, long)
def test_int32_long(minimal_swagger_spec):
integer_spec = {'type': 'integer', 'format': 'int32'}
result = to_python(minimal_swagger_spec, integer_spec, long(999))
assert 999 == result
assert isinstance(result, int)
def test_int32_int(minimal_swagger_spec):
integer_spec = {'type': 'integer', 'format': 'int32'}
result = to_python(minimal_swagger_spec, integer_spec, 999)
assert 999 == result
assert isinstance(result, int)
def test_float(minimal_swagger_spec):
float_spec = {'type': 'number', 'format': 'float'}
result = to_python(minimal_swagger_spec, float_spec, float(3.14))
assert 3.14 == result
assert isinstance(result, float)
def test_double(minimal_swagger_spec):
double_spec = {'type': 'number', 'format': 'double'}
result = to_python(minimal_swagger_spec, double_spec, float(3.14))
assert 3.14 == result
assert isinstance(result, float)
def METHOD_NAME(minimal_swagger_spec):
byte_spec = {'type': 'string', 'format': 'byte'}
result = to_python(minimal_swagger_spec, byte_spec, 'x')
assert 'x' == result
assert isinstance(result, str)
def test_byte_base64(minimal_swagger_dict):
swagger_spec = Spec.from_dict(
minimal_swagger_dict, config={'use_base64_for_byte_format': True},
)
schema = {'type': 'string', 'format': 'byte'}
result = to_python(swagger_spec, schema, 'YWJj/w==')
assert b'abc\xff' == result
assert isinstance(result, bytes)
def test_ref(minimal_swagger_dict):
minimal_swagger_dict['definitions']['Int32'] = {
'type': 'integer', 'format': 'int32',
}
int_ref_spec = {'$ref': '#/definitions/Int32'}
swagger_spec = Spec.from_dict(minimal_swagger_dict)
result = to_python(swagger_spec, int_ref_spec, 999)
assert 999 == result
assert isinstance(result, int)
def test_override(minimal_swagger_dict):
class Byte(object):
def __init__(self, x):
self.x = x
def __str__(self):
return str(self.x)
def __repr__(self):
return '%s(%r)' % (self.__class__, self.x)
byteformat = SwaggerFormat(
format='byte',
to_wire=lambda x: str(x),
to_python=lambda x: Byte(x),
validate=lambda x: isinstance(x, str),
description=None,
)
number_spec = {'type': 'string', 'format': 'byte'}
swagger_spec = Spec.from_dict(minimal_swagger_dict, config={'formats': [byteformat]})
result = to_python(swagger_spec, number_spec, '8bits')
assert '8bits' == str(result)
assert repr(Byte('8bits')) == repr(result)
assert type(result) is Byte
|
1,355 |
transform
|
# Copyright 2020 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import autoai_libs.transformers.exportable
import numpy as np
import pandas as pd
import lale.datasets.data_schemas
import lale.docstrings
import lale.operators
from ._common_schemas import _hparam_activate_flag_unmodified
class _boolean2floatImpl:
def __init__(self, **hyperparams):
self._hyperparams = hyperparams
self._wrapped_model = autoai_libs.transformers.exportable.boolean2float(
**hyperparams
)
def fit(self, X, y=None):
self._wrapped_model.fit(X, y)
return self
def METHOD_NAME(self, X):
raw = self._wrapped_model.METHOD_NAME(X)
if isinstance(raw, (np.ndarray, pd.DataFrame)):
s_X = lale.datasets.data_schemas.to_schema(X)
s_result = self.transform_schema(s_X)
result = lale.datasets.data_schemas.add_schema(raw, s_result, recalc=True)
else:
result = raw
return result
def transform_schema(self, s_X):
"""Used internally by Lale for type-checking downstream operators."""
if self._hyperparams["activate_flag"]:
result = {
"type": "array",
"items": {"type": "array", "items": {"type": "number"}},
}
else:
result = s_X
return result
_hyperparams_schema = {
"allOf": [
{
"description": "This first object lists all constructor arguments with their types, but omits constraints for conditional hyperparameters.",
"type": "object",
"additionalProperties": False,
"required": ["activate_flag"],
"relevantToOptimizer": [],
"properties": {"activate_flag": _hparam_activate_flag_unmodified},
}
]
}
_input_fit_schema = {
"type": "object",
"required": ["X"],
"additionalProperties": False,
"properties": {
"X": { # Handles 1-D arrays as well
"anyOf": [
{"type": "array", "items": {"laleType": "Any"}},
{
"type": "array",
"items": {"type": "array", "items": {"laleType": "Any"}},
},
]
},
"y": {"laleType": "Any"},
},
}
_input_transform_schema = {
"type": "object",
"required": ["X"],
"additionalProperties": False,
"properties": {
"X": { # Handles 1-D arrays as well
"anyOf": [
{"type": "array", "items": {"laleType": "Any"}},
{
"type": "array",
"items": {"type": "array", "items": {"laleType": "Any"}},
},
]
}
},
}
_output_transform_schema = {
"description": "Features; the outer array is over samples.",
"type": "array",
"items": {"type": "array", "items": {"laleType": "Any"}},
}
_combined_schemas = {
"$schema": "http://json-schema.org/draft-04/schema#",
"description": """Operator from `autoai_libs`_. Converts strings that represent booleans to floats and replaces missing values with np.nan.
.. _`autoai_libs`: https://pypi.org/project/autoai-libs""",
"documentation_url": "https://lale.readthedocs.io/en/latest/modules/lale.lib.autoai_libs.boolean2float.html",
"import_from": "autoai_libs.transformers.exportable",
"type": "object",
"tags": {"pre": [], "op": ["transformer"], "post": []},
"properties": {
"hyperparams": _hyperparams_schema,
"input_fit": _input_fit_schema,
"input_transform": _input_transform_schema,
"output_transform": _output_transform_schema,
},
}
boolean2float = lale.operators.make_operator(_boolean2floatImpl, _combined_schemas)
lale.docstrings.set_docstrings(boolean2float)
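# --- Hedged usage sketch (editor addition, not part of the original module) ---
# Minimal illustration of the wrapped operator, assuming autoai_libs is installed
# and that METHOD_NAME above stands for transform (as the entry label suggests).
# The sample values are made up; the exact boolean encodings accepted depend on
# autoai_libs.transformers.exportable.boolean2float.
if __name__ == "__main__":
    X_demo = np.array([["true"], ["false"], [None]], dtype=object)
    trained = boolean2float(activate_flag=True).fit(X_demo)
    print(trained.transform(X_demo))  # boolean-like strings -> floats, missing -> np.nan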
|
1,356 |
create base argument parser
|
import argparse
import errno
import re
import sys
from datetime import datetime
from typing import Any, Dict, List, Sequence, Tuple, Union
import can
from can.io import BaseRotatingLogger
from can.io.generic import MessageWriter
from can.util import cast_from_string
from . import Bus, BusState, Logger, SizedRotatingLogger
from .typechecking import CanFilter, CanFilters
def METHOD_NAME(parser: argparse.ArgumentParser) -> None:
"""Adds common options to an argument parser."""
parser.add_argument(
"-c",
"--channel",
help=r"Most backend interfaces require some sort of channel. For "
r"example with the serial interface the channel might be a rfcomm"
r' device: "/dev/rfcomm0". With the socketcan interface valid '
r'channel examples include: "can0", "vcan0".',
)
parser.add_argument(
"-i",
"--interface",
dest="interface",
help="""Specify the backend CAN interface to use. If left blank,
fall back to reading from configuration files.""",
choices=sorted(can.VALID_INTERFACES),
)
parser.add_argument(
"-b", "--bitrate", type=int, help="Bitrate to use for the CAN bus."
)
parser.add_argument("--fd", help="Activate CAN-FD support", action="store_true")
parser.add_argument(
"--data_bitrate",
type=int,
help="Bitrate to use for the data phase in case of CAN-FD.",
)
parser.add_argument(
"extra_args",
nargs=argparse.REMAINDER,
        help="The remaining arguments will be used for the interface and "
        "logger/player initialisation. "
        "For example, `-i vector -c 1 --app-name=MyCanApp` is the equivalent "
        "to opening the bus with `Bus('vector', channel=1, app_name='MyCanApp')`",
)
def _append_filter_argument(
parser: Union[
argparse.ArgumentParser,
argparse._ArgumentGroup,
],
*args: str,
**kwargs: Any,
) -> None:
"""Adds the ``filter`` option to an argument parser."""
parser.add_argument(
*args,
"--filter",
help="R|Space separated CAN filters for the given CAN interface:"
"\n <can_id>:<can_mask> (matches when <received_can_id> & mask =="
" can_id & mask)"
"\n <can_id>~<can_mask> (matches when <received_can_id> & mask !="
" can_id & mask)"
"\nFx to show only frames with ID 0x100 to 0x103 and 0x200 to 0x20F:"
"\n python -m can.viewer -f 100:7FC 200:7F0"
"\nNote that the ID and mask are always interpreted as hex values",
metavar="{<can_id>:<can_mask>,<can_id>~<can_mask>}",
nargs=argparse.ONE_OR_MORE,
default="",
**kwargs,
)
def _create_bus(parsed_args: Any, **kwargs: Any) -> can.BusABC:
logging_level_names = ["critical", "error", "warning", "info", "debug", "subdebug"]
can.set_logging_level(logging_level_names[min(5, parsed_args.verbosity)])
config: Dict[str, Any] = {"single_handle": True, **kwargs}
if parsed_args.interface:
config["interface"] = parsed_args.interface
if parsed_args.bitrate:
config["bitrate"] = parsed_args.bitrate
if parsed_args.fd:
config["fd"] = True
if parsed_args.data_bitrate:
config["data_bitrate"] = parsed_args.data_bitrate
return Bus(parsed_args.channel, **config)
def _parse_filters(parsed_args: Any) -> CanFilters:
can_filters: List[CanFilter] = []
if parsed_args.filter:
print(f"Adding filter(s): {parsed_args.filter}")
for filt in parsed_args.filter:
if ":" in filt:
parts = filt.split(":")
can_id = int(parts[0], base=16)
can_mask = int(parts[1], base=16)
elif "~" in filt:
parts = filt.split("~")
can_id = int(parts[0], base=16) | 0x20000000 # CAN_INV_FILTER
can_mask = int(parts[1], base=16) & 0x20000000 # socket.CAN_ERR_FLAG
else:
raise argparse.ArgumentError(None, "Invalid filter argument")
can_filters.append({"can_id": can_id, "can_mask": can_mask})
return can_filters
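# Hedged example of the parsing above: a parsed_args.filter of ["100:7FC"] yields
# [{"can_id": 0x100, "can_mask": 0x7FC}], while "100~7FC" additionally sets the
# CAN_INV_FILTER bit on can_id to mark the filter as inverted.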
def _parse_additional_config(
unknown_args: Sequence[str],
) -> Dict[str, Union[str, int, float, bool]]:
for arg in unknown_args:
if not re.match(r"^--[a-zA-Z\-]*?=\S*?$", arg):
raise ValueError(f"Parsing argument {arg} failed")
def _split_arg(_arg: str) -> Tuple[str, str]:
left, right = _arg.split("=", 1)
return left.lstrip("--").replace("-", "_"), right
args: Dict[str, Union[str, int, float, bool]] = {}
for key, string_val in map(_split_arg, unknown_args):
args[key] = cast_from_string(string_val)
return args
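# Hedged example: _parse_additional_config(["--app-name=MyCanApp", "--receive-own-messages=True"])
# is expected to return {"app_name": "MyCanApp", "receive_own_messages": True},
# assuming cast_from_string maps the string "True" to a boolean as its name suggests.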
def main() -> None:
parser = argparse.ArgumentParser(
description="Log CAN traffic, printing messages to stdout or to a "
"given file.",
)
METHOD_NAME(parser)
parser.add_argument(
"-f",
"--file_name",
dest="log_file",
help="Path and base log filename, for supported types see can.Logger.",
default=None,
)
parser.add_argument(
"-a",
"--append",
dest="append",
help="Append to the log file if it already exists.",
action="store_true",
)
parser.add_argument(
"-s",
"--file_size",
dest="file_size",
type=int,
help="Maximum file size in bytes. Rotate log file when size threshold "
"is reached. (The resulting file sizes will be consistent, but are not "
"guaranteed to be exactly what is specified here due to the rollover "
"conditions being logger implementation specific.)",
default=None,
)
parser.add_argument(
"-v",
action="count",
dest="verbosity",
help="""How much information do you want to see at the command line?
You can add several of these e.g., -vv is DEBUG""",
default=2,
)
_append_filter_argument(parser)
state_group = parser.add_mutually_exclusive_group(required=False)
state_group.add_argument(
"--active",
help="Start the bus as active, this is applied by default.",
action="store_true",
)
state_group.add_argument(
"--passive", help="Start the bus as passive.", action="store_true"
)
# print help message when no arguments were given
if len(sys.argv) < 2:
parser.print_help(sys.stderr)
raise SystemExit(errno.EINVAL)
results, unknown_args = parser.parse_known_args()
additional_config = _parse_additional_config([*results.extra_args, *unknown_args])
bus = _create_bus(results, can_filters=_parse_filters(results), **additional_config)
if results.active:
bus.state = BusState.ACTIVE
elif results.passive:
bus.state = BusState.PASSIVE
print(f"Connected to {bus.__class__.__name__}: {bus.channel_info}")
print(f"Can Logger (Started on {datetime.now()})")
logger: Union[MessageWriter, BaseRotatingLogger]
if results.file_size:
logger = SizedRotatingLogger(
base_filename=results.log_file,
max_bytes=results.file_size,
append=results.append,
**additional_config,
)
else:
logger = Logger(
filename=results.log_file,
append=results.append,
**additional_config,
)
try:
while True:
msg = bus.recv(1)
if msg is not None:
logger(msg)
except KeyboardInterrupt:
pass
finally:
bus.shutdown()
logger.stop()
if __name__ == "__main__":
main()
|
1,357 |
test uhf symm
|
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <[email protected]>
#
import unittest
import numpy
import scipy.linalg
from pyscf import lib
from pyscf import gto
from pyscf import scf
from pyscf import df
from pyscf.df import df_jk
def setUpModule():
global mol, symol
mol = gto.M(
verbose = 5,
output = '/dev/null',
atom = '''
O 0 0 0
H 0 -0.757 0.587
H 0 0.757 0.587''',
basis = 'cc-pvdz',
)
symol = gto.M(
verbose = 5,
output = '/dev/null',
atom = '''
O 0 0 0
H 0 -0.757 0.587
H 0 0.757 0.587''',
basis = 'cc-pvdz',
symmetry = 1,
)
def tearDownModule():
global mol, symol
mol.stdout.close()
symol.stdout.close()
del mol, symol
class KnownValues(unittest.TestCase):
def test_rhf(self):
mf = scf.density_fit(scf.RHF(mol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -76.025936299702536, 8)
self.assertTrue(mf._eri is None)
def test_uhf(self):
mf = scf.density_fit(scf.UHF(mol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -76.025936299702536, 8)
def test_uhf_cart(self):
pmol = mol.copy()
pmol.cart = True
mf = scf.density_fit(scf.UHF(pmol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -76.026760700636046, 8)
def test_rohf(self):
pmol = mol.copy()
pmol.charge = 1
pmol.spin = 1
pmol.build(False, False)
mf = scf.density_fit(scf.ROHF(pmol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -75.626515724371814, 8)
def test_dhf(self):
pmol = mol.copy()
pmol.build(False, False)
mf = scf.density_fit(scf.DHF(pmol), auxbasis='weigend')
mf.conv_tol = 1e-10
self.assertAlmostEqual(mf.scf(), -76.080738677021458, 8)
def test_rhf_symm(self):
mf = scf.density_fit(scf.RHF(symol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -76.025936299702536, 8)
def METHOD_NAME(self):
mf = scf.density_fit(scf.UHF(symol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -76.025936299702536, 8)
def test_rohf_symm(self):
pmol = mol.copy()
pmol.charge = 1
pmol.spin = 1
pmol.symmetry = 1
pmol.build(False, False)
mf = scf.density_fit(scf.ROHF(pmol), auxbasis='weigend')
self.assertAlmostEqual(mf.scf(), -75.626515724371814, 8)
def test_rhf_veff(self):
nao = mol.nao_nr()
numpy.random.seed(1)
dm = numpy.random.random((2,nao,nao))
mf = scf.density_fit(scf.RHF(mol), auxbasis='weigend')
vhf1 = mf.get_veff(mol, dm, hermi=0)
naux = mf._cderi.shape[0]
cderi = numpy.empty((naux,nao,nao))
for i in range(naux):
cderi[i] = lib.unpack_tril(mf._cderi[i])
vj0 = []
vk0 = []
for dmi in dm:
v1 = numpy.einsum('kij,ij->k', cderi, dmi)
vj0.append(numpy.einsum('kij,k->ij', cderi, v1))
v1 = numpy.einsum('pij,jk->pki', cderi, dmi.T)
vk0.append(numpy.einsum('pki,pkj->ij', cderi, v1))
vj1, vk1 = df_jk.get_jk(mf.with_df, dm, 0)
self.assertTrue(numpy.allclose(vj0, vj1))
self.assertTrue(numpy.allclose(numpy.array(vk0), vk1))
vhf0 = vj1 - vk1 * .5
self.assertTrue(numpy.allclose(vhf0, vhf1))
def test_uhf_veff(self):
mf = scf.density_fit(scf.UHF(mol), auxbasis='weigend')
nao = mol.nao_nr()
numpy.random.seed(1)
dm = numpy.random.random((2,4,nao,nao))
vhf = mf.get_veff(mol, dm, hermi=0)
self.assertAlmostEqual(numpy.linalg.norm(vhf), 413.82341595365853, 9)
def test_assign_cderi(self):
nao = mol.nao_nr()
w, u = scipy.linalg.eigh(mol.intor('int2e_sph', aosym='s4'))
idx = w > 1e-9
mf = scf.density_fit(scf.UHF(mol), auxbasis='weigend')
mf._cderi = (u[:,idx] * numpy.sqrt(w[idx])).T.copy()
self.assertAlmostEqual(mf.kernel(), -76.026765673110447, 8)
def test_nr_get_jk(self):
numpy.random.seed(1)
mf = scf.RHF(mol).density_fit(auxbasis='weigend')
nao = mol.nao_nr()
dms = numpy.random.random((2,nao,nao))
vj, vk = mf.get_jk(mol, dms, hermi=0)
self.assertAlmostEqual(lib.fp(vj), -194.15910890730066, 9)
self.assertAlmostEqual(lib.fp(vk), -46.365071587653517, 9)
vj = mf.get_j(mol, dms, hermi=0)
self.assertAlmostEqual(lib.fp(vj), -194.15910890730066, 9)
vk = mf.get_k(mol, dms, hermi=0)
self.assertAlmostEqual(lib.fp(vk), -46.365071587653517, 9)
mf.with_df = None
vj, vk = mf.get_jk(mol, dms, hermi=0)
self.assertAlmostEqual(lib.fp(vj), -194.08878302990749, 9)
self.assertAlmostEqual(lib.fp(vk), -46.530782983591152, 9)
vj = mf.get_j(mol, dms, hermi=0)
self.assertAlmostEqual(lib.fp(vj), -194.08878302990749, 9)
vk = mf.get_k(mol, dms, hermi=0)
self.assertAlmostEqual(lib.fp(vk), -46.530782983591152, 9)
def test_r_get_jk(self):
numpy.random.seed(1)
dfobj = df.df.DF4C(mol)
n2c = mol.nao_2c()
n4c = n2c * 2
dms = numpy.random.random((2,n4c,n4c))
vj, vk = dfobj.get_jk(dms, hermi=0)
self.assertAlmostEqual(lib.fp(vj), 12.961687328405461+55.686811159338134j, 9)
self.assertAlmostEqual(lib.fp(vk), 41.984238099875462+12.870888901217896j, 9)
def test_df_jk_density_fit(self):
mf = scf.RHF(mol).density_fit()
mf.with_df = None
mf = mf.density_fit()
self.assertTrue(mf.with_df is not None)
mf = mf.newton().density_fit(auxbasis='sto3g')
self.assertEqual(mf.with_df.auxbasis, 'sto3g')
def test_get_j(self):
numpy.random.seed(1)
nao = mol.nao_nr()
dms = numpy.random.random((2,nao,nao))
mf = scf.RHF(mol).density_fit(auxbasis='weigend')
vj0 = mf.get_j(mol, dms)
vj1 = mf.get_jk(mol, dms)[0]
self.assertAlmostEqual(abs(vj0-vj1).max(), 0, 12)
self.assertAlmostEqual(lib.fp(vj0), -194.15910890730052, 9)
if __name__ == "__main__":
print("Full Tests for df")
unittest.main()
|
1,358 |
stub request certificate
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Stub functions that are used by the AWS Certificate Manager (ACM) unit tests.
When tests are run against an actual AWS account, the stubber class does not
set up stubs and passes all calls through to the Boto3 client.
"""
from test_tools.example_stubber import ExampleStubber
class AcmStubber(ExampleStubber):
"""
A class that implements stub functions used by ACM unit tests.
The stubbed functions expect certain parameters to be passed to them as
part of the tests, and raise errors if the parameters are not as expected.
"""
def __init__(self, client, use_stubs=True):
"""
Initializes the object with a specific client and configures it for
stubbing or AWS passthrough.
:param client: A Boto3 ACM client.
:param use_stubs: When True, use stubs to intercept requests. Otherwise,
pass requests through to AWS.
"""
super().__init__(client, use_stubs)
def stub_describe_certificate(self, certificate_arn, certificate, error_code=None):
expected_params = {'CertificateArn': certificate_arn}
response = {'Certificate': certificate}
self._stub_bifurcator(
'describe_certificate', expected_params, response, error_code=error_code)
def stub_get_certificate(self, certificate_arn, cert_data, error_code=None):
expected_params = {'CertificateArn': certificate_arn}
response = cert_data
self._stub_bifurcator(
'get_certificate', expected_params, response, error_code=error_code)
def stub_list_certificates(
self, max_items, certificates, statuses=None, key_usage=None,
extended_key_usage=None, key_types=None, error_code=None):
expected_params = {'MaxItems': max_items}
if statuses is not None:
expected_params['CertificateStatuses'] = statuses
includes = {}
if key_usage is not None:
includes['keyUsage'] = key_usage
if extended_key_usage is not None:
includes['extendedKeyUsage'] = extended_key_usage
if key_types is not None:
includes['keyTypes'] = key_types
if includes:
expected_params['Includes'] = includes
response = {'CertificateSummaryList': certificates}
self._stub_bifurcator(
'list_certificates', expected_params, response, error_code=error_code)
def stub_import_certificate(
self, certificate, private_key, certificate_arn, error_code=None):
expected_params = {'Certificate': certificate, 'PrivateKey': private_key}
response = {'CertificateArn': certificate_arn}
self._stub_bifurcator(
'import_certificate', expected_params, response, error_code=error_code)
def stub_delete_certificate(self, certificate_arn, error_code=None):
expected_params = {'CertificateArn': certificate_arn}
response = {}
self._stub_bifurcator(
'delete_certificate', expected_params, response, error_code=error_code)
def stub_add_tags_to_certificate(self, certificate_arn, tags, error_code=None):
expected_params = {
'CertificateArn': certificate_arn,
'Tags': [{'Key': key, 'Value': value} for key, value in tags.items()]}
response = {}
self._stub_bifurcator(
'add_tags_to_certificate', expected_params, response, error_code=error_code)
def stub_list_tags_for_certificate(self, certificate_arn, tags, error_code=None):
expected_params = {'CertificateArn': certificate_arn}
response = {
'Tags': [{'Key': key, 'Value': value} for key, value in tags.items()]}
self._stub_bifurcator(
'list_tags_for_certificate', expected_params, response,
error_code=error_code)
def stub_remove_tags_from_certificate(self, certificate_arn, tags, error_code=None):
expected_params = {'CertificateArn': certificate_arn}
tag_list = []
for key, value in tags.items():
tag = {'Key': key}
if value is not None:
tag['Value'] = value
tag_list.append(tag)
if tag_list:
expected_params['Tags'] = tag_list
response = {}
self._stub_bifurcator(
'remove_tags_from_certificate', expected_params, response,
error_code=error_code)
def METHOD_NAME(
self, domain, alternate_domains, method, certificate_arn,
validation_domains=None, error_code=None):
expected_params = {
'DomainName': domain,
'ValidationMethod': method,
'SubjectAlternativeNames': alternate_domains}
if validation_domains is not None:
expected_params['DomainValidationOptions'] = [{
'DomainName': key,
'ValidationDomain': value
} for key, value in validation_domains.items()]
response = {'CertificateArn': certificate_arn}
self._stub_bifurcator(
'request_certificate', expected_params, response, error_code=error_code)
def stub_resend_validation_email(
self, certificate_arn, domain, validation_domain, error_code=None):
expected_params = {
'CertificateArn': certificate_arn,
'Domain': domain,
'ValidationDomain': validation_domain}
response = {}
self._stub_bifurcator(
'resend_validation_email', expected_params, response, error_code=error_code)
|
1,359 |
get state
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=C0111,R0903
"""Displays the state of a Space API endpoint
Space API is an API for hackspaces based on JSON. See spaceapi.io for
an example.
Requires the following libraries:
* requests
Parameters:
* spaceapi.url: String representation of the api endpoint
* spaceapi.format: Format string for the output
Format Strings:
* Format strings are indicated by double %%
* They represent a leaf in the JSON tree, layers separated by '.'
* Boolean values can be overwritten by appending '%true%false'
in the format string
* Example: to reference 'open' in '{'state':{'open': true}}'
      you would write '%%state.open%%'. If you also want
      to say 'Open/Closed' depending on the boolean, you
      would write '%%state.open%Open%Closed%%'
contributed by `rad4day <https://github.com/rad4day>`_ - many thanks!
"""
import requests
import threading
import re
import json
import core.module
import core.widget
import core.input
import core.decorators
def formatStringBuilder(s, json):
"""
Parses Format Strings
Parameter:
s -> format string
json -> the spaceapi response object
"""
identifiers = re.findall(r"%%.*?%%", s)
for i in identifiers:
ic = i[2:-2] # Discard %%
j = ic.split("%")
        # Either neither or both of the True/False overrides must be supplied
if len(j) != 3 and len(j) != 1:
return "INVALID FORMAT STRING"
if len(j) == 1: # no overwrite
s = s.replace(i, json[j[0]])
elif json[j[0]]: # overwrite for True
s = s.replace(i, j[1])
else: # overwrite for False
s = s.replace(i, j[2])
return s
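# Hedged example of the parser above: given the flattened response
# {"space": "Entropia", "state.open": True}, the format string
# " %%space%%: %%state.open%Open%Closed%%" is expected to render as " Entropia: Open"
# (the keys are the dotted paths produced by Module.__flatten below).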
class Module(core.module.Module):
@core.decorators.every(minutes=15)
def __init__(self, config, theme):
super().__init__(config, theme, core.widget.Widget(self.METHOD_NAME))
core.input.register(self, button=core.input.LEFT_MOUSE, cmd=self.__forceReload)
self.__data = {}
self.__error = None
self.__thread = None
# The URL representing the api endpoint
self.__url = self.parameter("url", default="http://club.entropia.de/spaceapi")
self._format = self.parameter(
"format", default=" %%space%%: %%state.open%Open%Closed%%"
)
def state(self, widget):
try:
if self.__error is not None:
return ["critical"]
elif self.__data["state.open"]:
return ["warning"]
else:
return []
except KeyError:
return ["critical"]
def update(self):
if not self.__thread or self.__thread.is_alive() == False:
self.__thread = threading.Thread(target=self.get_api_async, args=())
self.__thread.start()
def METHOD_NAME(self, widget):
text = self._format
if self.__error is not None:
text = self.__error
else:
try:
text = formatStringBuilder(self._format, self.__data)
except KeyError:
text = "KeyError"
return text
def get_api_async(self):
try:
with requests.get(self.__url, timeout=10) as request:
# Can't implement error handling for python2.7 if I use
# request.json() as it uses simplejson in newer versions
self.__data = self.__flatten(json.loads(request.text))
self.__error = None
except requests.exceptions.Timeout:
self.__error = "Timeout"
except requests.exceptions.HTTPError:
self.__error = "HTTP Error"
except ValueError:
self.__error = "Not a JSON response"
core.event.trigger("update", [self.id], redraw_only=True)
# left_mouse_button handler
def __forceReload(self, event):
if self.__thread:
self.__thread.raise_exception()
self.__error = "RELOADING"
core.event.trigger("update", [self.id], redraw_only=True)
# Flattens the JSON structure recursively, e.g. ['space']['open']
# becomes ['space.open']
def __flatten(self, json):
out = {}
for key in json:
value = json[key]
if type(value) is dict:
flattened_key = self.__flatten(value)
for fk in flattened_key:
out[key + "." + fk] = flattened_key[fk]
else:
out[key] = value
return out
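    # Hedged example: __flatten({"state": {"open": True}, "space": "Entropia"})
    # is expected to return {"state.open": True, "space": "Entropia"}.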
# Author: Tobias Manske <[email protected]>
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
1,360 |
pmap extended
|
"""Produces memory usage information"""
import gc
import objgraph
import os
import resource
import subprocess
import tempfile
import zlib
from StringIO import StringIO
from typing import Optional # noqa
from autopush.gcdump import Stat
from cffi import FFI
# cffi's API mode is preferable but it would assume jemalloc is always
# available (and we LD_PRELOAD it)
ffi = FFI()
ffi.cdef("""
int malloc_info(int options, FILE *stream);
void malloc_stats_print(void (*write_cb) (void *, const char *),
void *cbopaque, const char *opts);
""")
lib = ffi.dlopen(None)
def memusage(do_dump_rpy_heap=True, do_objgraph=True):
# type: (Optional[bool], Optional[bool]) -> str
"""Returning a str of memory usage stats"""
def trap_err(func, *args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e: # pragma: nocover
# include both __str/repr__, sometimes one's useless
buf.writelines([func.__name__, ': ', repr(e), ': ', str(e)])
buf = StringIO()
rusage = trap_err(resource.getrusage, resource.RUSAGE_SELF)
buf.writelines([repr(rusage), '\n\n'])
trap_err(METHOD_NAME, buf)
trap_err(jemalloc_stats, buf)
trap_err(glibc_malloc_info, buf)
if hasattr(gc, 'get_stats'):
buf.writelines(['\n\n', gc.get_stats(), '\n\n'])
if do_dump_rpy_heap:
# dump rpython's heap before objgraph potentially pollutes the
# heap with its heavy workload
trap_err(dump_rpy_heap, buf)
trap_err(get_stats_asmmemmgr, buf)
buf.write('\n\n')
if do_objgraph:
trap_err(objgraph.show_most_common_types, limit=0, file=buf)
return buf.getvalue()
def METHOD_NAME(stream):
"""Write pmap (w/ the most extended stats supported) to stream"""
pid = str(os.getpid())
# -XX/-X are recent linux only
ex_args = ['XX', 'X', 'x']
while True:
cmd = ['pmap', '-' + ex_args.pop(0), pid]
try:
pmap = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError: # pragma: nocover
if not ex_args:
raise
else:
stream.writelines([' '.join(cmd[:2]), '\n', pmap, '\n\n'])
break
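# Hedged note on the loop above: on a recent Linux this typically runs
# "pmap -XX <pid>" first, falling back to "pmap -X <pid>" and then "pmap -x <pid>"
# when the more detailed flags are not supported.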
def dump_rpy_heap(stream): # pragma: nocover
"""Write PyPy's gcdump to the specified stream"""
if not hasattr(gc, '_dump_rpy_heap'):
# not PyPy
return
with tempfile.NamedTemporaryFile('wb') as fp:
gc._dump_rpy_heap(fp.fileno())
try:
fpsize = os.stat(fp.name).st_size
except OSError:
pass
else:
stream.write("{} size: {}\n".format(fp.name, fpsize))
stat = Stat()
stat.summarize(fp.name, stream=None)
stat.load_typeids(zlib.decompress(gc.get_typeids_z()).split("\n"))
stream.write('\n\n')
stat.print_summary(stream)
def get_stats_asmmemmgr(stream): # pragma: nocover
"""Write PyPy's get_stats_asmmemmgr to the specified stream
(The raw memory currently used by the JIT backend)
"""
try:
import pypyjit
except ImportError:
# not PyPy or no jit?
return
stream.write('\n\nget_stats_asmmemmgr: ')
stream.write(repr(pypyjit.get_stats_asmmemmgr()))
stream.write('\n')
def glibc_malloc_info(stream):
"""Write glib malloc's malloc_info(3)"""
with tempfile.NamedTemporaryFile('wb+') as fp:
if not lib.malloc_info(0, fp.file):
fp.seek(0)
stream.writelines(fp.readlines())
def jemalloc_stats(stream): # pragma: nocover
"""Write jemalloc's malloc_stats_print()"""
try:
malloc_stats_print = lib.malloc_stats_print
except AttributeError:
# not using jemalloc
return
malloc_stats_print(_jemalloc_write_cb, ffi.new_handle(stream), ffi.NULL)
stream.write('\n')
@ffi.callback("void (*write_cb) (void *, const char *)")
def _jemalloc_write_cb(handle, msg): # pragma: nocover
"""Callback for jemalloc's malloc_stats_print
Writes to a Python stream passed via the cbopaque pointer
"""
stream = ffi.from_handle(handle)
stream.write(ffi.string(msg))
|
1,361 |
set widgets enabled
|
#!/usr/bin/env python
# --!-- coding: utf8 --!--
import json
import os
from PyQt5.QtCore import Qt, QPoint
from PyQt5.QtGui import QBrush, QColor, QIcon
from PyQt5.QtWidgets import QWidget, QStyle
from manuskript import exporter
from manuskript.functions import writablePath, openURL
from manuskript.ui.exporters.exporter_ui import Ui_exporter
from manuskript.ui.exporters.exportersManager import exportersManager
from manuskript.ui import style as S
class exporterDialog(QWidget, Ui_exporter):
def __init__(self, parent=None, mw=None):
QWidget.__init__(self, parent)
self.setupUi(self)
# Var
self.mw = mw
self.currentExporter = None
self.settingsWidget = None
self.previewWidget = None
self.populateExportList()
self.btnManageExporters.clicked.connect(self.openManager)
self.cmbExporters.currentIndexChanged.connect(self.updateUi)
self.cmbExporters.setCurrentIndex(1)
self.btnPreview.clicked.connect(self.preview)
self.btnExport.clicked.connect(self.export)
#FIXME: load last export format
def populateExportList(self):
# Populates list
self.cmbExporters.clear()
for E in exporter.exporters:
if not E.isValid() and not E.absentTip:
continue
self.cmbExporters.addItem(QIcon(E.icon), E.name)
self.cmbExporters.setItemData(self.cmbExporters.count() - 1, QBrush(QColor(S.highlightedTextDark)), Qt.ForegroundRole)
self.cmbExporters.setItemData(self.cmbExporters.count() - 1, QBrush(QColor(S.highlightLight)), Qt.BackgroundRole)
item = self.cmbExporters.model().item(self.cmbExporters.count() - 1)
item.setFlags(Qt.ItemIsEnabled)
if not E.isValid() and E.absentTip:
self.cmbExporters.addItem(self.style().standardIcon(QStyle.SP_MessageBoxWarning), E.absentTip, "::URL::" + E.absentURL)
continue
for f in E.exportTo:
if not f.isValid():
continue
name = f.name if f.implemented else self.tr("{} (not implemented yet)").format(f.name)
self.cmbExporters.addItem(QIcon.fromTheme(f.icon), name, E.name)
def updateUi(self, index):
# We check if we have an URL to open
data = self.cmbExporters.currentData()
if data and data[:7] == "::URL::" and data[7:]:
openURL(data[7:])
E, F = self.getSelectedExporter()
if not E or not F or not F.implemented:
self.METHOD_NAME(False)
return
self.METHOD_NAME(True)
self.grpSettings.setVisible(F.requires["Settings"])
self.grpPreview.setVisible(F.requires["Preview"])
self.btnPreview.setVisible(F.requires["Preview"])
if F.requires["Settings"]:
self.settingsWidget = F.settingsWidget()
self.setGroupWidget(self.grpSettings, self.settingsWidget)
if F.requires["Preview"]:
self.previewWidget = F.previewWidget()
self.setGroupWidget(self.grpPreview, self.previewWidget)
self.splitter.setStretchFactor(0, 3)
self.splitter.setStretchFactor(1, 6)
def preview(self):
E, F = self.getSelectedExporter()
if not E or not F or not F.implemented:
return
F.preview(self.settingsWidget, self.previewWidget)
def export(self):
E, F = self.getSelectedExporter()
if not E or not F or not F.implemented:
return
F.export(self.settingsWidget)
###################################################################################################################
# UI
###################################################################################################################
def getSelectedExporter(self):
name = self.cmbExporters.currentText()
exporterName = self.cmbExporters.currentData()
E = exporter.getExporterByName(exporterName)
if not E:
return None, None
F = E.getFormatByName(name)
if not F:
return E, F
return E, F
def METHOD_NAME(self, value):
"""One function to control them all. Enables or disables all groups."""
self.grpSettings.setEnabled(value)
self.grpPreview.setEnabled(value)
def openManager(self):
"""Open exporters manager dialog"""
self.dialog = exportersManager()
self.dialog.show()
r = self.dialog.geometry()
r2 = self.geometry()
self.dialog.move(r2.center() - QPoint(int(r.width()/2), int(r.height()/2)))
self.dialog.exportersMightHaveChanged.connect(self.populateExportList)
def setGroupWidget(self, group, widget):
"""Sets the given widget as main widget for QGroupBox group."""
# Removes every items from given layout.
l = group.layout()
while l.count():
item = l.itemAt(0)
l.removeItem(item)
item.widget().deleteLater()
l.addWidget(widget)
widget.setParent(group)
|
1,362 |
get model
|
########################################################################
# Ruijie B6510-48VS8CQ
#
# Module contains an implementation of SONiC Platform Base API and
# provides the Thermals' information which are available in the platform
#
########################################################################
try:
from sonic_platform_base.thermal_base import ThermalBase
from sonic_platform.regutil import Reg
from sonic_platform.logger import logger
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
class Thermal(ThermalBase):
def __init__(self, index, config=None, hal_thermal=None):
self.index = index
if config:
self.name = config.get("name")
self.__reg_low_threshold = Reg(config.get("low"))
self.__reg_high_thresnold = Reg(config.get("high"))
self.__reg_crit_low_threshold = Reg(config.get("crit_low"))
self.__reg_crit_high_thresnold = Reg(config.get("crit_high"))
self.__reg_temperature = Reg(config.get("temperature"))
self.minimum_thermal = self.get_temperature()
self.maximum_thermal = self.get_temperature()
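    # Hedged sketch of the expected config shape (keys inferred from the lookups above;
    # the Reg specs themselves are platform specific and assumed here):
    # {"name": "CPU temp", "temperature": <reg spec>, "low": <reg spec>, "high": <reg spec>,
    #  "crit_low": <reg spec>, "crit_high": <reg spec>}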
def get_name(self):
"""
Retrieves the name of the thermal
Returns:
string: The name of the thermal
"""
return self.name
def get_presence(self):
"""
Retrieves the presence of the thermal
Returns:
bool: True if thermal is present, False if not
"""
return True
def METHOD_NAME(self):
"""
Retrieves the model number (or part number) of the Thermal
Returns:
string: Model/part number of Thermal
"""
return "NA"
def get_serial(self):
"""
Retrieves the serial number of the Thermal
Returns:
string: Serial number of Thermal
"""
return "NA"
def get_status(self):
"""
Retrieves the operational status of the thermal
Returns:
A boolean value, True if thermal is operating properly,
False if not
"""
if self.get_temperature() == 0.0:
return False
return True
def get_temperature(self):
"""
Retrieves current temperature reading from thermal
Returns:
A float number of current temperature in Celsius up to nearest thousandth
of one degree Celsius, e.g. 30.125
"""
try:
if isinstance(self.__reg_temperature, Reg):
return self.__reg_temperature.decode()
except Exception as e:
logger.error(str(e))
return None
def get_high_threshold(self):
"""
Retrieves the high threshold temperature of thermal
Returns:
A float number, the high threshold temperature of thermal in Celsius
up to nearest thousandth of one degree Celsius, e.g. 30.125
"""
try:
if isinstance(self.__reg_high_thresnold, Reg):
return float(self.__reg_high_thresnold.decode())
except Exception as e:
logger.error(str(e))
return None
def get_low_threshold(self):
"""
Retrieves the low threshold temperature of thermal
Returns:
A float number, the low threshold temperature of thermal in Celsius
up to nearest thousandth of one degree Celsius, e.g. 30.125
"""
try:
if isinstance(self.__reg_low_threshold, Reg):
return float(self.__reg_low_threshold.decode())
except Exception as e:
logger.error(str(e))
return None
def set_high_threshold(self, temperature):
"""
Sets the high threshold temperature of thermal
Args :
temperature: A float number up to nearest thousandth of one degree Celsius,
e.g. 30.125
Returns:
A boolean, True if threshold is set successfully, False if not
"""
try:
if isinstance(self.__reg_high_thresnold, Reg):
temp_val = str(int(temperature * 1000))
return self.__reg_high_thresnold.encode(temp_val)
except Exception as e:
logger.error(str(e))
return False
def set_low_threshold(self, temperature):
"""
Sets the low threshold temperature of thermal
Args :
temperature: A float number up to nearest thousandth of one degree Celsius,
e.g. 30.125
Returns:
A boolean, True if threshold is set successfully, False if not
"""
# not supported
return False
def get_high_critical_threshold(self):
"""
Retrieves the high critical threshold temperature of thermal
Returns:
A float number, the high critical threshold temperature of thermal in Celsius
up to nearest thousandth of one degree Celsius, e.g. 30.125
"""
try:
if isinstance(self.__reg_crit_high_thresnold, Reg):
return float(self.__reg_crit_high_thresnold.decode())
except Exception as e:
logger.error(str(e))
return None
def get_low_critical_threshold(self):
"""
Retrieves the low critical threshold temperature of thermal
Returns:
A float number, the low critical threshold temperature of thermal in Celsius
up to nearest thousandth of one degree Celsius, e.g. 30.125
"""
try:
if isinstance(self.__reg_crit_low_threshold, Reg):
return float(self.__reg_crit_low_threshold.decode())
except Exception as e:
logger.error(str(e))
return None
def get_minimum_recorded(self):
"""
Retrieves the minimum recorded temperature of thermal
Returns:
A float number, the minimum recorded temperature of thermal in Celsius
up to nearest thousandth of one degree Celsius, e.g. 30.125
"""
tmp = self.get_temperature()
if tmp < self.minimum_thermal:
self.minimum_thermal = tmp
        return self.minimum_thermal
def get_maximum_recorded(self):
"""
Retrieves the maximum recorded temperature of thermal
Returns:
A float number, the maximum recorded temperature of thermal in Celsius
up to nearest thousandth of one degree Celsius, e.g. 30.125
"""
tmp = self.get_temperature()
if tmp > self.maximum_thermal:
self.maximum_thermal = tmp
        return self.maximum_thermal
|
1,363 |
test chemical constants package json export does
|
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2020, Caleb Bell <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import json
from math import *
import pytest
from chemicals import *
from chemicals.utils import hash_any_primitive
from fluids.numerics import *
from thermo import *
@pytest.mark.fuzz
@pytest.mark.slow
def test_ChemicalConstantsPackage_from_json_as_json_large():
create_compounds = []
for k in dippr_compounds():
try:
if search_chemical(k) is not None:
create_compounds.append(k)
except:
pass
obj = ChemicalConstantsPackage.constants_from_IDs(create_compounds)
obj2 = ChemicalConstantsPackage.from_json(json.loads(json.dumps(obj.as_json())))
assert hash(obj) == hash(obj2)
assert obj == obj2
assert id(obj) != id(obj2)
obj = ChemicalConstantsPackage.correlations_from_IDs(create_compounds)
obj2 = PropertyCorrelationsPackage.from_json(json.loads(json.dumps(obj.as_json())))
assert hash(obj) == hash(obj2)
assert obj == obj2
assert id(obj) != id(obj2)
assert obj != int
assert obj != float
def test_ChemicalConstantsPackage_json_version_exported():
constants = ChemicalConstantsPackage(MWs=[18.01528, 106.165], names=['water', 'm-xylene'])
string = json.dumps(constants.as_json())
c2 = ChemicalConstantsPackage.from_json(json.loads(string))
assert 'py/object' in string
assert 'json_version' in string
assert not hasattr(c2, 'json_version')
def METHOD_NAME():
# There was a nasty bug where the hashing function was changing its result
# every call
obj = ChemicalConstantsPackage.correlations_from_IDs(['hexane'])
hashes_orig = [hash_any_primitive(getattr(obj, k)) for k in obj.correlations]
copy = obj.as_json()
hashes_after = [hash_any_primitive(getattr(obj, k)) for k in obj.correlations]
assert hashes_orig == hashes_after
def test_ChemicalConstantsPackage_json_export_sane_recursion():
# It might be nice to do something about the duplicate EOSs, but they could be different
    # A different data structure for this would probably still be better.
obj = ChemicalConstantsPackage.correlations_from_IDs(['methane', 'ethane'])
assert 3 == json.dumps(obj.as_json()).count('VaporPressure')
def test_ChemicalConstantsPackage_json_export_same_output():
obj = ChemicalConstantsPackage.correlations_from_IDs(['hexane'])
obj2 = PropertyCorrelationsPackage.from_json(json.loads(json.dumps(obj.as_json())))
assert hash_any_primitive(obj.constants) == hash_any_primitive(obj2.constants)
for prop in obj.pure_correlations:
assert hash_any_primitive(getattr(obj, prop)) == hash_any_primitive(getattr(obj2, prop))
assert hash_any_primitive(obj.VaporPressures) == hash_any_primitive(obj2.VaporPressures)
assert hash_any_primitive(obj.ViscosityGases) == hash_any_primitive(obj2.ViscosityGases)
assert hash(obj.SurfaceTensionMixture) == hash(obj2.SurfaceTensionMixture)
assert hash(obj.VolumeGasMixture) == hash(obj2.VolumeGasMixture)
for prop in obj.mixture_correlations:
assert hash_any_primitive(getattr(obj, prop)) == hash_any_primitive(getattr(obj2, prop))
assert hash(obj) == hash(obj2)
assert obj == obj2
def test_ChemicalConstantsPackage_wrong_behaviors():
obj = ChemicalConstantsPackage.correlations_from_IDs(['7647-19-0'])
obj.VolumeLiquids[0].eos is None
assert obj != int
assert obj != float
def test_lemmon2000_package():
Ts = (150.0, 200.0, 300.0, 1000.0, 2000.0)
CoolProp_Cps = [29.030484473246823, 29.03511836728048, 29.103801681330573, 33.046833525551676, 36.210748112152906]
for T, Cp in zip(Ts, CoolProp_Cps):
assert_close(Cp, lemmon2000_correlations.HeatCapacityGases[0](T), rtol=2e-7)
def test_compound_index():
obj = ChemicalConstantsPackage(MWs=[18.01528, 106.165], names=['water', 'm-xylene'],
CASs=['7732-18-5', '108-38-3'],
InChI_Keys=['XLYOFNOQVPJJNP-UHFFFAOYSA-N', 'IVSZLXZYQVIEFR-UHFFFAOYSA-N'],
InChIs=['H2O/h1H2', 'C8H10/c1-7-4-3-5-8(2)6-7/h3-6H,1-2H3'],
smiless=['O', 'CC1=CC(=CC=C1)C'], PubChems=[962, 7929],)
assert 0 == obj.compound_index(name='water')
assert 1 == obj.compound_index(name='m-xylene')
assert 1 == obj.compound_index(PubChem=7929)
assert 0 == obj.compound_index(smiles='O')
assert 0 == obj.compound_index(CAS='7732-18-5')
assert 0 == obj.compound_index(InChI='H2O/h1H2')
assert 1 == obj.compound_index(InChI_Key='IVSZLXZYQVIEFR-UHFFFAOYSA-N')
def test_add_ChemicalConstantsPackage():
a = ChemicalConstantsPackage.constants_from_IDs(IDs=['water', 'hexane'])
b = ChemicalConstantsPackage.constants_from_IDs(IDs=['toluene'])
c = a + b
c_good = ChemicalConstantsPackage.constants_from_IDs(IDs=['water', 'hexane', 'toluene'])
assert c == c_good
def test_add_PropertyCorrelationsPackage():
a = ChemicalConstantsPackage.correlations_from_IDs(IDs=['water', 'hexane'])
b = ChemicalConstantsPackage.correlations_from_IDs(IDs=['toluene'])
c = a + b
c_good = ChemicalConstantsPackage.correlations_from_IDs(IDs=['water', 'hexane', 'toluene'])
assert c == c_good
|
1,364 |
before draw page
|
#!/usr/bin/env python3
import sys
import json
import html
from reportlab.lib.pagesizes import letter
from reportlab.platypus import Frame, Paragraph, Spacer, PageBreak,PageTemplate, BaseDocTemplate
from reportlab.platypus.tableofcontents import TableOfContents
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import cm
styles = getSampleStyleSheet()
text_colors = { "GREEN": "#00DB00", "RED": "#FF0000", "REDYELLOW": "#FFA500", "BLUE": "#0000FF",
"DARKGREY": "#5C5C5C", "YELLOW": "#ebeb21", "MAGENTA": "#FF00FF", "CYAN": "#00FFFF", "LIGHT_GREY": "#A6A6A6"}
# Required to automatically set Page Numbers
class PageTemplateWithCount(PageTemplate):
def __init__(self, id, frames, **kw):
PageTemplate.__init__(self, id, frames, **kw)
def METHOD_NAME(self, canvas, doc):
page_num = canvas.getPageNumber()
canvas.drawRightString(10.5*cm, 1*cm, str(page_num))
# Required to automatically set the Table of Contents
class MyDocTemplate(BaseDocTemplate):
def __init__(self, filename, **kw):
self.allowSplitting = 0
BaseDocTemplate.__init__(self, filename, **kw)
template = PageTemplateWithCount("normal", [Frame(2.5*cm, 2.5*cm, 15*cm, 25*cm, id='F1')])
self.addPageTemplates(template)
def afterFlowable(self, flowable):
if flowable.__class__.__name__ == "Paragraph":
text = flowable.getPlainText()
style = flowable.style.name
if style == "Heading1":
self.notify("TOCEntry", (0, text, self.page))
if style == "Heading2":
self.notify("TOCEntry", (1, text, self.page))
if style == "Heading3":
self.notify("TOCEntry", (2, text, self.page))
# Rough attempt at dynamically generating styles depending on depth
def get_level_styles(level):
global styles
    indent_value = 10 * (level - 1)
# Overriding some default stylings
level_styles = {
"title": ParagraphStyle(
**dict(styles[f"Heading{level}"].__dict__,
**{ "leftIndent": indent_value })),
"text": ParagraphStyle(
**dict(styles["Code"].__dict__,
**{ "backColor": "#F0F0F0",
"borderPadding": 5, "borderWidth": 1,
"borderColor": "black", "borderRadius": 5,
"leftIndent": 5 + indent_value})),
"info": ParagraphStyle(
**dict(styles["Italic"].__dict__,
**{ "leftIndent": indent_value })),
}
return level_styles
def get_colors_by_text(colors):
new_colors = {}
for (color, words) in colors.items():
for word in words:
new_colors[html.escape(word)] = color
return new_colors
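# Hedged example: get_colors_by_text({"RED": ["root"], "BLUE": ["/etc/passwd"]})
# is expected to return {"root": "RED", "/etc/passwd": "BLUE"}, with each word
# passed through html.escape first.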
def build_main_section(section, title, level=1):
styles = get_level_styles(level)
has_links = "infos" in section.keys() and len(section["infos"]) > 0
has_lines = "lines" in section.keys() and len(section["lines"]) > 1
has_children = "sections" in section.keys() and len(section["sections"].keys()) > 0
# Only display data for Sections with results
show_section = has_lines or has_children
elements = []
if show_section:
elements.append(Paragraph(title, style=styles["title"]))
# Print info if any
if show_section and has_links:
for info in section["infos"]:
words = info.split()
# Join all lines and encode any links that might be present.
words = map(lambda word: f'<a href="{word}" color="blue">{word}</a>' if "http" in word else word, words)
words = " ".join(words)
elements.append(Paragraph(words, style=styles["info"] ))
# Print lines if any
if "lines" in section.keys() and len(section["lines"]) > 1:
colors_by_line = list(map(lambda x: x["colors"], section["lines"]))
lines = list(map(lambda x: html.escape(x["clean_text"]), section["lines"]))
for (idx, line) in enumerate(lines):
colors = colors_by_line[idx]
colored_text = get_colors_by_text(colors)
colored_line = line
for (text, color) in colored_text.items():
if color == "REDYELLOW":
colored_line = colored_line.replace(text, f'<font color="{text_colors[color]}"><b>{text}</b></font>')
else:
colored_line = colored_line.replace(text, f'<font color="{text_colors[color]}">{text}</font>')
lines[idx] = colored_line
elements.append(Spacer(0, 10))
line = "<br/>".join(lines)
# If it's a top level entry remove the line break caused by an empty "clean_text"
if level == 1: line = line[5:]
elements.append(Paragraph(line, style=styles["text"]))
# Print child sections
if has_children:
for child_title in section["sections"].keys():
element_list = build_main_section(section["sections"][child_title], child_title, level + 1)
elements.extend(element_list)
# Add spacing at the end of section. The deeper the level the smaller the spacing.
if show_section:
elements.append(Spacer(1, 40 - (10 * level)))
return elements
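# Hedged sketch of the section shape build_main_section expects (keys inferred from
# the accesses above): {"infos": ["..."], "lines": [{"clean_text": "...",
# "colors": {"RED": ["word"]}}], "sections": {"Child title": {...}}}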
def main():
with open(JSON_PATH) as file:
# Read and parse JSON file
data = json.loads(file.read())
# Default pdf values
doc = MyDocTemplate(PDF_PATH)
toc = TableOfContents()
toc.levelStyles = [
ParagraphStyle(name = "Heading1", fontSize = 14, leading=16),
ParagraphStyle(name = "Heading2", fontSize = 12, leading=14, leftIndent = 10),
ParagraphStyle(name = "Heading3", fontSize = 10, leading=12, leftIndent = 20),
]
elements = [Paragraph("PEAS Report", style=styles["Title"]), Spacer(0, 30), toc, PageBreak()]
# Iterate over all top level sections and build their elements.
for title in data.keys():
element_list = build_main_section(data[title], title)
elements.extend(element_list)
doc.multiBuild(elements)
# Start execution
if __name__ == "__main__":
try:
JSON_PATH = sys.argv[1]
PDF_PATH = sys.argv[2]
except IndexError as err:
print("Error: Please pass the peas.json file and the path to save the pdf\njson2pdf.py <json_file> <pdf_file.pdf>")
sys.exit(1)
main()
|
1,365 |
absent strategy
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Scaleway Security Group management module
#
# Copyright (C) 2018 Antoine Barbare ([email protected]).
#
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: scaleway_security_group
short_description: Scaleway Security Group management module
author: Antoine Barbare (@abarbare)
description:
- "This module manages Security Group on Scaleway account U(https://developer.scaleway.com)."
extends_documentation_fragment:
- community.general.scaleway
- community.general.attributes
attributes:
check_mode:
support: full
diff_mode:
support: none
options:
state:
description:
- Indicate desired state of the Security Group.
type: str
choices: [ absent, present ]
default: present
organization:
description:
- Organization identifier.
type: str
required: true
region:
description:
- Scaleway region to use (for example V(par1)).
type: str
required: true
choices:
- ams1
- EMEA-NL-EVS
- par1
- EMEA-FR-PAR1
- par2
- EMEA-FR-PAR2
- waw1
- EMEA-PL-WAW1
name:
description:
- Name of the Security Group.
type: str
required: true
description:
description:
- Description of the Security Group.
type: str
stateful:
description:
- Create a stateful security group which allows established connections in and out.
type: bool
required: true
inbound_default_policy:
description:
- Default policy for incoming traffic.
type: str
choices: [ accept, drop ]
outbound_default_policy:
description:
      - Default policy for outgoing traffic.
type: str
choices: [ accept, drop ]
organization_default:
description:
- Create security group to be the default one.
type: bool
'''
EXAMPLES = '''
- name: Create a Security Group
community.general.scaleway_security_group:
state: present
region: par1
name: security_group
description: "my security group description"
organization: "43a3b6c8-916f-477b-b7ec-ff1898f5fdd9"
stateful: false
inbound_default_policy: accept
outbound_default_policy: accept
organization_default: false
register: security_group_creation_task
'''
RETURN = '''
data:
description: This is only present when O(state=present).
returned: when O(state=present)
type: dict
sample: {
"scaleway_security_group": {
"description": "my security group description",
"enable_default_security": true,
"id": "0168fb1f-cc46-4f69-b4be-c95d2a19bcae",
"inbound_default_policy": "accept",
"name": "security_group",
"organization": "43a3b6c8-916f-477b-b7ec-ff1898f5fdd9",
"organization_default": false,
"outbound_default_policy": "accept",
"servers": [],
"stateful": false
}
}
'''
from ansible_collections.community.general.plugins.module_utils.scaleway import SCALEWAY_LOCATION, scaleway_argument_spec, Scaleway
from ansible.module_utils.basic import AnsibleModule
from uuid import uuid4
def payload_from_security_group(security_group):
return dict(
(k, v)
for k, v in security_group.items()
if k != 'id' and v is not None
)
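# Hedged example: payload_from_security_group({'id': 'abc', 'name': 'sg', 'description': None})
# is expected to return {'name': 'sg'}, dropping the id and any unset fields.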
def present_strategy(api, security_group):
ret = {'changed': False}
response = api.get('security_groups')
if not response.ok:
api.module.fail_json(msg='Error getting security groups "%s": "%s" (%s)' % (response.info['msg'], response.json['message'], response.json))
security_group_lookup = dict((sg['name'], sg)
for sg in response.json['security_groups'])
if security_group['name'] not in security_group_lookup.keys():
ret['changed'] = True
if api.module.check_mode:
# Help user when check mode is enabled by defining id key
ret['scaleway_security_group'] = {'id': str(uuid4())}
return ret
# Create Security Group
response = api.post('/security_groups',
data=payload_from_security_group(security_group))
if not response.ok:
msg = 'Error during security group creation: "%s": "%s" (%s)' % (response.info['msg'], response.json['message'], response.json)
api.module.fail_json(msg=msg)
ret['scaleway_security_group'] = response.json['security_group']
else:
ret['scaleway_security_group'] = security_group_lookup[security_group['name']]
return ret
def METHOD_NAME(api, security_group):
response = api.get('security_groups')
ret = {'changed': False}
if not response.ok:
api.module.fail_json(msg='Error getting security groups "%s": "%s" (%s)' % (response.info['msg'], response.json['message'], response.json))
security_group_lookup = dict((sg['name'], sg)
for sg in response.json['security_groups'])
if security_group['name'] not in security_group_lookup.keys():
return ret
ret['changed'] = True
if api.module.check_mode:
return ret
response = api.delete('/security_groups/' + security_group_lookup[security_group['name']]['id'])
if not response.ok:
api.module.fail_json(msg='Error deleting security group "%s": "%s" (%s)' % (response.info['msg'], response.json['message'], response.json))
return ret
def core(module):
security_group = {
'organization': module.params['organization'],
'name': module.params['name'],
'description': module.params['description'],
'stateful': module.params['stateful'],
'inbound_default_policy': module.params['inbound_default_policy'],
'outbound_default_policy': module.params['outbound_default_policy'],
'organization_default': module.params['organization_default'],
}
region = module.params['region']
module.params['api_url'] = SCALEWAY_LOCATION[region]['api_endpoint']
api = Scaleway(module=module)
if module.params['state'] == 'present':
summary = present_strategy(api=api, security_group=security_group)
else:
summary = METHOD_NAME(api=api, security_group=security_group)
module.exit_json(**summary)
def main():
argument_spec = scaleway_argument_spec()
argument_spec.update(dict(
state=dict(type='str', default='present', choices=['absent', 'present']),
organization=dict(type='str', required=True),
name=dict(type='str', required=True),
description=dict(type='str'),
region=dict(type='str', required=True, choices=list(SCALEWAY_LOCATION.keys())),
stateful=dict(type='bool', required=True),
inbound_default_policy=dict(type='str', choices=['accept', 'drop']),
outbound_default_policy=dict(type='str', choices=['accept', 'drop']),
organization_default=dict(type='bool'),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[['stateful', True, ['inbound_default_policy', 'outbound_default_policy']]]
)
core(module)
if __name__ == '__main__':
main()
|
1,366 |
get addon version output
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetAddonVersionResult',
'AwaitableGetAddonVersionResult',
'get_addon_version',
'get_addon_version_output',
]
@pulumi.output_type
class GetAddonVersionResult:
"""
A collection of values returned by getAddonVersion.
"""
def __init__(__self__, addon_name=None, id=None, kubernetes_version=None, most_recent=None, version=None):
if addon_name and not isinstance(addon_name, str):
raise TypeError("Expected argument 'addon_name' to be a str")
pulumi.set(__self__, "addon_name", addon_name)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kubernetes_version and not isinstance(kubernetes_version, str):
raise TypeError("Expected argument 'kubernetes_version' to be a str")
pulumi.set(__self__, "kubernetes_version", kubernetes_version)
if most_recent and not isinstance(most_recent, bool):
raise TypeError("Expected argument 'most_recent' to be a bool")
pulumi.set(__self__, "most_recent", most_recent)
if version and not isinstance(version, str):
raise TypeError("Expected argument 'version' to be a str")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="addonName")
def addon_name(self) -> str:
return pulumi.get(self, "addon_name")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="kubernetesVersion")
def kubernetes_version(self) -> str:
return pulumi.get(self, "kubernetes_version")
@property
@pulumi.getter(name="mostRecent")
def most_recent(self) -> Optional[bool]:
return pulumi.get(self, "most_recent")
@property
@pulumi.getter
def version(self) -> str:
"""
Version of the EKS add-on.
"""
return pulumi.get(self, "version")
class AwaitableGetAddonVersionResult(GetAddonVersionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAddonVersionResult(
addon_name=self.addon_name,
id=self.id,
kubernetes_version=self.kubernetes_version,
most_recent=self.most_recent,
version=self.version)
def get_addon_version(addon_name: Optional[str] = None,
kubernetes_version: Optional[str] = None,
most_recent: Optional[bool] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAddonVersionResult:
"""
Retrieve information about a specific EKS add-on version compatible with an EKS cluster version.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
default_addon_version = aws.eks.get_addon_version(addon_name="vpc-cni",
kubernetes_version=aws_eks_cluster["example"]["version"])
latest_addon_version = aws.eks.get_addon_version(addon_name="vpc-cni",
kubernetes_version=aws_eks_cluster["example"]["version"],
most_recent=True)
vpc_cni = aws.eks.Addon("vpcCni",
cluster_name=aws_eks_cluster["example"]["name"],
addon_name="vpc-cni",
addon_version=latest_addon_version.version)
pulumi.export("default", default_addon_version.version)
pulumi.export("latest", latest_addon_version.version)
```
:param str addon_name: Name of the EKS add-on. The name must match one of
the names returned by [list-addon](https://docs.aws.amazon.com/cli/latest/reference/eks/list-addons.html).
:param str kubernetes_version: Version of the EKS Cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\\-_]+$`).
:param bool most_recent: Determines if the most recent or default version of the addon should be returned.
"""
__args__ = dict()
__args__['addonName'] = addon_name
__args__['kubernetesVersion'] = kubernetes_version
__args__['mostRecent'] = most_recent
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('aws:eks/getAddonVersion:getAddonVersion', __args__, opts=opts, typ=GetAddonVersionResult).value
return AwaitableGetAddonVersionResult(
addon_name=pulumi.get(__ret__, 'addon_name'),
id=pulumi.get(__ret__, 'id'),
kubernetes_version=pulumi.get(__ret__, 'kubernetes_version'),
most_recent=pulumi.get(__ret__, 'most_recent'),
version=pulumi.get(__ret__, 'version'))
@_utilities.lift_output_func(get_addon_version)
def METHOD_NAME(addon_name: Optional[pulumi.Input[str]] = None,
kubernetes_version: Optional[pulumi.Input[str]] = None,
most_recent: Optional[pulumi.Input[Optional[bool]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAddonVersionResult]:
"""
Retrieve information about a specific EKS add-on version compatible with an EKS cluster version.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
default_addon_version = aws.eks.get_addon_version(addon_name="vpc-cni",
kubernetes_version=aws_eks_cluster["example"]["version"])
latest_addon_version = aws.eks.get_addon_version(addon_name="vpc-cni",
kubernetes_version=aws_eks_cluster["example"]["version"],
most_recent=True)
vpc_cni = aws.eks.Addon("vpcCni",
cluster_name=aws_eks_cluster["example"]["name"],
addon_name="vpc-cni",
addon_version=latest_addon_version.version)
pulumi.export("default", default_addon_version.version)
pulumi.export("latest", latest_addon_version.version)
```
:param str addon_name: Name of the EKS add-on. The name must match one of
the names returned by [list-addon](https://docs.aws.amazon.com/cli/latest/reference/eks/list-addons.html).
:param str kubernetes_version: Version of the EKS Cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\\-_]+$`).
:param bool most_recent: Determines if the most recent or default version of the addon should be returned.
"""
...
|
1,367 |
wrapper
|
#
# Copyright (C) 2016 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Controllers and Forms for prefix details page"""
from django import forms
from django.db.utils import DatabaseError
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from django.views.decorators.http import require_POST
from IPy import IP
from nav.web import utils
from nav.models.manage import Prefix, Usage, PrefixUsage
from ..forms import SearchForm
### Forms
class PrefixSearchForm(SearchForm):
"""Searchform for prefixes"""
def __init__(self, *args, **kwargs):
super(PrefixSearchForm, self).__init__(
*args, form_action='prefix-index', placeholder='a.b.c.d/e', **kwargs
)
self.helper.form_id = 'prefix-search-form'
def clean_query(self):
"""Make sure it's something we can use"""
ip = self.cleaned_data['query']
try:
ip = IP(ip)
except ValueError as error:
raise forms.ValidationError(
('%(error)s'), params={'query': ip, 'error': error}, code='invalid'
)
return ip
class PrefixUsageForm(forms.ModelForm):
"""Form to select usages/tags for a prefix"""
usages = forms.ModelMultipleChoiceField(
queryset=Usage.objects.all(),
label='Add tags',
widget=forms.SelectMultiple(attrs={'class': 'select2'}),
)
def __init__(self, *args, **kwargs):
super(PrefixUsageForm, self).__init__(*args, **kwargs)
self.fields['usages'].help_text = ''
if self.instance.pk:
self.initial['usages'] = self.instance.usages.all()
class Meta(object):
model = Prefix
fields = ['usages']
### Helpers
def require_prefix_privilege(func):
"""Decorator for authorizing prefix edit actions"""
def METHOD_NAME(request, *args, **kwargs):
"""Decorator wrapper"""
if authorize_user(request):
return func(request, *args, **kwargs)
else:
return HttpResponse("User not authorized to edit prefixes", status=403)
return METHOD_NAME
def get_context(prefix=None):
"""Returns a context for a page with a possible prefix"""
navpath = [
('Home', '/'),
('Search', reverse('info-search')),
('Prefix', reverse('prefix-index')),
]
if prefix:
navpath.append((prefix.net_address,))
return {'prefix': prefix, 'navpath': navpath, 'title': utils.create_title(navpath)}
def get_query_results(query):
"""Returns the prefixes determined by the query"""
where_string = "inet '{}' >>= netaddr".format(IP(query))
return Prefix.objects.extra(where=[where_string], order_by=['net_address'])
def authorize_user(request):
"""Find out if this user can edit prefixes"""
return request.account.has_perm('web_access', reverse('seeddb-prefix'))
### Controllers
def index(request):
"""Presents user with search form for prefixes"""
context = get_context()
query = request.GET.get('query')
if query:
form = PrefixSearchForm(request.GET)
if form.is_valid():
context['query'] = form.cleaned_data['query']
context['query_results'] = get_query_results(query)
else:
form = PrefixSearchForm()
context['form'] = form
return render(request, 'info/prefix/base.html', context)
def prefix_details(request, prefix_id):
"""Controller for rendering prefix details"""
prefix = get_object_or_404(Prefix, pk=prefix_id)
context = get_context(prefix)
context['form'] = PrefixUsageForm(instance=prefix)
context['can_edit'] = authorize_user(request)
return render(request, 'info/prefix/details.html', context)
@require_POST
@require_prefix_privilege
def prefix_add_tags(request, prefix_id):
"""Adds usages to a prefix from post data"""
prefix = Prefix.objects.get(pk=prefix_id)
existing_usages = {u[0] for u in prefix.usages.values_list()}
usages = set(request.POST.getlist('usages'))
to_remove = list(existing_usages - usages)
to_add = list(usages - existing_usages)
PrefixUsage.objects.filter(prefix=prefix, usage__in=to_remove).delete()
for usage_key in to_add:
usage = Usage.objects.get(pk=usage_key)
try:
PrefixUsage(prefix=prefix, usage=usage).save()
except DatabaseError:
pass
return HttpResponse()
def prefix_reload_tags(request, prefix_id):
"""Render the tags fragment"""
return render(
request,
'info/prefix/frag_tags.html',
{'prefix': Prefix.objects.get(pk=prefix_id)},
)
|
1,368 |
set new range immediately
|
import typing as T
import random
import math
class UpdatableRandomRange:
def __init__(self, start: int, stop: int):
assert stop > start
self.min = start
self.METHOD_NAME(start, stop)
def set_new_max(self, new_max: int):
# do not replace RNG until old one is exhausted
assert new_max >= self.cur_max
self.cur_max = new_max
def set_new_range(self, new_min: int, new_max: int):
"""Update the range subject to constraints
There are two modes:
If you update the range by changing only the top value,
the generator will finish generating the first list before
expanding its scope.
So if you configured it with range(0,10) and then
range(0,20) you would get
shuffle(list(range(0,10)) + shuffle(list(range(10,20))
Not:
shuffle(list(range(0,10) + list(range(10,20))
If you update the range by changing both values, the previous
generator is just discarded, because you presumably don't
want those values anymore. The new bottom must be higher
than the old top. This preserves the rule that no value is
ever produced twice.
"""
if new_min == self.min:
self.set_new_max(new_max)
else:
assert new_min >= self.orig_max, (new_min, self.orig_max)
self.METHOD_NAME(new_min, new_max)
def METHOD_NAME(self, new_min: int, new_max: int):
assert new_max > new_min
self.min = new_min
self.orig_max = self.cur_max = new_max
self.num_generator = random_range(self.min, self.orig_max)
def __iter__(self):
return self
def __next__(self):
rv = next(self.num_generator, None)
if rv is not None:
return rv
if self.cur_max <= self.orig_max:
raise StopIteration()
self.min = self.orig_max
self.num_generator = random_range(self.min, self.cur_max)
self.orig_max = self.cur_max
return next(self.num_generator)
def random_range(start: int, stop: int) -> T.Generator[int, None, None]:
"""
Return a randomized "range" using a Linear Congruential Generator
to produce the number sequence. Parameters are the same as for
python builtin "range".
Memory -- storage for 8 integers, regardless of parameters.
Compute -- at most 2*"maximum" steps required to generate sequence.
Based on https://stackoverflow.com/a/53551417/113477
# Set a default values the same way "range" does.
"""
step = 1 # step is hard-coded to "1" because it seemed to be buggy
# and not important for our use-case
# Use a mapping to convert a standard range into the desired range.
def mapping(i):
return (i * step) + start
# Compute the number of numbers in this range.
maximum = (stop - start) // step
# Seed range with a random integer.
value = random.randint(0, maximum)
#
# Construct an offset, multiplier, and modulus for a linear
# congruential generator. These generators are cyclic and
# non-repeating when they maintain the properties:
#
# 1) "modulus" and "offset" are relatively prime.
# 2) ["multiplier" - 1] is divisible by all prime factors of "modulus".
# 3) ["multiplier" - 1] is divisible by 4 if "modulus" is divisible by 4.
#
offset = random.randint(0, maximum) * 2 + 1 # Pick a random odd-valued offset.
multiplier = (
4 * (maximum // 4) + 1
) # Pick a multiplier 1 greater than a multiple of 4.
modulus = int(
2 ** math.ceil(math.log2(maximum))
) # Pick a modulus just big enough to generate all numbers (power of 2).
# Track how many random numbers have been returned.
found = 0
while found < maximum:
# If this is a valid value, yield it in generator fashion.
if value < maximum:
found += 1
yield mapping(value)
# Calculate the next value in the sequence.
value = (value * multiplier + offset) % modulus
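# Illustrative usage sketch (an addition for clarity, not part of the original
# module): draw one full shuffled batch, then widen only the upper bound and
# keep drawing; per the docstring above, no value is ever produced twice.
if __name__ == "__main__":
    rng = UpdatableRandomRange(0, 10)
    first_batch = [next(rng) for _ in range(10)]    # a permutation of 0..9
    rng.set_new_range(0, 20)                        # only the top value changes
    second_batch = [next(rng) for _ in range(10)]   # a permutation of 10..19
    assert sorted(first_batch) == list(range(10))
    assert sorted(second_batch) == list(range(10, 20))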
|
1,369 |
cmake args
|
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack.package import *
class Casacore(CMakePackage):
"""A suite of c++ libraries for radio astronomy data processing."""
homepage = "https://github.com/casacore/casacore"
url = "https://github.com/casacore/casacore/archive/refs/tags/v3.5.0.tar.gz"
maintainers("mpokorny")
version("3.5.0", sha256="63f1c8eff932b0fcbd38c598a5811e6e5397b72835b637d6f426105a183b3f91")
version("3.4.0", sha256="31f02ad2e26f29bab4a47a2a69e049d7bc511084a0b8263360e6157356f92ae1")
version("3.3.0", sha256="3a714644b908ef6e81489b792cc9b80f6d8267a275e15d38a42a6a5137d39d3d")
version("3.2.0", sha256="ae5d3786cb6dfdd7ebc5eecc0c724ff02bbf6929720bc23be43a027978e79a5f")
version("3.1.2", sha256="ac94f4246412eb45d503f1019cabe2bb04e3861e1f3254b832d9b1164ea5f281")
version("3.1.1", sha256="85d2b17d856592fb206b17e0a344a29330650a4269c80b87f8abb3eaf3dadad4")
version("3.1.0", sha256="a6adf2d77ad0d6f32995b1e297fd88d31ded9c3e0bb8f28966d7b35a969f7897")
version("3.0.0", sha256="6f0e68fd77b5c96299f7583a03a53a90980ec347bff9dfb4c0abb0e2933e6bcb")
version("2.4.1", sha256="58eccc875053b2c6fe44fe53b6463030ef169597ec29926936f18d27b5087d63")
depends_on("[email protected]:", type="build")
variant("adios2", default=False, description="Build ADIOS2 support")
variant("dysco", default=True, when="@3.5.0:", description="Build Dysco storage manager")
variant("fftpack", default=False, description="Build FFTPack")
variant("hdf5", default=False, description="Build HDF5 support")
variant("mpi", default=False, description="Use MPI for parallel I/O")
variant("openmp", default=False, description="Build OpenMP support")
variant("python", default=False, description="Build python support")
variant("readline", default=True, description="Build readline support")
variant("shared", default=True, description="Build shared libraries")
variant("tablelocking", default=True, description="Enable table locking")
variant("threads", default=True, description="Use mutex thread synchronization")
# Force dependency on readline in v3.2 and earlier. Although the
# presence of readline is tested in CMakeLists.txt, and casacore
# can be built without it, there's no way to control that
# dependency at build time; since many systems come with readline,
# it's better to explicitly depend on it here always.
depends_on("readline", when="@:3.2.0")
depends_on("readline", when="+readline")
depends_on("flex", type="build")
depends_on("bison", type="build")
depends_on("blas")
depends_on("lapack")
depends_on("cfitsio")
depends_on("[email protected]:+cfitsio")
depends_on("[email protected]: precision=float,double", when="@3.4.0:")
depends_on("[email protected]: precision=float,double", when="~fftpack")
depends_on("sofa-c", type="test")
depends_on("hdf5", when="+hdf5")
depends_on("adios2+mpi", when="+adios2")
depends_on("mpi", when="+mpi")
depends_on("[email protected]:", when="+python")
depends_on("boost +python", when="+python")
depends_on("boost +system +filesystem", when="+dysco")
depends_on("py-numpy", when="+python")
depends_on("gsl", when="+dysco")
conflicts("~mpi", when="+adios2")
conflicts("+tablelocking", when="+mpi")
conflicts("~threads", when="+openmp")
def METHOD_NAME(self):
args = []
spec = self.spec
args.append(self.define_from_variant("BUILD_DYSCO", "dysco"))
args.append(self.define_from_variant("ENABLE_TABLELOCKING", "tablelocking"))
args.append(self.define_from_variant("ENABLE_SHARED", "shared"))
args.append(self.define_from_variant("USE_THREADS", "threads"))
args.append(self.define_from_variant("USE_OPENMP", "openmp"))
args.append(self.define_from_variant("USE_READLINE", "readline"))
args.append(self.define_from_variant("USE_HDF5", "hdf5"))
args.append(self.define_from_variant("USE_ADIOS2", "adios2"))
args.append(self.define_from_variant("USE_MPI", "mpi"))
args.append("-DPORTABLE=ON") # let Spack determine arch build flags
# fftw3 is required by casacore starting with v3.4.0, but the
# old fftpack is still available. For v3.4.0 and later, we
# always require FFTW3 dependency with the optional addition
# of FFTPack. In older casacore versions, only one of FFTW3 or
# FFTPack can be selected.
        if spec.satisfies("@3.4.0:"):
            if spec.satisfies("+fftpack"):
                args.append("-DBUILD_FFTPACK_DEPRECATED=YES")
            args.append(self.define("USE_FFTW3", True))
        else:
            args.append(self.define("USE_FFTW3", spec.satisfies("~fftpack")))
# Python2 and Python3 binding
if spec.satisfies("~python"):
args.extend(["-DBUILD_PYTHON=NO", "-DBUILD_PYTHON3=NO"])
        elif spec.satisfies("^python@3:"):
args.extend(["-DBUILD_PYTHON=NO", "-DBUILD_PYTHON3=YES"])
else:
args.extend(["-DBUILD_PYTHON=YES", "-DBUILD_PYTHON3=NO"])
args.append("-DBUILD_TESTING=OFF")
return args
def patch(self):
# Rely on CMake ability to find hdf5, available since CMake 3.7.X
os.remove("cmake/FindHDF5.cmake")
|
1,370 |
test permutate qubit dm
|
import numpy as np
from qulacs import DensityMatrix, StateVector
from qulacs.state import partial_trace, permutate_qubit, tensor_product
class TestDensityMatrixHandling:
def test_density_matrix(self) -> None:
num_qubit = 5
sv = StateVector(num_qubit)
dm = DensityMatrix(num_qubit)
sv.set_Haar_random_state(seed=0)
dm.load(sv)
svv = np.atleast_2d(sv.get_vector()).T
mat = np.dot(svv, svv.T.conj())
assert np.allclose(dm.get_matrix(), mat), "check pure matrix to density matrix"
def test_tensor_product_sv(self) -> None:
num_qubit = 4
sv1 = StateVector(num_qubit)
sv2 = StateVector(num_qubit)
sv1.set_Haar_random_state(seed=0)
sv2.set_Haar_random_state(seed=1)
sv3 = tensor_product(sv1, sv2)
sv3_test = np.kron(sv1.get_vector(), sv2.get_vector())
assert np.allclose(
sv3_test, sv3.get_vector()
), "check pure state tensor product"
del sv1
del sv2
del sv3
def test_tensor_product_dm(self) -> None:
num_qubit = 4
dm1 = DensityMatrix(num_qubit)
dm2 = DensityMatrix(num_qubit)
dm1.set_Haar_random_state(seed=0)
dm2.set_Haar_random_state(seed=1)
dm3 = tensor_product(dm1, dm2)
dm3_test = np.kron(dm1.get_matrix(), dm2.get_matrix())
assert np.allclose(
dm3_test, dm3.get_matrix()
), "check density matrix tensor product"
del dm1
del dm2
del dm3
def test_tensor_product_different_size_sv(self) -> None:
num_qubit = 4
sv1 = StateVector(num_qubit)
sv2 = StateVector(num_qubit + 1)
sv1.set_Haar_random_state(seed=0)
sv2.set_Haar_random_state(seed=1)
sv3 = tensor_product(sv1, sv2)
sv3_test = np.kron(sv1.get_vector(), sv2.get_vector())
assert np.allclose(
sv3_test, sv3.get_vector()
), "check pure state tensor product"
del sv1
del sv2
del sv3
def test_tensor_product_different_size_dm(self) -> None:
num_qubit = 4
dm1 = DensityMatrix(num_qubit)
dm2 = DensityMatrix(num_qubit + 1)
dm1.set_Haar_random_state(seed=0)
dm2.set_Haar_random_state(seed=1)
dm3 = tensor_product(dm1, dm2)
dm3_test = np.kron(dm1.get_matrix(), dm2.get_matrix())
assert np.allclose(
dm3_test, dm3.get_matrix()
), "check density matrix tensor product"
del dm1
del dm2
del dm3
def test_permutate_qubit_sv(self) -> None:
num_qubit = 8
sv = StateVector(num_qubit)
sv.set_Haar_random_state(seed=0)
order = np.arange(num_qubit)
np.random.shuffle(order)
arr = []
for ind in range(2**num_qubit):
s = format(ind, "0{}b".format(num_qubit))
s = np.array(list(s[::-1])) # type: ignore
v = np.array(["*"] * num_qubit)
for ind in range(len(s)):
v[order[ind]] = s[ind]
s = ("".join(v))[::-1]
arr.append(int(s, 2))
sv_perm = permutate_qubit(sv, order) # type: ignore
assert np.allclose(
sv.get_vector()[arr], sv_perm.get_vector()
), "check pure state permutation"
del sv_perm
del sv
def METHOD_NAME(self) -> None:
num_qubit = 3
dm = DensityMatrix(num_qubit)
dm.set_Haar_random_state(seed=0)
order = np.arange(num_qubit)
np.random.shuffle(order)
arr = []
for ind in range(2**num_qubit):
s = format(ind, "0{}b".format(num_qubit))
s = np.array(list(s[::-1])) # type: ignore
v = np.array(["*"] * num_qubit)
for ind in range(len(s)):
v[order[ind]] = s[ind]
s = ("".join(v))[::-1]
arr.append(int(s, 2))
dm_perm = permutate_qubit(dm, order) # type: ignore
dm_perm_test = dm.get_matrix()
dm_perm_test = dm_perm_test[arr, :]
dm_perm_test = dm_perm_test[:, arr]
assert np.allclose(
dm_perm_test, dm_perm.get_matrix()
), "check density matrix permutation"
del dm_perm
del dm
def test_partial_trace_dm(self) -> None:
num_qubit = 5
num_traceout = 2
dm = DensityMatrix(num_qubit)
dm.set_Haar_random_state(seed=0)
mat = dm.get_matrix()
target = np.arange(num_qubit)
np.random.shuffle(target)
target = target[:num_traceout]
target_cor = [num_qubit - 1 - i for i in target]
target_cor.sort()
dmt = mat.reshape([2, 2] * num_qubit)
for cnt, val in enumerate(target_cor):
ofs = num_qubit - cnt
dmt = np.trace(dmt, axis1=val - cnt, axis2=ofs + val - cnt)
dmt = dmt.reshape(
[2 ** (num_qubit - num_traceout), 2 ** (num_qubit - num_traceout)]
)
pdm = partial_trace(dm, target) # type: ignore
assert np.allclose(pdm.get_matrix(), dmt), "check density matrix partial trace"
del dm, pdm
def test_partial_trace_sv(self) -> None:
num_qubit = 6
num_traceout = 4
sv = StateVector(num_qubit)
sv.set_Haar_random_state(seed=0)
svv = np.atleast_2d(sv.get_vector()).T
mat = np.dot(svv, svv.T.conj())
target = np.arange(num_qubit)
np.random.shuffle(target)
target = target[:num_traceout]
target_cor = [num_qubit - 1 - i for i in target]
target_cor.sort()
dmt = mat.reshape([2, 2] * num_qubit)
for cnt, val in enumerate(target_cor):
ofs = num_qubit - cnt
dmt = np.trace(dmt, axis1=val - cnt, axis2=ofs + val - cnt)
dmt = dmt.reshape(
[2 ** (num_qubit - num_traceout), 2 ** (num_qubit - num_traceout)]
)
pdm = partial_trace(sv, target) # type: ignore
assert np.allclose(pdm.get_matrix(), dmt), "check pure state partial trace"
|
1,371 |
format
|
#!/usr/bin/env python3
import multiprocessing
import os
import re
import shutil
import subprocess
from flipper.app import App
SOURCE_CODE_FILE_EXTENSIONS = [".h", ".c", ".cpp", ".cxx", ".hpp"]
SOURCE_CODE_FILE_PATTERN = r"^[0-9A-Za-z_]+\.[a-z]+$"
SOURCE_CODE_DIR_PATTERN = r"^[0-9A-Za-z_]+$"
class Main(App):
def init(self):
self.subparsers = self.parser.add_subparsers(help="sub-command help")
# generate
self.parser_check = self.subparsers.add_parser(
"check", help="Check source code format and file names"
)
self.parser_check.add_argument("input", nargs="+")
self.parser_check.set_defaults(func=self.check)
# merge
self.parser_format = self.subparsers.add_parser(
"format", help="Format source code and fix file names"
)
self.parser_format.add_argument(
"input",
nargs="+",
)
self.parser_format.set_defaults(func=self.METHOD_NAME)
@staticmethod
def _filter_lint_directories(dirnames: list[str]):
# Skipping 3rd-party code - usually resides in subfolder "lib"
if "lib" in dirnames:
dirnames.remove("lib")
# Skipping hidden folders
for dirname in dirnames.copy():
if dirname.startswith("."):
dirnames.remove(dirname)
def _check_folders(self, folders: list):
show_message = False
pattern = re.compile(SOURCE_CODE_DIR_PATTERN)
for folder in folders:
for dirpath, dirnames, filenames in os.walk(folder):
self._filter_lint_directories(dirnames)
for dirname in dirnames:
if not pattern.match(dirname):
to_fix = os.path.join(dirpath, dirname)
self.logger.warning(f"Found incorrectly named folder {to_fix}")
show_message = True
if show_message:
self.logger.warning(
"Folders are not renamed automatically, please fix it by yourself"
)
def _find_sources(self, folders: list):
output = []
for folder in folders:
for dirpath, dirnames, filenames in os.walk(folder):
self._filter_lint_directories(dirnames)
for filename in filenames:
ext = os.path.splitext(filename.lower())[1]
if ext not in SOURCE_CODE_FILE_EXTENSIONS:
continue
output.append(os.path.join(dirpath, filename))
return output
@staticmethod
def _format_source(task):
try:
subprocess.check_call(task)
return True
except subprocess.CalledProcessError:
return False
def _format_sources(self, sources: list, dry_run: bool = False):
args = ["clang-format", "--Werror", "--style=file", "-i"]
if dry_run:
args.append("--dry-run")
files_per_task = 69
tasks = []
while len(sources) > 0:
tasks.append(args + sources[:files_per_task])
sources = sources[files_per_task:]
pool = multiprocessing.Pool()
results = pool.map(self._format_source, tasks)
return all(results)
def _fix_filename(self, filename: str):
return filename.replace("-", "_")
def _replace_occurrence(self, sources: list, old: str, new: str):
old = old.encode()
new = new.encode()
for source in sources:
content = open(source, "rb").read()
if content.count(old) > 0:
self.logger.info(f"Replacing {old} with {new} in {source}")
content = content.replace(old, new)
open(source, "wb").write(content)
def _apply_file_naming_convention(self, sources: list, dry_run: bool = False):
pattern = re.compile(SOURCE_CODE_FILE_PATTERN)
good = []
bad = []
# Check sources for invalid filenames
for source in sources:
basename = os.path.basename(source)
if not pattern.match(basename):
new_basename = self._fix_filename(basename)
if not pattern.match(new_basename):
self.logger.error(f"Unable to fix name for {basename}")
return False
bad.append((source, basename, new_basename))
else:
good.append(source)
# Notify about errors or replace all occurrences
if dry_run:
if len(bad) > 0:
self.logger.error(f"Found {len(bad)} incorrectly named files")
self.logger.info(bad)
return False
else:
# Replace occurrences in text files
for source, old, new in bad:
self._replace_occurrence(sources, old, new)
# Rename files
for source, old, new in bad:
shutil.move(source, source.replace(old, new))
return True
def _apply_file_permissions(self, sources: list, dry_run: bool = False):
execute_permissions = 0o111
re.compile(SOURCE_CODE_FILE_PATTERN)
good = []
bad = []
# Check sources for unexpected execute permissions
for source in sources:
st = os.stat(source)
perms_too_many = st.st_mode & execute_permissions
if perms_too_many:
good_perms = st.st_mode & ~perms_too_many
bad.append((source, oct(perms_too_many), good_perms))
else:
good.append(source)
# Notify or fix
if dry_run:
if len(bad) > 0:
self.logger.error(f"Found {len(bad)} incorrect permissions")
self.logger.info([record[0:2] for record in bad])
return False
else:
for source, perms_too_many, new_perms in bad:
os.chmod(source, new_perms)
return True
def _perform(self, dry_run: bool):
result = 0
sources = self._find_sources(self.args.input)
if not self._format_sources(sources, dry_run=dry_run):
result |= 0b001
if not self._apply_file_naming_convention(sources, dry_run=dry_run):
result |= 0b010
if not self._apply_file_permissions(sources, dry_run=dry_run):
result |= 0b100
self._check_folders(self.args.input)
return result
def check(self):
return self._perform(dry_run=True)
def METHOD_NAME(self):
return self._perform(dry_run=False)
if __name__ == "__main__":
Main()()
|
1,372 |
on getaddr
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
A node should never send anything other than VERSION/VERACK/REJECT until it's
received a VERACK.
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't.
"""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
banscore = 10
class CLazyNode(P2PInterface):
def __init__(self):
super().__init__()
self.unexpected_msg = False
self.ever_connected = False
def bad_message(self, message):
self.unexpected_msg = True
self.log.info("should not have received message: %s" % message.command)
def on_open(self):
self.ever_connected = True
def on_version(self, message): self.bad_message(message)
def on_verack(self, message): self.bad_message(message)
def on_reject(self, message): self.bad_message(message)
def on_inv(self, message): self.bad_message(message)
def on_addr(self, message): self.bad_message(message)
def on_getdata(self, message): self.bad_message(message)
def on_getblocks(self, message): self.bad_message(message)
def on_tx(self, message): self.bad_message(message)
def on_block(self, message): self.bad_message(message)
def METHOD_NAME(self, message): self.bad_message(message)
def on_headers(self, message): self.bad_message(message)
def on_getheaders(self, message): self.bad_message(message)
def on_ping(self, message): self.bad_message(message)
def on_mempool(self, message): self.bad_message(message)
def on_pong(self, message): self.bad_message(message)
def on_sendheaders(self, message): self.bad_message(message)
def on_sendcmpct(self, message): self.bad_message(message)
def on_cmpctblock(self, message): self.bad_message(message)
def on_getblocktxn(self, message): self.bad_message(message)
def on_blocktxn(self, message): self.bad_message(message)
# Node that never sends a version. We'll use this to send a bunch of messages
# anyway, and eventually get disconnected.
class CNodeNoVersionBan(CLazyNode):
# send a bunch of veracks without sending a message. This should get us disconnected.
# NOTE: implementation-specific check here. Remove if raptoreumd ban behavior changes
def on_open(self):
super().on_open()
for i in range(banscore):
self.send_message(msg_verack())
def on_reject(self, message): pass
# Node that never sends a version. This one just sits idle and hopes to receive
# any message (it shouldn't!)
class CNodeNoVersionIdle(CLazyNode):
def __init__(self):
super().__init__()
# Node that sends a version but not a verack.
class CNodeNoVerackIdle(CLazyNode):
def __init__(self):
self.version_received = False
super().__init__()
def on_reject(self, message): pass
def on_verack(self, message): pass
# When version is received, don't reply with a verack. Instead, see if the
# node will give us a message that it shouldn't. This is not an exhaustive
# list!
def on_version(self, message):
self.version_received = True
self.send_message(msg_ping())
self.send_message(msg_getaddr())
class P2PLeakTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-banscore='+str(banscore)]]
def run_test(self):
no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False)
no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False)
no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle())
network_thread_start()
wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock)
wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
# Mine a block and make sure that it's not sent to the connected nodes
self.nodes[0].generate(1)
#Give the node enough time to possibly leak out a message
time.sleep(5)
#This node should have been banned
assert not no_version_bannode.is_connected
self.nodes[0].disconnect_p2ps()
# Wait until all connections are closed
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
# Make sure no unexpected messages came in
assert(no_version_bannode.unexpected_msg == False)
assert(no_version_idlenode.unexpected_msg == False)
assert(no_verack_idlenode.unexpected_msg == False)
if __name__ == '__main__':
P2PLeakTest().main()
|
1,373 |
prior realization
|
r"""
04. Bayesian Inversion
======================
This tutorial focuses on Bayesian inversion, a special type of inverse problem
that aims at incorporating prior information in terms of model and data
probabilities in the inversion process.
In this case we will be dealing with the same problem that we discussed in
:ref:`sphx_glr_tutorials_solvers.py`, but instead of defining ad-hoc
regularization or preconditioning terms we parametrize and model our input
signal in the frequency domain in a probabilistic fashion: the central
frequency, amplitude and phase of the three sinusoids have gaussian
distributions as follows:
.. math::
X(f) = \sum_{i=1}^3 a_i e^{j \phi_i} \delta(f - f_i)
where :math:`f_i \sim N(f_{0,i}, \sigma_{f,i})`,
:math:`a_i \sim N(a_{0,i}, \sigma_{a,i})`, and
:math:`\phi_i \sim N(\phi_{0,i}, \sigma_{\phi,i})`.
Based on the above definition, we construct some prior models in the frequency
domain, convert each of them to the time domain and use such an ensemble
to estimate the prior mean :math:`\mu_\mathbf{x}` and model
covariance :math:`\mathbf{C_x}`.
We then create our data by sampling the true signal at certain locations
and solve the reconstruction problem within a Bayesian framework. Since we are
assuming gaussianity in our priors, the equation to obtain the posterior mean
can be derived analytically:
.. math::
\mathbf{x} = \mathbf{x_0} + \mathbf{C}_x \mathbf{R}^T
(\mathbf{R} \mathbf{C}_x \mathbf{R}^T + \mathbf{C}_y)^{-1} (\mathbf{y} -
\mathbf{R} \mathbf{x_0})
"""
import matplotlib.pyplot as plt
# sphinx_gallery_thumbnail_number = 2
import numpy as np
from scipy.sparse.linalg import lsqr
import pylops
plt.close("all")
np.random.seed(10)
###############################################################################
# Let's start by creating our true model and prior realizations
def METHOD_NAME(f0, a0, phi0, sigmaf, sigmaa, sigmaphi, dt, nt, nfft):
"""Create realization from prior mean and std for amplitude, frequency and
phase
"""
f = np.fft.rfftfreq(nfft, dt)
df = f[1] - f[0]
ifreqs = [int(np.random.normal(f, sigma) / df) for f, sigma in zip(f0, sigmaf)]
amps = [np.random.normal(a, sigma) for a, sigma in zip(a0, sigmaa)]
phis = [np.random.normal(phi, sigma) for phi, sigma in zip(phi0, sigmaphi)]
# input signal in frequency domain
X = np.zeros(nfft // 2 + 1, dtype="complex128")
X[ifreqs] = (
np.array(amps).squeeze() * np.exp(1j * np.deg2rad(np.array(phis))).squeeze()
)
# input signal in time domain
FFTop = pylops.signalprocessing.FFT(nt, nfft=nfft, real=True)
x = FFTop.H * X
return x
# Priors
nreals = 100
f0 = [5, 3, 8]
sigmaf = [0.5, 1.0, 0.6]
a0 = [1.0, 1.0, 1.0]
sigmaa = [0.1, 0.5, 0.6]
phi0 = [-90.0, 0.0, 0.0]
sigmaphi = [0.1, 0.2, 0.4]
sigmad = 1e-2
# Prior models
nt = 200
nfft = 2**11
dt = 0.004
t = np.arange(nt) * dt
xs = np.array(
[
METHOD_NAME(f0, a0, phi0, sigmaf, sigmaa, sigmaphi, dt, nt, nfft)
for _ in range(nreals)
]
)
# True model (taken as one possible realization)
x = METHOD_NAME(f0, a0, phi0, [0, 0, 0], [0, 0, 0], [0, 0, 0], dt, nt, nfft)
###############################################################################
# We have now a set of prior models in time domain. We can easily use sample
# statistics to estimate the prior mean and covariance. For the covariance, we
# perform a second step where we average values around the main
# diagonal for each row and find a smooth, compact filter that we use to
# define a convolution linear operator that mimics the action of the covariance
# matrix on a vector
x0 = np.average(xs, axis=0)
Cm = ((xs - x0).T @ (xs - x0)) / nreals
N = 30  # length of decorrelation
diags = np.array([Cm[i, i - N : i + N + 1] for i in range(N, nt - N)])
diag_ave = np.average(diags, axis=0)
# add a taper at the end to avoid edge effects
diag_ave *= np.hamming(2 * N + 1)
fig, ax = plt.subplots(1, 1, figsize=(12, 4))
ax.plot(t, xs.T, "r", lw=1)
ax.plot(t, x0, "g", lw=4)
ax.plot(t, x, "k", lw=4)
ax.set_title("Prior realizations and mean")
ax.set_xlim(0, 0.8)
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
im = ax1.imshow(
Cm, interpolation="nearest", cmap="seismic", extent=(t[0], t[-1], t[-1], t[0])
)
ax1.set_title(r"$\mathbf{C}_m^{prior}$")
ax1.axis("tight")
ax2.plot(np.arange(-N, N + 1) * dt, diags.T, "--r", lw=1)
ax2.plot(np.arange(-N, N + 1) * dt, diag_ave, "k", lw=4)
ax2.set_title("Averaged covariance 'filter'")
plt.tight_layout()
###############################################################################
# Let's define now the sampling operator as well as create our covariance
# matrices in terms of linear operators. This may not be strictly necessary
# here but shows how even Bayesian-type of inversion can very easily scale to
# large model and data spaces.
# Sampling operator
perc_subsampling = 0.2
ntsub = int(np.round(nt * perc_subsampling))
iava = np.sort(np.random.permutation(np.arange(nt))[:ntsub])
iava[-1] = nt - 1 # assume we have the last sample to avoid instability
Rop = pylops.Restriction(nt, iava, dtype="float64")
# Covariance operators
Cm_op = pylops.signalprocessing.Convolve1D(nt, diag_ave, offset=N)
Cd_op = sigmad**2 * pylops.Identity(ntsub)
###############################################################################
# We model now our data and add noise that respects our prior definition
n = np.random.normal(0, sigmad, nt)
y = Rop * x
yn = Rop * (x + n)
ymask = Rop.mask(x)
ynmask = Rop.mask(x + n)
###############################################################################
# First we apply the Bayesian inversion equation
xbayes = x0 + Cm_op * Rop.H * (
lsqr(Rop * Cm_op * Rop.H + Cd_op, yn - Rop * x0, iter_lim=400)[0]
)
# Visualize
fig, ax = plt.subplots(1, 1, figsize=(12, 5))
ax.plot(t, x, "k", lw=6, label="true")
ax.plot(t, ymask, ".k", ms=25, label="available samples")
ax.plot(t, ynmask, ".r", ms=25, label="available noisy samples")
ax.plot(t, xbayes, "r", lw=3, label="bayesian inverse")
ax.legend()
ax.set_title("Signal")
ax.set_xlim(0, 0.8)
plt.tight_layout()
###############################################################################
# So far we have been able to estimate our posterior mean. What about its
# uncertainties (i.e., posterior covariance)?
#
# In real-life applications it is very difficult (if not impossible)
# to directly compute the posterior covariance matrix. It is much more
# useful to create a set of models that sample the posterior probability.
# We can do that by solving our problem several times using different prior
# realizations as starting guesses:
xpost = [
x0
+ Cm_op
* Rop.H
* (lsqr(Rop * Cm_op * Rop.H + Cd_op, yn - Rop * x0, iter_lim=400)[0])
for x0 in xs[:30]
]
xpost = np.array(xpost)
x0post = np.average(xpost, axis=0)
Cm_post = ((xpost - x0post).T @ (xpost - x0post)) / nreals
# Visualize
fig, ax = plt.subplots(1, 1, figsize=(12, 5))
ax.plot(t, x, "k", lw=6, label="true")
ax.plot(t, xpost.T, "--r", lw=1)
ax.plot(t, x0post, "r", lw=3, label="bayesian inverse")
ax.plot(t, ymask, ".k", ms=25, label="available samples")
ax.plot(t, ynmask, ".r", ms=25, label="available noisy samples")
ax.legend()
ax.set_title("Signal")
ax.set_xlim(0, 0.8)
fig, ax = plt.subplots(1, 1, figsize=(5, 4))
im = ax.imshow(
Cm_post, interpolation="nearest", cmap="seismic", extent=(t[0], t[-1], t[-1], t[0])
)
ax.set_title(r"$\mathbf{C}_m^{posterior}$")
ax.axis("tight")
plt.tight_layout()
###############################################################################
# Note that here we have been able to compute a sample posterior covariance
# from its estimated samples. By displaying it we can see how both the overall
# variances and the correlation between different parameters have become
# narrower compared to their prior counterparts.
|
1,374 |
test read and write scardec from open
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import numpy as np
import warnings
import obspy
from obspy.core.util.base import NamedTemporaryFile
from obspy.io.scardec.core import _is_scardec
class TestScardec():
"""
Test suite for obspy.io.scardec.
    The tests usually exercise the registered reader directly through
    read_events() so that the integration is covered as well.
"""
def test_read_and_write_scardec_from_files(self, testdata):
"""
Tests that reading and writing a SCARDECfile does not change
anything.
Note: The test file is not one from the catalogue, since it was
impossible to recreate the number formatting. Therefore, the test
file has been created with ObsPy, but was manually checked to be
consistent with the original file
"""
filename = testdata['test.scardec']
with open(filename, "rb") as fh:
data = fh.read()
cat = obspy.read_events(filename)
with NamedTemporaryFile() as tf:
temp_filename = tf.name
try:
# raises two UserWarnings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
cat.write(temp_filename, format="SCARDEC")
assert len(w) == 2
assert w[0].category == UserWarning
assert 'No moment wave magnitude found' in str(w[0])
assert w[1].category == UserWarning
assert 'No derived origin attached' in str(w[1])
with open(temp_filename, "rb") as fh:
new_data = fh.read()
finally:
try:
os.remove(temp_filename)
except Exception:
pass
# Test file header
assert data.decode().splitlines()[0:2] == \
new_data.decode().splitlines()[0:2]
for line_data, line_new in zip(data.decode().splitlines()[2:],
new_data.decode().splitlines()[2:]):
# Compare time stamps
assert np.allclose(float(line_data.split()[0]),
float(line_new.split()[0]))
# Compare moment rate values
assert np.allclose(float(line_data.split()[1]),
float(line_new.split()[1]))
def METHOD_NAME(self, testdata):
"""
Tests that reading and writing a SCARDEC file does not change
anything.
This time it tests reading from and writing to open files.
"""
filename = testdata['test.scardec']
with open(filename, "rb") as fh:
data = fh.read()
fh.seek(0, 0)
cat = obspy.read_events(fh)
with NamedTemporaryFile() as tf:
# raises two UserWarnings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
cat.write(tf, format="SCARDEC")
tf.seek(0, 0)
new_data = tf.read()
assert len(w) == 2
assert w[0].category == UserWarning
assert 'No moment wave magnitude found' in str(w[0])
assert w[1].category == UserWarning
assert 'No derived origin attached' in str(w[1])
# Test file header
assert data.decode().splitlines()[0:2] == \
new_data.decode().splitlines()[0:2]
for line_data, line_new in zip(data.decode().splitlines()[2:],
new_data.decode().splitlines()[2:]):
# Compare time stamps
assert np.allclose(float(line_data.split()[0]),
float(line_new.split()[0]))
# Compare moment rate values
assert np.allclose(float(line_data.split()[1]),
float(line_new.split()[1]))
def test_read_and_write_scardec_from_bytes_io(self, testdata):
"""
Tests that reading and writing a SCARDEC file does not change
anything.
This time it tests reading from and writing to BytesIO objects.
"""
filename = testdata['test.scardec']
with open(filename, "rb") as fh:
buf = io.BytesIO(fh.read())
data = buf.read()
buf.seek(0, 0)
with buf:
buf.seek(0, 0)
cat = obspy.read_events(buf)
# raises two UserWarnings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
with io.BytesIO() as buf2:
cat.write(buf2, format="SCARDEC")
buf2.seek(0, 0)
new_data = buf2.read()
assert len(w) == 2
assert w[0].category == UserWarning
assert 'No moment wave magnitude found' in str(w[0])
assert w[1].category == UserWarning
assert 'No derived origin attached' in str(w[1])
# Test file header
assert data.decode().splitlines()[0:2] == \
new_data.decode().splitlines()[0:2]
for line_data, line_new in zip(data.decode().splitlines()[2:],
new_data.decode().splitlines()[2:]):
# Compare time stamps
assert np.allclose(float(line_data.split()[0]),
float(line_new.split()[0]))
# Compare moment rate values
assert np.allclose(float(line_data.split()[1]),
float(line_new.split()[1]))
def test_is_scardec(self, testdata, datapath):
"""
Tests the is_scardec function.
"""
good_files = [testdata['test.scardec'],
testdata['test2.scardec']]
bad_files = [
datapath.parent / "test_core.py",
datapath.parent / "__init__.py"]
for filename in good_files:
assert _is_scardec(filename)
for filename in bad_files:
assert not _is_scardec(filename)
|
1,375 |
mult
|
# Ref: MIPS® Architecture for Programmers Volume II-A: The MIPS32® Instruction Set Manual
# Document Number: MD00086 Revision 6.05
# Accession: G00021
__all__ = [
# R-format
"SLL", "SRL", "SRA", "JR", "JALR", "MFHI", "MTHI", "MFLO", "MTLO", "DIV", "DIVU", "MULT",
"MULTU", "ADD", "ADDU", "SUB", "SUBU", "AND", "OR", "XOR", "NOR", "SLT", "SLTU",
# J-format
"J", "JAL",
# I-format
"BEQ", "BNE", "BLEZ", "BGTZ", "ADDI", "ADDIU", "SLTI", "SLTIU", "ANDI", "ORI", "XORI",
"LUI", "LB", "LH", "LW", "LBU", "LHU", "SB", "SH", "SW",
# Misc
"MFC0", "MTC0", "DERET", "SDBBP", "SYNC", "SYNCI", "CACHE",
# Pseudo
"NOP", "B",
]
# Instruction formats
def R_FORMAT(op, rs, rt, rd, sa, fn):
return (((op & 0b111111) << 26) |
((rs & 0b11111) << 21) |
((rt & 0b11111) << 16) |
((rd & 0b11111) << 11) |
((sa & 0b11111) << 6) |
((fn & 0b111111) << 0))
def I_FORMAT(op, rs, rt, im):
return (((op & 0b111111) << 26) |
((rs & 0b11111) << 21) |
((rt & 0b11111) << 16) |
((im & 0xffff) << 0))
def J_FORMAT(op, tg):
return (((op & 0b111111) << 26) |
((tg & 0x3ffffff) << 0))
# R-instructions
def SLL (rd, rt, sa): return R_FORMAT(op=0x00, rs= 0, rt=rt, rd=rd, sa=sa, fn=0x00)
def SRL (rd, rt, sa): return R_FORMAT(op=0x00, rs= 0, rt=rt, rd=rd, sa=sa, fn=0x02)
def SRA (rd, rt, sa): return R_FORMAT(op=0x00, rs= 0, rt=rt, rd=rd, sa=sa, fn=0x03)
def JR (rs): return R_FORMAT(op=0x00, rs=rs, rt= 0, rd= 0, sa=0, fn=0x08)
def JALR (rd, rs): return R_FORMAT(op=0x00, rs=rs, rt= 0, rd=rd, sa=0, fn=0x09)
def MFHI (rd): return R_FORMAT(op=0x00, rs= 0, rt= 0, rd=rd, sa=0, fn=0x10)
def MTHI (rd): return R_FORMAT(op=0x00, rs= 0, rt= 0, rd=rd, sa=0, fn=0x11)
def MFLO (rs): return R_FORMAT(op=0x00, rs=rs, rt= 0, rd= 0, sa=0, fn=0x12)
def MTLO (rs): return R_FORMAT(op=0x00, rs=rs, rt= 0, rd= 0, sa=0, fn=0x13)
def DIV (rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd= 0, sa=0, fn=0x1A)
def DIVU (rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd= 0, sa=0, fn=0x1B)
def METHOD_NAME (rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd= 0, sa=0, fn=0x18)
def MULTU(rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd= 0, sa=0, fn=0x19)
def ADD (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x20)
def ADDU (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x21)
def SUB (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x22)
def SUBU (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x23)
def AND (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x24)
def OR (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x25)
def XOR (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x26)
def NOR (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x27)
def SLT (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x2A)
def SLTU (rd, rs, rt): return R_FORMAT(op=0x00, rs=rs, rt=rt, rd=rd, sa=0, fn=0x2B)
# J-instructions
def J (tg): return J_FORMAT(op=0x02, tg=tg)
def JAL (tg): return J_FORMAT(op=0x03, tg=tg)
# I-instructions
def BEQ (rs, rt, im): return I_FORMAT(op=0x04, rs=rs, rt=rt, im=im)
def BNE (rs, rt, im): return I_FORMAT(op=0x05, rs=rs, rt=rt, im=im)
def BLEZ (rs, im): return I_FORMAT(op=0x06, rs=rs, rt= 0, im=im)
def BGTZ (rs, im): return I_FORMAT(op=0x07, rs=rs, rt= 0, im=im)
def ADDI (rt, rs, im): return I_FORMAT(op=0x08, rs=rs, rt=rt, im=im)
def ADDIU(rt, rs, im): return I_FORMAT(op=0x09, rs=rs, rt=rt, im=im)
def SLTI (rt, rs, im): return I_FORMAT(op=0x0A, rs=rs, rt=rt, im=im)
def SLTIU(rt, rs, im): return I_FORMAT(op=0x0B, rs=rs, rt=rt, im=im)
def ANDI (rt, rs, im): return I_FORMAT(op=0x0C, rs=rs, rt=rt, im=im)
def ORI (rt, rs, im): return I_FORMAT(op=0x0D, rs=rs, rt=rt, im=im)
def XORI (rt, rs, im): return I_FORMAT(op=0x0E, rs=rs, rt=rt, im=im)
def LUI (rt, im): return I_FORMAT(op=0x0F, rs= 0, rt=rt, im=im)
def LB (rt, im, rs): return I_FORMAT(op=0x20, rs=rs, rt=rt, im=im)
def LH (rt, im, rs): return I_FORMAT(op=0x21, rs=rs, rt=rt, im=im)
def LW (rt, im, rs): return I_FORMAT(op=0x23, rs=rs, rt=rt, im=im)
def LBU (rt, im, rs): return I_FORMAT(op=0x24, rs=rs, rt=rt, im=im)
def LHU (rt, im, rs): return I_FORMAT(op=0x25, rs=rs, rt=rt, im=im)
def SB (rt, im, rs): return I_FORMAT(op=0x28, rs=rs, rt=rt, im=im)
def SH (rt, im, rs): return I_FORMAT(op=0x29, rs=rs, rt=rt, im=im)
def SW (rt, im, rs): return I_FORMAT(op=0x2B, rs=rs, rt=rt, im=im)
# Misc instructions
def MFC0 (rt, rd, sel=0): return R_FORMAT(op=0x10, rs=0x00, rt=rt, rd=rd, sa=0, fn=sel & 0b111)
def MTC0 (rt, rd, sel=0): return R_FORMAT(op=0x10, rs=0x04, rt=rt, rd=rd, sa=0, fn=sel & 0b111)
def DERET(): return R_FORMAT(op=0x10, rs=0x10, rt=0x00, rd= 0, sa=0, fn=0x1f)
def SDBBP(): return R_FORMAT(op=0x1c, rs=0x00, rt=0x00, rd= 0, sa=0, fn=0x3f)
def SYNC (): return R_FORMAT(op=0x00, rs=0x00, rt=0x00, rd= 0, sa=0, fn=0x0f)
def SYNCI(im, rs): return I_FORMAT(op=0x01, rs=rs, rt=0x1f, im=im)
def CACHE(op, im, rs): return I_FORMAT(op=0x2f, rs=rs, rt=op, im=im)
# Pseudo-instructions
def NOP (): return SLL(0, 0, 0)
def B (im): return BEQ(0, 0, im)
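# Illustrative self-check (an addition, not part of the original reference
# tables): encode a few well-known instructions and compare against their
# standard 32-bit machine words ($t0 is register 8, $ra is register 31).
if __name__ == "__main__":
    assert ADDIU(rt=8, rs=0, im=5) == 0x24080005   # addiu $t0, $zero, 5
    assert JR(rs=31) == 0x03E00008                 # jr $ra
    assert NOP() == 0x00000000                     # sll $zero, $zero, 0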
|
1,376 |
mock process
|
import libqtile.config
from libqtile.widget.check_updates import CheckUpdates, Popen # noqa: F401
from test.widgets.conftest import FakeBar
wrong_distro = "Barch"
good_distro = "Arch"
cmd_0_line = "export toto" # quick "monkeypatch" simulating 0 output, ie 0 update
cmd_1_line = "echo toto" # quick "monkeypatch" simulating 1 output, ie 1 update
cmd_error = "false"
nus = "No Update Available"
# This class returns None when first polled (to simulate that the task is still running)
# and then 0 on the second call.
class MockPopen:
def __init__(self, *args, **kwargs):
self.call_count = 0
def poll(self):
if self.call_count == 0:
self.call_count += 1
return None
return 0
# Bit of an ugly hack to replicate the above functionality but for a method.
class MockSpawn:
call_count = 0
@classmethod
def call_process(cls, *args, **kwargs):
if cls.call_count == 0:
cls.call_count += 1
return "Updates"
return ""
def test_unknown_distro():
"""test an unknown distribution"""
cu = CheckUpdates(distro=wrong_distro)
text = cu.poll()
assert text == "N/A"
def test_update_available(fake_qtile, fake_window):
"""test output with update (check number of updates and color)"""
cu2 = CheckUpdates(
distro=good_distro, custom_command=cmd_1_line, colour_have_updates="#123456"
)
fakebar = FakeBar([cu2], window=fake_window)
cu2._configure(fake_qtile, fakebar)
text = cu2.poll()
assert text == "Updates: 1"
assert cu2.layout.colour == cu2.colour_have_updates
def test_no_update_available_without_no_update_string(fake_qtile, fake_window):
"""test output with no update (without dedicated string nor color)"""
cu3 = CheckUpdates(distro=good_distro, custom_command=cmd_0_line)
fakebar = FakeBar([cu3], window=fake_window)
cu3._configure(fake_qtile, fakebar)
text = cu3.poll()
assert text == ""
def test_no_update_available_with_no_update_string_and_color_no_updates(fake_qtile, fake_window):
"""test output with no update (with dedicated string and color)"""
cu4 = CheckUpdates(
distro=good_distro,
custom_command=cmd_0_line,
no_update_string=nus,
colour_no_updates="#654321",
)
fakebar = FakeBar([cu4], window=fake_window)
cu4._configure(fake_qtile, fakebar)
text = cu4.poll()
assert text == nus
assert cu4.layout.colour == cu4.colour_no_updates
def test_update_available_with_restart_indicator(monkeypatch, fake_qtile, fake_window):
"""test output with no indicator where restart needed"""
cu5 = CheckUpdates(
distro=good_distro,
custom_command=cmd_1_line,
restart_indicator="*",
)
monkeypatch.setattr("os.path.exists", lambda x: True)
fakebar = FakeBar([cu5], window=fake_window)
cu5._configure(fake_qtile, fakebar)
text = cu5.poll()
assert text == "Updates: 1*"
def test_update_available_with_execute(manager_nospawn, minimal_conf_noscreen, monkeypatch):
"""test polling after executing command"""
# Use monkeypatching to patch both Popen (for execute command) and call_process
# This class returns None when first polled (to simulate that the task is still running)
# and then 0 on the second call.
class MockPopen:
def __init__(self, *args, **kwargs):
self.call_count = 0
def poll(self):
if self.call_count == 0:
self.call_count += 1
return None
return 0
# Bit of an ugly hack to replicate the above functionality but for a method.
class MockSpawn:
call_count = 0
@classmethod
def call_process(cls, *args, **kwargs):
if cls.call_count == 0:
cls.call_count += 1
return "Updates"
return ""
cu6 = CheckUpdates(
distro=good_distro,
custom_command="dummy",
execute="dummy",
no_update_string=nus,
)
# Patch the necessary object
monkeypatch.setattr(cu6, "call_process", MockSpawn.call_process)
monkeypatch.setattr("libqtile.widget.check_updates.Popen", MockPopen)
config = minimal_conf_noscreen
config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([cu6], 10))]
manager_nospawn.start(config)
topbar = manager_nospawn.c.bar["top"]
assert topbar.info()["widgets"][0]["text"] == "Updates: 1"
# Clicking the widget triggers the execute command
topbar.fake_button_press(0, "top", 0, 0, button=1)
# The second time we poll the widget, the update process is complete
# and there are no more updates
_, result = manager_nospawn.c.widget["checkupdates"].eval("self.poll()")
assert result == nus
def test_update_process_error(fake_qtile, fake_window):
"""test output where update check gives error"""
cu7 = CheckUpdates(
distro=good_distro,
custom_command=cmd_error,
no_update_string="ERROR",
)
fakebar = FakeBar([cu7], window=fake_window)
cu7._configure(fake_qtile, fakebar)
text = cu7.poll()
assert text == "ERROR"
def test_line_truncations(fake_qtile, monkeypatch, fake_window):
"""test update count is reduced"""
# Mock output to return 5 lines of text
def METHOD_NAME(*args, **kwargs):
return "1\n2\n3\n4\n5\n"
# Fedora is set up to remove 1 from line count
cu8 = CheckUpdates(distro="Fedora")
monkeypatch.setattr(cu8, "call_process", METHOD_NAME)
fakebar = FakeBar([cu8], window=fake_window)
cu8._configure(fake_qtile, fakebar)
text = cu8.poll()
# Should have 4 updates
assert text == "Updates: 4"
|
1,377 |
ninputs
|
from textwrap import TextWrapper
import re, os
# precompile a URL matching regular expression
urlexpr = re.compile(r"((https?):((//)|(\\\\))+[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.MULTILINE|re.UNICODE)
def inputs(args):
'''Keeps only the input arguments in a list of elements.
'''
try:
return [arg for arg in args['only'] if arg.I and not arg.O]
except:
return [arg for arg in args if arg.I]
def METHOD_NAME(fun):
'''Counts the number of input arguments in the input list'''
return len(inputs(fun.req)) + len(inputs(fun.opt))
def outputs(args):
'''Determines whether any of the given arguments is an output
reference, and returns a list of only those elements.
In OpenCV, output references are preceded by CV_OUT or has *OutputArray* type
'''
try:
return [arg for arg in args['only'] if arg.O and not arg.I]
except:
return [arg for arg in args if arg.O]
def only(args):
'''Returns exclusively the arguments which are only inputs
or only outputs'''
d = {};
d['only'] = args
return d
def void(arg):
'''Is the input 'void' '''
return arg == 'void'
def flip(arg):
'''flip the sign of the input'''
return not arg
def noutputs(fun):
'''Counts the number of output arguments in the input list'''
return int(not void(fun.rtp)) + len(outputs(fun.req)) + len(outputs(fun.opt))
def convertibleToInt(string):
'''Can the input string be evaluated to an integer?'''
salt = '1+'
try:
exec(salt+string)
return True
except:
return False
def binaryToDecimal(string):
'''Attempt to convert the input string to floating point representation'''
try:
return str(eval(string))
except:
return string
def formatMatlabConstant(string, table):
'''
Given a string representing a Constant, and a table of all Constants,
attempt to resolve the Constant into a valid Matlab expression
For example, the input
DEPENDENT_VALUE = 1 << FIXED_VALUE
needs to be converted to
DEPENDENT_VALUE = bitshift(1, cv.FIXED_VALUE);
'''
# split the string into expressions
words = re.split('(\W+)', string)
# add a 'cv' prefix if an expression is also a key in the lookup table
words = ''.join([('cv.'+word if word in table else word) for word in words])
# attempt to convert arithmetic expressions and binary/hex to decimal
words = binaryToDecimal(words)
# convert any remaining bitshifts to Matlab 'bitshift' methods
shift = re.sub('[\(\) ]', '', words).split('<<')
words = 'bitshift('+shift[0]+', '+shift[1]+')' if len(shift) == 2 else words
return words
def matlabURL(string):
"""This filter is used to construct a Matlab specific URL that calls the
system browser instead of the (insanely bad) builtin Matlab browser"""
return re.sub(urlexpr, '<a href="matlab: web(\'\\1\', \'-browser\')">\\1</a>', string)
def capitalizeFirst(text):
'''Capitalize only the first character of the text string'''
return text[0].upper() + text[1:]
def toUpperCamelCase(text):
'''variable_name --> VariableName'''
return ''.join([capitalizeFirst(word) for word in text.split('_')])
def toLowerCamelCase(text):
'''variable_name --> variableName'''
    upper_camel = toUpperCamelCase(text)
return upper_camel[0].lower() + upper_camel[1:]
def toUnderCase(text):
'''VariableName --> variable_name'''
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', text)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def stripTags(text):
'''
strip or convert html tags from a text string
<code>content</code> --> content
<anything> --> ''
< --> <
> --> >
&le --> <=
&ge --> >=
'''
upper = lambda pattern: pattern.group(1).upper()
text = re.sub('<code>(.*?)</code>', upper, text)
text = re.sub('<([^=\s].*?)>', '', text)
text = re.sub('<', '<', text)
text = re.sub('>', '>', text)
text = re.sub('&le', '<=', text)
text = re.sub('&ge', '>=', text)
return text
def qualify(text, name):
'''Adds uppercase 'CV.' qualification to any occurrences of name in text'''
return re.sub(name.upper(), 'CV.'+name.upper(), text)
def slugify(text):
'''A_Function_name --> a-function-name'''
return text.lower().replace('_', '-')
def filename(fullpath):
'''Returns only the filename without an extension from a file path
eg. /path/to/file.txt --> file
'''
return os.path.splitext(os.path.basename(fullpath))[0]
def split(text, delimiter=' '):
'''Split a text string into a list using the specified delimiter'''
return text.split(delimiter)
def csv(items, sep=', '):
'''format a list with a separator (comma if not specified)'''
return sep.join(item for item in items)
def cellarray(items, escape='\''):
'''format a list of items as a matlab cell array'''
return '{' + ', '.join(escape+item+escape for item in items) + '}'
def stripExtraSpaces(text):
'''Removes superfluous whitespace from a string, including the removal
of all leading and trailing whitespace'''
return ' '.join(text.split())
def comment(text, wrap=80, escape='% ', escape_first='', escape_last=''):
'''comment filter
Takes a string in text, and wraps it to wrap characters in length with
preceding comment escape sequence on each line. escape_first and
escape_last can be used for languages which define block comments.
Examples:
C++ inline comment comment(80, '// ')
C block comment: comment(80, ' * ', '/*', ' */')
Matlab comment: comment(80, '% ')
Matlab block comment: comment(80, '', '%{', '%}')
Python docstrings: comment(80, '', '\'\'\'', '\'\'\'')
'''
tw = TextWrapper(width=wrap-len(escape))
if escape_first:
escape_first = escape_first+'\n'
if escape_last:
escape_last = '\n'+escape_last
escapn = '\n'+escape
lines = text.split('\n')
wlines = (tw.wrap(line) for line in lines)
return escape_first+escape+escapn.join(escapn.join(line) for line in wlines)+escape_last
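# Illustrative checks (an addition, not part of the original module). These
# helpers are presumably registered as template filters, which is why the
# docstring examples omit the piped-in text argument; called directly:
if __name__ == "__main__":
    assert toUpperCamelCase("variable_name") == "VariableName"
    assert toUnderCase("VariableName") == "variable_name"
    assert comment("hello world", wrap=20, escape="% ") == "% hello world"
    assert formatMatlabConstant("1 << FIXED_VALUE", {"FIXED_VALUE": 16}) == "bitshift(1, cv.FIXED_VALUE)"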
|
1,378 |
login
|
"""
$description TV and live video game broadcasts, artist performances and personal daily-life video blogs & shows.
$url play.afreecatv.com
$type live
"""
import logging
import re
from streamlink.plugin import Plugin, pluginargument, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream, HLSStreamReader, HLSStreamWriter
log = logging.getLogger(__name__)
class AfreecaHLSStreamWriter(HLSStreamWriter):
def should_filter_sequence(self, sequence):
return "preloading" in sequence.segment.uri or super().should_filter_sequence(sequence)
class AfreecaHLSStreamReader(HLSStreamReader):
__writer__ = AfreecaHLSStreamWriter
class AfreecaHLSStream(HLSStream):
__reader__ = AfreecaHLSStreamReader
@pluginmatcher(re.compile(
r"https?://play\.afreecatv\.com/(?P<username>\w+)(?:/(?P<bno>:\d+))?",
))
@pluginargument(
"username",
sensitive=True,
requires=["password"],
metavar="USERNAME",
help="The username used to register with afreecatv.com.",
)
@pluginargument(
"password",
sensitive=True,
metavar="PASSWORD",
help="A afreecatv.com account password to use with --afreeca-username.",
)
@pluginargument(
"purge-credentials",
action="store_true",
help="Purge cached AfreecaTV credentials to initiate a new session and reauthenticate.",
)
class AfreecaTV(Plugin):
_re_bno = re.compile(r"var nBroadNo = (?P<bno>\d+);")
CHANNEL_API_URL = "http://live.afreecatv.com/afreeca/player_live_api.php"
CHANNEL_RESULT_OK = 1
QUALITYS = ["original", "hd", "sd"]
QUALITY_WEIGHTS = {
"original": 1080,
"hd": 720,
"sd": 480,
}
_schema_channel = validate.Schema(
{
"CHANNEL": {
"RESULT": validate.transform(int),
validate.optional("BPWD"): str,
validate.optional("BNO"): str,
validate.optional("RMD"): str,
validate.optional("AID"): str,
validate.optional("CDN"): str,
},
},
validate.get("CHANNEL"),
)
_schema_stream = validate.Schema(
{
validate.optional("view_url"): validate.url(
scheme=validate.any("rtmp", "http"),
),
"stream_status": str,
},
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._authed = (
self.session.http.cookies.get("PdboxBbs")
and self.session.http.cookies.get("PdboxSaveTicket")
and self.session.http.cookies.get("PdboxTicket")
and self.session.http.cookies.get("PdboxUser")
and self.session.http.cookies.get("RDB")
)
@classmethod
def stream_weight(cls, key):
weight = cls.QUALITY_WEIGHTS.get(key)
if weight:
return weight, "afreeca"
return Plugin.stream_weight(key)
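# Illustrative sketch, not part of the upstream plugin: stream_weight() maps this
# plugin's named qualities to numeric weights so Streamlink can rank them, e.g.
#   AfreecaTV.stream_weight("hd")   # -> (720, "afreeca")
# and any unknown name falls back to Plugin.stream_weight().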
def _get_channel_info(self, broadcast, username):
data = {
"bid": username,
"bno": broadcast,
"from_api": "0",
"mode": "landing",
"player_type": "html5",
"pwd": "",
"stream_type": "common",
"type": "live",
}
res = self.session.http.post(self.CHANNEL_API_URL, data=data)
return self.session.http.json(res, schema=self._schema_channel)
def _get_hls_key(self, broadcast, username, quality):
data = {
"bid": username,
"bno": broadcast,
"from_api": "0",
"mode": "landing",
"player_type": "html5",
"pwd": "",
"quality": quality,
"stream_type": "common",
"type": "aid",
}
res = self.session.http.post(self.CHANNEL_API_URL, data=data)
return self.session.http.json(res, schema=self._schema_channel)
def _get_stream_info(self, broadcast, quality, rmd):
params = {
"return_type": "gs_cdn_pc_web",
"broad_key": f"{broadcast}-common-{quality}-hls",
}
res = self.session.http.get(f"{rmd}/broad_stream_assign.html", params=params)
return self.session.http.json(res, schema=self._schema_stream)
def _get_hls_stream(self, broadcast, username, quality, rmd):
keyjson = self._get_hls_key(broadcast, username, quality)
if keyjson["RESULT"] != self.CHANNEL_RESULT_OK:
return
key = keyjson["AID"]
info = self._get_stream_info(broadcast, quality, rmd)
if "view_url" in info:
return AfreecaHLSStream(self.session, info["view_url"], params={"aid": key})
def METHOD_NAME(self, username, password):
data = {
"szWork": "login",
"szType": "json",
"szUid": username,
"szPassword": password,
"isSaveId": "true",
"isSavePw": "false",
"isSaveJoin": "false",
"isLoginRetain": "Y",
}
res = self.session.http.post("https://login.afreecatv.com/app/LoginAction.php", data=data)
data = self.session.http.json(res)
log.trace(f"{data!r}")
if data["RESULT"] != self.CHANNEL_RESULT_OK:
return False
self.save_cookies()
return True
def _get_streams(self):
login_username = self.get_option("username")
login_password = self.get_option("password")
self.session.http.headers.update({"Referer": self.url, "Origin": "http://play.afreecatv.com"})
if self.options.get("purge_credentials"):
self.clear_cookies()
self._authed = False
log.info("All credentials were successfully removed")
if self._authed:
log.debug("Attempting to authenticate using cached cookies")
elif login_username and login_password:
log.debug("Attempting to login using username and password")
if self.METHOD_NAME(login_username, login_password):
log.info("Login was successful")
else:
log.error("Failed to login")
m = self.match.groupdict()
username = m["username"]
bno = m["bno"]
if bno is None:
res = self.session.http.get(self.url)
m = self._re_bno.search(res.text)
if not m:
log.error("Could not find broadcast number.")
return
bno = m.group("bno")
channel = self._get_channel_info(bno, username)
log.trace(f"{channel!r}")
if channel.get("BPWD") == "Y":
log.error("Stream is Password-Protected")
return
elif channel.get("RESULT") == -6:
log.error("Login required")
return
elif channel.get("RESULT") != self.CHANNEL_RESULT_OK:
return
(broadcast, rmd) = (channel["BNO"], channel["RMD"])
if not (broadcast and rmd):
return
for qkey in self.QUALITYS:
hls_stream = self._get_hls_stream(broadcast, username, qkey, rmd)
if hls_stream:
yield qkey, hls_stream
__plugin__ = AfreecaTV
|
1,379 |
set message
|
"""Chip viewer and widget.
In the lower left corner of the main Pynche window, you will see two
ChipWidgets, one for the selected color and one for the nearest color. The
selected color is the actual RGB value expressed as an X11 #COLOR name. The
nearest color is the named color from the X11 database that is closest to the
selected color in 3D space. There may be other colors equally close, but the
nearest one is the first one found.
Clicking on the nearest color chip selects that named color.
The ChipViewer class includes the entire lower left quadrant; i.e. both the
selected and nearest ChipWidgets.
"""
from Tkinter import *
import ColorDB
class ChipWidget:
_WIDTH = 150
_HEIGHT = 80
def __init__(self,
master = None,
width = _WIDTH,
height = _HEIGHT,
text = 'Color',
initialcolor = 'blue',
presscmd = None,
releasecmd = None):
# create the text label
self.__label = Label(master, text=text)
self.__label.grid(row=0, column=0)
# create the color chip, implemented as a frame
self.__chip = Frame(master, relief=RAISED, borderwidth=2,
width=width,
height=height,
background=initialcolor)
self.__chip.grid(row=1, column=0)
# create the color name
self.__namevar = StringVar()
self.__namevar.set(initialcolor)
self.__name = Entry(master, textvariable=self.__namevar,
relief=FLAT, justify=CENTER, state=DISABLED,
font=self.__label['font'])
self.__name.grid(row=2, column=0)
# create the message area
self.__msgvar = StringVar()
self.__name = Entry(master, textvariable=self.__msgvar,
relief=FLAT, justify=CENTER, state=DISABLED,
font=self.__label['font'])
self.__name.grid(row=3, column=0)
# set bindings
if presscmd:
self.__chip.bind('<ButtonPress-1>', presscmd)
if releasecmd:
self.__chip.bind('<ButtonRelease-1>', releasecmd)
def set_color(self, color):
self.__chip.config(background=color)
def get_color(self):
return self.__chip['background']
def set_name(self, colorname):
self.__namevar.set(colorname)
def METHOD_NAME(self, message):
self.__msgvar.set(message)
def press(self):
self.__chip.configure(relief=SUNKEN)
def release(self):
self.__chip.configure(relief=RAISED)
class ChipViewer:
def __init__(self, switchboard, master=None):
self.__sb = switchboard
self.__frame = Frame(master, relief=RAISED, borderwidth=1)
self.__frame.grid(row=3, column=0, ipadx=5, sticky='NSEW')
# create the chip that will display the currently selected color
# exactly
self.__sframe = Frame(self.__frame)
self.__sframe.grid(row=0, column=0)
self.__selected = ChipWidget(self.__sframe, text='Selected')
# create the chip that will display the nearest real X11 color
# database color name
self.__nframe = Frame(self.__frame)
self.__nframe.grid(row=0, column=1)
self.__nearest = ChipWidget(self.__nframe, text='Nearest',
presscmd = self.__buttonpress,
releasecmd = self.__buttonrelease)
def update_yourself(self, red, green, blue):
# Selected always shows the #rrggbb name of the color, nearest always
# shows the name of the nearest color in the database. BAW: should
# an exact match be indicated in some way?
#
# Always use the #rrggbb style to actually set the color, since we may
# not be using X color names (e.g. "web-safe" names)
colordb = self.__sb.colordb()
rgbtuple = (red, green, blue)
rrggbb = ColorDB.triplet_to_rrggbb(rgbtuple)
# find the nearest
nearest = colordb.nearest(red, green, blue)
nearest_tuple = colordb.find_byname(nearest)
nearest_rrggbb = ColorDB.triplet_to_rrggbb(nearest_tuple)
self.__selected.set_color(rrggbb)
self.__nearest.set_color(nearest_rrggbb)
# set the name and messages areas
self.__selected.set_name(rrggbb)
if rrggbb == nearest_rrggbb:
self.__selected.METHOD_NAME(nearest)
else:
self.__selected.METHOD_NAME('')
self.__nearest.set_name(nearest_rrggbb)
self.__nearest.METHOD_NAME(nearest)
def __buttonpress(self, event=None):
self.__nearest.press()
def __buttonrelease(self, event=None):
self.__nearest.release()
rrggbb = self.__nearest.get_color()
red, green, blue = ColorDB.rrggbb_to_triplet(rrggbb)
self.__sb.update_views(red, green, blue)
|
1,380 |
get configuration category
|
# -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge-iot.readthedocs.io/
# FLEDGE_END
"""Common FledgeProcess Class"""
from abc import ABC, abstractmethod
import argparse
import time
from fledge.common.storage_client.storage_client import StorageClientAsync, ReadingsStorageClientAsync
from fledge.common import logger
from fledge.common.microservice_management_client.microservice_management_client import MicroserviceManagementClient
__author__ = "Ashwin Gopalakrishnan, Amarendra K Sinha"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_logger = logger.setup(__name__)
class ArgumentParserError(Exception):
""" Override default exception to not terminate application """
def __init__(self, message):
self.message = message
def __str__(self):
fmt = '%(message)s'
return fmt % dict(message=self.message)
class SilentArgParse(argparse.ArgumentParser):
def error(self, message):
""" Override default error functionality to not terminate application """
raise ArgumentParserError(message)
class FledgeProcess(ABC):
""" FledgeProcess for all non-core python processes.
All processes will inherit from FledgeProcess and must implement pure virtual method run()
"""
_core_management_host = None
""" string containing core's micro-service management host """
_core_management_port = None
""" int containing core's micro-service management port """
_name = None
""" name of process """
_core_microservice_management_client = None
""" MicroserviceManagementClient instance """
_readings_storage_async = None
""" fledge.common.storage_client.storage_client.ReadingsStorageClientAsync """
_storage_async = None
""" async fledge.common.storage_client.storage_client.StorageClientAsync """
_start_time = None
""" time at which this python process started """
_dryrun = False
""" this is a dry run invocation of the process used to populate configuration """
def __init__(self):
""" All processes must have these three command line arguments passed:
--address [core microservice management host]
--port [core microservice management port]
--name [process name]
"""
self._start_time = time.time()
try:
parser = SilentArgParse()
parser.add_argument("--name", required=True)
parser.add_argument("--address", required=True)
parser.add_argument("--port", required=True, type=int)
namespace, args = parser.parse_known_args()
self._name = getattr(namespace, 'name')
self._core_management_host = getattr(namespace, 'address')
self._core_management_port = getattr(namespace, 'port')
r = range(1, 65536)
if self._core_management_port not in r:
raise ArgumentParserError("Invalid Port: {}".format(self._core_management_port))
for item in args:
if item == "--dryrun":
self._dryrun = True
elif item.startswith('--'):
kv = item.split('=')
if len(kv) == 2:
if len(kv[1].strip()) == 0:
raise ArgumentParserError("Invalid value {} for optional arg {}".format(kv[1], kv[0]))
except ArgumentParserError as ex:
_logger.error(ex, "Arg parser error.")
raise
self._core_microservice_management_client = MicroserviceManagementClient(self._core_management_host,
self._core_management_port)
self._readings_storage_async = ReadingsStorageClientAsync(self._core_management_host,
self._core_management_port)
self._storage_async = StorageClientAsync(self._core_management_host, self._core_management_port)
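# Illustrative sketch, not part of the original module: a concrete task built on
# FledgeProcess would be launched roughly as
#   python -m fledge.tasks.my_task --address=localhost --port=41547 --name=my_task --dryrun
# where the module path and port number are made-up values for illustration only.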
# pure virtual method run() to be implemented by child class
@abstractmethod
def run(self):
pass
def get_services_from_core(self, name=None, _type=None):
return self._core_microservice_management_client.get_services(name, _type)
def register_service_with_core(self, service_registration_payload):
""" Register a microservice with core
Keyword Arguments:
service_registration_payload -- json format dictionary
Return Values:
Argument value (as a string)
None (if argument was not passed)
Known Exceptions:
HTTPError
"""
return self._core_microservice_management_client.register_service(service_registration_payload)
def unregister_service_with_core(self, microservice_id):
""" Unregister a microservice with core
Keyword Arguments:
microservice_id (uuid as a string)
"""
return self._core_microservice_management_client.unregister_service(microservice_id)
def register_interest_with_core(self):
# cat name
# callback module
# self.microservice_id
raise NotImplementedError
def unregister_interest_with_core(self):
# cat name
# self.microservice_id
raise NotImplementedError
def get_configuration_categories(self):
"""
:return:
"""
return self._core_microservice_management_client.METHOD_NAME()
def METHOD_NAME(self, category_name=None):
"""
:param category_name:
:return:
"""
return self._core_microservice_management_client.METHOD_NAME(category_name)
def get_configuration_item(self, category_name, config_item):
"""
:param category_name:
:param config_item:
:return:
"""
return self._core_microservice_management_client.get_configuration_item(category_name, config_item)
def create_configuration_category(self, category_data):
"""
:param category_data:
:return:
"""
return self._core_microservice_management_client.create_configuration_category(category_data)
def update_configuration_item(self, category_name, config_item):
"""
:param category_name:
:param config_item:
:return:
"""
return self._core_microservice_management_client.update_configuration_item(category_name, config_item)
def delete_configuration_item(self, category_name, config_item):
"""
:param category_name:
:param config_item:
:return:
"""
return self._core_microservice_management_client.delete_configuration_item(category_name, config_item)
def is_dry_run(self):
"""
Return true if this is a dry run of the process. Dry runs are used
to allow a task or service to add configuration without that task or
service performing whatever operation it normally does. Thus the user
can then update the configuration before the task or service is ever
started in anger.
:return: Boolean
"""
return self._dryrun
|
1,381 |
delete root probable dead object list index
|
#
# Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For any questions about this software or licensing,
# please email [email protected] or [email protected].
#
import os
import sys
import time
import subprocess
import yaml
from framework import S3PyCliTest
from framework import Config
from framework import logit
class MotrConfig():
def __init__(self):
lctl_cmd = "sudo lctl list_nids | head -1"
result = subprocess.check_output(lctl_cmd, shell=True).decode().split()[0]
self.LOCAL_NID = result
self.cfg_dir = os.path.join(os.path.dirname(__file__), 'cfg')
config_file = os.path.join(self.cfg_dir, 'motrkvscli.yaml')
with open(config_file, 'r') as f:
s3config = yaml.safe_load(f)
self.KVS_IDX = str(s3config['S3_MOTR_IDX_SERVICE_ID'])
self.LOCAL_EP = s3config['S3_MOTR_LOCAL_EP']
self.HA_EP = s3config['S3_MOTR_HA_EP']
self.PROFILE_FID = s3config['S3_MOTR_PROF']
self.PROCESS_FID = s3config['S3_MOTR_PROCESS_FID']
self.LOCAL_EP = self.LOCAL_NID + self.LOCAL_EP
self.HA_EP = self.LOCAL_NID + self.HA_EP
class S3OID():
def __init__(self, oid_hi="0x0", oid_lo="0x0"):
self.oid_hi = oid_hi
self.oid_lo = oid_lo
def set_oid(self, oid_hi, oid_lo):
self.oid_hi = oid_hi
self.oid_lo = oid_lo
return self
class S3kvTest(S3PyCliTest):
def __init__(self, description):
motr_conf = MotrConfig()
if "LD_LIBRARY_PATH" in os.environ:
self.cmd = "sudo env LD_LIBRARY_PATH=%s ../motrkvscli.sh" % os.environ["LD_LIBRARY_PATH"]
else:
self.cmd = "sudo ../motrkvscli.sh"
self.common_args = " --motr_local_addr=" + motr_conf.LOCAL_EP + " --motr_ha_addr=" + motr_conf.HA_EP + " --motr_profile=" + motr_conf.PROFILE_FID + " --motr_proc=" + motr_conf.PROCESS_FID + " --kvstore=" + motr_conf.KVS_IDX + " "
super(S3kvTest, self).__init__(description)
def root_bucket_account_index_records(self):
root_oid = self.root_bucket_account_index()
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + root_oid.oid_hi + " --index_lo=" + root_oid.oid_lo + " --op_count=3 --action=next"
self.with_cli(kvs_cmd)
return self
def root_bucket_metadata_index_records(self):
root_oid = self.root_bucket_metadata_index()
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + root_oid.oid_hi + " --index_lo=" + root_oid.oid_lo + " --op_count=3 --action=next"
self.with_cli(kvs_cmd)
return self
def root_probable_dead_object_list_index_records(self):
root_oid = self.root_probable_dead_object_list_index()
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + root_oid.oid_hi + " --index_lo=" + root_oid.oid_lo + " --op_count=6 --action=next"
self.with_cli(kvs_cmd)
return self
# Root index table OID used by S3server is a constant derived by
# adding 1 to M0_MOTR_ID_APP define and packing it using M0_FID_TINIT
def root_bucket_account_index(self):
root_oid = S3OID("0x7800000000000000", "0x100001")
return root_oid
# Root bucket metadata index table OID used by S3server is a constant derived by
# adding 2 to M0_MOTR_ID_APP define and packing it using M0_FID_TINIT
def root_bucket_metadata_index(self):
root_oid = S3OID("0x7800000000000000", "0x100002")
return root_oid
# Root probable dead object list index table OID used by S3server is a constant derived by
# adding 3 to M0_MOTR_ID_APP define and packing it using M0_FID_TINIT
def root_probable_dead_object_list_index(self):
root_oid = S3OID("0x7800000000000000", "0x100003")
return root_oid
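# Illustrative note, not taken from the Motr headers: the oid_lo values 0x100001,
# 0x100002 and 0x100003 above are consistent with an application base id of
# 0x100000 plus the offsets 1, 2 and 3 described in the comments, with oid_hi
# carrying the fid type packed by M0_FID_TINIT; treat this as an inference, not
# as the authoritative definition of M0_MOTR_ID_APP.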
def next_keyval(self, oid, key, count):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --action=next" + " --key=" + key +" --op_count=" + str(count)
self.with_cli(kvs_cmd)
return self
def delete_keyval(self, oid, key):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --key=" + key + " --action=del"
self.with_cli(kvs_cmd)
return self
def get_keyval(self, oid, key):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --key=" + key + " --action=get"
self.with_cli(kvs_cmd)
return self
def put_keyval(self, oid, key, val):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --key=" + key + " --value=" + val + " --action=put"
self.with_cli(kvs_cmd)
return self
def create_index(self, oid):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --action=createidx"
self.with_cli(kvs_cmd)
return self
def delete_index(self, oid):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --action=deleteidx"
self.with_cli(kvs_cmd)
return self
def list_index(self, oid):
kvs_cmd = self.cmd + self.common_args + " --index_hi=" + oid.oid_hi + " --index_lo=" + oid.oid_lo + " --action=next"
self.with_cli(kvs_cmd)
return self
def create_root_bucket_account_index(self):
root_oid = self.root_bucket_account_index()
return self.create_index(root_oid)
def delete_root_bucket_account_index(self):
root_oid = self.root_bucket_account_index()
return self.delete_index(root_oid)
def create_root_bucket_metadata_index(self):
root_oid = self.root_bucket_metadata_index()
return self.create_index(root_oid)
def delete_root_bucket_metadata_index(self):
root_oid = self.root_bucket_metadata_index()
return self.delete_index(root_oid)
def create_root_probable_dead_object_list_index(self):
root_oid = self.root_probable_dead_object_list_index()
return self.create_index(root_oid)
def METHOD_NAME(self):
root_oid = self.root_probable_dead_object_list_index()
return self.delete_index(root_oid)
def list_root_probable_dead_object_list_index(self):
root_oid = self.root_probable_dead_object_list_index()
return self.list_index(root_oid)
|
1,382 |
get dedicated ip pool output
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetDedicatedIpPoolResult',
'AwaitableGetDedicatedIpPoolResult',
'get_dedicated_ip_pool',
'get_dedicated_ip_pool_output',
]
@pulumi.output_type
class GetDedicatedIpPoolResult:
"""
A collection of values returned by getDedicatedIpPool.
"""
def __init__(__self__, arn=None, dedicated_ips=None, id=None, pool_name=None, scaling_mode=None, tags=None):
if arn and not isinstance(arn, str):
raise TypeError("Expected argument 'arn' to be a str")
pulumi.set(__self__, "arn", arn)
if dedicated_ips and not isinstance(dedicated_ips, list):
raise TypeError("Expected argument 'dedicated_ips' to be a list")
pulumi.set(__self__, "dedicated_ips", dedicated_ips)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if pool_name and not isinstance(pool_name, str):
raise TypeError("Expected argument 'pool_name' to be a str")
pulumi.set(__self__, "pool_name", pool_name)
if scaling_mode and not isinstance(scaling_mode, str):
raise TypeError("Expected argument 'scaling_mode' to be a str")
pulumi.set(__self__, "scaling_mode", scaling_mode)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def arn(self) -> str:
"""
ARN of the Dedicated IP Pool.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="dedicatedIps")
def dedicated_ips(self) -> Sequence['outputs.GetDedicatedIpPoolDedicatedIpResult']:
"""
A list of objects describing the pool's dedicated IP's. See `dedicated_ips`.
"""
return pulumi.get(self, "dedicated_ips")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="poolName")
def pool_name(self) -> str:
return pulumi.get(self, "pool_name")
@property
@pulumi.getter(name="scalingMode")
def scaling_mode(self) -> str:
"""
(Optional) IP pool scaling mode. Valid values: `STANDARD`, `MANAGED`.
"""
return pulumi.get(self, "scaling_mode")
@property
@pulumi.getter
def tags(self) -> Mapping[str, str]:
"""
A map of tags attached to the pool.
"""
return pulumi.get(self, "tags")
class AwaitableGetDedicatedIpPoolResult(GetDedicatedIpPoolResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetDedicatedIpPoolResult(
arn=self.arn,
dedicated_ips=self.dedicated_ips,
id=self.id,
pool_name=self.pool_name,
scaling_mode=self.scaling_mode,
tags=self.tags)
def get_dedicated_ip_pool(pool_name: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDedicatedIpPoolResult:
"""
Data source for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
## Example Usage
### Basic Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.sesv2.get_dedicated_ip_pool(pool_name="my-pool")
```
:param str pool_name: Name of the dedicated IP pool.
:param Mapping[str, str] tags: A map of tags attached to the pool.
"""
__args__ = dict()
__args__['poolName'] = pool_name
__args__['tags'] = tags
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('aws:sesv2/getDedicatedIpPool:getDedicatedIpPool', __args__, opts=opts, typ=GetDedicatedIpPoolResult).value
return AwaitableGetDedicatedIpPoolResult(
arn=pulumi.get(__ret__, 'arn'),
dedicated_ips=pulumi.get(__ret__, 'dedicated_ips'),
id=pulumi.get(__ret__, 'id'),
pool_name=pulumi.get(__ret__, 'pool_name'),
scaling_mode=pulumi.get(__ret__, 'scaling_mode'),
tags=pulumi.get(__ret__, 'tags'))
@_utilities.lift_output_func(get_dedicated_ip_pool)
def METHOD_NAME(pool_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Optional[Mapping[str, str]]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDedicatedIpPoolResult]:
"""
Data source for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
## Example Usage
### Basic Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.sesv2.get_dedicated_ip_pool(pool_name="my-pool")
```
:param str pool_name: Name of the dedicated IP pool.
:param Mapping[str, str] tags: A map of tags attached to the pool.
"""
...
|
1,383 |
get data
|
"""
Stores application data.
"""
# standard libraries
import copy
import json
import pathlib
import threading
import typing
# third party libraries
from nion.swift.model import Utility
from nion.utils import Event
from nion.utils import StructuredModel
class ApplicationData:
"""Application data is a singleton that stores application data."""
def __init__(self, file_path: typing.Optional[pathlib.Path] = None) -> None:
self.__lock = threading.RLock()
self.__file_path = file_path
self.__data_dict: typing.Optional[typing.Dict[str, typing.Any]] = None
self.data_changed_event = Event.Event()
@property
def file_path(self) -> typing.Optional[pathlib.Path]:
return self.__file_path
@file_path.setter
def file_path(self, value: pathlib.Path) -> None:
self.__file_path = value
def get_data_dict(self) -> typing.Dict[str, typing.Any]:
with self.__lock:
data_changed = self.__read_data_dict()
result = copy.deepcopy(self.__data_dict) if self.__data_dict else dict()
if data_changed:
self.data_changed_event.fire()
return result
def set_data_dict(self, d: typing.Mapping[str, typing.Any]) -> None:
with self.__lock:
self.__data_dict = dict(d)
self.__write_data_dict()
self.data_changed_event.fire()
def __read_data_dict(self) -> bool:
if self.__data_dict is None and self.__file_path and self.__file_path.exists():
with open(self.__file_path) as f:
self.__data_dict = json.load(f)
return True
return False
def __write_data_dict(self) -> None:
if self.__file_path:
with Utility.AtomicFileWriter(self.__file_path) as fp:
json.dump(self.__data_dict, fp, skipkeys=True, indent=4)
__application_data = ApplicationData()
def set_file_path(file_path: pathlib.Path) -> None:
__application_data.file_path = file_path
# requires special bootstrapping for session metadata
__session_metadata.update()
def METHOD_NAME() -> typing.Dict[str, typing.Any]:
return __application_data.get_data_dict()
def set_data(d: typing.Mapping[str, typing.Any]) -> None:
__application_data.set_data_dict(d)
#
class SessionMetadata:
"""Session data is a singleton that stores application data via the ApplicationData singleton."""
def __init__(self) -> None:
site_field = StructuredModel.define_field("site", StructuredModel.STRING)
instrument_field = StructuredModel.define_field("instrument", StructuredModel.STRING)
task_field = StructuredModel.define_field("task", StructuredModel.STRING)
microscopist_field = StructuredModel.define_field("microscopist", StructuredModel.STRING)
sample_field = StructuredModel.define_field("sample", StructuredModel.STRING)
sample_area_field = StructuredModel.define_field("sample_area", StructuredModel.STRING)
fields = [site_field, instrument_field, task_field, microscopist_field, sample_field, sample_area_field]
self.__field_descriptions = typing.cast(typing.List[typing.Mapping[str, typing.Any]], fields)
schema = StructuredModel.define_record("SessionMetadata", fields)
self.__model = typing.cast(StructuredModel.RecordModel, StructuredModel.build_model(schema, value=METHOD_NAME().get("session_metadata", dict())))
def model_changed() -> None:
data = METHOD_NAME()
data["session_metadata"] = self.__model.to_dict_value()
set_data(data)
self.__model_changed_listener = self.__model.model_changed_event.listen(model_changed)
@property
def model(self) -> StructuredModel.RecordModel:
return self.__model
def update(self) -> None:
# handle special bootstrapping for session metadata. this can be solved in the long
# run by using a data structure which properly notifies listeners of changes when reloaded.
field_names = [fd["name"] for fd in self.__field_descriptions]
fields = METHOD_NAME().get("session_metadata", dict())
for field in fields:
if field in field_names:
setattr(self.__model, field, fields[field])
__session_metadata = SessionMetadata()
def get_session_metadata_model() -> StructuredModel.RecordModel:
return __session_metadata.model
def get_session_metadata_dict() -> typing.Dict[str, typing.Any]:
return dict(typing.cast(typing.Mapping[str, typing.Any], __session_metadata.model.to_dict_value()))
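# Illustrative usage sketch, not part of the original module: application data is a
# plain dict persisted to the configured JSON file, e.g.
#   set_file_path(pathlib.Path("~/application_data.json").expanduser())
#   d = METHOD_NAME()
#   d["recent_files"] = ["example.ndata"]  # hypothetical key, for illustration only
#   set_data(d)  # writes the file atomically and fires data_changed_event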
|
1,384 |
isint
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Primitive abstract base class."""
from __future__ import annotations
from abc import ABC
from collections.abc import Sequence
import numpy as np
from qiskit.circuit import QuantumCircuit
from qiskit.providers import Options
class BasePrimitive(ABC):
"""Primitive abstract base class."""
def __init__(self, options: dict | None = None):
self._run_options = Options()
if options is not None:
self._run_options.update_options(**options)
@property
def options(self) -> Options:
"""Return options values for the estimator.
Returns:
options
"""
return self._run_options
def set_options(self, **fields):
"""Set options values for the estimator.
Args:
**fields: The fields to update the options
"""
self._run_options.update_options(**fields)
@staticmethod
def _validate_circuits(
circuits: Sequence[QuantumCircuit] | QuantumCircuit,
) -> tuple[QuantumCircuit, ...]:
if isinstance(circuits, QuantumCircuit):
circuits = (circuits,)
elif not isinstance(circuits, Sequence) or not all(
isinstance(cir, QuantumCircuit) for cir in circuits
):
raise TypeError("Invalid circuits, expected Sequence[QuantumCircuit].")
elif not isinstance(circuits, tuple):
circuits = tuple(circuits)
if len(circuits) == 0:
raise ValueError("No circuits were provided.")
return circuits
@staticmethod
def _validate_parameter_values(
parameter_values: Sequence[Sequence[float]] | Sequence[float] | float | None,
default: Sequence[Sequence[float]] | Sequence[float] | None = None,
) -> tuple[tuple[float, ...], ...]:
# Allow optional (if default)
if parameter_values is None:
if default is None:
raise ValueError("No default `parameter_values`, optional input disallowed.")
parameter_values = default
# Support numpy ndarray
if isinstance(parameter_values, np.ndarray):
parameter_values = parameter_values.tolist()
elif isinstance(parameter_values, Sequence):
parameter_values = tuple(
vector.tolist() if isinstance(vector, np.ndarray) else vector
for vector in parameter_values
)
# Allow single value
if _isreal(parameter_values):
parameter_values = ((parameter_values,),)
elif isinstance(parameter_values, Sequence) and not any(
isinstance(vector, Sequence) for vector in parameter_values
):
parameter_values = (parameter_values,)
# Validation
if (
not isinstance(parameter_values, Sequence)
or not all(isinstance(vector, Sequence) for vector in parameter_values)
or not all(all(_isreal(value) for value in vector) for vector in parameter_values)
):
raise TypeError("Invalid parameter values, expected Sequence[Sequence[float]].")
return tuple(tuple(float(value) for value in vector) for vector in parameter_values)
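# Illustrative sketch, not part of the original class: _validate_parameter_values
# normalises the accepted input shapes to a tuple of tuples of floats, e.g.
#   _validate_parameter_values(1.2)            # -> ((1.2,),)
#   _validate_parameter_values([1, 2, 3])      # -> ((1.0, 2.0, 3.0),)
#   _validate_parameter_values([[1, 2], [3]])  # -> ((1.0, 2.0), (3.0,))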
@staticmethod
def _cross_validate_circuits_parameter_values(
circuits: tuple[QuantumCircuit, ...], parameter_values: tuple[tuple[float, ...], ...]
) -> None:
if len(circuits) != len(parameter_values):
raise ValueError(
f"The number of circuits ({len(circuits)}) does not match "
f"the number of parameter value sets ({len(parameter_values)})."
)
for i, (circuit, vector) in enumerate(zip(circuits, parameter_values)):
if len(vector) != circuit.num_parameters:
raise ValueError(
f"The number of values ({len(vector)}) does not match "
f"the number of parameters ({circuit.num_parameters}) for the {i}-th circuit."
)
def METHOD_NAME(obj: Sequence[Sequence[float]] | Sequence[float] | float) -> bool:
"""Check if object is int."""
int_types = (int, np.integer)
return isinstance(obj, int_types) and not isinstance(obj, bool)
def _isreal(obj: Sequence[Sequence[float]] | Sequence[float] | float) -> bool:
"""Check if object is a real number: int or float except ``±Inf`` and ``NaN``."""
float_types = (float, np.floating)
return METHOD_NAME(obj) or isinstance(obj, float_types) and float("-Inf") < obj < float("Inf")
|
1,385 |
is normalized
|
# The Notices and Disclaimers for Ocean Worlds Autonomy Testbed for Exploration
# Research and Simulation can be found in README.md in the root directory of
# this repository.
from math import sqrt, isclose, acos
from geometry_msgs.msg import Quaternion, Vector3
def _types_match(a, b):
return type(a) == type(b)
def add(a, b):
"""Adds two vectors
a -- geometry_msgs Vector3 or Point
b -- geometry_msgs Vector3 or Point
returns a Vector3 that represents a + b
"""
return Vector3(a.x + b.x, a.y + b.y, a.z + b.z)
def subtract(a, b):
"""Subtracts two vectors
a -- geometry_msgs Vector3 or Point
b -- geometry_msgs Vector3 or Point
returns a Vector3 that represents a - b
"""
return Vector3(a.x - b.x, a.y - b.y, a.z - b.z)
def scalar_multiply(a, b):
"""Computes the multiplication of the scalar a to the vector b
a -- float
b -- geometry_msgs Vector3 or Point
returns a vector that is the result of ab
"""
return type(b)(a * b.x, a * b.y, a * b.z)
def dot(a, b):
"""Computes dot product between vectors/Quaternions a and b (a * b)
a -- geometry_msgs Vector3, Point, or Quaternions
b -- geometry_msgs Vector3, Point, or Quaternions
returns a float that is the result of a * b
"""
assert(_types_match(a, b))
dot = a.x * b.x + a.y * b.y + a.z * b.z
if hasattr(a, 'w'):
dot += a.w * b.w
return dot
def cross(a, b):
"""Computes the cross product between vectors a and b (a x b)
a -- geometry_msgs Vector3 or Point
b -- geometry_msgs Vector3 or Point
returns a vector that is the result of a x b
"""
assert(_types_match(a, b))
return type(a)(a.y*b.z - a.z*b.y, a.z*b.x - a.x*b.z, a.x*b.y - a.y*b.x)
def norm_squared(v):
"""Computes the squared norm (or length) of a vector or quaternion
v -- geometry_msgs Vector3, Point, or Quaternion
returns the squared norm of v
"""
return dot(v, v)
def norm(v):
"""Computes the norm (or length) of a vector or quaternion
v -- geometry_msgs Vector3, Point, or Quaternion
returns the norm of v
"""
return sqrt(norm_squared(v))
def normalize(v):
"""Normalizes a vector or quaternion
v -- geometry_msgs Vector3, Point, or Quaternion
returns the normalized version of v
"""
n = norm(v)
assert(n != 0)
if hasattr(v, 'w'):
return type(v)(v.x / n, v.y / n, v.z / n, v.w / n)
else:
return type(v)(v.x / n, v.y / n, v.z / n)
def METHOD_NAME(v):
"""Normalization check
v -- geometry_msgs Vector3 or Point
returns true if v is normalized
"""
return norm_squared(v) == 1.0
def distance(a, b):
"""Compute the distance between vectors a and b, same as norm(b - a)
a -- geometry_msgs Vector3 or Point
b -- geometry_msgs Vector3 or Point
returns a float that represents the distance between two vectors
"""
return norm(subtract(b, a))
def orthogonal(v):
"""Returns an orthogonal vector to v
v -- geometry_msgs Vector3 or Point
returns the orthogonal vector of v
"""
normalized = normalize(v)
t = type(v)
x = abs(normalized.x)
y = abs(normalized.y)
z = abs(normalized.z)
basis = None
if x < y:
basis = t(1, 0, 0) if x < z else t(0, 0, 1)
else:
basis = t(0, 1, 0) if y < z else t(0, 0, 1)
return cross(normalized, basis)
def quaternion_rotation_between(a, b):
"""Computes the quaternion rotation between the vectors a and b.
a -- geometry_msgs Vector3 or Point
b -- geometry_msgs Vector3 or Point
returns a quaternion that represents a rotation from a to b
"""
a = normalize(a)
b = normalize(b)
k = dot(a, b)
ab_norm = sqrt(norm_squared(a) * norm_squared(b))
if isclose(k / ab_norm, -1): # special case of a = -b
o = normalize(orthogonal(a))
return Quaternion(o.x, o.y, o.z, 0)
w = k + ab_norm
v = cross(a, b)
return normalize(Quaternion(v.x, v.y, v.z, w))
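# Illustrative sketch, not part of the original module: rotating the x-axis onto the
# y-axis yields a 90-degree rotation about z, e.g.
#   q = quaternion_rotation_between(Vector3(1, 0, 0), Vector3(0, 1, 0))
#   # q is approximately Quaternion(0, 0, 0.7071, 0.7071)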
def poses_approx_equivalent(pose1, pose2, linear_tolerance, angular_tolerance):
"""Checks if two poses are nearly the same position and orientation.
pose1 -- geometry_msgs Pose
pose2 -- geometry_msgs Pose to be checked against pose1
linear_tolerance -- The maximal distance positions can differ by (meters)
angular_tolerance -- The maximal spherical distance orientations can differ by
(radians)
returns True if the difference between pose1 and pose2's position and
orientations are below their respective tolerances. False otherwise.
"""
p1 = pose1.position
p2 = pose2.position
if distance(p1, p2) <= linear_tolerance:
o1 = pose1.orientation
o2 = pose2.orientation
# check that the geodesic norm between the 2 quaternions is below tolerance
dp = dot(o1, o2)
if acos(2*dp*dp-1) <= angular_tolerance:
return True
return False
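# Illustrative note, not part of the original module: the angular test above uses the
# geodesic distance between unit quaternions, acos(2*dot(o1, o2)**2 - 1); identical
# orientations give dot(o1, o2) = 1 and therefore an angle of 0, so they always pass.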
|
1,386 |
get current
|
# This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
The SVG preview panel widget.
"""
import os
import sys
from PyQt5 import QtCore
from PyQt5.QtWidgets import (QComboBox, QHBoxLayout, QLabel, QPushButton,
QSpinBox, QToolButton, QVBoxLayout, QWidget)
import app
import qutil
import resultfiles
from . import view
from . import svgfiles
class SvgView(QWidget):
def __init__(self, dockwidget):
super().__init__(dockwidget)
self._document = None
self._setting_zoom = False
self.view = view.View(self)
self.pageLabel = QLabel()
self.pageCombo = QComboBox(sizeAdjustPolicy=QComboBox.AdjustToContents)
layout = QVBoxLayout(spacing=0)
layout.setContentsMargins(0, 0, 0, 0)
self.setLayout(layout)
hbox = QHBoxLayout(spacing=0)
hbox.addWidget(self.pageLabel)
hbox.addWidget(self.pageCombo)
self.zoomInButton = QToolButton(autoRaise=True)
self.zoomOutButton = QToolButton(autoRaise=True)
self.zoomOriginalButton = QToolButton(autoRaise=True)
self.zoomNumber = QSpinBox(minimum=10, maximum=1000, suffix='%')
ac = dockwidget.actionCollection
self.zoomInButton.setDefaultAction(ac.svg_zoom_in)
self.zoomOutButton.setDefaultAction(ac.svg_zoom_out)
self.zoomOriginalButton.setDefaultAction(ac.svg_zoom_original)
hbox.addWidget(self.zoomInButton)
hbox.addWidget(self.zoomNumber)
hbox.addWidget(self.zoomOutButton)
hbox.addWidget(self.zoomOriginalButton)
self.resetButton = QPushButton("reload", self)
self.resetButton.clicked.connect(self.reLoadDoc)
hbox.addWidget(self.resetButton)
self.saveButton = QPushButton("save edits", self)
self.saveButton.clicked.connect(self.callSave)
hbox.addWidget(self.saveButton)
hbox.addStretch(1)
layout.addLayout(hbox)
layout.addWidget(self.view)
app.jobFinished.connect(self.initSvg)
app.documentClosed.connect(self.slotDocumentClosed)
app.documentLoaded.connect(self.initSvg)
self.pageCombo.currentIndexChanged.connect(self.changePage)
self.zoomNumber.valueChanged.connect(self.slotZoomNumberChanged)
self.view.zoomFactorChanged.connect(self.slotViewZoomChanged)
dockwidget.mainwindow().currentDocumentChanged.connect(self.initSvg)
self.zoomNumber.setValue(100)
doc = dockwidget.mainwindow().currentDocument()
if doc:
self.initSvg(doc)
app.translateUI(self)
def translateUI(self):
self.pageLabel.setText(_("Page:"))
def mainwindow(self):
return self.parent().mainwindow()
def initSvg(self, doc):
"""Opens first page of score after compilation"""
if doc == self.mainwindow().currentDocument():
files = svgfiles.SvgFiles.instance(doc)
model = files.model() # forces update
if files:
self._document = doc
with qutil.signalsBlocked(self.pageCombo):
self.pageCombo.setModel(model)
self.pageCombo.setCurrentIndex(files.current)
self.view.load(files.url(files.current))
def reLoadDoc(self):
"""Reloads current document."""
if self._document:
self.initSvg(self._document)
def callSave(self):
"""Call save function"""
self.view.evalSave()
def METHOD_NAME(self):
files = svgfiles.SvgFiles.instance(self._document)
return files.filename(files.current)
def slotZoomNumberChanged(self, value):
self._setting_zoom = True
self.view.setZoomFactor(value / 100.0)
self._setting_zoom = False
def slotViewZoomChanged(self):
if not self._setting_zoom:
self.zoomNumber.setValue(int(self.view.zoomFactor() * 100))
def changePage(self, page_index):
"""change page of score"""
doc = self._document
if doc:
files = svgfiles.SvgFiles.instance(doc)
if files:
files.current = page_index
svg = files.url(page_index)
self.view.load(svg)
def slotDocumentClosed(self, doc):
if doc == self._document:
self._document = None
if self.pageCombo.model():
self.pageCombo.model().deleteLater()
self.pageCombo.clear()
self.pageCombo.update() # otherwise it doesn't redraw
self.view.clear()
|
1,387 |
validate sample list1
|
# Copyright(C) 1999-2020 National Technology & Engineering Solutions
# of Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525 with
# NTESS, the U.S. Government retains certain rights in this software.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of NTESS nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from phactori import *
from .PhactoriOperationBlock import *
from .PhactoriVectorLibrary import *
from paraview.simple import *
from .PhactoriSegment import *
from .PhactoriMpiUtilities import *
from .PhactoriParaviewMultiBlockRecursion import *
from .PhactoriSampledCellInfo import *
import vtk
import json
#phactori_combine_to_single_python_file_subpiece_begin_1
class PhactoriStructuredGridSampler(PhactoriOperationSpecifics):
"""given a multiblock dataset which has leaves that ar structured grids,
take specified i/j/k sample ranges from specified blocks. Optionally
make mask variable for sampled cells with programmable filter"""
def __init__(self):
self.myCopyOfInputFilter = None
self.SampleControlSet = None
def METHOD_NAME(self, sampleList):
if len(sampleList) < 1:
myDebugPrint3AndException(
"PhactoriStructuredGridSampler:ValidateSampleList1\n" \
"list of samples must have at least one entry\n")
for oneSample in sampleList:
sampleBlockName = oneSample[0]
sampleBlockType = oneSample[1]
validType = False
if sampleBlockType == "imax":
validType = True
if sampleBlockType == "imin":
validType = True
if sampleBlockType == "jmax":
validType = True
if sampleBlockType == "jmin":
validType = True
if sampleBlockType == "kmax":
validType = True
if sampleBlockType == "kmin":
validType = True
if validType != True:
myDebugPrint3AndException(
"PhactoriStructuredGridSampler:ValidateSampleList1\n" \
"invalid sample type: " + str(sampleBlockType) + "\n" \
"must be 'imin' 'imax' 'jmin' 'jmax' 'kmin' or 'kmax'\n")
def ParseParametersFromJson(self, inJson):
keyval1 = "samples to be taken from all blocks"
if keyval1 in inJson:
sampleControlList = inJson[keyval1]
self.METHOD_NAME(sampleControlList)
self.SampleControlSet = {}
for oneSampleControl in sampleControlList:
self.SampleControlSet[oneSampleControl[0]] = oneSampleControl[1]
@staticmethod
def GatherStructuredSampledCellsInBlock(recursionObject, inInputCsData, inParameters):
if PhactoriDbg(100):
myDebugPrint3("GatherStructuredSampledCellsInBlock entered\n")
numCells = inInputCsData.GetNumberOfCells()
numPoints = inInputCsData.GetNumberOfPoints()
inParameters.leafVisitCount += 1
if (numCells == 0) or (numPoints == 0):
#no cells here
return
if PhactoriDbg(100):
myDebugPrint3("GatherStructuredSampledCellsInBlock returning\n")
def GatherStructuredSampledCellsOnThisProcess(self):
if PhactoriDbg(100):
myDebugPrint3("GatherStructuredCellsFromSeedCells entered\n")
recursionParams = GatherStructuredSampledCellsOnThisProcessRecursionParams()
recursionParams.SetUpForRecursion(self)
recursionObj = PhactoriParaviewMultiBlockRecursionControl()
recursionObj.mParameters = recursionParams
recursionObj.mOperationToDoPerBlock = self.GatherStructuredSampledCellsInBlock
PhactoriRecusivelyDoMethodPerBlockFromParaViewFilter(recursionObj, self.myCopyOfInputFilter)
if PhactoriDbg(100):
myDebugPrint3("GatherStructuredCellsFromSeedCells returning\n")
return recursionParams.CellsPerSeedCell
cellData = inInputCsData.GetCellData()
outputCellArray = None
#if cellData != None:
# outputCellArray = cellData.GetArray(inParameters.dataArrayName)
if outputCellArray != None:
dataArrayNumCmpnts = outputCellArray.GetNumberOfComponents()
defaultTuple = []
for ii in range(0, dataArrayNumCmpnts):
defaultTuple.append(-1.0)
else:
dataArrayNumCmpnts = -1
defaultTuple = []
def CreateParaViewFilter(self, inInputFilter):
if PhactoriDbg(100):
myDebugPrint3("PhactoriStructuredGridSampler.CreateParaViewFilter entered\n", 100)
savedActiveSource = GetActiveSource()
##segmentListJson = ReadAndMpiBroadcastJsonFile(self.JsonListFileName)
self.myCopyOfInputFilter = inInputFilter
UpdatePipelineWithCurrentTimeArgument(self.myCopyOfInputFilter)
#need to figure global extents
self.GatherStructuredSampledCellsOnThisProcess()
if PhactoriDbg(100):
myDebugPrint3("PhactoriStructuredGridSampler.CreateParaViewFilter returning\n", 100)
#phactori_combine_to_single_python_file_subpiece_end_1
|
1,388 |
set basic data json
|
"""This module contains function to save building element classes."""
import teaser.logic.utilities as utilities
import warnings
import collections
import json
def save_type_element(element, data_class):
"""Save information about building element to json.
Saves typical building elements according to their construction
year and their construction type in the json file for type building
elements. If the Project parent is set, it automatically saves it to
the file given in Project.data. Alternatively you can specify a path to
a file of TypeBuildingElements. If this file does not exist,
a new file is created.
Parameters
----------
element : BuildingElement()
Instance of BuildingElement or inherited Element of TEASER
data_class : DataClass()
DataClass containing the bindings for TypeBuildingElement and
Material (typically this is the data class stored in prj.data,
but the user can individually change that.
"""
data_class.element_bind["version"] = "0.7"
add_to_json = True
warning_text = (
"Construction Type and building age "
"group already exist in this json, consider revising "
"your inputs. The Element is NOT saved into json"
)
check_str = "{}_{}_{}".format(
type(element).__name__, element.building_age_group, element.construction_type
)
if check_str in data_class.element_bind.keys():
warnings.warn(warning_text)
add_to_json = False
return
if add_to_json is True:
data_class.element_bind[check_str] = collections.OrderedDict()
METHOD_NAME(
element=element, wall_out=data_class.element_bind[check_str]
)
_set_layer_data_json(
element=element, wall_out=data_class.element_bind[check_str]
)
with open(utilities.get_full_path(data_class.path_tb), "w") as file:
file.write(
json.dumps(data_class.element_bind, indent=4, separators=(",", ": "))
)
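# Illustrative note, not part of the original module: elements are keyed as
# "<ClassName>_<building_age_group>_<construction_type>", so a hypothetical OuterWall
# with building_age_group [2010, 2015] and construction_type "heavy" would be stored
# under "OuterWall_[2010, 2015]_heavy" (attribute values here are made up).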
def delete_type_element(element, data_class):
"""Delete typical element in json.
Deletes typical building elements according to their construction
year and their construction type in the json file for type building
elements. If the Project parent is set, it automatically saves it to
the file given in Project.data. Alternatively you can specify a path to
a file of TypeBuildingElements. If this file does not exist,
a new file is created.
Parameters
----------
element : BuildingElement()
Instance of BuildingElement or inherited Element of TEASER
data_class : DataClass()
DataClass containing the bindings for TypeBuildingElement and
Material (typically this is the data class stored in prj.data,
but the user can individually change that.
"""
check_str = "{}_{}_{}".format(
type(element).__name__, element.building_age_group, element.construction_type
)
del data_class.element_bind[check_str]
with open(utilities.get_full_path(data_class.path_tb), "w") as file:
file.write(
json.dumps(data_class.element_bind, indent=4, separators=(",", ": "))
)
def METHOD_NAME(element, wall_out):
"""Set basic data of building element.
Helper function.
Parameters
----------
element : BuildingElement()
Instance of BuildingElement or inherited Element of TEASER
wall_out: dictionary
Dictionary with information about walls.
"""
wall_out["building_age_group"] = element.building_age_group
wall_out["construction_type"] = element.construction_type
wall_out["inner_radiation"] = element.inner_radiation
wall_out["inner_convection"] = element.inner_convection
if type(element).__name__ == "Window":
wall_out["outer_radiation"] = element.outer_radiation
wall_out["outer_convection"] = element.outer_convection
wall_out["g_value"] = element.g_value
wall_out["a_conv"] = element.a_conv
wall_out["shading_g_total"] = element.shading_g_total
wall_out["shading_max_irr"] = element.shading_max_irr
elif (
type(element).__name__ == "OuterWall"
or type(element).__name__ == "Rooftop"
or type(element).__name__ == "Door"
):
wall_out["outer_radiation"] = element.outer_radiation
wall_out["outer_convection"] = element.outer_convection
def _set_layer_data_json(element, wall_out):
"""Set layer data of building element.
Helper function.
Parameters
----------
element : BuildingElement()
Instance of BuildingElement or inherited Element of TEASER
wall_out: dictionary
Dictionary with information about walls.
"""
layer_dict = collections.OrderedDict()
for layer in element.layer:
layer_dict[layer.id] = collections.OrderedDict()
layer_dict[layer.id]["thickness"] = layer.thickness
layer_dict[layer.id]["material"] = collections.OrderedDict()
layer_dict[layer.id]["material"]["name"] = layer.material.name
layer_dict[layer.id]["material"]["material_id"] = layer.material.material_id
wall_out["layer"] = layer_dict
|
1,389 |
run test
|
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Copyright (c) DeFi Blockchain Developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
"""Test rejection of invalid custom TXs."""
from test_framework.test_framework import DefiTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.util import assert_equal
class RejectCustomTx(DefiTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [
[
"-txnotokens=0",
"-amkheight=1",
"-bayfrontheight=1",
"-bayfrontgardensheight=1",
"-dakotaheight=1",
"-fortcanningheight=120",
]
]
def METHOD_NAME(self):
self.nodes[0].generate(101)
# Get and fund collateral address
collateral = self.nodes[0].getnewaddress("", "legacy")
self.nodes[0].sendtoaddress(collateral, 1)
self.nodes[0].generate(2)
block_count = self.nodes[0].getblockcount()
# Record the number of MNs
num_mns = len(self.nodes[0].listmasternodes())
# Make create MN TX, get details and wipe from mempool
txid = self.nodes[0].createmasternode(collateral)
rawtx = self.nodes[0].getrawtransaction(txid, 1)
rawtx_hex = self.nodes[0].getrawtransaction(txid)
self.nodes[0].clearmempool()
# Get push data and use to double push data in op_return
pushdata = rawtx["vout"][0]["scriptPubKey"]["hex"][2:]
result = rawtx_hex.find(pushdata)
rawtx_hex_multiop = (
rawtx_hex[: result - 4]
+ hex(len(pushdata) + 1)[2:]
+ "6a"
+ pushdata
+ rawtx_hex[result:]
)
signed_multiop = self.nodes[0].signrawtransactionwithwallet(rawtx_hex_multiop)
# Send TX
self.nodes[0].sendrawtransaction(signed_multiop["hex"])
self.nodes[0].generate(1)
# Check that MNs have increased by 1
assert_equal(num_mns + 1, len(self.nodes[0].listmasternodes()))
# Rollback and wipe TXs.
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(block_count))
self.nodes[0].clearmempool()
# Test sending DfTx in a vout other than 0
rawtx_opreturn_vout1 = self.nodes[0].createrawtransaction(
[{"txid": rawtx["vin"][0]["txid"], "vout": rawtx["vin"][0]["vout"]}],
[{collateral: 0.99999000}, {"data": pushdata[2:]}],
)
signed_opreturn_vout1 = self.nodes[0].signrawtransactionwithwallet(
rawtx_opreturn_vout1
)
self.nodes[0].sendrawtransaction(signed_opreturn_vout1["hex"])
# Wipe TX
self.nodes[0].clearmempool()
# Test sending DfTx with None type
rawtx_none = self.nodes[0].createrawtransaction(
[{"txid": rawtx["vin"][0]["txid"], "vout": rawtx["vin"][0]["vout"]}],
[{collateral: 0.99999000}, {"data": "4466547800"}],
)
signed_none = self.nodes[0].signrawtransactionwithwallet(rawtx_none)
self.nodes[0].sendrawtransaction(signed_none["hex"])
# Wipe TX
self.nodes[0].clearmempool()
# Test sending DfTx with unknown type z
rawtx_unknown = self.nodes[0].createrawtransaction(
[{"txid": rawtx["vin"][0]["txid"], "vout": rawtx["vin"][0]["vout"]}],
[{collateral: 0.99999000}, {"data": "4466547879"}],
)
signed_unknown = self.nodes[0].signrawtransactionwithwallet(rawtx_unknown)
self.nodes[0].sendrawtransaction(signed_unknown["hex"])
# Wipe TX
self.nodes[0].clearmempool()
# Move to FortCanning height
self.nodes[0].generate(20)
# Make sure MNs still at original count
assert_equal(num_mns, len(self.nodes[0].listmasternodes()))
# Try and send multi opcode TX after hard fork
try:
self.nodes[0].sendrawtransaction(signed_multiop["hex"])
assert False
except JSONRPCException as e:
errorString = e.error["message"]
assert "Invalid custom transaction" in errorString
# Try and send DfTx in a vout other than 0 after hard fork
try:
self.nodes[0].sendrawtransaction(signed_opreturn_vout1["hex"])
assert False
except JSONRPCException as e:
errorString = e.error["message"]
assert "Invalid custom transaction" in errorString
# Try and send DfTx with None type after hard fork
try:
self.nodes[0].sendrawtransaction(signed_none["hex"])
assert False
except JSONRPCException as e:
errorString = e.error["message"]
assert "Invalid custom transaction" in errorString
# Try and send DfTx with unknown type after hard fork
try:
self.nodes[0].sendrawtransaction(signed_unknown["hex"])
assert False
except JSONRPCException as e:
errorString = e.error["message"]
assert "Invalid custom transaction" in errorString
if __name__ == "__main__":
RejectCustomTx().main()
|
1,390 |
file exists error
|
"""
Compatibility module similar to six lib, which helps maintaining
a single code base working with both python 2.7 and 3.x.
"""
import errno
import os
import sys
import types
PY3 = sys.version_info[0] == 3
_SENTINEL = object()
if PY3:
def u(s):
return s
def b(s):
return s.encode("latin-1")
getcwdu = os.getcwd
unicode = str
xrange = range
long = int
else:
def u(s):
return unicode(s)
def b(s):
return s
getcwdu = os.getcwdu
unicode = unicode
xrange = xrange
long = long
# removed in 3.0, reintroduced in 3.2
try:
callable = callable
except Exception:
def callable(obj):
for klass in type(obj).__mro__:
if "__call__" in klass.__dict__:
return True
return False
# --- exceptions
if PY3:
FileNotFoundError = FileNotFoundError # NOQA
METHOD_NAME = METHOD_NAME # NOQA
else:
# https://github.com/PythonCharmers/python-future/blob/exceptions/
# src/future/types/exceptions/pep3151.py
import platform
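# Factory producing exception classes whose isinstance()/except checks delegate to a
# predicate function, emulating the PEP 3151 exception hierarchy on Python 2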
def _instance_checking_exception(base_exception=Exception):
def wrapped(instance_checker):
class TemporaryClass(base_exception):
def __init__(self, *args, **kwargs):
if len(args) == 1 and isinstance(args[0], TemporaryClass):
unwrap_me = args[0]
for attr in dir(unwrap_me):
if not attr.startswith('__'):
setattr(self, attr, getattr(unwrap_me, attr))
else:
super(TemporaryClass, self).__init__(*args, **kwargs)
class __metaclass__(type):
def __instancecheck__(cls, inst):
return instance_checker(inst)
def __subclasscheck__(cls, classinfo):
value = sys.exc_info()[1]
return isinstance(value, cls)
TemporaryClass.__name__ = instance_checker.__name__
TemporaryClass.__doc__ = instance_checker.__doc__
return TemporaryClass
return wrapped
@_instance_checking_exception(EnvironmentError)
def FileNotFoundError(inst):
return getattr(inst, 'errno', _SENTINEL) == errno.ENOENT
@_instance_checking_exception(EnvironmentError)
def METHOD_NAME(inst):
return getattr(inst, 'errno', _SENTINEL) == errno.EEXIST
if platform.python_implementation() != "CPython":
try:
raise OSError(errno.EEXIST, "perm")
except METHOD_NAME:
pass
except OSError:
raise RuntimeError(
"broken or incompatible Python implementation, see: "
"https://github.com/giampaolo/psutil/issues/1659")
# Python 3 super().
# Taken from "future" package.
# Credit: Ryan Kelly
if PY3:
super = super
else:
_builtin_super = super
def super(type_=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1):
"""Like Python 3 builtin super(). If called without any arguments
it attempts to infer them at runtime.
"""
if type_ is _SENTINEL:
f = sys._getframe(framedepth)
try:
# Get the function's first positional argument.
type_or_obj = f.f_locals[f.f_code.co_varnames[0]]
except (IndexError, KeyError):
raise RuntimeError('super() used in a function with no args')
try:
# Get the MRO so we can crawl it.
mro = type_or_obj.__mro__
except (AttributeError, RuntimeError):
try:
mro = type_or_obj.__class__.__mro__
except AttributeError:
raise RuntimeError('super() used in a non-newstyle class')
for type_ in mro:
# Find the class that owns the currently-executing method.
for meth in type_.__dict__.values():
# Drill down through any wrappers to the underlying func.
# This handles e.g. classmethod() and staticmethod().
try:
while not isinstance(meth, types.FunctionType):
if isinstance(meth, property):
# Calling __get__ on the property will invoke
# user code which might throw exceptions or
# have side effects
meth = meth.fget
else:
try:
meth = meth.__func__
except AttributeError:
meth = meth.__get__(type_or_obj, type_)
except (AttributeError, TypeError):
continue
if meth.func_code is f.f_code:
break # found
else:
# Not found. Move onto the next class in MRO.
continue
break # found
else:
raise RuntimeError('super() called outside a method')
# Dispatch to builtin super().
if type_or_obj is not _SENTINEL:
return _builtin_super(type_, type_or_obj)
return _builtin_super(type_)
|
1,391 |
write chartsheet
|
###############################################################################
#
# Chartsheet - A class for writing the Excel XLSX Worksheet file.
#
# Copyright 2013-2017, John McNamara, [email protected]
#
from . import worksheet
from .drawing import Drawing
class Chartsheet(worksheet.Worksheet):
"""
A class for writing the Excel XLSX Chartsheet file.
"""
###########################################################################
#
# Public API.
#
###########################################################################
def __init__(self):
"""
Constructor.
"""
super(Chartsheet, self).__init__()
self.is_chartsheet = True
self.drawing = None
self.chart = None
self.charts = []
self.zoom_scale_normal = 0
self.orientation = 0
self.protection = False
def set_chart(self, chart):
"""
Set the chart object for the chartsheet.
Args:
chart: Chart object.
Returns:
chart: A reference to the chart object.
"""
chart.embedded = False
chart.protection = self.protection
self.chart = chart
self.charts.append([0, 0, chart, 0, 0, 1, 1])
return chart
def protect(self, password='', options=None):
"""
Set the password and protection options of the worksheet.
Args:
password: An optional password string.
options: A dictionary of worksheet objects to protect.
Returns:
Nothing.
"""
# Overridden from parent worksheet class.
if self.chart:
self.chart.protection = True
else:
self.protection = True
if not options:
options = {}
options = options.copy()
options['sheet'] = False
options['content'] = True
options['scenarios'] = True
# Call the parent method.
super(Chartsheet, self).protect(password, options)
###########################################################################
#
# Private API.
#
###########################################################################
def _assemble_xml_file(self):
# Assemble and write the XML file.
# Write the XML declaration.
self._xml_declaration()
# Write the root worksheet element.
self.METHOD_NAME()
# Write the worksheet properties.
self._write_sheet_pr()
# Write the sheet view properties.
self._write_sheet_views()
# Write the sheetProtection element.
self._write_sheet_protection()
# Write the printOptions element.
self._write_print_options()
# Write the worksheet page_margins.
self._write_page_margins()
# Write the worksheet page setup.
self._write_page_setup()
# Write the headerFooter element.
self._write_header_footer()
# Write the drawing element.
self._write_drawings()
# Close the worksheet tag.
self._xml_end_tag('chartsheet')
# Close the file.
self._xml_close()
def _prepare_chart(self, index, chart_id, drawing_id):
# Set up chart/drawings.
self.chart.id = chart_id - 1
self.drawing = Drawing()
self.drawing.orientation = self.orientation
self.external_drawing_links.append(['/drawing',
'../drawings/drawing'
+ str(drawing_id)
+ '.xml'])
self.drawing_links.append(['/chart',
'../charts/chart'
+ str(chart_id)
+ '.xml'])
###########################################################################
#
# XML methods.
#
###########################################################################
def METHOD_NAME(self):
# Write the <worksheet> element. This is the root element.
schema = 'http://schemas.openxmlformats.org/'
xmlns = schema + 'spreadsheetml/2006/main'
xmlns_r = schema + 'officeDocument/2006/relationships'
attributes = [
('xmlns', xmlns),
('xmlns:r', xmlns_r)]
self._xml_start_tag('chartsheet', attributes)
def _write_sheet_pr(self):
# Write the <sheetPr> element for Sheet level properties.
attributes = []
if self.filter_on:
attributes.append(('filterMode', 1))
if (self.fit_page or self.tab_color):
self._xml_start_tag('sheetPr', attributes)
self._write_tab_color()
self._write_page_set_up_pr()
self._xml_end_tag('sheetPr')
else:
self._xml_empty_tag('sheetPr', attributes)
|
1,392 |
async result
|
# Copyright (c) 2020 Charles University, Faculty of Arts,
# Institute of the Czech National Corpus
# Copyright (c) 2020 Martin Zimandl <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# dated June, 1991.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import asyncio
import importlib
import logging
import re
import sys
import time
from typing import Type, TypeVar, Union
import ujson as json
from action.errors import UserReadableException
from bgcalc.adapter.abstract import AbstractBgClient, AbstractResultWrapper
from bgcalc.errors import BgCalcError, CalcTaskNotFoundError
from redis import Redis
from rq import Queue
from rq.exceptions import NoSuchJobError
from rq.job import Job
from rq_scheduler import Scheduler
T = TypeVar('T')
class ResultWrapper(AbstractResultWrapper[T]):
status_map = dict(
queued='PENDING',
started='STARTED',
deferred='deferred', # TODO Rq specific
finished='SUCCESS',
failed='FAILURE'
)
def __init__(self, job):
self._job = job
self.result: Union[T, Exception] = None
def _infer_error(self, exc_info, job_id):
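# Parse the last non-empty line of the traceback text ('module.ClassName: message') and
# try to re-create the original exception instance; fall back to a generic Exception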
last = [x for x in re.split(r'\n', exc_info) if x.strip() != ''][-1]
srch = re.match(r'^([\w_\.]+):\s+(.+)$', last)
if srch is not None:
path = srch.group(1)
if '.' in path:
module, cls_name = srch.group(1).rsplit('.', 1)
try:
m = importlib.import_module(module)
cls = getattr(m, cls_name, None)
err = cls(srch.group(2)) if cls else Exception(srch.group(2))
except ModuleNotFoundError:
logging.getLogger(__name__).warning(
'Failed to infer calc backend job error {}'.format(path))
err = Exception(f'Task failed: {job_id}')
else:
cls = getattr(sys.modules['builtins'], path, None)
err = cls(srch.group(2)) if cls else Exception(srch.group(2))
return err
return Exception(f'Task failed: {job_id}')
def get(self, timeout=None):
try:
total_time = 0
while True:
time.sleep(0.5)
if self._job.is_finished:
self.result = self._job.result
break
elif self._job.is_failed:
self._job.refresh()
self.result = self._infer_error(self._job.exc_info, self._job.id)
break
elif timeout and total_time > timeout:
self.result = Exception(f'Task result timeout: {self._job}')
break
total_time += 0.5
except Exception as e:
self.result = e
return self.result
@property
def status(self):
if self._job and self._job.get_status():
return ResultWrapper.status_map[self._job.get_status()]
return 'FAILURE'
@property
def id(self):
return self._job.id
class RqConfig:
HOST = None
PORT = None
DB = None
SCHEDULER_CONF_PATH = None
class Control:
def __init__(self, redis_conn):
self._conn = redis_conn
def revoke(self, task_id, terminate=None, signal=None):
try:
job = Job.fetch(task_id, connection=self._conn)
job.cancel()
except NoSuchJobError as ex:
raise CalcTaskNotFoundError(str(ex))
class RqClient(AbstractBgClient):
def __init__(self, conf: RqConfig, prefix: str = ''):
self.redis_conn = Redis(host=conf.HOST, port=conf.PORT, db=conf.DB)
self.queue = Queue(connection=self.redis_conn)
self.prefix = prefix
self.scheduler = Scheduler(connection=self.redis_conn, queue=self.queue)
self.scheduler_conf_path = conf.SCHEDULER_CONF_PATH
self._control = Control(self.redis_conn)
def init_scheduler(self):
# remove old scheduled tasks
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
# create new tasks from config file
if self.scheduler_conf_path:
with open(self.scheduler_conf_path) as f:
for entry in json.load(f):
self.scheduler.cron(
entry['schedule'],
f'{self.prefix}.{entry["task"]}',
kwargs=entry['kwargs'] if 'kwargs' in entry else None,
use_local_timezone=True,
)
logging.getLogger(__name__).info(
f'Loaded configuration for Rq-scheduler from {self.scheduler_conf_path}')
else:
logging.getLogger(__name__).warning(
'No Rq-scheduler configuration path defined. '
'Regular system maintenance will be disabled which may lead to disks becoming full.')
@staticmethod
def _resolve_limit(softl, hardl):
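# Rq knows only a single job timeout, so collapse the two limits into one value:
# the smaller when both are set, otherwise whichever one is not None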
if softl is not None and hardl is not None:
return min(softl, hardl)
elif softl is not None:
return softl
elif hardl is not None:
return hardl
return None
@property
def control(self):
return self._control
def send_task_sync(self, name, ans_type: Type[T], args=None, time_limit=None, soft_time_limit=None) -> ResultWrapper[T]:
tl = self._resolve_limit(time_limit, soft_time_limit)
try:
job = self.queue.enqueue(f'{self.prefix}.{name}', job_timeout=tl, args=args)
return ResultWrapper(job)
except Exception as ex:
logging.getLogger(__name__).error(ex)
async def send_task(self, name, ans_type: Type[T], args=None, time_limit=None, soft_time_limit=None) -> ResultWrapper[T]:
"""
Send a task to the worker.
Please note that Rq does not know hard vs. soft time limit. In case both
values are filled in (time_limit, soft_time_limit), the smaller one is
selected. Otherwise, the non-None is applied.
"""
return await asyncio.get_event_loop().run_in_executor(
None, self.send_task_sync, name, ans_type, args, time_limit, soft_time_limit)
def get_task_error(self, task_id):
try:
job = Job.fetch(task_id, connection=self.redis_conn)
if job.get_status() == 'failed':
return BgCalcError(job.exc_info)
except NoSuchJobError as ex:
return CalcTaskNotFoundError(ex)
return None
def METHOD_NAME(self, ident):
try:
return ResultWrapper(Job.fetch(ident, connection=self.redis_conn))
except NoSuchJobError:
logging.getLogger(__name__).warning(f'Job {ident} not found')
return None
def is_wrapped_user_error(self, err):
return isinstance(err, UserReadableException)
|
1,393 |
custom loss
|
# Copyright 2020 The AutoKeras Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import keras_tuner
import pytest
import autokeras as ak
from autokeras import graph as graph_module
def test_input_output_disconnect():
input_node1 = ak.Input()
output_node = input_node1
_ = ak.DenseBlock()(output_node)
input_node = ak.Input()
output_node = input_node
output_node = ak.DenseBlock()(output_node)
output_node = ak.RegressionHead()(output_node)
with pytest.raises(ValueError) as info:
graph_module.Graph(inputs=input_node1, outputs=output_node)
assert "Inputs and outputs not connected." in str(info.value)
def test_hyper_graph_cycle():
input_node1 = ak.Input()
input_node2 = ak.Input()
output_node1 = ak.DenseBlock()(input_node1)
output_node2 = ak.DenseBlock()(input_node2)
output_node = ak.Merge()([output_node1, output_node2])
head = ak.RegressionHead()
output_node = head(output_node)
head.outputs = output_node1
with pytest.raises(ValueError) as info:
graph_module.Graph(
inputs=[input_node1, input_node2], outputs=output_node
)
assert "The network has a cycle." in str(info.value)
def test_input_missing():
input_node1 = ak.Input()
input_node2 = ak.Input()
output_node1 = ak.DenseBlock()(input_node1)
output_node2 = ak.DenseBlock()(input_node2)
output_node = ak.Merge()([output_node1, output_node2])
output_node = ak.RegressionHead()(output_node)
with pytest.raises(ValueError) as info:
graph_module.Graph(inputs=input_node1, outputs=output_node)
assert "A required input is missing for HyperModel" in str(info.value)
def test_graph_basics():
input_node = ak.Input(shape=(30,))
output_node = input_node
output_node = ak.DenseBlock()(output_node)
output_node = ak.RegressionHead(shape=(1,))(output_node)
model = graph_module.Graph(inputs=input_node, outputs=output_node).build(
keras_tuner.HyperParameters()
)
assert model.input_shape == (None, 30)
assert model.output_shape == (None, 1)
def test_adamw_optimizer():
input_node = ak.Input(shape=(30,))
output_node = input_node
output_node = ak.DenseBlock()(output_node)
output_node = ak.RegressionHead(shape=(1,))(output_node)
hp = keras_tuner.HyperParameters()
hp.Choice("optimizer", ["adam", "sgd", "adam_weight_decay"], default="adam")
hp.values["optimizer"] = "adam_weight_decay"
graph = graph_module.Graph(inputs=input_node, outputs=output_node)
graph.num_samples = 10000
graph.inputs[0].batch_size = 32
graph.epochs = 10
model = graph.build(hp)
assert model.input_shape == (None, 30)
assert model.output_shape == (None, 1)
def test_graph_save_load(tmp_path):
input1 = ak.Input()
input2 = ak.Input()
output1 = ak.DenseBlock()(input1)
output2 = ak.ConvBlock()(input2)
output = ak.Merge()([output1, output2])
output1 = ak.RegressionHead()(output)
output2 = ak.ClassificationHead()(output)
graph = graph_module.Graph(
inputs=[input1, input2],
outputs=[output1, output2],
)
path = os.path.join(tmp_path, "graph")
graph.save(path)
graph = graph_module.load_graph(path)
assert len(graph.inputs) == 2
assert len(graph.outputs) == 2
assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
def test_merge():
input_node1 = ak.Input(shape=(30,))
input_node2 = ak.Input(shape=(40,))
output_node1 = ak.DenseBlock()(input_node1)
output_node2 = ak.DenseBlock()(input_node2)
output_node = ak.Merge()([output_node1, output_node2])
output_node = ak.RegressionHead(shape=(1,))(output_node)
model = graph_module.Graph(
inputs=[input_node1, input_node2], outputs=output_node
).build(keras_tuner.HyperParameters())
assert model.input_shape == [(None, 30), (None, 40)]
assert model.output_shape == (None, 1)
def test_save_custom_metrics_loss(tmp_path):
def custom_metric(y_pred, y_true):
return 1
def METHOD_NAME(y_pred, y_true):
return y_pred - y_true
head = ak.ClassificationHead(
loss=METHOD_NAME, metrics=["accuracy", custom_metric]
)
input_node = ak.Input()
output_node = head(input_node)
graph = graph_module.Graph(input_node, output_node)
path = os.path.join(tmp_path, "graph")
graph.save(path)
new_graph = graph_module.load_graph(
path,
custom_objects={
"custom_metric": custom_metric,
"custom_loss": METHOD_NAME,
},
)
assert new_graph.blocks[0].metrics[1](0, 0) == 1
assert new_graph.blocks[0].loss(3, 2) == 1
def test_cat_to_num_with_img_input_error():
input_node = ak.ImageInput()
output_node = ak.CategoricalToNumerical()(input_node)
with pytest.raises(TypeError) as info:
graph_module.Graph(input_node, outputs=output_node).compile()
assert "CategoricalToNumerical can only be used" in str(info.value)
def test_graph_can_init_with_one_missing_output():
input_node = ak.ImageInput()
output_node = ak.ConvBlock()(input_node)
output_node = ak.RegressionHead()(output_node)
ak.ClassificationHead()(output_node)
graph_module.Graph(input_node, output_node)
|
1,394 |
verify
|
#!/usr/bin/env python3
#
# Copyright (c) 2021, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import logging
import unittest
import pktverify
from pktverify import packet_verifier
from pktverify.consts import MA1, MA1g, MA2
import config
import thread_cert
# Test description:
# The purpose of this test is to verify the functionality of ping command.
#
# Topology:
#
#
# ROUTER_2 ----- ROUTER_1 ---- ROUTER_3
#
#
ROUTER_1 = 1
ROUTER_2 = 2
ROUTER_3 = 3
class TestPing(thread_cert.TestCase):
USE_MESSAGE_FACTORY = False
SUPPORT_NCP = False
TOPOLOGY = {
ROUTER_1: {
'name': 'Router_1',
'allowlist': [ROUTER_2, ROUTER_3],
},
ROUTER_2: {
'name': 'Router_2',
'allowlist': [ROUTER_1],
},
ROUTER_3: {
'name': 'Router_3',
'allowlist': [ROUTER_1],
},
}
def test(self):
router1 = self.nodes[ROUTER_1]
router2 = self.nodes[ROUTER_2]
router3 = self.nodes[ROUTER_3]
router1.start()
self.simulator.go(config.LEADER_STARTUP_DELAY)
self.assertEqual('leader', router1.get_state())
router2.start()
self.simulator.go(config.ROUTER_STARTUP_DELAY)
self.assertEqual('router', router2.get_state())
router3.start()
self.simulator.go(config.ROUTER_STARTUP_DELAY)
self.assertEqual('router', router3.get_state())
# 1. ROUTER_1 pings ROUTER_2.
self.assertTrue(router1.ping(router2.get_ip6_address(config.ADDRESS_TYPE.RLOC)))
# 2. ROUTER_1 pings ROUTER_2 multiple times.
self.assertTrue(router1.ping(router2.get_ip6_address(config.ADDRESS_TYPE.RLOC), count=5))
# 3. ROUTER_2 pings ROUTER_1 from the link-local address to the
# link-local address.
self.assertTrue(
router2.ping(router1.get_ip6_address(config.ADDRESS_TYPE.LINK_LOCAL),
interface=router2.get_ip6_address(config.ADDRESS_TYPE.LINK_LOCAL)))
# 4. ROUTER_2 pings ROUTER_3 using the RLOC.
self.assertTrue(router2.ping(router3.get_ip6_address(config.ADDRESS_TYPE.RLOC)))
# 5. ROUTER_2 pings ROUTER_3's link-local address. The ping should fail.
self.assertFalse(router2.ping(router3.get_ip6_address(config.ADDRESS_TYPE.LINK_LOCAL)))
# 6. ROUTER_2 pings ROUTER_3's RLOC from the link-local address. The
# ping should fail.
self.assertFalse(
router2.ping(router3.get_ip6_address(config.ADDRESS_TYPE.RLOC),
interface=router2.get_ip6_address(config.ADDRESS_TYPE.LINK_LOCAL)))
# 7. ROUTER_2 pings ROUTER_3's RLOC from a non-existent address. The
# ping command should be rejected by CLI.
self.assertFalse(router2.ping(router3.get_ip6_address(config.ADDRESS_TYPE.RLOC), interface='1::1'))
self.collect_ipaddrs()
self.collect_rloc16s()
self.collect_rlocs()
self.collect_extra_vars()
def METHOD_NAME(self, pv: pktverify.packet_verifier.PacketVerifier):
pkts = pv.pkts
vars = pv.vars
pv.summary.show()
logging.info(f'vars = {vars}')
# Ensure the topology is formed correctly
pv.verify_attached('Router_2', 'Router_1')
pv.verify_attached('Router_3', 'Router_1')
# 1. Router_1 pings Router_2.
_pkt = pkts.filter_wpan_src64(vars['Router_1']) \
.filter_ipv6_2dsts(vars['Router_2_RLOC'], vars['Router_2_LLA']) \
.filter_ping_request() \
.must_next()
pkts.filter_wpan_src64(vars['Router_2']) \
.filter_ipv6_dst(_pkt.ipv6.src) \
.filter_ping_reply(identifier=_pkt.icmpv6.echo.identifier) \
.must_next()
# 2. Router_1 pings Router_2 multiple times.
for i in range(5):
_pkt = pkts.filter_wpan_src64(vars['Router_1']) \
.filter_ipv6_2dsts(vars['Router_2_RLOC'], vars['Router_2_LLA']) \
.filter_ping_request() \
.must_next()
pkts.filter_wpan_src64(vars['Router_2']) \
.filter_ipv6_dst(_pkt.ipv6.src) \
.filter_ping_reply(identifier=_pkt.icmpv6.echo.identifier) \
.must_next()
# 3. Router_2 pings Router_1 from the link-local address to the
# link-local address.
_pkt = pkts.filter_wpan_src64(vars['Router_2']) \
.filter_ipv6_src_dst(vars['Router_2_LLA'], vars['Router_1_LLA']) \
.filter_ping_request() \
.must_next()
pkts.filter_wpan_src64(vars['Router_1']) \
.filter_ipv6_dst(_pkt.ipv6.src) \
.filter_ping_reply(identifier=_pkt.icmpv6.echo.identifier) \
.must_next()
# 4. Router_2 pings Router_3 using the RLOC.
_pkt = pkts.filter_wpan_src64(vars['Router_2']) \
.filter_ipv6_dst(vars['Router_3_RLOC']) \
.filter_ping_request() \
.must_next()
pkts.filter_wpan_src64(vars['Router_3']) \
.filter_ipv6_dst(_pkt.ipv6.src) \
.filter_ping_reply(identifier=_pkt.icmpv6.echo.identifier) \
.must_next()
# 5. Router_2 pings Router_3's link-local address. The ping should fail.
_pkt = pkts.filter_wpan_src64(vars['Router_2']) \
.filter_ipv6_dst(vars['Router_3_LLA']) \
.filter_ping_request() \
.must_next()
pkts.filter_wpan_src64(vars['Router_3']) \
.filter_ipv6_dst(_pkt.ipv6.src) \
.filter_ping_reply(identifier=_pkt.icmpv6.echo.identifier) \
.must_not_next()
# 6. Router_2 pings Router_3's RLOC from the link-local address. The
# ping should fail.
_pkt = pkts.filter_wpan_src64(vars['Router_2']) \
.filter_ipv6_src_dst(vars['Router_2_LLA'], vars['Router_3_RLOC']) \
.filter_ping_request() \
.must_next()
if __name__ == '__main__':
unittest.main()
|
1,395 |
disable threads viewer patch add layer
|
import contextlib
from contextlib import suppress
import pytest
from qtpy.QtWidgets import QDialog, QInputDialog, QMessageBox
from PartSeg._roi_analysis.partseg_settings import PartSettings
from PartSeg._roi_mask.main_window import ChosenComponents
from PartSeg._roi_mask.stack_settings import StackSettings
from PartSeg.common_backend.base_settings import BaseSettings
from PartSeg.common_gui import napari_image_view
@pytest.fixture()
def base_settings(image, tmp_path, measurement_profiles, qapp):
settings = BaseSettings(tmp_path)
settings.image = image
return settings
@pytest.fixture()
def part_settings(image, tmp_path, measurement_profiles, qapp):
settings = PartSettings(tmp_path)
settings.image = image
for el in measurement_profiles:
settings.measurement_profiles[el.name] = el
return settings
@pytest.fixture()
def stack_settings(tmp_path, image, qapp):
settings = StackSettings(tmp_path)
settings.image = image
chose = ChosenComponents()
settings.chosen_components_widget = chose
yield settings
chose.deleteLater()
@pytest.fixture()
def part_settings_with_project(image, analysis_segmentation2, tmp_path, qapp):
settings = PartSettings(tmp_path)
settings.image = image
settings.set_project_info(analysis_segmentation2)
return settings
@pytest.fixture(autouse=True)
def _disable_threads_viewer_patch_prepare_layers(monkeypatch):
def _prepare_layers(self, image, parameters, replace):
self._add_image(napari_image_view._prepare_layers(image, parameters, replace))
monkeypatch.setattr(napari_image_view.ImageView, "_prepare_layers", _prepare_layers)
@pytest.fixture(autouse=True)
def METHOD_NAME(monkeypatch, request):
if "no_patch_add_layer" in request.keywords:
return
def _add_layer_util(self, index, layer, filters):
if layer not in self.viewer.layers:
self.viewer.add_layer(layer)
monkeypatch.setattr(napari_image_view.ImageView, "_add_layer_util", _add_layer_util)
@pytest.fixture(autouse=True)
def _check_opened_windows(qapp):
yield
widgets = qapp.topLevelWidgets()
for widget in widgets:
assert not widget.isVisible()
@pytest.fixture(autouse=True)
def _block_threads(monkeypatch, request):
if "enablethread" in request.keywords:
return
from pytestqt.qt_compat import qt_api
from qtpy.QtCore import QThread, QTimer
old_start = QTimer.start
class OldTimer(QTimer):
def start(self, time=None):
if time is not None:
old_start(self, time)
else:
old_start(self)
def not_start(self):
raise RuntimeError("Thread should not be used in test")
monkeypatch.setattr(QTimer, "start", not_start)
monkeypatch.setattr(QThread, "start", not_start)
monkeypatch.setattr(qt_api.QtCore, "QTimer", OldTimer)
@pytest.fixture(autouse=True)
def _clean_settings():
try:
try:
from napari.settings import SETTINGS
except ImportError:
from napari.utils.settings import SETTINGS
SETTINGS.reset()
yield
with suppress(AttributeError):
SETTINGS.reset()
except ImportError:
yield
@pytest.fixture(autouse=True)
def _reset_napari_settings(monkeypatch, tmp_path):
def _mock_save(self, path=None, **dict_kwargs):
return # skipcq: PTC-W0049
from napari import settings
cp = settings.NapariSettings.__private_attributes__["_config_path"]
monkeypatch.setattr(cp, "default", tmp_path / "save.yaml")
monkeypatch.setattr(settings.NapariSettings, "save", _mock_save)
settings._SETTINGS = None
@pytest.fixture(autouse=True)
def _block_message_box(monkeypatch, request):
def raise_on_call(*_, **__):
raise RuntimeError("exec_ call") # pragma: no cover
monkeypatch.setattr(QMessageBox, "exec_", raise_on_call)
monkeypatch.setattr(QMessageBox, "critical", raise_on_call)
monkeypatch.setattr(QMessageBox, "information", raise_on_call)
monkeypatch.setattr(QMessageBox, "question", raise_on_call)
monkeypatch.setattr(QMessageBox, "warning", raise_on_call)
monkeypatch.setattr("PartSeg.common_gui.error_report.QMessageFromException.exec_", raise_on_call)
monkeypatch.setattr(QInputDialog, "getText", raise_on_call)
if "enabledialog" not in request.keywords:
monkeypatch.setattr(QDialog, "exec_", raise_on_call)
class DummyConnect:
def __init__(self, li):
self.li = li
def connect(self, func):
self.li.append(func)
class DummyThrottler:
def __init__(self, *args, **kwargs):
self._call_list = []
def setTimeout(self, *args, **kwargs):
pass # dummy throttler, so the timeout setting is irrelevant.
def throttle(self, *args, **kwargs):
for cl in self._call_list:
cl(*args, **kwargs)
@property
def triggered(self):
return DummyConnect(self._call_list)
@pytest.fixture(autouse=True)
def _mock_throttler(monkeypatch):
with contextlib.suppress(ImportError):
from napari._qt import qt_main_window
if hasattr(qt_main_window, "QSignalThrottler"):
monkeypatch.setattr(qt_main_window, "QSignalThrottler", DummyThrottler)
|
1,396 |
execute process
|
# =================================================================
#
# Authors: Martin Pontius <[email protected]>
# Tom Kralidis <[email protected]>
#
# Copyright (c) 2022 52°North Spatial Information Research GmbH
# Copyright (c) 2022 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
from pathlib import Path
from werkzeug.wrappers import Request
from werkzeug.test import create_environ
from multiprocessing import Process, Manager
import json
from tinydb import TinyDB, Query
import pytest
from pygeoapi.api import (
API, APIRequest
)
from pygeoapi.util import yaml_load
from .util import get_test_file_path
@pytest.fixture()
def config():
with open(get_test_file_path('pygeoapi-test-config.yml')) as fh:
return yaml_load(fh)
@pytest.fixture()
def api_(config):
return API(config)
def METHOD_NAME(api, request, process_id, index, processes_out):
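# Worker target run in a separate process: execute the job and store headers,
# HTTP status and response in the shared dict under its index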
headers, http_status, response = api.execute_process(request, process_id)
processes_out[index] = {"headers": headers, "http_status": http_status,
"response": response}
def _create_request(name, message, locales):
data = {
"mode": "async",
"response": "raw",
"inputs": {
"name": name,
"message": message
}
}
environ = create_environ(
base_url='http://localhost:5000/processes/hello-world/execution',
method="POST", json=data)
req = Request(environ)
return APIRequest.with_data(req, locales)
def test_async_hello_world_process_parallel(api_, config):
index_name = Path(config['server']['manager']['connection'])
if index_name.exists():
index_name.unlink()
NUM_PROCS = 4
process_id = "hello-world"
req = _create_request("World", "Hello", api_.locales)
manager = Manager()
processes_out = manager.dict()
procs = []
for i in range(0, NUM_PROCS):
procs.append(Process(target=METHOD_NAME,
args=(api_, req, process_id, i, processes_out)))
# Run processes in parallel
procs_started = []
for p in procs:
p.start()
procs_started.append(p)
for p in procs_started:
# let main process wait until sub-processes completed
p.join()
# Test if jobs are registered and run correctly
db = TinyDB(index_name)
query = Query()
for process_out in processes_out.values():
try:
assert process_out['http_status'] == 200
job_id = process_out['headers']['Location'].split('/')[-1]
job_dict = db.search(query.identifier == job_id)[0]
assert job_dict["identifier"] == job_id
assert job_dict["process_id"] == process_id
assert job_dict["mimetype"] == process_out['headers'][
'Content-Type']
try:
with open(f'{index_name.parent}/hello-world-{job_id}') as fh:
out_json = json.load(fh)
assert out_json["id"] == "echo"
assert out_json["value"] == "Hello World! Hello"
except FileNotFoundError as e:
assert False, e
except json.decoder.JSONDecodeError as e:
assert False, e
except Exception as e:
assert False, e
|
1,397 |
init
|
#@+leo-ver=5-thin
#@+node:ekr.20160928073518.1: * @file ../plugins/pyplot_backend.py
"""
A helper for the viewrendered plugin.
This is *NOT* a real plugin.
"""
#@+<< pyplot_backend imports >>
#@+node:ekr.20160928074801.1: ** << pyplot_backend imports >>
from leo.core import leoGlobals as g
from leo.plugins import viewrendered as vr
from leo.core.leoQt import FocusPolicy
try:
import matplotlib.backend_bases as backend_bases
FigureManagerBase = backend_bases.FigureManagerBase
from matplotlib.backends.qt_compat import QtWidgets
from matplotlib.backends.backend_qtagg import (
FigureCanvas, FigureManager)
from matplotlib.figure import Figure
from matplotlib import pyplot as plt
except ImportError:
g.es_exception()
# import matplotlib
#@-<< pyplot_backend imports >>
#@+others
#@+node:ekr.20160928073605.1: ** init
def METHOD_NAME():
"""Return True if the plugin has loaded successfully."""
g.trace('pyplot_backend.py is not a plugin.')
return False
#@+node:ekr.20160928082006.1: ** Leo backend
#@+node:ekr.20160928074615.2: *3* new_figure_manager
def new_figure_manager(num, *args, **kwargs):
"""
Create a new figure manager instance
"""
FigureClass = kwargs.pop('FigureClass', Figure)
thisFig = FigureClass(*args, **kwargs)
return new_figure_manager_given_figure(num, thisFig)
#@+node:ekr.20160928074615.3: *3* new_figure_manager_given_figure
def new_figure_manager_given_figure(num, figure):
"""
Create a new figure manager instance for the given figure.
"""
canvas = FigureCanvas(figure)
return LeoFigureManagerQT(canvas, num)
#@+node:ekr.20160929050151.1: *3* class LeoFigureManagerQT
# From backend_qt5.py
# pylint: disable=no-member
# matplotlib.backends.backend_qt5.FigureManager probably does exist. See:
# https://github.com/matplotlib/matplotlib/blob/master/lib/matplotlib/backends/backend_qt5.py
class LeoFigureManagerQT(FigureManager):
"""
Public attributes
canvas : The FigureCanvas instance
num : The Figure number
toolbar : The qt.QToolBar
window : The qt.QMainWindow (not set)
"""
#@+others
#@+node:ekr.20160929050151.2: *4* __init__ (LeoFigureManagerQt)
# Do NOT call the base class ctor. It creates a Qt MainWindow.
# pylint: disable=super-init-not-called
# pylint: disable=non-parent-init-called
def __init__(self, canvas, num):
"""Ctor for the LeoFigureManagerQt class."""
self.c = c = g.app.log.c
super().__init__(canvas, num)
self.canvas = canvas
# New code for Leo: embed the canvas in the viewrendered area.
self.vr_controller = vc = vr.controllers.get(c.hash())
self.splitter = c.free_layout.get_top_splitter()
self.frame = w = QtWidgets.QFrame()
w.setLayout(QtWidgets.QVBoxLayout())
w.layout().addWidget(self.canvas)
if vc:
vc.embed_widget(w)
class DummyWindow:
def __init__(self, c):
self.c = c
self._destroying = None
def windowTitle(self):
return self.c.p.h
def show(self):
pass
self.window = None #DummyWindow(c)
# See comments in the base class ctor, in backend_qt5.py.
self.canvas.setFocusPolicy(FocusPolicy.StrongFocus)
self.canvas.setFocus()
self.canvas._destroying = False
self.toolbar = self._get_toolbar(self.canvas, self.frame)
if self.toolbar is not None:
# The toolbar is a matplotlib.backends.backend_qt.NavigationToolbar2QT.
layout = self.frame.layout()
layout.addWidget(self.toolbar)
# add text label to status bar
self.statusbar_label = QtWidgets.QLabel()
layout.addWidget(self.statusbar_label)
self.canvas.draw_idle()
def notify_axes_change(fig):
# This will be called whenever the current axes is changed
if self.toolbar is not None:
self.toolbar.update()
self.canvas.figure.add_axobserver(notify_axes_change)
# Close the figure so that we don't create too many figure instances
plt.close(canvas.figure)
#@+node:ekr.20160929083114.1: *4* destroy
def destroy(self, *args):
# Causes problems.
# self.frame.deleteLater()
self.frame = None
#@-others
#@-others
#@@language python
#@-leo
|
1,398 |
normal
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from funasr.modules.data2vec.multihead_attention import MultiheadAttention
class Fp32LayerNorm(nn.LayerNorm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def forward(self, input):
output = F.layer_norm(
input.float(),
self.normalized_shape,
self.weight.float() if self.weight is not None else None,
self.bias.float() if self.bias is not None else None,
self.eps,
)
return output.type_as(input)
class Fp32GroupNorm(nn.GroupNorm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def forward(self, input):
output = F.group_norm(
input.float(),
self.num_groups,
self.weight.float() if self.weight is not None else None,
self.bias.float() if self.bias is not None else None,
self.eps,
)
return output.type_as(input)
class TransposeLast(nn.Module):
def __init__(self, deconstruct_idx=None):
super().__init__()
self.deconstruct_idx = deconstruct_idx
def forward(self, x):
if self.deconstruct_idx is not None:
x = x[self.deconstruct_idx]
return x.transpose(-2, -1)
class SamePad(nn.Module):
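# Trims trailing timesteps left over from padding so 'same'-style convolutions preserve
# the input length: kernel_size - 1 steps for causal convs, one step for even kernel sizes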
def __init__(self, kernel_size, causal=False):
super().__init__()
if causal:
self.remove = kernel_size - 1
else:
self.remove = 1 if kernel_size % 2 == 0 else 0
def forward(self, x):
if self.remove > 0:
x = x[:, :, : -self.remove]
return x
def pad_to_multiple(x, multiple, dim=-1, value=0):
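# Pads x along `dim` with `value` so its size becomes a multiple of `multiple`;
# returns the padded tensor and the number of elements added (0 if already aligned)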
# Inspired from https://github.com/lucidrains/local-attention/blob/master/local_attention/local_attention.py#L41
if x is None:
return None, 0
tsz = x.size(dim)
m = tsz / multiple
remainder = math.ceil(m) * multiple - tsz
if m.is_integer():
return x, 0
pad_offset = (0,) * (-1 - dim) * 2
return F.pad(x, (*pad_offset, 0, remainder), value=value), remainder
def gelu_accurate(x):
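# Tanh-based approximation of GELU; sqrt(2/pi) is computed once and cached on the function object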
if not hasattr(gelu_accurate, "_a"):
gelu_accurate._a = math.sqrt(2 / math.pi)
return (
0.5 * x * (1 + torch.tanh(gelu_accurate._a * (x + 0.044715 * torch.pow(x, 3))))
)
def gelu(x: torch.Tensor) -> torch.Tensor:
return torch.nn.functional.gelu(x.float()).type_as(x)
def get_available_activation_fns():
return [
"relu",
"gelu",
"gelu_fast", # deprecated
"gelu_accurate",
"tanh",
"linear",
]
def get_activation_fn(activation: str):
"""Returns the activation function corresponding to `activation`"""
if activation == "relu":
return F.relu
elif activation == "gelu":
return gelu
elif activation == "gelu_accurate":
return gelu_accurate
elif activation == "tanh":
return torch.tanh
elif activation == "linear":
return lambda x: x
elif activation == "swish":
return torch.nn.SiLU
else:
raise RuntimeError("--activation-fn {} not supported".format(activation))
def init_bert_params(module):
"""
Initialize the weights specific to the BERT Model.
This overrides the default initializations depending on the specified arguments.
1. If normal_init_linear_weights is set then weights of linear
layer will be initialized using the normal distribution and
bias will be set to the specified value.
2. If normal_init_embed_weights is set then weights of embedding
layer will be initialized using the normal distribution.
3. If normal_init_proj_weights is set then weights of
in_project_weight for MultiHeadAttention are initialized using
the normal distribution (to be validated).
"""
def METHOD_NAME(data):
# with FSDP, module params will be on CUDA, so we cast them back to CPU
# so that the RNG is consistent with and without FSDP
data.copy_(data.cpu().METHOD_NAME(mean=0.0, std=0.02).to(data.device))
if isinstance(module, nn.Linear):
METHOD_NAME(module.weight.data)
if module.bias is not None:
module.bias.data.zero_()
if isinstance(module, nn.Embedding):
METHOD_NAME(module.weight.data)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
if isinstance(module, MultiheadAttention):
METHOD_NAME(module.q_proj.weight.data)
METHOD_NAME(module.k_proj.weight.data)
METHOD_NAME(module.v_proj.weight.data)
|
1,399 |
create base
|
"""
Tutorial Diagrams
-----------------
This script plots the flow-charts used in the scikit-learn tutorials.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Circle, Rectangle, Polygon, Arrow, FancyArrow
def METHOD_NAME(box_bg="#CCCCCC", arrow1="#88CCFF", arrow2="#88FF88", supervised=True):
fig = plt.figure(figsize=(9, 6), facecolor="w")
ax = plt.axes((0, 0, 1, 1), xticks=[], yticks=[], frameon=False)
ax.set_xlim(0, 9)
ax.set_ylim(0, 6)
patches = [
Rectangle((0.3, 3.6), 1.5, 1.8, zorder=1, fc=box_bg),
Rectangle((0.5, 3.8), 1.5, 1.8, zorder=2, fc=box_bg),
Rectangle((0.7, 4.0), 1.5, 1.8, zorder=3, fc=box_bg),
Rectangle((2.9, 3.6), 0.2, 1.8, fc=box_bg),
Rectangle((3.1, 3.8), 0.2, 1.8, fc=box_bg),
Rectangle((3.3, 4.0), 0.2, 1.8, fc=box_bg),
Rectangle((0.3, 0.2), 1.5, 1.8, fc=box_bg),
Rectangle((2.9, 0.2), 0.2, 1.8, fc=box_bg),
Circle((5.5, 3.5), 1.0, fc=box_bg),
Polygon([[5.5, 1.7], [6.1, 1.1], [5.5, 0.5], [4.9, 1.1]], fc=box_bg),
FancyArrow(
2.3, 4.6, 0.35, 0, fc=arrow1, width=0.25, head_width=0.5, head_length=0.2
),
FancyArrow(
3.75, 4.2, 0.5, -0.2, fc=arrow1, width=0.25, head_width=0.5, head_length=0.2
),
FancyArrow(
5.5, 2.4, 0, -0.4, fc=arrow1, width=0.25, head_width=0.5, head_length=0.2
),
FancyArrow(
2.0, 1.1, 0.5, 0, fc=arrow2, width=0.25, head_width=0.5, head_length=0.2
),
FancyArrow(
3.3, 1.1, 1.3, 0, fc=arrow2, width=0.25, head_width=0.5, head_length=0.2
),
FancyArrow(
6.2, 1.1, 0.8, 0, fc=arrow2, width=0.25, head_width=0.5, head_length=0.2
),
]
if supervised:
patches += [
Rectangle((0.3, 2.4), 1.5, 0.5, zorder=1, fc=box_bg),
Rectangle((0.5, 2.6), 1.5, 0.5, zorder=2, fc=box_bg),
Rectangle((0.7, 2.8), 1.5, 0.5, zorder=3, fc=box_bg),
FancyArrow(
2.3, 2.9, 2.0, 0, fc=arrow1, width=0.25, head_width=0.5, head_length=0.2
),
Rectangle((7.3, 0.85), 1.5, 0.5, fc=box_bg),
]
else:
patches += [Rectangle((7.3, 0.2), 1.5, 1.8, fc=box_bg)]
for p in patches:
ax.add_patch(p)
plt.text(
1.45,
4.9,
"Training\nText,\nDocuments,\nImages,\netc.",
ha="center",
va="center",
fontsize=14,
)
plt.text(3.6, 4.9, "Feature\nVectors", ha="left", va="center", fontsize=14)
plt.text(
5.5, 3.5, "Machine\nLearning\nAlgorithm", ha="center", va="center", fontsize=14
)
plt.text(
1.05,
1.1,
"New Text,\nDocument,\nImage,\netc.",
ha="center",
va="center",
fontsize=14,
)
plt.text(3.3, 1.7, "Feature\nVector", ha="left", va="center", fontsize=14)
plt.text(5.5, 1.1, "Predictive\nModel", ha="center", va="center", fontsize=12)
if supervised:
plt.text(1.45, 3.05, "Labels", ha="center", va="center", fontsize=14)
plt.text(8.05, 1.1, "Expected\nLabel", ha="center", va="center", fontsize=14)
plt.text(
8.8, 5.8, "Supervised Learning Model", ha="right", va="top", fontsize=18
)
else:
plt.text(
8.05,
1.1,
"Likelihood\nor Cluster ID\nor Better\nRepresentation",
ha="center",
va="center",
fontsize=12,
)
plt.text(
8.8, 5.8, "Unsupervised Learning Model", ha="right", va="top", fontsize=18
)
def plot_supervised_chart(annotate=False):
METHOD_NAME(supervised=True)
if annotate:
fontdict = {"color": "r", "weight": "bold", "size": 14}
plt.text(
1.9,
4.55,
"X = vec.fit_transform(input)",
fontdict=fontdict,
rotation=20,
ha="left",
va="bottom",
)
plt.text(
3.7,
3.2,
"clf.fit(X, y)",
fontdict=fontdict,
rotation=20,
ha="left",
va="bottom",
)
plt.text(
1.7,
1.5,
"X_new = vec.transform(input)",
fontdict=fontdict,
rotation=20,
ha="left",
va="bottom",
)
plt.text(
6.1,
1.5,
"y_new = clf.predict(X_new)",
fontdict=fontdict,
rotation=20,
ha="left",
va="bottom",
)
def plot_unsupervised_chart():
METHOD_NAME(supervised=False)
if __name__ == "__main__":
plot_supervised_chart(False)
plot_supervised_chart(True)
plot_unsupervised_chart()
plt.show()
|