max_stars_repo_path (string, length 4-182) | max_stars_repo_name (string, length 6-116) | max_stars_count (int64, 0-191k) | id (string, length 7) | content (string, length 100-10k) | size (int64, 100-10k)
---|---|---|---|---|---|
edk2toollib/uefi/edk2/parsers/inf_parser.py
|
mikeytdisco/edk2-pytool-library
| 32 |
2026793
|
# @file inf_parser.py
# Code to help parse EDK2 INF files
#
# Copyright (c) Microsoft Corporation
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from edk2toollib.uefi.edk2.parsers.base_parser import HashFileParser
import os
AllPhases = ["SEC", "PEIM", "PEI_CORE", "DXE_DRIVER", "DXE_CORE", "DXE_RUNTIME_DRIVER", "UEFI_DRIVER",
"SMM_CORE", "DXE_SMM_DRIVER", "UEFI_APPLICATION"]
class InfParser(HashFileParser):
def __init__(self):
HashFileParser.__init__(self, 'ModuleInfParser')
self.Lines = []
self.Parsed = False
self.Dict = {}
self.LibraryClass = ""
self.SupportedPhases = []
self.PackagesUsed = []
self.LibrariesUsed = []
self.ProtocolsUsed = []
self.GuidsUsed = []
self.PpisUsed = []
self.PcdsUsed = []
self.Sources = []
self.Binaries = []
self.Path = ""
def ParseFile(self, filepath):
self.Logger.debug("Parsing file: %s" % filepath)
if(not os.path.isabs(filepath)):
fp = self.FindPath(filepath)
else:
fp = filepath
self.Path = fp
f = open(fp, "r")
self.Lines = f.readlines()
f.close()
InDefinesSection = False
InPackagesSection = False
InLibraryClassSection = False
InProtocolsSection = False
InGuidsSection = False
InPpiSection = False
InPcdSection = False
InSourcesSection = False
InBinariesSection = False
for line in self.Lines:
sline = self.StripComment(line)
if(sline is None or len(sline) < 1):
continue
if InDefinesSection:
if sline.strip()[0] == '[':
InDefinesSection = False
else:
if sline.count("=") == 1:
tokens = sline.split('=', 1)
self.Dict[tokens[0].strip()] = tokens[1].strip()
#
# Parse Library class and phases in a special manner
#
if(tokens[0].strip().lower() == "library_class"):
self.LibraryClass = tokens[1].partition("|")[0].strip()
self.Logger.debug("Library class found")
if(len(tokens[1].partition("|")[2].strip()) < 1):
self.SupportedPhases = AllPhases
elif(tokens[1].partition("|")[2].strip().lower() == "base"):
self.SupportedPhases = AllPhases
else:
self.SupportedPhases = tokens[1].partition("|")[2].strip().split()
self.Logger.debug("Key,values found: %s = %s" % (tokens[0].strip(), tokens[1].strip()))
continue
elif InPackagesSection:
if sline.strip()[0] == '[':
InPackagesSection = False
else:
self.PackagesUsed.append(sline.partition("|")[0].strip())
continue
elif InLibraryClassSection:
if sline.strip()[0] == '[':
InLibraryClassSection = False
else:
self.LibrariesUsed.append(sline.partition("|")[0].strip())
continue
elif InProtocolsSection:
if sline.strip()[0] == '[':
InProtocolsSection = False
else:
self.ProtocolsUsed.append(sline.partition("|")[0].strip())
continue
elif InGuidsSection:
if sline.strip()[0] == '[':
InGuidsSection = False
else:
self.GuidsUsed.append(sline.partition("|")[0].strip())
continue
elif InPcdSection:
if sline.strip()[0] == '[':
InPcdSection = False
else:
self.PcdsUsed.append(sline.partition("|")[0].strip())
continue
elif InPpiSection:
if sline.strip()[0] == '[':
InPpiSection = False
else:
self.PpisUsed.append(sline.partition("|")[0].strip())
continue
elif InSourcesSection:
if sline.strip()[0] == '[':
InSourcesSection = False
else:
self.Sources.append(sline.partition("|")[0].strip())
continue
elif InBinariesSection:
if sline.strip()[0] == '[':
InBinariesSection = False
else:
self.Binaries.append(sline.partition("|")[0].strip())
continue
# check for different sections
if sline.strip().lower().startswith('[defines'):
InDefinesSection = True
elif sline.strip().lower().startswith('[packages'):
InPackagesSection = True
elif sline.strip().lower().startswith('[libraryclasses'):
InLibraryClassSection = True
elif sline.strip().lower().startswith('[protocols'):
InProtocolsSection = True
elif sline.strip().lower().startswith('[ppis'):
InPpiSection = True
elif sline.strip().lower().startswith('[guids'):
InGuidsSection = True
elif sline.strip().lower().startswith('[pcd') or \
sline.strip().lower().startswith('[patchpcd') or \
sline.strip().lower().startswith('[fixedpcd') or \
sline.strip().lower().startswith('[featurepcd'):
InPcdSection = True
elif sline.strip().lower().startswith('[sources'):
InSourcesSection = True
elif sline.strip().lower().startswith('[binaries'):
InBinariesSection = True
self.Parsed = True
| 6,153 |
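A minimal usage sketch for the INF parser above; the INF path is an assumption, and relative paths are resolved through the parser's `FindPath` / package-path configuration, so an absolute path is used here to keep the sketch self-contained.

```python
# Hedged sketch: parsing an INF and reading a few of the collected fields.
# The file path below is hypothetical; pass an absolute path to skip FindPath().
from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser

parser = InfParser()
parser.ParseFile("/abs/path/to/SomeLib.inf")          # hypothetical INF file
print(parser.Dict.get("BASE_NAME"))                   # key/value pairs from [Defines]
print(parser.LibraryClass, parser.SupportedPhases)    # derived from LIBRARY_CLASS
print(parser.LibrariesUsed)                           # entries from [LibraryClasses]
```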
backend/api/views.py
|
kh-026-webui-python/backend
| 0 |
2025526
|
"""
Views for api app
"""
import os
import shutil
from io import TextIOWrapper
import psycopg2
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db.utils import IntegrityError
from django.http import JsonResponse
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from utils.FileValidator import FileValidator
from utils.parser import CSVParser
from utils.user_manager import UserManager
from .models import Document, Course
from .serializers import UserSerializer, CourseSerializer
class UserViewSet(viewsets.ModelViewSet): # pylint: disable=too-many-ancestors
"""
API endpoint for USERS
"""
queryset = User.objects.all().order_by('-date_joined')
serializer_class = UserSerializer
class HealthCheckView(APIView):
"""
Ping server and database
"""
def get(self, request):
"""
Making JSON response for endpoint's get request
"""
try:
psycopg2.connect(host=os.environ.get('DB_HOST', None),
database=os.environ.get('DB_NAME', 'db.postgres'),
user=os.environ.get('DB_USER', ''),
password=os.environ.get('DB_PASSWORD', ''))
except psycopg2.OperationalError as error:
print(error)
database = "error"
else:
database = "pong"
return JsonResponse({"server": "pong", "database": database}, status=status.HTTP_200_OK)
class UploadResumeView(APIView):
"""
Validate and save file on server
"""
validator = FileValidator(
allowed_extensions=['pdf'],
allowed_mimetypes=['application/pdf'],
min_size=307,
max_size=3 * 1024 * 1024
)
def post(self, request):
"""
Handle post request on server's endpoint
"""
if request.data.get('file'):
uploaded_file = request.data.get('file')
try:
self.validator(uploaded_file)
except ValidationError as error:
print(error)
return JsonResponse({'error': error.message}, status=status.HTTP_400_BAD_REQUEST)
else:
return JsonResponse({'error': "there is no file"}, status=status.HTTP_400_BAD_REQUEST)
folder = 'CVs/'
filename = uploaded_file.name
storage = FileSystemStorage(location=folder)
temp_cv = Document()
temp_cv.path = f"{storage.location}/{filename}"
try:
temp_cv.save()
storage.save(filename, uploaded_file)
except IntegrityError as error:
print(error.args[0])
message = {'error': 'file with same name already exists'}
return JsonResponse(message, status=status.HTTP_400_BAD_REQUEST)
return Response(status=status.HTTP_201_CREATED)
def delete(self, request):
Document.objects.all().delete()
path = FileSystemStorage("CVs/")
shutil.rmtree(f"{path.location}")
return Response(status=status.HTTP_200_OK)
class FileUploadView(APIView):
"""
API endpoint for CSV File upload
"""
def post(self, request, filename, format=None):
file = TextIOWrapper(request.FILES[filename].file, encoding=request.encoding)
user_data = CSVParser.read_from_memory(file)
user_serializer = UserSerializer()
response = []
for user in user_data:
current_user = UserManager.to_user_data(user)
verified_data = user_serializer.validate(current_user)
response_data = verified_data.copy()
try:
existing_user = User.objects.get(username=verified_data['username'])
except User.DoesNotExist:
user_serializer.create(verified_data)
else:
response_data['error'] = str(existing_user.username) + ' already exists'
response.append(response_data)
return Response(response)
class CoursesView(APIView):
"""
View that help manipulate with courses
"""
def get(self, request):
"""
:param request:
:return:
TODO JsonResponse with courses
"""
all_courses = list(Course.objects.all())
courses = {}
for course in all_courses:
courses[course.id] = CourseSerializer(course).data
return JsonResponse(courses)
| 4,573 |
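A hedged sketch of exercising `HealthCheckView` with Django's test client; the URL route is an assumption since the project's `urls.py` is not shown here.

```python
# Hypothetical check of the health endpoint (the "/api/health/" route is assumed).
from django.test import Client

client = Client()
response = client.get("/api/health/")
print(response.status_code)   # 200
print(response.json())        # e.g. {"server": "pong", "database": "pong"} or "error"
```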
rl/high-card-poker/tf-pfhc-rl/pfhc_env.py
|
Simsso/Machine-Learning-Tinkering
| 2 |
2026911
|
from player import Player
from pfhc_game import PreFlopHighCardGame
class HighCardEnv:
"""
Environment class for the pre-flop high card poker game.
In every round, each of the player gets a single card.
"""
INITIAL_STACK = 100
def __init__(self, player1: Player, player2: Player):
self.p1 = player1
self.p2 = player2
self.game: PreFlopHighCardGame = None
self.reset()
def step(self, action):
"""
:param action: Tuple containing a char ['b', 'f', 'c'] and a positive integral value.
The latter is only taken into account if 'b' is chosen, as it is the bet size.
:return: Tuple (new state, rewards, game over, misc). Rewards is an array with two elements,
one reward for each player. It is [0, 0] unless a payout has happened.
The new state is, as seen from the perspective of the player who is acting next.
This might be the same player as the one who is conducting the current action.
"""
action_code = action[0]
bet_size = action[1]
game = self.game
player = game.next_player
count_ba = game.round_count # round count before action
if action_code == 'b':
# scale bet size to [0, player stack]
game.bet(bet_size=max(0, min(player.stack(), bet_size)))
elif action_code == 'c':
game.call()
else:
game.fold()
reward = [0, 0]
if count_ba < game.round_count: # new round (showdown has taken place or a player folded)
reward = game.last_gain
return self.get_state(game.next_player), reward, game.over, {}
def reset(self):
"""
Resets the environment. That is resetting the players (but keeping the object instances) and creating a new
pre-flop high card game object.
"""
self.p1.reset()
self.p2.reset()
self.game = PreFlopHighCardGame(self.p1, self.p2)
def get_state(self, me: Player):
"""
:return: The private game state, i.e. game as seen from one player's point of view.
A 5-tuple containing (card, stack, opponent stack, bet size, opponent bet size)
"""
game = self.game
opponent = game.other(me)
state = [me.get_card().get_rank(), me.stack(), opponent.stack(), me.bet_size(), opponent.bet_size()]
return state
def __str__(self):
return str(str(self.p1.name) + "'s state: " + str(self.get_state(self.p1))) + \
str(str(self.p2.name) + "'s state: " + str(self.get_state(self.p2)))
| 2,663 |
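A short interaction sketch for the environment above; the `Player` constructor signature is an assumption, and the action tuples follow the format documented in `step()`.

```python
# Hedged sketch of one episode loop (Player("name") is an assumed constructor).
from player import Player
from pfhc_env import HighCardEnv

env = HighCardEnv(Player("alice"), Player("bob"))
done = False
while not done:
    # bet 10 chips; ('c', 0) would call and ('f', 0) would fold
    state, reward, done, _ = env.step(('b', 10))
print(reward)   # [0, 0] until a payout has happened
env.reset()
```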
testapp/workflows.py
|
PrimarySite/django-transitions
| 19 |
2026584
|
# -*- coding: utf-8 -*-
"""Example Lifecycle workflow."""
from django.utils import timezone
from django_transitions.workflow import StateMachineMixinBase
from django_transitions.workflow import StatusBase
from transitions import Machine
class LiveStatus(StatusBase):
"""Workflow for Lifecycle."""
# Define the states as constants
DEVELOP = 'develop'
LIVE = 'live'
MAINTENANCE = 'maintenance'
DELETED = 'deleted'
# Give the states a human readable label
STATE_CHOICES = (
(DEVELOP, 'Under Development'),
(LIVE, 'Live'),
(MAINTENANCE, 'Under Maintenance'),
(DELETED, 'Deleted'),
)
# Define the transitions as constants
PUBLISH = 'publish'
MAKE_PRIVATE = 'make_private'
MARK_DELETED = 'mark_deleted'
REVERT_DELETED = 'revert_delete'
# Give the transitions a human readable label and css class
# which will be used in the django admin
TRANSITION_LABELS = {
PUBLISH : {'label': 'Make live', 'cssclass': 'default'},
MAKE_PRIVATE: {'label': 'Under maintenance'},
MARK_DELETED: {'label': 'Mark as deleted', 'cssclass': 'deletelink'},
REVERT_DELETED: {'label': 'Revert Delete', 'cssclass': 'default'},
}
# Construct the values to pass to the state machine constructor
# The states of the machine
SM_STATES = [
DEVELOP, LIVE, MAINTENANCE, DELETED,
]
# The machines initial state
SM_INITIAL_STATE = DEVELOP
# The transititions as a list of dictionaries
SM_TRANSITIONS = [
# trigger, source, destination
{
'trigger': PUBLISH,
'source': [DEVELOP, MAINTENANCE],
'dest': LIVE,
},
{
'trigger': MAKE_PRIVATE,
'source': LIVE,
'dest': MAINTENANCE,
},
{
'trigger': MARK_DELETED,
'source': [
DEVELOP, LIVE, MAINTENANCE,
],
'dest': DELETED,
},
{
'trigger': REVERT_DELETED,
'source': DELETED,
'dest': MAINTENANCE,
},
]
class LifecycleStateMachineMixin(StateMachineMixinBase):
"""Lifecycle workflow state machine."""
status_class = LiveStatus
machine = Machine(
model=None,
finalize_event='wf_finalize',
auto_transitions=False,
**status_class.get_kwargs() # noqa: C815
)
@property
def state(self):
"""Get the items workflowstate or the initial state if none is set."""
if self.wf_state:
return self.wf_state
return self.machine.initial
@state.setter
def state(self, value):
"""Set the items workflow state."""
self.wf_state = value
return self.wf_state
def wf_finalize(self, *args, **kwargs):
"""Run this on all transitions."""
self.wf_date = timezone.now()
| 2,924 |
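A sketch of how the mixin above is typically attached to a concrete Django model; the model itself is an assumption, but the `wf_state`/`wf_date` field names mirror those referenced by the `state` property and `wf_finalize`.

```python
# Hypothetical model using the workflow mixin (model and field layout are assumptions).
from django.db import models
from testapp.workflows import LifecycleStateMachineMixin, LiveStatus

class Lifecycle(LifecycleStateMachineMixin, models.Model):
    wf_state = models.CharField(
        max_length=32,
        choices=LiveStatus.STATE_CHOICES,
        default=LiveStatus.SM_INITIAL_STATE,
    )
    wf_date = models.DateTimeField(null=True, blank=True)

# obj = Lifecycle.objects.create()
# obj.publish()                       # trigger from SM_TRANSITIONS: develop -> live
# assert obj.state == LiveStatus.LIVE
```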
Assignments/Assignment 6/leap_year copy.py
|
Emilysav/CMPT-120L-910-20F
| 0 |
2024543
|
# """
# - Add code in the defined function to figure out whether or not the given year is a leap year.
# - Every year that is exactly divisible by four is a leap year, except for years that are exactly
#
# divisible by 100, but these centurial years are leap years if they are exactly divisible by 400. For example,
#
# the years 1700, 1800, and 1900 are not leap years, but the years 1600 and 2000 are. - Wikipedia
# - Take in a parameter called year and return “Is a leap year” or “Not a leap year”
# """
def leap_year(year):
remainder1 = year % 4
remainder2 = year % 100
remainder3 = year % 400
print(remainder1, remainder2, remainder3)
if remainder1 == 0 and remainder2 == 0 and remainder3 == 0:
return "Is a leap year"
elif remainder1 == 0 and remainder2 != 0:
return "Is a leap year"
else:
return "Not a leap year"
if __name__ == "__main__":
years = [2000, 1994, 1912, 3002, 1700, 1400]
answers = []
for year in years:
answers.append(leap_year(year))
print(answers)
| 1,106 |
tests/policy/test_action_alertmanager.py
|
Spellchaser/powerfulseal
| 1,362 |
2026579
|
import pytest
from mock import MagicMock, patch
from tests.fixtures import action_alertmanager, ActionUnmuteAlertManager, ActionAlertManager
# single target cases
def test_happy_path(action_alertmanager):
mock_resp = 'silence_id'
action_alertmanager.mute = MagicMock(
return_value=mock_resp
)
action_alertmanager.execute()
# muting should store silence id
assert(action_alertmanager.silences == {
'http://example.com': 'silence_id'
})
assert(action_alertmanager.proxies == dict(http='http',https='https'))
# test default silence duration
action_alertmanager.mute.assert_called_once_with('http://example.com', 900)
# muting should push unmuting action for cleaning up
cleanup_action = action_alertmanager.cleanup_actions[0]
assert(type(cleanup_action) is ActionUnmuteAlertManager)
assert(cleanup_action.silence_id == 'silence_id')
assert(cleanup_action.alertmanager_url == 'http://example.com')
assert(cleanup_action.proxies == dict(http='http',https='https'))
def test_no_proxy(action_alertmanager):
action_alertmanager.proxies = {}
action_alertmanager.mute = MagicMock()
action_alertmanager.execute()
assert(action_alertmanager.cleanup_actions[0].proxies == {})
# autoUnmute is set to false
def test_autounmute_disabled(action_alertmanager):
action_alertmanager.schema["actions"][0]["mute"] = {'autoUnmute': False}
action_alertmanager.mute = MagicMock()
action_alertmanager.execute()
# no clean up action when autoUnmute is false
assert(len(action_alertmanager.cleanup_actions) == 0)
# what if 'mute' is None
def test_mute_is_none(action_alertmanager):
action_alertmanager.schema["actions"][0]["mute"] = None
action_alertmanager.mute = MagicMock()
action_alertmanager.execute()
# test default silence duration
action_alertmanager.mute.assert_called_once_with('http://example.com', 900)
def test_duration_override(action_alertmanager):
action_alertmanager.schema["actions"][0]["mute"] = {'duration': 123}
action_alertmanager.mute = MagicMock()
action_alertmanager.execute()
# test default silence duration
action_alertmanager.mute.assert_called_once_with('http://example.com', 123)
# multiple targets cases
@patch.object(ActionAlertManager, 'mute')
def test_multiple_targets_happy_path(mute):
mute.side_effect = ['silence_1', 'silence_2']
logger = MagicMock()
action_alertmanager = ActionAlertManager(
name="test alert manager action",
schema=dict(
targets = [
dict(url="http://example1.com"),
dict(url="http://example2.com")
],
actions= [
dict(mute = dict())
],
proxies=dict(http='http',https='https')
),
logger=logger,
)
action_alertmanager.execute()
assert(action_alertmanager.silences == {
'http://example1.com': 'silence_1',
'http://example2.com': 'silence_2'
})
cleanup_action = action_alertmanager.cleanup_actions
assert(len(cleanup_action) == 2)
| 3,111 |
python/one-liner/runner_up.py
|
Hamng/python-sources
| 0 |
2026353
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 23 19:18:55 2019
@author: Ham
HackerRank Challenge: Find the Runner-Up Score!
Given the participants' score sheet for your University Sports Day,
you are required to find the runner-up score.
You are given N scores.
Store them in a list and find the score of the runner-up.
Input Format
The first line contains N.
The second line contains an array A of N integers each separated by a space.
Constraints
Output Format
Print the runner-up score.
Sample Input 0
5
2 3 6 6 5
Sample Output 0
5
"""
if __name__ == '__main__':
#n = int(input())
#arr = map(int, input().split())
arr = list(map(int, '2 3 6 6 5'.split()))
print(sorted(set(arr), reverse=True)[1])
| 760 |
klaverjas/game/meld.py
|
liacs/klaverjas
| 0 |
2026646
|
from .cards import Card, Rank, Suit
meld_20 = []
meld_50 = []
meld_100 = []
for suit in Suit:
for idx in range(6):
meld_20.append({Card(suit, Rank(rank))
for rank in range(idx, idx + 3)})
for idx in range(5):
meld_50.append({Card(suit, Rank(rank))
for rank in range(idx, idx + 4)})
for rank in Rank:
meld_100.append({Card(suit, rank) for suit in Suit})
def meld_points(trick, trump_suit):
for meld in meld_100:
if meld <= set(trick):
return 100
points = 0
royal = {Card(trump_suit, Rank.QUEEN), Card(trump_suit, Rank.KING)}
if royal <= set(trick):
points = 20
for meld in meld_50:
if meld <= set(trick):
return points + 50
for meld in meld_20:
if meld <= set(trick):
return points + 20
return points
| 878 |
dcnn_visualizer/traceable_chain.py
|
tochikuji/DNN-Visualizer
| 3 |
2025603
|
import contextlib
import chainer
from chainercv.links import PickableSequentialChain
class TraceableChain(PickableSequentialChain):
"""
A traceable chain that can pick all intermediate layers and backtrace the calculation chains.
The forward propagation must be sequential,
that means forward propagation can be written as a composite of callable objects.
>>> class MLP(TraceableChain):
... def __init__(self):
... with self.init_scope():
... self.fc1 = L.Linear(None, 100)
... self.fc1_relu = F.relu
... self.fc2 = L.Linear(None, 10)
... self.fc2_relu = F.relu
... self.fc3 = L.Linear(None, 10)
... self.fc3_pred = F.softmax
The instance of TraceableChain is callable. Its `__call__` performs as a forward propagation.
All intermediate activations will be retained automatically, and picked with `pick`, e.g.,
>>> y = model(x)
>>> act_fc2 = model.pick('fc2')
"""
def __init__(self):
super().__init__()
self.pick = self.layer_names
def renew_pick(self):
self.pick = self.layer_names
@contextlib.contextmanager
def init_scope(self):
try:
with super().init_scope():
yield
finally:
self.renew_pick()
if __name__ == '__main__':
import chainer.functions as F
import numpy
from dcnn_visualizer.traceable_nodes import TraceableLinear
class TraceableModel(TraceableChain):
def __init__(self):
super().__init__()
with self.init_scope():
self.fc1 = TraceableLinear(10, 10)
self.fc1_relu = F.relu
self.fc2 = TraceableLinear(10, 10)
self.fc2_relu = F.relu
self.fc3 = TraceableLinear(10, 1)
self.fc3_sigm = F.sigmoid
model = TraceableModel()
x = numpy.random.rand(1, 10).astype('f')
# noinspection PyTypeChecker
v = model(x)
print({model.layer_names[i]: y for i, y in enumerate(v)})
| 2,105 |
dashedbot/utils.py
|
rhg/dashedbot
| 0 |
2025167
|
from json import load, dump
import logging
import sys
def get_logger(name: str) -> logging.Logger:
'''Return a logger with the given name, set to debug level,
that writes to stderr.'''
logger = logging.getLogger(name=name)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(stream=sys.stderr))
return logger
logger = get_logger('dashedbot.utils')
def read_json(name: str, *, default={}):
'''load json from a file'''
try:
with open(name, 'r') as f:
logger.debug('loaded %s', name)
return load(f)
except FileNotFoundError:
logger.debug('%s not found', name)
return default
def write_json(obj, name: str):
'''write json to a file'''
with open(name, 'w') as f:
dump(obj, f)
logger.debug('wrote %s', name)
def map_values(f, d: dict) -> dict:
return {k: f(v) for (k, v) in d.items()}
| 916 |
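A small usage sketch for the helpers above; the config file name is an assumption.

```python
# Hedged sketch: round-tripping a config file with the JSON helpers.
from dashedbot.utils import read_json, write_json, map_values

config = read_json("config.json", default={"prefix": "-"})   # default if file is missing
lengths = map_values(len, {"a": [1, 2], "b": [3]})           # {'a': 2, 'b': 1}
write_json(config, "config.json")
```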
test/hello/main.py
|
oxr463/setup-buildozer
| 0 |
2025314
|
#!/usr/bin/env python
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.uix.label import Label
class helloWorld(App):
def build(self):
lbl=Label(text='Hello World!')
return lbl
if __name__ == '__main__':
helloWorld().run()
| 270 |
tests.py
|
wa-pis/api-cloudvps-py
| 0 |
2026633
|
import unittest
import cloudvps
class CloudTestCase(unittest.TestCase):
def setUp(self):
self.api = cloudvps.Api("some_fake_token")
def testCheckVersion(self):
self.assertEqual(cloudvps.__version__, "0.1.7")
def testHelpfullFunction(self):
pass
if __name__ == "__main__":
unittest.main()
| 334 |
ramsay/ramsay/settings/prod.py
|
cjswedes/real-wear
| 0 |
2025573
|
from .base import *
DEBUG = False
ALLOWED_HOSTS += ['ramsay-project-env.i5empc3jzg.us-east-2.elasticbeanstalk.com', 'localhost']
| 134 |
userapp/urls.py
|
cy4ok/my-region
| 0 |
2025500
|
import userapp.views as userapp
from django.urls import path
app_name = 'userapp'
urlpatterns = [
path('instructors/', userapp.InstructorList.as_view(), name='instructor_list'),
# path('create/', userapp..as_view(), name='user_create'),
path('traveler_details/<int:pk>/', userapp.TravelerDetailView.as_view(), name='traveler_details'),
path('instructor_details/<int:pk>/', userapp.InstructorDetailView.as_view(), name='instructor_details'),
path('traveler_profile/', userapp.TravelerProfileView.as_view(), name='traveler_profile'),
path('instructor_profile/', userapp.InstructorDetailView.as_view(), name='instructor_profile'),
]
| 655 |
benchmarking/experiments/nermodel.py
|
ltgoslo/norBERT
| 19 |
2023401
|
from transformers import BertModel
from torch import nn
class NERmodel(nn.Module):
def __init__(self, ner_vocab, model_path=None, freeze=False):
super().__init__()
self._bert = BertModel.from_pretrained(
model_path if model_path else "bert-base-multilingual-cased"
)
hidden_size = self._bert.config.hidden_size
self._linear = nn.Linear(hidden_size, len(ner_vocab))
if freeze:
for param in self._bert.parameters():
param.requires_grad = False
def forward(self, batch, mask):
b = self._bert(
input_ids=batch["input_ids"], attention_mask=batch["attention_mask"]
)
pooler = b.last_hidden_state[:, mask].diagonal().permute(2, 0, 1)
return self._linear(pooler)
| 799 |
tests/model_tests/test_uvvisSpectrum.py
|
edbeard/chemdataextractor-uvvis2018
| 6 |
2026750
|
import unittest
from chemdataextractor.model import UvvisSpectrum, UvvisPeak
class TestUvvisSpectrum(unittest.TestCase):
#def test_merge_uvvis(self):
# self.fail()
#def test_merge_peaks_and_uvvis(self):
# self.fail()
#def test_merge_peaks(self):
# self.fail()
def test_just_value_one_value(self):
'''Tests just value for a spectrum with one value'''
u = UvvisSpectrum()
peak = UvvisPeak()
peak.value = 467
u.peaks.append(peak)
self.assertEqual(u.just_value(), True)
def test_just_value_one_extinction(self):
'''Tests just value for failure case with one extinction'''
u = UvvisSpectrum()
peak = UvvisPeak()
peak.extinction = 12345
u.peaks.append(peak)
self.assertEqual(u.just_value(), False)
def test_just_value_multiple_values(self):
u = UvvisSpectrum()
peaks = [UvvisPeak(),UvvisPeak(),UvvisPeak()]
values = [467,234,123]
for i, peak in enumerate(peaks):
peak.value = values[i]
u.peaks.append(peak)
self.assertEqual(u.just_value(), True)
#def test_just_extinction(self):
# self.fail()
if __name__ == '__main__':
unittest.main()
| 1,261 |
docs/architecture/termination/issues/issue_23395.py
|
karahbit/radical.pilot
| 47 |
2026835
|
#!/usr/bin/env python
# https://bugs.python.org/issue23395
import signal, threading, thread, time
signal.signal(signal.SIGINT, signal.SIG_DFL) # or SIG_IGN
def thread_run():
# NOTE: This should interrupt the main thread w/o an error, but
# We see an error (int not callable)
thread.interrupt_main()
t = threading.Thread(target=thread_run)
t.start()
time.sleep(1)
| 387 |
setup.py
|
mredolatti/mmh3cffi
| 1 |
2025410
|
from setuptools import setup, find_packages
setup(
name='mmh3cffi',
version='0.1.1',
packages=find_packages(exclude=['test', 'csrc']),
setup_requires=['cffi>=1.4.0', 'pytest-runner'],
install_requires=['cffi>=1.4.0', 'future'],
tests_require=['pytest'],
cffi_modules=['build_mmh3cffi.py:FFI_BUILDER']
)
| 330 |
src/xia2/Modules/Analysis/__init__.py
|
graeme-winter/xia2
| 10 |
2026594
|
from collections import OrderedDict
from libtbx import phil
from dials.pychef import dose_phil_str
batch_phil_scope = """\
batch
.multiple = True
{
id = None
.type = str
range = None
.type = ints(size=2, value_min=0)
}
"""
phil_scope = phil.parse(
"""\
d_min = None
.type = float(value_min=0)
d_max = None
.type = float(value_min=0)
resolution_bins = 20
.type = int
anomalous = False
.type = bool
use_internal_variance = False
.type = bool
.help = Use internal variance of the data in the calculation of the merged sigmas
.short_caption = "Use internal variance"
eliminate_sys_absent = False
.type = bool
.help = Eliminate systematically absent reflections before computation of merging statistics.
.short_caption = "Eliminate systematic absences before calculation"
range {
width = 1
.type = float(value_min=0)
min = None
.type = float(value_min=0)
max = None
.type = float(value_min=0)
}
cc_half_significance_level = 0.01
.type = float(value_min=0, value_max=1)
cc_half_method = *half_dataset sigma_tau
.type = choice
chef_min_completeness = None
.type = float(value_min=0, value_max=1)
.help = "Minimum value of completeness in outer resolution shell used to "
"determine suitable resolution cutoff for CHEF analysis"
%s
xtriage_analysis = True
.type = bool
include_radiation_damage = True
.type = bool
%s
"""
% (dose_phil_str, batch_phil_scope)
)
class separate_unmerged:
def __init__(self, unmerged_intensities, batches_all, id_to_batches=None):
intensities = OrderedDict()
batches = OrderedDict()
if id_to_batches is None:
run_id_to_batch_id = None
run_id = 0
unique_batches = sorted(set(batches_all.data()))
last_batch = None
run_start = unique_batches[0]
for i, batch in enumerate(unique_batches):
if (
last_batch is not None
and batch > (last_batch + 1)
or (i + 1) == len(unique_batches)
):
if (i + 1) == len(unique_batches):
last_batch += 1
batch_sel = (batches_all.data() >= run_start) & (
batches_all.data() <= last_batch
)
batches[run_id] = batches_all.select(batch_sel)
intensities[run_id] = unmerged_intensities.select(batch_sel)
run_id += 1
run_start = batch
last_batch = batch
else:
run_id_to_batch_id = OrderedDict()
run_id = 0
for batch_id, batch_range in id_to_batches.items():
run_id_to_batch_id[run_id] = batch_id
run_start, last_batch = batch_range
batch_sel = (batches_all.data() >= run_start) & (
batches_all.data() <= last_batch
)
batches[run_id] = batches_all.select(batch_sel)
intensities[run_id] = unmerged_intensities.select(batch_sel)
run_id += 1
self.run_id_to_batch_id = run_id_to_batch_id
self.intensities = intensities
self.batches = batches
| 3,264 |
backend/rethinkdb_tools/db_classes.py
|
al-indigo/vmemperor
| 1 |
2023722
|
CREATE_DB_FOR_CLASSES = set()
def create_db_for_me(cl):
CREATE_DB_FOR_CLASSES.add(cl)
CREATE_DB_FOR_CLASSES_WITH_ACL = set()
def create_acl_db_for_me(cl):
CREATE_DB_FOR_CLASSES_WITH_ACL.add(cl)
| 208 |
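A short sketch of the registration pattern above; the document class and import path are assumptions.

```python
# Hypothetical registration of a class that needs its own RethinkDB table.
from rethinkdb_tools.db_classes import create_db_for_me, CREATE_DB_FOR_CLASSES

class VM:                      # placeholder document class
    db_table_name = "vms"

create_db_for_me(VM)           # register: a table/DB will be created for this class
assert VM in CREATE_DB_FOR_CLASSES
```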
src/database/guid.py
|
roycoding/thumbs-up-api
| 3 |
2022838
|
#!/usr/bin/env python3
"""
GUID type for SQLAlchemy
See: https://docs.sqlalchemy.org/en/latest/core/custom_types.html#backend-agnostic-guid-type
"""
from sqlalchemy.types import TypeDecorator, CHAR
import uuid
class GUID(TypeDecorator):
"""Uses CHAR(32), storing as stringified hex values."""
impl = CHAR
def load_dialect_impl(self, dialect):
return dialect.type_descriptor(CHAR(32))
def process_bind_param(self, value, _):
if value is None:
return value
else:
if not isinstance(value, uuid.UUID):
return "%.32x" % uuid.UUID(value).int
else:
# hexstring
return "%.32x" % value.int
def process_result_value(self, value, _):
if value is None:
return value
else:
if not isinstance(value, uuid.UUID):
value = uuid.UUID(value)
return value
| 939 |
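A hedged sketch of declaring a column with the `GUID` type above; the model and import path are assumptions.

```python
# Hypothetical model using the backend-agnostic GUID column type.
import uuid
from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base
from database.guid import GUID   # import path is an assumption

Base = declarative_base()

class Item(Base):
    __tablename__ = "items"
    id = Column(GUID(), primary_key=True, default=uuid.uuid4)
    name = Column(String(64))
```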
LeetCode-All-Solution/Python3/LC-1614-Maximum-Nesting-Depth-of-the-Parentheses.py
|
YuweiYin/Algorithm_YuweiYin
| 0 |
2026895
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""=================================================================
@Project : Algorithm_YuweiYin/LeetCode-All-Solution/Python3
@File : LC-1614-Maximum-Nesting-Depth-of-the-Parentheses.py
@Author : [YuweiYin](https://github.com/YuweiYin)
@Date : 2022-01-07
=================================================================="""
# import functools
import sys
import time
# from typing import List
"""
LeetCode - 1614 - (Easy) - Maximum Nesting Depth of the Parentheses
https://leetcode.com/problems/maximum-nesting-depth-of-the-parentheses/
Description:
A string is a valid parentheses string (denoted VPS) if it meets one of the following:
It is an empty string "", or a single character not equal to "(" or ")",
It can be written as AB (A concatenated with B), where A and B are VPS's, or
It can be written as (A), where A is a VPS.
We can similarly define the nesting depth depth(S) of any VPS S as follows:
depth("") = 0
depth(C) = 0, where C is a string with a single character not equal to "(" or ")".
depth(A + B) = max(depth(A), depth(B)), where A and B are VPS's.
depth("(" + A + ")") = 1 + depth(A), where A is a VPS.
For example, "", "()()", and "()(()())" are VPS's (with nesting depths 0, 1, and 2),
and ")(" and "(()" are not VPS's.
Requirement:
Given a VPS represented as string s, return the nesting depth of s.
Example 1:
Input: s = "(1+(2*3)+((8)/4))+1"
Output: 3
Explanation: Digit 8 is inside of 3 nested parentheses in the string.
Example 2:
Input: s = "(1)+((2))+(((3)))"
Output: 3
Constraints:
1 <= s.length <= 100
s consists of digits 0-9 and characters '+', '-', '*', '/', '(', and ')'.
It is guaranteed that parentheses expression s is a VPS.
"""
class Solution:
def maxDepth(self, s: str) -> int:
# exception case
if not isinstance(s, str) or len(s) <= 0:
return 0
# border case
if len(s) == 1:
# assert s != "(" and s != ")"
return 0
if len(s) == 2:
# assert (s[0] != "(" and s[1] != ")") or s == "()"
return 1 if s == "()" else 0
# main method: dfs & Stack
return self._maxDepth(s)
def _maxDepth(self, s: str) -> int:
# now, 3 <= len(s)
len_s = len(s)
# op_set = {"+", "-", "*", "/"} # operator set (Do NOT care about numbers and operators!)
paren_stack = [] # store the index of every "("
max_stack_depth = 0 # this is just the answer: the max depth of stack == the max depth of paired parentheses
cur_index = 0
cur_stack_depth = 0
while cur_index < len_s: # scan every char from leftmost to rightmost
cur_char = s[cur_index]
if cur_char == "(":
paren_stack.append(cur_index) # put "(" into stack
cur_stack_depth += 1 # stack depth plus 1
max_stack_depth = max(max_stack_depth, cur_stack_depth) # update max stack depth
elif cur_char == ")":
paren_stack.pop() # pop "(" out of stack
cur_stack_depth -= 1 # stack depth minus 1
else:
pass # do nothing
cur_index += 1
return max_stack_depth
def main():
# Example 1: Output: 3
s = "(1+(2*3)+((8)/4))+1"
# Example 2: Output: 3
# s = "(1)+((2))+(((3)))"
# init instance
solution = Solution()
# run & time
start = time.process_time()
ans = solution.maxDepth(s)
end = time.process_time()
# show answer
print('\nAnswer:')
print(ans)
# show time consumption
print('Running Time: %.5f ms' % ((end - start) * 1000))
if __name__ == "__main__":
sys.exit(main())
| 3,843 |
test/torch_basics_test.py
|
IcevorHu/bluefog
| 1 |
2026844
|
# Copyright 2020 Bluefog Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import inspect
import os
import warnings
import unittest
import numpy as np
import networkx as nx
import pytest
import torch
from common import mpi_env_rank_and_size
import bluefog.torch as bf
from bluefog.torch import (
ExponentialGraph,
RingGraph,
StarGraph,
MeshGrid2DGraph,
FullyConnectedGraph,
)
from bluefog.torch import (
IsTopologyEquivalent,
InferDestinationFromSourceRanks,
InferSourceFromDestinationRanks,
)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
class BasicsTests(unittest.TestCase):
"""
Tests for basics.py
"""
def __init__(self, *args, **kwargs):
super(BasicsTests, self).__init__(*args, **kwargs)
warnings.simplefilter("module")
os.environ['BLUEFOG_NODES_PER_MACHINE'] = '2'
def test_bluefog_rank(self):
"""Test that the rank returned by bf.rank() is correct."""
true_rank, _ = mpi_env_rank_and_size()
bf.init()
rank = bf.rank()
# print("Rank: ", true_rank, rank)
assert true_rank == rank
def test_bluefog_size(self):
"""Test that the size returned by bf.size() is correct."""
_, true_size = mpi_env_rank_and_size()
bf.init()
size = bf.size()
# print("Size: ", true_size, size)
assert true_size == size
def test_bluefog_local_size(self):
_, true_size = mpi_env_rank_and_size()
bf.init()
local_size = bf.local_size()
assert local_size == min(2, true_size)
def test_bluefog_local_rank(self):
true_rank, true_size = mpi_env_rank_and_size()
bf.init()
local_rank = bf.local_rank()
assert true_rank % min(2, true_size) == local_rank
def test_set_topology_fail_with_win_create(self):
bf.init()
size = bf.size()
if size <= 1:
fname = inspect.currentframe().f_code.co_name
warnings.warn("Skip {} due to size 1".format(fname))
return
tensor = torch.FloatTensor([1])
window_name = "win_create_test"
is_created = bf.win_create(tensor, window_name)
assert is_created, "bf.win_create did not create the window object successfully."
if size == 1:
expected_topology = nx.from_numpy_array(
np.array([[0.5]]), create_using=nx.DiGraph
)
elif size == 2:
expected_topology = nx.from_numpy_array(
np.array([[0, 0.2], [0.2, 0]]), create_using=nx.DiGraph
)
else:
expected_topology = RingGraph(size)
is_set = bf.set_topology(expected_topology)
assert not is_set, "bf.set_topology did not fail despite the existing window from win_create."
topology = bf.load_topology()
assert isinstance(topology, nx.DiGraph)
assert IsTopologyEquivalent(topology, ExponentialGraph(size))
is_freed = bf.win_free()
assert is_freed, "bf.win_free did not free the window object successfully."
def test_set_and_load_topology(self):
bf.init()
size = bf.size()
if size == 4:
expected_topology = nx.DiGraph(
np.array(
[
[1 / 3.0, 1 / 3.0, 1 / 3.0, 0.0],
[0.0, 1 / 3.0, 1 / 3.0, 1 / 3.0],
[1 / 3.0, 0.0, 1 / 3.0, 1 / 3.0],
[1 / 3.0, 1 / 3.0, 0.0, 1 / 3.0],
]
)
)
elif size == 1:
expected_topology = nx.DiGraph(np.array([[1.0]]))
else:
expected_topology = ExponentialGraph(size)
topology = bf.load_topology()
assert isinstance(topology, nx.DiGraph)
assert IsTopologyEquivalent(expected_topology, topology)
def test_in_out_neighbors_expo2(self):
bf.init()
rank = bf.rank()
size = bf.size()
assert bf.set_topology(ExponentialGraph(size))
in_neighbors = bf.in_neighbor_ranks()
out_neighbors = bf.out_neighbor_ranks()
degree = int(np.ceil(np.log2(size)))
expected_in_neighbors = sorted(
[(rank - 2 ** i) % size for i in range(degree)])
expected_out_neighbors = sorted(
[(rank + 2 ** i) % size for i in range(degree)])
assert sorted(in_neighbors) == expected_in_neighbors
assert sorted(out_neighbors) == expected_out_neighbors
def test_in_out_neighbors_biring(self):
bf.init()
rank = bf.rank()
size = bf.size()
assert bf.set_topology(RingGraph(size))
in_neighbors = bf.in_neighbor_ranks()
out_neighbors = bf.out_neighbor_ranks()
expected_in_neighbors = list(
set(map(lambda x: x % size, [rank - 1, rank + 1])))
expected_out_neighbors = list(
set(map(lambda x: x % size, [rank - 1, rank + 1]))
)
if size <= 1:
expected_in_neighbors = []
expected_out_neighbors = []
assert sorted(in_neighbors) == expected_in_neighbors
assert sorted(out_neighbors) == expected_out_neighbors
@pytest.mark.parametrize(
"topo_func",
[ExponentialGraph, RingGraph, StarGraph, MeshGrid2DGraph, FullyConnectedGraph],
)
def test_infer_destination_from_source_ranks(topo_func):
bf.init()
size = bf.size()
bf.set_topology(topo_func(size))
topo = bf.load_topology()
in_neighbors = bf.in_neighbor_ranks()
out_neighbors = bf.out_neighbor_ranks()
# Make the W into average rule.
expected_W = (nx.to_numpy_array(topo) > 0).astype(float)
expected_W /= expected_W.sum(axis=0)
src_ranks, W = InferDestinationFromSourceRanks(
src_ranks=in_neighbors, construct_adjacency_matrix=True
)
assert sorted(src_ranks) == out_neighbors
np.testing.assert_allclose(W, expected_W)
@pytest.mark.parametrize(
"topo_func",
[ExponentialGraph, RingGraph, StarGraph, MeshGrid2DGraph, FullyConnectedGraph],
)
def test_infer_source_from_destination_ranks(topo_func):
bf.init()
size = bf.size()
bf.set_topology(topo_func(size))
topo = bf.load_topology()
in_neighbors = bf.in_neighbor_ranks()
out_neighbors = bf.out_neighbor_ranks()
# Make the W into average rule.
expected_W = (nx.to_numpy_array(topo) > 0).astype(float)
expected_W /= expected_W.sum(axis=0)
dst_ranks, W = InferSourceFromDestinationRanks(
dst_ranks=out_neighbors, construct_adjacency_matrix=True
)
assert sorted(dst_ranks) == in_neighbors
np.testing.assert_allclose(W, expected_W)
if __name__ == "__main__":
unittest.main()
| 7,480 |
webui/views.py
|
aidun/seite50
| 0 |
2026987
|
from django.shortcuts import render
from api.models.book import Book
def index(request):
book_list = Book.objects.all()
context = {
'book_list': book_list,
}
return render(request, 'book/index.html', context)
| 235 |
DiscordBot/main.py
|
Pradhyuman12/your-awesome-projects
| 7 |
2025956
|
import discord
import os
import requests
import json
from alive import keep_alive
client = discord.Client()
@client.event
async def on_message(message):
if message.author == client.user:
return
msg = message.content
if msg.startswith('pls hello'):
await message.channel.send("Hello Noob")
elif msg.startswith('pls bye'):
await message.channel.send("Bye! Get Lost")
keep_alive()
client.run(os.getenv('YOUR BOT TOKEN'))
| 444 |
extensions/xmpp/management/commands/ejabberd_cmd.py
|
nirgal/ngw
| 0 |
2026969
|
import logging
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from ngw.core.models import FIELD_LOGIN, ContactFieldValue, ContactGroup
def check_login_exists(login):
try:
ContactFieldValue.objects.get(contact_field_id=FIELD_LOGIN,
value=login)
except ContactFieldValue.DoesNotExist:
raise CommandError('User "{}" does not exist'.format(login))
def call(*args):
'Call subprocess with arguments, log'
logging.debug("Subprocess call: %s", args)
return subprocess.check_output(args)
def get_roster(login):
raw = call('sudo', '/usr/sbin/ejabberdctl',
'get_roster', login, settings.XMPP_DOMAIN)
raw = str(raw, encoding='utf-8')
return [line.split('\t')
for line in raw.split('\n')]
def cross_subscribe(login1, login2):
'''
login1 & 2 must exists.
Check before calling.
'''
def _subscribe1(login1, login2):
call('sudo', '/usr/sbin/ejabberdctl',
'add_rosteritem',
login1, settings.XMPP_DOMAIN,
login2, settings.XMPP_DOMAIN,
login2+'@'+settings.XMPP_DOMAIN,
settings.XMPP_ROSTERNAME,
'both')
_subscribe1(login1, login2)
_subscribe1(login2, login1)
def subscribe_everyone(login, allusers, exclude=None):
logging.info('subscribe_everyone for %s. Exclude=%s', login, exclude)
exclude = exclude or []
check_login_exists(login)
for user in allusers:
username = user.get_fieldvalue_by_id(FIELD_LOGIN)
if username == login or username in exclude:
continue # skip
logging.debug('Cross subscribe %s:%s', login, username)
cross_subscribe(login, username)
class Command(BaseCommand):
help = 'ngw/ejabberd synchronisation tools'
def add_arguments(self, parser):
parser.add_argument(
'-x', '--exclude',
action='append', dest='exclude', default=[],
metavar='USERNAME',
help="exclude username from suball")
parser.add_argument(
'--subs',
action='append', dest='add_subs', default=[],
metavar='USERNAME1:USERNAME2',
help="user1:user2 cross subscribe user1 and user2")
parser.add_argument(
'--suball',
action='store', dest='suball',
metavar='USERNAME',
help="Subscribe a user to everyone")
def handle(self, *args, **options):
verbosity = options.get('verbosity', '1')
if verbosity == '3':
loglevel = logging.DEBUG
elif verbosity == '2':
loglevel = logging.INFO
elif verbosity == '1':
loglevel = logging.WARNING
else:
loglevel = logging.ERROR
logging.basicConfig(level=loglevel,
format='{asctime} {levelname} {message}',
style='{')
user_set = (ContactGroup.objects.get(pk=settings.XMPP_GROUP)
.get_all_members())
for logins in options['add_subs']:
login1, login2 = logins.split(':')
check_login_exists(login1)
check_login_exists(login2)
cross_subscribe(login1, login2)
if options['suball']:
subscribe_everyone(login=options['suball'],
allusers=user_set,
exclude=options['exclude'])
| 3,554 |
examples/python/http_api/hello_fastapi.py
|
estebanza20/ie0317
| 0 |
2025195
|
"""
Hello world FastAPI module
This module is to test if FastAPI can run correctly.
"""
from fastapi import FastAPI
app = FastAPI()
@app.get("/")
def root():
return {"message": "Hello world!"}
| 202 |
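A minimal test sketch for the endpoint above using FastAPI's `TestClient`; the test module itself is an assumption and assumes `hello_fastapi` is importable.

```python
# Hypothetical test exercising the root endpoint.
from fastapi.testclient import TestClient
from hello_fastapi import app

client = TestClient(app)

def test_root():
    response = client.get("/")
    assert response.status_code == 200
    assert response.json() == {"message": "Hello world!"}
```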
src/lasso_winequality1.py
|
AnaharaYasuo/mlPractice
| 0 |
2025678
|
import lasso
import numpy as np
import csv
if __name__ == "__main__":
Xy = []
with open("winequality-red.csv") as fp:
for row in csv.reader(fp, delimiter=";"):
Xy.append(row)
Xy = np.array(Xy[1:], dtype=np.float64)
np.random.seed(0)
np.random.shuffle(Xy)
train_X = Xy[:-1000, :-1]
train_y = Xy[:-1000, -1]
test_X = Xy[-1000:, :-1]
test_y = Xy[-1000:, -1]
for lambda_ in [1., 0.1, 0.01]:
model = lasso.Lasso(lambda_)
model.fit(train_X, train_y)
y = model.predict(test_X)
print("--- lamda = {} ---".format(lambda_))
print("coefficients:")
print(model.w_)
mse = ((y - test_y) ** 2).mean()
print("MSE:{:.3f}".format(mse))
| 744 |
source/Record.py
|
duanduan2015/Termine
| 1 |
2025294
|
import os.path
from datetime import date
import curses
class Record:
def __init__(self, recordWin):
self.win = recordWin
def displayRecords(self, fileName, highLightTime):
fileIsExist = os.path.isfile(fileName)
if fileIsExist:
recordsFile = open(fileName, "r")
else:
recordsFile = open(fileName, "w")
recordsFile.close()
recordsFile = open(fileName, "r")
self.drawRecordWindow()
starty = 4
attr = curses.A_BOLD | curses.color_pair(2)
line = recordsFile.readline()
records = []
highLighted = False
if line != '':
records = line.split('#')
for r in records:
if r == '':
continue
strings = r.split(' ')
highLight = curses.A_BOLD | curses.color_pair(4)
if highLightTime != None and float(strings[1]) == round(highLightTime, 2) and highLighted == False:
highLighted = True
self.win.addstr(starty, 2, strings[0], highLight)
self.win.addstr(starty, 12, strings[1] + 's', highLight)
else:
self.win.addstr(starty, 2, strings[0], attr)
self.win.addstr(starty, 12, strings[1] + 's', attr)
starty = starty + 1
recordsFile.close()
star = ord('*') | attr
self.win.border(star, star, star, star, star, star, star, star)
self.win.refresh()
return
def addNewRecord(self, fileName, totalTime):
fileIsExist = os.path.isfile(fileName)
if fileIsExist:
recordsFile = open(fileName, "r")
else:
recordsFile = open(fileName, "w")
recordsFile.close()
recordsFile = open(fileName, "r")
#time = int(totalTime)
time = round(totalTime, 2)
today = date.today().strftime("%d/%m/%y")
records = []
line = recordsFile.readline()
if line != '':
oldRecords = line.split('#')
for x in range(len(oldRecords) - 1):
records.append(oldRecords[x])
added = False
for x in range(len(records)):
r = records[x].split(' ')
#t = int(r[1])
t = float(r[1])
if t > time:
newLine = today + ' ' + str(time)
records.insert(x, newLine)
added = True
break
if not added:
newLine = today + ' ' + str(time)
records.append(newLine)
else:
newLine = today + ' ' + str(time)
records.append(newLine)
recordsFile.close()
recordsFile = open(fileName, 'w+')
newFile = ''
num = 0
for line in records:
num = num + 1
newFile = newFile + line + ' ' + '#'
if num == 10:
break
recordsFile.write(newFile)
recordsFile.close()
def drawRecordWindow(self):
attr = curses.A_BOLD | curses.color_pair(2)
height, width = self.win.getmaxyx()
star = ord('*') | attr
self.win.border(star, star, star, star, star, star, star, star)
self.win.addstr(1, 9, 'RECORDS', attr)
self.win.addstr(2, 2, 'Date', attr)
self.win.addstr(2, 12, 'Time', attr)
def eraseRecord(self):
self.win.erase()
self.win.refresh()
| 3,552 |
selfdrive/crash.py
|
1Thamer/openpilot1
| 0 |
2026117
|
"""Install exception handler for process crash."""
import os
import sys
import threading
import capnp
from selfdrive.version import version, dirty
from selfdrive.swaglog import cloudlog
if os.getenv("NOLOG") or os.getenv("NOCRASH"):
def capture_exception(*exc_info):
pass
def bind_user(**kwargs):
pass
def bind_extra(**kwargs):
pass
def install():
pass
else:
from raven import Client
from raven.transport.http import HTTPTransport
import json
error_tags = {'dirty': dirty, 'username': 'char_error'}
try:
with open("/data/data/ai.comma.plus.offroad/files/persistStore/persist-auth", "r") as f:
auth = json.loads(f.read())
auth = json.loads(auth['commaUser'])
tags = ['username', 'email']
for tag in tags:
try:
error_tags[tag] = ''.join(char for char in auth[tag].decode('utf-8', 'ignore') if char.isalnum())
except:
pass
except:
pass
try:
with open("/data/params/d/CommunityPilotUser", "r") as f:
auth = json.loads(f.read())
tags = ['username', 'email']
for tag in tags:
try:
error_tags[tag] = ''.join(char for char in auth[tag].decode('utf-8', 'ignore') if char.isalnum())
except:
pass
except:
pass
logging_data = {"branch": "/data/params/d/GitBranch", "commit": "/data/params/d/GitCommit", "remote": "/data/params/d/GitRemote"}
for key in logging_data:
try:
with open(logging_data[key], "r") as f:
error_tags[key] = str(f.read())
except:
error_tags[key] = "unknown"
client = Client('https://84d713b5bd674bcbb7030d1b86115dcb:[email protected]/1405628',
install_sys_hook=False, transport=HTTPTransport, release=version, tags={'dirty': dirty})
def capture_exception(*args, **kwargs):
exc_info = sys.exc_info()
if not exc_info[0] is capnp.lib.capnp.KjException:
client.captureException(*args, **kwargs)
cloudlog.error("crash", exc_info=kwargs.get('exc_info', 1))
def capture_warning(warning_string):
client.captureMessage(warning_string, level='warning')
def bind_user(**kwargs):
client.user_context(kwargs)
def bind_extra(**kwargs):
client.extra_context(kwargs)
def install():
# installs a sys.excepthook
__excepthook__ = sys.excepthook
def handle_exception(*exc_info):
if exc_info[0] not in (KeyboardInterrupt, SystemExit):
capture_exception(exc_info=exc_info)
__excepthook__(*exc_info)
sys.excepthook = handle_exception
"""
Workaround for `sys.excepthook` thread bug from:
http://bugs.python.org/issue1230540
Call once from the main thread before creating any threads.
Source: https://stackoverflow.com/a/31622038
"""
init_original = threading.Thread.__init__
def init(self, *args, **kwargs):
init_original(self, *args, **kwargs)
run_original = self.run
def run_with_except_hook(*args2, **kwargs2):
try:
run_original(*args2, **kwargs2)
except Exception:
sys.excepthook(*sys.exc_info())
self.run = run_with_except_hook
threading.Thread.__init__ = init
| 3,164 |
itunesdb/test/web/crud/albums/test_create_album.py
|
paul-ko/itunes-to-sqlite
| 0 |
2023407
|
from typing import Optional
import pytest
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session
from itunesdb.web import crud
from itunesdb.web import models
from itunesdb.web import schemas
@pytest.mark.parametrize(
"name, artist, compilation, year",
[
("name", "artist", False, 1900),
(None, "artist", False, 1900),
("artist", "artist", True, None),
("name", "artist", True, 1900),
(None, "artist", True, 1900),
("artist", "artist", False, None),
],
)
def test_create_album_successfully(
db: Session,
genre_ambient: models.Genre,
name: Optional[str],
artist: str,
compilation: bool,
year: Optional[int],
):
album = schemas.AlbumCreate(
name=name,
artist=artist,
year=year,
compilation=compilation,
genre_id=genre_ambient.id,
)
created_album = crud.create_album(album, db)
assert created_album is not None
assert created_album.name == name
assert created_album.artist == artist
assert created_album.compilation == compilation
assert created_album.year == year
assert created_album.genre_id == genre_ambient.id
assert created_album.id > 0
def test_create_album_unique_constraint_violation(
db: Session, album_ambient_1: models.Album
):
album = schemas.AlbumCreate(
name=album_ambient_1.name,
artist=album_ambient_1.artist,
compilation=False,
genre_id=album_ambient_1.genre_id,
)
with pytest.raises(IntegrityError):
crud.create_album(album, db)
def test_create_album_genre_foreign_key_violaton(db: Session):
album = schemas.AlbumCreate(name="abc", artist="def", compilation=False, genre_id=1)
with pytest.raises(IntegrityError):
crud.create_album(album, db)
| 1,824 |
pdf2txt.py
|
singulart/pyutilz
| 0 |
2026229
|
from pathlib import Path
import pdftotext
import sys
def main(argv):
with open('output.txt', 'w') as out:
path = Path(argv[0]).glob("**/*.pdf")
for src_file in path:
print('Processing %s' % src_file.absolute())
with open(src_file, 'rb') as f:
pdf = pdftotext.PDF(f)
for page in pdf:
out.write(page)
if __name__ == "__main__":
main(sys.argv[1:])
| 449 |
ansible/lib/ansible/modules/extras/windows/win_acl.py
|
kiv-box/kafka
| 0 |
2026143
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015, <NAME> <<EMAIL>>
# Copyright 2015, <NAME>
# Copyright 2015, <NAME> <<EMAIL>>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_acl
version_added: "2.0"
short_description: Set file/directory permissions for a system user or group.
description:
- Add or remove rights/permissions for a given user or group for the specified src file or folder.
options:
path:
description:
- File or Directory
required: yes
user:
description:
- User or Group to add specified rights to act on src file/folder
required: yes
default: none
state:
description:
- Specify whether to add C(present) or remove C(absent) the specified access rule
required: no
choices:
- present
- absent
default: present
type:
description:
- Specify whether to allow or deny the rights specified
required: yes
choices:
- allow
- deny
default: none
rights:
description:
- The rights/permissions that are to be allowed/denied for the specified user or group for the given src file or directory. Can be entered as a comma separated list (Ex. "Modify, Delete, ExecuteFile"). For more information on the choices see MSDN FileSystemRights Enumeration.
required: yes
choices:
- AppendData
- ChangePermissions
- Delete
- DeleteSubdirectoriesAndFiles
- ExecuteFile
- FullControl
- ListDirectory
- Modify
- Read
- ReadAndExecute
- ReadAttributes
- ReadData
- ReadExtendedAttributes
- ReadPermissions
- Synchronize
- TakeOwnership
- Traverse
- Write
- WriteAttributes
- WriteData
- WriteExtendedAttributes
default: none
inherit:
description:
- Inherit flags on the ACL rules. Can be specified as a comma separated list (Ex. "ContainerInherit, ObjectInherit"). For more information on the choices see MSDN InheritanceFlags Enumeration.
required: no
choices:
- ContainerInherit
- ObjectInherit
- None
default: For Leaf File, None; For Directory, ContainerInherit, ObjectInherit;
propagation:
description:
- Propagation flag on the ACL rules. For more information on the choices see MSDN PropagationFlags Enumeration.
required: no
choices:
- None
- NoPropagateInherit
- InheritOnly
default: "None"
author: <NAME> (@schwartzmx), <NAME> (@trondhindenes), <NAME> (@h0nIg)
'''
EXAMPLES = '''
# Restrict write,execute access to User Fed-Phil
$ ansible -i hosts -m win_acl -a "user=Fed-Phil path=C:\Important\Executable.exe type=deny rights='ExecuteFile,Write'" all
# Playbook example
# Add access rule to allow IIS_IUSRS FullControl to MySite
---
- name: Add IIS_IUSRS allow rights
win_acl:
path: 'C:\inetpub\wwwroot\MySite'
user: 'IIS_IUSRS'
rights: 'FullControl'
type: 'allow'
state: 'present'
inherit: 'ContainerInherit, ObjectInherit'
propagation: 'None'
# Remove previously added rule for IIS_IUSRS
- name: Remove FullControl AccessRule for IIS_IUSRS
win_acl:
path: 'C:\inetpub\wwwroot\MySite'
user: 'IIS_IUSRS'
rights: 'FullControl'
type: 'allow'
state: 'absent'
inherit: 'ContainerInherit, ObjectInherit'
propagation: 'None'
# Deny Intern
- name: Deny Intern
win_acl:
path: 'C:\Administrator\Documents'
user: 'Intern'
rights: 'Read,Write,Modify,FullControl,Delete'
type: 'deny'
state: 'present'
'''
| 4,255 |
nodeconductor/iaas/tests/test_admin.py
|
p-p-m/nodeconductor
| 0 |
2026942
|
import mock
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test import TestCase
from nodeconductor.core.models import SynchronizationStates
from nodeconductor.iaas.tests import factories
class BaseAdminTestCase(TestCase):
def setUp(self):
self.login()
def login(self):
username = 'admin'
password = '<PASSWORD>'
user = get_user_model().objects.create_user(username, '<EMAIL>', password)
user.is_staff = True
user.is_superuser = True
user.save()
self.client.login(username=username, password=password)
def apply_action(self, url, action, items):
pks = [str(item.pk) for item in items]
data = {
'action': action,
'_selected_action': pks
}
return self.client.post(url, data, follow=True)
@mock.patch('nodeconductor.iaas.admin.send_task')
class RecoverCloudProjectMembershipTest(BaseAdminTestCase):
def test_erred_cpm_is_passed_to_backend_task(self, mock_task):
erred_cpm = factories.CloudProjectMembershipFactory(
state=SynchronizationStates.ERRED)
response = self.recover_cpm([erred_cpm])
self.assertContains(response, 'One cloud project membership scheduled for recovery')
mock_task('structure', 'recover_erred_services').assert_called_with([erred_cpm.to_string()])
def test_synced_cpm_is_skipped(self, mock_task):
synced_cpm = factories.CloudProjectMembershipFactory(
state=SynchronizationStates.IN_SYNC)
response = self.recover_cpm([synced_cpm])
self.assertFalse(mock_task.called)
def recover_cpm(self, items):
url = reverse('admin:iaas_cloudprojectmembership_changelist')
return self.apply_action(url, 'recover_erred_cloud_memberships', items)
| 1,859 |
primeii_ros_bridge/nodes/primeii_ros_bridge.py
|
QuartzYan/PrimeIIRosBridge
| 0 |
2026912
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
import json
import socket
import threading
import rospy
from primeii_ros_msgs.msg import FingerFlex, GloveData, GlovesData, GloveHaptic
sub = None
pub = None
client = None
fingerNames = [ "thumb", "index", "middle", "ring", "pinky" ]
def callBack(msg):
for i in range(len(msg.hapticPower)):
if msg.hapticPower[i] > 1.0:
rospy.logwarn("GloveHaptic message hapticPower data error!!!")
return
jj = {'dongleid':msg.dongleid, 'handtype':msg.handtype, 'power':msg.hapticPower}
ss = json.dumps(jj)
try:
client.send(ss)
except:
rospy.logwarn("send message faild!!!")
def str2RosMsg(string):
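    # Expected JSON layout (inferred from the parsing below), roughly:
    #   {"glove1": {"deviceid": ..., "dongleid": ..., "handtype": ...,
    #               "wristIMU": {"x": ..., "y": ..., "z": ..., "w": ...},
    #               "fingers": {"fingersFlex": {"thumb": {...}, ...},
    #                           "fingersIMU": {"thumb": {...}, ...}}},
    #    "glove2": {...}}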
try:
jj = json.loads(string)
except ValueError as ee:
#rospy.logerr()
return None
msg = GlovesData()
msg.header.stamp = rospy.Time.now()
msg.header.frame_id = ''
for i in range(len(jj)):
glove = GloveData()
gloveNum = 'glove' + str(i+1)
glove.deviceid = jj[gloveNum]['deviceid']
glove.dongleid = jj[gloveNum]['dongleid']
glove.handtype = jj[gloveNum]['handtype']
glove.wristIMU.x = jj[gloveNum]['wristIMU']['x']
glove.wristIMU.y = jj[gloveNum]['wristIMU']['y']
glove.wristIMU.z = jj[gloveNum]['wristIMU']['z']
glove.wristIMU.w = jj[gloveNum]['wristIMU']['w']
fingersFlex = jj[gloveNum]['fingers']['fingersFlex']
for j in range(len(fingersFlex)):
glove.fingersFlex[j].Joint1Spread = fingersFlex[fingerNames[j]]['Joint1Spread']
glove.fingersFlex[j].Joint1Stretch = fingersFlex[fingerNames[j]]['Joint1Stretch']
glove.fingersFlex[j].Joint2Stretch = fingersFlex[fingerNames[j]]['Joint2Stretch']
glove.fingersFlex[j].Joint3Stretch = fingersFlex[fingerNames[j]]['Joint3Stretch']
fingersIMU = jj[gloveNum]['fingers']['fingersIMU']
for j in range(len(fingersIMU)):
glove.fingersIMU[j].x = fingersIMU[fingerNames[j]]['x']
glove.fingersIMU[j].y = fingersIMU[fingerNames[j]]['y']
glove.fingersIMU[j].z = fingersIMU[fingerNames[j]]['z']
glove.fingersIMU[j].w = fingersIMU[fingerNames[j]]['w']
msg.glovesData.append(glove)
return msg
def main():
global sub, pub, client
rospy.init_node("primeii_ros_bridge")
#get param
hostname = rospy.get_param("~hostname", default="192.168.3.141")
hostport = rospy.get_param("~hostport", default=10086)
#init Subscriber and Publisher
sub = rospy.Subscriber("GloveHaptic", GloveHaptic, callBack)
pub = rospy.Publisher("GlovesData", GlovesData, queue_size=1)
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.settimeout(5)
while not rospy.is_shutdown():
er = client.connect_ex((hostname, hostport))
if er == 0:
rospy.loginfo("connect %s successful...", hostname)
break
else:
rospy.loginfo("connect faild, error num:%d", er)
rospy.loginfo("try connect %s again", hostname)
time.sleep(1)
def loop():
r = rospy.Rate(300)
while not rospy.is_shutdown():
try:
string = client.recv(4096)
except:
rospy.logwarn("read message timeout!!!")
continue
#print len(string)
msg = str2RosMsg(string)
if msg:
pub.publish(msg)
else:
pass
r.sleep()
t = threading.Thread(target=loop)
t.start()
rospy.spin()
if __name__ == "__main__":
try:
main()
except rospy.ROSInterruptException:
pass
| 3,423 |
strain_detangler/cli.py
|
lauren-mak/strain-detangler
| 0 |
2026649
|
import click
from .api import (
build_pangenome,
find_strains,
)
@click.group()
def main():
pass
@main.command('pangenome')
def cli_pangenome():
"""Do something pangenome related."""
build_pangenome(logger=lambda x: click.echo(x, err=True))
@main.command('find-strains')
def cli_find_strains():
"""Do something related to finding strains."""
find_strains(logger=lambda x: click.echo(x, err=True))
if __name__ == '__main__':
main()
| 473 |
tests/unit/concurrently/test_CommunicationChannel.py
|
shane-breeze/AlphaTwirl
| 0 |
2026092
|
# <NAME> <<EMAIL>>
import logging
import pytest
try:
import unittest.mock as mock
except ImportError:
import mock
from alphatwirl.concurrently import CommunicationChannel, TaskPackage
##__________________________________________________________________||
@pytest.fixture()
def dropbox():
return mock.MagicMock()
@pytest.fixture()
def obj(dropbox):
return CommunicationChannel(dropbox=dropbox)
##__________________________________________________________________||
def test_repr(obj):
repr(obj)
def test_begin_end(obj, dropbox):
dropbox.open.assert_not_called()
dropbox.close.assert_not_called()
obj.begin()
dropbox.open.assert_called_once()
dropbox.close.assert_not_called()
obj.begin()
dropbox.open.assert_called_once() # don't open twice
dropbox.close.assert_not_called()
obj.end()
dropbox.open.assert_called_once()
dropbox.close.assert_called_once()
obj.end()
dropbox.open.assert_called_once()
dropbox.close.assert_called_once() # don't close twice
obj.begin()
assert 2 == dropbox.open.call_count # can open again
dropbox.close.assert_called_once()
def test_begin_terminate_end(obj, dropbox):
obj.begin()
assert 0 == dropbox.terminate.call_count
obj.terminate()
assert 1 == dropbox.terminate.call_count
obj.end()
def test_put_when_closed(obj, dropbox, caplog):
task1 = mock.MagicMock(name='task1')
with caplog.at_level(logging.WARNING, logger='alphatwirl'):
obj.put(task1)
assert len(caplog.records) == 1
assert caplog.records[0].levelname == 'WARNING'
assert 'CommunicationChannel' in caplog.records[0].name
assert 'the drop box is not open' in caplog.records[0].msg
dropbox.put.assert_not_called()
def test_receive_when_closed(obj, dropbox, caplog):
result1 = mock.MagicMock(name='result1')
dropbox.receive = mock.MagicMock(return_value=[(0, result1)])
with caplog.at_level(logging.WARNING, logger='alphatwirl'):
result = obj.receive()
assert len(caplog.records) == 1
assert caplog.records[0].levelname == 'WARNING'
assert 'CommunicationChannel' in caplog.records[0].name
assert 'the drop box is not open' in caplog.records[0].msg
assert result is None
obj.end()
##__________________________________________________________________||
| 2,352 |
remote_procedure_call/rabbit_remote_procedure_call.py
|
id23cat/DMC-compute-nodes
| 0 |
2026929
|
# -*- coding: utf-8 -*-
import time
import uuid
from logs import get_logger
from rabbitmq.rabbitmq import Rabbitmq
from remote_procedure_call.base_remote_procedure_call import (
RPCFunctionListenerInterface, RPCFunctionCallerInterface
)
from typing import Dict, NoReturn, Tuple, Optional
from pika.adapters.blocking_connection import BlockingChannel
from pika.spec import Basic, BasicProperties
RPC_EXCHANGE_NAME = 'rpc'
RPC_REQUEST_ROUTING_KEY_PATTERN = 'rpc.request.'
logger = get_logger(__name__)
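# Routing overview (as implemented below): callers publish requests to the 'rpc'
# exchange with routing key 'rpc.request.<function>', and each listener consumes
# from a queue of the same name. Replies travel back through the same exchange on
# a per-call routing key 'rpc.response.<function>.<uuid>' carried in the message's
# reply_to/correlation_id properties.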
class RabbitRPCFunctionListener(RPCFunctionListenerInterface):
AUTO_DELETE_QUEUE = False
QUEUE_TTL = None
def __init__(self, *args, **kwargs):
super(RabbitRPCFunctionListener, self).__init__(*args, **kwargs)
if self.namespace:
self.name_of_function = '{}.{}'.format(self.namespace, self.name_of_function)
self.queue_name = RPC_REQUEST_ROUTING_KEY_PATTERN + self.name_of_function
self.request_rabbit = Rabbitmq()
self.response_rabbit = Rabbitmq()
self.consumer_tag = self.name_of_function + '.' + str(uuid.uuid4())
self._init_rabbit()
self.message_is_requested = False
self.gotten_data = None
def _init_rabbit(self):
self.request_rabbit.check_connect()
self.request_rabbit.declare_rpc_exchange(RPC_EXCHANGE_NAME)
self.request_rabbit.declare_rpc_function_queue(
self.queue_name, self.queue_name, RPC_EXCHANGE_NAME,
auto_delete=self.AUTO_DELETE_QUEUE, expires_at=self.QUEUE_TTL
)
def _rabbit_consumer(self, channel, method, properties, body):
# type: (BlockingChannel, Basic.Deliver, BasicProperties, bytes) -> None
        if not self.message_is_requested:  # guard against handling more than one message at a time
channel.basic_nack(method.delivery_tag)
return
channel.basic_ack(method.delivery_tag)
self.message_is_requested = False
reply_to = properties.reply_to
correlation_id = properties.correlation_id
self.gotten_data = ({'reply_to': reply_to, 'correlation_id': correlation_id}, body)
def receive_call(self):
# type: () -> Tuple[Dict[str, str], bytes]
        # prefetch_count=1 (set below) matters: it keeps the consumer from taking more than one message at a time
if not self.request_rabbit.check_connect(reconnect=False):
self._init_rabbit()
self.request_rabbit.start_consuming(self.queue_name, self.consumer_tag, self._rabbit_consumer, prefetch_count=1)
self.message_is_requested = True
flag = self.request_rabbit.process_one_message()
if not flag:
logger.info('ERROR while process one message, try to reconnect to rabbit')
while not self.request_rabbit.check_connect():
logger.info('connect to rabbit failed. try again in 5 second')
time.sleep(5)
self._init_rabbit()
self.request_rabbit.stop_consuming(self.consumer_tag)
self.request_rabbit.process_one_message()
gotten_data = self.gotten_data
self.gotten_data = None
return gotten_data
def send_return(self, call_properties, return_data, content_type=None):
# type: (Dict[str, str], bytes, Optional[str]) -> None
self.response_rabbit.send_msg(
RPC_EXCHANGE_NAME, call_properties.get('reply_to'),
return_data, custom_properties=BasicProperties(
correlation_id=call_properties.get('correlation_id'),
content_type=content_type,
), ignore_routing_key_project_autofill=True
)
class TemporaryRabbitRPCFunctionListener(RabbitRPCFunctionListener):
QUEUE_TTL = 60 * 1000
class RabbitRPCFunctionCaller(RPCFunctionCallerInterface):
RPC_QUEUE_RETURN_NAME_PATTERN = 'rpc.'
RPC_RESPONSE_BIND_PATTERN = 'rpc.response.{}.'
def __init__(self, *args, **kwargs):
super(RabbitRPCFunctionCaller, self).__init__(*args, **kwargs)
if self.namespace:
self.name_of_function = '{}.{}'.format(self.namespace, self.name_of_function)
self.response_bind_pattern = self.RPC_RESPONSE_BIND_PATTERN.format(self.name_of_function)
self.rabbit = Rabbitmq()
self._init_rabbit()
self.gotten_message = None
self.id_of_call = None
self.name_of_queue = None
def _init_rabbit(self):
self.rabbit.declare_rpc_exchange(RPC_EXCHANGE_NAME)
def _rabbit_consumer(self, channel, method, properties, body):
# type: (BlockingChannel, Basic.Deliver, BasicProperties, bytes) -> NoReturn
self.gotten_message = body
channel.basic_ack(method.delivery_tag)
def call(self, params, content_type=None):
# type: (bytes, Optional[str]) -> NoReturn
self.rabbit.check_connect()
self.id_of_call = str(uuid.uuid4())
self.name_of_queue = self.RPC_QUEUE_RETURN_NAME_PATTERN + self.name_of_function + '.' + self.id_of_call
self.rabbit.init_queue(self.name_of_queue, auto_delete=True, exclusive=True)
self.rabbit.binding_to_the_exchange(
RPC_EXCHANGE_NAME, self.name_of_queue, self.response_bind_pattern + self.id_of_call
)
self.rabbit.start_consuming(self.name_of_queue, self.name_of_queue, self._rabbit_consumer, 1)
self.rabbit.send_msg(
RPC_EXCHANGE_NAME, RPC_REQUEST_ROUTING_KEY_PATTERN + self.name_of_function, params,
custom_properties=BasicProperties(
reply_to=self.response_bind_pattern + self.id_of_call,
correlation_id=self.id_of_call, content_type=content_type
)
)
def fetch_response(self):
# type: () -> bytes
self.rabbit.process_one_message()
self.rabbit.stop_consuming(self.name_of_queue)
self.rabbit.disconnect()
gotten_message, self.gotten_message = self.gotten_message, None # clear variable and assign to new one
return gotten_message
class RabbitNoReturnRPCFunctionCaller(RPCFunctionCallerInterface):
def __init__(self, *args, **kwargs):
super(RabbitNoReturnRPCFunctionCaller, self).__init__(*args, **kwargs)
if self.namespace:
self.name_of_function = '{}.{}'.format(self.namespace, self.name_of_function)
self.rabbit = Rabbitmq()
self._init_rabbit()
def _init_rabbit(self):
self.rabbit.declare_rpc_exchange(RPC_EXCHANGE_NAME)
def call(self, params, content_type=None):
# type: (bytes, Optional[str]) -> NoReturn
self.rabbit.check_connect()
self.rabbit.send_msg(
RPC_EXCHANGE_NAME, RPC_REQUEST_ROUTING_KEY_PATTERN + self.name_of_function, params,
custom_properties=BasicProperties(
reply_to=None,
correlation_id=None, content_type=content_type
)
)
def fetch_response(self):
# type: () -> bytes
        raise NotImplementedError
| 6,179 |
tests/conftest.py
|
aratz-lasa/aio-multiplexer
| 3 |
2026601
|
from asyncio import StreamReader, StreamWriter
from unittest.mock import MagicMock
import pytest
@pytest.fixture
def connection_mock(event_loop):
reader_mock, writer_mock = (
MagicMock(spec_set=StreamReader, wraps=StreamReader(loop=event_loop)),
MagicMock(spec_set=StreamWriter),
)
yield reader_mock, writer_mock
| 344 |
wallace/wallace.py
|
zhangcong2019/gaussian-random-number
| 0 |
2026531
|
import numpy as np
import random
import math
class Wallace():
def __init__(self, seed=0):
self.seed = seed
random.seed(self.seed)
self.R = 2
self.L = 1024
self.K = 4
self.N = self.L * self.K
m = [-1, 1, 1, 1,
1, -1, 1, 1,
-1, -1, 1, -1,
-1, -1, -1, 1]
self.A = [np.zeros([self.K, self.K]), np.zeros([self.K, self.K])]
for i in range(self.K):
for j in range(self.K):
self.A[0][i][j] = m[i * self.K + j]
self.A[0] *= 0.5
self.A[1] = -self.A[0]
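        # 0.5 * m is an orthogonal matrix (its rows are mutually orthogonal with norm 2),
        # so applying A to K-sized blocks of the pool mixes the values while keeping them
        # Gaussian -- the core idea of the Wallace generator.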
self.pool = np.random.randn(self.K * self.L)
self.shuffle = [i for i in range(self.N)]
for i in range(self.N - 1):
j = random.randint(i, self.N - 1)
[self.shuffle[i], self.shuffle[j]] = [self.shuffle[j], self.shuffle[i]]
def generate(self) -> list:
for i in range(self.N - 1):
j = random.randint(i, self.N - 1)
[self.shuffle[i], self.shuffle[j]] = [self.shuffle[j], self.shuffle[i]]
for i in range(self.R):
for j in range(self.L):
pos = self.shuffle[(j * self.K) : (j * self.K) + self.K]
x = self.pool[pos]
x = np.reshape(x,[4,1])
x_ = np.matmul(self.A[i], x)
for z in range(self.K):
self.pool[pos[z]] = x_[z]
# s = math.sqrt(math.fabs(self.pool[self.N -1]))
return self.pool[:-1]
if "__main__" == __name__:
print("this is wallace grng")
grng_ = Wallace(seed=99)
list_gauss = []
for i in range(10000):
listTemp = grng_.generate()
for g in listTemp:
list_gauss.append(g)
import os, sys
scriptPath = os.path.realpath(os.path.dirname(sys.argv[0]))
sys.path.append(scriptPath + "/..")
import grng
grng.plot(list_gauss)
| 1,921 |
geometria_archivos/geometria_archivos.py
|
jabaier/iic1103.20152.s5
| 0 |
2025192
|
from graphics import *
from getchar import *
class color:
def __init__(self,red,green,blue):
self.red=red
self.green=green
self.blue=blue
class vector:
def __init__(self,x,y):
self.x=x
self.y=y
def __add__(self,v):
return vector(self.x+v.x,self.y+v.y)
def __sub__(self,v):
return vector(self.x-v.x,self.y-v.y)
def __str__(self):
return "("+str(self.x)+","+str(self.y)+")"
class cuadrado:
def __init__(self,x,y,lado,c=color(0,0,0)):
self.x=x
self.y=y
self.lado=lado
self.color=c
self.r="undef"
def pintar(self,color):
self.color=color
def escalar(self,escala):
self.lado = self.lado*escala
def __str__(self):
return "Cuadrado en ("+str(self.x)+","+str(self.y)+") con lado "+str(self.lado)
def desplazar(self,v):
self.x=self.x+v.x
self.y=self.y+v.y
def inscribir(self,circ):
self.lado = circ.radio*(2**.5)
self.x = circ.x - self.lado/2
self.y = circ.y - self.lado/2
def dibujar(self,window):
if self.r != "undef":
self.r.undraw()
self.r=Rectangle(Point(self.x,self.y),Point(self.x+self.lado,self.y+self.lado))
self.r.setOutline(color_rgb(self.color.red,self.color.green,self.color.blue))
self.r.draw(window)
def codificar(self):
        # returns a string encoding this square
return " ".join(['cuadrado',str(self.x),str(self.y),str(self.lado)])
class circulo:
def __init__(self,x,y,radio):
self.x=x
self.y=y
self.radio=radio
self.color=color(0,0,0)
self.r = "undef"
def escalar(self,escala):
self.radio = self.radio*escala
def desplazar(self,vec):
self.x = self.x + vec.x
self.y = self.y + vec.y
def inscribir(self,cuad):
self.radio = cuad.lado/2
self.x = cuad.x + self.radio
self.y = cuad.y + self.radio
def __str__(self):
return "Circulo en ("+str(self.x)+","+str(self.y)+") con radio "+str(self.radio)
def dibujar(self,window):
if self.r != "undef":
self.r.undraw()
self.r=Circle(Point(self.x,self.y),self.radio)
self.r.setOutline(color_rgb(self.color.red,self.color.green,self.color.blue))
self.r.setWidth(2)
self.r.draw(window)
def codificar(self):
        # returns a string encoding this circle
return " ".join(['circulo',str(self.x),str(self.y),str(self.radio)])
class lienzo:
def __init__(self,tamano_x,tamano_y):
self.objetos=[]
self.ventana = GraphWin("Una ventana",tamano_x,tamano_y)
def dibujar(self):
for obj in self.objetos:
obj.dibujar(self.ventana)
    def agregar(self,nuevo_objeto): ## adds an object to the canvas
self.objetos.append(nuevo_objeto)
def listar(self):
i = 0
while i < len(self.objetos):
print(str(i)+": ",self.objetos[i])
i += 1
def seleccionar_objeto(self):
self.listar()
indice=-1
while not indice in range(len(self.objetos)):
indice = int(input("Indice del objeto a modificar: "))
return self.objetos[indice]
def guardar(self, filename):
        # filename is the name of the file to save to
f = open(filename,'w')
for obj in self.objetos:
f.write(obj.codificar()+"\n")
f.close()
def leer(self, filename):
f = open(filename,'r')
for linea in f:
l = linea.split()
if l[0] == "cuadrado":
objeto = cuadrado(int(l[1]),int(l[2]),float(l[3]))
elif l[0] == "circulo":
objeto = circulo(int(l[1]),int(l[2]),float(l[3]))
self.agregar(objeto)
f.close()
def modificar_objeto(self,objeto):
getch=Getch()
D=10
up = vector(0,-D)
down = vector(0,D)
left = vector(-D,0)
right = vector(D,0)
print("'a': agrandar, 'z': achicar, flechas para mover y 'f' para salir")
c=''
while c!='f':
c=getch()
if c == 'a':
objeto.escalar(1.05)
elif c == 'z':
objeto.escalar(1/1.05)
elif ord(c) == 27:
c1=getch()
c=getch()
if ord(c1)==91:
desplazamiento=vector(0,0)
if ord(c) == 68:
desplazamiento=left
elif ord(c) == 67:
desplazamiento=right
elif ord(c) == 65:
desplazamiento=up
elif ord(c) == 66:
desplazamiento=down
objeto.desplazar(desplazamiento)
self.dibujar()
def pregunta_opcion():
    ## shows the menu on screen and returns the chosen option
opciones_legales = [str(x+1) for x in range(6)]
while True:
print("Cuadrados y Círculos")
print("\t1. Crear un Círculo")
print("\t2. Crear un Cuadrado")
print("\t3. Modificar Objeto")
print("\t4. Guardar a un Archivo")
print("\t5. Leer desde un Archivo")
print("\t6. Salir")
opt = input("Opcion? ")
if opt in opciones_legales:
return opt
def pregunta_circulo():
    ## asks for a circle, creates it and returns it
while True:
linea=input("Ingresa [radio] [pos_x] [pos_y]: ")
l = linea.split()
if len(l) == 3 and l[0].isnumeric() and l[1].isnumeric() and l[2].isnumeric():
break
print("Ingresa un input válido de tres numeros!")
c = circulo(int(l[1]),int(l[2]),int(l[0]))
return c
def pregunta_cuadrado():
    ## asks for a square and creates it
while True:
linea=input("Ingresa [lado] [pos_x] [pos_y]: ")
l = linea.split()
if len(l) == 3 and l[0].isnumeric() and l[1].isnumeric() and l[2].isnumeric():
break
print("Ingresa un input válido de tres numeros!")
cuad = cuadrado(int(l[1]),int(l[2]),int(l[0]))
return cuad
def get_nombre_archivo():
return input("Nombre del archivo: ")
canvas=lienzo(500,500)
opcion='-1' ## arbitrary value to enter the while loop
while opcion != '6':
opcion = pregunta_opcion()
if opcion == '1':
circ = pregunta_circulo()
canvas.agregar(circ)
canvas.dibujar()
elif opcion == '2':
cuad = pregunta_cuadrado()
canvas.agregar(cuad)
canvas.dibujar()
elif opcion == '3':
objeto=canvas.seleccionar_objeto()
canvas.modificar_objeto(objeto)
elif opcion == '4':
print("Guardar")
fname = get_nombre_archivo()
canvas.guardar(fname)
elif opcion == '5':
print("Leer")
fname = get_nombre_archivo()
canvas.leer(fname)
canvas.dibujar()
print("Nos vemos!")
| 7,048 |
lib/testing.py
|
willwill2will54/MATSimCheck
| 0 |
2026936
|
def tester(table, MAT, algorithm=['geormsd', 'wgt', '500'], number=None, testing=False, go=1):
from collections import Counter
from tinydb import TinyDB, Query
from pprint import pprint
import defaults as defs
import numpy as np
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware
MATs = TinyDB('./dbs/MATS.json', storage=CachingMiddleware(JSONStorage))
core = TinyDB('./dbs/Core.json', storage=CachingMiddleware(JSONStorage))
tab = MATs.table(table)
cand = tab.all()
tested = tab.get(Query()['Trust name'] == MAT)
algorithm2 = []
for a, b, c, d in zip(algorithm[:-3:4], algorithm[1:-2:4], algorithm[2:-1:4], algorithm[3::4]):
algorithm2 += [a + b, c, d]
algorithm = algorithm2
try:
assert all(x in ['wgt', 'is', 'isnot'] or x.endswith('gets') for x in algorithm[1:-1:3])
except AssertionError:
raise
try:
assert 'wgt' in algorithm[1:-1:3]
except AssertionError:
raise
crits = list(zip(algorithm[:-2:3], algorithm[1:-1:3], algorithm[2::3]))
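    # `crits` is a list of (column, method, value) triplets taken from `algorithm`:
    # 'is'/'isnot' filter the candidate MATs on that column, '...gets'/'...notgets'
    # add the fixed bonus `value` to a candidate's score when the column matches /
    # differs, and 'wgt' adds a relative difference to the tested MAT weighted by `value`.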
cand = list([x for x in cand if x['Trust name'] not in (tested['Trust name'], '')])
for x in cand:
x['sims'] = 0
for key, method, value in crits:
try:
if method == 'is':
                if value == 'same':
value = tested[key]
cand[:] = [x for x in cand if str(x[key]) == str(value)]
elif method == 'isnot':
                if value == 'same':
value = tested[key]
cand[:] = [x for x in cand if str(x[key]) != str(value)]
elif method.endswith('notgets'):
method = method[:-7]
                if method == 'same':
method = tested[key]
for x in cand:
try:
if str(x[key]) != str(method):
x['sims'] += float(value)
except Exception as e:
print(x, flush=True)
pprint(cand)
raise e
elif method.endswith('gets'):
method = method[:-4]
                if method == 'same':
method = tested[key]
for x in cand:
try:
if str(x[key]) == str(method):
x['sims'] += float(value)
except Exception as e:
print(x, flush=True)
pprint(cand)
raise e
elif method == 'wgt':
donecand = []
errorcount = 0
if not any(key in x.keys() for x in cand):
print('{} has not been calculated. Purge and recompile, \
then try again if you want to use this variable.'.format(key))
continue
elif key not in tested.keys():
print('{} has not been calculated for the tested MAT. Purge and recompile, \
then try again if you want to use this variable.'.format(key), flush=True)
continue
for x in cand:
try:
x['sims'] += (abs((float(tested[key]) - float(x[key])) /
float(tested[key]))) * float(value) * 100
except ZeroDivisionError:
pass
except KeyError as e:
x['sims'] += float(value)
errorcount += 1
donecand.append(dict(x))
if errorcount > 0 and go == 0:
print('Weight exception: \'{}\' not found ({}/{})'.format(key, errorcount, len(cand)), flush=True)
cand = donecand[:]
except KeyError:
print(x, tested, key, flush=True)
raise
MATs.close()
for x in cand:
x['sims'] *= 1000
simsmax = max([x['sims'] for x in cand])
simsmin = min([x['sims'] for x in cand])
counterpack = list(cand)
for x in counterpack:
x['sims'] = simsmax - x['sims']
counter = Counter({x['Trust name']: x['sims'] for x in counterpack})
finlist = [(a, int((b / (simsmax + 1)) * 100)) for a, b in counter.most_common()]
if testing:
lastthing = finlist
else:
dict1 = {x: [] for x in defs.ProgressScoreHeaders}
dict2 = {x: [] for x in defs.ProgressScoreHeaders}
for x in finlist[:number]:
for ID in next(item for item in counterpack if item['Trust name'] == x[0])['IDs']:
school = core.get(doc_id=ID)
for y, z in school.items():
if y in defs.ProgressScoreHeaders:
dict1[y].append(float(z))
for ID in tested['IDs']:
school = core.get(doc_id=ID)
for y, z in school.items():
if y in defs.ProgressScoreHeaders:
dict2[y].append(float(z))
resultavg = {}
subjectavg = {}
for x, y in dict1.items():
if len(y) > 0:
resultavg['Average ' + x] = round(np.average(np.array(y)), 2)
else:
resultavg['Average ' + x] = 'NaN'
for x, y in dict2.items():
if len(y) > 0:
subjectavg['Subject ' + x] = round(np.average(np.array(y)), 2)
else:
subjectavg['Subject ' + x] = 'NaN'
lastthing = (subjectavg, resultavg)
return (['{} (Score: {}%)'.format(a, b) for a, b in finlist[:number]], lastthing, MAT)
"""for x in cand:
if x['sims'] == simsmin:
return x"""
| 5,733 |
coleta_dados_fiis.py
|
lucasHashi/coleta-dados-acoes-bdrs-b3
| 0 |
2024476
|
import pandas as pd
import json
from time import sleep
from datetime import datetime
from selenium.common.exceptions import NoSuchElementException
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from fake_useragent import UserAgent
from webdriver_manager.chrome import ChromeDriverManager
from auxiliar_constantes import URL_LINKS_FIIS
def main():
df_fiis = coletar_dados_todos_fiis_B3()
PASTA_DE_DADOS = 'dados_coletados'
NOME_ARQUIVO_fiis = 'dados_completos_fiis'
data_mes_hoje = datetime.today().strftime('%Y-%m')
df_fiis.to_excel('{}\\{}_{}.xlsx'.format(PASTA_DE_DADOS, NOME_ARQUIVO_fiis, data_mes_hoje), index=False)
df_fiis.to_pickle('{}\\{}_{}.pickle'.format(PASTA_DE_DADOS, NOME_ARQUIVO_fiis, data_mes_hoje))
print(df_fiis)
| 805 |
murano-7.0.0/murano/tests/unit/dsl/test_extension_methods.py
|
scottwedge/OpenStack-Stein
| 91 |
2026542
|
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from yaql.language import exceptions
from yaql.language import specs
from yaql.language import yaqltypes
from murano.dsl import dsl
from murano.tests.unit.dsl.foundation import object_model as om
from murano.tests.unit.dsl.foundation import test_case
class TestExtensionMethods(test_case.DslTestCase):
def setUp(self):
@dsl.name('extcls.Extender')
class PythonClass(object):
def __init__(self, arg):
self.value = arg
@staticmethod
@specs.meta('Usage', 'Extension')
@specs.parameter('arg', yaqltypes.Integer())
def python_extension(arg):
return arg * arg
@classmethod
@specs.meta('Usage', 'Extension')
@specs.parameter('arg', yaqltypes.Integer())
def python_extension2(cls, arg):
return cls(2 * arg).value
super(TestExtensionMethods, self).setUp()
self.package_loader.load_class_package(
'extcls.Extender', None).register_class(PythonClass)
self._runner = self.new_runner(om.Object('extcls.TestClass'))
def test_call_self_extension_method(self):
self.assertEqual([123, 123], self._runner.testSelfExtensionMethod())
def test_call_imported_extension_method(self):
self.assertEqual(
[246, 246], self._runner.testImportedExtensionMethod())
def test_call_nullable_extension_method(self):
self.assertEqual(
[123, None], self._runner.testNullableExtensionMethod())
def test_extensions_precedence(self):
self.assertEqual(111, self._runner.testExtensionsPrecedence())
def test_explicit_call(self):
self.assertEqual(222, self._runner.testCallExtensionExplicitly())
def test_explicit_call_on_instance_fails(self):
self.assertRaises(
exceptions.NoMatchingMethodException,
self._runner.testExplicitCallDoenstWorkOnInstance)
def test_call_on_primitive_types(self):
self.assertEqual('qWERTy', self._runner.testCallOnPrimitiveTypes())
def test_call_python_extension(self):
self.assertEqual(16, self._runner.testCallPythonExtension())
def test_call_python_extension_explicitly(self):
self.assertEqual(25, self._runner.testCallPythonExtensionExplicitly())
def test_call_python_classmethod_extension(self):
self.assertEqual(14, self._runner.testCallPythonClassmethodExtension())
| 3,068 |
bites/bite221.py
|
ChidinmaKO/Chobe-bitesofpy
| 0 |
2026769
|
import requests
import json
YOUR_KEY = '123abc'
DEFAULT_LIST = 'hardcover-nonfiction'
URL_NON_FICTION = (f'https://api.nytimes.com/svc/books/v3/lists/current/'
f'{DEFAULT_LIST}.json?api-key={YOUR_KEY}')
URL_FICTION = URL_NON_FICTION.replace('nonfiction', 'fiction')
def get_best_seller_titles(url=URL_NON_FICTION):
"""Use the NY Times Books API endpoint above to get the titles that are
on the best seller list for the longest time.
Return a list of (title, weeks_on_list) tuples, e.g. for the nonfiction:
[('BETWEEN THE WORLD AND ME', 86),
('EDUCATED', 79),
('BECOMING', 41),
('THE SECOND MOUNTAIN', 18),
... 11 more ...
]
Dev docs: https://developer.nytimes.com/docs/books-product/1/overview
"""
with requests.Session() as request:
response = request.get(url)
try:
data = response.json()
except json.JSONDecodeError:
print(f"JSON Decode Error")
outcome = data['results']['books']
# result = [(book['title'], book['weeks_on_list']) for book in outcome]
# sorted_result = sorted(result, key=lambda result: result[1], reverse=True)
# return sorted_result
sorted_weeks = sorted(outcome, key=lambda outcome: outcome['weeks_on_list'], reverse=True)
title_week_tuple = [(book['title'], book['weeks_on_list']) for book in sorted_weeks]
return title_week_tuple
if __name__ == '__main__':
ret = get_best_seller_titles()
print(ret)
# tests
import json
from pathlib import Path
from unittest.mock import patch
from urllib.request import urlretrieve
# from nyt import (get_best_seller_titles, URL_NON_FICTION, URL_FICTION)
TMP = Path('/tmp')
FICTION = TMP / 'nyt-fiction.json'
if not FICTION.exists():
urlretrieve('https://bit.ly/2L7S5zz', FICTION)
NON_FICTION = TMP / 'nyt-nonfiction.json'
if not NON_FICTION.exists():
urlretrieve('https://bit.ly/2LhVvyr', NON_FICTION)
def mocked_requests_get(*args, **kwargs):
"""https://stackoverflow.com/a/28507806"""
class MockResponse:
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
def json(self):
return self.json_data
url = args[0]
fname = NON_FICTION if 'nonfiction' in url else FICTION
with open(fname) as f:
return MockResponse(json.loads(f.read()), 200)
return MockResponse(None, 404)
@patch('requests.get', side_effect=mocked_requests_get)
def test_response_nonfiction(mock_get):
assert get_best_seller_titles(url=URL_NON_FICTION) == [
('BETWEEN THE WORLD AND ME', 86),
('EDUCATED', 79),
('BECOMING', 41),
('THE SECOND MOUNTAIN', 18),
('THE PIONEERS', 16),
('MAYBE YOU SHOULD TALK TO SOMEONE', 14),
('UNFREEDOM OF THE PRESS', 14),
('RANGE', 9),
('THREE WOMEN', 7),
('<NAME>', 3),
('HOW TO BE AN ANTIRACIST', 2),
('KOCHLAND', 2),
('THANK YOU FOR MY SERVICE', 1),
('THE OUTLAW OCEAN', 1),
('GODS OF THE UPPER AIR', 1)
]
@patch('requests.get', side_effect=mocked_requests_get)
def test_response_fiction(mock_get):
assert get_best_seller_titles(url=URL_FICTION) == [
('WHERE THE CRAWDADS SING', 51),
('THE SILENT PATIENT', 25),
('<NAME> STARTS OVER', 7),
('THE NICKEL BOYS', 6),
('ASK AGAIN, YES', 6),
('ONE GOOD DEED', 5),
('THE INN', 3),
('THE TURN OF THE KEY', 3),
('OUTFOX', 3),
('THE BITTERROOTS', 2),
('INLAND', 2),
('OLD BONES', 1),
('THE LAST WIDOW', 1),
('THE WHISPER MAN', 1),
('TIDELANDS', 1)
]
| 3,758 |
smallpebble/nn/training.py
|
AdityaKane2001/SmallPebble
| 84 |
2026901
|
# Copyright 2021 The SmallPebble authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
import math
import numpy
import smallpebble.array_library as np
import smallpebble.core as core
# ---------------- SMALLPEBBLE OPERATIONS
# Operations on variables, that return variables.
# The operations are either,
# written in terms of NumPy/CuPy operations,
# in which case local_gradients needs to be defined,
# or,
# written in terms of other SmallPebble operations, in which case,
# local_gradients doesn't need to be defined.
def leaky_relu(a, alpha=0.02):
"Elementwise leaky relu."
multiplier = np.where(a.array > 0, np.array(1, a.dtype), np.array(alpha, a.dtype))
value = a.array * multiplier
local_gradients = [(a, lambda path_value: path_value * multiplier)]
return core.Variable(value, local_gradients)
def softmax(a, axis=-1):
"Softmax on `axis`."
exp_a = core.exp(a - core.Variable(np.max(a.array)))
# ^ Shift to improve numerical stability. See:
# https://eli.thegreenplace.net/2016/the-softmax-function-and-its-derivative/
sum_shape = list(a.shape)
sum_shape[axis] = 1
return exp_a / core.reshape(core.sum(exp_a, axis=axis), sum_shape)
# ---------------- LEARNABLES
def learnable(variable):
"Flag `variable` as learnable."
variable.is_learnable = True
return variable
def get_learnables(lazy_node):
"Get `variables` where is_learnable=True from a lazy graph."
learnable_vars = []
def find_learnables(node):
for child in getattr(node, "arguments", []):
if getattr(child, "is_learnable", False):
learnable_vars.append(child)
find_learnables(child)
find_learnables(lazy_node)
return learnable_vars
# ----------------
# ---------------- NEURAL NETWORKS
# ----------------
# Helper functions for creating neural networks.
class Adam:
"""Adam optimization for SmallPebble variables.
See Algorithm 1, https://arxiv.org/abs/1412.6980
<NAME>. Adam: A Method for Stochastic Optimization. 2017.
"""
def __init__(self, alpha=0.001, beta1=0.9, beta2=0.999, eps=1e-8):
self.alpha = alpha
self.beta1 = beta1
self.beta2 = beta2
self.eps = eps
self.t = defaultdict(lambda: 0)
self.m = defaultdict(lambda: 0)
self.v = defaultdict(lambda: 0)
def training_step(self, variables, gradients):
for variable in variables:
self.t[variable] += 1
g = gradients[variable]
self.m[variable] = self.beta1 * self.m[variable] + (1 - self.beta1) * g
self.v[variable] = self.beta2 * self.v[variable] + (1 - self.beta2) * g ** 2
m_ = self.m[variable] / (1 - self.beta1 ** self.t[variable])
v_ = self.v[variable] / (1 - self.beta2 ** self.t[variable])
variable.array = variable.array - self.alpha * m_ / (np.sqrt(v_) + self.eps)
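# Illustrative usage sketch (not part of the library): Adam only needs objects that
# expose an `.array` attribute plus a gradients dict keyed by those objects, so a
# toy variable is enough to drive it, e.g.
#
#   class Toy:
#       def __init__(self, array): self.array = array
#   w = Toy(np.zeros(3))
#   opt = Adam(alpha=0.1)
#   for _ in range(200):
#       opt.training_step([w], {w: 2 * (w.array - 1.0)})  # gradient of (w - 1)^2
#   # w.array ends up close to 1.0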
def batch(X, y, size, seed=None):
"Yield sub-batches of X,y, randomly selecting with replacement."
assert (y.ndim == 1) and (X.shape[0] == y.size), "unexpected dimensions."
if seed:
np.random.seed(seed)
while True:
idx = np.random.randint(0, y.size, size)
yield X[idx, ...], y[idx]
def convlayer(height, width, depth, n_kernels, padding="VALID", strides=(1, 1)):
"Create a convolutional neural network layer."
sigma = np.sqrt(6 / (height * width * depth + height * width * n_kernels))
kernels_init = sigma * (np.random.random([height, width, depth, n_kernels]) - 0.5)
kernels = learnable(core.Variable(kernels_init))
def func(images, kernels):
return core.conv2d(images, kernels, padding, strides)
return lambda images: core.Lazy(func)(images, kernels)
def cross_entropy(y_pred: core.Variable, y_true: np.array, axis=-1) -> core.Variable:
"""Cross entropy.
Args:
y_pred: A sp.Variable instance of shape [batch_size, n_classes]
y_true: A NumPy array, of shape [batch_size], containing the true class labels.
Returns:
A scalar, reduced by summation.
"""
indices = (np.arange(len(y_true)), y_true)
    return core.neg(core.sum(core.log(core.getitem(y_pred, indices))))
def he_init(insize, outsize) -> np.array:
"He weight initialization."
sigma = np.sqrt(4 / (insize + outsize))
return np.random.random([insize, outsize]) * sigma - sigma / 2
def linearlayer(insize, outsize) -> core.Lazy:
"Create a linear fully connected neural network layer."
weights = learnable(core.Variable(he_init(insize, outsize)))
bias = learnable(core.Variable(np.ones([outsize], np.float32)))
def func(a, weights, bias):
return core.matmul(a, weights) + bias
return lambda a: core.Lazy(func)(a, weights, bias)
def onehot(y, n_classes) -> np.array:
"Onehot encode vector y with classes 0 to n_classes-1."
result = np.zeros([len(y), n_classes])
result[np.arange(len(y)), y] = 1
return result
def sgd_step(variables, gradients, learning_rate=0.001) -> None:
"A single step of gradient descent. Modifies each variable.array directly."
for variable in variables:
gradient = gradients[variable]
variable.array -= learning_rate * gradient
| 5,789 |
typeidea/typeidea/settings/develop.py
|
persue/typeidea
| 0 |
2024847
|
# -*- coding: utf-8 -*-
from .base import * # NOQA
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
'''
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
'''
DATABASES = {
'default':{
'ENGINE': 'django.db.backends.mysql',
'NAME':'typeidea_db',
'USER':'root',
'PASSWORD':'<PASSWORD>',
'HOST':'127.0.0.1',
'PORT':3306,
        #'CONN_MAX_AGE': 5*60,
'OPTIONS':{'charset':'utf8mb4'},
}
}
INSTALLED_APPS += [
'debug_toolbar',
]
MIDDLEWARE += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
DEBUG_TOOLBAR_CONFIG = {
# Toolbar options
'JQUERY_URL': '//cdn.bootcss.com/jquery/3.3.1/jquery.min.js',
}
INTERNAL_IPS = ['127.0.0.1']
STATIC_ROOT = os.path.join(BASE_DIR, 'static_files/')
# cache
CACHES ={
'default':{
'BACKEND':'django.core.cache.backends.locmem.LocMemCache',
'LOCATION':'unique-snowflake',
}
}
| 1,668 |
piperci_gman/orm/models.py
|
AFCYBER-DREAM/piedpiper-gman
| 0 |
2024055
|
import uuid
from urllib.parse import urlparse, urlunparse, ParseResult
from marshmallow import fields as marshmallow_fields
from marshmallow_peewee import ModelSchema, Related
from marshmallow_peewee.convert import ModelConverter
from peewee import (DatabaseProxy,
DateTimeField,
ForeignKeyField,
IntegerField,
Model,
DoesNotExist,
SqliteDatabase,
TextField)
import subresource_integrity as integrity
db = DatabaseProxy()
class ZeroResults(DoesNotExist):
pass
class QueryFailed(Exception):
pass
class SRIField(TextField):
'''A field type that stores an HTML SRI value
    Value must be of the form "<algorithm>-<base64 digest>", e.g. "sha384-...".
'''
def db_value(self, value):
if isinstance(value, integrity.Hash):
return str(value)
else:
try:
return str(integrity.parse(value)[0])
except IndexError:
raise ValueError('This does not appear to be of type "Hash" or "str"')
def python_value(self, value):
return integrity.parse(value)[0]
class URIField(TextField):
def db_value(self, value):
if isinstance(value, ParseResult):
return urlunparse(value)
elif isinstance(value, str):
return urlunparse(urlparse(value))
else:
raise ValueError(f'Non urlparse parseable type {type(value)}')
def python_value(self, value):
if isinstance(value, ParseResult):
return value
elif isinstance(value, str):
return urlparse(value)
else:
raise ValueError(f'Non urlparse parseable type {type(value)}')
def _serialize(self, value, attr, obj, **kwargs):
return urlunparse(value)
def _deserialize(self, value, attr, data, **kwargs):
return urlparse(value)
class Task(Model):
task_id = TextField(primary_key=True, default=uuid.uuid4)
run_id = TextField(null=False)
project = TextField(null=False)
caller = TextField(null=False)
parent_id = TextField(null=True)
thread_id = TextField(null=True)
class Meta:
database = db
class TaskEvent(Model):
event_id = TextField(primary_key=True, default=uuid.uuid4)
task = ForeignKeyField(Task)
timestamp = DateTimeField()
status = TextField(null=False, choices=(('started', 'started'),
('completed', 'completed'),
('failed', 'failed'),
('delegated', 'delegated'),
('received', 'received'),
('info', 'info')))
message = TextField(null=False)
return_code = IntegerField(null=True)
class Meta:
database = db
class ExtendedConverter(ModelConverter):
def __new__(cls, **kwargs):
cls.TYPE_MAPPING.append((URIField, marshmallow_fields.String))
return ModelConverter.__new__(cls)
def convert_field(self, field, **kwargs):
converted = super(ExtendedConverter, self).convert_field(field, **kwargs)
if hasattr(field, '_deserialize'):
converted._deserialize = field._deserialize
if hasattr(field, '_serialize'):
converted._serialize = field._serialize
return converted
class TaskSchema(ModelSchema):
class Meta:
model = Task
class TaskEventSchema(ModelSchema):
task = Related()
class Meta:
model = TaskEvent
class Artifact(Model):
artifact_id = TextField(primary_key=True, default=uuid.uuid4)
uri = URIField(null=False)
sri = SRIField(null=False)
status = TextField(null=False, choices=(('unique', 'unique'),
('collision', 'collision'),
('deleted', 'deleted'),
('unknown', 'unknown')))
type = TextField(null=False, choices=(('log', 'log'),
('container', 'container'),
('artifact', 'artifact'),
('source', 'source')))
task = ForeignKeyField(Task)
event_id = ForeignKeyField(TaskEvent)
class Meta:
database = db
class ArtifactSchema(ModelSchema):
task = Related()
class Meta:
model = Artifact
model_converter = ExtendedConverter
def db_init(db_config):
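    # `db_config` is expected to expose `.type` and `.uri`; e.g. (illustrative only)
    # a config object with type='sqlite' and uri=':memory:' gives an in-memory test DB.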
if db_config.type == 'sqlite':
db_run = SqliteDatabase(db_config.uri,
pragmas={'foreign_keys': 1})
else:
raise Exception(f'Database type {db_config.type}'
' not yet supported')
# Configure our proxy to use the db we specified in config.
db.initialize(db_run)
db.connect()
db.create_tables([Task, TaskEvent, Artifact], safe=True)
| 5,032 |
api/tests/opentrons/protocol_api_experimental/test_well.py
|
anuwrag/opentrons
| 235 |
2026102
|
"""Tests for the Protocol API v3 Well interface."""
import pytest
from decoy import Decoy
from opentrons.protocols.models import WellDefinition
from opentrons.protocol_engine.clients import SyncClient as ProtocolEngineClient
from opentrons.protocol_api_experimental import Labware, Well
from opentrons_shared_data.labware import dev_types
@pytest.fixture
def engine_client(decoy: Decoy) -> ProtocolEngineClient:
"""Get a mock instance of a ProtocolEngineClient."""
return decoy.mock(cls=ProtocolEngineClient)
@pytest.fixture
def labware(decoy: Decoy) -> Labware:
"""Get a mock instance of a Labware."""
lw = decoy.mock(cls=Labware)
lw.labware_id = "labware_id" # type: ignore
return lw
@pytest.fixture
def min_well_definition(
minimal_labware_def: dev_types.LabwareDefinition,
) -> WellDefinition:
"""Create a well definition fixture."""
well_def = minimal_labware_def["wells"]["A1"]
return WellDefinition.parse_obj(well_def)
@pytest.fixture
def subject(
decoy: Decoy,
engine_client: ProtocolEngineClient,
labware: Labware,
min_well_definition: WellDefinition,
) -> Well:
"""Get a Well test subject with its dependencies mocked out."""
decoy.when(
engine_client.state.labware.get_well_definition(
labware_id="labware_id", well_name="well_name"
)
).then_return(min_well_definition)
return Well(engine_client=engine_client, labware=labware, well_name="well_name")
def test_max_volume(subject: Well) -> None:
"""Test Well has max_volume property."""
assert subject.max_volume == 100
def test_diameter(subject: Well) -> None:
"""Test Well has diameter property."""
assert subject.diameter == 30.0
def test_depth(subject: Well) -> None:
"""Test Well has depth property."""
assert subject.depth == 40.0
def test_equality(
decoy: Decoy,
engine_client: ProtocolEngineClient,
labware: Labware,
min_well_definition: WellDefinition,
subject: Well,
) -> None:
"""Two same-named Wells in the same labware should be considered equal."""
decoy.when(
engine_client.state.labware.get_well_definition(
labware_id="labware_id", well_name="well_name"
)
).then_return(min_well_definition)
well1 = Well(engine_client=engine_client, labware=labware, well_name="well_name")
well2 = Well(engine_client=engine_client, labware=labware, well_name="well_name")
assert well1 == well2
| 2,467 |
math/approxes/val_in_point_on_equallyspaced_points.py
|
dasfex/Algorithms
| 1 |
2026632
|
#! /usr/bin/env python3
import sympy as sp
import numpy as np
from datetime import datetime as dt
def F(x):
return np.exp(-x * x)
def getEquallySpacedPoints(l, r, count):
X, Y = [], []
sh = (r - l) / (count - 1)
for i in np.arange(l, r + 0.0000001, sh):
X.append(i)
Y.append(F(i))
return [X, Y, sh]
def newtonPolynomial(X, Y):
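    # Builds the Newton-form interpolant
    #   P(x) = f[x0] + f[x0,x1]*(x - x0) + f[x0,x1,x2]*(x - x0)*(x - x1) + ...
    # assuming the divided differences f[x0..xi] are stored in Y[0][i].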
x = sp.symbols('x')
polynom = 0
for i in range(len(X)):
res = Y[0][i]
for j in range(i):
res *= (x - X[j])
polynom += res
polynom = sp.simplify(polynom)
return polynom
def value_in_point(X, sh, count, point):
nghbr_points = []
nghbr = 0
for i in range(count):
if np.abs(X[i] - point) <= sh:
nghbr_points.append(X[i])
nghbr = i
nghbr -= 1
Y = np.zeros(shape=(count, count))
for i in range(2):
Y[i][i] = F(nghbr_points[i])
Y[0][1] = (Y[0][0] - Y[1][1]) / (-sh)
x = sp.symbols('x')
current_val = newtonPolynomial(nghbr_points, Y).subs(x, point)
new_val = current_val + 1
offset, add, sign, ind = -1, -1, -1, 1
while abs(current_val - new_val) > 1e-10:
nghbr_points.append(X[nghbr + add])
sz = len(nghbr_points) - 1
for i in range(sz, -1, -1):
if i == sz:
Y[i][sz] = F(nghbr_points[sz])
else:
Y[i][sz] = (Y[i][sz - 1] - Y[i + 1][sz]) / \
(nghbr_points[i] - nghbr_points[sz])
offset *= sign
if offset > 0:
ind += 1
if 0 <= nghbr + offset * ind < count:
add = offset * ind
elif nghbr + offset * ind >= count:
offset, sign = 1, 1
add = -ind
elif nghbr + offset * ind < 0:
sign = 1
add += 1
current_val = new_val
new_val = newtonPolynomial(nghbr_points, Y).subs(x, point)
return new_val, len(nghbr_points)
def value_in_points(l, r, count, xs):
info = getEquallySpacedPoints(l, r, count)
for point in xs:
start = dt.now()
val, cnt = value_in_point(info[0], info[2], count, point)
end = dt.now()
print('Value in {} point ended in {} seconds. '
'Value is {}. {} knots.'
.format(point, end - start, val, cnt))
def main():
l, r = -5, 5
value_in_points(l, r, 100, [np.sqrt(2) / 2, np.pi / 7])
main()
| 2,424 |
src/reversion_rest_framework/mixins.py
|
anudeepsamaiya/django-reversion-rest-framework
| 0 |
2026891
|
import reversion
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from reversion.models import Version
from .serializers import VersionSerializer
class HistoryModelMixin:
version_model = None
version_serializer = VersionSerializer
def _get_version_model(self):
if self.version_model:
return self.version_model
serializer_class = self.get_serializer_class()
if issubclass(serializer_class, ModelSerializer):
return serializer_class.Meta.model
@action(detail=True, methods=['GET'], name='Get History')
def history(self, request, pk=None):
object = self.get_object()
versions = Version.objects.get_for_object(object)
serializer = self.version_serializer(versions, many=True)
return Response(serializer.data)
@action(detail=False, methods=['GET'], name='Get Deleted')
def deleted(self, request):
versions = Version.objects.get_deleted(self._get_version_model())
versions = versions.order_by('-revision__date_created')
serializer = self.version_serializer(versions, many=True)
return Response(serializer.data)
@action(detail=True, methods=['POST'], name='Revert Version',
            url_path=r'revert/(?P<version_pk>\d+)')
def revert(self, request, pk=None, version_pk=None, *args, **kwargs):
if not version_pk:
return Response(
{'error': 'Invalid Version Id'},
status=status.HTTP_400_BAD_REQUEST,
)
object = self.get_object()
versions = Version.objects.get_for_object_reference(object, pk)
version = versions.filter(pk=version_pk).first()
if not version:
return Response(
{'error': 'Version Not Found'},
status=status.HTTP_404_NOT_FOUND,
)
try:
version.revision.revert()
object.refresh_from_db()
with reversion.create_revision():
object.save()
reversion.set_user(request.user)
reversion.set_comment(
'Reverted to version {}'.format(version_pk))
except Exception as e:
return Response(
{'error': 'Reverting Failed', 'msg': str(e)},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = self.version_serializer(version)
return Response(serializer.data)
| 2,571 |
proj/config/configer.py
|
NanYoMy/mmregnet
| 7 |
2027024
|
import configparser
import os
import sys
class RegParser:
def __init__(self, argv='', config_type='all'):
filename_=None
nargs_ = len(argv)
if nargs_ == 3 or nargs_==4:
if (argv[1] == '-h') or (argv[1] == '-help'):
self.print_help()
exit()
filename_ = argv[1]
else:
# filename_ = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../config_demo.ini"))
# print('Reading default config dirutil in: %s.' % filename_)
# exit(-9999)
pass
self.config_file = configparser.ConfigParser()
if filename_ is not None:
self.config_file.read(filename_)
else:
print('Using defaults due to missing config dirutil.')
self.config_type = config_type.lower()
self.config = self.get_defaults()
self.check_defaults()
self.print()
def check_defaults(self):
for section_key in self.config.keys():
if section_key in self.config_file:
for key, value in self.config[section_key].items():
if key in self.config_file[section_key] and self.config_file[section_key][key]:
if type(value) == str:
self.config[section_key][key] = os.path.expanduser(self.config_file[section_key][key])
else:
self.config[section_key][key] = eval(self.config_file[section_key][key])
# else:
# print('Default set in [''%s'']: %s = %s' % (section_key, key, value))
# else:
# print('Default section set: [''%s'']' % section_key)
def __getitem__(self, key):
return self.config[key]
def print(self):
print('')
for section_key, section_value in self.config.items():
for key, value in section_value.items():
print('[''%s'']: %s: %s' % (section_key, key, value))
print('')
def get_defaults(self):
home_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../"))
network = {'network_type': 'local'}
data = {'dir_moving_image': os.path.join(home_dir, 'data/train/mr_images'),
'dir_fixed_image': os.path.join(home_dir, 'data/train/us_images'),
'dir_moving_label': os.path.join(home_dir, 'data/train/mr_labels'),
'dir_fixed_label': os.path.join(home_dir, 'data/train/us_labels'),
'ori_train_img':"error",
'ori_train_lab':'error',
'structure':'heart',
'tag':'error',
'ori_test_img':'error',
'ori_test_lab':'error',
'mannual_train_crop_img':"error",
'mannual_train_crop_lab':'error',
'mannual_test_crop_img':'error'}
loss = {'similarity_type': 'dice',
'similarity_scales': [0, 1, 2, 4, 8, 16],
'regulariser_type': 'bending',
'regulariser_weight': 0.5,
'consistent_weight':0.01}
train = {'total_iterations': int(1e5),
'learning_rate': 1e-5,
'minibatch_size': 2,
'freq_info_print': 100,
'freq_model_save': 500,
'file_model_save': os.path.join(home_dir, 'data/model.ckpt')}
inference = {'file_model_saved': train['file_model_save'],
'dir_moving_image': os.path.join(home_dir, 'data/test/mr_images'),
'dir_fixed_image': os.path.join(home_dir, 'data/test/us_images'),
'dir_save': os.path.join(home_dir, 'data/'),
'dir_moving_label': '',
'dir_fixed_label': '',
'ori_test_img':'error',
'ori_test_lab':'error'}
if self.config_type == 'training':
config = {'Data': data, 'Network': network, 'Loss': loss, 'Train': train}
elif self.config_type == 'inference':
config = {'Network': network, 'Inference': inference}
else:
config = {'Data': data, 'Network': network, 'Loss': loss, 'Train': train, 'Inference': inference}
return config
@staticmethod
def print_help():
print('\n'.join([
'',
'************************************************************',
' Weakly-Supervised CNNs for Multimodal Image Registration',
' 2018 <NAME> <<EMAIL>> ',
' LabelReg package is licensed under: ',
' http://www.apache.org/licenses/LICENSE-2.0',
'************************************************************',
'',
'Training script:',
' python3 training_20.py myConfig.ini',
'',
'Inference script:',
' python3 _inference_test_20.py myConfig.ini',
'',
'Options in config dirutil myConfig.ini:',
' network_type: {local, global, composite}',
' similarity_type: {dice, cross-entropy, mean-squared, jaccard}',
' regulariser_type: {bending, gradient-l2, gradient-l1}',
'See other parameters in the template config dirutil config_demo.ini.',
''
]))
# reg_config = RegParser(sys.argv, 'all')
class VoteNetParser(RegParser):
def __init__(self, argv='', config_type='all'):
filename_=None
nargs_ = len(argv)
if nargs_ == 3 or nargs_==4:
if (argv[1] == '-h') or (argv[1] == '-help'):
self.print_help()
exit()
filename_ = argv[2]
else:
# filename_ = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../config_demo.ini"))
# print('Reading default config dirutil in: %s.' % filename_)
# exit(-9999)
print("")
self.config_file = configparser.ConfigParser()
if filename_ is not None:
self.config_file.read(filename_)
else:
print('Using defaults due to missing config dirutil.')
self.config_type = config_type.lower()
self.config = self.get_defaults()
self.check_defaults()
self.print()
def get_defaults(self):
home_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../"))
network = {'network_type': 'local'}
generator={
'output_train_dir':"error",
'output_test_dir':"error",
'reg_model': "error"
}
data = {'dir_atlas_image': "error",
'dir_target_image': "error",
'dir_atlas_label': "error",
'dir_target_label': "error",
'structure':'myo',
'tag':'error'}
loss = {'similarity_type': 'dice',
'similarity_scales': [0, 1, 2, 4, 8, 16],
'regulariser_weight': 0.5
}
train = {'total_iterations': int(1e5),
'learning_rate': 1e-5,
'minibatch_size': 2,
'freq_info_print': 100,
'freq_model_save': 500,
'file_model_save': os.path.join(home_dir, 'data/model.ckpt')}
inference = {'file_model_saved': train['file_model_save'],
'dir_atlas_image': "",
'dir_target_image': "",
'dir_save': "",
'dir_atlas_label': '',
'dir_target_label': '',
'fusion_out':'',
'forward_process_start_dir':''}
if self.config_type == 'training':
config = {'Data': data, 'Network': network, 'Loss': loss, 'Train': train}
elif self.config_type == 'inference':
config = {'Network': network, 'Inference': inference}
elif self.config_type=='generator':
config = {'Generator':generator}
else:
config = {'Data': data, 'Network': network, 'Loss': loss,'Generator':generator, 'Train': train, 'Inference': inference}
return config
# vote_config = VoteNetParser(sys.argv, 'all')
# if vote_config["Generator"]['reg_model']!= reg_config['Train']['file_model_save']:
# print("error!!!!! the model didn't match")
# exit(-998)
reg_config = None
vote_config = None

def get_reg_config():
    global reg_config
    if reg_config is None:
        reg_config = RegParser(sys.argv, 'all')
    return reg_config

def get_vote_config():
    global vote_config
    if vote_config is None:
        vote_config = VoteNetParser(sys.argv, 'all')
    return vote_config
| 8,802 |
pycon/migrations/0005_edusummittalkproposal.py
|
azkarmoulana/pycon
| 154 |
2026832
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('proposals', '0003_set_cached_tags'),
('pycon', '0004_specialevent'),
]
operations = [
migrations.CreateModel(
name='EduSummitTalkProposal',
fields=[
('proposalbase_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='proposals.ProposalBase')),
('audience_level', models.IntegerField(help_text='Level of audience expertise assumed in Python.', verbose_name='Python level', choices=[(1, 'Novice'), (3, 'Intermediate'), (2, 'Experienced')])),
('overall_status', models.IntegerField(default=1, help_text='The status of the proposal.', choices=[(1, b'Not Yet Reviewed'), (2, b'In Kittendome'), (3, b'In Thunderdome'), (4, b'Accepted'), (5, b'Damaged'), (6, b'Rejected')])),
('damaged_score', models.IntegerField(help_text="Numerical indicator of the amount of interest in a talk set to 'damaged' status.", null=True, blank=True)),
('rejection_status', models.IntegerField(blank=True, help_text='The reason the proposal was rejected.', null=True, choices=[(1, b'Suggest re-submission as poster.'), (2, b'Suggest lightning talk.'), (3, b'Re-submitted under appropriate category.'), (4, b'Duplicate'), (5, b'Administrative Action (Other)'), (6, b"No really: rejected. It's just plain bad.")])),
('recording_release', models.BooleanField(default=True, help_text="By submitting your talk proposal, you agree to give permission to the Python Software Foundation to record, edit, and release audio and/or video of your presentation. If you do not agree to this, please uncheck this box. See <a href='https://us.pycon.org/2017/speaking/recording/' target='_blank'>PyCon 2017 Recording Release</a> for details.")),
('additional_requirements', models.TextField(help_text="Please let us know if you have any specific needs (A/V requirements, multiple microphones, a table, etc). Note for example that 'audio out' is not provided for your computer unless you tell us in advance.", verbose_name='Additional requirements', blank=True)),
('category', models.ForeignKey(to='pycon.PyConProposalCategory')),
],
options={
'verbose_name': 'Python Education Summit talk proposal',
},
bases=('proposals.proposalbase',),
),
]
| 2,581 |
vae/vae.py
|
Taoerwang/tensorflow-models-with-simple-ways
| 2 |
2026249
|
from __future__ import print_function, division
import tensorflow as tf
from tensorflow import keras
import numpy as np
from keras.utils import np_utils
import matplotlib.pyplot as plt
import os
class VAE:
model_name = "VAE"
    paper_name = "Auto-Encoding Variational Bayes (Variational Autoencoder)"
paper_url = "https://arxiv.org/abs/1312.6114"
data_sets = "MNIST and Fashion-MNIST"
def __init__(self, data_name):
self.data_name = data_name
self.img_counts = 60000
self.img_rows = 28
self.img_cols = 28
self.dim = 1
self.noise_dim = 100
(self.train_images, self.train_labels), (self.test_images, self.test_labels) = self.load_data()
self.train_images = np.reshape(self.train_images, (-1, self.img_rows * self.img_cols)) / 255
self.test_images = np.reshape(self.test_images, (-1, self.img_rows * self.img_cols)) / 255
self.train_labels_one_dim = self.train_labels
self.test_labels_one_dim = self.test_labels
self.train_labels = np_utils.to_categorical(self.train_labels)
self.test_labels = np_utils.to_categorical(self.test_labels)
def load_data(self):
if self.data_name == "fashion_mnist":
data_sets = keras.datasets.fashion_mnist
elif self.data_name == "mnist":
data_sets = keras.datasets.mnist
else:
data_sets = keras.datasets.mnist
return data_sets.load_data()
def encoder(self, e):
with tf.variable_scope('encoder', reuse=tf.AUTO_REUSE):
e = tf.layers.dense(e, 1024, tf.nn.relu, name='e_1')
e = tf.layers.dense(e, 512, tf.nn.relu, name='e_2')
e = tf.layers.dense(e, 256, tf.nn.relu, name='e_3')
            out_mean = tf.layers.dense(e, 2, name='e_4')
            # use a distinct layer name so the stddev head gets its own weights instead of reusing the mean head's
            out_stddev = 1e-6 + tf.layers.dense(e, 2, tf.nn.softplus, name='e_5')
return out_mean, out_stddev
def decoder(self, d):
with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):
d = tf.layers.dense(d, 128, tf.nn.relu, name='d_1')
d = tf.layers.dense(d, 256, tf.nn.relu, name='d_2')
d = tf.layers.dense(d, 512, tf.nn.relu, name='d_3')
out = tf.layers.dense(d, 784, name='d_4')
return out
def build_model(self, learning_rate=0.0002):
x_real = tf.placeholder(tf.float32, [None, self.img_rows * self.img_cols])
z_noise = tf.placeholder(tf.float32, [None, 2])
z_mean, z_stddev = self.encoder(x_real)
        # We could fit log(sigma^2) rather than sigma^2 directly: sigma^2 is always non-negative
        # and would need an activation to enforce that, while log(sigma^2) can be positive or
        # negative and therefore needs no activation.
# guessed_z = z_mean + tf.exp(z_log_stddev2 / 2) * samples
samples = tf.random_normal(tf.shape(z_stddev), 0, 1, dtype=tf.float32)
guessed_z = z_mean + z_stddev * samples
x_fake = self.decoder(guessed_z)
z_real = self.decoder(z_noise)
# marginal_likelihood = -tf.reduce_sum(
# x_real * tf.log(1e-8 + x_fake) + (1 - x_real) * tf.log(1e-8 + 1 - x_fake),1)
        # The helper below computes the same quantity as the commented-out lines above,
        # given that the decoder output omits the final sigmoid activation (i.e. produces logits).
marginal_likelihood = tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=x_fake, labels=x_real),
reduction_indices=1)
        # # fitting log(sigma^2)
# kl_divergence = -0.5 * tf.reduce_sum(1 + z_stddev - tf.pow(z_mean, 2) - tf.exp(z_stddev),
# reduction_indices=1)
        # fitting sigma
kl_divergence = 0.5 * tf.reduce_sum(
tf.square(z_mean) + tf.square(z_stddev - 1), 1)
# kl_divergence = 0.5 * tf.reduce_sum(
# tf.square(z_mean) + tf.square(z_stddev) - tf.log(1e-8 + tf.square(z_stddev)) - 1, 1)
cost = tf.reduce_mean(marginal_likelihood + kl_divergence)
t_vars = tf.trainable_variables()
        e_d_vars = [var for var in t_vars if 'encoder' in var.name or 'decoder' in var.name]
        optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost, var_list=e_d_vars)
return x_real, x_fake, cost, optimizer, z_noise, z_real, guessed_z
def train(self, train_steps=100000, batch_size=100, learning_rate=0.001, save_model_numbers=3):
x_real, x_fake, cost, optimizer, z_noise, z_real, guessed_z = self.build_model(learning_rate)
saver = tf.train.Saver(max_to_keep=save_model_numbers)
if not os.path.exists('out/'):
os.makedirs('out/')
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# merged_summary_op = tf.summary.merge_all()
# summary_writer = tf.summary.FileWriter('log/mnist_with_summaries', sess.graph)
for i in range(train_steps):
batch_index = np.random.randint(0, self.img_counts, batch_size)
batch_real = self.train_images[batch_index]
sess.run(optimizer, feed_dict={x_real: batch_real})
if i % 1000 == 0:
auto_encoder_loss_curr = sess.run(cost, feed_dict={x_real: batch_real})
print('step: ' + str(i))
print('D_loss: ' + str(auto_encoder_loss_curr))
print()
saver.save(sess, 'ckpt/mnist.ckpt', global_step=i)
x_fake_ = sess.run(x_fake, feed_dict={x_real: batch_real})
r, c = 10, 10
fig, axs = plt.subplots(r, c)
cnt = 0
for p in range(r):
for q in range(c):
axs[p, q].imshow(np.reshape(batch_real[cnt], (28, 28)), cmap='gray')
axs[p, q].axis('off')
cnt += 1
fig.savefig("out/%d_real.png" % i)
plt.close()
r, c = 10, 10
fig, axs = plt.subplots(r, c)
cnt = 0
for p in range(r):
for q in range(c):
axs[p, q].imshow(np.reshape(x_fake_[cnt], (28, 28)), cmap='gray')
axs[p, q].axis('off')
cnt += 1
fig.savefig("out/%d_fake.png" % i)
plt.close()
test_z = sess.run(guessed_z, feed_dict={x_real: self.train_images})
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(test_z[:, 0], test_z[:, 1], c=self.train_labels_one_dim, s=1)
fig.savefig("out/%d_prediction.png" % i)
plt.close()
def restore_model(self):
x_real, x_fake, cost, optimizer, z_noise, z_real, guessed_z = self.build_model()
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
model_file = tf.train.latest_checkpoint('ckpt/')
saver.restore(sess, model_file)
test_z = sess.run(guessed_z, feed_dict={x_real: self.test_images})
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
# ax.imshow(np.reshape(test_z, (28, 28)), cmap='gray')
ax.scatter(test_z[:, 0], test_z[:, 1], c=self.test_labels_one_dim, s=1)
# ax.scatter(test_z[:, 0], test_z[:, 1], s=10)
# ax.scatter(x[:, 0], x[:, 2], x[:, 3], c=y, s=10)
plt.show()
if __name__ == '__main__':
data = ['fashion_mnist', 'mnist']
model = VAE(data[1])
model.train()
# model.restore_model()
| 7,549 |
run.py
|
rajacuannih/Yahu
| 0 |
2024448
|
#!/usr/bin/env python
import os, uuid, pause, json, sys
from subprocess import call
from faker import Faker
def gen_uname(full_name):
a = full_name.split()
b = str(uuid.uuid4()).split("-")[-1].join([v for v in a])
return b
if __name__=="__main__":
fake = Faker()
a = open('n.txt').read().splitlines()
i = 0
for b in a:
try:
i+=1
fake_name = fake.name()
fname = fake_name.split()[0]
lname = fake_name.split()[1]
full_name = fname + " " + lname
username = gen_uname(full_name)
data = {
"first_name":fname,
"last_name":lname,
"phone_number":b,
"username":username
}
with open("data.json", "a+") as f:
f.write(json.dumps(data, indent=4) + "\n")
f.close()
pause.seconds(5)
ARGUMENT = f"{data['first_name']},{data['last_name']},{username},{data['phone_number']}"
call(f"start cmd /c python main.py {ARGUMENT}", cwd=r'C:\\Users\\botpo\\john-doe\\python\\Yahoo', shell=True)
if i%2==0:
x= input("Press any key to the next 2 numbers..")
except KeyboardInterrupt:
x=input("Press any key to continue..")
if "y" in x:
continue
else:
sys.exit(0)
except Exception as e:
continue
| 1,217 |
kubeconform/analyse.py
|
kirecek/dockerfiles
| 20 |
2026368
|
#!/usr/bin/env python3
#
# This script iterates over all kustomize overlays under specified
# directory, renders the resulting manifests and performs a kubeconform check on them
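# A hypothetical invocation (the directory name is illustrative; "default" is passed through
# to kubeconform's -schema-location flag):
#   KUBECONFORM_K8S_VERSION=1.27.0 ./analyse.py deploy -schema-location default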
import subprocess
import tempfile
import argparse
import os.path
import sys
from pathlib import Path
parser = argparse.ArgumentParser()
parser.add_argument('analyse_dir', nargs='?', default='.')
parser.add_argument('-schema-location', action='append')
args = parser.parse_args()
kubeconform_cache = os.environ.get('KUBECONFORM_CACHE')
if not kubeconform_cache:
kubeconform_cache = tempfile.mkdtemp()
k8s_version = os.environ.get('KUBECONFORM_K8S_VERSION')
def analyse_overlay(overlay_dir):
print(f"🧐 {overlay_dir}: \t", end='', flush=True)
extra_args = []
if args.schema_location:
for location in args.schema_location:
extra_args.extend(['-schema-location', location])
if k8s_version:
extra_args.extend(['-kubernetes-version', k8s_version])
p1 = subprocess.Popen(["kubectl", "kustomize", overlay_dir], stdout=subprocess.PIPE)
p2 = subprocess.Popen(
["kubeconform", "-summary", "-exit-on-error", "-cache", kubeconform_cache, *extra_args],
stdin=p1.stdout,
)
p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.
return_code = p2.wait()
if return_code != 0:
raise SystemExit(return_code)
def analyse_k8s(dir):
overlays_dir = Path(dir) / 'overlays'
if overlays_dir.is_dir():
overlay_dirs = list(overlays_dir.glob('*'))
else:
overlay_dirs = list(Path(dir).glob('*/overlays/*'))
if len(overlay_dirs) == 0:
print('No overlay directories found', file=sys.stderr)
raise SystemExit(1)
for overlay_dir in overlay_dirs:
analyse_overlay(overlay_dir)
if __name__ == '__main__':
os.makedirs(kubeconform_cache, exist_ok=True)
analyse_k8s(args.analyse_dir)
| 1,908 |
beta_rec/experiment/experiment.py
|
mengzaiqiao/TVBR
| 126 |
2026147
|
# coding=utf-8
"""
This is the implementation of experimental pipeline.
This class is still under development.
"""
import pandas as pd
from tabulate import tabulate
def print_result_as_table(results, tag=None):
"""Print results as a table."""
eval_infos = set()
for result in results:
eval_infos.update(result.keys())
eval_infos = list(eval_infos)
print("-" * 80)
if tag is not None:
print(tag)
for result in results:
for eval_info in eval_infos:
if eval_info not in result:
result["eval_info"] = "--"
df = pd.DataFrame(results)
df = df.set_index("model")
df = df.T
print(tabulate(df, headers=df.columns, tablefmt="psql"))
print("-" * 80)
class Experiment:
"""This enables the flow of an experiment with the beta-rec platform.
Args:
datasets: array of :obj: '<beta_rec.datasets>', required
the experimental datasets (e.g. MovieLens)
    eval_methods: array of string, required
the evaluation method (e.g. ['load_leave_one_out'])
models: array of :obj:`<beta_rec.recommenders>`, required
A collection of recommender models to evaluate, e.g., [MF, GCN].
metrics: array of string, default: None and every model has its default
evaluation metrics in the configuration file.
A collection of metrics to use to evaluate all the recommender
models, e.g., ['ndcg', 'precision', 'recall'].
    eval_scopes: array of integer, default: None and every model has its default
        evaluation scopes in the configuration file.
        A list of integer values defining the evaluation scopes,
        e.g., [1, 10, 20].
model_dir: str, optional, default: None
Path to a directory for loading a pretrained model
save_dir: str, optional, default: None
Path to a directory for storing trained models and logs. If None,
models will NOT be stored and logs will be saved in the current
working directory.
    result_file: str, optional, default: None, in which case every model is saved
        to the result file indicated in its configuration.
        The name of the result file starts with the model name
        followed by the given result_file string as a suffix.
"""
def __init__(
self,
datasets,
models,
metrics=None,
eval_scopes=None,
model_dir=None,
result_file=None,
save_dir=None,
):
"""Initialise required inputs for the expriment pipeline."""
self.datasets = datasets
self.models = models
self.metrics = metrics
self.eval_scopes = eval_scopes
self.result_file = result_file
self.save_dir = save_dir
self.update_config()
def run(self):
"""Run the experiment."""
results = []
for data in self.datasets:
for model in self.models:
model.train(data)
result = model.test(data.test[0])
results.extend(result)
print_result_as_table(results)
def load_pretrained_model(self):
"""Load the pretrained model."""
for data in self.datasets:
for model in self.models:
model.init_engine(data)
model.load(model_dir=self.model_dir)
model.predict(data.test[0])
def update_config(self):
"""Update the configuration of models."""
if self.metrics is not None:
for model in self.models:
model.config["system"]["metrics"] = self.metrics
if self.eval_scopes is not None:
for model in self.models:
model.config["system"]["k"] = self.eval_scopes
if self.result_file is not None:
for idx, model in enumerate(self.models):
model.config["system"]["result_file"] = (
"model_"
+ str(idx)
+ "_"
+ self.config["model"]["model"]
+ "_"
+ self.result_file
)
if self.save_dir is not None:
for model in self.models:
model.config["system"]["result_dir"] = self.save_dir
| 4,273 |
ejercicios/basico/ejercicios/problema_11.py
|
carlosviveros/Soluciones
| 1 |
2026120
|
"""AyudaEnPython: https://www.facebook.com/groups/ayudapython
Escribir un programa para una empresa que tiene salas de juegos para
todas las edades y quiere calcular de forma automática el precio que
debe cobrar a sus clientes por entrar. El programa debe preguntar al
usuario la edad del cliente y mostrar el precio de la entrada. Si el
cliente es menor de 4 años puede entrar gratis, si tiene entre 4 y 8
años debe pagar S/.5 y si es mayor a 18 años, S/.10.
"""
edad = int(input("Ingresar edad: "))
if edad < 4:
print("Entrada gratis")
elif 4 <= edad <= 8:
print("Entrada S/.5")
else:
print("Entrada S/.10")
| 624 |
Sidekick_4.5/xgrid_insertion_efficiency_from_file.py
|
hallba/Sidekick
| 0 |
2026507
|
#!/usr/bin/python
import sys,os
import HAConf
from random import randint
import time
starting_point = os.getcwd()
if "-d" in sys.argv:
print "Daemonizing..."
time.sleep(5)
#To allow for very long gaps between submissions turn into a daemon to start with
import daemonize
daemonize.daemonize('/dev/null',starting_point + "/submission.log",starting_point + "/submission.log")
print "Submission Daemon started with pid %d" % os.getpid()
print "Started %s" % time.ctime()
os.chdir(starting_point)
from xgrid_tools import *
input_file = sys.argv[1]
number_of_repeats = 50
job_list =[]
seed_packet = [randint(0,100000) for i in range(number_of_repeats)]
#Tree structure is different here
#Type/Sequence/Repeats
os.mkdir("Rotation-Translation")
os.chdir("Rotation-Translation")
sequences = [line[:-1] for line in open(starting_point + "/" + input_file,'r')]
for mutant in sequences:
print mutant
#mutant_directory = starting_point + "/" + mutant
try:
os.mkdir(mutant)
except:
continue
os.chdir(mutant)
#Now create run directories and run
job_list.append(job_submit(HAConf.programs['hippo_tr'] + " " + mutant + " -dAgg "))
os.chdir("..")
os.chdir(starting_point)
os.mkdir("MARTINI")
os.chdir("MARTINI")
for mutant in sequences:
print mutant
try:
os.mkdir(mutant)
except:
continue
os.chdir(mutant)
for random_seed in seed_packet:
os.mkdir(str(random_seed))
os.chdir(str(random_seed))
job_list.append(job_submit(HAConf.programs['Insertion_Efficiency'] + " " + mutant + " " + str(random_seed) + " -dAgg "))
os.chdir("..")
#Finally, return to the starting directory
os.chdir("..")
os.chdir(starting_point)
#Hang around and collect the results as a daemon
collect_results_daemon(job_list,starting_point+"/daemon.log",starting_point+"/restart.pickle")
#Scrape off the calculated insertion efficiencies from each MARTINI run and produce a net efficiency for each sequence
martini_location = starting_point + "/MARTINI"
summary_table = open(starting_point + "/MARTINI_Insertion_Summary.txt", "w")
for mutant in sequences:
sequence_location = martini_location + "/" + mutant
efficiency = 0
for random_seed in seed_packet:
        efficiency_file = open(sequence_location + "/" + str(random_seed) + "/efficiency.dat")
        efficiency += float(efficiency_file.readline())
        efficiency_file.close()
efficiency /= len(seed_packet)
print >> summary_table, mutant, efficiency
summary_table.close()
| 2,642 |
main.py
|
woog-life/potsdam-booking-scraper
| 0 |
2025123
|
import dataclasses
import inspect
import json
import logging
import os
import socket
import sys
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Tuple, Optional, Callable, Union, NewType, List, Dict
import pytz
import requests
import urllib3
from bs4 import BeautifulSoup, Tag
from requests import Response
from telegram import Bot
BOOKING_URL = "https://www.blp-shop.de/de/eticket_applications/select_timeslot_list/10/{}/"
# noinspection HttpUrlsUsage
# cluster internal communication
BACKEND_URL = os.getenv("BACKEND_URL") or "http://api:80"
BACKEND_PATH = os.getenv("BACKEND_PATH") or "lake/{}/booking"
UUID = os.getenv("POTSDAM_UUID")
API_KEY = os.getenv("API_KEY")
WATER_INFORMATION = NewType("WaterInformation", Tuple[str, float])
@dataclass
class EventDetails:
booking_link: str
begin_time: datetime
end_time: datetime
sale_start: datetime
is_available: bool
def __repr__(self):
return f"is_available={self.is_available} ({self.booking_link})"
def json(self) -> Dict[str, Union[bool, str, int]]:
return {
"bookingLink": self.booking_link,
"isAvailable": self.is_available,
"beginTime": f"{self.begin_time.isoformat()}Z",
"endTime": f"{self.end_time.isoformat()}Z",
"saleStartTime": f"{self.sale_start.isoformat()}Z",
}
def _utc(input_time: datetime) -> datetime:
naive_time = input_time.replace(tzinfo=None)
input_tz = pytz.timezone("Europe/Berlin")
local_time = input_tz.localize(naive_time)
utc_time = local_time.astimezone(pytz.utc)
return utc_time.replace(tzinfo=None)
def create_logger(name: str, level: int = logging.DEBUG) -> logging.Logger:
logger = logging.Logger(name)
ch = logging.StreamHandler(sys.stdout)
formatting = "[{}] %(asctime)s\t%(levelname)s\t%(module)s.%(funcName)s#%(lineno)d | %(message)s".format(name)
formatter = logging.Formatter(formatting)
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.setLevel(level)
return logger
def send_telegram_alert(message: str, token: str, chatlist: List[str]):
logger = create_logger(inspect.currentframe().f_code.co_name)
if not token:
logger.error("TOKEN not defined in environment, skip sending telegram message")
return
if not chatlist:
logger.error("chatlist is empty (env var: TELEGRAM_CHATLIST)")
for user in chatlist:
Bot(token=token).send_message(chat_id=user, text=f"Error while executing: {message}")
def get_website(date: str) -> Tuple[str, bool]:
logger = create_logger(inspect.currentframe().f_code.co_name)
url = BOOKING_URL.format(date)
logger.debug(f"Requesting {url}")
response = requests.get(url)
content = response.content.decode("ISO-8859-1")
logger.debug(content)
return content, response.status_code == 200
def parse_website_xml(xml: str) -> BeautifulSoup:
return BeautifulSoup(xml, "html.parser")
def extract_table_row(html: BeautifulSoup):
logger = create_logger(inspect.currentframe().f_code.co_name)
table = html.find("table")
if not table:
logger.error(f"table not found in html {html}")
return None
rows = table.find_all("tr")
if not rows or len(rows) < 2:
logger.error(f"tr not found or len(rows) < 2 in {table}")
return None
try:
for idx, row in enumerate(rows):
columns = row.find_all("td")
if columns:
return row
except IndexError:
pass
logger.error("Couldn't find a row for bookings")
return None
def get_tag_text_from_xml(xml: Union[BeautifulSoup, Tag], name: str, conversion: Callable) -> Optional:
tag = xml.find(name)
if not tag:
return None
return conversion(tag.text)
def get_booking_information(soup: BeautifulSoup, date: str) -> Optional[Tuple[datetime, datetime, bool, str]]:
logger = create_logger(inspect.currentframe().f_code.co_name)
start_slot_col = soup.find("td", attrs={"data-title": "Von"})
end_slot_col = soup.find("td", attrs={"data-title": "Bis"})
available_slots_col = soup.find("td", attrs={"data-title": "Freie E-Tickets"})
childs = list(soup.children)
booking_link_a = [td.find("a", attrs={"title": "<NAME>"}) for td in soup.find_all("td") if
td.find("a")]
if not (start_slot_col and end_slot_col and available_slots_col):
logger.error(f"{start_slot_col}, {end_slot_col}, {available_slots_col}, {booking_link_a})")
return None
time = datetime.strptime(f"{date} {start_slot_col.text.strip()}", "%d.%m.%Y %H:%M Uhr")
start_slot = _utc(time)
time = datetime.strptime(f"{date} {end_slot_col.text.strip()}", "%d.%m.%Y %H:%M Uhr")
end_slot = _utc(time)
    is_available = bool(booking_link_a) and "ausverkauft" not in available_slots_col.text.lower()
booking_link = booking_link_a[0].get("href") if is_available else "https://not.available"
# noinspection PyTypeChecker
# at this point pycharm doesn't think that the return type can be optional despite the many empty returns beforehand
return start_slot, end_slot, is_available, booking_link
def send_data_to_backend(variation: str, details: List[EventDetails]) -> Tuple[
Optional[Response], str]:
logger = create_logger(inspect.currentframe().f_code.co_name)
path = BACKEND_PATH.format(UUID)
url = "/".join([BACKEND_URL, path])
try:
body = {
"variation": "Stadtbad Babelsberg",
"events": [event.json() for event in details],
}
response = requests.put(
url,
json=body,
headers={"Authorization": f"Bearer {API_KEY}"}
)
logger.debug(f"success: {response.ok} | content: {response.content}")
except (requests.exceptions.ConnectionError, socket.gaierror, urllib3.exceptions.MaxRetryError):
logger.exception(f"Error while connecting to backend ({url})", exc_info=True)
return None, url
return response, url
def main() -> Tuple[bool, str]:
if not UUID:
root_logger.error("POTSDAM_UUID not defined in environment")
return False, "POTSDAM_UUID not defined"
elif not API_KEY:
root_logger.error("API_KEY not defined in environment")
return False, "API_KEY not defined"
logger = create_logger(inspect.currentframe().f_code.co_name)
today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
sale_start_time = _utc(today)
details = []
for i in range(14):
date = today + timedelta(days=i)
content, success = get_website(date.strftime("%Y-%m-%d"))
if not success:
message = f"Couldn't retrieve website: {content}"
logger.error(message)
return False, message
soup = parse_website_xml(content)
booking_row = extract_table_row(soup)
if not booking_row:
logger.error("Couldn't find correct row")
return False, "Couldn't find correct row"
booking_information = get_booking_information(booking_row, date.strftime("%d.%m.%Y"))
if not booking_information:
message = f"Couldn't retrieve water information from {soup}"
logger.error(message)
return False, message
start_time, end_time, is_available, booking_link = booking_information
detail = EventDetails(booking_link=booking_link,
begin_time=start_time,
end_time=end_time,
sale_start=sale_start_time,
is_available=is_available)
details.append(detail)
response, generated_backend_url = send_data_to_backend("Stadtbad Babelsberg", details)
if not response or not response.ok:
message = f"Failed to put data 'variation': 'Stadtbad Badelsberg', 'events': {details}) to backend: {generated_backend_url}\n{response.content}"
logger.error(message)
return False, message
return True, ""
root_logger = create_logger("__main__")
success, message = main()
if not success:
root_logger.error(f"Something went wrong ({message})")
token = os.getenv("TOKEN")
chatlist = os.getenv("TELEGRAM_CHATLIST") or "139656428"
send_telegram_alert(message, token=token, chatlist=chatlist.split(","))
sys.exit(1)
| 8,506 |
cornflow-server/cornflow/models/dag_permissions.py
|
ggsdc/corn
| 2 |
2025626
|
from cornflow_core.models import TraceAttributesModel
from cornflow_core.shared import db
# from .meta_model import TraceAttributes
from .dag import DeployedDAG
class PermissionsDAG(TraceAttributesModel):
__tablename__ = "permission_dag"
__table_args__ = (db.UniqueConstraint("dag_id", "user_id"),)
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
dag_id = db.Column(
db.String(128), db.ForeignKey("deployed_dags.id"), nullable=False
)
user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
user = db.relationship("UserModel", viewonly=True)
def __init__(self, data):
super().__init__()
self.dag_id = data.get("dag_id")
self.user_id = data.get("user_id")
def __repr__(self):
return f"<DAG permission user: {self.user_id}, DAG: {self.dag_id}>"
@classmethod
def get_user_dag_permissions(cls, user_id):
return cls.query.filter_by(user_id=user_id).all()
@staticmethod
def add_all_permissions_to_user(user_id):
dags = DeployedDAG.get_all_objects()
permissions = [
PermissionsDAG({"dag_id": dag.id, "user_id": user_id}) for dag in dags
]
for permission in permissions:
permission.save()
@staticmethod
def delete_all_permissions_from_user(user_id):
permissions = PermissionsDAG.get_user_dag_permissions(user_id)
for perm in permissions:
perm.delete()
@staticmethod
def check_if_has_permissions(user_id, dag_id):
permission = PermissionsDAG.query.filter_by(
user_id=user_id, dag_id=dag_id
).first()
if permission is None:
return False
return True
| 1,742 |
uniman/model/network_conf.py
|
TrackMan/uniman
| 0 |
2027053
|
from typing import Any, List
from statham.schema.constants import Maybe
from statham.schema.elements import (
Array,
Boolean,
Element,
Integer,
Object,
String,
)
from statham.schema.property import Property
class Meta(Object):
rc: str = Property(String(), required=True)
class WanProviderCapabilities(Object):
upload_kilobits_per_second: int = Property(Integer(), required=True)
download_kilobits_per_second: int = Property(Integer(), required=True)
class DataItem(Object):
_id: Maybe[str] = Property(String())
attr_no_delete: Maybe[bool] = Property(Boolean())
attr_hidden_id: Maybe[str] = Property(String())
wan_networkgroup: Maybe[str] = Property(String())
site_id: Maybe[str] = Property(String())
purpose: Maybe[str] = Property(String())
name: Maybe[str] = Property(String())
wan_type: Maybe[str] = Property(String())
wan_provider_capabilities: Maybe[WanProviderCapabilities] = Property(WanProviderCapabilities)
report_wan_event: Maybe[bool] = Property(Boolean())
wan_ip_aliases: Maybe[List[Any]] = Property(Array(Element()))
wan_type_v6: Maybe[str] = Property(String())
wan_load_balance_type: Maybe[str] = Property(String())
wan_load_balance_weight: Maybe[int] = Property(Integer())
wan_egress_qos: Maybe[str] = Property(String())
wan_dhcp_options: Maybe[List[Any]] = Property(Array(Element()))
wan_dns1: Maybe[str] = Property(String())
vlan_enabled: Maybe[bool] = Property(Boolean())
ip_subnet: Maybe[str] = Property(String())
ipv6_interface_type: Maybe[str] = Property(String())
domain_name: Maybe[str] = Property(String())
is_nat: Maybe[bool] = Property(Boolean())
dhcpd_enabled: Maybe[bool] = Property(Boolean())
dhcpd_start: Maybe[str] = Property(String())
dhcpd_stop: Maybe[str] = Property(String())
dhcpdv6_enabled: Maybe[bool] = Property(Boolean())
ipv6_ra_enabled: Maybe[bool] = Property(Boolean())
lte_lan_enabled: Maybe[bool] = Property(Boolean())
networkgroup: Maybe[str] = Property(String())
dhcpd_leasetime: Maybe[int] = Property(Integer())
dhcpd_dns_enabled: Maybe[bool] = Property(Boolean())
dhcpd_gateway_enabled: Maybe[bool] = Property(Boolean())
dhcpd_time_offset_enabled: Maybe[bool] = Property(Boolean())
ipv6_pd_start: Maybe[str] = Property(String())
ipv6_pd_stop: Maybe[str] = Property(String())
gateway_type: Maybe[str] = Property(String())
enabled: Maybe[bool] = Property(Boolean())
dhcp_relay_enabled: Maybe[bool] = Property(Boolean())
nat_outbound_ip_addresses: Maybe[List[Any]] = Property(Array(Element()))
dpi_enabled: Maybe[bool] = Property(Boolean())
dpigroup_id: Maybe[str] = Property(String())
dhcpd_dns_1: Maybe[str] = Property(String())
dhcpd_dns_2: Maybe[str] = Property(String())
ipsec_interface: Maybe[str] = Property(String())
vpn_type: Maybe[str] = Property(String())
route_distance: Maybe[int] = Property(Integer())
ipsec_profile: Maybe[str] = Property(String())
remote_vpn_subnets: Maybe[List[str]] = Property(Array(String()))
ipsec_key_exchange: Maybe[str] = Property(String())
ipsec_encryption: Maybe[str] = Property(String())
ipsec_hash: Maybe[str] = Property(String())
ipsec_dh_group: Maybe[int] = Property(Integer())
ipsec_ike_dh_group: Maybe[int] = Property(Integer())
ipsec_esp_dh_group: Maybe[int] = Property(Integer())
ipsec_pfs: Maybe[bool] = Property(Boolean())
ipsec_dynamic_routing: Maybe[bool] = Property(Boolean())
x_ipsec_pre_shared_key: Maybe[str] = Property(String())
ipsec_local_ip: Maybe[str] = Property(String())
ipsec_peer_ip: Maybe[str] = Property(String())
ifname: Maybe[str] = Property(String())
class NetworkConf(Object):
meta: Maybe[Meta] = Property(Meta)
data: Maybe[List[DataItem]] = Property(Array(DataItem))
| 3,943 |
Z_ALL_FILE/Py1/socks_srv.py
|
omikabir/omEngin
| 0 |
2025789
|
#ref: https://rushter.com/blog/python-socks-server/
from socketserver import ThreadingMixIn, TCPServer, StreamRequestHandler
class ThreadingTCPServer(ThreadingMixIn, TCPServer):
pass
class SocksProxy(StreamRequestHandler):
def handle(self):
# Our main logic will be here
pass
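        # A sketch of what a full handler might do here (per the referenced blog post),
        # assuming a SOCKS5 client; self.connection is provided by StreamRequestHandler:
        #   header = self.connection.recv(2)           # VER, NMETHODS
        #   methods = self.connection.recv(header[1])  # offered auth methods
        #   self.connection.sendall(b"\x05\x00")       # accept "no authentication"
        #   ...then parse the CONNECT request and relay bytes between the two sockets.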
if __name__ == '__main__':
with ThreadingTCPServer(('127.0.0.1', 9011), SocksProxy) as server:
server.serve_forever()
| 432 |
ddpg/critic.py
|
Jash-2000/reinforcement_learning
| 97 |
2025596
|
# Deep Deterministic Policy Gradient
# following paper: Continuous control with deep reinforcement learning
# (https://arxiv.org/pdf/1509.02971.pdf)
#
# ---
# @author <NAME>
# @email luyiren [at] seas [dot] upenn [dot] edu
#
# MIT License
import tensorflow as tf
import tf_utils
class CriticNetwork(object):
def __init__(self, state_size, action_size, lr, n_h1=400, n_h2=300, tau=0.001):
self.state_size = state_size
self.action_size = action_size
self.optimizer = tf.train.AdamOptimizer(lr)
self.tau = tau
self.n_h1 = n_h1
self.n_h2 = n_h2
self.input_s, self.action, self.critic_variables, self.q_value = self._build_network("critic")
self.input_s_target, self.action_target, self.critic_variables_target, self.q_value_target = self._build_network("critic_target")
self.target = tf.placeholder(tf.float32, [None])
self.l2_loss = tf.add_n([tf.nn.l2_loss(v) for v in self.critic_variables])
self.loss = tf.reduce_mean(tf.square(self.target - self.q_value)) + 0.01*self.l2_loss
self.optimize = self.optimizer.minimize(self.loss)
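        # Soft (Polyak) target update from the DDPG paper: theta_target <- tau * theta + (1 - tau) * theta_target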
self.update_target_op = [self.critic_variables_target[i].assign(tf.multiply(self.critic_variables[i], self.tau) + tf.multiply(self.critic_variables_target[i], 1 - self.tau)) for i in range(len(self.critic_variables))]
self.action_gradients = tf.gradients(self.q_value, self.action)
def _build_network(self, name):
input_s = tf.placeholder(tf.float32, [None, self.state_size])
action = tf.placeholder(tf.float32, [None, self.action_size])
with tf.variable_scope(name):
layer_1 = tf_utils.fc(input_s, self.n_h1, scope="fc1", activation_fn=tf.nn.relu,
initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_IN"))
# tf.concat((layer_1, action), 1)
layer_2 = tf_utils.fc(tf.concat((layer_1, action), 1), self.n_h2, scope="fc2", activation_fn=tf.nn.relu,
initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_IN"))
q_value = tf_utils.fc(layer_2, 1, scope="out", initializer=tf.random_uniform_initializer(-3e-3, 3e-3))
critic_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=name)
return input_s, action, critic_variables, tf.squeeze(q_value)
def get_qvalue_target(self, state, action, sess):
return sess.run(self.q_value_target, feed_dict={
self.input_s_target: state,
self.action_target: action
})
def get_gradients(self, state, action, sess):
return sess.run(self.action_gradients, feed_dict={
self.input_s: state,
self.action: action
})
def train(self, state, action, target, sess):
_, loss = sess.run([self.optimize, self.loss], feed_dict={
self.input_s: state,
self.action: action,
self.target: target
})
return loss
def update_target(self, sess):
sess.run(self.update_target_op)
| 2,917 |
file name and extension.py
|
Max143/Python_programs
| 0 |
2025788
|
"""
Input a filename and print its extension.
"""
filename = input("Input the Filename : ")
file_extension = filename.split(".")
print("The extension of the file is : " + repr(file_extension[-1]))
| 219 |
generative_models/VanillaGAN_TensorFlow/models.py
|
kumayu0108/model-zoo
| 43 |
2025278
|
import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout, Input, BatchNormalization,LeakyReLU
from tensorflow.keras.models import Model,Sequential
from tensorflow.keras.optimizers import Adam
def adam_optimizer(learning_rate,beta_1):
return Adam(lr=learning_rate, beta_1=beta_1)
def create_generator(learning_rate,beta_1,encoding_dims):
generator=Sequential()
generator.add(Dense(units=256,input_dim=encoding_dims))
generator.add(LeakyReLU(0.2))
generator.add(BatchNormalization(momentum=0.8))
generator.add(Dense(units=512))
generator.add(LeakyReLU(0.2))
generator.add(BatchNormalization(momentum=0.8))
generator.add(Dense(units=1024))
generator.add(LeakyReLU(0.2))
generator.add(BatchNormalization(momentum=0.8))
generator.add(Dense(units=784, activation='tanh'))
generator.compile(loss='binary_crossentropy', optimizer=adam_optimizer(learning_rate,beta_1))
return generator
def create_discriminator(learning_rate,beta_1):
discriminator=Sequential()
discriminator.add(Dense(units=1024,input_dim=784))
discriminator.add(LeakyReLU(0.2))
discriminator.add(Dropout(0.3))
discriminator.add(Dense(units=512))
discriminator.add(LeakyReLU(0.2))
discriminator.add(Dropout(0.3))
discriminator.add(Dense(units=256))
discriminator.add(LeakyReLU(0.2))
discriminator.add(Dense(units=1, activation='sigmoid'))
discriminator.compile(loss='binary_crossentropy', optimizer=adam_optimizer(learning_rate,beta_1))
return discriminator
def create_gan(discriminator, generator,encoding_dims):
discriminator.trainable=False
gan_input = Input(shape=(encoding_dims,))
x = generator(gan_input)
gan_output= discriminator(x)
gan= Model(inputs=gan_input, outputs=gan_output)
gan.compile(loss='binary_crossentropy', optimizer='adam')
return gan
| 1,883 |
callmail_project/aboutus/admin.py
|
q8groups/callnmail
| 0 |
2026820
|
from django.contrib import admin
from .models import StaticContents
# Register your models here.
admin.site.register(StaticContents)
| 132 |
utils.py
|
greerviau/BetterRobotIdeas
| 21 |
2026879
|
def load_text(filename):
file = open(filename, 'r')
text = file.read()
file.close()
return text
def clean_text(doc):
import string, os
doc = doc.replace('\n',' ')
tokens = doc.split()
table = str.maketrans('','',string.punctuation)
tokens = [w.translate(table) for w in tokens]
tokens = [word for word in tokens if word.isalpha()]
tokens = [word.lower() for word in tokens]
return tokens
def save_text(lines, filename):
data = '\n'.join(lines)
file = open(filename, 'w')
file.write(data)
file.close()
| 556 |
shopify_api_py/session.py
|
stcstores/shopify_api
| 0 |
2026814
|
"""Session manager for the shopify API."""
from pathlib import Path
from typing import Any, Callable, Optional, Union
import shopify
import toml
from .exceptions import LoginCredentialsNotSetError
class ShopifyAPISession:
"""Session manager for the shopify API."""
SHOP_URL = None
API_VERSION = None
    API_PASSWORD = None
CONFIG_FILENAME = ".shopify_api.toml"
def __enter__(self) -> shopify.Session:
if not self.__class__.credentails_are_set():
config_path = self.__class__.find_config_filepath()
if config_path is not None:
self.__class__.load_from_config_file(config_file_path=config_path)
if not self.__class__.credentails_are_set():
raise LoginCredentialsNotSetError()
session = shopify.Session(
shop_url=self.__class__.SHOP_URL,
version=self.__class__.API_VERSION,
token=self.__class__.API_PASSWORD,
)
shopify.ShopifyResource.activate_session(session)
return session
def __exit__(self, exc_type: None, exc_value: None, exc_tb: None) -> None:
shopify.ShopifyResource.clear_session()
@classmethod
def set_login(
cls,
shop_url: Optional[str] = None,
api_version: Optional[str] = None,
api_password: Optional[str] = None,
) -> None:
"""Set required login credentials (shop url, API version and API password)."""
cls.SHOP_URL = shop_url
cls.API_VERSION = api_version
        cls.API_PASSWORD = api_password
@classmethod
def credentails_are_set(cls) -> bool:
"""Return True if all login credentials are set, otherwise False."""
if None in (cls.SHOP_URL, cls.API_VERSION, cls.API_PASSWORD):
return False
else:
return True
@classmethod
def find_config_filepath(cls) -> Optional[Path]:
"""
Return the path to a shopify config file or None.
        Recursively scan backwards from the current working directory and return the
path to a file matching cls.CONFIG_FILENAME if one exists, otherwise returns
None.
"""
path = Path.cwd()
while path.parent != path:
config_file = path / cls.CONFIG_FILENAME
if config_file.exists():
return config_file
path = path.parent
return None
@classmethod
def load_from_config_file(cls, config_file_path: Union[Path, str]) -> None:
"""Set login credentials as specified in a toml file located at config_file_path."""
with open(config_file_path) as f:
config = toml.load(f)
cls.set_login(
shop_url=config.get("SHOP_URL"),
api_version=config.get("API_VERSION"),
api_password=config.get("API_PASSWORD"),
)
def shopify_api_session(func: Callable) -> Callable:
"""Use a shopify API session as a method decorator."""
def wrapper_shopify_api_session(*args: Any, **kwargs: Any) -> Any:
with ShopifyAPISession():
return func(*args, **kwargs)
return wrapper_shopify_api_session
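# Hypothetical usage of the decorator (the wrapped function is illustrative, not part of this module):
#   @shopify_api_session
#   def get_shop():
#       return shopify.Shop.current()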
| 3,153 |
tests/test_loss.py
|
assansanogo/TransformerTTS
| 894 |
2026914
|
import unittest
import numpy as np
from utils.losses import new_scaled_crossentropy, masked_crossentropy
class TestCharTokenizer(unittest.TestCase):
def test_crossentropy(self):
scaled_crossent = new_scaled_crossentropy(index=2, scaling=5)
targets = np.array([[0, 1, 2]])
logits = np.array([[[.3, .2, .1], [.3, .2, .1], [.3, .2, .1]]])
loss = scaled_crossent(targets, logits)
self.assertAlmostEqual(2.3705523014068604, float(loss))
scaled_crossent = new_scaled_crossentropy(index=2, scaling=1)
loss = scaled_crossent(targets, logits)
self.assertAlmostEqual(0.7679619193077087, float(loss))
loss = masked_crossentropy(targets, logits)
self.assertAlmostEqual(0.7679619193077087, float(loss))
| 819 |
video/core/migrations/0004_video_scores.py
|
eltonjncorreia/youtube-deeper
| 0 |
2024017
|
# Generated by Django 2.0.5 on 2018-05-21 17:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0003_theme_points'),
]
operations = [
migrations.AddField(
model_name='video',
name='scores',
field=models.DecimalField(decimal_places=2, default=1, max_digits=5),
preserve_default=False,
),
]
| 441 |
microbepy/tests/common/test_schema.py
|
ScienceStacks/MicrobEPy
| 1 |
2026699
|
from microbepy.common.schema \
import Schemas, ColumnSchemas, TableSchemas, FunctionalDependency
from microbepy.common import constants as cn
import copy
import collections
import unittest
COL_A = 'col_a'
COL_B = 'col_b'
COL_C = 'col_c'
DATA = range(5)
COLUMN_SCHEMAS = {COL_A: DATA, COL_B: DATA}
TABLE_A = 'table_a'
TABLE_B = 'table_b'
########################################
class TestSchema(unittest.TestCase):
def setUp(self):
self.schemas = Schemas()
def testConstructor(self):
self.assertTrue(isinstance(self.schemas.schemas, dict))
def testGet(self):
self.schemas.schemas = COLUMN_SCHEMAS
self.assertEqual(self.schemas.getSchema(COL_A), DATA)
def testValidate(self):
self.schemas.schemas = COLUMN_SCHEMAS
# The following should not generate an exception
self.schemas.validate(COL_A, is_present=True)
self.schemas.validate(COL_C, is_present=False)
self.schemas.validate([COL_A, COL_B], is_present=True)
#
with self.assertRaises(ValueError):
self.schemas.validate(COL_C, is_present=True)
with self.assertRaises(ValueError):
self.schemas.validate(COL_B, is_present=False)
class TestColumnSchemas(unittest.TestCase):
def setUp(self):
self.schemas = ColumnSchemas()
def testConstructor(self):
self.assertTrue(isinstance(self.schemas.schemas, dict))
def testAdd(self):
self.schemas.addSchema(COL_A, data_type=int)
self.assertEqual(self.schemas.getType(COL_A), int)
self.schemas.addSchema(COL_B)
self.assertEqual(self.schemas.getType(COL_B), str)
with self.assertRaises(ValueError):
self.schemas.addSchema(COL_B)
class TestTableSchemas(unittest.TestCase):
def setUp(self):
self.schemas = TableSchemas()
def testConstructor(self):
self.assertTrue(isinstance(self.schemas.schemas, dict))
def testAddSchema(self):
self.schemas.column_schemas.addSchema(
[COL_A, COL_B], data_type=int)
self.schemas.addSchema(TABLE_A, [COL_A, COL_B], COL_A)
self.assertEqual(self.schemas.getSchema(TABLE_A).name, TABLE_A)
def testAddFD(self):
self.schemas.column_schemas.addSchema(
[COL_A, COL_B], data_type=int)
self.schemas.addFD(COL_A, COL_B)
self.assertEqual(self.schemas.functional_dependencies[0],
FunctionalDependency(ind=COL_A, dep=COL_B))
def testGetColumns2(self):
columns = cn.TABLE_SCHEMAS.getColumns([
cn.TABLE_MUTATION,
cn.TABLE_ISOLATE_MUTATION_LINK,
cn.TABLE_ISOLATE,
cn.TABLE_GENE_DESCRIPTION,
])
self.assertTrue(cn.KEY_ISOLATE in columns)
if __name__ == '__main__':
unittest.main()
| 2,634 |
esp8266/accesspointWS/blinky.py
|
dbadb/robodyn
| 2 |
2026913
|
import time
import asyncio
import websockets
# https://websockets.readthedocs.io/en/stable/intro.html
async def doit():
url = "ws://192.168.4.1:81"
l = "1"
async with websockets.connect(url) as websocket:
res = await websocket.recv()
print(res)
await websocket.send("/ard?verbose=1&")
res = await websocket.recv()
print("verbose 1: " + res)
v = 0
while True:
v += 1
# print(l)
for i in range(2):
# handshake
if i == 0:
if v == 100:
await websocket.send("/ard?verbose=0")
res = await websocket.recv()
print("verbose 0: " + res);
l = "0" if l == "1" else "1"
msg = "/ard?led=%s&" % l
else:
msg = "/esp?led=%s&" % l
# print(msg)
await websocket.send(msg)
res = await websocket.recv()
if res == "break":
break
elif res != "ok":
print("unknown ack: %s" % res)
time.sleep(.1)
# first request ard to send esp an update, this
# could be automatic, but for now...
await websocket.send("/ard/ping")
res = await websocket.recv()
if res == "ok":
# retrieve the esp-side ping state
await websocket.send("/esp/getping")
res = await websocket.recv()
print(res)
time.sleep(.1)
asyncio.get_event_loop().run_until_complete(doit())
| 1,706 |
test/visualization/visualize_smbo.py
|
usualwitch/lite-bo
| 1 |
2026416
|
import os
import sys
import numpy as np
from ConfigSpace.hyperparameters import UniformFloatHyperparameter
sys.path.append(os.getcwd())
from litebo.optimizer.generic_smbo import SMBO
from litebo.utils.config_space import ConfigurationSpace
def branin(x):
xs = x.get_dictionary()
x1 = xs['x1']
x2 = xs['x2']
a = 1.
b = 5.1 / (4. * np.pi ** 2)
c = 5. / np.pi
r = 6.
s = 10.
t = 1. / (8. * np.pi)
ret = a * (x2 - b * x1 ** 2 + c * x1 - r) ** 2 + s * (1 - t) * np.cos(x1) + s
return {'objs': (ret,)}
cs = ConfigurationSpace()
x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=0)
x2 = UniformFloatHyperparameter("x2", 0, 15, default_value=0)
cs.add_hyperparameters([x1, x2])
bo = SMBO(branin, cs, advisor_type='default', max_runs=50, time_limit_per_trial=3, task_id='hp1')
bo.run()
inc_value = bo.get_incumbent()
print('BO', '=' * 30)
print(inc_value)
# Execute the following command:
# Step 1: `python test/visualization/visualize_smbo.py`
# Step 2: `tensorboard --logdir logs/hp1`
| 1,041 |
03-opencv-lab/haar_cascade.py
|
iproduct/course-robotics-npmg
| 0 |
2026546
|
import cv2 as cv
import sys
WIDTH = 640
HEIGHT = 480
if __name__ == '__main__':
faceCascade = cv.CascadeClassifier(cv.data.haarcascades + 'haarcascade_frontalface_default.xml')
video = cv.VideoCapture(0)
video.set(cv.CAP_PROP_FRAME_WIDTH, WIDTH)
video.set(cv.CAP_PROP_FRAME_HEIGHT, HEIGHT)
video.set(cv.CAP_PROP_BRIGHTNESS, 170)
while True:
success, img = video.read()
if not success:
sys.exit("Could not find video.")
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=9,
minSize=(30, 30),
flags=cv.CASCADE_SCALE_IMAGE
)
for result in faces:
x,y,w,h = result
            x1, y1 = x+w, y+h
cv.rectangle(img, (x,y), (x1,y1), (0, 0, 255), 2)
cv.imshow("Video", img)
if cv.waitKey(30) & 0xFF == 27:
break;
video.release()
cv.destroyAllWindows()
sys.exit(0)
| 1,034 |
app/schemas/base.py
|
neurothrone/project-dot
| 0 |
2026924
|
from typing import TypeVar
from sqlmodel import SQLModel
TSQLModel = TypeVar("TSQLModel", bound="SQLModel")
class SQLModelBase(SQLModel):
pass
| 151 |
overthewire/leviathan/leviathan4.py
|
GambuzX/Cybersecurity_Practice
| 1 |
2026264
|
binary = "01010100 01101001 01110100 01101000 00110100 01100011 01101111 01101011 01100101 01101001 00001010"
password = ''.join([chr(int(x, 2)) for x in binary.split(' ')])
print password
| 191 |
vise/tests/test_user_settings.py
|
kumagai-group/vise
| 16 |
2026984
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020. Distributed under the terms of the MIT License.
import os
import pytest
from vise.defaults import UserSettings
@pytest.fixture
def user_settings(tmpdir):
second_last_dir = tmpdir
os.chdir(second_last_dir)
second_last_dir.join("test.yaml").write("""
float: 0.01
dict:
d1: 5
d2: 3""")
terminal_dir = tmpdir.join("terminal_dir")
terminal_dir.mkdir()
os.chdir("terminal_dir")
terminal_dir.join("test.yaml").write("""
str: s1 s2
dict:
d1: 10
d2: 3""")
return UserSettings("test.yaml"), second_last_dir, terminal_dir
def test_get_yaml_filenames(user_settings):
user_settings, second_last_dir, terminal_dir = user_settings
actual = user_settings.yaml_files_from_root_dir
expected = [second_last_dir / "test.yaml", terminal_dir / "test.yaml"]
assert actual == expected
def test_overridden_user_settings(user_settings):
user_settings, _, _ = user_settings
actual = user_settings.user_settings
expected = {"float": 0.01,
"dict": {"d1": 10, "d2": 3},
"str": "s1 s2"}
assert actual == expected
def test_parse_hidden_file(tmpdir):
os.chdir(tmpdir)
tmpdir.join("test.yaml").write("""
float: 0.01""")
user_settings = UserSettings(yaml_filename="test.yaml")
actual = user_settings.user_settings
expected = {"float": 0.01}
assert actual == expected
def test_add_absolute_path(tmpdir):
os.chdir(tmpdir)
tmpdir.join("test.yaml").write("""
path: a/b""")
user_settings = UserSettings(yaml_filename="test.yaml")
actual = user_settings.user_settings["path"]
expected = tmpdir / "a/b"
assert actual == expected
| 1,775 |
scripts/plot_bpass_fesc.py
|
lconaboy/seren3
| 1 |
2026236
|
import seren3
iout = 108
# sim = seren3.load("RT2")
sin_sim = seren3.load("2Mpc_BPASS_SIN")
bin_sim = seren3.load("2Mpc_BPASS_BIN")
sin_snap = sin_sim[iout]
bin_snap = bin_sim[iout]
cosmo = sin_snap.cosmo
import matplotlib
import matplotlib.pylab as plt
from seren3.analysis.plots import fit_scatter, fit_median
nbins=5
# In[60]:
import numpy as np
import pickle
import random
from seren3.scripts.mpi import write_fesc_hid_dict
def load_fesc(snapshot):
db = write_fesc_hid_dict.load_db(snapshot.path, snapshot.ioutput)
hids = db.keys()
mvir = np.zeros(len(hids))
fesc = np.zeros(len(hids))
nphotons = np.zeros(len(hids))
count=0.0
for i in range(len(hids)):
hid = hids[i]
res = db[hid]
ifesc = res["fesc"]
if ifesc > 1. and ifesc < 10.:
# if ifesc > 1.:
fesc[i] = random.uniform(0.9, 1.0)
count += 1.0
elif ifesc > 0. and ifesc <= 1.:
fesc[i] = ifesc
else:
# if (res["fesc"] > 10.):
# print "%e %e" % (res["tot_mass"], res["fesc"])
continue
mvir[i] = res["hprops"]["mvir"]
Nion_d_now = db[hid]["Nion_d_now"].in_units("s**-1 Msol**-1")
star_mass = db[hid]["star_mass"].in_units("Msol")
nphotons[i] = (Nion_d_now * star_mass).sum()
print count/float(len(mvir))
print len(mvir)
ix = np.where(fesc > 0)
fesc = fesc[ix]
mvir = mvir[ix]
nphotons = nphotons[ix]
log_mvir = np.log10(mvir)
log_fesc = np.log10(fesc)
# ix = np.where(np.logical_and(log_mvir >= 7.5, np.log10(fesc*100.) >= -1))
# ix = np.where(log_mvir >= 7.5)
# log_mvir = log_mvir[ix]
# fesc = fesc[ix]
# nphotons = nphotons[ix]
ix = np.where(~np.isnan(fesc))
log_mvir = log_mvir[ix]
fesc = fesc[ix]
nphotons = nphotons[ix]
print 'Loaded data for %d halos' % len(log_mvir)
return log_mvir, fesc, nphotons
# fig, axes = plt.subplots(nrows=2, ncols=3, figsize=(18,6))
# axs = axes[0,:]
# iouts = [ [108, 108], [bin_sim.redshift(8), sin_sim.redshift(8)], [sin_sim.redshift(9), sin_sim.redshift(9)] ]
# for ax, iout_lst in zip(axs.flatten(), iouts):
# data = {"BIN" : load_fesc(bin_sim[iout_lst[0]]), "SIN" : load_fesc(sin_sim[iout_lst[0]])}
# binned_data = {}
# cosmo = bin_sim[iout_lst[0]].cosmo
# for key, ls, lw, c in zip(data.keys(), ["-", "--"], [3., 1.5], ["r", "b"]):
# log_mvir, fesc, nphotons = data[key]
# if (iout_lst[0] == 108):
# cosmo["z"] = 6
# if (iout_lst[0] == bin_sim.redshift(8)):
# cosmo["z"] = 8
# if (iout_lst[0] == bin_sim.redshift(9)):
# cosmo["z"] = 9
# # log_mvir, fesc, tint_fesc, nphotons = data[key]
# # fesc_percent = tint_fesc * 100.
# fesc_percent = fesc * 100.
# bin_centres, mean, std, sterr = fit_scatter(log_mvir, fesc_percent, nbins=nbins, ret_sterr=True)
# binned_data[key] = (bin_centres, mean, std)
# # bin_centres, median = fit_median(log_mvir, fesc_percent, nbins=nbins)
# ax.scatter(log_mvir, fesc_percent, alpha=0.25, color=c, s=15, marker=".")
# e = ax.errorbar(bin_centres, mean, yerr=std, color=c, label='%s z=%1.2f' % (key, cosmo['z']), fmt="o", markerfacecolor=c, mec='k', capsize=2, capthick=2, elinewidth=2, linestyle=ls, linewidth=lw)
# print binned_data["BIN"][1] / binned_data["SIN"][1]
# axs[0].set_ylabel(r'f$_{\mathrm{esc}}$(t) [%]', fontsize=20)
# for ax in axs.flatten():
# ax.legend(loc="lower right", frameon=False, prop={"size":16})
# # ax.set_xlabel(r'log$_{10}$ M$_{\mathrm{vir}}$ [M$_{\odot}$/h]', fontsize=20)
# # ax.set_ylabel(r'f$_{\mathrm{esc}}$ (<t$_{\mathrm{H}}$) [%]', fontsize=20)
# ax.set_yscale("log")
# ax.set_ylim(1e-1, 1.e2)
# ax.set_xlim(7.5, 10.)
# axs = axes[1,:]
# iouts = [ [108, 108], [bin_sim.redshift(8), sin_sim.redshift(8)], [sin_sim.redshift(9), sin_sim.redshift(9)] ]
# for ax, iout_lst in zip(axs.flatten(), iouts):
# data = {"BIN" : load_fesc(bin_sim[iout_lst[0]]), "SIN" : load_fesc(sin_sim[iout_lst[0]])}
# binned_data = {}
# cosmo = bin_sim[iout_lst[0]].cosmo
# for key, ls, lw, c in zip(data.keys(), ["-", "--"], [3., 1.5], ["r", "b"]):
# log_mvir, fesc, nphotons = data[key]
# if (iout_lst[0] == 108):
# cosmo["z"] = 6
# if (iout_lst[0] == bin_sim.redshift(8)):
# cosmo["z"] = 8
# if (iout_lst[0] == bin_sim.redshift(9)):
# cosmo["z"] = 9
# # log_mvir, fesc, tint_fesc, nphotons = data[key]
# # fesc_percent = tint_fesc * 100.
# log_nphotons_esc = np.log10(nphotons * fesc)
# bin_centres, mean, std, sterr = fit_scatter(log_mvir, log_nphotons_esc, nbins=nbins, ret_sterr=True)
# binned_data[key] = (bin_centres, mean, std)
# # bin_centres, median = fit_median(log_mvir, fesc_percent, nbins=nbins)
# ax.scatter(log_mvir, log_nphotons_esc, alpha=0.25, color=c, s=15, marker=".")
# e = ax.errorbar(bin_centres, mean, yerr=std, color=c, label='%s z=%1.2f' % (key, cosmo['z']), fmt="o", markerfacecolor=c, mec='k', capsize=2, capthick=2, elinewidth=2, linestyle=ls, linewidth=lw)
# print binned_data["BIN"][1] / binned_data["SIN"][1]
# axs[0].set_ylabel(r'log$_{10}$ $\dot{\mathrm{N}}_{\mathrm{ion}}(t)$ f$_{\mathrm{esc}}$ ($t$) [#/s]', fontsize=20)
# for ax in axs.flatten():
# ax.legend(loc="lower right", frameon=False, prop={"size":16})
# ax.set_xlabel(r'log$_{10}$ M$_{\mathrm{vir}}$ [M$_{\odot}$/h]', fontsize=20)
# ax.set_xlim(7.5, 10.)
# ax.set_ylim(43.5, 54)
# plt.tight_layout()
# plt.show()
def load_tint_fesc(snapshot):
# fname = "%s/pickle/ConsistentTrees/fesc_database_%05i.p" % (snapshot.path, snapshot.ioutput)
fname = "%s/pickle/ConsistentTrees/time_int_fesc_all_halos_%05i.p" % (snapshot.path, snapshot.ioutput)
data = pickle.load(open(fname, "rb"))
db = write_fesc_hid_dict.load_db(snapshot.path, snapshot.ioutput)
mvir = []
fesc = []
tint_fesc = []
# hids = []
nphotons = []
for i in range(len(data)):
res = data[i].result
ifesc = res["fesc"][0]
# if ifesc > 1. and ifesc < 10.:
if ifesc > 1.:
fesc.append(random.uniform(0.9, 1.0))
elif ifesc > 0. and ifesc <= 1.:
fesc.append(ifesc)
else:
# if (res["fesc"] > 10.):
# print "%e %e" % (res["tot_mass"], res["fesc"])
continue
mvir.append(res["Mvir"])
tint_fesc.append(res["tint_fesc_hist"][0])
hid = int(data[i].idx)
# hids.append(hid)
Nion_d_now = db[hid]["Nion_d_now"].in_units("s**-1 Msol**-1")
star_mass = db[hid]["star_mass"].in_units("Msol")
nphotons.append( (Nion_d_now * star_mass).sum() )
mvir = np.array(mvir)
fesc = np.array(fesc)
tint_fesc = np.array(tint_fesc)
# hids = np.array(hids)
nphotons = np.array(nphotons)
# print count/float(len(mvir))
ix = np.where(fesc > 0)
fesc = fesc[ix]
tint_fesc = tint_fesc[ix]
mvir = mvir[ix]
# hids = hids[ix]
nphotons = nphotons[ix]
log_mvir = np.log10(mvir)
log_fesc = np.log10(fesc)
# ix = np.where(np.logical_and(log_mvir >= 7.5, log_fesc > -3))
# ix = np.where(log_mvir >= 7.4)
ix = np.where(np.logical_and(log_mvir >= 6.5, log_mvir <= 8.))
log_mvir = log_mvir[ix]
fesc = fesc[ix]
tint_fesc = tint_fesc[ix]
# hids = hids[ix]
nphotons = nphotons[ix]
ix = np.where(~np.isnan(tint_fesc))
log_mvir = log_mvir[ix]
fesc = fesc[ix]
tint_fesc = tint_fesc[ix]
# hids = hids[ix]
nphotons = nphotons[ix]
print 'Loaded data for %d halos' % len(log_mvir)
return log_mvir, fesc, tint_fesc, nphotons
from scipy import interpolate
nbins=5
ax = plt.gca()
cosmo["z"] = 6
data = {"BIN" : load_tint_fesc(bin_snap), "SIN" : load_tint_fesc(sin_snap)}
cols = ["r", "b"]
binned_data = {}
for key, ls, lw, c in zip(data.keys(), ["-", "--"], [3., 1.5], cols):
# log_mvir, fesc, tint_fesc, hids = data[key]
log_mvir, fesc, tint_fesc, nphotons = data[key]
fesc_percent = tint_fesc * 100.
bin_centres, mean, std = fit_scatter(log_mvir, fesc_percent, nbins=nbins)
binned_data[key] = (bin_centres, mean, std)
# bin_centres, median = fit_median(log_mvir, log_fesc, nbins=nbins)
ax.scatter(log_mvir, fesc_percent, alpha=0.1, color=c, s=5)
fn = interpolate.interp1d(bin_centres, np.log10(mean), fill_value="extrapolate")
mass_milky_way = 1e12 # approx halo mass in solar masses
fesc_milky_way = 10**fn(np.log10(mass_milky_way))
print key, "fesc Milky Way = %1.2f" % fesc_milky_way
x = np.linspace(8, 12, 100)
y = 10**fn(x)
# ax.plot(x, y, color=c, linestyle=":", linewidth=5)
# e = plt.errorbar(bin_centres, median, yerr=std, color=c, label='%s z=%1.2f' % ("BPASS_BIN", cosmo['z']),\
# fmt="o", markerfacecolor=c, mec='k', capsize=2, capthick=2, elinewidth=2, linestyle='-', linewidth=2.)
e = ax.errorbar(bin_centres, mean, yerr=std, color=c, label='%s z=%1.2f' % (key, cosmo['z']), fmt="o", markerfacecolor=c, mec='k', capsize=2, capthick=2, elinewidth=2, linestyle=ls, linewidth=lw)
print binned_data["BIN"][1] / binned_data["SIN"][1]
ax.legend(loc="lower right", frameon=False, prop={"size":16})
ax.set_xlabel(r'log$_{10}$ M$_{\mathrm{vir}}$ [M$_{\odot}$/h]', fontsize=20)
ax.set_ylabel(r'$\langle$f$\rangle$$_{\mathrm{esc}}$ ($\leq t_{\mathrm{H}}$) [%]', fontsize=20)
ax.set_yscale("log")
# ax.set_xlim(7.5, 10.)
ax.set_ylim(1e0, 1.e2)
plt.show()
| 9,716 |
chromewhip/helpers.py
|
spinda/chromewhip
| 97 |
2025996
|
import copy
import json
import logging
import re
import sys
class PayloadMixin:
@classmethod
def build_send_payload(cls, method: str, params: dict):
return {
"method": ".".join([cls.__name__, method]),
"params": {k: v for k, v in params.items() if v is not None}
}
@classmethod
def convert_payload(cls, types: dict):
def convert(result: dict):
"""
:param result:
:return:
"""
types_ = copy.copy(types)
for name, val in result.items():
try:
expected_ = types_.pop(name)
expected_type_ = expected_['class']
except KeyError:
raise KeyError('name %s not in expected payload of %s' % (name, types))
if issubclass(expected_type_, ChromeTypeBase):
result[name] = expected_type_(**val)
elif re.match(r'.*Id$', name) and isinstance(val, str):
result[name] = expected_type_(val)
elif not isinstance(val, expected_type_):
                    raise ValueError('%s is not expected type %s, instead is %s' % (val, expected_type_, type(val)))
for rn, rv in types_.items():
if not rv.get('optional', False):
raise ValueError('expected payload param "%s" is missing!' % rn)
return result
return convert
log = logging.getLogger(__name__)
class BaseEvent:
js_name = 'chromewhipBaseEvent'
hashable = []
is_hashable = False
def hash_(self):
hashable_params = {}
for k, v in self.__dict__.items():
if k in self.hashable:
hashable_params[k] = v
else:
try:
hashable_params['%sId' % k] = v.id
except KeyError:
pass
except AttributeError:
# TODO: make better, fails for event that has 'timestamp` as a param
pass
serialized_id_params = ','.join(['='.join([p, str(v)]) for p, v in hashable_params.items()])
h = '{}:{}'.format(self.js_name, serialized_id_params)
log.debug('generated hash = %s' % h)
return h
# TODO: how do
def json_to_event(payload) -> BaseEvent:
try:
prot_name, js_event = payload['method'].split('.')
except KeyError:
log.error('invalid event JSON, must have a "method" key')
return None
except ValueError:
log.error('invalid method name "%s", must contain a module and event joined with a "."' % payload['method'])
return None
module_name = 'chromewhip.protocol.%s' % prot_name.lower()
try:
prot_module = sys.modules[module_name]
except KeyError:
msg = '"%s" is not available in sys.modules!' % module_name
log.error(msg)
raise KeyError(msg)
py_event_name = '{}{}Event'.format(js_event[0].upper(), js_event[1:])
event_cls = getattr(prot_module, py_event_name)
try:
result = event_cls(**payload['params'])
except TypeError as e:
raise TypeError('%s unable to deserialise: %s' % (event_cls.__name__, e))
return result
class ChromeTypeBase:
def to_dict(self):
return self.__dict__
class ChromewhipJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, BaseEvent):
return {'method': obj.js_name, 'params': obj.__dict__}
if isinstance(obj, ChromeTypeBase):
return obj.__dict__
return json.JSONEncoder.default(self, obj)
| 3,651 |
animations/stacked_animation.py
|
onlyjus/qt_examples
| 0 |
2025379
|
import sys
from PyQt4 import QtCore, QtGui
COLOR_LIST = ['red', 'blue', 'green', 'black', 'cyan', 'magenta']
ANIMATION_SPEED = 400
def make_callback(func, *param):
'''
Helper function to make sure lambda functions are cached and not lost.
'''
return lambda: func(*param)
class App(QtGui.QMainWindow):
def __init__(self, app, parent=None):
QtGui.QMainWindow.__init__(self, parent)
# reference to qapp instance
self.app = app
self.animating = False
self.stack_animation = None
self.resize(QtCore.QSize(500, 200))
# widgets
self.mainwidget = QtGui.QWidget()
self.setCentralWidget(self.mainwidget)
self.listwidget = QtGui.QListWidget()
self.listwidget.addItems(COLOR_LIST)
self.listwidget.itemSelectionChanged.connect(self.change_color)
self.stackedwidget = QtGui.QStackedWidget()
for color in COLOR_LIST:
widget = QtGui.QWidget()
widget.setStyleSheet('QWidget{'
' background-color: '+color+';'
'}')
widget.setObjectName(color)
self.stackedwidget.addWidget(widget)
# layouts
self.hlayout = QtGui.QHBoxLayout(self.mainwidget)
self.mainwidget.setLayout(self.hlayout)
self.hlayout.addWidget(self.listwidget)
self.hlayout.addWidget(self.stackedwidget)
def change_color(self):
new_color = str(self.listwidget.currentItem().text())
old_color = str(self.stackedwidget.currentWidget().objectName())
old_index = self.stackedwidget.currentIndex()
new_index = 0
for i in range(self.stackedwidget.count()):
widget = self.stackedwidget.widget(i)
if new_color == str(widget.objectName()):
new_index = i
break
print('Changing from:', old_color, old_index,
'To:', new_color, new_index)
self.animate(old_index, new_index)
def animate(self, from_, to, direction='vertical'):
""" animate changing of qstackedwidget """
# check to see if already animating
if self.animating and self.stack_animation is not None:
self.stack_animation.stop()
from_widget = self.stackedwidget.widget(from_)
to_widget = self.stackedwidget.widget(to)
# get from geometry
width = from_widget.frameGeometry().width()
height = from_widget.frameGeometry().height()
# offset
# bottom to top
if direction == 'vertical' and from_ < to:
offsetx = 0
offsety = height
# top to bottom
elif direction == 'vertical' and from_ > to:
offsetx = 0
offsety = -height
elif direction == 'horizontal' and from_ < to:
offsetx = width
offsety = 0
elif direction == 'horizontal' and from_ > to:
offsetx = -width
offsety = 0
else:
return
# move to widget and show
# set the geometry of the next widget
to_widget.setGeometry(0 + offsetx, 0 + offsety, width, height)
to_widget.show()
# animate
# from widget
animnow = QtCore.QPropertyAnimation(from_widget, "pos")
animnow.setDuration(ANIMATION_SPEED)
animnow.setEasingCurve(QtCore.QEasingCurve.InOutQuint)
animnow.setStartValue(
QtCore.QPoint(0,
0))
animnow.setEndValue(
QtCore.QPoint(0 - offsetx,
0 - offsety))
# to widget
animnext = QtCore.QPropertyAnimation(to_widget, "pos")
animnext.setDuration(ANIMATION_SPEED)
animnext.setEasingCurve(QtCore.QEasingCurve.InOutQuint)
animnext.setStartValue(
QtCore.QPoint(0 + offsetx,
0 + offsety))
animnext.setEndValue(
QtCore.QPoint(0,
0))
# animation group
self.stack_animation = QtCore.QParallelAnimationGroup()
self.stack_animation.addAnimation(animnow)
self.stack_animation.addAnimation(animnext)
self.stack_animation.finished.connect(
make_callback(self.animate_stacked_widget_finished,
from_, to)
)
        self.stack_animation.stateChanged.connect(
            make_callback(self.animate_state_changed,
                          from_, to)
        )
self.animating = True
self.stack_animation.start()
def animate_stacked_widget_finished(self, from_, to):
""" cleanup after animation """
if self.stack_animation.state() == QtCore.QAbstractAnimation.Stopped:
self.stackedwidget.setCurrentIndex(to)
from_widget = self.stackedwidget.widget(from_)
from_widget.hide()
from_widget.move(0, 0)
self.animating = False
def animate_state_changed(self, from_, to):
""" check to see if the animation has been stopped """
self.animate_stacked_widget_finished(from_, to)
if __name__ == '__main__':
qapp = QtGui.QApplication(sys.argv)
app = App(qapp)
app.show()
qapp.exec_()
qapp.deleteLater()
sys.exit()
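# Illustrative note (not part of the original example): change_color() always
# uses the default vertical slide; calling, e.g.,
# self.animate(old_index, new_index, direction='horizontal') would slide the
# stacked pages sideways instead.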
| 5,366 |
navicatGA/fitness_functions_selfies.py
|
lcmd-epfl/NaviCatGA
| 1 |
2026038
|
import logging
from navicatGA.score_modifiers import score_modifier
from navicatGA.wrappers_selfies import (
sc2logp,
sc2ilogp,
sc2mw,
sc2mv,
sc2nmw,
sc2mwilogp,
sc2levenshtein_to_target,
sc2tanimoto_to_target,
sc2krr,
)
from navicatGA.quantum_wrappers_selfies import sc2gap, sc2ehomo, sc2elumo
logger = logging.getLogger(__name__)
def fitness_function_target_property(
target, function_number=1, score_modifier_number=1, parameter=1
):
if function_number == 1: # sc2logp logp
return lambda chromosome: score_modifier(
sc2logp(chromosome), target, score_modifier_number, parameter
)
if function_number == 3: # sc2mw molecular weight
return lambda chromosome: score_modifier(
sc2mw(chromosome), target, score_modifier_number, parameter
)
if function_number == 6: # sc2mv molecular volume
return lambda chromosome: score_modifier(
sc2mv(chromosome), target, score_modifier_number, parameter
)
if function_number == 9: # sc2gap homo-lumo gap
return lambda chromosome: score_modifier(
sc2gap(chromosome), target, score_modifier_number, parameter
)
def fitness_function_target_selfies(target_selfie, function_number=1):
if function_number == 1: # Tanimoto distance
return lambda chromosome: sc2tanimoto_to_target(chromosome, target_selfie)
if function_number == 2: # Levenshtein distance
return lambda chromosome: sc2levenshtein_to_target(chromosome, target_selfie)
def fitness_function_selfies(function_number=1):
if function_number == 1: # sc2logp logp
return lambda chromosome: sc2logp(chromosome)
if function_number == 2: # sc2ilogp inverse logp
return lambda chromosome: sc2ilogp(chromosome)
if function_number == 3: # sc2mw molecular weight
return lambda chromosome: sc2mw(chromosome)
if function_number == 4: # sc2nmw negative molecular weight to avoid singularity
return lambda chromosome: sc2nmw(chromosome)
if function_number == 5: # sc2mwilogp product of mw and inverse logp
return lambda chromosome: sc2mwilogp(chromosome)
if function_number == 6: # sc2mv molecular volume
return lambda chromosome: sc2mv(chromosome)
if function_number == 7: # sc2ehomo homo energy
return lambda chromosome: sc2ehomo(chromosome)
if function_number == 8: # sc2elumo lumo energy
return lambda chromosome: sc2elumo(chromosome)
if function_number == 9: # sc2gap gap
return lambda chromosome: sc2gap(chromosome)
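# Illustrative usage sketch (assumes `chromosome` is whatever the navicatGA
# SELFIES wrappers expect, e.g. a sequence of SELFIES symbols; values are made up):
#
#   fitness = fitness_function_target_property(2.5, function_number=1)
#   score = fitness(chromosome)  # logP-based score pushed towards the target 2.5
#
#   tanimoto = fitness_function_target_selfies(target_selfie, function_number=1)
#   similarity = tanimoto(chromosome)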
| 2,651 |
tunobase/commenting/tasks.py
|
unomena/tunobase
| 0 |
2026048
|
"""
Commenting App
This module provides an interface to Celery tasks for removing
flagged comments.
"""
from celery.decorators import task
from tunobase.commenting import models
@task(ignore_result=True)
def remove_flagged_comments():
"""Delete flagged comments."""
models.CommentModel.objects.remove_flagged_comments()
| 334 |
examples/请求重试.py
|
Jie-Yuan/MeUtils
| 3 |
2025851
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : MeUtils.
# @File : retry_demo
# @Time         : 2021/2/3 12:18 AM
# @Author : yuanjie
# @Email : <EMAIL>
# @Software : PyCharm
# @Description : https://www.cnblogs.com/wuzhibinsuib/p/13443622.html
from meutils.pipe import *
from tenacity import retry, stop_after_delay, stop_after_attempt
a = 0
s = time.time()
# @retry(stop=stop_after_delay(0.001) | stop_after_attempt(100), reraise=True)  # reraise re-raises the original error
# def test_retry():
# global a
# a += 1
# print(f"Request attempt {a}")
# print(time.time() - s)
#
# raise Exception
#
#
# test_retry()
# from tenacity import retry, stop_after_attempt, retry_if_result
#
# def return_last_value(retry_state):
# print("Executing the callback function")
# return retry_state.outcome.result()  # i.e. the original function's return value
#
# def is_false(value):
# return value is False
#
# @retry(stop=stop_after_attempt(3), retry_error_callback=return_last_value,
# retry=retry_if_result(is_false))
# def test_retry():
# print("Waiting to retry.....")
# return False
#
# print(test_retry())
# from meutils.http_utils import request
#
# request()
if __name__ == '__main__':
print(__file__)
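# Minimal sketch of the retry pattern the commented-out examples above describe
# (names and limits are illustrative only):
#
#   @retry(stop=stop_after_attempt(3) | stop_after_delay(1), reraise=True)
#   def flaky():
#       raise RuntimeError("still failing")
#
#   flaky()  # retried up to 3 times (or for 1 second), then the original error is re-raised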
| 1,196 |
exercises/Palindrome/Method2.py
|
Justobioma/master-to-pythonista
| 12 |
2025987
|
# Iterative method: run a loop from 0 to length/2 and compare the first character
# with the last, the second with the second-to-last, and so on.
# If any pair of characters mismatches, the string is not a palindrome.
# Below is the implementation of the above approach:
# function to check string is
# palindrome or not
def isPalindrome(str):
# Run loop from 0 to len/2
for i in range(0, int(len(str)/2)):
if str[i] != str[len(str)-i-1]:
return False
return True
# main function
s = "malayalam"
ans = isPalindrome(s)
if (ans):
print("Yes")
else:
print("No")
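# A shorter, non-iterative check for comparison (illustrative addition, not part
# of the original exercise); slicing reverses the string in one step.
def isPalindromeSlice(s):
    return s == s[::-1]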
| 590 |
bulk-screen-capture.py
|
chkk525/bulk-screen-capture
| 0 |
2026210
|
# -*- coding: utf-8 -*-
import sys
import importlib
# START: Frowned-upon approach to changing the default encoding
# But I intentionally take this approach since it's easy and I believe it is non-problematic in this limited program.
# See discussion detail here.
# https://stackoverflow.com/questions/3828723/why-should-we-not-use-sys-setdefaultencodingutf-8-in-a-py-script
# importlib.reload(sys)
# sys.setdefaultencoding('UTF8')
# END: Frowned-upon approach to changing the default encoding
import io
import os
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from PIL import Image
import time
import errno
from optparse import OptionParser
import re
import hashlib
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def get_filename(text):
    if re.search(r'[^\w\.\-_]', text):
return hashlib.sha256(text.encode('utf-8')).hexdigest()
else:
return text
def save_snapshot(driver, word, idx):
fname = os.path.join(Options.dir, "%s.jpg" % get_filename(Options.prefix + word))
idx = "%03d" % (idx + 1)
if os.path.isfile(fname) and (not Options.force_save):
print((" ! %s: %s exists!" % (idx, fname)))
return
url_template = Engines[Options.engine]
driver.get(url_template % word)
if Options.js_before_save:
with open(Options.js_before_save) as f:
driver.execute_script(f.read())
driver.execute_script("document.body.style.overflow = 'hidden';")
# See: https://gist.github.com/jsok/9502024
screen_png = driver.get_screenshot_as_png()
screen_io = io.BytesIO(screen_png)
image = Image.open(screen_io)
image.convert("RGB").save(fname, 'JPEG', optimize=True)
print((" %s %s: %s" % ('\u2713', idx, fname)))
time.sleep(Options.sleep)
def get_words_from_file(fname):
with open(fname) as f:
content = f.readlines()
content = [x.split("\t")[0].rstrip() for x in content]
return content
def retrieve_snapshot_for_words(driver, words):
for idx, word in enumerate(words):
save_snapshot(driver, word, idx)
Options = {}
Engines = {
"google": 'https://www.google.com/search?gl=us&hl=en&pws=0&gws_rd=cr&tbm=isch&safe=active&q=%s',
"google_unsafe": 'https://www.google.com/search?gl=us&hl=en&pws=0&gws_rd=cr&tbm=isch&q=%s',
"bing": 'https://www.bing.com/images/search?safeSearch=Moderate&mkt=en-US&q=%s',
"bing_unsafe": 'https://www.bing.com/images/search?safeSearch=Off&mkt=en-US&q=%s',
}
def main():
global Options
usage = "usage: %prog [options] word-list"
scroll_to_first_image_of_google = "document.getElementById('islmp').scrollIntoView(true)"
scroll_to_first_carousel_of_google = "document.getElementsByTagName('scrolling-carousel')[0].scrollIntoView()"
parser = OptionParser(usage=usage)
parser.add_option("-d", "--dir", dest="dir", help="Directory to write captured images.", default="slideshow/imgs")
parser.add_option("-j", "--js-before-save", dest="js_before_save", help="Eval js file before save mainly to scroll to element", default="")
parser.add_option("-f", "--force-save", action="store_true", dest="force_save", help="Overwrite existing file if exists", default=False)
parser.add_option("-p", "--prefix", dest="prefix", help="Prefix for filename", default="")
parser.add_option("-w", "--window", dest="window", help="Window size. 1280x720 by default.", default="1280x720")
parser.add_option("-e", "--engine", dest="engine", help="Image search engine to use one of %s" % list(Engines.keys()), default="google")
parser.add_option("-s", "--show", action="store_true", dest="show", help="Do not hide chrome browser", default=False)
parser.add_option("--sleep", dest="sleep", type="float", help="Sleep duration on each take", default=1.0)
(Options, args) = parser.parse_args()
if Options.engine not in Engines:
print(("Engine must be one of %s" % list(Engines.keys())))
exit(1)
chrome_options = webdriver.ChromeOptions()
if not Options.show:
chrome_options.add_argument('--headless')
chrome_options.add_argument('--hide-scrollbars')
driver = webdriver.Chrome(options=chrome_options)
(screen_width, screen_height) = Options.window.split("x")
driver.set_window_size(screen_width, screen_height)
print(Options)
mkdir_p(Options.dir)
for file in args:
print((file + ': start'))
retrieve_snapshot_for_words(driver, get_words_from_file(file))
driver.quit()
main()
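# Illustrative CLI usage (file and engine names are made up):
#   python bulk-screen-capture.py -e bing -w 1920x1080 -d captures words.txt
# Each line of words.txt (the text before the first tab) is searched on the
# chosen image-search engine and a JPEG screenshot of the results page is saved
# to the output directory; existing files are skipped unless -f is given.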
| 4,652 |
usaspending_api/transactions/management/commands/transfer_assistance_records.py
|
g4brielvs/usaspending-api
| 217 |
2026392
|
from django.core.management.base import BaseCommand
from usaspending_api.transactions.agnostic_transaction_loader import AgnosticTransactionLoader
from usaspending_api.transactions.models.source_assistance_transaction import SourceAssistanceTransaction
class Command(AgnosticTransactionLoader, BaseCommand):
    help = "Upsert assistance transactions from a Broker database into a USAspending database"
broker_source_table_name = SourceAssistanceTransaction().broker_source_table
delete_management_command = "delete_assistance_records"
destination_table_name = SourceAssistanceTransaction().table_name
extra_predicate = [{"field": "is_active", "op": "EQUAL", "value": "true"}]
last_load_record = "source_assistance_transaction"
lookback_minutes = 15
shared_pk = "afa_generated_unique"
working_file_prefix = "assistance_load_ids"
broker_full_select_sql = 'SELECT "{id}" FROM "{table}" WHERE "is_active" IS TRUE'
broker_incremental_select_sql = """
select "{id}"
from "{table}"
where "is_active" is true and
"submission_id" in (
select "submission_id"
from "submission"
where "d2_submission" is true and
"publish_status_id" in (2, 3)
{optional_predicate}
)
"""
| 1,391 |
tests/test_meta.py
|
vkvam/fpipe
| 18 |
2025251
|
import hashlib
from unittest import TestCase
from fpipe.gen import Program, Meta
from fpipe.meta import MD5, Path, Size
from fpipe.exceptions import FileDataException
from fpipe.meta.stream import Stream
from test_utils.test_file import ReversibleTestFile, TestStream
class TestMeta(TestCase):
@staticmethod
def __checksum(data: bytes):
sig = hashlib.md5()
sig.update(data)
return sig.hexdigest()
def test_chaining_test_stream(self):
stream_sizes = [2 ** i for i in range(18, 23)]
# Get expected results from FileDataGenerators
md5_of_files = [
self.__checksum(ReversibleTestFile(s).read()) for s in stream_sizes
]
md5_of_reversed_files = [
self.__checksum(bytes(reversed(ReversibleTestFile(s).read()))) for s in stream_sizes
]
# Get checksum for initial files
gen = Meta(MD5, Size).chain(
TestStream(s, f'{s}', reversible=True) for s in stream_sizes
)
# Reverse stdout
gen = Program("rev").chain(gen)
gen = Program("tr -d '\n'").chain(gen)
# Get checksum for reversed files
for f in Meta(MD5, Size).chain(gen):
d = f[Stream].read(1)
# Assert that we are not able to retrieve calculated data before
# files have been completely read
with self.assertRaises(FileDataException):
x = f[MD5]
with self.assertRaises(FileDataException):
x = f[Size]
f[Stream].read()
# Assert that checksum created in two different ways are equal
self.assertEqual(f.parent.parent[MD5], md5_of_files.pop(0))
self.assertEqual(f[MD5], md5_of_reversed_files.pop(0))
self.assertEqual(f[Path], str(f[Size]))
# Assert that we've checked all files
self.assertEqual(len(md5_of_files) + len(md5_of_reversed_files), 0)
| 1,948 |
tests/iscsi/CEPH-10572_10460_10450.py
|
jennkimerson/cephci
| 0 |
2026471
|
import datetime
import logging
from time import sleep
from ceph.parallel import parallel
from tests.iscsi.iscsi_utils import IscsiUtils
log = logging
def run(**kw):
log.info("Running test")
ceph_nodes = kw.get('ceph_nodes')
test_data = kw.get('test_data')
iscsi_util = IscsiUtils(ceph_nodes)
iscsi_initiators = iscsi_util.get_iscsi_initiator_linux()
initiatorname = iscsi_util.get_initiatorname()
iscsi_util.write_multipath(iscsi_initiators)
iscsi_util.write_chap(initiatorname, iscsi_initiators)
no_of_luns = test_data['no_of_luns']
rc = []
device_list = iscsi_util.get_devicelist_luns(no_of_luns)
iscsi_util.create_directory_with_io(
device_list, iscsi_initiators, io_size="1G")
with parallel() as p:
p.spawn(iscsi_util.do_ios, iscsi_initiators, device_list)
p.spawn(do_failover, iscsi_initiators, device_list, ceph_nodes)
for op in p:
rc.append(op)
uuid = []
iscsi_initiators.exec_command(
sudo=True, cmd="cp /etc/fstab /etc/fstab.backup")
out, err = iscsi_initiators.exec_command(sudo=True, cmd="cat /etc/fstab")
output = out.read().decode()
fstab = output.rstrip("\n")
for device in device_list:
out, err = iscsi_initiators.exec_command(
sudo=True, cmd="blkid /dev/mapper/mpa" + device + ""
" -s UUID -o value", long_running=True)
output = out.rstrip("\n")
uuid.append(output)
for i in range(no_of_luns):
temp = "\nUUID=" + uuid[i] + "\t/mnt/" + \
device_list[i] + "/\text4\t_netdev\t0 0"
fstab += temp
fstab_file = iscsi_initiators.write_file(
sudo=True, file_name='/etc/fstab', file_mode='w')
fstab_file.write(fstab)
fstab_file.flush()
mnted_disks = list_mnted_disks(iscsi_initiators)
iscsi_initiators.exec_command(sudo=True, cmd="reboot", check_ec=False)
sleep(200)
iscsi_initiators.reconnect()
iscsi_util.do_iptables_flush()
mnted_disks_after_reboot = list_mnted_disks(iscsi_initiators)
    log.info("i/o exit code: {}, failover exit code: {}".format(rc[0], rc[1]))
log.info("disks before reboot:\n" + str(mnted_disks))
log.info("disks after reboot:\n" + str(mnted_disks_after_reboot))
    log.info("number of disks before reboot:" + str(len(mnted_disks)))
    log.info("number of disks after reboot:" + str(len(mnted_disks_after_reboot)))
if sum(rc) == 0 and mnted_disks_after_reboot == mnted_disks:
iscsi_util.umount_directory(device_list, iscsi_initiators)
iscsi_util.dissconect_linux_initiator(iscsi_initiators)
iscsi_initiators.exec_command(
sudo=True, cmd="mv /etc/fstab.backup /etc/fstab")
return 0
else:
return 1
def list_mnted_disks(iscsi_initiator):
out, err = iscsi_initiator.exec_command(
sudo=True, cmd="df -h | grep '/dev/mapper/mpa'| awk '{print $1}'")
disks = out.read().decode()
disks = disks.rstrip()
disks = sorted(disks.split())
return disks
def do_failover(iscsi_initiators, device_list, ceph_nodes):
sleep(10)
out, err = iscsi_initiators.exec_command(
sudo=True, cmd="multipath -ll |grep -A 9 mpa" + device_list[0] + " "
"|grep -A 1 status=active |awk -F "
'" "'" '{print $(NF - 4)}'")
active_device = out.read().decode()
active_device = active_device.rstrip("\n")
active_device = active_device.split()
out, err = iscsi_initiators.exec_command(
sudo=True, cmd="ls -l /dev/disk/by-path | grep "
"" + active_device[1] + " |awk -F "
'" "'
" '{print $(NF - 2)}' |cut -d: -f1 | uniq", long_running=True)
ip_to_restart = out
ip_to_restart = ip_to_restart.rstrip("\n")
ip_to_restart = ip_to_restart.split("-")
for node in ceph_nodes:
if node.role == "osd":
out, err = node.exec_command(cmd="hostname -I")
output = out.read().decode()
output = output.rstrip()
if output == ip_to_restart[1]:
node.exec_command(sudo=True, cmd="reboot", check_ec=False)
sleep(5)
break
sleep(40)
out, err = iscsi_initiators.exec_command(
sudo=True, cmd="multipath -ll |grep -A 9 mpa" + device_list[0] + " "
"|grep -A 1 status=active |awk -F "
'" "'" '{print $(NF - 4)}'")
active_device_after_reboot = out.read().decode()
active_device_after_reboot = active_device_after_reboot.rstrip("\n")
active_device_after_reboot = active_device_after_reboot.split()
t1 = datetime.datetime.now()
time_plus_5 = t1 + datetime.timedelta(minutes=15)
    log.info("waiting for the failed device to become active")
while (1):
t2 = datetime.datetime.now()
if (t2 <= time_plus_5):
sleep(40)
out, err = iscsi_initiators.exec_command(
sudo=True, cmd="multipath -ll |grep -A 9 mpa"
"" + device_list[0] + " |grep -B 1 " + active_device[1] + " "
"|awk -F "
'" "'
" '{print $(NF - 2)}'")
active_device_status = out.read().decode()
active_device_status = active_device_status.rstrip("\n")
active_device_status = active_device_status.split()
print(active_device_status)
if (active_device_status[1] == "active"):
rc = "active"
break
else:
for node in ceph_nodes:
if node.role == "osd":
out, err = node.exec_command(cmd="hostname -I")
output = out.read().decode()
output = output.rstrip()
if output == ip_to_restart[1]:
node.exec_command(sudo=True, cmd="iptables -F")
sleep(5)
else:
            log.info("failed device didn't come back to active")
rc = "not"
print(active_device_status)
print(active_device)
print(active_device_after_reboot)
if active_device[1] != active_device_after_reboot[1] and rc == "active":
return 0
else:
return 1
| 6,212 |
AI Based Augmented Writing Tools/code/process.py
|
AozakiHayate/RESEARCH-CV-NLP
| 0 |
2026052
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import pandas as pd
import MeCab
# Data preprocessing
def preprocess(mecab, doc, stopwords):
doc = doc.strip()
if len(doc) > 0:
        seg_line = mecab.parse(doc)  # tokenize
# print(seg_line)
return seg_line
if __name__ == '__main__':
    # Load the stopword list
# stopwords = [w.strip() for w in open('data/stopwordslist.txt', 'r', encoding='utf-8').readlines()]
stopwords = []
    # Step 1 is tokenization; step 2 is stopword removal
    # POS filtering comes after tokenization, depending on the task
    # Load the dataset
data = pd.read_csv("ldgourmet/ratings.csv")
    data_text = data['body'].tolist()  # review text dataset
# print(data_text)
    # mecab = MeCab.Tagger("-Ochasen")  # with POS tagging
    # wakati (word segmentation)
    mecab = MeCab.Tagger("-Owakati")  # segmentation only
    # Preprocess the data
corpus = [preprocess(mecab, line, stopwords) for line in data_text if len(line) > 0]
print(corpus)
    # Write the corpus to a file
with open("result/corpus.text", 'w', encoding="utf-8") as f:
for text in corpus:
f.write(text + '\n')
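# Illustrative "-Owakati" output this script relies on (assumes a working MeCab
# install with a Japanese dictionary; the sample sentence is arbitrary):
#
#   MeCab.Tagger("-Owakati").parse("今日はいい天気です")
#   # -> "今日 は いい 天気 です \n"  (tokens separated by spaces)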
| 1,016 |
tools/python/get_submodules.py
|
codemzs/onnxruntime
| 4 |
2025712
|
from operator import attrgetter
from pathlib import Path
import argparse
import configparser
import json
import re
import pygit2
def format_component(submod):
return {"component": {"type": "git", "git": {"commitHash": str(submod.head_id), "repositoryUrl": submod.url}}}
def lookup_submodule(repo, submodule_path):
submodule = repo.lookup_submodule(submodule_path)
try:
# Some submodules have names which don't correspond to the actual path in the repo
# (e.g. 'git submodule init' was called with the --name option, or the submodule
# was moved and the old name was kept). listall_submodules() returns submodule paths,
# but pygit up to 1.0.1 requires the submodule name (not the path) in lookup_submodule
# to be able to access the URL and other properties.
# This seems to be a bug in pygit2, since its documentation says the submodules can
# be opened by path.
# If accessing the URL throws a RuntimeError, we get the submodule name manually from
# .gitmodules.
submodule.url
return submodule
except RuntimeError:
pass
config = configparser.ConfigParser()
config.read(Path(repo.workdir, '.gitmodules'))
for section in config.sections():
if config[section]['path'] == submodule_path:
name = re.fullmatch('submodule "(.*)"', section).group(1)
submodule = repo.lookup_submodule(name)
return submodule
raise NotImplementedError() # should not be reached
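# Illustration of the name/path mismatch handled above (hypothetical .gitmodules
# entry; the names are made up):
#
#   [submodule "old_name"]
#       path = cmake/external/new_location
#       url = https://github.com/example/dep.git
#
# listall_submodules() yields the path "cmake/external/new_location", while
# pygit2 <= 1.0.1 needs the name "old_name" in lookup_submodule() to expose the
# URL, which is what the .gitmodules parsing above recovers.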
def process_component(repo):
return [lookup_submodule(repo, submod) for submod in repo.listall_submodules()]
def recursive_process(base_repo):
processed_subs = []
repos_to_process = [base_repo]
while repos_to_process:
repo = repos_to_process.pop()
submodules = process_component(repo)
processed_subs.extend(submodules)
repos_to_process.extend([mod.open() for mod in submodules])
return {"Registrations":[format_component(component) for component in processed_subs]}
def main(repo_path, output_file):
repo = pygit2.Repository(repo_path)
registrations = recursive_process(repo)
with open(output_file, 'w') as f:
json.dump(registrations, f, indent=4, sort_keys=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("base_repository", help="path to base repository to get registrations for.")
parser.add_argument("-o", "--output", help="output file name.", default="cgmanifest.json")
args = parser.parse_args()
main(args.base_repository, args.output)
| 2,608 |
archive/colorgenetics_v2.py
|
austinpursley/audio-evo-algo
| 0 |
2025348
|
# color_evolution_v5.py
# <NAME>
# 5/6/2017
# Using evolvetools with colors.
import evolvetools as et
import itertools
import random
import math
class Palette:
# class for color values and functions to change those values.
def __init__(self):
self.num_colors = 0
self.palette = []
self.rgb_palette = []
def set_num_colors(self, num_colors):
self.num_colors = num_colors
def create_palette_rand(self, num_colors):
        self.palette.clear()
        self.rgb_palette.clear()
self.num_colors = num_colors
for i in range(0, self.num_colors):
self.rgb_palette.append([])
for j in range(0, 3):
self.rgb_palette[i].append(random.randint(0,255))
self.palette.append("#%02x%02x%02x" % ((self.rgb_palette[i])[0],
(self.rgb_palette[i])[1],
(self.rgb_palette[i])[2]))
def rgb_child(rgb_parent1, rgb_parent2, mutate=0):
"""
    From two parent color values, make a child color.
:param rgb_parent1: List of int values corresponding to RGB colors.
:param rgb_parent2: Second list of RGB colors.
:param mutate: Optional probability value for if bit will mutate.
:return: a RGB color list with bits from both parents.
"""
rgb_color = [0, 0, 0]
for j in range(0, 3):
rgb_color[j] = et.offspring(rgb_parent1[j], rgb_parent2[j],mutate)
return rgb_color
def palette_child(parent1, parent2, mutate = 0):
"""
    A palette is a list of colors. From two parent palettes, make a similar child palette.
:param parent1: A list of RGB colors.
:param parent2: A list of RGB colors.
:param mutate: Optional probability value for if bit will mutate.
:return: A palette with bits from from both parents.
"""
plte_child = Palette()
plte_child.num_colors = parent1.num_colors
for i in range(plte_child.num_colors):
mom = parent1.rgb_palette[i]
dad = parent2.rgb_palette[i]
child = rgb_child(mom, dad, mutate)
plte_child.rgb_palette.append(child)
plte_child.palette.append("#%02x%02x%02x"%(child[0],child[1],child[2]))
    return plte_child
def palette_child_comb(parents,min_ospring_num, mutate = 0):
"""
Return a child for all combination of parents from a list of parents.
:param parents: list of parents.
:param min_ospring_num: A minimum number of children to make.
:param mutate: Optional probability value for if bit will mutate.
:return: A list of children made from parents.
"""
palette_child_list = []
L = len(parents)
parent_pair_num = int(math.factorial(L)/math.factorial(L-2))
opp = int(round(min_ospring_num / parent_pair_num)) + 1
for p in itertools.permutations(parents, 2):
mom = p[0]
dad = p[1]
for i in range(opp):
palette_child_list.append(palette_child(mom, dad, mutate))
return palette_child_list
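# Illustrative usage sketch (not part of the original module; assumes
# evolvetools' offspring() accepts the integer channel values used above):
#
#   mom, dad = Palette(), Palette()
#   mom.create_palette_rand(5)
#   dad.create_palette_rand(5)
#   child = palette_child(mom, dad, mutate=0.01)
#   print(child.palette)  # five "#rrggbb" strings mixing bits from both parents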
| 3,072 |
flod_sak/api/rapport_resource.py
|
Trondheim-kommune/Tilskuddsbasen
| 0 |
2026789
|
# -*- coding: utf-8 -*-
from flask import request
from flask.ext.restful import abort, marshal
from flask.ext.bouncer import requires, ensure, POST, PUT, GET
from api.fields import rapport_fields
from base_resource import BaseResource
from domain.models import Rapport
from repo.rapport_repo import RapportRepo
from validation.rapport_validator import RapportValidator
class RapportResource(BaseResource):
repo = RapportRepo()
type_name = "rapport"
@requires(GET, Rapport)
def get(self, soknad_id=None, rapport_id=None):
if rapport_id:
rapport = self.get_by_id(rapport_id)
if rapport.soknad_id != soknad_id:
abort(403)
ensure(GET, rapport)
return marshal(rapport, rapport_fields)
else:
if soknad_id:
rapporter = self.repo.find_by_where("soknad_id", soknad_id)
return marshal(rapporter, rapport_fields)
abort(404)
@requires(PUT, Rapport)
def put(self, soknad_id=None, rapport_id=None):
data = request.get_json()
rapport = self.get_by_id(rapport_id)
if rapport.soknad_id != soknad_id:
abort(403)
ensure(PUT, rapport)
self.validate_put_fields(data)
rapport = self.repo.save(rapport, data)
return marshal(rapport, rapport_fields)
@staticmethod
def validate_put_fields(data):
validator = RapportValidator(data).validate_put_fields()
if validator.has_errors():
abort(400, __error__=validator.errors)
@requires(POST, Rapport)
def post(self, soknad_id):
rapport = Rapport()
rapport.soknad_id = soknad_id
ensure(POST, rapport)
rapport = self.repo.create(rapport)
return marshal(rapport, rapport_fields), 201
| 1,823 |
play.py
|
h-mayorquin/sequence_storage_capacity
| 0 |
2026605
|
from pprint import pprint
import numpy as np
m = 12
k = 4
o = 2
p = 3
# Initialize arrays
p_array = np.ones(m) * p
overlap_array = np.ones(m) * (o + 1)
overlaps_dic = {}
array = np.array([], dtype='int').reshape(0, m)
sequences = []
index = 0
capacity = 0
flag = True
#while(flag):
for i in range(2):
# Initialize the array
array_line = np.zeros(m, dtype='int')
# Throw a sequence
    building_sequence_index = 0
    while building_sequence_index < k:
# Get p_index
p_index = index
# Check p_index
        p_flag = p_array[p_index] > 0
# Get o index
# o_index = index
# Check overlap index
# if overlap_array[o_index] > 0:
# o_flag = True
# Check for local overlaps
overlaps_dic[capacity] = {index:0 for index in range(0, capacity)}
o_dic = overlaps_dic[capacity]
        # Check every element of past sequences
local_overlap_flag = True
for sequence_index in range(capacity):
if o_dic[sequence_index] > o:
local_overlap_flag = False
# Lay the element
if p_flag and local_overlap_flag:
array_line[index] = capacity + 1
p_array[index] -= 1
            building_sequence_index += 1
index += 1
else:
index += 1
# Add overlap
for sequence_index in range(capacity):
# If there is an element
if array[sequence_index, index] == 1:
o_dic[sequence_index] += 1
# After finishing adding the sequence
capacity += 1
array = np.vstack((array, array_line))
pprint(array)
| 1,700 |
app.py
|
cipherLord/Kronos
| 20 |
2026838
|
from flask import Flask, request,render_template, url_for, redirect, send_file, send_from_directory
import string
import json
from SearchGrades import SearchGrades
app = Flask(__name__)
numberRecords = 0
jsonFile = open("data/Grades/yearWiseGrades.json", "r")
data = json.load(jsonFile)
jsonFile.close()
courses = []
with open("data/courses.json","r") as f:
courses_having_data = json.load(f)
for key in data:
string = ""
try:
string = key + " : " + courses_having_data[key]["name"]
except:
string = key
courses.append(string)
Grades = {}
@app.route('/', methods = ['GET','POST'])
def home():
if request.method == "POST":
code = request.form.get('getCode')
code = code.upper()
code = "".join(code.split())
code = code[:7]
Courselink = ""
with open("data/data_file.json","r") as read_file:
data = json.load(read_file)
for key in data.keys():
if(key == code ):
Courselink = "https://wiki.metakgp.org" + data[key]
Grades = SearchGrades(code)
numberRecords = len(Grades)
if( Grades == 'NA'):
if len (code) == 7 and code[:2].isalpha() and code[-5:].isdigit() :
return render_template('kronos.html',courseCode = code, Grades = Grades, result = "no-data", courses = courses)
else:
return render_template('kronos.html',courseCode = code, Grades = Grades, result = "invalid-code", courses = courses)
else:
return render_template('kronos.html',courseCode = code, Grades = Grades, result = "show-grades",courses = courses, Cwikilink = Courselink)
else:
Grades = {}
return render_template('kronos.html',courseCode = '', Grades = Grades, result = "on-start",courses = courses)
if __name__=="__main__" :
app.run()
else:
print(__name__)
| 1,920 |
docker-branch-tagging/docker_branch_tagging.py
|
Tethik/lame-cli-programs
| 1 |
2026938
|
import os
import json
import sys
import re
import subprocess
from pathlib import Path
import click
TEMPLATE = """
{
"develop": ["latest","{git_branch}"],
"feature/(.+)": ["{git_branch}"],
"master": ["master","{git_latest_version_tag}"]
}
"""
CIRCLECI_TEMPLATE = """
{
"develop": ["latest","develop-{CIRCLE_BUILD_NUM}","{git_branch}"],
"feature/(.+)": ["{git_branch}"],
"master": ["master","master-{CIRCLE_BUILD_NUM}","{git_latest_version_tag}"]
}
"""
FILENAME = '.docker-branch-tagging'
@click.group()
@click.pass_context
def cli(ctx):
f = Path(FILENAME)
if not f.exists():
init.invoke(ctx)
ctx.obj['config'] = json.loads(f.read_text())
@cli.command()
@click.option('--circleci', default=False, is_flag=True)
def init(circleci=False):
template = CIRCLECI_TEMPLATE if circleci else TEMPLATE
Path(FILENAME).write_text(template.strip())
click.secho('Wrote sample {FILENAME} file'.format(FILENAME=FILENAME), color="blue")
def tag_name(template, extras):
    values = dict(os.environ)  # allow env vars (e.g. CIRCLE_BUILD_NUM) in tag templates
    values.update(extras)
    return template.format(**values).replace("/", "-")
def git_branch():
cmd = 'git rev-parse --abbrev-ref HEAD'
return subprocess.check_output([cmd], shell=True).decode('utf-8').strip()
def git_latest_version_tag():
try:
cmd = 'git describe --abbrev=0 --match=[0-9]*.[0-9]*.[0-9]*'
return subprocess.check_output([cmd], shell=True).decode('utf-8').strip()
except:
return 'unknown_version'
def tag_names(ctx, image_name):
extras = {
'git_branch': git_branch(),
'git_latest_version_tag': git_latest_version_tag()
}
print(extras['git_branch'])
for branch_name_pattern, tags in ctx.obj['config'].items():
if re.match(branch_name_pattern, extras['git_branch']):
return ["{image_name}:{tag_name_text}".format(image_name=image_name, tag_name_text=tag_name(template, extras)) for template in tags]
@cli.command()
@click.argument('image_name')
@click.option('--dockerfile')
@click.option('--build-arg', multiple=True)
@click.pass_context
def build(ctx, image_name, dockerfile=None, build_arg=None):
tags = tag_names(ctx, image_name)
if not tags:
print("No build needed.")
return
if dockerfile:
dockerfile = "-f {dockerfile}".format(dockerfile=dockerfile)
else:
dockerfile = ""
tag_text = " -t ".join(tags)
options = ["-t {tag_text}".format(tag_text=tag_text)]
if build_arg:
for b in build_arg:
options.append("--build-arg {b}".format(b=b))
cmd = 'docker build {dockerfile} {options} .'.format(dockerfile=dockerfile, options=" ".join(options))
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit(1)
@cli.command()
@click.argument('image_name')
@click.pass_context
def push(ctx, image_name):
tags = tag_names(ctx, image_name)
if not tags:
print("No push needed.")
return
for tag in tags:
cmd = "docker push {tag}".format(tag=tag)
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit(1)
def main():
cli(obj={})
if __name__ == "__main__":
main()
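# Illustrative CLI usage (the installed command name is an assumption; running
# the module directly behaves the same way):
#   python docker_branch_tagging.py init --circleci
#   python docker_branch_tagging.py build myorg/myimage --dockerfile Dockerfile --build-arg FOO=bar
#   python docker_branch_tagging.py push myorg/myimage
# Image tags are derived from the current git branch via the patterns stored in
# the .docker-branch-tagging file.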
| 3,209 |
datasets/pedes.py
|
wettera/PersonReID-NAFS
| 47 |
2026022
|
import torch.utils.data as data
import numpy as np
import os
import pickle
import h5py
import json
from PIL import Image
from utils.directory import check_exists
from scipy.misc import imread, imresize
import datasets.preprocess as preprocess
class Namespace:
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class CuhkPedes(data.Dataset):
'''
Args:
root (string): Base root directory of dataset where [split].pkl and [split].h5 exists
split (string): 'train', 'val' or 'test'
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed vector, e.g. 'transform.RandomCrop'.
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
'''
def __init__(self, image_root, anno_root, split, max_length, transform=None, target_transform=None, \
cap_transform=None, vocab_path='', min_word_count=0):
self.image_root = image_root
self.anno_root = anno_root
self.max_length = max_length
self.transform = transform
self.target_transform = target_transform
self.cap_transform = cap_transform
self.split = split.lower()
self.vocab_path = vocab_path
self.min_word_count = min_word_count
if not check_exists(self.image_root):
raise RuntimeError('Dataset not found or corrupted.' +
'Please follow the directions to generate datasets')
print('Reading data from json')
data = self.get_data_from_json()
self.read_data(data)
def add_caption_to_data(self, split_data, data):
fname_dict = {jj['file_path']:jj for jj in split_data}
judge = {jj['file_path']: 0 for jj in split_data}
for fname in data['images_path']:
judge[fname] = judge[fname] + 1
caption = []
for fname in data['images_path']:
jj = fname_dict[fname]
caption.append(jj['captions'][judge[fname] - 1])
judge[fname] = judge[fname] - 1
assert len(caption) == len(data['images_path'])
data['captions'] = caption
return data
def get_data_from_json(self):
args = Namespace(min_word_count=self.min_word_count, remove_stopwords = None, out_root=None)
split_data = self.load_split(self.split)
if self.vocab_path == '':
print('Building vocabulary...')
vocab = preprocess.build_vocab(split_data, args, write=False)
else:
print('Loading vocabulary from {}'.format(self.vocab_path))
vocab = self.load_vocab(self.vocab_path)
split_metadata = preprocess.process_metadata(self.split, split_data, args, write=False)
split_decodedata = preprocess.process_decodedata(split_metadata, vocab)
data = preprocess.process_dataset(self.split, split_decodedata, args, write=False)
data = self.add_caption_to_data(split_data, data)
return data
def load_split(self, split):
split_root = os.path.join(self.anno_root, split + '_reid.json')
with open(split_root, 'r') as f:
split_data = json.load(f)
print('load {} data from json done'.format(split))
return split_data
def load_vocab(self, vocab_path):
with open(os.path.join(vocab_path), 'rb') as f:
word_to_idx = pickle.load(f)
vocab = preprocess.Vocabulary(word_to_idx, len(word_to_idx))
print('load vocabulary done')
return vocab
def read_data(self, data):
if self.split == 'train':
self.train_labels = data['labels']
self.train_captions = data['captions']
self.train_images = data['images_path']
elif self.split == 'val':
self.val_labels = data['labels']
self.val_captions = data['captions']
self.val_images = data['images_path']
elif self.split == 'test':
self.test_labels = data['labels']
self.test_captions = data['captions']
self.test_images = data['images_path']
unique = []
new_test_images = []
for test_image in self.test_images:
if test_image in new_test_images:
unique.append(0)
else:
unique.append(1)
new_test_images.append(test_image)
self.unique = unique
else:
raise RuntimeError('Wrong split which should be one of "train","val" or "test"')
def __getitem__(self, index):
"""
Args:
index(int): Index
Returns:
tuple: (images, labels, captions)
"""
if self.split == 'train':
img_path, caption, label = self.train_images[index], self.train_captions[index], self.train_labels[index]
elif self.split == 'val':
img_path, caption, label = self.val_images[index], self.val_captions[index], self.val_labels[index]
else:
img_path, caption, label = self.test_images[index], self.test_captions[index], self.test_labels[index]
middle_path = "CUHK-PEDES/imgs"
if middle_path not in img_path:
img_path = os.path.join(self.image_root, middle_path, img_path)
else:
img_path = os.path.join(self.image_root, img_path)
img = imread(img_path)
img = imresize(img, (384,128))
if len(img.shape) == 2:
img = np.dstack((img,img,img))
img = Image.fromarray(img)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
label = self.target_transform(label)
return img, caption, label
def __len__(self):
if self.split == 'train':
return len(self.train_labels)
elif self.split == 'val':
return len(self.val_labels)
else:
return len(self.test_labels)
| 6,224 |
rental_property/migrations/0016_rentalunit_rent_amount.py
|
shumwe/rental-house-management-system
| 1 |
2025561
|
# Generated by Django 4.0.2 on 2022-03-24 10:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rental_property', '0015_alter_maintanancenotice_maintanance_status'),
]
operations = [
migrations.AddField(
model_name='rentalunit',
name='rent_amount',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True),
),
]
| 468 |
nfv/nfv-vim/nfv_vim/tables/_tenant_table.py
|
SidneyAn/nfv
| 2 |
2025876
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_vim import database
from nfv_vim.tables._table import Table
_tenant_table = None
class TenantTable(Table):
"""
Tenant Table
"""
def __init__(self):
super(TenantTable, self).__init__()
def _persist_value(self, value):
database.database_tenant_add(value)
def _unpersist_value(self, key):
database.database_tenant_delete(key)
def tables_get_tenant_table():
"""
Get the tenant table
"""
return _tenant_table
def tenant_table_initialize():
"""
Initialize the tenant table
"""
global _tenant_table
_tenant_table = TenantTable()
_tenant_table.persist = False
tenants = database.database_tenant_get_list()
for tenant in tenants:
_tenant_table[tenant.uuid] = tenant
_tenant_table.persist = True
def tenant_table_finalize():
"""
Finalize the tenant table
"""
global _tenant_table
del _tenant_table
| 1,035 |
tests/test_base.py
|
Salaah01/df-data-cleaner
| 5 |
2026304
|
"""Unitests for the `base` module."""
from unittest import TestCase
from types import SimpleNamespace
import pandas as pd
from pandas_data_cleaner import base, exceptions
from tests.utils import strategies
class TestCleaningStrategy(TestCase):
"""Unittests for the `CleaningStrategy` class."""
    def test_info(self):
        """Test that the `info` method returns a formatted version of the
        docstring.
"""
self.assertIsInstance(base.CleaningStrategy.info(), str)
def test_can_use_cleaner_pass(self):
"""Test that the `can_use_cleaner` method indicates `True` when a
model has the attributes the method expects.
"""
instance = SimpleNamespace(
dataframe=None,
required_options=['option_1'],
option_1=1,
)
self.assertEqual(
base.CleaningStrategy.can_use_cleaner(instance),
(True, [])
)
def test_can_use_cleaner_fail(self):
"""Test that the `can_use_cleaner` method indicates `False` when a
model does not have the attributes the method expects.
"""
instance = SimpleNamespace(
dataframe=None,
required_options=['option_1'],
)
can_use, missing_opts = base.CleaningStrategy.can_use_cleaner(instance)
self.assertFalse(can_use, missing_opts)
self.assertEqual(len(missing_opts), 1)
def test_validate_options_pass(self):
"""Test that the `validate_options` method does not raise an error when
the model has the attributes the method expects.
"""
instance = SimpleNamespace(
can_use_cleaner=lambda: (True, []),
)
base.CleaningStrategy.validate_options(instance)
def test_validate_options_fail(self):
"""Test that the `validate_options` method raises an error when the
model does not have the attributes the method expects.
"""
instance = SimpleNamespace(
dataframe=None,
model=SimpleNamespace(),
can_use_cleaner=lambda: (False, ["Failed"]),
)
with self.assertRaises(exceptions.MissingOptionsError):
base.CleaningStrategy.validate_options(instance)
class TestCleanData(TestCase):
"""Unittests for the `clean_data` function."""
def test_can_apply_strategies(self):
"""Test that the method is able to apply various cleaning strategies on
a dataframe.
"""
# Create a test dataframe.
dataframe = pd.DataFrame({
"col1": [1, 2, 3],
"col2": [4, 5, 6],
"col3": [7, 8, 9],
})
        # Apply both strategies and check that they are applied in the given
        # order.
results = base.clean_data(
dataframe,
[strategies.ReverseStrategy, strategies.FirstXRowsStrategy],
x_top_rows=2
).reset_index(drop=True)
expected_results = pd.DataFrame({
"col1": [3, 2],
"col2": [6, 5],
"col3": [9, 8],
})
self.assertTrue(
results.equals(expected_results),
f"\nActual:\n{results}\nExpected:\n{expected_results}",
)
def test_no_mutations(self):
"""Test that the original dataframe is not mutated."""
dataframe = pd.DataFrame({
"col1": [1, 2, 3],
"col2": [4, 5, 6],
"col3": [7, 8, 9],
})
dataframe_copy = dataframe.copy()
base.clean_data(dataframe, [strategies.ReverseStrategy])
self.assertTrue(dataframe.equals(dataframe_copy))
| 3,661 |