Dataset schema (each record carries star, issue, and fork metadata plus the file content and three line statistics):

| column | dtype | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 6 to 782k |
| ext | string | 7 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 to 237 |
| max_stars_repo_name | string | length 6 to 72 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | |
| max_stars_count | int64 | 1 to 53k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 4 to 184 |
| max_issues_repo_name | string | length 6 to 72 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | |
| max_issues_count | int64 | 1 to 27.1k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 4 to 184 |
| max_forks_repo_name | string | length 6 to 72 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | |
| max_forks_count | int64 | 1 to 12.2k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 6 to 782k |
| avg_line_length | float64 | 2.75 to 664k |
| max_line_length | int64 | 5 to 782k |
| alphanum_fraction | float64 | 0 to 1 |

Each record below is shown as a compact metadata block, then the file content, then its avg_line_length | max_line_length | alphanum_fraction line.

hexsha: 59199fd5c7bd0ee285b4d929307bd829186a5fe3
size: 382 | ext: py | lang: Python
stars:  python/encoding/ascii_characters.py @ zeroam/TIL (43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
whitespace = " \t\n\r\v\f"
ascii_lowercase = "abcdefghijklmnopqrstuvwxyz"
ascii_uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
ascii_letters = ascii_lowercase + ascii_uppercase
digits = "0123456789"
hexdigits = digits + "abcdef" + "ABCDEF"
octdigits = "01234567"
punctuation = r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
printable = digits + ascii_letters + punctuation + whitespace
avg_line_length: 38.2 | max_line_length: 62 | alphanum_fraction: 0.685864
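
The constants in ascii_characters.py above mirror CPython's string module one for one; a minimal cross-check, assuming the constants above are in scope:

import string

assert whitespace == string.whitespace
assert ascii_letters == string.ascii_letters
assert hexdigits == string.hexdigits
assert octdigits == string.octdigits
assert punctuation == string.punctuation
assert printable == string.printable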

hexsha: 596b3ddf0ad9b02c9bc31444d039b941f788aea2
size: 1,280 | ext: py | lang: Python
stars:  rbac/server/blockchain_transactions/user_transaction.py @ fthornton67/sawtooth-next-directory (79479afb8d234911c56379bb1d8abf11f28ef86d) | licenses: ["Apache-2.0"] | count: 75 | 2018-04-06T09:13:34.000Z to 2020-05-18T18:59:47.000Z
issues: same as stars | count: 989 | 2018-04-18T21:01:56.000Z to 2019-10-23T15:37:09.000Z
forks:  same as stars | count: 72 | 2018-04-13T18:29:12.000Z to 2020-05-29T06:00:33.000Z
# Copyright 2019 Contributors to Hyperledger Sawtooth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
""" Common Transaction Creation
"""
from rbac.common.user.delete_user import DeleteUser
from rbac.common.sawtooth import batcher
def create_delete_user_txns(txn_key, next_id, txn_list):
"""Create the delete transactions for user."""
user_delete = DeleteUser()
message = user_delete.make(signer_keypair=txn_key, next_id=next_id)
payload = user_delete.make_payload(
message=message, signer_keypair=txn_key, signer_user_id=next_id
)
transaction = batcher.make_transaction(payload=payload, signer_keypair=txn_key)
txn_list.extend([transaction])
return txn_list
avg_line_length: 41.290323 | max_line_length: 83 | alphanum_fraction: 0.719531

hexsha: abef6972d5c452e516a9a095b4daeee9157c3607
size: 946 | ext: py | lang: Python
stars:  config.py @ WhereGroup/mara-ptm-importer (bca8f134be066c0966b360f8bc15bc3139261ac5) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
# general configuration
LOCAL_OTP_PORT = 8088 # port for OTP to use, HTTPS will be served on +1
TEMP_DIRECTORY = "mara-ptm-temp"
PROGRESS_WATCHER_INTERVAL = 5 * 60 * 1000 # milliseconds
JVM_PARAMETERS = "-Xmx8G" # 6-8GB of RAM is good for bigger graphs
# itinerary filter parameters
CAR_KMH = 50
CAR_TRAVEL_FACTOR = 1.4 # as the crow flies vs street, how much longer is realistic
# note: the factor that public transport may take longer is configured in the GUI
# itinerary parameters
ALLOWED_TRANSIT_MODES = ["WALK", "BUS", "TRAM", "SUBWAY", "RAIL"]
MAX_WALK_DISTANCE = 1000 # meters
OTP_PARAMETERS_TEMPLATE = "&".join([
"fromPlace=1:{origin}",
"toPlace=1:{destination}",
"time=00%3A00",
"date={date}",
"mode=TRANSIT%2CWALK",
"maxWalkDistance={max_walk_distance}",
"arriveBy=false",
"searchWindow=86400",
"numOfItineraries=99999",
"keepNumOfItineraries=99999",
"showIntermediateStops=true",
])
avg_line_length: 33.785714 | max_line_length: 84 | alphanum_fraction: 0.711416
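
A sketch of how the request template above might be filled for a locally running OTP instance; the stop ids and date are hypothetical, and the /otp/routers/default/plan path is OTP's classic planner endpoint rather than something this config file guarantees:

query = OTP_PARAMETERS_TEMPLATE.format(
    origin="123",                        # hypothetical stop id
    destination="456",                   # hypothetical stop id
    date="2021-06-01",                   # hypothetical date
    max_walk_distance=MAX_WALK_DISTANCE,
)
url = f"http://localhost:{LOCAL_OTP_PORT}/otp/routers/default/plan?{query}"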

hexsha: e9b4fb030c17c63682095e9e29f2141ad4cde05a
size: 98 | ext: py | lang: Python
stars:  apps/community/apps.py @ OpenAdaptronik/Rattler (c3bdde0ca56b6d77f49bc830fa2b8bb41a26bae4) | licenses: ["MIT"] | count: 2 | 2018-05-18T08:38:29.000Z to 2018-05-22T08:26:09.000Z
issues: apps/community/apps.py @ IT-PM-OpenAdaptronik/Webapp (c3bdde0ca56b6d77f49bc830fa2b8bb41a26bae4) | licenses: ["MIT"] | count: 118 | 2017-10-31T13:45:09.000Z to 2018-02-24T20:51:42.000Z
forks:  apps/community/apps.py @ OpenAdaptronik/Rattler (c3bdde0ca56b6d77f49bc830fa2b8bb41a26bae4) | licenses: ["MIT"] | count: null
from django.apps import AppConfig
class CommunityConfig(AppConfig):
name = 'apps.community'
avg_line_length: 16.333333 | max_line_length: 33 | alphanum_fraction: 0.765306

hexsha: 75903d457a23aa0d54cdcae8e7599516cb0be5f6
size: 4,643 | ext: py | lang: Python
stars:  logya/core.py @ yaph/logya (9647f58a0b8653b56ad64332e235a76cab3acda9) | licenses: ["MIT"] | count: 12 | 2015-03-04T03:23:56.000Z to 2020-11-17T08:09:17.000Z
issues: logya/core.py @ elaOnMars/logya (a9f256ac8840e21b348ac842b35683224e25b613) | licenses: ["MIT"] | count: 78 | 2015-01-05T11:40:41.000Z to 2022-01-23T21:05:39.000Z
forks:  logya/core.py @ elaOnMars/logya (a9f256ac8840e21b348ac842b35683224e25b613) | licenses: ["MIT"] | count: 6 | 2015-04-20T06:58:42.000Z to 2022-01-31T00:36:29.000Z
# -*- coding: utf-8 -*-
import yaml
from collections import ChainMap
from os import walk
from pathlib import Path
from sys import exit
from typing import Dict
from logya.content import read, process_extensions
from logya.template import init_env
from logya.util import load_yaml, paths, slugify
class Logya:
"""Object to store data such as site index and settings."""
def __init__(self, dir_site: str = '.', verbose: bool = False) -> None:
"""Set required logya object properties."""
self.verbose = verbose
self.paths = paths(dir_site)
try:
self.settings = load_yaml(self.paths.root.joinpath('site.yaml').read_text())
except FileNotFoundError:
exit('Error: The site configuration file site.yaml was not found.')
except yaml.scanner.ScannerError:
exit('Error: The site configuration file site.yaml could not be parsed.')
# Initialize index and collections so scripts can generate indexed content before build.
self.doc_index: Dict[str, dict] = {}
self.collections = self.settings.get('collections', {})
for coll in self.collections.values():
coll['index'] = {}
# Simplify access to these settings.
extensions = self.settings.get('extensions', {})
self.jinja_extensions = extensions.get('jinja', [])
self.markdown_extensions = extensions.get('markdown', [])
self.languages = self.settings.get('languages', {})
def build(self):
"""Read content and initialize template environment."""
# Read all recognized files in content directory and create document index and collections.
self.read_content()
# Create a ChainMap view from collection indexes.
self.collection_index = ChainMap(*[coll['index'] for coll in self.collections.values()])
# Initialize template env.
init_env(self)
def info(self, msg: str):
"""Print message if in verbose mode."""
if self.verbose:
print(msg)
def read_content(self):
"""Read content and update index and collections.
Previously indexed content can exist. If a file inside the content directory has the same URL as already indexed
content, the existing content will be replaced.
"""
for root, _, files in walk(self.paths.content):
for f in files:
path = Path(root, f)
if path.suffix not in process_extensions:
continue
if doc := read(path, path.relative_to(self.paths.content), self.markdown_extensions):
if self.collections:
self.update_collections(doc)
self.doc_index[doc['url']] = {'doc': doc, 'path': path}
def update_collections(self, doc: dict):
"""Update collections index for given doc."""
# Iterate over copy so attributes can be added.
for attr, values in doc.copy().items():
if attr not in self.collections:
continue
coll = self.collections[attr]
# Process unique values while preserving their order to handle potentially duplicate collection values.
seen = set()
for value in values:
if value in seen:
continue
seen.add(value)
url = f'/{coll["path"]}/{slugify(value).lower()}/'
# Prepend language code to URL if language is specified in doc and exists in configuration.
if 'language' in doc and doc['language'] in self.languages:
url = f'/{doc["language"]}{url}'
if url in self.doc_index:
print(f'Collection not created because content exists at {url}.')
continue
# Add attribute for creating collection links in templates.
links = attr + '_links'
doc[links] = doc.get(links, []) + [(url, value)]
# Update or create collection index value.
if coll_data := coll['index'].get(url):
if doc['url'] not in coll_data['doc_urls']:
coll_data['doc_urls'].add(doc['url'])
coll_data['docs'].append(doc)
else:
coll['index'][url] = {
'doc_urls': {doc['url']},
'docs': [doc],
'title': value,
'template': coll['template'],
'url': url
}
avg_line_length: 38.371901 | max_line_length: 120 | alphanum_fraction: 0.571829

hexsha: dddc77243e4db6b1f05a3608f0547ebc85e1cf72
size: 3,564 | ext: py | lang: Python
stars:  research/cv/swin_transformer/src/tools/schedulers.py @ leelige/mindspore (5199e05ba3888963473f2b07da3f7bca5b9ef6dc) | licenses: ["Apache-2.0"] | count: 77 | 2021-10-15T08:32:37.000Z to 2022-03-30T13:09:11.000Z
issues: same as stars | count: 3 | 2021-10-30T14:44:57.000Z to 2022-02-14T06:57:57.000Z
forks:  same as stars | count: 24 | 2021-10-15T08:32:45.000Z to 2022-03-24T18:45:20.000Z
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""LearningRate scheduler functions"""
import numpy as np
__all__ = ["multistep_lr", "cosine_lr", "constant_lr", "get_policy", "exp_lr"]
def get_policy(name):
"""get lr policy from name"""
if name is None:
return constant_lr
out_dict = {
"constant_lr": constant_lr,
"cosine_lr": cosine_lr,
"multistep_lr": multistep_lr,
"exp_lr": exp_lr,
}
return out_dict[name]
def constant_lr(args, batch_num):
"""Get constant lr"""
learning_rate = []
def _lr_adjuster(epoch):
if epoch < args.warmup_length:
lr = _warmup_lr(args.warmup_lr, args.base_lr, args.warmup_length, epoch)
else:
lr = args.base_lr
return lr
for epoch in range(args.epochs):
for batch in range(batch_num):
learning_rate.append(_lr_adjuster(epoch + batch / batch_num))
learning_rate = np.clip(learning_rate, args.min_lr, max(learning_rate))
return learning_rate
def exp_lr(args, batch_num):
"""Get exp lr """
learning_rate = []
def _lr_adjuster(epoch):
if epoch < args.warmup_length:
lr = _warmup_lr(args.warmup_lr, args.base_lr, args.warmup_length, epoch)
else:
lr = args.base_lr * args.lr_gamma ** epoch
return lr
for epoch in range(args.epochs):
for batch in range(batch_num):
learning_rate.append(_lr_adjuster(epoch + batch / batch_num))
learning_rate = np.clip(learning_rate, args.min_lr, max(learning_rate))
return learning_rate
def cosine_lr(args, batch_num):
"""Get cosine lr"""
learning_rate = []
def _lr_adjuster(epoch):
if epoch < args.warmup_length:
lr = _warmup_lr(args.warmup_lr, args.base_lr, args.warmup_length, epoch)
else:
e = epoch - args.warmup_length
es = args.epochs - args.warmup_length
lr = 0.5 * (1 + np.cos(np.pi * e / es)) * args.base_lr
return lr
for epoch in range(args.epochs):
for batch in range(batch_num):
learning_rate.append(_lr_adjuster(epoch + batch / batch_num))
learning_rate = np.clip(learning_rate, args.min_lr, max(learning_rate))
return learning_rate
def multistep_lr(args, batch_num):
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
learning_rate = []
def _lr_adjuster(epoch):
lr = args.base_lr * (args.lr_gamma ** (epoch / args.lr_adjust))
return lr
for epoch in range(args.epochs):
for batch in range(batch_num):
learning_rate.append(_lr_adjuster(epoch + batch / batch_num))
learning_rate = np.clip(learning_rate, args.min_lr, max(learning_rate))
return learning_rate
def _warmup_lr(warmup_lr, base_lr, warmup_length, epoch):
"""Linear warmup"""
return epoch / warmup_length * (base_lr - warmup_lr) + warmup_lr
avg_line_length: 31.539823 | max_line_length: 84 | alphanum_fraction: 0.648148
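
A minimal usage sketch for the schedulers above; the args namespace is hypothetical and only carries the fields the functions read:

from types import SimpleNamespace

args = SimpleNamespace(epochs=10, warmup_length=2, warmup_lr=1e-6,
                       base_lr=1e-3, min_lr=1e-5, lr_gamma=0.9, lr_adjust=30)
lr_per_step = get_policy("cosine_lr")(args, 100)  # one value per batch step
print(len(lr_per_step))  # 10 epochs * 100 batches = 1000 values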

hexsha: 9bacf94d0fd94e790c4eff51f93ad7140981f0c1
size: 592 | ext: py | lang: Python
stars:  Licence 1/I11/TP2/tp2_2.py @ axelcoezard/licence (1ed409c4572dea080169171beb7e8571159ba071) | licenses: ["MIT"] | count: 8 | 2020-11-26T20:45:12.000Z to 2021-11-29T15:46:22.000Z
issues: same as stars | count: null
forks:  same as stars | count: 6 | 2020-10-23T15:29:24.000Z to 2021-05-05T19:10:45.000Z
a = int(input("a= "))
b = int(input("b= "))
c = int(input("c= "))
if a == 0:
if b != 0:
print("La solution est", -c / b)
else:
if c == 0:
print("INF")
else:
print("Aucune solution")
else:
delta = (b ** 2) - 4 * a * c
if delta > 0:
print(
"Les deux solutions sont ",
(- b - (delta ** 0.5)) / (2 * a),
"et",
(- b + (delta ** 0.5)) / (2 * a),
)
elif delta == 0:
print("La solution est", -b / (2 * a))
else:
print("Pas de solutions reelles")
avg_line_length: 21.925926 | max_line_length: 46 | alphanum_fraction: 0.383446
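
A worked check of the discriminant branch above: a=1, b=-3, c=2 gives delta = 9 - 8 = 1 > 0, so the two real roots are (3 - 1)/2 = 1 and (3 + 1)/2 = 2:

a, b, c = 1, -3, 2
delta = (b ** 2) - 4 * a * c
assert delta == 1
assert (-b - delta ** 0.5) / (2 * a) == 1.0
assert (-b + delta ** 0.5) / (2 * a) == 2.0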

hexsha: 1fc3f3ff08eb8b1848a1ba0565e1a68e75fc61f0
size: 1,243 | ext: py | lang: Python
stars:  Curso-Em-Video-Python/1Materias/014_Estrutura_de_repeticao_while/#014 - Estrutura de repetição while.py @ pedrohd21/Cursos-Feitos (b223aad83867bfa45ad161d133e33c2c200d42bd) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
''' Example 1
c = 1
while c < 11:
print(c)
c += 1
print('Fim')
'''
'''Example 2
n = 1
while n!= 0:
n = int(input('Digite um valor: '))
print('Fim') '''
''' Example 3
r = 'S'
while r == 'S':
n = int(input('Digite um valor: '))
r = str(input('Quer continuar? [S/N] ')).upper()
print('Fim')'''
''' Example 4
n = 1
par = impar = 0
while n != 0:
n = int(input('Digite um numero: '))
if n != 0:
if n % 2 == 0:
par +=1
else:
impar += 1
print('Voce digitou {} numeros pares e {} numeros impares!'.format(par, impar))'''
'''Example 5
from random import randint
computador = randint(1, 10)
print('Sou seu computador... Acabei de pensar em um numero entre 0 e 10.')
acertou = False
palpites = 0
while not acertou:
jogador = int(input('Qual o seu palpite: '))
palpites += 1
if jogador == computador:
acertou = True
else:
if jogador < computador:
print('Mais... tente mais uma vez.')
elif jogador > computador:
print('Menos... tente mais uma vez')
print('Acertou com {} tentativas. Parabens!!'.format(palpites))'''
n = int(input('digite um numero:'))
c = 0
while c < 5:
c += 1
print('Menor ' if n < 3 else 'Maior')
avg_line_length: 23.903846 | max_line_length: 82 | alphanum_fraction: 0.557522

hexsha: 1ff8bce17816354b2f006da170007b45f81f9a27
size: 3,272 | ext: py | lang: Python
stars:  custom_components/stadtreinigung_hamburg/config_flow.py @ topic2k/sensor.stadtreinigung_hamburg (32420374c5c3d46224d80abfa19c75e89835e9a3) | licenses: ["MIT"] | count: 17 | 2019-08-02T20:29:39.000Z to 2021-12-31T18:48:23.000Z
issues: same as stars | count: 10 | 2019-08-20T18:27:10.000Z to 2022-01-10T09:04:51.000Z
forks:  same as stars | count: 7 | 2019-09-04T12:39:41.000Z to 2022-01-09T18:47:59.000Z
from homeassistant import config_entries
import voluptuous as vol
from homeassistant.util import slugify
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from stadtreinigung_hamburg.StadtreinigungHamburg import *
DOMAIN = "stadtreinigung_hamburg"
@callback
def stadtreinigung_hamburg_names(hass: HomeAssistant):
"""Return configurations of Stadtreinigung Hamburg component."""
return set(
(slugify(entry.data[CONF_NAME]))
for entry in hass.config_entries.async_entries(DOMAIN)
)
@config_entries.HANDLERS.register(DOMAIN)
class StadtreinigungHamburgConfigFlow(config_entries.ConfigFlow):
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize."""
self._street = None
self._number = None
self._errors = {}
async def async_step_user(self, user_input=None):
self._errors = {}
if user_input is not None:
name = slugify(user_input[CONF_NAME])
try:
srh = StadtreinigungHamburg()
await self.hass.async_add_executor_job(
srh.get_garbage_collections,
user_input["street"],
user_input["number"],
)
except StreetNotFoundException as error:
self._errors["street"] = "street_not_found"
except StreetNumberNotFoundException as error:
self._errors["number"] = "number_not_found"
numbers = [x[0] for x in error.args[1]]
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str,
vol.Required("street", default=user_input["street"]): str,
vol.Required(
"number", default=user_input["number"]
): vol.In(numbers),
}
),
errors=self._errors,
)
if not self._name_in_configuration_exists(name):
if self._errors == {}:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
else:
self._errors[CONF_NAME] = "name_exists"
else:
user_input = {CONF_NAME: None, "street": None, "number": None}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str,
vol.Required("street", default=user_input["street"]): str,
vol.Required("number", default=user_input["number"]): str,
}
),
errors=self._errors,
)
def _name_in_configuration_exists(self, name: str) -> bool:
"""Return True if name exists in configuration."""
if name in stadtreinigung_hamburg_names(self.hass):
return True
return False
avg_line_length: 35.565217 | max_line_length: 88 | alphanum_fraction: 0.56143

hexsha: 9517e736c54a7ccebdb322410c0f126dfe187fa1
size: 2,011 | ext: py | lang: Python
stars:  src/ztc/system/ntp_time.py @ magistersart/ZTC_fork (ce72734ea575d9846b5b81f3efbfd14fa1f7e532) | licenses: ["PostgreSQL"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
#!/usr/bin/env python
'''
Time monitoring class for ZTC
Copyright (c) 2010-2012 Vladimir Rusinov <[email protected]>
Copyright (c) 2010 Murano Software [http://muranosoft.com]
Copyright (c) 2010 Docufide, Inc. [http://docufide.com]
Copyright (c) 2011 Wrike, Inc. [http://www.wrike.com]
License: GNU GPL v3
Requirements:
* ntpq
'''
import socket
import ntplib
from ztc.check import ZTCCheck
class DumbNtpResponse(object):
"""dumb ntp response - mock object on ntplib response object"""
offset = 3600.0
delay = 3600.0
precision = 1
class TimeCheck(ZTCCheck):
name = 'time'
_ntp_response = None
def _myinit(self):
"""__init__"""
self._ntp_addr = self.config.get('ntp_server', 'pool.ntp.org')
# default timeout is 1 second
self._timeout = self.config.get('timeout', 1)
def _get(self, metric, *args, **kwargs):
"""Get some ntp mertic. Howewer, only jitter is currently supported"""
return self.__getattribute__(metric)
def _read_ntp(self):
"""Connect to ntp server and read vars from it
Returns: ntplib response object"""
if self._ntp_response:
return self._ntp_response
try:
self.logger.debug("connecting to ntp server %s" % self._ntp_addr)
c = ntplib.NTPClient()
# TODO: add timeout param
response = c.request(self._ntp_addr, version=3)
self._ntp_response = response
except socket.timeout:
self.logger.exception("Failed to read from ntp server")
response = DumbNtpResponse()
self._ntp_response = response
return response
@property
def offset(self):
response = self._read_ntp()
return abs(response.offset)
@property
def delay(self):
response = self._read_ntp()
return abs(response.delay)
@property
def precision(self):
response = self._read_ntp()
return abs(2 ** response.precision)
avg_line_length: 26.460526 | max_line_length: 78 | alphanum_fraction: 0.636002
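
The check above ultimately wraps a single ntplib call; stripped of the ZTCCheck plumbing, the same values can be read directly (server name illustrative):

import ntplib

response = ntplib.NTPClient().request('pool.ntp.org', version=3)
print(abs(response.offset), abs(response.delay), abs(2 ** response.precision))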

hexsha: c806b13c3ee93413f5b64ec4d90776b263957255
size: 796 | ext: py | lang: Python
stars:  api/urls.py @ nathanwelsh8/guts_2021_hackathon (d875000051a423db4a4bb1a8df35d6d9a9e9db63) | licenses: ["MIT"] | count: 1 | 2021-02-05T15:58:02.000Z to 2021-02-05T15:58:02.000Z
issues: same as stars | count: null
forks:  same as stars | count: 2 | 2021-02-05T20:27:48.000Z to 2021-03-31T19:58:07.000Z
from django.conf import settings
from django.contrib import admin
from django.urls import include, path
from . import views
from . import jwt_views
admin.autodiscover()
urlpatterns = [
path("me/", views.Profile.as_view(), name="me"),
path("token/", jwt_views.Login.as_view(), name="token"),
path(
"token/refresh/", jwt_views.RefreshToken.as_view(),
name="token-refresh"
),
path("token/logout/", jwt_views.Logout.as_view(), name="logout"),
path("ping/", views.Ping.as_view(), name="ping"),
path("admin/", admin.site.urls),
]
urlpatterns += [
path("api-auth/", include('rest_framework.urls'))
]
if not settings.ON_SERVER:
import debug_toolbar
urlpatterns = [
path('__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
avg_line_length: 24.875 | max_line_length: 69 | alphanum_fraction: 0.66206

hexsha: a952b97929b169f4063d6f78dbba77560267a97f
size: 5,963 | ext: py | lang: Python
stars:  tests/onegov/agency/test_layouts.py @ politbuero-kampagnen/onegov-cloud (20148bf321b71f617b64376fe7249b2b9b9c4aa9) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
from onegov.agency.collections import ExtendedAgencyCollection
from onegov.agency.collections import ExtendedPersonCollection
from onegov.agency.layout import AgencyCollectionLayout
from onegov.agency.layout import AgencyLayout
from onegov.agency.layout import ExtendedPersonCollectionLayout
from onegov.agency.layout import ExtendedPersonLayout
from onegov.agency.layout import MembershipLayout
from onegov.agency.models import ExtendedAgency
from onegov.agency.models import ExtendedPerson
from onegov.people import AgencyMembership
class DummyOrg(object):
geo_provider = None
open_files_target_blank = True
class DummyApp(object):
org = DummyOrg()
class DummyRequest(object):
locale = 'en'
is_logged_in = False
is_manager = False
is_admin = False
session = None
permissions = {}
def __init__(self):
self.app = DummyApp()
def translate(self, text):
return str(text)
def include(self, *args, **kwargs):
pass
def link(self, model, name=''):
if isinstance(model, str):
return f'{model}/{name}'
return f'{model.__class__.__name__}/{name}'
def exclude_invisible(self, objects):
return objects
def new_csrf_token(self):
return 'x'
def has_permission(self, model, permission):
permissions = self.permissions.get(model.__class__.__name__, [])
return permission.__name__ in permissions
def path(links):
return '/'.join([link.attrs['href'].strip('/') for link in links])
def hrefs(items):
for item in items:
if hasattr(item, 'links'):
for ln in item.links:
yield (
ln.attrs.get('href')
or ln.attrs.get('ic-delete-from')
or ln.attrs.get('ic-post-to')
)
else:
yield (
item.attrs.get('href')
or item.attrs.get('ic-delete-from')
or item.attrs.get('ic-post-to')
)
def test_agency_collection_layout():
request = DummyRequest()
model = ExtendedAgencyCollection(None)
layout = AgencyCollectionLayout(model, request)
assert layout.editbar_links is None
assert path(layout.breadcrumbs) == 'DummyOrg/ExtendedAgencyCollection'
assert layout.move_agency_url_template == 'AgencyMove/?csrf-token=x'
# Add permission
request.permissions = {'ExtendedAgencyCollection': ['Private']}
layout = AgencyCollectionLayout(model, request)
assert list(hrefs(layout.editbar_links)) == [
'ExtendedAgencyCollection/create-pdf',
'ExtendedAgencyCollection/sort',
'ExtendedAgencyCollection/new'
]
def test_agency_layout():
request = DummyRequest()
model = ExtendedAgency('Agency')
layout = AgencyLayout(model, request)
assert isinstance(layout.collection, ExtendedAgencyCollection)
assert layout.editbar_links is None
assert path(layout.breadcrumbs) == \
'DummyOrg/ExtendedAgencyCollection/ExtendedAgency'
assert layout.move_agency_url_template == 'AgencyMove/?csrf-token=x'
assert layout.move_membership_within_agency_url_template == \
'AgencyMembershipMoveWithinAgency/?csrf-token=x'
# Add permission
request.permissions = {'ExtendedAgency': ['Private']}
layout = AgencyLayout(model, request)
assert list(hrefs(layout.editbar_links)) == [
'AgencyProxy/edit',
'AgencyProxy/move',
'AgencyProxy/sort',
'ExtendedAgency/?csrf-token=x',
'AgencyProxy/create-pdf',
'AgencyProxy/new',
'AgencyProxy/new-membership',
'AgencyProxy/sort-children?csrf-token=x',
'AgencyProxy/sort-relationships?csrf-token=x',
]
def test_membership_layout():
request = DummyRequest()
model = AgencyMembership(agency=ExtendedAgency(title='Agency'))
layout = MembershipLayout(model, request)
assert layout.editbar_links is None
assert path(layout.breadcrumbs) == \
'DummyOrg/ExtendedAgencyCollection/ExtendedAgency/AgencyMembership'
# Add permission
request.permissions = {'AgencyMembership': ['Private']}
layout = MembershipLayout(model, request)
assert list(hrefs(layout.editbar_links)) == [
'AgencyMembership/edit',
'AgencyMembership/?csrf-token=x'
]
def test_extended_person_collection_layout():
request = DummyRequest()
model = ExtendedPersonCollection(None)
layout = ExtendedPersonCollectionLayout(model, request)
assert layout.editbar_links is None
assert path(layout.breadcrumbs) == 'DummyOrg/#'
# Log in as manager
request.is_manager = True
layout = ExtendedPersonCollectionLayout(model, request)
assert list(hrefs(layout.editbar_links)) == [
'ExtendedPersonCollection/create-people-xlsx',
'ExtendedPersonCollection/new'
]
# AgencyPathMixin
root = ExtendedAgency('Root')
child = ExtendedAgency('Child', parent=root)
assert layout.agency_path(root) == 'Root'
assert layout.agency_path(child) == 'Root > Child'
def test_extended_person_layout():
request = DummyRequest()
model = ExtendedPerson(
first_name="Hans",
last_name="Maulwurf",
email="[email protected]"
)
layout = ExtendedPersonLayout(model, request)
assert layout.editbar_links is None
assert path(layout.breadcrumbs) == \
'DummyOrg/ExtendedPersonCollection/ExtendedPerson'
# Add permission
request.permissions = {'ExtendedPerson': ['Private']}
layout = ExtendedPersonLayout(model, request)
assert list(hrefs(layout.editbar_links)) == [
'ExtendedPerson/edit',
'ExtendedPerson/sort',
'ExtendedPerson/?csrf-token=x'
]
# AgencyPathMixin
root = ExtendedAgency('Root')
child = ExtendedAgency('Child', parent=root)
assert layout.agency_path(root) == 'Root'
assert layout.agency_path(child) == 'Root > Child'
avg_line_length: 31.057292 | max_line_length: 75 | alphanum_fraction: 0.679356

hexsha: 8d213bf4bc6c4a3a27b6ce0e83b0b299ed0382ae
size: 321 | ext: py | lang: Python
stars:  PYTHON/Numpy/dot_and_cross.py @ byung-u/HackerRank (4c02fefff7002b3af774b99ebf8d40f149f9d163) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
#!/usr/bin/env python3
import sys
import numpy
N = int(input())
arr = []
for _ in range(0, N):
temp = list(map(int, input().split()))
arr.append(temp)
n = numpy.array(arr)
arr = []
for _ in range(0, N):
temp = list(map(int, input().split()))
arr.append(temp)
m = numpy.array(arr)
print(numpy.dot(n, m))
avg_line_length: 17.833333 | max_line_length: 42 | alphanum_fraction: 0.604361
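
A worked example of the script above for N = 2; numpy.dot on two square matrices is ordinary matrix multiplication:

import numpy

n = numpy.array([[1, 2], [3, 4]])
m = numpy.array([[5, 6], [7, 8]])
print(numpy.dot(n, m))  # [[19 22]
                        #  [43 50]]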

hexsha: a5c3b7572364919c21d1b516377070ce152c5dd3
size: 1,266 | ext: py | lang: Python
stars:  src/server/bo/Student.py @ muenstermannmarius/ElectionSystem (a6e60d9147423787e869587b808def4771f89cb7) | licenses: ["RSA-MD"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
from server.bo.User import User
class Student(User):
"""The implementation of a exemplary student class.
"""
def __init__(self):
super().__init__()
self._matrikel_nr = ""
self._study = ""
def set_matrikel_nr(self, matrikel_nr):
"""Set the matrikel number of a Student"""
self._matrikel_nr = matrikel_nr
def get_matrikel_nr(self):
"""Get the matrikel number of a Student"""
return self._matrikel_nr
def set_study(self, study):
"""Set the study of a Student"""
self._study = study
def get_study(self):
"""Get the study of a Student"""
return self._study
@staticmethod
    def to_dict(dicti):
"""Converts a Python dict() into a Student()."""
student = Student()
student.set_id(dicti["id"])
student.set_date(dicti["creation_date"])
student.set_name(dicti["name"])
student.set_google_user_id(dicti["google_user_id"])
student.set_firstname(dicti["firstname"])
student.set_mail(dicti["mail"])
student.set_role_id(dicti["role_id"])
student.set_matrikel_nr(dicti["matrikel_nr"])
student.set_study(dicti["study"])
return student
avg_line_length: 24.823529 | max_line_length: 59 | alphanum_fraction: 0.610585

hexsha: 193938576e0ac0d34ffac2faa2f1081e9ff7eac7
size: 283 | ext: py | lang: Python
stars:  dailyprogrammer/315e/soln.py @ mmcloughlin/problems (6095842ffe007a12ec8c2093850515aa4e046616) | licenses: ["MIT"] | count: 11 | 2019-02-08T06:54:34.000Z to 2021-08-07T18:57:39.000Z
issues: same as stars | count: 1 | 2019-05-21T08:14:10.000Z to 2019-05-21T08:14:10.000Z
forks:  same as stars | count: null
import sys
def xormult(x, y):
z = 0
while y:
if y&1:
z ^= x
x <<= 1
y >>= 1
return z
def main():
x, y = map(int, sys.argv[1:])
    print('{x}@{y} = {z}'.format(x=x, y=y, z=xormult(x, y)))
if __name__ == '__main__':
main()
avg_line_length: 13.47619 | max_line_length: 59 | alphanum_fraction: 0.420495
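
xormult above is carry-less multiplication: partial products are combined with XOR instead of addition, i.e. polynomial multiplication over GF(2). A short trace, assuming xormult is in scope:

# 3 @ 5: 3 = 0b011, 5 = 0b101, so the partial products are 3<<0 and 3<<2,
# and 0b00011 ^ 0b01100 = 0b01111 = 15 (equal to 3 * 5 by coincidence;
# 3 @ 3 = 0b011 ^ 0b110 = 0b101 = 5 shows the difference from 3 * 3 = 9).
assert xormult(3, 5) == 15
assert xormult(3, 3) == 5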

hexsha: 7e0a4948565c8c5253a8a84d4cb13a90340de0c5
size: 849 | ext: py | lang: Python
stars:  UIUCTF/2021/crypto/dhke_adventure/dhke_adventure.py @ ruhan-islam/ctf-archives (8c2bf6a608c821314d1a1cfaa05a6cccef8e3103) | licenses: ["MIT"] | count: 1 | 2021-11-02T20:53:58.000Z to 2021-11-02T20:53:58.000Z
issues: same as stars | count: null
forks:  same as stars | count: null
from random import randint
from Crypto.Util.number import isPrime
from Crypto.Cipher import AES
from hashlib import sha256
print("I'm too lazy to find parameters for my DHKE, choose for me.")
print("Enter prime at least 1024 at most 2048 bits: ")
# get user's choice of p
p = input()
p = int(p)
# check prime valid
if p.bit_length() < 1024 or p.bit_length() > 2048 or not isPrime(p):
exit("Invalid input.")
# prepare for key exchange
g = 2
a = randint(2,p-1)
b = randint(2,p-1)
# generate key
dio = pow(g,a,p)
jotaro = pow(g,b,p)
key = pow(dio,b,p)
key = sha256(str(key).encode()).digest()
with open('flag.txt', 'rb') as f:
flag = f.read()
iv = b'uiuctf2021uiuctf'
cipher = AES.new(key, AES.MODE_CFB, iv)
ciphertext = cipher.encrypt(flag)
print("Dio sends: ", dio)
print("Jotaro sends: ", jotaro)
print("Ciphertext: ", ciphertext.hex())
avg_line_length: 24.970588 | max_line_length: 68 | alphanum_fraction: 0.687868

hexsha: fde1a6c8bfc9c24d8e794d9f67ffd4e62778127f
size: 3,737 | ext: py | lang: Python
stars:  loesungen/chapter05/houses.py @ SaschaKersken/Daten-Prozessanalyse (370f07a75b9465329deb3671adbfbef8483f76f6) | licenses: ["Apache-2.0"] | count: 2 | 2021-09-20T06:16:41.000Z to 2022-01-17T14:24:43.000Z
issues: same as stars | count: null
forks:  same as stars | count: null
from csp import CSP
AGE = 0
HOUSE_COLOR = 1
HOUSE_NUMBER = 2
ages = [36, 37, 39, 42]
house_colors = ["rot", "grün", "blau", "gelb"]
house_numbers = [2, 4, 6, 8]
domain_template = []
for age in ages:
for house_color in house_colors:
for house_number in house_numbers:
domain_template.append([age, house_color, house_number])
house_variables = ["Anna", "Filiz", "Hector", "Klaus"]
house_domains = {}
for variable in house_variables:
house_domains[variable] = domain_template
def find(assignment, feature, value):
for person in assignment:
if assignment[person][feature] == value:
return person
return None
def house_constraint(assignment):
    # Assigned values must be unique across persons
    if len(assignment) > 1:
        for feature in [AGE, HOUSE_COLOR, HOUSE_NUMBER]:
            feature_list = []
            for person in assignment:
                feature_list.append(assignment[person][feature])
            if len(feature_list) != len(set(feature_list)):
                return False
    # The youngest person lives in the yellow house?
    youngest = find(assignment, AGE, 36)
    yellow = find(assignment, HOUSE_COLOR, "gelb")
    if youngest and yellow and youngest != yellow:
        return False
    # Klaus is not a neighbor of Hector?
    if "Klaus" in assignment and "Hector" in assignment:
        if abs(assignment["Klaus"][HOUSE_NUMBER] - assignment["Hector"][HOUSE_NUMBER]) == 2:
            return False
    # The green house is not next to the yellow one?
    green = find(assignment, HOUSE_COLOR, "grün")
    if green and yellow:
        if abs(assignment[yellow][HOUSE_NUMBER] - assignment[green][HOUSE_NUMBER]) == 2:
            return False
    # Hector does not live in one of the outer houses?
    if "Hector" in assignment:
        hector = assignment["Hector"]
        if hector[HOUSE_NUMBER] == 2 or hector[HOUSE_NUMBER] == 8:
            return False
    # The person in the green house is 42 years old?
    if green:
        age42 = find(assignment, AGE, 42)
        if age42 and green != age42:
            return False
    # Anna does not live in the red house?
    if "Anna" in assignment:
        if assignment["Anna"][HOUSE_COLOR] == "rot":
            return False
    # Filiz is not 39 years old?
    if "Filiz" in assignment:
        if assignment["Filiz"][AGE] == 39:
            return False
    # The yellow house is on the far right?
    yellow = find(assignment, HOUSE_COLOR, "gelb")
    if yellow and assignment[yellow][HOUSE_NUMBER] != 8:
        return False
    # Anna or Klaus lives in house number 4?
    house4 = find(assignment, HOUSE_NUMBER, 4)
    if house4 and house4 != "Anna" and house4 != "Klaus":
        return False
    # The 39-year-old person lives on the far left?
    age39 = find(assignment, AGE, 39)
    if age39 and assignment[age39][HOUSE_NUMBER] != 2:
        return False
    # The red house is further left than the blue one?
    red = find(assignment, HOUSE_COLOR, "rot")
    blue = find(assignment, HOUSE_COLOR, "blau")
    if red and blue and assignment[red][HOUSE_NUMBER] > assignment[blue][HOUSE_NUMBER]:
        return False
    # Anna is older than Hector, whose house is not red?
    if "Hector" in assignment:
        if assignment["Hector"][HOUSE_COLOR] == "rot":
            return False
        if "Anna" in assignment and assignment["Anna"][AGE] < assignment["Hector"][AGE]:
            return False
    # All constraints satisfied
    return True
house_csp = CSP(house_variables, house_domains, house_constraint)
solution = house_csp.solve()
if solution:
for person in sorted(solution.items(), key = lambda item: item[1][HOUSE_NUMBER]):
#print(f"{person}: {solution[person]}")
print(person)
else:
print("Keine Lösung gefunden.")
avg_line_length: 37 | max_line_length: 92 | alphanum_fraction: 0.642494

hexsha: e3256b18ec8695709bab6ec5d06af6549ccf99b4
size: 459 | ext: py | lang: Python
stars:  listings/chapter10/rent-scatter-4.py @ rh0-me/ITHandbuch10 (1582d6317e11cc958a14e157440dccf94d44583b) | licenses: ["Apache-2.0"] | count: 6 | 2021-08-04T19:42:53.000Z to 2022-01-17T14:24:45.000Z
issues: same as stars | count: 1 | 2021-08-30T06:32:08.000Z to 2021-08-30T06:42:46.000Z
forks:  same as stars | count: 1 | 2022-02-17T10:32:17.000Z to 2022-02-17T10:32:17.000Z
import matplotlib.pyplot as plt
import csv
# Import the data
rent_data = []
rent_file = open('size-rent-4.csv', 'r')
reader = csv.reader(rent_file)
for line in reader:
rent_data.append([float(line[0]), float(line[1])])
rent_file.close()
# Create the plot
plt.title("Size to rent ratio")
plt.xlabel("Size in square meters")
plt.ylabel("Rent in Euros")
plt.scatter(
[row[0] for row in rent_data],
[row[1] for row in rent_data]
)
plt.show()
avg_line_length: 21.857143 | max_line_length: 54 | alphanum_fraction: 0.701525
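
The csv.reader above expects two comma-separated columns per line with no header row; a hypothetical size-rent-4.csv that the script would accept:

# Generate a tiny stand-in data file (values are made up):
with open('size-rent-4.csv', 'w') as f:
    f.write('45.0,520.0\n60.5,710.0\n82.0,980.0\n')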

hexsha: 8be9b25fe8fb4d95a58fe66fdd2fc05d4f545be6
size: 336 | ext: py | lang: Python
stars:  showcase6/com/aaron/helloworld.py @ qsunny/python (ace8c3178a9a9619de2b60ca242c2079dd2f825e) | licenses: ["MIT"] | count: null
issues: same as stars | count: 2 | 2021-03-25T22:00:07.000Z to 2022-01-20T15:51:48.000Z
forks:  same as stars | count: null
# -*- coding: utf-8 -*-
"""枚举"""
__author__="Aaron.qiu"
import wx
if __name__=="__main__":
app = wx.App()
window = wx.Frame(None, title="wxPython - www.yiibai.com", size=(400, 300))
panel = wx.Panel(window)
label = wx.StaticText(panel, label="Hello World", pos=(100, 100))
window.Show(True)
app.MainLoop()
avg_line_length: 18.666667 | max_line_length: 79 | alphanum_fraction: 0.610119

hexsha: 9a55a7fdede2ec154d5a719d7b1588591e7da3d3
size: 9,901 | ext: py | lang: Python
stars:  external/argh/dispatching.py @ richardseifert/Hydra_pipeline (a31d782219359bae7fa82fa9b081fb72bef69fce) | licenses: ["MIT"] | count: 1 | 2017-11-04T15:08:42.000Z to 2017-11-04T15:08:42.000Z
issues: pythonlib/argh/dispatching.py @ Jumpscale/web (8e8ec2ce01f3105c7647ee8a0c90af09311cbbeb) | licenses: ["Apache-2.0"] | count: 1 | 2018-11-05T17:28:58.000Z to 2018-11-05T18:20:00.000Z
forks:  external/argh/dispatching.py @ richardseifert/Hydra_pipeline (a31d782219359bae7fa82fa9b081fb72bef69fce) | licenses: ["MIT"] | count: null
# coding: utf-8
#
# Copyright © 2010—2014 Andrey Mikhaylenko and contributors
#
# This file is part of Argh.
#
# Argh is free software under terms of the GNU Lesser
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README.rst for copying conditions.
#
"""
Dispatching
~~~~~~~~~~~
"""
import argparse
import sys
from types import GeneratorType
from argh import compat, io
from argh.constants import (ATTR_WRAPPED_EXCEPTIONS,
ATTR_WRAPPED_EXCEPTIONS_PROCESSOR,
ATTR_EXPECTS_NAMESPACE_OBJECT,
PARSER_FORMATTER)
from argh.completion import autocomplete
from argh.assembling import add_commands, set_default_command
from argh.exceptions import DispatchingError, CommandError
from argh.utils import get_arg_spec
__all__ = ['dispatch', 'dispatch_command', 'dispatch_commands',
'PARSER_FORMATTER', 'EntryPoint']
def dispatch(parser, argv=None, add_help_command=True,
completion=True, pre_call=None,
output_file=sys.stdout, errors_file=sys.stderr,
raw_output=False, namespace=None):
"""
Parses given list of arguments using given parser, calls the relevant
function and prints the result.
The target function should expect one positional argument: the
:class:`argparse.Namespace` object. However, if the function is decorated with
:func:`~argh.decorators.plain_signature`, the positional and named
arguments from the namespace object are passed to the function instead
of the object itself.
:param parser:
the ArgumentParser instance.
:param argv:
a list of strings representing the arguments. If `None`, ``sys.argv``
is used instead. Default is `None`.
:param add_help_command:
if `True`, converts first positional argument "help" to a keyword
argument so that ``help foo`` becomes ``foo --help`` and displays usage
information for "foo". Default is `True`.
:param output_file:
A file-like object for output. If `None`, the resulting lines are
collected and returned as a string. Default is ``sys.stdout``.
:param errors_file:
Same as `output_file` but for ``sys.stderr``.
:param raw_output:
If `True`, results are written to the output file raw, without adding
whitespaces or newlines between yielded strings. Default is `False`.
:param completion:
If `True`, shell tab completion is enabled. Default is `True`. (You
will also need to install it.) See :mod:`argh.completion`.
By default the exceptions are not wrapped and will propagate. The only
exception that is always wrapped is :class:`~argh.exceptions.CommandError`
which is interpreted as an expected event so the traceback is hidden.
You can also mark arbitrary exceptions as "wrappable" by using the
:func:`~argh.decorators.wrap_errors` decorator.
"""
if completion:
isatty = hasattr(output_file, 'isatty') and output_file.isatty()
autocomplete(parser, allow_warnings=isatty)
if argv is None:
argv = sys.argv[1:]
if add_help_command:
if argv and argv[0] == 'help':
argv.pop(0)
argv.append('--help')
# this will raise SystemExit if parsing fails
args = parser.parse_args(argv, namespace=namespace)
if hasattr(args, 'function'):
if pre_call: # XXX undocumented because I'm unsure if it's OK
# Actually used in real projects:
# * https://google.com/search?q=argh+dispatch+pre_call
# * https://github.com/madjar/aurifere/blob/master/aurifere/cli.py#L92
pre_call(args)
lines = _execute_command(args, errors_file)
else:
# no commands declared, can't dispatch; display help message
lines = [parser.format_usage()]
if output_file is None:
# user wants a string; we create an internal temporary file-like object
# and will return its contents as a string
if sys.version_info < (3,0):
f = compat.BytesIO()
else:
f = compat.StringIO()
else:
# normally this is stdout; can be any file
f = output_file
for line in lines:
# print the line as soon as it is generated to ensure that it is
# displayed to the user before anything else happens, e.g.
# raw_input() is called
io.dump(line, f)
if not raw_output:
# in most cases user wants on message per line
io.dump('\n', f)
if output_file is None:
# user wanted a string; return contents of our temporary file-like obj
f.seek(0)
return f.read()
def _execute_command(args, errors_file):
"""
Asserts that ``args.function`` is present and callable. Tries different
approaches to calling the function (with an `argparse.Namespace` object or
with ordinary signature). Yields the results line by line.
If :class:`~argh.exceptions.CommandError` is raised, its message is
appended to the results (i.e. yielded by the generator as a string).
All other exceptions propagate unless marked as wrappable
by :func:`wrap_errors`.
"""
assert hasattr(args, 'function') and hasattr(args.function, '__call__')
# the function is nested to catch certain exceptions (see below)
def _call():
# Actually call the function
if getattr(args.function, ATTR_EXPECTS_NAMESPACE_OBJECT, False):
result = args.function(args)
else:
# namespace -> dictionary
_flat_key = lambda key: key.replace('-', '_')
all_input = dict((_flat_key(k), v) for k,v in vars(args).items())
# filter the namespace variables so that only those expected by the
# actual function will pass
spec = get_arg_spec(args.function)
positional = [all_input[k] for k in spec.args]
kwonly = getattr(spec, 'kwonlyargs', [])
keywords = dict((k, all_input[k]) for k in kwonly)
# *args
if spec.varargs:
positional += getattr(args, spec.varargs)
# **kwargs
varkw = getattr(spec, 'varkw', getattr(spec, 'keywords', []))
if varkw:
not_kwargs = ['function'] + spec.args + [spec.varargs] + kwonly
extra = [k for k in vars(args) if k not in not_kwargs]
for k in extra:
keywords[k] = getattr(args, k)
result = args.function(*positional, **keywords)
# Yield the results
if isinstance(result, (GeneratorType, list, tuple)):
# yield each line ASAP, convert CommandError message to a line
for line in result:
yield line
else:
# yield non-empty non-iterable result as a single line
if result is not None:
yield result
wrappable_exceptions = [CommandError]
wrappable_exceptions += getattr(args.function, ATTR_WRAPPED_EXCEPTIONS, [])
try:
result = _call()
for line in result:
yield line
except tuple(wrappable_exceptions) as e:
processor = getattr(args.function, ATTR_WRAPPED_EXCEPTIONS_PROCESSOR,
lambda e: '{0.__class__.__name__}: {0}'.format(e))
errors_file.write(compat.text_type(processor(e)))
errors_file.write('\n')
def dispatch_command(function, *args, **kwargs):
"""
A wrapper for :func:`dispatch` that creates a one-command parser.
Uses :attr:`PARSER_FORMATTER`.
This::
dispatch_command(foo)
...is a shortcut for::
parser = ArgumentParser()
set_default_command(parser, foo)
dispatch(parser)
This function can be also used as a decorator.
"""
parser = argparse.ArgumentParser(formatter_class=PARSER_FORMATTER)
set_default_command(parser, function)
dispatch(parser, *args, **kwargs)
def dispatch_commands(functions, *args, **kwargs):
"""
A wrapper for :func:`dispatch` that creates a parser, adds commands to
the parser and dispatches them.
Uses :attr:`PARSER_FORMATTER`.
This::
dispatch_commands([foo, bar])
...is a shortcut for::
parser = ArgumentParser()
add_commands(parser, [foo, bar])
dispatch(parser)
"""
parser = argparse.ArgumentParser(formatter_class=PARSER_FORMATTER)
add_commands(parser, functions)
dispatch(parser, *args, **kwargs)
class EntryPoint(object):
"""
An object to which functions can be attached and then dispatched.
When called with an argument, the argument (a function) is registered
at this entry point as a command.
When called without an argument, dispatching is triggered with all
previously registered commands.
Usage::
from argh import EntryPoint
entrypoint = EntryPoint()
@entrypoint
def ls():
for i in range(10):
print i
@entrypoint
def greet():
print 'hello'
if __name__ == '__main__':
entrypoint()
"""
def __init__(self, name=None):
self.name = name or 'unnamed'
self.commands = []
def __call__(self, f=None):
if f:
self._register_command(f)
return f
return self._dispatch()
def _register_command(self, f):
self.commands.append(f)
def _dispatch(self):
if not self.commands:
raise DispatchingError('no commands for entry point "{0}"'
.format(self.name))
if len(self.commands) == 1:
dispatch_command(*self.commands)
else:
dispatch_commands(self.commands)
avg_line_length: 32.146104 | max_line_length: 82 | alphanum_fraction: 0.631956

hexsha: 323083981977cde0f72b191cb381747fc2bcf0cd
size: 246 | ext: py | lang: Python
stars:  python/python_backup/PRAC_PYTHON/23_palindrome.py @ SayanGhoshBDA/code-backup (8b6135facc0e598e9686b2e8eb2d69dd68198b80) | licenses: ["MIT"] | count: 16 | 2018-11-26T08:39:42.000Z to 2019-05-08T10:09:52.000Z
issues: same as stars | count: 8 | 2020-05-04T06:29:26.000Z to 2022-02-12T05:33:16.000Z
forks:  same as stars | count: 5 | 2020-02-11T16:02:21.000Z to 2021-02-05T07:48:30.000Z
def palindrome(n):
a0=n
s=0
while a0>0:
d=a0%10
s=s*10+d
a0=a0//10
if s==n:
return 1
else:
return 0
n=int(input("Enter a number "))
if palindrome(n)==1:
print("palindrome ...")
else:
print("Not palindrome..")
avg_line_length: 12.947368 | max_line_length: 32 | alphanum_fraction: 0.556911
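
The loop above peels off the last digit (a0 % 10) and rebuilds the number in reverse via s = s*10 + d; for n = 121 the states are (d=1, s=1), (d=2, s=12), (d=1, s=121), so s == n and the function returns 1:

assert palindrome(121) == 1
assert palindrome(123) == 0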

hexsha: 3291fd0093f8da89c4856d4dffdb758ef05f886d
size: 4,666 | ext: py | lang: Python
stars:  test.py @ Patsch36/ProjektMichaela-Rebecca-Tim (ca21d117b5073d1e91943bdfdb4a55ccaa628e64) | licenses: ["Apache-2.0"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
import csv
from pathlib import Path
DEBUG_INFO = False # Additional debug infos during run time (switch)
header_a = [] # Array of header values
alldata = [] # Matrix (array of arrays) of csv data entries
header_index = {} # Header Index Dictionary
header_dict = {} # Header Dictionary
# [P] Read the CSV file => process the values further
# TODO What exactly do the header_a object and alldata hold?
def read_csv_file(filename):
with open(filename, newline='') as csvfile:
reader = csv.reader(csvfile)
rownum = 1
for row in reader:
if rownum == 1:
header = row[0]
header_a = header.split(";")
# if DEBUG_INFO:
# print("Header:")
# print(header_a)
else:
data = row[0]
data_a = data.split(";")
# if DEBUG_INFO:
# print("Data/rownum =", rownum)
# print(data_a)
alldata.append(data_a)
rownum += 1
return header_a, alldata
# [P] Finds the position (column) of the requested quantity
def get_index(header_a, search_term='Liefermenge'):
for i in range(len(header_a)):
if header_a[i] == search_term:
break
header_index[search_term] = i
return header_index
# [P] Maps the header titles to their column indices
def create_header_dict(header_a):
for i in range(len(header_a)):
header_dict[header_a[i]] = i
return header_dict
# [P] Formatting helpers
def german_to_english_float(germfloat_string):
# if DEBUG_INFO: print("germfloat before transform: ", germfloat_string)
if "." in germfloat_string: germfloat_string = germfloat_string.replace(".", "")
if "," in germfloat_string: germfloat_string = germfloat_string.replace(",", ".")
# if DEBUG_INFO: print("germfloat after transform: ", germfloat_string)
return germfloat_string
def calc_mean_by_index(alldata, search_term='Liefermenge'):
sum = 0.0
for lines in range(len(alldata)):
sum = sum + float(alldata[lines][header_index[search_term]])
# for lines in alldata:
# sum = sum + float(lines[header_index[search_term]])
mean = sum/(len(alldata))
return mean
    # len(alldata) gives us the number of data rows in the CSV file
    # header_index[search_term] gives the column index of the requested term (search_term) within the CSV file
    # alldata[i] gives the i-th data row
    # alldata[i][j] gives the j-th value (column j) of the i-th data row
    # Now we have to iterate over all rows of the CSV file (loop!)
    # and sum up all values that correspond to column index j of the requested term:
    # float(alldata[i][j])
    # The mean is the sum of all values divided by the number of data rows
    # This mean is handed back as the function's return value
    # your code follows here ...
# __main__
# Read and parse the CSV file given by filename
filename = "100_Pivot_Grunddaten.CSV"
header_a, alldata = read_csv_file(filename)
header_dict = create_header_dict(header_a)
if DEBUG_INFO:
print("--- Start debug infos ---")
print("Kopfzeile: ", header_a)
print("... header_dict: ", header_dict)
print("Datenzeilen: ", alldata)
print("--- End debug infos ---")
# [P] Print the menu on the console and read the user's choice
while True:
    # [P] The string states which quantity is requested
search_nr = int(input("""Welchen der folgenden Terme wollen Sie untersuchen?
1: (gew.) Durchschnitt von Bestellmenge
2: (gew.) Durchschnitt von Liefermenge
3: (gew.) Durchschnitt von Wert
4: gew. Summe von Produktgruppe
9: Exit
>>> """))
if search_nr == 1:
search_term = 'Bestellmenge'
elif search_nr == 2:
search_term = 'Liefermenge'
elif search_nr == 3:
search_term = 'Wert'
elif search_nr == 4:
search_term = 'Produktgruppe'
elif search_nr == 9:
break
    else:
        print("Bitte korrekte Zahl eingeben!")
        continue  # invalid choice: prompt again instead of exiting
    # Which index number does the requested term have within the CSV file?
header_index = get_index(header_a, search_term)
if search_nr in (1, 2, 3):
        # a. Compute and print the mean over all values of the requested term
mean = calc_mean_by_index(alldata, search_term)
print("Durchschnittliche {0}: {1:6.2f}".format(search_term, mean))
print("\n---------------------\n")
avg_line_length: 34.562963 | max_line_length: 118 | alphanum_fraction: 0.624089
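
Note that german_to_english_float above is defined but never called; it would be needed if the CSV stored German-formatted numbers, since calc_mean_by_index feeds the raw strings straight into float():

assert german_to_english_float('1.234,56') == '1234.56'
assert float(german_to_english_float('3,5')) == 3.5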

hexsha: 721804561ae49c44380dc571cf4f74b85503c27f
size: 243 | ext: py | lang: Python
stars:  Curso-Em-Video-Python/2Exercicios/038_Comparando_numeros.py @ pedrohd21/Cursos-Feitos (b223aad83867bfa45ad161d133e33c2c200d42bd) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
n1 = int(input('Digite um numero: '))
n2 = int(input('Digite um numero: '))
if n1 > n2:
print('{} é maior que o {}.'.format(n1, n2))
elif n1 < n2:
print('{} é maior que o {}.'.format(n2, n1))
else:
print('Os numeros são iguais!!')
avg_line_length: 27 | max_line_length: 48 | alphanum_fraction: 0.572016

hexsha: 9d326d6794c1d25827954b931d76eb439c0f8060
size: 885 | ext: py | lang: Python
stars:  euler-52.py @ TFabijo/euler2 (7da205ce02ae3bd12754f99c1fe69fbf20b1e3d0) | licenses: ["MIT"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
def euler():
    x = 1
    while True:  # loop until the permutation condition below is met
x += 1
x_1 = str(x)
x_2 = f"{2 * x}"
x_3 = f"{3 * x}"
x_4 = f"{4 * x}"
x_5 = f"{5 * x}"
x_6 = f"{6 * x}"
if len(x_1) != len(x_6):
continue
sez1 = []
sez2 = []
sez3 = []
sez4 = []
sez5 = []
sez6 = []
for t in range(len(x_1)):
sez1.append( x_1[t])
sez2.append(x_2[t])
sez3.append(x_3[t])
sez4.append(x_4[t])
sez5.append(x_5[t])
sez6.append(x_6[t])
sez1.sort()
sez2.sort()
sez3.sort()
sez4.sort()
sez5.sort()
sez6.sort()
if sez1 == sez2 == sez3 == sez4 == sez5 == sez6:
return x
return False
print(euler())
avg_line_length: 22.692308 | max_line_length: 57 | alphanum_fraction: 0.348023
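
Project Euler 52 asks for the smallest x such that 2x through 6x are digit permutations of x; the search above converges on the cyclic number 142857. The six hand-built sorted lists can be replaced by a single sorted-string comparison; a leaner equivalent sketch:

def same_digits(x):
    ref = sorted(str(x))
    return all(sorted(str(k * x)) == ref for k in range(2, 7))

assert same_digits(142857)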

hexsha: c2206ee1a878e27a9db7c49f40f8ab68df880580
size: 97 | ext: py | lang: Python
stars:  Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-007/ph-7.15-exer.3-namta.py @ shihab4t/Books-Code (b637b6b2ad42e11faf87d29047311160fe3b2490) | licenses: ["Unlicense"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
def namta(p=1):
for i in range(1, 11):
print(p, "x", i, "=", p*i)
namta(5)
namta()
avg_line_length: 12.125 | max_line_length: 34 | alphanum_fraction: 0.463918

hexsha: 26941fac8e87b447a58ba0bddcfeb2f007ac10c9
size: 869 | ext: py | lang: Python
stars:  marsyas-vamp/marsyas/misc/check-trusted.py @ jaouahbi/VampPlugins (27c2248d1c717417fe4d448cdfb4cb882a8a336a) | licenses: ["Apache-2.0"] | count: null
issues: same as stars | count: null
forks:  same as stars | count: null
#!/usr/bin/env python
import os
# yes, it's another Not Invented Here solution for Marsyas!
# I'm in the mood to actually look at code, not play with build
# systems, so I'm setting up this framework here.
# these are in the src/marsyas/ dir
# the overall idea is to SLOWLY increase the number of "trusted
# files", once each file passes the below checks, and maybe
# has a unit test or something.
TRUSTED_FILES = """
realvec.cpp
""".split()
CHECK_COMMAND = "g++ -c -Wall -Wextra -Werror "
# apparently you can be even stricter than -Wall -Wextra:
#CHECK_COMMAND = "g++ -c -Wall -Wextra -Weffc++ -Werror "
# TODO: maybe add other checkers, like cppcheck, oink, ccc,
# maybe even clang/llvm.
for filename in TRUSTED_FILES:
    print("Checking %s" % (filename))
    cmd = CHECK_COMMAND + os.path.join("..", "src", "marsyas", filename)
    #print (cmd)
    # os.system() returns a non-zero status when the compiler check fails
    if os.system(cmd) != 0:
        raise SystemExit("Check failed for %s" % filename)
| 26.333333 | 69 | 0.700806 |
cd81054e8a85316f594093cf961038e0f6e2f9db
| 1,211 |
py
|
Python
|
Hackerrank_problems/Sales by Match/solution.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 165 |
2020-10-03T08:01:11.000Z
|
2022-03-31T02:42:08.000Z
|
Hackerrank_problems/Sales by Match/solution.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 383 |
2020-10-03T07:39:11.000Z
|
2021-11-20T07:06:35.000Z
|
Hackerrank_problems/Sales by Match/solution.py
|
gbrls/CompetitiveCode
|
b6f1b817a655635c3c843d40bd05793406fea9c6
|
[
"MIT"
] | 380 |
2020-10-03T08:05:04.000Z
|
2022-03-19T06:56:59.000Z
|
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the sockMerchant function below.
def sockMerchant(n, ar):
    ar2 = []
    ar1 = sorted(ar)
    pair = 0
    numb = 0
    # Keep numb strictly below len(ar1); "while numb <= len(ar1)" would run
    # past the last index and raise an IndexError.
    while numb < len(ar1):
        # Collect every occurrence of ar1[numb]; equal values are adjacent
        # because ar1 is sorted. ar2 is cleared below so it can be reused.
        for i in ar1:
            if i == ar1[numb]:
                ar2.append(i)
        a = len(ar2)
        del ar2[:]
        pair += a // 2  # a socks of one colour form a // 2 complete pairs
        numb += a
    return pair
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
n = int(input())
ar = list(map(int, input().rstrip().split()))
result = sockMerchant(n, ar)
fptr.write(str(result) + '\n')
fptr.close()
| 23.288462 | 88 | 0.522709 |
3e3a77e91b532a60c1f3b1a30eed1ff0c6de69d7
| 1,081 |
py
|
Python
|
src/training/pytorch/symbol_detector_dataset.py
|
klawr/deepmech
|
61de238f1d4b1b867ec1d5f4e4af2a3b25a5abff
|
[
"MIT"
] | 1 |
2020-04-17T12:27:06.000Z
|
2020-04-17T12:27:06.000Z
|
src/training/pytorch/symbol_detector_dataset.py
|
klawr/deepmech
|
61de238f1d4b1b867ec1d5f4e4af2a3b25a5abff
|
[
"MIT"
] | 1 |
2022-02-27T13:13:17.000Z
|
2022-02-27T13:13:17.000Z
|
src/training/pytorch/symbol_detector_dataset.py
|
klawr/deepmech
|
61de238f1d4b1b867ec1d5f4e4af2a3b25a5abff
|
[
"MIT"
] | null | null | null |
import os
import pandas as pd
from torch.utils.data import Dataset
from torchvision.io import read_image
from torchvision.io.image import ImageReadMode
labels = [None, "r", "t"]
class SymbolDataSet(Dataset):
def __init__(
self, annotations_file, img_dir, transform=None, target_transform=None
):
self.img_labels = pd.read_json(annotations_file)
self.img_dir = img_dir
self.transform = transform
self.target_transform = target_transform
def __len__(self):
return len(self.img_labels)
def __getitem__(self, idx):
img_path = os.path.join(self.img_dir, self.img_labels.iloc[idx, 0])
image = read_image(img_path, mode=ImageReadMode.GRAY)
float_image = image / 255.0
label = self.img_labels.iloc[idx, 1]
        label = labels.index(label)  # tensors require numeric labels; strings are not allowed
if self.transform:
float_image = self.transform(float_image)
if self.target_transform:
label = self.target_transform(label)
return [float_image, label]
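# Minimal usage sketch (the file names below are illustrative, not part of
# the original module):
#   dataset = SymbolDataSet("annotations.json", "img_dir")
#   float_image, label = dataset[0]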
| 30.885714 | 79 | 0.677151 |
5f55e277acb4236e86c288860dd4538c55ae3e0c
| 604 |
py
|
Python
|
leetcode/179-Largest-Number/LargestNumber_001.py
|
cc13ny/all-in
|
bc0b01e44e121ea68724da16f25f7e24386c53de
|
[
"MIT"
] | 1 |
2015-12-16T04:01:03.000Z
|
2015-12-16T04:01:03.000Z
|
leetcode/179-Largest-Number/LargestNumber_001.py
|
cc13ny/all-in
|
bc0b01e44e121ea68724da16f25f7e24386c53de
|
[
"MIT"
] | 1 |
2016-02-09T06:00:07.000Z
|
2016-02-09T07:20:13.000Z
|
leetcode/179-Largest-Number/LargestNumber_001.py
|
cc13ny/all-in
|
bc0b01e44e121ea68724da16f25f7e24386c53de
|
[
"MIT"
] | 2 |
2019-06-27T09:07:26.000Z
|
2019-07-01T04:40:13.000Z
|
class Solution:
# @param {integer[]} nums
# @return {string}
def largestNumber(self, nums):
nums = sorted(nums, cmp=self.compare)
res, j = '', 0
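        # collapse an all-zero input to a single "0" (e.g. [0, 0] -> "0", not "00")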
for i in range(len(nums) - 1):
if nums[i] != 0:
break
else:
j += 1
for k in range(j, len(nums)):
res += str(nums[k])
return res
def compare(self, x, y):
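        # order x before y when the concatenation str(x)+str(y) beats str(y)+str(x)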
tmp1, tmp2 = str(x) + str(y), str(y) + str(x)
res = 0
if tmp1 > tmp2:
res = -1
elif tmp1 < tmp2:
res = 1
return res
| 24.16 | 53 | 0.427152 |
4b3c493313f5dd62a9bd6cbd5d38872c45383fc7
| 1,350 |
py
|
Python
|
plugins/tff_backend/to/global_stats.py
|
threefoldfoundation/app_backend
|
b3cea2a3ff9e10efcc90d3d6e5e8e46b9e84312a
|
[
"Apache-2.0"
] | null | null | null |
plugins/tff_backend/to/global_stats.py
|
threefoldfoundation/app_backend
|
b3cea2a3ff9e10efcc90d3d6e5e8e46b9e84312a
|
[
"Apache-2.0"
] | 178 |
2017-08-02T12:58:06.000Z
|
2017-12-20T15:01:12.000Z
|
plugins/tff_backend/to/global_stats.py
|
threefoldfoundation/app_backend
|
b3cea2a3ff9e10efcc90d3d6e5e8e46b9e84312a
|
[
"Apache-2.0"
] | 2 |
2018-01-10T10:43:12.000Z
|
2018-03-18T10:42:23.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
from framework.to import TO
from mcfw.properties import unicode_property, long_property, typed_property, float_property, \
bool_property
class CurrencyValueTO(TO):
currency = unicode_property('currency')
value = float_property('value')
timestamp = long_property('timestamp')
auto_update = bool_property('auto_update')
class GlobalStatsTO(TO):
id = unicode_property('id')
name = unicode_property('name')
token_count = long_property('token_count')
unlocked_count = long_property('unlocked_count')
value = float_property('value')
currencies = typed_property('currencies', CurrencyValueTO, True) # type: list[CurrencyValueTO]
market_cap = float_property('market_cap')
| 36.486486 | 99 | 0.745185 |
8acd7b7987d471b5a06b0b85cf4d87efbe582fc7
| 267 |
py
|
Python
|
2124-check-if-all-as-appears-before-all-bs/2124-check-if-all-as-appears-before-all-bs.py
|
hyeseonko/LeetCode
|
48dfc93f1638e13041d8ce1420517a886abbdc77
|
[
"MIT"
] | 2 |
2021-12-05T14:29:06.000Z
|
2022-01-01T05:46:13.000Z
|
2124-check-if-all-as-appears-before-all-bs/2124-check-if-all-as-appears-before-all-bs.py
|
hyeseonko/LeetCode
|
48dfc93f1638e13041d8ce1420517a886abbdc77
|
[
"MIT"
] | null | null | null |
2124-check-if-all-as-appears-before-all-bs/2124-check-if-all-as-appears-before-all-bs.py
|
hyeseonko/LeetCode
|
48dfc93f1638e13041d8ce1420517a886abbdc77
|
[
"MIT"
] | null | null | null |
class Solution:
def checkString(self, s: str) -> bool:
a_indices = [i for i, x in enumerate(s) if x=='a']
try:
if a_indices[-1] > s.index('b'):
return False
except:
pass
return True
| 24.272727 | 58 | 0.460674 |
867d520d4826eb65d1b663a7548bf124b82ade2b
| 5,026 |
py
|
Python
|
Bwinf-Aufgabe2-Wildschweine/boarflow.py
|
laugengebaeck/Bwinf-36-Runde2
|
0bc410b4708e6e153bd1a44d40754434c57e615f
|
[
"Apache-2.0"
] | null | null | null |
Bwinf-Aufgabe2-Wildschweine/boarflow.py
|
laugengebaeck/Bwinf-36-Runde2
|
0bc410b4708e6e153bd1a44d40754434c57e615f
|
[
"Apache-2.0"
] | null | null | null |
Bwinf-Aufgabe2-Wildschweine/boarflow.py
|
laugengebaeck/Bwinf-36-Runde2
|
0bc410b4708e6e153bd1a44d40754434c57e615f
|
[
"Apache-2.0"
] | null | null | null |
import networkx as nx
# breadth-first search
def breadth_first_search(adj, firstlevel):
    seen = {}  # dictionary of distances from the start
    level = 0  # current level
    nextlevel = firstlevel  # nodes on the next level
    while nextlevel:
        thislevel = nextlevel  # new level
        nextlevel = {}  # dictionary for the next level
        for v in thislevel:
            if v not in seen:
                seen[v] = level  # set this node's distance
                nextlevel.update(adj[v])  # add neighbours for the next level
                yield (v, level)
        level += 1
    del seen
# build the residual network
def build_residual_network(G):
    # new network with the same nodes
    R = nx.DiGraph()
    R.add_nodes_from(G)
    inf = float('inf')
    # fetch the edge list
    edge_list = [(u, v, attr) for u, v, attr in G.edges(data=True)]
    for u, v, attr in edge_list:
        r = min(attr.get('capacity', inf), inf)
        if not R.has_edge(u, v):
            # forward edge carries the given capacity
            R.add_edge(u, v, capacity=r)
            # backward edge with capacity zero
            R.add_edge(v, u, capacity=0)
        else:
            # a capacity is defined for the backward edge after all,
            # so it is set accordingly
            R[u][v]['capacity'] = r
    # flow on all edges starts at zero
    for u in R:
        for e in R[u].values():
            e['flow'] = 0
    return R
# Edmonds-Karp algorithm
def edmonds_karp(G, s, t):
    R = build_residual_network(G)
    R_pred = R.pred
    R_succ = R.succ
    # augment the flow along a path
    def augment(path):
        flow = float('inf')
        # determine the minimum residual capacity along the path
        it = iter(path)
        u = next(it)
        for v in it:
            attr = R_succ[u][v]
            flow = min(flow, attr['capacity'] - attr['flow'])
            u = v
        # augment
        it = iter(path)
        u = next(it)
        for v in it:
            # increase the flow on the forward edge
            R_succ[u][v]['flow'] += flow
            # decrease the flow on the backward edge
            R_succ[v][u]['flow'] -= flow
            u = v
        return flow
    # bidirectional breadth-first search for an augmenting path
    def bidirectional_bfs():
        pred = {s: None}
        q_s = [s]
        succ = {t: None}
        q_t = [t]
        while True:
            q = []
            # search from s
            if len(q_s) <= len(q_t):
                for u in q_s:
                    for v, attr in R_succ[u].items():
                        # only if the next node is not yet on the path
                        # ... and the corresponding edge is not yet saturated
                        if v not in pred and attr['flow'] < attr['capacity']:
                            pred[v] = u
                            # the two search fronts meet
                            if v in succ:
                                return v, pred, succ
                            q.append(v)
                # no augmenting path left
                if not q:
                    return None, None, None
                q_s = q
            # search from t -> same principle
            else:
                for u in q_t:
                    for v, attr in R_pred[u].items():
                        if v not in succ and attr['flow'] < attr['capacity']:
                            succ[v] = u
                            if v in pred:
                                return v, pred, succ
                            q.append(v)
                if not q:
                    return None, None, None
                q_t = q
    # determine the maximum flow
    flow_value = 0
    while flow_value < float('inf'):
        v, pred, succ = bidirectional_bfs()
        # no augmenting path left -> flow is maximal
        if pred is None:
            break
        path = [v]
        # path from s to the meeting point
        u = v
        while u != s:
            u = pred[u]
            path.append(u)
        path.reverse()
        # remaining path on to t
        u = v
        while u != t:
            u = succ[u]
            path.append(u)
        # augment the flow
        flow_value += augment(path)
    R.graph['flow_value'] = flow_value
    return R
# minimum cut
def minimum_cut(flowgraph, s, t):
    R = edmonds_karp(flowgraph, s, t)
    # remove saturated edges from the residual network
    cutset = [(u, v, d) for u, v, d in R.edges(data=True)
              if d['flow'] == d['capacity']]
    R.remove_edges_from(cutset)
    # a breadth-first search from s ...
    # ... collects the nodes reachable from s
    nextlevel = {s: 1}
    non_reachable = set(dict(dict(breadth_first_search(R.adj, nextlevel))))
    # all other nodes are not reachable
    # => the nodes are split into 2 partitions
    partition = (set(flowgraph) - non_reachable, non_reachable)
    return R.graph['flow_value'], partition
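# Minimal usage sketch (the graph and node names are illustrative, not part
# of the original module):
#   G = nx.DiGraph()
#   G.add_edge('s', 'a', capacity=3.0)
#   G.add_edge('a', 't', capacity=2.0)
#   cut_value, partition = minimum_cut(G, 's', 't')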
| 31.810127 | 81 | 0.509152 |
86aac998523d08ca882c093f6d0737cb28ffbca4
| 433 |
py
|
Python
|
INBa/2015/ORSHONOVA_E_G/task_5_17.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
INBa/2015/ORSHONOVA_E_G/task_5_17.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
INBa/2015/ORSHONOVA_E_G/task_5_17.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
#Task 5, Variant 17.
#Write a program that, when run, randomly displays the name of one of the three countries belonging to the military-political bloc "Triple Alliance".
#Orshonova. E. G.
#23.05.2016
print("This program displays one of the three countries belonging to the military-political bloc 'Triple Alliance'")
import random
a = random.choice(['Germany', 'Austria-Hungary', 'Italy'])
print(a)
input("\nPress Enter")
| 43.3 | 159 | 0.771363 |
8143316f54ae66eb5e431715bb009a4f140321f2
| 1,090 |
py
|
Python
|
python/oneflow/support/async_util.py
|
wangyuyue/oneflow
|
0a71c22fe8355392acc8dc0e301589faee4c4832
|
[
"Apache-2.0"
] | 3,285 |
2020-07-31T05:51:22.000Z
|
2022-03-31T15:20:16.000Z
|
oneflow/python/lib/core/async_util.py
|
duijiudanggecl/oneflow
|
d2096ae14cf847509394a3b717021e2bd1d72f62
|
[
"Apache-2.0"
] | 2,417 |
2020-07-31T06:28:58.000Z
|
2022-03-31T23:04:14.000Z
|
oneflow/python/lib/core/async_util.py
|
duijiudanggecl/oneflow
|
d2096ae14cf847509394a3b717021e2bd1d72f62
|
[
"Apache-2.0"
] | 520 |
2020-07-31T05:52:42.000Z
|
2022-03-29T02:38:11.000Z
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import threading
def Await(counter, func):
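    # Run func, handing it the Yield callback, then block until Yield has
    # been called `counter` times; returns the list of values passed to Yield.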
assert counter > 0
cond_var = threading.Condition()
counter_box = [counter]
result_list = []
def Yield(result=None):
result_list.append(result)
cond_var.acquire()
assert counter_box[0] > 0
counter_box[0] -= 1
cond_var.notify()
cond_var.release()
func(Yield)
cond_var.acquire()
while counter_box[0] > 0:
cond_var.wait()
cond_var.release()
return result_list
| 27.948718 | 72 | 0.705505 |
d49d76f4645e4a9bf3da638359d3af74e6c199bc
| 6,359 |
py
|
Python
|
pretrain_and_retrain.py
|
freakanth/tensorflow-tips
|
67e5f86024166c77a50572b7dd425645690183c3
|
[
"Unlicense"
] | null | null | null |
pretrain_and_retrain.py
|
freakanth/tensorflow-tips
|
67e5f86024166c77a50572b7dd425645690183c3
|
[
"Unlicense"
] | null | null | null |
pretrain_and_retrain.py
|
freakanth/tensorflow-tips
|
67e5f86024166c77a50572b7dd425645690183c3
|
[
"Unlicense"
] | null | null | null |
import os
import tensorflow as tf
import numpy as np
from random import seed, shuffle
# MLP hyperparameters
N_HIDS = [50, 30, 10]
ACTS = [tf.nn.relu, tf.nn.leaky_relu, tf.nn.relu]
# Dataset hyperparameters
N_ITEMS = 1024
N_DIMS = 30
BATCH_SIZE = 32
N_OUT = 1
# Training hyperparameters
LEARNING_RATE = 1e-3
N_ITERS = 10
# Tensorflow session configuration parameters
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth = True
def create_two_class_data(n_dims, n_items, m_a=0.0, m_b=2.0, s_a=1.0, s_b=0.5):
"""Create a two-class synthetic dataset of points sampled from Gaussian distributions."""
inputs = np.concatenate((np.random.normal(loc=m_a, scale=s_a, size=(n_items, n_dims)),
np.random.normal(loc=m_b, scale=s_b, size=(n_items, n_dims))))
labels = np.concatenate((np.ones((n_items, 1)),
np.zeros((n_items, 1))))
np.random.seed(0xbeef)
np.random.shuffle(inputs)
np.random.seed(0xbeef)
np.random.shuffle(labels)
return (inputs, labels)
def split_into_batches(inputs, labels, batch_size=32):
"""Split data-matrix into a list of batches."""
return zip([inputs[i*batch_size:(i+1)*batch_size, :]
for i in xrange(inputs.shape[0] // batch_size)],
[labels[i*batch_size:(i+1)*batch_size, :]
for i in xrange(labels.shape[0] // batch_size)])
def initialise_mlp(n_dims, n_hids, acts):
"""Initialise an MLP graph."""
def mlp(inputs, n_hiddens, activations, reuse=False):
"""Initialise MLP."""
with tf.variable_scope("mlp_classifier") as mlp_scope:
layer = tf.layers.dense(
inputs,
units=n_hiddens[0],
activation=activations[0],
kernel_initializer=tf.contrib.layers.xavier_initializer(),
name='input_layer')
for idx, (n_hid, act) in enumerate(zip(n_hiddens[1:], activations[1:])):
layer = tf.layers.dense(
layer,
units=n_hid,
activation=act,
kernel_initializer=tf.contrib.layers.xavier_initializer(),
name='layer' + str(idx))
output = tf.layers.dense(
layer,
units=1,
activation=None,
kernel_initializer=tf.contrib.layers.xavier_initializer(),
name='output_layer')
return output
X = tf.placeholder(name='X', dtype=tf.float32, shape=[None, n_dims])
Y_gt = tf.placeholder(name='Y_gt', dtype=tf.float32, shape=[None, 1])
Y_pr = mlp(X, n_hids, acts)
loss = tf.losses.sigmoid_cross_entropy(Y_gt, Y_pr)
updates = tf.train.AdamOptimizer().minimize(loss)
return X, Y_gt, loss, updates
def training_loop(data, session, loss, updates, X, Y_gt, n_iters):
"""Simple training loop."""
for itr in range(n_iters):
        shuffle(data)
        loss_vals = []
        for batch in data:
loss_val, _ = session.run(
[loss, updates],
feed_dict={X: batch[0], Y_gt: batch[1]})
loss_vals.append(loss_val)
print "Iteration %d, training loss %.3f" % (itr, np.mean(loss_vals))
def pretraining(data, n_dims, n_hids, acts, n_iters, save_path='pretrain_MLP'):
"""Randomly initialise and train a model."""
# Create a new graph for the model in its pretraining stage.
pretrain_graph = tf.Graph()
with pretrain_graph.as_default():
X, Y_gt, loss, updates = initialise_mlp(n_dims, n_hids, acts)
with tf.Session(config=config) as pretrain_session:
pretrain_session.run(tf.global_variables_initializer())
training_loop(data, pretrain_session, loss, updates, X, Y_gt, n_iters)
pretrain_saver = tf.train.Saver()
pretrain_saver.save(pretrain_session, os.path.join(save_path, 'model'))
return save_path
def retraining(data, n_dims, n_hids, acts, n_iters, model_path='pretrain_MLP', save_path='retrain_MLP'):
    """Initialise a model's parameters from a pre-trained model and train it."""
    # Load pretrained model and retrieve its parameters as numpy tensors
loaded_graph = tf.Graph()
with loaded_graph.as_default():
with tf.Session(config=config) as load_session:
saver = tf.train.import_meta_graph(os.path.join(model_path, 'model.meta'))
saver.restore(load_session, tf.train.latest_checkpoint(model_path))
loaded_variables = sorted(tf.trainable_variables(), key=lambda v: v.name)
params_pretrain = dict((var.name, var.eval(session=load_session))
for var in loaded_variables)
# Create re-train graph and apply the pre-trained model parameters.
retrain_graph = tf.Graph()
with retrain_graph.as_default():
X, Y_gt, loss, updates = initialise_mlp(n_dims, n_hids, acts)
assignments = dict((var.name, tf.assign(var, params_pretrain[var.name]))
for var in sorted(tf.trainable_variables(), key=lambda v: v.name))
with tf.Session(config=config) as retrain_session:
# Initialise all graph variables to random/initialisation values.
retrain_session.run(tf.global_variables_initializer())
# IMPORTANT: Update trainable variable values to pre-trained values.
retrain_session.run(assignments.values())
training_loop(data, retrain_session, loss, updates, X, Y_gt, n_iters)
            retrain_saver = tf.train.Saver()
            retrain_saver.save(retrain_session, os.path.join(save_path, 'model'))
    return save_path
if __name__ == "__main__":
tf.set_random_seed(31386)
# Pretraining step
pretrain_data = split_into_batches(
*create_two_class_data(N_DIMS, N_ITEMS, m_a=0.0, m_b=2.0, s_a=2.0, s_b=2.0),
batch_size=32)
model_path = pretraining(pretrain_data, N_DIMS, N_HIDS, ACTS, N_ITERS)
# NOTE: All we have at this stage is the string model_path giving us the path to the saved model
# Retraining step
retrain_data = split_into_batches(
*create_two_class_data(N_DIMS, N_ITEMS, m_a=2.0, m_b=0.0, s_a=0.5, s_b=1.0),
batch_size=32)
    model_path = retraining(retrain_data, N_DIMS, N_HIDS, ACTS, N_ITERS, model_path)
| 37.85119 | 100 | 0.638622 |
0792833fe1b0fa11f8c6604cebca9da891714184
| 13,723 |
py
|
Python
|
casts/tests/test_cast.py
|
rocky-roll-call/rrc-backend
|
02e8e11c3dab7661e48650e2e861a4a97788a4ce
|
[
"MIT"
] | null | null | null |
casts/tests/test_cast.py
|
rocky-roll-call/rrc-backend
|
02e8e11c3dab7661e48650e2e861a4a97788a4ce
|
[
"MIT"
] | null | null | null |
casts/tests/test_cast.py
|
rocky-roll-call/rrc-backend
|
02e8e11c3dab7661e48650e2e861a4a97788a4ce
|
[
"MIT"
] | null | null | null |
# stdlib
from datetime import datetime
from shutil import rmtree
# django
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import reverse
# library
from rest_framework import status
from rest_framework.test import APIClient
# app
from users.tests.test_user_photo import make_image
from ..models import Cast
class CastModelTestCase(TestCase):
"""
Tests the Cast model directly
"""
def setUp(self):
self.profile = User.objects.create_user(
username="test", email="[email protected]", password="testing"
).profile
self.cast = Cast.objects.create(name="Test Cast")
def test_details(self):
"""Check that features were created"""
self.assertEqual(self.cast.slug, "test-cast")
self.assertIsInstance(self.cast.created, datetime)
self.assertIsInstance(self.cast.modified, datetime)
def _add_check_remove(self, fadd, fcheck, fremv):
"""Runs lifecycle checks on a user"""
self.assertFalse(fcheck(self.profile))
fadd(self.profile)
self.assertTrue(fcheck(self.profile))
with self.assertRaises(ValueError):
fadd(self.profile)
fremv(self.profile)
self.assertFalse(fcheck(self.profile))
with self.assertRaises(ValueError):
fremv(self.profile)
def test_managers(self):
"""Tests manager lifecycle"""
# Requires membership first
with self.assertRaises(ValueError):
self.cast.add_manager(self.profile)
self.cast.add_member(self.profile)
self._add_check_remove(
self.cast.add_manager, self.cast.is_manager, self.cast.remove_manager
)
self.cast.remove_member(self.profile)
def test_members(self):
"""Tests membership lifecycle"""
self._add_check_remove(
self.cast.add_member, self.cast.is_member, self.cast.remove_member
)
def test_member_add_removes_request(self):
"""Tests adding a member removes an existing request"""
self.cast.add_member_request(self.profile)
self.assertTrue(self.cast.has_requested_membership(self.profile))
self.assertFalse(self.cast.is_member(self.profile))
self.cast.add_member(self.profile)
self.assertTrue(self.cast.is_member(self.profile))
self.assertFalse(self.cast.has_requested_membership(self.profile))
def test_member_add_if_blocked(self):
"""Can't add member if blocked"""
self.cast.block_user(self.profile)
self.assertTrue(self.cast.is_blocked(self.profile))
with self.assertRaises(ValueError):
self.cast.add_member(self.profile)
self.assertFalse(self.cast.is_member(self.profile))
self.assertTrue(self.cast.is_blocked(self.profile))
self.cast.unblock_user(self.profile)
def test_member_remove_if_manager(self):
"""Cannot remove member if a manager"""
self.cast.add_member(self.profile)
self.cast.add_manager(self.profile)
self.assertTrue(self.cast.is_manager(self.profile))
with self.assertRaises(ValueError):
self.cast.remove_member(self.profile)
self.assertTrue(self.cast.is_manager(self.profile))
self.cast.remove_manager(self.profile)
self.cast.remove_member(self.profile)
def test_requests(self):
"""Tests membership request lifecycle"""
self._add_check_remove(
self.cast.add_member_request,
self.cast.has_requested_membership,
self.cast.remove_member_request,
)
def test_request_if_member(self):
"""Can't request if already a member"""
self.cast.add_member(self.profile)
self.assertTrue(self.cast.is_member(self.profile))
with self.assertRaises(ValueError):
self.cast.add_member_request(self.profile)
self.assertFalse(self.cast.has_requested_membership(self.profile))
self.cast.remove_member(self.profile)
def test_request_if_blocked(self):
"""Can't request if blocked"""
self.cast.block_user(self.profile)
self.assertFalse(self.cast.has_requested_membership(self.profile))
with self.assertRaises(ValueError):
self.cast.add_member_request(self.profile)
self.assertFalse(self.cast.has_requested_membership(self.profile))
self.cast.unblock_user(self.profile)
def test_blocked(self):
"""Tests blocked user lifecycle"""
self._add_check_remove(
self.cast.block_user, self.cast.is_blocked, self.cast.unblock_user
)
def test_block_if_manager(self):
"""Can't block a manager"""
self.cast.add_member(self.profile)
self.cast.add_manager(self.profile)
self.assertTrue(self.cast.is_manager(self.profile))
with self.assertRaises(ValueError):
self.cast.block_user(self.profile)
self.assertFalse(self.cast.is_blocked(self.profile))
self.assertTrue(self.cast.is_manager(self.profile))
self.cast.remove_manager(self.profile)
self.cast.remove_member(self.profile)
def test_block_removes_members(self):
"""Blocking an existing member should remove them"""
self.cast.add_member(self.profile)
self.assertTrue(self.cast.is_member(self.profile))
self.cast.block_user(self.profile)
self.assertTrue(self.cast.is_blocked(self.profile))
self.assertFalse(self.cast.is_member(self.profile))
class CastAPITestCase(TestCase):
"""
Test the Cast API
"""
def setUp(self):
rmtree(settings.MEDIA_ROOT, ignore_errors=True)
user = User.objects.create_user(
username="test", email="[email protected]", password="testing"
)
self.profile = user.profile
self.cast1 = Cast.objects.create(name="Test Cast")
self.cast2 = Cast.objects.create(name="Another Cast")
self.cast1.add_member(self.profile)
self.cast1.add_manager(self.profile)
self.client = APIClient()
self.client.force_authenticate(user=user)
def tearDown(self):
rmtree(settings.MEDIA_ROOT, ignore_errors=True)
def test_list(self):
"""Tests calling cast list"""
response = self.client.get(reverse("casts"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 2)
def test_create(self):
"""Tests creating a new cast"""
name, desc, email = "New Cast", "A new cast", "[email protected]"
response = self.client.post(
reverse("casts"), {"name": name, "description": desc, "email": email}
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
cast = Cast.objects.get(pk=response.data["id"])
self.assertEqual(cast.name, name)
self.assertEqual(cast.description, desc)
self.assertEqual(cast.email, email)
self.assertEqual(cast.slug, "new-cast")
self.assertTrue(cast.is_member(self.profile))
self.assertTrue(cast.is_manager(self.profile))
def test_unique_name(self):
"""Casts must have a unique name because of the generated url slug"""
response = self.client.post(reverse("casts"), {"name": self.cast1.name})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_retrieve(self):
"""Tests cast detail request"""
response = self.client.get(reverse("cast", kwargs={"pk": self.cast1.pk}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn("name", response.data)
def test_update(self):
""""Tests updating cast details"""
self.assertEqual(self.cast1.name, "Test Cast")
name, slug = "Updated Cast", "updated-cast"
response = self.client.patch(
reverse("cast", kwargs={"pk": self.cast1.pk}), data={"name": name}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["name"], name)
self.assertEqual(response.data["slug"], slug)
cast = Cast.objects.get(pk=self.cast1.pk)
self.assertEqual(cast.name, name)
self.assertEqual(cast.slug, slug)
def test_forbidden_update(self):
"""Prohibit updates to other casts"""
response = self.client.patch(reverse("cast", kwargs={"pk": self.cast2.pk}))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_update_image(self):
"""Tests updating a cast logo"""
self.assertEqual(self.cast1.logo, "")
tmpim = make_image()
with open(tmpim.name, "rb") as data:
response = self.client.patch(
reverse("cast", kwargs={"pk": self.cast1.pk}),
{"logo": data},
format="multipart",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue(response.data["logo"].endswith(".jpg"))
self.assertIn("logo", response.data["logo"])
cast = Cast.objects.get(pk=self.cast1.pk)
self.assertTrue(cast.logo.path.endswith(".jpg"))
self.assertIn("logo", cast.logo.path)
self.assertIn(self.cast1.slug, cast.logo.path)
def test_delete(self):
"""Tests that a manager can delete their casts but not others"""
response = self.client.delete(reverse("cast", kwargs={"pk": self.cast2.pk}))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
# Delete should fail if the cast has more than one manager
profile = User.objects.create_user(
username="mctest", email="[email protected]", password="testing mctest"
).profile
self.cast1.add_member(profile)
self.cast1.add_manager(profile)
response = self.client.delete(reverse("cast", kwargs={"pk": self.cast1.pk}))
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.cast1.remove_manager(profile)
response = self.client.delete(reverse("cast", kwargs={"pk": self.cast1.pk}))
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
response = self.client.delete(reverse("cast", kwargs={"pk": self.cast1.pk}))
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class CastListAPITestCase(TestCase):
"""
Test the Cast API
"""
def setUp(self):
user = User.objects.create_user(
username="test", email="[email protected]", password="testing"
)
self.profile1 = user.profile
self.profile2 = User.objects.create_user(
username="mctest", email="[email protected]", password="testing mctest"
).profile
self.cast1 = Cast.objects.create(name="Test Cast")
self.cast2 = Cast.objects.create(name="Another Cast")
self.cast1.add_member(self.profile1)
self.cast1.add_manager(self.profile1)
self.client = APIClient()
self.client.force_authenticate(user=user)
def _list_add_check_remove(self, url_name: str, fcheck: str):
"""Tests list endpoint's add, check, remove, and dne events"""
url = reverse(url_name, kwargs={"pk": self.cast1.pk, "pid": self.profile2.pk})
# Cannot remove non-added profile
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertFalse(getattr(self.cast1, fcheck)(self.profile2))
# Add profile
response = self.client.post(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue(getattr(self.cast1, fcheck)(self.profile2))
# Cannot re-add profile
response = self.client.post(url)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# Remove profile
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(getattr(self.cast1, fcheck)(self.profile2))
# Bad cast
response = self.client.delete(
reverse(url_name, kwargs={"pk": 0, "pid": self.profile2.pk})
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# Bad profile
response = self.client.delete(
reverse(url_name, kwargs={"pk": self.cast1.pk, "pid": 0})
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# Not allowed for other casts
response = self.client.post(
reverse(url_name, kwargs={"pk": self.cast2.pk, "pid": self.profile2.pk})
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_member_list(self):
"""Tests member add/remove endpoint"""
self._list_add_check_remove("cast-member", "is_member")
def test_manager_list(self):
"""Tests member add/remove endpoint"""
# Cannot add a non-member
response = self.client.post(
reverse(
"cast-manager", kwargs={"pk": self.cast1.pk, "pid": self.profile2.pk}
)
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.cast1.add_member(self.profile2)
self._list_add_check_remove("cast-manager", "is_manager")
def test_request_list(self):
"""Tests member request add/remove endpoint"""
self._list_add_check_remove("cast-member-request", "has_requested_membership")
def test_blocked_list(self):
"""Tests blocked user add/remove endpoint"""
self._list_add_check_remove("cast-blocked", "is_blocked")
| 40.964179 | 86 | 0.659914 |
07a35227eb5d2c6e83d0d8d308629313c4b6bd5b
| 13,542 |
py
|
Python
|
rfvision/datasets/arti_utils.py
|
mvig-robotflow/rfvision
|
cc662f213dfe5a3e8864a6b5685a668a4436e397
|
[
"Apache-2.0"
] | 6 |
2021-09-25T03:53:06.000Z
|
2022-02-19T03:25:11.000Z
|
rfvision/datasets/arti_utils.py
|
mvig-robotflow/rfvision
|
cc662f213dfe5a3e8864a6b5685a668a4436e397
|
[
"Apache-2.0"
] | 1 |
2021-07-21T13:14:54.000Z
|
2021-07-21T13:14:54.000Z
|
rfvision/datasets/arti_utils.py
|
mvig-robotflow/rfvision
|
cc662f213dfe5a3e8864a6b5685a668a4436e397
|
[
"Apache-2.0"
] | 2 |
2021-07-16T03:25:04.000Z
|
2021-11-22T06:04:01.000Z
|
'''
ARTI is a dataset created by Liu Liu (@liuliu66)
'''
import numpy as np
from math import pi, sin, cos
import itertools
from matplotlib import pyplot as plt
def get_3d_bbox(scale, shift = 0):
"""
Input:
scale: [3] or scalar
shift: [3] or scalar
Return
bbox_3d: [3, N]
"""
if hasattr(scale, "__iter__"):
bbox_3d = np.array([[scale[0] / 2, +scale[1] / 2, scale[2] / 2],
[scale[0] / 2, +scale[1] / 2, -scale[2] / 2],
[-scale[0] / 2, +scale[1] / 2, scale[2] / 2],
[-scale[0] / 2, +scale[1] / 2, -scale[2] / 2],
[+scale[0] / 2, -scale[1] / 2, scale[2] / 2],
[+scale[0] / 2, -scale[1] / 2, -scale[2] / 2],
[-scale[0] / 2, -scale[1] / 2, scale[2] / 2],
[-scale[0] / 2, -scale[1] / 2, -scale[2] / 2]]) + shift
else:
bbox_3d = np.array([[scale / 2, +scale / 2, scale / 2],
[scale / 2, +scale / 2, -scale / 2],
[-scale / 2, +scale / 2, scale / 2],
[-scale / 2, +scale / 2, -scale / 2],
[+scale / 2, -scale / 2, scale / 2],
[+scale / 2, -scale / 2, -scale / 2],
[-scale / 2, -scale / 2, scale / 2],
[-scale / 2, -scale / 2, -scale / 2]]) +shift
bbox_3d = bbox_3d.transpose()
return bbox_3d
def pts_inside_box(pts, bbox):
u1 = bbox[5, :] - bbox[4, :]
u2 = bbox[7, :] - bbox[4, :]
u3 = bbox[0, :] - bbox[4, :]
up = pts - np.reshape(bbox[4, :], (1, 3))
p1 = np.matmul(up, u1.reshape((3, 1)))
p2 = np.matmul(up, u2.reshape((3, 1)))
p3 = np.matmul(up, u3.reshape((3, 1)))
p1 = np.logical_and(p1>0, p1<np.dot(u1, u1))
p2 = np.logical_and(p2>0, p2<np.dot(u2, u2))
p3 = np.logical_and(p3>0, p3<np.dot(u3, u3))
return np.logical_and(np.logical_and(p1, p2), p3)
def iou_3d(bbox1, bbox2, nres=50):
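    # approximate the 3D IoU by sampling an nres^3 grid over the joint
    # bounding region of the two boxes and counting samples inside each box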
bmin = np.min(np.concatenate((bbox1, bbox2), 0), 0)
bmax = np.max(np.concatenate((bbox1, bbox2), 0), 0)
xs = np.linspace(bmin[0], bmax[0], nres)
ys = np.linspace(bmin[1], bmax[1], nres)
zs = np.linspace(bmin[2], bmax[2], nres)
pts = np.array([x for x in itertools.product(xs, ys, zs)])
flag1 = pts_inside_box(pts, bbox1)
flag2 = pts_inside_box(pts, bbox2)
intersect = np.sum(np.logical_and(flag1, flag2))
union = np.sum(np.logical_or(flag1, flag2))
if union==0:
return 1
else:
return intersect/float(union)
def transform_coordinates_3d(coordinates, RT):
"""
Input:
coordinates: [3, N]
RT: [4, 4]
Return
new_coordinates: [3, N]
"""
if coordinates.shape[0] != 3 and coordinates.shape[1]==3:
coordinates = coordinates.transpose()
coordinates = np.vstack([coordinates, np.ones((1, coordinates.shape[1]), dtype=np.float32)])
new_coordinates = RT @ coordinates
new_coordinates = new_coordinates[:3, :]/new_coordinates[3, :]
return new_coordinates
def calculate_2d_projections(coordinates_3d, intrinsics):
"""
Input:
coordinates: [3, N]
intrinsics: [3, 3]
Return
projected_coordinates: [N, 2]
"""
projected_coordinates = intrinsics @ coordinates_3d
projected_coordinates = projected_coordinates[:2, :] / projected_coordinates[2, :]
projected_coordinates = projected_coordinates.transpose()
projected_coordinates = np.array(projected_coordinates, dtype=np.int32)
return projected_coordinates
def compute_RT_distances(RT_1, RT_2):
'''
:param RT_1: [4, 4]. homogeneous affine transformation
:param RT_2: [4, 4]. homogeneous affine transformation
:return: theta: angle difference of R in degree, shift: l2 difference of T in centimeter
'''
if RT_1 is None or RT_2 is None:
return -1
try:
assert np.array_equal(RT_1[3, :], RT_2[3, :])
assert np.array_equal(RT_1[3, :], np.array([0, 0, 0, 1]))
except AssertionError:
print(RT_1[3, :], RT_2[3, :])
R1 = RT_1[:3, :3]/np.cbrt(np.linalg.det(RT_1[:3, :3]))
T1 = RT_1[:3, 3]
R2 = RT_2[:3, :3]/np.cbrt(np.linalg.det(RT_2[:3, :3]))
T2 = RT_2[:3, 3]
R = R1 @ R2.transpose()
theta = np.arccos((np.trace(R) - 1)/2) * 180/np.pi
shift = np.linalg.norm(T1-T2) * 100
# print(theta, shift)
if theta < 5 and shift < 5:
return 10 - theta - shift
else:
return -1
def axis_diff_degree(v1, v2):
v1 = v1.reshape(-1)
v2 = v2.reshape(-1)
r_diff = np.arccos(np.sum(v1*v2)/(np.linalg.norm(v1) * np.linalg.norm(v2))) * 180 / np.pi
return min(r_diff, 180-r_diff)
def rot_diff_degree(rot1, rot2):
return rot_diff_rad(rot1, rot2) / np.pi * 180
def rot_diff_rad(rot1, rot2):
return np.arccos( ( np.trace(np.matmul(rot1, rot2.T)) - 1 ) / 2 ) % (2*np.pi)
def rotate_points_with_rotvec(points, rot_vecs):
"""Rotate points by given rotation vectors.
Rodrigues' rotation formula is used.
"""
theta = np.linalg.norm(rot_vecs, axis=1)[:, np.newaxis]
with np.errstate(invalid='ignore'):
v = rot_vecs / theta
v = np.nan_to_num(v)
dot = np.sum(points * v, axis=1)[:, np.newaxis]
cos_theta = np.cos(theta)
sin_theta = np.sin(theta)
return cos_theta * points + sin_theta * np.cross(v, points) + dot * (1 - cos_theta) * v
def dist_between_3d_lines(p1, e1, p2, e2):
p1 = p1.reshape(-1)
p2 = p2.reshape(-1)
e1 = e1.reshape(-1)
e2 = e2.reshape(-1)
orth_vect = np.cross(e1, e2)
product = np.sum(orth_vect * (p1 - p2))
dist = product / np.linalg.norm(orth_vect)
return np.abs(dist)
def project3d(pcloud_target, projMat, height=512, width=512):
pcloud_projected = np.dot(pcloud_target, projMat.T)
pcloud_projected_ndc = pcloud_projected/pcloud_projected[:, 3:4]
img_coord = (pcloud_projected_ndc[:, 0:2] + 1)/(1/256)
print('transformed image coordinates:\n', img_coord.shape)
u = img_coord[:, 0]
v = img_coord[:, 1]
u = u.astype(np.int16)
v = v.astype(np.int16)
v = 512 - v
print('u0, v0:\n', u[0], v[0])
return u, v # x, y in cv coords
def point_3d_offset_joint(joint, point):
"""
joint: [x, y, z] or [[x, y, z] + [rx, ry, rz]]
point: N * 3
"""
if len(joint) == 2:
P0 = np.array(joint[0])
P = np.array(point)
l = np.array(joint[1]).reshape(1, 3)
P0P= P - P0
PP = np.dot(P0P, l.T) * l / np.linalg.norm(l)**2 - P0P
return PP
def rotate_pts(source, target):
'''
func: compute rotation between source: [N x 3], target: [N x 3]
'''
source = source - np.mean(source, 0, keepdims=True)
target = target - np.mean(target, 0, keepdims=True)
M = np.matmul(target.T, source)
U, D, Vh = np.linalg.svd(M, full_matrices=True)
d = (np.linalg.det(U) * np.linalg.det(Vh)) < 0.0
if d:
D[-1] = -D[-1]
U[:, -1] = -U[:, -1]
R = np.matmul(U, Vh)
return R
def transform_pts(source, target):
# source: [N x 3], target: [N x 3]
# pre-centering and compute rotation
source_centered = source - np.mean(source, 0, keepdims=True)
target_centered = target - np.mean(target, 0, keepdims=True)
rotation = rotate_pts(source_centered, target_centered)
scale = scale_pts(source_centered, target_centered)
# compute translation
translation = np.mean(target.T-scale*np.matmul(rotation, source.T), 1)
return rotation, scale, translation
def scale_pts(source, target):
'''
func: compute scaling factor between source: [N x 3], target: [N x 3]
'''
pdist_s = source.reshape(source.shape[0], 1, 3) - source.reshape(1, source.shape[0], 3)
A = np.sqrt(np.sum(pdist_s**2, 2)).reshape(-1)
pdist_t = target.reshape(target.shape[0], 1, 3) - target.reshape(1, target.shape[0], 3)
b = np.sqrt(np.sum(pdist_t**2, 2)).reshape(-1)
scale = np.dot(A, b) / (np.dot(A, A)+1e-6)
return scale
def compute_3d_rotation_axis(pts_0, pts_1, rt, orientation=None, line_pts=None, methods='H-L', item='eyeglasses', viz=False):
"""
pts_0: points in NOCS space of cannonical status(scaled)
pts_1: points in camera space retrieved from depth image;
rt: rotation + translation in 4 * 4
"""
num_parts = len(rt)
print('we have {} parts'.format(num_parts))
chained_pts = [None] * num_parts
chained_pts[0] = np.dot( np.concatenate([ pts_0[0], np.ones((pts_0[0].shape[0], 1)) ], axis=1), rt[0].T )
axis_list = []
angle_list= []
if item == 'eyeglasses':
for j in range(1, num_parts):
chained_pts[j] = np.dot(np.concatenate([ pts_0[j], np.ones((pts_0[j].shape[0], 1)) ], axis=1), rt[0].T)
if methods == 'H-L':
RandIdx = np.random.randint(chained_pts[j].shape[1], size=5)
orient, position= estimate_joint_HL(chained_pts[j][RandIdx, 0:3], pts_1[j][RandIdx, 0:3])
joint_axis = {}
joint_axis['orient'] = orient
joint_axis['position'] = position
source_offset_arr= point_3d_offset_joint([position.reshape(1, 3), orient], chained_pts[j][RandIdx, 0:3])
rotated_offset_arr= point_3d_offset_joint([position.reshape(1, 3), orient.reshape(1, 3)], pts_1[j][RandIdx, 0:3])
angle = []
for m in range(RandIdx.shape[0]):
modulus_0 = np.linalg.norm(source_offset_arr[m, :])
modulus_1 = np.linalg.norm(rotated_offset_arr[m, :])
cos_angle = np.dot(source_offset_arr[m, :].reshape(1, 3), rotated_offset_arr[m, :].reshape(3, 1))/(modulus_0 * modulus_1)
angle_per_pair = np.arccos(cos_angle)
angle.append(angle_per_pair)
                print('angle per pair from multiple pairs: {}'.format(angle))
angle_list.append(sum(angle)/len(angle))
axis_list.append(joint_axis)
angle_list.append(angle)
return axis_list, angle_list
def point_rotate_about_axis(pts, anchor, unitvec, theta):
a, b, c = anchor.reshape(3)
u, v, w = unitvec.reshape(3)
x = pts[:, 0]
y = pts[:, 1]
z = pts[:, 2]
ss = u*x + v*y + w*z
x_rotated = (a*(v**2 + w**2) - u*(b*v + c*w - ss)) * (1 - cos(theta)) + x * cos(theta) + (-c*v + b*w - w*y + v*z) * sin(theta)
y_rotated = (b*(u**2 + w**2) - v*(a*u + c*w - ss)) * (1 - cos(theta)) + y * cos(theta) + (c*u - a*w + w*x - u*z) * sin(theta)
z_rotated = (c*(u**2 + v**2) - w*(a*u + b*v - ss)) * (1 - cos(theta)) + z * cos(theta) + (-b*u + a*v - v*x + u*y) * sin(theta)
rotated_pts = np.zeros_like(pts)
rotated_pts[:, 0] = x_rotated
rotated_pts[:, 1] = y_rotated
rotated_pts[:, 2] = z_rotated
return rotated_pts
def estimate_joint_HL(source_pts, rotated_pts):
# estimate offsets
delta_P = rotated_pts - source_pts
assert delta_P.shape[1] == 3, 'points coordinates dimension is wrong, current is {}'.format(delta_P.shape)
mid_pts = (source_pts + rotated_pts)/2
CC = np.zeros((3, 3), dtype=np.float32)
BB = np.zeros((delta_P.shape[0], 1), dtype=np.float32)
for j in range(0, delta_P.shape[0]):
CC += np.dot(delta_P[j, :].reshape(3, 1), delta_P[j, :].reshape(1, 3))
BB[j] = np.dot(delta_P[j, :].reshape(1, 3), mid_pts[j, :].reshape((3, 1)))
w, v = np.linalg.eig(CC)
print('eigen vectors are: \n', v)
    print('eigen values are: \n', w)
orient = v[:, np.argmin(np.squeeze(w))].reshape(3, 1)
# we already decouple the orient & position
mat_1 = np.linalg.pinv( np.dot(delta_P.T, delta_P) )
position = np.dot( np.dot(mat_1, delta_P.T), BB)
print('orient has shape {}, position has shape {}'.format(orient.shape, position.shape))
return orient, position
if __name__ == '__main__':
#>>>>>>>>> 3D IOU compuatation
from scipy.spatial.transform import Rotation
bbox1 = np.array([[-1, 1, 1], [1, 1, 1], [1, -1, 1], [-1, -1, 1], [-1, 1, -1], [1, 1, -1], [1, -1, -1], [-1, -1, -1]])
print('bbox1.shape: ', bbox1.shape)
rotmatrix = Rotation.from_rotvec(np.pi/4 * np.array([np.sqrt(3)/3, np.sqrt(3)/3, np.sqrt(3)/3])).as_dcm()
bbox2 = np.matmul(bbox1, rotmatrix.T)
bbox3 = bbox1 + np.array([[1, 0, 0]])
rotmatrix2 = Rotation.from_rotvec(np.pi/4 * np.array([0, 0, 1])).as_dcm()
bbox4 = np.matmul(bbox1, rotmatrix2.T)
bbox5 = bbox1 + np.array([[2, 0, 0]])
print(iou_3d(bbox1, bbox1))
print(iou_3d(bbox1, bbox2))
print(iou_3d(bbox1, bbox3))
print(iou_3d(bbox1, bbox4))
print(iou_3d(bbox1, bbox5))
#>>>>>>>>> test for joint parameters fitting
source_pts = np.array([[5, 1, 5], [0, 0, 1], [0.5,0.5,0.5], [2, 0, 1], [3, 3, 5]])
p1 = np.array([0,0,0])
p2 = np.array([1,1,1])
unitvec = (p2 - p1) / np.linalg.norm(p2 - p1)
anchor = p1
rotated_pts = point_rotate_about_axis(source_pts, anchor, unitvec, pi)
joint_axis, position = estimate_joint_HL(source_pts, rotated_pts)
print(joint_axis, position)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(source_pts[:, 0], source_pts[:, 1], source_pts[:, 2], c='r',
marker='o', label='source pts')
ax.scatter(rotated_pts[:, 0], rotated_pts[:, 1], rotated_pts[:, 2], c='b',
marker='o', label='rotated pts')
linepts = unitvec * np.mgrid[-5:5:2j][:, np.newaxis] + np.array(p1).reshape(1, 3)
ax.plot3D(*linepts.T, linewidth=5, c='green')
ax.legend(loc='lower left')
plt.show()
| 37.932773 | 141 | 0.578792 |
07e031318c79a7b25249268ebfc543fd24677322
| 434 |
py
|
Python
|
Simplification/Image_Greyscaling.py
|
Klark007/Selbstfahrendes-Auto-im-Modell
|
d7fe81392de2b29b7dbc7c9d929fa0031b89900b
|
[
"MIT"
] | null | null | null |
Simplification/Image_Greyscaling.py
|
Klark007/Selbstfahrendes-Auto-im-Modell
|
d7fe81392de2b29b7dbc7c9d929fa0031b89900b
|
[
"MIT"
] | null | null | null |
Simplification/Image_Greyscaling.py
|
Klark007/Selbstfahrendes-Auto-im-Modell
|
d7fe81392de2b29b7dbc7c9d929fa0031b89900b
|
[
"MIT"
] | null | null | null |
import numpy as np
from PIL import Image, ImageEnhance
def prepare_image(path, width, brightness):
img = Image.open(path).convert('L')
enhancer = ImageEnhance.Brightness(img)
img_out = enhancer.enhance(brightness)
w, h = img_out.size
height = int(h * (width / w))
new_img = img_out.resize((width, height))
small = np.array(new_img)
    # Show the resized greyscale image (debugging aid)
    # new_img.show()
return small
| 18.869565 | 45 | 0.663594 |
135a792149335c36b8d42bfe907d419427ac71ba
| 1,300 |
py
|
Python
|
RunFaster/parllelism/worker_ex.py
|
BALAVIGNESHDOSTRIX/pyexpert
|
300498f66a3a4f6b3060d51b3d6643d8e63cf746
|
[
"CC0-1.0"
] | null | null | null |
RunFaster/parllelism/worker_ex.py
|
BALAVIGNESHDOSTRIX/pyexpert
|
300498f66a3a4f6b3060d51b3d6643d8e63cf746
|
[
"CC0-1.0"
] | null | null | null |
RunFaster/parllelism/worker_ex.py
|
BALAVIGNESHDOSTRIX/pyexpert
|
300498f66a3a4f6b3060d51b3d6643d8e63cf746
|
[
"CC0-1.0"
] | null | null | null |
from multiprocessing import Queue, Process, current_process
import queue, time
'''
Parallelism achieved by running the same worker function on several cores at
the same time. Tasks are added to a queue, worker processes are created and
started, join() waits for each process to finish, and finally the results are
printed from the second queue.
'''
def worker(task_to_complete_queue, task_done_queue):
while True:
try:
task = task_to_complete_queue.get_nowait()
except queue.Empty:
break
else:
print(task)
task_done_queue.put(task + " done by " + current_process().name)
time.sleep(.5)
return True
def main():
number_of_task = 20
number_of_proc = 10
process_l = []
task_to_complete_queue = Queue()
task_done_queue = Queue()
for task in range(1, number_of_task):
task_to_complete_queue.put("Tasks - " + str(task))
for _ in range(1, number_of_proc):
prx = Process(target=worker, args=(task_to_complete_queue, task_done_queue))
process_l.append(prx)
prx.start()
for pross in process_l:
pross.join()
while not task_done_queue.empty():
print(task_done_queue.get())
if __name__ == '__main__':
    # the guard keeps spawned child processes from re-executing main()
    main()
| 28.888889 | 185 | 0.66 |
dbffd025a8c86b3e2b89ae663e2aac84dc57a5b7
| 286 |
py
|
Python
|
listings/chapter02/vectors.py
|
rh0-me/ITHandbuch10
|
1582d6317e11cc958a14e157440dccf94d44583b
|
[
"Apache-2.0"
] | 6 |
2021-08-04T19:42:53.000Z
|
2022-01-17T14:24:45.000Z
|
listings/chapter02/vectors.py
|
rh0-me/ITHandbuch10
|
1582d6317e11cc958a14e157440dccf94d44583b
|
[
"Apache-2.0"
] | 1 |
2021-08-30T06:32:08.000Z
|
2021-08-30T06:42:46.000Z
|
listings/chapter02/vectors.py
|
rh0-me/ITHandbuch10
|
1582d6317e11cc958a14e157440dccf94d44583b
|
[
"Apache-2.0"
] | 1 |
2022-02-17T10:32:17.000Z
|
2022-02-17T10:32:17.000Z
|
import matplotlib.pyplot as plt
plt.xlabel('x')
plt.ylabel('y')
plt.arrow(0, 0, 4, 3, head_width=0.1)
plt.arrow(0, 0, 3, 4, head_width=0.1)
plt.text(4.1, 3.1, 'a')
plt.text(3.1, 4.1, 'b')
plt.grid()
plt.axis([-1, 6, -1, 6])
plt.axhline(linewidth=2)
plt.axvline(linewidth=2)
plt.show()
| 20.428571 | 37 | 0.643357 |
16ce8312bb8fe9c9bc4bdbb458b86e2b724a45af
| 162 |
py
|
Python
|
abfahrt/testutils/test_generator/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | 1 |
2022-01-30T14:30:02.000Z
|
2022-01-30T14:30:02.000Z
|
abfahrt/testutils/test_generator/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
abfahrt/testutils/test_generator/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
"""
This is the module test_generator for automatically testing/validating randomly generated input
"""
from abfahrt.testutils.test_generator import test_generator
| 32.4 | 93 | 0.814815 |
bcaac9e704dea75ae3c1aa93d1920e38194051c4
| 3,414 |
py
|
Python
|
unpack.py
|
CodersArmyTeam/unpacker
|
f5798d576b7a3a0045614fa17bddce7d41d25f60
|
[
"MIT"
] | 1 |
2021-01-24T10:14:46.000Z
|
2021-01-24T10:14:46.000Z
|
unpack.py
|
CodersArmyTeam/unpacker
|
f5798d576b7a3a0045614fa17bddce7d41d25f60
|
[
"MIT"
] | null | null | null |
unpack.py
|
CodersArmyTeam/unpacker
|
f5798d576b7a3a0045614fa17bddce7d41d25f60
|
[
"MIT"
] | null | null | null |
import re
import mmap
import os
png_names = []
ogg_names = []
level_names = []
def save_to_file(filename, content):
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, 'wb') as file:
file.write(content)
def log(string):
print("[UNPACKER] " + string)
def png_files(content):
log("UNPACKING PNG FILES...")
p1 = re.compile(b'\x89\x50\x4E\x47\x0D\x0A\x1A\x0A')
starts = []
for start in p1.finditer(content):
starts.append(start.start())
p2 = re.compile(b'\x49\x45\x4E\x44\xAE\x42\x60\x82')
ends = []
for end in p2.finditer(content):
ends.append(end.end())
files_count = len(png_names)
for x in range(files_count):
save_to_file(png_names[x], content[starts[x]:ends[x]])
log("UNPACKING PNG FILES: DONE")
def ogg_files(content):
log("UNPACKING OGG FILES...")
p = re.compile(b'\x4F\x67\x67\x53\x00\x02')
headers = []
for header in p.finditer(content):
headers.append(header.start())
files_count = len(ogg_names)
for x in range(files_count):
#result_file = open('OGG/' + str(x) + '.ogg', 'wb')
if x < len(headers)-1:
#result_file.write(content[headers[x]:headers[x+1]-1])
save_to_file(ogg_names[x], content[headers[x]:headers[x+1]-1])
#else:
#save_to_file(ogg_names[x], content[headers[x]:])
#result_file.write(content[headers[x]:])
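        # the final stream is apparently skipped above: its end is not marked
        # by a following OggS header, so its length cannot be determined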
log("UNPACKING OGG FILES: DONE")
def text(content):
log("UNPACKING LANG...")
text_content = content[23435323:]
result_file = open('dog.txt', 'wb')
result_file.write(text_content)
result_file.close()
log("UNPACKING LANG: DONE")
def file_names(content):
name_content = content[:34632]
p = re.compile(b'\x41\x53\x53\x45\x54\x53')
pos = []
for x in p.finditer(name_content):
pos.append(x.start())
unknown = 0
names = []
for x in range(len(pos)):
if x < len(pos)-1:
names.append(name_content[pos[x]:pos[x+1]-12])
else:
names.append(name_content[pos[x]:])
for name in names:
if name.find(b".png") > -1:
png_names.append(name)
elif name.find(b".ogg") > -1:
ogg_names.append(name)
elif name.find(b".poziom") > -1:
level_names.append(name)
else:
log("UNKNOWN FILE TYPE")
unknown += 1
log("NUMBER OF FILES WITH UNKNOWN EXTENSIONS: " + str(unknown))
def level_files(content):
log("UNPACKING LEVEL FILES...")
level_content = content[23131321:23435322]
level_count = len(level_names) #int((23435322-23131321)/1152)+1
print(level_count)
    for x in range(level_count):
        if x < level_count - 1:  # the last level takes the remaining bytes
            save_to_file(level_names[x], level_content[x*1152:(x+1)*1152])
        else:
            save_to_file(level_names[x], level_content[x*1152:])
log("LEVEL FILES: DONE")
with open('../gamedata', 'rb') as file:
with mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content:
log("VERSION 0.1.5")
log("WORKS ONLY WITH GAMEDATA FROM PB 1.0!!!")
file_names(content)
text(content)
#level_files(content) <-- not work
png_files(content)
ogg_files(content)
| 28.932203 | 75 | 0.580551 |
d5d18c7d3c68c6ebb0fbdd6c19b128337687ee69
| 409 |
py
|
Python
|
tests/service/test_json_data_writer.py
|
jonashellmann/informaticup21-team-chillow
|
f2e519af0a5d9a9368d62556703cfb1066ebb58f
|
[
"MIT"
] | 3 |
2021-01-17T23:32:07.000Z
|
2022-01-30T14:49:16.000Z
|
tests/service/test_json_data_writer.py
|
jonashellmann/informaticup21-team-chillow
|
f2e519af0a5d9a9368d62556703cfb1066ebb58f
|
[
"MIT"
] | 2 |
2021-01-17T13:37:56.000Z
|
2021-04-14T12:28:49.000Z
|
tests/service/test_json_data_writer.py
|
jonashellmann/informaticup21-team-chillow
|
f2e519af0a5d9a9368d62556703cfb1066ebb58f
|
[
"MIT"
] | 2 |
2021-04-02T14:53:38.000Z
|
2021-04-20T11:10:17.000Z
|
import unittest
from chillow.service.data_writer import JSONDataWriter
from chillow.model.action import Action
class JSONDataWriterTest(unittest.TestCase):
def setUp(self):
self.sut = JSONDataWriter()
def test_action_should_be_represented_in_json(self):
action = Action.speed_up
result = self.sut.write(action)
self.assertEqual(result, '{"action": "speed_up"}')
| 22.722222 | 58 | 0.723716 |
fc31910284b865007ca4fde4cdac3fe2e4bb4a7d
| 2,026 |
py
|
Python
|
research/audio/tacotron2/src/hparams.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/audio/tacotron2/src/hparams.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/audio/tacotron2/src/hparams.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
''' configs file '''
from src.text import symbols
class hparams:
''' configs '''
text_cleaners = ['english_cleaners']
# Preprocessing parameters
num_mels = 80
num_freq = 513
sample_rate = 22050
frame_length_ms = 50
frame_shift_ms = 12.5
preemphasis = 0.85
min_level_db = -100
ref_level_db = 20
power = 1.5
gl_iters = 100
fmin = 125
fmax = 7600
hop_length = 256
win_length = 1024
# Model Parameters
symbols_embedding_dim = 512
# Encoder parameters
encoder_kernel_size = 5
encoder_n_convolutions = 3
encoder_embedding_dim = 512
# Decoder parameters
n_frames_per_step = 3
decoder_rnn_dim = 1024
prenet_dim = 256
max_decoder_steps = 1000
gate_threshold = 0.5
p_attention_dropout = 0.1
p_decoder_dropout = 0.1
# Attention parameters
attention_rnn_dim = 1024
attention_dim = 256
# Location Layer parameters
attention_location_n_filters = 32
attention_location_kernel_size = 31
# Mel-post processing network parameters
postnet_embedding_dim = 512
postnet_kernel_size = 5
postnet_n_convolutions = 5
lr = 0.002
epoch_num = 2000
batch_size = 16
test_batch_size = 1
mask_padding = True
p = 10 # mel spec loss penalty
max_text_len = 189
n_symbols = len(symbols)
| 25.64557 | 78 | 0.67078 |
fc5dabb6446b286c0aeee07d24bf9adc9de197e0
| 9,804 |
py
|
Python
|
python/pyqt/fbs/src/main/python/main.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
python/pyqt/fbs/src/main/python/main.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
python/pyqt/fbs/src/main/python/main.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
from fbs_runtime.application_context.PyQt5 import (
ApplicationContext,
cached_property,
)
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
import sys
import time
import random
class AppContext(ApplicationContext):  # 1. Subclass ApplicationContext
def run(self): # 2. Implement run()
self.main_window.show()
return self.app.exec_()
@cached_property
def main_window(self):
return MainWindow(self)
@cached_property
def img_bomb(self):
return QImage(self.get_resource('images/bug.png'))
@cached_property
def img_flag(self):
return QImage(self.get_resource('images/flag.png'))
@cached_property
def img_start(self):
return QImage(self.get_resource('images/rocket.png'))
@cached_property
def img_clock(self):
return QImage(self.get_resource('images/clock-select.png'))
@cached_property
def status_icons(self):
return {
STATUS_READY: QIcon(self.get_resource("images/plus.png")),
STATUS_PLAYING: QIcon(self.get_resource("images/smiley.png")),
STATUS_FAILED: QIcon(self.get_resource("images/cross.png")),
STATUS_SUCCESS: QIcon(self.get_resource("images/smiley-lol.png")),
}
NUM_COLORS = {
1: QColor('#f44336'),
2: QColor('#9C27B0'),
3: QColor('#3F51B5'),
4: QColor('#03A9F4'),
5: QColor('#00BCD4'),
6: QColor('#4CAF50'),
7: QColor('#E91E63'),
8: QColor('#FF9800'),
}
LEVELS = [
(8, 10),
(16, 40),
(24, 99),
]
STATUS_READY = 0
STATUS_PLAYING = 1
STATUS_FAILED = 2
STATUS_SUCCESS = 3
class Pos(QWidget):
expandable = pyqtSignal(int, int)
clicked = pyqtSignal()
ohno = pyqtSignal()
def __init__(self, x, y, img_flag, img_start, img_bomb):
super(Pos, self).__init__()
self.setFixedSize(QSize(20, 20))
self.img_flag = img_flag
self.img_start = img_start
self.img_bomb = img_bomb
self.x = x
self.y = y
def reset(self):
self.is_start = False
self.is_mine = False
self.adjacent_n = 0
self.is_revealed = False
self.is_flagged = False
self.update()
def paintEvent(self, event):
p = QPainter(self)
p.setRenderHint(QPainter.Antialiasing)
r = event.rect()
if self.is_revealed:
color = self.palette().color(QPalette.Background)
outer, inner = color, color
else:
outer, inner = Qt.gray, Qt.lightGray
p.fillRect(r, QBrush(inner))
pen = QPen(outer)
pen.setWidth(1)
p.setPen(pen)
p.drawRect(r)
if self.is_revealed:
if self.is_start:
p.drawPixmap(r, QPixmap(self.img_start))
elif self.is_mine:
p.drawPixmap(r, QPixmap(self.img_bomb))
elif self.adjacent_n > 0:
pen = QPen(NUM_COLORS[self.adjacent_n])
p.setPen(pen)
f = p.font()
f.setBold(True)
p.setFont(f)
p.drawText(r, Qt.AlignHCenter | Qt.AlignVCenter, str(self.adjacent_n))
elif self.is_flagged:
p.drawPixmap(r, QPixmap(self.img_flag))
def flag(self):
self.is_flagged = True
self.update()
self.clicked.emit()
def reveal(self):
self.is_revealed = True
self.update()
def click(self):
if not self.is_revealed:
self.reveal()
if self.adjacent_n == 0:
self.expandable.emit(self.x, self.y)
self.clicked.emit()
def mouseReleaseEvent(self, e):
if (e.button() == Qt.RightButton and not self.is_revealed):
self.flag()
elif (e.button() == Qt.LeftButton):
self.click()
if self.is_mine:
self.ohno.emit()
class MainWindow(QMainWindow):
def __init__(self, ctx):
super(MainWindow, self).__init__()
self.ctx = ctx
self.b_size, self.n_mines = LEVELS[1]
w = QWidget()
hb = QHBoxLayout()
self.mines = QLabel()
self.mines.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
self.clock = QLabel()
self.clock.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
f = self.mines.font()
f.setPointSize(24)
f.setWeight(75)
self.mines.setFont(f)
self.clock.setFont(f)
self._timer = QTimer()
self._timer.timeout.connect(self.update_timer)
self._timer.start(1000)
self.mines.setText("%03d" % self.n_mines)
self.clock.setText("000")
self.button = QPushButton()
self.button.setFixedSize(QSize(32, 32))
self.button.setIconSize(QSize(32, 32))
self.button.setIcon(self.ctx.status_icons[STATUS_PLAYING])
self.button.setFlat(True)
self.button.pressed.connect(self.button_pressed)
l = QLabel()
l.setPixmap(QPixmap.fromImage(self.ctx.img_bomb))
l.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
hb.addWidget(l)
hb.addWidget(self.mines)
hb.addWidget(self.button)
hb.addWidget(self.clock)
l = QLabel()
l.setPixmap(QPixmap.fromImage(self.ctx.img_clock))
l.setAlignment(Qt.AlignLeft | Qt.AlignVCenter)
hb.addWidget(l)
vb = QVBoxLayout()
vb.addLayout(hb)
self.grid = QGridLayout()
self.grid.setSpacing(5)
vb.addLayout(self.grid)
w.setLayout(vb)
self.setCentralWidget(w)
        self.init_map()
        self.reset_map()
        self.update_status(STATUS_READY)
        self.show()
def init_map(self):
# Add positions to the map
for x in range(0, self.b_size):
for y in range(0, self.b_size):
w = Pos(x, y, self.ctx.img_flag, self.ctx.img_start, self.ctx.img_bomb)
self.grid.addWidget(w, y, x)
# Connect signal to handle expansion
w.clicked.connect(self.trigger_start)
w.expandable.connect(self.expand_reveal)
w.ohno.connect(self.game_over)
def reset_map(self):
# Clear all mine positions
for x in range(0, self.b_size):
for y in range(0, self.b_size):
w = self.grid.itemAtPosition(y, x).widget()
w.reset()
# Add mines to the positions
positions = []
while len(positions) < self.n_mines:
x, y = random.randint(0, self.b_size - 1), random.randint(0, self.b_size - 1)
if (x, y) not in positions:
w = self.grid.itemAtPosition(y, x).widget()
w.is_mine = True
positions.append((x, y))
def get_adjacency_n(x, y):
positions = self.get_surrounding(x, y)
n_mines = sum(1 if w.is_mine else 0 for w in positions)
return n_mines
# Add adjacencies to the positions
for x in range(0, self.b_size):
for y in range(0, self.b_size):
w = self.grid.itemAtPosition(y, x).widget()
w.adjacent_n = get_adjacency_n(x, y)
# Place starting marker
while True:
x, y = random.randint(0, self.b_size - 1), random.randint(0, self.b_size - 1)
w = self.grid.itemAtPosition(y, x).widget()
# We don't want to start on a mine
if (x, y) not in positions:
w = self.grid.itemAtPosition(y, x).widget()
w.is_start = True
# Reveal all positions around this, if they are not mines either
for w in self.get_surrounding(x, y):
if not w.is_mine:
w.click()
break
def get_surrounding(self, x, y):
positions = []
for xi in range(max(0, x - 1), min(x + 2, self.b_size)):
for yi in range(max(0, y - 1), min(y + 2, self.b_size)):
positions.append(self.grid.itemAtPosition(yi, xi).widget())
return positions
def button_pressed(self):
if self.status == STATUS_PLAYING:
self.update_status(STATUS_FAILED)
self.reveal_map()
elif self.status == STATUS_FAILED:
self.update_status(STATUS_READY)
self.reset_map()
def reveal_map(self):
for x in range(0, self.b_size):
for y in range(0, self.b_size):
w = self.grid.itemAtPosition(y, x).widget()
w.reveal()
def expand_reveal(self, x, y):
for xi in range(max(0, x - 1), min(x + 2, self.b_size)):
for yi in range(max(0, y - 1), min(y + 2, self.b_size)):
w = self.grid.itemAtPosition(yi, xi).widget()
if not w.is_mine:
w.click()
def trigger_start(self, *args):
if self.status != STATUS_PLAYING:
# First click
self.update_status(STATUS_PLAYING)
# Start timer
self._timer_start_nsecs = int(time.time())
def update_status(self, status):
self.status = status
self.button.setIcon(self.ctx.status_icons[self.status])
def update_timer(self):
if self.status == STATUS_PLAYING:
n_secs = int(time.time()) - self._timer_start_nsecs
self.clock.setText("%03d" % n_secs)
def game_over(self):
self.reveal_map()
self.update_status(STATUS_FAILED)
if __name__ == '__main__':
appctxt = AppContext() # 4. Instantiate the subclass
exit_code = appctxt.run() # 5. Invoke run()
sys.exit(exit_code)
| 29.353293 | 89 | 0.568033 |
fc8a9de701da6632622980797425d9b37ae38773
| 655 |
py
|
Python
|
torch/utils/data/datapipes/map/__init__.py
|
vuanvin/pytorch
|
9267fd8d7395074001ad7cf2a8f28082dbff6b0b
|
[
"Intel"
] | 183 |
2018-04-06T21:10:36.000Z
|
2022-03-30T15:05:24.000Z
|
torch/utils/data/datapipes/map/__init__.py
|
vuanvin/pytorch
|
9267fd8d7395074001ad7cf2a8f28082dbff6b0b
|
[
"Intel"
] | 631 |
2018-06-05T16:59:11.000Z
|
2022-03-31T16:26:57.000Z
|
torch/utils/data/datapipes/map/__init__.py
|
vuanvin/pytorch
|
9267fd8d7395074001ad7cf2a8f28082dbff6b0b
|
[
"Intel"
] | 58 |
2018-06-05T16:40:18.000Z
|
2022-03-16T15:37:29.000Z
|
# Functional DataPipe
from torch.utils.data.datapipes.map.callable import MapperMapDataPipe as Mapper
from torch.utils.data.datapipes.map.combinatorics import ShufflerMapDataPipe as Shuffler
from torch.utils.data.datapipes.map.combining import (
ConcaterMapDataPipe as Concater,
ZipperMapDataPipe as Zipper
)
from torch.utils.data.datapipes.map.grouping import (
BatcherMapDataPipe as Batcher
)
from torch.utils.data.datapipes.map.utils import SequenceWrapperMapDataPipe as SequenceWrapper
__all__ = ['Batcher', 'Concater', 'Mapper', 'SequenceWrapper', 'Shuffler', 'Zipper']
# Please keep this list sorted
assert __all__ == sorted(__all__)
| 36.388889 | 94 | 0.803053 |
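A brief usage sketch (not part of the source): `SequenceWrapper` wraps any indexable sequence and `Mapper` applies a function element-wise while keeping map-style indexing and length.
from torch.utils.data.datapipes.map import Mapper, SequenceWrapper
dp = SequenceWrapper([1, 2, 3])        # map-style datapipe over a list
mapped = Mapper(dp, lambda x: x * 2)   # lazy element-wise transform
assert len(mapped) == 3 and mapped[1] == 4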
5dc4ce14f6dbab34d64c999fd01ff84a1c3918f3
| 948 |
py
|
Python
|
NeuralNet/Alex/models/model_00.py
|
alex-ta/Fontinator
|
7ca9effe3b61ded032176557520127e1d4b7a5ef
|
[
"Apache-2.0"
] | 6 |
2017-04-12T14:05:19.000Z
|
2021-01-29T11:23:50.000Z
|
NeuralNet/Alex/models/model_00.py
|
alex-ta/Fontinator
|
7ca9effe3b61ded032176557520127e1d4b7a5ef
|
[
"Apache-2.0"
] | null | null | null |
NeuralNet/Alex/models/model_00.py
|
alex-ta/Fontinator
|
7ca9effe3b61ded032176557520127e1d4b7a5ef
|
[
"Apache-2.0"
] | null | null | null |
# the model gets created
# in this file specific layers can be defined and changed
# the default data contains 40 x 1200 x 3 data as defined by the input dataformat
# if the data for test and validation is change the first layer format can change
# model contains a sequential keras model that can be applied with different layers
import keras
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense
model = Sequential()
model.add(Conv2D(8, kernel_size=(3, 3),
activation='relu',
input_shape=(40,1200,3)))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(16, kernel_size=(3, 3),
activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(24, kernel_size=(3, 3),
activation='sigmoid'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.2))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.4))
# `classes` (the list of font labels) is expected to be defined by the
# surrounding training script; it is not created in this file.
model.add(Dense(len(classes), activation='softmax'))
model.compile(loss=keras.losses.mean_squared_error, optimizer="rmsprop", metrics=['accuracy'])
| 37.92 | 94 | 0.738397 |
54cd5b96a87f4054d299df1dec523e0ce1b2ca77
| 5,162 |
py
|
Python
|
qmk_firmware/lib/python/qmk/cli/generate/config_h.py
|
DanTupi/personal_setup
|
911b4951e4d8b78d6ea8ca335229e2e970fda871
|
[
"MIT"
] | 2 |
2021-04-16T23:29:01.000Z
|
2021-04-17T02:26:22.000Z
|
qmk_firmware/lib/python/qmk/cli/generate/config_h.py
|
DanTupi/personal_setup
|
911b4951e4d8b78d6ea8ca335229e2e970fda871
|
[
"MIT"
] | null | null | null |
qmk_firmware/lib/python/qmk/cli/generate/config_h.py
|
DanTupi/personal_setup
|
911b4951e4d8b78d6ea8ca335229e2e970fda871
|
[
"MIT"
] | null | null | null |
"""Used by the make system to generate info_config.h from info.json.
"""
from pathlib import Path
from dotty_dict import dotty
from milc import cli
from qmk.decorators import automagic_keyboard, automagic_keymap
from qmk.info import info_json
from qmk.json_schema import json_load
from qmk.keyboard import keyboard_completer, keyboard_folder
from qmk.path import is_keyboard, normpath
def direct_pins(direct_pins):
"""Return the config.h lines that set the direct pins.
"""
rows = []
for row in direct_pins:
cols = ','.join(map(str, [col or 'NO_PIN' for col in row]))
rows.append('{' + cols + '}')
col_count = len(direct_pins[0])
row_count = len(direct_pins)
return """
#ifndef MATRIX_COLS
# define MATRIX_COLS %s
#endif // MATRIX_COLS
#ifndef MATRIX_ROWS
# define MATRIX_ROWS %s
#endif // MATRIX_ROWS
#ifndef DIRECT_PINS
# define DIRECT_PINS {%s}
#endif // DIRECT_PINS
""" % (col_count, row_count, ','.join(rows))
def pin_array(define, pins):
"""Return the config.h lines that set a pin array.
"""
pin_num = len(pins)
pin_array = ', '.join(map(str, [pin or 'NO_PIN' for pin in pins]))
return f"""
#ifndef {define}S
# define {define}S {pin_num}
#endif // {define}S
#ifndef {define}_PINS
# define {define}_PINS {{ {pin_array} }}
#endif // {define}_PINS
"""
def matrix_pins(matrix_pins):
"""Add the matrix config to the config.h.
"""
pins = []
if 'direct' in matrix_pins:
pins.append(direct_pins(matrix_pins['direct']))
if 'cols' in matrix_pins:
pins.append(pin_array('MATRIX_COL', matrix_pins['cols']))
if 'rows' in matrix_pins:
pins.append(pin_array('MATRIX_ROW', matrix_pins['rows']))
return '\n'.join(pins)
@cli.argument('-o', '--output', arg_only=True, type=normpath, help='File to write to')
@cli.argument('-q', '--quiet', arg_only=True, action='store_true', help="Quiet mode, only output error messages")
@cli.argument('-kb', '--keyboard', type=keyboard_folder, completer=keyboard_completer, help='Keyboard to generate config.h for.')
@cli.subcommand('Used by the make system to generate info_config.h from info.json', hidden=True)
@automagic_keyboard
@automagic_keymap
def generate_config_h(cli):
"""Generates the info_config.h file.
"""
# Determine our keyboard(s)
if not cli.config.generate_config_h.keyboard:
cli.log.error('Missing parameter: --keyboard')
cli.subcommands['info'].print_help()
return False
if not is_keyboard(cli.config.generate_config_h.keyboard):
cli.log.error('Invalid keyboard: "%s"', cli.config.generate_config_h.keyboard)
return False
# Build the info_config.h file.
kb_info_json = dotty(info_json(cli.config.generate_config_h.keyboard))
info_config_map = json_load(Path('data/mappings/info_config.json'))
config_h_lines = ['/* This file was generated by `qmk generate-config-h`. Do not edit or copy.' ' */', '', '#pragma once']
# Iterate through the info_config map to generate basic things
for config_key, info_dict in info_config_map.items():
info_key = info_dict['info_key']
key_type = info_dict.get('value_type', 'str')
to_config = info_dict.get('to_config', True)
if not to_config:
continue
try:
config_value = kb_info_json[info_key]
except KeyError:
continue
if key_type.startswith('array'):
config_h_lines.append('')
config_h_lines.append(f'#ifndef {config_key}')
config_h_lines.append(f'# define {config_key} {{ {", ".join(map(str, config_value))} }}')
config_h_lines.append(f'#endif // {config_key}')
elif key_type == 'bool':
if config_value:
config_h_lines.append('')
config_h_lines.append(f'#ifndef {config_key}')
config_h_lines.append(f'# define {config_key}')
config_h_lines.append(f'#endif // {config_key}')
elif key_type == 'mapping':
for key, value in config_value.items():
config_h_lines.append('')
config_h_lines.append(f'#ifndef {key}')
config_h_lines.append(f'# define {key} {value}')
config_h_lines.append(f'#endif // {key}')
else:
config_h_lines.append('')
config_h_lines.append(f'#ifndef {config_key}')
config_h_lines.append(f'# define {config_key} {config_value}')
config_h_lines.append(f'#endif // {config_key}')
if 'matrix_pins' in kb_info_json:
config_h_lines.append(matrix_pins(kb_info_json['matrix_pins']))
# Show the results
config_h = '\n'.join(config_h_lines)
if cli.args.output:
cli.args.output.parent.mkdir(parents=True, exist_ok=True)
if cli.args.output.exists():
cli.args.output.replace(cli.args.output.parent / (cli.args.output.name + '.bak'))
cli.args.output.write_text(config_h)
if not cli.args.quiet:
cli.log.info('Wrote info_config.h to %s.', cli.args.output)
else:
print(config_h)
| 33.303226 | 129 | 0.646455 |
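For illustration only (not in the source), calling `pin_array` from the record above with a hypothetical two-pin column list produces guarded C defines of this shape (NO_PIN is substituted for the falsy entry):
print(pin_array('MATRIX_COL', ['B0', None]))
# #ifndef MATRIX_COLS
# #    define MATRIX_COLS 2
# #endif // MATRIX_COLS
# #ifndef MATRIX_COL_PINS
# #    define MATRIX_COL_PINS { B0, NO_PIN }
# #endif // MATRIX_COL_PINS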
074678f08d529720da416976cfa34e7055fb8167
| 8,667 |
py
|
Python
|
classification/rest.py
|
Ilgmi/IWIbot
|
c5ac71865fbb11f1676ec3239b96bab8e22257ee
|
[
"Apache-2.0"
] | null | null | null |
classification/rest.py
|
Ilgmi/IWIbot
|
c5ac71865fbb11f1676ec3239b96bab8e22257ee
|
[
"Apache-2.0"
] | null | null | null |
classification/rest.py
|
Ilgmi/IWIbot
|
c5ac71865fbb11f1676ec3239b96bab8e22257ee
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import atexit
import json
import os
import cf_deployment_tracker
import metrics_tracker_client
# use natural language toolkit
import nltk
from classifier.classifier import Classifier
from classifier.startup import populate_intents, populate_entities_for_meal, populate_entities_for_timetables, populate_entities_for_navigation
from classifier.trainer import Trainer
from cloudant import Cloudant
from flask import Flask, render_template, request, jsonify
###
# Text Classification using Artificial Neural Networks (ANN)
# Based on https://machinelearnings.co/text-classification-using-neural-networks-f5cd7b8765c6
###
nltk.download('punkt')
# Emit Bluemix deployment event
cf_deployment_tracker.track()
metrics_tracker_client.DSX('org/repo')
app = Flask(__name__)
client = None
db = None
if 'VCAP_SERVICES' in os.environ:
vcap = json.loads(os.getenv('VCAP_SERVICES'))
print('Found VCAP_SERVICES')
if 'cloudantNoSQLDB' in vcap:
creds = vcap['cloudantNoSQLDB'][0]['credentials']
user = creds['username']
password = creds['password']
url = 'https://' + creds['host']
client = Cloudant(user, password, url=url, connect=True)
client.create_database('trainer', throw_on_exists=False)
client.create_database('synapse', throw_on_exists=False)
elif os.path.isfile('vcap-local.json'):
with open('vcap-local.json') as f:
vcap = json.load(f)
print('Found local VCAP_SERVICES')
creds = vcap['services']['cloudantNoSQLDB'][0]['credentials']
user = creds['username']
password = creds['password']
url = 'https://' + creds['host']
client = Cloudant(user, password, url=url, connect=True)
client.create_database('trainer', throw_on_exists=False)
client.create_database('synapse', throw_on_exists=False)
cache = dict()
if client is not None:
# create Classifier cache on startup
#cache engine
cache["intents"] = Classifier("intents", client)
cache["intents"].load()
cache["entities@timetables"] = Classifier("entities@timetables", client)
cache["entities@timetables"].load()
cache["entities@meal"] = Classifier("entities@meal", client)
cache["entities@meal"].load()
# On Bluemix, get the port number from the environment variable PORT
# When running this app on the local machine, default the port to 8000
port = int(os.getenv('PORT', 8000))
def removekey(d, key):
r = dict(d)
del r[key]
return r
@app.route('/')
def home():
return render_template('index.html')
# /**
# * Endpoint to classify a conversation service request JSON for the intent.
# *
# * @return A JSON response with the classification
# */
@app.route('/api/testIntent', methods=['POST'])
def testIntent():
request_object = request.json
sentence = request.json['sentence']
if client is not None:
if sentence == 'populate':
            # populate the database with base data and train all neural networks
populate_intents(client)
populate_entities_for_meal(client)
populate_entities_for_timetables(client)
populate_entities_for_navigation(client)
cache["intents"].load()
cache["entities@timetables"].load()
cache["entities@meal"].load()
classification = dict()
classification['intent'] = "Populated"
else:
if 'intents' not in cache.keys():
cache["intents"] = Classifier("intents", client)
classifier = cache["intents"]
results = classifier.classify(sentence)
classification = dict()
if len(results) > 0:
classification['intent'] = results[0][0]
else:
classification['intent'] = ""
else:
print("NO DATABASE")
classification = dict()
classification['intent'] = "NO DATABASE"
response_object = removekey(request_object, "sentence")
response_object["classifications"] = classification
return 'Results: %s' % classification['intent']
# /**
# * Endpoint to classify a conversation service request JSON for the intent.
# *
# * @return A JSON response with the classification
# */
@app.route('/api/getIntent', methods=['POST'])
def getIntent():
request_object = request.json
sentence = request.json['sentence']
if client is not None:
if 'intents' not in cache.keys():
cache["intents"] = Classifier("intents", client)
classifier = cache["intents"]
results = classifier.classify(sentence)
classification = dict()
if len(results) > 0:
classification['intent'] = results[0][0]
else:
classification['intent'] = ""
else:
print("NO DATABASE")
classification = dict()
classification['intent'] = "NO DATABASE"
response_object = removekey(request_object, "sentence")
response_object["classifications"] = classification
return jsonify(response_object)
# /**
# * Endpoint to classify a conversation service request JSON for its entity
# * based on the priorIntent given.
# *
# * @return A JSON response with the classification
# */
@app.route('/api/getEntity', methods=['POST'])
def getEntity():
request_object = request.json
sentence = request.json['sentence']
prior_intents = request.json['context']["priorIntent"]["intent"]
if client is not None:
classifier_name = "entities@" + prior_intents
if classifier_name not in cache.keys():
cache[classifier_name] = Classifier(classifier_name, client)
classifier = cache[classifier_name]
results = classifier.classify(sentence)
classification = dict()
if len(results) > 0:
classification['entity'] = results[0][0]
else:
classification['entity'] = ""
else:
print("NO DATABASE")
classification = dict()
classification['entity'] = "NO DATABASE"
response_object = removekey(request_object, "sentence")
response_object["classifications"] = classification
return jsonify(response_object)
# /**
# * Endpoint to add a classification to a training set for classifying
# * the intent.
# *
# * @return No response
# */
@app.route('/api/addIntent', methods=['POST'])
def addIntent():
sentence = request.json['sentence']
intent = request.json['intent']
if client is not None:
intents = Trainer("intents", client)
intents.add_to_traingset(sentence, intent, True)
return jsonify([])
else:
print("NO DATABASE")
return "NO DATABASE"
# /**
# * Endpoint to train a neural network for classifying an intent.
# *
# * @return No response
# */
@app.route('/api/trainIntents', methods=['POST'])
def trainIntents():
if client is not None:
intents = Trainer("intents", client)
intents.start_training()
if 'intents' not in cache.keys():
cache['intents'] = Classifier('intents', client)
else:
cache['intents'].load()
return jsonify([])
else:
print("NO DATABASE")
return "NO DATABASE"
# /**
# * Endpoint to add a classification to a training set for classifying
# * the entities of an intent.
# *
# * @return No response
# */
@app.route('/api/addEntity', methods=['POST'])
def addEntity():
intent = request.json['intent']
sentence = request.json['sentence']
entity = request.json['entity']
if client is not None:
classifier_name = "entities@" + intent
entities = Trainer(classifier_name, client)
entities.add_to_traingset(sentence, entity, True)
return jsonify([])
else:
print("NO DATABASE")
return "NO DATABASE"
# /**
# * Endpoint to train a neural network for classifying the entities of an intent.
# *
# * @return No response
# */
@app.route('/api/trainEntity', methods=['POST'])
def trainEntity():
intent = request.json['intent']
if client is not None:
classifier_name = "entities@" + intent
entities = Trainer(classifier_name, client)
entities.start_training()
if classifier_name not in cache.keys():
cache[classifier_name] = Classifier(classifier_name, client)
else:
cache[classifier_name].load()
return jsonify([])
else:
print("NO DATABASE")
return "NO DATABASE"
@atexit.register
def shutdown():
if client is not None:
client.disconnect()
if __name__ == '__main__':
app.run(host='0.0.0.0', port=port, debug=True)
| 29.580205 | 143 | 0.645091 |
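A hedged client-side sketch (not in the source) for exercising the intent endpoint; the host and port assume the Flask defaults configured above, and the sentence is arbitrary.
import requests
payload = {'sentence': 'when does the lecture start', 'context': {}}
resp = requests.post('http://localhost:8000/api/getIntent', json=payload)
print(resp.json().get('classifications'))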
074fe69b8f4756bf997c2064d57c94008be5c239
| 1,529 |
py
|
Python
|
numbersforwatch.py
|
BogyMitutoyoCTL/Riesen-Tetris
|
8bbbaf0b7aeae7890da724d3d72719a7d068237a
|
[
"MIT"
] | 1 |
2019-04-27T07:28:52.000Z
|
2019-04-27T07:28:52.000Z
|
numbersforwatch.py
|
BogyMitutoyoCTL/Riesen-Tetris
|
8bbbaf0b7aeae7890da724d3d72719a7d068237a
|
[
"MIT"
] | null | null | null |
numbersforwatch.py
|
BogyMitutoyoCTL/Riesen-Tetris
|
8bbbaf0b7aeae7890da724d3d72719a7d068237a
|
[
"MIT"
] | null | null | null |
from block import Block
from rainbow import rainbowcolors
class Number:
number_pixels = [
[[1, 1, 1, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 1, 1, 1]],
[[0, 0, 0, 2],
[0, 0, 0, 2],
[0, 0, 0, 2],
[0, 0, 0, 2],
[0, 0, 0, 2]],
[[3, 3, 3, 3],
[0, 0, 0, 3],
[3, 3, 3, 3],
[3, 0, 0, 0],
[3, 3, 3, 3]],
[[4, 4, 4, 4],
[0, 0, 0, 4],
[0, 4, 4, 4],
[0, 0, 0, 4],
[4, 4, 4, 4]],
[[5, 0, 0, 5],
[5, 0, 0, 5],
[5, 5, 5, 5],
[0, 0, 0, 5],
[0, 0, 0, 5]],
[[6, 6, 6, 6],
[6, 0, 0, 0],
[6, 6, 6, 6],
[0, 0, 0, 6],
[6, 6, 6, 6]],
[[7, 7, 7, 7],
[7, 0, 0, 0],
[7, 7, 7, 7],
[7, 0, 0, 7],
[7, 7, 7, 7]],
[[8, 8, 8, 8],
[0, 0, 0, 8],
[0, 0, 0, 8],
[0, 0, 0, 8],
[0, 0, 0, 8]],
[[9, 9, 9, 9],
[9, 0, 0, 9],
[9, 9, 9, 9],
[9, 0, 0, 9],
[9, 9, 9, 9]],
[[10, 10, 10, 10],
[10, 0, 0, 10],
[10, 10, 10, 10],
[0, 0, 0, 10],
[10, 10, 10, 10]]
]
number_colors = rainbowcolors(len(number_pixels))
def __init__(self, number: int):
self.pixel = Number.number_pixels[number]
self.color = Number.number_colors[number]
self.block = Block(self.pixel, self.color)
| 20.662162 | 53 | 0.29758 |
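Usage sketch (not part of the source): each digit becomes a `Block` built from its 5x4 pixel grid and its rainbow colour, ready for the Tetris renderer.
seven = Number(7)
print(len(seven.pixel), len(seven.pixel[0]))  # 5 rows, 4 columns
print(seven.color)                            # colour from rainbowcolors(...)
block = seven.block                           # Block(pixel, color)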
075d460ecdf77c8545715bdddc238a6e442bd909
| 818 |
py
|
Python
|
app/auth/forms.py
|
uosorio/heroku_face
|
7d6465e71dba17a15d8edaef520adb2fcd09d91e
|
[
"Apache-2.0"
] | null | null | null |
app/auth/forms.py
|
uosorio/heroku_face
|
7d6465e71dba17a15d8edaef520adb2fcd09d91e
|
[
"Apache-2.0"
] | null | null | null |
app/auth/forms.py
|
uosorio/heroku_face
|
7d6465e71dba17a15d8edaef520adb2fcd09d91e
|
[
"Apache-2.0"
] | null | null | null |
"""
AUTHOR: Juanjo
CREATED: 24/01/2019
"""
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, PasswordField, BooleanField
from wtforms.validators import DataRequired, Email, Length
class SignupForm(FlaskForm):
name = StringField('Nombre', validators=[DataRequired(), Length(max=64)])
password = PasswordField('Password', validators=[DataRequired()])
email = StringField('Email', validators=[DataRequired(), Email()])
contrato = StringField('Contrato', validators=[DataRequired()])
submit = SubmitField('Registrar')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Recuérdame')
submit = SubmitField('Login')
| 30.296296 | 77 | 0.733496 |
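A minimal, hypothetical view (not part of the source) showing how `LoginForm` is typically consumed; the app, secret key, and template name are placeholders, and the secret key is required by Flask-WTF's CSRF protection.
from flask import Flask, render_template
app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # placeholder; never ship a hard-coded key
@app.route('/login', methods=['GET', 'POST'])
def login():
    form = LoginForm()
    if form.validate_on_submit():  # True only for a valid POST with a CSRF token
        return f'Logged in as {form.email.data}'
    return render_template('login.html', form=form)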
db7f0ca08be3b006105debdc40e8b9818c6e457d
| 2,471 |
py
|
Python
|
Contrib-Inspur/openbmc/poky/bitbake/lib/bb/ui/uihelper.py
|
opencomputeproject/Rack-Manager
|
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
|
[
"MIT"
] | 5 |
2019-11-11T07:57:26.000Z
|
2022-03-28T08:26:53.000Z
|
Contrib-Inspur/openbmc/poky/bitbake/lib/bb/ui/uihelper.py
|
opencomputeproject/Rack-Manager
|
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
|
[
"MIT"
] | 3 |
2019-09-05T21:47:07.000Z
|
2019-09-17T18:10:45.000Z
|
Contrib-Inspur/openbmc/poky/bitbake/lib/bb/ui/uihelper.py
|
opencomputeproject/Rack-Manager
|
e1a61d3eeeba0ff655fe9c1301e8b510d9b2122a
|
[
"MIT"
] | 11 |
2019-07-20T00:16:32.000Z
|
2022-01-11T14:17:48.000Z
|
#
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import bb.build
import time
class BBUIHelper:
def __init__(self):
self.needUpdate = False
self.running_tasks = {}
# Running PIDs preserves the order tasks were executed in
self.running_pids = []
self.failed_tasks = []
self.tasknumber_current = 0
self.tasknumber_total = 0
def eventHandler(self, event):
if isinstance(event, bb.build.TaskStarted):
if event._mc != "default":
self.running_tasks[event.pid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time() }
else:
self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time() }
self.running_pids.append(event.pid)
self.needUpdate = True
elif isinstance(event, bb.build.TaskSucceeded):
del self.running_tasks[event.pid]
self.running_pids.remove(event.pid)
self.needUpdate = True
elif isinstance(event, bb.build.TaskFailedSilent):
del self.running_tasks[event.pid]
self.running_pids.remove(event.pid)
# Don't add to the failed tasks list since this is e.g. a setscene task failure
self.needUpdate = True
elif isinstance(event, bb.build.TaskFailed):
del self.running_tasks[event.pid]
self.running_pids.remove(event.pid)
self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
self.needUpdate = True
elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
self.tasknumber_total = event.stats.total
self.needUpdate = True
elif isinstance(event, bb.build.TaskProgress):
if event.pid > 0:
self.running_tasks[event.pid]['progress'] = event.progress
self.running_tasks[event.pid]['rate'] = event.rate
self.needUpdate = True
else:
return False
return True
def getTasks(self):
self.needUpdate = False
return (self.running_tasks, self.failed_tasks)
| 41.881356 | 145 | 0.61554 |
53438df96cd3cbe75e92a4d763b256cba0a0c0bd
| 2,925 |
py
|
Python
|
python/en/archive/practice_python/practice_python-exercise01-Character Input.py
|
aimldl/coding
|
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
|
[
"MIT"
] | null | null | null |
python/en/archive/practice_python/practice_python-exercise01-Character Input.py
|
aimldl/coding
|
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
|
[
"MIT"
] | null | null | null |
python/en/archive/practice_python/practice_python-exercise01-Character Input.py
|
aimldl/coding
|
70ddbfaa454ab92fd072ee8dc614ecc330b34a70
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
PRACTICE PYTHON, https://www.practicepython.org/
Exercise 1. Character Input
https://www.practicepython.org/exercise/2014/01/29/01-character-input.html
Create a program that asks the user to enter their name and their age. Print
out a message addressed to them that tells them the year that they will turn
100 years old.
Extras:
1. Add on to the previous program by asking the user for another number and
printing out that many copies of the previous message.
(Hint: order of operations exists in Python)
2. Print out that many copies of the previous message on separate lines.
(Hint: the string "\n is the same as pressing the ENTER button)
Solution
https://www.practicepython.org/solution/2014/02/05/01-character-input-solutions.html
"""
# Exercise 1
#name = input("What is your name?")
#age = input("What is your age? Please enter a number. For example, 20 for twenty.")
#year = 100 - int(age) + 2018
#print(name, ", you'll become 100 years old in", year )
# Extras 1
#name = input("What is your name?")
#age = input("What is your age? Please enter a number. For example, 20 for twenty.")
#year = 100 - int(age) + 2018
#num_of_copies = input("How many times would you like to repeat the answer?")
#
#for n in range( int(num_of_copies) ):
# print(name, ", you'll become 100 years old in", year, ".", end='')
# Extras 2
#for n in range( int(num_of_copies) ):
# print(name, ", you'll become 100 years old in", year, ".")
#T , you'll become 100 years old in 2075 .
#T , you'll become 100 years old in 2075 .
# The following is a code that prints without the trailing space.
# That is,
#T , you'll become 100 years old in 2075 . (Wrong)
#T, you'll become 100 years old in 2075. (Right)
import sys
name = input("What is your name?")
age = input("What is your age? Please enter a number. For example, 20 for twenty.")
year = 100 - int(age) + 2018
num_of_copies = input("How many times would you like to repeat the answer?")
for n in range( int(num_of_copies) ):
message = name + ", you'll become 100 years old in " + str(year) + "."
sys.stdout.write(message)
"""
input("message")
int(age)
TypeError: 'str' object cannot be interpreted as an integer
Python: How to Print Without Newline? (The Idiomatic Way)
https://www.afternerd.com/blog/how-to-print-without-a-newline-in-python/
Print without a new line.
print("message", end='')
sys.stdout.write(name, ", you'll become 100 years old in", year, ".")
TypeError: write() takes 2 positional arguments but 5 were given
String Concatenation and Formatting
https://www.pythonforbeginners.com/concatenation/string-concatenation-and-formatting-in-python
TypeError: must be str, not int
message = name + ", you'll become 100 years old in " + year + "."
=> message = name + ", you'll become 100 years old in " + str(year) + "."
"""
| 36.5625 | 95 | 0.686496 |
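A compact alternative for the same loop (not in the source), using `print` with `end=''` as the notes above suggest instead of `sys.stdout.write`:
for _ in range(int(num_of_copies)):
    print(f"{name}, you'll become 100 years old in {year}.", end='')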
536665abd87787f43a38df8ff673aef32fc2fc25
| 1,390 |
py
|
Python
|
main.py
|
Evil0ctal/Mail_sender
|
bf0c2f196f2a80a24431df533c7713776d642688
|
[
"MIT"
] | null | null | null |
main.py
|
Evil0ctal/Mail_sender
|
bf0c2f196f2a80a24431df533c7713776d642688
|
[
"MIT"
] | null | null | null |
main.py
|
Evil0ctal/Mail_sender
|
bf0c2f196f2a80a24431df533c7713776d642688
|
[
"MIT"
] | null | null | null |
from smtplib import SMTP_SSL
from email.mime.text import MIMEText
from retrying import retry
@retry(stop_max_attempt_number=15)
def sendmail(message, recipient_show, to_addrs):
    # Fill in the real username and password for the outgoing mail server
user = '[email protected]'
password = 'WeakPassw0rd'
    # Mail body
msg = MIMEText(message, 'plain', _charset="utf-8")
    # Mail subject
msg["Subject"] = 'This is a test subject'
    # "From" header shown to the reader; display only, no effect on delivery
msg["from"] = user
    # "To" header shown to the reader; display only, no effect on delivery
msg["to"] = recipient_show
    # "Cc" header shown to the reader; display only, no effect on delivery
msg["Cc"] = ''
with SMTP_SSL(host="smtp.example.com", port=465) as smtp:
        # Log in to the outgoing mail server
smtp.login(user=user, password=password)
        # Addresses actually used for sending and receiving
smtp.sendmail(from_addr=user, to_addrs=to_addrs.split(','), msg=msg.as_string())
if __name__ == '__main__':
    # Use the contents of content.txt as the message
with open('content.txt', 'r', encoding='utf-8') as f:
message = f.read()
    # Read the recipient addresses line by line
with open('emails.txt', 'r', encoding='utf-8') as f:
emails = f.readlines()
try:
        # Recipient address used for display
sender_show = '[email protected]'
        for i in emails:
            email = i.strip()  # readlines() keeps the trailing newline; strip it before use
            # Record each processed address in a file so the data can be
            # cleaned up manually if an error occurs
            with open('emails_done.txt', 'a', encoding='utf-8') as f:
                f.write(email + '\n')
            sendmail(message, email, email)
except Exception as e:
print("The Error happened: " + str(e))
| 29.574468 | 88 | 0.590647 |
72b5e6fe640440dc7030c25a790f360eebc45501
| 1,811 |
py
|
Python
|
tests/integration/test_get_by_id.py
|
pcrete/skil-python
|
672a1aa9e8af020c960ab9ee280cbb6b194afc3f
|
[
"Apache-2.0"
] | 23 |
2018-09-19T13:34:27.000Z
|
2022-02-14T09:49:35.000Z
|
tests/integration/test_get_by_id.py
|
pcrete/skil-python
|
672a1aa9e8af020c960ab9ee280cbb6b194afc3f
|
[
"Apache-2.0"
] | 33 |
2018-10-18T07:58:05.000Z
|
2019-05-16T08:24:12.000Z
|
tests/integration/test_get_by_id.py
|
pcrete/skil-python
|
672a1aa9e8af020c960ab9ee280cbb6b194afc3f
|
[
"Apache-2.0"
] | 11 |
2018-10-21T18:58:57.000Z
|
2022-02-14T09:49:36.000Z
|
import pytest
import skil
import uuid
import os
work_space = None # because number of workspaces is limited
_sk = None
def _get_sk():
global _sk
if _sk is None:
_sk = skil.Skil()
return _sk
def _get_ws():
global work_space
if work_space is not None:
return work_space
sk = _get_sk()
work_space = skil.WorkSpace(sk)
return work_space
def test_work_space_by_id():
global work_space
global work_space_id
sk = _get_sk()
work_space = skil.WorkSpace(sk, name='test_ws')
ws_id = work_space.id
work_space_id = ws_id
work_space2 = skil.get_workspace_by_id(sk, ws_id)
assert work_space.name == work_space2.name
def test_experiment_by_id():
ws = _get_ws()
exp = skil.Experiment(ws, name='test_exp')
exp_id = exp.id
exp2 = skil.get_experiment_by_id(ws.skil, exp_id)
assert exp.name == exp2.name
def test_deployment_by_id():
sk = _get_sk()
dep = skil.Deployment(sk, name='test_dep' + str(uuid.uuid1())[:6])
dep_id = dep.id
dep2 = skil.get_deployment_by_id(sk, dep_id)
assert dep.name == dep2.name
def test_model_by_id():
ws = _get_ws()
exp = skil.Experiment(ws, name='test_exp2')
with open('model.h5', 'w') as f:
f.write('')
model = skil.Model('model.h5', name='test_model', experiment=exp)
model_id = model.id
model2 = skil.get_model_by_id(exp, model_id)
assert model.name == model2.name
os.remove('model.h5')
def test_transform_by_id():
ws = _get_ws()
exp = skil.Experiment(ws, name='test_transform')
transform = skil.Transform('iris_tp.json')
transform_id = transform.id
tf_2 = skil.get_transform_by_id(transform_id, "CSV", exp)
assert tf_2.name == transform.name
if __name__ == '__main__':
pytest.main([__file__])
| 23.828947 | 70 | 0.668691 |
f46105a07177fd860ffb677c5ff344539337466a
| 1,239 |
py
|
Python
|
spamsmss.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2 |
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
spamsmss.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
spamsmss.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2 |
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
import requests,os,sys,time
from time import sleep
b="\033[94m"
c="\033[96m"
g="\033[92m"
r="\033[91m"
p="\033[1;97m"
d="\033[00m"
ab="\033[90m"
dn=f"{d}[{g}√{d}]{p}"
er=f"{d}[{r}!{d}]{p}"
pr=f"{d}[{c}?{d}]{p}"
def clear():
os.system("cls" if os.name == "nt" else "clear")
def baner():
clear()
print(f"""
{p}╔═╗┌─┐┌─┐┌┬┐ {c}┌─┐┌┬┐┌─┐
{p}╚═╗├─┘├─┤│││ {c}└─┐│││└─┐
{p}╚═╝┴ ┴ ┴┴ ┴ {c}└─┘┴ ┴└─┘{p}{g}
https://ainxbot-id.herokuapp.com
{ab}--------------------------------{d}""")
def cblg():
lg=input(f"{pr}Coba lagi? ({d}{c}y{d}/{c}n{p}) : {c}")
if lg == "y" or lg == "Y":
sleep(10)
os.system("python run.py")
elif lg == "n" or lg == "N":
sys.exit(f"{er}Bye bro jangan lupa kasih bintang github saya:)")
else:
print(f"{er}Ngetik yang bener coeg")
cblg()
def spam(nomor):
req=requests.get("https://ainxbot-sms.herokuapp.com/api/spamsms",params={"phone":nomor}).text
if "terkirim" in req:
print(f"{dn}Spam ke {c}{nomor} berhasil")
else:
print(f"{er}Spam ke {c}{nomor} gagal")
if __name__=="__main__":
baner()
nomor=input(f"{er}Put your number,example : {c}8xxx {p}tanpa 0/62\n{er}Limit : 3x\n{pr}{ab} >>> {c}")
spam(nomor)
cblg()
| 27.533333 | 105 | 0.505246 |
f48fd79fdb10e24f80dac61e97fe641bcdfca8f1
| 734 |
py
|
Python
|
src/onegov/user/utils.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/user/utils.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/user/utils.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from furl import furl
def password_reset_url(user, request, url):
""" Appends the token needed by PasswordResetForm for a password reset.
:user:
The user (model).
:request:
The request.
:url:
The URL which points to the password reset view (which is using the
PasswordResetForm).
    :return: A URL containing the password reset token, or None if unsuccessful.
"""
# external users may not reset their passwords here
if user.source is not None:
return None
token = request.new_url_safe_token({
'username': user.username,
'modified': user.modified.isoformat() if user.modified else ''
})
return furl(url).add({'token': token}).url
| 23.677419 | 78 | 0.647139 |
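To make the final step concrete, a tiny standalone illustration (not in the source) of how `furl` appends the token as a query parameter:
from furl import furl
print(furl('https://example.org/reset').add({'token': 'abc123'}).url)
# https://example.org/reset?token=abc123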
22b66023d1953fb6468fc7cd56ceccbeff3eb1de
| 2,584 |
py
|
Python
|
extrahiereMerkmale.py
|
jkerpe/TroubleBubble
|
813ad797398b9f338f136bcb96c6c92186d92ebf
|
[
"MIT"
] | null | null | null |
extrahiereMerkmale.py
|
jkerpe/TroubleBubble
|
813ad797398b9f338f136bcb96c6c92186d92ebf
|
[
"MIT"
] | null | null | null |
extrahiereMerkmale.py
|
jkerpe/TroubleBubble
|
813ad797398b9f338f136bcb96c6c92186d92ebf
|
[
"MIT"
] | 1 |
2021-08-09T14:57:57.000Z
|
2021-08-09T14:57:57.000Z
|
import cv2
import glob
import csv
import os
def getCoord(blb):
"""
    Compute the coordinates of a BLOB as a list [xmin, ymin, xmax, ymax].
:param blb: BLOB
:return: [xmin, ymin, xmax, ymax]
"""
x_min, y_min, w, h = cv2.boundingRect(blb)
return [x_min, y_min, x_min + w, y_min + h]
def writeInFile(inp):
with open('extrahierteMerkmale.csv', 'a', newline='') as f:
writer = csv.writer(f, delimiter=",")
writer.writerow([inp])
def extrahiere(folder):
ml = float(folder.split('_')[3].split('m')[0].replace('-', '.')) * 1000
bg = cv2.createBackgroundSubtractorMOG2(detectShadows=False, varThreshold=40, history=0)
fnames = sorted(glob.glob(f'{folder}/*.bmp'))
if len(fnames) == 0:
fnames = sorted(glob.glob(f'{folder}/*.tiff'))
for fname in fnames:
frame = cv2.imread(fname)
        # Preprocessing:
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
frame = cv2.bilateralFilter(frame, 5, 75, 75)
        # Apply background subtraction:
fgmask = bg.apply(frame)
        # Extract pressure (and time) from the image name
try:
druck = int(fname.split('-')[-1].split('.')[0])
except ValueError:
print("Format des Bildnamens stimmt nicht")
return False
blobs, _ = cv2.findContours(fgmask, cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)  # extract contours from the mask
for i in range(len(blobs)):
area = cv2.contourArea(blobs[i])
            if area > 200:  # filter out small BLOBs
coords = getCoord(blobs[i]) # Form: [x_min, y_min, x_max, y_max]
                mid_y = int((coords[1] + coords[3]) / 2)  # approximate midpoint (faster than via moments)
mid_x = int((coords[0] + coords[2]) / 2)
                # Keep BLOBs whose midpoint falls between 700 and 800 pixels
if (mid_x in range(700, 800)) & (mid_y in range(10, 75)):
perimeter = cv2.arcLength(blobs[i], True)
rundh = int(1000 * (4 * 3.14 * area) / (perimeter ** 2))
inp = f"{int(ml)};{int(area)};{druck};{rundh};{int(area*area)};{int(area*druck)}"
writeInFile(inp)
print(inp)
if __name__ == '__main__':
writeInFile(f"ml;flaeche;druck;rundheit;flaecheHoch2;flaecheMalDruck")
ordner = glob.glob('202*')
for uo in ordner:
extrahiere(uo)
| 36.394366 | 113 | 0.561533 |
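Illustration (not in the source) of `getCoord` on a synthetic contour: `cv2.boundingRect` returns x, y, width, height, which the helper converts into min/max coordinates.
import numpy as np
import cv2
contour = np.array([[[2, 1]], [[6, 1]], [[6, 4]], [[2, 4]]], dtype=np.int32)
print(cv2.boundingRect(contour))  # (2, 1, 5, 4) -> x, y, w, h
print(getCoord(contour))          # [2, 1, 7, 5] -> xmin, ymin, xmax, ymax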
22efa3095dccad014d5a0a9a8e851728cb81f6c6
| 2,388 |
py
|
Python
|
rawio/grouping/markov.py
|
hdkai/Raw-IO
|
f0fa928d7ef59a363c6f4c876d642af6dede6ae4
|
[
"Apache-2.0"
] | null | null | null |
rawio/grouping/markov.py
|
hdkai/Raw-IO
|
f0fa928d7ef59a363c6f4c876d642af6dede6ae4
|
[
"Apache-2.0"
] | null | null | null |
rawio/grouping/markov.py
|
hdkai/Raw-IO
|
f0fa928d7ef59a363c6f4c876d642af6dede6ae4
|
[
"Apache-2.0"
] | null | null | null |
#
# RawIO
# Copyright (c) 2021 Yusuf Olokoba.
#
from cv2 import findTransformECC, MOTION_TRANSLATION, TERM_CRITERIA_COUNT, TERM_CRITERIA_EPS
from numpy import asarray, eye, float32
from PIL import Image
from sklearn.feature_extraction.image import extract_patches_2d
from typing import Callable
def markov_similarity (min_probability: float=0.8, trials: int=100, patch_size: float=0.1) -> Callable[[str, str], bool]:
"""
Create a similarity function which estimates a binomial distribution on a Markov random field defined over the image.
In simple terms, it checks for patch correspondences :/
We use Evangelidis & Psarakis, 2008 with Monte Carlo simulation to estimate the binomial distribution.
Parameters:
min_probability (float): Minimum probability for images to be considered similar, in range [0., 1.].
trials (int): Number of Monte Carlo trials for estimating the binomial distribution.
patch_size (float): Relative patch size for ECC trials, in range [0., 1.].
Returns:
callable: Pairwise image similarity function returning a boolean.
"""
def similarity_fn (path_a: str, path_b: str) -> bool:
# Load images
image_a = Image.open(path_a)
image_b = Image.open(path_b)
# Check sizes
if image_a.size != image_b.size:
return False
# Load images
image_a.draft("L", (2560, 1440))
image_b.draft("L", (2560, 1440))
image_a = asarray(image_a)
image_b = asarray(image_b)
# Extract patches
SEED = 1
size = int(min(image_a.shape) * patch_size)
patches_a = extract_patches_2d(image_a, (size, size), max_patches=trials, random_state=SEED)
patches_b = extract_patches_2d(image_b, (size, size), max_patches=trials, random_state=SEED)
# Run Monte Carlo estimation
IDENTITY = eye(2, 3, dtype=float32)
CRITERIA = (TERM_CRITERIA_EPS | TERM_CRITERIA_COUNT, 50, 1e-4)
passes = 0
for patch_a, patch_b in zip(patches_a, patches_b):
try:
findTransformECC(patch_a, patch_b, IDENTITY.copy(), MOTION_TRANSLATION, CRITERIA, None, 5)
passes += 1
except:
pass
# Check
estimator = passes / patches_a.shape[0]
return estimator >= min_probability
return similarity_fn
| 41.894737 | 121 | 0.664992 |
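Usage sketch (not in the source); the image paths are hypothetical and the thresholds arbitrary:
is_similar = markov_similarity(min_probability=0.7, trials=50, patch_size=0.1)
if is_similar('shot_a.jpg', 'shot_b.jpg'):
    print('images likely belong to the same group')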
a3b8800c8469609e7884a8f8087b6a7b36f0dead
| 239 |
py
|
Python
|
assets/2019-09-06-reflected-text-analysis/test_install.py
|
nilsreiter/nilsreiter.de
|
821fd9f4c649bc86049f5e27419e83be288c1129
|
[
"MIT"
] | null | null | null |
assets/2019-09-06-reflected-text-analysis/test_install.py
|
nilsreiter/nilsreiter.de
|
821fd9f4c649bc86049f5e27419e83be288c1129
|
[
"MIT"
] | null | null | null |
assets/2019-09-06-reflected-text-analysis/test_install.py
|
nilsreiter/nilsreiter.de
|
821fd9f4c649bc86049f5e27419e83be288c1129
|
[
"MIT"
] | null | null | null |
try:
import requests
import nltk
print("Installation successful!")
except:
print("There was a problem with the installation of your python libraries. Please contact \
[email protected]")
| 21.727273 | 96 | 0.677824 |
4a6c4e0a6993f600ca47cf0869cdf1dfcf6eb283
| 1,340 |
py
|
Python
|
service/api/feeds.py
|
netzbegruenung/schaufenster
|
c0860570cf6b46dc0fade9cef7562edd2fa7f3a0
|
[
"Apache-2.0"
] | 1 |
2021-07-20T06:56:38.000Z
|
2021-07-20T06:56:38.000Z
|
service/api/feeds.py
|
netzbegruenung/schaufenster
|
c0860570cf6b46dc0fade9cef7562edd2fa7f3a0
|
[
"Apache-2.0"
] | 1 |
2018-01-23T22:36:49.000Z
|
2018-01-24T18:52:27.000Z
|
service/api/feeds.py
|
netzbegruenung/schaufenster
|
c0860570cf6b46dc0fade9cef7562edd2fa7f3a0
|
[
"Apache-2.0"
] | 2 |
2018-01-23T21:25:57.000Z
|
2018-01-24T21:46:41.000Z
|
import feedparser
from datetime import datetime
class Client(object):
def __init__(self, url):
self.url = url
self.__load()
def __load(self):
self.feed = feedparser.parse(self.url)
def metadata(self):
"""
Returns meta information from the feed
"""
return {
"title": self.feed["feed"].get("title"),
"link": self.feed["feed"].get("link"),
"description": self.feed["feed"].get("description"),
"published": self.feed["feed"].get("published"),
}
def __entry_details(self, entry):
"""
Returns only a few entry details we care about
"""
return {
"title": entry["title"],
"summary": entry["summary"],
"link": entry["link"],
"published": datetime(
entry["published_parsed"][0], entry["published_parsed"][1], entry["published_parsed"][2],
entry["published_parsed"][3], entry["published_parsed"][4], entry["published_parsed"][5]
)
}
def recent_items(self, num=3):
"""
Returns the num most recent entries from the feed
"""
out = []
        for entry in self.feed.entries[:num]:
            out.append(self.__entry_details(entry))
return out
| 29.130435 | 105 | 0.532836 |
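Usage sketch (not in the source); the feed URL is a placeholder:
client = Client('https://example.org/feed.rss')
meta = client.metadata()
print(meta['title'], meta['link'])
for item in client.recent_items(num=2):
    print(item['published'], item['title'])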
4aa21ac2ab0a3d29a05cf3f6f04413a9e2960c12
| 845 |
py
|
Python
|
ASIS/2020/Finals/babyauth/bin/server.py
|
mystickev/ctf-archives
|
89e99a5cd5fb6b2923cad3fe1948d3ff78649b4e
|
[
"MIT"
] | 1 |
2021-11-02T20:53:58.000Z
|
2021-11-02T20:53:58.000Z
|
ASIS/2020/Finals/babyauth/bin/server.py
|
ruhan-islam/ctf-archives
|
8c2bf6a608c821314d1a1cfaa05a6cccef8e3103
|
[
"MIT"
] | null | null | null |
ASIS/2020/Finals/babyauth/bin/server.py
|
ruhan-islam/ctf-archives
|
8c2bf6a608c821314d1a1cfaa05a6cccef8e3103
|
[
"MIT"
] | 1 |
2021-12-19T11:06:24.000Z
|
2021-12-19T11:06:24.000Z
|
#!/usr/bin/env python3
import os
import random
import shutil
import string
ctable = string.ascii_letters + string.digits
rndstr = lambda n: ''.join([random.choice(ctable) for i in range(n)])
PATH = '/tmp/babyauth_' + rndstr(16)
USERNAME = 'admin'
PASSWORD = rndstr(16)
if __name__ == '__main__':
# Setup
os.makedirs(PATH, exist_ok=True)
with open(f'{PATH}/username', 'w') as f:
f.write(USERNAME)
with open(f'{PATH}/password', 'w') as f:
f.write(PASSWORD)
# Prove your exploit is stable
for ROUND in range(5):
# Authentication
if os.system(f'./auth {PATH}') != 0:
shutil.rmtree(PATH)
exit(0)
print(f"[+] {ROUND+1}/5: OK", flush=True)
# Delicious Fruit
with open("/flag", "r") as f:
print(f.read())
# Cleanup
shutil.rmtree(PATH)
| 23.472222 | 69 | 0.598817 |
604cdc9b42836c1628f2ef2c19eedf4cbe7e38c0
| 12,228 |
py
|
Python
|
components/py_engine/framework/ap3216c.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | 4,538 |
2017-10-20T05:19:03.000Z
|
2022-03-30T02:29:30.000Z
|
components/py_engine/framework/ap3216c.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | 1,088 |
2017-10-21T07:57:22.000Z
|
2022-03-31T08:15:49.000Z
|
components/py_engine/framework/ap3216c.py
|
willianchanlovegithub/AliOS-Things
|
637c0802cab667b872d3b97a121e18c66f256eab
|
[
"Apache-2.0"
] | 1,860 |
2017-10-20T05:22:35.000Z
|
2022-03-27T10:54:14.000Z
|
"""
Copyright (C) 2015-2020 Alibaba Group Holding Limited
The driver for AP3216C chip, The AP3216C is an integrated ALS & PS module
that includes a digital ambient light sensor [ALS], a proximity sensor [PS],
and an IR LED in a single package.
"""
from micropython import const
from driver import I2C
from utime import sleep_ms
import math
AP3216C_ADDR = const(0x1e)
# System Register
AP3216C_SYS_CONFIGURATION_REG = const(0x00)
AP3216C_SYS_INT_STATUS_REG = const(0x01)
AP3216C_SYS_INT_CLEAR_MANNER_REG = const(0x02)
AP3216C_IR_DATA_L_REG = const(0x0A)
AP3216C_IR_DATA_H_REG = const(0x0B)
AP3216C_ALS_DATA_L_REG = const(0x0C)
AP3216C_ALS_DATA_H_REG = const(0x0D)
AP3216C_PS_DATA_L_REG = const(0x0E)
AP3216C_PS_DATA_H_REG = const(0x0F)
# ALS Register
AP3216C_ALS_CONFIGURATION_REG = const(0x10)
AP3216C_ALS_CALIBRATION_REG = const(0x19)
AP3216C_ALS_THRESHOLD_LOW_L_REG = const(0x1A)
AP3216C_ALS_THRESHOLD_LOW_H_REG = const(0x1B)
AP3216C_ALS_THRESHOLD_HIGH_L_REG = const(0x1C)
AP3216C_ALS_THRESHOLD_HIGH_H_REG = const(0x1D)
# PS Register
AP3216C_PS_CONFIGURATION_REG = const(0x20)
AP3216C_PS_LED_DRIVER_REG = const(0x21)
AP3216C_PS_INT_FORM_REG = const(0x22)
AP3216C_PS_MEAN_TIME_REG = const(0x23)
AP3216C_PS_LED_WAITING_TIME_REG = const(0x24)
AP3216C_PS_CALIBRATION_L_REG = const(0x28)
AP3216C_PS_CALIBRATION_H_REG = const(0x29)
AP3216C_PS_THRESHOLD_LOW_L_REG = const(0x2A)
AP3216C_PS_THRESHOLD_LOW_H_REG = const(0x2B)
AP3216C_PS_THRESHOLD_HIGH_L_REG = const(0x2C)
AP3216C_PS_THRESHOLD_HIGH_H_REG = const(0x2D)
#mode value
AP3216C_MODE_POWER_DOWN = const(0x0)
AP3216C_MODE_ALS = const(0x1)
AP3216C_MODE_PS = const(0x2)
AP3216C_MODE_ALS_AND_PS = const(0x3)
AP3216C_MODE_SW_RESET = const(0x4)
AP3216C_MODE_ALS_ONCE = const(0x5)
AP3216C_MODE_PS_ONCE = const(0x6)
AP3216C_MODE_ALS_AND_PS_ONCE = const(0x7)
#ap3216c_int_clear_manner
AP3216C_INT_CLEAR_MANNER_BY_READING = const(0x0)
AP3216C_ALS_CLEAR_MANNER_BY_SOFTWARE = const(0x1)
#als_range
AP3216C_ALS_RANGE_20661 = const(0x0)
AP3216C_ALS_RANGE_5162 = const(0x1)
AP3216C_ALS_RANGE_1291 = const(0x2)
AP3216C_ALS_RANGE_323 = const(0x3)
#als_range
AP3216C_PS_GAIN1 = const(0x0)
AP3216C_PS_GAIN2 = const(0x1)
AP3216C_PS_GAIN4 = const(0x2)
AP3216C_PS_GAIN8 = const(0x3)
AP3216C_SYSTEM_MODE = const(0x0)
AP3216C_INT_PARAM = const(0x1)
AP3216C_ALS_RANGE = const(0x2)
AP3216C_ALS_PERSIST = const(0x3)
AP3216C_ALS_CALIBRATION = const(0x4)
AP3216C_ALS_LOW_THRESHOLD_L = const(0x5)
AP3216C_ALS_LOW_THRESHOLD_H = const(0x6)
AP3216C_ALS_HIGH_THRESHOLD_L = const(0x7)
AP3216C_ALS_HIGH_THRESHOLD_H = const(0x8)
AP3216C_PS_INTEGRATED_TIME = const(0x9)
AP3216C_PS_GAIN = const(0xa)
AP3216C_PS_PERSIST = const(0xb)
AP3216C_PS_LED_CONTROL = const(0xc)
AP3216C_PS_LED_DRIVER_RATIO = const(0xd)
AP3216C_PS_INT_MODE = const(0xe)
AP3216C_PS_MEAN_TIME = const(0xf)
AP3216C_PS_WAITING_TIME = const(0x10)
AP3216C_PS_CALIBRATION_L = const(0x11)
AP3216C_PS_CALIBRATION_H = const(0x12)
AP3216C_PS_LOW_THRESHOLD_L = const(0x13)
AP3216C_PS_LOW_THRESHOLD_H = const(0x14)
AP3216C_PS_HIGH_THRESHOLD_L = const(0x15)
AP3216C_PS_HIGH_THRESHOLD_H = const(0x16)
class AP3216CError(Exception):
def __init__(self, value=0, msg="ap3216c common error"):
self.value = value
self.msg = msg
def __str__(self):
return "Error code:%d, Error message: %s" % (self.value, str(self.msg))
__repr__ = __str__
class AP3216C(object):
"""
This class implements ap3216c chip's defs.
"""
def __init__(self):
self.i2cDev = None
def open(self, devid):
self.i2cDev = I2C()
self.i2cDev.open(devid)
    # Write a value to a register
def write_reg(self, addr, data):
msgbuf = bytearray([data])
self.i2cDev.writeReg(addr, msgbuf)
print("--> write addr " + str(addr) + ", value = " + str(msgbuf))
    # Read values from a register
def read_regs(self, addr, len):
buf = bytearray(len)
self.i2cDev.readReg(addr, buf)
print("--> read " + str(len) + " bytes from addr " + str(addr) + ", " + str(len) + " bytes value = " + str(buf))
return buf;
    # Software reset of the sensor
def reset_sensor(self):
self.write_reg(AP3216C_SYS_CONFIGURATION_REG, AP3216C_MODE_SW_RESET); # reset
def read_low_and_high(self, reg, len):
        # buf[0] = self.read_regs(reg, len)      # read the low byte
        # buf[1] = self.read_regs(reg + 1, len)  # read the high byte
        data = self.read_regs(reg, len)[0] | (self.read_regs(reg + 1, len)[0] << len * 8)  # combine the bytes
if (data > (1 << 15)):
data = data - (1<<16)
return data
def ap3216c_get_IntStatus(self):
        # Read the interrupt status register
        IntStatus = self.read_regs(AP3216C_SYS_INT_STATUS_REG, 1)[0]
        # Bit 0 of IntStatus flags the ALS interrupt, bit 1 the PS interrupt.
        return IntStatus  # return the status
def ap3216c_int_init(self):
print("ap3216c_int_init")
    # Configure the interrupt input pin
def ap3216c_int_Config(self):
print("ap3216c_int_Config")
    # Initialization entry point
def init(self):
# reset ap3216c
self.reset_sensor()
sleep_ms(100)
self.ap3216c_set_param(AP3216C_SYSTEM_MODE, AP3216C_MODE_ALS_AND_PS)
sleep_ms(150) # delay at least 112.5ms
self.ap3216c_int_Config()
self.ap3216c_int_init()
# This function reads light by ap3216c sensor measurement
# @param no
# @return the ambient light converted to float data.
#
def ap3216c_read_ambient_light(self):
read_data = self.read_low_and_high(AP3216C_ALS_DATA_L_REG, 1)
range = self.ap3216c_get_param(AP3216C_ALS_RANGE)
print("ap3216c_read_ambient_light read_data is " , read_data, range)
if (range == AP3216C_ALS_RANGE_20661):
brightness = 0.35 * read_data # sensor ambient light converse to reality
elif (range == AP3216C_ALS_RANGE_5162):
brightness = 0.0788 * read_data # sensor ambient light converse to reality
elif (range == AP3216C_ALS_RANGE_1291):
brightness = 0.0197 * read_data # sensor ambient light converse to reality
elif (range == AP3216C_ALS_RANGE_323):
brightness = 0.0049 * read_data # sensor ambient light converse to reality
return brightness
#This function reads proximity by ap3216c sensor measurement
#@param no
#@return the proximity data.
def ap3216c_read_ps_data(self):
read_data = self.read_low_and_high(AP3216C_PS_DATA_L_REG, 1) # read two data
print("ap3216c_read_ps_data read_data is " , read_data);
if (1 == ((read_data >> 6) & 0x01 or (read_data >> 14) & 0x01)) :
            return 55555  # IR level too high: the PS reading is invalid, return 55555 as a sentinel
proximity = (read_data & 0x000f) + (((read_data >> 8) & 0x3f) << 4)
# sensor proximity converse to reality
if (proximity > (1 << 15)) :
proximity = proximity - (1<<16)
        proximity |= read_data & 0x8000  # keep the MSB: 0 = object far away, 1 = object close
        return proximity  # the low ten bits are data, the MSB is the status bit
#This function reads ir by ap3216c sensor measurement
#@param no
#@return the ir data.
def ap3216c_read_ir_data(self):
read_data = self.read_low_and_high(AP3216C_IR_DATA_L_REG, 1) # read two data
print("ap3216c_read_ir_data read_data is" , read_data);
proximity = (read_data & 0x0003) + ((read_data >> 8) & 0xFF)
# sensor proximity converse to reality
if (proximity > (1 << 15)) :
proximity = proximity - (1<<16)
return proximity
#This function sets parameter of ap3216c sensor
#@param cmd the parameter cmd of device
#@param value for setting value in cmd register
    #@return the setting parameter status; RT_EOK represents success.
def ap3216c_set_param(self, cmd, value):
if cmd == AP3216C_SYSTEM_MODE:
# default 000,power down
self.write_reg(AP3216C_SYS_CONFIGURATION_REG, value)
elif cmd == AP3216C_INT_PARAM:
self.write_reg(AP3216C_SYS_INT_CLEAR_MANNER_REG, value)
elif cmd == AP3216C_ALS_RANGE:
args = self.read_regs(AP3216C_ALS_CONFIGURATION_REG, 1)[0]
args &= 0xcf
args |= value << 4
self.write_reg(AP3216C_ALS_CONFIGURATION_REG, args)
elif cmd == AP3216C_ALS_PERSIST:
args = self.read_regs(AP3216C_ALS_CONFIGURATION_REG, 1)[0]
args &= 0xf0
args |= value
self.write_reg(AP3216C_ALS_CONFIGURATION_REG, args)
elif cmd == AP3216C_ALS_LOW_THRESHOLD_L:
self.write_reg(AP3216C_ALS_THRESHOLD_LOW_L_REG, value)
elif cmd == AP3216C_ALS_LOW_THRESHOLD_H:
self.write_reg(AP3216C_ALS_THRESHOLD_LOW_H_REG, value)
elif cmd == AP3216C_ALS_HIGH_THRESHOLD_L:
self.write_reg(AP3216C_ALS_THRESHOLD_HIGH_L_REG, value)
elif cmd == AP3216C_ALS_HIGH_THRESHOLD_H:
self.write_reg(AP3216C_ALS_THRESHOLD_HIGH_H_REG, value)
elif cmd == AP3216C_PS_GAIN:
args = self.read_regs(AP3216C_PS_CONFIGURATION_REG, 1)[0]
args &= 0xf3
            args |= value << 2  # the PS gain field occupies bits 2-3 (see the >> 2 in ap3216c_get_param)
self.write_reg(AP3216C_PS_CONFIGURATION_REG, args)
elif cmd == AP3216C_PS_PERSIST:
args = self.read_regs(AP3216C_PS_CONFIGURATION_REG, 1)[0]
args &= 0xfc
args |= value
self.write_reg(AP3216C_PS_CONFIGURATION_REG, args)
elif cmd == AP3216C_PS_LOW_THRESHOLD_L:
self.write_reg(AP3216C_PS_THRESHOLD_LOW_L_REG, value)
elif cmd == AP3216C_PS_LOW_THRESHOLD_H:
self.write_reg(AP3216C_PS_THRESHOLD_LOW_H_REG, value)
elif cmd == AP3216C_PS_HIGH_THRESHOLD_L:
self.write_reg(AP3216C_PS_THRESHOLD_HIGH_L_REG, value)
elif cmd == AP3216C_PS_HIGH_THRESHOLD_H:
self.write_reg(AP3216C_PS_THRESHOLD_HIGH_H_REG, value)
#This function gets parameter of ap3216c sensor
#@param cmd the parameter cmd of device
#@param value to get value in cmd register
    #@return the getting parameter status; RT_EOK represents success.
def ap3216c_get_param(self, cmd):
if cmd == AP3216C_SYSTEM_MODE:
value = self.read_regs(AP3216C_SYS_CONFIGURATION_REG, 1)[0]
elif cmd == AP3216C_INT_PARAM:
value = self.read_regs(AP3216C_SYS_INT_CLEAR_MANNER_REG, 1)[0]
elif cmd == AP3216C_ALS_RANGE:
value = self.read_regs(AP3216C_ALS_CONFIGURATION_REG, 1)[0]
temp = (value & 0xff) >> 4
value = temp
elif cmd == AP3216C_ALS_PERSIST:
temp = self.read_regs(AP3216C_ALS_CONFIGURATION_REG, 1)[0]
            value = temp & 0x0f
elif cmd == AP3216C_ALS_LOW_THRESHOLD_L:
value = self.read_regs(AP3216C_ALS_THRESHOLD_LOW_L_REG, 1)[0]
elif cmd == AP3216C_ALS_LOW_THRESHOLD_H:
value = self.read_regs(AP3216C_ALS_THRESHOLD_LOW_H_REG, 1)[0]
elif cmd == AP3216C_ALS_HIGH_THRESHOLD_L:
value = self.read_regs(AP3216C_ALS_THRESHOLD_HIGH_L_REG, 1)[0]
elif cmd == AP3216C_ALS_HIGH_THRESHOLD_H:
value = self.read_regs(AP3216C_ALS_THRESHOLD_HIGH_H_REG, 1)[0]
elif cmd == AP3216C_PS_GAIN:
temp = self.read_regs(AP3216C_PS_CONFIGURATION_REG, 1)[0]
value = (temp & 0xc) >> 2
elif cmd == AP3216C_PS_PERSIST:
temp = self.read_regs(AP3216C_PS_CONFIGURATION_REG, 1)[0]
value = temp & 0x3
elif cmd == AP3216C_PS_LOW_THRESHOLD_L:
value = self.read_regs(AP3216C_PS_THRESHOLD_LOW_L_REG, 1)[0]
elif cmd == AP3216C_PS_LOW_THRESHOLD_H:
value = self.read_regs(AP3216C_PS_THRESHOLD_LOW_H_REG, 1)[0]
elif cmd == AP3216C_PS_HIGH_THRESHOLD_L:
value = self.read_regs(AP3216C_PS_THRESHOLD_HIGH_L_REG, 1)[0]
elif cmd == AP3216C_PS_HIGH_THRESHOLD_H:
value = self.read_regs(AP3216C_PS_THRESHOLD_HIGH_H_REG, 1)[0]
return value
def close(self):
self.i2cDev.close()
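    # A minimal usage sketch (assumptions: the enclosing driver class is named
    # AP3216C and wraps an I2C device object; neither appears in this excerpt):
    #
    #   sensor = AP3216C(i2c_dev)
    #   sensor.ap3216c_set_param(AP3216C_SYSTEM_MODE, 0x03)  # ALS + PS active
    #   gain = sensor.ap3216c_get_param(AP3216C_PS_GAIN)
    #   sensor.close()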
| 39.067093 | 120 | 0.673945 |
60a0d3209632595f9201efa502347783b8b4f229
| 1,695 |
py
|
Python
|
Work/Trivia - Module 5/routes.py
|
dineshkumar2509/Flask
|
bcacb81d9411f53a88a80c2e4ce31e8141775cbe
|
[
"Apache-2.0"
] | 79 |
2015-01-31T17:08:36.000Z
|
2021-11-16T16:33:51.000Z
|
Work/Trivia - Module 5/routes.py
|
dineshkumar2509/Flask
|
bcacb81d9411f53a88a80c2e4ce31e8141775cbe
|
[
"Apache-2.0"
] | 3 |
2015-05-31T16:34:20.000Z
|
2016-10-17T05:32:36.000Z
|
Work/Trivia - Module 5/routes.py
|
dineshkumar2509/Flask
|
bcacb81d9411f53a88a80c2e4ce31e8141775cbe
|
[
"Apache-2.0"
] | 100 |
2015-02-01T12:28:58.000Z
|
2020-11-15T11:43:34.000Z
|
from flask import Flask, url_for, request, render_template
from app import app
import redis
r=redis.StrictRedis('localhost',6379,0, decode_responses=True,charset='utf-8');
@app.route('/')
def hello():
url = url_for('about');
link = '<a href="' + url + '">About us!</a>';
return link;
@app.route('/about')
def about():
return 'We are the knights who say Ni!!';
@app.route('/question/<title>', methods=['GET', 'POST'])
def question(title):
if request.method == 'GET':
# Redis code to load question
question = r.get(title+':question')
return render_template('AnswerQuestion.html',
question = question)
elif request.method == 'POST':
submittedAnswer = request.form['submittedAnswer'];
# Redis code to load answer
answer=r.get(title+':answer')
if submittedAnswer == answer:
return render_template('Correct.html');
else:
return render_template('Incorrect.html',
answer = answer,
submittedAnswer = submittedAnswer);
@app.route('/submit', methods=['GET', 'POST'])
def submit():
if request.method == 'GET':
return render_template('CreateQuestion.html');
elif request.method == 'POST':
question = request.form['question'];
answer = request.form['answer'];
title = request.form['title'];
# Redis code to save question and answer
r.set(title+':question',question);
r.set(title+':answer',answer);
return render_template('CreatedQuestion.html',
question = question);
return;
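# Redis key layout used by the routes above (example values are illustrative,
# not part of the app):
#   <title>:question -> the question text
#   <title>:answer   -> the expected answer
# e.g. r.set('gil:question', 'What does GIL stand for?')
#      r.set('gil:answer', 'Global Interpreter Lock')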
| 32.596154 | 79 | 0.581711 |
60ef0f0d35ea4a7e7e26dbed43d9a92fb1a69d04
| 469 |
py
|
Python
|
Mass-Delete-Http-main/delh.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2 |
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
Mass-Delete-Http-main/delh.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
Mass-Delete-Http-main/delh.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2 |
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
#JametKNTLS - h0d3_g4n - Moslem - Kiddenta - Naskleng45
#Created By : Jenderal92@Shin403
banner = """
Mass Delete HTTP | Jamet Crew
"""
print banner
def http(url):
    try:
        # strip the scheme and append the bare host to delhttp.txt
        x = url.replace('http://', '').replace('https://', '')
        open('delhttp.txt', 'a').write(x + '\n'); print('Deleted http ' + url)
    except: pass
site = raw_input('List Site : ')
ht = open(site, 'r').readlines()
for i in ht:
try:
siten = i.strip()
data=http(siten)
except: pass
| 21.318182 | 72 | 0.620469 |
e8213ab7dcb4bf3c584e4890ef6627bd294be6a4
| 2,276 |
py
|
Python
|
src/onegov/election_day/screen_widgets/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/election_day/screen_widgets/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/election_day/screen_widgets/__init__.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from onegov.election_day.screen_widgets.election import (
ElectionCandidatesByEntityTableWidget,
ElectionCandidatesChartWidget,
ElectionCandidatesTableWidget,
ElectionCompoundCandidatesTableWidget,
ElectionCompoundDistrictsTableWidget,
ElectionCompoundListsChartWidget,
ElectionCompoundListsTableWidget,
ElectionListsChartWidget,
ElectionListsTableWidget,
)
from onegov.election_day.screen_widgets.generic import (
ColumnWidget,
CountedEntitiesWidget,
H1Widget,
H2Widget,
H3Widget,
HRWidget,
LogoWidget,
ProgressWidget,
RowWidget,
TextWidget,
TitleWidget,
)
from onegov.election_day.screen_widgets.vote import (
VoteCounterProposalDistrictsMap,
VoteCounterProposalEntitiesMap,
VoteCounterProposalEntitiesTableWidget,
VoteCounterProposalResultBarWidget,
VoteCounterProposalTitleWidget,
VoteProposalDistrictsMap,
VoteProposalEntitiesMap,
VoteProposalEntitiesTableWidget,
VoteProposalResultBarWidget,
VoteTieBreakerDistrictsMap,
VoteTieBreakerEntitiesMap,
VoteTieBreakerEntitiesTableWidget,
VoteTieBreakerResultBarWidget,
VoteTieBreakerTitleWidget
)
__all__ = (
'ColumnWidget',
'CountedEntitiesWidget',
'ElectionCandidatesByEntityTableWidget',
'ElectionCandidatesChartWidget',
'ElectionCandidatesTableWidget',
'ElectionCompoundCandidatesTableWidget',
'ElectionCompoundDistrictsTableWidget',
'ElectionCompoundListsChartWidget',
'ElectionCompoundListsTableWidget',
'ElectionListsChartWidget',
'ElectionListsTableWidget',
'H1Widget',
'H2Widget',
'H3Widget',
'HRWidget',
'LogoWidget',
'ProgressWidget',
'RowWidget',
'TextWidget',
'TitleWidget',
'VoteCounterProposalDistrictsMap',
'VoteCounterProposalEntitiesMap',
'VoteCounterProposalEntitiesTableWidget',
'VoteCounterProposalResultBarWidget',
'VoteCounterProposalTitleWidget',
'VoteProposalDistrictsMap',
'VoteProposalEntitiesMap',
'VoteProposalEntitiesTableWidget',
'VoteProposalResultBarWidget',
'VoteTieBreakerDistrictsMap',
'VoteTieBreakerEntitiesMap',
'VoteTieBreakerEntitiesTableWidget',
'VoteTieBreakerResultBarWidget',
'VoteTieBreakerTitleWidget',
)
| 28.810127 | 57 | 0.774165 |
c727aed3597bde1325e1ff047e4667dfd3acfed8
| 3,244 |
py
|
Python
|
src/server.py
|
tschibu/starthack-asimov
|
bbc6fe04986738c014fa4cbd02a7f9e23b8a9c29
|
[
"MIT"
] | null | null | null |
src/server.py
|
tschibu/starthack-asimov
|
bbc6fe04986738c014fa4cbd02a7f9e23b8a9c29
|
[
"MIT"
] | null | null | null |
src/server.py
|
tschibu/starthack-asimov
|
bbc6fe04986738c014fa4cbd02a7f9e23b8a9c29
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Server for the Raspi Webapp
Examples:
- get json with curl -> curl -X POST http://0.0.0.0:2828/api/v1/getCrashInfo -d @data/1.json
- get image with curl -> curl -X POST http://0.0.0.0:2828/api/v1/getCrashImage -d @data/1.json -o received_img.png
"""
import sys
sys.path.append('..')
import os
import signal
import time
from sanic import Sanic
from sanic.response import json
from sanic.response import file
import helper.log_helper as logger
import config
from damage_image import DamageImage
from data_parser import DataParser
app = Sanic()
app.name = "CrashSimulationAsimov"
log = logger.get(False, "Server")
# SIGINT handler (when pressing Ctrl+C)
def signal_int_handler(sig, frame):
print("Ctrl+C Pressed. Exit...")
sys.exit(0)
# Routes
# GET - index.html
@app.route('/', methods=['GET'],)
async def index(request):
return await file(os.path.join(os.path.dirname(__file__), 'frontend/index.html'))
# GET - favicon.ico
@app.route('/favicon.ico', methods=['GET'],)
async def favicon(request):
return await file(os.path.join(os.path.dirname(__file__), 'frontend/favicon.ico'))
# POST request 1 - returns JSON {"impactAngle": degrees, "offsetMaximumForce": millisecond}
@app.route('/api/v1/getCrashInfo', methods=['POST',])
async def crash_info(request):
''' crash info parses the crash record and returns a JSON object '''
log.info("Handling '/api/v1/getCrashInfo'")
angle, max_force_offset, _, _, _ = DataParser().parse_input_data(request.body.decode('utf8'))
return json({'impactAngle': angle, 'offsetMaximumForce': max_force_offset})
# POST request 2 - returns a rendered crash image (PNG)
@app.route('/api/v1/getCrashImage', methods=['POST',])
async def crash_image(request):
''' crash image parses the crash record and returns a Image '''
log.info("Handling '/api/v1/getCrashImage'")
customOffset = 0
try:
customOffset = int(request.args.get('timeOffsetMS'))
except Exception as e:
log.error(e)
log.info("Set customOffset: " + str(customOffset) + "ms")
angle_impact, max_force, damage_id, crash_time, max_force_offset = DataParser().parse_input_data(request.body.decode('utf8'), custom_offset=customOffset)
d = DamageImage(angle_impact, max_force, damage_id, crash_time, max_force_offset)
return await file(d.get_image())
# POST request 3 - returns a rendered crash image list (PNG)
@app.route('/api/v1/play', methods=['POST',])
async def image_list(request):
''' crash image parses the crash record and returns a Image List '''
log.info("Handling '/api/v1/play'")
images = []
data = request.body.decode('utf-8')
for i in range(-8000, 8000, 1000):
angle_impact, max_force, damage_id, crash_time, max_force_offset = DataParser().parse_input_data(data, custom_offset=i)
d = DamageImage(angle_impact, max_force, damage_id, crash_time, max_force_offset)
images.append(d.get_image())
return json({"data": images})
if __name__ == '__main__':
signal.signal(signal.SIGINT, signal_int_handler)
##app.add_task(task(app))
app.static('/frontend', './frontend')
app.static('/images', './images')
app.run(host=config.host, port=config.port, debug=False, access_log=False)
| 35.26087 | 157 | 0.705919 |
c7323a134a1167a656505108c241a73e79055d04
| 225 |
py
|
Python
|
20-fs-ias-lec/groups/09-loraSense/LoRaSense/boot.py
|
Kyrus1999/BACnet
|
5be8e1377252166041bcd0b066cce5b92b077d06
|
[
"MIT"
] | 8 |
2020-03-17T21:12:18.000Z
|
2021-12-12T15:55:54.000Z
|
20-fs-ias-lec/groups/09-loraSense/LoRaSense/boot.py
|
Kyrus1999/BACnet
|
5be8e1377252166041bcd0b066cce5b92b077d06
|
[
"MIT"
] | 2 |
2021-07-19T06:18:43.000Z
|
2022-02-10T12:17:58.000Z
|
20-fs-ias-lec/groups/09-loraSense/LoRaSense/boot.py
|
Kyrus1999/BACnet
|
5be8e1377252166041bcd0b066cce5b92b077d06
|
[
"MIT"
] | 25 |
2020-03-20T09:32:45.000Z
|
2021-07-18T18:12:59.000Z
|
##import lib.lorasense as lorasense
##lorasense = lorasense.LoraSense(mode=1, debug=1)
##lorasense.setupWLAN("CasaSalsi","S@lsi1968")
##lorasense.setupLoRa(start=True)
##lorasense.setupUDP("192.168.1.123",54528, start=True)
| 32.142857 | 55 | 0.755556 |
c7b0cf1a0772c67c3dcf4ebb16a8758ba432ef0c
| 911 |
py
|
Python
|
scriptsForPreprocessing/separate_img_with_mask_and_without.py
|
fishial/Object-Detection-Model
|
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
|
[
"CC0-1.0"
] | 1 |
2022-01-03T14:00:17.000Z
|
2022-01-03T14:00:17.000Z
|
scriptsForPreprocessing/separate_img_with_mask_and_without.py
|
fishial/Object-Detection-Model
|
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
|
[
"CC0-1.0"
] | null | null | null |
scriptsForPreprocessing/separate_img_with_mask_and_without.py
|
fishial/Object-Detection-Model
|
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
|
[
"CC0-1.0"
] | 1 |
2021-12-21T09:50:53.000Z
|
2021-12-21T09:50:53.000Z
|
import numpy as np
import pandas as pd
import os, json
import xlrd
import os.path as path
import re
from shutil import copyfile
mask_dataset = r'resources/fishial-new/train'
img_dir = r'resources/train-fish-img'
img_dir_test = r'resources/test-fish-img'
new_part = r'resources/new_part'
val = r'resources/val-fish-img'
path_to_unique_img = r'resources/fish-img-unique'
os.makedirs(path_to_unique_img, exist_ok=True)
list_path_img = [os.path.basename(i) for i in os.listdir(mask_dataset)]
list_img_dir = [os.path.basename(i) for i in os.listdir(val)] #+ [os.path.basename(i) for i in os.listdir(img_dir_test)] + [os.path.basename(i) for i in os.listdir(new_part)] + [os.path.basename(i) for i in os.listdir(val)]
data = set(list_img_dir) - set(list_path_img)
for idx, i in enumerate(data):
print("Leave: {}".format(len(data) - idx))
copyfile(os.path.join(val, i), os.path.join(path_to_unique_img, i))
| 39.608696 | 224 | 0.745335 |
90837de99b6b6982a183dc9710ad84fdd489a295
| 6,304 |
py
|
Python
|
Src/Scripts/mateParsing.py
|
hazemalsaied/IdenSys
|
9b6220ff7e65f7059240b742c81952132a765007
|
[
"MIT"
] | 2 |
2017-09-28T13:54:57.000Z
|
2018-06-28T05:03:06.000Z
|
Src/Scripts/mateParsing.py
|
hazemalsaied/IdenSys
|
9b6220ff7e65f7059240b742c81952132a765007
|
[
"MIT"
] | null | null | null |
Src/Scripts/mateParsing.py
|
hazemalsaied/IdenSys
|
9b6220ff7e65f7059240b742c81952132a765007
|
[
"MIT"
] | null | null | null |
import os
import marmot
def ConlluToConll2009(conllu2016Path):
with open(conllu2016Path) as corpusFile:
lines = corpusFile.readlines()
Conll2009Text = ''
for line in lines:
if len(line) > 0 and line.endswith('\n'):
line = line[:-1]
if line.startswith('#'):
continue
if line == '':
Conll2009Text += line + '\n'
continue
lineParts = line.split('\t')
if '-' in lineParts[0]:
continue
        if len(lineParts) != 10:
print 'Error: not-well formatted line: file: ', str(os.path.basename(conllu2016Path)), ', line:', line
continue
Conll2009Text += lineParts[0] + '\t' + lineParts[1] + '\t' + lineParts[2] + '\t' + lineParts[2] + '\t' + \
lineParts[3] + '\t' + lineParts[3] + '\t' + lineParts[5] \
+ '\t' + lineParts[5] + '\t' + lineParts[6] + '\t' + lineParts[6] + '\t' + lineParts[
7] + '\t' + lineParts[7] + '\t_\t' + lineParts[8] + '\t' + lineParts[8] + '\t_'
idx = 0
while idx < 14:
Conll2009Text += '\t_'
idx += 1
Conll2009Text += '\n'
mateFile = open(conllu2016Path + '.conllu2009', 'w+')
mateFile.write(Conll2009Text)
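    # Column mapping sketch (CoNLL-U -> CoNLL-2009, as written above): ID and
    # FORM are copied once; LEMMA, UPOS, FEATS, HEAD, DEPREL and DEPS are each
    # written twice to fill the gold/predicted column pairs; every remaining
    # column is padded with '_'.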
def conllu2009Toconllu(conllu2009Path, Conll2016Path):
with open(conllu2009Path) as Conll2009File:
lines9 = Conll2009File.readlines()
with open(Conll2016Path) as Conll2016File:
lines16 = Conll2016File.readlines()
Conll2016Text = ''
idx = 0
for line in lines16:
if len(line) > 0 and line.endswith('\n'):
line = line[:-1]
if line.startswith('#'):
Conll2016Text += line + '\n'
continue
if line == '':
idx += 1
Conll2016Text += line + '\n'
continue
lineParts16 = line.split('\t')
lineParts09 = lines9[idx].split('\t')
if '-' in lineParts16[0]:
continue
idx += 1
lineParts16[6] = lineParts09[9]
lineParts16[7] = lineParts09[11]
line = ''
for linePart in lineParts16:
line += linePart + '\t'
line = line[:-1] + '\n'
Conll2016Text += line
mateFile = open(Conll2016Path + '.autoDep', 'w+')
mateFile.write(Conll2016Text)
def jackknife(foldNum, langName):
corpusPath = '/Users/halsaied/Documents/IdenSys/sharedTask/'+ langName+ '/train.conllu.autoPOS.conllu2009'
with open(corpusPath) as corpusFile:
lines = corpusFile.readlines()
foldSize = len(lines) / foldNum
ResultPath = '/Users/halsaied/Documents/IdenSys/MateTools/'+langName + '/Jackkniffing/'
for i in xrange(0, foldNum):
trainPath = os.path.join(ResultPath, str(i) + '.train.jck.txt')
testPath = os.path.join(ResultPath, str(i) + '.test.jck.txt')
        startCuttingIdx = i * foldSize
        startCuttingIdx = marmot.approximateCuttingIdx(startCuttingIdx, lines)
        endCuttingIdx = (i + 1) * foldSize
        endCuttingIdx = marmot.approximateCuttingIdx(endCuttingIdx, lines)
        testLines = lines[startCuttingIdx: endCuttingIdx]
        if startCuttingIdx == 0:
            trainLines = lines[endCuttingIdx:]
        elif endCuttingIdx == len(lines) - 1:
            trainLines = lines[: startCuttingIdx]
        else:
            trainLines = lines[:startCuttingIdx] + lines[endCuttingIdx:]
createMateFile(trainLines, trainPath)
createMateFile(testLines, testPath)
def createMateFile(lines, path):
trainCorpus = ''
for line in lines:
trainCorpus += line
marmotTrainFile = open(path, 'w+')
marmotTrainFile.write(trainCorpus)
def createMateBatchJCK(foldNum, langList):
batch = '#!/bin/bash\n'
outPutPath = '/Users/halsaied/Documents/IdenSys/MateTools/srl/lib/'
jackPath = '/Users/halsaied/Documents/IdenSys/MateTools/HU/Jackkniffing/'
for lang in langList.split(','):
for f in xrange(0, foldNum):
trainFile = os.path.join(jackPath, str(f) + '.train.jck.txt')
modelFile = os.path.join(jackPath, str(f) + '.model.jck.txt')
batch += 'java -cp anna-3.3.jar is2.parser.Parser -train ' + trainFile + ' -model ' + modelFile + '\n'
testFile = os.path.join(jackPath, str(f) + '.test.jck.txt')
outputFile = os.path.join(jackPath, str(f) + '.output.jck.txt')
batch += 'java -cp anna-3.3.jar is2.parser.Parser -model ' + modelFile + ' -test '+ testFile +' -out ' + outputFile + '\n'
batchFile = open(outPutPath + 'dep.jck.batch.sh', 'w+')
batchFile.write(batch)
def mergeConlluFiles(outfilesPath,outputFileName):
lines = ''
for subdir, dirs, files in os.walk(outfilesPath):
for file in files:
with open(os.path.join(outfilesPath, file)) as conlluFile:
jckOutLines = conlluFile.readlines()
jckOutLines = marmot.removeFinalEmptyLines(jckOutLines)
for line in jckOutLines:
lines += line
outFile = open(os.path.join(outfilesPath, outputFileName), 'w')
outFile.write(lines)
#ConlluToConll2009('/Users/halsaied/Documents/IdenSys/sharedtask/HU/train.conllu.autoPOS')
#jackknife(10, 'HU')
#createMateBatchJCK(10, 'HU')
#conllu2009Toconllu('/Users/halsaied/Documents/IdenSys/sharedtask/HU/train.conllu.autoPOS.conllu2009', '/Users/halsaied/Documents/IdenSys/sharedtask/HU/train.conllu.autoPOS')
#ConlluToConll2009('/Users/halsaied/Documents/IdenSys/sharedtask/HU/test.conllu.autoPOS')
#mergeConlluFiles('/Users/halsaied/Documents/IdenSys/mateTools/SPMRL/','spmrl.conllu')
ConlluToConll2009('/Users/halsaied/Documents/IdenSys/sharedtask/FR/train.conllu')
ConlluToConll2009('/Users/halsaied/Documents/IdenSys/sharedtask/FR/test.conllu')
| 41.473684 | 174 | 0.567735 |
464079e55c5fbd7510efa3e6119c59a67ca40c1c
| 955 |
py
|
Python
|
AP_SS16/US1/python_custom_scripts/error_formula.py
|
DimensionalScoop/kautschuk
|
90403f97cd60b9716cb6a06668196891d5d96578
|
[
"MIT"
] | 3 |
2016-04-27T17:07:00.000Z
|
2022-02-02T15:43:15.000Z
|
FP_2017/18/python_custom_scripts/error_formula.py
|
DimensionalScoop/kautschuk
|
90403f97cd60b9716cb6a06668196891d5d96578
|
[
"MIT"
] | 5 |
2016-04-27T17:10:03.000Z
|
2017-06-20T14:54:20.000Z
|
latex-template/python_custom_scripts/error_formula.py
|
DimensionalScoop/kautschuk
|
90403f97cd60b9716cb6a06668196891d5d96578
|
[
"MIT"
] | null | null | null |
import sympy
from table import (
make_SI,
write,
)
def error(f, err_vars=None):  # e.g. err_vars=(E, q) passes the error-carrying quantities explicitly
    from sympy import Symbol, latex
    s = 0
    latex_names = dict()
    if err_vars is None:
        err_vars = f.free_symbols
    for v in err_vars:
        err = Symbol('latex_std_' + v.name)
        s += f.diff(v)**2 * err**2
        latex_names[err] = '\\sigma_{' + latex(v) + '}'
    return latex(sympy.sqrt(s), symbol_names=latex_names)
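# error() implements Gaussian error propagation,
#   sigma_f = sqrt( sum_v (df/dv)^2 * sigma_v^2 ),
# rendered as a LaTeX string; the commented-out calls below sketch its use.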
# D1, P, m, cw = sympy.var(r'D_1 P m c_w')
# mkck = sympy.var('mkck')
# vreal = D1/P*(m*cw+mkck)
# write('build/Fehlerformel_1.tex', error(vreal, err_vars=(D1,P, m, cw)))
#
# T1, T2 = sympy.var('T_1 T_2')
# videal = T1/(T1-T2)
# write('build/Fehlerformel_2.tex', error(videal, err_vars=(T1,T2)))
#
# dT, A, B, t = sympy.var(r'\td{T}{t} A B t')
# f = 2*A*t + B
# write('build/Fehlerformel_3.tex', error(f, err_vars=(A,B)))
| 28.088235 | 122 | 0.598953 |
d3dc5c78123c2f25529a4c75abbd5f96f5859304
| 23,338 |
py
|
Python
|
betaTest.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
betaTest.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
betaTest.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
from datahandler import expertPatterns
from datahandler import miningPatterns
from datahandler import analyser as analyser
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def tryExpertPatterns(filename):
"""
    Experiments with the expertPatterns class
"""
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(filename)
ep.printPatterns()
print(ep.getSetOfObselTypes())
def tryRawResults(filea, fileb, filec):
"""
    Experiments with the MiningPatterns class in raw mode
"""
ri = miningPatterns.MiningPatterns()
rb = miningPatterns.MiningPatterns()
rr = miningPatterns.MiningPatterns()
ri.getRawMiningResults(filea)
rb.getRawMiningResults(fileb)
rr.getRawMiningResults(filec)
for line in ri.rawResults:
if (len(line) - 13) % 11 != 0:
print("error")
for line in rb.rawResults:
if (len(line) - 13) % 11 != 0:
print("error")
for line in rr.rawResults:
if (len(line) - 13) % 11 != 0:
print("error")
def tryResults(filename):
"""
    Experiments with the MiningPatterns class in standard mode
"""
rr = miningPatterns.MiningPatterns()
rr.getMiningResults(filename)
rr.printMiningResults()
def tryFindPatterns(mining, expert, output):
"""
    Experiment with the findPatterns method
"""
rr = miningPatterns.MiningPatterns()
ep = expertPatterns.ExpertPatterns()
rr.getMiningResults(mining)
ep.getPatterns(expert)
sortedra = analyser.sortBy(0, False, rr)
sortedrb = analyser.sortBy(4, True, rr)
sortedrc = analyser.sortBy(7, True, rr)
ra = analyser.findPatterns(ep, sortedra)
rb = analyser.findPatterns(ep, sortedrb)
rc = analyser.findPatterns(ep, sortedrc)
print("Résultats : ")
analyser.generateGraph(ra, rb, rc)
def tryNewMiningPatterns(filename):
"""
    Experiments with the mining patterns method
"""
rr = list(miningPatterns.readRows(filename, 13, 11))
for elt in rr:
print(elt)
print("Break")
miningPatterns.sortBy(rr, [("freq", "desc"), ("cov int", "asc"), ("recov", "desc")])
for elt in rr:
print(elt.infos["freq"], elt.infos["cov int"], elt.infos["recov"])
def tryAnalysis(mining, expert, output, output2):
"""
    Experiments with the raw analyses
"""
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
mp = list(miningPatterns.readRows(mining, 13, 11))
analyserRes = analyser.findPatterns(ep, mp)
analyserRes.toFile(output)
miningPatterns.sortBy(mp, [("freq", "desc")])
for i in range(40):
print(mp[i].infos)
analyserRes = analyser.findPatterns(ep, mp)
analyserRes.toFile(output2)
def tryAnalysisWithRemove(mining, expert, outputRev):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
mp = list(miningPatterns.readRows(mining, 13, 11))
analyserRes = analyser.findPatternsWithRevision(ep, mp)
analyserRes.toFile(outputRev)
print(analyserRes)
def tryComparativeAnalysis(mining, expert, outputStand, outputRev):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
mp = list(miningPatterns.readRows(mining, 13, 11))
mpRev = list(miningPatterns.readRows(mining, 13, 11))
miningPatterns.sortBy(mp, [("cov evt", "desc"), ("long", "asc")])
analyserRes = analyser.findPatterns(ep, mp)
analyserRes.toFile(outputStand)
miningPatterns.sortBy(mpRev, [("cov evt", "desc"), ("long", "asc")])
analyserResRev = analyser.findPatternsWithRevision(ep, mpRev)
analyserResRev.toFile(outputRev)
print(analyserRes)
def niemeRefacto(mining, expert):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
mp = miningPatterns.Patterns(mining, ";", 13, 11)
mp.sortBy([("freq", "asc"), ("cov evt", "desc")])
for pattern in mp.patterns:
print(pattern)
mp.printInfos()
mp.printOccInfos()
def tryAnalyser(mining, expert, nameExpe):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
mpRand = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
mpCove = miningPatterns.Patterns(mining, ";", 13, 11)
mpRandNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreqNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
mpCoveNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
# Random
fname = "DATA/" + nameExpe + "_no-rev_beforeSortRandom.csv"
mpRandNoRev.toFile(fname)
anaRandNoRev = mpRandNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseRandom.csv"
anaRandNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortRandom.csv"
mpRandNoRev.toFile(fname)
# Freq
fname = "DATA/" + nameExpe + "_no-rev_beforeSortFreq.csv"
mpFreqNoRev.toFile(fname)
mpFreqNoRev.sortBy([("freq", "desc")])
anaFreqNoRev = mpFreqNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseFreq.csv"
anaFreqNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortFreq.csv"
mpFreqNoRev.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_no-rev_beforeSortCovEvt.csv"
mpCoveNoRev.toFile(fname)
mpCoveNoRev.sortBy([("cov evt", "desc")])
anaCoveNoRev = mpCoveNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseCovEvt.csv"
anaCoveNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortCovEvt.csv"
mpCoveNoRev.toFile(fname)
    # With revision
mpRand = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
mpCove = miningPatterns.Patterns(mining, ";", 13, 11)
# Random
fname = "DATA/" + nameExpe + "_rev_beforeSortRandom.csv"
mpRand.toFile(fname)
anaRand = mpRand.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseRandom.csv"
anaRand.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortRandom.csv"
mpRand.toFile(fname)
# Freq
fname = "DATA/" + nameExpe + "_rev_beforeSortFreq.csv"
mpFreq.toFile(fname)
mpFreq.sortBy([("freq", "desc")])
anaFreq = mpFreq.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseFreq.csv"
anaFreq.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortFreq.csv"
mpFreq.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_rev_beforeSortCovEvt.csv"
mpCove.toFile(fname)
mpCove.sortBy([("cov evt", "desc")])
anaCove = mpCove.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseCovEvt.csv"
anaCove.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortCovEvt.csv"
mpCove.toFile(fname)
    # Graph generation
x = list(range(len(anaRandNoRev.results)))
y = list(range(len(anaFreqNoRev.results)))
z = list(range(len(anaCoveNoRev.results)))
xrev = list(range(len(anaRand.results)))
yrev = list(range(len(anaFreq.results)))
zrev = list(range(len(anaCove.results)))
random = []
freq = []
cov = []
randomrev = []
freqrev = []
covrev = []
for elt in anaRandNoRev.results:
random.append(elt["idxMining"])
for elt in anaFreqNoRev.results:
freq.append(elt["idxMining"])
for elt in anaCoveNoRev.results:
cov.append(elt["idxMining"])
for elt in anaRand.results:
randomrev.append(elt["idxMining"])
for elt in anaFreq.results:
freqrev.append(elt["idxMining"])
for elt in anaCove.results:
covrev.append(elt["idxMining"])
pdfname = "DATA/" + nameExpe + ".pdf"
pp = PdfPages(pdfname)
plt.figure(1)
plt.plot(x, random, 'r', linestyle="-", label="Random")
plt.plot(y, freq, 'g', linestyle="--", label="Fréq")
plt.plot(z, cov, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(2)
plt.plot(xrev, randomrev, 'r', linestyle="-", label="Random")
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Fréq")
plt.plot(zrev, covrev, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats avec révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(3)
plt.plot(x, random, 'r', linestyle="-", label="Random sans révision")
plt.plot(xrev, randomrev, 'g', linestyle="--", label="Random avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de random avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(4)
plt.plot(y, freq, 'r', linestyle="-", label="Freq sans révision")
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Freq avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de freq avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(5)
plt.plot(z, cov, 'r', linestyle="-", label="Cov evt sans révision")
plt.plot(zrev, covrev, 'g', linestyle="--", label="Cov evt avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de cov evt avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
pp.close()
def tryAnalyserLongeur(mining, expert, nameExpe):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
print(ep.patterns)
mpRand = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
mpCove = miningPatterns.Patterns(mining, ";", 13, 11)
mpRandNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreqNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
mpCoveNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
# Random
fname = "DATA/" + nameExpe + "_no-rev_beforeSortRandom.csv"
mpRandNoRev.toFile(fname)
anaRandNoRev = mpRandNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseRandom.csv"
anaRandNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortRandom.csv"
mpRandNoRev.toFile(fname)
# Freq
fname = "DATA/" + nameExpe + "_no-rev_beforeSortFreq.csv"
mpFreqNoRev.toFile(fname)
mpFreqNoRev.sortBy([("long", "desc"), ("freq", "desc")])
anaFreqNoRev = mpFreqNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseFreq.csv"
anaFreqNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortFreq.csv"
mpFreqNoRev.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_no-rev_beforeSortCovEvt.csv"
mpCoveNoRev.toFile(fname)
mpCoveNoRev.sortBy([("long", "desc"), ("cov evt", "desc")])
anaCoveNoRev = mpCoveNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseCovEvt.csv"
anaCoveNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortCovEvt.csv"
mpCoveNoRev.toFile(fname)
    # With revision
mpRand = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
mpCove = miningPatterns.Patterns(mining, ";", 13, 11)
# Random
fname = "DATA/" + nameExpe + "_rev_beforeSortRandom.csv"
mpRand.toFile(fname)
anaRand = mpRand.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseRandom.csv"
anaRand.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortRandom.csv"
mpRand.toFile(fname)
# Freq
fname = "DATA/" + nameExpe + "_rev_beforeSortFreq.csv"
mpFreq.toFile(fname)
mpFreq.sortBy([("long", "desc"), ("freq", "desc")])
anaFreq = mpFreq.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseFreq.csv"
anaFreq.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortFreq.csv"
mpFreq.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_rev_beforeSortCovEvt.csv"
mpCove.toFile(fname)
mpCove.sortBy([("long", "desc"), ("cov evt", "desc")])
anaCove = mpCove.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseCovEvt.csv"
anaCove.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortCovEvt.csv"
mpCove.toFile(fname)
    # Graph generation
x = list(range(len(anaRandNoRev.results)))
y = list(range(len(anaFreqNoRev.results)))
z = list(range(len(anaCoveNoRev.results)))
xrev = list(range(len(anaRand.results)))
yrev = list(range(len(anaFreq.results)))
zrev = list(range(len(anaCove.results)))
random = []
freq = []
cov = []
randomrev = []
freqrev = []
covrev = []
for elt in anaRandNoRev.results:
random.append(elt["idxMining"])
for elt in anaFreqNoRev.results:
freq.append(elt["idxMining"])
for elt in anaCoveNoRev.results:
cov.append(elt["idxMining"])
for elt in anaRand.results:
randomrev.append(elt["idxMining"])
for elt in anaFreq.results:
freqrev.append(elt["idxMining"])
for elt in anaCove.results:
covrev.append(elt["idxMining"])
pp = PdfPages("DATA/reichertLong.pdf")
plt.figure(1)
plt.plot(x, random, 'r', linestyle="-", label="Random")
plt.plot(y, freq, 'g', linestyle="--", label="Fréq")
plt.plot(z, cov, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(2)
plt.plot(xrev, randomrev, 'r', linestyle="-", label="Random")
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Fréq")
plt.plot(zrev, covrev, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats avec révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(3)
plt.plot(x, random, 'r', linestyle="-", label="Random sans révision")
plt.plot(xrev, randomrev, 'g', linestyle="--", label="Random avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de random avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(4)
plt.plot(y, freq, 'r', linestyle="-", label="Freq sans révision")
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Freq avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de freq avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(5)
plt.plot(z, cov, 'r', linestyle="-", label="Cov evt sans révision")
plt.plot(zrev, covrev, 'g', linestyle="--", label="Cov evt avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de cov evt avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
pp.close()
def tryAnalyserDebussy(mining, expert, nameExpe):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
print(ep.patterns)
mpRand = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
mpCove = miningPatterns.Patterns(mining, ";", 13, 11)
mpRandNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreqNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
mpCoveNoRev = miningPatterns.Patterns(mining, ";", 13, 11)
# Random
fname = "DATA/" + nameExpe + "_no-rev_beforeSortRandom.csv"
mpRandNoRev.toFile(fname)
anaRandNoRev = mpRandNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseRandom.csv"
anaRandNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortRandom.csv"
mpRandNoRev.toFile(fname)
# Freq
fname = "DATA/" + nameExpe + "_no-rev_beforeSortFreq.csv"
mpFreqNoRev.toFile(fname)
mpFreqNoRev.sortBy([("long", "desc"), ("freq", "desc")])
anaFreqNoRev = mpFreqNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseFreq.csv"
anaFreqNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortFreq.csv"
mpFreqNoRev.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_no-rev_beforeSortCovEvt.csv"
mpCoveNoRev.toFile(fname)
mpCoveNoRev.sortBy([("long", "desc"), ("cov evt", "desc")])
anaCoveNoRev = mpCoveNoRev.findPatterns(ep)
fname = "DATA/" + nameExpe + "_no-rev_analyseCovEvt.csv"
anaCoveNoRev.toFile(fname)
fname = "DATA/" + nameExpe + "_no-rev_afterSortCovEvt.csv"
mpCoveNoRev.toFile(fname)
    # With revision
mpRand = miningPatterns.Patterns(mining, ";", 13, 11)
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
mpCove = miningPatterns.Patterns(mining, ";", 13, 11)
# Random
fname = "DATA/" + nameExpe + "_rev_beforeSortRandom.csv"
mpRand.toFile(fname)
anaRand = mpRand.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseRandom.csv"
anaRand.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortRandom.csv"
mpRand.toFile(fname)
# Freq
fname = "DATA/" + nameExpe + "_rev_beforeSortFreq.csv"
mpFreq.toFile(fname)
mpFreq.sortBy([("long", "desc"), ("freq", "desc")])
anaFreq = mpFreq.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseFreq.csv"
anaFreq.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortFreq.csv"
mpFreq.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_rev_beforeSortCovEvt.csv"
mpCove.toFile(fname)
mpCove.sortBy([("long", "desc"), ("cov evt", "desc")])
anaCove = mpCove.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseCovEvt.csv"
anaCove.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortCovEvt.csv"
mpCove.toFile(fname)
    # Graph generation
x = list(range(len(anaRandNoRev.results)))
y = list(range(len(anaFreqNoRev.results)))
z = list(range(len(anaCoveNoRev.results)))
xrev = list(range(len(anaRand.results)))
yrev = list(range(len(anaFreq.results)))
zrev = list(range(len(anaCove.results)))
random = []
freq = []
cov = []
randomrev = []
freqrev = []
covrev = []
for elt in anaRandNoRev.results:
random.append(elt["idxMining"])
for elt in anaFreqNoRev.results:
freq.append(elt["idxMining"])
for elt in anaCoveNoRev.results:
cov.append(elt["idxMining"])
for elt in anaRand.results:
randomrev.append(elt["idxMining"])
for elt in anaFreq.results:
freqrev.append(elt["idxMining"])
for elt in anaCove.results:
covrev.append(elt["idxMining"])
pp = PdfPages("DATA/reichertLong.pdf")
plt.figure(1)
plt.plot(x, random, 'r', linestyle="-", label="Random")
plt.plot(y, freq, 'g', linestyle="--", label="Fréq")
plt.plot(z, cov, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(2)
plt.plot(xrev, randomrev, 'r', linestyle="-", label="Random")
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Fréq")
plt.plot(zrev, covrev, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats avec révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(3)
plt.plot(x, random, 'r', linestyle="-", label="Random sans révision")
plt.plot(xrev, randomrev, 'g', linestyle="--", label="Random avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de random avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(4)
plt.plot(y, freq, 'r', linestyle="-", label="Freq sans révision")
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Freq avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de freq avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
plt.figure(5)
plt.plot(z, cov, 'r', linestyle="-", label="Cov evt sans révision")
plt.plot(zrev, covrev, 'g', linestyle="--", label="Cov evt avec révision")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Performances de cov evt avec / sans révision')
plt.legend()
plt.savefig(pp, format="pdf")
pp.close()
def tryAnalyserIbert(mining, expert, nameExpe):
ep = expertPatterns.ExpertPatterns()
ep.getPatterns(expert)
print("Patterns read")
    # With revision
mpFreq = miningPatterns.Patterns(mining, ";", 13, 11)
print("mp Freq ok")
    mpCove = mpFreq  # NOTE: alias, not a copy - the Cov sort below re-sorts this same object
    # miningPatterns.Patterns(mining, ";", 13, 11)
print("mp Cove ok")
# Freq
fname = "DATA/" + nameExpe + "_rev_beforeSortFreq.csv"
mpFreq.toFile(fname)
mpFreq.sortBy([("freq", "desc")])
anaFreq = mpFreq.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseFreq.csv"
anaFreq.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortFreq.csv"
mpFreq.toFile(fname)
# Cov evt
fname = "DATA/" + nameExpe + "_rev_beforeSortCovEvt.csv"
mpCove.toFile(fname)
mpCove.sortBy([("cov evt", "desc")])
anaCove = mpCove.findPatternsWithRevision(ep)
fname = "DATA/" + nameExpe + "_rev_analyseCovEvt.csv"
anaCove.toFile(fname)
fname = "DATA/" + nameExpe + "_rev_afterSortCovEvt.csv"
mpCove.toFile(fname)
    # Graph generation
yrev = list(range(len(anaFreq.results)))
zrev = list(range(len(anaCove.results)))
freqrev = []
covrev = []
for elt in anaFreq.results:
freqrev.append(elt["idxMining"])
for elt in anaCove.results:
covrev.append(elt["idxMining"])
pdfname = "DATA/" + nameExpe + ".pdf"
pp = PdfPages(pdfname)
plt.figure(1)
plt.plot(yrev, freqrev, 'g', linestyle="--", label="Fréq")
plt.plot(zrev, covrev, 'b', linestyle="-.", label="Cov")
plt.xlabel('Itération')
plt.ylabel('Rang du pattern')
plt.title('Comparaison des résultats avec révision')
plt.legend()
plt.savefig(pp, format="pdf")
pp.close()
# File names
debussy = "DATA/debussy_motifs.csv"
debussy_expert = "DATA/Debussy_Syrinx_court.txt"
ibert = "DATA/ibert_400_fouille.csv"
ibert_expert = "DATA/ibert_400.csv"
reichert = "DATA/Reichert_tarentelle_out1.csv"
reichert_expert = "DATA/Reichert_tarentelle_motifs.txt"
dataperso = "DATA/datatest.csv"
dataperso_expert = "DATA/datatest_expert.csv"
mining = ibert
expert = ibert_expert
# tryExpertPatterns(filename)
# tryRawResults(filea, fileb, filec)
# tryResults(filename)
# tryFindPatterns(mining, expert, output)
# tryNewMiningPatterns(filename)
# tryAnalysis(mining, expert, output, output2)
# tryAnalysisWithRemove(mining, expert, outputRev)
# tryComparativeAnalysis(mining, expert, outputStand, outputRev)
# niemeRefacto(mining, expert)
#tryAnalyserLongeur(mining, expert, "reichert_long")
tryAnalyserIbert(mining, expert, "ibert_400")
| 31.36828 | 88 | 0.654726 |
731aaa728a42deca6ff2a33458565b8c47fd81a1
| 680 |
py
|
Python
|
src/aplusb.py
|
ianxin/algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | 2 |
2018-03-13T08:59:14.000Z
|
2018-03-13T08:59:25.000Z
|
src/aplusb.py
|
ianxin/Algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | null | null | null |
src/aplusb.py
|
ianxin/Algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | null | null | null |
class Solution:
"""
@param a: The first integer
@param b: The second integer
@return: The sum of a and b
    XOR = addition without the carries
    AND = just the carry bits (shift left to apply them)
"""
def aplusb(self, a, b):
# write your code here, try to do it without arithmetic operators.
if b == 0:
return a
if a == 0:
return b
while b:
carry = (a&b)<<1
a = a ^ b
b = carry
return a
    # NOTE: this second definition shadows the one above; the mask keeps the
    # loop terminating for negative operands in Python.
    def aplusb(self, a, b):
        # write your code here, try to do it without arithmetic operators.
limit = 0xfffffffff
while b:
a, b = (a ^ b) & limit, (a & b) << 1
return a if a & 1 << 32 == 0 else a | (~limit)
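# Worked example of the carry loop (illustrative values, not from the source):
#   a=5 (0101), b=3 (0011): carry=(a&b)<<1=0010, a=a^b=0110 -> a=6, b=2
#   a=6 (0110), b=2 (0010): carry=0100, a=0100              -> a=4, b=4
#   a=4 (0100), b=4 (0100): carry=1000, a=0000              -> a=0, b=8
#   a=0, b=8 (1000): carry=0, a=1000                        -> returns 8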
| 26.153846 | 74 | 0.492647 |
734577f5486144b835c04b90a22b098fb0f0aaa0
| 4,074 |
py
|
Python
|
test/test_npu/test_network_ops/test_miopen_batch_norm.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-12-02T03:07:35.000Z
|
2021-12-02T03:07:35.000Z
|
test/test_npu/test_network_ops/test_miopen_batch_norm.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-11-12T07:23:03.000Z
|
2021-11-12T08:28:13.000Z
|
test/test_npu/test_network_ops/test_miopen_batch_norm.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestBn(TestCase):
def cpu_op_exec(self, input1, grad_tensor, dim, fun):
input1.requires_grad_(True)
grad_tensor = grad_tensor.to("cpu")
if fun == "1d":
m = torch.nn.BatchNorm1d(dim)
elif fun == "2d":
m = torch.nn.BatchNorm2d(dim)
else:
m = torch.nn.BatchNorm3d(dim)
input_cpu = m(input1)
input_cpu = input_cpu.detach().numpy()
return input_cpu
def npu_op_exec_new(self, input1, grad_tensor, dim, fun):
grad_tensor = grad_tensor.to("npu")
w = torch.ones_like(input1)
w = w.to("npu")
if fun == "1d":
m = torch.nn.BatchNorm1d(dim)
elif fun == "2d":
m = torch.nn.BatchNorm2d(dim)
else:
m = torch.nn.BatchNorm3d(dim)
m = m.to("npu")
input_npu = m(input1)
input_npu = input_npu.to("cpu")
input_npu = input_npu.detach().numpy()
return input_npu
def do_test(self, item, prec, prec16, fun):
cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)
if cpu_input1.dtype == torch.float16:
cpu_input1 = cpu_input1.to(torch.float32)
grad_tensor = torch.randn(item[0][2])
cpu_output = self.cpu_op_exec(cpu_input1, grad_tensor, item[0][2][1], fun)
npu_output = self.npu_op_exec_new(npu_input1, grad_tensor, item[0][2][1], fun)
if (cpu_output.dtype != npu_output.dtype):
cpu_output = cpu_output.astype(npu_output.dtype)
self.assertRtolEqual(cpu_output, npu_output, prec, prec16)
def test_batchnorm_shape_format(self, device):
#pylint:disable=unused-argument
shape_format_1d = [
[[np.float32, 0, [25, 35, 40]]],
[[np.float32, 0, [256, 672, 7]]],
[[np.float32, 0, [256, 288, 14]]],
[[np.float16, 0, [1024, 58, 56]]],
[[np.float16, 0, [1024, 1024, 7]]],
[[np.float16, 0, [1024, 24, 28]]],
]
shape_format_2d = [
[[np.float32, 3, [2, 3, 2, 2]]],
[[np.float32, 3, [256, 672, 7, 7]]],
[[np.float32, 3, [256, 288, 14, 14]]],
[[np.float32, 3, [1024, 58, 28, 28]]],
[[np.float32, 3, [1024, 116, 14, 14]]],
[[np.float32, 3, [1024, 24, 112, 112]]],
[[np.float16, 3, [1024, 58, 56, 56]]],
[[np.float16, 3, [1024, 1024, 7, 7]]],
[[np.float16, 3, [1024, 24, 28, 28]]],
[[np.float16, 3, [1024, 116, 28, 28]]],
[[np.float16, 3, [1024, 232, 7, 7]]],
[[np.float16, 3, [1024, 232, 14, 14]]],
]
shape_format_3d = [
[[np.float32, -1, [2, 3, 2, 2, 5]]],
[[np.float16, -1, [1024, 232, 14, 14, 4]]],
]
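        # Each entry appears to be [dtype, npu tensor-format code, shape] as
        # consumed by create_common_tensor (an assumption - the helper is not
        # shown here); shape_format_3d is defined but no BatchNorm3d case is
        # exercised below.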
# BatchNorm1d ok
for item in shape_format_1d:
self.do_test(item, prec = 0.001, prec16 = 0.01, fun = "1d")
# BatchNorm2d ok
for item in shape_format_2d:
self.do_test(item, prec = 0.001, prec16 = 0.01, fun = "2d")
instantiate_device_type_tests(TestBn, globals(), except_for="cpu")
if __name__ == "__main__":
run_tests()
| 39.173077 | 86 | 0.567256 |
81fdddb56fcd0f33e2dae625cd0539d4e0135f34
| 2,992 |
py
|
Python
|
nets/ious.py
|
Rory-Godwin/FOLO
|
32b4773cec99edc6ce7baff9c113eba4f8dc1d29
|
[
"MIT"
] | null | null | null |
nets/ious.py
|
Rory-Godwin/FOLO
|
32b4773cec99edc6ce7baff9c113eba4f8dc1d29
|
[
"MIT"
] | null | null | null |
nets/ious.py
|
Rory-Godwin/FOLO
|
32b4773cec99edc6ce7baff9c113eba4f8dc1d29
|
[
"MIT"
] | null | null | null |
import math
import tensorflow as tf
from keras import backend as K
def box_ciou(b1, b2):
"""
    Inputs:
----------
b1: tensor, shape=(batch, feat_w, feat_h, anchor_num, 4), xywh
b2: tensor, shape=(batch, feat_w, feat_h, anchor_num, 4), xywh
    Returns:
-------
ciou: tensor, shape=(batch, feat_w, feat_h, anchor_num, 1)
"""
#-----------------------------------------------------------#
    #   compute the top-left / bottom-right corners of the predicted boxes
# b1_mins (batch, feat_w, feat_h, anchor_num, 2)
# b1_maxes (batch, feat_w, feat_h, anchor_num, 2)
#-----------------------------------------------------------#
b1_xy = b1[..., :2]
b1_wh = b1[..., 2:4]
b1_wh_half = b1_wh/2.
b1_mins = b1_xy - b1_wh_half
b1_maxes = b1_xy + b1_wh_half
#-----------------------------------------------------------#
    #   compute the top-left / bottom-right corners of the ground-truth boxes
# b2_mins (batch, feat_w, feat_h, anchor_num, 2)
# b2_maxes (batch, feat_w, feat_h, anchor_num, 2)
#-----------------------------------------------------------#
b2_xy = b2[..., :2]
b2_wh = b2[..., 2:4]
b2_wh_half = b2_wh/2.
b2_mins = b2_xy - b2_wh_half
b2_maxes = b2_xy + b2_wh_half
#-----------------------------------------------------------#
    #   compute the IoU between the ground-truth and predicted boxes
# iou (batch, feat_w, feat_h, anchor_num)
#-----------------------------------------------------------#
intersect_mins = K.maximum(b1_mins, b2_mins)
intersect_maxes = K.minimum(b1_maxes, b2_maxes)
intersect_wh = K.maximum(intersect_maxes - intersect_mins, 0.)
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
b1_area = b1_wh[..., 0] * b1_wh[..., 1]
b2_area = b2_wh[..., 0] * b2_wh[..., 1]
union_area = b1_area + b2_area - intersect_area
iou = intersect_area / K.maximum(union_area, K.epsilon())
#-----------------------------------------------------------#
    #   compute the (squared) distance between the two box centers
# center_distance (batch, feat_w, feat_h, anchor_num)
#-----------------------------------------------------------#
center_distance = K.sum(K.square(b1_xy - b2_xy), axis=-1)
enclose_mins = K.minimum(b1_mins, b2_mins)
enclose_maxes = K.maximum(b1_maxes, b2_maxes)
enclose_wh = K.maximum(enclose_maxes - enclose_mins, 0.0)
#-----------------------------------------------------------#
    #   compute the (squared) diagonal length of the enclosing box
# enclose_diagonal (batch, feat_w, feat_h, anchor_num)
#-----------------------------------------------------------#
enclose_diagonal = K.sum(K.square(enclose_wh), axis=-1)
ciou = iou - 1.0 * (center_distance) / K.maximum(enclose_diagonal ,K.epsilon())
v = 4 * K.square(tf.math.atan2(b1_wh[..., 0], K.maximum(b1_wh[..., 1], K.epsilon())) - tf.math.atan2(b2_wh[..., 0], K.maximum(b2_wh[..., 1],K.epsilon()))) / (math.pi * math.pi)
alpha = v / K.maximum((1.0 - iou + v), K.epsilon())
ciou = ciou - alpha * v
ciou = K.expand_dims(ciou, -1)
ciou = tf.where(tf.is_nan(ciou), tf.zeros_like(ciou), ciou)
return ciou
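# In one line: ciou = IoU - rho^2(c1, c2) / c_diag^2 - alpha * v, where v
# penalises aspect-ratio mismatch. A minimal shape check (illustrative only):
#   b1 = K.random_uniform((2, 13, 13, 3, 4))
#   b2 = K.random_uniform((2, 13, 13, 3, 4))
#   box_ciou(b1, b2)  # -> tensor of shape (2, 13, 13, 3, 1)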
| 40.432432 | 180 | 0.476604 |
c31f4583cc34ede994f2f4341defabb4a0146a81
| 1,634 |
py
|
Python
|
research/audio/fcn-4/preprocess.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/audio/fcn-4/preprocess.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/audio/fcn-4/preprocess.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
'''
preprocess
'''
import os
import numpy as np
from src.model_utils.config import config
from src.dataset import create_dataset
def get_bin():
"""
generate bin files.
"""
data_train = create_dataset(config.data_dir, config.val_filename, config.batch_size, ['feature', 'label'],
config.num_consumer)
data_train = data_train.create_tuple_iterator(output_numpy=True)
res_true = []
i = 0
data_path = os.path.join(config.pre_result_path, "00_data")
os.makedirs(data_path)
for data, label in data_train:
file_name = "fcn4_bs" + str(config.batch_size) + "_" + str(i) + ".bin"
file_path = os.path.join(data_path, file_name)
data.tofile(file_path)
res_true.append(label)
i = i + 1
np.save(os.path.join(config.pre_result_path, "label_ids.npy"), res_true)
print("=" * 20, "export bin files finished", "=" * 20)
if __name__ == "__main__":
get_bin()
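# Output layout produced by get_bin(): one fcn4_bs<batch_size>_<i>.bin file per
# batch under <pre_result_path>/00_data, plus label_ids.npy holding the
# matching labels in the same order.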
| 34.041667 | 110 | 0.657283 |
c32e87b18e002e86ddc11328546622ce2dd8b27f
| 9,356 |
py
|
Python
|
Packs/HealthCheck/Scripts/HealthCheckServerLog/HealthCheckServerLog.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/HealthCheck/Scripts/HealthCheckServerLog/HealthCheckServerLog.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/HealthCheck/Scripts/HealthCheckServerLog/HealthCheckServerLog.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import re
def findOldestDate(incidentDate, newDate):
incidentDate = datetime.strptime(incidentDate, "%Y-%m-%d %H:%M:%S")
newDate = datetime.strptime(newDate, "%Y-%m-%d %H:%M:%S")
return min([incidentDate, newDate])
def findNewestDate(incidentDate, newDate):
incidentDate = datetime.strptime(incidentDate, "%Y-%m-%d %H:%M:%S")
newDate = datetime.strptime(newDate, "%Y-%m-%d %H:%M:%S")
return max([incidentDate, newDate])
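# e.g. findOldestDate("2021-01-02 08:00:00", "2021-01-01 12:00:00")
#      -> datetime(2021, 1, 1, 12, 0)   (illustrative values, not from the source)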
context = demisto.context()
suggestions = []
knownerrors = [
{
"Got permission denied while trying to connect to the Docker daemon socket at unix:///var/run/docker.sock: Get": [
"Error Found: `Got permission denied while trying to connect to the Docker daemon socket at unix`",
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA14u000000HB4oCAG"
]
},
{
'[Errno 13] Permission denied:': [
'Error Found: `[Errno 13] Permission denied`',
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA14u000000HB4ZCAW"
]
},
{
'config.json: permission denied': [
'Error Found: `config.json: permission denied`',
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA14u000000HB4tCAG"
]
},
{
'Error response from daemon: OCI runtime create failed:': [
'Error Found: `Error response from daemon: OCI runtime create failed`',
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA14u000000HB4eCAG"
]
},
{
'proxyconnect tcp: tls: oversized record received with length 20527': [
'Error Found: `proxyconnect tcp: tls: oversized record received with length 20527`',
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA10g000000PNhpCAG"
]
},
{
"error: websocket: not a websocket handshake: 'upgrade' token not found in 'Connection' header": [
'Error Found: `websocket: not a websocket handshake: upgrade token not found in Connection header`',
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA10g000000PNiOCAW"
]
},
{
"Create more free space in thin pool or use dm.min_free_space": [
'Error Found: `Create more free space in thin pool or use dm.min_free_space`',
"Please refer to https://knowledgebase.paloaltonetworks.com/KCSArticleDetail?id=kA10g000000PNhQCAW"
]
},
{
"in pool reached high watermark": [
"Error Found: `amount of active containers in pool reached high watermark`",
"Check and increase high watermark for docker: https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-0/"
"cortex-xsoar-admin/cortex-xsoar-overview/performance-tuning-of-cortex-xsoar-server"
]
},
{
"no space left on device": [
"Error Found: `no space left on device`",
"Free up Disk Space with Data Archiving: https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-0/"
"cortex-xsoar-admin/manage-data/free-up-disc-space-with-data-archiving"
]
},
{
"ImportError: No module named": [
"Error Found: `ImportError: No module named`",
"Python environment missing dependency or docker image outdated."
]
},
{
"(error: websocket: close 1006 (abnormal closure): unexpected EOF)": [
" Error Found: `error: websocket: close 1006 (abnormal closure): unexpected EOF`",
"WebSocket Configuration: https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-1/cortex-xsoar-admin/installation/"
"post-installation-checklist/websocket-configuration.html#idee004eaa-34d9-41a1-a8d0-aba3bf9f91bb"
]
},
{
"fatal error: runtime: out of memory": [
"Error Found: `fatal error: runtime: out of memory.`",
"Performance Tuning of Cortex XSOAR Server: https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-0/"
"cortex-xsoar-admin/cortex-xsoar-overview/performance-tuning-of-cortex-xsoar-server"
]
},
{
"error Wrong schedule format": [
"Error Found: `error Wrong schedule format`",
"Change jobs.serverSiemIncidents.schedule=<time in minutes> to Xm. for example 5 minuets should be 5m"
]
},
{
"error Failed on ensure function for": [
"Error Found: `error Failed on ensure function for`",
"Reindex the Entire Database: "
"https://docs.paloaltonetworks.com/cortex/cortex-xsoar/6-0/cortex-xsoar-admin/manage-data/reindex-the-database"
]
},
{
"Version didnt change": [
"Error Found: `Version didnt change`",
"Upgrade used an older version, Re-run the upgrade with the latest version."
]
},
{
"layout-edit-.json: invalid argument": [
"Error Found: `layout-edit-.json: invalid argument`",
"Please contact customer support"
]
},
{
"error: unsupported mode": [
"Error Found: `error: unsupported mode`",
"Remove old index files under /usr/local/demisto/dist. and do a hard refresh in the browser. "
"No service restart needed"
]
}
]
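# Each knownerrors entry maps a literal log substring to a
# [description, resolution] pair; matches found while scanning the log are
# collected into `suggestions` and emitted as actionable items at the end.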
res = []
context_since = context.get('LogServer', {}).get('since')
since = log_until = restartcount = None
context_log_until = context.get('LogServer', {}).get('logUntil')
context_restartcount = context.get('LogServer', {}).get('restartCount')
path = demisto.executeCommand('getFilePath', {'id': demisto.args()['entryID']})
if path[0]['Type'] == entryTypes['error']:
demisto.results('File not found')
else:
try:
with open(path[0]['Contents']['path'], 'r') as f:
data_line = f.readlines()
# find Since and find knownErrors
for line in data_line:
if 'good luck' in line:
if (context_restartcount is None) and (restartcount is None):
restartcount = 1
elif (context_restartcount is not None) and (restartcount is None):
restartcount = int(context_restartcount)
restartcount += 1
elif (context_restartcount is not None) and (restartcount is not None):
restartcount += 1
for item in knownerrors:
for (err, suggest) in item.items():
if err in line:
if suggest not in suggestions:
suggestions.append(suggest)
if (context_since is None) and (since is None):
                since = re.findall(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})', line)
oldestDate = since[0]
continue
elif (context_since is not None) and (since is None):
since = re.findall('(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})', line)
oldestDate = findOldestDate(since[0], context_since)
continue
else:
continue
# find Last Log
            for line in reversed(data_line):
                log_until = re.findall(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})', line)
                if not log_until:
                    log_until = None
                    continue
                newestDate = log_until[0]
                if context_log_until is not None:
                    newestDate = findNewestDate(log_until[0], context_log_until)
                break
            else:
                # No timestamp found in the whole file: fall back to context.
                oldestDate = context_since
                newestDate = context_log_until
demisto.setContext("LogServer.since", str(oldestDate))
demisto.setContext("LogServer.logUntil", str(newestDate))
demisto.setContext("LogServer.restartCount", restartcount)
demisto.executeCommand("setIncident", {"healthcheckrestartcount": restartcount,
"healthchecklogsince": str(oldestDate),
"healthcheckloguntil": str(newestDate)})
if suggestions:
for entry in suggestions:
res.append({"category": "Log Analysis", "severity": "High", "description": entry[0], "resolution": entry[1]})
results = CommandResults(
readable_output="HealthCheckServerLog Done",
outputs_prefix="HealthCheck.ActionableItems",
outputs=res)
return_results(results)
except UnicodeDecodeError:
demisto.results("Could not read file")
| 43.924883 | 129 | 0.583048 |
82d00165d25409b457fbce26aedc9099e84a9d45
| 4,901 |
py
|
Python
|
20-hs-redez-sem/groups/02-unionDir/filesystem-redez-client/browser/create.py
|
Kyrus1999/BACnet
|
5be8e1377252166041bcd0b066cce5b92b077d06
|
[
"MIT"
] | 8 |
2020-03-17T21:12:18.000Z
|
2021-12-12T15:55:54.000Z
|
20-hs-redez-sem/groups/02-unionDir/filesystem-redez-client/browser/create.py
|
Kyrus1999/BACnet
|
5be8e1377252166041bcd0b066cce5b92b077d06
|
[
"MIT"
] | 2 |
2021-07-19T06:18:43.000Z
|
2022-02-10T12:17:58.000Z
|
20-hs-redez-sem/groups/02-unionDir/filesystem-redez-client/browser/create.py
|
Kyrus1999/BACnet
|
5be8e1377252166041bcd0b066cce5b92b077d06
|
[
"MIT"
] | 25 |
2020-03-20T09:32:45.000Z
|
2021-07-18T18:12:59.000Z
|
'''
Project Imports
'''
from utils import color
from pathlib import Path
import random
'''
System Imports
'''
import os
import json
'''
Description:
Creates a welcoming message for the user upon starting up the program.
'''
def logo():
line = " `:///// ://///: \n"
line += " `+dmmmmmms `mmmmmmo .` \n"
line += " `dmmmmmmmm. +mmmmmm- od` \n"
line += " smmmmmmmmy dmmmmmm hms \n"
line += " hmmmmmmmm- :mmmmmms `mmh \n"
line += " hmmmmmmm: `mmmmmmm` +mmh \n"
line += " hmmmmmmm: `mmmmmmm` +mmh \n"
line += " hmmmmmmh +mmmmmmy ymmh \n"
line += " hmmmmmm+ ymmmmmm- `mmmh \n"
line += " hmmmmmm: /mmmmd/ ommmh \n"
line += " hmmmmmm/ .::- :mmmmh \n"
line += " smmmmmmd` +mmmmms \n"
line += " `hmmmmmmd/` .+dmmmmmh` \n"
line += " /hmmmmmmmhyssyhmmmmmmmh/ \n"
line += " `-://////////////:-` \n"
line = color.purple(line)
return line
def welcome():
msg = color.cyan("############################################################################\n")
msg += color.cyan("# ~ {}{}\n").format(
color.purple("Redecentralised Filesystem"), color.cyan(" ~ #"))
msg += color.cyan("# #\n")
msg += color.cyan("# - Establish connection with to file server with {} {}\n").format(
color.yellow("register <IP> <Name>"), color.cyan("#"))
msg += color.cyan("# - Enter {} {}\n").format(color.yellow("name"), color.cyan(
"of known server #"))
msg += color.cyan("# - List names of all known servers with {} {}\n").format(
color.yellow("serverlist"), color.cyan("#"))
msg += color.cyan("# - Exit program with {} {}\n").format(
color.yellow("quit"), color.cyan("#"))
msg += color.cyan("############################################################################\n")
return msg
def thank():
names = ['Ken Rotaris', 'Tunahan Erbay', 'Leonardo Salsi']
random.shuffle(names) # names in random order
names[0] = color.bold(color.red(names[0]))
names[1] = color.bold(color.greenDark(names[1]))
names[2] = color.bold(color.yellow(names[2]))
random.shuffle(names)
msg = '\n Thanks for using our Application!\n Made with ' + color.bold(
color.redLight('❤')) + ' by: {0}, {1}, {2}\n'.format(names[0], names[1], names[2])
return msg
def config_folder():
home = str(Path.home())
dir = ".filesystem_config"
path = os.path.join(home, dir)
if not os.path.exists(path):
os.mkdir(path)
return path
def serverlist_file(user, fsrootpath):
filename = "servers.json"
path = config_folder()
file = os.path.join(path, filename)
fspath = os.path.join(fsrootpath, "localhost")
if not os.path.exists(fspath):
os.mkdir(fspath)
if not os.path.isfile(file):
config_file = open(file, "w+")
json.dump({}, config_file, indent=4)
config_file.close()
return file
def content_file(fsrootpath, hash):
path = os.path.join(fsrootpath, "{}.json".format(hash))
if not os.path.exists(path):
content_file = open(path, "w+")
json.dump({}, content_file, indent=4)
content_file.close()
return path
def filesystem():
home = str(Path.home())
dir = ".filesystem"
path = os.path.join(home, dir)
if not os.path.exists(path):
os.mkdir(path)
return path
def serverlist(config_file):
config_json = open(config_file, "r")
return json.load(config_json)
def save_server_info(config_file, ip, name, hash, fsrootpath):
config_json = open(config_file, "r")
serverlist = json.load(config_json)
fspath = os.path.join(fsrootpath, name)
if not os.path.exists(fspath):
os.mkdir(fspath)
config_json.close()
serverlist.update({name: {"ip": ip, "hash":hash, "path": fspath, "mounts": []}})
config_json = open(config_file, "w")
json.dump(serverlist, config_json, indent=4)
config_json.close()
| 41.184874 | 103 | 0.457866 |
81c499c451786e2f2cdea24b4caec99990c9f12b
| 7,144 |
py
|
Python
|
examples/simultaneous_translation/stacl/reader.py
|
mukaiu/PaddleNLP
|
0315365dbafa6e3b1c7147121ba85e05884125a5
|
[
"Apache-2.0"
] | null | null | null |
examples/simultaneous_translation/stacl/reader.py
|
mukaiu/PaddleNLP
|
0315365dbafa6e3b1c7147121ba85e05884125a5
|
[
"Apache-2.0"
] | null | null | null |
examples/simultaneous_translation/stacl/reader.py
|
mukaiu/PaddleNLP
|
0315365dbafa6e3b1c7147121ba85e05884125a5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from paddle.io import DataLoader
from paddlenlp.data import Vocab, Pad
from paddlenlp.data.sampler import SamplerHelper
from paddlenlp.datasets import load_dataset
def read(src_tgt_file, only_src=False):
with open(src_tgt_file, 'r', encoding='utf8') as src_tgt_f:
for line in src_tgt_f:
line = line.strip('\n')
if not line:
continue
line_split = line.split('\t')
if only_src:
yield {"src": line_split[0]}
else:
if len(line_split) != 2:
continue
yield {"src": line_split[0], "trg": line_split[1]}
def min_max_filer(data, max_len, min_len=0):
# 1 for special tokens.
data_min_len = min(len(data[0]), len(data[1])) + 1
data_max_len = max(len(data[0]), len(data[1])) + 1
return (data_min_len >= min_len) and (data_max_len <= max_len)
def create_data_loader(args, places=None):
data_files = {'train': args.training_file, 'dev': args.validation_file}
datasets = [
load_dataset(read, src_tgt_file=filename, lazy=False)
for split, filename in data_files.items()
]
src_vocab = Vocab.load_vocabulary(args.src_vocab_fpath,
bos_token=args.special_token[0],
eos_token=args.special_token[1],
unk_token=args.special_token[2])
trg_vocab = Vocab.load_vocabulary(args.trg_vocab_fpath,
bos_token=args.special_token[0],
eos_token=args.special_token[1],
unk_token=args.special_token[2])
args.src_vocab_size = len(src_vocab)
args.trg_vocab_size = len(trg_vocab)
def convert_samples(sample):
source = [item.strip() for item in sample['src'].split()]
target = [item.strip() for item in sample['trg'].split()]
source = src_vocab.to_indices(source) + [args.eos_idx]
target = [args.bos_idx] + \
trg_vocab.to_indices(target) + [args.eos_idx]
return source, target
    data_loaders = [None] * 2
for i, dataset in enumerate(datasets):
dataset = dataset.map(convert_samples, lazy=False).filter(
partial(min_max_filer, max_len=args.max_length))
sampler = SamplerHelper(dataset)
if args.sort_type == SortType.GLOBAL:
src_key = (lambda x, data_source: len(data_source[x][0]))
trg_key = (lambda x, data_source: len(data_source[x][1]))
# Sort twice
sampler = sampler.sort(key=trg_key).sort(key=src_key)
else:
if args.shuffle:
sampler = sampler.shuffle(seed=args.random_seed)
max_key = (lambda x, data_source: max(len(data_source[x][0]),
len(data_source[x][1])))
if args.sort_type == SortType.POOL:
sampler = sampler.sort(key=max_key, buffer_size=args.pool_size)
        # Token-based batching: a batch's effective size is the length of its
        # longest source or target sample seen so far.
        batch_size_fn = lambda new, count, sofar, data_source: max(
            sofar, len(data_source[new][0]), len(data_source[new][1]))
batch_sampler = sampler.batch(
batch_size=args.batch_size,
drop_last=False,
batch_size_fn=batch_size_fn,
key=lambda size_so_far, minibatch_len: size_so_far * minibatch_len)
if args.shuffle_batch:
batch_sampler = batch_sampler.shuffle(seed=args.random_seed)
if i == 0:
batch_sampler = batch_sampler.shard()
data_loader = DataLoader(dataset=dataset,
places=places,
batch_sampler=batch_sampler,
collate_fn=partial(prepare_train_input,
pad_idx=args.bos_idx),
num_workers=0)
data_loaders[i] = (data_loader)
return data_loaders
def create_infer_loader(args, places=None):
data_files = {
'test': args.predict_file,
}
dataset = load_dataset(read,
src_tgt_file=data_files['test'],
only_src=True,
lazy=False)
src_vocab = Vocab.load_vocabulary(args.src_vocab_fpath,
bos_token=args.special_token[0],
eos_token=args.special_token[1],
unk_token=args.special_token[2])
trg_vocab = Vocab.load_vocabulary(args.trg_vocab_fpath,
bos_token=args.special_token[0],
eos_token=args.special_token[1],
unk_token=args.special_token[2])
args.src_vocab_size = len(src_vocab)
args.trg_vocab_size = len(trg_vocab)
def convert_samples(sample):
source = [item.strip() for item in sample['src'].split()]
source = src_vocab.to_indices(source) + [args.eos_idx]
target = [args.bos_idx]
return source, target
dataset = dataset.map(convert_samples, lazy=False)
batch_sampler = SamplerHelper(dataset).batch(batch_size=args.batch_size,
drop_last=False)
data_loader = DataLoader(dataset=dataset,
places=places,
batch_sampler=batch_sampler,
collate_fn=partial(prepare_infer_input,
pad_idx=args.bos_idx),
num_workers=0,
return_list=True)
return data_loader, trg_vocab.to_tokens
def prepare_train_input(insts, pad_idx):
"""
Put all padded data needed by training into a list.
"""
word_pad = Pad(pad_idx)
src_word = word_pad([inst[0] for inst in insts])
trg_word = word_pad([inst[1][:-1] for inst in insts])
lbl_word = word_pad([inst[1][1:] for inst in insts])
data_inputs = [src_word, trg_word, lbl_word]
return data_inputs
def prepare_infer_input(insts, pad_idx):
"""
Put all padded data needed by beam search decoder into a list.
"""
word_pad = Pad(pad_idx)
src_word = word_pad([inst[0] for inst in insts])
return [
src_word,
]
class SortType(object):
GLOBAL = 'global'
POOL = 'pool'
NONE = "none"
| 37.015544 | 79 | 0.577548 |
6fb6a51e63681e0aa9afefd9a9456710227cf8d1
| 2,911 |
py
|
Python
|
project/api/event_list/schemas.py
|
DanielGrams/gsevp
|
e94034f7b64de76f38754b56455e83092378261f
|
[
"MIT"
] | 1 |
2021-06-01T14:49:18.000Z
|
2021-06-01T14:49:18.000Z
|
project/api/event_list/schemas.py
|
DanielGrams/gsevp
|
e94034f7b64de76f38754b56455e83092378261f
|
[
"MIT"
] | 286 |
2020-12-04T14:13:00.000Z
|
2022-03-09T19:05:16.000Z
|
project/api/event_list/schemas.py
|
DanielGrams/gsevpt
|
a92f71694388e227e65ed1b24446246ee688d00e
|
[
"MIT"
] | null | null | null |
from marshmallow import fields, pre_dump, validate
from project.api import marshmallow
from project.api.event.schemas import EventWriteIdSchema
from project.api.organization.schemas import OrganizationRefSchema
from project.api.schemas import (
IdSchemaMixin,
PaginationRequestSchema,
PaginationResponseSchema,
SQLAlchemyBaseSchema,
TrackableSchemaMixin,
WriteIdSchemaMixin,
)
from project.models import EventList
class EventListModelSchema(SQLAlchemyBaseSchema):
class Meta:
model = EventList
load_instance = True
class EventListIdSchema(EventListModelSchema, IdSchemaMixin):
pass
class EventListWriteIdSchema(EventListModelSchema, WriteIdSchemaMixin):
pass
class EventListBaseSchemaMixin(TrackableSchemaMixin):
name = marshmallow.auto_field(
required=True, validate=validate.Length(min=3, max=255)
)
class EventListSchema(EventListIdSchema, EventListBaseSchemaMixin):
organization = fields.Nested(OrganizationRefSchema, attribute="adminunit")
class EventListRefSchema(EventListIdSchema):
name = marshmallow.auto_field()
class EventListListRequestSchema(PaginationRequestSchema):
name = fields.Str(
metadata={"description": "Looks for name."},
)
class EventListListResponseSchema(PaginationResponseSchema):
items = fields.List(
fields.Nested(EventListRefSchema), metadata={"description": "Event lists"}
)
class EventListStatusSchema(marshmallow.Schema):
event_list = fields.Nested(EventListRefSchema)
contains_event = fields.Boolean(
required=True, metadata={"description": "True if list contains event."}
)
@pre_dump(pass_many=True)
def unwrap_tuple(self, data, many, **kwargs):
return {"event_list": data[0], "contains_event": data[1] > 0}
class EventListStatusListResponseSchema(PaginationResponseSchema):
items = fields.List(
fields.Nested(EventListStatusSchema),
metadata={"description": "Event list stati"},
)
class EventListWriteSchemaMixin(object):
pass
class EventListCreateRequestSchema(
EventListModelSchema,
EventListBaseSchemaMixin,
EventListWriteSchemaMixin,
):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.make_post_schema()
class EventListUpdateRequestSchema(
EventListModelSchema,
EventListBaseSchemaMixin,
):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.make_post_schema()
class EventListPatchRequestSchema(
EventListModelSchema,
EventListBaseSchemaMixin,
):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.make_patch_schema()
class EventListEventRequestSchema(marshmallow.Schema):
event = fields.Nested(
EventWriteIdSchema,
required=True,
metadata={"description": "Event."},
)
| 25.761062 | 82 | 0.732051 |
82f6751e314f2754a1a262f0744dbdda133f43c8
| 24 |
py
|
Python
|
addition_module/DMUE/preprocess/mtcnn/__init__.py
|
weihaoxie/FaceX-Zoo
|
db0b087e4f4d28152e172d6c8d3767a8870733b4
|
[
"Apache-2.0"
] | 1,329 |
2021-01-13T07:06:30.000Z
|
2022-03-31T07:23:39.000Z
|
addition_module/DMUE/preprocess/mtcnn/__init__.py
|
weihaoxie/FaceX-Zoo
|
db0b087e4f4d28152e172d6c8d3767a8870733b4
|
[
"Apache-2.0"
] | 115 |
2021-01-13T10:42:57.000Z
|
2022-03-28T03:57:52.000Z
|
addition_module/DMUE/preprocess/mtcnn/__init__.py
|
weihaoxie/FaceX-Zoo
|
db0b087e4f4d28152e172d6c8d3767a8870733b4
|
[
"Apache-2.0"
] | 351 |
2021-01-13T07:21:00.000Z
|
2022-03-29T14:11:39.000Z
|
from .mtcnn import MTCNN
| 24 | 24 | 0.833333 |
737da20d9a6420090511fbbd8929883888e8480a
| 1,734 |
py
|
Python
|
tests/test_geraeteeigenschaften.py
|
bo4e/BO4E-python
|
28b12f853c8a496d14b133759b7aa2d6661f79a0
|
[
"MIT"
] | 1 |
2022-03-02T12:49:44.000Z
|
2022-03-02T12:49:44.000Z
|
tests/test_geraeteeigenschaften.py
|
bo4e/BO4E-python
|
28b12f853c8a496d14b133759b7aa2d6661f79a0
|
[
"MIT"
] | 21 |
2022-02-04T07:38:46.000Z
|
2022-03-28T14:01:53.000Z
|
tests/test_geraeteeigenschaften.py
|
bo4e/BO4E-python
|
28b12f853c8a496d14b133759b7aa2d6661f79a0
|
[
"MIT"
] | null | null | null |
import pytest # type:ignore[import]
from bo4e.com.geraeteeigenschaften import Geraeteeigenschaften, GeraeteeigenschaftenSchema
from bo4e.enum.geraetemerkmal import Geraetemerkmal
from bo4e.enum.geraetetyp import Geraetetyp
from tests.serialization_helper import assert_serialization_roundtrip # type:ignore[import]
example_geraeteeigenschaften = Geraeteeigenschaften(
geraetemerkmal=Geraetemerkmal.GAS_G1000, geraetetyp=Geraetetyp.MULTIPLEXANLAGE
)
class TestGeraeteeigenschaften:
@pytest.mark.parametrize(
"geraeteeigenschaften, expected_json_dict",
[
pytest.param(
example_geraeteeigenschaften,
{"geraetemerkmal": "GAS_G1000", "geraetetyp": "MULTIPLEXANLAGE"},
),
],
)
def test_serialization_roundtrip(self, geraeteeigenschaften: Geraeteeigenschaften, expected_json_dict: dict):
"""
Test de-/serialisation of Geraeteeigenschaften
"""
assert_serialization_roundtrip(geraeteeigenschaften, GeraeteeigenschaftenSchema(), expected_json_dict)
def test_missing_required_attribute(self):
with pytest.raises(TypeError) as excinfo:
_ = Geraeteeigenschaften()
assert "missing 1 required" in str(excinfo.value)
@pytest.mark.parametrize(
"not_a_geraetetyp",
[
pytest.param(17), # not a geraetetyp
pytest.param("foo"), # not a geraetetyp
],
)
def test_failing_validation(self, not_a_geraetetyp):
with pytest.raises(TypeError) as excinfo:
_ = Geraeteeigenschaften(geraetemerkmal=Geraetemerkmal.GAS_G1000, geraetetyp=not_a_geraetetyp)
assert "'geraetetyp' must be " in str(excinfo.value)
| 36.893617 | 113 | 0.709343 |
737ede7364f745e346abeac81cd8f69fa78c402c
| 3,705 |
py
|
Python
|
Embedded/RaspberryPi/main.py
|
BlaCkinkGJ/SFSH
|
0134f1e4698ef34caee2d5a8cd875c51507b3527
|
[
"MIT"
] | 1 |
2019-02-28T08:39:55.000Z
|
2019-02-28T08:39:55.000Z
|
Embedded/RaspberryPi/main.py
|
BlaCkinkGJ/SFSH
|
0134f1e4698ef34caee2d5a8cd875c51507b3527
|
[
"MIT"
] | 5 |
2018-07-17T13:09:34.000Z
|
2018-09-11T13:55:33.000Z
|
Embedded/RaspberryPi/main.py
|
BlaCkinkGJ/SFSH
|
0134f1e4698ef34caee2d5a8cd875c51507b3527
|
[
"MIT"
] | 2 |
2019-05-17T03:07:08.000Z
|
2022-01-01T07:04:31.000Z
|
#!/usr/bin/python3
import Detector as det, Connector
import sys
import re
import os
from xml.etree.ElementTree import Element, SubElement, dump, ElementTree, parse
def DataProcessing(con, sleep, account):
if account is None or con is None or sleep is None:
return -1
try:
con.pushToSerial(sleep)
con.pushToTarget(account['id'], account['name'], sleep)
    except (ValueError, KeyboardInterrupt):
con.serial.setData(con.serial.ALERT_ON)
return 0
MY_IP = '127.0.0.1'
MY_PORT = 3000
def EyeDetection(account):
global MY_IP, MY_PORT
sleep = False
counter = 0
# AWS Public Key
con = Connector.Connector(ip = MY_IP, port=MY_PORT, method=Connector.CONNECTED_TCP)
print("Connected to "+MY_IP+':'+str(MY_PORT))
try:
print("start")
eye = det.Eye(cascade_path="./opencv/haarcascades/haarcascade_eye_tree_eyeglasses.xml")
for frame in eye.getFrame():
eye.analyze(frame)
percent = float(eye.sleepPercentage())
if percent > 0.8 : counter += 1
else : counter = 0
if counter > 15 : sleep = True
            # This statement is just a test statement
elif counter == 0 : sleep = False
if DataProcessing(con, sleep, account) == -1:
return -1
except KeyboardInterrupt:
print("end")
return 0
def command(opcode, operand):
global MY_IP, MY_PORT
returnVal = -1
if type(opcode) == str and type(operand) == str:
if opcode == '--host':
            ipRegex = re.compile(r'^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})$')
result = ipRegex.search(operand)
if result is not None:
MY_IP = result.group()
returnVal = 0
else:
print("[--host] You have to write IPv4(xxx.xxx.xxx.xxx)")
elif opcode == '--port':
if operand.isdecimal():
MY_PORT = int(operand)
returnVal = 0
else:
print("[--port] Decimal only command")
return returnVal
def operateCommand(argv, argc):
commandStatus = 0
if argc == 3:
commandStatus |= command(argv[1], argv[2])
elif argc == 5:
commandStatus |= command(argv[1], argv[2])
commandStatus |= command(argv[3], argv[4])
else:
commandStatus |= -1
return commandStatus
def getAccount():
result = {
'id': None,
'name': None
}
if not os.path.exists('data.xml'):
data = Element("LoginData")
while True:
            result['id'] = input("Please enter your ID: ")
            result['name'] = input("Please enter your name: ")
            if result['id'].isdecimal():
                break
            else:
                print("Invalid ID value. Please try again.")
SubElement(data, "id").text = result['id']
SubElement(data, "name").text = result['name']
ElementTree(data).write("data.xml")
else:
tree = parse("data.xml")
data = tree.getroot()
result['id'] = data.findtext("id")
result['name'] = data.findtext("name")
if result['id'] is not None:
result['id'] = int(result['id'])
return result
if __name__=="__main__":
if operateCommand(sys.argv, len(sys.argv)) == -1:
if len(sys.argv) > 1:
print('''[ Invalid Command ]
You have to use like
python main.py (--host 127.0.0.1) (--port 3000)\n''')
print("Operates in Default Setting")
MY_IP = '127.0.0.1'
MY_PORT = 3000
account = getAccount()
if account['id'] is not None and account['name'] is not None:
EyeDetection(account)
| 28.5 | 95 | 0.556005 |
fb5eab4e0575f5a805252e1bab21b12f747913e4
| 5,621 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/patches/v6_12/repost_entries_with_target_warehouse.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
frappe-bench/apps/erpnext/erpnext/patches/v6_12/repost_entries_with_target_warehouse.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/patches/v6_12/repost_entries_with_target_warehouse.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import print_function, unicode_literals
import frappe
"""
This patch is written to fix Stock Ledger Entries and GL Entries
against Delivery Notes and Sales Invoice where Target Warehouse has been set wrongly
due to User Permissions on Warehouse.
This cannot be run automatically because we cannot tell whether
Target Warehouse was set purposefully or by mistake.
That's why we leave it to the users to make that call and run the patch manually.
This patch has 2 main functions, `check()` and `repost()`.
- Run `check` function, to list out all the Sales Orders, Delivery Notes
and Sales Invoice with Target Warehouse.
- Run `repost` function to remove the Target Warehouse value and repost SLE and GLE again.
To execute this patch run following commands from frappe-bench directory:
```
bench --site [your-site-name] execute erpnext.patches.v6_12.repost_entries_with_target_warehouse.check
bench --site [your-site-name] backup
bench --site [your-site-name] execute erpnext.patches.v6_12.repost_entries_with_target_warehouse.repost
```
Exception Handling:
While reposting, if you get any exception, it will be printed on screen.
Mostly it is due to a negative stock issue. If that is the case, follow these steps:
    - Ensure that stock is available for those items in the mentioned warehouse on the date mentioned in the error
    - Execute the `repost` function again
"""
def check():
so_list = get_affected_sales_order()
dn_list = get_affected_delivery_notes()
si_list = get_affected_sales_invoice()
if so_list or dn_list or si_list:
print("Entries with Target Warehouse:")
if so_list:
print("Sales Order")
print(so_list)
if dn_list:
print("Delivery Notes")
print([d.name for d in dn_list])
if si_list:
print("Sales Invoice")
print([d.name for d in si_list])
def repost():
dn_failed_list, si_failed_list = [], []
repost_dn(dn_failed_list)
repost_si(si_failed_list)
repost_so()
frappe.db.commit()
if dn_failed_list:
print("-"*40)
print("Delivery Note Failed to Repost")
print(dn_failed_list)
if si_failed_list:
print("-"*40)
print("Sales Invoice Failed to Repost")
print(si_failed_list)
print()
print("""
If above Delivery Notes / Sales Invoice failed due to negative stock, follow these steps:
- Ensure that stock is available for those items in the mentioned warehouse on the date mentioned in the error
- Run this patch again
""")
def repost_dn(dn_failed_list):
dn_list = get_affected_delivery_notes()
if dn_list:
print("-"*40)
print("Reposting Delivery Notes")
for dn in dn_list:
if dn.docstatus == 0:
continue
print(dn.name)
try:
dn_doc = frappe.get_doc("Delivery Note", dn.name)
dn_doc.docstatus = 2
dn_doc.update_prevdoc_status()
dn_doc.update_stock_ledger()
dn_doc.cancel_packing_slips()
frappe.db.sql("""delete from `tabGL Entry`
where voucher_type='Delivery Note' and voucher_no=%s""", dn.name)
frappe.db.sql("update `tabDelivery Note Item` set target_warehouse='' where parent=%s", dn.name)
dn_doc = frappe.get_doc("Delivery Note", dn.name)
dn_doc.docstatus = 1
dn_doc.on_submit()
frappe.db.commit()
except Exception:
dn_failed_list.append(dn.name)
frappe.local.stockledger_exceptions = None
print(frappe.get_traceback())
frappe.db.rollback()
frappe.db.sql("update `tabDelivery Note Item` set target_warehouse='' where docstatus=0")
def repost_si(si_failed_list):
si_list = get_affected_sales_invoice()
if si_list:
print("-"*40)
print("Reposting Sales Invoice")
for si in si_list:
if si.docstatus == 0:
continue
print(si.name)
try:
si_doc = frappe.get_doc("Sales Invoice", si.name)
si_doc.docstatus = 2
si_doc.update_stock_ledger()
frappe.db.sql("""delete from `tabGL Entry`
where voucher_type='Sales Invoice' and voucher_no=%s""", si.name)
frappe.db.sql("update `tabSales Invoice Item` set target_warehouse='' where parent=%s", si.name)
si_doc = frappe.get_doc("Sales Invoice", si.name)
si_doc.docstatus = 1
si_doc.update_stock_ledger()
si_doc.make_gl_entries()
frappe.db.commit()
except Exception:
si_failed_list.append(si.name)
frappe.local.stockledger_exceptions = None
print(frappe.get_traceback())
frappe.db.rollback()
frappe.db.sql("update `tabSales Invoice Item` set target_warehouse='' where docstatus=0")
def repost_so():
so_list = get_affected_sales_order()
frappe.db.sql("update `tabSales Order Item` set target_warehouse=''")
if so_list:
print("-"*40)
print("Sales Order reposted")
def get_affected_delivery_notes():
return frappe.db.sql("""select distinct dn.name, dn.docstatus
from `tabDelivery Note` dn, `tabDelivery Note Item` dn_item
where dn.name=dn_item.parent and dn.docstatus < 2
and dn_item.target_warehouse is not null and dn_item.target_warehouse != ''
order by dn.posting_date asc""", as_dict=1)
def get_affected_sales_invoice():
return frappe.db.sql("""select distinct si.name, si.docstatus
from `tabSales Invoice` si, `tabSales Invoice Item` si_item
where si.name=si_item.parent and si.docstatus < 2 and si.update_stock=1
and si_item.target_warehouse is not null and si_item.target_warehouse != ''
order by si.posting_date asc""", as_dict=1)
def get_affected_sales_order():
return frappe.db.sql_list("""select distinct parent from `tabSales Order Item`
where target_warehouse is not null and target_warehouse != '' and docstatus <2""")
| 32.12 | 111 | 0.737947 |
f7fa5ebc1de24bb058fb900810372a06231c2c5a
| 1,279 |
py
|
Python
|
___Python/Jonas/Python/p05_random/m01_wuerfeln.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
___Python/Jonas/Python/p05_random/m01_wuerfeln.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
___Python/Jonas/Python/p05_random/m01_wuerfeln.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
import random
r= random.Random()
def wuerfeln():
    return r.randint(1, 6)  # face value between 1 and 6
def muenzwurf():
    return r.randint(0, 1)  # 0 = heads, 1 = tails
print(wuerfeln())
d = {}
for i in range(10000):
augenzahl = wuerfeln()
if augenzahl in d:
d[augenzahl] += 1
else:
d[augenzahl] = 1
print(d)
# 1) Determine lottery numbers 6 out of 49 ==> [2,7,13,17,19,42]
def kugel():
    return r.randint(1, 49)  # balls numbered 1 to 49
# Solution a
zahlen = []
d = {}
kugeln = 0
while kugeln < 6:
    ziehung = kugel()  # draw one ball
    if ziehung not in d:  # this ball has not been drawn before
        d[ziehung] = 1
        zahlen.append(ziehung)  # the ball must not be drawn again, so record it
kugeln += 1
print (sorted(zahlen))
# Solution b
urne = list(range(1, 50)) # urne = [1,2,3..., 49]
lottoziehung = []
for i in range(6):
ziehung = urne.pop(r.randint(0, len(urne) - 1))
lottoziehung.append(ziehung)
print(sorted(lottoziehung))
# Solution c
lottoziehung = r.sample(range(1, 50), 6)
print(sorted(lottoziehung))
# 2) Write a function wuerfeln2 that rolls a die fairly.
#    Only the function muenzwurf may be used in the implementation.
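# One possible solution sketch for exercise 2 (an addition, not part of the
# original exercise material): rejection sampling -- three fair coin flips give
# 8 equally likely outcomes; keep 0..5 and map them to the faces 1..6.
def wuerfeln2():
    while True:
        wurf = 4 * muenzwurf() + 2 * muenzwurf() + muenzwurf()  # uniform on 0..7
        if wurf < 6:
            return wurf + 1
print(wuerfeln2())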
| 24.596154 | 93 | 0.629398 |
540a70d9c480c933ffa337956d0c19aa553c3ecc
| 1,873 |
py
|
Python
|
REWORK/src/py/manager.py
|
JueK3y/Instagram-automated-commenting
|
7ecc4119d8e9ff0dfba620488018fbd9fce61606
|
[
"RSA-MD"
] | 9 |
2021-06-22T14:01:24.000Z
|
2022-03-11T08:59:38.000Z
|
REWORK/src/py/manager.py
|
JueK3y/Instagram-automated-commenting
|
7ecc4119d8e9ff0dfba620488018fbd9fce61606
|
[
"RSA-MD"
] | 6 |
2021-01-15T09:12:11.000Z
|
2021-05-25T08:12:10.000Z
|
REWORK/src/py/manager.py
|
JueK3y/Instagram-automated-commenting
|
7ecc4119d8e9ff0dfba620488018fbd9fce61606
|
[
"RSA-MD"
] | 1 |
2021-12-27T18:58:53.000Z
|
2021-12-27T18:58:53.000Z
|
# ┌─────────────────────────────────────────────────────────────────────────┐
# │ Instagram Automated Commenting 2.0 │
# ├─────────────────────────────────────────────────────────────────────────┤
# │ DO NOT indicate used program sections as your own. │
# │ DO NOT sell the software to other people under your name. │
# │ Before further development and sale, │
# │ a written agreement must be made with the manufacturer (JueK3y). │
# │ In the event of possible damage, the user alone is liable, │
# │ the manufacturer (JueK3y) withdraws from any legal responsibility. │
# ├─────────────────────────────────────────────────────────────────────────┤
# │ Copyright © 2020 - 2021 by JueK3y (Julian Kennedy) │
# | https://github.com/JueK3y/Instagram-automated-commenting │
# └─────────────────────────────────────────────────────────────────────────┘
# -*- coding: utf-8 -*-
import sys
from wifiSpeed import WiFi
from update import Update
from profile import Profile
from mainLogic import Manager
from dataFiles import Comment
from credentials import Login
if sys.argv[1] == "1":
WiFi.checkConnection()
elif sys.argv[1] == "2":
Update.checkUpdate()
elif sys.argv[1] == "3":
Profile.get()
elif sys.argv[1] == "4":
Manager.startLogic()
elif sys.argv[1] == "5":
Comment.openFile()
elif sys.argv[1] == "6":
Profile.create(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5])
elif sys.argv[1] == "7":
Profile.editUN(sys.argv[2], sys.argv[3], sys.argv[4])
elif sys.argv[1] == "8":
Profile.editPW(sys.argv[2], sys.argv[3])
elif sys.argv[1] == "9":
Profile.delete(sys.argv[2])
elif sys.argv[1] == "10":
Login.get(sys.argv[2])
else:
print("Huh, weird")
sys.stdout.flush()
| 38.22449 | 77 | 0.501869 |
543c8635bdb708d598047f030659530fe27a5b41
| 1,040 |
py
|
Python
|
tests/onegov/town6/test_views_images.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
tests/onegov/town6/test_views_images.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
tests/onegov/town6/test_views_images.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from webtest import Upload
from tests.shared.utils import create_image, get_meta
def test_view_images(client):
assert client.get('/images', expect_errors=True).status_code == 403
client.login_admin()
images_page = client.get('/images')
assert "Noch keine Bilder hochgeladen" in images_page
images_page.form['file'] = Upload('Test.txt', b'File content')
assert images_page.form.submit(expect_errors=True).status_code == 415
images_page.form['file'] = Upload('Test.jpg', create_image().read())
images_page.form.submit()
images_page = client.get('/images')
assert "Noch keine Bilder hochgeladen" not in images_page
img_url = images_page.pyquery('.image-container a').attr('href')
# Test Open Graph meta properties
social_media = client.get('/social-media-settings')
social_media.form['og_logo_default'] = img_url
social_media.form.submit().follow()
home = client.get('/')
assert get_meta(home, 'og:image:alt') == 'Test.jpg'
assert get_meta(home, 'og:image')
| 29.714286 | 73 | 0.704808 |
5462e0f12ef819c90c1969b718787c250d278a05
| 2,194 |
py
|
Python
|
src/apriori_interest.py
|
PAULUAPAUL/MasterThesis_AssociationRulesBiodiversity
|
0855abc5ec4835a28be4aa305e5e45e73297b389
|
[
"MIT"
] | null | null | null |
src/apriori_interest.py
|
PAULUAPAUL/MasterThesis_AssociationRulesBiodiversity
|
0855abc5ec4835a28be4aa305e5e45e73297b389
|
[
"MIT"
] | null | null | null |
src/apriori_interest.py
|
PAULUAPAUL/MasterThesis_AssociationRulesBiodiversity
|
0855abc5ec4835a28be4aa305e5e45e73297b389
|
[
"MIT"
] | null | null | null |
from csv import reader
from collections import defaultdict, Counter
from itertools import chain, combinations
from optparse import OptionParser
from utils_interest import *
import numpy as np
from csv_writer import *
def apriori_interest(itemSetList, minSup, minConf, minInt):
C1ItemSet = getItemSetFromList(itemSetList)
# Final result global frequent itemset
globalFreqItemSet = dict()
# Storing global itemset with support count
globalItemSetWithSup = Counter()
L1ItemSet = getAboveMinSup(
C1ItemSet, itemSetList, minSup,minInt, globalItemSetWithSup,1)
currentLSet = L1ItemSet
k = 2
# Calculating frequent item set
    while currentLSet:
        # Storing frequent itemset
        globalFreqItemSet[k-1] = currentLSet
        # Self-joining Lk
        # print(currentLSet)
        candidateSet = getUnion(list(currentLSet), k)
        # print('union done ', candidateSet)
        # Perform subset testing and remove pruned supersets
        candidateSet = pruning(candidateSet, currentLSet, k-1)
        # Scanning itemSet for counting support
        # print('candidates done ', candidateSet)
        # print(str(k))
        currentLSet = getAboveMinSup(
            candidateSet, itemSetList, minSup, minInt, globalItemSetWithSup, k)
        # print('condition: ', currentLSet)
        k += 1
write_csvfile_freq('data/Apriori_Int_minSup_'+ str(minSup)+ "_freq.csv",globalFreqItemSet)
for i in range(6,10):
minConf=float(i/10)
rules = associationRule(globalFreqItemSet, globalItemSetWithSup, minConf)
rules.sort(key=lambda x: x[2])
fname='data/Apriori_INT_minSup_'+ str(minSup) +'_minConf_'+ str(minConf)+'_rules.csv'
write_csvfile_rules(fname,rules)
return globalFreqItemSet, rules
# itemSetList = [['PR', 'P', 'CC'], ['PR', 'DP'], ['DP', 'CC'], ['PR', 'P', 'CC']]
# itemSetList = [['A', 'B', 'D'], ['A', 'B', 'C', 'D'], ['B', 'D'], ['B', 'C', 'D', 'E'], ['A', 'C', 'E'], ['B', 'D', 'F'], ['A', 'E', 'F'], ['C', 'F'], ['B', 'C', 'F'], ['A', 'B', 'C', 'D', 'F']]
#
# freqItemSet, rules = apriori_interest(itemSetList, minSup=0.3, minConf=0.5, minInt=0.0)
# print(rules)
| 39.890909 | 174 | 0.634913 |
3f88452f3dfad63601498d762a112e2ff7f26943
| 2,589 |
py
|
Python
|
Packs/HealthCheck/Scripts/HealthCheckDiskUsageLine/HealthCheckDiskUsageLine.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/HealthCheck/Scripts/HealthCheckDiskUsageLine/HealthCheckDiskUsageLine.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/HealthCheck/Scripts/HealthCheckDiskUsageLine/HealthCheckDiskUsageLine.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
from CommonServerPython import * # noqa: F401
def main():
res = execute_command("demisto-api-get", {"uri": "/system/config"})
config_json = res['response']
partition = config_json.get('sysConf', {}).get('disk.partitions.to.monitor') or '/'
res = execute_command(
"demisto-api-post",
{
"uri": "/statistics/widgets/query",
"body": {
"size": 1440,
"dataType": "system",
"params": {
"timeFrame": "minutes",
},
"query": f"disk.usedPercent.{partition}",
"dateRange": {
"period": {
"byFrom": "hours",
"fromValue": 24,
},
},
"widgetType": "line",
},
})
stats = res["response"]
output = []
higher = 0
build_number = get_demisto_version()['buildNumber']
# in local development instances, the build number will be "REPLACE_THIS_WITH_CI_BUILD_NUM"
build_number = f'{build_number}' if build_number != "REPLACE_THIS_WITH_CI_BUILD_NUM" else "618658"
if int(build_number) >= 618657:
# Line graph:
for counter, entry in enumerate(stats):
higher = max(entry["data"][0], higher)
if counter % 2 == 0:
output.append({"name": counter, "data": [higher]})
higher = 0
data = {
"Type": 17,
"ContentsFormat": "line",
"Contents": {
"stats": output,
"params": {
"timeFrame": "minutes",
"format": "HH:mm",
"layout": "vertical"
}
}
}
else:
# Bar graph:
now = datetime.utcnow()
then = now - timedelta(days=1)
for counter, entry in enumerate(stats):
higher = max(entry["data"][0], higher)
if counter % 60 == 0:
then = then + timedelta(hours=1)
name = then.strftime("%H:%M")
output.append({"name": name, "data": [higher]})
higher = 0
data = {
"Type": 17,
"ContentsFormat": "bar",
"Contents": {
"stats": output,
"params": {
"layout": "horizontal"
}
}
}
return data
if __name__ in ('__main__', '__builtin__', 'builtins'): # pragma: no cover
return_results(main())
| 30.104651 | 102 | 0.447277 |
3fa58d4e2b228407e60b249183cda9e498594f9d
| 434 |
py
|
Python
|
Algorithms/DynamicProgramming/Min Cost Path/min_cost_path.py
|
Nidita/Data-Structures-Algorithms
|
7b5198c8d37e9a70dd0885c6eef6dddd9d85d74a
|
[
"MIT"
] | 26 |
2019-07-17T11:05:43.000Z
|
2022-02-06T08:31:40.000Z
|
Algorithms/DynamicProgramming/Min Cost Path/min_cost_path.py
|
Nidita/Data-Structures-Algorithms
|
7b5198c8d37e9a70dd0885c6eef6dddd9d85d74a
|
[
"MIT"
] | 7 |
2019-07-16T19:52:25.000Z
|
2022-01-08T08:03:44.000Z
|
Algorithms/DynamicProgramming/Min Cost Path/min_cost_path.py
|
Nidita/Data-Structures-Algorithms
|
7b5198c8d37e9a70dd0885c6eef6dddd9d85d74a
|
[
"MIT"
] | 19 |
2020-01-14T02:44:28.000Z
|
2021-12-27T17:31:59.000Z
|
import sys
def min_cost_path(arr, x, y):
    # Naive recursion: cost of the cheapest path from (0, 0) to (x, y),
    # moving right, down or diagonally (exponential without memoization).
    if x < 0 or y < 0:
        return sys.maxsize  # out of bounds: never pick this path
    elif x == 0 and y == 0:
        return arr[x][y]
    else:
        return arr[x][y] + min(min_cost_path(arr, x-1, y-1), min_cost_path(arr, x-1, y), min_cost_path(arr, x, y-1))
input_matrix = [[1, 2, 3], [4, 8, 2], [1, 5, 3]]
import time
init = time.time()
print(min_cost_path(input_matrix, 2, 2))
end = time.time()
print((end-init)*1000)
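# A memoized variant (an added sketch, not part of the original file): caching
# subproblem results brings the exponential recursion down to O(rows * cols).
from functools import lru_cache
def min_cost_path_memo(arr, x, y):
    @lru_cache(maxsize=None)
    def go(i, j):
        if i < 0 or j < 0:
            return sys.maxsize
        if i == 0 and j == 0:
            return arr[i][j]
        return arr[i][j] + min(go(i - 1, j - 1), go(i - 1, j), go(i, j - 1))
    return go(x, y)
init = time.time()
print(min_cost_path_memo(input_matrix, 2, 2))  # same result, far fewer calls
end = time.time()
print((end - init) * 1000)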
| 18.083333 | 116 | 0.585253 |
3f26d896f5643fbcfa45eed6097af22b70ada489
| 1,181 |
py
|
Python
|
2020-09-03-1234-gma_Klassen_becher.py
|
gmaubach/OOP-with-Python
|
9b059e911d55d616e756324564f1f2cc524aa53d
|
[
"MIT"
] | null | null | null |
2020-09-03-1234-gma_Klassen_becher.py
|
gmaubach/OOP-with-Python
|
9b059e911d55d616e756324564f1f2cc524aa53d
|
[
"MIT"
] | null | null | null |
2020-09-03-1234-gma_Klassen_becher.py
|
gmaubach/OOP-with-Python
|
9b059e911d55d616e756324564f1f2cc524aa53d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 3 12:07:10 2020
@author: Georg Maubach
Program a class Becher (cup) that stores the cup's volume and fill
level in millilitres as floating-point numbers.
Write a constructor for volume and fill level;
use a default argument for empty cups.
Write an ausgabe (output) method.
Implement getters for the attributes (no setters).
Check for a valid state.
"""
class Becher:
def __init__(self, v, f = 0):
if (v <= 0):
raise ValueError("Volumen muss >0 sein.")
elif (f > v):
raise ValueError("Die Füllmenge ist groesser als der Becher")
else:
self._volumen = v
if (f < 0):
raise ValueError("F muss >=0 sein.")
else:
self._fuellmenge = f
def ausgabe(self):
print("Volumen: ", self._volumen,
"Füllmenge: ", self._fuellmenge)
def getFuellmenge(self):
return(self._fuellmenge)
def getVolumen(self):
return(self._volumen)
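# Brief usage sketch (an added illustration, not part of the original exercise):
becher = Becher(500.0, 250.0)
becher.ausgabe()               # Volume: 500.0 Fill level: 250.0
print(becher.getVolumen())     # 500.0
print(becher.getFuellmenge())  # 250.0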
# https://www.python.org/dev/peps/pep-0008/
# EOF .
| 24.102041 | 74 | 0.606266 |
18e07e149c1a80f4481540c64312e41965cb7326
| 7,427 |
py
|
Python
|
python/oneflow/compatible/single_client/test/ops/test_broadcast_to_compatible_with.py
|
wangyuyue/oneflow
|
0a71c22fe8355392acc8dc0e301589faee4c4832
|
[
"Apache-2.0"
] | 3,285 |
2020-07-31T05:51:22.000Z
|
2022-03-31T15:20:16.000Z
|
python/oneflow/compatible/single_client/test/ops/test_broadcast_to_compatible_with.py
|
wangyuyue/oneflow
|
0a71c22fe8355392acc8dc0e301589faee4c4832
|
[
"Apache-2.0"
] | 2,417 |
2020-07-31T06:28:58.000Z
|
2022-03-31T23:04:14.000Z
|
python/oneflow/compatible/single_client/test/ops/test_broadcast_to_compatible_with.py
|
wangyuyue/oneflow
|
0a71c22fe8355392acc8dc0e301589faee4c4832
|
[
"Apache-2.0"
] | 520 |
2020-07-31T05:52:42.000Z
|
2022-03-29T02:38:11.000Z
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import numpy as np
import oneflow.compatible.single_client.unittest
from oneflow.compatible import single_client as flow
from oneflow.compatible.single_client import typing as oft
def _of_broadcast_to_compatible_with(x, compatible_shape, x_shape=None):
assert isinstance(compatible_shape, (list, tuple))
if x_shape is None:
x_shape = x.shape
flow.clear_default_session()
func_config = flow.FunctionConfig()
func_config.default_data_type(flow.float)
func_config.default_logical_view(flow.scope.mirrored_view())
@flow.global_function(function_config=func_config)
def broadcast_to_compatible_with_fn(
x_def: oft.ListNumpy.Placeholder(shape=x_shape, dtype=flow.float)
):
compatible_var = [
flow.get_variable(
"compatible_var_{}".format(i),
shape=cp_shape,
dtype=flow.float,
initializer=flow.random_normal_initializer(),
trainable=False,
)
for (i, cp_shape) in enumerate(compatible_shape)
]
return flow.broadcast_to_compatible_with(x_def, compatible_var)
return broadcast_to_compatible_with_fn([x]).get().numpy_list()[0]
def _of_broadcast_to_compatible_with_dynamic(
x, a, b, x_shape=None, a_shape=None, b_shape=None
):
if x_shape is None:
x_shape = x.shape
if a_shape is None:
a_shape = a.shape
if b_shape is None:
b_shape = b.shape
flow.clear_default_session()
func_config = flow.FunctionConfig()
func_config.default_data_type(flow.float)
func_config.default_logical_view(flow.scope.mirrored_view())
@flow.global_function(function_config=func_config)
def broadcast_to_compatible_with_fn(
x_def: oft.ListNumpy.Placeholder(x_shape, dtype=flow.float),
a_def: oft.ListNumpy.Placeholder(a_shape, dtype=flow.float),
b_def: oft.ListNumpy.Placeholder(b_shape, dtype=flow.float),
):
return flow.broadcast_to_compatible_with(
x_def, [flow.identity(a_def), flow.identity(b_def)]
)
return broadcast_to_compatible_with_fn([x], [a], [b]).get().numpy_list()[0]
def _of_broadcast_to_compatible_with_grad(x, compatible_shape, dx_watcher):
assert isinstance(compatible_shape, (list, tuple))
assert callable(dx_watcher)
flow.clear_default_session()
func_config = flow.FunctionConfig()
func_config.default_data_type(flow.float)
func_config.default_logical_view(flow.scope.consistent_view())
@flow.global_function(type="train", function_config=func_config)
def broadcast_to_compatible_with_fn(
x_def: oft.Numpy.Placeholder(x.shape, dtype=flow.float)
):
x_var = flow.get_variable(
"x_var",
shape=x.shape,
dtype=flow.float,
initializer=flow.constant_initializer(0),
trainable=True,
)
compatible_var = [
flow.get_variable(
"compatible_var_{}".format(i),
shape=cp_shape,
dtype=flow.float,
initializer=flow.random_normal_initializer(),
trainable=False,
)
for (i, cp_shape) in enumerate(compatible_shape)
]
x_var = x_var + x_def
y = flow.broadcast_to_compatible_with(x_var, compatible_var)
flow.optimizer.SGD(
flow.optimizer.PiecewiseConstantScheduler([], [0.001]), momentum=0
).minimize(y)
flow.watch_diff(x_var, dx_watcher)
return y
return broadcast_to_compatible_with_fn(x).get().numpy()
@flow.unittest.skip_unless_1n1d()
class TestBroadcastToCompatibleWith(flow.unittest.TestCase):
def test_broadcast_to_compatible_with(test_case):
x = np.random.standard_normal((5, 2)).astype(np.float32)
compatible_shape = [[4, 5, 2], [4, 5, 1]]
ret = _of_broadcast_to_compatible_with(x, compatible_shape)
expected_ret = np.broadcast_to(x, [4, 5, 2])
test_case.assertTrue(np.array_equal(expected_ret, ret))
def test_dynamic_broadcast_to_compatible_with(test_case):
x = np.random.standard_normal((10, 6)).astype(np.float32)
x_static_shape = (15, 6)
a = np.random.standard_normal((3, 10, 6)).astype(np.float32)
a_static_shape = (3, 15, 6)
b = np.random.standard_normal((3, 10, 1)).astype(np.float32)
b_static_shape = (3, 15, 1)
ret = _of_broadcast_to_compatible_with_dynamic(
x, a, b, x_static_shape, a_static_shape, b_static_shape
)
expected_ret = np.broadcast_to(x, [3, 10, 6])
test_case.assertTrue(np.array_equal(expected_ret, ret))
def test_dynamic_broadcast_to_compatible_with_case_2(test_case):
x = np.random.standard_normal((20, 1, 1)).astype(np.float32)
x_static_shape = (23, 1, 1)
a = np.random.standard_normal((11, 1)).astype(np.float32)
a_static_shape = (15, 1)
b = np.random.standard_normal((7,)).astype(np.float32)
b_static_shape = (8,)
ret = _of_broadcast_to_compatible_with_dynamic(
x, a, b, x_static_shape, a_static_shape, b_static_shape
)
expected_ret = np.broadcast_to(x, [20, 11, 7])
test_case.assertTrue(np.array_equal(expected_ret, ret))
def test_broadcast_to_compatible_with_grad(test_case):
x = np.random.standard_normal((7, 1, 4)).astype(np.float32)
compatible_shape = [[7, 1, 4], [5, 4]]
def compare_dy(dx_blob):
dx = np.ones([7, 5, 4], dtype=np.float32).sum(axis=1).reshape(x.shape)
test_case.assertTrue(np.array_equal(dx, dx_blob.numpy()))
ret = _of_broadcast_to_compatible_with_grad(x, compatible_shape, compare_dy)
exp_ret = np.broadcast_to(x, [7, 5, 4])
test_case.assertTrue(np.array_equal(exp_ret, ret))
def test_broadcast_to_compatible_with_grad_case_2(test_case):
x = np.random.standard_normal((7, 1, 4)).astype(np.float32)
compatible_shape = [[1, 7, 5, 4]]
def compare_dy(dx_blob):
dx = np.ones([7, 5, 4], dtype=np.float32).sum(axis=1).reshape(x.shape)
test_case.assertTrue(np.array_equal(dx, dx_blob.numpy()))
ret = _of_broadcast_to_compatible_with_grad(x, compatible_shape, compare_dy)
exp_ret = np.broadcast_to(x, [1, 7, 5, 4])
test_case.assertTrue(np.array_equal(exp_ret, ret))
def test_broadcast_to_compatible_with_no_broadcast(test_case):
x = np.random.standard_normal((9, 9, 6)).astype(np.float32)
x_static_shape = (10, 9, 6)
compatible_shape = [[6], [9, 1]]
ret = _of_broadcast_to_compatible_with(x, compatible_shape, x_static_shape)
test_case.assertTrue(np.array_equal(x, ret))
if __name__ == "__main__":
unittest.main()
| 39.089474 | 84 | 0.674566 |
7a036e77645f1333cb2160d1d7290b9ecc25c444
| 548 |
py
|
Python
|
src/bo4e/enum/zeiteinheit.py
|
bo4e/BO4E-python
|
28b12f853c8a496d14b133759b7aa2d6661f79a0
|
[
"MIT"
] | 1 |
2022-03-02T12:49:44.000Z
|
2022-03-02T12:49:44.000Z
|
src/bo4e/enum/zeiteinheit.py
|
bo4e/BO4E-python
|
28b12f853c8a496d14b133759b7aa2d6661f79a0
|
[
"MIT"
] | 21 |
2022-02-04T07:38:46.000Z
|
2022-03-28T14:01:53.000Z
|
src/bo4e/enum/zeiteinheit.py
|
bo4e/BO4E-python
|
28b12f853c8a496d14b133759b7aa2d6661f79a0
|
[
"MIT"
] | null | null | null |
# pylint:disable=missing-module-docstring
from bo4e.enum.strenum import StrEnum
class Zeiteinheit(StrEnum):
"""
    Listing of the possible units for use in time-related information.
"""
    SEKUNDE = "SEKUNDE"  #: second
    MINUTE = "MINUTE"  #: minute
    STUNDE = "STUNDE"  #: hour
    VIERTEL_STUNDE = "VIERTEL_STUNDE"  #: quarter of an hour
    TAG = "TAG"  #: day
    WOCHE = "WOCHE"  #: week
    MONAT = "MONAT"  #: month
    QUARTAL = "QUARTAL"  #: quarter
    HALBJAHR = "HALBJAHR"  #: half-year
    JAHR = "JAHR"  #: year
| 26.095238 | 75 | 0.633212 |
e1ace5d51becd2b9d111989b35465d9e256fdbfe
| 9,142 |
py
|
Python
|
lib/python/qmk/info.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
lib/python/qmk/info.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
lib/python/qmk/info.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
"""Functions that help us generate and use info.json files.
"""
import json
from glob import glob
from pathlib import Path
from milc import cli
from qmk.constants import ARM_PROCESSORS, AVR_PROCESSORS, VUSB_PROCESSORS
from qmk.c_parse import find_layouts
from qmk.keyboard import config_h, rules_mk
from qmk.math import compute
def info_json(keyboard):
"""Generate the info.json data for a specific keyboard.
"""
info_data = {
'keyboard_name': str(keyboard),
'keyboard_folder': str(keyboard),
'layouts': {},
'maintainer': 'qmk',
}
for layout_name, layout_json in _find_all_layouts(keyboard).items():
if not layout_name.startswith('LAYOUT_kc'):
info_data['layouts'][layout_name] = layout_json
info_data = merge_info_jsons(keyboard, info_data)
info_data = _extract_config_h(info_data)
info_data = _extract_rules_mk(info_data)
return info_data
def _extract_config_h(info_data):
"""Pull some keyboard information from existing rules.mk files
"""
config_c = config_h(info_data['keyboard_folder'])
row_pins = config_c.get('MATRIX_ROW_PINS', '').replace('{', '').replace('}', '').strip()
col_pins = config_c.get('MATRIX_COL_PINS', '').replace('{', '').replace('}', '').strip()
direct_pins = config_c.get('DIRECT_PINS', '').replace(' ', '')[1:-1]
info_data['diode_direction'] = config_c.get('DIODE_DIRECTION')
info_data['matrix_size'] = {
'rows': compute(config_c.get('MATRIX_ROWS', '0')),
'cols': compute(config_c.get('MATRIX_COLS', '0')),
}
info_data['matrix_pins'] = {}
if row_pins:
info_data['matrix_pins']['rows'] = row_pins.split(',')
if col_pins:
info_data['matrix_pins']['cols'] = col_pins.split(',')
if direct_pins:
direct_pin_array = []
for row in direct_pins.split('},{'):
if row.startswith('{'):
row = row[1:]
if row.endswith('}'):
row = row[:-1]
direct_pin_array.append([])
for pin in row.split(','):
if pin == 'NO_PIN':
pin = None
direct_pin_array[-1].append(pin)
info_data['matrix_pins']['direct'] = direct_pin_array
info_data['usb'] = {
'vid': config_c.get('VENDOR_ID'),
'pid': config_c.get('PRODUCT_ID'),
'device_ver': config_c.get('DEVICE_VER'),
'manufacturer': config_c.get('MANUFACTURER'),
'product': config_c.get('PRODUCT'),
'description': config_c.get('DESCRIPTION'),
}
return info_data
def _extract_rules_mk(info_data):
"""Pull some keyboard information from existing rules.mk files
"""
rules = rules_mk(info_data['keyboard_folder'])
mcu = rules.get('MCU')
if mcu in ARM_PROCESSORS:
arm_processor_rules(info_data, rules)
elif mcu in AVR_PROCESSORS:
avr_processor_rules(info_data, rules)
else:
cli.log.warning("%s: Unknown MCU: %s" % (info_data['keyboard_folder'], mcu))
unknown_processor_rules(info_data, rules)
return info_data
def _find_all_layouts(keyboard):
"""Looks for layout macros associated with this keyboard.
"""
layouts = {}
rules = rules_mk(keyboard)
keyboard_path = Path(rules.get('DEFAULT_FOLDER', keyboard))
# Pull in all layouts defined in the standard files
current_path = Path('keyboards/')
for directory in keyboard_path.parts:
current_path = current_path / directory
keyboard_h = '%s.h' % (directory,)
keyboard_h_path = current_path / keyboard_h
if keyboard_h_path.exists():
layouts.update(find_layouts(keyboard_h_path))
if not layouts:
# If we didn't find any layouts above we widen our search. This is error
# prone which is why we want to encourage people to follow the standard above.
cli.log.warning('%s: Falling back to searching for KEYMAP/LAYOUT macros.' % (keyboard))
for file in glob('keyboards/%s/*.h' % keyboard):
if file.endswith('.h'):
these_layouts = find_layouts(file)
if these_layouts:
layouts.update(these_layouts)
if 'LAYOUTS' in rules:
# Match these up against the supplied layouts
supported_layouts = rules['LAYOUTS'].strip().split()
for layout_name in sorted(layouts):
if not layout_name.startswith('LAYOUT_'):
continue
layout_name = layout_name[7:]
if layout_name in supported_layouts:
supported_layouts.remove(layout_name)
if supported_layouts:
cli.log.error('%s: Missing LAYOUT() macro for %s' % (keyboard, ', '.join(supported_layouts)))
return layouts
def arm_processor_rules(info_data, rules):
"""Setup the default info for an ARM board.
"""
info_data['processor_type'] = 'arm'
info_data['bootloader'] = rules['BOOTLOADER'] if 'BOOTLOADER' in rules else 'unknown'
info_data['processor'] = rules['MCU'] if 'MCU' in rules else 'unknown'
info_data['protocol'] = 'ChibiOS'
if info_data['bootloader'] == 'unknown':
if 'STM32' in info_data['processor']:
info_data['bootloader'] = 'stm32-dfu'
elif info_data.get('manufacturer') == 'Input Club':
info_data['bootloader'] = 'kiibohd-dfu'
if 'STM32' in info_data['processor']:
info_data['platform'] = 'STM32'
elif 'MCU_SERIES' in rules:
info_data['platform'] = rules['MCU_SERIES']
elif 'ARM_ATSAM' in rules:
info_data['platform'] = 'ARM_ATSAM'
return info_data
def avr_processor_rules(info_data, rules):
"""Setup the default info for an AVR board.
"""
info_data['processor_type'] = 'avr'
info_data['bootloader'] = rules['BOOTLOADER'] if 'BOOTLOADER' in rules else 'atmel-dfu'
info_data['platform'] = rules['ARCH'] if 'ARCH' in rules else 'unknown'
info_data['processor'] = rules['MCU'] if 'MCU' in rules else 'unknown'
info_data['protocol'] = 'V-USB' if rules.get('MCU') in VUSB_PROCESSORS else 'LUFA'
# FIXME(fauxpark/anyone): Eventually we should detect the protocol by looking at PROTOCOL inherited from mcu_selection.mk:
# info_data['protocol'] = 'V-USB' if rules.get('PROTOCOL') == 'VUSB' else 'LUFA'
return info_data
def unknown_processor_rules(info_data, rules):
"""Setup the default keyboard info for unknown boards.
"""
info_data['bootloader'] = 'unknown'
info_data['platform'] = 'unknown'
info_data['processor'] = 'unknown'
info_data['processor_type'] = 'unknown'
info_data['protocol'] = 'unknown'
return info_data
def merge_info_jsons(keyboard, info_data):
"""Return a merged copy of all the info.json files for a keyboard.
"""
for info_file in find_info_json(keyboard):
# Load and validate the JSON data
with info_file.open('r') as info_fd:
new_info_data = json.load(info_fd)
if not isinstance(new_info_data, dict):
cli.log.error("Invalid file %s, root object should be a dictionary.", str(info_file))
continue
# Copy whitelisted keys into `info_data`
for key in ('keyboard_name', 'manufacturer', 'identifier', 'url', 'maintainer', 'processor', 'bootloader', 'width', 'height'):
if key in new_info_data:
info_data[key] = new_info_data[key]
# Merge the layouts in
if 'layouts' in new_info_data:
for layout_name, json_layout in new_info_data['layouts'].items():
# Only pull in layouts we have a macro for
if layout_name in info_data['layouts']:
if info_data['layouts'][layout_name]['key_count'] != len(json_layout['layout']):
cli.log.error('%s: %s: Number of elements in info.json does not match! info.json:%s != %s:%s', info_data['keyboard_folder'], layout_name, len(json_layout['layout']), layout_name, len(info_data['layouts'][layout_name]['layout']))
else:
for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
key.update(json_layout['layout'][i])
return info_data
def find_info_json(keyboard):
"""Finds all the info.json files associated with a keyboard.
"""
# Find the most specific first
base_path = Path('keyboards')
keyboard_path = base_path / keyboard
keyboard_parent = keyboard_path.parent
info_jsons = [keyboard_path / 'info.json']
# Add DEFAULT_FOLDER before parents, if present
rules = rules_mk(keyboard)
if 'DEFAULT_FOLDER' in rules:
info_jsons.append(Path(rules['DEFAULT_FOLDER']) / 'info.json')
# Add in parent folders for least specific
for _ in range(5):
info_jsons.append(keyboard_parent / 'info.json')
if keyboard_parent.parent == base_path:
break
keyboard_parent = keyboard_parent.parent
# Return a list of the info.json files that actually exist
return [info_json for info_json in info_jsons if info_json.exists()]
| 36.568 | 252 | 0.636294 |
e1c668ebcaab662be4a8e25dce4debac1d6b4b89 | 591 | py | Python | app/templatetags/meta.py | StevenMedina/MovieAPI | 805e79d396e197383bce6095febf0252231a1018 | ["MIT"] | null | null | null | app/templatetags/meta.py | StevenMedina/MovieAPI | 805e79d396e197383bce6095febf0252231a1018 | ["MIT"] | null | null | null | app/templatetags/meta.py | StevenMedina/MovieAPI | 805e79d396e197383bce6095febf0252231a1018 | ["MIT"] | null | null | null |
from django import template
from django.conf import settings
from django.templatetags.static import static

register = template.Library()

@register.inclusion_tag('layout/_meta.html')
def meta_tags(title=None, description=None, image=None, page_type='website'):
if not image:
image = static('img/social-shared.jpg')
if image.startswith('/'):
image = f'{settings.BASE_URL}{image}'
return {
'title': title if title else 'TODO',
'description': description if description else 'TODO',
'image': image,
'page_type': page_type,
}
| 25.695652 | 77 | 0.671743 |
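A hedged usage sketch for the inclusion tag above; the call site and values are illustrative and assume a configured Django settings module with BASE_URL set.

# Hypothetical call, assuming Django is set up and settings.BASE_URL is,
# say, 'https://example.com'; names and values are illustrative.
ctx = meta_tags(title='MovieAPI', description='Browse movies', image='/img/poster.jpg')
# The tag makes the relative image path absolute:
# ctx['image'] == 'https://example.com/img/poster.jpg'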
3d448a6ffe013e01860496c7447b9b84a5ef5061 | 573 | py | Python | backend/utils/handle_get_queries.py | methodpark/digitaleswarten | 024c0b88df54e9727925b202e139b3c5b2ce73d6 | ["Apache-2.0"] | 10 | 2020-03-20T19:14:43.000Z | 2020-10-29T21:31:40.000Z | backend/utils/handle_get_queries.py | methodpark/digitaleswarten | 024c0b88df54e9727925b202e139b3c5b2ce73d6 | ["Apache-2.0"] | 41 | 2020-03-20T20:27:55.000Z | 2020-03-24T21:49:37.000Z | backend/utils/handle_get_queries.py | methodpark/digitaleswarten | 024c0b88df54e9727925b202e139b3c5b2ce73d6 | ["Apache-2.0"] | 1 | 2020-03-21T09:31:51.000Z | 2020-03-21T09:31:51.000Z |
from flask import abort

def get_entry_detail_level(request):
    """
    Returns the requested detail level for an entry ('short' or 'full'),
    aborting with 400 for any other value.
    """
entry_level_detail = request.args.get('personDetails', None)
if entry_level_detail not in ['short', 'full']:
abort(400)
return entry_level_detail

def get_entry_state_query(request):
    """
    Returns the queried entry state, aborting with 400 if the 'state'
    query parameter is missing.
    """
state_queried = request.args.get('state', None)
if state_queried is None:
abort(400)
return state_queried
| 28.65 | 73 | 0.691099 |
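A hypothetical Flask route wiring one of the helpers above into an app; the endpoint name and response shape are illustrative, not taken from the original project.

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route('/entries')
def list_entries():
    # The helper aborts with 400 on malformed input, so this body only
    # runs for well-formed queries such as /entries?personDetails=short
    detail = get_entry_detail_level(request)
    return jsonify({'personDetails': detail})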
18707eb8a4666cf621ae3f8d98473cc3d8937e12 | 436 | py | Python | Problems/Stack/Easy/CrawlerLogFolder/test_crawler_log_folder.py | dolong2110/Algorithm-By-Problems-Python | 31ecc7367aaabdd2b0ac0af7f63ca5796d70c730 | ["MIT"] | 1 | 2021-08-16T14:52:05.000Z | 2021-08-16T14:52:05.000Z | Problems/Stack/Easy/CrawlerLogFolder/test_crawler_log_folder.py | dolong2110/Algorithm-By-Problems-Python | 31ecc7367aaabdd2b0ac0af7f63ca5796d70c730 | ["MIT"] | null | null | null | Problems/Stack/Easy/CrawlerLogFolder/test_crawler_log_folder.py | dolong2110/Algorithm-By-Problems-Python | 31ecc7367aaabdd2b0ac0af7f63ca5796d70c730 | ["MIT"] | null | null | null |
from unittest import TestCase
from crawler_log_folder import minOperations

class Test(TestCase):
def test_min_operations(self):
self.assertEqual(minOperations(["d1/", "d2/", "../", "d21/", "./"]), 2)
self.assertEqual(minOperations(["d1/", "d2/", "./", "d3/", "../", "d31/"]), 3)
self.assertEqual(minOperations(["d1/", "../", "../", "../"]), 0)
self.assertEqual(minOperations(["./", "../", "./"]), 0)
| 48.444444 | 86 | 0.559633 |
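The module under test is not included in the record above; one counter-based implementation that satisfies its assertions could look like this (an assumption, not necessarily the repository's actual code).

def minOperations(logs):
    # Track the current folder depth: '../' moves up (never above the main
    # folder), './' stays put, anything else enters a child folder.
    depth = 0
    for op in logs:
        if op == '../':
            depth = max(0, depth - 1)
        elif op != './':
            depth += 1
    return depth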
62a0513fdd53558d63be074d6ea6d04483889757 | 553 | pyde | Python | sketches/rainbowgrid2/rainbowgrid2.pyde | kantel/processingpy | 74aae222e46f68d1c8f06307aaede3cdae65c8ec | ["MIT"] | 4 | 2018-06-03T02:11:46.000Z | 2021-08-18T19:55:15.000Z | sketches/rainbowgrid2/rainbowgrid2.pyde | kantel/processingpy | 74aae222e46f68d1c8f06307aaede3cdae65c8ec | ["MIT"] | null | null | null | sketches/rainbowgrid2/rainbowgrid2.pyde | kantel/processingpy | 74aae222e46f68d1c8f06307aaede3cdae65c8ec | ["MIT"] | 3 | 2019-12-23T19:12:51.000Z | 2021-04-30T14:00:31.000Z |
# Rainbow Grid, hand-drawn
add_library('handy')

def setup():
global h
h = HandyRenderer(this)
size(600, 600)
this.surface.setTitle("Rainbow Grid Handy")
rectMode(CENTER)
h.setRoughness(1)
h.setFillWeight(0.9)
h.setFillGap(0.9)

def draw():
colorMode(RGB)
background(235, 215, 182)
colorMode(HSB)
translate(12, 12)
for x in range(20):
for y in range(20):
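            # Hue grows with the distance from the mouse (HSB mode), so the
            # grid renders as rainbow bands centred on the cursor.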
d = dist(30*x, 30*y, mouseX, mouseY)
fill(0.5*d, 255, 255)
h.rect(x*30 + 3, y*30 + 3, 24, 24)
| 22.12 | 48 | 0.575045 |
62006a9006134559cbde2e01ab25eabe2b71c9e7 | 634 | py | Python | exercises/zh/test_03_11.py | Jette16/spacy-course | 32df0c8f6192de6c9daba89740a28c0537e4d6a0 | ["MIT"] | 2085 | 2019-04-17T13:10:40.000Z | 2022-03-30T21:51:46.000Z | exercises/zh/test_03_11.py | Jette16/spacy-course | 32df0c8f6192de6c9daba89740a28c0537e4d6a0 | ["MIT"] | 79 | 2019-04-18T14:42:55.000Z | 2022-03-07T08:15:43.000Z | exercises/zh/test_03_11.py | Jette16/spacy-course | 32df0c8f6192de6c9daba89740a28c0537e4d6a0 | ["MIT"] | 361 | 2019-04-17T13:34:32.000Z | 2022-03-28T04:42:45.000Z |
def test():
    assert Span.has_extension(
        "wikipedia_url"
    ), "Did you register the extension on the span?"
    ext = Span.get_extension("wikipedia_url")
    assert ext[2] is not None, "Did you set the getter correctly?"
    assert (
        "getter=get_wikipedia_url" in __solution__
    ), "Did you set the getter to get_wikipedia_url?"
    assert (
        "(ent.text, ent._.wikipedia_url)" in __solution__
    ), "Did you access the custom attribute?"
    assert (
        doc.ents[-1]._.wikipedia_url
        == "https://zh.wikipedia.org/w/index.php?search=周杰伦"
    ), "The value of this attribute appears to be wrong."
    __msg__.good(
        "Nicely done! We now have a custom pipeline component that uses "
        "named entities predicted by the model to generate Wikipedia URLs "
        "and stores them as a custom attribute. Try opening one of the "
        "URLs in your browser!"
    )
| 27.565217 | 60 | 0.649842 |
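A hedged sketch of the kind of solution the test above exercises (the exercise file itself is not part of this record): a Span extension whose getter builds a Wikipedia search URL from the entity text.

from spacy.tokens import Span

def get_wikipedia_url(span):
    # The label whitelist is an assumption; only build a URL for entity
    # types where a Wikipedia lookup makes sense.
    if span.label_ in ("PERSON", "ORG", "GPE", "LOCATION"):
        entity_text = span.text.replace(" ", "_")
        return "https://zh.wikipedia.org/w/index.php?search=" + entity_text

Span.set_extension("wikipedia_url", getter=get_wikipedia_url)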
028e38a4206292f2a8fafdefe0402d56090ceb87 | 986 | py | Python | Scheduling/Python Implementation/sjf.py | G-M-C/S5-SSLAB | d56a968ffdf56c845cde7aa0cf047c88870ed011 | ["MIT"] | null | null | null | Scheduling/Python Implementation/sjf.py | G-M-C/S5-SSLAB | d56a968ffdf56c845cde7aa0cf047c88870ed011 | ["MIT"] | null | null | null | Scheduling/Python Implementation/sjf.py | G-M-C/S5-SSLAB | d56a968ffdf56c845cde7aa0cf047c88870ed011 | ["MIT"] | null | null | null |
import pandas as pd
import operator

class Process:
    def __init__(self):
        self.pid = input("Enter Process ID : ")
        self.bt = int(input("Enter Burst time : "))

    def get_wt(self, x):
        self.wt = x

    def get_tat(self, x):
        self.tat = x

PID = []
BT = []
WT = []
TAT = []
n = int(input("No. of processes ? "))
p = []
for i in range(0, n):
    p.append(Process())
# Shortest Job First: serve processes in order of increasing burst time
p.sort(key=operator.attrgetter('bt'))
for i in range(0, n):
    if i == 0:
        p[i].get_tat(p[i].bt)
    else:
        p[i].get_tat(p[i - 1].tat + p[i].bt)
    p[i].get_wt(p[i].tat - p[i].bt)
    PID.append(p[i].pid)
    BT.append(p[i].bt)
    WT.append(p[i].wt)
    TAT.append(p[i].tat)
print("\n\nSJF Scheduling")
for item in PID:
    print("< {} >".format(item), end=" ")
print()
d = {'Process #': PID, 'Burst Time': BT, 'Turn-around Time': TAT, 'Waiting Time': WT}
ptable = pd.DataFrame(d)
print(ptable)  # display() is notebook-only; print() works in a plain script
avw = float(sum(WT) / n)
avt = float(sum(TAT) / n)
print("Average Turnaround Time : ", avt)
print("Average Waiting Time : ", avw)
| 22.409091 | 76 | 0.590264 |
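A worked example for the scheduler above, computed by hand with illustrative burst times:

# For burst times [6, 8, 7, 3], SJF runs the jobs in order [3, 6, 7, 8]:
#   turnaround times: 3, 9, 16, 24  (cumulative burst time)
#   waiting times:    0, 3,  9, 16  (turnaround - burst)
# so the program would report an average turnaround of 13.0 and an
# average waiting time of 7.0.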