max_stars_repo_path (stringlengths 4-182) | max_stars_repo_name (stringlengths 6-116) | max_stars_count (int64 0-191k) | id (stringlengths 7-7) | content (stringlengths 100-10k) | size (int64 100-10k)
---|---|---|---|---|---|
run.py
|
showi/mattermost-integration-marmiton
| 0 |
2024523
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from mattermost_marmiton.app import app
from mattermost_marmiton.settings import MARMITON_API_KEY, MATTERMOST_MARMITON_TOKEN
if __name__ == "__main__":
if not MARMITON_API_KEY:
print("MARMITON_API_KEY must be configured. Please see README.md for instructions")
        sys.exit(1)
if not MATTERMOST_MARMITON_TOKEN:
print("MATTERMOST_MARMITON_TOKEN must be configured. Please see README.md for instructions")
        sys.exit(1)
port = os.environ.get('MATTERMOST_MARMITON_PORT', None) or os.environ.get('PORT', 5000)
host = os.environ.get('MATTERMOST_MARMITON_HOST', None) or os.environ.get('HOST', '0.0.0.0')
app.run(host=str(host), port=int(port))
| 755 |
libtrack/elfmod/vstruct/defs/win32.py
|
columbia/libtrack
| 40 |
2023523
|
# FIXME this is named wrong!
import vstruct
from vstruct.primitives import *
class CLIENT_ID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UniqueProcess = v_ptr32()
self.UniqueThread = v_ptr32()
class EXCEPTION_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionCode = v_uint32()
self.ExceptionFlags = v_uint32()
self.ExceptionRecord = v_ptr32()
self.ExceptionAddress = v_ptr32()
self.NumberParameters = v_uint32()
class EXCEPTION_REGISTRATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.prev = v_ptr32()
self.handler = v_ptr32()
class HEAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.Signature = v_uint32()
self.Flags = v_uint32()
self.ForceFlags = v_uint32()
self.VirtualMemoryThreshold = v_uint32()
self.SegmentReserve = v_uint32()
self.SegmentCommit = v_uint32()
self.DeCommitFreeBlockThreshold = v_uint32()
self.DeCommitTotalFreeThreshold = v_uint32()
self.TotalFreeSize = v_uint32()
self.MaximumAllocationSize = v_uint32()
self.ProcessHeapsListIndex = v_uint16()
self.HeaderValidateLength = v_uint16()
self.HeaderValidateCopy = v_ptr32()
self.NextAvailableTagIndex = v_uint16()
self.MaximumTagIndex = v_uint16()
self.TagEntries = v_ptr32()
self.UCRSegments = v_ptr32()
self.UnusedUnCommittedRanges = v_ptr32()
self.AlignRound = v_uint32()
self.AlignMask = v_uint32()
self.VirtualAllocBlocks = ListEntry()
self.Segments = vstruct.VArray([v_uint32() for i in range(64)])
self.u = vstruct.VArray([v_uint8() for i in range(16)])
self.u2 = vstruct.VArray([v_uint8() for i in range(2)])
self.AllocatorBackTraceIndex = v_uint16()
self.NonDedicatedListLength = v_uint32()
self.LargeBlocksIndex = v_ptr32()
self.PseudoTagEntries = v_ptr32()
self.FreeLists = vstruct.VArray([ListEntry() for i in range(128)])
self.LockVariable = v_uint32()
self.CommitRoutine = v_ptr32()
self.FrontEndHeap = v_ptr32()
self.FrontEndHeapLockCount = v_uint16()
self.FrontEndHeapType = v_uint8()
self.LastSegmentIndex = v_uint8()
class HEAP_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.SegmentSignature = v_uint32()
self.SegmentFlags = v_uint32()
self.Heap = v_ptr32()
self.LargestUncommitedRange = v_uint32()
self.BaseAddress = v_ptr32()
self.NumberOfPages = v_uint32()
self.FirstEntry = v_ptr32()
self.LastValidEntry = v_ptr32()
self.NumberOfUnCommittedPages = v_uint32()
self.NumberOfUnCommittedRanges = v_uint32()
self.UncommittedRanges = v_ptr32()
self.SegmentAllocatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
self.LastEntryInSegment = v_ptr32()
class HEAP_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.PrevSize = v_uint16()
self.SegmentIndex = v_uint8()
self.Flags = v_uint8()
self.Unused = v_uint8()
self.TagIndex = v_uint8()
class ListEntry(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_ptr32()
self.Blink = v_ptr32()
class NT_TIB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionList = v_ptr32()
self.StackBase = v_ptr32()
self.StackLimit = v_ptr32()
self.SubSystemTib = v_ptr32()
self.FiberData = v_ptr32()
#x.Version = v_ptr32() # This is a union field
self.ArbitraryUserPtr = v_ptr32()
self.Self = v_ptr32()
class PEB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InheritedAddressSpace = v_uint8()
self.ReadImageFileExecOptions = v_uint8()
self.BeingDebugged = v_uint8()
self.SpareBool = v_uint8()
self.Mutant = v_ptr32()
self.ImageBaseAddress = v_ptr32()
self.Ldr = v_ptr32()
self.ProcessParameters = v_ptr32()
self.SubSystemData = v_ptr32()
self.ProcessHeap = v_ptr32()
self.FastPebLock = v_ptr32()
self.FastPebLockRoutine = v_ptr32()
self.FastPebUnlockRoutine = v_ptr32()
self.EnvironmentUpdateCount = v_uint32()
self.KernelCallbackTable = v_ptr32()
self.SystemReserved = v_uint32()
self.AtlThunkSListPtr32 = v_ptr32()
self.FreeList = v_ptr32()
self.TlsExpansionCounter = v_uint32()
self.TlsBitmap = v_ptr32()
self.TlsBitmapBits = vstruct.VArray([v_uint32() for i in range(2)])
self.ReadOnlySharedMemoryBase = v_ptr32()
self.ReadOnlySharedMemoryHeap = v_ptr32()
self.ReadOnlyStaticServerData = v_ptr32()
self.AnsiCodePageData = v_ptr32()
self.OemCodePageData = v_ptr32()
self.UnicodeCaseTableData = v_ptr32()
self.NumberOfProcessors = v_uint32()
self.NtGlobalFlag = v_uint64()
self.CriticalSectionTimeout = v_uint64()
self.HeapSegmentReserve = v_uint32()
self.HeapSegmentCommit = v_uint32()
self.HeapDeCommitTotalFreeThreshold = v_uint32()
self.HeapDeCommitFreeBlockThreshold = v_uint32()
self.NumberOfHeaps = v_uint32()
self.MaximumNumberOfHeaps = v_uint32()
self.ProcessHeaps = v_ptr32()
self.GdiSharedHandleTable = v_ptr32()
self.ProcessStarterHelper = v_ptr32()
self.GdiDCAttributeList = v_uint32()
self.LoaderLock = v_ptr32()
self.OSMajorVersion = v_uint32()
self.OSMinorVersion = v_uint32()
self.OSBuildNumber = v_uint16()
self.OSCSDVersion = v_uint16()
self.OSPlatformId = v_uint32()
self.ImageSubsystem = v_uint32()
self.ImageSubsystemMajorVersion = v_uint32()
self.ImageSubsystemMinorVersion = v_uint32()
self.ImageProcessAffinityMask = v_uint32()
self.GdiHandleBuffer = vstruct.VArray([v_ptr32() for i in range(34)])
self.PostProcessInitRoutine = v_ptr32()
self.TlsExpansionBitmap = v_ptr32()
self.TlsExpansionBitmapBits = vstruct.VArray([v_uint32() for i in range(32)])
self.SessionId = v_uint32()
self.AppCompatFlags = v_uint64()
self.AppCompatFlagsUser = v_uint64()
self.pShimData = v_ptr32()
self.AppCompatInfo = v_ptr32()
self.CSDVersion = v_ptr32()
self.UNKNOWN = v_uint32()
self.ActivationContextData = v_ptr32()
self.ProcessAssemblyStorageMap = v_ptr32()
self.SystemDefaultActivationContextData = v_ptr32()
self.SystemAssemblyStorageMap = v_ptr32()
self.MinimumStackCommit = v_uint32()
class SEH3_SCOPETABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EnclosingLevel = v_int32()
self.FilterFunction = v_ptr32()
self.HandlerFunction = v_ptr32()
class SEH4_SCOPETABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GSCookieOffset = v_int32()
self.GSCookieXOROffset = v_int32()
self.EHCookieOffset = v_int32()
self.EHCookieXOROffset = v_int32()
self.EnclosingLevel = v_int32()
self.FilterFunction = v_ptr32()
self.HandlerFunction = v_ptr32()
class TEB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TIB = NT_TIB()
self.EnvironmentPointer = v_ptr32()
self.ClientId = CLIENT_ID()
self.ActiveRpcHandle = v_ptr32()
self.ThreadLocalStorage = v_ptr32()
self.ProcessEnvironmentBlock = v_ptr32()
self.LastErrorValue = v_uint32()
self.CountOfOwnedCriticalSections = v_uint32()
self.CsrClientThread = v_ptr32()
self.Win32ThreadInfo = v_ptr32()
self.User32Reserved = vstruct.VArray([v_uint32() for i in range(26)])
self.UserReserved = vstruct.VArray([v_uint32() for i in range(5)])
self.WOW32Reserved = v_ptr32()
self.CurrentLocale = v_uint32()
self.FpSoftwareStatusRegister = v_uint32()
class CLSID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.uuid = GUID()
class IID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.uuid = GUID()
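if __name__ == '__main__':
    # Minimal parsing sketch: vstruct structures defined above can be hydrated
    # straight from raw bytes with vsParse(), and len() reports the packed
    # size. The byte values here are illustrative only.
    le = ListEntry()
    le.vsParse(b'\x10\x00\x00\x00\x20\x00\x00\x00')
    print(hex(le.Flink), hex(le.Blink), len(le))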
| 8,788 |
src/maximopyclient/selectclause.py
|
ibmmaximorestjsonapis/maximopyclient
| 0 |
2022949
|
'''
Created on Aug 2, 2020
@author: AnamitraBhattacharyy
'''
class SelectClause:
'''
classdocs
'''
def __init__(self, props=["*"], object_name=None, relation=None, relation_os=None, all_header_props=False):
self.props = props
self.relation = relation
self.relation_os = relation_os
self.all_header_props = all_header_props
self.related_select_clauses = None
self.object_name = object_name
if self.all_header_props:
self.props = ["_allheaderprops_"]
def add(self, select_clause):
if self.all_header_props:
return
if self.related_select_clauses is None:
self.related_select_clauses = []
self.related_select_clauses.append(select_clause)
    def to_string(self):
        if self.props is None and self.related_select_clauses is None:
            return None
        if self.relation is not None:
            if self.relation_os is None:
                clause = "rel." + self.relation + "{"
            else:
                clause = "rel." + self.relation + self.relation_os + "{"
        elif self.object_name is not None:
            clause = self.object_name + "{"
        else:
            clause = ""
        parts = []
        if self.props is not None:
            parts.extend(self.props)
        if self.related_select_clauses is not None:
            parts.extend(rc.to_string() for rc in self.related_select_clauses)
        # comma-join all parts so no trailing separator needs to be stripped
        # (slicing off the last character would truncate a nested clause's
        # closing brace)
        clause += ",".join(parts)
        if self.relation is not None or self.object_name is not None:
            clause = clause + "}"
        return clause
def params(self):
params = {}
params['oslc.select'] = self.to_string()
print(params['oslc.select'])
return params
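if __name__ == "__main__":
    # Usage sketch (the object and attribute names below are illustrative,
    # not taken from any particular Maximo object structure):
    sc = SelectClause(props=["wonum", "description"], object_name="workorder")
    sc.add(SelectClause(props=["laborcode"], relation="wplabor"))
    print(sc.to_string())  # workorder{wonum,description,rel.wplabor{laborcode}}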
| 1,895 |
onlineshop/core/views.py
|
amitgit712/multi-ven-ecom
| 0 |
2025221
|
from django.shortcuts import render
from products.models import Product
def homepage(request):
recent_products = Product.objects.all().order_by('-id')[:6]
context = {
'recent_products': recent_products
}
return render(request, 'core/homepage.html', context)
| 285 |
example_2/train.py
|
Erlemar/sberloga_hydra_demo
| 6 |
2023020
|
from dataclasses import dataclass
from typing import Any, Optional
import hydra
import numpy as np
from hydra.core.config_store import ConfigStore
from hydra.utils import instantiate
from omegaconf import DictConfig, OmegaConf
from sklearn.datasets import load_wine
from sklearn.model_selection import cross_val_score
@dataclass
class RFConfig:
max_depth: Optional[int] = None
_target_: str = 'sklearn.ensemble.RandomForestClassifier'
n_estimators: int = 100
random_state: int = 42
@dataclass
class LogregConfig:
_target_: str = 'sklearn.linear_model.LogisticRegression'
penalty: str = 'l1'
solver: str = 'liblinear'
C: float = 1.0
random_state: int = 42
max_iter: int = 42
@dataclass
class CrossValConfig:
scoring: Optional[str] = None
cv: Optional[int] = None
@dataclass
class GeneralConfig:
random_state: int = 42
@dataclass
class Config:
# We will populate db using composition.
model: Any = RFConfig()
cross_val: CrossValConfig = CrossValConfig()
general: GeneralConfig = GeneralConfig()
cs = ConfigStore.instance()
cs.store(name="config", node=Config)
cs.store(group="model", name="rf", node=RFConfig)
cs.store(group="model", name="logreg", node=LogregConfig)
@hydra.main(config_path='conf', config_name='config')
def run(cfg: DictConfig) -> None:
print(OmegaConf.to_yaml(cfg))
X, y = load_wine(return_X_y=True, as_frame=True)
model = instantiate(cfg.model)
scores = cross_val_score(model, X, y, **cfg.cross_val)
    print(f'Mean score: {np.mean(scores):.4f}. Std: {np.std(scores):.4f}')
if __name__ == '__main__':
run()
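# The config groups registered above make model selection a CLI concern, e.g.
# (a sketch; exact behavior depends on the defaults in conf/config.yaml):
#   python train.py model=logreg model.C=0.5 cross_val.cv=5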
| 1,634 |
django_elasticsearch_dsl/indices.py
|
flavienliger/django-elasticsearch-dsl
| 3 |
2024987
|
from copy import deepcopy
from django.utils.encoding import python_2_unicode_compatible
from elasticsearch_dsl import Index as DSLIndex
from .apps import DEDConfig
from .registries import registry
@python_2_unicode_compatible
class Index(DSLIndex):
def __init__(self, name, using='default'):
super(Index, self).__init__(name, using)
self._settings = deepcopy(DEDConfig.default_index_settings())
def doc_type(self, doc_type, *args, **kwargs):
"""
Extend to register the doc_type in the global document registry
"""
doc_type = super(Index, self).doc_type(doc_type, *args, **kwargs)
registry.register(self, doc_type)
return doc_type
def __str__(self):
return self._name
| 757 |
fluentxy/tests/test_fluentxy.py
|
bryanwweber/fluentxy
| 0 |
2023484
|
from .. import parse_data
import pytest
pd = pytest.importorskip("pandas")
pdt = pytest.importorskip("pandas.util.testing")
def test_parse_data(shared_datadir):
with open(shared_datadir / "axial-velocity-mesh-1.xy") as m:
mesh_1 = m.readlines()
mesh_1_data = parse_data(mesh_1)
mesh_1_csv = pd.read_csv(
shared_datadir / "axial-velocity-mesh-1.csv", header=[0, 1], index_col=0
)
pdt.assert_frame_equal(mesh_1_data, mesh_1_csv, check_names=True)
with open(shared_datadir / "axial-velocity-mesh-2.xy") as m:
mesh_2 = m.readlines()
mesh_2_data = parse_data(mesh_2)
mesh_2_csv = pd.read_csv(
shared_datadir / "axial-velocity-mesh-2.csv", header=[0, 1], index_col=0
)
pdt.assert_frame_equal(mesh_2_data, mesh_2_csv, check_names=True)
| 805 |
ucb_ds.py
|
radu1/secure-ucb
| 0 |
2022783
|
import time
import os
import sys
import json
from math import sqrt, log
from phe import paillier
from Crypto.Random import get_random_bytes
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
sys.path.append(os.path.relpath("."))
from tools import pull, generate_permutation, get_inverse, run_experiment1
########## class DataOwner
class DataOwner():
    # Initialize DataOwner, who knows K and mu
def __init__(self, K, mu, key):
self.time = 0
t = time.time()
self.K = K
self.mu = mu
self.key = key # shared key with AS, R_i
self.aesgcm = AESGCM(self.key)
self.time += time.time() - t
# Step 0: DataOwner outsources mu[i]
def outsource_arm(self, i):
t = time.time()
nonce = os.urandom(12)
ciphertext_mu_i = self.aesgcm.encrypt(nonce, str(self.mu[i]).encode('utf-8'), None)
self.time += time.time() - t
return (ciphertext_mu_i, nonce)
########## class DataClient
class DataClient():
# Initialize DataClient, who has a budget N
def __init__(self, N):
self.time = 0
t = time.time()
self.N = N
self.pk, self._sk = paillier.generate_paillier_keypair()
self.time += time.time() - t
# Step 1: Send the budget
def send_budget(self, N):
return N
# Step 6: Receive the cumulative reward
def receive_R(self, ciphertext_R):
t = time.time()
self.R = self._sk.decrypt(ciphertext_R)
self.time += time.time() - t
########## class R_node
class R_node():
# Initialize arm node i, who has variables s_i, n_i, and t needed to compute B_i later on
def __init__(self, K, i, pk_DC, key):
self.time = 0
t = time.time()
self.i = i
self.s_i = 0
self.n_i = 0
self.t = K-1
        self.pk_DC = pk_DC  # the pk of DataClient is needed to send the encrypted sum of rewards at the end
self.key = key # shared key with AS, DO
self.aesgcm = AESGCM(self.key)
self.time += time.time() - t
# Step 0: Arm node i receives its mu[i] that is outsourced by DataOwner
def receive_outsourced_mu(self, data_DO_Ri):
t = time.time()
ciphertext_mu_i, nonce = data_DO_Ri
self.mu_i = float(self.aesgcm.decrypt(nonce, ciphertext_mu_i, None))
self.time += time.time() - t
# Step 2: Arm node i receives a triple of ciphertexts (b, first, node), is pulled if b=1, then updates its variables
def receive_AS(self, triple_and_nonces):
t = time.time()
ciphertext_b, ciphertext_first, ciphertext_next, nonce1, nonce2, nonce3 = triple_and_nonces
self.b = int(self.aesgcm.decrypt(nonce1, ciphertext_b, None))
self.first = int(self.aesgcm.decrypt(nonce2, ciphertext_first, None))
self.next = int(self.aesgcm.decrypt(nonce3, ciphertext_next, None))
self.t += 1
if self.b == 1:
r = pull(self.mu_i)
self.s_i += r
self.n_i += 1
self.B_i = 1. * self.s_i / self.n_i + sqrt(2. * log(self.t) / self.n_i)
self.time += time.time() - t
# Step 3: Start ring computation
def start_ring(self):
t = time.time()
nonce1 = os.urandom(12)
nonce2 = os.urandom(12)
ciphertext_B_m = self.aesgcm.encrypt(nonce1, str(self.B_i).encode('utf-8'), None)
ciphertext_i_m = self.aesgcm.encrypt(nonce2, str(self.i).encode('utf-8'), None)
self.time += time.time() - t
self.R_nodes[self.next].receive_Ri((ciphertext_B_m, ciphertext_i_m, nonce1, nonce2))
# Step 3/4: Arm node i receives pair (B_m, i_m), updates variables, and sends either to next arm node or to AS
def receive_Ri(self, pair_and_nonces):
t = time.time()
ciphertext_B_m, ciphertext_i_m, nonce1, nonce2 = pair_and_nonces
B_m = float(self.aesgcm.decrypt(nonce1, ciphertext_B_m, None))
i_m = int(self.aesgcm.decrypt(nonce2, ciphertext_i_m, None))
if self.B_i > B_m:
B_m = self.B_i
i_m = self.i
        if self.next != 0:
            # use fresh nonces when re-encrypting: reusing an AES-GCM nonce
            # under the same key voids its security guarantees
            nonce1 = os.urandom(12)
            nonce2 = os.urandom(12)
            ciphertext_B_m = self.aesgcm.encrypt(nonce1, str(B_m).encode('utf-8'), None)
            ciphertext_i_m = self.aesgcm.encrypt(nonce2, str(i_m).encode('utf-8'), None)
self.time += time.time() - t
if self.next != 0:
self.R_nodes[self.next].receive_Ri((ciphertext_B_m, ciphertext_i_m, nonce1, nonce2))
else:
self.AS.receive_Ri((ciphertext_i_m, nonce2))
# Step 5: Arm node i sends the sum of rewards that it produced
def send_partial_reward(self):
t = time.time()
ciphertext_s_i = self.pk_DC.encrypt(self.s_i)
self.time += time.time() - t
return ciphertext_s_i
########## class ArmSelector
class ArmSelector():
# Initialize AS, who knows K
def __init__(self, K, key):
self.time = 0
t = time.time()
self.K = K
self.i_m = 0 # index of arm to be pulled next, is updated during the Exploration-exploitation phase
self.key = key # shared key with DO, R_i
self.aesgcm = AESGCM(self.key)
self.time += time.time() - t
# Step 1: Receive budget
def receive_budget(self, N):
t = time.time()
self.N = N
self.time += time.time() - t
# Step 2: Send triple (b, first, node) to each arm node, based on the generated permutation
def send_Ri(self):
t = time.time()
self.sigma = generate_permutation(self.K)
self.time += time.time() - t
for i in range(1, self.K+1):
t = time.time()
b = 1 if (self.i_m == 0 or self.i_m == i) else 0
first = 1 if (self.sigma[i] == 1) else 0
next = 0 if (self.sigma[i] == self.K) else get_inverse(self.sigma, self.sigma[i]+1)
nonce1 = os.urandom(12)
nonce2 = os.urandom(12)
nonce3 = os.urandom(12)
ciphertext_b = self.aesgcm.encrypt(nonce1, str(b).encode('utf-8'), None)
ciphertext_first = self.aesgcm.encrypt(nonce2, str(first).encode('utf-8'), None)
ciphertext_next = self.aesgcm.encrypt(nonce3, str(next).encode('utf-8'), None)
self.time += time.time() - t
self.R_nodes[i].receive_AS((ciphertext_b, ciphertext_first, ciphertext_next, nonce1, nonce2, nonce3))
t = time.time()
if first == 1:
first_node = self.R_nodes[i]
self.time += time.time() - t
first_node.start_ring()
# Step 4: Receive the index of the arm to be pulled next
def receive_Ri(self, ciphertext_and_nonce):
t = time.time()
ciphertext_i_m, nonce = ciphertext_and_nonce
self.i_m = int(self.aesgcm.decrypt(nonce, ciphertext_i_m, None))
self.time += time.time() - t
# Step 5: Sums up all partial rewards to obtain the cumulative reward
def compute_cumulative_reward(self):
ciphertext_partial_rewards = []
for i in range(1, self.K+1):
ciphertext_partial_rewards.append(self.R_nodes[i].send_partial_reward())
t = time.time()
ciphertext_R = sum(ciphertext_partial_rewards)
self.time += time.time() - t
return ciphertext_R
########## Main program
def UCB_DS(N, K, mu):
t_start = time.time()
    # generate the AES-GCM key; the key is shared between DO, AS, R_i
key = get_random_bytes(32)
DC = DataClient(N) # we create DataClient here because her pk is needed to initialize each arm node
# step 0
DO = DataOwner(K, mu, key)
R_nodes = dict()
for i in range (1, K+1):
R_nodes[i] = R_node(K, i, DC.pk, key)
data_DO_Ri = DO.outsource_arm(i)
R_nodes[i].receive_outsourced_mu(data_DO_Ri)
# step 1
AS = ArmSelector(K, key)
    data_DC_AS = DC.send_budget(N)
AS.receive_budget(data_DC_AS)
# make nodes know each other
AS.R_nodes = R_nodes
for i in range(1, K+1):
R_nodes[i].AS = AS
R_nodes[i].R_nodes = R_nodes
# steps 2, 3, 4
for i in range(N-K+1):
AS.send_Ri()
# steps 5, 6
R = AS.compute_cumulative_reward()
DC.receive_R(R)
# construct and return result
result = dict()
result["R"] = DC.R
result["time"] = time.time() - t_start
result["time DO"] = DO.time
result["time DC"] = DC.time
result["time AS"] = AS.time
check_time = 0
check_time += DO.time + DC.time + AS.time
for i in range(1, K+1):
result["time R" + str(i)] = R_nodes[i].time
check_time += R_nodes[i].time
assert (result["time"] - check_time < 0.03 * result["time"])
return result
# to avoid executing run_experiment1(UCB_DS) when classes from this file are imported in ucb_ds2.py
if __name__ == "__main__":
run_experiment1(UCB_DS)
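    # Direct-invocation sketch (bypassing run_experiment1) with an
    # illustrative instance: budget N, K arms with mean rewards mu[1..K]:
    # result = UCB_DS(N=100, K=3, mu={1: 0.9, 2: 0.5, 3: 0.1})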
| 7,816 |
too_many_repos/models/wrapped_repo.py
|
locknic/too-many-repos
| 0 |
2022857
|
from git import Commit
from git import Repo
class WrappedRepo(Repo):
@property
def is_master(self) -> bool:
return self.active_branch == self.heads.master
@property
def name(self) -> str:
return self.remote().url.replace('<EMAIL>:', '').replace('.git', '')
@property
def has_changes(self) -> bool:
return self.is_dirty(untracked_files=True)
@property
def commits_ahead_remote(self) -> int:
return self._count_commits_ahead(self.active_branch.tracking_branch().commit, self.active_branch.commit)
@property
def commits_behind_remote(self) -> int:
return self._count_commits_ahead(self.active_branch.commit, self.active_branch.tracking_branch().commit)
@property
def commits_ahead_master(self) -> int:
return self._count_commits_ahead(self._last_master_commit, self.active_branch.commit)
@property
def commits_behind_master(self) -> int:
return self._count_commits_ahead(self._last_master_commit, self.heads.master.tracking_branch().commit)
@property
def _last_master_commit(self) -> Commit:
last_master_commit = self.merge_base(self.active_branch, self.heads.master)
return last_master_commit[0]
def _count_commits_ahead(self, starting_commit: Commit, ending_commit: Commit) -> int:
commits_ahead = self.iter_commits(str(starting_commit) + ".." + str(ending_commit))
count_ahead = sum(1 for _ in commits_ahead)
return count_ahead
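if __name__ == '__main__':
    # Usage sketch: wrap the repository in the current directory (the path is
    # illustrative; the remote- and master-based properties assume a tracking
    # branch and a local master head exist).
    repo = WrappedRepo('.')
    print(repo.name, repo.has_changes, repo.commits_ahead_remote)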
| 1,503 |
homepage/portfolio/translations.py
|
FabianVolkers/portfolio
| 0 |
2025127
|
from django.conf import settings
"""
Helper function for filtering a queryset to the current language,
falling back to the default language where a translation does not exist.
"""
def filter_translations(queryset, lang):
if lang == settings.LANGUAGE_CODE:
queryset = queryset.filter(lang=lang)
else:
translations = queryset.filter(lang=lang)
# Fallback to default language
fallback = queryset.exclude(
common__id__in=translations.values('common')
).filter(
lang=settings.LANGUAGE_CODE
)
queryset = fallback.union(translations)
return queryset
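# Usage sketch (the model name is hypothetical): fetch pages in the request
# language, padding with default-language rows for untranslated entries:
#   pages = filter_translations(Page.objects.all(), lang=request.LANGUAGE_CODE)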
| 632 |
tests/objects/test_hostgroup.py
|
VunkLai/ndk
| 1 |
2022974
|
import unittest
from ndk.objects import HostGroup
from ndk.stack import Stack
class HostGroupTestCase(unittest.TestCase):
def setUp(self):
self.stack = Stack('HostGroupTesting')
    def test_host_group(self):
        hg = HostGroup(self.stack, hostgroup_name='Foo')
        assert hg.pk == 'foo'
        assert hg.alias == 'foo'
    def test_alias(self):
        hg = HostGroup(self.stack, hostgroup_name='foo', alias='bar')
        assert hg.hostgroup_name == 'foo'
        assert hg.alias == 'bar'
| 521 |
docs/templatePy.py
|
jll123567/Sysh
| 0 |
2024524
|
"""
A template for docstrings.
This would be a longer description of the module.
Classes:
:class:`SimpleBleDevice`, :class:`others`
"""
from bluepy.btle import Peripheral, BTLEException  # assumed import; supplies the base class and exception used below
class SimpleBleDevice(Peripheral):
"""This is a conceptual class representation of a simple BLE device
(GATT Server). It is essentially an extended combination of the
:class:`bluepy.btle.Peripheral` and :class:`bluepy.btle.ScanEntry` classes
:param client: A handle to the :class:`simpleble.SimpleBleClient` client
object that detected the device
:type client: class:`simpleble.SimpleBleClient`
:param addr: Device MAC address, defaults to None
:type addr: str, optional
:param addrType: Device address type - one of ADDR_TYPE_PUBLIC or
ADDR_TYPE_RANDOM, defaults to ADDR_TYPE_PUBLIC
:type addrType: str, optional
:param iface: Bluetooth interface number (0 = /dev/hci0) used for the
connection, defaults to 0
:type iface: int, optional
:param data: A list of tuples (adtype, description, value) containing the
AD type code, human-readable description and value for all available
advertising data items, defaults to None
:type data: list, optional
:param rssi: Received Signal Strength Indication for the last received
broadcast from the device. This is an integer value measured in dB,
where 0 dB is the maximum (theoretical) signal strength, and more
negative numbers indicate a weaker signal, defaults to 0
:type rssi: int, optional
:param connectable: `True` if the device supports connections, and `False`
        otherwise (typically used for advertising ‘beacons’),
defaults to `False`
:type connectable: bool, optional
:param updateCount: Integer count of the number of advertising packets
received from the device so far, defaults to 0
:type updateCount: int, optional
"""
def __init__(self, client, addr=None, addrType=None, iface=0,
data=None, rssi=0, connectable=False, updateCount=0):
"""Constructor method
"""
super().__init__(deviceAddr=None, addrType=addrType, iface=iface)
self.addr = addr
self.addrType = addrType
self.iface = iface
self.rssi = rssi
self.connectable = connectable
self.updateCount = updateCount
self.data = data
self._connected = False
self._services = []
self._characteristics = []
self._client = client
def getServices(self, uuids=None):
"""Returns a list of :class:`bluepy.blte.Service` objects representing
the services offered by the device. This will perform Bluetooth service
discovery if this has not already been done; otherwise it will return a
cached list of services immediately..
:param uuids: A list of string service UUIDs to be discovered,
defaults to None
:type uuids: list, optional
:return: A list of the discovered :class:`bluepy.blte.Service` objects,
which match the provided ``uuids``
:rtype: list On Python 3.x, this returns a dictionary view object,
not a list
"""
self._services = []
if(uuids is not None):
for uuid in uuids:
try:
service = self.getServiceByUUID(uuid)
                    self._services.append(service)
except BTLEException:
pass
else:
self._services = super().getServices()
return self._services
def setNotificationCallback(self, callback):
"""Set the callback function to be executed when the device sends a
notification to the client.
:param callback: A function handle of the form
``callback(client, characteristic, data)``, where ``client`` is a
handle to the :class:`simpleble.SimpleBleClient` that invoked the
callback, ``characteristic`` is the notified
            :class:`bluepy.btle.Characteristic` object and data is a
`bytearray` containing the updated value. Defaults to None
:type callback: function, optional
"""
self.withDelegate(
SimpleBleNotificationDelegate(
callback,
client=self._client
)
)
def getCharacteristics(self, startHnd=1, endHnd=0xFFFF, uuids=None):
"""Returns a list containing :class:`bluepy.btle.Characteristic`
objects for the peripheral. If no arguments are given, will return all
characteristics. If startHnd and/or endHnd are given, the list is
restricted to characteristics whose handles are within the given range.
:param startHnd: Start index, defaults to 1
:type startHnd: int, optional
:param endHnd: End index, defaults to 0xFFFF
:type endHnd: int, optional
:param uuids: a list of UUID strings, defaults to None
:type uuids: list, optional
:return: List of returned :class:`bluepy.btle.Characteristic` objects
:rtype: list
"""
self._characteristics = []
if(uuids is not None):
for uuid in uuids:
try:
characteristic = super().getCharacteristics(
startHnd, endHnd, uuid)[0]
self._characteristics.append(characteristic)
except BTLEException:
pass
else:
self._characteristics = super().getCharacteristics(startHnd,
endHnd)
return self._characteristics
def connect(self):
"""Attempts to initiate a connection with the device.
:return: `True` if connection was successful, `False` otherwise
:rtype: bool
"""
try:
super().connect(self.addr,
addrType=self.addrType,
iface=self.iface)
except BTLEException as ex:
self._connected = False
return (False, ex)
self._connected = True
return True
def disconnect(self):
"""Drops existing connection to device
"""
super().disconnect()
self._connected = False
def isConnected(self):
"""Checks to see if device is connected
:return: `True` if connected, `False` otherwise
:rtype: bool
"""
return self._connected
def printInfo(self):
"""Print info about device
"""
print("Device %s (%s), RSSI=%d dB" %
(self.addr, self.addrType, self.rssi))
for (adtype, desc, value) in self.data:
print(" %s = %s" % (desc, value))
def raiseErr(self):
"""Raise an error
:raises Exception: Generic error to raise.
"""
raise Exception("Ignore me")
| 6,917 |
dash/dash-renderer/setup.py
|
nickmelnikov82/dash
| 17,143 |
2025081
|
import json
from setuptools import setup
with open("package.json") as fp:
package = json.load(fp)
setup(
name="dash_renderer",
version=package["version"],
author="<NAME>",
author_email="<EMAIL>",
packages=["dash_renderer"],
include_package_data=True,
license="MIT",
description="Front-end component renderer for Dash",
install_requires=[],
)
| 384 |
Comparison_Experiments/SCAFNet/nets.py
|
Shun-Gan/Adaptive-Driver-Attention-ADA-model
| 1 |
2024740
|
from __future__ import print_function
from __future__ import absolute_import
import tensorflow as tf
from keras.models import Model
from keras.layers import MaxPooling3D, Conv2D, GlobalAveragePooling2D, Concatenate, Lambda, ConvLSTM2D, Conv3D
from keras.layers import TimeDistributed, Multiply, Add, UpSampling2D, BatchNormalization, ReLU, Dropout
from configs import *
from keras.layers import Input, Layer, Dense
from keras import activations, initializers, constraints
from keras import regularizers
import keras.backend as K
class GraphConvolution(Layer):
"""Basic graph convolution layer as in https://arxiv.org/abs/1609.02907
x=[batch, node, C], adj = [batch, n, n] --> [batch, node, OutChannel]
"""
def __init__(self, units,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs):
if 'input_shape' not in kwargs and 'input_dim' in kwargs:
kwargs['input_shape'] = (kwargs.pop('input_dim'),)
super(GraphConvolution, self).__init__(**kwargs)
self.units = units
self.activation = activations.get(activation)
self.use_bias = use_bias
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.bias_constraint = constraints.get(bias_constraint)
self.supports_masking = True
super(GraphConvolution, self).__init__(**kwargs)
def compute_output_shape(self, input_shape):
features_shape = input_shape[0]
output_shape = features_shape[:-1] + (self.units,)
return output_shape # (batch_size, node, output_dim)
def build(self, input_shape):
features_shape = input_shape[0]
assert len(features_shape) == 3
input_dim = features_shape[2]
self.kernel = self.add_weight(shape=(input_dim,
self.units),
initializer=self.kernel_initializer,
name='kernel',
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint)
if self.use_bias:
self.bias = self.add_weight(shape=(self.units,),
initializer=self.bias_initializer,
name='bias',
regularizer=self.bias_regularizer,
constraint=self.bias_constraint)
else:
self.bias = None
super(GraphConvolution, self).build(input_shape)
def call(self, inputs, mask=None):
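        # Kipf & Welling-style propagation, computed batch-wise:
        # out = activation((adj @ x) @ W + b), with adj batched as [batch, n, n]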
features = inputs[0]
basis = inputs[1]
supports = K.batch_dot(basis, features)
output = K.dot(supports, self.kernel)
if self.use_bias:
output = output + self.bias
return self.activation(output)
class SGcn(Layer):
def __init__(self, out_channels, **kwargs):
self.out_channels = out_channels
# self.bn1 = BatchNormalization()
super(SGcn, self).__init__(**kwargs)
def build(self, input_shape):
self.size = input_shape
self.sim_embed1 = Dense(input_shape[-1])
self.sim_embed2 = Dense(input_shape[-1])
self.graph1 = GraphConvolution(input_shape[-1], activation='relu')
self.graph2 = GraphConvolution(input_shape[-1], activation='relu')
self.graph3 = GraphConvolution(input_shape[-1], activation='relu')
super(SGcn, self).build(input_shape)
def call(self, inputs, **kwargs):
n, h, w, c = self.size
inputs = tf.reshape(inputs, [n, h * w, c])
adj = self.get_adj(inputs)
outs = self.graph1([inputs, adj])
outs = self.graph2([outs, adj])
outs = self.graph3([outs, adj])
# outs = self.bn1(outs)
outs = tf.reduce_mean(outs, 1)
outs = tf.expand_dims(outs, -2)
outs = tf.expand_dims(outs, -2) # [N,T,1,1,C]
return outs
def compute_output_shape(self, input_shape):
return (input_shape[0], 1, 1, self.out_channels)
def get_adj(self, x):
sim1 = self.sim_embed1(x)
sim2 = self.sim_embed2(x)
adj = tf.matmul(sim1, tf.transpose(sim2, [0, 2, 1])) # d x d mat.
adj = tf.nn.softmax(adj)
return adj
def feature_extractor(shapes=(batch_size, input_t, input_shape[0], input_shape[1], 3)):
inputs = Input(batch_shape=shapes)
x = Conv3D(filters=64, kernel_size=3, strides=1, padding='same')(inputs)
x = BatchNormalization()(x)
x = ReLU()(x)
x = Conv3D(filters=64, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = MaxPooling3D((1, 2, 2))(x)
x = Conv3D(filters=128, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = Conv3D(filters=128, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = MaxPooling3D((1, 2, 2))(x)
x = Conv3D(filters=256, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = Conv3D(filters=256, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = Conv3D(filters=256, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = MaxPooling3D((1, 2, 2))(x)
x = Conv3D(filters=512, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = Conv3D(filters=512, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
x = Conv3D(filters=512, kernel_size=3, strides=1, padding='same')(x)
x = BatchNormalization()(x)
x = ReLU()(x)
model = Model(inputs=inputs, outputs=x)
return model
def my_net(x, y, stateful=False):
encoder = feature_extractor()
seg_encoder = feature_extractor()
x = encoder(x)
y = seg_encoder(y)
y = TimeDistributed(SGcn(512))(y)
outs = Multiply()([x, y])
outs = ConvLSTM2D(filters=256, kernel_size=3, padding='same', stateful=stateful)(outs)
outs = Conv2D(filters=128, kernel_size=3, strides=1, padding='same')(outs)
outs = BatchNormalization()(outs)
outs = ReLU()(outs)
outs = UpSampling2D(4, interpolation='bilinear')(outs)
outs = Conv2D(filters=1, kernel_size=1, strides=1, padding='same', activation='sigmoid')(outs)
outs = UpSampling2D(2, interpolation='bilinear')(outs)
# return outs, outs, outs
return outs, outs
if __name__ == '__main__':
import tensorflow as tf
config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # allocate GPU memory on demand instead of reserving it all
    session = tf.Session(config=config)  # set up the session
batch_size, input_t, input_shape = [4, 5, (256, 192)]
x = Input(batch_shape=(32, 5, 256, 192, 3))
y = Input(batch_shape=(32, 5, 256, 192, 3))
m = Model(inputs=[x, y], outputs=my_net(x, y, ))
print("Compiling MyNet")
m.summary()
| 7,952 |
boundingbox/boundingbox.py
|
nickhalmagyi/boundingbox
| 1 |
2022867
|
import numpy as np
from math import degrees
from haversine import haversine
from importlib import reload
import time
import boundingbox.validations; reload(boundingbox.validations)
from boundingbox.validations.numbers import validate_positive_number
from boundingbox.validations.coordinates import validate_latlon_degrees, validate_latlons_degrees, validate_units
import boundingbox.coordinates; reload(boundingbox.coordinates)
from boundingbox.coordinates import convert_latlon_degrees_to_radians, mod_longitude_radians
from boundingbox.settings import EARTH_RADIUS, NORTH, SOUTH, EAST, WEST, KM, MILES, FRONT, REVERSE
class BoundingBox:
def __init__(self, source, length, units=KM):
self.source_degrees = source
self.length = length
self.units = units
self.earth_radius = EARTH_RADIUS[units]
self.source_radians = convert_latlon_degrees_to_radians(self.source_degrees)
self.bbox = self.make_bounding_box(self.source_radians, self.length)
@property
def source_degrees(self):
return self.__source_degrees
@source_degrees.setter
def source_degrees(self, source_degrees):
validate_latlon_degrees(source_degrees)
self.__source_degrees = source_degrees
@property
def length(self):
return self.__length
@length.setter
def length(self, length):
validate_positive_number(length)
self.__length = length
@property
def units(self):
return self.__units
@units.setter
def units(self, units):
validate_units(units)
self.__units = units
def make_max_latitude_diff(self, length):
return length / self.earth_radius
def make_max_longitude_diff(self, source_radians, length):
"""
:param source_radians: lat-lon pair in radians
:param length: positive number
:return: the maximum longitude difference between the source and a circle of radius=length around it.
"""
d = length / self.earth_radius
max_longitude_arg = np.cos(source_radians[0]) ** (-1) * \
(np.cos(d) ** 2 - np.sin(source_radians[0]) ** 2) ** (1 / 2)
return np.abs(np.arccos(max_longitude_arg))
def make_bounding_box(self, source_radians, length):
"""
:param source_radians: lat-lon pair in radians
:param length: positive number
:return: dict with keys = [FRONT, REVERSE],
values are dicts with keys = [north, south, east, west]
and values in degrees
"""
bbox = {}
bbox_front = {}
bbox_reverse = {}
max_latitude_diff = self.make_max_latitude_diff(length)
# the bounding box does not reach either pole
if (np.abs(source_radians[0]) + max_latitude_diff) <= np.pi / 2:
max_longitude_diff = self.make_max_longitude_diff(source_radians, length)
bbox_front[NORTH] = source_radians[0] + max_latitude_diff
bbox_front[SOUTH] = source_radians[0] - max_latitude_diff
bbox_front[EAST] = mod_longitude_radians(source_radians[1] + max_longitude_diff)
bbox_front[WEST] = mod_longitude_radians(source_radians[1] - max_longitude_diff)
# the bounding box surpasses at least one pole
else:
bbox_front[EAST] = np.pi / 2
bbox_front[WEST] = -np.pi / 2
bbox_reverse[EAST] = -np.pi / 2
bbox_reverse[WEST] = np.pi / 2
# the bounding box surpasses both north and south poles
if (source_radians[0] + max_latitude_diff > np.pi / 2 and \
source_radians[0] - max_latitude_diff < -np.pi / 2):
bbox_front[NORTH] = np.pi / 2
bbox_front[SOUTH] = -np.pi / 2
# the bounding box surpasses the north pole
elif source_radians[0] + max_latitude_diff > np.pi / 2:
bbox_front[NORTH] = np.pi / 2
bbox_front[SOUTH] = source_radians[0] - max_latitude_diff
bbox_reverse[NORTH] = np.pi / 2
# bbox_reverse[SOUTH] is the point at which the circle intersects
# lon = source[1] +/- pi/2 .
bbox_reverse[SOUTH] = np.arcsin(np.cos(length / self.earth_radius) / np.sin(source_radians[0]))
# the bounding box surpasses the south pole
elif source_radians[0] - max_latitude_diff < -np.pi / 2:
bbox_front[NORTH] = source_radians[0] + max_latitude_diff
bbox_front[SOUTH] = -np.pi / 2
# bbox_reverse[NORTH] is the point at which the circle intersects
# lon = source[1] +/- pi/2.
bbox_reverse[NORTH] = np.arcsin(np.cos(length / self.earth_radius) / np.sin(source_radians[0]))
bbox_reverse[SOUTH] = -np.pi / 2
bbox_reverse = {k: degrees(v) for k, v in bbox_reverse.items()}
bbox[REVERSE] = bbox_reverse
# convert both bbox to degrees
bbox_front = {k: degrees(v) for k, v in bbox_front.items()}
bbox[FRONT] = bbox_front
return bbox
def target_in_bounding_box_front(self, target, bbox):
return (target[0] >= bbox[SOUTH]) & (target[0] <= bbox[NORTH]) & (target[1] >= bbox[WEST]) & (target[1] <= bbox[EAST])
def target_in_bounding_box_reverse(self, target, bbox):
return 1 == (target[0] >= bbox[SOUTH]) & (target[0] <= bbox[NORTH]) & ~((target[1] >= bbox[EAST]) & (target[1] <= bbox[WEST]))
def filter_targets_in_bbox(self, targets, bbox):
"""
:param targets: An iterable of lat-lon pairs.
:param bbox: dict with keys = [north, south, east, west]
:return: An iterable of lat-lon pairs where each pair is inside bbox
"""
if bbox[WEST] <= bbox[EAST]:
target_in_bounding_box = self.target_in_bounding_box_front
else:
target_in_bounding_box = self.target_in_bounding_box_reverse
return targets[target_in_bounding_box(np.transpose(targets), bbox)]
def filter_targets_in_bboxs(self, targets, bboxs):
"""
:param bboxs:
:param targets: An iterable of lat-lon pairs.
:return: An iterable of lat-lon pairs where each pair is inside at least one of the bbox in bboxs
"""
targets_filtered = []
for k,v in bboxs.items():
targets_filtered += list(self.filter_targets_in_bbox(targets, bboxs[k]))
return targets_filtered
def compute_distances_from_source(self, source_degrees, targets):
"""
:param source_degrees: lat-lon pair in degrees
:param targets: An iterable of lat-lon pairs.
:return: np array where each element is of the form [(lat, lon), distance]
"""
if len(targets) == 0:
return []
targets_distance = np.array([[target, haversine(source_degrees, target)] for target in targets])
# sort by haversine distance
targets_dist_sorted = targets_distance[targets_distance[:,1].argsort()]
return targets_dist_sorted
def get_points_within_bbox(self, targets, bbox):
"""
:param bbox:
:param targets: An iterable of lat-lon pairs.
:return: np array where each element is of the form [(lat, lon), distance] and is inside bbox
"""
targets_filtered = self.filter_targets_in_bbox(targets, bbox)
targets_dist = self.compute_distances_from_source(self.source_degrees, targets_filtered)
return targets_dist
def get_points_within_bboxs(self, targets, bboxs):
"""
:param bboxs: dict where values are dicts with keys = [north, south, east, west]
:param targets: An iterable of lat-lon pairs.
:return: all locations in targets which are inside ANY of the bbox in bboxs
"""
targets_dist = []
for k,v in bboxs.items():
targets_dist += list(self.get_points_within_bbox(targets, bboxs[k]))
return np.array(targets_dist)
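if __name__ == '__main__':
    # Usage sketch with illustrative coordinates: targets within ~50 km of a
    # source point; bb.bbox holds the FRONT (and, near a pole, REVERSE) boxes.
    bb = BoundingBox(source=(48.8566, 2.3522), length=50)
    targets = np.array([(48.9, 2.4), (40.4, -3.7)])
    print(bb.get_points_within_bboxs(targets, bb.bbox))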
| 8,136 |
projects/tic-tac-toe.py
|
SleepNoMore/Python-practice
| 0 |
2024160
|
'''
Old School TicTacToe Game
Use the numpad to play
'''
import random
def display_board(board):
print('\n'*100)
print(board[7], "|", board[8], "|", board[9])
print(board[4], "|", board[5], "|", board[6])
print(board[1], "|", board[2], "|", board[3])
def player_input():
marker = ""
while marker != "X" and marker != "O":
marker = input("Player 1 choose X or O: ").upper()
if marker == "X":
return ("X", "O")
else:
return ("O", "X")
def place_marker(board, marker, position):
board[position] = marker
def win_check(board, mark):
return (
(board[7] == mark and board[8] == mark and board[9] == mark) or
(board[4] == mark and board[5] == mark and board[6] == mark) or
(board[1] == mark and board[2] == mark and board[3] == mark) or
(board[7] == mark and board[4] == mark and board[1] == mark) or
(board[8] == mark and board[5] == mark and board[2] == mark) or
(board[9] == mark and board[6] == mark and board[3] == mark) or
(board[7] == mark and board[5] == mark and board[3] == mark) or
(board[9] == mark and board[5] == mark and board[1] == mark)
)
def choose_first():
if random.randint(0, 1) == 0:
return "Player 1"
else:
return "Player 2"
def space_check(board, position):
return board[position] == " "
def full_board_check(board):
    # the board is full only if no position is free
    for i in range(1, 10):
        if space_check(board, i):
            return False
    return True
def player_choice(board):
position = 0
posList = [1, 2, 3, 4, 5, 6, 7, 8, 9]
while position not in posList or not space_check(board, position):
position = int(input("Choose your next position (1-9): "))
return position
def replay():
return input("Do you want to play again?(y or n): ").lower() == "y"
print("Welcome to TicTacToe!!!")
while True:
the_board = [" "]*10
player1_marker, player2_marker = player_input()
turn = choose_first()
print(turn + " will go first.")
play_game = input("Are you ready?(y or n)").lower()
if play_game == "y":
game_on = True
else:
game_on = False
while game_on:
if turn == "Player 1":
display_board(the_board)
position = player_choice(the_board)
place_marker(the_board, player1_marker, position)
if win_check(the_board, player1_marker):
display_board(the_board)
print("Player 1 have won the game!")
game_on = False
else:
if full_board_check(the_board):
display_board(the_board)
print("Game is a draw!!")
break
else:
turn = "Player 2"
else:
display_board(the_board)
position = player_choice(the_board)
place_marker(the_board, player2_marker, position)
if win_check(the_board, player2_marker):
display_board(the_board)
print("Player 2 have won the game!")
game_on = False
else:
if full_board_check(the_board):
display_board(the_board)
print("Game is a draw!!")
break
else:
turn = "Player 1"
if not replay():
break
| 3,444 |
reports/brokers/tests/test_report.py
|
ITVaan/reports.brokers.bridge
| 0 |
2024214
|
from hashlib import sha256
from os import path
from pyramid import testing
from reports.brokers.api.views.report import ReportView
from reports.brokers.tests.base_db_test import BaseDbTestCase
from reports.brokers.tests.utils import test_config
class TestReport(BaseDbTestCase):
def test_init(self):
request = testing.DummyRequest()
setattr(request.registry, 'settings', test_config)
rv = ReportView(request)
self.assertEqual(rv.request, request)
def test_generate(self):
request = testing.DummyRequest()
setattr(request.registry, 'settings', test_config)
cursor = self.conn.cursor(buffered=True)
password = <PASSWORD>("<PASSWORD>").<PASSWORD>()
cursor.execute("""INSERT INTO `users` (`user_name`, `password`, `blocked`) VALUES ("test", "{}", 0);""".format(
password))
cursor.close()
self.conn.commit()
data = {
'start_report_period': '01.05.2017',
'end_report_period': '01.06.2017',
'report_number': '1',
'user_name': 'test',
'password': '<PASSWORD>',
'config': test_config}
setattr(request, 'GET', data)
rv = ReportView(request)
self.assertEqual(rv.generate().status_int, 200)
def test_file_exist(self):
request = testing.DummyRequest()
setattr(request.registry, 'settings', test_config)
result_dir = test_config.get('result_dir')
date = '2017-05-02-00-00-00'
start_date = '2017-05-01-00-00-00'
end_date = '2017-06-01-00-00-00'
result_file = path.join(result_dir,
'{}_start_date_{}_end_date_{}_report_number_1.xlsx'.format(date, start_date, end_date))
open(result_file, 'a').close()
data = {
'start_report_period': '01.05.2017',
'end_report_period': '01.06.2017',
'report_number': '1',
'user_name': 'test',
'password': '<PASSWORD>',
'config': test_config}
setattr(request, 'GET', data)
rv = ReportView(request)
self.assertEqual(rv.generate().status_int, 200)
| 2,183 |
dbaas/workflow/steps/mysql/flipperfox_migration/create_foxha_mysql_users.py
|
jaeko44/python_dbaas
| 0 |
2024992
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.mysql.util import get_client_for_infra
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0013
LOG = logging.getLogger(__name__)
class CreateFoxHAMySQLUser(BaseStep):
def __unicode__(self):
return "Creating FOXHA MySQL user..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
client = get_client_for_infra(databaseinfra=databaseinfra)
client.query("GRANT ALL PRIVILEGES ON *.* TO 'foxha'@'%' IDENTIFIED BY PASSWORD <PASSWORD>' WITH GRANT OPTION")
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0013)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0013)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
| 1,248 |
backend/create_funding.py
|
holwech/Spacebanks
| 0 |
2024963
|
import user
import sqlite3
'''
Check the database for whether the user exists and create a new user if not.
funding_permission needs to be chosen here.
'''
def create_funding(status, time_ack, time_expired, funding_type, userId):
conn = sqlite3.connect('SQLite_data/funding_storage.db')
c = conn.cursor()
c.execute('''SELECT * FROM funding WHERE userId=?''', (str(userId),) )
user_funding_count = len(c.fetchall())
c.execute('''INSERT INTO funding(fundingId, userId, status, time_ack, time_exp) VALUES(?,?,?,?,?)''', (str(user_funding_count + 1), str(userId), status, time_ack, time_expired))
conn.commit()
funding_object = user.Funding(status, time_ack, time_expired, funding_type,user_funding_count)
return funding_object
| 717 |
DLSiteSpider.py
|
songdaoyuan/DLSiteSpider
| 3 |
2022827
|
# -*-coding: utf-8 -*-
# OOP rework @ 2020/2/3
# Multi-Thread Download rework @ 2020/2/6
# Fetch doujin voice works released one month ago
# Fetch the top 10 of the sales ranking?
import concurrent
import datetime
import os
import time
from concurrent.futures import ThreadPoolExecutor
import requests
from bs4 import BeautifulSoup
class DLsite():
def __init__(self):
self.session = requests.session()
self.header = {
'Host': 'www.dlsite.com',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:67.0) Gecko/20100101 Firefox/67.0'
}
self.imgHeader = {
'Host': 'img.dlsite.jp',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:73.0) Gecko/20100101 Firefox/73.0',
'Accept': 'image/webp,*/*',
'Referer': 'https://www.dlsite.com/maniax/'
}
        # Before use, enable "allow LAN connections" in the SS/SSR client or turn on a global proxy
self.proxie = {
'https': 'https://127.0.0.1:1078',
'http': 'http://127.0.0.1:1078'
}
self.cookie = {
'DL_SITE_DOMAIN': 'maniax',
'dlsite_dozen': '9',
'uniqid': '0.2wgxnlorlws',
'adultchecked': '1'
}
def GetOneMonthAgoWorks(self):
def toAllowed(name):
p = str(name)
p = p.replace("/", "·").replace(":", ":").replace("*", "·")
p = p.replace("?", "?").replace("\"", "'").replace("<", "《")
p = p.replace(">", "》").replace("|", "·").replace("\\", "·")
return p
def mknewdir(foldername):
if not os.path.exists(foldername):
os.mkdir(foldername)
self.OneMonthAgo = (datetime.datetime.now() - datetime.timedelta(days=31)
).strftime('%Y-%m-%d') # Count one month as 31 days
mknewdir(self.OneMonthAgo)
url = f"https://www.dlsite.com/maniax/new/=/date/{self.OneMonthAgo}/work_type[0]/SOU/work_type[1]"
req = self.session.get(url, headers=self.header, proxies=self.proxie)
html = req.content.decode('utf-8')
soup = BeautifulSoup(html, 'lxml')
        self.ThumbnailsList = []  # links to work cover thumbnails
        self.TitleList = []  # work titles
        self.UrlList = []  # links to work pages
for img in soup.find_all(name='img', attrs={'ref': 'popup_img'}):
self.ThumbnailsList.append('https:' + img['src'])
        self.CoverList = list(map(lambda x: x.replace("resize", "modpub").replace(
            "_240x240.jpg", ".jpg"), self.ThumbnailsList))  # links to full-size cover images
for dt in soup.find_all(name='dt', attrs={'class': 'work_name'}):
for a in dt.find_all('a'):
self.TitleList.append(toAllowed(a.string.strip()))
self.UrlList.append(a.get('href'))
self.MTDownload(self.CoverList, self.TitleList, self.UrlList)
'''
fp = os.path.join(OneMonthAgo,toAllowed(a.string.strip()))
print(fp)
self.mknewdir(fp)
with open(os.path.join(fp, 'index.url'), 'w', encoding='utf-8') as f:
f.write('[InternetShortcut]\nurl=%s' % a.get('href'))
r = requests.get(CoverList[i], headers=self.imgHeader, cookies=self.cookie, proxies=self.proxie)
with open(os.path.join(fp, os.path.basename(CoverList[i])), 'wb') as f:
f.write(r.content)
'''
def MTDownload(self, CList, TList, UList):
        # a thread pool matches the imported ThreadPoolExecutor and the
        # multi-thread rework noted in the header; a process pool would need
        # to pickle the bound method self.download
        with ThreadPoolExecutor(max_workers=8) as exector:
for c, t, u in zip(CList, TList, UList):
exector.submit(self.download, c, t, u)
        '''
        Delete this comment and comment out the code block above to enable single-threaded download:
        for c, t, u in zip(CList, TList, UList):
            self.download(c, t, u)
        '''
def download(self, cover, title, url):
fp = os.path.join(self.OneMonthAgo, title)
imgp = os.path.join(fp, os.path.basename(cover))
urlp = os.path.join(fp, 'index.url')
r = requests.get(cover, headers=self.imgHeader,
cookies=self.cookie, proxies=self.proxie)
if not os.path.exists(fp):
os.mkdir(fp)
with open(imgp, 'wb') as f:
f.write(r.content)
with open(urlp, 'w', encoding='utf-8') as f:
f.write(f'[InternetShortcut]\nurl={url}')
if __name__ == '__main__':
t1 = time.perf_counter()
DL = DLsite()
DL.GetOneMonthAgoWorks()
t2 = time.perf_counter()
print(t2-t1)
| 4,421 |
catamount/ops/stack_ops.py
|
jthestness/catamount
| 13 |
2024790
|
from .base_op import Op
from catamount.tensors.tensor import Tensor
class TensorStack:
''' An object to be shared among Stack ops to push and pop tensor handles
for dynamic execution.
'''
def __init__(self):
self._tensor_stack = []
self._parent = None
self._push = None
self._pop = None
# The tensor reference that will be pushed into the stack
self._reference = None
def __len__(self):
return len(self._tensor_stack)
def isValid(self):
return self._parent is not None and \
self._push is not None and \
self._pop is not None
def associateStackOp(self, stack_op):
self._parent = stack_op
def associatePush(self, push_op):
self._push = push_op
def associatePop(self, pop_op):
self._pop = pop_op
@property
def name(self):
return self._parent.name
def addReferenceTensor(self, tensor):
assert self._reference is None
self._reference = tensor
def push(self, tensor):
assert isinstance(tensor, Tensor)
self._tensor_stack.insert(0, tensor)
def peek(self):
if len(self._tensor_stack) == 0:
return None
else:
return self._tensor_stack[0]
class BaseStackOp(Op):
def __init__(self, name):
super(BaseStackOp, self).__init__(name)
# The stack reference to use for pushing and popping
self._stack = None
def debugString(self):
to_return = super(BaseStackOp, self).debugString()
to_return += '\n Stack: {}'.format(self._stack.name)
return to_return
def setStack(self, stack):
self.debugAssert(self._stack is None)
self._stack = stack
def getStack(self):
return self._stack
def calcAlgFlops(self):
# Stack operations have no Flops
return 0
def calcAlgBytes(self):
# Stack operations do not perform algorithmic activity,
# so accessed memory is not algorithmic
return 0
def calcAlgFootprint(self):
# Stack operations do not perform algorithmic activity,
# so accessed memory is not algorithmic
return 0
class StackOp(BaseStackOp):
def __init__(self, name):
super(StackOp, self).__init__(name)
self._stack = TensorStack()
self._stack.associateStackOp(self)
def isValid(self):
return self._stack.isValid() and super(StackOp, self).isValid()
def propagateShapes(self, make_symbolic=False):
# Zero or one inputs. First input is the maximum depth of the stack
self.debugAssert(len(self._inputs) <= 1)
self.debugAssert(len(self._outputs) == 1)
# The output is a resource handle of shape [Dimension(2)]
self.debugAssert(self._outputs[0].shape.rank == 1 and
self._outputs[0].shape.numElements() == 2)
class StackPopOp(BaseStackOp):
def __init__(self, name):
super(StackPopOp, self).__init__(name)
@property
def inputs(self):
tensor_inputs = list(super(StackPopOp, self).inputs)
if self._stack is not None:
tensor_inputs.append(self._stack._reference)
return tensor_inputs
def setStack(self, stack):
super(StackPopOp, self).setStack(stack)
self._stack.associatePop(self)
def canVisit(self, visited_ops):
self.debugAssert(self._stack._reference is not None)
stack_push_op = self._stack._reference.producer
self.debugAssert(stack_push_op == self._stack._push)
self.debugAssert(isinstance(stack_push_op, StackPushOp))
if stack_push_op not in visited_ops:
return False
return super(StackPopOp, self).canVisit(visited_ops)
def propagateShapes(self, make_symbolic=False):
self.debugAssert(len(self._inputs) == 1)
self.debugAssert(len(self._outputs) == 1)
self.debugAssert(self._stack._reference is not None)
in_tensor = self._stack._reference
in_shape = in_tensor.shape
self._outputs[0].mergeShape(in_shape, make_symbolic=make_symbolic)
if in_tensor.value is not None:
self._outputs[0].setValue(in_tensor.value)
class StackPushOp(BaseStackOp):
def __init__(self, name):
super(StackPushOp, self).__init__(name)
def setStack(self, stack):
super(StackPushOp, self).setStack(stack)
self._stack.associatePush(self)
self._stack.addReferenceTensor(self.outputs[0])
def propagateShapes(self, make_symbolic=False):
self.debugAssert(len(self._inputs) == 2)
self.debugAssert(len(self._outputs) == 1)
in_shape = self._inputs[1].shape
self._outputs[0].mergeShape(in_shape, make_symbolic=make_symbolic)
if self._inputs[1].value is not None:
self._outputs[0].setValue(self._inputs[1].value)
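# Wiring sketch: one TensorStack is shared by the trio of ops; the push op
# must have its output tensor attached before setStack(), since that output
# becomes the stack's reference tensor.
#   stack_op = StackOp('stack')
#   push_op = StackPushOp('stack/push'); push_op.setStack(stack_op.getStack())
#   pop_op = StackPopOp('stack/pop'); pop_op.setStack(stack_op.getStack())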
| 4,918 |
lib/yahoo/sports/fantasy/constants/yahoo.py
|
goztrk/django-htk
| 206 |
2024553
|
# Game IDs
# https://developer.yahoo.com/fantasysports/guide/game-resource.html
YAHOO_SPORTS_FANTASY_GAME_CODES = [
'nfl',
'pnfl',
'mlb',
'pmlb',
'nba',
'nhl',
]
YAHOO_SPORTS_FANTASY_GAME_IDS = {
'nfl' : {
'2001' : 57,
'2002' : 49,
'2003' : 79,
'2004' : 101,
'2005' : 124,
'2006' : 153,
'2007' : 175,
'2008' : 199,
'2009' : 222,
'2010' : 242,
'2011' : 257,
'2012' : 273,
'2013' : 314,
'2014' : 331,
},
'pnfl' : {
'2001' : 58,
'2002' : 62,
'2003' : 78,
'2004' : 102,
'2005' : 125,
'2006' : 154,
'2007' : 176,
'2008' : 200,
'2009' : 223,
},
'mlb' : {
'2001' : 12,
'2002' : 39,
'2003' : 74,
'2004' : 98,
'2005' : 113,
'2006' : 147,
'2007' : 171,
'2008' : 195,
'2009' : 215,
'2010' : 238,
'2011' : 253,
'2012' : 268,
'2013' : 308,
'2014' : 328,
},
'pmlb' : {
'2002' : 44,
'2003' : 73,
'2004' : 99,
'2005' : 114,
'2006' : 148,
'2007' : 172,
'2008' : 196,
'2009' : 216,
},
'nba' : {
'2001' : 16,
'2002' : 67,
'2003' : 95,
'2004' : 112,
'2005' : 131,
'2006' : 165,
'2007' : 187,
'2008' : 211,
'2009' : 234,
'2010' : 249,
'2011' : 265,
'2012' : 304,
'2013' : 322,
'2014' : 342,
},
'nhl' : {
'2001' : 15,
'2002' : 64,
'2003' : 94,
'2004' : 111,
'2005' : 130,
'2006' : 164,
'2007' : 186,
'2008' : 210,
'2009' : 233,
'2010' : 248,
'2011' : 263,
'2012' : 303,
'2013' : 321,
'2014' : 341,
},
}
| 1,939 |
setup.py
|
BoredPlayer/matgrapher
| 0 |
2023906
|
from setuptools import setup
setup(
name='matgrapher',
version = '0.2.2',
    description = 'An easy-to-use Python class that helps with building matplotlib graphs.',
url = 'https://github.com/BoredPlayer/matgrapher',
author = 'BoredPlayer',
author_email = '<EMAIL>',
license = 'MIT License',
packages = ['matgrapher'],
install_requires = ['numpy', 'matplotlib'],
classifiers=[
        'Development Status :: 1 - Planning',
'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3'
]
)
| 618 |
publishable/utils.py
|
Ilyes-Hammadi/django-publishable
| 5 |
2024958
|
from django.db import models
from .constants import TYPES
def clone_model(original_model):
"""
    Create a copy of a Django model
:param original_model: models.Model
:return: models.Model
"""
    # Create a local copy of the model to avoid reference errors
model = original_model.__class__.objects._all().get(pk=original_model.id)
    # This block of code forces Django to clone the model and its simple fields
    # (CharField, TextField, etc.)
destination_model = model
destination_model.pk = None
destination_model.type = TYPES.PUBLISHED
destination_model.save(broadcast_draft=False)
    # Search for ManyToManyFields and copy their content into the model
all_model_fields = original_model._meta.get_fields()
for field in all_model_fields:
if isinstance(field, models.ManyToManyField):
# Select the through model
through = field.remote_field.through
# Get the name of the field
for f in through._meta.get_fields():
if isinstance(f, models.ForeignKey):
m = f.rel.to
if isinstance(original_model, m):
parent_model_field_name = f.attname
                        formatted_parent_model_field_name = '_'.join(parent_model_field_name.split('_')[:2])
filter_params = {
parent_model_field_name: original_model.pk,
}
all_link_models = through.objects.filter(**filter_params)
for link_model in all_link_models:
link_model.pk = None
                setattr(link_model, formatted_parent_model_field_name, destination_model)
try:
link_model.save(broadcast_draft=False)
                except TypeError:  # the through model's save() may not accept broadcast_draft
link_model.save()
destination_model.save(broadcast_draft=False)
return destination_model
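# A minimal usage sketch (Article is a hypothetical model with a `type` field
# and a save(broadcast_draft=...) override; it is not part of this module):
#   draft = Article.objects.get(pk=some_pk)
#   published = clone_model(draft)
#   assert published.type == TYPES.PUBLISHED and published.pk != draft.pk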
| 1,895 |
tests/test_easy_problems/test_1_to_50/test_factorial_trailing_zeroes.py
|
ivan1016017/LeetCodeAlgorithmProblems
| 0 |
2023655
|
import unittest
from src.my_project.easy_problems.from1to50.factorial_trailing_zeroes import Solution
class FactorialTrailingZeroesTestCase(unittest.TestCase):
def test_one_zero(self):
solution = Solution()
self.assertEqual(solution.trailingZeroes(5),1)
def test_two_zero(self):
solution = Solution()
self.assertEqual(solution.trailingZeroes(10),2)
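# The Solution class under test lives in src.my_project.easy_problems and is not
# shown here; a minimal sketch consistent with these tests counts the multiples
# of 5 that contribute trailing zeroes to n!:
#   class Solution:
#       def trailingZeroes(self, n: int) -> int:
#           count = 0
#           while n:
#               n //= 5
#               count += n
#           return count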
| 431 |
poptimizer/config.py
|
poliyev/poptimizer
| 0 |
2024155
|
"""Основные настраиваемые параметры."""
import logging
import pathlib
from typing import Union, cast
import pandas as pd
import torch
import yaml
from poptimizer.shared.log import get_handlers
class POptimizerError(Exception):
"""Базовое исключение."""
# Устройство на котором будет производиться обучение
DEVICE = torch.device("cpu") # torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Number of columns shown in printouts without wrapping across pages
pd.set_option("display.max_columns", 20)
pd.set_option("display.max_rows", None)
pd.set_option("display.width", None)
_root = pathlib.Path(__file__).parents[1]
# Path to the reports directory
REPORTS_PATH = _root / "reports"
# Path to the portfolios directory
PORT_PATH = _root / "portfolio"
# Path to the logs directory
LOG_PATH = _root / "logs"
# Logger configuration
logging.basicConfig(level=logging.INFO, handlers=get_handlers(LOG_PATH))
def _load_config() -> dict[str, Union[int, float, str]]:
cfg = {}
path = _root / "config" / "config.yaml"
if path.exists():
with path.open() as file:
cfg = yaml.safe_load(file)
logging.getLogger("Config").info(f"{cfg}")
return cfg
_cfg = _load_config()
# Number of trading days in a month and in a year
MONTH_IN_TRADING_DAYS = 21
YEAR_IN_TRADING_DAYS = 12 * MONTH_IN_TRADING_DAYS
# Load configuration values
FORECAST_DAYS = cast(int, _cfg.get("FORECAST_DAYS", 21))
HISTORY_DAYS_MIN = cast(int, _cfg.get("HISTORY_DAYS_MIN", 63))
P_VALUE = cast(float, _cfg.get("P_VALUE", 0.05))
COSTS = cast(float, _cfg.get("COSTS", 0.025)) / 100
TRADING_INTERVAL = cast(int, _cfg.get("TRADING_INTERVAL", 1))
START_EVOLVE_HOUR = cast(int, _cfg.get("START_EVOLVE_HOUR", 1))
STOP_EVOLVE_HOUR = cast(int, _cfg.get("STOP_EVOLVE_HOUR", 1))
OPTIMIZER = cast(str, _cfg.get("OPTIMIZER", "resample"))
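# Example config/config.yaml overriding the defaults above (all keys are
# optional; the values shown are illustrative):
#   FORECAST_DAYS: 42
#   P_VALUE: 0.01
#   OPTIMIZER: resample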
| 1,836 |
libs/player.py
|
siw0615/slack-poker-bot
| 5 |
2023985
|
from enum import Enum
class PlayerStatus(Enum):
PLAYING = 0
FOLD = 1
ALLIN = 2
class PlayerMode(Enum):
ENTERING = 0
NORMAL = 1
LEAVING = 2
class Player:
def __init__(self, userid: str, username: str, chip: int):
self.userid = userid
self.username = username
self.chip = chip
self.mode = PlayerMode.ENTERING
self.init()
def init(self):
self.chip_bet = 0
self.cards = [0] * 2
self.status = PlayerStatus.PLAYING
self.timeout_count = 0
self.rank = None
self.hand = None
def get_remaining_chip(self) -> int:
return self.chip - self.chip_bet
def is_playing(self) -> bool:
return self.status == PlayerStatus.PLAYING
def is_fold(self) -> bool:
return self.status == PlayerStatus.FOLD
def is_allin(self) -> bool:
return self.status == PlayerStatus.ALLIN
def set_playing(self) -> None:
self.status = PlayerStatus.PLAYING
def set_fold(self) -> None:
self.status = PlayerStatus.FOLD
def set_allin(self) -> None:
self.status = PlayerStatus.ALLIN
def set_rank_and_hand(self, rank, hand):
self.rank = rank
self.hand = hand
def is_leaving(self) -> bool:
return self.mode == PlayerMode.LEAVING
def is_normal(self) -> bool:
return self.mode == PlayerMode.NORMAL
def set_entering(self) -> None:
self.mode = PlayerMode.ENTERING
def set_normal(self) -> None:
self.mode = PlayerMode.NORMAL
def set_leaving(self) -> None:
self.mode = PlayerMode.LEAVING
| 1,634 |
fudge/processing/resonances/setup.py
|
brown170/fudge
| 14 |
2024800
|
# <<BEGIN-copyright>>
# Copyright 2021, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
# <<END-copyright>>
import os
import numpy
from distutils.core import setup, Extension
# find numpy include path:
numpyPath = os.path.split( numpy.__file__ )[0]
numpyPath = os.path.join( numpyPath, 'core/include/numpy' )
getBreitWignerSums = Extension( '_getBreitWignerSums',
sources = ['getBreitWignerSums.c'],
include_dirs = [ './', numpyPath ] )
getScatteringMatrices = Extension( '_getScatteringMatrices',
sources = ['getScatteringMatrices.c'],
include_dirs = [ './', numpyPath ] )
getCoulombWavefunctions = Extension( '_getCoulombWavefunctions',
sources = ['getCoulombWavefunctions.c','coulfg2.c'],
include_dirs = [ './', numpyPath ] )
setup(name='extensions',
version='1.0',
description = 'Extensions (written in c) for better performance in reconstructing resonances',
ext_modules=[ getBreitWignerSums, getScatteringMatrices, getCoulombWavefunctions ] )
| 1,114 |
unit_tests/utilities/test_zaza_utilities_bundle.py
|
thedac/zaza-openstack-tests
| 5 |
2024708
|
# Copyright 2018 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# import mock
import unit_tests.utils as ut_utils
import zaza.openstack.utilities.bundle as bundle
import yaml
TEST_BUNDLE_WITH_PLACEMENT = """
machines:
'0':
series: bionic
'1':
series: bionic
'2':
series: bionic
relations:
- - ceph-osd:mon
- ceph-mon:osd
series: bionic
services:
ceph-mon:
annotations:
gui-x: '750'
gui-y: '500'
charm: cs:ceph-mon-26
num_units: 3
options:
expected-osd-count: 3
source: cloud:bionic-rocky
to:
- lxd:0
- lxd:1
- lxd:2
ceph-osd:
annotations:
gui-x: '1000'
gui-y: '500'
charm: cs:ceph-osd-269
num_units: 3
options:
osd-devices: /dev/sdb
source: cloud:bionic-rocky
to:
- '0'
- '1'
- '2'
"""
TEST_BUNDLE_WITHOUT_PLACEMENT = """
relations:
- - ceph-osd:mon
- ceph-mon:osd
series: bionic
services:
ceph-mon:
annotations:
gui-x: '750'
gui-y: '500'
charm: cs:ceph-mon-26
num_units: 3
options:
expected-osd-count: 3
source: cloud:bionic-rocky
ceph-osd:
annotations:
gui-x: '1000'
gui-y: '500'
charm: cs:ceph-osd-269
num_units: 3
options:
osd-devices: /dev/sdb
source: cloud:bionic-rocky
"""
class TestUtilitiesBundle(ut_utils.BaseTestCase):
def test_flatten_bundle(self):
self.maxDiff = 1500
input_yaml = yaml.safe_load(TEST_BUNDLE_WITH_PLACEMENT)
flattened = bundle.remove_machine_specification(input_yaml)
expected = yaml.safe_load(TEST_BUNDLE_WITHOUT_PLACEMENT)
self.assertEqual(expected, flattened)
def test_add_series(self):
self.maxDiff = 1500
input_yaml = yaml.safe_load(TEST_BUNDLE_WITH_PLACEMENT)
input_yaml.pop('series', None)
flattened = bundle.remove_machine_specification(input_yaml)
expected = yaml.safe_load(TEST_BUNDLE_WITHOUT_PLACEMENT)
self.assertEqual(expected, flattened)
def test_parser(self):
args = bundle.parse_args([
'-i', 'bundle.yaml'])
self.assertEqual(args.input, 'bundle.yaml')
self.assertEqual(args.output, '/dev/stdout')
def test_parser_output(self):
args = bundle.parse_args([
'-i', 'bundle.yaml',
'-o', 'bundle_out.yaml'])
self.assertEqual(args.input, 'bundle.yaml')
self.assertEqual(args.output, 'bundle_out.yaml')
| 2,977 |
python_aulas/desaf086_lista_matriz.py
|
gilsonaureliano/Python-aulas
| 1 |
2024458
|
matriz = []
m = []
for c in range(0, 3):
    for t in range(0, 3):
m.append(int(input(f'Digite um valor para ({c},{t})= ')))
matriz.append(m[:])
m.clear()
# matriz holds 3 rows of 3 values each, so print one row per line
# (indexing rows 3..8 as before would raise an IndexError)
for c in range(0, 3):
    print(f'{matriz[c]}')
| 368 |
python/testData/inspections/ChangeSignatureKeywordAndPositionalParameters.py
|
Sajadrahimi/intellij-community
| 0 |
2025166
|
def f(x, foo=1):
pass
f<warning descr="Unexpected argument(s)"><caret>(x, 42, <warning descr="Unexpected argument">bar='spam'</warning>)</warning>
| 152 |
interface/read.py
|
karttur/geoimagine02-grass
| 0 |
2022647
|
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 2 18:30:34 2013
@author: pietro
"""
from __future__ import (nested_scopes, generators, division, absolute_import,
with_statement, print_function, unicode_literals)
def do_nothing(p):
return p
def get_None(p):
return None
def get_dict(p):
return dict(p.items())
def get_values(p):
return [e.text.strip() for e in p.findall('value/name')]
def read_text(p):
return p.text.strip()
def read_keydesc(par):
name = par.text.strip()
items = [e.text.strip() for e in par.findall('item')]
return name, tuple(items) if len(items) > 1 else None
GETFROMTAG = {
'description': read_text,
'keydesc': read_keydesc,
'gisprompt': get_dict,
'default': read_text,
'values': get_values,
'value': get_None,
'guisection': read_text,
'label': read_text,
'suppress_required': get_None,
'keywords': read_text,
'guidependency': read_text,
'rules': get_None,
}
GETTYPE = {
'string': str,
'integer': int,
'float': float,
'double': float,
'file': str,
'all': do_nothing,
}
def element2dict(xparameter):
diz = dict(xparameter.items())
for p in xparameter:
if p.tag in GETFROMTAG:
diz[p.tag] = GETFROMTAG[p.tag](p)
else:
print('New tag: %s, ignored' % p.tag)
return diz
# dictionary used to create docstring for the objects
DOC = {
#------------------------------------------------------------
# head
'head': """{cmd_name}({cmd_params})
Parameters
----------
""",
#------------------------------------------------------------
# param
'param': """{name}: {default}{required}{multi}{ptype}
{description}{values}{keydescvalues}""",
#------------------------------------------------------------
# flag_head
'flag_head': """
Flags
------
""",
#------------------------------------------------------------
# flag
'flag': """{name}: {default}, {supress}
{description}""",
#------------------------------------------------------------
# foot
'foot': """
Special Parameters
------------------
The Module class has some optional parameters, which are distinguished by a
final underscore.
run_: True, optional
If True execute the module.
finish_: True, optional
If True wait until the end of the module execution, and store the module
outputs into stdout, stderr attributes of the class.
stdin_: PIPE, optional
Set the standard input.
env_: dictionary, optional
Set the environment variables.
"""}
| 2,592 |
3.py
|
SahilAgarwal321/Project_Euler
| 0 |
2024949
|
'''
What is the largest prime factor of the number 600851475143 ?
'''
def prime_factors(n):
"""Returns all the prime factors of a positive integer"""
factors = []
d = 2
while n > 1:
while n % d == 0:
factors.append(d)
            n //= d  # integer division keeps the factors as ints
d = d + 1
if d*d > n:
if n > 1: factors.append(n)
break
return factors
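# Worked example from the Project Euler problem statement:
# the prime factors of 13195 are 5, 7, 13 and 29.
assert prime_factors(13195) == [5, 7, 13, 29]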
print(max(prime_factors(600851475143)))
| 428 |
tests/test_biolink_model.py
|
hsolbrig/ShExJSG
| 1 |
2022795
|
import os
import unittest
from ShExJSG import ShExC, ShExJ
from tests import input_data_dir, output_data_dir
from pyjsg.jsglib.loader import load
update_output = False
class BioLinkShexCTestCase(unittest.TestCase):
def do_test(self, infile: str) -> None:
outfile = os.path.join(output_data_dir, infile.rsplit('.', 1)[0] + '.shex')
shexj = load(os.path.join(input_data_dir, infile), ShExJ)
shexc = ShExC(shexj)
self.assertIsNotNone(shexc)
shexc_text = str(shexc)
if update_output:
with open(outfile, 'w') as outf:
outf.write(shexc_text)
with open(outfile) as outf:
target_shexc = outf.read()
self.maxDiff = None
self.assertEqual(target_shexc, shexc_text)
self.assertFalse(update_output, "update_output is set to True")
def test_conversion(self):
""" Test the ShExC emitter using the biolink model """
self.maxDiff = None
self.do_test('biolink-model.json')
def test_shortand(self):
self.do_test('shortand.json')
def test_list(self):
self.do_test('list.json')
def test_meta(self):
self.do_test('meta.json')
if __name__ == '__main__':
unittest.main()
| 1,249 |
tests/test_logging.py
|
ministryofjustice/money-to-prisoners-common
| 7 |
2024785
|
import json
import logging
from logging.config import dictConfig
import unittest
messages = []
def get_log_text():
return '\n'.join(messages)
class Handler(logging.Handler):
def __init__(self, level=0):
super().__init__(level)
def emit(self, record):
msg = self.format(record)
messages.append(msg)
class ELKLoggingTestCase(unittest.TestCase):
@classmethod
def setup_logging(cls, formatter='elk'):
logging_conf = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'simple': {
'format': '%(asctime)s [%(levelname)s] %(message)s',
'datefmt': '%Y-%m-%dT%H:%M:%S',
},
'elk': {
'()': 'mtp_common.logging.ELKFormatter'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'tests.test_logging.Handler',
'formatter': formatter,
},
},
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'loggers': {
'mtp': {
'level': 'INFO',
'handlers': ['console'],
'propagate': False,
},
},
}
dictConfig(logging_conf)
messages.clear()
def test_elk_fields_not_formatted_normally(self):
self.setup_logging(formatter='simple')
mtp_logger = logging.getLogger('mtp')
root_logger = logging.getLogger()
for logger in [mtp_logger, root_logger]:
logger.info(
'Test info message',
extra={
'excluded': 321,
'elk_fields': {
'@fields.extra_field': 123
}
}
)
logs = get_log_text()
self.assertIn('Test info message', logs)
self.assertNotIn('elk_fields', logs)
self.assertNotIn('extra_field', logs)
self.assertNotIn('123', logs)
self.assertNotIn('excluded', logs)
def test_elk_formatter_includes_extra_fields(self):
self.setup_logging(formatter='elk')
mtp_logger = logging.getLogger('mtp')
mtp_logger.info(
'Test info message',
extra={
'excluded': 321,
'elk_fields': {
'@fields.extra_field': 123,
'@fields.extra_field2': ['a', 'b'],
}
}
)
logs = get_log_text()
logs = json.loads(logs)
self.assertIsInstance(logs, dict)
for key in ['timestamp_msec', 'message',
'@fields.level', '@fields.logger', '@fields.source_path',
'@fields.extra_field']:
self.assertIn(key, logs)
self.assertEqual(logs['message'], 'Test info message')
self.assertEqual(logs['@fields.level'], 'INFO')
self.assertEqual(logs['@fields.extra_field'], 123)
self.assertEqual(logs['@fields.extra_field2'], ['a', 'b'])
self.assertNotIn('excluded', logs)
def test_elk_formatter_serialises_arguments(self):
class Obj:
def __str__(self):
return '🌍'
self.setup_logging(formatter='elk')
mtp_logger = logging.getLogger('mtp')
mtp_logger.info('This %d object cannot be serialised to JSON: %s', 1, Obj())
logs = get_log_text()
logs = json.loads(logs)
self.assertIsInstance(logs, dict)
self.assertEqual(logs['message'], 'This 1 object cannot be serialised to JSON: 🌍')
self.assertSequenceEqual(logs['variables'], [1, '🌍'])
| 3,842 |
scripts/estimate-transitions.py
|
blester125/constrained-decoding
| 0 |
2025118
|
import os
import json
import argparse
from collections import defaultdict
import numpy as np
from baseline.utils import read_conll
from .utils import download_dataset, estimate_counts, normalize_transitions
def make_vocab(transitions):
vocab = defaultdict(lambda: len(vocab))
for src in transitions:
for tgt in transitions[src]:
vocab[tgt]
vocab[src]
return dict(vocab.items())
def to_dense(transitions):
vocab = make_vocab(transitions)
trans = np.zeros((len(vocab), len(vocab)))
for src, src_idx in vocab.items():
for tgt, tgt_idx in vocab.items():
trans[src_idx, tgt_idx] = transitions[src][tgt]
return vocab, trans
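# Example (assuming `transitions` behaves like a nested mapping of counts where
# missing pairs read as zero): for {'A': {'B': 2.0}, 'B': {'A': 1.0}},
# make_vocab yields {'B': 0, 'A': 1}, and to_dense returns that vocab plus a
# 2x2 matrix with trans[1, 0] == 2.0 and trans[0, 1] == 1.0.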
def main():
parser = argparse.ArgumentParser(description="Estimate transition probabilities from the training data.")
parser.add_argument("dataset")
parser.add_argument("--datasets-index", "--datasets_index", default="configs/datasets.json")
parser.add_argument("--cache", default="data")
parser.add_argument("--surface-index", "--surface_index", default=0, type=int)
parser.add_argument("--entity-index", "--entity_index", default=-1, type=int)
parser.add_argument("--delim")
parser.add_argument("--output")
args = parser.parse_args()
dataset = download_dataset(args.dataset, args.datasets_index, args.cache)
_, transitions = estimate_counts(
list(read_conll(dataset["train_file"], args.delim)), args.surface_index, args.entity_index
)
transitions, _ = normalize_transitions(transitions)
vocab, transitions = to_dense(transitions)
args.output = args.dataset if args.output is None else args.output
os.makedirs(args.output, exist_ok=True)
with open(os.path.join(args.output, "vocab.json"), "w") as wf:
json.dump(vocab, wf, indent=2)
np.save(os.path.join(args.output, "transitions.npy"), transitions)
if __name__ == "__main__":
main()
| 1,935 |
metis_cut.py
|
HolyLow/Balanced_Graph_Partitioning
| 0 |
2023811
|
import numpy as np
import subprocess
import os.path
from snudda.analyse_connection import SnuddaAnalyseConnection
class MetisCut(object):
def __init__(self, in_file, exe_file):
self.in_file = in_file
self.snudda_analyser = SnuddaAnalyseConnection(in_file)
self.exe_file = exe_file
def default_cut(self, npartition, nmachine, enable_multicut, gapJunction_scale):
self.synapse_cut(npartition, nmachine, enable_multicut)
# self.gapJunction_cut(npartition, nmachine, enable_multicut)
self.hybrid_cut(npartition, nmachine, enable_multicut, gapJunction_scale)
def synapse_cut(self, npartition, nmachine, enable_multicut):
cut_penalty_mat = self.snudda_analyser.analyze_cut_penalty(npartition, nmachine)
synapse_con_mat = self.snudda_analyser.create_con_mat("synapses")
synapse_distribution = self.multi_machine_partition(synapse_con_mat, npartition, nmachine, enable_multicut)
self.snudda_analyser.analyse_partition_cut(synapse_con_mat, cut_penalty_mat, synapse_distribution, npartition)
fingerprint = "synapse-metis-partition" + str(synapse_con_mat.shape[0]) + "-" + str(npartition) + "-" + str(nmachine)
if enable_multicut:
fingerprint += "-enableMulticut"
else:
fingerprint += "-disableMulticut"
self.export_partition_to_csv(synapse_distribution, fingerprint)
def gapJunction_cut(self, npartition, nmachine, enable_multicut):
cut_penalty_mat = self.snudda_analyser.analyze_cut_penalty(npartition, nmachine)
gapJunction_con_mat = self.snudda_analyser.create_con_mat("gapJunctions")
gapJunction_distribution = self.multi_machine_partition(gapJunction_con_mat, npartition, nmachine, enable_multicut)
self.snudda_analyser.analyse_partition_cut(gapJunction_con_mat, cut_penalty_mat, gapJunction_distribution, npartition)
fingerprint = "gapJunction-metis-partition" + str(gapJunction_con_mat.shape[0]) + "-" + str(npartition) + "-" + str(nmachine)
if enable_multicut:
fingerprint += "-enableMulticut"
else:
fingerprint += "-disableMulticut"
self.export_partition_to_csv(gapJunction_distribution, fingerprint)
def hybrid_cut(self, npartition, nmachine, enable_multicut, gapJunction_scale):
cut_penalty_mat = self.snudda_analyser.analyze_cut_penalty(npartition, nmachine)
synapse_con_mat = self.snudda_analyser.create_con_mat("synapses")
gapJunction_con_mat = self.snudda_analyser.create_con_mat("gapJunctions")
hybrid_con_mat = np.add(synapse_con_mat, np.multiply(gapJunction_con_mat, gapJunction_scale))
hybrid_distribution = self.multi_machine_partition(hybrid_con_mat, npartition, nmachine, enable_multicut)
self.snudda_analyser.analyse_partition_cut(hybrid_con_mat, cut_penalty_mat, hybrid_distribution, npartition)
fingerprint = "hybrid-metis-partition" + str(hybrid_con_mat.shape[0]) + "-" + str(npartition) + "-" + str(nmachine)
if enable_multicut:
fingerprint += "-enableMulticut"
else:
fingerprint += "-disableMulticut"
self.export_partition_to_csv(hybrid_distribution, fingerprint)
def multi_machine_partition(self, con_mat, multi_machine_partition, nmachine, enable_multicut):
# assert nmachine == 1, "unsupported multi machine partition"
assert multi_machine_partition >= nmachine, "partition number is smaller than machine number"
assert multi_machine_partition % nmachine == 0, "partition number can't be equally fit into machine number"
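        # Two-level scheme: first cut the graph across the machines, then cut
        # each machine's subgraph into npartition // nmachine local parts,
        # offsetting the local partition ids so they stay globally unique.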
if nmachine == 1 or not enable_multicut:
multi_machine_distribution = self.graph_partition(con_mat, multi_machine_partition)
return multi_machine_distribution
multi_machine_distribution = np.zeros(con_mat.shape[0], dtype=int)
across_machine_distribution = self.graph_partition(con_mat, nmachine)
single_machine_partition = multi_machine_partition // nmachine
for machine_id in range(nmachine):
machine_nodes = []
for i in range(across_machine_distribution.shape[0]):
if across_machine_distribution[i] == machine_id:
machine_nodes.append(i)
subgraph_global_distribution = self.subgraph_partition(con_mat, machine_nodes, single_machine_partition, single_machine_partition*machine_id)
multi_machine_distribution = np.add(multi_machine_distribution, subgraph_global_distribution)
return multi_machine_distribution
def subgraph_partition(self, con_mat, subgraph_nodes, npartition, offset):
subgraph_con_mat = np.zeros((len(subgraph_nodes), len(subgraph_nodes)), dtype=int)
for i in range(subgraph_con_mat.shape[0]):
for j in range(subgraph_con_mat.shape[1]):
subgraph_con_mat[i][j] = con_mat[subgraph_nodes[i]][subgraph_nodes[j]]
subgraph_distribution = self.graph_partition(subgraph_con_mat, npartition)
subgraph_global_distribution = np.zeros(con_mat.shape[0], dtype=int)
for i in range(len(subgraph_nodes)):
subgraph_global_distribution[subgraph_nodes[i]] = subgraph_distribution[i] + offset
return subgraph_global_distribution
def graph_partition(self, con_mat, npartition):
graph_file = "metis-graph.txt"
self.export_connection_matrix_to_metis_graph(con_mat, graph_file)
self.run_metis_partition(graph_file, npartition)
distribution = self.load_metis_partition_result(graph_file, npartition)
return distribution
def export_connection_matrix_to_metis_graph(self, con_mat, graph_file):
graph = ""
con_cnt = 0
for i in range(con_mat.shape[0]):
node = ""
for j in range(con_mat.shape[1]):
if i == j:
continue
con = con_mat[i][j] + con_mat[j][i]
if con > 0:
node += str(j+1) + " " + str(con) + " " # metis node numbering starts from 1
con_cnt += 1
graph += node + "\n"
        con_cnt //= 2  # each undirected edge was counted twice above
with open(graph_file, "w") as f:
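            # METIS graph header: "<num vertices> <num edges> <fmt>"; fmt 001
            # declares edge weights, matching the "neighbor weight" pairs above.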
f.write("%d %d 001\n" % (con_mat.shape[0], con_cnt))
f.write(graph)
def run_metis_partition(self, graph_file, npartition):
cmd = self.exe_file + " " + graph_file + " " + str(npartition)
status, output = subprocess.getstatusoutput(cmd)
assert status == 0, "command status is not success"
print(output)
def load_metis_partition_result(self, graph_file, npartition):
distribution_file = graph_file + ".part." + str(npartition)
distribution = np.loadtxt(distribution_file, delimiter=",", dtype="int")
max_id = np.max(distribution)
assert max_id == npartition - 1, "infile nhost mismatched"
return distribution
def export_partition_to_csv(self, distribution, fingerprint):
save_dir = os.path.dirname(self.in_file)
save_csv = save_dir + "/" + fingerprint + ".csv"
np.savetxt(save_csv, distribution, delimiter=",", fmt="%d")
print("write to %s done" % save_csv)
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser(description="Metis cut")
parser.add_argument("inFile", help="Snudda HDF5 file with network")
parser.add_argument("exeFile", help="Metis partition binary executable file")
parser.add_argument("npartition", type=int, help="partition npartition")
parser.add_argument("--gapJunctionScale", "--gapJunctionScale", type=int, default=1000, help="Scale of a gapJunction relative to a synapse")
parser.add_argument("--nmachine", "--nmachine", type=int, default=1, help="Number of machines the partitions would distribute to equally")
parser.add_argument('--multicut', dest='enableMulticut', action='store_true', default=False, help='Enable the multicut algorithm')
args = parser.parse_args()
metis_cut = MetisCut(args.inFile, args.exeFile)
metis_cut.default_cut(args.npartition, args.nmachine, args.enableMulticut, args.gapJunctionScale)
| 8,214 |
api/app/schemas/report_world.py
|
Crowd-Wire/CrowdWire
| 16 |
2025242
|
from pydantic import BaseModel, Field
from typing import Optional
from datetime import datetime
from typing_extensions import Annotated
class ReportWorldBase(BaseModel):
reporter: Optional[int]
reported: Optional[int]
timestamp: Optional[datetime]
comment: Annotated[Optional[str], Field(max_length=300)]
# does not need the id of the reporter because a token is required
class ReportWorldCreate(ReportWorldBase):
comment: str
class ReportWorldUpdate(BaseModel):
reporter: int
reviewed: bool
class ReportWorldInDB(ReportWorldBase):
reported: int
reporter: int
timestamp: datetime
comment: str
reviewed: bool
class Config:
orm_mode = True
class ReportWorldInDBWithEmail(ReportWorldInDB):
reporter_email: str
world_name: str
banned: int
| 823 |
tools/build/container_release.bzl
|
soumyajitsamanta/airy
| 275 |
2025087
|
load("@io_bazel_rules_docker//container:container.bzl", "container_push")
def container_release(registry, repository):
container_push(
name = "local",
format = "Docker",
image = ":image",
registry = registry,
repository = repository,
tag = "{BUILD_USER}",
)
container_push(
name = "release",
format = "Docker",
image = ":image",
registry = registry,
repository = repository,
tag = "{STABLE_VERSION}",
)
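# Example BUILD usage (the registry and repository values are hypothetical):
#   load("//tools/build:container_release.bzl", "container_release")
#   container_release(
#       registry = "ghcr.io",
#       repository = "airyhq/my-app",
#   )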
| 517 |
uis/arithmetics_ui.py
|
mordy-python/Ultimate-Calculator
| 44 |
2025267
|
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MathWindow(object):
def addition(self, number1, number2):
return number1 + number2
def subtract(self, number1, number2):
return number1 - number2
def multiply(self, number1, number2):
return number1 * number2
def power(self, number1, number2):
return number1 ** number2
def divide(self, number1, number2):
return number1 / number2
def operations(self):
try:
first_number = float(self.first_line.text())
second_number = float(self.second_line.text())
operation = self.opert_box.currentText()
if operation == 'Add':
add = self.addition(first_number, second_number)
self.out_line.setText(str(add))
elif operation == 'Subtract':
sub = self.subtract(first_number, second_number)
self.out_line.setText(str(sub))
elif operation == 'Multiply':
mult = self.multiply(first_number, second_number)
self.out_line.setText(str(mult))
elif operation == 'Divide':
div = self.divide(first_number, second_number)
self.out_line.setText(str(div))
elif operation == 'Power':
power = self.power(first_number, second_number)
self.out_line.setText(str(power))
        except (ValueError, ZeroDivisionError):
self.out_line.setText("Try Again...!")
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(422, 409)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.first_line = QtWidgets.QLineEdit(self.centralwidget)
self.first_line.setGeometry(QtCore.QRect(150, 30, 201, 31))
self.first_line.setObjectName("first_line")
self.second_line = QtWidgets.QLineEdit(self.centralwidget)
self.second_line.setGeometry(QtCore.QRect(150, 140, 201, 31))
self.second_line.setObjectName("second_line")
self.opert_box = QtWidgets.QComboBox(self.centralwidget)
self.opert_box.setGeometry(QtCore.QRect(150, 80, 201, 31))
self.opert_box.setObjectName("opert_box")
self.opert_box.addItem("")
self.opert_box.addItem("")
self.opert_box.addItem("")
self.opert_box.addItem("")
self.opert_box.addItem("")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(30, 33, 101, 20))
self.label.setObjectName("label")
self.second_number = QtWidgets.QLabel(self.centralwidget)
self.second_number.setGeometry(QtCore.QRect(30, 150, 101, 20))
self.second_number.setObjectName("second_number")
self.operation = QtWidgets.QLabel(self.centralwidget)
self.operation.setGeometry(QtCore.QRect(30, 90, 101, 20))
self.operation.setObjectName("operation")
self.out_line = QtWidgets.QLineEdit(self.centralwidget)
self.out_line.setGeometry(QtCore.QRect(150, 210, 201, 51))
self.out_line.setObjectName("out_line")
self.go_btn = QtWidgets.QPushButton(self.centralwidget)
self.go_btn.setGeometry(QtCore.QRect(60, 320, 131, 51))
self.go_btn.setObjectName("go_btn")
self.go_btn.clicked.connect(self.operations)
self.exit_btn = QtWidgets.QPushButton(self.centralwidget)
self.exit_btn.setGeometry(QtCore.QRect(260, 320, 131, 51))
self.exit_btn.setObjectName("exit_btn")
self.exit_btn.clicked.connect(MainWindow.hide)
self.output = QtWidgets.QLabel(self.centralwidget)
self.output.setGeometry(QtCore.QRect(30, 220, 101, 20))
self.output.setObjectName("output")
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate(
"MainWindow", "Basic Arithmetics"))
self.opert_box.setItemText(0, _translate("MainWindow", "Add"))
self.opert_box.setItemText(1, _translate("MainWindow", "Subtract"))
self.opert_box.setItemText(2, _translate("MainWindow", "Multiply"))
self.opert_box.setItemText(3, _translate("MainWindow", "Divide"))
self.opert_box.setItemText(4, _translate("MainWindow", "Power"))
self.label.setText(_translate("MainWindow", "First Number"))
self.second_number.setText(_translate("MainWindow", "Second Number"))
self.operation.setText(_translate("MainWindow", "Operation"))
self.go_btn.setText(_translate("MainWindow", "Go"))
self.exit_btn.setText(_translate("MainWindow", "Exit"))
self.output.setText(_translate("MainWindow", "Output"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MathWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| 5,093 |
tox/__init__.py
|
kz6fittycent/tox
| 0 |
2024624
|
import os
import signal
from pkg_resources import DistributionNotFound
from pkg_resources import get_distribution
from .hookspecs import hookimpl
from .hookspecs import hookspec
try:
_full_version = get_distribution(__name__).version
__version__ = _full_version.split('+', 1)[0]
except DistributionNotFound:
__version__ = '0.0.0.dev0'
# separate function because pytest-mock `spy` does not work on Exceptions
# can use neither a class method nor a static because of
# https://bugs.python.org/issue23078
# even a normal method failed with
# TypeError: descriptor '__getattribute__' requires a 'BaseException' object but received a 'type'
def _exit_code_str(exception_name, command, exit_code):
""" string representation for an InvocationError, with exit code """
str_ = "%s for command %s" % (exception_name, command)
if exit_code is not None:
str_ += " (exited with code %d)" % (exit_code)
if (os.name == 'posix') and (exit_code > 128):
signals = {number: name
for name, number in vars(signal).items()
if name.startswith("SIG")}
number = exit_code - 128
name = signals.get(number)
if name:
str_ += ("\nNote: this might indicate a fatal error signal "
"({} - 128 = {}: {})".format(number+128, number, name))
return str_
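# For example, on POSIX an InvocationError for command "tox" with exit code 139
# renders as:
#   InvocationError for command tox (exited with code 139)
#   Note: this might indicate a fatal error signal (139 - 128 = 11: SIGSEGV)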
class exception:
class Error(Exception):
def __str__(self):
return "%s: %s" % (self.__class__.__name__, self.args[0])
class MissingSubstitution(Error):
        """placeholder for debugging configurations"""
        FLAG = 'TOX_MISSING_SUBSTITUTION'
def __init__(self, name):
self.name = name
class ConfigError(Error):
""" error in tox configuration. """
class UnsupportedInterpreter(Error):
"""signals an unsupported Interpreter"""
class InterpreterNotFound(Error):
"""signals that an interpreter could not be found"""
class InvocationError(Error):
""" an error while invoking a script. """
def __init__(self, command, exit_code=None):
super(exception.Error, self).__init__(command, exit_code)
self.command = command
self.exit_code = exit_code
def __str__(self):
return _exit_code_str(self.__class__.__name__, self.command, self.exit_code)
class MissingFile(Error):
""" an error while invoking a script. """
class MissingDirectory(Error):
""" a directory did not exist. """
class MissingDependency(Error):
""" a dependency could not be found or determined. """
class MinVersionError(Error):
""" the installed tox version is lower than requested minversion. """
def __init__(self, message):
self.message = message
super(exception.MinVersionError, self).__init__(message)
from .session import main as cmdline # noqa
__all__ = ('hookspec', 'hookimpl', 'cmdline', 'exception', '__version__')
| 3,049 |
pick.py
|
www8098/TD3-HEX
| 0 |
2024867
|
import pickle
import numpy as np
path = 'data/walker2d-expert-v2.pkl'
with open(path, 'rb') as f:
    data = pickle.load(f)
# sample 100 random trajectory indices (values may repeat)
ids = np.random.randint(0, 1000, 100)
trans = []
for i in ids:
    trans.append(data[i])
with open('data/walker2d-poor.pkl', 'wb') as f:
pickle.dump(trans, f)
# with open('data/halfcheetah-poor.pkl', 'rb') as f:
# data = pickle.load(f)
# acc_r = []
# for traj in data:
# acc_r.append(sum(traj['rewards']))
# print(acc_r)
# acc_r = enumerate(acc_r)
# acc_r.sorted(key = lambda x:x[1])
# print(acc_r[:10])
# max_id = acc_r.index(max(acc_r))
# print(data[1].keys())
# print(len(data[1]))
| 649 |
src/Buffer.py
|
hmomin/TD3-Bipedal-Walker
| 1 |
2023381
|
import numpy as np
class Buffer():
def __init__(self, observationDim: int, actionDim: int, size: int=1_000_000):
# use a fixed-size buffer to prevent constant list instantiations
self.states = np.zeros((size, observationDim))
self.actions = np.zeros((size, actionDim))
self.rewards = np.zeros(size)
self.nextStates = np.zeros((size, observationDim))
self.doneFlags = np.zeros(size)
# use a pointer to keep track of where in the buffer we are
self.pointer = 0
# use current size to ensure we don't train on any non-existent data points
self.currentSize = 0
self.size = size
def store(
self, state: np.ndarray, action: np.ndarray, reward: float, nextState: np.ndarray,
doneFlag: bool
):
# store all the data for this transition
ptr = self.pointer
self.states[ptr] = state
self.actions[ptr] = action
self.rewards[ptr] = reward
self.nextStates[ptr] = nextState
self.doneFlags[ptr] = doneFlag
# update the pointer and current size
self.pointer = (self.pointer + 1) % self.size
self.currentSize = min(self.currentSize + 1, self.size)
def getMiniBatch(self, size: int) -> dict:
# ensure size is not bigger than the current size of the buffer
size = min(size, self.currentSize)
# generate random indices
indices = np.random.choice(self.currentSize, size, replace=False)
# return the mini-batch of transitions
return {
"states": self.states[indices],
"actions": self.actions[indices],
"rewards": self.rewards[indices],
"nextStates": self.nextStates[indices],
"doneFlags": self.doneFlags[indices],
}
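# A minimal usage sketch (hypothetical dimensions, not part of the original
# module): store a single transition, then sample a mini-batch from it.
if __name__ == "__main__":
    buffer = Buffer(observationDim=3, actionDim=1, size=10)
    buffer.store(np.zeros(3), np.zeros(1), 1.0, np.ones(3), False)
    batch = buffer.getMiniBatch(size=4)  # size is capped at currentSize (1 here)
    print(batch["rewards"])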
| 1,816 |
setup.py
|
maximkulkin/zephyr
| 36 |
2023276
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import lollipop
def read(fname):
with open(fname) as fp:
content = fp.read()
return content
setup(
name='lollipop',
version=lollipop.__version__,
description=('Data serialization and validation library'),
long_description=read('README.rst'),
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/maximkulkin/lollipop',
packages=['lollipop'],
include_package_data=True,
license='MIT',
zip_safe=False,
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema',
'marshmallow'),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
)
| 1,462 |
tests/microdot/test_multidict.py
|
steve1aa/microdot
| 173 |
2023816
|
import unittest
from microdot import MultiDict
class TestMultiDict(unittest.TestCase):
def test_multidict(self):
d = MultiDict()
assert dict(d) == {}
assert d.get('zero') is None
assert d.get('zero', default=0) == 0
assert d.getlist('zero') == []
assert d.getlist('zero', type=int) == []
d['one'] = 1
assert d['one'] == 1
assert d.get('one') == 1
assert d.get('one', default=2) == 1
assert d.get('one', type=int) == 1
assert d.get('one', type=str) == '1'
d['two'] = 1
d['two'] = 2
assert d['two'] == 1
assert d.get('two') == 1
assert d.get('two', default=2) == 1
assert d.get('two', type=int) == 1
assert d.get('two', type=str) == '1'
assert d.getlist('two') == [1, 2]
assert d.getlist('two', type=int) == [1, 2]
assert d.getlist('two', type=str) == ['1', '2']
| 946 |
Ana_calculo_perfilverticaltempBiasIG.py
|
SudestadaARG/VerificationMetrics_WRF-ERA
| 1 |
2023164
|
#!/usr/bin/env python
#<NAME> Github SudestadaARG
# Program to compute the vertical Bias profile, using Bias matrices built previously
# Required inputs:
#
# 3D Bias matrices that were stored like this:
# np.ma.dump(bias_u, path_out + '/bias_u_' + exp + '_ANA12h')
import numpy as np
import os
import argparse
import gc
# Define command-line arguments for the date and the ensemble member:
parser = argparse.ArgumentParser(description='Hour Exp Per')
parser.add_argument('Hour',type=int)
parser.add_argument('Exp',type=str)
parser.add_argument('Per',type=str)
args = parser.parse_args()
periodo= args.Per
exp = '/' + args.Exp #'/sinnud' #'/spnudUV6h'#'/spnud6h'
ANA = '.000'
# indicates which dataset is used and which hours of the day were used
temp = str(args.Hour).zfill(2) + 'z'  # '06Z' '12Z' '18Z' '00Z' is the start time used by the biastemp calculations
if exp=='/GFS' or exp=='/GEFS':
path_in = '/data/miglesias/verificacion_doc/dif_ERA_GEFS/BIAS' + exp + '/'
path_out = '/data/miglesias/verificacion_doc/dif_ERA_GEFS/BIAS' + exp + '/perfilvertical'
ANA = '_000'
else:
path_in = '/data/miglesias/verificacion_doc/dif_ERA_WRFupp/BIAS' + exp + '/'
path_out = '/data/miglesias/verificacion_doc/dif_ERA_WRFupp/BIAS' + exp + '/perfilvertical'
ANA = '.000'
M=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17', '18','19','20'] # vector of ensemble member numbers
#m=0 # Note that M[1] = '02'; remember Python counts from 0
m = 0 # m must be initialized here, inside the day loop; otherwise it keeps counting and finds nothing
while m < len(M):
if exp == '/GFS':
bias_geo = np.ma.load( path_in + 'bias_geo_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA )
bias_u = np.ma.load( path_in + 'bias_u_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_v = np.ma.load( path_in + 'bias_v_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_t = np.ma.load( path_in + 'bias_t_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_q = np.ma.load( path_in + 'bias_q_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
else:
bias_geo = np.ma.load( path_in + M[m] + '/bias_geo_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA )
bias_u = np.ma.load( path_in + M[m] + '/bias_u_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_v = np.ma.load( path_in + M[m] + '/bias_v_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_t = np.ma.load( path_in + M[m] + '/bias_t_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_q = np.ma.load( path_in + M[m] + '/bias_q_' + temp + '_M'+ M[m]+'_'+periodo+'_ana' + ANA)
bias_geo_vert = np.ma.mean(np.ma.mean(bias_geo, axis = (1)),axis=(1))
bias_u_vert = np.ma.mean(np.ma.mean(bias_u, axis = (1)),axis=(1))
bias_v_vert = np.ma.mean(np.ma.mean(bias_v, axis = (1)),axis=(1))
bias_t_vert = np.ma.mean(np.ma.mean(bias_t, axis = (1)),axis=(1))
bias_q_vert = np.ma.mean(np.ma.mean(bias_q, axis = (1)),axis=(1))
if exp == '/GFS':
np.ma.dump(bias_geo_vert, path_out + '/bias_geo_vert_' + temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_u_vert, path_out + '/bias_u_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_v_vert, path_out + '/bias_v_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_t_vert, path_out + '/bias_t_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_q_vert, path_out + '/bias_q_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
else:
np.ma.dump(bias_geo_vert, path_out + '/' + M[m] + '/bias_geo_vert_' + temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_u_vert, path_out + '/' + M[m] + '/bias_u_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_v_vert, path_out + '/' + M[m] + '/bias_v_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_t_vert, path_out + '/' + M[m] + '/bias_t_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
np.ma.dump(bias_q_vert, path_out + '/' + M[m] + '/bias_q_vert_'+ temp + '_M'+ M[m] +'_'+periodo+'_ana' + ANA)
bias_geo = 0
bias_geo_vert = 0
bias_u = 0
bias_u_vert = 0
bias_v = 0
bias_v_vert = 0
bias_t = 0
bias_t_vert = 0
bias_q = 0
bias_q_vert = 0
    m = m + 1 # loop over the members
    gc.collect() # collect garbage from memory
| 4,402 |
THC/2021/crypto/Kidsign/server.py
|
ruhan-islam/ctf-archives
| 1 |
2024107
|
from Crypto.Util.number import inverse, isPrime, GCD
from random import SystemRandom
from hashlib import sha256
from flag import FLAG
import os
rand = SystemRandom()
class ElGamal:
def __init__(self):
self.q = 89666094075799358333912553751544914665545515386283824011992558231120286657213785559151513056027280869020616111209289142073255564770995469726364925295894316484503027288982119436576308594740674437582226015660087863550818792499346330713413631956572604302171842281106323020998625124370502577704273068156073608681
assert(isPrime(self.q))
self.p = 2*self.q + 1
assert(isPrime(self.p))
self.g = 2
self.H = sha256
self.x = rand.randint(1,self.p-2)
self.y = pow(self.g,self.x,self.p)
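    # Standard ElGamal signatures: sign picks k coprime to p-1 and computes
    # r = g^k mod p, s = (H(m) - x*r) * k^{-1} mod (p-1); verify checks
    # g^H(m) == y^r * r^s (mod p).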
def sign(self,m):
k = rand.randint(2,self.p-2)
while GCD(k,self.p-1) != 1:
k = rand.randint(2,self.p-2)
r = pow(self.g,k,self.p)
h = int(self.H(m).hexdigest(),16)
s = ((h - self.x * r)* inverse(k,self.p-1)) % (self.p - 1)
assert(s != 0)
return (r,s)
def verify(self,m,r,s):
if r <= 0 or r >= (self.p):
return False
if s <= 0 or s >= (self.p-1):
return False
h = int(self.H(m).hexdigest(),16)
return pow(self.g,h,self.p) == (pow(self.y,r,self.p) * pow(r,s,self.p)) % self.p
if __name__ == '__main__':
S = ElGamal()
print("Here are your parameters:\n - generator g: {:d}\n - prime p: {:d}\n - public key y: {:d}\n".format(S.g, S.p, S.y))
message = os.urandom(16)
print("If you can sign this message : {:s}, I'll reward you with a flag!".format(message.hex()))
r = int(input("r: "))
s = int(input("s: "))
if S.verify(message,r,s):
print(FLAG)
else:
print("Nope.")
| 1,616 |
robocode-python-ls-core/tests/robocode_ls_core_tests/test_document.py
|
emanlove/robotframework-lsp
| 0 |
2024803
|
# Original work Copyright 2017 Palantir Technologies, Inc. (MIT)
# Original work Copyright 2020 Open Law Library. (Apache 2)
# See ThirdPartyNotices.txt in the project root for license information.
# All modifications Copyright (c) Robocorp Technologies Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robocode_ls_core.workspace import Document
from robocode_ls_core.lsp import TextDocumentContentChangeEvent, Position, Range
from robocode_ls_core import uris
import pytest
import os.path
DOC = """document
for
testing
"""
DOC_URI = uris.from_fs_path(os.path.abspath(__file__))
@pytest.fixture
def doc():
return Document(DOC_URI, DOC)
def test_document_empty_edit():
doc = Document("file:///uri", u"")
change = TextDocumentContentChangeEvent(
Range(Position(0, 0), Position(0, 0)), 0, u"f"
)
doc.apply_change(change)
assert doc.source == u"f"
def test_document_end_of_file_edit():
old = ["print 'a'\n", "print 'b'\n"]
doc = Document("file:///uri", u"".join(old))
change = TextDocumentContentChangeEvent(
Range(Position(2, 0), Position(2, 0)), 0, u"o"
)
doc.apply_change(change)
assert doc.get_internal_lines() == ("print 'a'\n", "print 'b'\n", "o")
def test_document_line_edit():
doc = Document("file:///uri", u"itshelloworld")
change = TextDocumentContentChangeEvent(
Range(Position(0, 3), Position(0, 8)), 0, u"goodbye"
)
doc.apply_change(change)
assert doc.source == u"itsgoodbyeworld"
def test_document_lines(doc):
assert len(doc.get_internal_lines()) == 3
assert doc.get_internal_lines()[0] == "document\n"
def test_document_multiline_edit():
old = ["def hello(a, b):\n", " print a\n", " print b\n"]
doc = Document("file:///uri", u"".join(old))
change = TextDocumentContentChangeEvent(
Range(Position(1, 4), Position(2, 11)), 0, u"print a, b"
)
doc.apply_change(change)
assert doc.get_internal_lines() == ("def hello(a, b):\n", " print a, b\n")
def test_document_props(doc):
assert doc.uri == DOC_URI
assert doc.source == DOC
def test_document_source_unicode():
document_mem = Document(DOC_URI, u"my source")
document_disk = Document(DOC_URI)
assert isinstance(document_mem.source, type(document_disk.source))
def test_offset_at_position(doc):
assert doc.selection(0, 8).offset_at_position == 8
assert doc.selection(1, 5).offset_at_position == 14
assert doc.selection(2, 0).offset_at_position == 13
assert doc.selection(2, 4).offset_at_position == 17
assert doc.selection(4, 0).offset_at_position == 21
def test_word_at_position(doc):
"""
Return the position under the cursor (or last in line if past the end)
"""
assert doc.selection(0, 8).word_at_position == "document"
assert doc.selection(0, 1000).word_at_position == "document"
assert doc.selection(1, 5).word_at_position == "for"
assert doc.selection(2, 0).word_at_position == "testing"
assert doc.selection(4, 0).word_at_position == ""
def test_get_line():
d = Document(uri="", source="")
assert d.get_last_line() == ""
d.source = "my\nfoo"
assert d.get_line(0) == "my"
assert d.get_last_line() == "foo"
assert d.get_line_count() == 2
d.source = "my\nfoo\n"
assert d.get_line(0) == "my"
assert d.get_line(1) == "foo"
assert d.get_line(2) == ""
assert d.get_last_line() == ""
assert list(d.iter_lines()) == ["my\n", "foo\n", ""]
assert list(d.iter_lines(False)) == ["my", "foo", ""]
def test_get_last_line_col():
d = Document(uri="", source="")
assert d.get_last_line_col() == (0, 0)
d.source = "my"
assert d.get_last_line_col() == (0, 2)
d.source = "my\n"
assert d.get_last_line_col() == (1, 0)
def test_offset_to_line_col_1():
d = Document(uri="", source="my\nfo\nba")
assert d.offset_to_line_col(0) == (0, 0)
assert d.offset_to_line_col(1) == (0, 1)
assert d.offset_to_line_col(2) == (0, 2)
assert d.offset_to_line_col(3) == (1, 0)
assert d.offset_to_line_col(4) == (1, 1)
assert d.offset_to_line_col(5) == (1, 2)
assert d.offset_to_line_col(6) == (2, 0)
assert d.offset_to_line_col(7) == (2, 1)
assert d.offset_to_line_col(8) == (2, 2)
# Note: block below is out of bounds
assert d.offset_to_line_col(9) == (2, 3)
assert d.offset_to_line_col(10) == (2, 4)
def test_offset_to_line_col_2():
d = Document(uri="", source="\n\n\n")
with pytest.raises(ValueError):
assert d.offset_to_line_col(-1)
assert d.offset_to_line_col(0) == (0, 0)
assert d.offset_to_line_col(1) == (1, 0)
assert d.offset_to_line_col(2) == (2, 0)
# Note: block below is out of bounds
assert d.offset_to_line_col(3) == (3, 0)
assert d.offset_to_line_col(4) == (3, 1)
| 5,351 |
Project3/python/touch_motors(1).py
|
cnunes-purdue/team38_project3
| 0 |
2024349
|
#!/usr/bin/env python
#
# https://www.dexterindustries.com/BrickPi/
# https://github.com/DexterInd/BrickPi3
#
# Copyright (c) 2016 Dexter Industries
# Released under the MIT license (http://choosealicense.com/licenses/mit/).
# For more information, see https://github.com/DexterInd/BrickPi3/blob/master/LICENSE.md
#
# This code is an example for running all motors while a touch sensor connected to PORT_1 of the BrickPi3 is being pressed.
#
# Hardware: Connect EV3 or NXT motor(s) to any of the BrickPi3 motor ports. Make sure that the BrickPi3 is running on a 9v power supply.
#
# Results: When you run this program, the motor(s) speed will ramp up and down while the touch sensor is pressed. The position for each motor will be printed.
from __future__ import print_function # use python 3 syntax but make it compatible with python 2
from __future__ import division # ''
import time # import the time library for the sleep function
import brickpi3 # import the BrickPi3 drivers
BP = brickpi3.BrickPi3() # Create an instance of the BrickPi3 class. BP will be the BrickPi3 object.
BP.set_sensor_type(BP.PORT_1, BP.SENSOR_TYPE.TOUCH) # Configure for a touch sensor. If an EV3 touch sensor is connected, it will be configured for EV3 touch, otherwise it'll configured for NXT touch.
try:
print("Press touch sensor on port 1 to run motors")
value = 0
while not value:
try:
value = BP.get_sensor(BP.PORT_1)
except brickpi3.SensorError:
pass
speed = 0
adder = 1
while True:
# BP.get_sensor retrieves a sensor value.
# BP.PORT_1 specifies that we are looking for the value of sensor port 1.
# BP.get_sensor returns the sensor value.
try:
value = BP.get_sensor(BP.PORT_1)
except brickpi3.SensorError as error:
print(error)
value = 0
if value: # if the touch sensor is pressed
if speed <= -100 or speed >= 100: # if speed reached 100, start ramping down. If speed reached -100, start ramping up.
adder = -adder
speed += adder
else: # else the touch sensor is not pressed or not configured, so set the speed to 0
speed = 0
adder = 1
# Set the motor speed for all four motors
BP.set_motor_power(BP.PORT_A + BP.PORT_B + BP.PORT_C, speed)
BP.set_motor_power(BP.PORT_D, -speed)
try:
# Each of the following BP.get_motor_encoder functions returns the encoder value (what we want to display).
print("Encoder A: %6d B: %6d C: %6d D: %6d" % (BP.get_motor_encoder(BP.PORT_A), BP.get_motor_encoder(BP.PORT_B), BP.get_motor_encoder(BP.PORT_C), BP.get_motor_encoder(BP.PORT_D)))
except IOError as error:
print(error)
time.sleep(0.02) # delay for 0.02 seconds (20ms) to reduce the Raspberry Pi CPU load.
except KeyboardInterrupt: # except the program gets interrupted by Ctrl+C on the keyboard.
BP.reset_all() # Unconfigure the sensors, disable the motors, and restore the LED to the control of the BrickPi3 firmware.
| 3,259 |
settings.py
|
alee2021/JumpingBird
| 0 |
2024291
|
TITLE = "The Jumping Bird"
# screen dims
WIDTH = 480
HEIGHT = 600
# frames per second
FPS = 60
# colors
WHITE = (255, 255, 255)
BLACK = (0,0,0)
REDDISH = (240,55,66)
YELLOW = (255, 255, 0)
GREEN = (25, 198, 60)
DARKBLUE = (14, 9, 59)
SKY_BLUE = (143, 185, 252)
DARK_RED = (112, 19, 19)
FONT_NAME = '"'
SPRITESHEET = "spritesheet_jumper.png"
# data files
HS_FILE = "highscore.txt"
# player settings
PLAYER_ACC = 0.5
PLAYER_FRICTION = -0.12
PLAYER_GRAV = 0.8
PLAYER_JUMP = 23
# game settings
CLOUD_POWER = 85
BOOST_POWER = 100
BOOM_POWER = 70
POW_SPAWN_PCT = 7
BOOM_SPAWN_PCT = 7
MOB_FREQ = 70000
# layers - controls draw order (which sprites render in front of others)
PLAYER_LAYER = 2
PLATFORM_LAYER = 1
CLOUD_LAYER = 0
POW_LAYER = 1
BOOM_LAYER = 1
MOB_LAYER = 2
SPIKES_LAYER = 2
# platform settings
''' old platforms from drawing rectangles'''
'''
PLATFORM_LIST = [(0, HEIGHT - 40, WIDTH, 40),
(65, HEIGHT - 300, WIDTH-400, 40),
(20, HEIGHT - 350, WIDTH-300, 40),
(200, HEIGHT - 150, WIDTH-350, 40),
(200, HEIGHT - 450, WIDTH-350, 40)]
'''
PLATFORM_LIST = [(0, HEIGHT - 40),
(65, HEIGHT - 300),
(20, HEIGHT - 350),
(200, HEIGHT - 150),
(200, HEIGHT - 450)]
| 1,283 |
tests/test_lint.py
|
sthagen/csaf-lint
| 1 |
2022660
|
# -*- coding: utf-8 -*-
# pylint: disable=missing-docstring,unused-import,reimported
import os
import pathlib
from unittest import mock
import jsonschema
from lxml import etree # type: ignore
import pytest # type: ignore
import csaf_lint.lint as lint
CONTENT_FEATURES = ('document', 'document-product', 'document-vulnerability', 'full', 'spam')
USAGE_ERROR_TOKENS = ('requires', 'two', 'schema', 'document')
USAGE_ERROR_NO_EMBEDDING_UNKNOWN_TOKENS = ('no', 'embed', 'support', 'non')
USAGE_ERROR_NO_EMBEDDING_XML_TOKENS = ('no', 'embed', 'support', 'xml')
CVRF_IMPLICIT_1_2_DOCUMENT_PATH = pathlib.Path('tests', 'fixtures', 'cvrf-no-version-given', 'is_wun_two.xml') # cvrf_1.2_example_a.xml
CVRF_IMPLICIT_1_1_DOCUMENT_PATH = pathlib.Path('tests', 'fixtures', 'cvrf-no-version-given', 'is_wun_wun.xml') # CVRF-1.1-cisco-sa-20110525-rvs4000.xml
def test_main_embedded_unknown_nok(capsys):
assert lint.main(argv=["foo"], embedded=True, debug=False) == 2
out, _ = capsys.readouterr()
for token in USAGE_ERROR_NO_EMBEDDING_UNKNOWN_TOKENS:
assert token in out
assert lint.main(argv=["foo", "bar"], embedded=True, debug=False) == 2
out, _ = capsys.readouterr()
for token in USAGE_ERROR_NO_EMBEDDING_UNKNOWN_TOKENS:
assert token in out
def test_main_embedded_xml_nok(capsys):
assert lint.main(argv=["<foo>"], embedded=True, debug=False) == 2
out, _ = capsys.readouterr()
for token in USAGE_ERROR_NO_EMBEDDING_XML_TOKENS:
assert token in out
assert lint.main(argv=["<foo>", "<bar>"], embedded=True, debug=False) == 2
out, _ = capsys.readouterr()
for token in USAGE_ERROR_NO_EMBEDDING_XML_TOKENS:
assert token in out
def test_derive_version_from_namespace_nok():
assert lint.derive_version_from_namespace(None) == ('', None)
def test_derive_schema_path_nok():
assert lint.derive_schema_path(object(), '42', None) == lint.CVRF_PRE_OASIS_SCHEMA_FILE
def test_main_too_many_args_nok():
assert lint.main(["a", "b", "c"]) == 2
def test_inputs_xml_empty_nok():
assert lint.inputs_xml(0, []) == (None, None)
def test_version_from_explicit_cvrf_1_x_in_schema_path():
for indicator in (lint.CRVF_DEFAULT_SEMANTIC_VERSION, lint.CRVF_PRE_OASIS_SEMANTIC_VERSION):
assert lint.version_from(schema_path=indicator, document_path=None) == indicator
def test_version_from_explicit_cvrf_1_x_in_document_path():
for indicator in (lint.CRVF_DEFAULT_SEMANTIC_VERSION, lint.CRVF_PRE_OASIS_SEMANTIC_VERSION):
assert lint.version_from(schema_path='', document_path=indicator) == indicator
def test_version_from_implicit_cvrf_1_x_in_document_path():
version_document_map = {
lint.CRVF_DEFAULT_SEMANTIC_VERSION: CVRF_IMPLICIT_1_2_DOCUMENT_PATH,
lint.CRVF_PRE_OASIS_SEMANTIC_VERSION: CVRF_IMPLICIT_1_1_DOCUMENT_PATH,
}
for version, document_path in version_document_map.items():
assert lint.version_from(schema_path='', document_path=document_path) == version
def test_main_validate_spam_default_ok(capsys):
n = 1
nn = f'{n:02d}'
a_document_path = pathlib.Path('tests', 'fixtures', 'csaf-2.0', 'baseline', 'spam', f'{nn}.json')
argv = [a_document_path]
assert lint.main(argv=argv, embedded=False, debug=False) == 0
_, err = capsys.readouterr()
assert not err
@pytest.mark.serial
def test_main_validate_spam_ok(capsys):
"""
python -m csaf_lint csaf_lint/schema/csaf/2.0/csaf.json tests/fixtures/csaf-2.0/baseline/spam/01.json
returns 0 and no additional information
"""
for n in range(1, 11):
nn = f'{n:02d}'
a_document_path = pathlib.Path('tests', 'fixtures', 'csaf-2.0', 'baseline', 'spam', f'{nn}.json')
argv = [lint.CSAF_2_0_SCHEMA_PATH, a_document_path]
assert lint.main(argv=argv, embedded=False, debug=False) == 0
_, err = capsys.readouterr()
assert not err
@pytest.mark.serial
def test_main_validate_spam_nok():
a_document_path = pathlib.Path('tests', 'fixtures', 'csaf-2.0', 'invalid', 'spam', '01.json')
argv = [lint.CSAF_2_0_SCHEMA_PATH, a_document_path]
assert lint.main(argv=argv, embedded=False, debug=False) == 1
@pytest.mark.serial
def test_main_nok_non_existing_folder_(capsys):
nef = 'folder_does_not_exist'
a_document_path = pathlib.Path(nef, 'no_doc.json')
assert pathlib.Path(nef).is_dir() is False, f"Unexpected folder {nef} exists which breaks this test"
message = r"\[Errno 2\] No such file or directory: '%s'" % (a_document_path,)
with pytest.raises(FileNotFoundError, match=message):
lint.main([lint.CSAF_2_0_SCHEMA_PATH, a_document_path], embedded=False, debug=False)
_, err = capsys.readouterr()
assert not err
@pytest.mark.serial
@mock.patch.dict(os.environ, {"XML_CATALOG_FILES": ""}, clear=True)
def test_main_validate_xml_cvrf_1_2_schema_and_document_ok(capsys):
a_schema_path = pathlib.Path('csaf_lint', 'schema', 'cvrf', '1.2', 'cvrf.xsd')
a_document_path = pathlib.Path('tests', 'fixtures', 'cvrf-1.2', 'baseline', '01.xml') # cvrf_1.2_example_a.xml
argv = [str(a_schema_path), str(a_document_path)]
assert lint.main(argv=argv, embedded=False, debug=False) == 0
_, err = capsys.readouterr()
assert not err
@pytest.mark.serial
@mock.patch.dict(os.environ, {"XML_CATALOG_FILES": ""}, clear=True)
def test_main_validate_xml_cvrf_1_2_document_only_version_in_path_ok(capsys):
a_document_path = pathlib.Path('tests', 'fixtures', 'cvrf-1.2', 'baseline', '01.xml') # cvrf_1.2_example_a.xml
argv = [str(a_document_path)]
assert lint.main(argv=argv, embedded=False, debug=False) == 0
_, err = capsys.readouterr()
assert not err
@pytest.mark.serial
@mock.patch.dict(os.environ, {"XML_CATALOG_FILES": ""}, clear=True)
def test_main_validate_xml_cvrf_1_2_document_only_version_not_in_path_ok():
a_document_path = CVRF_IMPLICIT_1_2_DOCUMENT_PATH
argv = [str(a_document_path)]
assert lint.main(argv=argv, embedded=False, debug=False) == 0, "OK"
@pytest.mark.serial
@mock.patch.dict(os.environ, {"XML_CATALOG_FILES": ""}, clear=True)
def test_main_validate_xml_cvrf_1_1_document_only_version_not_in_path_ok(capsys):
a_document_path = CVRF_IMPLICIT_1_1_DOCUMENT_PATH
argv = [str(a_document_path)]
try:
assert lint.main(argv=argv, embedded=False, debug=False) == 0
except etree.XMLSchemaParseError as err:
assert os.getenv('XML_CATALOG_FILES', '') == 'csaf_lint/schema/catalog_1_1.xml'
@pytest.mark.serial
@mock.patch.dict(os.environ, {"XML_CATALOG_FILES": ""}, clear=True)
def test_main_validate_xml_cvrf_1_1_document_only_version_in_path_ok(capsys):
a_document_path = pathlib.Path('tests', 'fixtures', 'cvrf-1.1', 'baseline', '01.xml') # CVRF-1.1-cisco-sa-20110525-rvs4000.xml
argv = [str(a_document_path)]
try:
assert lint.main(argv=argv, embedded=False, debug=False) == 0
except etree.XMLSchemaParseError as err:
assert os.getenv('XML_CATALOG_FILES', '') == 'csaf_lint/schema/catalog_1_1.xml'
@pytest.mark.serial
@pytest.mark.slow
def test_main_validate_rest_ok(capsys):
for content in CONTENT_FEATURES[:-1]:
for n in range(1, 11):
nn = f'{n:02d}'
a_document_path = pathlib.Path('tests', 'fixtures', 'csaf-2.0', 'baseline', content, f'{nn}.json')
argv = [lint.CSAF_2_0_SCHEMA_PATH, a_document_path]
try:
assert lint.main(argv=argv, embedded=False, debug=False) == 0
except jsonschema.exceptions.ValidationError as err:
raise ValueError(f"failed validation for {a_document_path} in {test_main_validate_rest_ok}. Details: {err}")
_, err = capsys.readouterr()
assert not err
@pytest.mark.serial
@pytest.mark.slow
def test_main_validate_rest_nok():
for content in CONTENT_FEATURES[:-1]:
for n in range(1, 11):
nn = f'{n:02d}'
a_document_path = pathlib.Path('tests', 'fixtures', 'csaf-2.0', 'invalid', content, f'{nn}.json')
argv = [lint.CSAF_2_0_SCHEMA_PATH, a_document_path]
assert lint.main(argv=argv, embedded=False, debug=False) == 1, "ERROR"
| 8,189 |
spid_cie_oidc/provider/views/consent_page_view.py
|
damikael/spid-cie-oidc-django
| 0 |
2024533
|
import logging
from django.core.paginator import Paginator
import urllib.parse
from django.contrib.auth import logout
from django.http import (
HttpResponseForbidden,
HttpResponseRedirect,
)
from django.shortcuts import redirect, render
from django.urls import reverse
from django.utils.translation import gettext as _
from django.views import View
from spid_cie_oidc.entity.models import (
TrustChain
)
from spid_cie_oidc.provider.forms import ConsentPageForm
from spid_cie_oidc.provider.models import IssuedToken, OidcSession
from spid_cie_oidc.provider.settings import OIDCFED_PROVIDER_HISTORY_PER_PAGE
from . import OpBase
logger = logging.getLogger(__name__)
class ConsentPageView(OpBase, View):
template = "op_user_consent.html"
def get_consent_form(self):
return ConsentPageForm
def get(self, request, *args, **kwargs):
try:
session = self.check_session(request)
except Exception:
logger.warning("Invalid session on Consent page")
return HttpResponseForbidden()
        tc = TrustChain.objects.filter(
            sub=session.client_id,
            type="openid_relying_party",
            is_active=True
        ).first()
        # if this auth code has already been used ... forbidden
        if IssuedToken.objects.filter(session=session).exists():
logger.warning(f"Auth code Replay {session}")
return HttpResponseForbidden()
i18n_user_claims = self.attributes_names_to_release(
request, session
)['i18n_user_claims']
context = {
"form": self.get_consent_form()(),
"session": session,
"client_organization_name": tc.metadata.get(
"client_name", session.client_id
),
"user_claims": sorted(set(i18n_user_claims),),
"redirect_uri": session.authz_request["redirect_uri"]
}
return render(request, self.template, context)
def post(self, request, *args, **kwargs):
try:
session = self.check_session(request)
except Exception:
logger.warning("Invalid session")
return HttpResponseForbidden()
self.payload = session.authz_request
form = self.get_consent_form()(request.POST)
if not form.is_valid():
            return self.redirect_response_data(
                self.payload["redirect_uri"],
                # TODO: this is not normative -> check AgID/IPZS
                error="rejected_by_user",
                error_description=_("User rejected the release of attributes"),
                state=self.payload.get("state", "")
            )
issuer = self.get_issuer()
iss_token_data = self.get_iss_token_data(session, issuer)
IssuedToken.objects.create(**iss_token_data)
return self.redirect_response_data(
self.payload["redirect_uri"],
code=session.auth_code,
state=session.authz_request["state"],
iss=issuer.sub if issuer else "",
)
def oidc_provider_not_consent(request):
redirect_uri = request.GET.get("redirect_uri")
logout(request)
    kwargs = dict(
        error="invalid_request",
        error_description=_(
            "Authentication request rejected by user"
        )
    )
url = f'{redirect_uri}?{urllib.parse.urlencode(kwargs)}'
return HttpResponseRedirect(url)
class UserAccessHistoryView(OpBase, View):
def get(self, request, *args, **kwargs):
try:
session = self.check_session(request)
except Exception:
logger.warning("Invalid session on Access History page")
return HttpResponseForbidden()
user_access_history = OidcSession.objects.filter(
user_uid=session.user_uid,
).exclude(auth_code=session.auth_code)
paginator = Paginator(
user_access_history,
OIDCFED_PROVIDER_HISTORY_PER_PAGE
)
page = request.GET.get("page", 1)
history = paginator.get_page(page)
context = {
"history": history,
"user": request.user
}
return render(request, "op_user_history.html", context)
class RevokeSessionView(OpBase, View):
def get(self, request, *args, **kwargs):
try:
self.check_session(request)
except Exception:
logger.warning("Invalid session on revoke page")
return HttpResponseForbidden()
if request.GET.get("auth_code"):
auth_code = request.GET["auth_code"]
            session_to_revoke = OidcSession.objects.filter(
                auth_code=auth_code,
                user=request.user
            ).first()
            if session_to_revoke:
                session_to_revoke.revoke(destroy_session=False)
return redirect("oidc_provider_access_history")
| 4,864 |
should_be/extensions/sequence.py
|
jayvdb/should_be
| 4 |
2024537
|
from should_be.core import BaseMixin, ObjectMixin
try:
from collections.abc import Sequence, Counter
except ImportError:
# python < 3.3
from collections import Sequence, Counter
class SequenceMixin(BaseMixin):
target_class = Sequence
def should_have_same_items_as(self, target):
msg_smaller = ('{txt} should have been {val}, but did not have '
'the items {items}')
msg_bigger = ('{txt} should have been {val}, but had the extra '
'items {items}')
msg_diff = ('{txt} should have been {val}, but differed in items '
'{i1} and {i2}')
fst = Counter(self)
snd = Counter(target)
we_had = fst - snd
they_had = snd - fst
if we_had != Counter() and they_had != Counter():
self.should_follow(fst == snd, msg_diff,
val=target,
i1=we_had.keys(),
i2=they_had.keys())
self.should_follow(we_had == {}, msg_bigger,
val=target,
items=list(we_had.elements()))
self.should_follow(they_had == {}, msg_smaller,
val=target,
items=list(they_had.elements()))
def should_be(self, target):
if self == target:
return
try:
len_msg = ('{txt} should have been {val}, but they had '
'different lengths ({l1} vs {l2})')
self.should_follow(len(self) == len(target), len_msg,
val=target,
l1=len(self),
l2=len(target))
item_msg = ('{txt} should have been {val}, but they differed '
'at item {ind} ({i1} vs {i2})')
rangef = range
try:
rangef = xrange
except NameError:
pass
for i in rangef(len(self)):
self.should_follow(self[i] == target[i], item_msg,
val=target,
ind=i,
i1=self[i],
i2=target[i])
except (TypeError, NotImplementedError):
ObjectMixin.should_be.__get__(self)(target)
| 2,401 |
lib/rapidsms/skeleton/project/manage.py
|
rapidsms/rapidsms-legacy
| 0 |
2023522
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import sys, os
sys.path.append(os.path.join(os.getcwd(),'lib'))
import rapidsms
if __name__ == "__main__":
rapidsms.manager.start(sys.argv)
| 202 |
src/scaffoldfitter/fitterstepconfig.py
|
zekh167/scaffoldfitter
| 0 |
2023220
|
"""
Fit step for configuring subsequent behaviour, e.g. data projection settings.
"""
from scaffoldfitter.fitterstep import FitterStep
class FitterStepConfig(FitterStep):
_jsonTypeId = "_FitterStepConfig"
def __init__(self):
super(FitterStepConfig, self).__init__()
# Example json serialisation within config step. Include only groups and options in-use
# Note that these are model group names; data group names differing by
# case or whitespace are set by Fitter to matching model names.
#"groupSettings": {
# "GROUPNAME1" : {
# "dataProportion" : 0.1
# }
# "GROUPNAME2" : {
# "dataProportion" : null
# }
# }
# The first group GROUPNAME1 uses only 0.1 = 10% of the data points.
# unlisted groups or groups without dataProportion inherit from earlier config step
# or back to initial global setting (1.0 in this case = include all points).
# null value cancels inherited dataProportion = go back to global setting.
self._groupSettings = {}
self._projectionCentreGroups = False
@classmethod
def getJsonTypeId(cls):
return cls._jsonTypeId
def decodeSettingsJSONDict(self, dctIn : dict):
"""
Decode definition of step from JSON dict.
"""
assert self._jsonTypeId in dctIn
# ensure all new options are in dct
dct = self.encodeSettingsJSONDict()
dct.update(dctIn)
self._groupSettings = dct["groupSettings"]
self._projectionCentreGroups = dct["projectionCentreGroups"]
def encodeSettingsJSONDict(self) -> dict:
"""
Encode definition of step in dict.
:return: Settings in a dict ready for passing to json.dump.
"""
return {
self._jsonTypeId : True,
"groupSettings" : self._groupSettings,
"projectionCentreGroups" : self._projectionCentreGroups
}
def getGroupSettingsNames(self):
"""
:return: List of names of groups settings are held for.
"""
return list(self._groupSettings.keys())
def clearGroupSettings(self, groupName):
"""
Clear all local settings for group so fall back to last config
settings or global defaults.
:param groupName: Exact model group name.
"""
        self._groupSettings.pop(groupName, None)
def clearGroupDataProportion(self, groupName):
"""
Clear local group data proportion so fall back to last config or global default.
:param groupName: Exact model group name.
"""
groupSettings = self._groupSettings.get(groupName)
if groupSettings:
groupSettings.pop("dataProportion", None)
if len(groupSettings) == 0:
self._groupSettings.pop(groupName)
def getGroupDataProportion(self, groupName):
"""
Get proportion of group data points to include in fit, or None to
use global default.
:param groupName: Exact model group name.
:return: Proportion, isLocallySet. Proportion is either a value from
0.0 to 1.0, where 0.1 = 10%, or None if using global value (1.0).
The second return value is True if the value is set locally.
"""
groupSettings = self._groupSettings.get(groupName)
if groupSettings:
proportion = groupSettings.get("dataProportion", "INHERIT")
if proportion != "INHERIT":
return proportion, True
inheritConfigStep = self.getFitter().getInheritFitterStepConfig(self)
proportion = inheritConfigStep.getGroupDataProportion(groupName)[0] if inheritConfigStep else None
return proportion, False
def setGroupDataProportion(self, groupName, proportion):
"""
Set proportion of group data points to include in fit, or force
return to global default.
:param groupName: Exact model group name.
:param proportion: Float valued proportion from 0.0 (0%) to 1.0 (100%),
or None to force used of global default. Asserts value is valid.
"""
assert (proportion is None) or (isinstance(proportion, float) and (0.0 <= proportion <= 1.0)), "FitterStepConfig: Invalid group data proportion"
groupSettings = self._groupSettings.get(groupName)
if not groupSettings:
groupSettings = self._groupSettings[groupName] = {}
groupSettings["dataProportion"] = proportion
def isProjectionCentreGroups(self):
return self._projectionCentreGroups
def setProjectionCentreGroups(self, projectionCentreGroups):
"""
:param projectionCentreGroups: True to compute projections of group data
translated so centre of data is at centre of target model geometry.
Helps fit features with good initial orientation, but in the wrong place.
:return: True if state changed, otherwise False.
"""
if projectionCentreGroups != self._projectionCentreGroups:
self._projectionCentreGroups = projectionCentreGroups
return True
return False
def run(self, modelFileNameStem=None):
"""
Calculate data projections with current settings.
:param modelFileNameStem: Optional name stem of intermediate output file to write.
"""
self._fitter.calculateDataProjections(self)
if modelFileNameStem:
self._fitter.writeModel(modelFileNameStem + "_config.exf")
self.setHasRun(True)
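

if __name__ == "__main__":
    # Minimal usage sketch (assuming FitterStep.__init__ needs no arguments,
    # as the super() call above suggests): set a per-group data proportion
    # and inspect the JSON serialisation described in the __init__ comment.
    config = FitterStepConfig()
    config.setGroupDataProportion("GROUPNAME1", 0.1)
    print(config.encodeSettingsJSONDict())
    # Expected shape:
    # {"_FitterStepConfig": True,
    #  "groupSettings": {"GROUPNAME1": {"dataProportion": 0.1}},
    #  "projectionCentreGroups": False}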
| 5,641 |
credentials.py
|
vicky-eve/Password-FlaskIP3
| 0 |
2023367
|
class Credentials:
"""
class that generates new instances of credentials.
"""
credentials_list = [] #empty credentials list
def __init__ (self, appname, username,password):
'''
__init__ method to define properties of our user.
Args:
appname:new_credentials appname.
username : new_credentials username.
password : <PASSWORD>.
'''
self.appname = appname
self.username = username
self.password = password
def save_credentials (self):
'''
save_credentials method saves credentials objects into credentials_list.
'''
Credentials.credentials_list.append(self)
@classmethod
def display_all_credentials(cls):
'''
method that returns the contact list
'''
return cls.credentials_list
def delete_credentials(self):
'''
delete_credentials method deletes a saved credential from the credentials_list
'''
Credentials.credentials_list.remove(self)
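
if __name__ == "__main__":
    # Minimal usage sketch of the class above (values are illustrative).
    creds = Credentials("twitter", "vicky", "pass123")
    creds.save_credentials()
    print(len(Credentials.display_all_credentials()))  # -> 1
    creds.delete_credentials()
    print(Credentials.display_all_credentials())       # -> []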
| 1,075 |
src/mldisasm/benchmarks/bench_stack.py
|
ChrisSwinchatt/MLDisasm
| 4 |
2025173
|
#!/usr/bin/env python3
'''
Compares tf.stack, tf.convert_to_tensor and tf.concat.
'''
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.INFO)
import ujson
from mldisasm.benchmarks.common import *
from mldisasm.io.file_manager import FileManager
from mldisasm.util import prof
BATCH_SIZE = 100
def run():
'''
Run benchmark.
'''
tf.enable_eager_execution()
file_mgr = FileManager()
results = []
with tf.device('/cpu:0'), file_mgr.open_training(MODEL_NAME) as file:
# Load and process batch.
X = [None]*BATCH_SIZE
y = [None]*BATCH_SIZE
file_iter = iter(file)
for i in range(BATCH_SIZE):
X[i], y[i] = ujson.loads(next(file_iter))
assert len(X) == BATCH_SIZE
assert len(y) == BATCH_SIZE
assert list(filter(lambda x: x is None, X)) == []
assert list(filter(lambda x: x is None, y)) == []
# Convert each pair of lists to tensors.
stack_times = []
concat_times = []
convert_times = []
# Hack: the first call (stack/concat/convert) runs significantly slower, resulting in imbalanced results. We try
# to fix this by running an unprofiled stack(). This does seem to make the subsequent calls perform more evenly.
# Most likely, the first call causes TF to initialise the GPU, so by calling stack() we are avoiding this.
tf.stack(X)
tf.stack(y)
for _ in range(BENCH_ITER):
# Profile tf.stack().
with prof(None, resources=['time'], log_level=None) as p:
tf.stack(X)
tf.stack(y)
time = p.profilers['time'].compute_delta()
time = round(time*1000)
stack_times.append(time)
# Profile tf.concat().
with prof(None, resources=['time'], log_level=None) as p:
tf.concat(X, axis=0)
tf.concat(y, axis=0)
time = p.profilers['time'].compute_delta()
time = round(time*1000)
concat_times.append(time)
# Profile tf.convert_to_tensor().
with prof(None, resources=['time'], log_level=None) as p:
tf.convert_to_tensor(X)
tf.convert_to_tensor(y)
time = p.profilers['time'].compute_delta()
time = round(time*1000)
convert_times.append(time)
results.append(BenchmarkResult('stack', stack_times))
results.append(BenchmarkResult('concat', concat_times))
results.append(BenchmarkResult('convert', convert_times))
return results
| 2,731 |
colmap_converter/__main__.py
|
dichotomies/NeuralDiff
| 5 |
2023110
|
import argparse
from .meta import *
from .frames import *
def parse_args():
    parser = argparse.ArgumentParser()
parser.add_argument(
"--colmap_dir",
type=str,
help="Root directory of COLMAP project directory, which contains `sparse/0`.",
)
parser.add_argument(
"--scale", default=1, type=int, help="Downscaling factor for images."
)
parser.add_argument(
"--dir_dst", default='data/custom', type=str, help="Destination directory for converted dataset."
)
parser.add_argument(
"--split_nth", default=0, type=int, help="select every n-th frame as validation and every other n-th frame as test frame."
)
args = parser.parse_args()
return args
def run(args):
colmap_model_dir = os.path.join(args.colmap_dir, 'sparse/0')
colmap = load_colmap(colmap_model_dir)
meta = calc_meta(colmap, split_nth=args.split_nth)
frames_dir_src = os.path.join(args.colmap_dir, 'images')
dataset_id = os.path.split(os.path.normpath(args.colmap_dir))[1]
dataset_dir = os.path.join(args.dir_dst, dataset_id)
frames_dir_dst = os.path.join(dataset_dir, 'images')
os.makedirs(frames_dir_dst)
save_meta(dataset_dir, meta)
save_frames(frames_dir_src, frames_dir_dst, meta)
if __name__ == '__main__':
args = parse_args()
run(args)
| 1,385 |
lib/utils/interface/htc.py
|
kpark1/dynamo
| 1 |
2024969
|
import sys
import logging
import re
import socket
import htcondor
from dynamo.dataformat import Configuration, ConfigurationError
LOG = logging.getLogger(__name__)
class HTCondor(object):
"""
HTCondor interface.
"""
_default_config = None
@staticmethod
def set_default(config):
HTCondor._default_config = Configuration(config)
def __init__(self, config = None):
if config is None:
if HTCondor._default_config is None:
raise ConfigurationError('HTCondor default configuration is not set')
config = HTCondor._default_config
self._collector_name = config.collector
self._collector = htcondor.Collector(config.collector)
self._schedd_constraint = config.schedd_constraint
self._schedds = []
def find_jobs(self, constraint = 'True', attributes = []):
"""
Return ClassAds for jobs matching the constraints.
"""
if len(self._schedds) == 0:
LOG.info('Finding schedds reporting to collector %s', self._collector_name)
attempt = 0
while True:
try:
schedd_ads = self._collector.query(htcondor.AdTypes.Schedd, self._schedd_constraint, ['MyAddress', 'ScheddIpAddr'])
break
except IOError:
attempt += 1
LOG.warning('Collector query failed: %s', str(sys.exc_info()[0]))
if attempt == 10:
LOG.error('Communication with the collector failed. We have no information of the condor pool.')
return
LOG.debug('%d schedd ads', len(schedd_ads))
for ad in schedd_ads:
LOG.debug(ad)
schedd = htcondor.Schedd(ad)
            matches = re.match(r'<([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+):([0-9]+)', ad['MyAddress'])
# schedd does not have an ipaddr attribute natively, but we can assign it
schedd.ipaddr = matches.group(1)
schedd.host = socket.getnameinfo((matches.group(1), int(matches.group(2))), socket.AF_INET)[0] # socket.getnameinfo(*, AF_INET) returns a (host, port) 2-tuple
self._schedds.append(schedd)
LOG.debug('Found schedds: %s', ', '.join(['%s (%s)' % (schedd.host, schedd.ipaddr) for schedd in self._schedds]))
LOG.debug('Querying HTCondor with constraint "%s" for attributes %s', constraint, str(attributes))
classads = []
for schedd in self._schedds:
attempt = 0
while True:
try:
ads = schedd.query(constraint, attributes)
break
except IOError:
attempt += 1
LOG.warning('IOError in communicating with schedd %s. Trying again.', schedd.ipaddr)
if attempt == 10:
LOG.error('Schedd %s did not respond.', schedd.ipaddr)
ads = []
break
classads.extend(ads)
LOG.info('HTCondor returned %d classads', len(classads))
return classads
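
# Minimal usage sketch (collector name and constraint are illustrative;
# querying requires a reachable HTCondor pool, so this is shown as comments):
# HTCondor.set_default({'collector': 'collector.example.org',
#                       'schedd_constraint': 'True'})
# htc = HTCondor()
# ads = htc.find_jobs(constraint='JobStatus == 2', attributes=['ClusterId'])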
| 3,247 |
packaging/flydra_compat/setup.py
|
elhananby/flydra
| 45 |
2024051
|
from distutils.core import setup
setup(name='flydra_compat',
version='0.1.1',
author='<NAME>',
author_email='<EMAIL>',
description='flydra backwards compatibility layer',
packages = ['flydra'],
)
| 233 |
installer.py
|
lueckem/learningPygame
| 1 |
2024169
|
import cx_Freeze
import os
os.environ['TCL_LIBRARY'] = "C:\\Users\\Marvins Pc\\AppData\\Local\\Programs\\Python\\Python36\\tcl\\tcl8.6"
os.environ['TK_LIBRARY'] = "C:\\Users\\Marvins Pc\\AppData\\Local\\Programs\\Python\\Python36\\tcl\\tk8.6"
executables = [cx_Freeze.Executable("main.py")]
cx_Freeze.setup(
name="from Mars to Jupiter",
options={"build_exe": {"packages":["pygame","random"],
"include_files":["highscores.txt","LICENSE.txt", "README.md", "images",
"C:\\Users\\Marvins Pc\\AppData\\Local\\Programs\\Python\\Python36\\Lib\\site-packages\\pygame\\freesansbold.ttf"]}},
executables = executables
)
#"highscores.txt","LICENSE.txt", "README.md","images\asteroid_large.png",
# "images\asteroid_medium.png", "images\asteroid_small.png", "images\ship.png",
# "images\shipicon.png","images\tutorial.png"
| 990 |
discovery/save_load/test_df.py
|
zhengfaning/vnpy_andy
| 0 |
2024009
|
import pandas as pd
x = dict(
index = 100,
name='xx'
)
data = [
{'index': 1, 'name':'man', 'age': 100},
{'index': 2, 'name':'man1', 'age': 10},
{'index': 3, 'name':'man2', 'age': 50},
]
df = pd.DataFrame(data)
print(df.columns.values.tolist())
df.set_index(['index'], inplace=True)
print(df.columns.values.tolist())
print(df)
| 351 |
Assignment/Swap of Two numbers.py
|
reddyprasade/PYTHON-BASIC-FOR-ALL
| 21 |
2025029
|
# Python program to swap two variables
num1 = input('Enter First Number: ')
num2 = input('Enter Second Number: ')
print("Value of num1 before swapping: ", num1)
print("Value of num2 before swapping: ", num2)
# swapping two numbers using temporary variable
temp = num1
num1 = num2
num2 = temp
print("Value of num1 after swapping: ", num1)
print("Value of num2 after swapping: ", num2)
| 403 |
Data Science With Python/10-merging-dataframes-with-pandas/04-case-study-summer-olympics/04-counting-medals-by-country-edition-in-a-pivot-table.py
|
aimanahmedmoin1997/DataCamp
| 3 |
2025115
|
'''
Counting medals by country/edition in a pivot table
Here, you'll start with the concatenated DataFrame medals from the previous exercise.
You can construct a pivot table to see the number of medals each country won in each year. The result is a new DataFrame with the Olympic edition on the Index and with 138 country NOC codes as columns. If you want a refresher on pivot tables, it may be useful to refer back to the relevant exercises in Manipulating DataFrames with pandas.
INSTRUCTIONS
100XP
Construct a pivot table from the DataFrame medals, aggregating by count (by specifying the aggfunc parameter). Use 'Edition' as the Index, 'Athlete' for the values, and 'NOC' for the columns.
Print the first & last 5 rows of medal_counts. This has been done for you, so hit 'Submit Answer' to see the results!
'''
# Construct the pivot_table: medal_counts
medal_counts = medals.pivot_table(aggfunc='count', index='Edition', values='Athlete', columns='NOC')
# Print the first & last 5 rows of medal_counts
print(medal_counts.head())
print(medal_counts.tail())
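
# The `medals` DataFrame comes from the exercise environment; below is a
# self-contained toy sketch of the same pivot_table call (data illustrative).
import pandas as pd
toy = pd.DataFrame({'Edition': [1896, 1896, 1900],
                    'NOC': ['USA', 'FRA', 'USA'],
                    'Athlete': ['A', 'B', 'C']})
print(toy.pivot_table(aggfunc='count', index='Edition',
                      values='Athlete', columns='NOC'))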
| 1,064 |
src/q0103.py
|
mirzadm/ctci-5th-py
| 0 |
2025230
|
"""Chapter 1: Question 3.
Is string 1 a permuation of string 2?
"""
def is_permutation_sort(s1, s2):
"""Uses sorting."""
s1_list = list(s1)
s2_list = list(s2)
s1_list.sort()
s2_list.sort()
return s1_list == s2_list
def is_permutation_dict(s1, s2):
"""Uses a dictionary to store character counts."""
n1 = len(s1)
n2 = len(s2)
if n1 != n2:
return False
if s1 == s2:
return True
ch_count = {}
for ch in s1:
if ch_count.get(ch, 0) == 0:
ch_count[ch] = 1
else:
ch_count[ch] += 1
for ch in s2:
if ch_count.get(ch, 0) == 0:
return False
else:
ch_count[ch] -= 1
return True
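
if __name__ == '__main__':
    # Quick sanity checks for both implementations (inputs are illustrative).
    for s1, s2, expected in [('abc', 'cab', True),
                             ('abc', 'abd', False),
                             ('aab', 'abb', False)]:
        assert is_permutation_sort(s1, s2) == expected
        assert is_permutation_dict(s1, s2) == expected
    print('All permutation checks passed.')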
| 733 |
tests/test_ztranslation.py
|
sn6uv/rply
| 0 |
2024967
|
import py
try:
from rpython.rtyper.test.test_llinterp import interpret
except ImportError:
py.test.skip("Needs RPython to be on the PYTHONPATH")
from rply import LexerGenerator, ParserGenerator, Token
from rply.errors import ParserGeneratorWarning
from .base import BaseTests
from .utils import BoxInt, ParserState
class BaseTestTranslation(BaseTests):
def test_basic_lexer(self):
lg = LexerGenerator()
lg.add("NUMBER", r"\d+")
lg.add("PLUS", r"\+")
l = lg.build()
def f(n):
tokens = l.lex("%d+%d+%d" % (n, n, n))
i = 0
s = 0
while i < 5:
t = tokens.next()
if i % 2 == 0:
if t.name != "NUMBER":
return -1
s += int(t.value)
else:
if t.name != "PLUS":
return -2
if t.value != "+":
return -3
i += 1
ended = False
try:
tokens.next()
except StopIteration:
ended = True
if not ended:
return -4
return s
assert self.run(f, [14]) == 42
def test_basic_parser(self):
pg = ParserGenerator(["NUMBER", "PLUS"])
@pg.production("main : expr")
def main(p):
return p[0]
@pg.production("expr : expr PLUS expr")
def expr_op(p):
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expr : NUMBER")
def expr_num(p):
return BoxInt(int(p[0].getstr()))
with self.assert_warns(
ParserGeneratorWarning, "1 shift/reduce conflict"
):
parser = pg.build()
def f(n):
return parser.parse(iter([
Token("NUMBER", str(n)),
Token("PLUS", "+"),
Token("NUMBER", str(n))
])).getint()
assert self.run(f, [12]) == 24
def test_parser_state(self):
pg = ParserGenerator(["NUMBER", "PLUS"], precedence=[
("left", ["PLUS"]),
])
@pg.production("main : expression")
def main(state, p):
state.count += 1
return p[0]
@pg.production("expression : expression PLUS expression")
def expression_plus(state, p):
state.count += 1
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expression : NUMBER")
def expression_number(state, p):
state.count += 1
return BoxInt(int(p[0].getstr()))
parser = pg.build()
def f():
state = ParserState()
return parser.parse(iter([
Token("NUMBER", "10"),
Token("PLUS", "+"),
Token("NUMBER", "12"),
Token("PLUS", "+"),
Token("NUMBER", "-2"),
]), state=state).getint() + state.count
assert self.run(f, []) == 26
class TestTranslation(BaseTestTranslation):
def run(self, func, args):
return interpret(func, args)
class TestUntranslated(BaseTestTranslation):
def run(self, func, args):
return func(*args)
| 3,296 |
tests/test_utils.py
|
titusz/onixcheck
| 23 |
2025099
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import types
from onixcheck import utils
from os.path import abspath, dirname
TEST_DIR = abspath(dirname(utils.__file__))
def test_iter_files_simple():
gen = utils.iter_files(TEST_DIR, ['py'])
assert isinstance(gen, types.GeneratorType)
assert len(list(gen)) > 5
def test_iter_files_no_matches():
gen = utils.iter_files(TEST_DIR, ['noext'])
assert len(list(gen)) == 0
def test_iter_files_flat():
gen = utils.iter_files(TEST_DIR, ['xsd'])
assert len(list(gen)) == 0
gen = utils.iter_files(TEST_DIR, ['PY'])
assert len(list(gen)) > 5
def test_iter_files_recursive():
gen = utils.iter_files(TEST_DIR, ['xsd'], recursive=True)
assert len(list(gen)) > 5
| 766 |
google/maps/roads/v1op/roads-v1op-py/google/maps/roads_v1/types/roads.py
|
googleapis/googleapis-gen
| 7 |
2024913
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import wrappers_pb2 # type: ignore
from google.type import latlng_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.maps.roads.v1op',
manifest={
'TravelMode',
'SnapToRoadsRequest',
'SnappedPoint',
'SnapToRoadsResponse',
'ListNearestRoadsRequest',
'ListNearestRoadsResponse',
},
)
class TravelMode(proto.Enum):
r"""An enum representing the mode of travel used for snapping."""
TRAVEL_MODE_UNSPECIFIED = 0
DRIVING = 1
CYCLING = 2
WALKING = 3
class SnapToRoadsRequest(proto.Message):
r"""A request to the SnapToRoads method, requesting that a
sequence of points be snapped to road segments.
Attributes:
path (str):
The path to be snapped as a series of lat, lng points.
Specified as a string of the format: lat,lng|lat,lng|...
interpolate (bool):
Whether to interpolate the points to return
full road geometry.
asset_id (str):
The asset ID of the asset to which this path
relates. This is used for abuse detection
purposes for clients with asset-based SKUs.
travel_mode (google.maps.roads_v1.types.TravelMode):
The type of travel being tracked. This will
constrain the paths we snap to.
"""
path = proto.Field(
proto.STRING,
number=1,
)
interpolate = proto.Field(
proto.BOOL,
number=2,
)
asset_id = proto.Field(
proto.STRING,
number=3,
)
travel_mode = proto.Field(
proto.ENUM,
number=4,
enum='TravelMode',
)
class SnappedPoint(proto.Message):
r"""A snapped point object, representing the result of snapping.
Attributes:
location (google.type.latlng_pb2.LatLng):
The lat,lng of the snapped location.
original_index (google.protobuf.wrappers_pb2.UInt32Value):
The index into the original path of the
equivalent pre-snapped point. This allows for
identification of points which have been
interpolated if this index is missing.
place_id (str):
The place ID for this snapped location (road
segment). These are the same as are currently
used by the Places API.
"""
location = proto.Field(
proto.MESSAGE,
number=1,
message=latlng_pb2.LatLng,
)
original_index = proto.Field(
proto.MESSAGE,
number=2,
message=wrappers_pb2.UInt32Value,
)
place_id = proto.Field(
proto.STRING,
number=3,
)
class SnapToRoadsResponse(proto.Message):
r"""The response from the SnapToRoads method, returning a
sequence of snapped points.
Attributes:
snapped_points (Sequence[google.maps.roads_v1.types.SnappedPoint]):
A list of snapped points.
warning_message (str):
User-visible warning message, if any, which
can be shown alongside a valid result.
"""
snapped_points = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='SnappedPoint',
)
warning_message = proto.Field(
proto.STRING,
number=2,
)
class ListNearestRoadsRequest(proto.Message):
r"""A request to the ListNearestRoads method, requesting that a
sequence of points be snapped individually to the road segment
that each is closest to.
Attributes:
points (str):
The points to be snapped as a series of lat, lng points.
Specified as a string of the format: lat,lng|lat,lng|...
travel_mode (google.maps.roads_v1.types.TravelMode):
The type of travel being tracked. This will
constrain the roads we snap to.
"""
points = proto.Field(
proto.STRING,
number=1,
)
travel_mode = proto.Field(
proto.ENUM,
number=2,
enum='TravelMode',
)
class ListNearestRoadsResponse(proto.Message):
r"""The response from the ListNearestRoads method, returning a
list of snapped points.
Attributes:
snapped_points (Sequence[google.maps.roads_v1.types.SnappedPoint]):
A list of snapped points.
"""
snapped_points = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='SnappedPoint',
)
__all__ = tuple(sorted(__protobuf__.manifest))
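

if __name__ == '__main__':
    # Minimal construction sketch for the request message above; coordinates
    # are illustrative and use the documented lat,lng|lat,lng path form.
    example = SnapToRoadsRequest(
        path='60.170880,24.942795|60.170879,24.942796',
        interpolate=True,
        travel_mode=TravelMode.DRIVING,
    )
    print(example)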
| 5,119 |
interview-problems/sumOfOneDArray.py
|
uonyekwuluje/python-development
| 0 |
2024154
|
from typing import List
class Solution:
"""
This code computes the sum of 1 dimensional array
"""
def runningSum(self, nums: List[int]) -> List[int]:
newArray = []
for x in range(len(nums)):
sumn = 0
for y in range(x+1):
sumn += nums[y]
newArray.append(sumn)
return(newArray)
if __name__ == '__main__':
newsoln = Solution()
testArrays = [[1,2,3,4],[1,1,1,1,1],[3,1,2,10,1]]
for x in range(len(testArrays)):
print("Array:=> {} | Sum of Array:=> {}".format(testArrays[x], newsoln.runningSum(testArrays[x])))
| 626 |
vendas/migrations/0004_auto_20210205_0624.py
|
WesGtoX/gestao-clientes
| 0 |
2024779
|
# Generated by Django 2.2.6 on 2021-02-05 06:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vendas', '0003_auto_20210119_1647'),
]
operations = [
migrations.AlterModelOptions(
name='venda',
options={'permissions': (('setar_nfe', 'Usuário pode alterar parametro NF-e'), ('permissao2', 'Permissão 2'), ('permissao3', 'Permissão 3'))},
),
migrations.AlterField(
model_name='itemdopedido',
name='desconto',
field=models.DecimalField(decimal_places=2, default=0, max_digits=5),
),
migrations.AlterField(
model_name='itemdopedido',
name='quantidade',
field=models.FloatField(default=0),
),
]
| 814 |
Factory Method/Python 3/WarningLogger.py
|
kuuhaku86/design-patterns
| 11 |
2023631
|
from ILogger import ILogger
from datetime import datetime
class WarningLogger(ILogger):
def print(self, message):
print("{} [WARNING]: {}".format(datetime.now().isoformat(), message))
| 191 |
ch1/recipe1/train_networks_mnist.py
|
xinglu/Tensorflow-2.0-Computer-Vision-Cookbook
| 1 |
2024844
|
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer
from tensorflow.keras import Input
from tensorflow.keras.datasets import mnist
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import *
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape((X_train.shape[0], 28 * 28 * 1))
X_test = X_test.reshape((X_test.shape[0], 28 * 28 * 1))
X_train = X_train.astype('float32') / 255.0
X_test = X_test.astype('float32') / 255.0
label_binarizer = LabelBinarizer()
y_train = label_binarizer.fit_transform(y_train)
y_test = label_binarizer.fit_transform(y_test)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, train_size=0.8)
sequential_model = Sequential()
sequential_model.add(Dense(256, input_shape=(28 * 28 * 1,), activation='sigmoid'))
sequential_model.add(Dense(128, activation='sigmoid'))
sequential_model.add(Dense(10, activation='softmax'))
layers = [Dense(256, input_shape=(28 * 28 * 1,), activation='sigmoid'),
Dense(128, activation='sigmoid'),
Dense(10, activation='softmax')]
sequential_model2 = Sequential(layers)
input_layer = Input(shape=(28 * 28 * 1,))
dense_1 = Dense(256, activation='sigmoid')(input_layer)
dense_2 = Dense(128, activation='sigmoid')(dense_1)
predictions = Dense(10, activation='softmax')(dense_2)
functional_model = Model(inputs=input_layer, outputs=predictions)
class ClassModel(Model):
def __init__(self):
super(ClassModel, self).__init__()
self.dense_1 = Dense(256, activation='sigmoid')
        self.dense_2 = Dense(128, activation='sigmoid')  # 128 units to match the other model variants
self.predictions = Dense(10, activation='softmax')
def call(self, inputs, **kwargs):
x = self.dense_1(inputs)
x = self.dense_2(x)
return self.predictions(x)
class_model = ClassModel()
models = {
'sequential_model': sequential_model,
'sequential_model_list': sequential_model2,
'functional_model': functional_model,
'class_model': class_model
}
for name, model in models.items():
print(f'Compiling model: {name}')
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
print(f'Training model: {name}')
model.fit(X_train, y_train,
validation_data=(X_valid, y_valid),
epochs=50,
batch_size=256,
verbose=0)
_, accuracy = model.evaluate(X_test, y_test, verbose=0)
print(f'Testing model: {name}. \nAccuracy: {accuracy}')
print('---')
| 2,550 |
src/graph_transpiler/webdnn/backend/webgpu/kernels/embedding.py
|
gunpowder78/webdnn
| 1 |
2022916
|
from typing import List
from webdnn.backend.code_generator.allocator import MemoryLayout
from webdnn.backend.code_generator.injectors.buffer_injector import BufferInjector
from webdnn.backend.code_generator.injectors.kernel_name_injector import KernelNameInjector
from webdnn.backend.webgpu.generator import WebGPUDescriptorGenerator
from webdnn.backend.webgpu.kernel import Kernel, GPUSize
from webdnn.backend.webgpu.preset_placeholders import MAX_THREADS_PER_THREADGROUP
from webdnn.graph.axis import Axis
from webdnn.graph.operators.embedding import Embedding
from webdnn.graph.order import OrderCN, OrderNT, OrderNTC
template = """
kernel void %%FUNC_NAME%%(device float * %%STATIC_BUFFER%%[[buffer(0)]],
device float * %%DYNAMIC_BUFFER%%[[buffer(1)]],
const device int * %%META_BUFFER%% [[buffer(2)]],
uint global_index[[thread_position_in_grid]],
uint num_threads[[threads_per_grid]])
{
const device float *X = %%LOAD_BUFFER(embedding_X)%%;
device float *Y = %%LOAD_BUFFER(embedding_Y)%%;
const device float *W = %%LOAD_BUFFER(embedding_W)%%;
const int T = %%LOAD_BUFFER(embedding_T)%%;
const int N = %%LOAD_BUFFER(embedding_N)%%;
const int C = %%LOAD_BUFFER(embedding_C)%%;
for (int gid = global_index; gid < N * T; gid += num_threads) {
const int t = gid % T;
const int n = gid / T;
const int word = (int)X[gid];
for (int c = 0; c < C; c++) {
Y[(n * T + t) * C + c] = W[word * C + c];
}
}
}
"""
@WebGPUDescriptorGenerator.register_handler(Embedding)
def embedding(op: Embedding, memory_layout: MemoryLayout) -> List[Kernel]:
x = op.inputs["x"]
w = op.inputs["w"]
y = op.outputs["y"]
assert x.order == OrderNT
assert w.order == OrderCN
assert y.order == OrderNTC
buffer_injector = BufferInjector()
buffer_injector.register({
"embedding_X": memory_layout[x],
"embedding_Y": memory_layout[y],
"embedding_W": memory_layout[w],
"embedding_T": x.shape_dict[Axis.T],
"embedding_N": x.shape_dict[Axis.N],
"embedding_C": w.shape_dict[Axis.N]
})
name_injector = KernelNameInjector(op)
source = template
source = buffer_injector.inject(source)
source = name_injector.inject(source)
kernel = Kernel(
{name_injector.name: source},
name_injector.name,
GPUSize(8, 1, 1),
GPUSize(MAX_THREADS_PER_THREADGROUP, 1, 1),
buffer_injector.buffer,
buffer_injector.unresolved_value_list
)
return [kernel]
| 2,659 |
spot_demo_cdk/download_export_model.py
|
aws-samples/greengrass-v2-spot-robot-demo
| 0 |
2024341
|
import gluoncv as gcv
from gluoncv.utils import export_block
import tarfile
import os
MODEL_NAME = "ssd_512_mobilenet1.0_voc"
MODEL_DIR = "./pretrained_models/{}".format(MODEL_NAME)
# create model directory
if not os.path.exists(MODEL_DIR):
os.makedirs(MODEL_DIR, exist_ok=True)
# export model
net = gcv.model_zoo.get_model(MODEL_NAME, pretrained=True)
export_block(
"{}/{}".format(MODEL_DIR, MODEL_NAME),
net,
preprocess=True,
layout="HWC",
)
# create model archive
with tarfile.open("{}.tar.gz".format(MODEL_DIR), "w:gz") as tar:
tar.add(
MODEL_DIR,
arcname=os.path.basename(MODEL_NAME),
)
print("Done.")
| 656 |
mosaic_ml/autosklearn_wrapper/autosklearn.py
|
herilalaina/mosaic_ml
| 20 |
2024737
|
#! /usr/bin/python -u
# Below source code is extracted from https://github.com/automl/auto-sklearn
import os
import scipy
import mosaic_ml
import numpy as np
from autosklearn.util import pipeline
from autosklearn import metalearning
from autosklearn.smbo import EXCLUDE_META_FEATURES_CLASSIFICATION, EXCLUDE_META_FEATURES_REGRESSION, CLASSIFICATION_TASKS, \
MULTICLASS_CLASSIFICATION, BINARY_CLASSIFICATION, MULTILABEL_CLASSIFICATION, REGRESSION
from autosklearn.constants import *
from mosaic_ml.autosklearn_wrapper.mismbo import suggest_via_metalearning_
from autosklearn.metalearning.metalearning.meta_base import MetaBase
from autosklearn.data.abstract_data_manager import perform_one_hot_encoding
from autosklearn.metalearning.metafeatures.metafeatures import \
calculate_all_metafeatures_with_labels, calculate_all_metafeatures_encoded_labels
def _calculate_metafeatures__(data_feat_type, data_info_task, basename,
x_train, y_train):
# == Calculate metafeatures
task_name = 'CalculateMetafeatures'
    categorical = [feat_type.lower() == 'categorical' for feat_type in data_feat_type]
EXCLUDE_META_FEATURES = EXCLUDE_META_FEATURES_CLASSIFICATION \
if data_info_task in CLASSIFICATION_TASKS else EXCLUDE_META_FEATURES_REGRESSION
if data_info_task in [MULTICLASS_CLASSIFICATION, BINARY_CLASSIFICATION,
MULTILABEL_CLASSIFICATION, REGRESSION]:
result = calculate_all_metafeatures_with_labels(
x_train, y_train, categorical=categorical,
dataset_name=basename,
dont_calculate=EXCLUDE_META_FEATURES, )
for key in list(result.metafeature_values.keys()):
if result.metafeature_values[key].type_ != 'METAFEATURE':
del result.metafeature_values[key]
else:
result = None
return result
def _calculate_metafeatures_encoded__(basename, x_train, y_train, task=MULTICLASS_CLASSIFICATION):
    # NOTE: `task` was read from an undefined global in the original; it is now
    # a parameter defaulting to classification, matching the existing callers.
    EXCLUDE_META_FEATURES = EXCLUDE_META_FEATURES_CLASSIFICATION \
        if task in CLASSIFICATION_TASKS else EXCLUDE_META_FEATURES_REGRESSION
task_name = 'CalculateMetafeaturesEncoded'
result = calculate_all_metafeatures_encoded_labels(
x_train, y_train, categorical=[False] * x_train.shape[1],
dataset_name=basename, dont_calculate=EXCLUDE_META_FEATURES)
for key in list(result.metafeature_values.keys()):
if result.metafeature_values[key].type_ != 'METAFEATURE':
del result.metafeature_values[key]
return result
def get_autosklearn_metalearning(X_train, y_train, cat, metric, num_initial_configurations):
task_id = "new_task"
is_sparse = scipy.sparse.issparse(X_train)
dataset_properties = {
'signed': True,
        'multiclass': len(np.unique(y_train)) != 2,
'task': 1 if len(np.unique(y_train)) == 2 else 2,
'sparse': is_sparse,
'is_sparse': is_sparse,
'target_type': 'classification',
'multilabel': False
}
config_space = pipeline.get_configuration_space(dataset_properties, None, None, None, None)
metalearning_dir = os.path.join(os.path.dirname(metalearning.__file__),
"files",
"balanced_accuracy_{0}.classification_{1}".format("multiclass" if dataset_properties["multiclass"] else "binary",
"sparse" if dataset_properties["sparse"] else "dense"))
metabase = MetaBase(config_space, metalearning_dir)
meta_features = None
try:
rvals, sparse = perform_one_hot_encoding(dataset_properties["sparse"],
[c in ['categorical'] for c in cat],
[X_train])
meta_features = _calculate_metafeatures_encoded__(task_id, rvals[0], y_train)
X_train = rvals
    except Exception:
meta_features = _calculate_metafeatures__(cat, MULTICLASS_CLASSIFICATION, task_id, X_train, y_train)
if meta_features is None:
raise Exception("Error calculating metafeatures")
metabase.add_dataset(task_id, meta_features)
configs, list_nn = (suggest_via_metalearning_(metabase, task_id, metric, 2 if dataset_properties["multiclass"] else 1, False, num_initial_configurations))
return configs, list_nn
| 4,357 |
storage/pycopia/db/importers/nmap.py
|
kdart/pycopia
| 89 |
2025246
|
#!/usr/bin/python2.5
# -*- coding: us-ascii -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# Copyright (C) 2010 <NAME> <<EMAIL>>
#
# NOTE: Most of Pycopia is licensed under the LGPL license. This module is
# licenced under the GPL licence due to nmap license requirements. It
# parses the nmap XML output and is thus considered a derivative work.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
"""
Import nmap XML output into the network model.
Even though nmap reports discovered hosts as hosts, they are actually
interfaces. This importer creates unattached interfaces. A
user/administrator will have to go and assign created interfaces to hosts.
"""
import xml
if hasattr(xml, "use_pyxml"):
xml.use_pyxml()
import xml.sax.sax2exts
import xml.sax.handler
from pycopia.logging import warn
from pycopia import ipv4
from pycopia.db import models
# States the XML host scanner can be in
NOTINTERESTED = 0
INTERESTED = 1
INTERFACE_TYPE_ID = 6 # ethernetCsmacd
INTERFACE_TYPE = None
def get_network(session, ipnet):
"""Returns a Network model object. Creates one if necessary."""
q = session.query(models.Network).filter(models.Network.ipnetwork==ipnet.CIDR)
try:
net = q.one()
except models.NoResultFound:
args = dict(name=ipnet.CIDR, ipnetwork=ipnet.CIDR, layer=3)
net = models.create(models.Network, **args)
session.add(net)
session.commit()
return net
def get_equipment(session, hostname):
q = session.query(models.Equipment).filter(models.Equipment.name==hostname)
try:
host = q.one()
except models.NoResultFound:
return get_unknown_equipment(session, hostname)
else:
return host
def get_unknown_equipment(session, hostname):
model = get_unknown_equipment_model(session)
eq = models.create(models.Equipment, name=hostname, model=model, comment="Added by nmap importer.")
session.add(eq)
session.commit()
return eq
def get_unknown_equipment_model(session):
cat = session.query(models.EquipmentCategory).filter(models.EquipmentCategory.name=="unknown").one()
q = session.query(models.EquipmentModel).filter(models.and_(
models.EquipmentModel.name=="Unknown",
models.EquipmentModel.category==cat)
)
try:
model = q.one()
except models.NoResultFound:
manu = session.query(models.Corporation).filter(models.Corporation.name=="Custom").one()
model = models.create(models.EquipmentModel, name="Unknown", category=cat, manufacturer=manu)
session.add(model)
session.commit()
return model
def get_interface_type(session, enumeration=INTERFACE_TYPE_ID):
global INTERFACE_TYPE
if INTERFACE_TYPE is None:
q = session.query(models.InterfaceType).filter(
models.InterfaceType.enumeration==enumeration,
)
INTERFACE_TYPE = q.one()
return INTERFACE_TYPE
def add_interface(session, attribs):
"""Add new interface, don't duplicate existing one.
Also try connecting to equipment if possible.
"""
network = attribs.get("network")
ipaddr = attribs["ipaddr"]
attribs["interface_type"] = get_interface_type(session)
q = session.query(models.Interface).filter(models.and_(
models.Interface.network==network,
models.Interface.ipaddr==ipaddr)
)
# try to find equipment by matching name.
hostname = attribs.get("description")
if hostname:
eq = get_equipment(session, hostname)
del attribs["description"]
else:
eq = None
attribs["equipment"] = eq
try:
intf = q.one()
except models.NoResultFound:
intf = models.create(models.Interface, **attribs)
session.add(intf)
session.commit()
else:
models.update(intf, **attribs)
session.commit()
class ContentHandler(object):
"""SAX content handler.
Manages state and adds interface records when enough host data is collected.
"""
def __init__(self):
self._state = NOTINTERESTED
self.network = None
self.session = None
self._locator = None
self._current_interface = None
def startDocument(self):
self.session = models.get_session()
def endDocument(self):
sess = self.session
self.session = None
self.network = None
sess.commit()
sess.close()
def startElement(self, name, attribs):
if name == "nmaprun":
cidr = attribs["args"].split()[-1] # assumes one network scanned
net = ipv4.IPv4(str(cidr)) # cidr would be unicode
self.network = get_network(self.session, net)
return
elif name == "host":
self._state = INTERESTED
self._current_interface = dict(name="unknown")
elif self._state == NOTINTERESTED:
return
elif name == "status":
if attribs["state"] == "down":
self._state = NOTINTERESTED
self._current_interface = None
elif name == "address":
if attribs["addrtype"] == "ipv4":
intf = self._current_interface
intf["ipaddr"] = str(attribs["addr"])
intf["layer"] = 3
intf["network"] = self.network
elif attribs["addrtype"] == "mac":
self._current_interface["macaddr"] = str(attribs["addr"])
elif name == "hostnames": # strange that namp treats hostnames and addresses differently.
pass
elif name == "hostname":
# stuff name in description since we can't tell what host this
# is attached to yet. User can later use this to map to host.
# Keep only the last one.
self._current_interface["description"] = str(attribs["name"])
def endElement(self, name):
if name == "host":
if self._state == INTERESTED:
intf = self._current_interface
self._current_interface = None
add_interface(self.session, intf)
self._state = NOTINTERESTED
def setDocumentLocator(self, locator):
self._locator = locator
def characters(self, text):
pass
def processingInstruction(self, target, data):
'handle: xml version="1.0" encoding="ISO-8859-1"?'
pass
def startPrefixMapping(self, prefix, uri):
warn("!!! Unhandled prefix: %r" % (prefix,))
def endPrefixMapping(self, prefix):
pass
    def skippedEntity(self, name):
        warn("unhandled skippedEntity: %r" % (name,))
def ignorableWhitespace(self, whitespace):
warn("unhandled ignorableWhitespace: %r" % (whitespace,))
def startElementNS(self, cooked_name, name, atts):
pass
def endElementNS(self, name, rawname):
pass
# DTDHandler interface
def notationDecl(self, name, publicId, systemId):
"""Handle a notation declaration event."""
warn("unhandled notationDecl: %r %r %r" % ( name, publicId, systemId))
def unparsedEntityDecl(self, name, publicId, systemId, ndata):
warn("unhandled unparsedEntityDecl: %r %r %r %r" % ( name, publicId, systemId, ndata))
# entity resolver interface
def resolveEntity(self, publicId, systemId):
pass
class ErrorHandler(object):
def __init__(self, logfile=None):
self._lf = logfile
def error(self, exception):
"Handle a recoverable error."
#raise exception
warn("XML error: %s" % (exception,))
def fatalError(self, exception):
"Handle a non-recoverable error."
#raise exception
warn("XML fatalError: %s" % (exception,))
def warning(self, exception):
"Handle a warning."
warn("XML Warning: %s" % (exception,))
def get_parser(logfile=None, namespaces=0, validate=0, external_ges=0):
handler = ContentHandler()
# create parser
parser = xml.sax.sax2exts.XMLParserFactory.make_parser()
parser.setFeature(xml.sax.handler.feature_namespaces, namespaces)
parser.setFeature(xml.sax.handler.feature_validation, validate)
parser.setFeature(xml.sax.handler.feature_external_ges, external_ges)
parser.setFeature(xml.sax.handler.feature_external_pes, 0)
parser.setFeature(xml.sax.handler.feature_string_interning, 1)
# set handlers
parser.setContentHandler(handler)
parser.setDTDHandler(handler)
parser.setEntityResolver(handler)
parser.setErrorHandler(ErrorHandler(logfile))
return parser
def import_nmap(filename):
parser = get_parser()
parser.parse(filename)
if __name__ == "__main__":
import sys
from pycopia import autodebug
import_nmap("/home/keith/tmp/10-223-1-0.xml")
| 9,327 |
SerpentRPG.py
|
Snackhole/SerpentRPG
| 1 |
2025043
|
import os
import sys
AbsoluteDirectoryPath = os.path.dirname(os.path.abspath(__file__))
if AbsoluteDirectoryPath.endswith(".pyz") or AbsoluteDirectoryPath.endswith(".pyzw"):
AbsoluteDirectoryPath = os.path.dirname(AbsoluteDirectoryPath)
if sys.path[0] != AbsoluteDirectoryPath:
sys.path.insert(0, AbsoluteDirectoryPath)
from PyQt5.QtWidgets import QApplication
from Interface.Windows.DiceRollerWindow import DiceRollerWindow
from Interface.Windows.DieClockWindow import DieClockWindow
from Interface.Windows.ModeSelectionWindow import ModeSelectionWindow
from Interface.Windows.WildernessTravelManagerWindow import WildernessTravelManagerWindow
from Build import BuildVariables
def StartApp():
AppInst = QApplication(sys.argv)
# Script Name
ScriptName = BuildVariables["VersionedAppName"]
# Mode Selection Window
ModeSelectionWindowInst = ModeSelectionWindow(ScriptName, AbsoluteDirectoryPath)
# Enter Mode Selection Loop
AppInst.exec_()
# Initialize Mode
Mode = ModeSelectionWindowInst.Mode
if Mode is not None:
# Modes Dictionary
Modes = {}
Modes["Dice Roller"] = DiceRollerWindow
Modes["Die Clock"] = DieClockWindow
Modes["Wilderness Travel Manager"] = WildernessTravelManagerWindow
# Create Mode Window
ModeWindowInst = Modes[Mode](ScriptName, AbsoluteDirectoryPath)
# Enter Mode Loop
sys.exit(AppInst.exec_())
if __name__ == "__main__":
StartApp()
| 1,490 |
tests/test_affine_memory.py
|
giantoak/dedupe
| 1 |
2024606
|
from dedupe.distance.affinegap import affineGapDistance as stringDistance
for i in range(10**6):  # was xrange; range keeps this runnable on Python 3
stringDistance('asdfadsfa', '3qrqwear')
| 141 |
paper/aihc_stats/stats/summary_metrics.py
|
stanfordmlgroup/lca-code
| 8 |
2024669
|
import sklearn.metrics as skm
from rpy2.robjects.packages import importr
log_loss = skm.log_loss
def roc_auc_score(groundtruth, probabilities, ci):
if ci:
proc = importr('pROC')
roc = proc.roc(groundtruth, probabilities)
auc = proc.ci_auc(roc, conf_level=0.95, method="delong")
# lower (95%), mean, upper (95%)
ci_dict = {'lower': auc[0], 'mean': auc[1], 'upper': auc[2]}
return ci_dict
else:
return skm.roc_auc_score(groundtruth, probabilities)
def pr_auc_score(groundtruth, probabilities, ci):
if ci:
# See also: https://www.rdocumentation.org/packages/MRIaggr/versions/1.1.5/topics/calcAUPRC
mriaggr = importr('MRIaggr')
prc = mriaggr.calcAUPRC(probabilities, groundtruth,
ci=True, alpha=0.05)
# mean, lower (95%), upper (95%)
ci_dict = {'lower': prc[1], 'mean': prc[0], 'upper': prc[2]}
return ci_dict
else:
return skm.average_precision_score(groundtruth, probabilities)
def compute_summary_metrics(eval_groundtruth,
eval_probabilities,
ci=True,
logger=None):
"""Compute summary metrics."""
auroc = roc_auc_score(eval_groundtruth,
eval_probabilities,
ci=ci)
    # FIXME: MRIaggr won't install so CI for PR AUC does not work.
auprc = pr_auc_score(eval_groundtruth,
eval_probabilities,
ci=False)
log_loss = skm.log_loss(eval_groundtruth,
eval_probabilities)
summary_metrics = {}
summary_metrics["auroc"] = auroc
summary_metrics["auprc"] = auprc
summary_metrics["log_loss"] = log_loss
if logger is not None:
logger.log(f"Summary metrics: {summary_metrics}")
return summary_metrics
# Curves.
roc_curve = skm.roc_curve
pr_curve = skm.precision_recall_curve
# Curve helper function
def get_curves(groundtruth,
probabilities,
logger=None):
roc = roc_curve(groundtruth, probabilities)
pr = pr_curve(groundtruth, probabilities)
curves = {}
curves["roc"] = roc
curves["pr"] = pr
return curves
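# A minimal usage sketch (hypothetical labels and probabilities; note that the
# module-level rpy2 import means R and rpy2 must be installed even when
# ci=False skips the R-backed confidence intervals):
if __name__ == "__main__":
    groundtruth = [0, 1, 1, 0, 1, 0, 1, 1]
    probabilities = [0.1, 0.9, 0.8, 0.3, 0.7, 0.4, 0.6, 0.95]
    print(compute_summary_metrics(groundtruth, probabilities, ci=False))
    print(get_curves(groundtruth, probabilities))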
| 2,313 |
utils/gen_xfactual.py
|
mycal-tucker/IC3Net
| 0 |
2024948
|
import os
from settings import settings
import torch.nn as nn
import torch.optim as optim
import time
from args import get_args
from nns.probe import Probe
from utils.game_tracker import GameTracker
from utils.util_fns import *
def gen_counterfactual(z, probe, s_prime, criterion=None):
    start_time = time.time()
    # z_prime aliases z, so the caller's tensor is optimized in place
    z_prime = z
    z_prime.requires_grad = True
    # Earlier runs tried lr=0.0001 and lr=0.001; lr=0.01 generated the prey results.
    optimizer = optim.SGD([z_prime], lr=0.01, momentum=0.9)
if criterion is None:
criterion = nn.CrossEntropyLoss()
# criterion = nn.BCEWithLogitsLoss()
num_steps = 0
    stopping_loss = 0.001  # Was 0.05; 0.001 generated the prey results
loss = 100
max_patience = 10000
max_num_steps = settings.NUM_XFACT_STEPS
curr_patience = 0
min_loss = loss
probe.eval()
curr_time = time.time()
while curr_time - start_time < 100 and num_steps < max_num_steps and loss > stopping_loss:
optimizer.zero_grad()
        # z_prime is already a tensor; re-wrapping it in torch.Tensor() would
        # detach it from the autograd graph and block gradients, so pass it directly
        outputs = probe(z_prime)
loss = criterion(outputs, s_prime)
loss.backward()
optimizer.step()
# if num_steps % 100 == 0:
# print("Loss", loss)
num_steps += 1
curr_patience += 1
if loss < min_loss - 0.01:
min_loss = loss
curr_patience = 0
if curr_patience > max_patience:
print("Breaking because of patience with loss", loss)
break
curr_time = time.time()
# print("Diff", curr_time - start_time)
# print("Num steps", num_steps, "\tloss", loss)
# if num_steps >= max_num_steps:
# print("Max step thing", loss)
# if loss <= stopping_loss:
# print("Broke for min loss", loss)
return z_prime
if __name__ == '__main__':
print("Dummy calls for debugging.")
# First, load a tracker and a trained probe.
parser = get_args()
init_args_for_env(parser)
args = parser.parse_args()
tracker_path = os.path.join(args.load, args.env_name, args.exp_name, "seed" + str(args.seed), 'tracker.pkl')
tracker = GameTracker.from_file(tracker_path)
probe_path = os.path.join(args.load, args.env_name, args.exp_name, "seed" + str(args.seed), 'c_probe.pth')
c_dim = tracker.data[0][2][0].detach().numpy().shape[1]
num_locations = tracker.data[0][0].shape[0]
c_probe = Probe(c_dim, num_locations, num_layers=3)
c_probe.load_state_dict(torch.load(probe_path))
c_probe.eval()
new_goal = np.zeros((1, num_locations))
new_goal[0, 20] = 1
new_goal = torch.Tensor(new_goal)
agent_id = 3
for _, _, hiddens in tracker.data:
c, _ = hiddens
c = c[agent_id].detach().numpy()
c = torch.unsqueeze(torch.Tensor(c), 0)
xfactual = gen_counterfactual(c, c_probe, new_goal)
print("X factual", xfactual)
| 2,976 |
AA-Proyecto-Python-20/main.py
|
AndresS0421/Blog-de-notas-python
| 1 |
2024671
|
"""
Proyecto python y MySQL:
- Abrir asistente.
- Login o registro.
- Si elegimos registro, creará un usuario en la base de datos.
- Si elegimos login, identifica al usuario y nos preguntará:
- Crear nota, mostrar notas, borrarlas.
"""
from Usuarios import acciones
print("""
Acciones disponibles:
- registro
- login
""")
hazEl = acciones.Acciones()
accion = input("¿Qué quieres hacer? ")
if accion == "registro":
hazEl.registro()
elif accion == "login":
hazEl.login()
| 488 |
process_mining/conf_data.py
|
fau-is/pdm
| 0 |
2025076
|
"""
The module contains the conformance data information
"""
import threading
class ConformanceAnalysisData(object):
"""
The overall conformance analysis data contains information about all failed traces in an event log in an aggregated
form
"""
conformance_analysis_data = None
def __init__(self):
"""
Default initializer
"""
self.ViolatingTraces = []
self.ViolatedActivities = {}
self.ViolatedConnections = {}
self.ViolatedExcluded = {}
self.ViolatedPending = {}
self.ViolatedRoles = {}
self.ViolatedTraces = {}
self.Lock = threading.RLock() # Used to make the class thread safe
ConformanceAnalysisData.conformance_analysis_data = self
def create_violated_traces_dict(self):
"""
Creates a dict from the Violating traces
:return: a dict with the process paths
"""
process_paths = {}
for trace in self.ViolatingTraces:
process_path = str()
for event in trace.Events:
process_path += " -{}".format(event.EventName)
process_path = process_path.lstrip()[1:]
if process_path in process_paths:
process_paths[process_path] += 1
else:
process_paths[process_path] = 1
return process_paths
def append_violating_trace(self, trace):
"""
Appends a violating trace to the ViolatingTraces array
:param trace: the trace to be added
:return:
"""
self.ViolatingTraces.append(trace)
def append_conformance_data(self, trace_conformance_data):
"""
Thread safe method to add trace conformance analysis data to the overall conformance analysis data
:type trace_conformance_data: TraceConformanceAnalysisData that will be added to the overall information
"""
# Acquire lock for thread safe execution
self.Lock.acquire()
if trace_conformance_data.Trace is not None:
self.ViolatingTraces.append(trace_conformance_data.Trace)
for activity in trace_conformance_data.ViolatingActivities:
if activity in self.ViolatedActivities:
self.ViolatedActivities[activity] += 1
else:
self.ViolatedActivities[activity] = 1
for connection in trace_conformance_data.ViolatingConstraints:
if connection in self.ViolatedConnections:
self.ViolatedConnections[connection] += 1
else:
self.ViolatedConnections[connection] = 1
for excluded_execution in trace_conformance_data.ViolatingExcluded:
if excluded_execution in self.ViolatedExcluded:
self.ViolatedExcluded[excluded_execution] += 1
else:
self.ViolatedExcluded[excluded_execution] = 1
for pending_marking in trace_conformance_data.ViolatingPending:
if pending_marking in self.ViolatedPending:
self.ViolatedPending[pending_marking] += 1
else:
self.ViolatedPending[pending_marking] = 1
for role in trace_conformance_data.ViolatingRoles:
if role in self.ViolatedRoles:
self.ViolatedRoles[role] += 1
else:
self.ViolatedRoles[role] = 1
del trace_conformance_data
# Release lock for next thread
self.Lock.release()
class TraceConformanceAnalysisData(object):
"""
This class is used to capture all failing constraints of a trace in a DCR graph
"""
def __init__(self, trace):
"""
Default initializer for all violations in a Trace
:param trace: trace that is added to the trace data
"""
self.Trace = trace
self.HasViolations = False
self.ViolatingActivities = []
self.ViolatingConstraints = []
self.ViolatingExcluded = []
self.ViolatingPending = []
self.ViolatingRoles = []
self.ViolatingNestingActivityCall = []
self.ViolatingNestingActivityBlocked = []
def add_violating_role(self, role, node_role):
"""
Adds a violating role to the list of violating role violations
:param role: the role that was used
:param node_role: the role that should have been used
:return: None
"""
self.HasViolations = True
self.ViolatingRoles.append('{} instead of {}'.format(role, node_role))
def add_violating_activity(self, activity):
"""
Adds an execution of a violating activity
:param activity: the activity that was executed even though it was excluded
:return: None
"""
self.HasViolations = True
self.ViolatingActivities.append(activity)
def add_violating_connection(self, connection):
"""
Adds a violated connection (either Milestone or Condition) to the violations
:param connection: the violated connection
:return: None
"""
self.HasViolations = True
self.ViolatingConstraints.append(connection)
def add_violating_pending(self, pending):
"""
Adds an activity to the list of activities that were still in pending in the process execution
:param pending: The Activity to add
:return: None
"""
self.HasViolations = True
self.ViolatingPending.append(pending)
def add_violating_nesting_activity_occurred(self, nesting_activity):
"""
Adds an activity to the list of nesting activities that occurred in the event log
:return: None
"""
self.HasViolations = True
self.ViolatingNestingActivityCall.append(nesting_activity)
def add_violating_nesting_activity_blocked(self, activity):
"""
Add a tuple to
:param activity:
:return: None
"""
self.HasViolations = True
self.ViolatingNestingActivityBlocked.append(tuple((activity, activity.NestingActivity)))
def calculate_trace_alignment_fitness(self):
"""
TODO Future Work
:return: None
"""
pass
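if __name__ == "__main__":
    # Minimal aggregation sketch (hypothetical violations; the trace is left
    # as None so no event-log objects are needed): collect one trace's
    # violations into the shared, thread-safe analysis object.
    overall = ConformanceAnalysisData()
    trace_data = TraceConformanceAnalysisData(trace=None)
    trace_data.add_violating_role("Clerk", "Manager")
    trace_data.add_violating_pending("Approve invoice")
    overall.append_conformance_data(trace_data)
    print(overall.ViolatedRoles)    # {'Clerk instead of Manager': 1}
    print(overall.ViolatedPending)  # {'Approve invoice': 1}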
| 6,225 |
release/stubs.min/System/Runtime/InteropServices/__init___parts/TypeLibTypeFlags.py
|
YKato521/ironpython-stubs
| 0 |
2025139
|
class TypeLibTypeFlags(Enum, IComparable, IFormattable, IConvertible):
"""
Describes the original settings of the System.Runtime.InteropServices.TYPEFLAGS in the COM type library from which the type was imported.
enum (flags) TypeLibTypeFlags,values: FAggregatable (1024),FAppObject (1),FCanCreate (2),FControl (32),FDispatchable (4096),FDual (64),FHidden (16),FLicensed (4),FNonExtensible (128),FOleAutomation (256),FPreDeclId (8),FReplaceable (2048),FRestricted (512),FReverseBind (8192)
"""
def __eq__(self, *args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self, *args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self, *args):
pass
def __gt__(self, *args):
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self, *args):
pass
def __lt__(self, *args):
pass
def __ne__(self, *args):
pass
def __reduce_ex__(self, *args):
pass
def __str__(self, *args):
pass
FAggregatable = None
FAppObject = None
FCanCreate = None
FControl = None
FDispatchable = None
FDual = None
FHidden = None
FLicensed = None
FNonExtensible = None
FOleAutomation = None
FPreDeclId = None
FReplaceable = None
FRestricted = None
FReverseBind = None
value__ = None
| 1,716 |
Policy-Explicit/test.py
|
mahdijo6731/SugarScape
| 6 |
2025227
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 3 06:00:22 2018
@author: ymamo
"""
from itertools import groupby
test = ["A", "B", "C"]
test.remove(test[0])
test2 = "A"*len(test)
print (test2)
new = list(zip(test2, test))
print (new)
| 256 |
Socket.py
|
austinjcheng/chatterbox
| 3 |
2024747
|
import socket
from Settings import HOST, PORT, CHANNEL, usernames, tokens
def openSocket(userIndex):
    s = socket.socket()
    s.connect((HOST, PORT))
    # socket.send() requires bytes in Python 3, so encode the IRC commands
    s.send(("PASS " + tokens[userIndex] + "\r\n").encode("utf-8"))
    s.send(("NICK " + usernames[userIndex] + "\r\n").encode("utf-8"))
    s.send(("JOIN #" + CHANNEL + "\r\n").encode("utf-8"))
    return s
def sendMessage(s, message):
    messageTemp = "PRIVMSG #" + CHANNEL + " :" + message
    s.send((messageTemp + "\r\n").encode("utf-8"))
    print("Sent: " + messageTemp)
def sendDefaultMessage(message):
    from Initialize import users, currentUserIndex
    messageTemp = "PRIVMSG #" + CHANNEL + " :" + message
    users[currentUserIndex].send((messageTemp + "\r\n").encode("utf-8"))
    print("Sent: " + messageTemp)
| 699 |
schematic/utils/__init__.py
|
linglp/schematic
| 8 |
2025157
|
from schematic.utils.curie_utils import (
expand_curie_to_uri,
expand_curies_in_schema,
extract_name_from_uri_or_curie,
uri2label,
)
from schematic.utils.df_utils import update_df
from schematic.utils.general import dict2list, find_duplicates, str2list, unlist
from schematic.utils.google_api_utils import (
download_creds_file,
execute_google_api_requests,
)
from schematic.utils.io_utils import (
export_json,
load_default,
load_json,
load_schemaorg,
)
from schematic.utils.schema_utils import load_schema_into_networkx
from schematic.utils.validate_utils import (
validate_class_schema,
validate_property_schema,
validate_schema,
)
from schematic.utils.viz_utils import visualize
| 735 |
Part_05/video_reader.py
|
dawidborycki/SocialDistancingDetector
| 0 |
2025298
|
import cv2 as opencv
class VideoReader(object):
def __init__(self, file_path):
# Open the video file
try:
self.video_capture = opencv.VideoCapture(file_path)
        except Exception as error:
            print(error)
def read_next_frame(self):
(capture_status, frame) = self.video_capture.read()
# Verify the status
        if capture_status:
return frame
else:
return None
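if __name__ == "__main__":
    # Minimal read-loop sketch (hypothetical file path): pull frames until
    # the capture is exhausted.
    reader = VideoReader("sample.mp4")
    frame_count = 0
    while True:
        frame = reader.read_next_frame()
        if frame is None:
            break
        frame_count += 1
    print("Frames read:", frame_count)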
| 526 |
office365/sharepoint/sitedesigns/design_package_menu_contents.py
|
rikeshtailor/Office365-REST-Python-Client
| 544 |
2024354
|
from office365.sharepoint.base_entity import BaseEntity
class DesignPackageMenuContents(BaseEntity):
pass
| 112 |
etc/utils/arrays.py
|
hariprasadraja/my_config
| 3 |
2023321
|
#!/usr/bin/env python3
import sys
import fire
class Array():
    '''
    array utilities (reverse, distinct, ...)
    '''
def reverse(self, *args):
'''
reverse an array
> reverse 1 2 3 4 5
[5, 4, 3, 2, 1]
'''
print(list(reversed(args)))
# print(array)
# print(list(reversed(array)))
def distinct(self, *args):
'''
print distinct elements in an array
> distinct 1 2 1 4 2 3 5
[1, 2, 4, 3, 5]
'''
print(list(dict.fromkeys(args)))
def random(self, *args):
'''
returns random element in an array
❯ python3 etc/utils/utils.py array random 2 1 2 3 4 5
5
'''
import random
print(random.choice(args))
    def rotate(self, n, *args):
        """
        rotate the array right by 'n' elements
        ❯ array rotate 2 1 2 3 4 5
        [4, 5, 1, 2, 3]
        """
        # reduce n modulo the length so n larger than the array still works
        k = n % len(args)
        print(list(args[-k:] + args[:-k]))
def print(self, *args):
"""
print an array
> array print 1 2 3 4 5
1
2
3
4
5
"""
for item in args:
print(item)
if __name__ == "__main__":
fire.Fire(Array)
| 1,330 |
tests/integration/reqs/test_ripple_path_find.py
|
SubCODERS/xrpl-py
| 1 |
2023518
|
from tests.integration.integration_test_case import IntegrationTestCase
from tests.integration.it_utils import test_async_and_sync
from tests.integration.reusable_values import DESTINATION, WALLET
from xrpl.models.requests import RipplePathFind
class TestRipplePathFind(IntegrationTestCase):
@test_async_and_sync(globals())
async def test_basic_functionality(self, client):
response = await client.request(
RipplePathFind(
source_account=WALLET.classic_address,
destination_account=DESTINATION.classic_address,
destination_amount="100",
),
)
self.assertTrue(response.is_successful())
| 690 |
gradient_utils/metrics/env.py
|
urbas/gradient-utils
| 2 |
2024902
|
import os
PUSH_GATEWAY_ENV = 'PAPERSPACE_METRIC_PUSHGATEWAY'
PUSH_GATEWAY_DEFAULT = 'http://gradient-processing-prometheus-pushgateway:9091'
WORKLOAD_TYPE_ENV = 'PAPERSPACE_METRIC_WORKLOAD_TYPE'
WORKLOAD_TYPE_DEFAULT = 'experiment'
WORKLOAD_ID_ENV = 'PAPERSPACE_METRIC_WORKLOAD_ID'
LEGACY_EXPERIMENT_ID_ENV = 'PAPERSPACE_EXPERIMENT_ID'
HOSTNAME = os.getenv("HOSTNAME")
def get_metric_pushgateway():
return os.getenv(PUSH_GATEWAY_ENV, PUSH_GATEWAY_DEFAULT)
def get_workload_type():
return os.getenv(WORKLOAD_TYPE_ENV, WORKLOAD_TYPE_DEFAULT)
def get_workload_label():
return 'label_metrics_{}_handle'.format(get_workload_type())
def _get_env_var_or_raise(*env_vars):
    # Return the value of the first environment variable that is set
    rv = None
    for env_var in env_vars:
        rv = os.getenv(env_var)
        if rv:
            break
    if not rv:
        msg = "{} environment variable(s) not found".format(
            ", ".join(env_vars))
        raise ValueError(msg)
    return rv
def _get_experiment_id():
if os.getenv(LEGACY_EXPERIMENT_ID_ENV):
return os.getenv(LEGACY_EXPERIMENT_ID_ENV)
try:
experiment_id = HOSTNAME.split('-')[1]
return experiment_id
    except (AttributeError, IndexError):  # AttributeError: HOSTNAME is unset
msg = "Experiment ID not found"
raise ValueError(msg)
def get_workload_id():
if os.getenv(WORKLOAD_ID_ENV):
return os.getenv(WORKLOAD_ID_ENV)
return _get_experiment_id()
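if __name__ == "__main__":
    # Resolution-order sketch (hypothetical workload ID): the explicit
    # PAPERSPACE_METRIC_WORKLOAD_ID env var wins over the hostname-derived
    # experiment ID fallback.
    os.environ[WORKLOAD_ID_ENV] = "wl-123"
    print(get_metric_pushgateway())  # env override or the default gateway URL
    print(get_workload_label())      # e.g. label_metrics_experiment_handle
    print(get_workload_id())         # wl-123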
| 1,393 |
providers/ansible/__init__.py
|
theadviceio/executer
| 0 |
2024765
|
import sys
import conf
import api.utils.typeutils
import api.utils.errors
__version__ = 0.1
__author__ = '<EMAIL>'
quote_func = None
if sys.version_info > (3,0,0):
import shlex
quote_func = shlex.quote
else:
import pipes
quote_func = pipes.quote
def prepare_cmd(**kwargs):
"""Prepare cmd string for run with ansible
:param hosts: List or string of hosts for an inventory (default: 127.0.0.1)
:param host: A host where run this module (default: 127.0.0.1)
:param module: Which ansible module run
:param module_args: Arguments of the module
:param connection: Connection type. By default (local)
:return: cmd
    :raises SyntaxError: on non-standard values
:raises ValueError: on unexpected types
"""
cmd = "%s " % conf.BIN_RUN_ANSIBLE
hosts = ['127.0.0.1']
if 'hosts' in kwargs:
hosts = api.utils.typeutils.to_list(kwargs['hosts'])
try:
_inventory = ",".join(hosts) + ","
inventory = "%s %s" % (conf.ansible.ANSIBLE_INVENTORY_KEY, _inventory)
cmd += " %s" % inventory
except Exception as error:
api.utils.errors.get_syntax_error("Can't prepare_cmd:inventory because %s" % error)
if 'module' not in kwargs:
api.utils.errors.get_key_error("Can't run ansible without specify a module")
ansible_module = kwargs['module']
cmd += " %s%s" % (conf.ansible.ANSIBLE_MODULE_KEY, ansible_module)
if 'module_args' not in kwargs:
api.utils.errors.get_key_error("Can't run ansible without specify a module arguments")
ansible_module_args = kwargs['module_args']
ansible_module_args_line = ""
if api.utils.typeutils.is_string(ansible_module_args):
ansible_module_args_line = quote_func(ansible_module_args)
elif api.utils.typeutils.is_list(ansible_module_args):
ansible_module_args_line = quote_func(";".join(ansible_module_args))
else:
api.utils.errors.get_syntax_error("Can't prepare_cmd:ansible_module_args because wrong type %s" % type(ansible_module_args))
cmd += " %s %s" % (conf.ansible.ANSIBLE_MODULE_ARGS_KEY, ansible_module_args_line)
try:
connection = "%slocal " % conf.ansible.ANSIBLE_CONNECTION_KEY
if 'connection' in kwargs:
connection = kwargs["connection"]
if not api.utils.typeutils.is_string(connection):
api.utils.errors.get_syntax_error("Can't prepare_cmd:connection because wrong type %s" % type(connection))
connection = "%s%s" % (conf.ansible.ANSIBLE_CONNECTION_KEY, connection)
cmd += " %s" % connection
except Exception as error:
api.utils.errors.get_syntax_error("Can't prepare_cmd:connection because %s" % error)
host = '127.0.0.1'
if 'host' in kwargs:
host = kwargs['host']
cmd += " %s" % host
return cmd
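if __name__ == "__main__":
    # A minimal sketch (assuming conf.BIN_RUN_ANSIBLE and the conf.ansible
    # key constants are configured for this deployment): build an ad-hoc
    # command that pings two hosts.
    print(prepare_cmd(hosts=["10.0.0.1", "10.0.0.2"],
                      module="ping",
                      module_args="data=pong",
                      host="10.0.0.1"))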
| 2,835 |
market/locale/cs/data.py
|
katomaso/django-market
| 0 |
2024417
|
"""Data specific to Czech."""
countries = (
("Ben", "Benešov"),
("Ber", "Beroun"),
("Bla", "Blansko"),
("Bře", "Břeclav"),
("Brn", "Brno-město"),
("Brn-v", "Brno-venkov"),
("Bru", "Bruntál"),
("ČL", "Česká Lípa"),
("ČB", "České Budějovice"),
("ČK", "Český Krumlov"),
("Che", "Cheb"),
("Cho", "Chomutov"),
("Chr", "Chrudim"),
("Děč", "Děčín"),
("Dom", "Domažlice"),
("FM", "Frýdek-Místek"),
("HB", "Havlíčkův Brod"),
("Hod", "Hodonín"),
("HK", "Hradec Králové"),
("JN", "Jablonec nad Nisou"),
("Jes", "Jeseník"),
("Jič", "Jičín"),
("Jih", "Jihlava"),
("JH", "<NAME>"),
("KV", "Karlovy Vary"),
("Kar", "Karviná"),
("Kld", "Kladno"),
("Klt", "Klatovy"),
("Kln", "Kolín"),
("Kro", "Kroměříž"),
("KH", "Kutná Hora"),
("Lib", "Liberec"),
("Lit", "Litoměřice"),
("Lou", "Louny"),
("Měl", "Mělník"),
("MB", "Mladá Boleslav"),
("Mos", "Most"),
("Nác", "Náchod"),
("NJ", "Nový Jičín"),
("Nym", "Nymburk"),
("Olo", "Olomouc"),
("Opa", "Opava"),
("Ost", "Ostrava"),
("Par", "Pardubice"),
("Pel", "Pelhřimov"),
("Pís", "Písek"),
("Plz", "Plzeň"),
("Plz-j", "Plzeň-jih"),
("Plz-s", "Plzeň-sever"),
("Pra", "Prachatice"),
("Pha", "Praha"),
("Pha-v", "Praha-východ"),
("Pha-z", "Praha-západ"),
("Pře", "Přerov"),
("Pří", "Příbram"),
("Pro", "Prostějov"),
("Rak", "Rakovník"),
("Rok", "Rokycany"),
("RnK", "Rychnov nad Kněžnou"),
("Sem", "Semily"),
("Sok", "Sokolov"),
("Str", "Strakonice"),
("Šum", "Šumperk"),
("Svi", "Svitavy"),
("Táb", "Tábor"),
("Tac", "Tachov"),
("Tep", "Teplice"),
("Tře", "Třebíč"),
("Tru", "Trutnov"),
("UH", "<NAME>"),
("ÚnL", "Ústí nad Labem"),
("ÚnO", "Ústí nad Orlicí"),
("Vse", "Vsetín"),
("Vyš", "Vyškov"),
("Zlí", "Zlín"),
("Zno", "Znojmo"),
("Žďá", "Žďár nad Sázavou")
)
| 1,997 |
seed_otp/crypto.py
|
brndnmtthws/seed-otp
| 9 |
2024471
|
def encrypt(
num_keys,
keylist,
wordlist,
word_to_idx,
words):
"""Encrypt words using OTP word indexes."""
if len(words) > num_keys:
raise ValueError("Number of input words exceeds key length")
result = []
for i in range(0, len(words)):
m = words[i]
m_i = word_to_idx[m]
k = keylist[i]
c_i = (m_i + k) % len(wordlist)
result.append({
'message': wordlist[m_i],
'ciphertext': wordlist[c_i]
})
return result
def decrypt(
num_keys,
keylist,
wordlist,
word_to_idx,
words):
"""Encrypt words using OTP word indexes."""
if len(words) > num_keys:
raise ValueError("Number of input words exceeds key length")
result = []
for i in range(0, len(words)):
c = words[i]
c_i = word_to_idx[c]
k = keylist[i]
m_i = (c_i - k) % len(wordlist)
result.append({
'message': wordlist[m_i],
'ciphertext': wordlist[c_i]
})
return result
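if __name__ == "__main__":
    # Round-trip sketch (toy 8-word list and key; a real BIP39 list has 2048
    # words): encryption adds the key index mod len(wordlist), decryption
    # subtracts it, so decrypt(encrypt(m)) recovers the original words.
    wordlist = ["able", "acid", "aim", "all", "arm", "art", "ask", "atom"]
    word_to_idx = {w: i for i, w in enumerate(wordlist)}
    keylist = [3, 5, 1]
    words = ["acid", "arm", "atom"]
    enc = encrypt(len(keylist), keylist, wordlist, word_to_idx, words)
    ciphertext = [e["ciphertext"] for e in enc]
    dec = decrypt(len(keylist), keylist, wordlist, word_to_idx, ciphertext)
    assert [d["message"] for d in dec] == words
    print(ciphertext)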
| 1,094 |
installer/core/terraform/utils.py
|
jacob-kinzer/pacbot
| 0 |
2022960
|
from core.config import Settings
import os
import json
def get_terraform_provider_file():
return os.path.join(
Settings.TERRAFORM_DIR,
'provider.tf'
)
def get_terraform_scripts_and_files_dir():
return os.path.join(
Settings.TERRAFORM_DIR,
'scripts_and_files'
)
def get_terraform_scripts_dir():
return os.path.join(
get_terraform_scripts_and_files_dir(),
'scripts'
)
def get_terraform_resource_path(resource_class):
resource_path = ".".join(
[resource_class.resource_instance_name, resource_class.get_resource_id()])
return resource_path
def get_formatted_resource_attr_value(arg_value, attrs):
field_type = attrs.get('type', None)
if field_type == 'json':
arg_value = json.dumps(arg_value)
elif arg_value is True or arg_value is False:
arg_value = "true" if arg_value is True else "false"
else:
arg_value = get_prefix_added_attr_value(arg_value, attrs)
return arg_value
def get_prefix_added_attr_value(arg_value, attrs):
if attrs.get('prefix', False):
trail_value = "" if arg_value.strip() == "" else arg_value
prefix_sep = "" if (Settings.RESOURCE_NAME_PREFIX.strip() == "" or trail_value == "") else attrs.get('sep', "")
arg_value = Settings.RESOURCE_NAME_PREFIX.strip() + prefix_sep + trail_value
return arg_value
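# Illustrative prefix behavior (hypothetical values, assuming
# Settings.RESOURCE_NAME_PREFIX == "pacbot"):
#   get_prefix_added_attr_value("role", {'prefix': True, 'sep': "-"})  -> "pacbot-role"
#   get_prefix_added_attr_value("", {'prefix': True, 'sep': "-"})      -> "pacbot"
#   get_formatted_resource_attr_value(True, {})                        -> "true"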
def get_terraform_latest_output_file():
return os.path.join(Settings.OUTPUT_DIR, 'output.json')
def get_terraform_status_file():
return os.path.join(Settings.OUTPUT_DIR, 'status.json')
| 1,594 |
mocket/utils.py
|
mindflayer/python-mocket
| 226 |
2023879
|
import binascii
import io
import os
import ssl
from .compat import decode_from_bytes, encode_to_bytes
SSL_PROTOCOL = ssl.PROTOCOL_SSLv23
class MocketSocketCore(io.BytesIO):
def write(self, content):
super(MocketSocketCore, self).write(content)
from mocket import Mocket
if Mocket.r_fd and Mocket.w_fd:
os.write(Mocket.w_fd, content)
def hexdump(binary_string):
r"""
>>> hexdump(b"bar foobar foo") == decode_from_bytes(encode_to_bytes("62 61 72 20 66 6F 6F 62 61 72 20 66 6F 6F"))
True
"""
bs = decode_from_bytes(binascii.hexlify(binary_string).upper())
return " ".join(a + b for a, b in zip(bs[::2], bs[1::2]))
def hexload(string):
r"""
>>> hexload("62 61 72 20 66 6F 6F 62 61 72 20 66 6F 6F") == encode_to_bytes("bar foobar foo")
True
"""
string_no_spaces = "".join(string.split())
return encode_to_bytes(binascii.unhexlify(string_no_spaces))
def get_mocketize(wrapper_):
import decorator
if decorator.__version__ < "5":
return decorator.decorator(wrapper_)
return decorator.decorator(wrapper_, kwsyntax=True)
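# A minimal usage sketch (hypothetical wrapper): the helper hides the
# decorator 4.x/5.x API difference, so callers just supply a wrapper with the
# decorator-library signature.
#
#     def wrapper_(test, *args, **kwargs):
#         # set up fakes here, then run the wrapped test
#         return test(*args, **kwargs)
#
#     mocketize = get_mocketize(wrapper_)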
| 1,132 |
src/tests/profile/complete/test_db.py
|
NSKgooner/hack_moscow
| 3 |
2025152
|
def test_insert_known():
pass
def test_delete_unknown():
pass
def test_select_unknown_save_time():
pass
def test_complete():
pass
| 152 |
devilry/devilry_gradingsystem/views/feedbackbulkeditorbase.py
|
devilry/devilry-django
| 29 |
2024950
|
from datetime import datetime
from random import randint
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import gettext_lazy
from django.http import Http404
from django import forms
from django.shortcuts import redirect
from django.http import HttpResponseBadRequest
from devilry.devilry_markup.parse_markdown import markdown_full
from devilry.devilry_gradingsystem.models import FeedbackDraft
from devilry.devilry_gradingsystem.widgets.editmarkdown import EditMarkdownLayoutObject
from devilry.devilry_gradingsystem.widgets.editfeedbackbuttonbar import BulkEditFeedbackButtonBar
from devilry.devilry_examiner.views.bulkviewbase import BulkViewBase
from devilry.devilry_examiner.views.bulkviewbase import OptionsForm
class FeedbackBulkEditorOptionsForm(OptionsForm):
draft_id = forms.IntegerField(
required=False,
widget=forms.HiddenInput)
def clean_draft_id(self):
draft_id = self.cleaned_data['draft_id']
if draft_id is not None and not FeedbackDraft.objects.filter(id=draft_id).exists():
raise forms.ValidationError("Invalid draft ID: {}.".format(draft_id))
return draft_id
def clean(self):
cleaned_data = super(FeedbackBulkEditorOptionsForm, self).clean()
if hasattr(self, 'cleaned_groups'):
cleaned_groups = self.cleaned_groups
groups_with_no_deliveries = cleaned_groups.exclude_groups_with_deliveries()
if groups_with_no_deliveries.exists():
raise forms.ValidationError(gettext_lazy('One or more of the selected groups has no deliveries.'))
else:
self.cleaned_groups = cleaned_groups
return cleaned_data
class FeedbackBulkEditorFormBase(FeedbackBulkEditorOptionsForm):
def __init__(self, *args, **kwargs):
super(FeedbackBulkEditorFormBase, self).__init__(*args, **kwargs)
self._add_feedbacktext_field()
def _add_feedbacktext_field(self):
self.fields['feedbacktext'] = forms.CharField(
label=gettext_lazy('Feedback text'),
required=False)
def get_feedbacktext_layout_elements(self):
return [EditMarkdownLayoutObject()]
def get_submitbuttons_layout_elements(self):
return [BulkEditFeedbackButtonBar()]
def add_common_layout_elements(self):
for element in self.get_feedbacktext_layout_elements():
self.helper.layout.append(element)
for element in self.get_submitbuttons_layout_elements():
self.helper.layout.append(element)
self.helper.layout.append('group_ids')
self.helper.layout.append('draft_id')
class FeedbackBulkEditorFormView(BulkViewBase):
optionsform_class = FeedbackBulkEditorOptionsForm
def get_primaryform_classes(self):
return {
'submit_publish': self.form_class,
'submit_preview': self.form_class,
}
def get_points_from_form(self, form):
raise NotImplementedError()
def get_default_points_value(self):
raise NotImplementedError()
def optionsform_invalid(self, optionsform):
return self.render_view({
'optionsform': optionsform
})
def get_create_feedbackdraft_kwargs(self, form, publish):
return {
'groups': form.cleaned_groups,
'feedbacktext_raw': form.cleaned_data['feedbacktext'],
'feedbacktext_html': markdown_full(form.cleaned_data['feedbacktext']),
'publish': publish,
'points': self.get_points_from_form(form)
}
def _get_preview_redirect_url(self, randomkey):
return "{}".format(reverse('devilry_gradingsystem_feedbackdraft_bulkpreview',
kwargs={'assignmentid': self.object.id, 'randomkey': randomkey}))
def save_pluginspecific_state(self, form):
"""
        Save extra state that is specific to this plugin, i.e. input from
        users that has no corresponding field in FeedbackDraft and has to be
        stored in the plugin's data models.
"""
pass
def submitted_primaryform_valid(self, form, context_data):
publish = 'submit_publish' in self.request.POST
preview = 'submit_preview' in self.request.POST
self.save_pluginspecific_state(form)
draft_ids = self.create_feedbackdrafts(**self.get_create_feedbackdraft_kwargs(form, publish))
if preview:
randomkey = '{}.{}'.format(timezone.now().strftime('%Y-%m-%d_%H-%M-%S-%f'), randint(0, 10000000))
sessionkey = 'devilry_gradingsystem_draftids_{}'.format(randomkey)
self.request.session[sessionkey] = draft_ids
return redirect(str(self._get_preview_redirect_url(randomkey)))
else:
return super(FeedbackBulkEditorFormView, self).submitted_primaryform_valid(form, context_data)
def get_initial_from_draft(self, draft):
return {
'feedbacktext': draft.feedbacktext_raw
}
def get_primaryform_initial_data(self, formclass):
if self.optionsdict['draft_id']:
draft = FeedbackDraft.objects.get(id=self.optionsdict['draft_id'])
if draft.delivery.assignment != self.assignment:
raise Http404()
extra_data = self.get_initial_from_draft(draft)
else:
extra_data = {
'feedbacktext': '',
'points': self.get_default_points_value()
}
initial = super(FeedbackBulkEditorFormView, self).get_primaryform_initial_data(formclass)
extra_data.update(initial)
return extra_data
def get(self, *args, **kwargs):
assignment = self.get_object()
if not assignment.has_valid_grading_setup():
return redirect('devilry_examiner_singledeliveryview', deliveryid=self.delivery.id)
return super(FeedbackBulkEditorFormView, self).get(*args, **kwargs)
def post(self, *args, **kwargs):
assignment = self.get_object()
if not assignment.has_valid_grading_setup():
return HttpResponseBadRequest('Grading system is not set up correctly')
elif 'submit_save_draft' in self.request.POST:
return redirect(str(self.request.path))
return super(FeedbackBulkEditorFormView, self).post(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(FeedbackBulkEditorFormView, self).get_context_data(**kwargs)
assignment = self.object
context['valid_grading_system_setup'] = assignment.has_valid_grading_setup()
return context
def create_feedbackdrafts(self, groups, points, feedbacktext_raw, feedbacktext_html, publish=False):
draft_ids = []
for group in groups:
delivery_id = group.last_delivery_id
draft = FeedbackDraft(
delivery_id=delivery_id,
points=points,
feedbacktext_raw=feedbacktext_raw,
feedbacktext_html=feedbacktext_html,
saved_by=self.request.user
)
if publish:
draft.published = True
draft.staticfeedback = draft.to_staticfeedback()
draft.staticfeedback.full_clean()
draft.staticfeedback.save()
draft.save()
draft_ids.append(draft.id)
return draft_ids
| 7,424 |
binance-spot/__init__.py
|
AbdeenM/binance-spot
| 2 |
2023294
|
from common.scripts.binance_spot.requestclient import RequestClient
from common.scripts.binance_spot.subscriptionclient import SubscriptionClient
| 146 |
mesh/__init__.py
|
omersaeed/mesh
| 1 |
2024282
|
import logging
from mesh.util import LogFormatter
#log = logging.getLogger('mesh')
#log.setLevel(logging.DEBUG)
#handler = logging.StreamHandler()
#handler.setFormatter(LogFormatter())
#log.addHandler(handler)
| 211 |