max_stars_repo_path (stringlengths 4-182) | max_stars_repo_name (stringlengths 6-116) | max_stars_count (int64 0-191k) | id (stringlengths 7-7) | content (stringlengths 100-10k) | size (int64 100-10k) |
---|---|---|---|---|---|
# Lista - Condicionais/4.py
|
thizago/letscode
| 0 |
2025496
|
numero_1 = float(input('Digite o primeiro número: '))
numero_2 = float(input('Digite o segundo número: '))
if numero_1 < numero_2:
    print(numero_2)
elif numero_1 > numero_2:
    print(numero_1)
else:
    print('os números são iguais')
| 241 |
scripts/postprocess_dir.py
|
uhh-lt/chinese-whispers
| 6 |
2023443
|
#!/usr/bin/env python
# encoding: utf-8
import argparse
from os.path import splitext, join
import os
from postprocess import postprocess
import glob
def main():
parser = argparse.ArgumentParser(description='Postprocess word sense induction file for all files in a directory.')
parser.add_argument('ddt_dir', help='Path to a directory with csv files with DDTs: "word<TAB>sense-id<TAB>keyword<TAB>cluster" w/o header by default. Here <cluster> is "word:sim<SPACE><SPACE>word:sim<SPACE><SPACE>..."')
parser.add_argument('-min_size', help='Minimum cluster size. Default -- 5.', default="5")
args = parser.parse_args()
print "Input DDT directory (pattern):", args.ddt_dir
print "Min size:", args.min_size
#postprocess(args.ddt, output_fpath, filtered_fpath, int(args.min_size))
for cluster_fpath in glob.glob(args.ddt_dir):
if splitext(cluster_fpath)[-1] == ".csv":
print "\n>>>", cluster_fpath
postprocess(
cluster_fpath,
cluster_fpath+"-minsize" + args.min_size + ".csv",
cluster_fpath+"-minsize" + args.min_size + "-filtered.csv",
                int(args.min_size))
if __name__ == '__main__':
main()
| 1,233 |
panopticon/fifemon-condor-probe/fifemon/condor/__init__.py
|
opensciencegrid/open-pool-display
| 0 |
2025970
|
import os
os.environ['_CONDOR_GSI_SKIP_HOST_CHECK'] = "true"
from .status import get_pool_status
from .slots import get_pool_slots, get_pool_glidein_slots
from .priorities import get_pool_priorities
from .jobs import Jobs
# disable debug logging, causes memory leak in long-running processes
import htcondor
htcondor.param['TOOL_LOG'] = '/dev/null'
htcondor.enable_log()
| 373 |
code/debug.py
|
911Steven/Table-Fact-Checking
| 1 |
2026470
|
import pandas
from beam_search import dynamic_programming
import spacy
nlp = spacy.load('en_core_web_sm')
if __name__ == "__main__":
table = 3
if table == 1:
t = pandas.read_csv('../data/all_csv/1-1341423-13.html.csv', delimiter="#")
elif table == 2:
t = pandas.read_csv('../data/all_csv/2-10808089-16.html.csv', delimiter="#")
elif table == 3:
t = pandas.read_csv('../data/all_csv/1-28498999-6.html.csv', delimiter="#")
else:
pass
cols = t.columns
    cols = cols.map(lambda x: x.replace(' ', '_') if isinstance(x, str) else x)
t.columns = cols
    print(t)
option = -1
if option == 1:
sent = u"<NAME> and <NAME> are both democratic"
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('party', 'democratic'), ('incumbent', '<NAME>'), ('incumbent', '<NAME>')]
head_str = []
mem_num = []
head_num = []
elif option == 2:
sent = u'there are 3 _ _ _ in _'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = []
head_str = ['incumbent']
mem_num = [('first_elected', 1998), ("tmp_none", 3)]
head_num = ['first_elected']
elif option == 3:
sent = u'phi crane is _ with the earliest year of _'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('incumbent', 'phil crane')]
head_str = ['incumbent']
mem_num = []
head_num = ['first_elected']
elif option == 4:
sent = u"there is more _ oriented incumbents than _"
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('party', 'democratic'), ('party', 'republican')]
head_str = []
mem_num = []
head_num = []
elif option == -1:
sent = u"<NAME> is not one of the two who had 24 events."
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('player', '<NAME>')]
head_str = ['player']
mem_num = [("tmp_none", 2), ("events", 24)]
head_num = ["events"]
elif option == -2:
sent = u'united states happens more times than any other teams'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('country', 'united states')]
head_str = ['country']
mem_num = []
head_num = []
elif option == 5:
sent = u'there are 2 _ who were not _'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [("results", "re - elected")]
head_str = ['incumbent']
mem_num = [("tmp_none", 2)]
head_num = []
elif option == 6:
sent = u'all _ are _ _'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [("results", "re - elected")]
head_str = ['incumbent']
mem_num = []
head_num = []
elif option == 7:
sent = u'the earliest _ is _ in _'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = []
head_str = ['incumbent']
mem_num = [('first_elected', 1994)]
head_num = ['first_elected']
elif option == 8:
sent = u'_ _ _ are all _'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('incumbent', '<NAME>'), ('incumbent', 'lane evans'), ('party', 'republican')]
head_str = []
mem_num = []
head_num = []
elif option == 9:
sent = u'st kilda lost to essendon and hawthorn lost to south melbourne'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('home_team', 'st kilda'), ('away_team', 'south melbourne'), ('home_team', 'collingwood'), ('away_team', 'north melbourne')]
head_str = []
mem_num = []
head_num = []
elif option == 10:
sent = u'The game with the fewest number of people in attendance was hawthorn vs south melbourne'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('home_team', 'hawthorn'), ('away_team', 'footscray')]
head_str = []
mem_num = []
head_num = ['crowd']
elif option == 11:
sent = u'collingwood is following essendon'
tags = [_.tag_ for _ in nlp(sent)]
mem_str = [('home_team', 'collingwood'), ('home_team', 'essendon')]
head_str = ['home_team']
mem_num = []
head_num = []
dynamic_programming(t, sent, tags, mem_str, mem_num, head_str, head_num, 6)
| 4,263 |
lab01/app/urls.py
|
vixxerror/TECSUP-DAE-2021--2
| 0 |
2025671
|
from django.urls import path
from . import views
urlpatterns = [
# ex: http://127.0.0.1:8000/polls/
path('', views.index, name='index'),
# ex: http://127.0.0.1:8000/polls/5/
path('suma/', views.suma, name='suma'),
# ex: http://127.0.0.1:8000/polls//5/results/
path('suma/<int:numero1>/', views.numero1, name='numero1'),
# ex: http://127.0.0.1:8000/polls/5/vote/
path('suma/<int:numero1>/<int:numero2>/', views.numero2, name='numero2'),
path('resta/', views.resta, name='resta'),
path('resta/<int:numero1>/<int:numero2>/', views.resta, name='numero2'),
    path('multiplicacion/', views.multiplicacion, name='multiplicacion'),
path('multiplicacion/<int:numero1>/<int:numero2>/', views.multiplicacion, name='numero2'),
]
| 752 |
game.py
|
rooted-cyber/ultroid-plugin
| 0 |
2026363
|
from telethon import events
@bot.on(events.NewMessage(pattern="game", outgoing=True, incoming=True))
async def hi(event):
for c in await bot.inline_query("inlinegamesbot"," a"):
await c.click(event.chat_id)
break
| 223 |
api/mongodb_init_recipe.py
|
yanehi/raspberrypi-cocktailmachine
| 5 |
2026256
|
import pymongo
# database connection
# client = pymongo.MongoClient("mongodb://barkeeper:[email protected]/cocktailmachine")
client = pymongo.MongoClient("mongodb://mongodb:27017/")
# create database cocktailmachine
db = client["cocktailmachine"]
recipe_collection = db["recipe"]
recipe_list = [
{
"name": "Vodka Shot",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd64d",
"amount": 4
}
]
},
{
"name": "Vodka-O",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd64d",
"amount": 4
},
{
"ingredientId": "5f9760bc3c54e107bf5fd653",
"amount": 10
}
]
},
{
"name": "Cuba Libre",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd64b",
"amount": 4
},
{
"ingredientId": "5f9760bc3c54e107bf5fd64c",
"amount": 10
}
]
},
{
"name": "Copa-mixable",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd64c",
"amount": 3
},
{
"ingredientId": "5f9760bc3c54e107bf5fd64c",
"amount": 2
},
{
"ingredientId": "5f9760bc3c54e107bf5fd64b",
"amount": 1
},
{
"ingredientId": "5f9760bc3c54e107bf5fd64d",
"amount": 3
}
]
},
{
"name": "Copa-No-mixable",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd659",
"amount": 9
},
{
"ingredientId": "5f9760bc3c54e107bf5fd658",
"amount": 8
}
]
},
{
"name": "Copa-No-mixable2",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd657",
"amount": 1
},
{
"ingredientId": "5f9760bc3c54e107bf5fd656",
"amount": 3
}
]
},
{
"name": "Wasser",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd651",
"amount": 10
}
]
},
{
"name": "Mischbar",
"ingredients": [
{
"ingredientId": "5f9760bc3c54e107bf5fd64d",
"amount": 3
}
]
}
]
# insert values in ingredients collection
recipe_collection.insert_many(recipe_list)
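# Example (added for illustration, not part of the original script): read one
# document back with standard pymongo find_one() to verify the insert.
# print(recipe_collection.find_one({"name": "Cuba Libre"}))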
| 2,766 |
src/gui/tkinter/scale_gui.py
|
E1mir/PySandbox
| 0 |
2026460
|
from tkinter import *
def get_value():
selection = f"Value: {var.get()}"
label.config(text=selection)
window = Tk()
var = DoubleVar()
scale = Scale(window, variable=var, orient=HORIZONTAL)
scale.pack()
button = Button(window, text="Retrieve value", command=get_value)
button.pack()
label = Label(window)
label.pack()
window.mainloop()
| 367 |
eikonal/implicit_network.py
|
noamroze/moser_flow
| 5 |
2026286
|
# ---------------------------------------------------------------------------------------------------------------------
# 2d surface reconstruction from point-cloud with EikoNet
#
# ---------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------------------------
# imports
import torch
import torch.nn as nn
import numpy as np
import temp
import os
from datetime import datetime
import GPUtil
import sdf_utils
# import old.grad_layers as nng
# ---------------------------------------------------------------------------------------------------------------------
def mkdir_ifnotexists(directory):
if not os.path.exists(directory):
os.mkdir(directory)
deviceIDs = GPUtil.getAvailable(order='memory', limit=1, maxLoad=0.5, maxMemory=0.5, includeNan=False, excludeID=[],
excludeUUID=[])
gpu = deviceIDs[0]
os.environ["CUDA_VISIBLE_DEVICES"] = '{0}'.format(gpu)
# regularization coefficient
ALPHA = 0.1
# softplus coefficient
BETA = 100
n_input = 8
# ---------------------------------------------------------------------------------------------------------------------
class ImplicitNetwork(nn.Module):
def __init__(
self,
latent_size,
d_in,
d_out,
dims,
skip_in=(),
weight_norm=False,
geometric_init=False,
bias=1.0,
):
super().__init__()
dims = [d_in + latent_size] + dims + [d_out]
self.pc_dim = d_in #
self.d_in = d_in + latent_size
self.num_layers = len(dims)
self.skip_in = skip_in
for l in range(0, self.num_layers - 1):
if l + 1 in self.skip_in:
out_dim = dims[l + 1] - dims[0]
else:
out_dim = dims[l + 1]
# lin = nng.LinearGrad(dims[l], out_dim)
lin = nn.Linear(dims[l], out_dim)
if geometric_init:
if l == self.num_layers - 2:
torch.nn.init.normal_(lin.weight, mean=np.sqrt(np.pi) / np.sqrt(dims[l]), std=0.0001)
torch.nn.init.constant_(lin.bias, -bias)
else:
torch.nn.init.constant_(lin.bias, 0.0)
torch.nn.init.normal_(lin.weight, 0.0, np.sqrt(2) / np.sqrt(out_dim))
if weight_norm:
lin = nn.utils.weight_norm(lin)
setattr(self, "lin" + str(l), lin)
# self.softplus = nng.SoftplusGrad(beta=100)
self.softplus = nn.Softplus(beta=100)
# def forward(self, input, compute_grad=False):
def forward(self, input):
'''
:param input: [shape: (N x d_in)]
:param compute_grad: True for computing the input gradient. default=False
:return: x: [shape: (N x d_out)]
x_grad: input gradient if compute_grad=True [shape: (N x d_in x d_out)]
None if compute_grad=False
'''
x = input
# x_grad = None
for l in range(0, self.num_layers - 1):
lin = getattr(self, "lin" + str(l))
if l in self.skip_in:
x = torch.cat([x, input], 1) / np.sqrt(2)
# if compute_grad:
# skip_grad = torch.eye(self.d_in, device=x.device)[:, -self.pc_dim:].repeat(input.shape[0], 1, 1)#
# x_grad = torch.cat([x_grad, skip_grad], 1) / np.sqrt(2)
# x, x_grad = lin(x, x_grad, compute_grad, l == 0, self.pc_dim)
x = lin(x)
if l < self.num_layers - 2:
# x, x_grad = self.softplus(x, x_grad, compute_grad)
x = self.softplus(x)
# return x, x_grad
return x
def gradient(self, x):
x.requires_grad_(True)
y = self.forward(x)[:,:1]
d_output = torch.ones_like(y, requires_grad=False, device=y.device)
gradients = torch.autograd.grad(
outputs=y,
inputs=x,
grad_outputs=d_output,
create_graph=True,
retain_graph=True,
only_inputs=True)[0]
return gradients.unsqueeze(1)
# --------------------------------------------------------------------------------------------------------------------#
# --------------------------------------------------------------------------------------------------------------------#
# --------------------------------------------------------------------------------------------------------------------#
if __name__ == '__main__':
device = 'gpu'
# output path+name
exps_folder_name = 'exps'
expname = 'debug'
# expdir = os.path.join(os.environ['HOME'], 'data/Projects/Eikonal-Network/{0}/{1}'.format(exps_folder_name, expname))
expdir = os.path.join('.', exps_folder_name, expname)
# mkdir_ifnotexists(os.path.join(os.environ['HOME'],
# 'data/Projects/Eikonal-Network/{0}'.format(exps_folder_name)))
mkdir_ifnotexists(os.path.join('.', exps_folder_name))
mkdir_ifnotexists(expdir)
timestamp = '{:%Y_%m_%d_%H_%M_%S}'.format(datetime.now())
mkdir_ifnotexists(os.path.join(expdir, timestamp))
expdir = os.path.join(expdir, timestamp)
# model parameters
d_in = 2
d_out = 1
dims = [512, 512, 512, 512, 512, 512, 512, 512]
skips = [4] # before which layers we do the skip connection
bias = 1.0
N = 1 # batch size
# training parameters
max_epochs = 100001
learning_rate = 1.0 * 1e-4
# learning_rate_decay = 0.95
decrease_lr_every = 0 # 1000
decrease_lr_by = 1.0 # 0.5
sigma_nn = 1 #50
# output surface every
output_surface_every = 1000
# create our MLP model
model = ImplicitNetwork(0,d_in, d_out, dims, skip_in=skips)
if (device == 'gpu'):
model = model.cuda()
# optimize model
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    # 2D experiments
shape = sdf_utils.Line(n_input)
# shape = sdf_utils.LineCrazy(n_input)
# shape = sdf_utils.HalfCircle(n_input)
# shape = sdf_utils.Snowflake(n_input)
# shape = sdf_utils.Square(n_input)
# shape = sdf_utils.LShape(n_input)
# shape = sdf_utils.Random(n_input)
S = shape.get_points()
print(S.shape)
# move to GPU
if device == 'gpu':
S = S.cuda()
# compute sigma per point
n = S.shape[0]
S1 = S.unsqueeze(0).repeat(n, 1, 1)
S2 = S.unsqueeze(1).repeat(1, n, 1)
D = torch.norm(S1 - S2, p=2, dim=2)
sorted, indices = torch.sort(D, dim=1)
sigma_max = D.max()
sigmas = sorted[:, sigma_nn]
sigmas = sigmas.cuda()
for t in range(max_epochs):
X_1 = ((torch.randn(N, S.shape[0], d_in).cuda() * (sigmas.unsqueeze(0).unsqueeze(2).repeat(1, 1, 2)) +
S.unsqueeze(0).repeat(N, 1, 1)).reshape(N * S.shape[0], d_in)).cuda()
X_general = torch.empty(S.shape[0] // 2, d_in).uniform_(-1.0, 1.0).cuda()
X = torch.cat([X_1, X_general], 0)
# compute loss
# Y, grad = model(torch.cat([S, X], 0), compute_grad=True)
# Y = Y[:S.shape[0], 0:1]
Y = model(S)
grad = model.gradient(torch.cat([X,S.clone()],dim=0))
grad_norm = grad[:,0,:].norm(2, dim=1)
grad_loss = ((grad_norm - 1) ** 2).mean()
loss_fn = (torch.abs(Y)).mean() + ALPHA * grad_loss
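        # i.e. loss = mean(|f(S)|) + ALPHA * mean((||grad f([X, S])|| - 1)^2):
        # the first term pulls the surface samples S onto the zero level set,
        # the second is the eikonal term pushing the gradient norm towards 1.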
        # print loss and grad loss every 100 epochs
if divmod(t, 100)[1] == 0:
print(expname, timestamp, t, 'loss =', loss_fn.item(), 'grad_loss =', grad_loss.item())
# backward pass
optimizer.zero_grad()
loss_fn.backward()
optimizer.step()
# output surface in middle epochs, if required
if (t >= 0) and (output_surface_every > 0) and (np.mod(t, output_surface_every) == 0):
temp.plot_contour(points=S,
grad_points=X,
model=model,
path=expdir,
epoch=t,
resolution=500,
shape=shape,
line=True)
torch.save(
{"model": model.state_dict()},
os.path.join(expdir + "/network_{0}.pth".format(t)))
# update learning rate, if required
if (decrease_lr_every > 0) and (np.mod(t, decrease_lr_every) == 0) and (t > 1):
learning_rate = learning_rate * decrease_lr_by
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
torch.save(
{"model": model.state_dict()},
os.path.join(expdir + "/network_{0}.pth".format(t)))
# plot the zero level set surface
temp.plot_contour(points=S,
grad_points=X,
model=model,
path=expdir,
epoch=t,
resolution=1000,
shape=shape,
line=True)
print('end')
| 9,166 |
A.py
|
S4ltster/2022-cope-simulator
| 1 |
2024763
|
In its original definition, 'Cope' means someone creating a psychological defence of a lighter belief to deal with a harsh truth.
However, it can easily be used and misinterpreted by online trolls, usually from Discord or 4Chan, in an attempt to convey mockery, trolling, defamation and insult; usually those people have pathetic lives, so they spend them tossing this word around and insulting/mocking others to feed their superiority complex in online chats and/or forums.
> Discord chat
Person 1: Man, sometimes being shorter than average sucks.
Discord troll: Cope
Person 1: How is this a 'Cope'?
Discord troll: Haha, cope manlet.
Person 1: This is rude, mind your language.
Discord troll: Cope harder manlet.
| 8,806 |
payment_maintenance/payment_maintenance/doctype/bakery_sup_invoice/bakery_sup_invoice.py
|
Srijenanithish/Payment_Maintenance_System
| 0 |
2026417
|
# Copyright (c) 2021, Srijena_Nithish and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
class BakerySupInvoice(Document):
def validate(self):
if self.qty >0:
if frappe.db.exists("Bakery Warehouse",{'item': self.item}):
existing_qty = frappe.db.get_value("Bakery Warehouse",{'item':self.item},"qty")
updated_qty = self.qty
if existing_qty:
updated_qty += existing_qty
frappe.db.set_value("Bakery Warehouse",{'item': self.item},"qty",updated_qty)
else:
stock_entry = frappe.new_doc("Bakery Warehouse")
stock_entry.item = self.item
stock_entry.qty = self.qty
stock_entry.save()
if self.qty > 0:
if frappe.db.exists("Bakery Sup Payment",{'supplier': self.supplier}):
existing_amt = frappe.db.get_value("Bakery Sup Payment",{'supplier':self.supplier},"paid_amount")
exist_balance_amt = frappe.db.get_value("Bakery Sup Payment",{'supplier':self.supplier},"remaining_to_pay")
exist_total_amt = frappe.db.get_value("Bakery Sup Payment",{'supplier':self.supplier},"total_amount")
updated_balance_amt = self.balance_amount_to_pay
updated_amt = self.amount_paid
updated_total_amt = self.total_amount
if existing_amt and exist_balance_amt:
existing_amt = updated_amt
exist_balance_amt = updated_balance_amt
exist_total_amt = updated_total_amt
frappe.db.set_value("Bakery Sup Payment",{'supplier': self.supplier},"paid_amount",existing_amt)
frappe.db.set_value("Bakery Sup Payment",{'supplier': self.supplier},"remaining_to_pay",exist_balance_amt)
frappe.db.set_value("Bakery Sup Payment",{'supplier': self.supplier},"total_amount",exist_total_amt)
else:
stock_entry = frappe.new_doc("Bakery Sup Payment")
stock_entry.supplier = self.supplier
stock_entry.paid_amount = self.amount_paid
stock_entry.remaining_to_pay = self.balance_amount_to_pay
stock_entry.total_amount = self.total_amount
stock_entry.save()
| 2,017 |
cars/automobile/migrations/0001_initial.py
|
DeyberLuna/Frameworks-9a-2021
| 0 |
2025601
|
# Generated by Django 3.1.7 on 2021-04-21 23:25
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Brand',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('marca', models.CharField(max_length=50)),
('modelo_marca', models.CharField(max_length=50)),
('create_at', models.DateTimeField()),
('update_at', models.DateTimeField()),
],
),
migrations.CreateModel(
name='BrandReference',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('reference', models.CharField(max_length=150)),
('create_at', models.DateTimeField()),
('update_at', models.DateTimeField()),
('delete_at', models.DateTimeField()),
('brand_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='automobile.brand')),
],
),
migrations.CreateModel(
name='Auto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('modelo', models.IntegerField()),
('color', models.CharField(max_length=150)),
('clase', models.CharField(max_length=150)),
('numero_chasis', models.CharField(max_length=150)),
('numero_motor', models.CharField(max_length=150)),
('tipo', models.CharField(max_length=150)),
('placa', models.CharField(max_length=150)),
('kilometraje', models.IntegerField()),
('cilindraje', models.IntegerField()),
('tipo_combustible', models.CharField(max_length=150)),
('create_at', models.DateTimeField()),
('update_at', models.DateTimeField()),
('delete_at', models.DateTimeField()),
('brand_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='automobile.brand')),
('reference_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='automobile.brandreference')),
],
),
]
| 2,516 |
abi_random.py
|
Abirami33/python-75-hackathon
| 0 |
2022713
|
#GUESS THE NUMBER GAME USING RANDOM NUMBERS
#importing random library
import random
import sys
#function for this play
def playfun(count,x,num):
if num == x:
print("great & excellent guess!")
sys.exit()
elif abs((num-x))<=25:
print("you guessed 25% away!")
count=count+1
return count
elif abs((num-x))<=50:
print("you guessed 50% away!")
count=count+1
return count
elif abs((num-x))<=75:
print("you guessed 75% away!")
count=count+1
return count
elif abs((num-x))<=100:
print("You guessed 100% away!")
count=count+1
return count
print('***************WELCOME To WORD_GAUGE!*************')
x=random.randint(1,100)
#print(x)
c=0
num=int(input("Guess the number:"))
c=playfun(0,x,num);
print('you have',3-c,'more chances')
#to provide the user with 3 chances
while c <= 2:
choice= input("You have some more chances! Want to play again! Type yes or no ")
if choice.lower() == 'yes':
num=int(input("Guess the number:"))
c=playfun(c,x,num);
print('you have',3-c,'more chances')
elif choice.lower() == 'no':
break
else:
print("Enter the valid choice please!")
break
print('Chances exceeded! Well tried! see you later!')
# to provide hint
def hint(num,x):
if x>num:
print('Add',abs(x-num),'to your last guess')
elif x<num:
print('Subtract',abs(x-num),'from your last guess')
print('Take a hint to find out the guess!')
hint(num,x);
num1=int(input("Finally guess the number:"))
#final guess
if num1==x:
print("You have finally done well!")
else:
print("You are dropped out!")
'''OUTPUT:stud@HP-246-Notebook-PC:~$ python abi_random.py
***************WELCOME To WORD_GAUGE!*************
Guess the number:7
you guessed 75% away!
you have 2 more chances
You have some more chances! Want to play again! Type yes or no yes
Guess the number:77
you guessed 25% away!
you have 1 more chances
You have some more chances! Want to play again! Type yes or no yes
Guess the number:96
you guessed 50% away!
you have 0 more chances
Chances exceeded! Well tried! see you later!
Take a hint to find out the guess!
Subtract 34 from your last guess
Finally guess the number:62
You have finally done well!
'''
| 2,412 |
arct_token_swapping.py
|
rtohid/qtranspilation
| 0 |
2025800
|
import networkx as nx
from arct.permutation.general import ApproximateTokenSwapper
from copy import deepcopy
from typing import List, Tuple
Swap = Tuple[int, int]
def demo_approx_token_swapping(in_circuit: nx.Graph,
                               mapping: List[Swap]) -> List[Swap]:
permuter = ApproximateTokenSwapper(in_circuit)
original_mapping = list(in_circuit.nodes())
print("Original mapping:")
print(original_mapping)
print()
permutation_order = permuter.map(mapping)
new_mapping = deepcopy(original_mapping)
for permutation in permutation_order:
new_mapping[permutation[0]], new_mapping[permutation[1]] = new_mapping[
permutation[1]], new_mapping[permutation[0]]
print("New mapping:")
print(new_mapping)
print()
return permutation_order
if __name__ == "__main__":
demo_circuit = nx.convert_node_labels_to_integers(nx.grid_2d_graph(4, 4))
mapping = [[node, 15 - node] for node in demo_circuit.nodes()]
demo_circuit_permutations = demo_approx_token_swapping(
demo_circuit, mapping)
print("Permutation order:")
print(demo_circuit_permutations)
| 1,154 |
src/spaceone/statistics/info/schedule_info.py
|
choonho/statistics
| 0 |
2024681
|
import functools
from spaceone.api.statistics.v1 import schedule_pb2
from spaceone.core.pygrpc.message_type import *
from spaceone.statistics.model.schedule_model import Schedule, Scheduled, JoinQuery, Formula, QueryOption
__all__ = ['ScheduleInfo', 'SchedulesInfo']
def ScheduledInfo(vo: Scheduled):
info = {
'cron': vo.cron,
'interval': vo.interval,
'hours': vo.hours,
'minutes': vo.minutes
}
return schedule_pb2.Scheduled(**info)
def ScheduleInfo(schedule_vo: Schedule, minimal=False):
info = {
'schedule_id': schedule_vo.schedule_id,
'topic': schedule_vo.topic,
'state': schedule_vo.state,
}
if not minimal:
info.update({
'options': change_struct_type(schedule_vo.options.to_dict()) if schedule_vo.options else None,
'schedule': ScheduledInfo(schedule_vo.schedule) if schedule_vo.schedule else None,
'tags': change_struct_type(schedule_vo.tags),
'domain_id': schedule_vo.domain_id,
'created_at': change_timestamp_type(schedule_vo.created_at),
'last_scheduled_at': change_timestamp_type(schedule_vo.last_scheduled_at)
})
return schedule_pb2.ScheduleInfo(**info)
def SchedulesInfo(schedule_vos, total_count, **kwargs):
return schedule_pb2.SchedulesInfo(results=list(
map(functools.partial(ScheduleInfo, **kwargs), schedule_vos)), total_count=total_count)
| 1,450 |
driveapi.py
|
naranma/drive-api
| 0 |
2025199
|
import os
import pprint
import uuid
import pickle
import google.oauth2.credentials
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
pp = pprint.PrettyPrinter(indent=2)
# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret.
CLIENT_SECRETS_FILE = "client_secrets.json"
# This access scope grants read-only access to the authenticated user's Drive
# account.
#SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']
SCOPES = ['https://www.googleapis.com/auth/drive']
API_SERVICE_NAME = 'drive'
API_VERSION = 'v3'
#STORAGE = Storage('storage.json')
#credentials = STORAGE.get()
CREDENTIALS_PICKLE = 'token.pickle'
def get_authenticated_service2():
credentials = None
if os.path.exists(CREDENTIALS_PICKLE):
with open(CREDENTIALS_PICKLE, 'rb') as token:
credentials = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not credentials or not credentials.valid:
if credentials and credentials.expired and credentials.refresh_token:
credentials.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRETS_FILE, SCOPES)
#credentials = flow.run_local_server(port=0)
credentials = flow.run_console()
# Save the credentials for the next run
with open(CREDENTIALS_PICKLE, 'wb') as token:
pickle.dump(credentials, token)
return build(API_SERVICE_NAME, API_VERSION, credentials = credentials)
def get_authenticated_service():
flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRETS_FILE, SCOPES)
credentials = flow.run_console()
return build(API_SERVICE_NAME, API_VERSION, credentials = credentials)
def list_drive_files(service, **kwargs):
results = service.files().list(
**kwargs
).execute()
pp.pprint(results)
def list_drive_files2(service, **kwargs):
request = service.files().list(
**kwargs
)
while request is not None:
response = request.execute()
pp.pprint(response)
request = service.drives().list_next(previous_request=request,previous_response=response)
def create_teamdrive(service, td_name):
request_id = str(uuid.uuid4()) # random unique UUID string
body = {'name': td_name}
return service.teamdrives().create(body=body,
requestId=request_id, fields='id').execute().get('id')
def list_teamdrive(service):
results = service.drives().list(pageSize=100).execute() # pageSize=None return 10 items. Max = 100
return results.get('drives')
def list_teamdrive2(service):
results = []
request = service.drives().list(pageSize=100) # pageSize=None return 10 items. Max = 100
while request is not None:
response = request.execute()
results = results + response.get('drives')
#pp.pprint(response)
request = service.drives().list_next(previous_request=request,previous_response=response)
return results
def update_teamdrive(service, td_id, td_name):
body = {'name': td_name}
return service.teamdrives().update(body=body,
teamDriveId=td_id, fields='id').execute().get('id')
def get_teamdrive(service, td_id):
return service.teamdrives().get(
teamDriveId=td_id, fields='*').execute()
def add_user(service, td_id, user, role='organizer'):
body = {'type': 'user', 'role': role, 'emailAddress': user}
return service.permissions().create(body=body, fileId=td_id, sendNotificationEmail=False,
supportsTeamDrives=True, fields='id').execute().get('id')
def add_group(service, td_id, group, role='organizer'):
body = {'type': 'group', 'role': role, 'emailAddress': group}
return service.permissions().create(body=body, fileId=td_id, sendNotificationEmail=False,
supportsTeamDrives=True, fields='id').execute().get('id')
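# Example (an illustrative addition, not part of the original module):
# authenticate once and list a few files; pageSize and fields are standard
# Drive v3 files().list parameters.
# if __name__ == '__main__':
#     service = get_authenticated_service2()
#     list_drive_files(service, pageSize=10, fields='files(id, name)')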
| 3,994 |
Tree/postorder_traversal.py
|
AaronOS0/leetcode_solver
| 0 |
2026061
|
#!/usr/bin/env python
from typing import List, Optional
from collections import Counter, deque
"""
Questions:
145. Binary Tree Postorder Traversal
590. N-ary Tree Postorder Traversal
"""
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Node:
def __init__(self, val=None, children=None):
self.val = val
self.children = children
class Solution:
"""
145. Binary Tree Postorder Traversal
Given the root of a binary tree, return the postorder traversal of its nodes' values.
https://leetcode.com/problems/binary-tree-postorder-traversal/
>>> root = [1,null,2,3]
>>> [3,2,1]
"""
    # Time Complexity: O(n)
    # Space Complexity: O(n)
# Recursion version
def postorderTraversal(self, root: Optional[TreeNode]) -> List[int]:
res = []
if not root:
return res
def recursion(root, res):
if root:
recursion(root.left, res)
recursion(root.right, res)
res.append(root.val)
recursion(root, res)
return res
# Iteration version
def postorderTraversal1(self, root: Optional[TreeNode]) -> List[int]:
res, stack = [], [root]
while stack:
node = stack.pop()
if node:
stack.append(node.left)
stack.append(node.right)
res.append(node.val)
return res[::-1]
"""
590. N-ary Tree Postorder Traversal
Given the root of an n-ary tree, return the postorder traversal of its nodes' values.
>>> [1,null,3,2,4,null,5,6]
>>> [5,6,3,2,4,1]
"""
# Recursion version
def postorder(self, root: 'Node') -> List[int]:
res = []
# Empty tree
if not root:
return res
def recursion(root, res):
for child in root.children:
recursion(child, res)
res.append(root.val)
recursion(root, res)
return res
# Iteration version
def postorder1(self, root: 'Node') -> List[int]:
res = []
if not root:
return res
stack = [root]
while stack:
curr = stack.pop()
res.append(curr.val)
stack.extend(curr.children)
return res[::-1]
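# Quick check of the docstring example for problem 145, [1,null,2,3] -> [3,2,1]
# (this driver block is an addition for illustration, not part of the original file):
if __name__ == "__main__":
    example_root = TreeNode(1, None, TreeNode(2, TreeNode(3)))
    print(Solution().postorderTraversal(example_root))   # [3, 2, 1]
    print(Solution().postorderTraversal1(example_root))  # [3, 2, 1]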
| 2,395 |
main.py
|
nikitt-code/sso-python-sdk
| 1 |
2026485
|
from ssosdk import SSOSDK
if __name__ == '__main__':
app = SSOSDK("TOKEN HERE")
# users.get
# users_array - array of ids, if u need one user [ id ]
# app.usersGet([ 463406970 ])
# transfer.create
# id - recipient id, count - value of send coins
# app.transfersCreate(id, count)
# transfers.getHistory
# count - count of transfers to show
# app.transfersGetHistory(count)
# transfers.get
# transfer_ids - array of ids of transfers, if u need one transfer [ id ]
# app.transfersGet([ 743 ])
# webhooks.get
# app.webhooksGet(url)
# webhooks.create
# url - url to your server
# app.webhooksSet(url)
# webhooks.delete
# app.webhooksDelete(url)
# promocodes.create
# count, activations
# app.promoCreate(count, activations)
# promocodes.get
# app.promoGet()
# promocodes.activate
# code - promocode value
# app.promoActivate()
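    # A minimal usage sketch built only from the calls listed above; the ids,
    # amounts and counts are placeholder values, and the return shapes are an
    # assumption, not documented here:
    # users = app.usersGet([463406970])       # fetch one user record
    # app.transfersCreate(463406970, 10)      # send 10 coins to that user
    # history = app.transfersGetHistory(5)    # last 5 transfers
    # print(users, history)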
| 943 |
src/chains/migrations/0013_rename_url_uri.py
|
tharsis/safe-config-service
| 8 |
2024852
|
# Generated by Django 3.2.5 on 2021-07-15 15:07
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("chains", "0012_chain_gas_price_fixed_wei"),
]
operations = [
migrations.RenameField(
model_name="chain",
old_name="block_explorer_url",
new_name="block_explorer_uri",
),
migrations.RenameField(
model_name="chain",
old_name="currency_logo_url",
new_name="currency_logo_uri",
),
migrations.RenameField(
model_name="chain",
old_name="gas_price_oracle_url",
new_name="gas_price_oracle_uri",
),
migrations.RenameField(
model_name="chain",
old_name="rpc_url",
new_name="rpc_uri",
),
migrations.RenameField(
model_name="chain",
old_name="transaction_service_url",
new_name="transaction_service_uri",
),
]
| 1,026 |
plotROC.py
|
hdadong/dadong
| 0 |
2025288
|
import os
import cv2
import sys
import os.path as osp
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['xtick.labelsize']=24
mpl.rcParams['ytick.labelsize']=24
def main():
gto=open('logcacdvs207.log','r')
lines=gto.readlines()
dis=[]
for line in lines:
dis.append(float(line.split(' ')[1]))
print(len(dis),dis[0])
sot=sorted(dis)
fpr=[]
far=[]
for testi in range(0,4000):
fenge=sot[testi]
label=[]
for distance in dis:
if distance>fenge:
label.append(0)
else:
label.append(1)
#print label
fp=0
fa=0
        for i in range(0,10):
            for j in range(i*200,(i+1)*200):
if i in [0,2,4,6,8]:
if label[j]==0:
fp+=1
if i in [1,3,5,7,9]:
if label[j]==1:
fa+=1
if testi%10==0:
            print(testi, ', fpr:', fp/2000.0, 'far:', fa/2000.0)
far.append(fa/2000.0)
fpr.append(fp/2000.0)
fpr=np.asarray(fpr)
far=np.asarray(far)
plt.figure('ROC')
plt.plot(far,1-fpr)
plt.xlim((0,1))
plt.ylim((0,1))
plt.show()
if __name__ == '__main__':
main()
| 1,228 |
toffifee.py
|
christiankuhl/nonsense
| 0 |
2026290
|
from collections import OrderedDict
import sys
import os
import time
class Tracer(object):
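    """Depth-first search with backtracking for a knight's tour on a
    columns x rows board (intent inferred from the 1x2/2x1 move offsets
    in possibilities() and the knight glyph used when printing)."""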
def __init__(self, columns=6, rows=4):
self.columns = columns
self.rows = rows
self.position = (0, 0)
self.history = OrderedDict()
self.history[self.position] = self.possibilities()
def possibilities(self):
p = [(self.position[0] + l*i, self.position[1] + k*j) for k in [-1, 1] for l in [-1, 1]
for i in range(1, 3) for j in range(1, 3) if i + j == 3]
p = [(r, s) for (r, s) in p if r in range(self.columns) and s in range(self.rows)
and not (r, s) in self.history]
return p
def trace(self):
while not self.done():
possibilities = self.history[next(reversed(self.history))]
if possibilities:
position = possibilities.pop()
path = self.move(position)
else:
try:
self.back()
except StopIteration:
break
if path:
return path
else:
print("No solution fond!")
def move(self, position):
self.position = position
self.history[position] = self.possibilities()
if self.done():
solution = list(self.history.keys())
print("Found path:", solution)
return solution
def back(self):
self.history.popitem()
self.position = next(reversed(self.history))
def done(self):
return len(self.history) == self.columns * self.rows
def print_there(row, col, text):
sys.stdout.write("\x1b7\x1b[%d;%df%s\x1b8" % (row + 6, col + 6, text))
sys.stdout.flush()
if __name__ == '__main__':
os.system("clear")
a = int(input("Columns: "))
b = int(input("Rows: "))
t = Tracer(a, b)
print("Calculating...")
solution = t.trace()
if solution:
os.system("setterm -cursor off")
for row in range(b):
for col in range(a):
print_there(row, col, "*")
prev_row, prev_col = 0, 0
pawn = u"\u265e"
print_there(solution[0][0], solution[0][1], "\033[92m" + pawn)
time.sleep(1)
for row, col in solution[1:]:
print_there(prev_col, prev_row, "\033[94m*")
print_there(col, row, "\033[92m" + pawn)
prev_row, prev_col = row, col
time.sleep(1)
os.system("setterm -cursor on")
| 2,556 |
examples/print_settings.py
|
iorodeo/pyMightLED
| 0 |
2025864
|
"""
print_settings.py - illustrates how to print the device's current settings.
"""
from pyMightLED import LedController
port = '/dev/ttyUSB0'
dev = LedController(port)
dev.printSettings()
| 188 |
record_post_trade.py
|
IncarboneLuca/OneTradeParDay
| 0 |
2025974
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 2 23:57:53 2021
@author:<NAME>
Record data around the market open, to be used in the future to improve your
algorithm or change the thresholds to improve the gain.
- set buy/sell at 15:26 (running OTAD.py script)
- after 16:00 sell at any gain and set stopLOSSES
run record_post_trade.py after 19:00 daily (before next trade)
run DDD.py to run the analysis on recorded database to test possible trade and simulate the possible past gain
"""
from credentials import get_cred
from tvDatafeed import TvDatafeed,Interval
import pandas as pd
from datetime import datetime
def veryfy_trade(one_trade,low_to_buy,high_to_sell):
bought = False
sold = False
#do not buy after 15:39
one_trade_buy = one_trade.between_time('15:19', '15:39')
for i in range(len(one_trade_buy)):
if one_trade_buy.iloc[i]['low']<=low_to_buy:
bought = True
# order after 15:26
one_trade_sell = one_trade.between_time('15:26', '17:01')
for i in range(len(one_trade_sell)):
if one_trade_sell.iloc[i]['high']>=high_to_sell:
sold = True
if bought and sold:
return "OK"
elif bought:
return "Losses"
else:
return "noTrade"
def extract_data(file_n):
#get data 5m resolution
df = pd.read_csv(file_n, index_col=0, parse_dates=True)
return df
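# Example (not executed here): how veryfy_trade() could be applied to one
# recorded day. The file name and the two thresholds below are hypothetical
# placeholders, not values produced by OTAD.py:
# day = extract_data('DB/USTECH100/1m/2021-07-02_dailyRecord.csv')
# print(veryfy_trade(day, low_to_buy=14500.0, high_to_sell=14550.0))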
username,password=get_cred('tradingview');
# you need to run the credentials.py script beforehand to store a valid credential
tv=TvDatafeed(username=username, password=password)
data = tv.get_hist('WHSELFINVEST:USTECH100CFD','WHSELFINVEST',interval=Interval.in_1_minute,n_bars=1000)
print(data)
# data=pd.read_csv('DB/out.csv')
# print(data)
# str_date = "2021-07-02"
# data_day = data.loc[str_date:str_date]
# start=data_day.between_time('15:19','15:24')
str_date = datetime.today().strftime('%Y-%m-%d')
data_day = data
one_trade = data_day.between_time('15:19', '17:01')
# BackUP data for future analysis
USTESCH100_data = tv.get_hist('WHSELFINVEST:USTECH100CFD','WHSELFINVEST',interval=Interval.in_1_minute,n_bars=5000)
USSP500_data = tv.get_hist('WHSELFINVEST:USSP500CFD','WHSELFINVEST',interval=Interval.in_1_minute,n_bars=5000)
WALLSTREET_data = tv.get_hist('WHSELFINVEST:WALLSTREETCFD','WHSELFINVEST',interval=Interval.in_1_minute,n_bars=5000)
# Back up just the opening-market time window
USTESCH100_d = USTESCH100_data.between_time('15:00', '19:00')
USSP500_d = USSP500_data.between_time('15:00', '19:00')
WALLSTREET_d = WALLSTREET_data.between_time('15:00', '19:00')
#save to csv file
USTESCH100_d.to_csv(str('DB/USTECH100/1m/'+str_date+'_dailyRecord.csv'))
USSP500_d.to_csv(str('DB/USSP500/1m/'+str_date+'_dailyRecord.csv'))
WALLSTREET_d.to_csv(str('DB/WALLSTREET/1m/'+str_date+'_dailyRecord.csv'))
#Backup 5m resolution
USTESCH100_data = tv.get_hist('WHSELFINVEST:USTECH100CFD','WHSELFINVEST',interval=Interval.in_5_minute,n_bars=5000)
USSP500_data = tv.get_hist('WHSELFINVEST:USSP500CFD','WHSELFINVEST',interval=Interval.in_5_minute,n_bars=5000)
WALLSTREET_data = tv.get_hist('WHSELFINVEST:WALLSTREETCFD','WHSELFINVEST',interval=Interval.in_5_minute,n_bars=5000)
# consider past values to be added to the output file
USP500_df = extract_data( ".\\DB\\USSP500\\5m\\USSP500_5m_Record.csv")
USTECH100_df = extract_data( ".\\DB\\USTECH100\\5m\\USTECH100_5m_Record.csv")
WALLSTREET_df = extract_data( ".\\DB\\WALLSTREET\\5m\\WALLSTREET_5m_Record.csv")
# concatenate the old database and the new data to avoid data redundancy
USSP500_d=pd.concat([USP500_df,USSP500_data])
USSP500_d = USSP500_d[~USSP500_d.index.duplicated()]
USTESCH100_d=pd.concat([USTECH100_df,USTESCH100_data])
USTESCH100_d = USTESCH100_d[~USTESCH100_d.index.duplicated()]
WALLSTREET_d=pd.concat([WALLSTREET_df,WALLSTREET_data])
WALLSTREET_d = WALLSTREET_d[~WALLSTREET_d.index.duplicated()]
#save to csv file
USTESCH100_d.to_csv(str('DB/USTECH100/5m/USTECH100_5m_Record.csv'))
USSP500_d.to_csv(str('DB/USSP500/5m/USSP500_5m_Record.csv'))
WALLSTREET_d.to_csv(str('DB/WALLSTREET/5m/WALLSTREET_5m_Record.csv'))
| 4,108 |
src/gallium/tools/trace/format.py
|
SoftReaper/Mesa-Renoir-deb
| 0 |
2024953
|
#!/usr/bin/env python3
##########################################################################
#
# Copyright 2008 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sub license, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice (including the
# next paragraph) shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
# IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
# ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
##########################################################################
import sys
class Formatter:
'''Plain formatter'''
def __init__(self, stream):
self.stream = stream
def text(self, text):
self.stream.write(text)
def newline(self):
self.text('\n')
def function(self, name):
self.text(name)
def variable(self, name):
self.text(name)
def literal(self, value):
self.text(str(value))
def address(self, addr):
self.text(str(addr))
class AnsiFormatter(Formatter):
'''Formatter for plain-text files which outputs ANSI escape codes. See
http://en.wikipedia.org/wiki/ANSI_escape_code for more information
concerning ANSI escape codes.
'''
_csi = '\33['
_normal = '0m'
_bold = '1m'
_italic = '3m'
_red = '31m'
_green = '32m'
_blue = '34m'
def _escape(self, code):
self.text(self._csi + code)
def function(self, name):
self._escape(self._bold)
Formatter.function(self, name)
self._escape(self._normal)
def variable(self, name):
self._escape(self._italic)
Formatter.variable(self, name)
self._escape(self._normal)
def literal(self, value):
self._escape(self._blue)
Formatter.literal(self, value)
self._escape(self._normal)
def address(self, value):
self._escape(self._green)
Formatter.address(self, value)
self._escape(self._normal)
class WindowsConsoleFormatter(Formatter):
'''Formatter for the Windows Console. See
http://code.activestate.com/recipes/496901/ for more information.
'''
STD_INPUT_HANDLE = -10
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLUE = 0x01
FOREGROUND_GREEN = 0x02
FOREGROUND_RED = 0x04
FOREGROUND_INTENSITY = 0x08
BACKGROUND_BLUE = 0x10
BACKGROUND_GREEN = 0x20
BACKGROUND_RED = 0x40
BACKGROUND_INTENSITY = 0x80
_normal = FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED
_bold = FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY
_italic = FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED
_red = FOREGROUND_RED | FOREGROUND_INTENSITY
_green = FOREGROUND_GREEN | FOREGROUND_INTENSITY
_blue = FOREGROUND_BLUE | FOREGROUND_INTENSITY
def __init__(self, stream):
Formatter.__init__(self, stream)
if stream is sys.stdin:
nStdHandle = self.STD_INPUT_HANDLE
elif stream is sys.stdout:
nStdHandle = self.STD_OUTPUT_HANDLE
elif stream is sys.stderr:
nStdHandle = self.STD_ERROR_HANDLE
else:
nStdHandle = None
if nStdHandle:
import ctypes
self.handle = ctypes.windll.kernel32.GetStdHandle(nStdHandle)
else:
self.handle = None
def _attribute(self, attr):
if self.handle:
import ctypes
ctypes.windll.kernel32.SetConsoleTextAttribute(self.handle, attr)
def function(self, name):
self._attribute(self._bold)
Formatter.function(self, name)
self._attribute(self._normal)
def variable(self, name):
self._attribute(self._italic)
Formatter.variable(self, name)
self._attribute(self._normal)
def literal(self, value):
self._attribute(self._blue)
Formatter.literal(self, value)
self._attribute(self._normal)
def address(self, value):
self._attribute(self._green)
Formatter.address(self, value)
self._attribute(self._normal)
def DefaultFormatter(stream):
if sys.platform in ('linux2', 'linux', 'cygwin'):
return AnsiFormatter(stream)
elif sys.platform in ('win32', ):
return WindowsConsoleFormatter(stream)
else:
return Formatter(stream)
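# Example usage (an illustrative addition, not part of the original module);
# the function name passed in is just a placeholder:
# formatter = DefaultFormatter(sys.stdout)
# formatter.function('glDrawArrays')
# formatter.text('(')
# formatter.literal(0)
# formatter.text(')')
# formatter.newline()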
| 5,156 |
tests/return_values.py
|
nocturn9x/asyncevents
| 5 |
2026408
|
import asyncio
from asyncevents import on_event, emit
@on_event("hello")
async def hello(_, event: str):
print(f"Hello {event!r}!")
return 42
@on_event("hello")
async def owo(_, event: str):
print(f"owo {event!r}!")
return 1
@on_event("hello")
async def hi(_, event: str):
print(f"Hello {event!r}!")
async def main():
print("Firing blocking event 'hello'")
assert await emit("hello") == [42, 1, None]
print("Handlers for event 'hello' have exited")
if __name__ == "__main__":
asyncio.run(main())
| 542 |
VIS_2020/WeightCalculationFromImageBrightness.py
|
rakib045/TCarto
| 3 |
2026083
|
from energyMinimization import *
import colorsys
import csv
from collections import Counter
import itertools
import math
square_grid = 128
input_image_file = "input/LowLightImageEnhancement.png"
output_weight_filename = "input/LowLightImageEnhancement_lightness_weight_128_128.txt"
def create_hls_array(image):
pixels = image.load()
hls_array = np.empty(shape=(image.height, image.width, 3), dtype=float)
for row in range(0, image.height):
for column in range(0, image.width):
rgb = pixels[column, row]
hls = colorsys.rgb_to_hls(rgb[0]/255.0, rgb[1]/255.0, rgb[2]/255.0)
hls_array[row, column, 0] = hls[0]
#hls_array[row, column, 1] = 100*(2**(2.5*(hls[1])))
hls_array[row, column, 1] = hls[1]
hls_array[row, column, 2] = hls[2]
return hls_array
def image_from_hls_array(hls_array):
new_image = Image.new("RGB", (hls_array.shape[1], hls_array.shape[0]))
for row in range(0, new_image.height):
for column in range(0, new_image.width):
rgb = colorsys.hls_to_rgb(hls_array[row, column, 0],
hls_array[row, column, 1],
hls_array[row, column, 2])
rgb = (int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255))
new_image.putpixel((column, row), rgb)
return new_image
input_image = Image.open(input_image_file)
hls = create_hls_array(input_image)
#print('Min:', hls[:, :, 1].min())
#print('Max:', hls[:, :, 1].max())
#new_image = image_from_hls_array(hls)
#new_image.save('input/hls_test3.png')
grid_count_horizontal = square_grid
grid_count_vertical = square_grid
max = hls.shape[0]
var = hls[:, :, 1]
inc_y = int(max/grid_count_vertical)
inc_x = math.ceil(max/grid_count_horizontal)
data = []
for x in range(0, grid_count_horizontal, 1):
for y in range(0, grid_count_vertical, 1):
sum = 0
n = 0
#print('....'+str(x)+'.....'+str(y)+'.......')
for j in range((y)*inc_y,(y+1)*inc_y,1):
for i in range((x)*inc_x,(x+1)*inc_x,1):
sum = var[i, j] + sum
n = n+1
av = sum/n
#print(av)
data.append(av)
#min_val = min(data)
#max_val = max(data)
#print('Min:', min_val)
#print('Max:', max_val)
count = 1
with open(output_weight_filename, 'w') as f:
for i in data:
if count != grid_count_horizontal * grid_count_vertical:
f.write('{:.4f},'.format(i))
if count % grid_count_horizontal == 0:
f.write('\n')
else:
f.write('{:.4f}'.format(i))
count += 1
print('Finished !!')
| 2,696 |
openstack_dashboard/dashboards/sdscontroller/executions/views.py
|
iostackproject/SDS-dashboard
| 1 |
2025631
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from . import tabs as mydashboard_tabs
from . import forms as project_forms
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.views import generic
from horizon import tabs
from horizon import forms
from horizon import exceptions
from horizon.utils import memoized
from openstack_dashboard.api import zoeapi
class IndexView(tabs.TabbedTableView):
tab_group_class = mydashboard_tabs.MypanelTabs
template_name = 'sdscontroller/executions/index.html'
def get_data(self, request, context, *args, **kwargs):
# Add data to the context here...
return context
class CreateExecutionView(forms.ModalFormView):
form_class = project_forms.CreateExecutionForm
template_name = 'sdscontroller/executions/create.html'
success_url = reverse_lazy("horizon:sdscontroller:executions:index")
modal_id = "create_execution_modal"
modal_header = _("Create Execution")
submit_label = _("Create Execution")
submit_url = "horizon:sdscontroller:executions:create"
def form_valid(self, form):
return super(CreateExecutionView, self).form_valid(form)
def get_initial(self):
initial = super(CreateExecutionView, self).get_initial()
initial['name'] = ''
initial['app_name'] = ''
return initial
def get_context_data(self, **kwargs):
context = super(CreateExecutionView, self).get_context_data(**kwargs)
return context
class ExecutionDetailsView(forms.ModalFormMixin, generic.TemplateView):
template_name = 'sdscontroller/executions/details.html'
page_title = _("Executions Details")
@memoized.memoized_method
def get_object(self):
try:
return zoeapi.get_execution_details(self.kwargs["instance_id"])
except Exception:
redirect = reverse("horizon:sdscontroller:executions:index")
exceptions.handle(self.request,
_('Unable to retrieve details.'),
redirect=redirect)
def get_context_data(self, **kwargs):
print("zoe execution details view: get_context_data")
context = super(ExecutionDetailsView, self).get_context_data(**kwargs)
context['execution'] = self.get_object()
return context
| 2,911 |
dentexchange/apps/libs/tests/haystack/test_get_instance.py
|
hellhound/dentexchange
| 1 |
2025936
|
# -*- coding:utf-8 -*-
import unittest
import mock
from ...haystack.utils import get_instance
class GetInstanceTestCase(unittest.TestCase):
def test_get_instance_should_call_and_return_managers_get_with_pk_from_model_class(
self):
# setup
model_class = mock.Mock()
pk = '1'
get = model_class._default_manager.get
# action
returned_value = get_instance(model_class, pk)
# assert
self.assertDictEqual(dict(pk=int(pk)), get.call_args[1])
self.assertEqual(id(get.return_value), id(returned_value))
def test_get_instance_should_return_none_when_pk_does_not_exist(
self):
# setup
model_class = mock.Mock()
pk = '1'
get = model_class._default_manager.get
model_class.DoesNotExist = Exception
get.side_effect = model_class.DoesNotExist()
# action
returned_value = get_instance(model_class, pk)
# assert
self.assertDictEqual(dict(pk=int(pk)), get.call_args[1])
self.assertIsNone(returned_value)
def test_get_instance_should_return_none_when_pk_yields_multiple_objects(
self):
# setup
model_class = mock.Mock()
pk = '1'
get = model_class._default_manager.get
model_class.MultipleObjectsReturned = Exception
get.side_effect = model_class.MultipleObjectsReturned()
# action
returned_value = get_instance(model_class, pk)
# assert
self.assertDictEqual(dict(pk=int(pk)), get.call_args[1])
self.assertIsNone(returned_value)
| 1,616 |
app/resolve_relocations.py
|
mongodb-labs/disasm
| 269 |
2025630
|
# Copyright 2016 MongoDB Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from disassemble import disasm_plt
from disasm_demangler import demangle
from elftools.elf.relocation import RelocationSection
import struct, time
MAX_INSTR_SIZE = 16
def resolve_plt(addr, plt_section, exe):
sym = None
plt_offset = addr - plt_section['sh_addr'] + plt_section['sh_offset']
plt_section.stream.seek(plt_offset)
# "execute" instructions in .plt to find indirection
rela_addr, size = disasm_plt(plt_section.stream.read(MAX_INSTR_SIZE), addr)
if not rela_addr:
return None
# update rela_addr if it's in the reloc table
reloc_section = exe.elff.get_section_by_name(".rela.plt")
if not reloc_section:
reloc_section = exe.elff.get_section_by_name(".rel.plt")
if not reloc_section:
return None
sym = sym_from_reloc_section(exe, rela_addr, reloc_section)
if sym: # found in reloc table
sym.name = demangle(sym.name) + " (.plt)"
return sym
else: # not in relocation table
print ("not in reloc table")
section = exe.get_section_from_offset(rela_addr)
if section.name == ".text":
return get_symbol_by_addr(rela_addr)
else:
print "Unhandled section: " + section.name
return None
def resolve_got(addr, got_section, exe):
# is GOT always populated by .dyn?? unclear. TODO
reloc_section = exe.elff.get_section_by_name(".rela.dyn")
if not reloc_section:
reloc_section = exe.elff.get_section_by_name(".rel.dyn")
if not reloc_section:
return None
sym = sym_from_reloc_section(exe, addr, reloc_section)
if sym:
sym.name = demangle(sym.name) + " (.got)"
return sym
else:
print "not in reloc table"
return None
# given relocation address (the address into .got or .plt)
# and the relevant relocation section, get the symbol
def sym_from_reloc_section(exe, rela_addr, reloc_section):
symtab = exe.elff.get_section(reloc_section['sh_link'])
for reloc in reloc_section.iter_relocations():
if reloc["r_offset"] == rela_addr:
sym = symtab.get_symbol(reloc['r_info_sym'])
return sym
return None
| 2,755 |
tests/test_pipelines.py
|
ca-scribner/bundle-kubeflow
| 0 |
2026450
|
import inspect
from typing import Callable
import pytest
from kfp import Client
from .pipelines.cowsay import cowsay_pipeline
from .pipelines.jupyter import jupyter_pipeline
from .pipelines.katib import katib_pipeline
from .pipelines.mnist import mnist_pipeline
from .pipelines.object_detection import object_detection_pipeline
def get_params(func):
return {name: value.default for name, value in inspect.signature(func).parameters.items()}
@pytest.mark.parametrize(
'name,fn',
[
pytest.param(
'mnist',
mnist_pipeline,
marks=[pytest.mark.full, pytest.mark.lite, pytest.mark.edge],
),
pytest.param(
'cowsay',
cowsay_pipeline,
marks=[pytest.mark.full, pytest.mark.lite, pytest.mark.edge],
),
pytest.param(
'katib',
katib_pipeline,
marks=[pytest.mark.full],
),
pytest.param(
'jupyter',
jupyter_pipeline,
marks=[pytest.mark.full, pytest.mark.lite],
),
pytest.param(
'object_detection',
object_detection_pipeline,
marks=pytest.mark.gpu,
),
],
)
def test_pipelines(name: str, fn: Callable):
"""Runs each pipeline that it's been parameterized for, and waits for it to succeed."""
client = Client('127.0.0.1:8888')
run = client.create_run_from_pipeline_func(fn, arguments=get_params(fn))
completed = client.wait_for_run_completion(run.run_id, timeout=3600)
status = completed.to_dict()['run']['status']
assert status == 'Succeeded', f'Pipeline {name} status is {status}'
| 1,670 |
debug_toolbar_multilang/views.py
|
Matt3o12/django-debug-toolbar-multilang
| 1 |
2026488
|
from django.conf import settings
from django.http.response import HttpResponseRedirect
from django.utils.http import is_safe_url
from django.utils.translation import check_for_language
try:
from django.utils.translation import LANGUAGE_SESSION_KEY
except ImportError:
LANGUAGE_SESSION_KEY = "django_language"
def get_next_url(request):
"""
Returns the next url field (in our case, it can only be
HTTP_REFERER, so we don't care about the next parameter).
If the URL is not safe, it will return '/'.
:param request: HttpRequest
:return: str
"""
next_url = request.META.get('HTTP_REFERER')
if not is_safe_url(url=next_url, host=request.get_host()):
next_url = '/'
return next_url
def _set_key(container, key, attribute):
"""
Sets the value of `settings.attribute` to container[key] if value is in
`django.utils.settings`.
:param container: dict
:param key: str
:param attribute: str
:return: None
"""
value = getattr(settings, attribute, None)
if value:
container[key] = value
def change_language(request):
"""
This is a modified version of i18n's version of set_language which
supports GET requests as well.
Original description:
Redirect to a given url while setting the chosen language in the
session or cookie. The url and the language code need to be
specified in the request parameters.
"""
response = HttpResponseRedirect(get_next_url(request))
lang_code = request.POST.get('language', request.GET.get("language", None))
if lang_code and check_for_language(lang_code):
if hasattr(request, 'session'):
request.session[LANGUAGE_SESSION_KEY] = lang_code
else:
cookieKwargs = {}
_set_key(cookieKwargs, "max_age", "LANGUAGE_COOKIE_AGE")
_set_key(cookieKwargs, "path", "LANGUAGE_COOKIE_PATH")
_set_key(cookieKwargs, "domain", "LANGUAGE_COOKIE_DOMAIN")
response.set_cookie(
settings.LANGUAGE_COOKIE_NAME, lang_code, **cookieKwargs
)
return response
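# A minimal wiring sketch (an assumption, not part of this module): exposing
# change_language in a project's urls.py so it can also be reached via GET.
# The URL pattern and name below are illustrative only.
#
#     from django.conf.urls import url
#     from debug_toolbar_multilang.views import change_language
#
#     urlpatterns = [
#         url(r'^change-language/$', change_language, name='change_language'),
#     ]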
| 2,128 |
authentication/models.py
|
eshiofune/brutus
| 0 |
2026533
|
from django.contrib.auth.models import User
from django.db import models
from authentication.managers import PersonManager
class Person(User):
objects = PersonManager()
class Meta:
proxy = True
| 213 |
src/resources/const.py
|
atracordis/tweets_startup_funding_extractor
| 0 |
2025825
|
import string
path_to_driver = 'C:/Users/Thrall/chromedriver/chromedriver.exe'
path_to_raw = "../data/raw_data/scraped_tweets.csv"
path_to_parsed = "../data/parsed_data/scraped_tweets.csv"
path_to_edited = "../data/edited_data/scraped_tweets.csv"
path_to_staging = "../data/staging_data/scraped_tweets.csv"
letters = {"series {}".format(letter): number for letter, number in zip(string.ascii_lowercase, range(1, 28))}
series_converter = {
"seed": 0,
"mezzanine": 27,
"ipo": 28,
"public": 29
}
series_converter.update(letters)
inv_series_converter = {v: k for k, v in series_converter.items()}
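# For example, with the mappings above:
#   series_converter["seed"] == 0
#   series_converter["series a"] == 1, ..., series_converter["series z"] == 26
#   series_converter["ipo"] == 28
#   inv_series_converter[1] == "series a"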
units_converter = {
"billion": 1000000000,
"b": 1000000000,
"million": 1000000,
"m": 1000000,
}
list_cols_parsed = ["date", "user", "parsed_data"]
list_cols_edited = ["date", "user", "edited_data"]
list_cols_staged = ["date", "user", "date_scraped", "company_name", "series", "raised_funds", "investors", "date"]
| 980 |
apriltags2_ros/src/publish_detections_in_local_frame.py
|
selcukercan/apriltag2_ros
| 0 |
2026053
|
#!/usr/bin/env python
import rospy
import rosbag
import numpy as np
from threading import Lock
from shutil import copy
from std_msgs.msg import Bool
from apriltags2_ros.msg import AprilTagDetectionArray
from apriltags2_ros.msg import VehiclePoseEuler
from apriltags2_ros_post_process.rotation_utils import *
from apriltags2_ros_post_process.time_sync_utils import *
class ToLocalPose:
def __init__(self):
"""
listens to pose estimation returned by apriltag2_ros node and converts it
into robot pose expressed in the global frame
"""
host_package = rospy.get_namespace() # as defined by <group> in launch file
self.node_name = 'publish_detections_in_local_frame' # node name , as defined in launch file
host_package_node = host_package + self.node_name
self.veh = host_package.split('/')[1]
# initialize the node
rospy.init_node('publish_detections_in_local_frame_node', anonymous=False)
# Parameters
        # determine whether we work synchronously or asynchronously; asynchronous is the default
        # mode of operation. Synchronous operation is beneficial when post-processing recorded
        # experiment data, e.g. when only the compressed image is available from the experiment and we want to
        # pass each image through a localization pipeline (compressed_image -> decoder -> rectification -> apriltags_detection -> to_local_pose)
        # to extract the pose in the world frame
self.synchronous_mode = rospy.get_param(param_name="/operation_mode")
self.total_msg_count = rospy.get_param(param_name="/" + self.veh + "/buffer_node/message_count")
rospy.logwarn("TOTAL_MSG_COUNT: {}".format(self.total_msg_count))
# Publisher
self.pub_topic_image_request = "/" + self.veh + "/" + self.node_name + "/" + "image_requested"
self.pub_image_request = rospy.Publisher(self.pub_topic_image_request, Bool, queue_size=1)
self.pub_topic_name = host_package_node + '/tag_detections_local_frame'
self.pub_detection_in_robot_frame = rospy.Publisher(self.pub_topic_name ,VehiclePoseEuler,queue_size=1)
# Subscriber
sub_topic_name = '/' + self.veh + '/tag_detections'
self.sub_img = rospy.Subscriber(sub_topic_name, AprilTagDetectionArray, self.cbDetection)
if self.synchronous_mode:
# get the input rosbags, and name of the output bag we wish the create
input_bag = rospy.get_param(param_name= host_package_node + "/input_rosbag")
self.output_bag = rospy.get_param(param_name= host_package_node + "/output_rosbag")
# wrap bag file operations with a lock as rospy api is not threat-safe.
self.lock = Lock()
self.lock.acquire()
copy(input_bag, self.output_bag)
self.lock.release()
self.numb_written_images = 0
self.wrote_all_images = False
else:
rospy.logwarn('INVALID MODE OF OPERATION in publish_detections_in_local_frame')
def setupParam(self,param_name,default_value):
value = rospy.get_param(param_name,default_value)
        rospy.set_param(param_name, value)  # Write to parameter server for transparency
rospy.loginfo("[%s] %s = %s " %(self.node_name,param_name,value))
return value
def cbDetection(self,msg):
        if (len(msg.detections) > 0):  # non-empty detection message
# unpack the position and orientation returned by apriltags2 ros
t_msg = msg.detections[0].pose.pose.pose.position
q_msg = msg.detections[0].pose.pose.pose.orientation
# convert the message content into a numpy array as robot_pose_in_world_frame requires so.
t = np.array([t_msg.x, t_msg.y, t_msg.z])
q = np.array([q_msg.x, q_msg.y, q_msg.z, q_msg.w])
# express relative rotation of the robot wrt the global frame.
veh_R_world, veh_t_world = robot_pose_in_word_frame(q,t)
veh_feaXYZ_world = rotation_matrix_to_euler(veh_R_world)
# convert from numpy float to standart python float to be written into the message
veh_t_world = veh_t_world.tolist()
veh_feaXYZ_world = veh_feaXYZ_world.tolist()
# form message to publish
veh_pose_euler_msg = VehiclePoseEuler()
veh_pose_euler_msg.header.stamp = rospy.Time.now()
# position
veh_pose_euler_msg.posx = veh_t_world[0]
veh_pose_euler_msg.posy = veh_t_world[1]
veh_pose_euler_msg.posz = veh_t_world[2]
# orientation
veh_pose_euler_msg.rotx = veh_feaXYZ_world[0]
veh_pose_euler_msg.roty = veh_feaXYZ_world[1]
veh_pose_euler_msg.rotz = veh_feaXYZ_world[2]
# finally publish the message
self.pub_detection_in_robot_frame.publish(veh_pose_euler_msg)
if self.synchronous_mode:
# save the message to a bag file
self.lock.acquire()
output_rosbag = rosbag.Bag(self.output_bag, 'a') # open bag to write
output_rosbag.write(self.pub_topic_name, veh_pose_euler_msg)
output_rosbag.close()
self.lock.release()
rospy.loginfo("[{}] wrote image {}".format(self.node_name, self.numb_written_images))
self.numb_written_images += 1
# request a new image from "buffer.py"
req_msg = Bool(True)
self.pub_image_request.publish(req_msg)
if self.numb_written_images == self.total_msg_count - 1:
time_sync(self.output_bag)
else:
rospy.loginfo("[{}] empty apriltag detection recieved publishing with all entries 0".format(self.node_name, self.numb_written_images))
# form message to publish
veh_pose_euler_msg = VehiclePoseEuler()
veh_pose_euler_msg.header.stamp = rospy.Time.now()
# position
veh_pose_euler_msg.posx = 0
veh_pose_euler_msg.posy = 0
veh_pose_euler_msg.posz = 0
# orientation
veh_pose_euler_msg.rotx = 0
veh_pose_euler_msg.roty = 0
veh_pose_euler_msg.rotz = 0
# finally publish the message
self.pub_detection_in_robot_frame.publish(veh_pose_euler_msg)
if __name__ == '__main__':
to_local_pose = ToLocalPose()
rospy.spin()
| 6,550 |
geopy/geocoders/mapquest.py
|
navidata/geopy
| 0 |
2025940
|
"""
:class:`.MapQuest` geocoder.
"""
from geopy.compat import urlencode
from geopy.geocoders.base import (
Geocoder,
DEFAULT_FORMAT_STRING,
DEFAULT_TIMEOUT,
DEFAULT_SCHEME
)
from geopy.location import Location
from geopy.util import logger, join_filter
from geopy import exc
__all__ = ("MapQuest", )
class MapQuest(Geocoder): # pylint: disable=W0223
"""
MapQuest geocoder, documentation at:
http://www.mapquestapi.com/geocoding/
"""
def __init__(
self,
api_key,
format_string=DEFAULT_FORMAT_STRING,
scheme=DEFAULT_SCHEME,
timeout=DEFAULT_TIMEOUT,
proxies=None,
): # pylint: disable=R0913
"""
Initialize a MapQuest geocoder with address information and
MapQuest API key.
:param string api_key: Key provided by MapQuest.
:param string format_string: String containing '%s' where the
string to geocode should be interpolated before querying the
geocoder. For example: '%s, Mountain View, CA'. The default
is just '%s'.
:param string scheme: Use 'https' or 'http' as the API URL's scheme.
Default is https. Note that SSL connections' certificates are not
verified.
.. versionadded:: 0.97
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception.
.. versionadded:: 0.97
:param dict proxies: If specified, routes this geocoder's requests
through the specified proxy. E.g., {"https": "192.0.2.0"}. For
more information, see documentation on
:class:`urllib2.ProxyHandler`.
"""
super(MapQuest, self).__init__(format_string, scheme, timeout, proxies)
self.api_key = api_key
self.api = (
"%s://www.mapquestapi.com/geocoding/v1" % self.scheme
)
def geocode(self, query, exactly_one=True, timeout=None): # pylint: disable=W0221
"""
Geocode a location query.
:param string query: The address or query you wish to geocode.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
.. versionadded:: 0.97
"""
params = {
'location' : self.format_string % query
}
if exactly_one:
params['maxResults'] = 1
# don't urlencode MapQuest API keys
url = "?".join((
self.api + '/address',
"&".join(("=".join(('key', self.api_key)), urlencode(params)))
))
logger.debug("%s.geocode: %s", self.__class__.__name__, url)
return self._parse_json(
self._call_geocoder(url, timeout=timeout),
exactly_one
)
def reverse(self, query, exactly_one=True, timeout=None):
"""
Reverse geocode a point.
.. versionadded:: 1.4.0
:param query: The coordinates for which you wish to obtain the
closest human-readable addresses.
:type query: :class:`geopy.point.Point`, list or tuple of (latitude,
longitude), or string as "%(latitude)s, %(longitude)s".
:param bool exactly_one: Return one result, or a list?
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
"""
point = self._coerce_point_to_string(query)
# don't urlencode MapQuest API keys
url = "%s/reverse?key=%s&location=%s" % (
self.api, self.api_key, point)
logger.debug("%s.reverse: %s", self.__class__.__name__, url)
return self._parse_json(
self._call_geocoder(url, timeout=timeout),
exactly_one
)
def _parse_json(self, resources, exactly_one=True):
"""
Parse display name, latitude, and longitude from an JSON response.
"""
if resources.get('info').get('statuscode') == 403:
raise exc.GeocoderAuthenticationFailure()
resources = resources.get('results')[0].get('locations', [])
if not len(resources):
return None
def parse_resource(resource):
"""
Parse each record.
"""
city = resource['adminArea5']
county = resource['adminArea4']
state = resource['adminArea3']
country = resource['adminArea1']
latLng = resource['latLng']
latitude, longitude = latLng.get('lat'), latLng.get('lng')
location = join_filter(", ", [city, county, state, country])
if latitude and longitude:
latitude = float(latitude)
longitude = float(longitude)
return Location(location, (latitude, longitude), resource)
if exactly_one:
return parse_resource(resources[0])
else:
return [parse_resource(resource) for resource in resources]
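# A minimal usage sketch (the API key and query below are placeholders, not part of this module):
#
#     geocoder = MapQuest(api_key="YOUR_MAPQUEST_KEY")
#     location = geocoder.geocode("175 5th Avenue NYC")
#     if location:
#         print(location.latitude, location.longitude)
#     print(geocoder.reverse("40.741, -73.989").address)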
| 5,574 |
day11.py
|
pmrowla/aoc2020
| 0 |
2026472
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Advent of Code 2020 day 11 module."""
from itertools import product
def empty(grid, pos):
x, y = pos
return grid[y][x] == "L"
def occupied(grid, pos):
x, y = pos
return grid[y][x] == "#"
DELTAS = {
"nw": (-1, -1),
"n": (0, -1),
"ne": (1, -1),
"e": (1, 0),
"se": (1, 1),
"s": (0, 1),
"sw": (-1, 1),
"w": (-1, 0),
}
def adj(grid, pos, immediate=True):
x, y = pos
for x_delta, y_delta in DELTAS.values():
i = 1
while True:
x1 = x + i * x_delta
y1 = y + i * y_delta
if x1 < 0 or x1 >= len(grid[y]) or y1 < 0 or y1 >= len(grid):
break
if immediate:
yield x1, y1
break
if grid[y1][x1] != ".":
yield x1, y1
break
i += 1
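# For example (illustrative): with immediate=True, adj() yields at most the 8 neighbouring
# cells; with immediate=False, each direction is walked past '.' floor tiles until the first
# seat or the grid edge, which is the part-two "first visible seat" rule.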
def step(grid, limit=4, immediate=True):
new_grid = []
for y in range(len(grid)):
new_row = []
for x in range(len(grid[y])):
if empty(grid, (x, y)) and not any(
occupied(grid, pos)
for pos in adj(grid, (x, y), immediate=immediate)
):
new_row.append("#")
elif occupied(grid, (x, y)) and sum(
occupied(grid, pos)
for pos in adj(grid, (x, y), immediate=immediate)
) >= limit:
new_row.append("L")
else:
new_row.append(grid[y][x])
new_grid.append(new_row)
return new_grid
def count_occupied(puzzle_input, **kwargs):
grid = [list(row) for row in puzzle_input]
next_grid = None
while True:
next_grid = step(grid, **kwargs)
if next_grid == grid:
break
grid = next_grid
return sum((occupied(grid, pos) for pos in product(range(len(grid[0])), range(len(grid)))))
def process(puzzle_input, verbose=False):
p1 = count_occupied(puzzle_input)
p2 = count_occupied(puzzle_input, limit=5, immediate=False)
return p1, p2
def main():
"""Main entry point."""
import argparse
import fileinput
parser = argparse.ArgumentParser()
parser.add_argument('infile', help='input file to read ("-" for stdin)')
parser.add_argument('-v', '--verbose', '-d', '--debug',
action='store_true', dest='verbose', help='verbose output')
args = parser.parse_args()
try:
puzzle_input = [line.strip() for line in fileinput.input(args.infile) if line.strip()]
p1, p2 = process(puzzle_input, verbose=args.verbose)
print(f'Part one: {p1}')
print(f'Part two: {p2}')
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
| 2,766 |
tests/components/sleepiq/test_init.py
|
pcaston/core
| 1 |
2025964
|
"""The tests for the SleepIQ component."""
from unittest.mock import MagicMock, patch
from openpeerpower import setup
import openpeerpower.components.sleepiq as sleepiq
from tests.common import load_fixture
CONFIG = {"sleepiq": {"username": "foo", "password": "<PASSWORD>"}}
def mock_responses(mock, single=False):
"""Mock responses for SleepIQ."""
base_url = "https://prod-api.sleepiq.sleepnumber.com/rest/"
if single:
suffix = "-single"
else:
suffix = ""
mock.put(base_url + "login", text=load_fixture("sleepiq-login.json"))
mock.get(base_url + "bed?_k=0987", text=load_fixture(f"sleepiq-bed{suffix}.json"))
mock.get(base_url + "sleeper?_k=0987", text=load_fixture("sleepiq-sleeper.json"))
mock.get(
base_url + "bed/familyStatus?_k=0987",
text=load_fixture(f"sleepiq-familystatus{suffix}.json"),
)
async def test_setup(opp, requests_mock):
"""Test the setup."""
mock_responses(requests_mock)
    # We're mocking the load_platform discoveries or else the platforms
    # will be set up during tear down when blocking till done, but the mocks
    # are no longer active.
with patch("openpeerpower.helpers.discovery.load_platform", MagicMock()):
assert sleepiq.setup(opp, CONFIG)
async def test_setup_login_failed(opp, requests_mock):
"""Test the setup if a bad username or password is given."""
mock_responses(requests_mock)
requests_mock.put(
"https://prod-api.sleepiq.sleepnumber.com/rest/login",
status_code=401,
json=load_fixture("sleepiq-login-failed.json"),
)
response = sleepiq.setup(opp, CONFIG)
assert not response
async def test_setup_component_no_login(opp):
"""Test the setup when no login is configured."""
conf = CONFIG.copy()
del conf["sleepiq"]["username"]
assert not await setup.async_setup_component(opp, sleepiq.DOMAIN, conf)
async def test_setup_component_no_password(opp):
"""Test the setup when no password is configured."""
conf = CONFIG.copy()
del conf["sleepiq"]["password"]
assert not await setup.async_setup_component(opp, sleepiq.DOMAIN, conf)
| 2,155 |
python2/tests/test_raygunprovider.py
|
GreatFruitOmsk/raygun4py
| 0 |
2025684
|
import unittest, sys
from raygun4py import raygunprovider
class TestRaygunSender(unittest.TestCase):
def setUp(self):
self.sender = raygunprovider.RaygunSender('invalidapikey')
self.handler = raygunprovider.RaygunHandler('testkey', 'v1.0')
def test_apikey(self):
self.assertEqual(self.sender.apiKey, 'invalidapikey')
def test_handler_apikey(self):
self.assertEqual(self.handler.sender.apiKey, 'testkey')
def test_handler_version(self):
self.assertEqual(self.handler.version, 'v1.0')
def test_sending_403_with_invalid_key(self):
try:
raise StandardError('test')
except Exception as e:
info = sys.exc_info()
http_result = self.sender.send(info[0], info[1], info[2])
self.assertEqual(http_result[0], 403)
def main():
unittest.main()
if __name__ == '__main__':
main()
| 927 |
examples/example_server.py
|
Vincent0700/tcplite
| 2 |
2026550
|
from tcplite import TCPServer
if __name__ == '__main__':
server = TCPServer(port=10086)
server.start()
| 112 |
25-class-metaprog/tinyenums/microenum.py
|
leorochael/example-code-2e
| 0 |
2024023
|
# This is an implementation of an idea by <NAME> (@gwidion)
# shared privately with me, with permission to use in Fluent Python 2e.
"""
Testing ``AutoFillDict``::
>>> adict = AutoFillDict()
>>> len(adict)
0
>>> adict['first']
0
>>> adict
{'first': 0}
>>> adict['second']
1
>>> adict['third']
2
>>> len(adict)
3
>>> adict
{'first': 0, 'second': 1, 'third': 2}
>>> adict['__magic__']
Traceback (most recent call last):
...
KeyError: '__magic__'
Testing ``MicroEnum``::
>>> class Flavor(MicroEnum):
... cocoa
... coconut
... vanilla
>>> Flavor.cocoa, Flavor.vanilla
(0, 2)
>>> Flavor[1]
'coconut'
"""
class AutoFillDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__next_value = 0
def __missing__(self, key):
if key.startswith('__') and key.endswith('__'):
raise KeyError(key)
self[key] = value = self.__next_value
self.__next_value += 1
return value
class MicroEnumMeta(type):
def __prepare__(name, bases, **kwargs):
return AutoFillDict()
class MicroEnum(metaclass=MicroEnumMeta):
def __class_getitem__(cls, key):
for k, v in cls.__dict__.items():
if v == key:
return k
raise KeyError(key)
| 1,388 |
setup.py
|
shunjuu/Akari
| 0 |
2026142
|
from setuptools import setup
setup(
name='akari',
url='https://github.com/shunjuu/Akari',
author='Kyrielight',
packages=['akari'],
install_requires=[
'ayumi @ git+https://github.com/shunjuu/Ayumi',
'requests'
],
version='0.1',
license='MIT',
description='Jikan.moe Userlist Wrapper.'
)
| 338 |
localstack/plugins.py
|
suryatmodulus/localstack
| 1 |
2026499
|
import logging
from localstack import config
from localstack.runtime import hooks
LOG = logging.getLogger(__name__)
@hooks.configure_localstack_container()
def configure_edge_port(container):
ports = [config.EDGE_PORT, config.EDGE_PORT_HTTP]
LOG.info("configuring container with edge ports: %s", ports)
for port in ports:
if port:
container.ports.add(port)
| 393 |
utils.py
|
salesforce/CoST
| 16 |
2026362
|
import os
import numpy as np
import pickle
import torch
import random
from datetime import datetime
import torch.nn as nn
def pkl_save(name, var):
with open(name, 'wb') as f:
pickle.dump(var, f)
def pkl_load(name):
with open(name, 'rb') as f:
return pickle.load(f)
def torch_pad_nan(arr, left=0, right=0, dim=0):
if left > 0:
padshape = list(arr.shape)
padshape[dim] = left
arr = torch.cat((torch.full(padshape, np.nan), arr), dim=dim)
if right > 0:
padshape = list(arr.shape)
padshape[dim] = right
arr = torch.cat((arr, torch.full(padshape, np.nan)), dim=dim)
return arr
def pad_nan_to_target(array, target_length, axis=0, both_side=False):
assert array.dtype in [np.float16, np.float32, np.float64]
pad_size = target_length - array.shape[axis]
if pad_size <= 0:
return array
npad = [(0, 0)] * array.ndim
if both_side:
npad[axis] = (pad_size // 2, pad_size - pad_size//2)
else:
npad[axis] = (0, pad_size)
return np.pad(array, pad_width=npad, mode='constant', constant_values=np.nan)
def split_with_nan(x, sections, axis=0):
assert x.dtype in [np.float16, np.float32, np.float64]
arrs = np.array_split(x, sections, axis=axis)
target_length = arrs[0].shape[axis]
for i in range(len(arrs)):
arrs[i] = pad_nan_to_target(arrs[i], target_length, axis=axis)
return arrs
def take_per_row(A, indx, num_elem):
    # for each row i, take the slice A[i, indx[i] : indx[i] + num_elem] (batched sliding crop)
all_indx = indx[:,None] + np.arange(num_elem)
return A[torch.arange(all_indx.shape[0])[:,None], all_indx]
def centerize_vary_length_series(x):
    # shift each series so its leading/trailing all-NaN padding is balanced (valid segment centered in time)
prefix_zeros = np.argmax(~np.isnan(x).all(axis=-1), axis=1)
suffix_zeros = np.argmax(~np.isnan(x[:, ::-1]).all(axis=-1), axis=1)
offset = (prefix_zeros + suffix_zeros) // 2 - prefix_zeros
rows, column_indices = np.ogrid[:x.shape[0], :x.shape[1]]
offset[offset < 0] += x.shape[1]
column_indices = column_indices - offset[:, np.newaxis]
return x[rows, column_indices]
def data_dropout(arr, p):
B, T = arr.shape[0], arr.shape[1]
    mask = np.full(B*T, False, dtype=bool)
ele_sel = np.random.choice(
B*T,
size=int(B*T*p),
replace=False
)
mask[ele_sel] = True
res = arr.copy()
res[mask.reshape(B, T)] = np.nan
return res
def name_with_datetime(prefix='default'):
now = datetime.now()
return prefix + '_' + now.strftime("%Y%m%d_%H%M%S")
def init_dl_program(
device_name,
seed=None,
use_cudnn=True,
deterministic=False,
benchmark=False,
use_tf32=False,
max_threads=None
):
import torch
if max_threads is not None:
torch.set_num_threads(max_threads) # intraop
if torch.get_num_interop_threads() != max_threads:
torch.set_num_interop_threads(max_threads) # interop
try:
import mkl
except:
pass
else:
mkl.set_num_threads(max_threads)
if seed is not None:
random.seed(seed)
seed += 1
np.random.seed(seed)
seed += 1
torch.manual_seed(seed)
if isinstance(device_name, (str, int)):
device_name = [device_name]
devices = []
for t in reversed(device_name):
t_device = torch.device(t)
devices.append(t_device)
if t_device.type == 'cuda':
assert torch.cuda.is_available()
torch.cuda.set_device(t_device)
if seed is not None:
seed += 1
torch.cuda.manual_seed(seed)
devices.reverse()
torch.backends.cudnn.enabled = use_cudnn
torch.backends.cudnn.deterministic = deterministic
torch.backends.cudnn.benchmark = benchmark
if hasattr(torch.backends.cudnn, 'allow_tf32'):
torch.backends.cudnn.allow_tf32 = use_tf32
torch.backends.cuda.matmul.allow_tf32 = use_tf32
return devices if len(devices) > 1 else devices[0]
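# Typical usage (a sketch; the device string, seed and thread count are illustrative):
#
#     device = init_dl_program('cuda:0', seed=42, max_threads=8)
#     # or, CPU-only:
#     device = init_dl_program('cpu', seed=42)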
| 3,976 |
skmultilearn/problem_transform/__init__.py
|
chrysm/scikit-multilearn
| 0 |
2025575
|
"""
The :mod:`skmultilearn.problem_transform` module provides classifiers
that follow the problem transformation approaches to multi-label classification:
- :class:`BinaryRelevance` - treats each label as a separate single-class classification problem
- :class:`ClassifierChain` - treats each label as a part of a conditioned chain of single-class classification problems
- :class:`LabelPowerset` - treats each label combination as a separate class with one multi-class classification problem
"""
from .br import BinaryRelevance
from .cc import ClassifierChain
from .lp import LabelPowerset
__all__ = ["BinaryRelevance",
"ClassifierChain",
"LabelPowerset"]
| 688 |
test/unit/conftest.py
|
francesco-giordano/aws-parallelcluster-cookbook
| 44 |
2025846
|
"""
This module loads pytest fixtures and plugins needed by all tests.
It's very useful for fixtures that need to be shared among all tests.
"""
import pytest
@pytest.fixture()
def test_datadir(request, datadir):
"""
Inject the datadir with resources for the specific test function.
If the test function is declared in a class then datadir is ClassName/FunctionName
otherwise it is only FunctionName.
"""
function_name = request.function.__name__
if not request.cls:
return datadir / function_name
class_name = request.cls.__name__
return datadir / "{0}/{1}".format(class_name, function_name)
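# Example (illustrative): a test method TestFoo.test_bar that requests this fixture
# receives <datadir>/TestFoo/test_bar, so test resources can be read as:
#
#     def test_bar(self, test_datadir):
#         config = (test_datadir / "config.yaml").read_text()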
| 643 |
tests/keras2onnx_applications/nightly_build/test_inception_v4.py
|
pbeukema/tensorflow-onnx
| 1,473 |
2024878
|
# SPDX-License-Identifier: Apache-2.0
import os
import sys
import unittest
import mock_keras2onnx
import onnx
import numpy as np
from mock_keras2onnx.proto import keras
from keras.applications import VGG19
from os.path import dirname, abspath
sys.path.insert(0, os.path.join(dirname(abspath(__file__)), '../../keras2onnx_tests/'))
from test_utils import run_onnx_runtime, test_level_0
Activation = keras.layers.Activation
AveragePooling2D = keras.layers.AveragePooling2D
Add = keras.layers.Add
BatchNormalization = keras.layers.BatchNormalization
concatenate = keras.layers.concatenate
Convolution2D = keras.layers.Convolution2D
Dense = keras.layers.Dense
Dropout = keras.layers.Dropout
Embedding = keras.layers.Embedding
Flatten = keras.layers.Flatten
Input = keras.layers.Input
Lambda = keras.layers.Lambda
LeakyReLU = keras.layers.LeakyReLU
MaxPooling2D = keras.layers.MaxPooling2D
Multiply = keras.layers.Multiply
Reshape = keras.layers.Reshape
SeparableConv2D = keras.layers.SeparableConv2D
UpSampling2D = keras.layers.UpSampling2D
ZeroPadding2D = keras.layers.ZeroPadding2D
Sequential = keras.models.Sequential
Model = keras.models.Model
K = keras.backend
# Model from https://github.com/titu1994/Inception-v4
def conv_block(x, nb_filter, nb_row, nb_col, border_mode='same', subsample=(1, 1), bias=False):
channel_axis = -1
x = Convolution2D(nb_filter, (nb_row, nb_col), strides=subsample, padding=border_mode, use_bias=bias)(x)
x = BatchNormalization(axis=channel_axis)(x)
x = Activation('relu')(x)
return x
def inception_stem(input):
channel_axis = -1
    # Input Shape is 299 x 299 x 3 (tf) or 3 x 299 x 299 (th)
x = conv_block(input, 32, 3, 3, subsample=(2, 2), border_mode='valid')
x = conv_block(x, 32, 3, 3, border_mode='valid')
x = conv_block(x, 64, 3, 3)
x1 = MaxPooling2D((3, 3), strides=(2, 2), padding='valid')(x)
x2 = conv_block(x, 96, 3, 3, subsample=(2, 2), border_mode='valid')
x = concatenate([x1, x2], axis=channel_axis)
x1 = conv_block(x, 64, 1, 1)
x1 = conv_block(x1, 96, 3, 3, border_mode='valid')
x2 = conv_block(x, 64, 1, 1)
x2 = conv_block(x2, 64, 1, 7)
x2 = conv_block(x2, 64, 7, 1)
x2 = conv_block(x2, 96, 3, 3, border_mode='valid')
x = concatenate([x1, x2], axis=channel_axis)
x1 = conv_block(x, 192, 3, 3, subsample=(2, 2), border_mode='valid')
x2 = MaxPooling2D((3, 3), strides=(2, 2), padding='valid')(x)
x = concatenate([x1, x2], axis=channel_axis)
return x
def inception_A(input):
channel_axis = -1
a1 = conv_block(input, 96, 1, 1)
a2 = conv_block(input, 64, 1, 1)
a2 = conv_block(a2, 96, 3, 3)
a3 = conv_block(input, 64, 1, 1)
a3 = conv_block(a3, 96, 3, 3)
a3 = conv_block(a3, 96, 3, 3)
a4 = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(input)
a4 = conv_block(a4, 96, 1, 1)
m = concatenate([a1, a2, a3, a4], axis=channel_axis)
return m
def inception_B(input):
channel_axis = -1
b1 = conv_block(input, 384, 1, 1)
b2 = conv_block(input, 192, 1, 1)
b2 = conv_block(b2, 224, 1, 7)
b2 = conv_block(b2, 256, 7, 1)
b3 = conv_block(input, 192, 1, 1)
b3 = conv_block(b3, 192, 7, 1)
b3 = conv_block(b3, 224, 1, 7)
b3 = conv_block(b3, 224, 7, 1)
b3 = conv_block(b3, 256, 1, 7)
b4 = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(input)
b4 = conv_block(b4, 128, 1, 1)
m = concatenate([b1, b2, b3, b4], axis=channel_axis)
return m
def inception_C(input):
channel_axis = -1
c1 = conv_block(input, 256, 1, 1)
c2 = conv_block(input, 384, 1, 1)
c2_1 = conv_block(c2, 256, 1, 3)
c2_2 = conv_block(c2, 256, 3, 1)
c2 = concatenate([c2_1, c2_2], axis=channel_axis)
c3 = conv_block(input, 384, 1, 1)
c3 = conv_block(c3, 448, 3, 1)
c3 = conv_block(c3, 512, 1, 3)
c3_1 = conv_block(c3, 256, 1, 3)
c3_2 = conv_block(c3, 256, 3, 1)
c3 = concatenate([c3_1, c3_2], axis=channel_axis)
c4 = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(input)
c4 = conv_block(c4, 256, 1, 1)
m = concatenate([c1, c2, c3, c4], axis=channel_axis)
return m
def reduction_A(input):
channel_axis = -1
r1 = conv_block(input, 384, 3, 3, subsample=(2, 2), border_mode='valid')
r2 = conv_block(input, 192, 1, 1)
r2 = conv_block(r2, 224, 3, 3)
r2 = conv_block(r2, 256, 3, 3, subsample=(2, 2), border_mode='valid')
r3 = MaxPooling2D((3, 3), strides=(2, 2), padding='valid')(input)
m = concatenate([r1, r2, r3], axis=channel_axis)
return m
def reduction_B(input):
channel_axis = -1
r1 = conv_block(input, 192, 1, 1)
r1 = conv_block(r1, 192, 3, 3, subsample=(2, 2), border_mode='valid')
r2 = conv_block(input, 256, 1, 1)
r2 = conv_block(r2, 256, 1, 7)
r2 = conv_block(r2, 320, 7, 1)
r2 = conv_block(r2, 320, 3, 3, subsample=(2, 2), border_mode='valid')
r3 = MaxPooling2D((3, 3), strides=(2, 2), padding='valid')(input)
m = concatenate([r1, r2, r3], axis=channel_axis)
return m
def create_inception_v4(nb_classes=1001):
    '''
    Creates an Inception v4 network
    :param nb_classes: number of classes
    :return: Keras Model with 1 input and 1 output
    '''
init = Input((299, 299, 3))
# Input Shape is 299 x 299 x 3 (tf) or 3 x 299 x 299 (th)
x = inception_stem(init)
# 4 x Inception A
for i in range(4):
x = inception_A(x)
# Reduction A
x = reduction_A(x)
# 7 x Inception B
for i in range(7):
x = inception_B(x)
# Reduction B
x = reduction_B(x)
# 3 x Inception C
for i in range(3):
x = inception_C(x)
# Average Pooling
x = AveragePooling2D((8, 8))(x)
# Dropout
x = Dropout(0.8)(x)
x = Flatten()(x)
# Output
out = Dense(activation='softmax', units=nb_classes)(x)
model = Model(init, out, name='Inception-v4')
return model
# Model from https://github.com/titu1994/Inception-v4
class TestInceptionV4(unittest.TestCase):
def setUp(self):
self.model_files = []
def tearDown(self):
for fl in self.model_files:
os.remove(fl)
@unittest.skipIf(test_level_0,
"Test level 0 only.")
def test_inception_v4(self):
K.clear_session()
keras_model = create_inception_v4()
data = np.random.rand(2, 299, 299, 3).astype(np.float32)
expected = keras_model.predict(data)
onnx_model = mock_keras2onnx.convert_keras(keras_model, keras_model.name)
self.assertTrue(
run_onnx_runtime(onnx_model.graph.name, onnx_model, data, expected, self.model_files))
if __name__ == "__main__":
unittest.main()
| 6,729 |
train_cntk.py
|
rehakomoon/VRC_PhotoRotation_Estimation
| 1 |
2026259
|
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 1 20:47:27 2019
@author: rehakomoon
"""
from pathlib import Path
import random
import itertools
import numpy as np
from PIL import Image, ImageOps
import cntk as C
from tqdm import tqdm
dataset_train_dir = Path("C:/dataset/anotated_resized/")
dataset_test_dir = Path("E:/vrc_rotation/dataset/anotated_eval_resized/")
log_dir = Path("E:/vrc_rotation/log_cntk/")
logfile_path = Path("E:/vrc_rotation/log_cntk/log.txt")
log_dir.mkdir(exist_ok=True)
batch_size = 32
test_batch_size = batch_size // 8
num_epoch = 10000
initial_epoch = 0
learning_rate = 0.001
image_size = 480
def get_image_list(dataset_dir):
image_path_list = []
dataset_dir = Path(dataset_dir)
for user_dir in dataset_dir.iterdir():
image_path_list += [str(p.absolute()) for p in user_dir.glob('*.png')]
return image_path_list
train_image_path_list = get_image_list(dataset_train_dir)
test_image_path_list = get_image_list(dataset_test_dir)
x_pl = C.ops.input_variable((3, image_size, image_size), np.float32)
y_pl = C.ops.input_variable((2), np.float32)
def CNN(x):
with C.layers.default_options(init=C.initializer.glorot_uniform()):
x = C.layers.Convolution2D(filter_shape=(5,5), num_filters=16, activation=None)(x)
x = C.layers.BatchNormalization(map_rank=1)(x)
x = C.relu(x)
x = C.layers.MaxPooling(filter_shape=(2,2), strides=(2,2))(x)
x = C.layers.Convolution2D(filter_shape=(5,5), num_filters=16, activation=None)(x)
x = C.layers.BatchNormalization(map_rank=1)(x)
x = C.relu(x)
x = C.layers.MaxPooling(filter_shape=(2,2), strides=(2,2))(x)
x = C.layers.Convolution2D(filter_shape=(5,5), num_filters=64, activation=None)(x)
x = C.layers.BatchNormalization(map_rank=1)(x)
x = C.relu(x)
x = C.layers.MaxPooling(filter_shape=(2,2), strides=(2,2))(x)
x = C.layers.Dropout(0.3)(x)
x = C.layers.Convolution2D(filter_shape=(5,5), num_filters=64, activation=None)(x)
x = C.layers.BatchNormalization(map_rank=1)(x)
x = C.relu(x)
x = C.layers.MaxPooling(filter_shape=(2,2), strides=(2,2))(x)
x = C.layers.Dropout(0.3)(x)
x = C.layers.Convolution2D(filter_shape=(5,5), num_filters=256, activation=None)(x)
x = C.layers.BatchNormalization(map_rank=1)(x)
x = C.relu(x)
x = C.layers.MaxPooling(filter_shape=(2,2), strides=(2,2))(x)
x = C.layers.Dropout(0.3)(x)
x = C.layers.Convolution2D(filter_shape=(5,5), num_filters=256, activation=None)(x)
x = C.layers.BatchNormalization(map_rank=1)(x)
x = C.relu(x)
x = C.layers.MaxPooling(filter_shape=(2,2), strides=(2,2))(x)
x = C.layers.Dropout(0.3)(x)
x = C.layers.MaxPooling(filter_shape=(3,3), strides=(1,1))(x)
x = C.layers.Dense(256, activation=None)(x)
x = C.relu(x)
x = C.layers.Dropout(0.3)(x)
x = C.layers.Dense(256, activation=None)(x)
x = C.relu(x)
x = C.layers.Dropout(0.3)(x)
x = C.layers.Dense(2, activation=None)(x)
return x
model = CNN(x_pl)
lr_schedule = C.learners.learning_rate_schedule(learning_rate, unit=C.UnitType.sample)
optimizer = C.learners.sgd(model.parameters, lr=lr_schedule)
loss = C.losses.cross_entropy_with_softmax(model, y_pl)
acc = C.metrics.classification_error(model, y_pl)
trainer = C.Trainer(model, (loss, acc), optimizer)
model_epoch_list = [int(str(s)[-10:-4]) for s in log_dir.glob("model_*.dat")]
if (len(model_epoch_list) > 0):
latest_model_path = log_dir / f"model_{max(model_epoch_list):06}.dat"
print(f"load {latest_model_path}...")
state = trainer.restore_from_checkpoint(str(latest_model_path))
initial_epoch = max(model_epoch_list) + 1
for epoch in range(initial_epoch, num_epoch):
image_path_list = train_image_path_list
random.shuffle(image_path_list)
sum_loss = 0.0
sum_acc = 0.0
sum_seen = 0.0
my_bar = tqdm(range(0, len(image_path_list), batch_size), leave=False)
for i in my_bar:
batch_image_path_list = image_path_list[i:i+batch_size]
this_batch_size = len(batch_image_path_list)
rotate_angle = np.random.randint(0, 6, this_batch_size)
rotate_angle[rotate_angle > 3] = 0
flip_flag = np.random.randint(0, 2, this_batch_size)
images = (Image.open(p) for p in batch_image_path_list)
images = (ImageOps.mirror(im) if f else im for im, f in zip(images, flip_flag))
images = (im.rotate(k * 90) for im, k in zip(images, rotate_angle))
images = [np.asarray(im)[:,:,0:3] for im in images]
images = np.stack(images)
images = images.transpose(0, 3, 1, 2)
images = images.astype(np.float32) / 255.0
images = np.ascontiguousarray(images)
labels = (rotate_angle == 0)
labels = np.stack([labels, np.logical_not(labels)]).transpose()
labels = labels.astype(np.float32)
labels = np.ascontiguousarray(labels)
input_map = {x_pl: images, y_pl: labels}
_, outputs = trainer.train_minibatch(input_map, outputs=(loss, acc))
sum_loss += outputs[loss].sum()
sum_acc += len(batch_image_path_list) - outputs[acc].sum()
sum_seen += len(batch_image_path_list)
my_bar.set_description(f"loss: {sum_loss/sum_seen:0.6f}, acc: {sum_acc/sum_seen:0.6f}")
print(f'e: {epoch},\t loss: {sum_loss/sum_seen},\t acc: {sum_acc/sum_seen}')
with open(logfile_path, "a") as fout:
fout.write(f"t, {epoch}, {sum_loss/sum_seen}, {sum_acc/sum_seen}\n")
if epoch%10 == 0:
image_path_list = test_image_path_list
sum_loss = 0.0
sum_acc = 0.0
sum_seen = 0.0
my_bar = tqdm(range(0, len(image_path_list), test_batch_size), leave=False)
for i in my_bar:
batch_image_path_list = image_path_list[i:i+test_batch_size]
this_batch_size = len(batch_image_path_list)
rotate_angle = np.array([0, 1, 2, 3, 0, 1, 2, 3] * this_batch_size, dtype=np.int8)
flip_flag = np.array([0, 0, 0, 0, 1, 1, 1, 1] * this_batch_size, dtype=np.int8)
images = ([Image.open(p)]*8 for p in batch_image_path_list)
images = itertools.chain.from_iterable(images)
images = (ImageOps.mirror(im) if f else im for im, f in zip(images, flip_flag))
images = (im.rotate(k * 90) for im, k in zip(images, rotate_angle))
images = [np.asarray(im)[:,:,0:3] for im in images]
images = np.stack(images)
images = images.transpose(0, 3, 1, 2)
images = images.astype(np.float32) / 255.0
images = np.ascontiguousarray(images)
labels = (rotate_angle == 0)
labels = np.stack([labels, np.logical_not(labels)]).transpose()
labels = labels.astype(np.float32)
labels = np.ascontiguousarray(labels)
input_map = {x_pl: images, y_pl: labels}
batch_loss = loss.eval(input_map)
batch_acc = acc.eval(input_map)
sum_loss += batch_loss.sum()
sum_acc += len(batch_image_path_list) * 8 - batch_acc.sum()
sum_seen += len(batch_image_path_list) * 8
my_bar.set_description(f"loss: {sum_loss / sum_seen:0.6f}, acc: {sum_acc / sum_seen:0.6f}")
print(f'test e: {epoch},\t loss: {sum_loss/sum_seen},\t acc: {sum_acc/sum_seen}')
with open(logfile_path, "a") as fout:
fout.write(f"e, {epoch}, {sum_loss/sum_seen}, {sum_acc/sum_seen}\n")
if epoch%10 == 0:
model_save_path = log_dir / f"model_{epoch:06}.dat"
model_save_path_onnx = log_dir / f"model_{epoch:06}.onnx"
trainer.save_checkpoint(str(model_save_path))
model.save(str(model_save_path_onnx), format=C.ModelFormat.ONNX)
| 8,010 |
test/assert_interactive.py
|
ToolFramework/cppyy
| 84 |
2026299
|
from cppyy.interactive import *
# namespace at the global level
assert std
# cppyy functions
assert cppdef
assert include
try:
import __pypy__
# 'cppyy.gbl' bound to 'g'
assert g
assert g.std
except ImportError:
# full lazy lookup available
assert gInterpreter
| 283 |
script.py
|
Dann38/complex_back
| 0 |
2026300
|
import fnmatch
import getopt
import os
import shutil
import sys
from sys import argv
from Levenshtein import distance
import cv2
from my_lib import get_text_from_img, image_processing, similarity, read_img
def main(argv):
input_folder = ''
output_folder = ''
try:
opts, args = getopt.getopt(argv, "hi:o:", ["input_folder=", "output_folder="])
except getopt.GetoptError:
print('script.py -i <input folder> -o <output folder>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('script.py -i <input folder> -o <output folder>')
sys.exit()
elif opt in ("-i", "--i"):
input_folder = arg
elif opt in ("-o", "--o"):
output_folder = os.path.join(arg, 'images')
is_dir = os.path.isdir(input_folder)
if not is_dir:
raise Exception(input_folder, "- It is not a folder")
if os.path.exists(output_folder):
shutil.rmtree(output_folder)
os.makedirs(output_folder)
n = 0
total_similarity_before = 0
total_similarity_after = 0
total_levenshtein_before = 0
total_levenshtein_after = 0
total_size = 0
statistics_print = False
bad_image = "None"
bad_similarity = 2
bad_levenshtein = 2
delta_similary = 0
delta_levenshtein = 0
for root, directories, files in os.walk(input_folder):
for file_name in fnmatch.filter(files, "*.jpg"):
path = os.path.join(root, file_name)
name = os.path.splitext(os.path.split(path)[1])[0]
save_img_path = os.path.join(output_folder, name + ".jpeg")
save_text_path = os.path.join(output_folder, name + ".txt")
save_origin_text_path = os.path.join(output_folder, name + "-origin" + ".txt")
img_before = read_img(path)
img_after = image_processing(img_before)
text_before = get_text_from_img(img_before)
text_after = get_text_from_img(img_after)
gs_name = os.path.join(root, name + ".txt")
try:
with open(gs_name, 'r', encoding="UTF-8") as f:
text_gs = f.read()
similarity_before = similarity(text_gs, text_before)
similarity_after = similarity(text_gs, text_after)
total_similarity_before += similarity_before
total_similarity_after += similarity_after
size = len(text_gs)
total_size += size
levenshtein_before = distance(text_gs, text_before)
levenshtein_after = distance(text_gs, text_after)
total_levenshtein_before += levenshtein_before
total_levenshtein_after += levenshtein_after
print()
print(
f"{name}:\tSimilarity: \t"
f" Before: {100 * similarity_before :5.2f}%\t"
f" After: {100 * similarity_after :5.2f}%")
print(
f"{name}:\tLevenshtein:\t"
f" Before: {levenshtein_before / size :5.2f}\t"
f" After: {levenshtein_after / size:5.2f}")
print("=======================")
delta_similary = similarity_after-similarity_before
delta_levenshtein = (levenshtein_before - levenshtein_after) / size
print(f"delta s: {delta_similary:.3f} ({bad_similarity})")
print(f"delta l: {delta_levenshtein:.3f} ({bad_levenshtein})")
if bad_similarity > delta_similary:
bad_image_sim = name
bad_similarity = delta_similary
if bad_levenshtein > delta_levenshtein:
bad_image_lev = name
bad_levenshtein = delta_levenshtein
statistics_print = True
n += 1
except FileNotFoundError:
print("OK")
with open(save_text_path, 'w', encoding="UTF-8") as f:
f.write(text_after)
with open(save_origin_text_path, 'w', encoding="UTF-8") as f:
f.write(text_before)
cv2.imwrite(save_img_path, img_after)
if statistics_print:
lev_before = total_levenshtein_before / total_size
lev_after = total_levenshtein_after / total_size
lev_improvement_percent = (lev_before-lev_after)*100
print("Total Similarity ========================")
print(f"Before:\t {100 * total_similarity_before / n:5.2f} %")
print(f"After:\t {100 * total_similarity_after / n:5.2f} %")
print("Total Levenshtein =======================")
print(f"Before:\t {lev_before:5.2f}")
print(f"After:\t {lev_after:5.2f}")
print(f"Improvement Levenshtein: {lev_improvement_percent:5.2f}%")
print(f"Bad image Similarity:\t {bad_image_sim}\t (delta:{bad_similarity*100:5.2f} %)")
print(f"Bad image Levenshtein:\t {bad_image_lev}\t (delta:{bad_levenshtein*100:5.2f} %)")
if __name__ == '__main__':
main(argv[1:])
| 5,226 |
web/clockthree.py
|
rupello/ClockTHREEjr
| 1 |
2026203
|
import os
from string import lower,upper
import StringIO
import flask
from flask import Flask, render_template, abort, Response, request, current_app
import ttfquery
from flask_bootstrap import Bootstrap
import Simulate
import clockwords
import clockface
import wtfhelpers
def create_app():
"create a configures app instance"
app = Flask(__name__)
Bootstrap(app)
fontreg = ttfquery.ttffiles.Registry()
fontreg.scan('./fonts/')
app.config['wtfs'] = wtfhelpers.loadwtfsandfonts('./langs/',fontreg)
app.config['fontregistry'] = fontreg
return app
# the app
app = create_app()
def findwtf(style):
"find the path to the .wrf file"
for dirpath,dirnames,fnames in os.walk('./langs'):
for f in fnames:
name,ext = os.path.splitext(f)
if ext.lower()=='.wtf':
if style.lower()==name.lower():
return os.path.join(dirpath,f)
def default_font(style):
return current_app.config['wtfs'][style.lower()]['fonts'][0]
def findfontpath(style,fontname):
for fontpath in current_app.config['wtfs'][style.lower()]['fonts']:
if font_name_from_path(fontpath)==fontname:
return fontpath
return default_font(style)
@app.template_filter('fontname')
def font_name_from_path(path):
return os.path.basename(path).lower()
@app.route('/')
@app.route('/index')
def index():
return flask.redirect("/clock3jr/styles/", code=302)
@app.route('/clock3jr/<style>/clockface/')
def clockfaceimg(style):
wtfpath = findwtf(style)
if wtfpath is not None:
data = Simulate.readwtf(wtfpath)
fgcolor = request.args.get('fg', '#303030')
fontname = request.args.get('font')
try:
fontsize = int(request.args.get('fontsize','30'))
except ValueError:
fontsize = 30
img = clockface.drawclock(fontpath=findfontpath(style,fontname),
fontsize=fontsize,
fgcolor=fgcolor,
bgcolor=clockface.BLACK,
style=data['letters'],
case=lower,
drawLEDs=False)
io = StringIO.StringIO()
img.save(io, format='JPEG')
return Response(io.getvalue(), mimetype='image/jpeg')
else:
abort(404)
@app.route('/clock3jr/<style>/map/')
def map(style):
wtfpath = findwtf(style)
if wtfpath is not None:
data = Simulate.readwtf(wtfpath)
return clockwords.data2json(data)
else:
abort(404)
@app.route('/clock3jr/<style>/cells/')
def cells(style):
wtfpath = findwtf(style)
if wtfpath is not None:
fontname = request.args.get('font')
try:
fontsize = int(request.args.get('fontsize','30'))
except ValueError:
fontsize = 30
data = Simulate.readwtf(wtfpath)
return render_template('cells.html',
cells=clockface.build_cells(fontpath=findfontpath(style,fontname),
fontsize=fontsize,
style=data['letters'],
case=lower),
style=style,
fontname=fontname,
fontsize=fontsize)
else:
abort(404)
@app.route('/clock3jr/<style>/')
def clock3jr(style):
wtfpath = findwtf(style)
if wtfpath is not None:
fontname = request.args.get('font')
try:
fontsize = int(request.args.get('fontsize','30'))
except ValueError:
fontsize = 30
data = Simulate.readwtf(wtfpath)
return render_template('clock.html',
cells=clockface.build_cells(fontpath=findfontpath(style,fontname),
fontsize=fontsize,
style=data['letters'],
case=lower),
style=style,
fontname=fontname,
fontsize=fontsize)
else:
abort(404)
@app.route('/clock3jr/styles/')
def styles():
wtfs = current_app.config['wtfs']
return render_template('styles.html',styles=['foo','bar'],wtfs=wtfs)
@app.route('/clock3jr/stylesandfonts/')
def stylesandfonts():
fontsbystyle = {}
wtfs = current_app.config['wtfs']
for name,data in wtfs.items():
fontsbystyle[name]=[font_name_from_path(fp) for fp in wtfs[name]['fonts']]
return flask.jsonify(fontsbystyle)
#!flask/bin/python
if __name__ == '__main__':
app.run(debug = True)
| 4,898 |
gcf_http_cli.py
|
jasonlopez01/flask-cli-demo
| 0 |
2023464
|
import argparse
import importlib
import json
import os
import sys
from typing import Callable, Optional, Tuple
import flask
# Allow import from current working directory modules
sys.path.append(os.getcwd())
# Constants
CLI_VERSION = "0.0.1"
HTTP_METHODS = ["GET", "POST", "PUT", "DELETE"]
GCF_MODULE_PATH_ENV_VAR = "PD_FLASK_UTILS_GCF_PATH"
DEFAULT_GCF_MAIN_PATH = "main.main"
# Functions
def load_json(json_payload: str) -> Optional[dict]:
"""
Load either json string or file
:param json_payload:
:return:
"""
if json_payload is None:
return None
if isinstance(json_payload, dict):
return json_payload
if os.path.isfile(json_payload):
with open(json_payload, "r") as f:
return json.load(f)
else:
return json.loads(json_payload)
def import_main_gcf_entrypoint() -> Callable:
"""
Import the main function entrypoint from a python module deployed as a Cloud Function
Finds the import path based on an env variable "PD_FLASK_UTILS_GCF_PATH", defaults to "main.main"
:return: a function acting as the main entrypoint for a python Cloud Function
"""
gcf_main_import_path = os.environ.get(
GCF_MODULE_PATH_ENV_VAR, DEFAULT_GCF_MAIN_PATH
)
gcf_main_import_path_list = gcf_main_import_path.split(".")
main_module_name = ".".join(gcf_main_import_path_list[0:-1])
gcf_main_name = gcf_main_import_path_list[-1]
# Import main module with GCF entrypoint function
main_gcf = importlib.import_module(main_module_name)
return getattr(main_gcf, gcf_main_name)
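# For example (illustrative): with `export PD_FLASK_UTILS_GCF_PATH=my_module.handler`
# this resolves to importing `handler` from `my_module`, relative to the current
# working directory appended to sys.path above.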
def mock_gcf_flask_request(
gcf_main_func: Callable, http_method: str, endpoint: str, payload: Optional[dict]
) -> Tuple[int, str]:
"""
Make a mock request to an entrypoint function of a HTTP Triggered Cloud Function via the flask test client
:param gcf_main_func: a function acting as the main entrypoint for a python Cloud Function (HTTP Trigger)
:param http_method: HTTP Method to use as uppercase string (eg. GET, POST, etc.)
:param endpoint: endpoint of Flask App to call
:param payload: Dict to include as json body
:return: Tuple of mock HTTP Response Status Code and Data
"""
assert http_method in HTTP_METHODS
test_app = flask.Flask(__name__)
with test_app.test_request_context(endpoint, method=http_method, json=payload):
resp = gcf_main_func(flask.request)
return int(resp.status_code), resp.data.decode("utf-8")
def main():
# Import main function entrypoint
try:
gcf_entrypoint: Callable = import_main_gcf_entrypoint()
except Exception as e:
gcf_import_error = e
gcf_entrypoint = None
# Setup CLI
gcf_cli = argparse.ArgumentParser(
description=f"""
CLI wrapper around a Python function acting as the entrypoint to a Cloud Function (HTTP Trigger).
        Attempts to import the function with the current working directory as root.
Uses import path specified in env variable {GCF_MODULE_PATH_ENV_VAR},
with format of "module.function" (default set to "{DEFAULT_GCF_MAIN_PATH}")
"""
)
gcf_cli.add_argument(
"--http-method",
type=str,
default="POST",
help="HTTP Method to mock when calling a given endpoint",
choices=HTTP_METHODS,
)
gcf_cli.add_argument(
"--json",
type=str,
help="JSON formatted input to include in payload of request, or path to a JSON file",
)
gcf_cli.add_argument(
"--endpoint", type=str, help="Endpoint to call, defaults to '/'", default="/"
)
gcf_cli.version = CLI_VERSION
gcf_cli.add_argument("--version", action="version")
# Parse inputs
args = gcf_cli.parse_args()
endpoint: str = args.endpoint
http_method: str = args.http_method
json_payload: str = args.json
payload: Optional[dict] = load_json(json_payload)
if not gcf_entrypoint:
error_prefix = "ERROR: "
print(f"{error_prefix}{gcf_import_error}")
        print(
            f"{error_prefix}Attempt to import gcf entrypoint function from "
            f"{os.environ.get(GCF_MODULE_PATH_ENV_VAR, DEFAULT_GCF_MAIN_PATH)} failed."
        )
        print(f"{error_prefix}The import is attempted with the current working directory as root.")
print(
f"{error_prefix}Can set a different import path via env variable {GCF_MODULE_PATH_ENV_VAR} (ex. export {GCF_MODULE_PATH_ENV_VAR}=moduleA.my_gcf_main_func)"
)
sys.exit(gcf_import_error)
# Use flask test client to make mock request
status_code, resp_content = mock_gcf_flask_request(
gcf_main_func=gcf_entrypoint,
http_method=http_method,
endpoint=endpoint,
payload=payload,
)
# Exit with response status code and content
exit_value = 1
print("\n", "-" * 100)
if 200 <= status_code < 300:
print(
f"Finished successfully with mock status code {status_code}\n{resp_content}"
)
exit_value = 0
else:
print(
f"Endpoint command failed with mock status code {status_code}\n{resp_content}"
)
exit_value = resp_content
return sys.exit(exit_value)
if __name__ == "__main__":
main()
| 5,224 |
examples/acados_python/external_model/export_external_ode_model.py
|
mindThomas/acados
| 322 |
2026049
|
#
# Copyright 2019 <NAME>, <NAME>, <NAME>,
# <NAME>, <NAME>, <NAME>, <NAME>,
# <NAME>, <NAME>, <NAME>, <NAME>,
# <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
#
# This file is part of acados.
#
# The 2-Clause BSD License
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from acados_template import AcadosModel
from casadi import MX, external
import sys,os
from ctypes import *
def export_external_ode_model():
model_name = 'external_ode'
# Declare model variables
x = MX.sym('x', 2)
u = MX.sym('u', 1)
xDot = MX.sym('xDot', 2)
cdll.LoadLibrary('./test_external_lib/build/libexternal_ode_casadi.so')
f_ext = external('libexternal_ode_casadi', 'libexternal_ode_casadi.so', {'enable_fd': True})
f_expl = f_ext(x, u)
f_impl = xDot - f_expl
model = AcadosModel()
model.f_impl_expr = f_impl
model.f_expl_expr = f_expl
model.x = x
model.xdot = xDot
model.u = u
model.p =[]
model.name = model_name
return model
| 2,210 |
dohproxy.py
|
mjtooley/DoH_Tools
| 0 |
2026123
|
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import socket
from io import BytesIO
from urllib.parse import urlparse, parse_qs
from typing import Dict, List
import random
import requests
from dohjsonclient.client import DohJsonClient
import json
import dns.exception
import dns.message
from dns.message import Message
import dns.rcode
from dns import resolver, query, exception
from utils.utils import (
create_http_wire_response,
create_http_json_response,
)
class DNSResolverClient:
def __init__(self, name_server: str = "internal"):
self.name_server = name_server
def resolve(self, message: Message) -> Message:
maximum = 4
timeout = 0.4
        response_message = None
if self.name_server == 'internal':
self.name_server = resolver.get_default_resolver().nameservers[0]
done = False
tests = 0
while not done and tests < maximum:
try:
response_message = query.udp(message, self.name_server, timeout=timeout)
done = True
except exception.Timeout:
tests += 1
return response_message
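def _example_resolve():
    """Illustrative only (never called by the server): resolve an A record for
    example.com through the default system resolver via DNSResolverClient."""
    demo_query = dns.message.make_query("example.com", dns.rdatatype.A)
    return DNSResolverClient(name_server='internal').resolve(demo_query)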
def dns_query_from_body(body: bytes):
exc = b'Malformed DNS query'
try:
return dns.message.from_wire(body)
except Exception as e:
print(e)
def get_question(dnsq):
question = str(dnsq.question[0])
question = question.split()
question = question[0].rstrip('.')
return question
def resolve(dnsq):
question = get_question(dnsq)
#question = str(dnsq.question[0])
#question = question.split()
#question = question[0].rstrip('.')
dns_resolver = DNSResolverClient(name_server='internal')
dnsr = dns_resolver.resolve((dnsq))
# print("resolve:", dnsr)
return dnsr
TV_EVERYWHERE_HOSTS = {'www.nbc.com', 'www.cbs.com', 'www.espn.com'}
TV_EVERYWHERE_AUTH = 'sp.auth.<EMAIL>'
tv_everywhere_hosts = {} # init the outer dict
for tvh in TV_EVERYWHERE_HOSTS:
tv_everywhere_hosts[tvh] = {} # Init the inner list
last_tve = {}
tv_everywhere_clients = {}  # client IPs seen authenticating against the TVE auth host (ip -> ttl)
def tv_everywhere_host(qname):
for tvh in TV_EVERYWHERE_HOSTS:
if tvh in qname:
return tvh
return None
def check_tve(client_ip, dnsq):
q = get_question(dnsq)
tvh = tv_everywhere_host(q)
    if tvh is not None:
        # TO-DO: need to fix the code to log the number of unique IPs per TVH
        if client_ip not in tv_everywhere_hosts[tvh]:
            tv_everywhere_hosts[tvh][client_ip] = 0  # Record the client_ip as seen chatting with this TVH
        last_tve[client_ip] = tvh  # Store the last TVH seen for the IP
# Now check if it is a TV_EVERYWHERE_AUTH
if TV_EVERYWHERE_AUTH in q:
for h in TV_EVERYWHERE_HOSTS:
if client_ip in tv_everywhere_hosts[h]:
if tv_everywhere_hosts[h][client_ip] == 0:
tv_everywhere_hosts[h][client_ip] = 3600
print("added {} to tv_everywhere_client[{}]".format(h, client_ip))
elif tv_everywhere_hosts[h][client_ip] > 0:
print('--- Multiple Logins Detected for {} to {} -------\n'.format(client_ip, last_tve[client_ip]))
def add_tve_client(client_ip, dnsr):
# extract the TTL from the dns response
print("Add TVE client {}".format(client_ip))
ttl = 3600
if client_ip in tv_everywhere_clients:
print("we already learned this client")
else:
tv_everywhere_clients[client_ip] = ttl
print("added {} to tv_everywhere list".format(client_ip))
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
global tv_everywhere_clients
# print('do_GET')
url_path = self.path
print(url_path)
        params: Dict[str, List]
params = parse_qs(urlparse(url_path).query)
headers = self.headers
ua = self.headers['User-Agent']
accept = self.headers['Accept']
ct = self.headers['Content-Type']
#content_length = int(self.headers['Content-Length'])
client_ip = self.client_address[0]
if 'referer' in self.headers:
referrer = self.headers['referer']
# Send DoH Request to upstream DoH Resolver or DNS Resolver
client = DohJsonClient()
try:
print(params['name'], params['type'])
if 'name' in params:
qname = params['name'][0]
dnsq = dns.message.make_query(qname, dns.rdatatype.ANY)
dnsr = resolve(dnsq)
result = client.resolve_cloudflare({'name': params['name'][0], 'type': params['type'][0]})
check_tve(client_ip,dnsq)
self.send_response(200)
#self.send_header('content-type', 'application/dns-message')
#self.send_header('server', 'ncta-doh')
self.end_headers()
doh_resp = json.dumps(result)
# Need to encode the serialized JSON data
self.wfile.write(doh_resp.encode('utf-8'))
except Exception as e:
print(e)
def do_POST(self):
print("do_POST")
headers = self.headers
ua = self.headers['User-Agent']
accept = self.headers['Accept']
ct = self.headers['Content-Type']
client_ip = self.client_address[0]
#print("UA:", ua)
#print("Client:", client_ip)
if 'referer' in self.headers:
referrer = self.headers['referer']
content_length = int(self.headers['Content-Length'])
body = self.rfile.read(content_length)
url_path = self.path
# print("URL_Path:", url_path)
        params: Dict[str, List]
params = parse_qs(urlparse(url_path).query)
        dnsr = None
        try:
            dnsq = dns_query_from_body(body)
            dnsr = resolve(dnsq)
            check_tve(client_ip, dnsq)
            #print("dnsr:", dnsr.answer)
        except Exception as e:
            print(e)
if dnsr is None:
dnsr = dns.message.make_response(dnsq)
dnsr.set_rcode(dns.rcode.SERVFAIL)
# response_headers.append(('content-length', str(len(body))))
self.send_response(200)
self.send_header('content-type', 'application/dns-message')
self.send_header('server', 'ncta-doh')
self.end_headers()
body = dnsr.to_wire()
response = BytesIO()
response.write(body)
self.wfile.write(response.getvalue())
ADDRESS = '172.25.12.45'
PORT = 4443
httpd = HTTPServer((ADDRESS, PORT), SimpleHTTPRequestHandler)
try:
httpd.socket = ssl.wrap_socket(httpd.socket, certfile='mypemfile.pem', server_side=True)
except Exception as e:
print (e)
print("Starting DoH Server on {}:{}".format(ADDRESS,PORT))
httpd.serve_forever()
| 6,863 |
pypint/solvers/i_parallel_solver.py
|
DiMoser/PyPinT
| 0 |
2025495
|
# coding=utf-8
"""
.. moduleauthor: <NAME> <<EMAIL>>
"""
from pypint.communicators.i_communication_provider import ICommunicationProvider
from pypint.utilities import assert_named_argument
class IParallelSolver(object):
"""basic interface for parallel solvers
"""
def __init__(self, **kwargs):
"""
Parameters
----------
communicator : :py:class:`.ICommunicationProvider`
"""
assert_named_argument('communicator', kwargs, types=ICommunicationProvider, descriptor="Communicator",
checking_obj=self)
self._communicator = kwargs['communicator']
self._states = []
@property
def comm(self):
return self._communicator
| 737 |
tests/server-identity.py
|
movermeyer/tangelo
| 40 |
2026314
|
import nose
import requests
import fixture
@nose.with_setup(fixture.start_tangelo, fixture.stop_tangelo)
def test_server_identity():
response = requests.get(fixture.url("/"))
assert response.headers["server"] == ""
| 226 |
apps/alleria/migrations/0002_auto_20180329_1650.py
|
z4none/alleria
| 0 |
2026202
|
# Generated by Django 2.0.3 on 2018-03-29 08:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('alleria', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='dictionaryitem',
name='value',
),
migrations.AlterField(
model_name='dictionaryitem',
name='code',
field=models.CharField(max_length=50, unique=True, verbose_name='编码'),
),
migrations.AlterField(
model_name='dictionaryitem',
name='name',
field=models.CharField(max_length=20, verbose_name='名称'),
),
migrations.AlterField(
model_name='dictionaryitem',
name='order',
field=models.IntegerField(default=0, verbose_name='顺序'),
),
migrations.AlterField(
model_name='dictionaryitem',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='alleria.DictionaryType', verbose_name='类型'),
),
]
| 1,172 |
RedditDaily/Easy/01_input-output.py
|
santarini/python
| 3 |
2025939
|
name = input("What is your name? \n")
age = int(input("How old are you? \n"))
location = input("Where do you live? \n")
print("Your name is " + name + ", you are " + str(age) + " years old, you live in " + location)
| 220 |
fmlpy/preprocess/filters.py
|
crazywiden/fmlpy
| 3 |
2025713
|
import pandas as pd
import numpy as np
def CUMSUM_filter(price, thres):
"""
S_+(t) = max{0, S_+(t-1) + y(t) - y(t-1)}
S_-(t) = min{0, S_-(t-1) + y(t) - y(t-1)}
S(t) = max{S_+(t), -S_-(t)}
sample when S(t) > thres
@parameters:
price -- 1d vector(list or np.ndarray)
thres -- integer or vector
thres is a vector means different threshold at different stages is allowed
@returns:
CUMSUM_idx -- 1d vector
each element is the starting index of each bar
"""
CUMSUM_idx = []
price_diff = np.diff(price)
S_pos, S_neg = 0, 0
for i in range(1,len(price)):
S_pos = max(0, S_pos + price_diff[i-1])
S_neg = min(0, S_neg + price_diff[i-1])
if max(S_pos, -S_neg) >= thres:
CUMSUM_idx.append(i)
S_pos, S_neg = 0, 0
return np.array(CUMSUM_idx)
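if __name__ == "__main__":
    # Minimal usage sketch on synthetic data (the threshold value is
    # illustrative): sample a random-walk price series whenever the cumulative
    # up- or down-move since the last sample exceeds 0.5.
    np.random.seed(0)
    demo_price = 100 + np.cumsum(np.random.randn(1000) * 0.1)
    bar_starts = CUMSUM_filter(demo_price, thres=0.5)
    print("sampled %d bars out of %d ticks" % (len(bar_starts), len(demo_price)))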
| 876 |
task_inventory/order_31_to_60/order_34_tkinter_usage/hello_world/hello_world.py
|
flyingSprite/spinelle
| 1 |
2026566
|
import tkinter as tk
root = tk.Tk()
label = tk.Label(root, text='Hello World!', padx=15, pady=15)
label.pack()
root.mainloop()
| 128 |
collar_metrics/__init__.py
|
robertdfrench/collar-metrics
| 1 |
2026253
|
import flask
def bootstrap(app, barks):
@app.route("/collar/<collar_id>/barks", methods=['GET'])
def list_barks(collar_id):
return flask.jsonify(data=barks.by_collar(collar_id))
@app.route("/collar/<collar_id>/barks/new", methods=['POST'])
def add_barks(collar_id):
for bark in flask.request.json['data']:
bark['attributes'].update(collar=collar_id)
barks.add(**(bark['attributes']))
return flask.jsonify(meta={'accepted': True})
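if __name__ == "__main__":
    # Illustrative only: exercise both routes with Flask's test client and a
    # minimal in-memory stand-in for the barks store (bootstrap() only relies
    # on its `by_collar` and `add` methods).
    class InMemoryBarks:
        def __init__(self):
            self._barks = []

        def by_collar(self, collar_id):
            return [b for b in self._barks if b.get("collar") == collar_id]

        def add(self, **attributes):
            self._barks.append(attributes)

    demo_app = flask.Flask(__name__)
    bootstrap(demo_app, InMemoryBarks())
    client = demo_app.test_client()
    client.post("/collar/c1/barks/new", json={"data": [{"attributes": {"volume": 11}}]})
    print(client.get("/collar/c1/barks").get_json())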
| 499 |
config.py
|
alexanderpiers/ccd-param-scan
| 0 |
2026438
|
import os
def editConfigFile(parameter, newValue, baseconfig="config/config.ini", modifiedconfig="config/config-mod.ini", verbose=False):
"""
Reads in the CCD drone config file and edits the given parameters
"""
    # Check if the input and output files are the same
newConfigContent = ""
infile = open(baseconfig, "r")
for line in infile:
# Strip the first part, check if it matches the parameter
configFileParameterName = line.split("=")[0].strip()
# If we get a match, write the new parameter, otherwise write the original line
if configFileParameterName == parameter:
newConfigLine = parameter + " = " + str(newValue) + " ; MODIFIED\n"
newConfigContent += newConfigLine
if verbose:
print("Found line to modify!")
print("Writing: " + newConfigLine)
else:
newConfigContent += line
infile.close()
outfile = open(modifiedconfig, "w+")
outfile.write(newConfigContent)
outfile.close()
if __name__ == '__main__':
# Test the edit capabilities
editConfigFile("two_og_hi", -1, verbose=True, baseconfig="config/config-mod.ini", modifiedconfig="config/config-mod.ini")
| 1,106 |
web/app/main/forms.py
|
innocorps/PyIoT
| 2 |
2026529
|
"""Creates HTML webforms, using WTForms"""
from flask_wtf import FlaskForm
from wtforms import SubmitField, TextAreaField, BooleanField
from wtforms.validators import Required
class JSONForm(FlaskForm):
"""
JSONForm is a single box HTML Form used to send user
messages to other parts of the app
Attributes:
json_message (obj): Allows user to input message into a TextAreaField.
submit (obj): Takes the message from the TextAreaField and relays it.
"""
json_message = TextAreaField("JSON Message", validators=[Required()])
submit = SubmitField('Submit')
class SearchEnableForm(FlaskForm):
search_enable = BooleanField(
"Enable search (disables table auto-update)",
default=False)
submit = SubmitField('Submit')
| 783 |
src/azure-cli/azure/cli/__main__.py
|
0cool321/azure-cli
| 2 |
2026282
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
import os
import azure.cli.main
import azure.cli.core.telemetry as telemetry
try:
args = sys.argv[1:]
# Check if we are in argcomplete mode - if so, we
# need to pick up our args from environment variables
if os.environ.get('_ARGCOMPLETE'):
comp_line = os.environ.get('COMP_LINE')
if comp_line:
args = comp_line.split()[1:]
sys.exit(azure.cli.main.main(args))
except KeyboardInterrupt:
telemetry.log_telemetry('keyboard interrupt')
sys.exit(1)
finally:
try:
telemetry.flush_telemetry()
except Exception: # pylint: disable=broad-except
pass
| 982 |
api/tests/utils/__init__.py
|
M4hakala/drf_route_api_example
| 0 |
2023233
|
from .route_functions import create_route_with_coordinates
from .integration_test import TestRoute
__all__ = [
'TestRoute',
'create_route_with_coordinates',
]
| 169 |
Courses/EE6226/Software/automata/tools.py
|
ldkong1205/ntu-graduate-courses
| 22 |
2024972
|
from automata import frontend, weighted_frontend, row_vector
def convert(txt):
names = txt.split()
return ",".join(["%s.cfg" % name for name in names])
def filter_results(super_name, new_super_name):
# replace the [weighted-automaton] with [automation]
# remove weights in transitions
import fileinput
import string
fout = open(new_super_name, "w")
transition_f = -1
for line in fileinput.input(super_name):
newline = string.replace(line, "[weighted-automaton]", "[automaton]" )
s_index = string.find(newline,"transitions")
linew = ""
        if transition_f == 0 or s_index != -1:
            transition_f = 0
            if s_index == -1:
s_index = 0
else:
linew = "transitions = "
var = 1
while var == 1 :
phase_s = string.find(newline,"(", s_index)
                if phase_s == -1:
                    break
                phase_e = string.find(newline,")", phase_s)
                if phase_e == -1:
                    break
phase = newline[phase_s+1:phase_e]
pindex1= string.find(phase,",")
pindex2= string.find(phase,",",pindex1+1)
pindex3= string.find(phase,",",pindex2+1)
newphase = phase[0:pindex3]
                if newline[phase_e+1] == ",":
linew = linew + "("+ newphase +"),"
else:
linew = linew + "("+ newphase +")"
s_index = phase_e
else:
linew = newline
fout.writelines(linew)
lines = fileinput.filelineno()
fout.close()
return;
| 1,772 |
drepr/old_code/tests/pydrepr/test_repr_builder.py
|
scorpio975/d-repr
| 5 |
2023426
|
from pydrepr import ReprBuilder
def test_build_data_cube_model():
repr = ReprBuilder() \
.add_resource("default", "csv", delimiter=",") \
.add_preprocess_func("pmap", ["2..", "1.."], code="return float(value)") \
.add_preprocess_func("pmap", [0, "1.."], code="""
if value == "":
return context.get_left_value(index)
return "http://reference.data.gov.uk/id/gregorian-interval/" + value.split("-")[0] + "-01-01T00:00:00/P3Y"
""".strip()) \
.add_attribute("area", ["2..", 0]) \
.add_attribute("gender", [1, "1.."]) \
.add_attribute("period", [0, "1.."]) \
.add_attribute("obs", ["2..", "1.."]) \
.add_dim_alignment("obs", "area", [{"source": 0, "target": 0}]) \
.add_dim_alignment("obs", "gender", [{"source": 1, "target": 1}]) \
.add_dim_alignment("obs", "period", [{"source": 1, "target": 1}]) \
.add_sm() \
.add_prefix("qb", "http://purl.org/linked-data/cube#") \
.add_prefix("smdx-measure", "http://purl.org/linked-data/sdmx/2009/measure#") \
.add_prefix("eg", "http://example.org/") \
.add_class("qb:Observation") \
.add_data_node("eg:refArea", "area") \
.add_data_node("eg:gender", "gender") \
.add_data_node("eg:refPeriod", "period", "xsd:anyURI") \
.add_data_node("smdx-measure:obsValue", "obs") \
.finish() \
.finish() \
.build()
assert repr.to_yml_string(simplify=False) == """
version: '1'
resources:
default:
type: csv
delimiter: ','
preprocessing:
- type: pmap
input:
resource_id: default
slices:
- 2..
- 1..
code: |-
return float(value)
output:
- type: pmap
input:
resource_id: default
slices:
- 0
- 1..
code: |-
if value == "":
return context.get_left_value(index)
return "http://reference.data.gov.uk/id/gregorian-interval/" + value.split("-")[0] + "-01-01T00:00:00/P3Y"
output:
variables:
area:
location:
resource_id: default
slices:
- 2..
- 0
unique: false
sorted: none
value_type: unspecified
gender:
location:
resource_id: default
slices:
- 1
- 1..
unique: false
sorted: none
value_type: unspecified
period:
location:
resource_id: default
slices:
- 0
- 1..
unique: false
sorted: none
value_type: unspecified
obs:
location:
resource_id: default
slices:
- 2..
- 1..
unique: false
sorted: none
value_type: unspecified
alignments:
- type: dimension
source: obs
target: area
aligned_dims:
- source: 0
target: 0
- type: dimension
source: obs
target: gender
aligned_dims:
- source: 1
target: 1
- type: dimension
source: obs
target: period
aligned_dims:
- source: 1
target: 1
semantic_model:
data_nodes:
area: qb:Observation:1--eg:refArea
gender: qb:Observation:1--eg:gender
period: qb:Observation:1--eg:refPeriod^^xsd:anyURI
obs: qb:Observation:1--smdx-measure:obsValue
literal_nodes: []
relations: []
prefixes:
qb: http://purl.org/linked-data/cube#
smdx-measure: http://purl.org/linked-data/sdmx/2009/measure#
eg: http://example.org/
""".lstrip()
| 3,323 |
src/bokeh_server/results/main.py
|
AlexMGitHub/TheWholeEnchilada
| 1 |
2024172
|
"""Display results of trained model.
Results are displayed according to whether the dataset is a regression or a
classification problem.
"""
# %% Imports
# Standard system imports
from pathlib import Path
import pickle
# Related third party imports
from bokeh.io import curdoc
# Local application/library specific imports
from bokeh_server.results.plots.regression_results import regression_results
from bokeh_server.results.plots.classification_results \
import classification_results
# -----------------------------------------------------------------------------
# Setup
# -----------------------------------------------------------------------------
data_path = Path('src/bokeh_server/data/eda_data')
with open(data_path, 'rb') as data_file:
pickled_data = pickle.load(data_file)
metadata = pickled_data['metadata']
ml_type = metadata['type']
# -----------------------------------------------------------------------------
# Layout
# -----------------------------------------------------------------------------
if ml_type == 'classification':
results_layout = classification_results()
elif ml_type == 'regression':
results_layout = regression_results()
curdoc().add_root(results_layout)
| 1,216 |
setup.py
|
SwitcherLabs/switcherlabs-python
| 0 |
2026220
|
import os
from codecs import open
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
os.chdir(here)
version_contents = {}
with open(os.path.join(here, "switcherlabs", "version.py"), encoding="utf-8") as f:
exec(f.read(), version_contents)
readme = None
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
readme = f.read()
setup(
name="switcherlabs",
version=version_contents["VERSION"],
description="Python SDK for SwitcherLabs",
long_description=readme,
long_description_content_type="text/markdown",
author="SwitcherLabs",
author_email="<EMAIL>",
url="https://github.com/switcherlabs/switcherlabs-python",
license="MIT",
keywords="switcherslabs api feature-flags",
packages=find_packages(exclude=["tests", "tests.*"]),
zip_safe=False,
install_requires=[
'requests >= 2.20; python_version >= "3.0"',
'requests[security] >= 2.20; python_version < "3.0"',
],
python_requires="!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
)
| 1,062 |
python/sbp/utils.py
|
zk20/libsbp
| 0 |
2026279
|
#!/usr/bin/env python
# Copyright (C) 2011-2014 Swift Navigation Inc.
# Contact: https://support.swiftnav.com
#
# This source is subject to the license found in the file 'LICENSE' which must
# be be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
"""Shared utility functions.
"""
EXCLUDE = ['sender', 'msg_type', 'crc', 'length', 'preamble', 'payload']
from construct import Container
def exclude_fields(obj, exclude=EXCLUDE):
"""
Return dict of object without parent attrs.
"""
return dict([(k, getattr(obj, k)) for k in obj.__slots__ if k not in exclude])
def walk_json_dict(coll):
"""
Flatten a parsed SBP object into a dicts and lists, which are
compatible for JSON output.
Parameters
----------
coll : dict
"""
if isinstance(coll, dict):
return dict((k, walk_json_dict(v)) for (k, v) in iter(coll.items()) if k != '_io')
elif isinstance(coll, bytes):
return coll.decode('ascii', errors='replace')
elif hasattr(coll, '__iter__') and not isinstance(coll, str):
return [walk_json_dict(seq) for seq in coll]
else:
return coll
def containerize(coll):
"""Walk attribute fields passed from an SBP message and convert to
Containers where appropriate. Needed for Construct proper
serialization.
Parameters
----------
coll : dict
"""
    # If the caller has instantiated a message class using classes
# representing the inner components of messages, they should have
# a _parser and not a to_binary.
if hasattr(coll, "_parser") and not hasattr(coll, "to_binary"):
coll = dict([(k, getattr(coll, k)) for k in coll.__slots__])
if isinstance(coll, Container):
[setattr(coll, k, containerize(v)) for (k, v) in coll.items()]
return coll
elif isinstance(coll, dict):
return containerize(Container(**coll))
elif isinstance(coll, list):
for j, i in enumerate(coll):
if isinstance(i, dict):
coll[j] = containerize(Container(**i))
return coll
else:
return coll
def fmt_repr(obj):
"""Print a orphaned string representation of an object without the
clutter of its parent object.
"""
items = ["%s = %r" % (k, v) for k, v in list(exclude_fields(obj).items())]
return "<%s: {%s}>" % (obj.__class__.__name__, ', '.join(items))
| 2,495 |
rfchat/rfchat.py
|
paf0186/home_automation
| 0 |
2026026
|
import sys
import tty
import termios
import threading
import time
from rpi_rf import RFDevice
# Elegant shutdown
def exithandler():
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
try:
rx.cleanup()
tx.cleanup()
except:
pass
sys.exit(0)
# Activate our transmitter and receiver
tx = RFDevice(17)
tx.enable_tx()
rx = RFDevice(27)
rx.enable_rx()
# Receiving loop
def rec(rx):
print("Receiving")
lastTime = None
while True:
currentTime = rx.rx_code_timestamp
if (
currentTime != lastTime and
(lastTime is None or currentTime - lastTime > 350000)
):
lastTime = rx.rx_code_timestamp
try:
if (rx.rx_code == 13): # Enter/Return Pressed
sys.stdout.write('\r\n')
else:
sys.stdout.write(chr(rx.rx_code))
sys.stdout.flush()
except:
pass
time.sleep(0.01)
# Start receiving thread
t = threading.Thread(target=rec, args=(rx,), daemon=True)
t.start()
print("Ready to transmit")
# Remember how the shell was set up so we can reset on exit
old_settings = termios.tcgetattr(sys.stdin)
tty.setraw(sys.stdin)
while True:
# Wait for a keypress
char = sys.stdin.read(1)
# If CTRL-C, shutdown
if ord(char) == 3:
exithandler()
else:
# Transmit character
tx.tx_code(ord(char))
time.sleep(0.01)
| 1,493 |
play_game.py
|
rortms/tictac
| 0 |
2026424
|
import kivy
kivy.require('1.9.1')
from kivy.app import App
from kivy.clock import Clock
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.popup import Popup
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.button import Button
from kivy.uix.label import Label
from utilities import *
import pickle  # used for loading/saving trained Q maps
colors = {
'black' : [0, 0, 0],
'white' : [1, 1, 1],
'red' : [1, 0, 0],
'green' : [0, 1, 0],
'blue' : [0, 0, 1],
    'cyan'    : [0, 200./255, 200./255],
'magenta' : [200./255, 0, 200./255],
'yellow' : [1, 1, 0],
'orange' : [1, 128./255, 0]
}
strategies = ['random',
'ideal',
'minimax',
'Qlearning',
'miniQmax',
'human',
'train-miniQmax']
### MESSAGES ###
noQs_message = "Currently only 3x3 game Q's have been trained for miniQmax\n" + \
"and Q-learning. Although miniQmax can still perform reasonably\n" + \
"well with its 3x3 Q on a 4x4 game, don't expect smarts for size > 4 :(\n" + \
"However!! You can train your own miniQmax Q by choosing train-miniQmax!!"
NA_gamesize_message = "Game size must be an integer between 2 and 10 exclusive"
# Default game in absence of player choice
default_game = setupGame(QMap(), 3, ['ideal', 'ideal'])
## So that user is greeted only once
greeted = False
## To handle end of game event
game_has_finished = False
########################################################
class TictacScreenManager(ScreenManager):
def __init__(self, **kwargs):
super(TictacScreenManager, self).__init__(**kwargs)
def update(self, dt):
game_board = self.get_screen('game_board')
current_player = game_board.G.current_player
# is_choosing = current_player.strategies.human_choosing
if not game_has_finished:
if current_player.policy == 'human':
self.stopClock()
else:
game_board.updateBoard()
else:
self.stopClock()
game_board.endGamePopup()
self.resetGame()
def resetGame(self):
global game_has_finished
game_has_finished = False
self.switch_to(SelectScreen())
def startClock(self):
self.take_step = Clock.schedule_interval(self.update, 0.8)
def stopClock(self):
self.take_step.cancel()
class SelectScreen(Screen):
def whichChoice(self, choices):
choice = [button for button in choices if button.pressed]
if choice:
return choice[0].text
else:
return "ideal"
def sanitizeTextInput(self):
global greeted
user_text = self.ids['user_text_input'].text
if user_text != '':
try:
N = int(user_text)
if N > 2 and N < 10:
self.ready2go = True
self.game_size = N
if N > 3:
if not greeted:
Popup(title='Hi!',
title_size='50sp',
content=Label(text=noQs_message),
size_hint=(0.8,0.4)).open()
greeted = True
else:
self.ready2go = False
raise ValueError('Integer out of range')
except ValueError:
self.ready2go = False
popup =Popup(title='Invalid Game Size',
title_size = '20sp',
content=Label(text= NA_gamesize_message),
size_hint=(0.8, 0.4))
popup.open()
#print self.whichChoice(p1_choices), self.whichChoice(p2_choices)
def makeGameAndSwitch(self, player_choices):
N = str(self.game_size)
policies = [self.whichChoice(ch) for ch in player_choices]
# Setup game
Q_loader = { 'Qlearning' : self.loadQ('newlucky'),
'miniQmax' : self.loadQ('pipeQ'),
'train-miniQmax': self.loadQ('train-miniQmax_'+N+'X'+N)}
# Load trained Q if it exists
QM1 = Q_loader.get(policies[0], None)
QM2 = Q_loader.get(policies[1], None)
if 'train-miniQmax' in policies:
global_QM = Q_loader['train-miniQmax']
game = setupGame(global_QM, self.game_size, policies, p1QM=QM1,p2QM=QM2, learning=True)
else:
game = setupGame(QMap(), self.game_size, policies, p1QM=QM1,p2QM=QM2)
# Create board widget
gb = GameBoard(game, game_size=self.game_size,name='game_board')
self.manager.switch_to(gb)
        if policies[0] != 'human': # Clock should not run during human's turn
self.manager.startClock()
def loadQ(self, name): # Convenience function
import os.path
Q = QMap()
if os.path.isfile("./Qs/"+name+".pickle"):
with open("./Qs/"+name+".pickle", 'rb') as f:
Q = pickle.load(f)
return Q
class GreenButton(Button):
pass
class BoardTile(ButtonBehavior, Label):
def __init__(self,game_board, **kwargs):
super(BoardTile, self).__init__(**kwargs)
self.game_board = game_board
self.tile_is_set = False
self.can_resume = False
def on_press(self):
if not self.tile_is_set: # Clicking on activated tiles results in no action
self.game_board.G.current_player.strategies.human_move_index = int(self.text)
self.game_board.updateBoard()
self.can_resume = True
def on_release(self):
if self.can_resume:
self.game_board.manager.startClock()
class GameBoard(Screen):
def __init__(self, game=default_game, game_size=3, **kwargs):
super(Screen, self).__init__(**kwargs)
self.game_size = game_size
self.G = game
self.tiles = \
[BoardTile(self, text=str(i)) for i in range(game_size**2)]
# Generate Grid
self.grid = GridLayout(cols=game_size)
for tile in self.tiles:
self.grid.add_widget(tile)
#Add Grid to Gameboard Screen
self.add_widget(self.grid)
def updateBoard(self):
global game_has_finished
if not self.G.game_finished:
self.G.takeStep()
for position, mark in self.G.game_sequence:
self.tiles[position].color = colors['blue'] + [1]
self.tiles[position].text = mark
self.tiles[position].font_size = self.tiles[0].width * 0.8
self.tiles[position].tile_is_set = True
else:
game_has_finished = True
#######################################################
# Save Q map if at least one policy was train-miniQmax
if self.G.learning:
print "GOING TO SAVE"
N = str(self.game_size)
with open('./Qs/train-miniQmax_'+N+'X'+N+'.pickle', 'wb') as f:
pickle.dump(self.G.QM, f, pickle.HIGHEST_PROTOCOL)
def endGamePopup(self,end_text="finished"):
def popIt(endmess=end_text):
content=GreenButton(text=endmess, font_size='30sp')
popup =Popup(title='Game Finished',
title_size = '50sp',
content=content,
auto_dismiss=False,
size_hint=(0.8, 0.4), size=(400,400))
content.bind(on_press=popup.dismiss)
popup.open()
p1, p2 = self.G.players
if True not in [p1.is_winner, p2.is_winner]:
popIt("Its a draw!")
else:
winner = [p for p in [p1,p2] if p.is_winner][0]
popIt(winner.mark + " WINS!")
class StrategyList(BoxLayout):
def __init__(self, **kwargs):
super(StrategyList, self).__init__(**kwargs)
self.strat_buttons = \
[ListButton(text=strategy,parent_list=self) for strategy in strategies]
for b in self.strat_buttons:
self.add_widget(b)
class ListButton(ButtonBehavior, Label):
def __init__(self, parent_list,**kwargs):
super(ListButton, self).__init__(**kwargs)
self.pressed = False
self.parent_list = parent_list
def on_press(self):
self.pressed = not self.pressed
text_color = { False: colors['white'],
True: colors['blue'] } [self.pressed]
for b in self.parent_list.strat_buttons:
if b != self:
b.pressed = False
b.color = colors['white'] + [1]
self.color = text_color + [1]
print text_color, self.parent_list, self.pressed
class TicTacApp(App):
def build(self):
game = TictacScreenManager()#SelectScreen()#GameBoard(5)
return game
if __name__ == '__main__':
TicTacApp().run()
| 9,388 |
aispace/datasets/ccf_bdci_2020.py
|
SmileGoat/AiSpace
| 32 |
2026149
|
# -*- coding: utf-8 -*-
# @Time : 2019-12-23 15:24
# @Author : yingyuankai
# @Email : <EMAIL>
# @File : glue_zh.py
"""DuReader."""
__all__ = [
"Ccfbdci2020"
]
import os
import six
import logging
import json
import tensorflow as tf
import tensorflow_datasets as tfds
from aispace.datasets import BaseDataset
from aispace.datasets import data_transformers as Transformer
_GLUE_CITATION = "TODO"
logger = logging.getLogger(__name__)
class CcfBdci2020Config(tfds.core.BuilderConfig):
"""BuilderConfig for DuReader."""
# @tfds.core.disallow_positional_args
def __init__(self,
text_features=None,
label_column=None,
                 data_url=None,
data_dir=None,
citation=None,
url=None,
label_classes=None,
train_shards=1,
process_label=lambda x: x,
**kwargs):
"""BuilderConfig for DuReader.
Args:
text_features: `dict[string, string]`, map from the name of the feature
dict for each text field to the name of the column in the tsv file
label_column: `string`, name of the column in the tsv file corresponding
to the label
data_url: `string`, url to download the zip file from
data_dir: `string`, the path to the folder containing the tsv files in the
downloaded zip
citation: `string`, citation for the data set
url: `string`, url for information about the data set
label_classes: `list[string]`, the list of classes if the label is
categorical. If not provided, then the label will be of type
`tf.float32`.
train_shards: `int`, number of shards for the train data set
process_label: `Function[string, any]`, function taking in the raw value
of the label and processing it to the form required by the label feature
**kwargs: keyword arguments forwarded to super.
"""
# Version history:
# 1.0.0: S3 (new shuffling, sharding and slicing mechanism).
# 0.0.1: Initial version.
super(CcfBdci2020Config, self).__init__(
version=tfds.core.Version(
"1.0.0",
# experiments={tfds.core.Experiment.S3: False}
),
# supported_versions=[
# tfds.core.Version(
# "1.0.0",
# "New split API (https://tensorflow.org/datasets/splits)"
# ),
# ],
**kwargs)
        self.data_url = data_url
self.data_dir = data_dir
self.citation = citation
self.url = url
self.train_shards = train_shards
self.process_label = process_label
@BaseDataset.register("ccfbdci2020")
class Ccfbdci2020(BaseDataset):
"""Ccfbdci2020"""
BUILDER_CONFIGS = [
CcfBdci2020Config(
name='plain_text',
description="""
            Chitchat dialogue data: Huawei's Weibo data [1], Beihang University and Microsoft's Douban multi-turn dialogues [2], Tsinghua's LCCC dataset [3].
            Knowledge-grounded dialogue data: Baidu's DuConv [4], Tsinghua's KdConv [5], Tencent's retrieval-assisted generation dialogue dataset [6].
            Recommendation dialogue data: Baidu's DuRecDial [7].""",
data_url=["https://dataset-bj.cdn.bcebos.com/qianyan/douban.zip",
"https://dataset-bj.cdn.bcebos.com/qianyan/duconv.zip",
"https://dataset-bj.cdn.bcebos.com/qianyan/DuRecDial.zip",
"https://dataset-bj.cdn.bcebos.com/qianyan/LCCC.zip",
"https://dataset-bj.cdn.bcebos.com/qianyan/kdconv.zip",
"https://dataset-bj.cdn.bcebos.com/qianyan/tencent.zip",
"https://dataset-bj.cdn.bcebos.com/qianyan/weibo.zip"],
data_dir=".",
citation="",
url="https://aistudio.baidu.com/aistudio/competition/detail/49"
),
]
def __init__(self, data_dir, **kwargs):
super(Ccfbdci2020, self).__init__(data_dir, **kwargs)
if "dataset" in self.hparams and "transformer" in self.hparams.dataset and self.hparams.dataset.transformer is not None:
self.transformer = Transformer.BaseTransformer.\
by_name(self.hparams.dataset.transformer)(self.hparams, data_dir=data_dir)
def _info(self):
features = self._get_feature_dict()
if not features:
logger.warning("Do not specify inputs and outputs in config, using default feature dict.")
features = self._base_feature_dict()
metadata = None
if "dataset" in self.hparams and "tokenizer" in self.hparams.dataset and "name" in self.hparams.dataset.tokenizer:
metadata = tfds.core.MetadataDict({"tokenizer": self.hparams.dataset.tokenizer.name,
"vocab_size": self.hparams.pretrained.config.vocab_size})
return tfds.core.DatasetInfo(
builder=self,
description=self.builder_config.description,
features=tfds.features.FeaturesDict(features),
metadata=metadata,
homepage="https://aistudio.baidu.com/aistudio/competition/detail/55",
citation=self.builder_config.citation + "\n" + _GLUE_CITATION,
)
def _base_feature_dict(self):
features = {
text_feature: tfds.features.Text()
for text_feature in six.iterkeys(self.builder_config.text_features)
}
return features
def _split_generators(self, dl_manager):
dl_dir = dl_manager.download_and_extract(self.builder_config.data_url)
data_dir = os.path.join(dl_dir, self.builder_config.data_dir)
data_train_json, data_validation_json, data_test_json = \
os.path.join(data_dir, f"dureader_{self.builder_config.name}-data/train.json"), \
os.path.join(data_dir, f"dureader_{self.builder_config.name}-data/dev.json"), \
os.path.join(data_dir, f"dureader_{self.builder_config.name}-data/test.json")
return [
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN,
num_shards=self.builder_config.train_shards,
gen_kwargs={"filepath": data_train_json, 'split': "train"}
),
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION,
num_shards=1,
gen_kwargs={"filepath": data_validation_json, 'split': "validation"}
),
tfds.core.SplitGenerator(
name=tfds.Split.TEST,
num_shards=1,
gen_kwargs={"filepath": data_test_json, 'split': "test"}
)
]
def _generate_examples(self, filepath, **kwargs):
"""
        Generate tfrecords directly from the raw data, without creating an intermediate json file
:param filepath:
:param kwargs:
:return:
"""
generator = self._generate_examples_from_json if "dataset" in self.hparams and \
"transformer" in self.hparams.dataset \
and self.hparams.dataset.transformer is not None \
else self._generate_examples_from_raw
for idx, item in enumerate(generator(filepath, **kwargs)):
yield idx, item
def _generate_examples_from_raw(self, filepath, **kwargs):
pass
| 7,314 |
nlpatl/models/clustering/__init__.py
|
dumpmemory/nlpatl
| 18 |
2025408
|
from nlpatl.models.clustering.clustering import Clustering
from nlpatl.models.clustering.sklearn_clustering import SkLearnClustering
from nlpatl.models.clustering.sklearn_extra_clustering import SkLearnExtraClustering
| 221 |
mconnectionGit.py
|
marcinpeski/TA-algorithm
| 0 |
2025656
|
import sys
import mysql.connector
from mysql.connector import Error
def establish_connection(log, online = False):
version = sys.version_info
if not online:
try:
connection = mysql.connector.connect(host='127.0.0.1',
database='eTalg',
user='marcin',
password='<PASSWORD>',
port = '33360',
buffered = True)
if connection.is_connected():
db_Info = connection.get_server_info()
cursor = connection.cursor()
cursor.execute("select database();")
record = cursor.fetchone()
return connection, cursor
except Error as e:
log.add_line(["-"*20,"-"*20,"-"*20])
log.add_line(["Error while connecting to MySQL", e])
log.add_line(["This connection should work from outside of LM. If it doesn't work, you probably forgot to set up VAGRANT"])
else:
try:
            # This connection is from inside of the vagrant machine.
connection = mysql.connector.connect(host='localhost',
database='economics',
user='XXXXX',
password='<PASSWORD>',
port = '3306',#33360 - see above
buffered = True)
if connection.is_connected():
db_Info = connection.get_server_info()
log.add_line(["Connected to MySQL Server version ", db_Info])
cursor = connection.cursor()
cursor.execute("select database();")
record = cursor.fetchone()
log.add_line(["You're connected to database: ", record])
return connection, cursor
except Error as e:
log.add_line(["-"*20,"-"*20,"-"*20])
log.add_line(["Error while connecting to MySQL", e])
log.add_line(["This connection should work from outside of LM. If it doesn't work, you probably forgot to set up VAGRANT"])
def close_connection(connection, cursor, commit = False):
if (connection.is_connected()):
if commit:
connection.commit()
cursor.close()
connection.close()
| 2,568 |
lib/JumpScale/grid/osis/OSISBaseObjectComplexType.py
|
rudecs/jumpscale_core7
| 0 |
2025900
|
from JumpScale import j
import JumpScale.baselib.hash
import copy
import JumpScale.baselib.code
class OSISBaseObjectComplexType(j.code.classGetJSRootModelBase()):
def init(self,namespace,category,version):
if not hasattr(self,"guid"):
self.guid=""
if self.guid=="":
self.guid=j.base.idgenerator.generateGUID()
self.guid=self.guid.replace("-","")
self._ckey=""
self._meta=[namespace,category,int(version)] #$namespace,$category,$version
def getUniqueKey(self):
"""
return unique key for object, is used to define unique id (std the guid)
if return None means is always unique
"""
return None
# def getSetGuid(self):
# """
# use osis to define & set unique guid (sometimes combination of other keys, std the guid and does nothing)
# """
# return self.guid
def getSetGuid(self):
"""
"""
if "gid" not in self.__dict__ or self.gid==0 or self.gid=="":
self.gid=j.application.whoAmI.gid
# self.sguid=struct.pack("<HH",self.gid,self.id)
# self.guid = "%s_%s" % (self.gid, self.id)
self.lastmod=j.base.time.getTimeEpoch()
return self.guid
def getContentKey(self):
"""
is like returning the hash, is used to see if object changed
"""
dd=j.code.object2json(self,True,ignoreKeys=["guid","id","sguid","moddate"],ignoreUnderscoreKeys=True)
return j.tools.hash.md5_string(j.tools.text.toStr(dd))
def load(self, ddict):
"""
load the object starting from dict of primitive types (dict, list, int, bool, str, long) and a combination of those
std behaviour is the __dict__ of the obj
"""
j.code.dict2JSModelobject(self,ddict)
def dump(self):
"""
dump the object to a dict of primitive types (dict, list, int, bool, str, long) and a combination of those
std behaviour is the __dict__ of the obj
"""
return j.code.object2dict(self,dieOnUnknown=True)
def getDictForIndex(self,ignoreKeys=[]):
"""
get dict of object without passwd and props starting with _
"""
return j.code.object2dict(self,ignoreKeys=ignoreKeys+["passwd","password","secret"],ignoreUnderscoreKeys=True)
def __eq__(self,other):
if not hasattr(other, "__dict__"):
return False
def clean(obj):
dd={}
keys=list(obj.__dict__.keys())
keys.sort()
for key in keys:
val= obj.__dict__[key]
if key[0]!="_":
dd[str(key)]=val
return dd
# print "'%s'"%clean(self)
# print "'%s'"%clean(other)
return clean(self)==clean(other)
| 2,841 |
plusone.py
|
Garmelon/plusone
| 0 |
2025222
|
import asyncio
import configparser
import logging
import re
import yaboli
from yaboli.utils import *
logger = logging.getLogger("plusone")
class PointsDB(yaboli.Database):
def initialize(self, db):
with db:
db.execute((
"CREATE TABLE IF NOT EXISTS points ("
"normalized_nick TEXT PRIMARY KEY, "
"nick TEXT NOT NULL, "
"room TEXT NOT NULL, "
"points INTEGER NOT NULL"
")"
))
@yaboli.operation
def add_points(self, db, room, nick, points):
normalized_nick = normalize(nick)
with db:
db.execute(
"INSERT OR IGNORE INTO points VALUES (?,?,?,0)",
(normalized_nick, nick, room)
)
db.execute((
"UPDATE points "
"SET points=points+?, nick=? "
"WHERE normalized_nick=? AND room=?"
), (points, nick, normalized_nick, room))
@yaboli.operation
def points_of(self, db, room, nick):
normalized_nick = normalize(nick)
res = db.execute((
"SELECT points FROM points "
"WHERE normalized_nick=? AND room=?"
), (normalized_nick, room))
points = res.fetchone()
return points[0] if points else 0
class PlusOne:
SHORT_DESCRIPTION = "counts :+1:s"
DESCRIPTION = (
"'plusone' counts +1/:+1:/:bronze:s:"
" Simply reply '+1' to someone's message to give them a point.\n"
" Alternatively, specify a person with: '+1 [to] @person'.\n"
)
COMMANDS = (
"!points - show your own points\n"
"!points <nick> [<nick> ...] - list other people's points\n"
)
AUTHOR = "Created by @Garmy using github.com/Garmelon/yaboli\n"
PLUSONE_RE = r"\s*(\+1|:\+1:|:bronze(!\?|\?!)?:)(\s+(.*))?"
MENTION_RE = r"(to\s+@?|@)(\S+)"
def __init__(self, dbfile):
self.db = PointsDB(dbfile)
@yaboli.command("points")
async def command_points(self, room, message, argstr):
args = yaboli.Bot.parse_args(argstr)
if args:
lines = []
for nick in args:
if nick[0] == "@": # a bit hacky, requires you to mention nicks starting with '@'
nick = nick[1:]
points = await self.db.points_of(room.roomname, nick)
line = f"{mention(nick, ping=False)} has {points} point{'' if points == 1 else 's'}."
lines.append(line)
text = "\n".join(lines)
await room.send(text, message.mid)
else: # your own points
points = await self.db.points_of(room.roomname, message.sender.nick)
text = f"You have {points} point{'' if points == 1 else 's'}."
await room.send(text, message.mid)
@yaboli.trigger(PLUSONE_RE, flags=re.IGNORECASE)
async def trigger_plusone(self, room, message, match):
rest = match.group(4)
if rest:
specific = re.match(self.MENTION_RE, match.group(4))
else:
specific = None
nick = None
if specific:
nick = specific.group(2)
elif message.parent:
parent_message = await room.get_message(message.parent)
nick = parent_message.sender.nick
if nick is None:
text = "You can't +1 nothing..."
elif similar(nick, message.sender.nick):
text = "There's no such thing as free points on the internet."
else:
await self.db.add_points(room.roomname, nick, 1)
text = f"Point for user {mention(nick, ping=False)} registered."
await room.send(text, message.mid)
class PlusOneBot(yaboli.Bot):
PING_TEXT = ":bronze?!:"
SHORT_HELP = PlusOne.SHORT_DESCRIPTION
LONG_HELP = PlusOne.DESCRIPTION + PlusOne.COMMANDS + PlusOne.AUTHOR
def __init__(self, nick, dbfile, cookiefile=None):
super().__init__(nick, cookiefile=cookiefile)
self.plusone = PlusOne(dbfile)
async def on_send(self, room, message):
await super().on_send(room, message)
await self.plusone.trigger_plusone(room, message)
async def on_command_specific(self, room, message, command, nick, argstr):
if similar(nick, room.session.nick) and not argstr:
await self.botrulez_ping(room, message, command, text=self.PING_TEXT)
await self.botrulez_help(room, message, command, text=self.LONG_HELP)
await self.botrulez_uptime(room, message, command)
await self.botrulez_kill(room, message, command, text="-1")
await self.botrulez_restart(room, message, command, text="∓1")
async def on_command_general(self, room, message, command, argstr):
if not argstr:
await self.botrulez_ping(room, message, command, text=self.PING_TEXT)
await self.botrulez_help(room, message, command, text=self.SHORT_HELP)
await self.plusone.command_points(room, message, command, argstr)
def main(configfile):
logging.basicConfig(level=logging.INFO)
config = configparser.ConfigParser(allow_no_value=True)
config.read(configfile)
nick = config.get("general", "nick")
cookiefile = config.get("general", "cookiefile", fallback=None)
dbfile = config.get("general", "dbfile", fallback=None)
bot = PlusOneBot(nick, dbfile, cookiefile=cookiefile)
for room, password in config.items("rooms"):
if not password:
password = None
bot.join_room(room, password=password)
asyncio.get_event_loop().run_forever()
if __name__ == "__main__":
main("plusone.conf")
| 4,881 |
alpyca_sim/build/pybind/test_pybind_class.py
|
alpyca/alpyca
| 3 |
2025263
|
import unittest
from pybind_class import PybindClass
from pybind_writer import PybindWriter
class TestPybindClass(unittest.TestCase):
def test_context_manager(self):
writer = PybindWriter()
with PybindClass(writer):
writer.write_line('test')
self.assertEqual(writer.text, 'test;\n\n')
if __name__ == '__main__':
unittest.main()
| 386 |
movefile_restart/main.py
|
hammy3502/python-movefile-restart
| 0 |
2025975
|
import sys
import os
if sys.platform != "win32":
raise OSError("movefile-restart module is only supported on Windows systems!")
import winreg
_registry = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
def __get_current_values():
"""Get Values.
Internal function to get the current values stored inside PendingFileRenameOperations as a giant list of strings.
Returns:
str[]: List of strings in PendingFileRenameOperations
"""
try:
_read_key = winreg.OpenKey(_registry, "SYSTEM\\CurrentControlSet\\Control\\Session Manager", 0, winreg.KEY_READ)
except PermissionError:
raise PermissionError("Permission Denied to read registry key.") # Re-raise to make clear to end-user/library user
file_ops_values = None
i = 0
while True:
try:
if winreg.EnumValue(_read_key,i)[0] == "PendingFileRenameOperations":
file_ops_values = winreg.EnumValue(_read_key,i)[1]
break
except OSError:
break
i += 1
    if file_ops_values is None:
return []
return file_ops_values
def __set_registry(values):
"""Set PendingFileRenameOperations.
Use at your own risk internal function. Takes a list of strings, and writes it to PendingFileRenameOperations.
Args:
values (str[]): List of strings to write to PendingFileRenameOperations key.
"""
try:
_write_key = winreg.OpenKey(_registry, "SYSTEM\\CurrentControlSet\\Control\\Session Manager", 0, winreg.KEY_WRITE)
except PermissionError:
raise PermissionError("Permission Denied to write registry key.")
winreg.SetValueEx(_write_key, "PendingFileRenameOperations", 0, winreg.REG_MULTI_SZ, values)
def DeleteFile(file_path, check_conflicts=True):
"""Queue File for Deletion.
Adds the Registry information to delete a file on reboot.
Args:
file_path (str): A path to the file to delete.
check_conflicts (bool): Checks file_path to make sure the Delete can happen as supplied. Defaults to True.
Raises:
FileNotFoundError: Raised if the file_path doesn't exist.
"""
file_path = file_path.replace("/", "\\")
values = __get_current_values()
if check_conflicts and not (os.path.isfile(file_path)):
values.reverse()
try:
file_path_index = values.index("\\??\\" + file_path)
except ValueError:
file_path_index = -1
if file_path_index % 2 != 0 or file_path_index == -1:
raise FileNotFoundError("Path {} does not exist and is not being created during a move operation!".format(file_path))
values.reverse()
values.append("\\??\\" + file_path)
values.append("")
__set_registry(values)
def MoveFile(from_path, to_path, check_conflicts=True):
"""Queue File for Moving.
Adds the Registry information to move a file on reboot.
Args:
from_path (str): The directory being moved from.
to_path (str): The directory being moved to.
check_conflicts (bool): Check from_path and to_path to make sure the Move/Rename can be performed successfully.
Raises:
FileNotFoundError: Raised if the from_path doesn't exist or if the directory of to_path doesn't exist.
FileExistsError: Raised if to_path already exists.
"""
from_path = from_path.replace("/", "\\")
if check_conflicts and not os.path.isfile(from_path): # Don't move non-existant path
raise FileNotFoundError("Path {} does not exist!".format(from_path))
to_path = to_path.replace("/", "\\")
if check_conflicts and not os.path.isdir(os.path.dirname(to_path)): # Don't move to non-existant dir
raise FileNotFoundError("Path {} does not exist to move to!".format(os.path.dirname(to_path)))
values = __get_current_values()
if check_conflicts and os.path.isfile(to_path): # Don't move to already-existing destination unless it will be deleted/moved
values.reverse()
try:
to_path_index = values.index("\\??\\" + to_path)
except ValueError:
to_path_index = -1
if to_path_index % 2 == 0 or to_path_index == -1:
raise FileExistsError("Path {} already exists and isn't already being deleted/moved!".format(to_path))
values.reverse()
values.append("\\??\\" + from_path)
values.append("\\??\\" + to_path)
__set_registry(values)
def RenameFile(from_path, to_path, check_conflicts=True):
"""MoveFile Alias."""
MoveFile(from_path, to_path, check_conflicts)
def GetFileOperations():
"""Get Pending File Operations.
Returns a list with tuples of the format (from_path, to_path). If to_path is empty, then the file is being deleted.
Returns:
tuple[]: A list of tuples containing the pending file operations.
"""
values = __get_current_values()
to_return = []
for i in range(int(len(values) / 2)):
to_return.append((values[2*i].replace("\\??\\", ""), values[2*i+1].replace("\\??\\", "")))
return to_return
def PrintFileOperations():
"""Prints Pending File Operations."""
vals = GetFileOperations()
if not vals:
print("There are no currently pending file operations!")
return
for i in vals:
if i[1] == "":
print("Deleting {}".format(i[0]))
else:
print("Moving {} to {}".format(i[0], i[1]))
def RemoveFileOperation(file_op_index):
"""Remove File Operation from Occuring.
Args:
file_op_index (int): Index of file operation to remove. Same indexes as GetFileOperations().
Raises:
TypeError: file_op_index isn't an integer.
IndexError: The passed in index doesn't exist.
"""
values = __get_current_values()
if not isinstance(file_op_index, int):
raise TypeError("Index for operation to remove must be an integer!")
try:
del values[file_op_index*2:file_op_index*2+2]
except IndexError:
raise IndexError("Index {} does not exist!".format(str(file_op_index))) # Re-raising here to be more descriptive for debugging
__set_registry(values)
def CheckPermissions():
"""Get Permissions.
Gets the permissions for reading/writing the registry as a tuple.
Returns:
(bool, bool): First bool is True/False for reading the key, second is for writing the key.
"""
read = True
write = True
try:
winreg.OpenKey(_registry, "SYSTEM\\CurrentControlSet\\Control\\Session Manager", 0, winreg.KEY_READ)
try:
winreg.OpenKey(_registry, "SYSTEM\\CurrentControlSet\\Control\\Session Manager", 0, winreg.KEY_WRITE)
except PermissionError:
write = False
except PermissionError:
read = False
write = False # Due to how this program works, if reading is impossible, so is writing.
return (read, write)
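def _example_usage():
    """Illustrative only: queue a rename and a delete to be applied by Windows
    at the next reboot. The paths are hypothetical and must exist (or be
    created by an earlier queued operation) for the conflict checks to pass.
    """
    MoveFile(r"C:\temp\old_name.txt", r"C:\temp\new_name.txt")
    DeleteFile(r"C:\temp\stale.log")
    PrintFileOperations()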
if __name__ == "__main__":
if CheckPermissions()[0]:
print("Currently pending file operations: ")
PrintFileOperations()
else:
print("No read permission on registry key!")
sys.exit()
| 7,138 |
purview_py/conn/__init__.py
|
Spydernaz/purview_py
| 0 |
2026398
|
"""
purview_py
~~~~~~~~~~~~~~~~~~~~~
An extension module to facilitate API Models and functionality.
:copyright: (c) 2019 Spydernaz
:license: MIT, see LICENSE for more details.
"""
from .Connection import PurviewConnection
| 218 |
test.py
|
hashhar/jsonschema2prestosql
| 2 |
2024054
|
#!/usr/bin/env python3
import jsonschema2sql
jsonschema = jsonschema2sql.load_schema("test.json")
sql = jsonschema2sql.generate_create_table(
"test_table", "default", "s3://some-bucket/", ["ad"], "PARQUET", False, jsonschema
)
expected_sql = """CREATE TABLE "default"."test_table" (
"string_col" varchar,
"datetime_col" timestamp,
"datetime_string_col" varchar,
"date_col" date,
"date_string_col" varchar,
"time_col" time,
"time_string_col" varchar,
"decimal_string_col" decimal(10, 2),
"double_col" double,
"double_double_col" double,
"float_col" float,
"decimal_col" decimal(5, 3),
"action_date" bigint,
"boolean_col" boolean,
"array_col" array(varchar),
"array_object_col" array(ROW("string_col" varchar, "datetime_col" timestamp)),
"object_col" ROW("string_col" varchar, "integer_col" bigint),
"ad" varchar
) WITH (
external_location = 's3://some-bucket/',
partitioned_by = ARRAY['ad'],
format = 'PARQUET'
)"""
assert sql == expected_sql
print("** TESTS PASS! **")
| 1,061 |
setup.py
|
jcdaniel14/devnet-ssh
| 0 |
2025860
|
from setuptools import setup, find_packages
with open("README.md") as readme_file:
README = readme_file.read()
setup_args = dict(
name='devnet_ssh',
version='1.0.5',
description='Fast and simple SSH library for interactive session based on Paramiko',
long_description_content_type="text/markdown",
long_description=README,
license='MIT',
packages=find_packages(),
author='<NAME>',
author_email='<EMAIL>',
keywords=['SSH', 'SSH Client', 'Paramiko', "Devnet"],
url='https://github.com/jcdaniel14/devnet_ssh.git',
download_url='https://pypi.org/project/devnet_ssh/'
)
install_requires = [
"paramiko>=2.4.3"
]
if __name__ == '__main__':
setup(**setup_args, install_requires=install_requires)
| 776 |
model.py
|
exchhattu/BiomedicaLorHealthCare-NLP
| 0 |
2026484
|
#!/usr/bin/python3
'''
Written: <NAME> PhD
Mon Sep 16 15:04:45 2019
'''
import os, sys, shutil
import random
class Model:
def __init__(self, fo_train, fo_valid, fo_test):
self._fo_train = fo_train
self._fo_valid = fo_valid
self._fo_test = fo_test
# indexes
self._ts_train = []
self._ts_valid = []
self._ts_test = []
def create_validation_data(self, ts_rows):
in_seed = 89
ts_row_idxes = [i for i in range(0, ts_rows.shape[0])]
random.Random(in_seed).shuffle(ts_row_idxes)
in_train = int(self._fo_train * len(ts_row_idxes))
in_valid = int(self._fo_valid * len(ts_row_idxes))
in_test = int(self._fo_test * len(ts_row_idxes))
        # in case the float-to-int conversion loses some rows
in_error = in_train + in_valid + in_test
in_diff = len(ts_row_idxes) - in_error
in_train = in_train + in_diff
print("[Updates]: {0:0.2f}% train => {1:d}".format(self._fo_train, in_train))
print("[Updates]: {0:0.2f}% valid => {1:d}".format(self._fo_valid, in_valid))
print("[Updates]: {0:0.2f}% test => {1:d}".format(self._fo_test, in_test))
in_tr_idxes = ts_row_idxes[:in_train]
in_va_idxes = ts_row_idxes[in_train:in_train+in_valid]
in_te_idxes = ts_row_idxes[-in_test:]
self._ts_train = ts_rows[in_tr_idxes]
self._ts_valid = ts_rows[in_va_idxes]
self._ts_test = ts_rows[in_te_idxes]
def split_validation_data(self, root_path=os.getcwd()):
ts_dirs = ["train", "valid", "test"]
for st_dname in ts_dirs:
self.copy_data(root_path=root_path, data_type=st_dname)
def copy_data(self, root_path=os.getcwd(), data_type="train"):
try:
path = os.path.join(root_path, data_type)
if os.path.exists(path): shutil.rmtree(path)
os.makedirs(path)
ts_data = []
if data_type=="train":
ts_data = self._ts_train
elif data_type=="valid":
ts_data = self._ts_valid
elif data_type=="test":
ts_data = self._ts_test
# Copy files in respective directory
ts_formats = ["txt", "ana"]
for st_format in ts_formats:
for st_dir in ts_data:
f_spath = os.path.join(root_path, "%s.%s" %(st_dir, st_format))
f_dpath = os.path.join(root_path, data_type)
if os.path.isfile(f_spath):
shutil.copy(f_spath, f_dpath)
os.remove(f_spath)
        except Exception:
            print("[FATAL] creating data for validation was unsuccessful.")
            sys.exit(1)
| 2,511 |
client/tests_crypto.py
|
vrandkode/stuk-stuk
| 5 |
2023941
|
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
import zlib
import base64
import crypto
def Keys():
key = RSA.generate(2048, e=65537)
return key, key.publickey()
def GenerateKeys(prefix):
pri, pub = Keys()
private_key = pri.exportKey("PEM")
public_key = pub.exportKey("PEM")
print(private_key)
fd = open(".test/{0}_private.pem".format(prefix), "wb")
fd.write(private_key)
fd.close()
print(public_key)
fd = open(".test/{0}_public.pem".format(prefix), "wb")
fd.write(public_key)
fd.close()
public_ssh_key = pub.exportKey("OpenSSH")
print(public_ssh_key)
fd = open(".test/{0}_public.pub".format(prefix), "wb")
fd.write(public_ssh_key)
fd.close()
def PairKey(plain=False, size=2048):
""" Genera par de claves RSA """
key = RSA.generate(size)
return (key, key.publickey()) if plain else (key.exportKey("PEM"), key.publickey().exportKey('OpenSSH'))
def ed(token):
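    # NOTE: encrypt/decrypt below are assumed to be helpers from the local
    # crypto module (compare crypto.AE / crypto.AD used in tests()); they are
    # not imported or defined in this file as written.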
print(token, "encrypt->")
ciphertext = encrypt('.test/plataforma_public.pem', token)
print(ciphertext)
print(".decrypt->")
print(decrypt('.test/plataforma_private.pem', ciphertext))
print("------------")
def tests():
GenerateKeys("plataforma")
print("\n\n")
with open(".test/plataforma_public.pub", "rb") as f:
token = f.read()
print("len:",len(token))
ed(token)
print("=======================\n")
token2 = b"<EMAIL>.totp.ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmyEmBfjgnHvasVKDKm3z0qtgZPclcqfnKnZ95TgfTA/0OoayjdZz8p3xnR0cWeroaiDvBBVKy0lD2oj484h/mD/UkXLVHZBKYTEPklw3GU0nYteSVWU6a8Uht5OLzHU58QM7FtDyvFtqXJBeKVWhbqBn6SLNjaG1CoolkC+TNt5moRvKllp8jY1ohgek96qi1V+CBZVlJlfxRY8eCjcGN1wmsbM5WN7HmSZhfFw4hJYR3LTRSw/EVg/MtKofaOVl7Pr2i1I5Wj2aiHsKpjl8WF5g3L/5OIPEpskxhv42QeEhBCgT0R2f1DMQ7YS0noS3LUSsTKnPqjsqYnqd190AX"
ed(token2)
tok = b"<EMAIL> <PASSWORD>in<PASSWORD>.totp.ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmyEmBfjgnHvasVKDKm3z0qtgZPclcqfnKnZ95TgfTA/0OoayjdZz8p3xnR0cWeroaiDvBBVKy0lD2oj484h/mD/UkXLVHZBKYTEPklw3GU0nYteSVWU6a8Uht5OLzHU58QM7FtDyvFtqXJBeKVWhbqBn6SLNjaG1CoolkC+TNt5moRvKllp8jY1ohgek96qi1V+CBZVlJlfxRY8eCjcGN1wmsbM5WN7HmSZhfFw4hJYR3LTRSw/EVg/MtKofaOVl7Pr2i1I5Wj2aiHsKpjl8WF5g3L/5OIPEpskxhv42QeEhBCgT0R2f1DMQ7YS0noS3LUSsTKnPqjsqYnqd190AX"
encrypted = crypto.AE('.test/plataforma_public.pem', tok)
print(encrypted)
print(".....")
print(crypto.AD('.test/plataforma_private.pem',encrypted))
print(crypto.AD('.test/plataforma_private.pem',"<KEY>"))
#crypto.AD('.test/plataforma_private.pem',"<KEY>")
| 2,539 |
pandas-udf/udf-with-apache-arrow.py
|
FahaoTang/spark-examples
| 0 |
2026486
|
from pyspark import SparkConf
from pyspark.sql import SparkSession, Window
from pyspark.sql.types import ArrayType, StructField, StructType, StringType, IntegerType, DecimalType, FloatType
from pyspark.sql.functions import udf, collect_list, struct, explode, pandas_udf, PandasUDFType, col
from decimal import Decimal
import random
import pandas as pd
import numpy as np
appName = "Python Example - UDF with Apache Arrow (Pandas UDF)"
master = 'local'
# Create Spark session
conf = SparkConf().setMaster(master)
spark = SparkSession.builder.config(conf=conf) \
.getOrCreate()
# Enable Arrow optimization and fallback if there is no Arrow installed
spark.conf.set("spark.sql.execution.arrow.enabled", "true")
spark.conf.set("spark.sql.execution.arrow.fallback.enabled", "true")
# Construct the data frame directly (without reading from HDFS)
cust_count = 10
txn_count = 100
data = [(i, j, i * j * random.random() * random.choice((-1, 1)))
for j in range(txn_count) for i in range(cust_count)]
# Create a schema for the dataframe
schema = StructType([
StructField('CustomerID', IntegerType(), False),
StructField('TransactionID', IntegerType(), False),
StructField('Amount', FloatType(), True)
])
# Create the data frame
df = spark.createDataFrame(data, schema=schema)
# Function 1 - Scalar function - derive a new column whose value is Credit or Debit.
def calc_credit_debit_func(amount):
return pd.Series(["Credit" if a >= 0 else "Debit" for a in amount])
fn_credit_debit = pandas_udf(calc_credit_debit_func, returnType=StringType())
df = df.withColumn("CreditOrDebit", fn_credit_debit(df.Amount))
df.show()
# Function 2 - Group map function - calculate the difference from mean
attributes = [
StructField('CustomerID', IntegerType(), False),
StructField('TransactionID', IntegerType(), False),
StructField('Amount', FloatType(), False),
StructField('CreditOrDebit', StringType(), False),
StructField('Diff', FloatType(), False)
]
attribute_names = [a.name for a in attributes]
@pandas_udf(StructType(attributes), PandasUDFType.GROUPED_MAP)
def fn_calc_diff_from_mean(txn):
pdf = txn
amount = pdf.Amount
pdf = pdf.assign(Diff=amount - amount.mean())
return pdf
df_map = df.groupby("CustomerID").apply(fn_calc_diff_from_mean)
df_map.show(100)
# Function 3 - Group aggregate function - calculate mean only
@pandas_udf(FloatType(), PandasUDFType.GROUPED_AGG)
def mean_udf(amount):
return np.mean(amount)
df_agg = df.groupby("CustomerID").agg(mean_udf(df['Amount']).alias("Mean"))
df_agg.show()
# Function 4 - Group aggregate function - Windowing function
w = Window \
.partitionBy('CustomerID') \
.rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)
df.withColumn('Mean', mean_udf(df['Amount']).over(w)).show()
| 2,822 |
structy-practice-solutions/03-uncompress.py
|
RuthraVed/hackerearth-practice-solutions
| 0 |
2026477
|
"""
## Uncompress ##
Write a function, uncompress, that takes in a string as an argument.
The input string will be formatted into multiple groups
according to the following pattern:
<number><char>
For example, '2c' or '3a'.
The function should return an uncompressed version of the string
where each 'char' of a group is repeated 'number' times consecutively,
like 'ccc' or 'aaa'.
You may assume that the input string is well-formed according to the previously mentioned pattern.
"""
from timer_module import timer_func
@timer_func
def uncompress_my_way(s):
number_str = ''
uncompressed_result = ''
for char in s:
if char.isdigit():
# Keep saving the consecutive digits as string
number_str += char
else:
uncompressed_result += char*int(number_str)
# Reinitialize to store a new number
number_str = ''
return uncompressed_result
@timer_func
def uncompress_alvin_way(s):
numbers = '0123456789'
result = ""
i = j = 0
    # A for-loop is used since j needs to keep incrementing anyway
for j in range(0, len(s)):
if s[j] in numbers:
continue # j increments automatically
else:
result += s[j]*int(s[i:j])
i = j + 1 # Bringing i to j's position
return result
# --- Tests ---
test_input_values = [
"2c3a1t",
"4s2b",
"2p1o5p",
"3n12e2z",
"127y",
]
expected_results = [
'ccaaat',
'ssssbb',
'ppoppppp',
'nnneeeeeeeeeeeezz',
'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy',
]
def test_func_by_name(func):
for i in range(0,len(test_input_values)):
result = func(test_input_values[i]) # Test solution against every input
assert result == expected_results[i], \
            f'Expected result {expected_results[i]}, got: {result}'
        print(f'Test [{i}] passed with the correct result: {expected_results[i]}.')
test_func_by_name(uncompress_my_way)
test_func_by_name(uncompress_alvin_way)
| 2,109 |
main.py
|
mssalvador/NextProject
| 1 |
2026103
|
# the usual include statements
import os
import sys
import importlib
import pyspark
package_dict = {
'semisupervised.zip': './semisupervised', 'cleaning.zip': './cleaning',
'classification.zip': './classification', 'shared.zip': './shared', 'examples.zip': './examples'}
for zip_file, path in package_dict.items():
if os.path.exists(zip_file):
sys.path.insert(0, zip_file)
else:
sys.path.insert(0, path)
if __name__ == '__main__':
from shared.OwnArguments import OwnArguments
arguments = OwnArguments()
arguments.add_argument('--cluster_path', types=str, required=True, dest='cluster_path')
arguments.add_argument('--job', types=str, required=True, dest='job_name')
arguments.add_argument('--job_args', dest='job_args', nargs='*')
arguments.add_argument('--input_data', dest='input_data', types=str)
arguments.add_argument('--features', dest='features', types=str, nargs='*')
arguments.add_argument('--id', dest='id', types=str, nargs='*')
arguments.add_argument('--labels', dest='labels', types=str, nargs='*', required=False)
arguments.parse_arguments()
all_args = dict()
if arguments.job_args:
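        # job_args are expected as "key=value" tokens; e.g. --job_args alpha=0.1
        # would produce {'algo_params': {'alpha': '0.1'}} (illustrative values).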
all_args['algo_params'] = dict(arg.split('=') for arg in arguments.job_args)
all_args['input_data'] = arguments.input_data
all_args['features'] = arguments.features
all_args['id'] = arguments.id
all_args['labels'] = arguments.labels
# dtu_cluster_path = 'file:///home/micsas/workspace/distributions/dist_workflow'
# local_path = "file:/home/svanhmic/workspace/DABAI/Workflows/dist_workflow"
# visma_cluster_path = 'file:/home/ml/deployments/workflows'
py_files = ['/shared.zip', '/examples.zip', '/cleaning.zip', '/classification.zip', '/semisupervised.zip']
spark_conf = pyspark.SparkConf(loadDefaults=False)
(spark_conf
.set('spark.executor.cores', 4)
.set('spark.executor.memory', '1G')
.set('spark.executors', 2)
)
sc = pyspark.SparkContext(appName=arguments.job_name)
job_module = importlib.import_module('{:s}'.format(arguments.job_name))
# sc = pyspark.SparkContext(
# appName=arguments.job_name, pyFiles=[arguments.cluster_path+py_file for py_file in py_files], conf=spark_conf)
# job_module = importlib.import_module('{:s}'.format(arguments.job_name))
try:
data_frame = job_module.run(sc, **all_args)
# data_frame.printSchema()
# data_frame.show()
rdd = data_frame.toJSON() # .saveAsTextFile('hdfs:///tmp/cleaning.txt')
js = rdd.collect()
# print(js)
if arguments.job_name == 'cleaning':
print("""{"cluster":["""+','.join(js)+"""]}""")
elif arguments.job_name == 'classification':
print("""{"classification":[""" + ','.join(js) + """]}""")
elif arguments.job_name == 'semisupervised':
print("""{"semisuper":["""+ ','.join(js)+"""]}""")
except TypeError as te:
        print('Did not run', te) # make this more loggable...
| 3,030 |
Products/CMFDefault/tests/test_Favorite.py
|
zopefoundation/Products.CMFDefault
| 0 |
2026246
|
##############################################################################
#
# Copyright (c) 2001 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Unit tests for Favorites.
"""
import unittest
import Testing
from zope.component import getSiteManager
from zope.interface.verify import verifyClass
from zope.testing.cleanup import cleanUp
from Products.CMFCore.interfaces import IMembershipTool
from Products.CMFCore.interfaces import ISiteRoot
from Products.CMFCore.interfaces import IURLTool
from Products.CMFCore.testing import ConformsToContent
from Products.CMFCore.tests.base.dummy import DummyContent
from Products.CMFCore.tests.base.dummy import DummySite
from Products.CMFCore.tests.base.dummy import DummyTool
class FavoriteTests(ConformsToContent, unittest.TestCase):
def _getTargetClass(self):
from Products.CMFDefault.Favorite import Favorite
return Favorite
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def setUp(self):
self.site = DummySite('site')
sm = getSiteManager()
sm.registerUtility(self.site, ISiteRoot)
sm.registerUtility(DummyTool(), IMembershipTool)
sm.registerUtility(DummyTool().__of__(self.site), IURLTool)
self.site._setObject( 'target', DummyContent() )
def tearDown(self):
cleanUp()
def test_interfaces(self):
from Products.CMFDefault.interfaces import IFavorite
from Products.CMFDefault.interfaces import ILink
from Products.CMFDefault.interfaces import IMutableFavorite
from Products.CMFDefault.interfaces import IMutableLink
verifyClass(IFavorite, self._getTargetClass())
verifyClass(ILink, self._getTargetClass())
verifyClass(IMutableFavorite, self._getTargetClass())
verifyClass(IMutableLink, self._getTargetClass())
def test_Empty( self ):
utool = getSiteManager().getUtility(IURLTool)
f = self.site._setObject('foo', self._makeOne('foo'))
self.assertEqual( f.getId(), 'foo' )
self.assertEqual( f.Title(), '' )
self.assertEqual( f.Description(), '' )
self.assertEqual( f.getRemoteUrl(), utool() )
self.assertEqual( f.getObject(), self.site )
self.assertEqual( f.getIconURL(), self.site.getIconURL() )
self.assertEqual( f.icon(), '' )
def test_CtorArgs( self ):
utool = getSiteManager().getUtility(IURLTool)
target = self.site.target
self.assertEqual( self._makeOne( 'foo'
, title='Title'
).Title(), 'Title' )
self.assertEqual( self._makeOne( 'bar'
, description='Description'
).Description(), 'Description' )
baz = self.site._setObject('foo',
self._makeOne('baz', remote_url='target'))
self.assertEqual( baz.getObject(), target )
self.assertEqual( baz.getRemoteUrl(), '%s/target' % utool() )
self.assertEqual( baz.getIconURL(), target.getIconURL() )
self.assertEqual( baz.icon(), target.icon() )
def test_edit( self ):
utool = getSiteManager().getUtility(IURLTool)
target = self.site.target
f = self.site._setObject('foo', self._makeOne('foo'))
f.edit( 'target' )
self.assertEqual( f.getObject(), target )
self.assertEqual( f.getRemoteUrl(), '%s/target' % utool() )
self.assertEqual( f.getIconURL(), target.getIconURL() )
self.assertEqual( f.icon(), target.icon() )
def test_editEmpty( self ):
utool = getSiteManager().getUtility(IURLTool)
f = self.site._setObject('gnnn', self._makeOne('gnnn'))
f.edit( '' )
self.assertEqual( f.getObject(), self.site )
self.assertEqual( f.getRemoteUrl(), utool() )
self.assertEqual( f.getIconURL(), self.site.getIconURL() )
self.assertEqual( f.icon(), '' )
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(FavoriteTests),
))
| 4,586 |
NeuralStyleTransferSrc/content_image.py
|
vonlippmann/Deep-_Style_Transfer
| 2 |
2026522
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'content_image.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_ConImg(object):
def setupUi(self, ConImg):
ConImg.setObjectName("ConImg")
ConImg.resize(663, 561)
self.horizontalLayout = QtWidgets.QHBoxLayout(ConImg)
self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setSpacing(0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label_8 = QtWidgets.QLabel(ConImg)
self.label_8.setStyleSheet("QLabel{;\n"
"image: url(:/content/content_image/taj_mahal.jpg);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label_8.setText("")
self.label_8.setObjectName("label_8")
self.gridLayout_2.addWidget(self.label_8, 0, 2, 1, 1)
self.label_10 = QtWidgets.QLabel(ConImg)
self.label_10.setObjectName("label_10")
self.gridLayout_2.addWidget(self.label_10, 3, 0, 1, 1, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label_4 = QtWidgets.QLabel(ConImg)
self.label_4.setObjectName("label_4")
self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label_9 = QtWidgets.QLabel(ConImg)
self.label_9.setStyleSheet("QLabel{;\n"
" image: url(:/content/content_image/tubingen.jpg);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label_9.setText("")
self.label_9.setObjectName("label_9")
self.gridLayout_2.addWidget(self.label_9, 2, 0, 1, 1)
self.label_7 = QtWidgets.QLabel(ConImg)
self.label_7.setStyleSheet("QLabel{;\n"
"image: url(:/content/content_image/new_york.png);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label_7.setText("")
self.label_7.setObjectName("label_7")
self.gridLayout_2.addWidget(self.label_7, 0, 1, 1, 1)
self.label_2 = QtWidgets.QLabel(ConImg)
self.label_2.setAlignment(QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label_2.setObjectName("label_2")
self.gridLayout_2.addWidget(self.label_2, 3, 1, 1, 1)
self.label_3 = QtWidgets.QLabel(ConImg)
self.label_3.setStyleSheet("QLabel{;\n"
"image: url(:/content/content_image/garden.png);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label_3.setText("")
self.label_3.setObjectName("label_3")
self.gridLayout_2.addWidget(self.label_3, 0, 0, 1, 1)
self.label_6 = QtWidgets.QLabel(ConImg)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_6.sizePolicy().hasHeightForWidth())
self.label_6.setSizePolicy(sizePolicy)
self.label_6.setMaximumSize(QtCore.QSize(200, 50))
self.label_6.setObjectName("label_6")
self.gridLayout_2.addWidget(self.label_6, 1, 2, 1, 1, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label = QtWidgets.QLabel(ConImg)
self.label.setStyleSheet("QLabel{image: url(:/content/content_image/3.jpeg);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label.setText("")
self.label.setAlignment(QtCore.Qt.AlignBottom|QtCore.Qt.AlignHCenter)
self.label.setObjectName("label")
self.gridLayout_2.addWidget(self.label, 2, 1, 1, 1)
self.label_5 = QtWidgets.QLabel(ConImg)
self.label_5.setObjectName("label_5")
self.gridLayout_2.addWidget(self.label_5, 1, 1, 1, 1, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label_11 = QtWidgets.QLabel(ConImg)
self.label_11.setStyleSheet("QLabel{\n"
" image: url(:/content/content_image/04.jpeg);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label_11.setText("")
self.label_11.setObjectName("label_11")
self.gridLayout_2.addWidget(self.label_11, 2, 2, 1, 1)
self.label_12 = QtWidgets.QLabel(ConImg)
self.label_12.setObjectName("label_12")
self.gridLayout_2.addWidget(self.label_12, 3, 2, 1, 1, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label_13 = QtWidgets.QLabel(ConImg)
self.label_13.setStyleSheet("QLabel{\n"
" image: url(:/content/content_image/lion.jpg);\n"
"border:1px solid;\n"
" border-color: rgba(255, 255, 255,0);\n"
"}\n"
"QLabel:hover{border:2px solid;\n"
"border-color:\"blue\"}")
self.label_13.setText("")
self.label_13.setObjectName("label_13")
self.gridLayout_2.addWidget(self.label_13, 4, 0, 1, 1)
self.label_16 = QtWidgets.QLabel(ConImg)
self.label_16.setObjectName("label_16")
self.gridLayout_2.addWidget(self.label_16, 5, 0, 1, 1, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.gridLayout_2.setRowStretch(0, 10)
self.gridLayout_2.setRowStretch(1, 2)
self.gridLayout_2.setRowStretch(2, 10)
self.gridLayout_2.setRowStretch(3, 2)
self.gridLayout_2.setRowStretch(4, 10)
self.gridLayout_2.setRowStretch(5, 2)
self.verticalLayout.addLayout(self.gridLayout_2)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setContentsMargins(-1, 10, -1, -1)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem)
self.pushButton_2 = QtWidgets.QPushButton(ConImg)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout_2.addWidget(self.pushButton_2)
self.pushButton = QtWidgets.QPushButton(ConImg)
self.pushButton.setObjectName("pushButton")
self.horizontalLayout_2.addWidget(self.pushButton)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout.addLayout(self.verticalLayout)
self.retranslateUi(ConImg)
self.pushButton_2.clicked.connect(ConImg.accept)
self.pushButton.clicked.connect(ConImg.reject)
QtCore.QMetaObject.connectSlotsByName(ConImg)
def retranslateUi(self, ConImg):
_translate = QtCore.QCoreApplication.translate
ConImg.setWindowTitle(_translate("ConImg", "Dialog"))
self.label_10.setText(_translate("ConImg", "tubingen"))
self.label_4.setText(_translate("ConImg", "Garden"))
self.label_2.setText(_translate("ConImg", "rhinoceros"))
self.label_6.setText(_translate("ConImg", "taj_mahal"))
self.label_5.setText(_translate("ConImg", "NewYork"))
self.label_12.setText(_translate("ConImg", "car"))
self.label_16.setText(_translate("ConImg", "lions"))
self.pushButton_2.setText(_translate("ConImg", "OK"))
self.pushButton.setText(_translate("ConImg", "CANCEL"))
import picture_rc
| 7,886 |
Scripts/003_hackerrank/Python/p010.py
|
OrangePeelFX/Python-Tutorial
| 0 |
2026587
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Problem 010
Finding the percentage
Source : https://www.hackerrank.com/challenges/finding-the-percentage/problem
"""
import sys
def debug (msg):
print(msg, file=sys.stderr)
return
n = int(input())
student_marks = {}
for _ in range(n):
name, *line = input().split()
scores = list(map(float, line))
student_marks[name] = scores
query_name = input()
marks = student_marks[query_name]
avg = sum(marks)/len(marks)
print("{:.2f}".format(avg))
| 514 |
coding/learn_pyspark/basic_concept/dataframe_demo.py
|
yatao91/learning_road
| 3 |
2026567
|
# -*- coding: utf-8 -*-
from pyspark.sql import SparkSession
spark = SparkSession.builder.master("spark://spark001:7077").appName("create_dataframe_demo").getOrCreate()
df = spark.read.json("people.json")
"""
# Show the dataframe contents
df.show()
# Print the dataframe schema as a tree
df.printSchema()
# Select the "name" column
df.select("name").show()
# Add one to every value in the "age" column
df.select(df["name"], df["age"] + 1).show()
# Filter rows where age is greater than 21
df.filter(df["age"] > 21).show()
# Group by age and count
df.groupBy("age").count().show()
# Register the dataframe as a temporary SQL view: it is session-scoped and disappears when the session that created it ends
df.createOrReplaceTempView("people")
sqlDF = spark.sql("SELECT * FROM people")
sqlDF.show()
# Register the dataframe as a global temporary view
df.createGlobalTempView("people")
# Global temporary views are bound to the system-reserved database `global_temp`
spark.sql("SELECT * FROM global_temp.people").show()
# Global temporary views can be used across sessions
spark.newSession().sql("SELECT * FROM global_temp.people").show()
"""
| 873 |
src/controls/KeyboardMouseControl.py
|
NEKERAFA/Soul-Tower
| 0 |
2025950
|
import pygame
import math as m
from pygame.locals import *
from src.ControlManager import *
from src.scenes.Scene import *
class KeyboardMouseControl(ControlManager):
upButton = K_w
downButton = K_s
leftButton = K_a
rightButton = K_d
secButton = K_SPACE
selectButton = K_e
actionButton = K_q
@classmethod
def up(cls):
return pygame.key.get_pressed()[cls.upButton]
@classmethod
def down(cls):
return pygame.key.get_pressed()[cls.downButton]
@classmethod
def left(cls):
return pygame.key.get_pressed()[cls.leftButton]
@classmethod
def right(cls):
return pygame.key.get_pressed()[cls.rightButton]
@classmethod
def angle(cls, pos):
(playerX, playerY) = pos
(mouseX, mouseY) = pygame.mouse.get_pos()
        # Scale the mouse position down by SCALE_FACTOR
mouseX /= SCALE_FACTOR
mouseY /= SCALE_FACTOR
ang = m.degrees(m.atan2(playerY - mouseY, mouseX - playerX))
return ang
@classmethod
def prim_button(cls):
return pygame.mouse.get_pressed()[0]
@classmethod
def sec_button(cls):
return pygame.key.get_pressed()[cls.secButton]
@classmethod
def action_button(cls):
return pygame.key.get_pressed()[cls.actionButton]
@classmethod
def select_button(cls):
return pygame.key.get_pressed()[cls.selectButton]
@classmethod
def set_key_up(cls, newKey):
cls.upButton = newKey
@classmethod
def set_key_down(cls, newKey):
cls.downButton = newKey
@classmethod
def set_key_left(cls, newKey):
cls.leftButton = newKey
@classmethod
def set_key_right(cls, newKey):
cls.rightButton = newKey
@classmethod
def set_key_select(cls, newKey):
cls.selectButton = newKey
@classmethod
def set_key_action(cls, newKey):
cls.actionButton = newKey
| 1,977 |
model/detection_model/maskscoring_rcnn/test_net.py
|
JinGyeSetBirdsFree/FudanOCR
| 25 |
2026208
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Set up custom environment before nearly anything else is imported
# NOTE: this should be the first import (do not reorder)
from maskrcnn_benchmark.utils.env import setup_environment # noqa F401 isort:skip
import argparse
import os
import json
import tempfile
import numpy as np
import torch
from maskrcnn_benchmark.config import cfg
from maskrcnn_benchmark.data import make_data_loader
from maskrcnn_benchmark.engine.inference import inference
from maskrcnn_benchmark.modeling.detector import build_detection_model
from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer
from maskrcnn_benchmark.utils.collect_env import collect_env_info
from maskrcnn_benchmark.utils.comm import synchronize, get_rank
from maskrcnn_benchmark.utils.logger import setup_logger
from maskrcnn_benchmark.utils.miscellaneous import mkdir
from maskrcnn_benchmark.engine.extra_utils import coco_results_to_contest, mask_nms
from maskrcnn_benchmark.utils.imports import import_file
def main():
parser = argparse.ArgumentParser(description="PyTorch Object Detection Inference")
parser.add_argument(
"--config-file",
default="configs/e2e_ms_rcnn_R_50_FPN_1x.yaml",
metavar="FILE",
help="path to config file",
)
parser.add_argument("--local_rank", type=int, default=0)
parser.add_argument(
"opts",
help="Modify config options using the command-line",
default=None,
nargs=argparse.REMAINDER,
)
args = parser.parse_args()
num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
distributed = num_gpus > 1
if distributed:
torch.cuda.set_device(args.local_rank)
torch.distributed.deprecated.init_process_group(
backend="nccl", init_method="env://"
)
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
save_dir = ""
logger = setup_logger("maskrcnn_benchmark", save_dir, get_rank())
logger.info("Using {} GPUs".format(num_gpus))
logger.info(cfg)
logger.info("Collecting env info (might take some time)")
logger.info("\n" + collect_env_info())
model = build_detection_model(cfg)
model.to(cfg.MODEL.DEVICE)
output_dir = cfg.OUTPUT_DIR
checkpointer = DetectronCheckpointer(cfg, model, save_dir=output_dir)
_ = checkpointer.load(cfg.MODEL.WEIGHT)
iou_types = ("bbox",)
if cfg.MODEL.MASK_ON:
iou_types = iou_types + ("segm",)
output_folders = [None] * len(cfg.DATASETS.TEST)
if cfg.OUTPUT_DIR:
dataset_names = cfg.DATASETS.TEST
for idx, dataset_name in enumerate(dataset_names):
output_folder = os.path.join(cfg.OUTPUT_DIR, "inference", dataset_name)
mkdir(output_folder)
output_folders[idx] = output_folder
data_loaders_val = make_data_loader(cfg, is_train=False, is_distributed=distributed)
for output_folder, data_loader_val in zip(output_folders, data_loaders_val):
_, coco_results, _ = inference(
model,
data_loader_val,
iou_types=iou_types,
box_only=cfg.MODEL.RPN_ONLY,
device=cfg.MODEL.DEVICE,
expected_results=cfg.TEST.EXPECTED_RESULTS,
expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL,
output_folder=output_folder,
maskiou_on=cfg.MODEL.MASKIOU_ON
)
synchronize()
#############################
# post-processing
#############################
paths_catalog = import_file(
"maskrcnn_benchmark.config.paths_catalog", cfg.PATHS_CATALOG, True
)
DatasetCatalog = paths_catalog.DatasetCatalog
output_results, bbox_results = coco_results_to_contest(coco_results)
if cfg.TEST.VIZ:
gt_path = os.path.join(DatasetCatalog.DATA_DIR,
DatasetCatalog.DATASETS[cfg.DATASETS.TEST[0]][1])
with open(gt_path, 'r') as f:
gt_results = json.load(f)
# mask_nms
mmi_thresh = 0.3
conf_thresh = 0.5 # 0.4
for idx, (key, result) in enumerate(output_results.items()):
print("[ {} ]/[ {} ]".format(idx+1, len(output_results)))
output_results[key] = mask_nms(result, result[0]['size'], mmi_thres=mmi_thresh, conf_thres=conf_thresh)
# viz
if cfg.TEST.VIZ:
import cv2
if not os.path.exists(cfg.VIS_DIR):
os.mkdir(cfg.VIS_DIR)
img_dir = os.path.join(DatasetCatalog.DATA_DIR,
DatasetCatalog.DATASETS[cfg.DATASETS.TEST[0]][0])
img = cv2.imread(os.path.join(img_dir, key.replace('res', 'gt')+'.jpg'))
gt_img = img.copy()
for rect in bbox_results[key]:
if rect['confidence'] > conf_thresh:
pred_pts = rect['points']
img = cv2.polylines(img, [np.array(pred_pts).astype(np.int32)], True, (0, 255, 0), 3)
for poly in output_results[key]:
pred_pts = poly['points']
img = cv2.polylines(img, [np.array(pred_pts).astype(np.int32)], True, (0, 0, 255), 2)
for rect in bbox_results[key]:
if rect['confidence'] > conf_thresh:
pred_pts = rect['points']
img = cv2.putText(img, '{:.4f}'.format(rect['confidence']), (pred_pts[0][0], pred_pts[0][1]),
cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 0), 2, cv2.LINE_AA)
img = cv2.putText(img, '{:.4f}'.format(rect['confidence']), (pred_pts[0][0], pred_pts[0][1]),
cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 1, cv2.LINE_AA)
for gt_poly in gt_results[key.replace('res', 'gt')]['polygons']:
gt_pts = gt_poly['points']
if gt_poly['illegibility']:
gt_img = cv2.polylines(gt_img, [np.array(gt_pts).astype(np.int32)], True, (0, 255, 0), 2)
else:
gt_img = cv2.polylines(gt_img, [np.array(gt_pts).astype(np.int32)], True, (0, 0, 255), 2)
img_show = np.concatenate([img, gt_img], axis=1)
cv2.imwrite(os.path.join(cfg.VIS_DIR, key.replace('res', 'gt')+'.jpg'), img_show)
with tempfile.NamedTemporaryFile() as f:
file_path = f.name
if output_folder:
file_path = os.path.join(output_folder, "result.json")
bbox_file_path = os.path.join(output_folder, "bbox_result.json")
with open(file_path, "w") as json_f:
json.dump(output_results, json_f)
with open(bbox_file_path, "w") as json_ff:
json.dump(bbox_results, json_ff)
if __name__ == "__main__":
main()
| 6,842 |
src/class_2/guess_game.py
|
byteEpoch/python_course
| 1 |
2025844
|
# Example of using while, if/elif/else, random.randint and sys.exit.
# A game of guessing a random number between 1 and 100.
import random
import sys
guess_number = random.randint(1, 100)
while True:
    num = int(input('Give me a number from 1 to 100: '))
    if num < guess_number:
        print('The number is higher.')
    elif num > guess_number:
        print('The number is lower.')
    else:
        print('You won!')
sys.exit()
| 468 |
helpers/sett/simulation/SimulationManager.py
|
EchoDao-BSC/badger-system
| 0 |
2026101
|
import time
import random
from brownie import accounts
from enum import Enum
from rich.console import Console
from scripts.systems.badger_system import BadgerSystem
from helpers.sett.SnapshotManager import SnapshotManager
from .provisioners import (
BaseProvisioner,
DiggRewardsProvisioner,
DiggLpMetaFarmProvisioner,
SushiDiggWbtcLpOptimizerProvisioner,
)
from .actors import (
UserActor,
SettKeeperActor,
StrategyKeeperActor,
ChainActor,
DiggActor,
)
console = Console()
# Provision num users for sim.
NUM_USERS = 10
class SimulationManagerState(Enum):
IDLE = 0
PROVISIONED = 1
RANDOMIZED = 2
RUNNING = 3
# SimulationManager is meant to be initialized per test and run once.
class SimulationManager:
def __init__(
self,
badger: BadgerSystem,
snap: SnapshotManager,
settId: str,
seed: int = 0, # Default seed is 0 or unset, will generate.
):
self.accounts = accounts[6:] # Use the 7th account onwards.
# User accounts (need to be provisioned before running sim).
self.users = []
self.badger = badger
self.snap = snap
self.sett = badger.getSett(settId)
self.strategy = badger.getStrategy(settId)
self.want = badger.getStrategyWant(settId)
self.settKeeper = accounts.at(self.sett.keeper(), force=True)
self.strategyKeeper = accounts.at(self.strategy.keeper(), force=True)
# Actors generate valid actions based on the actor type. For example,
# user actors need to have deposited first before they can withdraw
# (withdraw before deposit is an invalid action).
self.actors = [
SettKeeperActor(self, self.settKeeper),
StrategyKeeperActor(self, self.strategyKeeper),
DiggActor(self, self.badger.deployer),
ChainActor(),
]
# Ordered valid actions generated by actors.
self.actions = []
self.state = SimulationManagerState.IDLE
# Track seed so we can configure this value if we want to repro test failures.
self.seed = seed
if self.seed == 0:
self.seed = int(time.time())
console.print(f"initialized simulation manager with seed: {self.seed}")
random.seed(self.seed)
self.provisioner = self._initProvisioner(self.strategy.getName())
def provision(self) -> None:
if self.state != SimulationManagerState.IDLE:
raise Exception(f"invalid state: {self.state}")
accountsUsed = set([])
while len(self.users) < NUM_USERS:
idx = int(random.random()*len(self.accounts))
if idx in accountsUsed:
continue
self.users.append(self.accounts[idx])
accountsUsed.add(idx)
self.provisioner._distributeTokens(self.users)
self.provisioner._distributeWant(self.users)
self._provisionUserActors()
console.print(f"provisioned {len(self.users)} users {len(self.actors)} actors")
self.state = SimulationManagerState.PROVISIONED
def randomize(self, numActions: int) -> None:
if self.state != SimulationManagerState.PROVISIONED:
raise Exception(f"invalid state: {self.state}")
for i in range(0, numActions):
# Pick a random actor and generate an action.
idx = int(random.random() * len(self.actors))
self.actions.append(self.actors[idx].generateAction())
console.print(f"randomized {numActions} actions")
self.state = SimulationManagerState.RANDOMIZED
def run(self) -> None:
if self.state != SimulationManagerState.RANDOMIZED:
raise Exception(f"invalid state: {self.state}")
self.state = SimulationManagerState.RUNNING
console.print(f"running {len(self.actions)} actions")
for action in self.actions:
action.run()
def _initProvisioner(self, name) -> BaseProvisioner:
if name == "StrategyDiggRewards":
return DiggRewardsProvisioner(self)
if name == "StrategyDiggLpMetaFarm":
return DiggLpMetaFarmProvisioner(self)
if name == "StrategySushiDiggWbtcLpOptimizer":
return SushiDiggWbtcLpOptimizerProvisioner(self)
raise Exception(f"invalid strategy name (no provisioner): {name}")
def _provisionUserActors(self) -> None:
        # Add all users as actors to the sim.
for user in self.users:
self.actors.append(UserActor(self, user))
| 4,553 |
b_lambda_layer_common_test/unit/ssm/dummy_ssm_client.py
|
gMatas/B.LambdaLayerCommon
| 2 |
2025989
|
from itertools import cycle
class DummySsmClient:
def __init__(self):
self.__dummy_params = cycle([
{
'Name': 'TestParameter',
'Type': 'String',
'Value': 'StringValue1',
'Version': 10,
},
{
'Name': 'TestParameter',
'Type': 'StringList',
'Value': 'StringValue2',
'Version': 20,
},
{
'Name': 'TestParameter',
'Type': 'SecureString',
'Value': 'StringValue3',
'Version': 30,
},
])
self.get_parameters_function_calls = 0
def get_parameters(self, *args, **kwargs):
self.get_parameters_function_calls += 1
return {
'Parameters': [next(self.__dummy_params)],
'InvalidParameters': []
}
| 926 |
mtg_qe/data/internal_index_integration.py
|
s-i-dunn-wsu/cs483_proj
| 0 |
2025943
|
# <NAME>
# CS 483, Fall 2019
def get_internal_index():
"""
Returns the 'internal index' for the project.
The internal index is a pair of dicts that arrange
card objects in easy-to-use outside of whoosh ways.
    The first dict, keyed by 'by_name', stores card instances
with a unique name. The second dict, keyed by 'by_multiverseid',
stores all cards (all cards scraped) by their multiverseid.
Between the two, any time we have a result from whoosh and need
to navigate to another card or print, we should be covered.
"""
    # there's a fun trick to 'hiding', or rather obscuring, things in module namespace.
# namespaces, and instance attributes, ultimately boil down
# to a dict *somewhere*. With the local module, you can
# retrieve this dict with the globals() function.
# If you have a global variable named, say, `foo`, then
# the globals() dict will have a key in it "foo".
# While creating variables requires you to adhere to naming
    # conventions, dicts can be keyed by any hashable (so all strings).
# This means you can obscure things in the module's namespace by
# using an illegal variable name (something like, say, "!!my_obscured_var")
# This makes it impossible to reference the value in a usual way
# while still being able to access it via that dict.
# I say 'obscure' instead of 'hide' because anyone who introspects
# the dict will undoubtedly see its presence, but in my experience
# it tends to fool IDEs and the like.
# To compound with this, modules are really just objects.
# we can store things in that object and trust that its still there
# so long as the module is alive, or at least not reloaded.
# this allows us to achieve singleton-like behavior, as a module
# will typically only be loaded once unless there's some shenanigans afoot.
# So to ensure that the large internal_index.json file is only parsed once
# we'll combine these two tools.
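    # A minimal sketch of the trick described above (the '!!hidden' key is a
    # made-up illustration, not part of this module):
    #   globals()['!!hidden'] = 42    # not reachable as a normal variable name
    #   globals().get('!!hidden')     # -> 42, the only practical way back in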
    if globals().get('!!_idx_dict', None) is None:
# load the json file into globals()['!!_idx_dict']
from . import get_data_location
from ..model.card import Card
import os
import json
with open(os.path.join(get_data_location(), 'internal_index.json')) as fd:
deflated_cards = json.load(fd)
# inflate all cards in the data set.
inflated_cards = {'by_name': {}, 'by_multiverseid': {}}
        for top_level_key in inflated_cards: # there are non-card fields in internal_index, so iter over inflated_cards
for key, value in deflated_cards[top_level_key].items():
inflated_cards[top_level_key][key] = Card().deserialize(value)
# push the inflated cards into the dict with non-card fields.
deflated_cards.update(inflated_cards)
# Set the global value:
globals()['!!_idx_dict'] = deflated_cards
return globals().get('!!_idx_dict')
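# Hedged usage sketch (the dict keys come from the docstring above; the card
# name and multiverseid below are invented for illustration):
#   idx = get_internal_index()
#   card = idx['by_name']['Some Card Name']        # lookup by unique name
#   reprint = idx['by_multiverseid']['12345']      # lookup by multiverseid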
| 2,969 |
tests/debugger_protocol/arg/_common.py
|
rozuur/ptvsd
| 0 |
2025325
|
from debugger_protocol.arg import ANY, FieldsNamespace, Field
FIELDS_BASIC = [
Field('name'),
Field.START_OPTIONAL,
Field('value'),
]
BASIC_FULL = {
'name': 'spam',
'value': 'eggs',
}
BASIC_MIN = {
'name': 'spam',
}
class Basic(FieldsNamespace):
FIELDS = FIELDS_BASIC
FIELDS_EXTENDED = [
Field('name', datatype=str, optional=False),
Field('valid', datatype=bool, optional=True),
Field('id', datatype=int, optional=False),
Field('value', datatype=ANY, optional=True),
Field('x', datatype=Basic, optional=True),
Field('y', datatype={int, str}, optional=True),
Field('z', datatype=[Basic], optional=True),
]
EXTENDED_FULL = {
'name': 'spam',
'valid': True,
'id': 10,
'value': None,
'x': BASIC_FULL,
'y': 11,
'z': [
BASIC_FULL,
BASIC_MIN,
],
}
EXTENDED_MIN = {
'name': 'spam',
'id': 10,
}
| 907 |
Aula 39 JSON Parte 4/Aula 39.py
|
JadilsonJR/Python
| 0 |
2026476
|
import json
# Load an external JSON file
with open('E:/Documents/GitHub_Projetos/Python/Python/Aula 39 JSON Parte 4/jogador.json') as f:
jogador=json.load(f)
# Print specific items of a dictionary nested inside another dictionary
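# Assumed shape of jogador.json for the loop below (illustrative only):
#   {"Aeronaves": [{"tipo": "...", "habilidade": "..."}, ...]}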
for a in jogador["Aeronaves"]:
print (a["tipo"], " - " ,a["habilidade"])
| 313 |
bookshop/books/management/commands/build_data.py
|
paul-wolf/djaq
| 48 |
2024979
|
import sys
import traceback
from django.core.management.base import BaseCommand, CommandError
from books.data_factory import build_data
class Command(BaseCommand):
help = "Build data"
def add_arguments(self, parser):
parser.add_argument(
"--book-count", default=1000, action="store", dest="book_count", type=int
)
def handle(self, *args, **options):
build_data(book_count=options.get("book_count"))
| 454 |
data.py
|
ua-snap/daily-precip-dash
| 1 |
2025499
|
# import urllib.parse
import pandas as pd
import datetime
def fetch_data(community):
"""
    Reads data from the ACIS API for the selected community.
"""
# Placeholder for early dev/explore work.
# This code works for fetching from the API.
# TODO we must cache the results for a community here,
# so that we're at least not hitting the API up twice
# for every page load / location change.
# https://beaker.readthedocs.io/en/latest/index.html or similar.
# query = urllib.parse.urlencode(
# {"sid": "26451", "sdate": "1950-01-01", "edate": "2020-04-20", "elems": "4,10", "output": "csv"}
# )
# api_url = "http://data.rcc-acis.org/StnData?"
# std = pd.read_csv(api_url + query, names=["date", "pcpt", "snow"], parse_dates=True, skiprows=1)
# std = std.loc[std.pcpt != "M"] # drop missing
# std = std.loc[std.snow != "M"] # drop missing
# std = std.replace("T", 0) # make T (Trace) = 0
# std.to_csv("data/anchorage.csv")
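    # A possible caching sketch for the TODO above (uses functools from the
    # standard library as a stand-in; beaker or similar would also work):
    #   from functools import lru_cache
    #   @lru_cache(maxsize=None)
    #   def fetch_data(community): ...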
std = pd.read_csv("data/anchorage.csv")
std = std.loc[std.pcpt > 0]
std["date"] = pd.to_datetime(std["date"])
std["doy"] = std["date"].apply(lambda d: d.strftime("%j")).astype("int")
std["year"] = std["date"].apply(lambda d: d.strftime("%Y")).astype("int")
std["total"] = std["pcpt"] + std["snow"]
return std
| 1,328 |