max_stars_repo_path
stringlengths 4
182
| max_stars_repo_name
stringlengths 6
116
| max_stars_count
int64 0
191k
| id
stringlengths 7
7
| content
stringlengths 100
10k
| size
int64 100
10k
|
---|---|---|---|---|---|
almoxarifado/migrations/0001_initial.py
|
rvmoura96/projeto-almoxarifado
| 1 |
2172191
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-29 14:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the ``Equipamento`` table."""

    # First migration of the app: nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Equipamento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tipo', models.CharField(max_length=200)),
                ('modelo', models.CharField(max_length=170)),
                ('ativo_imobilizado', models.PositiveIntegerField()),
                ('serial_number', models.CharField(max_length=30)),
                ('status', models.CharField(max_length=12)),
                ('data_retirada', models.DateTimeField()),
                # Delivery timestamp defaults to "now" at row-creation time.
                ('data_entrega', models.DateTimeField(default=django.utils.timezone.now)),
                ('localizacao', models.CharField(max_length=150)),
                ('observacoes', models.TextField()),
            ],
        ),
    ]
| 1,148 |
src/forest/common/proc_utils.py
|
ADVRHumanoids/forest
| 0 |
2170818
|
import subprocess
import sys
import getpass
import re
from forest.common import print_utils
# Module-wide switch: when True, every call echoes its command line and
# streams child output straight to the terminal.
call_process_verbose = False


def call_process(args, cwd='.', input=None, verbose=False, print_on_error=True, shell=False) -> bool:
    """Run a subprocess and report success as a boolean.

    In verbose mode the child inherits stdout/stderr and the return code is
    echoed; otherwise output is captured and written to the shared log file,
    and (on failure) optionally to stderr.
    """
    # Tolerate non-string arguments such as Path objects or ints.
    args = [str(a) for a in args]

    chatty = verbose or call_process_verbose
    if chatty:
        if shell:
            print(f'calling shell with command "{args}"')
        else:
            print('calling "{}"'.format(' '.join(args)))

    # A shell command line needs an actual shell binary to interpret it.
    executable = '/bin/bash' if shell else None

    if chatty:
        # Verbose path: no capture, output goes straight to the terminal.
        completed = subprocess.run(args=args, cwd=cwd, input=input,
                                   shell=shell, executable=executable)
        print(f'returned {completed.returncode}')
        return completed.returncode == 0

    try:
        completed = subprocess.run(args=args,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   cwd=cwd,
                                   input=input,
                                   shell=shell,
                                   executable=executable,
                                   check=True)
    except subprocess.CalledProcessError as err:
        # Failure: log the combined stdout+stderr, optionally echo it.
        print_utils.log_file.write(err.output.decode())
        if print_on_error and not verbose:
            print(err.output.decode(), file=sys.stderr)
        return False

    print_utils.log_file.write(completed.stdout.decode())
    return True
def get_output(args, cwd='.', input=None, verbose=False, print_on_error=True, shell=False):
    """Run a command and return its decoded, stripped stdout, or None on failure.

    On failure both the captured stdout and stderr are printed to this
    process's stderr (when print_on_error is set).
    """
    if verbose or call_process_verbose:
        if shell:
            print(f'calling shell with command "{args}"')
        else:
            print('calling "{}"'.format(' '.join(args)))
    try:
        # BUG FIX: capture stderr explicitly. Without stderr=PIPE,
        # CalledProcessError.stderr is None and the handler below crashed
        # with AttributeError instead of reporting the failure.
        out = subprocess.check_output(args=args, cwd=cwd, input=input,
                                      shell=shell, stderr=subprocess.PIPE)
        ret = out.decode().strip()
        if verbose or call_process_verbose:
            # When shell=True, args is a string: joining it would insert a
            # space between every character, so print it as-is.
            cmd = args if shell else ' '.join(args)
            print('calling "{}" returned "{}"'.format(cmd, ret))
        return ret
    except subprocess.CalledProcessError as e:
        # on error, print output and errors
        if print_on_error:
            print('stdout: ' + e.output.decode(), file=sys.stderr)
            print('stderr: ' + (e.stderr.decode() if e.stderr else ''), file=sys.stderr)
        return None
| 2,521 |
clastic/sinter.py
|
kurtbrose/clastic
| 1 |
2171419
|
# -*- coding: utf-8 -*-
import re
import types
import inspect
from inspect import ArgSpec
# Debug switches used by compile_chain/build_chain_str.
# NOTE(review): extraction may have collapsed whitespace in _INDENT; it is
# presumably several spaces in the original -- confirm against upstream.
_VERBOSE = False
_INDENT = ' '


def getargspec(f):
    """Return an ``inspect.ArgSpec`` for *f*, unwrapping callable objects.

    NOTE(review): Python 2 era code -- relies on ``inspect.getargspec``
    and ``basestring``, both removed in Python 3.
    """
    # TODO: support partials
    if not inspect.isfunction(f) and not inspect.ismethod(f) \
            and hasattr(f, '__call__'):
        # A callable object: honour a pre-computed ``_argspec`` if present,
        # otherwise inspect its __call__.
        if isinstance(getattr(f, '_argspec', None), ArgSpec):
            return f._argspec
        f = f.__call__  # callable objects
    if isinstance(getattr(f, '_argspec', None), ArgSpec):
        return f._argspec  # we'll take your word for it; good luck, lil buddy.
    ret = inspect.getargspec(f)
    if not all([isinstance(a, basestring) for a in ret.args]):
        raise TypeError('does not support anonymous tuple arguments '
                        'or any other strange args for that matter.')
    if isinstance(f, types.MethodType):
        ret = ret._replace(args=ret.args[1:])  # throw away "self"
    return ret
def get_arg_names(f, only_required=False):
    """Return *f*'s positional parameter names as a tuple.

    With ``only_required=True``, names that carry default values are dropped.
    """
    names, _, _, default_vals = getargspec(f)
    if only_required and default_vals:
        names = names[:-len(default_vals)]
    return tuple(names)
def inject(f, injectables):
    """Call *f*, supplying arguments by name from *injectables*.

    Defaults declared on *f* act as fallbacks; if *f* accepts ``**kwargs``
    every injectable is passed through unfiltered.
    """
    arg_names, _, kw_name, defaults = getargspec(f)
    # Pair the trailing arg names with their defaults.
    # NOTE(review): ``reversed(zip(...))`` requires Python 2, where zip
    # returns a list rather than an iterator.
    defaults = dict(reversed(zip(reversed(arg_names),
                                 reversed(defaults or []))))
    all_kwargs = dict(defaults)
    all_kwargs.update(injectables)
    if kw_name:
        # f accepts **kwargs: hand over everything we have.
        return f(**all_kwargs)
    # Otherwise only pass names f actually declares.
    kwargs = dict([(k, v) for k, v in all_kwargs.items() if k in arg_names])
    return f(**kwargs)
def chain_argspec(func_list, provides):
    """Walk a middleware chain computing required vs. optional parameters.

    Returns ``(required, optional)`` name sets: parameters some function
    needs but no earlier function provides, and parameters that carry
    defaults somewhere in the chain.
    """
    provided_sofar = set(['next'])  # 'next' is an extremely special case
    optional_sofar = set()
    required_sofar = set()
    for f, p in zip(func_list, provides):
        # middlewares can default the same parameter to different values;
        # can't properly keep track of default values
        arg_names, _, _, defaults = getargspec(f)
        # Split names into un-defaulted (leading) and defaulted (trailing).
        # NOTE(review): when ``defaults`` is empty, def_offs is None and
        # ``arg_names[None:]`` is the WHOLE list, so every name also lands
        # in ``defaulted`` -- confirm this is intended.
        def_offs = -len(defaults) if defaults else None
        undefaulted, defaulted = arg_names[:def_offs], arg_names[def_offs:]
        optional_sofar.update(defaulted)
        # keep track of defaults so that e.g. endpoint default param
        # can pick up request injected/provided param
        required_sofar |= set(undefaulted) - provided_sofar
        provided_sofar.update(p)
    return required_sofar, optional_sofar
def get_func_name(obj, with_module=False):
    """Build a dotted, human-readable name for a callable.

    NOTE(review): Python 2 specific -- uses ``im_class``/``im_func`` and
    ``func_name`` attributes that no longer exist in Python 3.
    """
    if not callable(obj):
        raise TypeError('expected a callable object')
    ret = []
    if with_module and obj.__module__:
        ret.append(obj.__module__)
    if isinstance(obj, types.MethodType):
        # Method: include the class name and unwrap to the plain function.
        ret.append(obj.im_class.__name__)
        obj = obj.im_func
    func_name = getattr(obj, 'func_name', None)
    if not func_name:
        # Fall back to repr() for objects without a function name.
        func_name = repr(obj)
    ret.append(func_name)
    return '.'.join(ret)
#funcs[0] = function to call
#params[0] = parameters to take
def build_chain_str(funcs, params, params_sofar=None, level=0,
                    func_aliaser=None, func_names=None):
    """Recursively emit source code for a chain of nested ``next()`` closures.

    Each function in *funcs* becomes one nested ``def next(...)``; the body
    calls ``funcs[level]`` with whatever parameter names have become
    available by this depth.
    """
    if not funcs:
        return ''  # stopping case
    if params_sofar is None:
        params_sofar = set(['next'])
    #if func_names is None:
    #    func_names = set()
    params_sofar.update(params[0])
    # Build "name=name" keyword pairs for every argument of the current
    # function that is available at this depth.
    next_args = getargspec(funcs[0]).args
    next_arg_dict = dict([(a, a) for a in next_args])
    next_args = ', '.join(['%s=%s' % kv for kv in next_arg_dict.items()
                           if kv[0] in params_sofar])
    outer_indent = _INDENT * level
    inner_indent = outer_indent + _INDENT
    outer_arg_str = ', '.join(params[0])
    def_str = '%sdef next(%s):\n' % (outer_indent, outer_arg_str)
    # Recurse for the rest of the chain, one indent level deeper.
    body_str = build_chain_str(funcs[1:], params[1:], params_sofar, level + 1)
    #func_name = get_func_name(funcs[0])
    #func_alias = get_next_func_alias(funcs[0])
    return_str = '%sreturn funcs[%s](%s)\n' % (inner_indent, level, next_args)
    return ''.join([def_str, body_str, return_str])
def compile_chain(funcs, params, verbose=_VERBOSE):
    """Compile the generated nested-closure source into a callable chain.

    NOTE(review): Python 2 only -- uses the ``print`` and ``exec``
    statements.
    """
    call_str = build_chain_str(funcs, params)
    code = compile(call_str, '<string>', 'single')
    if verbose:
        print call_str
    # Execute the generated ``def next(...)`` in a namespace that exposes
    # the real middleware functions under the name ``funcs``.
    d = {'funcs': funcs}
    exec code in d
    return d['next']
def make_chain(funcs, provides, final_func, preprovided):
    """Compile *funcs* plus *final_func* into a single callable chain.

    Returns ``(chain, args, unresolved)``: the compiled entry point, the
    full set of parameters it accepts, and the required parameters that
    nothing provides.
    """
    funcs = list(funcs)
    provides = list(provides)
    preprovided = set(preprovided)
    reqs, opts = chain_argspec(funcs + [final_func],
                               provides + [()])
    # Required params that neither the environment nor any middleware supplies.
    unresolved = tuple(reqs - preprovided)
    args = reqs | (preprovided & opts)
    chain = compile_chain(funcs + [final_func],
                          [args] + provides)
    return chain, set(args), set(unresolved)
# Matches each position where an underscore belongs when converting
# CamelCase to under_scored: an upper-case letter that either follows a
# lower-case letter/digit, or precedes a lower-case letter (not at start).
_camel2under_re = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')


def camel2under(camel_string):
    """Convert ``CamelCase`` to ``camel_case``."""
    underscored = _camel2under_re.sub(r'_\1', camel_string)
    return underscored.lower()
def get_next_func_alias(func, func_names=None):
    """Build a unique ``next_``-prefixed alias for *func*.

    The alias is the function's dotted name, under_scored, with
    'middleware' shortened to 'mw'; a numeric suffix is bumped until the
    alias does not collide with *func_names*.
    """
    if func_names is None:
        func_names = set()
    func_name = get_func_name(func)
    func_alias = camel2under(func_name.replace('.', '__'))
    func_alias = func_alias.replace('middleware', 'mw')
    while func_alias in func_names:
        head, _, tail = func_alias.rpartition('_')
        # BUG FIX: the original used a bare ``except:`` here, which would
        # also swallow KeyboardInterrupt/SystemExit; only int() can fail.
        try:
            cur_count = int(tail)
        except ValueError:
            # No numeric suffix yet -- start counting at _2.
            func_alias = func_alias + '_2'
        else:
            func_alias = '%s_%s' % (head, cur_count + 1)
    return 'next_' + func_alias
| 5,357 |
10.py
|
flurincoretti/adventofcode
| 0 |
2171183
|
from math import atan2
def get_dims(asteroid_map):
    """Return (width, height) of a newline-separated ASCII map."""
    first_newline = asteroid_map.find('\n')
    line_count = asteroid_map.count('\n') + 1
    return first_newline, line_count
def get_coords(asteroid_map):
    """Collect the (x, y) position of every asteroid ('#') in the map."""
    width, height = get_dims(asteroid_map)
    rows = asteroid_map.split('\n')
    coords = []
    # Same traversal order as the original comprehension: x outer, y inner.
    for x in range(width):
        for y in range(height):
            if rows[y][x] == '#':
                coords.append((x, y))
    return coords
def get_angles(coords, c):
    """Return the angle (radians) from *c* to every other point in *coords*."""
    cx, cy = c
    angles = []
    for b in coords:
        if b == c:
            continue  # skip the origin asteroid itself
        angles.append(atan2(b[1] - cy, b[0] - cx))
    return angles
def part_one(asteroid_map):
    """Return (best_location, visible_count) for the monitoring station.

    An asteroid sees another iff no third asteroid shares the same angle,
    so the count of distinct angles is the count of visible asteroids.
    """
    coords = get_coords(asteroid_map)
    visible = [len(set(get_angles(coords, c))) for c in coords]
    best = max(visible)
    return coords[visible.index(best)], best
if __name__ == "__main__":
    # Load the puzzle input from disk.
    # NOTE(review): the handle is never closed; a ``with`` block would be tidier.
    inputs = open('inputs/10.txt', 'r')
    asteroid_map = inputs.read()
    # Part 1
    best, n = part_one(asteroid_map)
    print("Best is {} with {} other asteroids detected.".format(best, n))
| 841 |
main.py
|
mtHuberty/python-stat-tracker
| 0 |
2171264
|
from bs4 import BeautifulSoup as soup
from selenium import webdriver
import requests as req
import yaml
import sys
import html
# Optionally, a user can choose a few options by altering values in this file. File is not required for app to run.
def loadConfig():
    """Load config.yml; return the parsed mapping, or "" when unavailable.

    The module header states the config file is optional, so a missing or
    unreadable file is treated like a malformed one instead of letting
    FileNotFoundError crash the app (which the original did).
    """
    try:
        with open("config.yml", "r") as stream:
            cfg = yaml.safe_load(stream)
    except OSError as e:
        print("Could not read config.yml:\n\t" + str(e))
        return ""
    except yaml.YAMLError as e:
        print("Got a YAML error:\n\t" + str(e))
        return ""
    print("Config loaded! :\n\t" + str(cfg))
    return cfg
# Optionally, we can hit Blizzard's PvP API for some basic data.
# To toggle this function, comment it out in __main__
def getPvpApiData():
    """Fetch the Blizzard 2v2 leaderboard; exit the whole app on a network error."""
    try:
        response = req.get('https://us.api.battle.net/wow/leaderboard/2v2?&locale=en_US&apikey=<KEY>')
    except req.exceptions.RequestException as e:
        print("Error:\n\t" + str(e) + "\n\tClosing application...")
        sys.exit(1)
    return response
# Optionally, we can write the results of the API call from the function above to a json file.
def makeFile(data, fileName="whoops.json"):
    """Write string *data* to *fileName*; log and continue on I/O errors.

    Uses a ``with`` block so the handle is closed even when the write
    itself fails (the original leaked the handle on error).
    """
    try:
        with open(fileName, "w") as file:
            file.write(data)
    except IOError as e:
        print("Problem creating pvp-api-data file...\n\t" + str(e) + "\n\tContinuing...")
# Here's where all the magic will happen!
if __name__ == "__main__":
    # Open config file and initialize variables from it. Not important.
    cfg = loadConfig()
    pvpApiFile = cfg["filenames"]["pvp-api-file"] + ".json"
    # Attempt an HTTP get request to the Blizzard PvP (en_US) API for top player names/classes/specs.
    # TODO - Possibly deprecate this and use scraping only (without calling API)
    dataBytes = getPvpApiData()
    dataText = dataBytes.text
    dataSoup = soup(dataText, "html.parser")
    # Create a file with the data from the API call -- Just because we can. (And for practice with config file etc)
    # makeFile(dataText, pvpApiFile)
    # 2v2 Ladder Scraping.
    # UPDATE: This doesn't work. The data we need is loaded after the DOM is finished loading via javascript probably,
    # so we aren't getting the character names/classes etc.
    twosBytes = req.get("https://worldofwarcraft.com/en-gb/game/pvp/leaderboards/2v2")
    twosText = twosBytes.text
    # Unescape HTML entities, re-parse, and dump the (javascript-incomplete) static page.
    twosSoup = soup(html.unescape(twosText), "html.parser").encode("utf-8")
    with open("twosSoup.html", "wb") as twosFile:  # Note: MUST open file with "wb"(writebytes) as second arg to avoid encoding issues
        twosFile.write(twosSoup)
        twosFile.close()  # redundant inside ``with`` but kept as-is
    # Detect OS of the platform the app is running on and use appropriate chromedriver
    osys = sys.platform
    if osys.startswith("win"):
        print("Windows OS detected...using windows chromedriver")
        driver = "chromedriver.exe"
    elif osys.startswith("darwin"):
        print("Mac OS detected...using Mac chromedriver")
        driver = "mac_chromedriver"
    else:
        sys.exit("No driver in \"chromedrivers/\" for detected operating system: " + str(osys))
    # This actually DOES work, and the first data we need to save and/or parse is stored in "twosInnerHTML" and then written into "twosInnerHtml.html"
    # Drive a real browser so the javascript-rendered leaderboard is present.
    browser = webdriver.Chrome("./chromedrivers/" + driver)
    browser.get("https://worldofwarcraft.com/en-gb/game/pvp/leaderboards/2v2")
    twosInnerHTML = browser.execute_script("return document.body.innerHTML").encode("utf-8")
    with open("twosInnerHtml.html", "wb") as twosInnerHtmlFile:
        twosInnerHtmlFile.write(twosInnerHTML)
        twosInnerHtmlFile.close()  # redundant inside ``with`` but kept as-is
    # TODO - Get BeautifulSoup to use the html file from above (or just use the variable twosInnerHTML), and start parsing it.
| 3,678 |
rep.py
|
xnaas/custom-bot-commands
| 0 |
2172339
|
from sopel import plugin, tools
from sopel.formatting import bold, color, colors, plain
@plugin.output_prefix('[REP] ')
@plugin.rate(user=60)
@plugin.require_account('You must have a registered account to participate.', True)
@plugin.require_chanmsg
@plugin.rule(r'^(?P<target>[^\s,*?.!@:<>\'\";#/]{1,32})(?P<PoM>[+-]{2})$')
def rep(bot, trigger):
    """Handle ``nick++`` / ``nick--`` messages: adjust a user's reputation."""
    target = plain(trigger.group('target') or '')
    if not target:
        return plugin.NOLIMIT
    target = tools.Identifier(target)
    # Only users currently present in the channel can be repped.
    if target not in bot.channels[trigger.sender].users:
        return plugin.NOLIMIT
    if not bot.channels[trigger.sender].users[target].account:
        bot.say(f"{target} is not registered and can't earn reputation.")
        return plugin.NOLIMIT
    current_rep = bot.db.get_nick_value(target, 'rep', 0)
    PoM = trigger.group('PoM')  # Plus or Minus
    if PoM == '++':
        # Self-boosting is blocked (self ``--`` is intentionally possible).
        if target == trigger.nick:
            bot.reply('Trying to rep++ yourself is beyond pathetic...')
            bot.action('looks away in disgust.')
            return
        new_rep = current_rep + 1
    elif PoM == '--':
        new_rep = current_rep - 1
    else:
        return plugin.NOLIMIT
    bot.db.set_nick_value(target, 'rep', new_rep)
    new_rep = bold(f'{new_rep:,}')
    bot.say(f'{target}: {current_rep:,} → {new_rep}')
@plugin.command('rep')
@plugin.output_prefix('[REP] ')
@plugin.rate(user=3)
@plugin.require_account('You must have a registered account to participate.', True)
@plugin.require_chanmsg
def rep_check(bot, trigger):
    """Report a user's current reputation (defaults to the caller's nick)."""
    target = plain(trigger.group(3) or trigger.nick)
    target = tools.Identifier(target)
    if target not in bot.channels[trigger.sender].users:
        return bot.say(f'{target} is not here.')
    if not bot.channels[trigger.sender].users[target].account:
        bot.say(f"{target} is not registered and can't earn reputation.")
        return plugin.NOLIMIT
    rep = bot.db.get_nick_value(target, 'rep', 0)
    # Colour-code the score: green positive, red negative, grey zero.
    if rep > 0:
        rep = bold(color(f'{rep:,}', colors.GREEN))
    elif rep < 0:
        rep = bold(color(f'{rep:,}', colors.RED))
    else:
        rep = color(f'{rep:,}', colors.GREY)
    bot.say(f'{target}: {rep}')
@plugin.command('reptop', 'replow')
@plugin.output_prefix('[REP] ')
@plugin.rate(user=5)
@plugin.require_account('You must have a registered account to participate.', True)
def rep_list(bot, trigger):
    """Show the five highest (.reptop) or lowest (.replow) reputations."""
    cmd = trigger.group(1).lower()
    # Values are stored as text in nick_values; cast so ordering is numeric.
    query = "SELECT canonical, key, value FROM nick_values a join nicknames "
    query += "b on a.nick_id = b.nick_id WHERE key='rep' "
    if cmd == 'reptop':
        query += "ORDER BY cast(value as int) DESC LIMIT 5;"
    elif cmd == 'replow':
        query += "ORDER BY cast(value as int) ASC LIMIT 5;"
    else:
        return plugin.NOLIMIT
    lb = bot.db.execute(query).fetchall()
    if not lb:
        return bot.say('Nobody has any reputation yet!')
    rep_list = []
    for index, person in enumerate(lb):
        # Joining with zero-width spaces mangles the nick so announcing the
        # leaderboard does not ping everyone on it.
        name = '\u200B'.join(person[0])
        rep = int(person[2])
        # Drop entries on the "wrong side" of zero for the chosen command.
        if cmd == 'reptop' and rep < 0:
            pass
        elif cmd == 'replow' and rep >= 0:
            pass
        else:
            if rep > 0:
                rep = bold(color(f'{rep:,}', colors.GREEN))
            elif rep < 0:
                rep = bold(color(f'{rep:,}', colors.RED))
            else:
                rep = color(f'{rep:,}', colors.GREY)
            rep_list.append(f'{name}: {rep}')
    if cmd == 'reptop' and not rep_list:
        return bot.say('No one with positive reputation...')
    if cmd == 'replow' and not rep_list:
        return bot.say('No one with negative reputation!')
    bot.say(', '.join(rep_list))
@plugin.command('repset')
@plugin.output_prefix('[REP ADMIN] ')
@plugin.require_admin
@plugin.require_chanmsg
def rep_set(bot, trigger):
    """Admin command: set a user's reputation to an exact integer value."""
    target = plain(trigger.group(3) or '')
    if not target:
        return bot.say('I need a target...')
    target = tools.Identifier(target)
    if target not in bot.channels[trigger.sender].users:
        return bot.say(f'{target} is not here.')
    if not bot.channels[trigger.sender].users[target].account:
        bot.say(f"{target} is not registered and can't earn reputation.")
        return plugin.NOLIMIT
    # BUG FIX: keep the raw text around; the original referenced ``new_rep``
    # inside the except branch before it was ever assigned, raising
    # NameError instead of reporting the bad input. Defaulting group(4) to
    # '' also avoids a TypeError when no value was supplied at all.
    raw_value = plain(trigger.group(4) or '')
    try:
        new_rep = int(raw_value)
    except ValueError:
        return bot.say(f'{raw_value} is not an integer.')
    current_rep = bot.db.get_nick_value(target, 'rep', 0)
    bot.db.set_nick_value(target, 'rep', new_rep)
    new_rep = bold(f'{new_rep:,}')
    bot.say(f'{target}: {current_rep:,} → {new_rep}')
@plugin.command('repwipe')
@plugin.output_prefix('[REP ADMIN] ')
@plugin.require_admin
def rep_wipe(bot, trigger):
    """Admin command: delete all stored reputation for a nick."""
    raw_target = plain(trigger.group(3) or '')
    if not raw_target:
        return bot.say('I need a target...')
    ident = tools.Identifier(raw_target)
    bot.db.delete_nick_value(ident, 'rep')
    bot.say(f'reputation data wiped for: {bold(ident)}')
| 5,050 |
mysit/templatetags/mysit_tag.py
|
GhasemMatoo/Mysite_Restaurants
| 0 |
2171785
|
from django import template
from blog.models import post
##------------------------------
# Template-tag registry for this module.
register = template.Library()


@register.inclusion_tag('mysit/sit/menu-box-post.html')
def Menu_Post():
    """Render the menu box with every post whose status is 1."""
    published = post.objects.filter(status=1)
    return {'postes': published}
| 268 |
patchalerts/games/heroesofthestorm.py
|
silentdot/PatchAlerts
| 8 |
2170948
|
from util import loader
from wrappers.update import Update
from games.base_class import Game
class HOTS(Game):
    """Patch-news scraper for Heroes of the Storm."""

    def __init__(self):
        super().__init__('Heroes of the Storm', homepage='https://heroesofthestorm.com')

    def scan(self):
        """Yield an Update for each article on the Blizzard news list page."""
        # The endpoint returns JSON whose 'html' field holds the rendered list.
        encoded = loader.json('https://news.blizzard.com/en-us/blog/list?pageNum=1&pageSize=30&community=heroes-of-the-storm')
        soup = loader.direct_soup(encoded['html'])
        elems = soup.find_all(attrs={'class': 'ArticleListItem'})
        for elem in elems:
            a = elem.find('a')
            dsc = elem.find(attrs={"class": 'ArticleListItem-description'})
            title = elem.find(attrs={'class': 'ArticleListItem-title'})
            # Article links are relative; prefix the news host.
            _url = 'https://news.blizzard.com/' + a['href']
            _title = title.text
            _desc = dsc.text
            yield Update(game=self, update_name=_title, post_url=_url, desc=_desc, color="#632004")
if __name__ == "__main__":
    # Ad-hoc manual test: print every update found by a single scan.
    lol = HOTS()
    for u in lol.scan():
        print(u)
| 895 |
tests/unit/baskerville_tests/utils_tests/test_file_manager.py
|
equalitie/baskerville
| 25 |
2171945
|
# Copyright (c) 2020, eQualit.ie inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import tempfile
import os
# from pyspark import SparkConf
# from pyspark.sql import SparkSession
from baskerville.util.file_manager import FileManager
class TestFileManager(unittest.TestCase):
    """Unit tests for FileManager save/load round-trips (local filesystem)."""

    def test_json(self):
        """Round-trip a small dict through JSON serialization."""
        temp_dir = tempfile.gettempdir()
        fm = FileManager(path=temp_dir)
        some_dict = {'A': 777}
        # NOTE(review): the extension says .pickle but the format argument is
        # 'json' -- presumably harmless, but confirm FileManager ignores it.
        file_name = os.path.join(temp_dir, 'file_manager_test.pickle')
        fm.save_to_file(value=some_dict, file_name=file_name, format='json')
        # file_name is already absolute, so joining temp_dir again is a no-op.
        assert(os.path.exists(os.path.join(temp_dir, file_name)))
        res = fm.load_from_file(file_name)
        self.assertDictEqual(res, some_dict)

    def test_pickle(self):
        """Round-trip the same dict through pickle serialization."""
        temp_dir = tempfile.gettempdir()
        fm = FileManager(path=temp_dir)
        some_dict = {'A': 777}
        file_name = os.path.join(temp_dir, 'file_manager_test.pickle')
        fm.save_to_file(value=some_dict, file_name=file_name, format='pickle')
        res = fm.load_from_file(file_name, format='pickle')
        self.assertDictEqual(res, some_dict)
# HDFS tests are commented our since it should no be executed with all the unit tests
# def test_json_hdfs(self):
# temp_dir = 'hdfs://hadoop-01:8020/anton2'
#
# conf = SparkConf()
# conf.set('spark.hadoop.dfs.client.use.datanode.hostname', 'true')
#
# spark = SparkSession \
# .builder.config(conf=conf) \
# .appName("aaa") \
# .getOrCreate()
#
# fm = FileManager(path=temp_dir, spark_session=spark)
#
# some_dict = {'A': 777}
# file_name = os.path.join(temp_dir, 'file_manager_test7.pickle')
#
# fm.save_to_file(value=some_dict, file_name=file_name, format='json')
# res = fm.load_from_file(file_name, format='json')
# self.assertDictEqual(res, some_dict)
#
# def test_pickle_hdfs(self):
# temp_dir = 'hdfs://hadoop-01:8020/anton2'
#
# conf = SparkConf()
# conf.set('spark.hadoop.dfs.client.use.datanode.hostname', 'true')
#
# spark = SparkSession \
# .builder.config(conf=conf) \
# .appName("aaa") \
# .getOrCreate()
#
# fm = FileManager(path=temp_dir, spark_session=spark)
# some_dict = {'A': 777}
# file_name = os.path.join(temp_dir, 'file_manager_test7.pickle')
#
# fm.save_to_file(value=some_dict, file_name=file_name, format='pickle')
# res = fm.load_from_file(file_name, format='pickle')
# self.assertDictEqual(res, some_dict)
| 2,769 |
utils/crypt.py
|
imsilence/shadow-hostmonitor
| 1 |
2171606
|
#encoding: utf-8
import os
import hashlib
def get_file_md5(path):
    """Return the hex MD5 digest of the file at *path*, or None if it is missing.

    Hashes in 5 KiB chunks so large files are never loaded whole; the file
    handle is closed even if a read raises (the original leaked it on error).
    """
    if not os.path.exists(path):
        return None
    md5 = hashlib.md5()
    with open(path, 'rb') as fhandler:
        # iter(callable, sentinel) yields chunks until read() returns b''.
        for chunk in iter(lambda: fhandler.read(1024 * 5), b''):
            md5.update(chunk)
    return md5.hexdigest()
def get_str_md5(cxt):
    """Return the hex MD5 digest of a text string (UTF-8 encoded)."""
    digest = hashlib.md5(cxt.encode('utf-8'))
    return digest.hexdigest()
if __name__ == '__main__':
    # Smoke test: print the digest of a known string.
    print(get_str_md5('abc'))
| 543 |
app/routes.py
|
saxenakanishk/InterviewSchedulerApp
| 1 |
2169627
|
from flask import render_template, flash, redirect, request, url_for
from werkzeug.urls import url_parse
from app import app
from app.forms import *
from flask_login import current_user, login_user, logout_user, login_required
from app.models import *
from app import db
import sqlite3
@app.route('/')
@app.route('/index')
def index():
    """Render the landing page (served at both / and /index)."""
    return render_template('index.html')
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate a user and redirect to the originally requested page."""
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()
        if user is None or not user.check_password(form.password.data):
            flash('Invalid username or password')
            return redirect(url_for('login'))
        login_user(user, remember=form.remember_me.data)
        next_page = request.args.get('next')
        # Only honour same-site ``next`` targets to avoid open redirects.
        if not next_page or url_parse(next_page).netloc != '':
            next_page = url_for('index')
        return redirect(next_page)
    return render_template('login.html', title='Sign In', form=form)
@app.route('/logout')
def logout():
    """End the current session and return to the landing page."""
    logout_user()
    return redirect(url_for('index'))
@app.route('/book', methods=['GET', 'POST'])
@login_required
def book():
    """Schedule a new interview, rejecting time collisions for both parties."""
    form = BookinterviewForm()
    if form.validate_on_submit():
        # check time collision
        # First pass: existing interviews for the same student on that date.
        interviewcollisions = Interview.query.filter_by(
            date=datetime.combine(form.date.data, datetime.min.time())).filter_by(studentEmail=form.students.data).all()
        print(len(interviewcollisions))
        for interviewcollision in interviewcollisions:
            # [a, b] overlaps with [x, y] iff b > x and a < y
            if (form.startTime.data < interviewcollision.endTime and (
                    form.startTime.data + form.duration.data) > interviewcollision.startTime):
                flash(
                    f'The time from {interviewcollision.startTime} to {interviewcollision.endTime} is already booked by {Student.query.filter_by(email=interviewcollision.studentEmail).first().email}.')
                return redirect(url_for('book'))
        # Second pass: the same overlap check from the interviewer's side.
        interviewcollisions2 = Interview.query.filter_by(
            date=datetime.combine(form.date.data, datetime.min.time())).filter_by(
            bookerEmail=form.interviewee.data).all()
        print(len(interviewcollisions2))
        for interviewcollision in interviewcollisions2:
            # [a, b] overlaps with [x, y] iff b > x and a < y
            if (form.startTime.data < interviewcollision.endTime and (
                    form.startTime.data + form.duration.data) > interviewcollision.startTime):
                flash(
                    f'The time from {interviewcollision.startTime} to {interviewcollision.endTime} is already booked by {User.query.filter_by(email=interviewcollision.bookerEmail).first().email}.')
                return redirect(url_for('book'))
        # NOTE(review): this collision logic is duplicated in edit(); a
        # shared helper would keep the two routes in sync.
        endTime = form.startTime.data + form.duration.data
        interview = Interview(title=form.title.data, date=form.date.data, startTime=form.startTime.data, endTime=endTime, duration=form.duration.data, studentEmail=form.students.data, bookerEmail=form.interviewee.data)
        db.session.add(interview)
        db.session.commit()
        flash('Interview Scheduling success!')
        return redirect(url_for('index'))
    return render_template('book.html', title='Schedule Interviews', form=form)
@app.route('/interviewbooker')
def interviewbooker():
    """List all interviews, ordered by date, as plain dicts for the template."""
    interviews = Interview.query.order_by(Interview.date).all()
    interviewreturns = []
    for interview in interviews:
        interviewreturn = dict()
        interviewreturn['title'] = interview.title
        interviewreturn['studentEmail'] = Student.query.filter_by(email=interview.studentEmail).first().email
        interviewreturn['bookerEmail'] = User.query.filter_by(email=interview.bookerEmail).first().email
        interviewreturn['date'] = interview.date.date()
        interviewreturn['time'] = f'{interview.startTime} to {interview.endTime}'
        interviewreturns.append(interviewreturn)
    return render_template('interviewbooker.html', interviews=interviewreturns)
@app.route('/edit', methods=['GET', 'POST'])
@login_required
def edit():
    """Update an existing interview (looked up by title) after collision checks."""
    form = EditinterviewForm()
    if form.validate_on_submit():
        # check time collision
        interviewcollisions = Interview.query.filter_by(
            date=datetime.combine(form.date.data, datetime.min.time())).filter_by(studentEmail=form.students.data).all()
        print(len(interviewcollisions))
        for interviewcollision in interviewcollisions:
            # [a, b] overlaps with [x, y] iff b > x and a < y
            if (form.startTime.data < interviewcollision.endTime and (
                    form.startTime.data + form.duration.data) > interviewcollision.startTime):
                flash(
                    f'The time from {interviewcollision.startTime} to {interviewcollision.endTime} is already booked by {Student.query.filter_by(email=interviewcollision.studentEmail).first().email}.')
                return redirect(url_for('edit'))
        interviewcollisions2 = Interview.query.filter_by(
            date=datetime.combine(form.date.data, datetime.min.time())).filter_by(bookerEmail=form.interviewee.data).all()
        print(len(interviewcollisions2))
        for interviewcollision in interviewcollisions2:
            # [a, b] overlaps with [x, y] iff b > x and a < y
            if (form.startTime.data < interviewcollision.endTime and (
                    form.startTime.data + form.duration.data) > interviewcollision.startTime):
                flash(
                    f'The time from {interviewcollision.startTime} to {interviewcollision.endTime} is already booked by {User.query.filter_by(email=interviewcollision.bookerEmail).first().email}.')
                return redirect(url_for('edit'))
        endTime = form.startTime.data + form.duration.data
        # NOTE(review): the collision scan above does not exclude the
        # interview being edited itself -- re-saving an unchanged slot may
        # be rejected as a collision; confirm intended behaviour.
        interview = Interview.query.filter_by(title=form.title.data).first()
        #interview.title=form.title.data
        interview.date = form.date.data
        interview.startTime = form.startTime.data
        interview.endTime = endTime
        interview.duration = form.duration.data
        interview.studentEmail = form.students.data
        interview.bookerEmail = form.interviewee.data
        db.session.commit()
        flash('Interview Update success!')
        return redirect(url_for('index'))
    return render_template('edit.html', title='Update Interviews', form=form)
| 6,588 |
tests/interfaces/test_data_exchange.py
|
Mischback/django-calingen
| 0 |
2171602
|
# SPDX-License-Identifier: MIT
"""Provide tests for calingen.interfaces.data_exchange."""
# Python imports
import datetime
from unittest import mock, skip # noqa: F401
# Django imports
from django.test import override_settings, tag # noqa: F401
# external imports
from dateutil import parser
# app imports
from calingen.interfaces.data_exchange import CalendarEntry, CalendarEntryList
# local imports
from ..util.testcases import CalingenTestCase
@tag("interfaces", "data", "calendarentry")
class CalendarEntryTest(CalingenTestCase):
    """Provide tests for the CalendarEntry class."""

    def test_constructor_accepts_datetime(self):
        """A datetime timestamp is stored unchanged."""
        # Arrange (set up test environment)
        test_datetime = datetime.datetime(2021, 12, 2, 14, 48)
        # Act (actually perform what has to be done)
        entry = CalendarEntry("foo", "bar", test_datetime, ("foo", "bar"))
        # Assert (verify the results)
        # BUG FIX: the original passed ("foo", "bar") as a third argument,
        # which assertEqual silently treats as the failure *message*.
        self.assertEqual(entry.timestamp, test_datetime)

    def test_constructor_accepts_date(self):
        """A bare date is promoted to a datetime at midnight."""
        # Arrange (set up test environment)
        test_date = datetime.date(2021, 12, 2)
        # Act (actually perform what has to be done)
        entry = CalendarEntry("foo", "bar", test_date, ("foo", "bar"))
        # Assert (verify the results)
        self.assertEqual(entry.timestamp.date(), test_date)
        self.assertEqual(entry.timestamp.time(), datetime.time.min)

    def test_constructor_accepts_datestring(self):
        """A parseable string timestamp is converted to a datetime."""
        # Arrange (set up test environment)
        test_datetime = datetime.datetime(2021, 12, 2, 14, 48)
        test_datetime_str = test_datetime.__str__()
        # Act (actually perform what has to be done)
        entry = CalendarEntry("foo", "bar", test_datetime_str, ("foo", "bar"))
        # Assert (verify the results)
        self.assertEqual(entry.timestamp, test_datetime)

    def test_constructor_rejects_non_valid_timestamp(self):
        """An unparseable timestamp string raises dateutil's ParserError."""
        # Arrange (set up test environment)
        test_datetime_str = "foobar"
        # Act (actually perform what has to be done)
        # Assert (verify the results)
        with self.assertRaises(parser._parser.ParserError):
            entry = CalendarEntry(  # noqa: F841
                "foo", "bar", test_datetime_str, ("foo", "bar")
            )

    def test_constructor_rejects_non_valid_source(self):
        """An invalid source value raises CalendarEntryException."""
        # Arrange (set up test environment)
        test_date = datetime.date(2021, 12, 2)
        # Act (actually perform what has to be done)
        # Assert (verify the results)
        with self.assertRaises(CalendarEntry.CalendarEntryException):
            entry = CalendarEntry("foo", "bar", test_date, "BREAK")  # noqa: F841

    @mock.patch("calingen.interfaces.data_exchange.EventCategory")
    def test_constructor_accepts_event_category(self, mock_event_category):
        """A category listed in EventCategory is resolved to its member."""
        # Arrange (set up test environment)
        test_datetime = datetime.datetime(2021, 12, 12, 8, 15)
        test_category = "TEST_CAT"
        test_event_cat = mock.MagicMock()
        mock_event_category.__getitem__.return_value = test_event_cat
        mock_event_category.values = [test_category]
        # Act (actually perform what has to be done)
        entry = CalendarEntry("foo", test_category, test_datetime, ("foo", "bar"))
        # Assert (verify the results)
        self.assertEqual(entry.timestamp, test_datetime)
        self.assertEqual(entry.category, test_event_cat)

    def test_eq_different_classes(self):
        """__eq__ against a foreign type returns NotImplemented."""
        # Arrange (set up test environment)
        class TestClass:
            pass

        test_class = TestClass()
        entry = CalendarEntry(
            "foo", "bar", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        # Act (actually perform what has to be done)
        return_value = entry.__eq__(test_class)
        # Assert (verify the results)
        self.assertEqual(return_value, NotImplemented)
        self.assertNotEqual(entry, test_class)
        self.assertNotEqual(test_class, entry)

    def test_lt_different_classes(self):
        """__lt__ against a foreign type returns NotImplemented / raises TypeError."""
        # Arrange (set up test environment)
        class TestClass:
            pass

        test_class = TestClass()
        entry = CalendarEntry(
            "foo", "bar", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        # Act (actually perform what has to be done)
        return_value = entry.__lt__(test_class)
        # Assert (verify the results)
        self.assertEqual(return_value, NotImplemented)
        with self.assertRaises(TypeError):
            self.assertLess(entry, test_class)

    def test_lt_timestamp(self):
        """Ordering considers the timestamp first."""
        # Arrange (set up test environment)
        entry_1 = CalendarEntry(
            "aaa", "bbb", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        entry_2 = CalendarEntry(
            "aaa", "bbb", datetime.datetime(2020, 12, 2, 15, 4), ("foo", "bar")
        )
        # Act (actually perform what has to be done)
        # Assert (verify the results)
        self.assertFalse(entry_1 < entry_2)
        self.assertLess(entry_2, entry_1)
        self.assertLessEqual(entry_2, entry_1)

    def test_lt_category(self):
        """With equal timestamps, ordering falls back to the category."""
        # Arrange (set up test environment)
        entry_1 = CalendarEntry(
            "aaa", "zzz", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        entry_2 = CalendarEntry(
            "aaa", "bbb", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        # Act (actually perform what has to be done)
        # Assert (verify the results)
        self.assertFalse(entry_1 < entry_2)
        self.assertLess(entry_2, entry_1)
        self.assertLessEqual(entry_2, entry_1)

    def test_lt_title(self):
        """With equal timestamp and category, ordering falls back to the title."""
        # Arrange (set up test environment)
        entry_1 = CalendarEntry(
            "zzz", "bbb", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        entry_2 = CalendarEntry(
            "aaa", "bbb", datetime.datetime(2021, 12, 2, 15, 4), ("foo", "bar")
        )
        # Act (actually perform what has to be done)
        # Assert (verify the results)
        self.assertFalse(entry_1 < entry_2)
        self.assertLess(entry_2, entry_1)
        self.assertLessEqual(entry_2, entry_1)
@tag("interfaces", "data", "calendarentrylist")
class CalendarEntryListTest(CalingenTestCase):
    """Provide tests for the CalendarEntryList class."""

    @staticmethod
    def _make_entry(title, category):
        """Return a CalendarEntry with a fixed source and a fresh timestamp."""
        return CalendarEntry(
            title=title,
            category=category,
            timestamp=datetime.datetime.now(),
            source=("foo", "bar"),
        )

    def test_constructor_initializes_set(self):
        """Constructor initializes _entries."""
        # Arrange / Act
        entry_list = CalendarEntryList()
        # Assert
        self.assertIsInstance(entry_list._entries, set)

    def test_add_adds_provided_entry(self):
        """add() appends provided entry to _entries."""
        # Arrange
        entry_list = CalendarEntryList()
        payload = "foo"
        # Act
        entry_list.add(payload)
        # Assert
        self.assertIn(payload, entry_list._entries)

    def test_add_rejects_none(self):
        """add() rejects if parameter is None."""
        # Arrange
        entry_list = CalendarEntryList()
        # Act / Assert
        with self.assertRaises(CalendarEntryList.CalendarEntryListException):
            entry_list.add(None)

    def test_merge_merges_distinct_sets(self):
        """merge() correctly merges two distinct CalendarEntryList instances."""
        # Arrange
        first_entry = self._make_entry("foo", "foo")
        second_entry = self._make_entry("bar", "bar")
        target = CalendarEntryList()
        target.add(first_entry)
        other = CalendarEntryList()
        other.add(second_entry)
        # Act
        target.merge(other)
        # Assert
        self.assertIn(first_entry, target._entries)
        self.assertIn(second_entry, target._entries)

    def test_merge_merges_non_distinct_sets(self):
        """merge() correctly merges two non distinct CalendarEntryList instances."""
        # Arrange
        first_entry = self._make_entry("foo", "foo")
        second_entry = self._make_entry("bar", "bar")
        target = CalendarEntryList()
        target.add(first_entry)
        other = CalendarEntryList()
        other.add(second_entry)
        other.add(first_entry)  # overlap with the target list
        # Act
        target.merge(other)
        # Assert
        self.assertIn(first_entry, target._entries)
        self.assertIn(second_entry, target._entries)
        self.assertEqual(len(target._entries), 2)
| 9,681 |
rubbishcarcode/main.py
|
point142/Raspberry-demo
| 0 |
2170196
|
import wakeup
from lamp import Lamp
from steer import Steer
import iFlytekVoiceTrans
import baiduvoice
import sizhiAI
import userrecord
import classify
import urllib3
import time
# Silence the InsecureRequestWarning raised when talking to the Baidu API.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
led1 = Lamp(26)  # LED 1: lit while recording the user
led2 = Lamp(19)  # LED 2: lit while the AI request is being processed
def ai_work():
    """Hotword callback: record the user, transcribe, classify the trash
    type and open the matching lid (or fall back to the chat bot).

    Side effects: toggles LEDs, plays audio, drives a servo lid.
    """
    wakeup.detector.terminate()  # stop hotword monitoring while handling
    wakeup.snowboydecoder.play_audio_file()  # play the "ding" prompt
    led1.set_on()  # recording indicator on
    # 1. Record the user's speech.
    state = userrecord.record()
    if state == False:  # user stayed silent too long after wake-up
        led1.set_off()
        return
    led1.set_off()
    led2.set_on()  # AI-processing indicator on
    # 2. Speech to text.
    ai_text = "你说什么"
    user_text = iFlytekVoiceTrans.get_usertext()  # transcription result
    # user_text = baiduvoice.speech_to_text()
    # 3. Build the AI reply.
    if user_text == '':  # transcription failed / empty
        print("AI说: " + ai_text)
        baiduvoice.text_to_speech(ai_text)
    else:
        result = classify.get_type(user_text)  # extract trash category from the utterance
        if result == False:  # no trash item mentioned: fall back to chat bot
            # BUG FIX: original called the undefined name `aibrain`; the
            # chat module imported by this file is `sizhiAI` (presumably
            # exposing ai_think — confirm against the module).
            ai_text = sizhiAI.ai_think(user_text)
            # 4. Text to speech.
            baiduvoice.text_to_speech(ai_text)
        else:
            if result[0] != None:
                print(result[0] + '是' + result[1])
                ai_text = result[0] + '是' + result[1]+','+result[1]+'箱已打开'
            else:
                print(result[1] + "箱已打开")
                ai_text = result[1]+'箱已打开'
            # 4. Text to speech, then open the matching lid for 5 seconds.
            baiduvoice.text_to_speech(ai_text)
            lid = Steer(result[1])  # servo lid for the trash category
            lid.open()
            time.sleep(5)
            lid.close()
    led2.set_off()  # processing indicator off
if __name__=='__main__':
    while True:
        print('Sleeping... ')
        # Block here monitoring the microphone for the hotword.
        wakeup.detector.start(detected_callback=ai_work, # custom callback on detection
                              interrupt_check=wakeup.interrupt_callback,
                              sleep_time=0.03)
        #wakeuptool.detector.terminate() # stop hotword monitoring
        print('Sleep again... ')
| 2,125 |
__scraping__/fudbal91.com - selenium/main.py
|
furas/python-code
| 2 |
2172249
|
# author: Bartlomiej "furas" Burek (https://blog.furas.pl)
# date: 2022.03.30
# [web scraping - Xpath select with two conditions - Stack Overflow](https://stackoverflow.com/questions/71670362/xpath-select-with-two-conditions)
# Scrapes team names from fudbal91.com match previews, skipping matches
# whose link is marked "inprogress".
from selenium import webdriver
from selenium.webdriver.common.by import By
#from webdriver_manager.chrome import ChromeDriverManager
from webdriver_manager.firefox import GeckoDriverManager
import time
url = 'https://www.fudbal91.com/previews/2022-03-30'
#driver = webdriver.Chrome(executable_path=ChromeDriverManager().install())
# NOTE(review): `executable_path` is deprecated in Selenium 4 — prefer
# webdriver.Firefox(service=Service(GeckoDriverManager().install())).
driver = webdriver.Firefox(executable_path=GeckoDriverManager().install())
driver.get(url)
time.sleep(2)  # crude render wait; WebDriverWait would be more robust
# <span itemprop="name"> elements inside links NOT classed "inprogress".
all_items = driver.find_elements(By.XPATH, '//a[not(contains(@class,"inprogress"))]//span[contains(@itemprop,"name")]')
print('len(all_items):', len(all_items))
for item in all_items:
    print(item.text)
| 876 |
Python/k-empty-slots.py
|
Ravan339/LeetCode
| 4 |
2171500
|
# https://leetcode.com/problems/k-empty-slots/
class Solution(object):
    def kEmptySlots(self, flowers, k):
        """Return the earliest day on which two bloomed flowers have exactly
        ``k`` un-bloomed slots between them, or -1 if it never happens.

        :type flowers: List[int]  (flowers[d] blooms on day d+1, 1-indexed position)
        :type k: int
        :rtype: int
        """
        n = len(flowers)
        # days[p] = the day on which position p+1 blooms (days are 1-indexed).
        days = [0] * n
        for day, position in enumerate(flowers, start=1):
            days[position - 1] = day
        # Sliding window [left, right] of width k+1: the window is valid on
        # day max(days[left], days[right]) iff every interior position
        # blooms later than both endpoints.
        best = float('inf')
        left, right = 0, k + 1
        index = 0
        while right < n:
            if days[index] < days[left] or days[index] <= days[right]:
                if index == right:
                    # Whole window stayed valid: record its completion day.
                    best = min(best, max(days[left], days[right]))
                left, right = index, index + k + 1
            index += 1
        return best if best != float('inf') else -1
| 704 |
arena_navigation/arena_local_planner/learning_based/arena_local_planner_drl/rl_agent/utils/supersuit_utils.py
|
ignc-research/arena-marl
| 7 |
2172227
|
from functools import partial
from typing import Callable
import numpy as np
import rospy
from stable_baselines3.common.vec_env import VecNormalize
from supersuit.vector import ConcatVecEnv, MarkovVectorEnv
from supersuit.vector.sb3_vector_wrapper import SB3VecEnvWrapper
class MarkovVectorEnv_patched(MarkovVectorEnv):
    """Patched environment wrapper which creates the correct API for vector environments. Dones for dead agents are returned as True instead as False."""

    def step(self, actions):
        """Step the wrapped parallel env with a vector of per-agent actions.

        Differs from the upstream implementation only in the default used
        for missing agents' dones (True instead of False), so agents that
        are no longer present are reported as done.
        """
        agent_set = set(self.par_env.agents)
        # Forward actions only for agents that are currently alive.
        act_dict = {
            agent: actions[i]
            for i, agent in enumerate(self.par_env.possible_agents)
            if agent in agent_set
        }
        observations, rewards, dones, infos = self.par_env.step(act_dict)
        # adds last observation to info where user can get it
        if all(dones.values()):
            for agent, obs in observations.items():
                infos[agent]["terminal_observation"] = obs
        # Dense per-agent reward vector; missing agents contribute 0.
        rews = np.array(
            [rewards.get(agent, 0) for agent in self.par_env.possible_agents],
            dtype=np.float32,
        )
        # we changed the default value to true instead of false
        dns = np.array(
            [dones.get(agent, True) for agent in self.par_env.possible_agents],
            dtype=np.uint8,
        )
        infs = [infos.get(agent, {}) for agent in self.par_env.possible_agents]
        # Auto-reset once every agent is done; otherwise concatenate the obs.
        if all(dones.values()):
            observations = self.reset()
        else:
            observations = self.concat_obs(observations)
        assert (
            self.black_death
            or self.par_env.agents == self.par_env.possible_agents
        ), "MarkovVectorEnv does not support environments with varying numbers of active agents unless black_death is set to True"
        return observations, rews, dns, infs
def vec_env_create(
    env_fn: Callable,
    agent_list_fn: Callable,
    num_robots: int,
    num_cpus: int,
    num_vec_envs: int,
    PATHS: dict,
) -> SB3VecEnvWrapper:
    """Function which vectorizes a given environment function in multiple parallel environments.

    Args:
        env_fn (Callable): Function that initializes an environment with wrappers
        agent_list_fn (Callable): Object containing the program arguments
        num_robots (int): Number of robots in the environment
        num_cpus (int): Maximal number of CPUs to use (Currently only process is used anyhow)
        num_vec_envs (int): Number of parallel environments to spawn
        PATHS (dict): Dictionary which holds hyperparameters for the experiment

    Returns:
        SB3VecEnvWrapper: Vectorized environments following the SB3 VecEnv API. Each each robot in an environment \
            poses as an environment in the vector.
    """
    # One factory per simulation namespace (sim_1 .. sim_N).
    env_list_fns = [
        partial(
            env_fn,
            ns=f"sim_{i}",
            num_agents=num_robots,
            agent_list_fn=agent_list_fn,
            PATHS=PATHS,
        )
        for i in range(1, num_vec_envs + 1)
    ]
    # Instantiate one environment to read its spaces and metadata.
    env = env_list_fns[0]()
    # BUG FIX: the action space was previously copied from
    # env.observation_space, so the vectorized env advertised the wrong
    # action space to the learner.
    action_space = env.action_space
    observation_space = env.observation_space
    metadata = env.metadata  # kept for parity with the original (currently unused)
    num_cpus = min(num_cpus, num_vec_envs)  # informational only; envs run in-process
    rospy.init_node("train_env", disable_signals=False, anonymous=True)
    vec_env = ConcatVecEnv(env_list_fns, observation_space, action_space)
    return SB3VecEnvWrapper(vec_env)
| 3,409 |
web_scraping/ec2files/ec2file57.py
|
nikibhatt/Groa
| 1 |
2171980
|
# Worker instance #57: scrape Letterboxd data for the id range [101574, 103355].
from scraper import *
# NOTE(review): star import — prefer `from scraper import Scraper`.
s = Scraper(start=101574, end=103355, max_iter=30, scraper_instance=57)
s.scrape_letterboxd()
| 117 |
setup.py
|
drygdryg/antichat_python
| 3 |
2170649
|
# -*- coding: utf-8 -*-
# Packaging script for the `antichat` forum.antichat.ru wrapper.
from io import open
from setuptools import setup
# NOTE(review): this string sits after the imports, so it is a plain
# expression statement rather than the module docstring; move it above the
# imports if it is meant to populate __doc__.
"""
:authors: drygdryg
:license: MIT
:copyright: (c) 2020 drygdryg
"""
version = '0.0.7'
# Long description for PyPI is taken verbatim from the README.
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='antichat',
    version=version,
    description='Simple forum.antichat.ru wrapper',
    long_description=long_description,
    long_description_content_type='text/markdown',
    license='MIT',
    author='drygdryg',
    author_email='<EMAIL>',
    url='https://github.com/drygdryg/antichat_python',
    download_url='https://github.com/drygdryg/antichat_python/archive/v{}.zip'.format(version),
    keywords='wrapper scraper antichat',
    packages=['antichat'],
    python_requires='>=3.6',
    install_requires=[
        'requests',
        'beautifulsoup4',
        'lxml'
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Topic :: Internet :: WWW/HTTP',
        'Intended Audience :: Developers'
    ]
)
| 1,244 |
finance/models/__init__.py
|
vsanasc/sbrain
| 1 |
2170851
|
from .choices import (
TYPE_CREDIT_CARD_CHOICES,
METHOD_TYPE_CHOICES
)
from .credit_card import CreditCard, CreditCardBill
from .expense import Expense
from .expense_type import ExpenseCategory, ExpenseType
from .income import Income
from .income_type import IncomeType
from .installment import Installment
| 315 |
customenv/__init__.py
|
geek-guild/async-rl
| 0 |
2172330
|
from gym.envs.registration import register
# Register the custom environment with Gym so it can be created via
# gym.make("MyEnv-v001").
register(
    id="MyEnv-v001",
    entry_point="customenv.myenv:MyEnv"
)
| 117 |
takeaway/script/order.py
|
mnaile/startapp
| 7 |
2170194
|
import click
import math
import random
import time
from takeaway.controllers.commands import Operation
from takeaway.core.utils.version import VERSION
# NOTE(review): the module attribute `cli` is re-bound by the command of the
# same name defined below, so this group is shadowed — confirm which object
# the console entry point is supposed to invoke.
@click.group()
def cli():
    """Takeaway group Cli"""
@cli.command()
def clear():
    """This will clear the entire screen """
    click.clear()  # cross-platform terminal clear provided by click
@cli.command(help='😄 simple boilerplate ready for development')
@click.option('--app','-a',type=click.Choice(['fastapi', 'flask'], case_sensitive=True), prompt='Choose one of the application',show_default=True,default='fastapi',required=True, help='the app name')
@click.option('--db_driver','-dd',type=click.Choice(['mysql', 'postgresql'], case_sensitive=True), prompt='Choose one of the database drivers',show_default=True,default='postgresql',required=True, help='the db driver name')
@click.option('--name',"-n",metavar='the name of your app',prompt='Name for your app',show_default=True,default='myproject',required=True, help='the app name')
@click.option('--db_name',"-d",metavar='the name of your db',prompt='Name for your db',show_default=True,default='testdb',required=True, help='the db name')
@click.option('--testdb_name', "-td", metavar='the name of your testdb', prompt='Name for your testdb', show_default=True, default='unittestdb', required=True, help='the testdb name')
@click.version_option(VERSION)
def cli(app,name,db_driver,db_name,testdb_name, count=4000):
    """Scaffold a new project: prompt for options, animate a progress bar,
    then hand off to `Operation` to generate the boilerplate.

    NOTE(review): this command shadows the `cli` group defined above; the
    name is kept unchanged so the registered command name stays the same.
    Fixes vs. original: removed a discarded no-op `click.progressbar(...)`
    call and an unused nested `filter` generator (which also shadowed the
    builtin); fixed "yor" -> "your" typos in the user-facing metavars.
    """
    click.echo(click.style('Starting app %s at directory %s!' % (app,name), fg='green'))
    items = range(count)  # dummy work units driving the progress bar
    if click.confirm('Do you have git Repo ?'):
        git_repo = click.prompt("Repo URL ", type=str)
    else:
        git_repo = None
    def process_slowly(item):
        # Simulate a tiny unit of work so the progress bar animates.
        time.sleep(0.001 * random.random())
    with click.progressbar(
        items, label=f"Processing {app}", fill_char=click.style("#", fg="green")
    ) as bar:
        for item in bar:
            process_slowly(item)
    # Generate the project skeleton with the collected options.
    management = Operation(app,name,db_driver,db_name,testdb_name,git_repo)
    management.execute()
    click.echo(click.style('Completed!', blink=True,fg="red"))
| 2,458 |
apps/common/views.py
|
kaixiang1992/bbs
| 0 |
2170942
|
from flask import Blueprint, request, make_response
from untils import restful, cacheuntil
from untils.captcha import Captcha
from exts import smsapi
from .forms import SMSCaptchaForm
from io import BytesIO
# Blueprint for shared/common endpoints, mounted under /c.
bp = Blueprint('common', __name__, url_prefix='/c')
# Superseded draft of the SMS endpoint (no form validation, no code caching);
# kept for reference only.
# @bp.route('/sms_captcha/', methods=['post'])
# def sms_captcha():
#     telephone = request.form.get('telephone')
#     if not telephone:
#         return restful.params_error(message='请传入手机号码!')
#     code = Captcha.gene_text(number=4)
#     resp = smsapi.send_sms(telephone=telephone, param=code)
#     if resp:
#         return restful.success(message='短信验证码发送成功!')
#     else:
#         return restful.params_error(message='短信验证码发送失败!')
# Send an SMS verification code.
@bp.route('/sms_captcha/', methods=['post'])
def sms_captcha():
    """Validate the phone number, send a 4-digit SMS code and cache it."""
    form = SMSCaptchaForm(request.form)
    if form.validate():
        telephone = form.telephone.data  # validated phone number
        code = Captcha.gene_text(number=4)  # random 4-digit code string
        resp = smsapi.send_sms(telephone=telephone, param=code)
        if resp:
            cacheuntil.set(telephone, code)  # cache the code in redis, keyed by phone
            return restful.success(message='短信验证码发送成功!')
        else:
            return restful.params_error(message='短信验证码发送失败!')
    else:
        return restful.params_error(message=form.get_random_error(), data=form.get_all_errors())
# Graphical (image) captcha view.
@bp.route('/captcha/')
def CaptchaView():
    """Generate a captcha image, cache its text in redis, return the PNG."""
    text, image = Captcha.gene_graph_captcha()
    cacheuntil.set(text.lower(), text.lower())  # cache the captcha text in redis
    out = BytesIO()
    # Save the image into the in-memory buffer in PNG format.
    image.save(out, 'png')
    # Rewind to the start after writing so read() returns the whole image.
    out.seek(0)
    # Turn the buffer contents into an image/png response.
    resp = make_response(out.read())
    resp.content_type = 'image/png'
    return resp
| 1,794 |
test.py
|
DominikSpiljak/imdb-review-classifier
| 0 |
2169193
|
from pathlib import Path
import torch
from models.models import TfIdfModel
from data.data_preprocessing import TfIdfPreprocessor
def main():
    """Interactive sentiment demo: load the latest TF-IDF preprocessor and
    model checkpoints from ./test_checkpoints, then score reviews typed on
    stdin forever (exit with Ctrl-C / EOF)."""
    preprocessor = TfIdfPreprocessor.load_from_checkpoint(
        list(Path("test_checkpoints").glob("*.pkl"))[0]
    )
    model_checkpoint = torch.load(list(Path("test_checkpoints").glob("*.pth"))[0])
    # NOTE(review): input_dim=5000 must match the fitted vectorizer's
    # vocabulary size — confirm against the training configuration.
    model = TfIdfModel(input_dim=5000, device="cpu")
    model.load_state_dict(model_checkpoint)
    model.eval()  # inference mode: disables dropout/batch-norm updates
    while True:
        review = input(">> ")
        vectorized = preprocessor.transform([review])
        # Sigmoid maps the raw logit to a probability of the positive class.
        print(
            f"The review is {torch.sigmoid(model(vectorized)).item() * 100}% positive."
        )
if __name__ == "__main__":
    main()
| 718 |
pymlviz/model_comparison_table.py
|
taohuang-ubc/package_development_pymlviz
| 0 |
2171141
|
"""
Created on March 3, 2020
@author: <NAME>
Implementation of model_comparison_table in the
pymlviz package.
"""
import pandas as pd
from sklearn.base import is_classifier, is_regressor
def model_comparison_table(X_train, y_train, X_test, y_test, **kwargs):
    """
    Takes in scikit-learn ML models of the same family (regression or
    classification) and the train/test data, then outputs a table
    comparing the scores for the different models.

    Parameters:
    ------------
    X_train : pd.DataFrame/np.ndarray
        Training dataset without labels.
    y_train : np.ndarray
        Training labels.
    X_test : pd.DataFrame/np.ndarray
        Test dataset without labels.
    y_test : np.ndarray
        Test labels.
    **kwargs :
        Models assigned with meaningful variable names.

    Returns:
    ------------
    pd.DataFrame
        One row per model with its train and test scores. For backward
        compatibility, an AssertionError *instance* is returned (not
        raised) when the models are not all of one family.

    Example:
    ------------
    >>> lr_cf = LogisticRegression().fit(X_train, y_train)
    >>> svm_cf = SVC().fit(X_train, y_train)
    >>> model_comparison_table(X_train, y_train, X_test, y_test,
    ...                        lr_model=lr_cf, svm_model=svm_cf)
    """
    try:
        # All models must belong to one family: all regressors or all
        # classifiers (vacuously true for an empty kwargs).
        all_regressors = all(is_regressor(m) for m in kwargs.values())
        all_classifiers = all(is_classifier(m) for m in kwargs.values())
        if not (all_classifiers or all_regressors):
            # Explicit raise instead of `assert` so the validation is not
            # stripped when running under `python -O`.
            raise AssertionError(
                "Please enter all regression or classification models")

        # One record per model; building the frame in one shot replaces the
        # deprecated (and quadratic) DataFrame.append-in-a-loop, which was
        # removed in pandas 2.0.
        records = [
            {"model_name": model_name,
             "train_score": model.score(X_train, y_train),
             "test_score": model.score(X_test, y_test)}
            for model_name, model in kwargs.items()
        ]

        if not records:
            # Match the original empty skeleton (float-typed score columns).
            return pd.DataFrame({"model_name": [],
                                 "train_score": [],
                                 "test_score": []})
        return pd.DataFrame(
            records, columns=["model_name", "train_score", "test_score"])
    except AssertionError as error:
        # NOTE(review): historical contract returns the exception object
        # rather than raising; preserved for backward compatibility.
        return error
| 3,412 |
micromelon/_robot_comms/__init__.py
|
timmyhadwen/mm-pymodule
| 3 |
2171159
|
from ._rover_controller import RoverController
from ._comms_constants import MicromelonOpCode, MicromelonType
from .ble import BleControllerThread, BleController
from .uart import UartController
from .transports import RobotTransportBLE
# Names re-exported as the public API of the _robot_comms package.
__all__ = [
    "RoverController",
    "MicromelonOpCode",
    "MicromelonType",
    "BleController",
    "BleControllerThread",
    "UartController",
    "RobotTransportBLE",
]
| 416 |
backend/app/schemas/auth.py
|
ianahart/blog
| 0 |
2172093
|
from typing import Optional, List
from pydantic import BaseModel
# pyright: reportGeneralTypeIssues=false
class AuthBase(BaseModel):
    """Shared base class for auth-related request/response schemas."""
    pass
class AuthCredentials(AuthBase):
    """A single credential field together with its validation error."""
    name: Optional[str] = None  # field name — presumably matches the form input; confirm with callers
    value: Optional[str] = None  # submitted value for the field
    error: str  # validation error message for this field
class AuthLogin(AuthBase):
    """Login request body: the list of submitted credential fields."""
    credentials: List[AuthCredentials]
| 327 |
algorithms/python/ReverseString/ReverseString.py
|
artekr/LeetCode
| 0 |
2171646
|
from typing import List
class Solution:
    def reverseString(self, s: List[str]) -> None:
        """
        Reverse ``s`` in place with a two-pointer swap, then echo the
        characters space-separated. Do not return anything.
        """
        left, right = 0, len(s) - 1
        while left < right:
            s[left], s[right] = s[right], s[left]
            left, right = left + 1, right - 1
        # Same console output as the original per-character print loop
        # (each character followed by a space, then a newline).
        print(*s, sep=' ', end=' ' if s else '')
        print()
# Ad-hoc smoke checks (run on import); each call prints the reversed list.
Solution().reverseString(["h","e","l","l","o"])
# expected: ["o","l","l","e","h"]
Solution().reverseString(["H","a","n","n","a","h"])
# expected: ["h","a","n","n","a","H"]
Solution().reverseString([])
# expected: []
| 596 |
ndlib/models/actions/__init__.py
|
vahidmoeinifar/ndlib
| 0 |
2170547
|
# Package metadata.
__author__ = 'rossetti'
__license__ = "GPL"
__email__ = "<EMAIL>"
from .AddNode import AddNode
from .RemoveNode import RemoveNode
from .SwapEdges import SwapEdges
# Public API of the actions subpackage.
__all__ = [
    'AddNode',
    'RemoveNode',
    'SwapEdges',
]
| 232 |
image_to_text.py
|
Salah-Zkara/Text-Cleaning
| 1 |
2171574
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as img
from math import log
from tkinter import *
from tkinter import filedialog
import os.path
from os import path
def Laplacien(U):
    """Return a clamped discrete Laplacian of an (I, J, K) image array.

    Each interior pixel becomes max(up + down + left + right - 4*centre, 0);
    border pixels keep their original values, exactly as in the original
    triple Python loop. Assumes float pixel data (matplotlib loads PNGs as
    floats in [0, 1]) — integer dtypes could overflow in the vectorised sums.

    Parameters
    ----------
    U : np.ndarray of shape (I, J, K)

    Returns
    -------
    np.ndarray of the same shape as ``U``.
    """
    delt = U.copy()
    # Vectorised replacement of the original per-pixel loops: identical
    # values, orders of magnitude faster on real images.
    lap = (
        U[:-2, 1:-1, :]    # neighbour at i-1, j
        + U[2:, 1:-1, :]   # neighbour at i+1, j
        + U[1:-1, :-2, :]  # neighbour at i, j-1
        + U[1:-1, 2:, :]   # neighbour at i, j+1
        - 4 * U[1:-1, 1:-1, :]
    )
    delt[1:-1, 1:-1, :] = np.maximum(lap, 0)
    return delt
def log_img(U):
    """Element-wise natural log of an image; zero pixels map to 0.0001.

    Non-zero pixels become log(value); pixels equal to 0 keep the 0.0001
    placeholder of the pre-filled output array.

    NOTE: assumes non-negative pixel values (matplotlib loads PNGs as
    floats in [0, 1]); a negative value would yield NaN here where the
    original math.log raised ValueError — confirm inputs if that matters.

    Parameters
    ----------
    U : array-like of shape (I, J, K)

    Returns
    -------
    np.ndarray (float64) of the same shape.
    """
    M = np.full(np.shape(U), 0.0001, dtype=np.float64)
    # Vectorised replacement of the original per-pixel loops.
    mask = np.asarray(U) != 0
    M[mask] = np.log(np.asarray(U, dtype=np.float64)[mask])
    return M
def exp_img(U):
    """Element-wise exponential of ``U`` computed in float64."""
    exponentiated = np.exp(U, dtype=np.float64)
    return exponentiated
def eqa_img(U):
    """Equalise an image: log image minus a diffused copy of itself.

    Runs six explicit diffusion steps (step size 0.5, via the clamped
    Laplacian) on the log image and returns the difference.
    """
    diffused = log_img(U)
    for _ in range(6):
        diffused = diffused + (0.5 * Laplacien(diffused))
    return log_img(U) - diffused
def askopen():
    """Let the user pick an image via a file dialog, mirror the choice in
    the text entry, and enable the OK button for an existing file.

    BUG FIX: the original defined ``click`` at module level and ran the
    ``path.isfile(image_name)`` check at import time, where ``image_name``
    does not exist (NameError on startup). The check and the OK-button
    wiring now live inside this callback, mirroring ``text_check`` below.
    """
    image_name = filedialog.askopenfilename(initialdir = "/", title = "Select A File", filetype =(("Image","*.png *.jpg *.jpeg"),("all files","*.*")) )
    textentry.delete(0, 'end')
    textentry.insert(0, image_name)
    def click():
        # Show the equalised version of the selected image.
        image = img.imread(image_name)
        plt.imshow(exp_img(eqa_img(image)))
        plt.axis('off')
        plt.show()
    if path.isfile(image_name):
        Button(window,font=button_font,fg='white',bg=button_color,text='OK',width=5,command=click).place(x=550,y=280)
def text_check():
    """Validate the path typed into the entry box; enable the OK button
    only for an existing .png/.jpg/.jpeg file, otherwise disable it."""
    image_name=textentry.get()
    def click():
        # Show the equalised version of the image at the typed path.
        image = img.imread(image_name)
        plt.imshow(exp_img(eqa_img(image)))
        plt.axis('off')
        plt.show()
    # Extension check is case-insensitive via upper().
    ext=image_name.split('.')
    extention=ext[-1].upper()
    if(path.isfile(image_name) and (extention=='PNG' or extention=='JPG' or extention=='JPEG')) :
        Button(window,font=button_font,fg='white',bg=button_color,text='OK',width=5,command=click).place(x=550,y=280)
    else:
        Button(window,font=button_font,fg='white',bg=button_color,text='OK',width=5,state=DISABLED).place(x=550,y=280)
# --- GUI configuration constants ---
backgrounrd_color="#f58442"  # NOTE(review): typo for "background_color"
button_color="#576161"
button_font="none 12 bold"
# --- Main window ---
window=Tk()
window.title("Image To text")
window.resizable(0, 0)  # fixed-size window
window.geometry("653x350")
window.iconbitmap('./.resources/icon.ico')
# "browse" opens a file dialog, "check" validates the typed path,
# "OK" starts disabled until a valid image path is provided.
Button(window,font=button_font,fg='white',bg=button_color,text='browse',width=6,command=askopen).place(x=540,y=45)
Button(window,font=button_font,fg='white',bg=button_color,text='check',width=5,command=text_check).place(x=470,y=45)
Button(window,font=button_font,fg='white',bg=button_color,text='OK',width=5,state=DISABLED).place(x=550,y=280)
textentry=Entry(window,bg="white",width=70,text="test")
textentry.place(x=25,y=50)
window.configure(background=backgrounrd_color)
window.mainloop()
| 2,594 |
exemplo/app.py
|
felipeparpinelli/word2vec-pt-br
| 39 |
2171769
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gensim
import codecs
from flask import Flask, render_template
from flask import request
from flask.json import jsonify
import json
import os,sys,glob
app = Flask(__name__)
class DummyModel(object):
    """Stand-in for the word2vec model so the app can boot quickly.

    Any attribute access returns a function that prints its arguments and
    returns True. (Legacy Python 2 code.)
    """
    def __getattr__(self, attr):
        def qualquer_coisa(*args, **kwargs):
            print args, kwargs
            return True
        return qualquer_coisa
# The dummy is immediately replaced by the real vectors below; comment the
# next line out to run the app without loading the (large) vector file.
model = DummyModel()
model = gensim.models.KeyedVectors.load_word2vec_format("wiki.pt.trigram.vector", binary=True)
@app.route('/', methods=['GET'])
def index():
    # Landing page.
    return render_template('index.html')
@app.route('/operacoes', methods=['GET'])
def operacoes():
    # Word-arithmetic ("operations") demo page.
    return render_template('operacoes.html')
@app.route('/contexto', methods=['GET'])
def contexto():
    # Odd-one-out ("context") demo page.
    return render_template('contexto.html')
@app.route("/teste")
def hello():
    """Single JSON endpoint; dispatches on whichever query parameter is set
    (most_similar / context / operation / graph / mais_distante).

    NOTE: legacy Python 2 code (print statements throughout).
    """
    if request.args.get('most_similar'):
        print request.args.get('metric')
        # NOTE(review): the three conditions below are identical, so only
        # the last assignment survives; the first two look like they should
        # test other metric values, and `model_cgp` / `model_cgj` are not
        # defined anywhere in this file — confirm intended metrics/models.
        if request.args.get('metric') == 'Wikipedia':
            similares = model_cgp.most_similar(request.args.get('most_similar'), topn=10)
        if request.args.get('metric') == 'Wikipedia':
            similares = model_cgj.most_similar(request.args.get('most_similar'), topn=10)
        if request.args.get('metric') == 'Wikipedia':
            similares = model.most_similar(request.args.get('most_similar'), topn=10)
        data_array = []
        number_nearest_words = len(similares)
        # Build one {name, size, children} node per similar word.
        for j in range(0, number_nearest_words):
            data = {}
            data['name'] = similares[j][0]
            data['size'] = str(round(similares[j][1], 3))
            data['children'] = []
            print data
            print j
            data_array.append(data)
        print data_array
        json_data = json.dumps(data_array)
        print json_data
        return jsonify({request.args.get('most_similar'): data_array})
    if request.args.get('context'):
        # Odd-one-out over the provided whitespace-separated words.
        return jsonify({'context': model.doesnt_match(request.args.get('context').split())})
    if request.args.get('operation'):
        # Word arithmetic: "a - b" (words at indexes 0 and 2) or
        # "a + b - c" (positives at 0 and 2, negative at 4).
        array_words = request.args.get('operation').split()
        print array_words
        if array_words[1] == '-':
            similares = model.most_similar(negative=[array_words[0], array_words[2]])
            data_array = []
            number_nearest_words = len(similares)
            print number_nearest_words
            for j in range(0, number_nearest_words):
                data = {}
                data['source'] = request.args.get('operation')
                data['target'] = similares[j][0]
                data['size'] = str(round(similares[j][1], 3))
                print data
                print j
                data_array.append(data)
            print data_array
            json_data = json.dumps(data_array)
            print json_data
            return jsonify({request.args.get('operation'): data_array})
        else:
            similares = model.most_similar(positive=[array_words[0], array_words[2]], negative=[array_words[4]])
            data_array = []
            number_nearest_words = len(similares)
            print number_nearest_words
            for j in range(0, number_nearest_words):
                data = {}
                data['source'] = request.args.get('operation')
                data['target'] = similares[j][0]
                data['size'] = str(round(similares[j][1], 3))
                print data
                print j
                data_array.append(data)
            # print data
            print data_array
            json_data = json.dumps(data_array)
            print json_data
            return jsonify({request.args.get('operation'): data_array})
    if request.args.get('graph'):
        similares = model.most_similar(request.args.get('graph'), topn=10)
        data_array = []
        # NOTE(review): identity comparison `is "Global"` on a string
        # literal is unreliable — should be `==`; branch is a no-op anyway.
        if request.args.get('type') is "Global":
            pass
        number_nearest_words = len(similares)
        for j in range(0, number_nearest_words):
            data = {}
            data['source'] = request.args.get('graph')
            data['target'] = similares[j][0]
            data['size'] = str(round(similares[j][1], 3))
            print data
            print j
            data_array.append(data)
        # print data
        print data_array
        json_data = json.dumps(data_array)
        print json_data
        return json_data
    if request.args.get('mais_distante'):
        return jsonify({'mais_distante': model.doesnt_match(request.args.get('mais_distante').split())})
if __name__ == "__main__":
    app.run()
| 4,727 |
parse_snarls.py
|
wwliao/pangenome-utils
| 7 |
2170094
|
#!/usr/bin/env python3
"""Map top-level snarls onto reference coordinates.

Reads a BED-like file of reference node positions and a JSONL stream of
snarls; writes snarls whose two boundary nodes lie in order on the same
reference chromosome to --output, and everything else to filterout.bedpe.
"""
import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output", required=True)
parser.add_argument("ref_nodes")
parser.add_argument("snarls")
args = parser.parse_args()
# node id -> (chrom, start, end) on the reference path
ref = {}
with open(args.ref_nodes) as infile:
    for line in infile:
        cols = line.strip().split("\t")
        chrom = cols[0]
        start = int(cols[1])
        end = int(cols[2])
        node = cols[3]
        ref[node] = (chrom, start, end)
with open(args.snarls) as infile, open(args.output, "w") as outfile, open("filterout.bedpe", "w") as filterout:
    for line in infile:
        snarl = json.loads(line)
        #if "parent" not in snarl and "type" in snarl:
            #if snarl["type"] == 1:
        # Only top-level snarls (no parent) are considered.
        if "parent" not in snarl:
            node1 = snarl["start"]["node_id"]
            node2 = snarl["end"]["node_id"]
            if node1 in ref and node2 in ref:
                # Build the snarl id from the boundary-node orientations.
                if "backward" not in snarl["start"] and "backward" not in snarl["end"]:
                    source = node1
                    sink = node2
                    snarl_id = f">{source}>{sink}"
                elif "backward" in snarl["start"] and "backward" in snarl["end"]:
                    # Both boundaries reversed: swap so the id reads forward.
                    source = node2
                    sink = node1
                    snarl_id = f">{source}>{sink}"
                else:
                    source = node1
                    sink = node2
                    if "backward" in snarl["start"]:
                        snarl_id = f"<{source}>{sink}"
                    else:
                        snarl_id = f">{source}<{sink}"
                chrom1 = ref[source][0]
                start1 = ref[source][1]
                end1 = ref[source][2]
                chrom2 = ref[sink][0]
                start2 = ref[sink][1]
                end2 = ref[sink][2]
                # Keep only same-chromosome pairs with the sink at or after
                # the source; everything else goes to the filter file.
                if chrom1 == chrom2 and start2 >= end1:
                    outfile.write(f"{chrom1}\t{start1}\t{end1}\t{chrom2}\t{start2}\t{end2}\t{snarl_id}\n")
                else:
                    filterout.write(f"{chrom1}\t{start1}\t{end1}\t{chrom2}\t{start2}\t{end2}\t{snarl_id}\n")
bitwise/wire/TRI.py
|
jamesjiang52/Bitwise
| 0 |
2172251
|
"""
The following classes are defined:
TristateBuffer
"""
class TristateBuffer:
    """Initialize a new tri-state buffer.

    Args:
        enable: An object of type Wire.
        input: An object of type Wire.
        output: An object of type Wire. Takes on the value of input if enable
            has value 1. Otherwise, value is independent of input.
    """
    def __init__(self, enable, input, output):
        self.input = input
        self.enable = enable
        self.output = output
        # React to changes on the input and enable wires.
        self.input._bind_to(self._update_input)
        self.enable._bind_to(self._update_enable)
        if enable.value == 1:
            self.output.value = self.input.value

    def _update_input(self, value):
        # Propagate the new input value only while the buffer is enabled.
        if self.enable.value == 1:
            self.output.value = value

    def _update_enable(self, value):
        # On enable, latch the current input onto the output.
        if value == 1:
            self.output.value = self.input.value

    def __str__(self):
        lines = [
            "enable: " + str(self.enable.value),
            "input: " + str(self.input.value),
            "output: " + str(self.output.value),
        ]
        return "\n".join(lines)

    def __call__(self, *, enable=None, input=None, output=None):
        # Assign wires in the original order: enable, then input, then output.
        for wire, new_value in ((self.enable, enable),
                                (self.input, input),
                                (self.output, output)):
            if new_value is not None:
                wire.value = new_value
| 1,498 |
netbox/context_processors.py
|
moonbirddk/networked-toolbox
| 2 |
2171126
|
from django.conf import settings
def timezone_name(request):
    """Expose the session's timezone name, defaulting to settings.TIME_ZONE."""
    return {'TIMEZONE_NAME': request.session.get('django_timezone', settings.TIME_ZONE)}
def google_analytics_id(request):
    """Expose the configured Google Analytics id to templates."""
    context = {'GOOGLE_ANALYTICS_ID': settings.GOOGLE_ANALYTICS_ID}
    return context
def user_has_verified_email(request):
    """Expose whether the current user has at least one verified email address."""
    verified = (
        request.user.is_authenticated
        and request.user.emailaddress_set.filter(verified=True).exists()
    )
    return {'user_has_verified_email': verified}
| 536 |
featuretools_tsfresh_primitives/primitives/percentage_of_reoccurring_values_to_all_values.py
|
RomaKoks/featuretools-tsfresh-primitives
| 7 |
2168335
|
from featuretools.primitives import AggregationPrimitive
from tsfresh.feature_extraction.feature_calculators import (
percentage_of_reoccurring_values_to_all_values,
)
from woodwork.column_schema import ColumnSchema
from woodwork.logical_types import Double
class PercentageOfReoccurringValuesToAllValues(AggregationPrimitive):
    """Returns the ratio of unique values, that are present in the time series
    more than once.

        # of data points occurring more than once / # of all data points

    This means the ratio is normalized to the number of data points in the time
    series, in contrast to the
    percentage_of_reoccurring_datapoints_to_all_datapoints.

    Docstring source:
    https://tsfresh.readthedocs.io/en/latest/api/tsfresh.feature_extraction.html#tsfresh.feature_extraction.feature_calculators.percentage_of_reoccurring_values_to_all_values
    """
    name = "percentage_of_reoccurring_values_to_all_values"  # primitive identifier
    input_types = [ColumnSchema(semantic_tags={"numeric"})]  # one numeric column
    return_type = ColumnSchema(logical_type=Double, semantic_tags={"numeric"})
    stack_on_self = False  # do not stack this primitive on its own output

    def get_function(self):
        # Delegate directly to the tsfresh implementation.
        return percentage_of_reoccurring_values_to_all_values
| 1,202 |
viya_ark_library/structured_logging/test/test_spec.py
|
mauriziopinzi/viya4-ark
| 0 |
2172026
|
####################################################################
# ### structured_logging.test_spec.py ###
####################################################################
# ### Author: SAS Institute Inc. ###
####################################################################
# ###
# Copyright (c) 2020, SAS Institute Inc., Cary, NC, USA. ###
# All Rights Reserved. ###
# SPDX-License-Identifier: Apache-2.0 ###
# ###
####################################################################
from typing import List, Text
from viya_ark_library.structured_logging.spec import SASStructuredLoggingSpec
def test_static_var_attributes() -> None:
"""
Tests the value of the static attributes variable.
"""
assert SASStructuredLoggingSpec.ATTRIBUTES == "attributes"
def test_static_var_level() -> None:
"""
Tests the value of the static level variable.
"""
assert SASStructuredLoggingSpec.LEVEL == "level"
def test_static_var_message() -> None:
"""
Tests the value of the static message variable.
"""
assert SASStructuredLoggingSpec.MESSAGE == "message"
def test_static_var_message_key() -> None:
"""
Tests the value of the static message key variable.
"""
assert SASStructuredLoggingSpec.MESSAGE_KEY == "messageKey"
def test_static_var_message_parameters() -> None:
    """Check the literal used for the messageParameters key."""
    expected = "messageParameters"
    assert SASStructuredLoggingSpec.MESSAGE_PARAMETERS == expected
def test_static_var_properties() -> None:
    """Check the literal used for the properties key."""
    expected = "properties"
    assert SASStructuredLoggingSpec.PROPERTIES == expected
def test_static_var_property_caller() -> None:
    """Check the literal used for the caller property key."""
    expected = "caller"
    assert SASStructuredLoggingSpec.PROPERTY_CALLER == expected
def test_static_var_property_logger() -> None:
    """Check the literal used for the logger property key."""
    expected = "logger"
    assert SASStructuredLoggingSpec.PROPERTY_LOGGER == expected
def test_static_var_property_thread() -> None:
    """Check the literal used for the thread property key."""
    expected = "thread"
    assert SASStructuredLoggingSpec.PROPERTY_THREAD == expected
def test_static_var_source() -> None:
    """Check the literal used for the source key."""
    expected = "source"
    assert SASStructuredLoggingSpec.SOURCE == expected
def test_static_var_time_stamp() -> None:
    """Check the literal used for the timeStamp key."""
    expected = "timeStamp"
    assert SASStructuredLoggingSpec.TIME_STAMP == expected
def test_static_var_version() -> None:
    """Check the literal used for the version key."""
    expected = "version"
    assert SASStructuredLoggingSpec.VERSION == expected
def test_get_required_keys() -> None:
    """The required-key list holds exactly the five mandatory fields."""
    required_keys: List[Text] = SASStructuredLoggingSpec.get_required_keys()
    assert isinstance(required_keys, list)
    expected = {
        SASStructuredLoggingSpec.LEVEL,
        SASStructuredLoggingSpec.MESSAGE,
        SASStructuredLoggingSpec.SOURCE,
        SASStructuredLoggingSpec.TIME_STAMP,
        SASStructuredLoggingSpec.VERSION,
    }
    assert len(required_keys) == len(expected)
    assert expected.issubset(required_keys)
| 3,601 |
fastapi_plan/template/{{cookiecutter.project_name}}/app/tests/conftest.py
|
rafsaf/fastapi-template
| 9 |
2171692
|
from asyncio import AbstractEventLoop as EventLoop
from typing import Generator
import pytest
from fastapi.testclient import TestClient
from tortoise.contrib.test import finalizer, initializer
from app import crud, models, schemas
from app.main import create_app
from app.tests.utils.utils import user_authentication_headers
# Application instance shared by every fixture in this module.
app = create_app()
# Canonical credentials used by the session fixtures below.
# (The literal values here are placeholders left by secret redaction.)
default_superuser = schemas.UserCreateBySuperuser(
    email="<EMAIL>",
    password="<PASSWORD>",
    is_superuser=True,
    is_active=True,
)
default_user = schemas.UserCreateBySuperuser(
    email="<EMAIL>",
    password="<PASSWORD>",
    is_superuser=False,
    is_active=True,
)
@pytest.fixture(scope="module")
def client() -> Generator:
    """Provide a TestClient backed by a throwaway Tortoise test database."""
    initializer(["app.models"])
    with TestClient(app) as test_client:
        yield test_client
    finalizer()
@pytest.fixture(scope="module")
def event_loop(client: TestClient) -> Generator:
    """Expose the asyncio loop driving the module-scoped test client."""
    loop = client.task.get_loop()
    yield loop
@pytest.fixture(scope="module")
def normal_user(event_loop: EventLoop) -> Generator:
    """Create the default non-superuser account, deleting it on teardown."""
    created: models.User = event_loop.run_until_complete(
        crud.user.create_by_superuser(default_user)
    )
    yield created
    event_loop.run_until_complete(created.delete())
@pytest.fixture(scope="module")
def superuser_token_headers(client: TestClient, event_loop: EventLoop) -> Generator:
    """Yield Authorization headers for the default superuser account."""
    headers = user_authentication_headers(
        client=client,
        event_loop=event_loop,
        email=default_superuser.email,
        # Fix: the previous line held an unresolved/redacted name, which is a
        # syntax error; the credential comes from module-level default_superuser.
        password=default_superuser.password,
        is_superuser=default_superuser.is_superuser or True,
        is_active=default_superuser.is_active or True,
    )
    yield headers
@pytest.fixture(scope="module")
def normal_user_token_headers(client: TestClient, event_loop: EventLoop) -> Generator:
    """Yield Authorization headers for the default regular-user account."""
    headers = user_authentication_headers(
        client=client,
        event_loop=event_loop,
        email=default_user.email,
        # Fix: the previous line began with an unresolved/redacted name, which
        # is a syntax error; the credential comes from module-level default_user.
        password=default_user.password,
        is_superuser=default_user.is_superuser or False,
        is_active=default_user.is_active or True,
    )
    yield headers
| 2,019 |
src/the_tale/the_tale/accounts/migrations/0010_remove_changecredentialstask_relogin_required.py
|
al-arz/the-tale
| 85 |
2172258
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2018-11-11 12:55
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drop the unused ``relogin_required`` field from
    ChangeCredentialsTask."""

    dependencies = [
        ('accounts', '0009_remove_middle_gender'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='changecredentialstask',
            name='relogin_required',
        ),
    ]
| 421 |
src/numdifftools/tests/test_limits.py
|
rparini/numdifftools
| 0 |
2171951
|
"""
Created on 28. aug. 2015
@author: pab
"""
from __future__ import absolute_import, division, print_function
import unittest
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_allclose
from numdifftools.limits import Limit, Residue, CStepGenerator
from numdifftools.step_generators import make_exact
from numdifftools.extrapolation import EPS
class TestCStepGenerator(unittest.TestCase):
    """Tests for the step sequences produced by CStepGenerator."""

    @staticmethod
    def test_default_generator():
        """Eight default steps match the known geometric sequence."""
        step_gen = CStepGenerator(num_steps=8)
        h = np.array([step for step in step_gen(0)])
        # Fix: removed a leftover debug ``print(h)`` that cluttered test output.
        desired = np.array([[1.47701940e-09, 3.69254849e-10, 9.23137122e-11,
                             2.30784281e-11, 5.76960701e-12, 1.44240175e-12,
                             3.60600438e-13, 9.01501096e-14]])
        assert_array_almost_equal((h - desired) / desired, 0)

    @staticmethod
    def test_default_base_step():
        """A single default step equals make_exact(EPS ** (1 / 1.2))."""
        step_gen = CStepGenerator(num_steps=1, offset=0)
        h = [step for step in step_gen(0)]
        desired = make_exact(EPS ** (1. / 1.2))
        assert_array_almost_equal((h[0] - desired) / desired, 0)

    @staticmethod
    def test_fixed_base_step():
        """An explicit base_step is returned unchanged for one step."""
        desired = 0.1
        step_gen = CStepGenerator(base_step=desired, num_steps=1, scale=2,
                                  offset=0)
        h = [step for step in step_gen(0)]
        assert_array_almost_equal((h[0] - desired) / desired, 0)
class TestLimit(unittest.TestCase):
    """Limits of functions with removable singularities."""

    def test_sinx_div_x(self):
        def sinc_like(z):
            return np.sin(z) / z

        x = np.arange(-10, 10) / np.pi
        for path in ('radial', 'spiral'):
            value, info = Limit(sinc_like, path=path, full_output=True)(x * np.pi)
            assert_array_almost_equal(value, np.sinc(x))
            self.assertTrue(np.all(info.error_estimate < 1.0e-14))

    def test_derivative_of_cos(self):
        x0 = np.pi / 2

        def slope(h):
            return (np.cos(x0 + h) - np.cos(x0)) / h

        value, info = Limit(slope, step=CStepGenerator(), full_output=True)(0)
        assert_allclose(value, -1)
        self.assertTrue(info.error_estimate < 1e-14)

    def test_residue_1_div_1_minus_exp_x(self):
        def g(z):
            return -z / (np.expm1(2 * z))

        value, info = Limit(g, full_output=True)(0)
        assert_allclose(value, -0.5)
        self.assertTrue(info.error_estimate < 1e-14)

    def test_difficult_limit(self):
        def g(x):
            return (x * np.exp(x) - np.expm1(x)) / x ** 2

        for path in ('radial', ):
            value, info = Limit(g, path=path, full_output=True)(0)
            assert_allclose(value, 0.5)
            self.assertTrue(info.error_estimate < 1e-8)
class TestResidue(unittest.TestCase):
    """Residue computations at simple and higher-order poles."""

    def test_residue_1_div_1_minus_exp_x(self):
        def g(z):
            return -1.0 / (np.expm1(2 * z))

        value, info = Residue(g, full_output=True)(0)
        assert_allclose(value, -0.5)
        self.assertTrue(info.error_estimate < 1e-14)

    def test_residue_1_div_sin_x2(self):
        def g(z):
            return 1.0 / np.sin(z) ** 2

        value, info = Residue(g, full_output=True, pole_order=2)(np.pi)
        assert_allclose(value, 1)
        self.assertTrue(info.error_estimate < 1e-10)
if __name__ == "__main__":
    # Run all test cases above when executed as a script.
    unittest.main()
| 3,293 |
tests/TwitterDatabaseTests/ModelsTests/test_TweetORM.py
|
AdamSwenson/TwitterProject
| 0 |
2170911
|
__author__ = 'ars62917'
import unittest
# import sqlalchemy
#
# from DataAnalysis.DataTools import TweetORM
#
#
# class ConnectionMock( TweetORM.Connection ):
# def _make_engine(self):
# pass
#
# def __init__(self, credential_file):
# self.engine = None
# super().__init__(credential_file)
#
# class ConnectionTest(unittest.TestCase):
# def setUp(self):
# self.cred_file = 'tests/sql_test_credentials.xml'
# self.server = 'testhost'
# self.port = 3000
# self.db_name = 'testdbname'
# self.password = '<PASSWORD>'
# self.username = 'testusername'
#
# def test_load_credentials(self):
# conn = ConnectionMock(self.cred_file)
# self.assertEqual(conn._server, self.server)
# self.assertEqual(conn._port, self.port)
# self.assertEqual(conn._db_name, self.db_name)
# self.assertEqual(conn._password, self.password)
# self.assertEqual(conn._username, self.username)
#
# class SqliteConnectionTest(unittest.TestCase):
# def setUp(self):
# pass
#
# def test_make_engine(self):
# conn = TweetORM.SqliteConnection( )
# self.assertIsInstance(conn.engine, sqlalchemy.engine.base.Engine, 'created engine')
# self.assertIsInstance(conn.engine, sqlalchemy.engine.base.Engine, 'created engine')
#
# class MySqlConnectionTest(unittest.TestCase):
# def setUp(self):
# self.cred_file = 'tests/sql_test_credentials.xml'
#
# def test_make_engine(self):
# """
# NB., test dsn string doesn't use the port
# """
# conn = TweetORM.MySqlConnection( self.cred_file )
# self.assertIsInstance(conn.engine, sqlalchemy.engine.base.Engine, 'created engine')
# self.assertEqual(conn._dsn, "mysql+mysqlconnector://testusername:testpassword@testhost:3000/testdbname", "Correct dsn created")
#
#
# class DAO_family_test(unittest.TestCase):
# def setUp(self):
# self.engine1 = sqlalchemy.create_engine('sqlite:///:memory:', echo=True)
# self.engine2 = sqlalchemy.create_engine('sqlite:///:memory:', echo=True)
# self.object1 = TweetORM.DAO( self.engine1 )
# self.object2 = TweetORM.DAO( self.engine2 )
#
# def test_class_inheritance(self):
# self.assertEqual(type(self.object1.global_session), sqlalchemy.orm.session.sessionmaker, "object1 has correct session factory")
# self.assertEqual(type(self.object2.global_session), sqlalchemy.orm.session.sessionmaker, "object2 has correct session factory")
# self.assertEqual(self.object1.global_session, self.object2.global_session, "One global_session shared btwn objects ")
# self.assertNotEqual(self.object1.session, self.object2.session, "Objects have distinct sessions")
#
# class MyTestCase(unittest.TestCase):
#
# def setUp(self):
# Base = declarative_base()
# sqlite_engine = create_engine('sqlite:///:memory:', echo=True)
# Base.metadata.create_all(sqlite_engine)
#
# def test_class_inheritance(self):
# user.userID = 23
# session.add(user)
# session.commit()
# self.assertEqual(True, False)
if __name__ == '__main__':
    # NOTE(review): every test in this module is commented out above, so this
    # currently runs an empty unittest session.
    unittest.main()
| 3,235 |
app/common/auth/reset_password_confirm.py
|
MrPeker/acikkaynak-service
| 5 |
2172328
|
from app.common.library import cognito
from app.common.models import User
def reset_password_confirm(username, confirmation_code, new_password):
    """Confirm a Cognito forgot-password flow and sync the local user record.

    Returns a dict with ``message``/``error``/``success``/``data`` keys; on a
    Cognito failure the provider's message is returned and the local user is
    left untouched.
    """
    client = cognito.create_client()
    # pylint:disable=unused-variable
    resp, msg = cognito.idp_confirm_forgot_password(
        client, username, confirmation_code, new_password
    )
    if msg is not None:
        return {"message": msg, "error": True, "success": False, "data": None}
    user = User.objects.get(username=username)
    user.is_active = True
    # Fix: store the newly chosen password (the previous code referenced an
    # unresolved/redacted name here, which is a syntax error).
    user.set_password(new_password)
    user.save()
    return {
        "message": "success",
        "error": False,
        "success": True,
        "data": None,
    }
| 688 |
Alfred/Alfred.alfredpreferences/workflows/user.workflow.81EEAC0A-3493-4326-B45A-DEF7A758503C/parse-regions.py
|
sthulb/conf-dotfiles
| 0 |
2172017
|
import json
import itertools
from bs4 import BeautifulSoup

# Scrape the saved AWS "regional product services" page and build a
# service-name -> list-of-region-names mapping, written out as regions.json.
with open('./regional-product-services.html', 'r') as fh:
    html_doc = fh.read()
soup = BeautifulSoup(html_doc, 'html.parser')
aws_tables = soup.find_all("div", class_='lb-tbl')
services_list = {}
for table in aws_tables:
    found_regions = None
    for row in table.find_all('tr'):
        if not found_regions:
            # Examine the first td to see if it contains "Services Offered:"
            # If so, read the rest of the row to populate the services list
            try:
                if row.find_all('td')[0].get_text() == 'Services Offered:':
                    # Remaining cells are region names
                    found_regions = [cell.get_text().strip().replace('*', '') for cell in row.find_all('td')[1:]]
            except IndexError:
                # Row without any <td> cells (e.g. a header row) — skip it.
                pass
        else:
            # We know the regions for this table
            # Each remaining row will be the service name and then regions
            service_name = row.find_all('td')[0].get_text().strip()
            region_map = [1 if cell.get_text().strip() == "✓" else 0 for cell in row.find_all('td')[1:]]
            # print("|{}|".format(service_name))
            # compress() keeps only region names whose cell held a check mark.
            available_regions = list(itertools.compress(found_regions, region_map))
            if service_name in services_list.keys():
                services_list[service_name].extend(available_regions)
            else:
                services_list[service_name] = available_regions
# for service_name in services_list.keys():
#     print("{} is available in: {}".format(
#         service_name,
#         ", ".join(services_list[service_name])
#     ))
with open('regions.json', 'w') as fh:
    json.dump(services_list, fh)
| 1,734 |
transfer.py
|
soarflighting/Neural_style_tansfer
| 0 |
2171740
|
'''
转换函数
'''
import torch.optim as optim
from nt_models import NT_models
from nt_utils import image_loader,imshow
def get_input_optimizer(input_img):
    """Return an L-BFGS optimizer that updates the (now trainable) input image."""
    trainable_params = [input_img.requires_grad_()]
    return optim.LBFGS(trainable_params)
def transfer(content_img,style_img,input_img,num_steps = 50,style_weight=1000000,content_weight = 1):
    """Run neural style transfer, optimizing ``input_img`` in place.

    Args:
        content_img: content target image tensor.
        style_img: style target image tensor.
        input_img: starting image (typically a clone of content_img);
            optimized directly by L-BFGS.
        num_steps: minimum number of closure evaluations to run.
        style_weight: multiplier applied to the accumulated style loss.
        content_weight: multiplier applied to the accumulated content loss.

    Returns:
        The optimized image tensor, detached from the autograd graph.
    """
    print('Building the style transfer model...')
    nt_model = NT_models()
    model,style_losses,content_losses = nt_model.get_style_content_model_and_loss(style_img,content_img)
    optimizer = get_input_optimizer(input_img)
    print('Optimizing ... ')
    # One-element list so the closure below can mutate the step counter.
    run = [0]
    while run[0] <= num_steps:
        def closure():
            #### Correct the values of the updated input image:
            #### clamp_() keeps pixel values inside [0, 1]
            input_img.data.clamp_(0,1)
            optimizer.zero_grad()
            # Forward pass populates the loss modules attached to the model.
            model(input_img)
            style_score = 0
            content_score = 0
            for sl in style_losses:
                style_score+= sl.loss
            for cl in content_losses:
                content_score+=cl.loss
            style_score *= style_weight
            content_score *= content_weight
            loss = style_score+content_score
            print("loss = ",loss)
            loss.backward()
            run[0] += 1
            if run[0]%50 == 0:
                print("Run {}:".format(run))
                print("Style Loss :{:.4f} Content Loss :{:.4f}".format(style_score.item(),content_score.item()))
                print()
            return style_score+content_score
        optimizer.step(closure)
    ## a last correction
    input_img.data.clamp_(0,1)
    return input_img.detach()
if __name__ == '__main__':
    # NOTE(review): hard-coded local Windows paths — adjust before running
    # on another machine.
    style_img = image_loader("c:/Users/Mr.fei/pytorch-learn/data/images/picasso.jpg")
    content_img = image_loader("c:/Users/Mr.fei/pytorch-learn/data/images/dance.jpg")
    input_img = content_img.clone()
    output = transfer(content_img,style_img,input_img)
    imshow(output,'output_img')
| 2,069 |
pyplots/image_interpolation_lena.py
|
fabianp/scipy-lecture-notes
| 1 |
2170578
|
import numpy as np
import scipy
import matplotlib.pyplot as plt

# Side-by-side comparison of matplotlib's default (smoothed) interpolation
# versus 'nearest' interpolation on a 20x20 crop of the Lena test image.
# NOTE(review): scipy.lena() was removed from SciPy (0.17+); on modern SciPy
# this needs scipy.datasets/scipy.misc sample images instead — confirm the
# target SciPy version.
l = scipy.lena()
plt.figure(figsize=(8, 4))
plt.subplot(121)
plt.imshow(l[200:220, 200:220], cmap=plt.cm.gray)
plt.axis('off')
plt.subplot(122)
plt.imshow(l[200:220, 200:220], cmap=plt.cm.gray, interpolation='nearest')
plt.axis('off')
plt.subplots_adjust(wspace=0.02, hspace=0.02, top=1, bottom=0, left=0, right=1)
plt.show()
| 395 |
shapes/tables.py
|
acdh-oeaw/sh4d
| 0 |
2171361
|
import django_tables2 as tables
from django_tables2.utils import A
from . models import *
class CadastralCommunityTable(tables.Table):
    """django-tables2 table listing cadastral communities."""
    # The name column links to the row record's detail view (looked up by pk).
    cadcom_nam = tables.LinkColumn(
        'shapes:cadastralcommunity_detail',
        args=[A('pk')], verbose_name='Name'
    )

    class Meta:
        model = CadastralCommunity
        sequence = ('cadcom_nam',)
        attrs = {"class": "table table-responsive table-hover"}
| 418 |
tentd/blueprints/groups.py
|
pytent/pytentd
| 3 |
2171871
|
"""Groups endpoints."""
from flask import json, request, g, abort, make_response
from flask.views import MethodView
from tentd.lib.flask import EntityBlueprint, jsonify
from tentd.documents import Group
# Entity-scoped blueprint; all group endpoints are mounted under /groups.
groups = EntityBlueprint('groups', __name__, url_prefix='/groups')
@groups.route_class('')
class GroupsView(MethodView):
    """Collection endpoint for an entity's groups."""

    def get(self):
        """Return every group owned by the current entity."""
        return jsonify(g.entity.groups), 200

    def post(self):
        """Create a new group from the JSON request body."""
        # Fix: request.json is a property, so request.json() raised TypeError;
        # get_json() is the callable that parses the body.
        return jsonify(Group(entity=g.entity, **request.get_json()).save())
@groups.route_class('/<string:name>')
class GroupView(MethodView):
    """Item endpoint for a single named group."""

    def get(self, name):
        """Return the named group, or 404 if the entity has no such group."""
        return jsonify(g.entity.groups.get_or_404(name=name).to_json())

    def put(self, name):
        """Update the named group from the JSON request body."""
        group = g.entity.groups.get_or_404(name=name)
        # Fix: request.json is a property, so request.json() raised TypeError;
        # get_json() is the callable that parses the body.
        group.update_values(**request.get_json())
        return jsonify(group.to_json()), 200

    def delete(self, name):
        """Delete the named group."""
        g.entity.groups.get_or_404(name=name).delete()
        return make_response(), 200
| 943 |
scripts/create_kurucz_cd23_chianti_H_He_db.py
|
MarkMagee/carsus-db
| 2 |
2172137
|
""" Example script to create a database """
from carsus import init_db
from carsus.io.nist import (
NISTWeightsCompIngester,
NISTIonizationEnergiesIngester
)
from carsus.io.kurucz import GFALLIngester
from carsus.io.chianti_ import ChiantiIngester
from carsus.io.zeta import KnoxLongZetaIngester
def create_test_db(db_fname, gfall_fname, zeta_fname):
    """
    Create a database

    Parameters
    ----------
    db_fname : str
        Filename for the database
    gfall_fname : str
        Filename for the GFALL file
    zeta_fname : str
        Filename for the Knox Long zeta-values file
    """
    session = init_db(db_fname)
    session.commit()
    # Ingest atomic weights
    weightscomp_ingester = NISTWeightsCompIngester(session)
    weightscomp_ingester.ingest()
    session.commit()
    # Ingest ionization energies
    ioniz_energies_ingester = NISTIonizationEnergiesIngester(
        session,
        spectra="h-zn"
    )
    ioniz_energies_ingester.ingest(
        ionization_energies=True,
        ground_levels=True
    )
    session.commit()
    # Ingest kurucz levels and lines
    gfall_ingester = GFALLIngester(session, gfall_fname, ions='H-Zn')
    gfall_ingester.ingest(levels=True, lines=True)
    session.commit()
    # Ingest chianti levels, lines and electron collisions
    # H I, He I-II
    chianti_ingester = ChiantiIngester(session, ions='H-He')
    chianti_ingester.ingest(levels=True, lines=True, collisions=True)
    session.commit()
    # Ingest the zeta (recombination) values
    zeta_ingester = KnoxLongZetaIngester(session, zeta_fname)
    zeta_ingester.ingest()
    session.close()
if __name__ == "__main__":
    # Fill in real paths before running; the placeholders below will fail.
    db_fname = "path/to/empty.db"  # Provide the path to the database
    gfall_fname = "path/to/gfall.dat"  # Provide the path to the gfall file
    zeta_fname = "path/to/zeta.dat"  # Provide the path to the zeta file
    create_test_db(
        db_fname=db_fname,
        gfall_fname=gfall_fname,
        zeta_fname=zeta_fname
    )
| 1,960 |
blambda/execute.py
|
balihoo/blambda
| 0 |
2171662
|
"""
Execute python lambda functions. This executes the deployed function on AWS.
"""
import json
import sys
import boto3
from botocore.client import Config as BotoConfig
from . import config
# Load the blambda configuration once at import time; used for arg defaults.
cfg = config.load()
def setup_parser(parser):
    """Register the execute subcommand's arguments on *parser*."""
    default_app = cfg.get('application')
    default_env = cfg.get('environment')
    parser.add_argument('function_name', type=str,
                        help='the base name of the function')
    parser.add_argument('--payload', type=str, default=None,
                        help='the payload function')
    parser.add_argument('--prefix', type=str, default=default_app,
                        help='the prefix for the function')
    parser.add_argument('--env', type=str, default=default_env,
                        help='the environment this function will run in')
def run(args):
    """Invoke the deployed Lambda function and pretty-print its JSON payload.

    Reads the payload from --payload or stdin, builds the fully-qualified
    function name from prefix/name/env, and prints the decoded response
    (or the raw response object on non-200 status or undecodable payload).
    """
    payload = args.payload
    if payload is None:
        print("reading payload from stdin")
        payload = sys.stdin.read()
    function_name = args.function_name
    if args.prefix:
        function_name = "{}_{}".format(args.prefix, function_name)
    if args.env:
        function_name = "{}_{}".format(function_name, args.env)
    client = boto3.client(
        'lambda',
        region_name='us-east-1',
        config=BotoConfig(
            connect_timeout=10,
            read_timeout=300)
    )
    response = client.invoke(
        FunctionName=function_name,
        Payload=payload.encode('utf-8')
    )
    if response['StatusCode'] == 200:
        try:
            payload = json.loads(response['Payload'].read())
            print(json.dumps(payload, indent=4))
        except (ValueError, KeyError):
            # Fix: narrowed from a bare ``except:`` (which also swallowed
            # KeyboardInterrupt/SystemExit). A non-JSON payload or unexpected
            # response shape falls back to printing the raw response.
            print(response)
    else:
        print(response)
| 1,570 |
py_tdlib/constructors/inline_query_result_photo.py
|
Mr-TelegramBot/python-tdlib
| 24 |
2171831
|
from ..factory import Type
class inlineQueryResultPhoto(Type):
    """TDLib type stub: a photo returned as an inline-query result.

    Field values are populated by the factory; the ``# type:`` comments
    mirror the TDLib schema types.
    """
    id = None # type: "string"
    photo = None # type: "photo"
    title = None # type: "string"
    description = None # type: "string"
| 195 |
python/problem_15.py
|
leoriviera/Project-Euler
| 1 |
2171654
|
def problem_15(grid_size=20):
    """Count monotonic lattice paths through a grid_size x grid_size grid.

    Generalization: the original hard-coded a 20x20 grid; grid_size is now a
    parameter defaulting to 20, so existing callers are unaffected.

    Paths move only right or down from the top-left to the bottom-right
    corner. Computed with the standard dynamic-programming recurrence:
    paths(x, y) = paths(x-1, y) + paths(x, y-1), with a single path along
    each edge of the grid.

    Args:
        grid_size: side length of the square grid (default 20).

    Returns:
        The number of distinct routes (equals C(2*grid_size, grid_size)).
    """
    # corners[y][x] holds the number of paths from the origin to corner (x, y).
    corners = [[1] * (grid_size + 1)]               # top edge: one path each
    for _ in range(grid_size):
        corners.append([1] + [0] * grid_size)       # left edge: one path each
    for y in range(1, grid_size + 1):
        for x in range(1, grid_size + 1):
            corners[y][x] = corners[y - 1][x] + corners[y][x - 1]
    return corners[grid_size][grid_size]
if __name__ == "__main__":
    # Print the Project Euler problem 15 answer when run as a script.
    answer = problem_15()
    print(answer)
| 1,434 |
odoo-13.0/addons/sale_expense/models/sale_order.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
| 0 |
2170846
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
from odoo import SUPERUSER_ID
from odoo.osv import expression
class SaleOrder(models.Model):
    """Extend sale.order with its linked 'done' expenses and their count."""
    _inherit = 'sale.order'

    # One2many to hr.expense via sale_order_id, restricted to state 'done'.
    expense_ids = fields.One2many('hr.expense', 'sale_order_id', string='Expenses', domain=[('state', '=', 'done')], readonly=True, copy=False)
    expense_count = fields.Integer("# of Expenses", compute='_compute_expense_count', compute_sudo=True)

    @api.model
    def _name_search(self, name='', args=None, operator='ilike', limit=100, name_get_uid=None):
        """ For expense, we want to show all sales order but only their name_get (no ir.rule applied), this is the only way to do it. """
        if self._context.get('sale_expense_all_order'):
            # Bypass record rules: restrict to confirmed orders of the current
            # companies and resolve display names as superuser.
            domain = expression.AND([args or [], ['&', ('state', '=', 'sale'), ('company_id', 'in', self.env.companies.ids)]])
            return super(SaleOrder, self.sudo())._name_search(name=name, args=domain, operator=operator, limit=limit, name_get_uid=SUPERUSER_ID)
        return super(SaleOrder, self)._name_search(name=name, args=args, operator=operator, limit=limit, name_get_uid=name_get_uid)

    @api.depends('expense_ids')
    def _compute_expense_count(self):
        """Count 'done' expenses per order using one grouped read."""
        expense_data = self.env['hr.expense'].read_group([('sale_order_id', 'in', self.ids), ('state', '=', 'done')], ['sale_order_id'], ['sale_order_id'])
        mapped_data = dict([(item['sale_order_id'][0], item['sale_order_id_count']) for item in expense_data])
        for sale_order in self:
            sale_order.expense_count = mapped_data.get(sale_order.id, 0)
| 1,663 |
ui/gen_ui.py
|
guitaristjimmy/KoGPT--Auto_Resume
| 0 |
2170564
|
# -*- coding: utf-8 -*-
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
import qdarkstyle
from kogpt2.utils import get_tokenizer
import torch
import gluonnlp
from gluonnlp.data import SentencepieceTokenizer
from model.torch_gpt2 import GPT2Config, GPT2LMHeadModel
from functools import partial
import numpy as np
import kss
### 1. KoGPT-2 configuration
ctx = 'cpu'  # 'cuda' or 'cpu' — inference device; use the GPU on Colab
cachedir = '~/kogpt2/'  # download path for the KoGPT-2 model
model_path = './gen_m.tar'  # fine-tuned checkpoint loaded in load_model()
load_path_moli_sim = 'C:\\Users\\K\\Desktop\\I_SW\\Python_Note\\gpt-2\\model\\narrativeKoGPT2_checkpoint_best.tar'
vocab_path = './vocab.spiece'  # sentencepiece vocabulary file
#use_cuda = True  # flag for GPU use inside Colab
# Published pretrained-weights descriptor (URL, filename, checksum).
pytorch_kogpt2 = {
    'url':
    'https://kobert.blob.core.windows.net/models/kogpt2/pytorch/pytorch_kogpt2_676e9bcfa7.params',
    'fname': 'pytorch_kogpt2_676e9bcfa7.params',
    'chksum': '676e9bcfa7'
}
# GPT-2 hyperparameters for the 50k-token KoGPT-2 model.
kogpt2_config = {
    "initializer_range": 0.02,
    "layer_norm_epsilon": 1e-05,
    "n_ctx": 1024,
    "n_embd": 768,
    "n_head": 12,
    "n_layer": 12,
    "n_positions": 1024,
    "vocab_size": 50000
}
class Ui_MainWindow(object):
    """Interactive, token-by-token KoGPT-2 text-generation window.

    The user edits seed text, presses "Start Generation", and then extends
    the text by picking one of ten candidate next tokens (the model's
    top-10 predictions, shown on buttons with their probabilities).
    """

    def __init__(self):
        # Token-id history fed to the model; extended as tokens are chosen.
        self.input_ids = []

    def setupUi(self, MainWindow):
        """Build all widgets/layouts, load the model, and wire up signals."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(940, 535)
        self.load_model()
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.central_layout = QGridLayout()
        self.centralwidget.setLayout(self.central_layout)
        self.text_group = QtWidgets.QGroupBox(self.centralwidget)
        self.text_group.setGeometry(QtCore.QRect(10, 5, 920, 435))
        self.text_group.setObjectName("text_group")
        self.text_layout = QGridLayout()
        self.text_group.setLayout(self.text_layout)
        self.gen_btn = QtWidgets.QPushButton(self.text_group)
        self.gen_btn.setObjectName("gen_btn")
        self.gen_btn.clicked.connect(self.gen_text)
        self.gen_btn.setDisabled(True)
        self.text_layout.addWidget(self.gen_btn, 0, 0, 1, 1)
        self.edit_btn = QtWidgets.QPushButton(self.text_group)
        self.edit_btn.setObjectName("edit_btn")
        self.edit_btn.clicked.connect(self.edit)
        self.text_layout.addWidget(self.edit_btn, 0, 1, 1, 1)
        self.text_edit = QtWidgets.QTextEdit(self.text_group)
        self.text_edit.setObjectName("text_edit")
        self.text_edit.setDisabled(True)
        self.text_layout.addWidget(self.text_edit, 1, 0, 10, 4)
        self.central_layout.addWidget(self.text_group, 0, 0, 10, 10)
        # Ten candidate-token buttons; button i selects prediction i.
        self.btn_list = []
        for i in range(10):
            self.btn_list.append(QtWidgets.QPushButton(self.centralwidget))
            self.btn_list[-1].setGeometry(QtCore.QRect(20+i*90, 450, 75, 50))
            self.btn_list[-1].setObjectName("btn_{}".format(i))
            self.btn_list[-1].clicked.connect(partial(self.next_gen, i))
            self.btn_list[-1].setDisabled(True)
            self.btn_list[-1].setText(' \n ')
            self.central_layout.addWidget(self.btn_list[-1], 11, i, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        # menu bar -----------------------------------------------------------------------------------------------------
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 922, 20))
        self.menubar.setObjectName("menubar")
        self.menuHelp = QtWidgets.QMenu(self.menubar)
        self.menuHelp.setObjectName("menuHelp")
        self.menuFile = QtWidgets.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        MainWindow.setMenuBar(self.menubar)
        self.actionHelp = QtWidgets.QAction(MainWindow)
        self.actionHelp.setShortcutVisibleInContextMenu(True)
        self.actionHelp.setObjectName("actionHelp")
        self.actionSave = QtWidgets.QAction(MainWindow)
        self.actionSave.setShortcutVisibleInContextMenu(True)
        self.actionSave.setObjectName("actionSave")
        self.menuHelp.addAction(self.actionHelp)
        self.menuFile.addAction(self.actionSave)
        self.menubar.addAction(self.menuHelp.menuAction())
        self.menubar.addAction(self.menuFile.menuAction())
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (titles, labels, shortcuts)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.text_group.setTitle(_translate("MainWindow", "Text"))
        self.gen_btn.setText(_translate("MainWindow", "Start Generation"))
        self.edit_btn.setText(_translate("MainWindow", "Edit Text"))
        self.menuHelp.setTitle(_translate("MainWindow", "Help"))
        self.menuFile.setTitle(_translate("MainWindow", "File"))
        self.actionHelp.setText(_translate("MainWindow", "Help"))
        self.actionHelp.setShortcut(_translate("MainWindow", "F1"))
        self.actionSave.setText(_translate("MainWindow", "Save"))
        self.actionSave.setShortcut(_translate("MainWindow", "Ctrl+S"))

    def edit(self):
        """Unlock the text box for manual editing."""
        self.edit_btn.setDisabled(True)
        self.text_edit.setDisabled(False)
        self.gen_btn.setDisabled(False)

    def gen_text(self):
        """Tokenize the edited text into input_ids and request predictions."""
        self.gen_btn.setDisabled(True)
        self.text_edit.setDisabled(True)
        self.edit_btn.setDisabled(False)
        sentences = self.text_edit.toPlainText()
        # Wrap each sentence with BOS/EOS token ids.
        for sent in kss.split_sentences(sentences):
            toked = self.tok(sent)
            self.input_ids += [self.vocab[self.vocab.bos_token], ] + \
                              self.vocab[toked] + \
                              [self.vocab[self.vocab.eos_token], ]
        self.run()

    def load_model(self):
        """Load the fine-tuned KoGPT-2 checkpoint, vocabulary and tokenizer."""
        ### 3. Checkpoint and device setup
        # Select the torch device
        self.device = torch.device(ctx)
        # Load the saved checkpoint
        checkpoint = torch.load(model_path, map_location=self.device)
        # Instantiate GPT2LMHeadModel for KoGPT-2 language modelling
        kogpt2model = GPT2LMHeadModel(config=GPT2Config.from_dict(kogpt2_config))
        # Drop the first dotted component of every state-dict key
        # (presumably a wrapper prefix such as DataParallel's — confirm).
        model_state_dict = {'.'.join(key.split('.')[1:]): checkpoint['model_state_dict'][key] for key in checkpoint['model_state_dict'].keys()}
        kogpt2model.load_state_dict(model_state_dict)
        kogpt2model.eval()
        vocab_b_obj = gluonnlp.vocab.BERTVocab.from_sentencepiece(vocab_path,
                                                                  mask_token=None,
                                                                  sep_token=None,
                                                                  cls_token=None,
                                                                  unknown_token='<unk>',
                                                                  padding_token='<pad>',
                                                                  bos_token='<s>',
                                                                  eos_token='</s>')
        ### 4. Tokenizer
        tok_path = get_tokenizer()
        self.tok = SentencepieceTokenizer(tok_path, alpha=0.0, num_best=0)
        self.model, self.vocab = kogpt2model, vocab_b_obj

    def top_k(self, predict, k=10):
        """Return the k most probable (token, probability) pairs of *predict*."""
        gen = []
        print(np.shape(predict))
        probs, indexs = torch.topk(predict, k=k, dim=-1)
        # probs = probs.squeeze().tolist()[-1]
        # indexs = indexs.squeeze().tolist()[-1]
        probs = probs.tolist()
        indexs = indexs.tolist()
        print('indexs :: ', indexs)
        for i in range(len(indexs)):
            gen.append((self.vocab.to_tokens(indexs[i]), probs[i]))
        return gen

    def run(self):
        """Run one forward pass and show the top-10 next-token candidates."""
        # sent = self.text_edit.toPlainText()
        # toked = self.tok(sent)
        # input_ids = torch.tensor([self.vocab[self.vocab.bos_token], ] +
        #                          self.vocab[toked] +
        #                          [self.vocab[self.vocab.eos_token], ]).unsqueeze(0).to(self.device)
        # Trim the oldest ids so the context fits the 1024-token window.
        while True:
            if len(self.input_ids) >= 1024:
                print('del')
                del self.input_ids[0]
            else:
                break
        print('input ids :: ', np.shape(self.input_ids))
        predicts = self.model(torch.tensor(self.input_ids).unsqueeze(0).to(self.device))
        # Logits of the final position = next-token distribution.
        pred = predicts[0].squeeze()[-1]
        k_list = self.top_k(pred)
        # for idx in range(len(self.btn_list)):
        #     self.btn_list[idx].setDisabled(True)
        for idx, k in enumerate(k_list):
            self.btn_list[idx].setText(f'{k[0]}\n({round(k[1], 2)})')
            self.btn_list[idx].setDisabled(False)

    def next_gen(self, btn_idx):
        """Append the chosen button's token to the text and predict again."""
        selected_word = self.btn_list[btn_idx].text().split('\n')[0]
        self.text_edit.setText(self.text_edit.toPlainText() + selected_word)
        # Special tokens map to their reserved ids; plain text is re-tokenized.
        if selected_word == '<s>':
            self.input_ids += [self.vocab[self.vocab.bos_token], ]
        elif selected_word == '</s>':
            self.input_ids += [self.vocab[self.vocab.eos_token], ]
        else:
            self.input_ids += self.vocab[self.tok(selected_word)]
        self.run()
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    # Apply the qdarkstyle dark theme application-wide.
    dark_stylesheet = qdarkstyle.load_stylesheet_pyqt5()
    app.setStyleSheet(dark_stylesheet)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
| 9,546 |
forager/cb/generate_feed.py
|
opensourcesec/Forager
| 72 |
2167884
|
__author__ = 'CarbonBlack, byt3smith'
# stdlib imports
import re
import sys
import time
import urllib.request, urllib.parse, urllib.error
import json
import optparse
import socket
import base64
import hashlib
# cb imports
sys.path.insert(0, "../../")
from .cbfeeds.feed import CbReport
from .cbfeeds.feed import CbFeed
from .cbfeeds.feed import CbFeedInfo
#pypi
from colorama import Fore, Back, Style, init
# Initialize colorama
init(autoreset=True)
def gen_report_id(iocs):
    """Return a deterministic, order-independent id for a set of indicators.

    Because generate_feed_from_raw may be run repeatedly on the same data,
    the id must be deterministic: the indicators are hashed in sorted order,
    so any permutation of the same IOC set yields the same report id.
    """
    md5 = hashlib.md5()
    # Fix: hash a sorted *copy* so the id stays order-independent without
    # mutating the caller's list (the original sorted it in place).
    for ioc in sorted(iocs):
        md5.update(ioc.strip().encode('utf-8'))
    return md5.hexdigest()
def build_reports(options):
    """Read the IOC file named in *options* and return one CbReport.

    Each non-empty line of options['ioc_file'] is classified as an IPv4
    address, an MD5 hash (32 hex chars), or a DNS name (contains a dot);
    unrecognizable lines are silently dropped.
    """
    reports = []
    ips = []
    domains = []
    md5s = []
    # read all of the lines (of text) from the provided input file (of IOCs)
    iocs = options['ioc_file']
    try:
        # Fix: ``with`` guarantees the handle is closed (it leaked before).
        with open(iocs) as ioc_fh:
            raw_iocs = ioc_fh.readlines()
    except OSError:
        # Fix: narrowed from a bare ``except:`` and exit status corrected —
        # a missing input file is a failure, so signal a nonzero exit code
        # (the original called exit(0), reporting success).
        print((Fore.RED + '\n[-]' + Fore.RESET), end=' ')
        print('Could not open %s' % iocs)
        sys.exit(1)
    # classify each line as an ipv4 address, dns name, or md5
    for raw_ioc in raw_iocs:
        # strip leading/trailing whitespace; skip empty lines
        raw_ioc = raw_ioc.strip()
        if len(raw_ioc) == 0:
            continue
        try:
            # attempt to parse the line as an ipv4 address
            socket.inet_aton(raw_ioc)
            # parsed as an ipv4 address!
            ips.append(raw_ioc)
        except OSError:
            # not an IP (inet_aton raises OSError); try md5, then domain
            if 32 == len(raw_ioc) and \
                    re.findall(r"([a-fA-F\d]{32})", raw_ioc):
                md5s.append(raw_ioc)
            elif -1 != raw_ioc.find("."):
                domains.append(raw_ioc)
    fields = {'iocs': {
              },
              'timestamp': int(time.mktime(time.gmtime())),
              'link': options['feed_link'],
              'title': options['report_name'],
              'id': gen_report_id(ips + domains + md5s),
              'score': 100}
    if len(ips) > 0:
        fields['iocs']['ipv4'] = ips
    if len(domains) > 0:
        fields['iocs']['dns'] = domains
    if len(md5s) > 0:
        fields['iocs']['md5'] = md5s
    reports.append(CbReport(**fields))
    return reports
def create_feed(options):
    """Build a CbFeed from JSON feed metadata and return its serialized dump.

    :param options: JSON string with feed metadata keys (name, display_name,
        provider_url, summary, tech_data, icon, ioc_file, feed_link,
        report_name)
    :return: the serialized feed (``feed.dump()``)
    """
    feed_meta = json.loads(options)
    # generate the required feed information fields
    # based on command-line arguments
    feedinfo = {'name': feed_meta['name'],
                'display_name': feed_meta['display_name'],
                'provider_url': feed_meta['provider_url'],
                'summary': feed_meta['summary'],
                'tech_data': feed_meta['tech_data']}
    # if an icon was provided, encode as base64 and
    # include in the feed information
    if feed_meta['icon']:
        try:
            # Fix: open in binary mode -- the icon is binary data and
            # base64.b64encode() requires bytes (the old text-mode read
            # raised TypeError under Python 3).  `with` also closes the
            # handle, and the `bytes` builtin is no longer shadowed.
            with open(feed_meta['icon'], 'rb') as icon_file:
                feedinfo['icon'] = base64.b64encode(icon_file.read())
        except IOError:
            print((Fore.RED + '\n[-]' + Fore.RESET), end=' ')
            print('Could not open %s. Make sure file still exists.\n' % feed_meta['icon'])
    # build a CbFeedInfo instance
    # this does field validation
    feedinfo = CbFeedInfo(**feedinfo)
    # build a list of reports (always one report in this
    # case). the single report will include all the IOCs
    reports = build_reports(feed_meta)
    # build a CbFeed instance
    # this does field validation (including on the report data)
    feed = CbFeed(feedinfo, reports)
    return feed.dump()
| 4,088 |
src/nvd-action.py
|
andydennis/aiyproject-raspi-nvd
| 0 |
2168812
|
import feedparser
class NistLatest(object):
    """Speak the newest entry of the NVD RSS feed.

    Add the following command to your action.py file make_actor function:

        actor.add_keyword(_('nvd latest'), NistLatest(say))
    """

    def __init__(self, say):
        self.nvd_rss_url = "https://nvd.nist.gov/download/nvd-rss.xml"
        self.say = say

    def run(self, command):
        # Only the first (most recent) feed entry is announced.
        newest = feedparser.parse(self.nvd_rss_url)['entries'][0]
        self.say(newest['title'])
        self.say(newest['summary'])
| 556 |
src/ede/ede_lexer.py
|
Rayshard/ede-pl
| 2 |
2172292
|
from .ede_utils import Error, ErrorType, Result, Success, char
from typing import List
from .ede_token import SYMBOL_DICT_INV, Position, Token, TokenType, is_keyword, is_symbol
# TODO: Comment File
EOF = '\0'
class Reader:
    """Character stream with 1-based line/column tracking for the lexer."""

    def __init__(self, stream: str) -> None:
        self.stream = stream
        self.ptr = 0
        self.line = 1
        self.column = 1

    def peek(self) -> str:
        """Return the current character without consuming it (EOF at end)."""
        if self.ptr >= len(self.stream):
            return EOF
        return self.stream[self.ptr]

    def read(self) -> str:
        """Consume and return the current character, updating line/column."""
        result = self.peek()
        if result == EOF:
            return result
        self.ptr += 1
        if result == '\n':
            self.line += 1
            self.column = 1
        else:
            self.column += 1
        return result

    def get_position(self):
        """Snapshot the current location as a Position."""
        return Position(self.line, self.column)
def lex_integer(reader: Reader) -> Result[Token]:
    """Lex a run of decimal digits into an integer token."""
    position = reader.get_position()
    digits = ''
    while reader.peek().isdigit():
        digits += reader.read()
    if digits == '':
        # No digit at the current position.
        return LexError.InvalidIntLit(position)
    return Success(Token.Integer(position, int(digits)))
def lex_id_or_keyword(reader: Reader) -> Result[Token]:
    """Lex an identifier or keyword: [A-Za-z_][A-Za-z0-9_]*."""
    position = reader.get_position()
    # The initial character must be a letter or underscore.
    head = reader.peek()
    if not (head.isalpha() or head == '_'):
        return LexError.InvalidID(position)
    text = reader.read()
    # Subsequent characters may also be digits.
    while reader.peek().isalnum() or reader.peek() == '_':
        text += reader.read()
    if is_keyword(text):
        return Success(Token.Keyword(position, text))
    return Success(Token.Identifier(position, text))
def lex_string(reader: Reader) -> Result[Token]:
    """Lex a double-quoted string literal, handling backslash escapes."""
    position = reader.get_position()
    # Attempt to read the opening quote.
    if reader.peek() != '"':
        return LexError.InvalidStringLit(position)
    reader.read()
    # Recognized escape sequences; anything else is kept verbatim with its
    # backslash.
    escapes = {'t': '\t', 'n': '\n', '\\': '\\', '0': '\0', '"': '"'}
    text = ''
    # Read contents until an unescaped " is reached; EOF is an error.
    while True:
        current = reader.peek()
        if current == '"':
            reader.read()
            break
        if current == EOF:
            return LexError.UnexpectedEOF(position, "String literal must be closed with a \".")
        if current == '\\':
            reader.read()
            escaped = reader.read()
            if escaped == EOF:
                return LexError.UnexpectedEOF(position, "String literal must be closed with a \".")
            text += escapes.get(escaped, '\\' + escaped)
            continue
        text += reader.read()
    return Success(Token.String(position, text))
def lex_char(reader: Reader) -> Result[Token]:
    """Lex a single-quoted char literal: exactly one (possibly escaped) character.

    NOTE(review): the escape table mirrors lex_string -- it accepts \" but NOT
    \', so the quote character itself cannot be written as a char literal
    ('\'' is rejected as "more than one character").  Looks copy-pasted from
    the string lexer; confirm whether \' should be supported.
    """
    result = ''
    position = reader.get_position()
    # Attempt to read the initial '
    if reader.peek() != '\'':
        return LexError.InvalidCharLit(position, "Char literal must start with symbol: ' ")
    else:
        reader.read()
    # Read literal contents
    c = reader.peek()
    if c == '\\':
        # Escaped character: translate the recognized escapes, otherwise fail.
        reader.read()
        escaped_char = reader.read()
        if escaped_char == 't':
            result += '\t'
        elif escaped_char == 'n':
            result += '\n'
        elif escaped_char == '\\':
            result += '\\'
        elif escaped_char == '0':
            result += '\0'
        elif escaped_char == '"':
            result += '"'
        elif escaped_char == EOF:
            return LexError.UnexpectedEOF(position, "Char literal must be closed with a '.")
        else:
            return LexError.InvalidCharLit(position, "Char literal must contain only one character.")
    elif c == '\'':
        # Empty literal '' is rejected.
        reader.read()
        return LexError.InvalidCharLit(position, "Char literal must contain one character.")
    elif c == EOF:
        return LexError.UnexpectedEOF(position, "Char literal must contain one character'.")
    else:
        result += reader.read()
    # The literal must be closed by exactly one trailing quote.
    if reader.peek() != '\'':
        return LexError.InvalidCharLit(position, "Char literal must contain one character and be closed with a '.")
    else:
        reader.read()
    return Success(Token.Char(position, char(result)))
def lex(reader: Reader) -> Result[Token]:
    """Lex and return the next token from *reader*.

    Dispatches on the first non-whitespace character: digits -> integer,
    '"' -> string, "'" -> char, symbol characters -> symbol/comment,
    EOF -> EOF token; anything else is tried as an identifier/keyword and
    falls back to an Invalid token for the single offending character.
    """
    # Skip whitespace
    while reader.peek().isspace():
        reader.read()
    position = reader.get_position()
    char = reader.peek()
    if char.isdigit():
        return lex_integer(reader)
    elif char == '"':
        return lex_string(reader)
    elif char == '\'':
        return lex_char(reader)
    elif is_symbol(char):
        # Greedily extend the symbol while the longer prefix is still a symbol
        # (maximal munch).
        symbol = reader.read()
        while is_symbol(symbol + reader.peek()):
            symbol += reader.read()
        if symbol == SYMBOL_DICT_INV[TokenType.SYM_LINE_COMMENT]:
            # Line comment: consume to end of line (or EOF).
            value = ""
            while reader.peek() not in ['\n', EOF]:
                value += reader.read()
            return Success(Token.Comment(position, value))
        elif symbol == SYMBOL_DICT_INV[TokenType.SYM_COMMENT_OPEN]:
            # Block comment: consume until the closing symbol; EOF is an error.
            value = ""
            while True:
                next_char = reader.read()
                if next_char == EOF:
                    return LexError.InvalidComment(position)
                elif next_char + reader.peek() == SYMBOL_DICT_INV[TokenType.SYM_COMMENT_CLOSE]:
                    reader.read()
                    return Success(Token.Comment(position, value))
                value += next_char
        return Success(Token.Symbol(position, symbol))
    elif char == EOF:
        return Success(Token.EOF(reader.get_position()))
    else:
        attempt = lex_id_or_keyword(reader)
        if attempt.is_success():
            return attempt
        else:
            # Consume one character so lexing always makes progress.
            return Success(Token.Invalid(reader.get_position(), reader.read()))
def tokenize(reader: Reader, keep_comments: bool = False) -> Result[List[Token]]:
    """Lex the entire stream into a token list ending with an EOF token.

    Comment tokens are dropped unless keep_comments is True.  The first
    lexing error aborts tokenization and is returned as-is.
    """
    tokens: List[Token] = []
    while True:
        result = lex(reader)
        if result.is_error():
            return result.error()
        token = result.get()
        if token.type == TokenType.COMMENT and not keep_comments:
            continue
        tokens.append(token)
        if token.type == TokenType.EOF:
            return Success(tokens)
class LexError:
    '''Wrapper for lexing errors'''

    @staticmethod
    def InvalidIntLit(pos: Position) -> Error:
        """No digits found where an integer literal was expected."""
        return Error(ErrorType.LEXING_INVALID_INT_LIT, pos)

    @staticmethod
    def InvalidStringLit(pos: Position) -> Error:
        """Missing opening quote for a string literal."""
        return Error(ErrorType.LEXING_INVALID_STR_LIT, pos)

    @staticmethod
    def InvalidCharLit(pos: Position, why: str) -> Error:
        """Malformed char literal; *why* carries the specific reason."""
        return Error(ErrorType.LEXING_INVALID_CHAR_LIT, pos, why)

    @staticmethod
    def InvalidComment(pos: Position) -> Error:
        """Block comment never closed before end of input."""
        return Error(ErrorType.LEXING_INVALID_COMMENT, pos, "Expected */ to close comment")

    @staticmethod
    def UnexpectedEOF(pos: Position, msg: str) -> Error:
        """Input ended in the middle of a literal."""
        return Error(ErrorType.LEXING_UNEXPECTED_EOF, pos, msg)

    @staticmethod
    def InvalidID(pos: Position) -> Error:
        """Identifier must start with a letter or underscore."""
        return Error(ErrorType.LEXING_INVALID_ID, pos, "Expected letter or _ for identifier.")
| 7,603 |
src/rotation.py
|
vitvakatu/skelevisors
| 4 |
2171402
|
#!/usr/bin/env python
import rospy
from std_msgs.msg import String
from os import system
import tf2_msgs.msg
import roslib
import cmath as math
import geometry_msgs.msg as msgs
import turtlesim.srv
from darwin_gazebo.darwin import Darwin
# Latest left-arm joint state, updated by callback() from incoming TF frames.
# Coordinates are stored as (y, x, z) -- see the translation unpacking in
# callback().
shoulder_coord = (0.0, 0.0, 0.0)
hand_coord = (0.0, 0.0, 0.0)
elbow_coord = (0.0, 0.0, 0.0)
shoulder_rotation = (0.0, 0.0, 0.0)
hand_rotation = (0.0, 0.0, 0.0)
elbow_rotation = (0.0, 0.0, 0.0)
def sub(s, f):
    """Return the component-wise difference f - s of two 3-vectors."""
    return tuple(f[i] - s[i] for i in range(3))
def length(v):
    """Return the Euclidean norm of 3-vector v.

    Note: `math` is `cmath` in this module, so the result is complex
    (with zero imaginary part for real inputs).
    """
    return math.sqrt(sum(component ** 2 for component in v[:3]))
def angle(v):
    """Return (phi, theta) spherical-style angles of 3-vector v.

    phi is the elevation atan(z / hypot(x, y)); theta is acos of the
    x component over the same horizontal magnitude.  Only the real parts
    are returned (`math` is `cmath` in this module).
    """
    x, y, z = (component * 100 for component in v[:3])
    horizontal = math.sqrt(x ** 2 + y ** 2)
    phi = math.atan(z / horizontal)
    theta = math.acos(x / horizontal)
    return (phi.real, theta.real)
def angle_vectors(v1, v2):
    """Return the angle in radians between two 3-vectors (real part only)."""
    dot = sum(a * b for a, b in zip(v1[:3], v2[:3]))
    norms = math.sqrt(v1[0] ** 2 + v1[1] ** 2 + v1[2] ** 2) \
        * math.sqrt(v2[0] ** 2 + v2[1] ** 2 + v2[2] ** 2)
    return math.acos(dot / norms).real
def translate(value, leftMin, leftMax, rightMin, rightMax):
    """Linearly map *value* from [leftMin, leftMax] into [rightMin, rightMax]."""
    # Normalize into a 0-1 fraction of the source range, then scale that
    # fraction into the target range.
    fraction = float(value - leftMin) / float(leftMax - leftMin)
    return rightMin + fraction * (rightMax - rightMin)
def callback(data):
    """TF subscriber callback: update left-arm joint globals and print angles.

    Updates the module-level coordinate/rotation globals from any
    left_hand/left_shoulder/left_elbow transforms in the message, then prints
    the elbow's angles to the coordinate axes relative to the shoulder.
    """
    global hand_coord
    global shoulder_coord
    global elbow_coord
    global shoulder_rotation
    global hand_rotation
    global elbow_rotation
    for tr in data.transforms:
        if tr.child_frame_id.startswith('left_hand'):
            translation = tr.transform.translation
            # Bug fix: `rotation` was read here without being assigned in this
            # branch (NameError on the first hand frame, stale shoulder data
            # afterwards); take it from the current transform.
            rotation = tr.transform.rotation
            hand_coord = (translation.y, translation.x, translation.z)
            hand_rotation = (rotation.x, rotation.y, rotation.z)
        if tr.child_frame_id.startswith('left_shoulder'):
            translation = tr.transform.translation
            rotation = tr.transform.rotation
            shoulder_coord = (translation.y, translation.x, translation.z)
            shoulder_rotation = rotation
        if tr.child_frame_id.startswith('left_elbow'):
            translation = tr.transform.translation
            # Same fix as the hand branch: read this transform's rotation.
            rotation = tr.transform.rotation
            elbow_coord = (translation.y, translation.x, translation.z)
            elbow_rotation = (rotation.x, rotation.y, rotation.z)
    # Work in shoulder-relative coordinates.
    relative_elbow = sub(shoulder_coord, elbow_coord)
    relative_hand = sub(shoulder_coord, hand_coord)
    (phi_elbow, theta_elbow) = angle(relative_elbow)
    elbow_to_hand = sub(relative_elbow, relative_hand)
    angle_elbow_to_hand = angle_vectors(elbow_to_hand, relative_elbow)
    # Angles between the shoulder->elbow vector and each coordinate axis.
    elbow_to_axis_z = math.acos(relative_elbow[2] / math.sqrt(relative_elbow[0] ** 2 + relative_elbow[1] ** 2 + relative_elbow[2] ** 2)).real
    elbow_to_axis_x = math.acos(relative_elbow[0] / math.sqrt(relative_elbow[0] ** 2 + relative_elbow[1] ** 2 + relative_elbow[2] ** 2)).real
    elbow_to_axis_y = math.acos(relative_elbow[1] / math.sqrt(relative_elbow[0] ** 2 + relative_elbow[1] ** 2 + relative_elbow[2] ** 2)).real
    print('ELBOW_TO_Z: ', elbow_to_axis_z)
    print('ELBOW_TO_Y: ', elbow_to_axis_y)
    print('SHOLDER ROT: ', shoulder_rotation)
    print(' ')
if __name__ == '__main__':
    # Start the ROS node and hand every TF message to callback(); spin()
    # blocks until shutdown.
    rospy.init_node("walker_demo", anonymous=True)
    print('Hello')
    rospy.loginfo("Darwin initialization finished")
    print("Hello")
    rospy.Subscriber('tf', tf2_msgs.msg.TFMessage, callback)
    rospy.spin()
| 3,920 |
ok_cart/pipelines.py
|
LowerDeez/ok-cart
| 3 |
2171899
|
from typing import Dict, Iterable, Optional, TYPE_CHECKING
from django.conf import settings as django_settings
from .settings import settings
if TYPE_CHECKING:
from django.db.models import Model
from .models import Cart, CartItem, CartGroup
__all__ = (
'run_add_pipelines',
'run_post_add_pipelines',
)
def run_add_pipelines(
        *,
        cart: 'Cart',
        user: django_settings.AUTH_USER_MODEL,
        content_object: 'Model',
        cart_item: 'CartItem',
        cart_group: Optional['CartGroup'],
        quantity: int,
        parameters: Dict,
        **kwargs
):
    """
    Run pipelines after adding each passed item to the cart
    """
    for pipeline in settings.ADD_PIPELINES:
        # Skip once a previous pipeline has deleted the cart item
        # (pk becomes falsy); re-checked each iteration on purpose.
        if not cart_item.pk:
            continue
        pipeline(
            cart=cart,
            user=user,
            content_object=content_object,
            cart_item=cart_item,
            cart_group=cart_group,
            quantity=quantity,
            parameters=parameters,
            **kwargs
        )
def run_post_add_pipelines(
        *,
        cart: 'Cart',
        user: django_settings.AUTH_USER_MODEL,
        cart_items: Iterable['CartItem'] = None,
        **kwargs
):
    """
    Run pipelines after adding all passed items to the cart
    """
    # Every pipeline receives the same keyword arguments.
    shared_kwargs = dict(cart=cart, user=user, cart_items=cart_items, **kwargs)
    for pipeline in settings.POST_ADD_PIPELINES:
        pipeline(**shared_kwargs)
| 1,513 |
django-system/src/products/views.py
|
Deepak-Kharah/ioe-project
| 0 |
2171997
|
from django.views.generic import ListView, DetailView
from .models import Product
class ProductListView(ListView):
    """List every product, rendered with products/product_list.html.

    The queryset is exposed to the template under the name ``products``.
    """
    template_name = 'products/product_list.html'
    context_object_name = 'products'

    def get_queryset(self):
        # No filtering: the full product table is listed.
        return Product.objects.all()
class ProductDetailView(DetailView):
    """Render a single Product with products/product_detail.html."""
    model = Product
    template_name = 'products/product_detail.html'
| 380 |
data_extraction/getrouteparseimagesspark.py
|
paloukari/road-scanner
| 0 |
2172120
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 29 16:47:23 2019
@author: achakrab
"""
import requests
import os
import sys
from pyspark import SparkContext
import json
import os
import pandas as pd
import numpy as np
from os import path
from PIL import Image
import numpy as np
import shutil
# Route/Google-API configuration.  NOTE(review): `apikey` is empty and must be
# filled in before running; `filename` appears unused at module level.
filename=''
origin='24 Willie Mays Plaza'
destination='1000 S Prairie Ave'
apikey=''
waypoints=['']
def make_lat_long_file(origin,destination,waypoints):
    """Fetch a driving route, interpolate and road-snap points, write route.txt.

    Queries the Google Directions API for origin->destination (via waypoints),
    collects every step's start/end coordinate, interpolates roughly one point
    per 300 m, snaps the points to roads via the Roads API, and dumps
    {'route': [...]} to route.txt as JSON.

    NOTE(review): communicates through module-level globals (url, r,
    unique_points_interpol) as a side channel, and assumes the module-level
    `apikey` is set.
    """
    bk = apikey
    BASE_URL_DIRECTIONS = 'https://maps.googleapis.com/maps/api/directions/json?'
    # URL-encode the addresses (spaces -> '+').
    origin = 'origin=%s' %(('+').join(origin.split(' ')))
    destination = 'destination=%s' %(('+').join(destination.split(' ')))
    #waypoints = 'waypoints=via:CA-29'
    waymap=[('+').join(item) for item in waypoints]
    waypoints = 'waypoints=%s' %(('|').join([item for item in waymap]))#&waypoints=Charlestown,MA|via:Lexington,MA
    key = '&key=' + bk
    global url
    url = BASE_URL_DIRECTIONS + origin + '&' + destination + waypoints + key
    global r
    r = requests.get(url)
    # Collect start/end coordinates of every step and total distance (metres).
    lat_lng = []
    distance = 0
    for i in range(len(r.json()['routes'][0]['legs'])):
        #print('big',r.json()['routes'][0]['legs'][i]['end_location']) #These are included in the steps
        #print('big',r.json()['routes'][0]['legs'][i]['start_location']) #These are included in the steps
        for j in range(len(r.json()['routes'][0]['legs'][i]['steps'])):
            distance += r.json()['routes'][0]['legs'][i]['steps'][j]['distance']['value']
            end = r.json()['routes'][0]['legs'][i]['steps'][j]['end_location']
            start = r.json()['routes'][0]['legs'][i]['steps'][j]['start_location']
            lat_lng += [(start['lat'],start['lng']),(end['lat'],end['lng'])]
    # NOTE(review): dead code -- never called; it also interpolates over the
    # enclosing scope's `unique_points_original`/`interpolator_n` (defined
    # below) instead of its own `up`/`n`.  Candidate for deletion; confirm.
    def get_int_coords_from_json(r):
        lat_lng = []
        distance = 0
        for i in range(len(r.json()['routes'][0]['legs'])):
            for j in range(len(r.json()['routes'][0]['legs'][i]['steps'])):
                distance += r.json()['routes'][0]['legs'][i]['steps'][j]['distance']['value']
                end = r.json()['routes'][0]['legs'][i]['steps'][j]['end_location']
                start = r.json()['routes'][0]['legs'][i]['steps'][j]['start_location']
                lat_lng += [(start['lat'],start['lng']),(end['lat'],end['lng'])]
        up = list(set(lat_lng))
        n = round((distance/300)/len(unique_points_original))
        upi = []
        for i in range(len(up)-1):
            upi += list(map(tuple,np.linspace(unique_points_original[i],unique_points_original[i+1],interpolator_n)))
        upi = sorted(list(set(upi)),key = lambda x: x[0])
        return upi
    unique_points_original = list(set(lat_lng))
    interpolator_n = round((distance/300)/len(unique_points_original)) #one picture every 300 mts
    global unique_points_interpol
    unique_points_interpol = []
    # Linearly interpolate extra points between consecutive unique points.
    for i in range(len(unique_points_original)-1):
        unique_points_interpol += list(map(tuple,np.linspace(unique_points_original[i],unique_points_original[i+1],interpolator_n)))
    unique_points_interpol = sorted(list(set(unique_points_interpol)),key = lambda x: x[0])
    unique_points_interpol  # NOTE(review): no-op expression statement
    def create_path(points):
        # Build the 'path=lat,lng|lat,lng|...' query fragment.
        path = 'path='
        for i in points:
            path += str(i[0]) + ',' + str(i[1]) + '|'
        return path[:-1] #remove last '|'
    def get_coords(r,points):
        # Appends every snapped point to `points` in place AND returns it.
        for i in r.json()['snappedPoints']:
            points += [(i['location']['latitude'],i['location']['longitude'])]
        return points
    BASE_URL_SNAP = 'https://roads.googleapis.com/v1/snapToRoads?'
    interpolate = '&interpolate=true'
    points = []
    k = 0
    coords_list = []
    # Snap points to roads in batches of at most 100 (Roads API limit).
    while k <= len(unique_points_interpol)-1:
        coords_list += [unique_points_interpol[k]]
        if (len(coords_list)%100==0) or (k+1==len(unique_points_interpol)): #When we have 100 points or we reach the end of the list.
            path = create_path(coords_list)
            url = BASE_URL_SNAP + path + interpolate + key
            r = requests.get(url)
            # NOTE(review): get_coords() already mutates `points`, so this
            # `+=` duplicates each batch; duplicates are collapsed by the
            # list(set(points)) below -- confirm intended.
            points += get_coords(r,points)
            coords_list = []
        k += 1
    with open('route.txt','w') as f:
        json.dump({'route':list(set(points))},f)
# Build the route file, then reload it for the image-download phase.
make_lat_long_file(origin,destination,waypoints)
##downloading images now
with open('route.txt','r') as f:
    content1=f.readlines()
# NOTE(review): eval() on file contents -- route.txt is produced locally just
# above, but json.loads would be the safe equivalent; confirm before reuse.
content1[0]=eval(content1[0])
#content[0]=eval(content[0])
content=content1[0]
# Flatten {'route': [(lat, lng), ...]} into a place/lat/long DataFrame.
for item in content:
    if item==list(content.keys())[0]:
        file=pd.concat((pd.DataFrame(data=np.asarray([item]*len(content[item]))),pd.DataFrame(np.asarray(content[item]))),axis=1)
    else:
        file=pd.concat((file,pd.concat((pd.DataFrame(data=np.asarray([item]*len(content[item]))),pd.DataFrame(np.asarray(content[item]))),axis=1)))
file.columns=['place','lat','long']
#filename='coordinatesascsv.txt'
file.to_csv('coordinatesascsv.csv')
# Spark phase: the coordinate list is processed as an RDD of CSV lines.
sc=SparkContext()
f=sc.textFile("smalllistofcoordinates.txt")
def mapf(x):
    """Split a comma-separated record into a tuple of its first four fields."""
    fields = x.split(',')
    return tuple(fields[i] for i in range(4))
f1=f.map(mapf).filter(lambda x: x[0]!='')
os.chdir('picturess360')
org_dest_string='%s-%s' %(origin,destination)
if org_dest_string not in os.listdir(os.getcwd())[0]:
os.system('rm ./*')
def create_image(x):
    """Download four Street View images (headings 0/90/180/270) for one point.

    :param x: a (index, lat, long, location) record produced by mapf()
    Writes each image as a .jpg in the current directory; skips the download
    when a non-trivial (>= 5 KB) image file already exists.
    NOTE(review): the filename does not include the heading, so all four
    headings write to the same file -- confirm whether that is intended.
    """
    for step in range(0, 4):
        lat = x[1]
        long = x[2]
        location = x[3]
        heading = str(90 * step)
        query = 'https://maps.googleapis.com/maps/api/streetview?size=400x400&location=%s,%s&fov=90&heading=%s&pitch=10&key=%s' % (str(lat), str(long), heading, apikey)
        page = requests.get(query)
        filename = '%s-%s-%s-%s-%s-%s.jpg' % (origin, destination, str(x[0]), str(lat), str(long), location.replace('/', '-'))
        # Fixes: check the image file itself (the old code checked
        # filename + ".txt", which is never created, so the guard always
        # passed), and use 5 * 10 ** 3 bytes -- `5*10^3` is XOR, which
        # evaluated to 49 bytes instead of 5 KB.
        if not path.exists(filename) or os.path.getsize(filename) < 5 * 10 ** 3:
            with open(filename, 'wb') as out:
                out.write(page.content)
f1.map(create_image).collect()
| 6,059 |
rdftools/tools/lubm.py
|
cosminbasca/rdftools
| 4 |
2171311
|
#
# author: <NAME>
#
# Copyright 2010 University of Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import io
import sh
import re
from multiprocessing import Pool, cpu_count
from rdftools import interval_split
from rdftools.tools.base import RdfTool
from rdftools.tools.jvmrdftools import run_lubm_generator
from rdftools.tools.raptor import RaptorRdf
__author__ = 'basca'
# Number of universities each worker process generates per batch.
UNIS_PER_WORKER = 20
class Lubm(RdfTool):
    """LUBM benchmark data generator wrapper.

    Runs the LUBM university generator in parallel worker processes, converts
    the output to ntriples, and merges the per-university part files.
    NOTE(review): Python 2 only -- uses `xrange` and a bare `print` statement.
    """

    def __init__(self, ontology=None, path=None, *args, **kwargs):
        # ontology: LUBM ontology URI (defaults to the canonical univ-bench OWL);
        # path: output directory (defaults to the current working directory).
        super(Lubm, self).__init__(*args, **kwargs)
        self._ontology = ontology if ontology else 'http://www.lehigh.edu/~zhp2/2004/0401/univ-bench.owl'
        self._output_path = os.path.abspath(path) if path else os.getcwd()
        if not os.path.isdir(self._output_path):
            raise ValueError('path: {0} is not a valid path'.format(self._output_path))

    @property
    def ontology(self):
        """
        the lubm ontology, used internally by the generator
        :return: the lubm ontology
        """
        return self._ontology

    def _run(self, num_universities, index=0, generator_seed=0, workers = -1):
        """
        a paralel version of the `generate` method
        :param num_universities: number of universities to generate
        :param index: the index at whitch the generation process starts
        :param generator_seed: a seed used by the generator
        :return: None
        """
        def job_finished(res):
            # async callback; results are written to disk, nothing to do here
            pass
        num_cpus = cpu_count()
        # fall back to one worker per CPU when `workers` is out of range
        num_workers = workers if 0 < workers < num_cpus else num_cpus
        pool = Pool(processes=num_workers)
        # for start, unis_per_worker in interval_split(num_workers, num_universities, threshold=10):
        # dispatch generation jobs in fixed-size batches of universities
        for start in xrange(0, num_universities, UNIS_PER_WORKER):
            idx = start + index
            # last batch may be smaller than UNIS_PER_WORKER
            unis_per_worker = UNIS_PER_WORKER if (start + UNIS_PER_WORKER) < num_universities else num_universities - start
            self._log.info('run lubm generator [%s, %s]', idx, unis_per_worker)
            pool.apply_async(
                run_lubm_generator,
                (unis_per_worker, idx, generator_seed, self.ontology, self._output_path),
                callback=job_finished)
        pool.close()
        self._log.info('wait for work to finalize')
        pool.join()
        # convert all to ntriples
        self._log.info('converting to ntriples ... ')
        rdf_converter = RaptorRdf()
        rdf_converter(self._output_path, destination_format='ntriples', buffer_size=16, clear=True)
        print
        # now concatenate all files belonging to 1 university together
        files = os.listdir(self._output_path)
        sfiles = ' '.join(files)
        uni_files = lambda uni: re.findall(r'University%d_[0-9]+\.nt' % uni, sfiles)
        for uni in xrange(num_universities):
            ufiles = uni_files(uni)
            # merge University<uni>_<part>.nt files into University<uni>.nt,
            # deleting each part after it has been appended
            with io.open(os.path.join(self._output_path, 'University%d.nt' % uni), 'w+') as UNI:
                for upart in ufiles:
                    upart_file = os.path.join(self._output_path, upart)
                    with io.open(upart_file, 'r+') as UPART:
                        UNI.write(UPART.read())
                    sh.rm(upart_file)
        self._log.info('done')
| 3,777 |
certsign/cli.py
|
nilsfr/certsign
| 11 |
2170106
|
import argparse, sys, logging, os, signal, codecs
from . import client, server, crypto
def main(args=None):
    """Entry point for the `certsign` command: parse args and sign a CSR.

    :param args: argument list; defaults to sys.argv[1:] (overridable for tests)
    """
    args = sys.argv[1:] if args is None else args
    parser = argparse.ArgumentParser(
        description="Use ACME to sign a certificate"
    )
    parser.add_argument(
        "--account-key", required=True, help="path to your Let's Encrypt account private key"
    )
    parser.add_argument(
        "--csr", required=True, help="path to your certificate signing request"
    )
    parser.add_argument(
        "--challenge-dir", required=True,
        help="path to the directory that serves .well-known/acme-challenge/"
    )
    parser.add_argument(
        "--account-email", default=None, help="email to be associated with the account key"
    )
    parser.add_argument(
        "--ca", default=client.DEFAULT_CA_DIRECTORY,
        help="certificate authority, default is Let's Encrypt"
    )
    # store_const leaves args.quiet as None unless the flag is given;
    # sign_csr() uses that to keep the logger's current level.
    parser.add_argument(
        "--quiet", action="store_const", const=logging.ERROR,
        help="suppress output except for errors"
    )
    args = parser.parse_args(args)
    sign_csr(args)
def server_main(args=None):
    """Entry point for the challenge server command.

    :param args: argument list; defaults to sys.argv[1:] (overridable for tests)
    """
    args = sys.argv[1:] if args is None else args
    parser = argparse.ArgumentParser(
        description="Serves the challenge to ACME to prove you control the domain"
    )
    parser.add_argument(
        "--challenge-dir", required=True,
        help="path to the directory that serves .well-known/acme-challenge/"
    )
    parser.add_argument("--port", type=int, default=8000)
    parser.add_argument("--addr", default="localhost")
    # optional pidfile so init scripts can track/stop the server
    parser.add_argument("--pidfile", default=None)
    args = parser.parse_args(args)
    challenge_server(args)
def tool_main(args=None):
    """Entry point for the `certsign` helper tools (privkey / csr / view).

    Each subcommand registers its handler via set_defaults(handler=...),
    which is invoked with the parsed args at the end.

    :param args: argument list; defaults to sys.argv[1:] (overridable for tests)
    """
    args = sys.argv[1:] if args is None else args
    parser = argparse.ArgumentParser(
        description="Various tools to support the certificate signing process"
    )
    subparsers = parser.add_subparsers(title="subcommands", dest="subcommand")
    subparsers.required = True
    privkey_parser = subparsers.add_parser("privkey")
    privkey_parser.add_argument("--out", required=True)
    privkey_parser.add_argument("--bits", type=int, default=4096)
    privkey_parser.set_defaults(handler=create_private_key)
    # Debian/Ubuntu location first, RedHat/CentOS location as fallback.
    default_openssl_conf = '/etc/ssl/openssl.cnf'
    if not os.path.exists(default_openssl_conf):
        default_openssl_conf = '/etc/pki/tls/openssl.cnf'
    csr_parser = subparsers.add_parser("csr")
    csr_parser.add_argument("--privkey", required=True)
    csr_parser.add_argument("--out", required=True)
    csr_parser.add_argument(
        "--dname", default=None, help="distinguished name of your organization"
    )
    csr_parser.add_argument(
        "--conf", default=default_openssl_conf, help="the OpenSSl configuration file"
    )
    csr_parser.add_argument("domains", nargs='+')
    csr_parser.set_defaults(handler=create_csr)
    view_parser = subparsers.add_parser('view')
    view_parser.add_argument("file", help="A PEM encoded CSR or certificate")
    view_parser.set_defaults(handler=view_cert)
    args = parser.parse_args(args)
    args.handler(args)
def sign_csr(args):
    """Sign the CSR via the ACME CA and print the resulting certificate."""
    logger = client.LOGGER
    # --quiet raises the level to ERROR; otherwise keep the current level.
    logger.setLevel(args.quiet or logger.level)
    certificate = client.sign_csr(
        args.account_key,
        args.csr,
        args.challenge_dir,
        args.account_email,
        log=logger,
        ca_directory=args.ca,
    )
    print(certificate)
def challenge_server(args):
    """Run the ACME challenge HTTP server until interrupted."""
    acme_server = server.acme_challenge_server(args.challenge_dir, args.addr, args.port)
    if args.pidfile:
        # Refuse to clobber the pidfile of a running instance.
        if os.path.isfile(args.pidfile):
            raise FileExistsError(args.pidfile)
        with open(args.pidfile, "w") as handle:
            handle.write("{}\n".format(os.getpid()))
    print("Starting server on {}:{}, use <Ctrl-C> to stop".format(args.addr, args.port))
    try:
        acme_server.serve_forever()
    except KeyboardInterrupt:
        # Normal shutdown path for an interactive run.
        pass
    finally:
        clean_pidfile(args.pidfile)
def create_private_key(args):
    """Generate an RSA private key of args.bits and write it to args.out."""
    pem = crypto.create_private_key(args.bits)
    with codecs.open(args.out, "w", encoding="utf-8") as out:
        out.write(pem)
def create_csr(args):
    """Create a certificate signing request and write it to args.out."""
    pem = crypto.create_csr(args.privkey, args.domains, args.dname, args.conf)
    with codecs.open(args.out, "w", encoding="utf-8") as out:
        out.write(pem)
def view_cert(args):
    """Print human-readable information for a PEM CSR or certificate."""
    info = crypto.pem_file_info(args.file)
    print(info)
def clean_pidfile(pidfile):
    """Remove the pidfile if one was configured and it still exists."""
    if not pidfile:
        return
    if os.path.isfile(pidfile):
        os.unlink(pidfile)
def terminate(signo, frame):
    """SIGTERM handler: exit cleanly so `finally` blocks run."""
    raise SystemExit(0)
# Make sure finally clauses are called on SIGTERM (e.g. pidfile cleanup in
# challenge_server); SystemExit unwinds the stack where a raw signal would not.
signal.signal(signal.SIGTERM, terminate)
| 4,633 |
montreal_forced_aligner/aligner/base.py
|
ai-zahran/Montreal-Forced-Aligner
| 0 |
2171814
|
"""Class definitions for base aligner"""
from __future__ import annotations
import logging
import os
import shutil
import time
from typing import TYPE_CHECKING, Optional
from ..config import TEMP_DIR
from ..exceptions import KaldiProcessingError
from ..multiprocessing import (
align,
calc_fmllr,
compile_information,
compile_train_graphs,
convert_ali_to_textgrids,
)
from ..utils import log_kaldi_errors
if TYPE_CHECKING:
from logging import Logger
from ..config import AlignConfig
from ..corpus import Corpus
from ..dictionary import DictionaryType
from ..models import AcousticModel
__all__ = ["BaseAligner"]
class BaseAligner:
"""
Base aligner class for common aligner functions
Parameters
----------
corpus : :class:`~montreal_forced_aligner.corpus.base.Corpus`
Corpus object for the dataset
dictionary : :class:`~montreal_forced_aligner.dictionary.Dictionary`
Dictionary object for the pronunciation dictionary
align_config : :class:`~montreal_forced_aligner.config.align_config.AlignConfig`
Configuration for alignment
temp_directory : str, optional
Specifies the temporary directory root to save files need for Kaldi.
If not specified, it will be set to ``~/Documents/MFA``
debug : bool
Flag for running in debug mode, defaults to false
verbose : bool
Flag for running in verbose mode, defaults to false
logger : :class:`~logging.Logger`
Logger to use
"""
def __init__(
self,
corpus: Corpus,
dictionary: DictionaryType,
align_config: AlignConfig,
temp_directory: Optional[str] = None,
debug: bool = False,
verbose: bool = False,
logger: Optional[Logger] = None,
acoustic_model: Optional[AcousticModel] = None,
):
self.align_config = align_config
self.corpus = corpus
self.dictionary = dictionary
if not temp_directory:
temp_directory = TEMP_DIR
self.temp_directory = temp_directory
os.makedirs(self.temp_directory, exist_ok=True)
self.log_file = os.path.join(self.temp_directory, "aligner.log")
if logger is None:
self.logger = logging.getLogger("corpus_setup")
self.logger.setLevel(logging.INFO)
handler = logging.FileHandler(self.log_file, "w", "utf-8")
handler.setFormatter = logging.Formatter("%(name)s %(message)s")
self.logger.addHandler(handler)
else:
self.logger = logger
self.acoustic_model = None
self.verbose = verbose
self.debug = debug
self.speaker_independent = True
self.uses_cmvn = True
self.uses_splices = False
self.uses_voiced = False
self.iteration = None
self.acoustic_model = acoustic_model
self.setup()
def setup(self) -> None:
"""
Set up dictionary, corpus and configurations
"""
self.dictionary.set_word_set(self.corpus.word_set)
self.dictionary.write()
self.corpus.initialize_corpus(self.dictionary, self.align_config.feature_config)
self.align_config.silence_csl = self.dictionary.silence_csl
self.data_directory = self.corpus.split_directory
self.feature_config = self.align_config.feature_config
@property
def use_mp(self) -> bool:
"""Flag for using multiprocessing"""
return self.align_config.use_mp
@property
def meta(self) -> dict:
"""Metadata for the trained model"""
from ..utils import get_mfa_version
data = {
"phones": sorted(self.dictionary.nonsil_phones),
"version": get_mfa_version(),
"architecture": "gmm-hmm",
"features": "mfcc+deltas",
}
return data
@property
def align_options(self):
"""Options for alignment"""
options = self.align_config.align_options
options["optional_silence_csl"] = self.dictionary.optional_silence_csl
return options
@property
def fmllr_options(self):
"""Options for fMLLR"""
options = self.align_config.fmllr_options
options["silence_csl"] = self.dictionary.silence_csl
return options
@property
def align_directory(self) -> str:
"""Align directory"""
return os.path.join(self.temp_directory, "align")
@property
def working_directory(self) -> str:
"""Current working directory"""
return self.align_directory
@property
def current_model_path(self) -> str:
"""Current acoustic model path"""
return os.path.join(self.align_directory, "final.mdl")
@property
def alignment_model_path(self):
"""Alignment acoustic model path"""
path = os.path.join(self.working_directory, "final.alimdl")
if self.speaker_independent and os.path.exists(path):
return path
return os.path.join(self.working_directory, "final.mdl")
@property
def working_log_directory(self) -> str:
"""Current log directory"""
return os.path.join(self.align_directory, "log")
@property
def backup_output_directory(self) -> Optional[str]:
"""Backup output directory"""
if self.align_config.overwrite:
return None
return os.path.join(self.align_directory, "textgrids")
def compile_information(self, output_directory: str) -> None:
"""
Compile information about the quality of alignment
Parameters
----------
output_directory: str
Directory to save information to
"""
issues, average_log_like = compile_information(self)
errors_path = os.path.join(output_directory, "output_errors.txt")
if os.path.exists(errors_path):
self.logger.warning(
"There were errors when generating the textgrids. See the output_errors.txt in the "
"output directory for more details."
)
if issues:
issue_path = os.path.join(output_directory, "unaligned.txt")
with open(issue_path, "w", encoding="utf8") as f:
for u, r in sorted(issues.items()):
f.write(f"{u}\t{r}\n")
self.logger.warning(
f"There were {len(issues)} segments/files not aligned. Please see {issue_path} for more details on why "
"alignment failed for these files."
)
if (
self.backup_output_directory is not None
and os.path.exists(self.backup_output_directory)
and os.listdir(self.backup_output_directory)
):
self.logger.info(
f"Some TextGrids were not output in the output directory to avoid overwriting existing files. "
f"You can find them in {self.backup_output_directory}, and if you would like to disable this "
f"behavior, you can rerun with the --overwrite flag or run `mfa configure --always_overwrite`."
)
def export_textgrids(self, output_directory: str) -> None:
"""
Export a TextGrid file for every sound file in the dataset
Parameters
----------
output_directory: str
Directory to save to
"""
begin = time.time()
self.textgrid_output = output_directory
if self.backup_output_directory is not None and os.path.exists(
self.backup_output_directory
):
shutil.rmtree(self.backup_output_directory, ignore_errors=True)
convert_ali_to_textgrids(self)
self.compile_information(output_directory)
self.logger.debug(f"Exported TextGrids in a total of {time.time() - begin} seconds")
    def align(self, subset: Optional[int] = None) -> None:
        """
        Perform alignment

        Runs a first alignment pass; then, unless SAT is disabled, estimates
        fMLLR transforms for speaker adaptation and aligns a second time.
        A ``done`` sentinel file short-circuits reruns and a ``dirty``
        sentinel is written when alignment fails.

        Parameters
        ----------
        subset: int, optional
            Number of utterances to align
            NOTE(review): not referenced in this body — confirm whether it
            is consumed elsewhere.
        """
        # Sentinel files marking completed / failed runs.
        done_path = os.path.join(self.align_directory, "done")
        dirty_path = os.path.join(self.align_directory, "dirty")
        if os.path.exists(done_path):
            self.logger.info("Alignment already done, skipping.")
            return
        try:
            compile_train_graphs(self)
            log_dir = os.path.join(self.align_directory, "log")
            os.makedirs(log_dir, exist_ok=True)
            self.logger.info("Performing first-pass alignment...")
            # Calls the module-level ``align`` helper shadowed by this method.
            align(self)
            _, average_log_like = compile_information(self)
            self.logger.debug(
                f"Prior to SAT, average per frame likelihood (this might not actually mean anything): {average_log_like}"
            )
            # Second (speaker-adapted) pass only when SAT is enabled, the
            # model's features support fMLLR, and transforms do not already
            # exist from a previous run.
            if (
                not self.align_config.disable_sat
                and self.acoustic_model.feature_config.fmllr
                and not os.path.exists(os.path.join(self.align_directory, "trans.0"))
            ):
                self.logger.info("Calculating fMLLR for speaker adaptation...")
                calc_fmllr(self)
                self.logger.info("Performing second-pass alignment...")
                align(self)
                _, average_log_like = compile_information(self)
                self.logger.debug(
                    f"Following SAT, average per frame likelihood (this might not actually mean anything): {average_log_like}"
                )
        except Exception as e:
            # Mark the run as dirty so later invocations know it failed.
            with open(dirty_path, "w"):
                pass
            if isinstance(e, KaldiProcessingError):
                log_kaldi_errors(e.error_logs, self.logger)
                e.update_log_file(self.logger.handlers[0].baseFilename)
            raise
        # Success: leave the "done" marker for future runs.
        with open(done_path, "w"):
            pass
| 9,844 |
allennlp_overrides/dataset_readers/__init__.py
|
allenai/sledgehammer
| 47 |
2172229
|
"""
A :class:`~allennlp.data.dataset_readers.dataset_reader.DatasetReader`
reads a file and converts it to a collection of
:class:`~allennlp.data.instance.Instance` s.
The various subclasses know how to read specific filetypes
and produce datasets in the formats required by specific models.
"""
# pylint: disable=line-too-long
from allennlp_overrides.dataset_readers.classification_dataset_reader import ClassificationDatasetReader
from allennlp_overrides.dataset_readers.classification_dataset_reader_oracle import ClassificationDatasetOracleReader
from allennlp_overrides.dataset_readers.nli_dataset_reader import NLIDatasetReader
from allennlp_overrides.dataset_readers.nli_dataset_reader_oracle import NLIDatasetOracleReader
| 731 |
leetcode_python/Array/diagonal_traverse.py
|
yennanliu/Python_basics
| 0 |
2171279
|
"""
498. Diagonal Traverse
Medium
Add to List
Share
Given an m x n matrix mat, return an array of all the elements of the array in a diagonal order.
Example 1:
Input: mat = [[1,2,3],[4,5,6],[7,8,9]]
Output: [1,2,4,7,5,3,6,8,9]
Example 2:
Input: mat = [[1,2],[3,4]]
Output: [1,2,3,4]
Constraints:
m == mat.length
n == mat[i].length
1 <= m, n <= 104
1 <= m * n <= 104
-105 <= mat[i][j] <= 105
"""
# V0
# IDEA : while loop + boundary conditions
### NOTE : the "directions" trick
class Solution(object):
    def findDiagonalOrder(self, matrix):
        """Return the elements of ``matrix`` in diagonal zig-zag order.

        Walks the matrix cell by cell, alternating between an up-right and
        a down-left step vector, and repairs out-of-bounds positions by
        jumping to the head of the next diagonal.
        """
        if not matrix or not matrix[0]:
            return []
        rows, cols = len(matrix), len(matrix[0])
        # Alternating step vectors: up-right for even diagonals,
        # down-left for odd ones.
        steps = ((-1, 1), (1, -1))
        r = c = diag = 0
        order = []
        while len(order) < rows * cols:
            if 0 <= r < rows and 0 <= c < cols:
                order.append(matrix[r][c])
                dr, dc = steps[diag % 2]
                r, c = r + dr, c + dc
                continue
            # Out of bounds: snap to the next diagonal's head and flip.
            if r < 0 and 0 <= c < cols:
                r += 1
            elif 0 <= r < rows and c < 0:
                c += 1
            elif r < rows and c >= cols:
                r += 2
                c -= 1
            elif r >= rows and c < cols:
                c += 2
                r -= 1
            diag += 1
        return order
# V0'
# IDEA : Diagonal Iteration and Reversal
# NOTE !!! : for "odd" diagoal traversal, we just need to go over it and REVERSE it before append tp res
class Solution:
    def findDiagonalOrder(self, matrix):
        """Collect each anti-diagonal, reversing every other one."""
        # Check for empty matrices
        if not matrix or not matrix[0]:
            return []
        # Variables to track the size of the matrix
        N, M = len(matrix), len(matrix[0])
        # The two arrays as explained in the algorithm
        result, intermediate = [], []
        # We have to go over all the elements in the first
        # row and the last column to cover all possible diagonals
        for d in range(N + M - 1):
            # Clear the intermediate array everytime we start
            # to process another diagonal
            intermediate = []
            # We need to figure out the "head" of this diagonal
            # The elements in the first row and the last column
            # are the respective heads.
            # r : row idx
            # c : col idx
            #r, c = 0 if d < M else d - M + 1, d if d < M else M - 1
            if d < M:
                r = 0
                c = d
            else:
                r = d - M + 1
                c = M - 1
            # Iterate until one of the indices goes out of scope
            # Take note of the index math to go down the diagonal
            while r < N and c > -1:
                intermediate.append(matrix[r][c])
                r += 1
                c -= 1
            # Diagonals are numbered from 0 here, so the even-indexed ones
            # are the upward-going diagonals and must be reversed.
            if d % 2 == 0:
                result.extend(intermediate[::-1])
            else:
                result.extend(intermediate)
        return result
# V1
# IDEA : Diagonal Iteration and Reversal
# https://leetcode.com/problems/diagonal-traverse/solution/
class Solution:
    def findDiagonalOrder(self, matrix: List[List[int]]) -> List[int]:
        """Collect every anti-diagonal of ``matrix``, reversing alternate ones.

        Each diagonal's head lies either on the first row (leading diagonals)
        or on the last column; the diagonal is walked down-left and the
        even-indexed (upward) diagonals are reversed before being appended.
        """
        if not matrix or not matrix[0]:
            return []
        n_rows, n_cols = len(matrix), len(matrix[0])
        ordered: List[int] = []
        for diag in range(n_rows + n_cols - 1):
            # Head of this diagonal: first row while diag < n_cols,
            # last column afterwards.
            if diag < n_cols:
                row, col = 0, diag
            else:
                row, col = diag - n_cols + 1, n_cols - 1
            run = []
            while row < n_rows and col >= 0:
                run.append(matrix[row][col])
                row += 1
                col -= 1
            # 0-based numbering: even diagonals travel upward, so reverse.
            ordered.extend(reversed(run) if diag % 2 == 0 else run)
        return ordered
# V1
# IDEA : Simulation
# https://leetcode.com/problems/diagonal-traverse/solution/
class Solution:
    def findDiagonalOrder(self, matrix: List[List[int]]) -> List[int]:
        """Simulate the zig-zag walk one cell at a time."""
        # Check for an empty matrix
        if not matrix or not matrix[0]:
            return []
        # The dimensions of the matrix
        N, M = len(matrix), len(matrix[0])
        # Indices that will help us progress through
        # the matrix, one element at a time.
        row, column = 0, 0
        # Direction flag: 1 while walking up-right, 0 while walking
        # down-left; flipped whenever we fall off the matrix edge.
        direction = 1
        # Final result array that will contain all the elements
        # of the matrix
        result = []
        # The uber while loop which will help us iterate over all
        # the elements in the array.
        while row < N and column < M:
            # First and foremost, add the current element to
            # the result matrix.
            result.append(matrix[row][column])
            # Move along in the current diagonal depending upon
            # the current direction.[i, j] -> [i - 1, j + 1] if
            # going up and [i, j] -> [i + 1][j - 1] if going down.
            new_row = row + (-1 if direction == 1 else 1)
            new_column = column + (1 if direction == 1 else -1)
            # Checking if the next element in the diagonal is within the
            # bounds of the matrix or not. If it's not within the bounds,
            # we have to find the next head.
            if new_row < 0 or new_row == N or new_column < 0 or new_column == M:
                # If the current diagonal was going in the upwards
                # direction (``direction`` is truthy only when it is 1).
                if direction:
                    # For an upwards going diagonal having [i, j] as its tail
                    # If [i, j + 1] is within bounds, then it becomes
                    # the next head. Otherwise, the element directly below
                    # i.e. the element [i + 1, j] becomes the next head
                    row += (column == M - 1)
                    column += (column < M - 1)
                else:
                    # For a downwards going diagonal having [i, j] as its tail
                    # if [i + 1, j] is within bounds, then it becomes
                    # the next head. Otherwise, the element to the right
                    # i.e. the element [i, j + 1] becomes the next head
                    column += (row == N - 1)
                    row += (row < N - 1)
                # Flip the direction
                direction = 1 - direction
            else:
                row = new_row
                column = new_column
        return result
# V1'
# https://blog.csdn.net/fuxuemingzhu/article/details/82528226
class Solution(object):
    def findDiagonalOrder(self, matrix):
        """
        :type matrix: List[List[int]]
        :rtype: List[int]

        Zig-zag walk over the matrix, toggling between an up-right and a
        down-left step whenever the walk leaves the grid.
        """
        if not matrix or not matrix[0]:
            return []
        m, n = len(matrix), len(matrix[0])
        up, down = (-1, 1), (1, -1)
        going_up = True
        out = []
        x = y = 0
        while len(out) < m * n:
            row_ok = 0 <= x < m
            col_ok = 0 <= y < n
            if row_ok and col_ok:
                out.append(matrix[x][y])
                dx, dy = up if going_up else down
                x += dx
                y += dy
                continue
            # Left the grid: jump to the next diagonal head, then flip.
            if x < 0 and col_ok:
                x += 1
            elif row_ok and y < 0:
                y += 1
            elif x < m and y >= n:
                x += 2
                y -= 1
            elif x >= m and y < n:
                y += 2
                x -= 1
            going_up = not going_up
        return out
# V1''
# http://bookshadow.com/weblog/2017/02/05/leetcode-diagonal-traverse/
class Solution(object):
    def findDiagonalOrder(self, matrix):
        """
        :type matrix: List[List[int]]
        :rtype: List[int]

        Zig-zag walk; ``k`` is +1 while moving up-right and -1 while
        moving down-left.
        """
        if not matrix: return []
        i, j, k = 0, 0, 1
        # NOTE: despite the names, ``w`` is the row count and ``h`` the
        # column count (w = len(matrix), h = len(matrix[0])).
        w, h = len(matrix), len(matrix[0])
        ans = []
        for x in range(w * h):
            ans.append(matrix[i][j])
            # Tentative next cell along the current diagonal.
            if k > 0:
                di, dj = i - 1, j + 1
            else:
                di, dj = i + 1, j - 1
            if 0 <= di < w and 0 <= dj < h:
                i, j = di, dj
            else:
                # Off the edge: move to the next diagonal's head and flip
                # the walking direction.
                if k > 0:
                    if j + 1 < h:
                        j += 1
                    else:
                        i += 1
                else:
                    if i + 1 < w:
                        i += 1
                    else:
                        j += 1
                k *= -1
        return ans
# V2
| 9,906 |
example.py
|
evandromr/python_crosscorr
| 6 |
2171524
|
#!/usr/env python
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal, stats
import pyfits as fits
def corrfunc(x, y, t):
    """Cross-correlate two time series and estimate their time shift.

    Both ``x`` and ``y`` are normalised in place (zero mean, unit standard
    deviation) before correlating — the caller's arrays are mutated.

    Returns
    -------
    corr : correlation function (normalised by the series length)
    offset : lag axis expressed in the units of ``t``
    newt : time axis for plotting the shifted ``y``
    shift : estimated time shift (position of the correlation maximum)
    """
    # In-place normalisation of both series.
    for series in (x, y):
        series -= series.mean()
        series /= series.std()
    corr = signal.correlate(x, y) / float(len(x))
    # Convert sample lags to the units of the time axis.
    lags = np.arange(corr.size) - (t.size - 1)
    tstep = (t[-1] - t[0]) / float(t.size)
    offset = lags * tstep
    # The best shift maximises the correlation function.
    shift = offset[np.argmax(corr)]
    newt = t + shift
    # Wrap shifts larger than half of the observed interval.
    if min(newt) > (max(t) / 2):
        newt = newt - max(t)
        shift = shift - max(t)
    elif max(newt) < (min(t) / 2):
        newt = newt + min(t)
        shift = shift + min(t)
    return corr, offset, newt, shift
# NOTE: Python 2 script (print statements, raw_input, xrange).
if __name__ == "__main__":
    '''
    Creates 2 fake time series, and calculates the cross correlation and time
    delay between then
    '''
    # Time Series
    T = float(raw_input('Insert a period for the time series: '))
    P = float(raw_input('Insert the total sampled time: '))
    stp = float(raw_input('Insert the time step (sample rate): '))
    # time delay
    delay = float(raw_input('Insert the time delay between the 2 series: '))
    # 1-sigma errors
    sigma = float(raw_input('Insert the fake 1-sig deviation for the data: '))
    # time data
    t = np.arange(T, P, step=stp)
    t -= min(t)
    # sinusoidal time series (y lags x by ``delay``), with Gaussian noise
    x = np.sin((2.0*np.pi*t)/T)
    y = np.sin(((2.0*np.pi*(t - delay))/T))
    x = x + sigma*np.random.randn(len(x))
    y = y + sigma*np.random.randn(len(y))
    # number of simulations
    nsimulations = int(raw_input('Insert the number of simulations :'))
    # generates 'nsimulations' fake time series
    # aux1/aux2 hold, per time sample, an array of simulated values
    aux1 = []
    aux2 = []
    for i, meanx in enumerate(x):
        newx = 0.3*np.random.randn(nsimulations) + meanx
        aux1.append(newx)
    for j, meany in enumerate(y):
        newy = 0.3*np.random.randn(nsimulations) + meany
        aux2.append(newy)
    # transpose the simulations: one full light curve per simulation
    newxses = []
    newyses = []
    for n in xrange(nsimulations):
        newxses.append(np.array([aux1[m][n] for m in xrange(len(aux1))]))
    for n in xrange(nsimulations):
        newyses.append(np.array([aux2[m][n] for m in xrange(len(aux2))]))
    #======= DEBUG OPTION ==================================================
    # plot new x lightcurves and original on top to check
    for simulated in newxses:
        plt.plot(t, simulated, '.')
    plt.errorbar(t, x, yerr=0.2, fmt='k+-', linewidth='2.0')
    plt.show()
    plt.cla()
    # plot new y lightcurves and original on top to check
    for simulated in newyses:
        plt.plot(t, simulated, '.')
    plt.errorbar(t, y, yerr=0.2, fmt='k+-', linewidth='2.0')
    plt.show()
    plt.cla()
    #=======================================================================
    # store calculated time shift for each simulated curve
    shiftes = []
    for newx, newy in zip(newxses, newyses):
        newcorr, newoffset, nnewt, newshift = corrfunc(newx, newy, t)
        shiftes.append(newshift)
    # histogram binning equal of time step
    binlist = np.arange(-max(t), max(t), step=stp)
    # plot original time shift distribution
    plt.hist(shiftes, bins=binlist, normed=True, alpha=0.6)
    plt.title('Distribution Function')
    plt.show()
    plt.cla()
    # histogram binnin manually defined (step)
    binlist2 = np.arange(-max(t), max(t), step=5)
    # plot original time shift distribution
    # plt.hist(shiftes, bins=binlist2, normed=True, alpha=0.6)
    # plt.title('Distribution Function')
    # plt.show()
    # plt.cla()
    # calculates the mean and sigma of original distribution (without selection)
    mean, sigma = stats.norm.fit(shiftes)
    print 'Results from the total distribution (without selection)'
    print 'time shift = {0:.2f} +- {1:.2f}'.format(mean, sigma)
    print ' '
    # selected time shift limits for physical reasons
    # use min(shiftes) and max(shiftes) if not
    minoffset = float(raw_input('Enter Low limit for offset: '))
    maxoffset = float(raw_input('Enter High limit for offset: '))
    # newshifts = shiftes
    # keep only the shifts inside the user-selected window
    newshifts = [shiftes[i] for i in xrange(len(shiftes))
                 if ((shiftes[i]>minoffset) and (shiftes[i]<maxoffset))]
    # fit normal distribution
    mean, sigma = stats.norm.fit(newshifts)
    # histogram binning equals of time step
    binlist = np.arange(minoffset, maxoffset, step=stp)
    # smaller histogram bin, set mannually
    binlist2 = np.arange(minoffset, maxoffset, step=1)
    # plot selected time shift distribution
    plt.hist(newshifts, bins=binlist, normed=True, alpha=0.6)
    plt.hist(newshifts, bins=binlist2, normed=True, alpha=0.6)
    # create a x-axis for the gaussian funtion with 1000 points
    xgaus = np.linspace(minoffset, maxoffset, 10000)
    # generates the gaussian curve with mean and sigma
    gauss = stats.norm.pdf(xgaus, mean, sigma)
    # plot gaussian curve over histogram, with values on legend
    plt.plot(xgaus, gauss, color='k', linewidth=2.0,
             label='mean={0:.2f}, sigma={1:.2f}'.format(mean,sigma))
    plt.title('Selected Distribution Function')
    plt.legend(loc='best')
    plt.show()
    plt.cla()
    # =========================================================================
    # Calculates correlation of x and y time series
    corr, offset, newt, shift = corrfunc(x, y, t)
    # === BEGIN of BLOCK ======================================================
    # == Comment this block to use results
    # free of monte-carlo statistics
    # time shift given by the maximum of the distribution
    shift = mean
    # new time axis to plot shifted time series
    newt = t + shift
    # correct time intervals if shift bigger than half the interval
    if min(newt) > (max(t)/2):
        newt = newt - max(t)
        shift = shift - max(t)
    elif max(newt) < (min(t)/2):
        newt = newt + min(t)
        shift = shift + min(t)
    #=============================================== END of BLOCK ==============
    # visualize calculated time shift
    print 'results from the selected distribution'
    print 'time shift = {0:.2f} +- {1:.2f}'.format(shift, sigma)
    #aheader = 'Correlacao entre as curvas 1 e 2 \n'
    #np.savetxt('crosscorr.dat.gz', np.transpose([offset, corr]),
    #           delimiter=' ', header=aheader, comments='#')
    # plot correlation function
    plt.plot(offset, corr, 'o-')
    # position of maximum chosen value
    plt.vlines(shift, min(corr), max(corr), 'k', 'dashed',
               'mean offset = {0:1f}'.format(shift))
    plt.xlabel('Offset [time units]', fontsize=12)
    plt.ylabel('Correlation coeficient', fontsize=12)
    plt.title('Correlation Function')
    plt.legend(loc='best')
    plt.show()
    plt.cla()
    # plot original time series
    plt.plot(t, x, label='series 1')
    plt.plot(t, y, label='series 2')
    plt.xlabel('Time [s]', fontsize=12)
    plt.ylabel('Normalized Count Rate [counts/s]', fontsize=12)
    plt.show()
    plt.cla()
    # plot original time series plus shifted time series
    plt.plot(t, x, label='series 1')
    #plt.plot(t, y, label='series 2')
    plt.plot(newt, y, 'r', label='shifted series 2')
    plt.xlabel('Time [s]', fontsize=12)
    plt.ylabel('Normalized Count Rate [counts/s]', fontsize=12)
    plt.legend(loc='best')
    plt.show()
    plt.cla()
| 7,596 |
backend/farm/serializers/reference_parcel_base.py
|
szkkteam/agrosys
| 0 |
2172096
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Common Python library imports
# Pip package imports
# Internal package imports
from backend.extensions.api import api
from backend.api import ModelSerializer, validates, ValidationError, GeometryModelConverter, GeometryField
from backend.api.utils import object_id_exists
from backend.api import fields
from ..models import ReferenceParcel
# Field names for the reference-parcel serializers. NOTE(review): only
# referenced from the commented-out ``Meta.fields`` line below — confirm
# whether it is still needed.
REFERENCE_PARCEL_BASE_DATA_FIELDS = (
    'id',
    'title',
    'notes',
    'geometry',
    'total_area',
    'ancestor_id',
)
class ReferenceParcelBaseSerializer(ModelSerializer):
    """Base serializer for ``ReferenceParcel`` rows (geometry + area)."""

    # Serialized as a string to preserve decimal precision.
    total_area = fields.Decimal(as_string=True, required=True)
    geometry = GeometryField(load_from='geometry')
    # Optional parent parcel; must reference an existing ReferenceParcel.
    ancestor_id = fields.Integer(required=False, default=None, validate=lambda x: object_id_exists(x, ReferenceParcel, id='parcel_id'))

    class Meta:
        model = ReferenceParcel
        model_converter = GeometryModelConverter
        #fields = REFERENCE_PARCEL_BASE_DATA_FIELDS
        #dump_only = ('id',)

    @validates('total_area')
    def validate_total_area(self, total_area, *args, **kwargs):
        """Reject a non-positive area unless this is a partial update."""
        # NOTE(review): debug prints left in place — consider removing.
        print("validate_total_area, total_area: ", total_area)
        print("validate_total_area, args: ", args)
        print("validate_total_area, kwargs: ", kwargs)
        partial = kwargs.get('partial', False)
        if not partial:
            if total_area <= 0:
                raise ValidationError("Field may not be 0 or less.")
| 1,491 |
examples/hello_world.py
|
amzn/rheoceros
| 4 |
2171773
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
import intelliflow.api_ext as flow
from intelliflow.api_ext import *
# Refer './doc/EMAIL_ACTION_SETUP_AND_INFO.md' before proceeding
from intelliflow.core.platform.compute_targets.email import EMAIL
from intelliflow.core.platform.constructs import ConstructParamsDict
from intelliflow.utils.test.inlined_compute import NOOPCompute
flow.init_basic_logging()

# automatically reads default credentials (and creates this app in that AWS account)
# default credentials should belong to an entity that can either:
#   - do everything (admin, use it for quick prototyping!)
#   - can create/delete roles (dev role of this app) that would have this app-name and IntelliFlow in it
#   - can get/assume its dev-role (won't help if this is the first run)
app = AWSApplication("hello-world", "us-east-1")
def example_inline_compute_code(input_map, output, params):
    """Inlined compute target invoked inside RheocerOS core.

    The runtime also injects each input as a local variable, both in
    alias form ('order_completed_count_amt') and in indexed form
    (input0, input1, ...).

    input_map: [alias -> input signal]
    output: output signal
    params: platform parameters (such as 'AWS_BOTO_SESSION' if an AWS
        based configuration is being used)

    Ex:
        s3 = params['AWS_BOTO_SESSION'].resource('s3')
    """
    greeting = "Hello from AWS!"
    print(greeting)
class MyLambdaReactor(IInlinedCompute):
    """Minimal inlined-compute implementation used for demonstration."""

    def __call__(self, input_map: Dict[str, Signal], materialized_output: Signal, params: ConstructParamsDict) -> Any:
        # Import shown for illustration; this reactor deliberately does nothing.
        from intelliflow.core.platform.definitions.aws.common import CommonParams as AWSCommonParams
        pass
# First node: three compute targets run on execution — two inlined
# callables plus a no-op (used for testing).
node_1_1 = app.create_data(id=f"Node_1_1",
                           compute_targets=[
                               InlinedCompute( example_inline_compute_code ),
                               InlinedCompute( MyLambdaReactor() ),
                               NOOPCompute
                           ]
                           )

# Sender/recipients are redacted placeholders from the dataset; see
# './doc/EMAIL_ACTION_SETUP_AND_INFO.md' for the required SES setup.
email_obj = EMAIL(sender="<EMAIL>",  # sender from the same account
                  recipient_list=["<EMAIL>"])

# Second node depends on the first and emails on both execution start
# (hook) and completion (compute target).
node_1_2 = app.create_data(id=f"Node_1_2",
                           inputs=[node_1_1],
                           compute_targets=[email_obj.action(subject="IF Test Email", body="First Node Computation as Email")],
                           execution_hook=RouteExecutionHook(on_exec_begin=email_obj.action(subject="Hello from execution hook of Node_1_2", body="Node_1_2 exec started!"))
                           )

app.execute(node_1_1)

# check the execution on node_1_2
# test event propagation in AWS
path, comp_records = app.poll(node_1_2)
assert path
| 2,788 |
python/yar1/server.py
|
belmih/simpleworks
| 0 |
2172325
|
#!/usr/bin/python
import asyncore
from smtpd import SMTPServer
import threading
import time
from datetime import datetime
import sys, os
import csv
import re
import logging
# Listen address/port for the SMTP server and the CSV output file name.
IP_ADDRESS = "127.0.0.1"
SMTP_PORT = 25
CSV_FILE = "test.csv"

# Run relative to this script's directory so the CSV and log files are
# created next to the script regardless of the caller's cwd.
abspath = os.path.abspath(__file__)
workdir = os.path.dirname(abspath)
os.chdir(workdir)
class CustomSMTPServer(SMTPServer):
    """SMTP server that records every received message to a CSV file.

    NOTE: Python 2 code (the surrounding module uses print statements).
    """

    class CSV_file:
        """Helper that writes rows to a semicolon-delimited CSV file."""

        def __init__(self, filename):
            # Create/truncate the file and write the header row.
            self.filename = filename
            with open(self.filename, 'w') as csvfile:
                fieldnames = ['IP', 'DATE', 'FROM', 'TO', 'SUBJECT', 'BODY']
                writer = csv.DictWriter(csvfile, dialect='excel', lineterminator='\n', delimiter=';', fieldnames=fieldnames)
                writer.writeheader()

        def write_row(self, row):
            # Append a single data row (file re-opened per write).
            with open(self.filename, 'a') as csvfile:
                spamwriter = csv.writer(csvfile, dialect='excel', lineterminator='\n', delimiter=';')
                spamwriter.writerow(row)

    def handle_accept(self):
        # NOTE(review): closing the dispatcher inside handle_accept looks
        # suspicious — confirm whether further connections should really
        # be refused after the first accept.
        self.close()

    # Class-level state: the CSV file (with header) is created at class
    # definition time, and the counter is shared through the class.
    csv_file = CSV_file(CSV_FILE)
    email_count = 0

    def process_message(self, peer, mailfrom, rcpttos, data):
        """ generates the data for the file """
        ip_addr = peer[0]
        date = datetime.now()
        # Prefer the From: header; fall back to the envelope sender.
        from_addr = ""
        m = re.search('^from:\s(.+)$', data, re.I | re.M)
        if m:
            from_addr = m.group(1)
        else:
            from_addr = mailfrom
        # Prefer the To: header; fall back to the envelope recipients.
        to_addr = ""
        m = re.search('^to:\s(.+)$', data, re.I | re.M)
        if m:
            to_addr = m.group(1)
        else:
            to_addr = rcpttos
        subject = ""
        m = re.search('^subject:\s(.+)$', data, re.I | re.M)
        if m:
            subject = m.group(1)
        # Body: everything after the first blank line of the raw message.
        body = ""
        m = re.search('\n\n(.+)$', data, re.I | re.S)
        if m:
            body = m.group(1)
        self.csv_file.write_row([ip_addr, date, from_addr, to_addr, subject, body])
        self.email_count += 1
        return
class MyReceiver(object):
    """Controls the lifetime of the SMTP listener and its asyncore thread."""

    def __init__(self):
        # True while the listener thread is running.
        self.run = False
        #self.thread = None

    def start(self):
        """ Start the listening service """
        if self.run:
            print "The server is already running"
            return
        # create an instance of the SMTP server, derived from asyncore.dispatcher
        self.smtp = CustomSMTPServer((IP_ADDRESS, SMTP_PORT), None)
        # start the asyncore loop, listening for SMTP connection, within a thread
        self.thread = threading.Thread(target=asyncore.loop, kwargs={'timeout': 1})
        #self.thread.daemon = True
        self.thread.start()
        self.run = True
        print "\nserver is running"

    def stop(self):
        """ Stop listening now to port """
        logging.debug("stop self.run = %s" % self.run)
        if self.run:
            self.run = False
            # close the SMTPserver to ensure no channels connect to asyncore
            self.smtp.close()
            logging.debug("close")
            # now it is save to wait for the thread to finish, i.e. for asyncore.loop() to exit
            self.thread.join()
            logging.debug("join")

    def get_statistic(self):
        """ displays the number of received emails """
        print "receive emails: %s" % self.smtp.email_count
def configure_logging():
    """Route DEBUG-level log records to ``smtpserver.log`` (append mode)."""
    logging.basicConfig(level = logging.DEBUG,
                        format='%(asctime)s,%(msecs)d %(name)s %(levelname)s: %(message)s',
                        datefmt='%H:%M:%S',
                        filename='smtpserver.log',
                        filemode='a')
    # Optional console handler, currently disabled:
    # console = logging.StreamHandler()
    # console.setLevel(logging.INFO)
    # formatter = logging.Formatter('%(levelname)s: %(message)s')
    # console.setFormatter(formatter)
    # logging.getLogger('').addHandler(console)
def main():
    """Interactive command loop: start|stop|restart|statistic|exit."""
    print "Hello! Use: start|stop|restart|statistic|exit"
    reciver = MyReceiver()
    reciver.start()
    try:
        while True:
            command = str(raw_input('smtp server:'))
            if 'start' == command:
                reciver.start()
            elif 'stop' == command:
                reciver.stop()
                reciver.get_statistic()
            elif 'restart' == command:
                reciver.stop()
                # Brief pause so the old socket is released before rebinding.
                time.sleep(.5)
                reciver.start()
            elif 'statistic' == command:
                reciver.get_statistic()
            elif 'exit' == command:
                reciver.stop()
                sys.exit(0)
            else:
                print "Unknown command"
            time.sleep(.2)
    except KeyboardInterrupt:
        # Ctrl-C: shut the listener down cleanly before exiting.
        reciver.stop()
        sys.exit(1)
# Script entry point: set up logging, then run the interactive loop.
if __name__ == "__main__":
    configure_logging()
    main()
| 4,820 |
dynsimf/examples/Ishigami_sa.py
|
Tensaiz/DyNSimF
| 3 |
2171314
|
from dynsimf.models.Model import Model
from dynsimf.models.tools.SA import SensitivityAnalysis
from dynsimf.models.tools.SA import SAConfiguration
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from SALib.test_functions import Ishigami
__author__ = "<NAME>"
__email__ = "<EMAIL>"
if __name__ == "__main__":
    # A single-node graph keeps the model minimal; the topology is
    # irrelevant for this Ishigami sensitivity-analysis demo.
    g = nx.random_geometric_graph(1, 1)
    model = Model(g)

    # The three Ishigami factors are exposed as model constants.
    constants = {
        'x1': 0,
        'x2': 0,
        'x3': 0
    }

    initial_state = {
        'ishigami': 0
    }

    def update(constants):
        # Evaluate the Ishigami test function on the current factor values.
        ishigami_params = np.array([list(constants.values())])
        return {'ishigami': Ishigami.evaluate(ishigami_params)}

    # Model definition
    model.constants = constants
    model.set_states(['ishigami'])
    model.add_update(update, {'constants': model.constants})

    # SA configuration: each factor ranges over [-pi, pi];
    # second_order=True requests pairwise interaction indices.
    cfg = SAConfiguration(
        {
            'bounds': {'x1': (-3.14159265359, 3.14159265359), 'x2': (-3.14159265359, 3.14159265359), 'x3': (-3.14159265359, 3.14159265359)},
            'iterations': 1,
            'initial_state': initial_state,
            'initial_args': {'constants': model.constants},
            'n': 1024,
            'second_order': True,
            'algorithm_input': 'states',
            'algorithm': lambda x: x,
            'output_type': '',
            'algorithm_args': {},
        }
    )

    sa = SensitivityAnalysis(cfg, model)
    analysis = sa.analyze_sensitivity()
    print(analysis)
    # Second-order (interaction) sensitivity indices per factor pair.
    print("x1-x2:", analysis['ishigami']['S2'][0,1])
    print("x1-x3:", analysis['ishigami']['S2'][0,2])
    print("x2-x3:", analysis['ishigami']['S2'][1,2])
    analysis['ishigami'].plot()
    plt.show()
| 1,669 |
student-management-system/code.py
|
ashwin2401/greyatom-python-for-data-science
| 0 |
2170870
|
# --------------
# Code starts here
# Task 1: merge two class rosters and apply add/remove updates.
class_1 = ['<NAME>', '<NAME>', '<NAME>', '<NAME>']
class_2 = ['<NAME>', '<NAME>', '<NAME>']
new_class = class_1+class_2
print(new_class)
new_class.append('<NAME>')
print(new_class)
new_class.remove('<NAME>')
print(new_class)
# Code ends here

# --------------
# Code starts here
# Task 2: total marks and percentage (out of a 500-mark maximum).
courses = {"Math":65, "English":70, "History":80, "French":70, "Science":60}
# NOTE(review): total is hard-coded; it equals sum(courses.values()) == 345.
total = 65+70+80+70+60
print(total)
percentage = total/500*100
print(percentage,"Scored by <NAME>")
# Code ends here

# --------------
# Code starts here
# Task 3: find top scorers.
mathematics = {'<NAME>':78, '<NAME>':95, '<NAME>':65, '<NAME>':50, '<NAME>':70}
# NOTE(review): this searches ``courses`` (subject scores), not the
# ``mathematics`` dict defined just above — confirm which was intended.
max_marks_scored = max(courses,key = courses.get)
print (max_marks_scored)
topper = max(mathematics, key=mathematics.get)
print(topper)
# Code ends here

# --------------
# Given string
topper = '<NAME>'
# Code starts here
# Task 4: certificate name in "LAST FIRST" upper-case form.
print('-'*20)
first_name = topper.split()[0]
last_name = topper.split()[1]
# NOTE(review): full_name hard-codes 'ng'/'andrew' instead of using the
# first_name/last_name variables computed above — confirm intent.
full_name = 'ng' + ' ' + 'andrew'
certificate_name = full_name.upper()
print(certificate_name)
# Code ends here
| 1,050 |
CursoEmVideo-Python3-Mundo1/desafio034.py
|
martinsnathalia/Python
| 0 |
2170535
|
# Escreva um programa que pergunte o salário de um funcionário e calcule o valor do seu aumento. Para salários
# superiores a R$1250,00, calcule um aumento de 10%. Para os inferiores ou iguais, o aumento é de 15%.
# Read the current salary and apply the raise: 10% for salaries above
# R$1250.00, 15% for salaries up to and including that amount.
salario = float(input('Digite o seu salário para saber o seu aumento: '))
fator = 1.10 if salario > 1250 else 1.15
print('Seu novo salário será R$ {:.2f}'.format(salario * fator))
| 427 |
annotator/urls.py
|
Malorn44/BelEx-Annotator
| 0 |
2170571
|
from django.urls import path, re_path
from django.views.generic.base import RedirectView
from . import views
# URL namespace used when reversing, e.g. "annotator:index".
app_name = 'annotator'

urlpatterns = [
    path('', views.home, name='home'),
    # Upload endpoints for the database and annotation files.
    path('db_upload', views.db_upload, name='db_upload'),
    path('annotation_upload', views.annotation_upload, name='annotation_upload'),
    # Per-entry pages, keyed by the entry's primary key.
    path('<int:entry_pk>/', views.index, name='index'),
    # path('<int:entry_pk>/submit_belief', views.submit_belief, name='submit_belief'),
    path('<int:entry_pk>/change_view', views.change_view, name='change_view')
]
| 548 |
my_project/my_project/offer/tests/views/cbv/test_CreateOfferView/setup.py
|
Govedarski/World-of-books
| 0 |
2172310
|
from django import test as django_test
from django.contrib.auth import get_user_model
from django.urls import reverse
from my_project.library.models import Book
UserModel = get_user_model()
class SetupCreateOfferViewTests(django_test.TestCase):
    """Shared fixtures and helpers for CreateOfferView tests.

    Creates two users and one book owned by the second user; the first
    user is the one making offers for that (wanted) book.
    """

    # Credential values are redacted placeholders from the dataset.
    CREDENTIALS = {
        'username': 'User',
        'email': '<EMAIL>',
        'password': '<PASSWORD>',
    }
    SECOND_CREDENTIALS = {
        'username': 'Second_User',
        'email': '<EMAIL>',
        'password': '<PASSWORD>',
    }

    @classmethod
    def setUpClass(cls):
        """Create both users, the wanted book and the target URL once."""
        super().setUpClass()
        user = UserModel.objects.create_user(**cls.CREDENTIALS)
        second_user = UserModel.objects.create_user(**cls.SECOND_CREDENTIALS)
        cls._create_book(second_user)
        wanted_book = Book.objects.first()
        cls.TARGET_URL = reverse('create_offer',
                                 kwargs={'pk': wanted_book.pk})
        cls.USER = user
        cls.WANTED_BOOK = wanted_book
        cls.SECOND_USER = second_user

    def _login(self, **kwarg):
        # Log in with explicit credentials, or default to the first user.
        if kwarg:
            self.client.login(**kwarg)
        else:
            self.client.login(
                username=self.CREDENTIALS.get('username'),
                password=self.CREDENTIALS.get('password'),
            )

    @staticmethod
    def _create_book( owner):
        # Persist a minimal book owned by ``owner``.
        Book(
            title='title',
            author='author',
            owner=owner,
        ).save()

    def _set_user_cf(self, user=None):
        # Fill in the contact form (city/address) for ``user`` or self.USER.
        cf = self.USER.contactform
        if user:
            cf = user.contactform
        cf.city = "test"
        cf.address = "test"
        cf.save()
| 1,628 |
screenshot_to_mesh.py
|
edent/3D-Screenshot-to-3D-Model
| 32 |
2172319
|
#!/usr/bin/python
import pandas as pd
import numpy as np
import cv2
import argparse
from pyntcloud import PyntCloud # https://github.com/daavoo/pyntcloud
from PIL import Image
def generate_mesh(filename):
    """Convert a stereo screenshot into a coloured 3D point-cloud mesh.

    Detects whether the screenshot is side-by-side or over/under by
    comparing half-image similarity, computes a disparity/depth map with
    StereoSGBM + WLS filtering, then combines pixel colour and depth into
    a PyntCloud mesh saved next to the input as ``<filename>.ply``.
    """
    # # Open an image as RGB
    print("Opening " + filename)
    original = Image.open(filename).convert('RGB')
    # # Get the dimensions of the image
    width, height = original.size
    # # Split into left and right halves. The left eye sees the right image.
    right = original.crop( (0, 0, width/2, height))
    left = original.crop( (width/2, 0, width, height))
    # # Over/Under. Split into top and bottom halves. The right eye sees the top image.
    top = original.crop( (0, 0, width, height/2))
    bottom = original.crop( (0, height/2, width, height))
    # # Calculate the similarity of the left/right & top/bottom.
    left_right_similarity = mse(np.array(right), np.array(left))
    top_bottom_similarity = mse(np.array(top), np.array(bottom))
    if (top_bottom_similarity < left_right_similarity):
        # # This is an Over/Under image
        print("Over-Under image detected")
        left = bottom
        right = top
    else:
        print("Side-By-Side image detected")
    # # Optional. Save split images
    # left.resize(original.size).save(filename + "-left.png")
    # right.resize(original.size).save(filename + "-right.png")
    # # Convert to arrays
    image_left = np.array(left)
    image_right = np.array(right)
    # # Simple but less effective
    # stereo = cv2.StereoBM_create(numDisparities=32, blockSize=25)
    # disparity = stereo.compute(image_left,image_right)
    # depth_image = Image.fromarray(disparity).convert('L')
    # # Parameters for dispartiy map
    print("Generating Depth Map")
    window_size = 15
    # # These values can be tuned depending on the image.
    left_matcher = cv2.StereoSGBM_create(
        # # Documentation at https://docs.opencv.org/trunk/d2/d85/classcv_1_1StereoSGBM.html
        minDisparity=0,
        numDisparities=16,
        blockSize=5,
        P1=8 * 3 * window_size ** 2,
        P2=32 * 3 * window_size ** 2,
        # disp12MaxDiff = 0,
        # preFilterCap = 0,
        # uniquenessRatio = 0,
        # speckleWindowSize = 0,
        # speckleRange = 0,
        # mode = StereoSGBM::MODE_SGBM # https://docs.opencv.org/trunk/d2/d85/classcv_1_1StereoSGBM.html#ad985310396dd4d95a003b83811bbc138
    )
    # # Create matchers and a WLS filter to smooth the disparity map.
    right_matcher = cv2.ximgproc.createRightMatcher(left_matcher)
    wls_filter = cv2.ximgproc.createDisparityWLSFilter(matcher_left=left_matcher)
    wls_filter.setLambda(80000)
    wls_filter.setSigmaColor(1.2)
    disparity_left = left_matcher.compute(image_left, image_right)
    disparity_right = right_matcher.compute(image_right, image_left)
    disparity_left = np.int16(disparity_left)
    disparity_right = np.int16(disparity_right)
    filtered_image = wls_filter.filter(disparity_left, image_left, None, disparity_right)
    # # Generate a depth map
    depth_map = cv2.normalize(src=filtered_image, dst=filtered_image, beta=0, alpha=255, norm_type=cv2.NORM_MINMAX);
    depth_map = np.uint8(depth_map)
    # # Invert image. Optional depending on stereo pair
    if (top_bottom_similarity > left_right_similarity):
        depth_map = cv2.bitwise_not(depth_map)
    # # Greyscale
    depth_image = Image.fromarray(depth_map, mode="L")
    # # Optional - Save Disparity
    # depth_image.resize(original.size).save(filename+"-depth.png")
    # # Get the colour information from the left image. Resized to original. Rotated 90 degrees for STL.
    print("Creating Colour Map")
    colours_array = np.array(left.resize(original.size)
                             .rotate(-90, expand=True)
                             .getdata()
                             ).reshape(original.size + (3,))
    # # Create a Pandas DataFrame of each pixel's position and colour
    indices_array = np.moveaxis(np.indices(original.size), 0, 2)
    imageArray = np.dstack((indices_array, colours_array)).reshape((-1,5))
    df = pd.DataFrame(imageArray, columns=["x", "y", "red","green","blue"])
    # # Get depth information. Resized to original. Rotated 90 degrees for STL.
    depths_array = np.array(depth_image.resize(original.size)
                            .rotate(-90, expand=True)
                            .getdata())
    # # Add depth to DataFrame
    df.insert(loc=2, column='z', value=depths_array)
    # # Set unit types correctly
    df[['red','green','blue']] = df[['red','green','blue']].astype(np.uint)
    df[['x','y','z']] = df[['x','y','z']].astype(float)
    # # Optional - increase the intensity of the depth information
    df['z'] = df['z']*5
    # # Generate mesh
    print("Generating Mesh")
    cloud = PyntCloud(df)
    # # Save mesh to file
    print("Saving Mesh")
    cloud.to_file(filename+".ply", also_save=["mesh","points"],as_text=True)
def mse(imageA, imageB):
    """Return the 'Mean Squared Error' between two images.

    The images must share the same dimensions; a lower value means the
    images are more similar.
    Adapted from https://www.pyimagesearch.com/2014/09/15/python-compare-two-images/
    """
    diff = imageA.astype("float") - imageB.astype("float")
    total = np.sum(diff ** 2)
    return total / float(imageA.shape[0] * imageA.shape[1])
if __name__ == '__main__':
    # Parse the stereo screenshot path from the command line and build the mesh.
    cli = argparse.ArgumentParser()
    cli.add_argument('image_file', help='Filename of a stereo screenshot')
    cli_args = cli.parse_args()
    generate_mesh(cli_args.image_file)
| 5,826 |
src/geoipdb_loader/tasks.py
|
kalekseev/django-geoipdb-loader
| 1 |
2169386
|
from celery import shared_task
from celery.utils.log import get_task_logger
from . import download
logger = get_task_logger(__name__)
@shared_task(ignore_result=True)
def update_geoipdb(skip_city=False, skip_country=False, skip_md5=False):
    # Celery task: run the GeoIP database download, forwarding the skip
    # flags and routing progress messages through the Celery task logger.
    download(
        skip_city=skip_city, skip_country=skip_country, skip_md5=skip_md5, logger=logger
    )
| 353 |
tensorflow_v2/dragen1860/ch07/numpy-backward-prop.py
|
gottaegbert/penter
| 13 |
2172343
|
#!/usr/bin/env python
# encoding: utf-8
"""
@author: HuRuiFeng
@file: 7.9-backward-prop.py
@time: 2020/2/24 17:32
@desc: 7.9 反向传播算法实战的代码
"""
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from sklearn.datasets import make_moons
from sklearn.model_selection import train_test_split
plt.rcParams['font.size'] = 16
plt.rcParams['font.family'] = ['STKaiti']
plt.rcParams['axes.unicode_minus'] = False
def load_dataset():
    """Sample the two-moons toy dataset and split it 7:3 into train/test."""
    n_samples = 2000   # number of sampled points
    test_ratio = 0.3   # fraction of points reserved for testing
    # Generate the dataset directly with the sklearn helper.
    X, y = make_moons(n_samples=n_samples, noise=0.2, random_state=100)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=test_ratio, random_state=42)
    return X, y, X_train, X_test, y_train, y_test
def make_plot(X, y, plot_name, XX=None, YY=None, preds=None, dark=False):
    """Scatter-plot the 2-D dataset X coloured by label y and save the figure.

    When XX/YY/preds are all given, the prediction surface is drawn first.
    """
    if dark:
        plt.style.use('dark_background')
    else:
        sns.set_style("whitegrid")
    plt.figure(figsize=(16, 12))
    ax = plt.gca()
    ax.set(xlabel="$x_1$", ylabel="$x_2$")
    plt.title(plot_name, fontsize=30)
    plt.subplots_adjust(left=0.20, right=0.80)
    draw_surface = XX is not None and YY is not None and preds is not None
    if draw_surface:
        plt.contourf(XX, YY, preds.reshape(XX.shape), 25, alpha=1, cmap=plt.cm.Spectral)
        plt.contour(XX, YY, preds.reshape(XX.shape), levels=[.5], cmap="Greys", vmin=0, vmax=.6)
    # Scatter plot, one colour per label.
    plt.scatter(X[:, 0], X[:, 1], c=y.ravel(), s=40, cmap=plt.cm.Spectral, edgecolors='none')
    plt.savefig('数据集分布.svg')
    plt.close()
class Layer:
    """A single fully-connected layer implemented with plain NumPy.

    Besides its parameters, the layer caches its last activation output and
    the error/delta terms that backpropagation fills in.
    """
    def __init__(self, n_input, n_neurons, activation=None, weights=None,
                 bias=None):
        """
        :param int n_input: number of input nodes
        :param int n_neurons: number of output nodes
        :param str activation: activation name ('relu' | 'tanh' | 'sigmoid')
        :param weights: optional weight matrix; random normal if omitted
        :param bias: optional bias vector; random uniform if omitted
        """
        # Proper initialization matters: a bad one can keep the net from converging.
        if weights is None:
            weights = np.random.randn(n_input, n_neurons) * np.sqrt(1 / n_neurons)
        if bias is None:
            bias = np.random.rand(n_neurons) * 0.1
        self.weights = weights
        self.bias = bias
        self.activation = activation    # activation name, e.g. 'sigmoid'
        self.last_activation = None     # cached layer output o
        self.error = None               # intermediate term used to compute delta
        self.delta = None               # delta term used for the gradient update
    def activate(self, x):
        """Forward pass: o = act(x @ W + b); caches and returns the output."""
        z = np.dot(x, self.weights) + self.bias
        self.last_activation = self._apply_activation(z)
        return self.last_activation
    def _apply_activation(self, r):
        """Apply the configured activation function to r (identity if none)."""
        if self.activation == 'relu':
            return np.maximum(r, 0)
        if self.activation == 'tanh':
            return np.tanh(r)
        if self.activation == 'sigmoid':
            return 1 / (1 + np.exp(-r))
        return r
    def apply_activation_derivative(self, r):
        """Derivative of the activation, expressed in terms of its output r."""
        if self.activation == 'relu':
            grad = np.array(r, copy=True)
            grad[r > 0] = 1.
            grad[r <= 0] = 0.
            return grad
        if self.activation == 'tanh':
            return 1 - r ** 2
        if self.activation == 'sigmoid':
            return r * (1 - r)
        if self.activation is None:
            # No activation: derivative of the identity is 1 everywhere.
            return np.ones_like(r)
        return r
# 神经网络模型
class NeuralNetwork:
    """Plain-NumPy multilayer perceptron trained with manual backpropagation."""
    def __init__(self):
        self._layers = []  # ordered list of Layer objects
    def add_layer(self, layer):
        # Append a layer to the network.
        self._layers.append(layer)
    # Forward propagation simply chains each layer's forward pass.
    def feed_forward(self, X):
        for layer in self._layers:
            # pass through each layer in turn
            X = layer.activate(X)
        return X
    def backpropagation(self, X, y, learning_rate):
        """One gradient-descent step on a single sample (X, y) with MSE loss."""
        # Forward pass to obtain the network output.
        output = self.feed_forward(X)
        for i in reversed(range(len(self._layers))):  # walk layers back-to-front
            layer = self._layers[i]  # current layer object
            # Output layer?
            if layer == self._layers[-1]:
                layer.error = y - output  # derivative of the 2-class MSE loss
                # Key step: delta of the last layer (output-layer gradient formula).
                layer.delta = layer.error * layer.apply_activation_derivative(output)
            else:  # hidden layer
                next_layer = self._layers[i + 1]  # downstream layer
                layer.error = np.dot(next_layer.weights, next_layer.delta)
                # Key step: delta of a hidden layer (hidden-layer gradient formula).
                layer.delta = layer.error * layer.apply_activation_derivative(layer.last_activation)
        # Gradient-descent update of every weight matrix.
        # NOTE(review): only the weights are updated here - the biases are
        # never adjusted during training; confirm whether that is intended.
        for i in range(len(self._layers)):
            layer = self._layers[i]
            # o_i is the output of the previous layer (or the raw input X).
            o_i = np.atleast_2d(X if i == 0 else self._layers[i - 1].last_activation)
            # delta already carries the sign from (y - output), hence '+=' here.
            layer.weights += layer.delta * o_i.T * learning_rate
    def train(self, X_train, X_test, y_train, y_test, learning_rate, max_epochs):
        """Train sample-by-sample; every 10 epochs record MSE and test accuracy."""
        # One-hot encode the integer labels into two columns.
        y_onehot = np.zeros((y_train.shape[0], 2))
        y_onehot[np.arange(y_train.shape[0]), y_train] = 1
        # Metrics sampled every 10 epochs.
        mses = []
        accuracys = []
        for i in range(max_epochs + 1):  # epoch loop
            for j in range(len(X_train)):  # one sample at a time (pure SGD)
                self.backpropagation(X_train[j], y_onehot[j], learning_rate)
            if i % 10 == 0:
                # MSE between the one-hot targets and the current outputs.
                mse = np.mean(np.square(y_onehot - self.feed_forward(X_train)))
                mses.append(mse)
                accuracy = self.accuracy(self.predict(X_test), y_test.flatten())
                accuracys.append(accuracy)
                print('Epoch: #%s, MSE: %f' % (i, float(mse)))
                # Report test-set accuracy alongside the loss.
                print('Accuracy: %.2f%%' % (accuracy * 100))
        return mses, accuracys
    def predict(self, X):
        # Inference is just a forward pass.
        return self.feed_forward(X)
    def accuracy(self, X, y):
        # Fraction of samples whose argmax prediction matches the label.
        return np.sum(np.equal(np.argmax(X, axis=1), y)) / y.shape[0]
def main():
    """Load the two-moons data, train a 4-layer sigmoid MLP, plot the metrics."""
    X, y, X_train, X_test, y_train, y_test = load_dataset()
    # Plot the dataset distribution; X are 2-D coordinates, y the labels.
    # NOTE(review): make_plot saves and closes its figure, so the following
    # plt.show() has no open figure left to display - confirm intended.
    make_plot(X, y, "Classification Dataset Visualization ")
    plt.show()
    nn = NeuralNetwork()  # instantiate the network
    nn.add_layer(Layer(2, 25, 'sigmoid'))  # hidden layer 1: 2 => 25
    nn.add_layer(Layer(25, 50, 'sigmoid'))  # hidden layer 2: 25 => 50
    nn.add_layer(Layer(50, 25, 'sigmoid'))  # hidden layer 3: 50 => 25
    nn.add_layer(Layer(25, 2, 'sigmoid'))  # output layer: 25 => 2
    mses, accuracys = nn.train(X_train, X_test, y_train, y_test, 0.01, 1000)
    # Epochs at which metrics were sampled (every 10th, first 11 samples).
    x = [i for i in range(0, 101, 10)]
    # Plot the MSE curve.
    plt.title("MES Loss")
    plt.plot(x, mses[:11], color='blue')
    plt.xlabel('Epoch')
    plt.ylabel('MSE')
    plt.savefig('训练误差曲线.svg')
    plt.close()
    # Plot the accuracy curve.
    plt.title("Accuracy")
    plt.plot(x, accuracys[:11], color='blue')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.savefig('网络测试准确率.svg')
    plt.close()
# Run the experiment when executed as a script.
if __name__ == '__main__':
    main()
| 7,474 |
ROS/sign_detection/scripts/classification.py
|
stefanbo92/SK-Creative-Challange-2016
| 0 |
2171844
|
#!/usr/bin/env python
import cv2
import numpy as np
def classify(img):
    """Score each warped candidate image against the four sign templates.

    :param img: list of grayscale candidate images, same size as the templates
    :return: list with one entry per candidate; each entry is the list of
             sum-of-squared-differences against each template.
             NOTE: this is a distance, so LOWER means more similar (the old
             variable names max_val/corr were leftovers from a removed
             cv2.matchTemplate correlation approach and were misleading).
    """
    # Load all reference templates (grayscale).
    # NOTE(review): cv2.imread returns None for a missing file - confirm the
    # template paths are valid relative to the process working directory.
    template = [
        cv2.imread("src/sign_detection/templates/0.png", 0),
        cv2.imread("src/sign_detection/templates/1.png", 0),
        cv2.imread("src/sign_detection/templates/2.png", 0),
        cv2.imread("src/sign_detection/templates/3.png", 0),
    ]
    out = []
    # Loop through all warped candidate images.
    for candidate in img:
        # SSD between the candidate and every template.
        ssd = [np.sum(np.square(np.subtract(np.asarray(candidate), np.asarray(t))))
               for t in template]
        # Save the per-template distance vector for this image.
        out.append(ssd)
    return out
| 952 |
download_trat.py
|
adriaan-portfolio/tbl-data-extractor
| 0 |
2171931
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from pathlib import Path, PureWindowsPath
from selenium.webdriver.firefox.options import Options
import os
from time import sleep
def rename_result_files(session_no):
    """Rename the downloaded 'analyse_results' CSVs to q1/q2/q3.

    Each rename is attempted independently, so one missing file no longer
    aborts the remaining renames (the old single try/bare-except did), and
    only OS-level errors are swallowed instead of every exception.

    :param session_no: session number used in the 'Session <n>' folder name
    """
    renames = [
        (fr'Session {session_no}\analyse_results.csv', fr'Session {session_no}\q1.csv'),
        (fr'Session {session_no}\analyse_results(1).csv', fr'Session {session_no}\q2.csv'),
        (fr'Session {session_no}\analyse_results(2).csv', fr'Session {session_no}\q3.csv'),
    ]
    for src, dst in renames:
        try:
            os.rename(src, dst)
        except OSError:
            # File not downloaded (yet) or already renamed - skip it.
            pass
def download_trat_results(session):
    """Log into DEWIS via headless Firefox and download the three TRAT CSVs.

    Files land in the 'Session <session>' download directory (created if
    needed) and are finally renamed to q1/q2/q3 by rename_result_files.

    :param session: session number; selects the trat<session>_<i> tests
    """
    download_dir = fr'C:\Users\a-vanniekerk\OneDrive - UWE Bristol\UWE\2020_2021\UFMFMS30-1 Dynamics, Modelling, and Simulation\TBL-data-extractor\Session {session}'
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)
    # Remove stale result files so the browser does not save '...(1).csv' copies.
    for i in range(1, 4):
        if os.path.exists(download_dir + fr"\q{i}.csv"):
            os.remove(download_dir + fr"\q{i}.csv")
    # Configure Firefox to download CSVs silently into download_dir.
    fp = webdriver.FirefoxProfile()
    fp.set_preference("browser.download.folderList", 2)
    fp.set_preference("browser.download.dir", download_dir)
    fp.set_preference("browser.download.manager.showWhenStarting", False)
    fp.set_preference("browser.helperApps.neverAsk.saveToDisk", "application/x-download")
    options = Options()
    options.headless = True
    driver = webdriver.Firefox(firefox_profile=fp, options=options)
    driver.get("https://dewisprod.uwe.ac.uk/cgi-bin/fixed/2022/secure/management/first.cgi?uweEND")
    # NOTE(review): credentials are hard-coded placeholders in this script.
    driver.find_element_by_id("uweusername").send_keys("USERNAME")
    driver.find_element_by_xpath('/html/body/div/div[5]/div/form[1]/div/input[2]').send_keys("PASSWORD")
    driver.find_element_by_xpath('//*[@id="mainForm"]/div/button').click()
    driver.find_element_by_xpath('/html/body/div/div[5]/div/span[6]/span[1]').click()
    driver.find_element_by_xpath('/html/body/div/div[5]/div/button[2]').click()
    driver.find_element_by_xpath('/html/body/div/div[5]/div/button[2]').click()
    for i in range(1, 4):
        driver.find_element_by_xpath(f'//*[contains(text(), "trat{session}_{i}")]').click()  # select test
        if i == 1:
            # BUGFIX: this scan previously reused the loop variable ``i``,
            # clobbering the outer test counter after ``break`` and breaking
            # the 'if i != 3' navigation below.
            for button in driver.find_elements_by_class_name('button1'):
                if button.text == "Goto Reporter":
                    button.click()  # select "Go to reporter"
                    break
        driver.find_element_by_xpath('/html/body/div/div[5]/div/div[3]/button[1]').click()  # select "Analyse results"
        driver.find_element_by_xpath('/html/body/div/div[5]/div/div[2]/button').click()  # select "Download CSV"
        if i != 3:
            driver.find_element_by_xpath('/html/body/div/div[2]/div/div/span[4]').click()  # back to "Reporter"
    driver.quit()
    rename_result_files(session)
| 3,067 |
util/date.py
|
xiongraorao/faiss-py
| 5 |
2172208
|
import time
# from mysql import Mysql
# from logger import Log
def time_to_date(timestamp):
    """Convert a Unix timestamp into a MySQL DATETIME string (local time).

    :param timestamp: seconds since the epoch
    :return: string formatted as '%Y-%m-%d %H:%M:%S'
    """
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(timestamp))
def date_to_time(date):
    """Convert a MySQL DATETIME string (local time) back into a Unix timestamp.

    :param date: string formatted as '%Y-%m-%d %H:%M:%S'
    :return: integer seconds since the epoch
    """
    parsed = time.strptime(date, "%Y-%m-%d %H:%M:%S")
    return int(time.mktime(parsed))
| 490 |
config_example.py
|
satapathyS/aws_dashboard
| 0 |
2171759
|
#!/usr/bin/python
# vim: set expandtab:
import os
def get_ec2_conf():
    """Return the AWS credential pair as a dict (placeholder values)."""
    return {
        'AWS_ACCESS_KEY_ID': 'YOUR ACCESS KEY ID',
        'AWS_SECRET_ACCESS_KEY': 'YOUR SECRET ACCESS KEY',
    }
def region_list():
    """Return the AWS regions covered by the dashboard."""
    return ['us-east-1', 'us-west-1', 'us-west-2']
| 369 |
sebs/experiments/experiment.py
|
mcopik/serverless-benchmarks
| 35 |
2171655
|
from abc import ABC
from abc import abstractmethod
from multiprocessing import Semaphore
# from multiprocessing.pool import ThreadPool
from sebs.experiments.config import Config as ExperimentConfig
from sebs.utils import LoggingBase
class Experiment(ABC, LoggingBase):
    """Abstract base class for benchmark experiments.

    Concrete experiments must implement the static ``name`` and ``typename``
    methods; the configuration is exposed read-only via ``config``.
    """
    def __init__(self, cfg: ExperimentConfig):
        super().__init__()
        self._config = cfg
        self._threads = 1      # number of worker threads (fixed default)
        self._invocations = 1  # number of invocations (fixed default)
        # Semaphore sized to the invocation count.
        self._invocation_barrier = Semaphore(self._invocations)
    @property
    def config(self) -> ExperimentConfig:
        # Read-only access to the experiment configuration.
        return self._config
    @staticmethod
    @abstractmethod
    def name() -> str:
        # Identifier of the experiment; must be supplied by subclasses.
        pass
    @staticmethod
    @abstractmethod
    def typename() -> str:
        # Type name of the experiment; must be supplied by subclasses.
        pass
| 713 |
sahara_dashboard/content/data_processing/data_plugins/tables.py
|
hejunli-s/sahara-dashboard
| 33 |
2171911
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.template import loader
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from sahara_dashboard.content.data_processing.utils \
import workflow_helpers as w_helpers
class UpdatePluginAction(tables.LinkAction):
    """Row action linking to the 'Update Plugin' view in an AJAX modal."""
    name = "update_plugin"
    verbose_name = _("Update Plugin")
    url = "horizon:project:data_processing.data_plugins:update"
    classes = ("ajax-modal", "btn-edit")
def versions_to_string(plugin):
    """Render the plugin's enabled versions via the list-versions template.

    Used as a column callable by PluginsTable below.
    """
    template_name = 'data_plugins/_list_versions.html'
    versions = w_helpers.get_enabled_versions(plugin)
    context = {"versions": versions}
    return loader.render_to_string(template_name, context)
class PluginsTable(tables.DataTable):
    """Horizon table listing data-processing plugins.

    Columns: title (links to the plugin details page), enabled versions
    (rendered by versions_to_string) and description.
    """
    title = tables.Column("title",
                          verbose_name=_("Title"),
                          link=("horizon:project:data_processing."
                                "data_plugins:plugin-details"))
    versions = tables.Column(versions_to_string,
                             verbose_name=_("Enabled Versions"))
    description = tables.Column("description",
                                verbose_name=_("Description"))
    class Meta(object):
        name = "plugins"
        verbose_name = _("Plugins")
        row_actions = (UpdatePluginAction,)
| 1,831 |
gfwanalysis/services/analysis/recent_tiles.py
|
archelogos/gfw-umd-gee
| 5 |
2171841
|
"""EE SENTINEL TILE URL SERVICE"""
import asyncio
import functools as funct
import logging
import ee
from gfwanalysis.errors import RecentTilesError
SENTINEL_BANDS = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B8A', 'B9', 'B10', 'B11', 'B12']
S2_TO_L8_DICT = {
'B12': 'B7',
'B11': 'B6',
'B10': None,
'B9': None,
'B8': 'B5',
'B8A': 'B5',
'B7': None,
'B6': None,
'B5': None,
'B4': 'B4',
'B3': 'B3',
'B2': 'B2',
}
class RecentTiles(object):
    """Create dictionary with two urls to be used as webmap tiles for Sentinel 2
    data. One should be the tile outline, and the other is the RGB data visulised.
    Metadata should also be returned, containing cloud score etc.
    Note that the URLs from Earth Engine expire every 3 days.
    """
    ### TEST: http://localhost:4500/api/v1/recent-tiles?lat=-16.644&lon=28.266&start=2017-01-01&end=2017-02-01
    @staticmethod
    def validate_bands(bands, instrument):
        """Validate and serialise a 3-band selection.

        Accepts a list or a '[r,g,b]'-style string; converts Sentinel-2 band
        names to Landsat-8 names when the instrument is a Landsat source.

        :raises RecentTilesError: on wrong count, duplicates or unknown bands.
        """
        # Accept a "[r,g,b]"-style string as well as a real list.
        if type(bands) == str:
            bands = bands[1:-1].split(',')
        parsed_bands = [b.upper() if b.upper() in SENTINEL_BANDS else None for b in bands]
        # Collect the unique bands in order (duplicates are rejected below).
        seen = set()
        uniq = [b for b in bands if b not in seen and not seen.add(b)]
        # Map Sentinel-2 band names to their Landsat-8 equivalents.
        if 'LANDSAT' in instrument:
            parsed_bands = [S2_TO_L8_DICT[b] for b in parsed_bands]
        logging.info(f"[RECENT>BANDS] parsed bands: {parsed_bands}")
        # Validate bands: exactly three, all unique, all known.
        if (len(parsed_bands) != 3 or len(uniq) != 3):
            raise RecentTilesError('Must contain 3 unique elements in the format: [r,b,g].')
        elif (None in parsed_bands):
            raise RecentTilesError('One or more bands are invalid.')
        else:
            return parsed_bands
    @staticmethod
    def pansharpened_L8_image(image, bands, bmin, bmax, opacity):
        """Pan-sharpen a Landsat-8 image with its 15 m panchromatic band B8."""
        hsv2 = image.select(bands).rgbToHsv()
        sharpened = ee.Image.cat([hsv2.select('hue'), hsv2.select('saturation'),
                                  image.select('B8')]).hsvToRgb().visualize(min=bmin, max=bmax, gamma=[1.3, 1.3, 1.3],
                                                                            opacity=opacity)
        return sharpened
    @staticmethod
    async def async_fetch(loop, f, data_array, bands, bmin, bmax, opacity, fetch_type=None):
        """Run f over (part of) data_array concurrently via loop executors.

        fetch_type selects the slice: 'first' = only element 0, 'rest' =
        elements 1..end, anything else = all elements.
        """
        asyncio.set_event_loop(loop)
        logging.info('[RECENT>ASYNC] Initiating loop.')
        if fetch_type == 'first':
            r1 = 0
            r2 = 1
        elif fetch_type == 'rest':
            r1 = 1
            r2 = len(data_array)
        else:
            r1 = 0
            r2 = len(data_array)
        # Set up the list of futures (promises).
        futures = [
            loop.run_in_executor(
                None,
                funct.partial(f, data_array[i], bands, bmin, bmax, opacity),
            )
            for i in range(r1, r2)
        ]
        # Await all futures and write results back starting at index 0
        # (previously done with a redundant no-op loop plus .result() calls).
        # NOTE(review): with fetch_type='rest' the result for element 1 is
        # written to slot 0 and so on - confirm callers expect this shift.
        results = await asyncio.gather(*futures)
        for idx, result in enumerate(results):
            data_array[idx] = result
        return data_array
    @staticmethod
    def recent_tiles(col_data, bands, bmin, bmax, opacity):
        """Attach a webmap tile URL ('tile_url') to one collection entry."""
        logging.info(f"[RECENT>TILE] {col_data.get('source')}")
        validated_bands = ["B4", "B3", "B2"]  # default true-colour selection
        if bands:
            validated_bands = RecentTiles.validate_bands(bands, col_data.get('source'))
        if not bmin:
            bmin = 0
        if 'COPERNICUS' in col_data.get('source'):
            if not bmax:
                bmax = 0.3
            im = ee.Image(col_data['source']).divide(10000).visualize(bands=validated_bands, min=bmin, max=bmax,
                                                                      opacity=opacity)
        elif 'LANDSAT' in col_data.get('source'):
            if not bmax:
                bmax = 0.2
            tmp_im = ee.Image(col_data['source'])
            im = RecentTiles.pansharpened_L8_image(tmp_im, validated_bands, bmin, bmax, opacity)
        m_id = im.getMapId()
        url = m_id['tile_fetcher'].url_format
        col_data['tile_url'] = url
        logging.info(f'[RECENT>TILE] Tile url retrieved: {url}.')
        return col_data
    @staticmethod
    def recent_thumbs(col_data, bands, bmin, bmax, opacity):
        """Attach a 250x250 thumbnail URL ('thumb_url') to one collection entry."""
        logging.info(f"[RECENT>THUMB] {col_data.get('source')}")
        validated_bands = ["B4", "B3", "B2"]  # default true-colour selection
        if bands: validated_bands = RecentTiles.validate_bands(bands, col_data.get('source'))
        if not bmin: bmin = 0
        if 'COPERNICUS' in col_data.get('source'):
            if not bmax:
                bmax = 0.3
            im = ee.Image(col_data['source']).divide(10000).visualize(bands=validated_bands, min=bmin, max=bmax,
                                                                      opacity=opacity)
        elif 'LANDSAT' in col_data.get('source'):
            if not bmax:
                bmax = 0.2
            tmp_im = ee.Image(col_data['source'])
            im = RecentTiles.pansharpened_L8_image(tmp_im, validated_bands, bmin, bmax, opacity)
        thumbnail = im.getThumbURL({'dimensions': [250, 250]})
        col_data['thumb_url'] = thumbnail
        return col_data
    @staticmethod
    def recent_data(lat, lon, start, end, sort_by):
        """List Sentinel-2 and Landsat-8 scenes covering a point in a date range.

        Returns one metadata dict per scene (source id, cloud score, bbox,
        spacecraft, product id, acquisition date) sorted by date or cloud score.
        """
        logging.info("[RECENT>DATA] function initiated")
        try:
            point = ee.Geometry.Point(float(lon), float(lat))
            S2 = ee.ImageCollection('COPERNICUS/S2').filterDate(start, end).filterBounds(point)
            L8 = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA').filterDate(start, end).filterBounds(point)
            s2_size = S2.size().getInfo()
            # BUGFIX: this previously read S2.size() again, truncating (or
            # over-reading) the Landsat list whenever the sizes differed.
            l8_size = L8.size().getInfo()
            collection = S2.toList(s2_size).cat(L8.toList(l8_size)).getInfo()
            data = []
            for c in collection:
                sentinel_image = c.get('properties').get('SPACECRAFT_NAME', None)
                landsat_image = c.get('properties').get('SPACECRAFT_ID', None)
                if sentinel_image:
                    # Acquisition datetime is encoded in the asset id.
                    date_info = c['id'].split('COPERNICUS/S2/')[1]
                    date_time = f"{date_info[0:4]}-{date_info[4:6]}-{date_info[6:8]} {date_info[9:11]}:{date_info[11:13]}:{date_info[13:15]}Z"
                    bbox = c['properties']['system:footprint']['coordinates']
                    tmp_ = {
                        'source': c['id'],
                        'cloud_score': c['properties']['CLOUDY_PIXEL_PERCENTAGE'],
                        'bbox': {
                            "geometry": {
                                "type": "Polygon",
                                "coordinates": bbox
                            }
                        },
                        'spacecraft': c['properties']['SPACECRAFT_NAME'],
                        'product_id': c['properties']['PRODUCT_ID'],
                        'date': date_time
                    }
                    data.append(tmp_)
                    logging.info(f"[RECENT>TILE] [Sentinel]:{sentinel_image} {date_time}")
                elif landsat_image:
                    # Landsat encodes the date in the id, the time in a property.
                    date_info = c['id'].split('LANDSAT/LC08/C01/T1_RT_TOA/LC08_')[1].split('_')[1]
                    time_info = c['properties']['SCENE_CENTER_TIME'].split('.')[0]
                    date_time = f"{date_info[0:4]}-{date_info[4:6]}-{date_info[6:8]} {time_info}Z"
                    bbox = c['properties']['system:footprint']['coordinates']
                    tmp_ = {
                        'source': c['id'],
                        'cloud_score': c['properties']['CLOUD_COVER'],
                        'bbox': {
                            "geometry": {
                                "type": "Polygon",
                                "coordinates": bbox
                            }
                        },
                        'spacecraft': c['properties']['SPACECRAFT_ID'],
                        'product_id': c['properties']['LANDSAT_PRODUCT_ID'],
                        'date': date_time
                    }
                    data.append(tmp_)
            logging.info('[RECENT>DATA] sorting by cloud cover & date of acquisition')
            if sort_by and sort_by.lower() == 'cloud_score':
                sorted_data = sorted(data, key=lambda k: (k.get('cloud_score', 100), k.get('date')), reverse=True)
            else:
                sorted_data = sorted(data, key=lambda k: (k.get('date'), -k.get('cloud_score', 100)), reverse=True)
            return sorted_data
        except Exception:
            # BUGFIX: was a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit.
            raise RecentTilesError('Recent Images service failed to return image.')
| 8,937 |
setup.py
|
yellowcrescent/xbake
| 1 |
2168790
|
#!/usr/bin/env python
# coding=utf-8
# pylint: disable=W,C
from setuptools import setup, find_packages
# Package metadata and entry points for the yc_xbake CLI tool.
setup(
    name="yc_xbake",
    version="0.10.190",
    author="<NAME>",
    author_email="<EMAIL>",
    license="MIT",
    description="Tool for cataloging and transcoding video files",
    keywords="video scraper scanner catalog subtitles transcode encode convert metadata",
    url="https://git.ycnrg.org/projects/YXB/repos/yc_xbake",
    packages=find_packages(),
    scripts=[],
    install_requires=[
        'docutils', 'setproctitle', 'pymongo', 'redis', 'pymediainfo', 'enzyme',
        'distance', 'requests', 'xmltodict', 'xattr', 'flask>=0.10.1', 'lxml',
        'mutagen', 'arrow>=0.7.0',
    ],
    package_data={'': ['*.md']},
    entry_points={'console_scripts': ['xbake = xbake.cli:_main']},
    # long_description, download_url, classifiers, etc. could also be added.
)
| 970 |
longest_sequence/dynamic_programming.py
|
noppanit/dynamic-programming
| 0 |
2171637
|
def longest_non_decreasing(lst):
    """Return one longest non-decreasing subsequence of ``lst``.

    The previous version was an unimplemented stub that always returned [].
    Classic O(n^2) dynamic programme: ``best[i]`` is the length of the longest
    non-decreasing subsequence ending at index ``i``, and ``prev[i]`` links to
    the chosen predecessor so the subsequence can be reconstructed.

    :param lst: sequence of mutually comparable items
    :return: list holding one optimal subsequence ([] for empty input)
    """
    n = len(lst)
    if n == 0:
        return []
    best = [1] * n    # best[i]: longest non-decreasing run ending at i
    prev = [-1] * n   # prev[i]: predecessor index on the optimal path
    for i in range(n):
        for j in range(i):
            if lst[j] <= lst[i] and best[j] + 1 > best[i]:
                best[i] = best[j] + 1
                prev[i] = j
    # Walk back from the index with the best overall length.
    k = max(range(n), key=best.__getitem__)
    seq = []
    while k != -1:
        seq.append(lst[k])
        k = prev[k]
    seq.reverse()
    return seq
longest_non_decreasing([10, 12, 13, 8, 20, 9, 28])
| 100 |
modbus/Inititor.py
|
jekot1234/serial_port_controll
| 0 |
2171304
|
from masterUI import Ui_MainWindow
from modbus_initiator import Initiator
from PyQt5 import QtWidgets
class Inititator_controller(Ui_MainWindow):
    """Glue between the generated master UI and the Modbus ``Initiator``.

    NOTE(review): several identifiers ('Inititator', 'respoonse_preview')
    carry typos from the generated UI; kept as-is for compatibility.
    """
    def __init__(self) -> None:
        super().__init__()
    def start_station(self):
        # Read serial-port parameters from the UI widgets and open the station.
        station_timeout = int(self.timeout.text())
        station_retries = int(self.retries.text())
        station_interchar = int(self.interchar_timeout.text())
        station_com_port = self.com_port.text()
        station_baudrate = int(self.baudrate.text())
        self.inititator = Initiator(station_com_port, station_baudrate, station_timeout, station_retries, station_interchar)
        print(f'Started Initiator: {station_timeout}, {station_retries}, {station_interchar}, {station_com_port}, {station_baudrate}')
    def start_address_transaction(self):
        # Send an addressed command; show the raw frames (hex) and decoded reply.
        address = int(self.transaction_address.text())
        command = int(self.command_code.text())
        argument = self.outcoming_text.toPlainText().encode()
        command_hex, response_hex = self.inititator.addressed_transaction(address, command, argument)
        command_hex = command_hex.hex(' ').upper()
        self.command_preview.setText(command_hex)
        # response_hex is (raw_frame_bytes, payload_bytes) per the uses below.
        response = response_hex[1]
        response_hex = response_hex[0].hex(' ').upper()
        self.respoonse_preview.setText(response_hex)
        self.incoming_text.setText(response.decode())
    def start_broadcast_transaction(self):
        # Broadcast a command (no per-station response) and preview the frame.
        command = int(self.command_code.text())
        argument = self.outcoming_text.toPlainText().encode()
        command_hex = self.inititator.broadcast_transaction(command, argument)
        command_hex = command_hex.hex(' ').upper()
        self.command_preview.setText(command_hex)
    def connect_signals(self):
        # Wire the UI buttons to their handler methods.
        self.run_station.clicked.connect(self.start_station)
        self.send_command.clicked.connect(self.start_address_transaction)
        self.send_command_broadcast.clicked.connect(self.start_broadcast_transaction)
        pass
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Inititator_controller()
ui.setupUi(MainWindow)
ui.connect_signals()
MainWindow.show()
sys.exit(app.exec_())
| 2,263 |
notes/set-based-type-system/type-system.py
|
massimo-nocentini/simulation-methods
| 0 |
2172431
|
from functools import lru_cache, reduce
import itertools
import operator, math
from sympy import *
from commons import * # our own module with basic utilities
w = symbols('w0 w1', commutative=True) # to denote False and True, respectively.
u = symbols('u0:51', commutative=True)
o = symbols('o0:51', commutative=False)
emptybox = Symbol(r'␣', commutative=True)
class ty:
    """Abstract base for set-based types with a generating-function view.

    ``self.types`` holds the component types this type depends on.
    Operator overloads build composite types: ``|`` disjoint union,
    ``*`` cartesian product, ``~`` cycle, ``2 ** t`` powerset,
    ``t[sym]`` product with a type variable.
    """
    def __init__(self, *types):
        # Accept either a single iterable of types or the types as varargs.
        try:
            iterable, = types
            self.types = list(iterable)
        except (ValueError, TypeError):
            self.types = types  # the list of types that I depend on.
    def tyvars(self):
        """Union of the type variables of all component types."""
        vs = map(operator.methodcaller('tyvars'), self.types)
        return foldr(operator.or_, lambda: set(), vs)
    def label(self):
        # BUGFIX: was ``raise NotImplemented`` which itself raises a
        # TypeError (NotImplemented is not an exception class).
        raise NotImplementedError  # I'm an abstract type, nameless indeed.
    def gf_lhs(self):
        """Left-hand side of the generating-function equation: Label(vars...)."""
        L = Function(self.label())
        return L(*self.tyvars())
    def gf(self):
        """All generating-function equations for this type."""
        return [define(self.gf_lhs(), rhs, ctor=FEq)
                for rhs in self.gf_rhs(*self.types)]
    def gf_rhs(self, *types):
        # Defer to the defining equation supplied by the concrete type.
        return self.definition(*types).gf_rhs(*types)
    def definition(self, *types):
        # Abstract: concrete (recursive) types supply their defining equation.
        # BUGFIX: was ``raise NotImplemented`` (see label above).
        raise NotImplementedError
    def gfs_space(self):
        """Cartesian product of the component types' generating functions."""
        return itertools.product(*map(lambda ty: ty.gf(), self.types))
    def __or__(self, other):
        return du(self, other)
    def __mul__(self, other):
        return cp(self, other)
    def __rpow__(self, base):
        # 2**T is the powerset of T; (-2)**T the indexed-powerset variant.
        if base == 2:
            return powerset(self)
        elif base == -2:
            return ipowerset(self) * lst(self)
        else:
            raise ValueError
    def __invert__(self):
        return cycle(self)
    def __getitem__(self, key):
        # Only sympy expressions are accepted as type-variable keys.
        if not isinstance(key, Basic):
            raise TypeError
        return self * tyvar(key)
class cp(ty):
    """Cartesian product of types: the GF is the product of component GFs."""
    def gf_rhs(self, *types):
        # Multiply one generating function chosen from each component type.
        return [foldr(lambda gf, acc: gf.rhs * acc,
                      lambda: Integer(1),
                      gfs)
                for gfs in self.gfs_space()]
    def label(self):
        return r'\times'
class du(ty):
    """Disjoint union of types: the GF is the sum of component GFs."""
    def label(self):
        return r'\cup'
    def gf_rhs(self, *types):
        # Add one generating function chosen from each component type.
        return [foldr(lambda gf, acc: gf.rhs + acc,
                      lambda: Integer(0),
                      gfs)
                for gfs in self.gfs_space()]
class tyvar(ty):
    """A type variable wrapping a single sympy symbol or expression."""
    def label(self):
        return r'\mathcal{V}'
    def gf_rhs(self, sym):
        # The generating function of a variable is the symbol itself.
        return [sym]
    def tyvars(self):
        # Free symbols of the wrapped expression (or the bare symbol itself).
        sym, = self.types
        args = sym.args
        syms = filter(lambda a: a.is_symbol, args) if args else [sym]
        return set(syms)
class maybe(ty):
    """Optional type: either the empty box or a value of type alpha."""
    def definition(self, alpha):
        return tyvar(emptybox) | alpha
    def label(self):
        return r'\mathcal{M}'
class rec(ty):
    """Base for recursively-defined types; solves the GF equation for itself."""
    def me(self):
        # A type variable standing for this type inside its own definition.
        return tyvar(self.gf_lhs())
    def gf(self):
        # Solve each defining equation for this type's own generating function.
        eqs = super().gf()
        return [define(eq.lhs, sol, ctor=FEq)
                for eq in eqs
                for sol in solve(define(eq.lhs, eq.rhs), [eq.lhs])]
class lst(rec):
    """Lists over alpha: L = 1 | alpha * L (possibly empty)."""
    def definition(self, alpha):
        return cp() | (alpha * self.me())
    def label(self):
        return r'\mathcal{L}'
class nnlst(rec):
    """Non-empty lists over alpha: L+ = alpha | alpha * L+."""
    def definition(self, alpha):
        return alpha | (alpha * self.me())
    def label(self):
        return r'\mathcal{L}_{+}'
class bin_tree(rec):
    """Binary trees over alpha: B = 1 | alpha * B * B (possibly empty)."""
    def definition(self, alpha):
        return cp() | (alpha * self.me() * self.me())
    def label(self):
        return r'\mathcal{B}'
class nnbin_tree(rec):
    """Non-empty binary trees over alpha: B+ = alpha | alpha * B+ * B+."""
    def definition(self, alpha):
        return alpha | (alpha * self.me() * self.me())
    def label(self):
        return r'\mathcal{B}_{+}'
def occupancy(eq, syms, objects='unlike', boxes='unlike'):
    """Specialise a generating-function equation to one occupancy model.

    ``objects``/``boxes`` each take 'like' or 'unlike': commutative symbols
    model indistinguishable items, non-commutative twins keep identity/order.
    Returns a new FEq defined over the resulting expression.
    """
    bullet = Symbol(r'\bullet', commutative=False)  # anonymous distinct object
    circ = Symbol(r'\circ', commutative=True)       # anonymous identical object
    # Non-commutative twins of the given symbols (distinguishable objects).
    osyms = [Symbol(s.name, commutative=False) for s in syms]
    def S(expr, assocs):
        # Simultaneous substitution helper.
        return expr.subs(assocs, simultaneous=True)
    oemptybox = Symbol(emptybox.name, commutative=False)
    # Always switch the empty box to its non-commutative twin first.
    rhs = S(eq.rhs, {emptybox: oemptybox})
    if (objects, boxes) == ('unlike', 'unlike'):
        gf = S(rhs, dict(zip(syms, osyms)))
    elif (objects, boxes) == ('like', 'unlike'):
        gf = S(rhs, dict(zip(syms, itertools.repeat(bullet))))
    elif (objects, boxes) == ('unlike', 'like'):
        gf = rhs #S(rhs, {oemptybox: Integer(1)})
    elif (objects, boxes) == ('like', 'like'):
        gf = S(S(rhs, dict(zip(syms, itertools.repeat(circ)))),
               {oemptybox: Integer(1)})
    else:
        raise ValueError('Unknown configuration')
    f = Function('gf')
    return define(f(*gf.free_symbols), gf, ctor=FEq)
# ______________________________________________________________________________
# Basic concrete types
# ______________________________________________________________________________
# Boolean modelled set-theoretically: w1 denotes truth, w0 falsehood.
truth, falsehood = tyvar(w[1]), tyvar(w[0])
boolean = truth | falsehood  # disjoint union of the two singletons
| 5,086 |
Python Scripts/mysqlupdater.py
|
8hubham/intranet_monitoring
| 0 |
2171335
|
import MySQLdb
import subprocess
import threading
import re
def mysqlupdater(ip, id, db, cur):
    """Ping ``ip`` and store its status and average RTT in tbavgspeed.

    :param ip: address to ping
    :param id: primary key of the tbavgspeed row to update
    :param db: open MySQLdb connection (used for commit)
    :param cur: cursor on ``db``
    """
    # '-t 1' limits the run to 1 second and '-c 4' sends four flood pings.
    terminal_command = ['sudo', 'ping', '-f', '-t', '1', '-c', '4', ip]
    p = subprocess.Popen(terminal_command, stdout=subprocess.PIPE)
    # Raw ping output. NOTE(review): on Python 3 this is ``bytes`` while the
    # module-level ``check`` is ``str`` - confirm the intended interpreter.
    text = p.stdout.read()
    if check in text:
        status = 1
        # Extract avg from the "min/avg/max/stddev = a/b/c/d ms" summary line.
        temp_string_1 = re.search("stddev(.+?)ms", text)
        allValues1 = temp_string_1.group(1)
        temp_string_2 = re.search("/(.+?)/", allValues1)
        avg_time = temp_string_2.group(1)
    else:
        # Host unreachable: record it as down with a zero average.
        status = 0
        avg_time = 0
    # Single UPDATE shared by both branches (was duplicated verbatim).
    cur.execute("""UPDATE tbavgspeed SET ip_address = %s, avg_time = %s, status = %s WHERE id = %s""",(ip,avg_time,status,id))
    db.commit()
def calling_function():
    """Poll every monitored address once and re-schedule itself in 1 second."""
    # Re-arm the timer first so the cadence is independent of the work below.
    # NOTE(review): this spawns a new thread and opens a fresh DB connection
    # every second - confirm that load is intended.
    threading.Timer(1, calling_function).start()
    # Connecting to MySQL database
    db = MySQLdb.connect("localhost","test_local","thunderbolt","dbavgspeed")
    # Initializing cursor
    cur = db.cursor()
    # Row ids in tbavgspeed are taken to be the 1-based list positions.
    for i in list_of_addresses:
        mysqlupdater(i,list_of_addresses.index(i) + 1,db,cur)
    # Terminates the connection to SQL Database
    db.close()
# If the ping succeeded, the summary line contains "avg".
check = "avg"
# Hosts monitored by the updater; row ids follow their list positions.
list_of_addresses = ['192.168.1.1', '192.168.1.33']
calling_function()
| 1,513 |
VideoCodingBasics/Animations/IntraPrediction/IntraPrediction.py
|
ChristianFeldmann/PresentationMaterial
| 0 |
2170403
|
import bpy
from bpy.types import Operator
import bpy_extras
from mathutils import Vector
class INTRAPRED_OT_test(Operator):
    """Blender operator that adds a fixed 4x1 test quad to the scene."""
    bl_idname = "intrapred.test"
    bl_label = "Add Test Object"
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        # Corners of a 4x1 rectangle in the XZ plane (y = 0).
        verts = [
            Vector((0, 0, 0)),
            Vector((4, 0, 0)),
            Vector((4, 0, 1)),
            Vector((0, 0, 1))
        ]
        edges = []
        faces = [[0, 1, 2, 3]]  # one quad spanning the four vertices
        # Plain literal; was an f-string with no placeholders (ruff F541).
        mesh = bpy.data.meshes.new(name="TestPanel")
        mesh.from_pydata(verts, edges, faces)
        # Link the mesh into the scene as a new object.
        bpy_extras.object_utils.object_data_add(context, mesh)
        return {'FINISHED'}
class INTRAPRED_PT_panel(bpy.types.Panel):
    """Sidebar panel (3D View > 'NewTab') hosting the test-draw button."""
    bl_label = "Intra Prediction"
    bl_idname = "INTRAPRED_PT_panel"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "NewTab"
    def draw(self, context):
        # Single row with a button that invokes the intrapred.test operator.
        row = self.layout.row()
        row.operator("intrapred.test", text="Draw Test")
def register():
    """Register the add-on's operator and panel classes with Blender."""
    for cls in (INTRAPRED_OT_test, INTRAPRED_PT_panel):
        bpy.utils.register_class(cls)
def unregister():
    """Unregister the add-on's operator and panel classes from Blender."""
    for cls in (INTRAPRED_OT_test, INTRAPRED_PT_panel):
        bpy.utils.unregister_class(cls)
if __name__ == "__main__":
register()
| 1,272 |
core/nodes.py
|
Heckle1/AppController
| 0 |
2164849
|
"""
Nodes class represent the current existings scalable nodes
Node communicate with SCM API
"""
import requests
import logging
# -------- SCM API version ------------
SCM_API_VERSION = '0.1'
# -------- Logging setup ------------
LOGGER = logging.getLogger(__name__)
class NodeErr(Exception):
    """Generic error raised for node-management failures.

    BUGFIX: previously derived from ``object``, so ``raise NodeErr(...)``
    itself failed with a TypeError - exceptions must derive from
    BaseException.
    """
class Node(object):
    """A single scalable node; queries its SCM API over HTTP for load data."""
    def __init__(self, instance_id, address, port):
        """
        :param instance_id: node instance_id
        :type instance_id: str
        :param address: scm node address
        :type address: str
        :param port: scm node port
        :type port: str
        """
        self.instance_id = instance_id
        self.address = address
        self.port = port
        # NOTE(review): 'UNKOWN' (sic) is the sentinel state used throughout;
        # kept as-is because other components may compare against it.
        self.lb_state = 'UNKOWN'
        self.lb_description = ''
        self.cpu_load = -1  # -1 means "no successful reading yet"
        self.ram_load = -1
        logging.debug('New node created')
        # Best-effort initial load reading; failures leave the -1 defaults.
        self.update()
    def setup_backend_up(self):
        # NOTE(review): name is inconsistent with set_backend_down/unkown;
        # kept for backward compatibility with existing callers.
        logging.info('Setup node {0} to state UP'.format(self.instance_id))
        self.lb_state = 'UP'
    def set_backend_down(self):
        logging.info('Setup node {0} to state DOWN'.format(self.instance_id))
        self.lb_state = 'DOWN'
    def set_backend_unkown(self):
        logging.info('Setup node {0} to state UNKOWN'.format(self.instance_id))
        self.lb_state = 'UNKOWN'
    def update(self):
        """
        Call node to get its current load
        :returns: boolean
        """
        try:
            resp = requests.post('http://{0}:{1}/api/v{2}/all/usage'.format(self.address, self.port, SCM_API_VERSION)).json()
            self.cpu_load = resp['cpu']
            self.ram_load = resp['ram']
        except Exception as err:
            logging.error('Can not update node {0} information because: {1}'.format(self.instance_id, err))
            return False
        # BUGFIX: the debug line previously formatted cpu_load into the {0}
        # slot meant for the instance id and dropped the ram value entirely.
        logging.debug('Node {0} with cpu={1} and ram={2}'.format(self.instance_id, self.cpu_load, self.ram_load))
        return True
class Nodes(object):
    """Collection of the currently known scalable nodes."""

    def __init__(self):
        self.nodes = []

    def add_node(self, instance_id, address, port):
        """Create a Node and add it to the collection.

        :param instance_id: node instance_id used in load balancer configuration
        :type instance_id: str
        :param address: node url or ip to be used by the load balancer
        :type address: str
        :param port: node port to be used by the load balancer
        :type port: str

        BUGFIX: the original called ``self.append(...)`` which raised
        ``AttributeError``; the new node must be appended to ``self.nodes``.
        """
        self.nodes.append(Node(instance_id, address, port))

    def remove_node(self, instance_id):
        """Remove a node by its instance_id.

        :param instance_id: node instance_id
        :type instance_id: str
        :raises NodeErr: if no node with that id is known

        BUGFIX: the original located the matching node but never actually
        removed it from ``self.nodes``.
        """
        matching = [node for node in self.nodes if node.instance_id == instance_id]
        if not matching:
            logging.warning('Can not remove node {0}'.format(instance_id))
            raise NodeErr('No {0} known'.format(instance_id))
        for node in matching:
            self.nodes.remove(node)

    def update_nodes_status(self):
        """Refresh load information of every known node."""
        for node in self.nodes:
            node.update()
| 3,104 |
src/models/attention/topk_attention.py
|
xxchenxx/pixelfly
| 49 |
2171746
|
import math
import torch
import torch.nn as nn
from einops import rearrange
# Adapted from https://github.com/idiap/fast-transformers/blob/master/fast_transformers/attention/exact_topk_attention.py
class ExactTopKAttention(nn.Module):
    """Oracle top-k softmax attention.

    For every query position, only the ``topk`` highest-scoring keys are
    kept (computed exactly, hence "oracle") and the softmax is renormalised
    over just those keys.

    Arguments
    ---------
        topk: the top k keys to attend to (default: 32)
        softmax_temp: softmax temperature; defaults to 1/sqrt(d_keys),
            where d_keys is read from the query at runtime
        attention_dropout: dropout rate applied to the attention weights
            (default: 0.0)
    """
    def __init__(self, topk, softmax_temp=None, attention_dropout=0.0, device=None, dtype=None):
        # device/dtype are accepted for signature compatibility but unused.
        super().__init__()
        self.topk = topk
        self.softmax_temp = softmax_temp
        self.dropout = nn.Dropout(attention_dropout)

    def forward(self, query, key, value, attn_mask=None, key_padding_mask=None, need_weights=False):
        head_dim = query.shape[-1]
        kv_len = value.shape[1]
        temperature = self.softmax_temp or 1 / math.sqrt(head_dim)
        keep = min(self.topk, kv_len)

        # Unnormalised attention scores with the temperature folded into the
        # query side: shape (batch, heads, q_len, kv_len).
        scores = torch.einsum("bthe,bshe->bhts", query * temperature, key)

        # Apply the optional attention / key-padding masks.
        if attn_mask is not None and not attn_mask.all_ones:
            scores.masked_fill_(~attn_mask.bool_matrix, float('-inf'))
        if key_padding_mask is not None and not key_padding_mask.all_ones:
            scores.masked_fill_(rearrange(~key_padding_mask.bool_matrix, 'b s -> b 1 1 s'),
                                float('-inf'))

        # Mask out everything except the exact top-k keys per query.
        _, keep_idx = torch.topk(scores, keep, dim=-1, sorted=False)
        outside_topk = torch.ones_like(scores, dtype=torch.bool)
        outside_topk.scatter_(-1, keep_idx, False)
        scores.masked_fill_(outside_topk, float('-inf'))

        # Softmax over the surviving keys, then the weighted value average.
        attn_weights = torch.softmax(scores, dim=-1)
        context = torch.einsum("bhts,bshd->bthd", self.dropout(attn_weights), value)
        return context, (attn_weights if need_weights else None)
| 2,350 |
src/cdbmanager.py
|
connectordb/connectordb-laptoplogger
| 4 |
2171850
|
# The manager handles management of a local ConnectorDB database. It creates, sets up,
# and handles everything needed to run ConnectorDB
import os
import sys
import logging
import files
import shutil
import platform
# Python 2 requires subprocess32
try:
import subprocess32 as subprocess
except:
import subprocess
class Manager(object):
    """Manages a local ConnectorDB database: creation, import/export,
    start/stop and removal.

    Wraps the ``connectordb`` command line executable; the binary is located
    via ``files.getConnectorDB()`` unless an explicit path is given.
    """

    def __init__(self, location, cdb_executable=None):
        """
        :param location: directory where the database lives (made absolute)
        :param cdb_executable: path of the ConnectorDB binary; autodetected
            when None
        :raises Exception: if no ConnectorDB executable can be found
        """
        self.cdb_executable = cdb_executable
        if self.cdb_executable is None:
            self.cdb_executable = files.getConnectorDB()
        if self.cdb_executable is None:
            raise Exception("Could not find ConnectorDB executable")
        self.location = os.path.abspath(location)

    def createAndImport(self, username, password, folder, out=sys.stdout):
        """
        Creates and imports the database. Leaves ConnectorDB running. Run stop to
        shut down the resulting database.

        :raises Exception: if the user is missing from the import folder, the
            database already exists, or any step of create/import/passwd fails
            (the partially-created database directory is removed on failure).
        """
        # Make sure the user exists in the folder to import
        if not os.path.isdir(os.path.join(folder, username)):
            raise Exception("User " + username + " not found in " + folder)
        # We can't create the database if the folder already exists
        if os.path.exists(self.location):
            raise Exception(
                "A ConnectorDB database already exists at " + self.location)
        # Try to create the database
        logging.info("Creating new ConnectorDB database at " + self.location)
        cmd = [self.cdb_executable, "create",
               self.location, "--sqlbackend=sqlite3"]
        retcode = self.runproc_window(cmd, out)
        if retcode != 0:
            shutil.rmtree(self.location)
            raise Exception("Failed to create database")
        # Start the database and import the data
        self.start(out)
        ret = self.importDatabase(folder, out)
        if ret != 0:
            self.stop()
            shutil.rmtree(self.location)
            raise Exception("Could not import the database")
        # Change the user's password to the one given.
        if self.runproc([self.cdb_executable, "shell",
                         self.location, "passwd", username, password], out) != 0:
            self.stop()
            shutil.rmtree(self.location)
            raise Exception("Failed to set up user's password")
        logging.info("Database import complete.")

    def create(self, username, password, out=sys.stdout):
        """Create a new database with an initial user.

        :returns: the subprocess return code (0 on success)

        BUGFIX: the original returned None on success, which made the
        ``create(...) != 0`` check in ``__main__`` report a spurious failure.
        """
        logging.info("Creating new ConnectorDB database at " + self.location)
        # We can't create the database if the folder already exists
        if os.path.exists(self.location):
            raise Exception(
                "A ConnectorDB database already exists at " + self.location)
        cmd = [self.cdb_executable, "create", self.location,
               "--user=" + username + ":" + password, "--sqlbackend=sqlite3"]
        retcode = self.runproc_window(cmd, out)
        if retcode != 0:
            shutil.rmtree(self.location)
        return retcode

    def importDatabase(self, location, out=sys.stdout):
        """Import data from *location* into the running database."""
        # BUGFIX: originally logged self.location instead of the source folder.
        logging.info("Importing from " + location)
        return self.runproc_window([self.cdb_executable, "import", self.location, location], out)

    def exportDatabase(self, location, out=sys.stdout):
        """Export the database contents to *location*."""
        # BUGFIX: originally logged self.location instead of the target folder.
        logging.info("Exporting to " + location)
        return self.runproc_window([self.cdb_executable, "export", self.location, location], out)

    def start(self, out=sys.stdout):
        """Start the ConnectorDB server for this database."""
        logging.info("Starting database at " + self.location)
        return self.runproc([self.cdb_executable, "start", self.location], out)

    def stop(self, out=sys.stdout):
        """Stop the ConnectorDB server for this database."""
        logging.info("Stopping database at " + self.location)
        return self.runproc([self.cdb_executable, "stop", self.location], out)

    def remove(self):
        """Delete the database directory."""
        shutil.rmtree(self.location)

    def version(self):
        """Return the version string reported by the ConnectorDB executable."""
        return version(cdb_executable=self.cdb_executable)

    def runproc(self, cmd, out):
        """Run *cmd* without showing a console window; returns its exit code."""
        retcode = None
        # There are issues in Windows with pyinstaller that make console windows pop up.
        # We disallow the console window here.
        if platform.system() == "Windows":
            # https://code.activestate.com/recipes/578300-python-subprocess-hide-console-on-windows/
            # NOTE(review): CREATE_NEW_CONSOLE is a creationflags value, not a
            # STARTUPINFO flag — confirm this combination behaves as intended.
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags = subprocess.CREATE_NEW_CONSOLE | subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = subprocess.SW_HIDE
            retcode = subprocess.call(cmd, startupinfo=startupinfo)
        else:
            retcode = subprocess.call(
                cmd, stdout=out, stderr=out, start_new_session=True)
        return retcode

    def runproc_window(self, cmd, out):
        """Run *cmd*, allowing a console window on Windows; returns its exit code."""
        retcode = None
        # There are issues in Windows with pyinstaller that make console windows pop up.
        # We allow this console window but don't redirect output.
        if platform.system() == "Windows":
            retcode = subprocess.call(cmd)
        else:
            retcode = subprocess.call(cmd, stdout=out, stderr=out)
        return retcode
def version(cdb_executable=None):
    """Return the semantic version string reported by the ConnectorDB binary.

    When no executable path is given, it is located via
    ``files.getConnectorDB()``; returns "" if none can be found.
    """
    if cdb_executable is None:
        cdb_executable = files.getConnectorDB()
    if cdb_executable is None:
        return ""
    cmd = [cdb_executable, "--semver"]
    popen_kwargs = {"stdin": subprocess.PIPE, "stdout": subprocess.PIPE}
    if platform.system() == "Windows":
        # https://code.activestate.com/recipes/578300-python-subprocess-hide-console-on-windows/
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags = subprocess.CREATE_NEW_CONSOLE | subprocess.STARTF_USESHOWWINDOW
        startupinfo.wShowWindow = subprocess.SW_HIDE
        popen_kwargs["startupinfo"] = startupinfo
    proc = subprocess.Popen(cmd, **popen_kwargs)
    out, err = proc.communicate()
    return out.decode("utf-8").strip()
# Manual smoke test: create a throwaway database with a test user, start it,
# open the web UI in a browser, and shut everything down when the user
# presses Enter.
if (__name__ == "__main__"):
    import webbrowser
    logging.basicConfig(level=logging.DEBUG)
    m = Manager("./db", "../bin/connectordb")
    # NOTE(review): this check relies on create() returning 0 on success —
    # verify, since a None return would also take the failure branch.
    if (m.create("test", "test") != 0):
        print("CREATE FAILED")
    else:
        if (m.start() != 0):
            print("START FAILED")
        else:
            webbrowser.open("http://localhost:3124")
            input()  # wait for the user before tearing the server down
            m.stop()
| 6,377 |
dbbackup/tests/functional/test_commands.py
|
KessoumML/django-dbbackup
| 562 |
2172405
|
import os
import tempfile
from mock import patch
from django.test import TransactionTestCase as TestCase
from django.core.management import execute_from_command_line
from django.conf import settings
from dbbackup.tests.utils import (TEST_DATABASE, HANDLED_FILES,
clean_gpg_keys, add_public_gpg,
add_private_gpg, get_dump,
get_dump_name)
from dbbackup.tests.testapp import models
class DbBackupCommandTest(TestCase):
    """Functional tests for the ``dbbackup`` management command.

    Written backups are captured by the in-memory HANDLED_FILES registry
    (from dbbackup.tests.utils) rather than hitting real storage.
    """

    def setUp(self):
        HANDLED_FILES.clean()
        add_public_gpg()  # public key so --encrypt has a recipient
        # Make sure the sqlite database file exists before backing it up.
        open(TEST_DATABASE['NAME'], 'a').close()
        self.instance = models.CharModel.objects.create(field='foo')

    def tearDown(self):
        clean_gpg_keys()

    def test_database(self):
        argv = ['', 'dbbackup', '--database=default']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        # Test file content: the backup must not be empty.
        outputfile.seek(0)
        self.assertTrue(outputfile.read())

    def test_encrypt(self):
        argv = ['', 'dbbackup', '--encrypt']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        self.assertTrue(filename.endswith('.gpg'))
        # Test file content: an ASCII-armored PGP message.
        outputfile = HANDLED_FILES['written_files'][0][1]
        outputfile.seek(0)
        self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))

    def test_compress(self):
        argv = ['', 'dbbackup', '--compress']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        self.assertTrue(filename.endswith('.gz'))

    def test_compress_and_encrypt(self):
        # Compression happens before encryption, hence the .gz.gpg suffix.
        argv = ['', 'dbbackup', '--compress', '--encrypt']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        self.assertTrue(filename.endswith('.gz.gpg'))
        # Test file content: an ASCII-armored PGP message.
        outputfile = HANDLED_FILES['written_files'][0][1]
        outputfile.seek(0)
        self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))
# Auto-confirm the interactive "are you sure?" prompt for every test method.
@patch('dbbackup.management.commands._base.input', return_value='y')
class DbRestoreCommandTest(TestCase):
    """Functional tests for the ``dbrestore`` management command.

    Each test creates a backup via ``dbbackup`` first, then exercises the
    restore path (plain, encrypted, compressed, and the failure modes).
    """

    def setUp(self):
        HANDLED_FILES.clean()
        add_public_gpg()
        add_private_gpg()  # private key needed for --decrypt
        open(TEST_DATABASE['NAME'], 'a').close()
        self.instance = models.CharModel.objects.create(field='foo')

    def tearDown(self):
        clean_gpg_keys()

    def test_restore(self, *args):
        # Create backup
        execute_from_command_line(['', 'dbbackup'])
        self.instance.delete()
        # Restore
        execute_from_command_line(['', 'dbrestore'])
        restored = models.CharModel.objects.all().exists()
        self.assertTrue(restored)

    @patch('dbbackup.utils.getpass', return_value=None)
    def test_encrypted(self, *args):
        # Create backup
        execute_from_command_line(['', 'dbbackup', '--encrypt'])
        self.instance.delete()
        # Restore
        execute_from_command_line(['', 'dbrestore', '--decrypt'])
        restored = models.CharModel.objects.all().exists()
        self.assertTrue(restored)

    def test_compressed(self, *args):
        # NOTE(review): unlike test_restore, this never asserts the data came
        # back — it only checks the command completes; consider adding the
        # exists() assertion.
        # Create backup
        execute_from_command_line(['', 'dbbackup', '--compress'])
        self.instance.delete()
        # Restore
        execute_from_command_line(['', 'dbrestore', '--uncompress'])

    def test_no_backup_available(self, *args):
        # With no backup written, dbrestore is expected to exit non-zero.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'dbrestore'])

    @patch('dbbackup.utils.getpass', return_value=None)
    def test_available_but_not_encrypted(self, *args):
        # Create backup (unencrypted)
        execute_from_command_line(['', 'dbbackup'])
        # Restore with --decrypt must fail: no encrypted backup exists.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'dbrestore', '--decrypt'])

    def test_available_but_not_compressed(self, *args):
        # Create backup (uncompressed)
        execute_from_command_line(['', 'dbbackup'])
        # Restore with --uncompress must fail: no compressed backup exists.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'dbrestore', '--uncompress'])

    def test_specify_db(self, *args):
        # Create backup
        execute_from_command_line(['', 'dbbackup', '--database', 'default'])
        # Test wrong name: unknown database alias must abort.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'dbrestore', '--database', 'foo'])
        # Restore
        execute_from_command_line(['', 'dbrestore', '--database', 'default'])
class MediaBackupCommandTest(TestCase):
    """Functional tests for the ``mediabackup`` management command.

    Like the db tests, written files are captured in HANDLED_FILES.
    """

    def setUp(self):
        HANDLED_FILES.clean()
        add_public_gpg()

    def tearDown(self):
        clean_gpg_keys()

    def test_encrypt(self):
        argv = ['', 'mediabackup', '--encrypt']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        self.assertTrue('.gpg' in filename)
        # Test file content: an ASCII-armored PGP message.
        outputfile = HANDLED_FILES['written_files'][0][1]
        outputfile.seek(0)
        self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))

    def test_compress(self):
        argv = ['', 'mediabackup', '--compress']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        self.assertTrue('.gz' in filename)

    @patch('dbbackup.utils.getpass', return_value=None)
    def test_compress_and_encrypted(self, getpass_mock):
        # Both transforms apply: the filename carries .gz and .gpg markers.
        argv = ['', 'mediabackup', '--compress', '--encrypt']
        execute_from_command_line(argv)
        self.assertEqual(1, len(HANDLED_FILES['written_files']))
        filename, outputfile = HANDLED_FILES['written_files'][0]
        self.assertTrue('.gpg' in filename)
        self.assertTrue('.gz' in filename)
        # Test file content: an ASCII-armored PGP message.
        outputfile = HANDLED_FILES['written_files'][0][1]
        outputfile.seek(0)
        self.assertTrue(outputfile.read().startswith(b'-----BEGIN PGP MESSAGE-----'))
# Auto-confirm the interactive "are you sure?" prompt for every test method.
@patch('dbbackup.management.commands._base.input', return_value='y')
class MediaRestoreCommandTest(TestCase):
    """Functional tests for the ``mediarestore`` management command.

    Media files are created directly under settings.MEDIA_ROOT, backed up,
    wiped, and then restored.
    """

    def setUp(self):
        HANDLED_FILES.clean()
        add_public_gpg()
        add_private_gpg()

    def tearDown(self):
        clean_gpg_keys()
        self._emtpy_media()

    def _create_file(self, name=None):
        # Create a small file in MEDIA_ROOT.
        # NOTE(review): the default-name path uses the Python 2 style
        # `.next()` on _RandomNameSequence (and a private tempfile API);
        # every current caller passes an explicit name, so it never runs.
        name = name or tempfile._RandomNameSequence().next()
        path = os.path.join(settings.MEDIA_ROOT, name)
        with open(path, 'a+b') as fd:
            fd.write(b'foo')

    def _emtpy_media(self):
        # Remove every file from MEDIA_ROOT.  (Name typo kept: "emtpy".)
        for fi in os.listdir(settings.MEDIA_ROOT):
            os.remove(os.path.join(settings.MEDIA_ROOT, fi))

    def _is_restored(self):
        # True when MEDIA_ROOT contains at least one file again.
        return bool(os.listdir(settings.MEDIA_ROOT))

    def test_restore(self, *args):
        # Create backup
        self._create_file('foo')
        execute_from_command_line(['', 'mediabackup'])
        self._emtpy_media()
        # Restore
        execute_from_command_line(['', 'mediarestore'])
        self.assertTrue(self._is_restored())

    @patch('dbbackup.utils.getpass', return_value=None)
    def test_encrypted(self, *args):
        # Create backup
        self._create_file('foo')
        execute_from_command_line(['', 'mediabackup', '--encrypt'])
        self._emtpy_media()
        # Restore
        execute_from_command_line(['', 'mediarestore', '--decrypt'])
        self.assertTrue(self._is_restored())

    def test_compressed(self, *args):
        # Create backup
        self._create_file('foo')
        execute_from_command_line(['', 'mediabackup', '--compress'])
        self._emtpy_media()
        # Restore
        execute_from_command_line(['', 'mediarestore', '--uncompress'])
        self.assertTrue(self._is_restored())

    def test_no_backup_available(self, *args):
        # With no backup written, mediarestore is expected to exit non-zero.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'mediarestore'])

    @patch('dbbackup.utils.getpass', return_value=None)
    def test_available_but_not_encrypted(self, *args):
        # Create backup (unencrypted)
        execute_from_command_line(['', 'mediabackup'])
        # Restore with --decrypt must fail: no encrypted backup exists.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'mediarestore', '--decrypt'])

    def test_available_but_not_compressed(self, *args):
        # Create backup (uncompressed)
        execute_from_command_line(['', 'mediabackup'])
        # Restore with --uncompress must fail: no compressed backup exists.
        with self.assertRaises(SystemExit):
            execute_from_command_line(['', 'mediarestore', '--uncompress'])
| 8,952 |
change_email/conf.py
|
astrobin/django-change-email
| 1 |
2171161
|
from django.conf import settings as django_settings
from easysettings import AppSettings
class Settings(AppSettings):
    """
    Default settings for django-change-email.

    Each attribute can be overridden in the Django project settings; the
    values below are the fallbacks supplied via easysettings.AppSettings.
    """
    #: Determines the URL to redirect to after an email change request has been
    #: deleted.
    EMAIL_CHANGE_DELETE_SUCCESS_REDIRECT_URL = '/account/email/change/'
    #: Determines the e-mail address field on the (custom) user model.
    EMAIL_CHANGE_FIELD = 'email'
    #: Determines the e-mail address used to send confirmation mails.
    EMAIL_CHANGE_FROM_EMAIL = django_settings.DEFAULT_FROM_EMAIL
    #: Determines whether to send HTML emails.
    EMAIL_CHANGE_HTML_EMAIL = False
    #: Determines the template used to render the HTML text of the
    #: confirmation email.
    EMAIL_CHANGE_HTML_EMAIL_TEMPLATE = 'change_email/mail/body.html'
    #: Determines the template used to render the subject of the
    #: confirmation email.
    EMAIL_CHANGE_SUBJECT_EMAIL_TEMPLATE = 'change_email/mail/subject.txt'
    #: Determines the expiration time of an e-mail address change requests.
    #: Defaults to 7 days (in seconds).
    EMAIL_CHANGE_TIMEOUT = 60*60*24*7
    #: Determines the template used to render the plain text body of the
    #: confirmation email.
    EMAIL_CHANGE_TXT_EMAIL_TEMPLATE = 'change_email/mail/body.txt'
    #: Determines whether to use HTTPS when generating the confirmation link.
    EMAIL_CHANGE_USE_HTTPS = False
    #: Determines whether to check if the email address is used on a particular
    #: site. Set to True to make email addresses unique on different sites.
    EMAIL_CHANGE_VALIDATE_SITE = False

# Module-level singleton imported by the rest of the app.
settings = Settings()
| 1,631 |
CodeWars/7 Kyu/Radio DJ helper function.py
|
anubhab-code/Competitive-Programming
| 0 |
2170733
|
def to_seconds(time):
    """Convert a 'MM:SS' duration string to the total number of seconds."""
    minutes, seconds = (int(part) for part in time.split(':'))
    return 60 * minutes + seconds
def longest_possible(playback, songs=None):
    """Return the title of the longest song whose duration fits within
    *playback* seconds, or False when no song fits.

    BUGFIX/generalization: the original iterated over an undefined global
    ``songs`` (a NameError outside the CodeWars kata, which preloads it).
    ``songs`` may now be passed explicitly; when omitted it still falls back
    to the module-level ``songs`` list, so the original one-argument call
    keeps working.
    """
    if songs is None:
        songs = globals()['songs']  # kata-preloaded catalogue of {'title', 'playback'} dicts
    best_length = 0
    best_title = ''
    for song in songs:
        song_length = to_seconds(song['playback'])
        if best_length < song_length <= playback:
            best_length = song_length
            best_title = song['title']
    return best_title or False
| 413 |
demo/car_control/debug_display.py
|
shigeyukioba/matchernet
| 1 |
2171432
|
# -*- coding: utf-8 -*-
import numpy as np
import argparse
from distutils.util import strtobool
import pygame, sys
import cv2
from matchernet import MPCEnv, MovieWriter
from car import CarDynamics, CarRenderer
from obstacle_reward_system import ObstacleRewardSystem
BLACK = (0, 0, 0)
FPS = 60
class Display(object):
    """Pygame window that renders MPCEnv frames and optionally records them
    to ``out.mov``."""

    def __init__(self, display_size, env, recording=False):
        self.env = env
        pygame.init()
        self.surface = pygame.display.set_mode(display_size, 0, 24)
        pygame.display.set_caption('MPCenv')
        self.writer = MovieWriter("out.mov", display_size, FPS) if recording else None

    def update(self, left, right, up, down):
        """Advance one frame: clear the window, step the env, flip the display."""
        self.surface.fill(BLACK)
        self.process(left, right, up, down)
        pygame.display.update()

    def show_image(self, state):
        """Blit a float [0, 1] RGB array to the window (and to the movie, if recording)."""
        frame = (state * 255.0).astype(np.uint8)
        frame_surface = pygame.image.frombuffer(frame, (256, 256), 'RGB')
        self.surface.blit(frame_surface, (0, 0))
        if self.writer is not None:
            self.writer.add_frame(frame)

    def process(self, left, right, up, down):
        """Map the current key state to a (steering, force) action and step the env."""
        force = (0.2 if up else 0.0) - (0.2 if down else 0.0)
        angle = (0.5 if right else 0.0) - (0.5 if left else 0.0)
        action = np.array([angle, force], dtype=np.float32)
        state, reward = self.env.step(action)
        self.show_image(state)

    def close(self):
        """Finalize the movie file if one was being recorded."""
        if self.writer is not None:
            self.writer.close()
def main():
    """Drive the car environment interactively with the arrow keys.

    UP/DOWN apply forward/backward force, LEFT/RIGHT steer; ESC or closing
    the window quits.  Pass ``--recording true`` to also write out.mov.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--recording", type=strtobool,
                        default="false")
    args = parser.parse_args()
    recording = args.recording

    display_size = (256, 256)
    dt = 0.03  # simulation timestep (seconds)

    dynamics = CarDynamics()
    renderer = CarRenderer()
    reward_system = ObstacleRewardSystem()
    env = MPCEnv(dynamics, renderer, reward_system, dt, use_visual_state=True)
    display = Display(display_size, env, recording=recording)

    clock = pygame.time.Clock()
    running = True
    # Currently-pressed state of each arrow key.
    left_pressed = False
    right_pressed = False
    up_pressed = False
    down_pressed = False
    # CLEANUP: removed the unused esc_pressed flag; ESC simply stops the loop.
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    left_pressed = True
                elif event.key == pygame.K_RIGHT:
                    right_pressed = True
                elif event.key == pygame.K_UP:
                    up_pressed = True
                elif event.key == pygame.K_DOWN:
                    down_pressed = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
            elif event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT:
                    left_pressed = False
                elif event.key == pygame.K_RIGHT:
                    right_pressed = False
                elif event.key == pygame.K_UP:
                    up_pressed = False
                elif event.key == pygame.K_DOWN:
                    down_pressed = False
        display.update(left_pressed, right_pressed, up_pressed, down_pressed)
        clock.tick(FPS)  # cap the loop at FPS frames per second
    display.close()
# Script entry point.
if __name__ == '__main__':
    main()
| 3,585 |
src/soap/http.py
|
thelabnyc/instrumented-soap
| 1 |
2172409
|
from suds.transport import Transport, Reply
from . import settings
import urllib.request
import urllib.parse
import logging
import requests
import io
logger = logging.getLogger(__name__)
class HttpTransport(Transport):
    """
    Custom HTTPTransport to replace :class:`suds.transport.http.HttpTransport <suds.transport.http.HttpTransport>`.

    The stock suds transport (urllib2-based) misbehaves behind HTTP proxies
    such as Squid; this implementation routes all traffic through
    python-requests, which handles proxying correctly.
    """
    #: Timeout for opening a WSDL file. Tuple (CONNECT_TIMEOUT, READ_TIMEOUT)
    open_timeout = settings.OPEN_TIMEOUT
    #: Timeout for sending a SOAP call. Tuple (CONNECT_TIMEOUT, READ_TIMEOUT)
    send_timeout = settings.SEND_TIMEOUT

    def open(self, request):
        """
        Open a SOAP WSDL.

        :param request: :class:`suds.transport.Request <suds.transport.Request>` object
        :return: WSDL content as a file-like object
        :rtype: io.BytesIO
        """
        url = request.url
        logger.debug("Opening WSDL: %s " % url)
        # Local WSDL files bypass the proxy machinery entirely.
        if url.startswith("file://"):
            return urllib.request.urlopen(url)
        resp = requests.get(
            url, proxies=self.proxies(url), timeout=self.open_timeout
        )
        resp.raise_for_status()
        return io.BytesIO(resp.content)

    def send(self, request):
        """
        Send a SOAP method call.

        :param request: :class:`suds.transport.Request <suds.transport.Request>` object
        :return: :class:`suds.transport.Reply <suds.transport.Reply>` object
        :rtype: suds.transport.Reply
        """
        logger.debug("Sending SOAP request: %s" % request.url)
        resp = requests.post(
            request.url,
            proxies=self.proxies(request.url),
            timeout=self.send_timeout,
            data=request.message,
            headers=request.headers,
        )
        resp.raise_for_status()
        return Reply(requests.codes.OK, resp.headers, resp.content)

    def proxies(self, url):
        """
        Get the transport proxy configuration for *url*.

        Per-host entries in settings.PROXIES win over the global
        settings.PROXY_URL fallback.

        :param url: string
        :return: Proxy configuration dictionary
        :rtype: Dictionary
        """
        host = urllib.parse.urlparse(url).netloc
        per_host = settings.PROXIES.get(host) if settings.PROXIES else None
        if per_host:
            return {"http": per_host, "https": per_host}
        if settings.PROXY_URL:
            return {"http": settings.PROXY_URL, "https": settings.PROXY_URL}
        return {}
| 2,857 |
server_py/flatgov/common/biden_statements.py
|
aih/BillMap
| 2 |
2172242
|
import json
import requests
from bills.models import Statement
import os
from django.core.files import File
from django.conf import settings
import yaml
def load_biden_statements():
    """Sync Biden-administration Statements of Administration Policy.

    Fetches the upstream YAML archive and rebuilds the local Statement rows
    when it contains new entries.  Returns True (changing nothing) when the
    local table already has at least as many Biden statements as upstream.
    """
    existing = Statement.objects.filter(administration='Biden')
    url = 'https://raw.githubusercontent.com/unitedstates/statements-of-administration-policy/main/archive/46-Biden.yaml'
    original_pdf_path = 'https://github.com/unitedstates/statements-of-administration-policy/tree/main/archive/'
    archive = yaml.safe_load(requests.get(url).text)
    if existing.count() >= len(archive):
        print("There're no any new biden statement to process.")
        return True
    # Rebuild from scratch: drop the old rows, then insert one per entry.
    existing.delete()
    for index, entry in enumerate(archive):
        statement = Statement()
        statement.bill_id = str(entry['congress']) + str(entry['bills'][0]).lower()
        statement.bill_title = entry['document_title']
        statement.congress = entry['congress']
        statement.date_issued = entry['date_issued']
        statement.original_pdf_link = entry['fetched_from_url']
        statement.bill_number = entry['bills'][0]
        statement.administration = 'Biden'
        statement.permanent_pdf_link = original_pdf_path + entry['file']
        statement.save()
        print(index, statement)
| 1,449 |
webdev/fornecedores/migrations/0014_remove_fornecedor_foto.py
|
h-zanetti/jewelry-manager
| 0 |
2171801
|
# Generated by Django 3.1.5 on 2021-05-20 19:31
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django (makemigrations): drops the ``foto`` field
    # from the ``Fornecedor`` model.

    dependencies = [
        ('fornecedores', '0013_auto_20210515_1915'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='fornecedor',
            name='foto',
        ),
    ]
| 335 |
src/_lib/wordpress_journey_processor.py
|
OrlandoSoto/owning-a-home
| 95 |
2172365
|
import sys
import json
import os.path
import requests
import dateutil.parser
def posts_at_url(url):
    """Yield every post from a paginated WordPress JSON endpoint.

    The total page count is taken from each response's 'pages' field; the
    loop starts with an effectively unbounded maximum (this module targets
    Python 2, hence sys.maxint).
    """
    page = 1
    last_page = sys.maxint
    while page <= last_page:
        # Environment variables in the URL (e.g. credentials) are expanded
        # on every request.
        resp = requests.get(os.path.expandvars(url),
                            params={'page': page, 'count': '-1'})
        payload = json.loads(resp.content)
        page += 1
        last_page = payload['pages']
        for post in payload['posts']:
            yield post
def documents(name, url, **kwargs):
    """Yield processed journey documents fetched from *url*."""
    for raw_post in posts_at_url(url):
        yield process_journey(raw_post)
def process_journey(item):
    """Normalize a WordPress journey post into an Elasticsearch document.

    Mutates *item* in place (drops comments/date/custom_fields, hoists the
    interesting custom fields to the top level) and returns the envelope
    ``{'_type': 'journey', '_id': slug, '_source': item}``.

    A post with a non-zero ``parent`` is a *step*; otherwise it is a *phase*.
    """
    del item['comments']
    del item['date']
    custom_fields = item['custom_fields']
    item['_id'] = item['slug']
    is_step = item['parent'] != 0
    item['has_parent'] = is_step
    if is_step:
        # Step: copy the narrative fields that are present and non-empty.
        for field in ('what_to_know', 'what_to_do_now', 'pitfalls_to_avoid'):
            if custom_fields.get(field):
                item[field] = custom_fields[field]
        key_tool = custom_fields.get('key_tool')
        if key_tool:
            # Either already a {'url', 'label'} mapping, or a [url, label] pair.
            if 'url' in key_tool or 'label' in key_tool:
                item['key_tool'] = key_tool
            else:
                item['key_tool'] = {'url': key_tool[0],
                                    'label': key_tool[1]}
        item['collapse_link'] = custom_fields.get('collapse_link', '')
    else:
        # Phase: assemble the tools and milestones lists.
        if 'tools' in custom_fields:
            item['tools'] = custom_fields['tools']
        else:
            tools = []
            for slot in range(3):
                tool = {}
                for field in ('description', 'link'):
                    field_name = 'tools_%s_%s' % (str(slot), field)
                    if field_name in custom_fields:
                        if field == 'link':
                            tool[field] = \
                                {'url': custom_fields[field_name][0],
                                 'label': custom_fields[field_name][1]}
                        else:
                            tool[field] = custom_fields[field_name]
                if tool:
                    tools.append(tool)
            item['tools'] = tools
        if 'milestones' in custom_fields:
            item['milestones'] = custom_fields['milestones']
        else:
            milestones = [custom_fields['milestones_%s' % slot]
                          for slot in range(3)
                          if 'milestones_%s' % slot in custom_fields]
            if milestones:
                item['milestones'] = milestones
    del item['custom_fields']
    return {'_type': 'journey',
            '_id': item['slug'],
            '_source': item}
| 2,992 |
py/acmacs_whocc/get_recent_merges.py
|
acorg/acmacs-whocc
| 0 |
2170340
|
import sys, subprocess, pprint, datetime
from pathlib import Path
import logging; module_logger = logging.getLogger(__name__)
# from . import error, utility
from acmacs_base import json, files
# ----------------------------------------------------------------------
# Virus type (c2 API vocabulary) -> short name used in chain file names.
sVirusTypeConvert = {
    'A(H1N1)': 'h1seas',
    'A(H1N1)2009PDM': 'h1pdm',
    'A(H3N2)': 'h3',
    'BYAMAGATA': 'b-yam',
    'BVICTORIA': 'b-vic',
}

# Virus type -> subtype key used by ssm-report (None: not handled there).
sVirusTypeConvert_ssm_report = {
    'A(H1N1)': None,
    'A(H1N1)2009PDM': 'h1',
    'A(H3N2)': 'h3',
    'BYAMAGATA': 'by',
    'BVICTORIA': 'bv',
}

# Assay name -> short assay tag used in file names.
sAssayConvert = {
    "HI": "hi",
    "FOCUS REDUCTION": "neut",
    "PLAQUE REDUCTION NEUTRALISATION": "neut",
    "MN": "neut",
}

# Short subtype -> canonical virus_types value passed to the c2 API.
sFixSubtype = {
    "H1": "A(H1N1)",
    "H3": "A(H3N2)",
    "BV": "BVic",
    "BY": "BYam",
}

# Minimum column basis -> normalized token used in chain file names.
sMinColBasisConvert = {
    "1280": "1280",
    "none": "none",
    None: "none",
}

# "LAB VIRUS_TYPE ASSAY" combinations downloaded by get_recent_merges;
# per the inline notes, the commented-out entries are produced by custom
# chains elsewhere.
sMergesToGet = set([
    # custom "CDC A(H1N1)2009PDM HI",
    "MELB A(H1N1)2009PDM HI",
    "NIID A(H1N1)2009PDM HI",
    # custom "NIMR A(H1N1)2009PDM HI",
    "CDC A(H3N2) FOCUS REDUCTION",
    # custom "MELB A(H3N2) FOCUS REDUCTION",
    "MELB A(H3N2) HI",  # then custom
    "NIID A(H3N2) FOCUS REDUCTION",
    "NIMR A(H3N2) HI",
    # NIMR Neut custom
    "CDC BV HI",
    "MELB BV HI",
    "NIID BV HI",
    # custom "NIMR BV HI",
    "CDC BY HI",
    "MELB BY HI",
    "NIID BY HI",
    "NIMR BY HI",
])
# ----------------------------------------------------------------------
def get_recent_merges(target_dir :Path, subtype=None, lab=None):
    """Download recent WHO CC merge charts into *target_dir*.

    With *subtype* given (h1/h3/bv/by, case-insensitive), queries the c2 API
    for recent merges, prints a summary table to stderr, and downloads each
    chart listed in sMergesToGet that is newer than the local copy.  With
    *subtype* omitted, recurses once per known subtype.
    """
    if subtype is not None:
        def vt(en):
            # Collapse B lineages to "BV"/"BY"; A subtypes pass through unchanged.
            if en["virus_type"] == "B":
                return f"""{en["virus_type"]}{en["lineage"][0]}"""
            else:
                return en["virus_type"]
        subtype = subtype.upper()
        response = api().command(C="ad_whocc_recent_merges", log=False, virus_types=sFixSubtype.get(subtype, subtype), labs=[lab.upper()] if lab else None)
        if "data" not in response:
            module_logger.error("No \"data\" in response of ad_whocc_recent_merges api command:\n{}".format(pprint.pformat(response)))
            raise RuntimeError("Unexpected result of ad_whocc_recent_merges c2 api command")
        response = response['data']
        # Sort for the human-readable summary: lab, then virus type, then assay.
        response.sort(key=lambda e: "{lab:4s} {virus_type:10s} {assay}".format(**e))
        # module_logger.info(f"\n{pprint.pformat(response)}")
        print("\n".join("{lab:4s} {vt:14s} {assay:31s} {minimum_column_basis} {chart_id} {mtime}".format(**e, vt=vt(e), mtime=datetime.datetime.fromisoformat(e["m"])) for e in response), file=sys.stderr)
        for entry in response:
            if f"""{entry["lab"]:4s} {vt(entry):14s} {entry["assay"]}""" in sMergesToGet:
                basename = f"{entry['lab'].lower()}-{subtype.lower()}-{sAssayConvert[entry['assay']].lower()}.chain-{sMinColBasisConvert[entry['minimum_column_basis']]}"
                filename = target_dir.joinpath(f"{basename}.ace")
                # NOTE(review): the server timestamp is shifted by +1h before
                # comparing with the local file's mtime — presumably to tolerate
                # clock/timezone skew; confirm.
                mtime = datetime.datetime.fromisoformat(entry["m"]) + datetime.timedelta(hours=1)
                if not filename.exists() or datetime.datetime.fromtimestamp(filename.stat().st_mtime) < mtime:
                    module_logger.info(f"downloading {entry['chart_id']} to (unknown)")
                    chart = api().command(C="chart_export", log=False, id=entry["chart_id"], format="ace", part="chart")["chart"]
                    # The chart payload arrives base64-encoded under the "##bin" key.
                    if isinstance(chart, dict) and "##bin" in chart:
                        files.backup_file(filename)
                        module_logger.info(f"writing (unknown)")
                        import base64
                        filename.open('wb').write(base64.b64decode(chart["##bin"].encode('ascii')))
                        # Maintain a stable, chain-agnostic symlink alias next to the file.
                        filename_link = target_dir.joinpath(f"{entry['lab'].lower()}-{subtype.lower()}-{sAssayConvert[entry['assay']].lower()}.ace")
                        if not filename_link.exists():
                            filename_link.symlink_to(filename.name)
    else:
        # No subtype given: fetch all four.
        for subtype in ["h1", "h3", "bv", "by"]:
            get_recent_merges(target_dir=target_dir, subtype=subtype)
# ----------------------------------------------------------------------
sAPI = None
def api():
    """Return the process-wide API singleton, creating it (and obtaining a
    session id via the aw-session helper) on first use."""
    global sAPI
    if sAPI is None:
        session_id = subprocess.check_output(["aw-session", "whocc-viewer"]).decode("utf-8").strip()
        sAPI = API(session=session_id)
    return sAPI
# ----------------------------------------------------------------------
class API:
    def __init__(self, session=None, user="whocc-viewer", password=None, url_prefix='https://acmacs-web.antigenic-cartography.org/'):
        """If host is None, execute command in this acmacs instance directly."""
        # Reuse an existing session id when given; otherwise perform the
        # nonce/digest login handshake as *user*.
        self.url_prefix = url_prefix
        self.session = session
        if not session and user:
            self._login(user, password)
    def _execute(self, command, print_response=False, log_error=True, raise_error=False):
        """Execute a command dict against the server and return the parsed response.

        Errors reported by the server (an 'E' key in the response) are logged
        and/or raised according to *log_error* / *raise_error*.
        """
        if self.url_prefix:
            if self.session:
                command.setdefault('S', self.session)
            response = self._execute_http(command)
        else:
            # Direct (in-process) execution is not wired up; the commented-out
            # code below sketches the intended local path.
            raise NotImplementedError()
            # ip_address = '127.0.0.1'
            # command.setdefault('I', ip_address)
            # command.setdefault('F', 'python')
            # if self.session:
            #     from ..mongodb_collections import mongodb_collections
            #     command.setdefault('S', mongodb_collections.sessions.find(session_id=self.session, ip_address=ip_address))
            # from .command import execute
            # response = execute(command)
            # if isinstance(response.output, str):
            #     response.output = json.loads(response.output)
        #module_logger.info(repr(response.output))
        if isinstance(response, dict) and response.get('E'):
            if log_error:
                module_logger.error(response['E'])
                for err in response['E']:
                    if err.get('traceback'):
                        module_logger.error(err['traceback'])
            if raise_error:
                # NOTE(review): CommandError is defined elsewhere in this module.
                raise CommandError(response['E'])
        elif print_response:
            if isinstance(response, dict) and response.get('help'):
                module_logger.info(response['help'])
            else:
                module_logger.info('{} {!r}'.format(type(response), response))
        return response
def _execute_http(self, command):
command['F'] = 'json'
module_logger.debug('_execute_http %r', command)
response = self._urlopen(url='{}/api'.format(self.url_prefix), data=json.dumps(command).encode('utf-8'))
return json.loads(response)
def _login(self, user, password):
import random
response = self._execute(command=dict(F='python', C='login_nonce', user=user), print_response=False)
if response.get('E'):
raise LoginFailed(response['E'])
# module_logger.debug('login_nonce user:{} nonce:{}'.format(user, response))
digest = self._hash_password(user=user, password=password)
cnonce = '{:X}'.format(random.randrange(0xFFFFFFFF))
password = self._hash_nonce_digest(nonce=response['nonce'], cnonce=cnonce, digest=digest)
response = self._execute(command=dict(F='python', C='login', user=user, cnonce=cnonce, password=password, application=sys.argv[0]), print_response=False)
module_logger.debug('response {}'.format(response))
if response.get('E'):
raise LoginFailed(response['E'])
self.session = response['S']
module_logger.info('--session={}'.format(self.session))
def command(self, C, print_response=False, log_error=True, raise_error=False, **args):
cmd = dict(C=C, log_error=log_error, **self._fix_args(args))
# try:
# getattr(self, '_fix_command_' + C)(cmd)
# except AttributeError:
# pass
return self._execute(cmd, print_response=print_response, log_error=log_error, raise_error=raise_error)
def download(self, id):
return self._urlopen(url='{}/api/?cache=1&session={}&id={}'.format(self.url_prefix, self.session, id))
def _urlopen(self, url, data=None):
import ssl, urllib.request
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
return urllib.request.urlopen(url=url, data=data, context=context).read()
def _hash_password(self, user, password):
import hashlib
m = hashlib.md5()
m.update(';'.join((user, 'acmacs-web', password)).encode('utf-8'))
return m.hexdigest()
def _hash_nonce_digest(self, nonce, cnonce, digest):
import hashlib
m = hashlib.md5()
m.update(';'.join((nonce, cnonce, digest)).encode('utf-8'))
return m.hexdigest()
def __getattr__(self, name):
if name[0] != '_':
return lambda **a: self.command(name, **a)
else:
raise AttributeError(name)
# def _fix_command_chart_new(self, cmd):
# if isinstance(cmd['chart'], str) and os.path.isfile(cmd['chart']): # read data from filename and encode it to make json serializable
# cmd['chart'] = json.BinaryData(open(cmd['chart'], 'rb').read())
def _fix_args(self, args):
for to_int in ('skip', 'max_results', 'size'):
if args.get(to_int) is not None:
args[to_int] = int(args[to_int])
return args
# ======================================================================
### Local Variables:
### eval: (if (fboundp 'eu-rename-buffer) (eu-rename-buffer))
### End:
| 9,761 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.