__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
13,743,895,385,402 |
720b1f953e41583a1a6dcb7707aeaeec84c3a0fc
|
414f0c090ac45c21f724f48930b89df8e1ed1c48
|
/examples/thread_server.py
|
09edfe74d6497e641f2f4630c89cd80e81441ea4
|
[] |
no_license
|
FrancisVarga/python-lust
|
https://github.com/FrancisVarga/python-lust
|
7f80a7b4034a43fe988332cb0739c69c097f98f9
|
75e4e5bd2dc2b7f83e3499d4e6d0e5b7f361a3f5
|
refs/heads/master
| 2021-01-16T19:49:50.684306 | 2013-03-25T05:26:14 | 2013-03-25T05:26:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import threading
import SocketServer
import sys
from lust import log, server
class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler):
    """Per-connection handler: log the peer, send a greeting, and close."""

    # Let the listening socket be rebound quickly after a restart.
    allow_reuse_address = True

    def handle(self):
        # client_address is an (ip, port) tuple, hence the two %r slots.
        log.info("Connection %r:%r" % self.client_address)
        self.request.sendall("HI!")
        self.request.close()
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    # ThreadingMixIn makes each accepted connection run in its own thread;
    # nothing is needed beyond the mixin composition.
    pass
class ThreadDaemon(server.Simple):
    """lust daemon serving the trivial TCP greeting on several ports."""

    def before_drop_privs(self, args):
        # Runs while still privileged, so low ports could be bound here.
        HOST = "0.0.0.0"
        if self.config:
            # Ports come from the config as a whitespace-separated list.
            ports = list(int(x) for x in
                         self.config['threadserver.ports'].split())
        else:
            # Fall back to the command-line arguments.
            ports = list(int(x) for x in args)
        log.debug("Ports %r" % ports)
        if not ports:
            log.error("You need to list some ports.")
            sys.exit(1)
        self.server = None # this gets the last one to do a forever on
        for PORT in ports:
            # Each server gets its own daemon thread running serve_forever.
            server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
            ip, port = server.server_address
            server_thread = threading.Thread(target=server.serve_forever)
            server_thread.daemon = True
            server_thread.start()
            self.server = server

    def start(self, args):
        # NOTE(review): the last server already has a daemon thread serving
        # it (started above); calling serve_forever again here keeps the
        # main thread alive — confirm double-serving it is intended.
        self.server.serve_forever()

    def shutdown(self, signal):
        log.info("Shutting down now signal: %d" % signal)
if __name__ == "__main__":
    # If you're on OSX then change this to whatever user and group exist
    # on your system; 'nobody'/'nogroup' are Linux defaults.
    # NOTE: rebinds the imported `server` module name; harmless at this point.
    server = ThreadDaemon("threadserver", uid='nobody', gid='nogroup')
    server.run(sys.argv)
|
UTF-8
|
Python
| false | false | 2,013 |
7,524,782,726,950 |
5318c74408a4723defd7e0201717713aa746b87a
|
a8e59b879fe084bf1de77f8bc6ff33cb0391416f
|
/Pandora/pandora drawing/for_pupnik/mathfuncs.py
|
993d47f4062454474e40d3b4a9c45fc96c1ce9d6
|
[] |
no_license
|
lukevp/txstateprojects
|
https://github.com/lukevp/txstateprojects
|
89d3f817d27cd4e761236fa347ffc1dd6daac5fe
|
fc7a2e9d84eb4619799073fd2f050f8be2037d36
|
refs/heads/master
| 2018-01-10T04:07:41.676491 | 2010-05-12T22:43:07 | 2010-05-12T22:43:07 | 36,741,763 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def valueRangeMap(val, origMin, origMax, newMin, newMax):
    """
    Will remap val from the original range into the new range.
    val : The value you are mapping.
    origMin : The original minimum value that val should be greater than.
    origMax : The original maximum value that val should be less than.
    newMin : The new minimum value that val will be mapped into.
    newMax : the new maximum value that val will be mapped into.
    return : float : The newly mapped value.
    """
    # Find the percentage val is between origMin and origMax:
    percentVal = float(val - origMin) / float(origMax - origMin)
    # Map into the new range: anchor at newMin and scale by the new span.
    # (The previous formula, (newMin + newMax) * percentVal, was wrong:
    # e.g. mapping 0 from [0, 10] into [10, 20] returned 0 instead of 10.)
    mappedVal = newMin + (newMax - newMin) * percentVal
    return mappedVal
def lerp(start, end, startpressure, endpressure):
    """Linearly interpolate integer points (and pressures) from start to end.

    start, end: (x, y) coordinate tuples.
    startpressure, endpressure: pressure values at the two endpoints.
    Returns a list of ((x, y), pressure) samples, one per unit step along
    the longer axis, beginning with (start, startpressure).
    """
    datapoints = [(start, startpressure)]
    # 4 situations, moving up and left, up and right, down and left, or down and right.
    startx, starty = start
    endx,endy = end
    ydist = endy - starty
    xdist = endx - startx
    x, y = startx, starty
    if ydist < 0:# y is decreasing
        yadd = -1
    else:
        yadd = 1
    if xdist < 0:
        xadd = -1
    else:
        xadd = 1
    if startpressure < endpressure:
        padd = 1
    else:
        padd = -1
    pressure = startpressure
    if abs(xdist) > abs(ydist): # we're gonna be lerp'ing the y's.
        # Step one unit at a time along x; y and pressure advance by
        # fractional increments and are truncated to int when recorded.
        try:
            yinc = abs(ydist/float(xdist))
        except:
            # division guard; xdist is nonzero in this branch, kept as-is
            yinc = 0
        try:
            pinc = abs((endpressure-startpressure)/float(xdist))
        except:
            pinc = 0
        for q in range(abs(xdist)):
            x += xadd
            y += yadd * yinc
            pressure += padd * pinc
            datapoints.append(((int(x),int(y)), int(pressure)))
    else: #we're gonna be lerp'ing on the x's.
        try:
            xinc = abs(xdist/float(ydist))
        except:
            # ydist == 0 when start == end; then no x motion either.
            xinc = 0
        try:
            pinc = abs((endpressure-startpressure)/float(ydist))
        except:
            pinc = 0
        for q in range(abs(ydist)):
            x += xadd * xinc
            y += yadd
            pressure += padd * pinc
            datapoints.append(((int(x),int(y)), int(pressure)))
    return datapoints
if __name__ == "__main__":
    # Smoke-test lerp in all four movement directions.  The original calls
    # passed only the two endpoints and raised TypeError (lerp requires
    # startpressure and endpressure); supply representative pressures, and
    # use print() calls so the demo runs under Python 2 and 3.
    print("Test a quadrant 1 lerp.")
    print(lerp((0, 0), (10, 10), 0, 100))  # quadrant 1
    print("test a quadrant 2 lerp.")
    print(lerp((0, 10), (10, 0), 0, 100))
    print("test a quadrant 3 lerp.")
    print(lerp((10, 10), (0, 0), 0, 100))
    print("test a quadrant 4 lerp.")
    print(lerp((10, 0), (0, 10), 0, 100))
    print(lerp((223, 232), (211, 240), 0, 100))
|
UTF-8
|
Python
| false | false | 2,010 |
4,372,276,735,265 |
94cff28e3dcd9883be5ab8c461f8d862e96a005e
|
0427a9b3e0554f0152c0ac88ffe1fd1702ba2b29
|
/jindo/utils.py
|
c819319736d704b2334a00cfd4fd860b72aaa1de
|
[
"BSD-3-Clause"
] |
permissive
|
cakebread/jindo
|
https://github.com/cakebread/jindo
|
d2a1dd24639b10ffc6e25765c782f4282acd28f5
|
42f72d648902b83e3403a1639b0e3f46d8e4ba1a
|
refs/heads/master
| 2021-01-16T17:46:14.901079 | 2011-10-15T05:02:26 | 2011-10-15T05:02:26 | 1,086,254 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
utils.py
===========
Misc funcitions
---------------
"""
__docformat__ = 'restructuredtext'
import os
def blue(my_string):
    """Wrap my_string in ANSI escape codes so terminals render it blue.

    TODO: Find some simple coloring function/lib
    """
    start_code = '\033[0;34m'
    reset_code = '\033[0m'
    return '%s%s%s' % (start_code, my_string, reset_code)
def get_rc_file():
    """
    Return the contents of ~/.jindorc (the config/data file location).

    If the file does not exist, print setup hints and return None.
    """
    rcpath = os.path.abspath("%s/.jindorc" % os.path.expanduser("~"))
    if os.path.exists(rcpath):
        # Use a context manager so the handle is closed deterministically
        # (the original leaked the open file object).
        with open(rcpath, 'r') as rcfile:
            return rcfile.read()
    # print() with a single argument behaves the same on Python 2 and 3.
    print("Create ~/.jindorc and put this in it: api_key=YOUR API KEY")
    print("You can find your (mt) API key here: https://ac.mediatemple.net/api/index.mt")
def sinsert(original, new, pos):
    '''Inserts new string inside original string at pos'''
    head, tail = original[:pos], original[pos:]
    return head + new + tail
def camel_to_human(text, ansi=True):
    """Insert a space before each uppercase letter, then capitalize().

    ansi: when True, wrap the result in blue ANSI codes via blue().
    Note: the for loop iterates over the ORIGINAL string while `text` is
    rebuilt with inserted spaces; `i` tracks the insert position in the
    growing copy (it advances twice when a space is inserted).  A leading
    uppercase letter therefore yields a leading space in the result.
    """
    i = 0
    for s in text:
        if s.isupper():
            text = sinsert(text, " ", i)
            i += 1
        i += 1
    # capitalize() uppercases the first character and lowercases the rest.
    text = text.capitalize()
    if ansi:
        text = blue(text)
    return text
|
UTF-8
|
Python
| false | false | 2,011 |
7,722,351,228,148 |
7f5bf34d289b8a8f4d728142e7fc7d9e76a4dd61
|
c8223a7e980f979cfcfaaf80efa7eaf7d44afeb2
|
/HomeParser.py
|
877c1440e5c4f857287e6d632a268070adb611c5
|
[] |
no_license
|
looyi/little-crawler
|
https://github.com/looyi/little-crawler
|
d838dc3816d4c5c9b6347ade913842d064f0b72f
|
5a9307619a15c3c4c7fff73559c4ab4816606366
|
refs/heads/master
| 2016-09-06T07:31:51.833119 | 2014-12-20T14:39:34 | 2014-12-20T14:39:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#coding:utf-8
from html.parser import HTMLParser
from PokemonParser import PokemonParser
import urllib.request
import sqlite3
class HomeParser(HTMLParser):
    """Crawl the 52poke wiki index page and store Pokemon data in SQLite.

    Counts 'mw-headline' section spans; for links seen inside a counted
    section, fetches the linked page with PokemonParser and inserts one
    row per link into pokemontable.
    """

    def __init__(self):
        HTMLParser.__init__(self)
        self.url = 'http://wiki.52poke.com'  # base URL for relative links
        self.spanNum = 0   # index of the current 'mw-headline' section
        self.linkNum = 0   # running link counter; doubles as primary key
        self.exact = 0     # 1 while inside an <a> tag we intend to fetch
        # Recreate the result table from scratch on every run.
        self.con = sqlite3.connect('pokemon.db')
        self.cur = self.con.cursor()
        sql = 'drop table if exists pokemontable'
        self.cur.execute(sql)
        sql = 'create table pokemontable(id integer primary key, timenum integer, name text, url text, properties text, character text, skill text)'
        self.cur.execute(sql)
        self.con.commit()
        self.insert = 'insert into pokemontable values(?, ?, ?, ?, ?, ?, ?)'

    def handle_starttag(self, tag, attrs):
        # Section headlines are <span class="mw-headline">; count them and
        # wrap the counter back to 0 after the sixth section.
        if tag == 'span' and attrs and attrs[0][1] == 'mw-headline':
            self.spanNum += 1
            if self.spanNum == 6:
                self.spanNum = 0
        # Links only qualify while inside a counted section.
        if self.spanNum > 0 and tag == 'a':
            self.linkNum += 1
            # assumes the tag's first attribute is href — TODO confirm
            self.link = self.url + attrs[0][1]
            self.exact = 1

    def handle_data(self, data):
        # data is the link text (the Pokemon name) when we are inside a
        # qualifying <a>; fetch the linked page and persist the details.
        if self.linkNum > 0 and self.exact == 1:
            pokemonParser = PokemonParser(self.linkNum, self.con, self.cur)
            html = urllib.request.urlopen(self.link)
            pokemonParser.feed(html.read().decode('utf-8'))
            self.cur.execute(self.insert, (self.linkNum, self.spanNum, data, self.link, pokemonParser.properties, pokemonParser.character, pokemonParser.skill))
            del pokemonParser
            print(self.linkNum)

    def handle_endtag(self, tag):
        if tag == 'a':
            self.exact = 0

    def __del__(self):
        # Flush and release the database when the parser is collected.
        self.con.commit()
        self.cur.close()
        self.con.close()
|
UTF-8
|
Python
| false | false | 2,014 |
16,295,105,963,694 |
9fed9d1d514dd0c4e48e88a988ca849d1c14c4d1
|
e6a899b559a92689c3de380c9bb88e016653d5d6
|
/dbp/test/test_core.py
|
54d04dda34f5b4575280288e9157c3b06733dea2
|
[] |
no_license
|
cs648/dissertation
|
https://github.com/cs648/dissertation
|
e911d536f3224a941142294c76ff7df7002f1331
|
d8aff774fe19844fe36d2f51c70bc965845e08aa
|
refs/heads/master
| 2021-05-16T02:30:26.933476 | 2013-05-17T14:18:41 | 2013-05-17T14:18:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from dbp.core import DBP
from dbp.manager import TXManager
from dbp.test import TestCase
from twisted.internet.task import Clock
class FakeNetwork(object):
    """Test double for the network layer: records txs instead of sending."""

    def __init__(self, parent=None):
        self.parent = parent
        # Every (tx_id, op) pair handed to distribute() is collected here.
        self.distributed_txs = list()

    def distribute(self, tx_id, op):
        entry = (tx_id, op)
        self.distributed_txs.append(entry)
class TestDBP(TestCase):
    """Tests for the DBP core, with the real network swapped for FakeNetwork."""

    def test_sync_db(self):
        # Queue transactions directly, then check sync_db applies them.
        p = DBP()
        self.addCleanup(p.manager.txn.sock.transport.stopConnecting)
        p.manager.txn = FakeNetwork()
        p._queue((1, "a = b"))
        # Queued but not yet synced: the db is still empty.
        self.assertEqual(p.db._db, {})
        p.sync_db()
        self.assertEqual(p.db._db, {'a': 'b'})
        p._queue((2, "b = a"))
        p._queue((3, "a = a"))
        self.assertEqual(p.db._db, {'a': 'b'})
        p.sync_db()
        self.assertEqual(p.db._db, {'a': 'a', 'b': 'a'})

    def test_process(self):
        # process applies a tx immediately; malformed input raises
        # ValueError and out-of-sequence tx ids raise AssertionError.
        p = DBP()
        self.addCleanup(p.manager.txn.sock.transport.stopConnecting)
        p.manager.txn = FakeNetwork()
        p.process(1, "a = b")
        self.assertEqual(p.db._db, {'a': 'b'})
        self.assertRaises(ValueError, p.process, 2, "foobar")
        self.assertRaises(AssertionError, p.process, 1, "a = b")
        self.assertRaises(AssertionError, p.process, 3, "a = b")

    def test_execute(self):
        # execute's Deferred fires (with None) once the fake clock advances.
        c = Clock()
        p = DBP()
        self.addCleanup(p.manager.txn.sock.transport.stopConnecting)
        p.manager = TXManager(txs=[(1, "a = b"), (2, "b = c"), (3, "a = c")], clock=c)
        self.addCleanup(p.manager.txn.sock.transport.stopConnecting)
        p.manager.txn = FakeNetwork()
        results = []
        def f(r):
            results.append(r)
        p.execute("b = a").addCallback(f)
        c.advance(1)
        self.assertEqual(results, [None])
        self.assertEqual(p.db._db, {"a": "c", "b": "a"})

    def test_execute2(self):
        # Two executes in sequence, then a final sync_db picks up the rest.
        p = DBP()
        c = Clock()
        self.addCleanup(p.manager.txn.sock.transport.stopConnecting)
        p.manager = TXManager(txs=[(1, "a = b"), (2, "b = c"), (3, "a = c")], clock=c)
        self.addCleanup(p.manager.txn.sock.transport.stopConnecting)
        p.manager.txn = FakeNetwork()
        p.execute("b = a")
        c.advance(1)
        p.execute("b = c")
        c.advance(1)
        p.sync_db()
        self.assertEqual(p.db._db, {"a": "c", "b": "c"})
|
UTF-8
|
Python
| false | false | 2,013 |
6,640,019,445,639 |
9150ad0419f1c79f5de2308b969dd225d8ede382
|
21a77ed3498e649ecc7446584edf46b62c361d59
|
/orange/controls/index_control.py
|
805c6eec7d1fa62ab180b6c4534c978984f6b16c
|
[] |
no_license
|
kejukeji/heart_counsel_py
|
https://github.com/kejukeji/heart_counsel_py
|
e45419d9b2baf3fe392d64c5596a45e96f96a280
|
3fa2dbdad43b0c12da6130d6e634e6c7003fd1f0
|
refs/heads/master
| 2021-01-13T02:11:00.869579 | 2013-11-28T08:08:15 | 2013-11-28T08:08:15 | 14,770,940 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: UTF-8
from flask import request, render_template
from orange.services.dingyue_service import *
def show_index():
    """Render the front page with the current dingyue (subscription) data."""
    index = get_dingyue()
    # Pass the fetched data into the template context; previously `index`
    # was computed but never handed to render_template, so the template
    # had no way to display it.
    return render_template('orange/index.html', index=index)
|
UTF-8
|
Python
| false | false | 2,013 |
25,769,829,214 |
1671b73ac6d1ba44b8ced47cb18f5f6dc435cbab
|
a4f542460ab088da6c8bb30f8677408a422475be
|
/sierrachartfeed.py
|
ca32e6404550f8627ed9d11c1fc8d610db1b825c
|
[] |
no_license
|
pentarh/sierrachartfeed
|
https://github.com/pentarh/sierrachartfeed
|
378699b0773f1bb0c86d785ad759ad4195e9dca4
|
edbb6842c89a86d15c944f442fc7dac2f95a9ddb
|
refs/heads/master
| 2021-01-15T16:42:18.957065 | 2013-04-23T15:49:03 | 2013-04-23T15:49:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
'''
Created on 14.04.2011
@author: slush
@licence: Public domain
@version 0.5
'''
from optparse import OptionParser
import datetime
import time
import os
import sys
import urllib2
import socket
try:
import simplejson as json
except ImportError:
import json
from scid import ScidFile, ScidRecord
BITCOINCHARTS_TRADES_URL = 'http://bitcoincharts.com/t/trades.csv'
BITCOINCHARTS_SOCKET = ('bitcoincharts.com', 8002)
def bitcoincharts_history(symbol, from_timestamp, volume_precision, log=False):
    """Yield one ScidRecord per historical trade for `symbol`.

    Downloads trades.csv from bitcoincharts starting at from_timestamp.
    Volume is stored as an integer scaled by 10**volume_precision.
    Lines that fail numeric parsing are reported and skipped.
    """
    url = '%s?start=%s&end=99999999999999&symbol=%s' % (BITCOINCHARTS_TRADES_URL, from_timestamp, symbol)
    #print url
    req = urllib2.Request(url)
    list1 = urllib2.urlopen(req).read().split('\n')
    # Sort the CSV lines (lexically) before emitting them.
    list1.sort()
    for line in list1:
        if not line:
            continue
        line = line.split(',')
        try:
            # CSV columns: timestamp, price, volume.
            timestamp, price, volume = int(line[0]), float(line[1]), int(float(line[2])*10**volume_precision)
            if log:
                print symbol, datetime.datetime.fromtimestamp(timestamp), timestamp, price, float(volume)/10**volume_precision
            # O/H/L/C are all the trade price; one trade per record.
            yield ScidRecord(datetime.datetime.fromtimestamp(timestamp), price, price, price, price, 1, volume, 0, 0)
        except ValueError:
            print line
            print "Corrupted data for symbol %s, skipping" % symbol
class ScidHandler(object):
    """Owns one .scid file for a symbol: loads it, backfills history,
    and appends live ticks."""

    def __init__(self, symbol, datadir, disable_history, volume_precision):
        self.symbol = symbol
        self.filename = os.path.join(datadir, "%s.scid" % symbol)
        self.volume_precision = volume_precision
        self.load()
        if not disable_history:
            try:
                self.download_historical()
            except Exception as e:
                # We don't want to continue; if we receive new data from live feed,
                # gap inside scid file won't be filled anymore, so we must wait
                # until historical feed is available again
                raise Exception("Historical download failed: %s, use -y to disable history" % str(e))

    def load(self):
        # Open an existing scid file or create a fresh one; either way the
        # file position ends up at EOF, ready for appending.
        print 'Loading data file', self.filename
        if os.path.exists(self.filename):
            self.scid = ScidFile()
            self.scid.load(self.filename)
        else:
            self.scid = ScidFile.create(self.filename)
        self.scid.seek(self.scid.length)

    def download_historical(self):
        # Resume one second after the newest stored record; an empty file
        # requests the full history (from_timestamp = 0).
        length = self.scid.length
        if not length:
            from_timestamp = 0
        else:
            self.scid.seek(self.scid.length-1)
            rec = ScidRecord.from_struct(self.scid.readOne())
            from_timestamp = int(time.mktime(rec.DateTime.timetuple())) + 1
        print 'Downloading historical data'
        self.scid.seek(self.scid.length)
        for rec in bitcoincharts_history(self.symbol, from_timestamp, self.volume_precision, True):
            self.scid.write(rec.to_struct())
        self.scid.fp.flush()

    def ticker_update(self, data):
        # data is a live-feed payload with 'price', 'volume', 'timestamp'.
        price = float(data['price'])
        volume = int(float(data['volume'])*10**self.volume_precision)
        date = datetime.datetime.fromtimestamp(float(data['timestamp']))
        print self.symbol, date, price, float(volume)/10**self.volume_precision
        # Datetime, Open, High, Low, Close, NumTrades, TotalVolume, BidVolume, AskVolume):
        try:
            rec = ScidRecord(date, price, price, price, price, 1, volume, 0, 0)
            self.scid.write(rec.to_struct())
            self.scid.fp.flush()
        except Exception as e:
            # Best-effort append: report and keep the feed alive.
            print str(e)
def linesplit(sock):
    """Generator yielding complete newline-terminated lines from a socket.

    Timeouts from sock.recv are swallowed (the caller sets settimeout) so
    the loop keeps polling; an empty read means the peer closed the
    connection and raises.  Partial data is buffered between reads.
    """
    buffer = ''
    while True:
        try:
            r = sock.recv(1024)
            if r == '':
                raise Exception("Socket failed")
            buffer = ''.join([buffer, r])
        except Exception as e:
            # Only the recv timeout is tolerated, matched by message text.
            if str(e) != 'timed out': # Yes, there's not a better way...
                raise
        while "\n" in buffer:
            (line, buffer) = buffer.split("\n", 1)
            yield line
class ScidLoader(dict):
    """Lazy cache of ScidHandler objects keyed by market symbol.

    Looking up an unknown symbol creates its handler on the fly and
    stores it for subsequent lookups.
    """

    def __init__(self, datadir, disable_history, volume_precision):
        # Start empty; handlers are created on demand in __getitem__.
        dict.__init__(self)
        self.datadir = datadir
        self.disable_history = disable_history
        self.volume_precision = volume_precision

    def __getitem__(self, symbol):
        if symbol not in self:
            # First request for this symbol: build and cache its handler.
            self[symbol] = ScidHandler(symbol, self.datadir,
                                       self.disable_history,
                                       self.volume_precision)
        return dict.__getitem__(self, symbol)
if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-d", "--datadir", dest="datadir", default='c:/SierraChart/data/',
                      help="Data directory of SierraChart software")
    parser.add_option("-y", "--disable-history", action='store_true', default=False,
                      help="Disable downloads from bitcoincharts.com")
    parser.add_option("-p", "--volume-precision", default=2, dest="precision", type="int",
                      help="Change decimal precision for market volume.")
    parser.add_option("-s", "--symbols", dest='symbols', default='mtgoxUSD,*',
                      help="Charts to watch, comma separated. Use * for streaming all markets.")
    (options, args) = parser.parse_args()
    if options.precision < 0 or options.precision > 8:
        print "Precision must be between 0 and 8"
        sys.exit()
    # Symbols to watch
    symbols = options.symbols.split(',')
    scids = ScidLoader(options.datadir, options.disable_history, options.precision)
    for s in symbols:
        # Touching scids[s] creates the handler up front (and triggers the
        # historical backfill) for every explicitly listed symbol.
        if s != '*':
            scids[s]
    # Reconnect loop: on any error, wait and reopen the streaming socket.
    while True:
        try:
            print "Opening streaming socket..."
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(1)
            s.connect(BITCOINCHARTS_SOCKET)
            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            s.send("{\"action\": \"subscribe\", \"channel\": \"tick\"}\n")
            for line in linesplit(s):
                rec = json.loads(line)
                if not rec['channel'].startswith('tick.'):
                    # Not a tick data
                    continue
                symbol = rec['channel'].rsplit('.')[1]
                if symbol not in symbols and '*' not in symbols:
                    # Filtering out symbols which user don't want to store
                    # If '*' is in symbols, don't filter anything
                    continue
                #print "%s: %s" % (symbol, rec['payload'])
                scids[symbol].ticker_update(rec['payload'])
        except KeyboardInterrupt:
            print "Ctrl+C detected..."
            break
        except Exception as e:
            print "%s, retrying..." % str(e)
            time.sleep(5)
            continue
        finally:
            print "Stopping streaming socket..."
            s.close()
    # Close every scid file cleanly before exiting.
    for scid in scids.values():
        scid.scid.close()
|
UTF-8
|
Python
| false | false | 2,013 |
15,994,458,252,261 |
2f5f78f4c6f5b7a33e10ce7301fb1071f22940b8
|
fc11d336c726e795cedc18c8a2cddcb28d8de279
|
/selftests/all/functional/avocado/journal_tests.py
|
772312120e875971f1ac2cd0a0423f4d4aebc051
|
[
"GPL-2.0-only"
] |
non_permissive
|
FengYang/avocado
|
https://github.com/FengYang/avocado
|
bed2734863e2cbe800a236b6d296c637d04294bd
|
d7ad156dd679d3f69b9088f9db2a2a2d181429cd
|
refs/heads/master
| 2020-12-14T09:59:02.220019 | 2014-10-21T12:36:59 | 2014-10-21T12:36:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2014
# Author: Ruda Moura <[email protected]>
import unittest
import os
import sys
import json
import sqlite3
# simple magic for using scripts within a source tree
basedir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..', '..')
basedir = os.path.abspath(basedir)
if os.path.isdir(os.path.join(basedir, 'avocado')):
sys.path.append(basedir)
from avocado.utils import process
class JournalPluginTests(unittest.TestCase):
    """Functional tests for avocado's --journal plugin: run one job, then
    inspect the .journal.sqlite file it produces."""

    def setUp(self):
        # Run sleeptest with journalling enabled and open the journal DB
        # located next to the job's debug log.
        os.chdir(basedir)
        self.cmd_line = './scripts/avocado run --json - --journal examples/tests/sleeptest.py'
        self.result = process.run(self.cmd_line, ignore_status=True)
        data = json.loads(self.result.stdout)
        self.job_id = data['job_id']
        jfile = os.path.join(os.path.dirname(data['debuglog']), '.journal.sqlite')
        self.db = sqlite3.connect(jfile)

    def test_journal_job_id(self):
        # The job id recorded in the journal must match the JSON output.
        self.assertEqual(self.result.exit_status, 0,
                         "Command '%s' did not return 0" % self.cmd_line)
        cur = self.db.cursor()
        cur.execute('SELECT unique_id FROM job_info;')
        db_job_id = cur.fetchone()[0]
        self.assertEqual(db_job_id, self.job_id,
                         "The job ids differs, expected %s got %s" % (self.job_id, db_job_id))

    def test_journal_count_entries(self):
        # Expect exactly two rows for the single test — presumably one per
        # state transition recorded by the journal; confirm against plugin.
        self.assertEqual(self.result.exit_status, 0,
                         "Command '%s' did not return 0" % self.cmd_line)
        cur = self.db.cursor()
        cur.execute('SELECT COUNT(*) FROM test_journal;')
        db_count = cur.fetchone()[0]
        count = 2
        self.assertEqual(db_count, count,
                         "The checkup count of test_journal is wrong, expected %d got %d" % (count, db_count))

    def tearDown(self):
        self.db.close()
if __name__ == '__main__':
    # Allow running this test module directly, not only via a test runner.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
13,769,665,168,097 |
2ff04c93b922bc0f9c55afca0ddf5a026ddfc8c7
|
7590586a5b851f678b3df6907fba856393fd5654
|
/root/trees/local/sql/_performers/change/own/password.py
|
eabfa6ee2670e68c0e29629fa120614a85d24c3f
|
[
"LicenseRef-scancode-generic-exception",
"GPL-1.0-or-later",
"GPL-2.0-or-later"
] |
non_permissive
|
redhog/Grimoire
|
https://github.com/redhog/Grimoire
|
93083cdd5040acc76d68e90c6687b48eeaaf77ed
|
c3dab34c0f5625012bbdd16daef0c0acf9207a49
|
refs/heads/master
| 2021-01-01T18:54:30.273201 | 2013-04-08T19:39:05 | 2013-04-08T19:39:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import Grimoire.Performer, Grimoire.Types, Grimoire.Utils, types
A = Grimoire.Types.AnnotatedValue
Ps = Grimoire.Types.ParamsType.derive
class Performer(Grimoire.Performer.Base):
    """Grimoire performer exposing a change-your-own-password method."""

    class password(Grimoire.Performer.Method):
        def _call(self, newpwd):
            # Resolve the calling user's SQL uid from the tree parameters.
            uid = self._callWithUnlockedTree(
                lambda: self._getpath(Grimoire.Types.TreeRoot).directory.get.parameters(
                    ['local', 'sql', 'user', 'id']))
            # Update that user's row in the users table with the new password.
            return self._callWithUnlockedTree(
                lambda: self._getpath(Grimoire.Types.MethodBase, 1,
                                      ['$sqlentry', '$sqlservername', 'users']
                                      )(uid, password=newpwd))

        def _params(self):
            # Declares the method signature and description for Grimoire.
            return Grimoire.Types.AnnotatedValue(
                Ps([('newpwd', A(Grimoire.Types.LoseNewPasswordType, 'New password to set'))]),
                'Change your own password')
|
UTF-8
|
Python
| false | false | 2,013 |
738,734,411,039 |
5c59605d3f7a82b4f8470ddab9a9081052643196
|
b9b9197e59a86519d17498896c6f9405e66beaaa
|
/units.py
|
59d2339d635712f0cf648989155707d65b2045df
|
[] |
no_license
|
derektumolo/Apoc
|
https://github.com/derektumolo/Apoc
|
b874a004032d85342c5cf8e5eac86200524b42a9
|
9734c760fa2c86aa7ba8949c01b31f674bcd547b
|
refs/heads/master
| 2020-12-25T19:03:29.396745 | 2012-03-13T22:57:47 | 2012-03-13T22:57:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame
import spritesheet
class UnitGroup(pygame.sprite.Group):
    """Sprite group for Unit instances."""
    # sort of like a unit factory?
    def __init__(self):
        pygame.sprite.Group.__init__(self)
class Unit(pygame.sprite.Sprite):
    """A selectable, movable sprite positioned on a 32x32-pixel tile grid."""
    # basically, a sprite. Anything that is mobile.
    # type - this will be where we get the image path from, and some defaults for the init fctn
    # APAvail
    # MaxAP
    # current orders? I think for now, the order is executed as the player requests it
    # so no rollback, but all that is done at EOT is to reset action points, and run the NPC orders
    # hp?
    # skills = skills() - i think we'll have a different class for the skills? maybe a map/hash
    # then we can do things like Bob.skills.salvage++
    # stats - like skills above, there are several of these attributes, which can be adjusted up or down
    # equipment - just a resource, attached to a particular body part.
    # inventory
    # contamination?
    # loyalty?
    def __init__(self, x=0, y=0, name="Smiley"):
        # x/y are tile coordinates; self.rect is in pixels (32px per tile).
        pygame.sprite.Sprite.__init__(self)
        ss = spritesheet.spritesheet('img/asprite.bmp')
        self.image = ss.image_at((0,0,32,32), -1)
        screen = pygame.display.get_surface()
        self.area = screen.get_rect()
        # self.speed = 10
        self.owner = []
        self.x = x
        self.y = y
        self.name = name
        self.rect = pygame.Rect(x*32,y*32,32,32)
        self.selected = False
        # self.highlight = []
    def move(self,x,y):
        # Only a selected unit may move; x/y are tile coordinates.
        if self.selected:
            self.rect = pygame.Rect(x*32, y*32, 32,32)
            self.x = x
            self.y = y
            # self.highlight.move(x,y) # move the highlight with the unit
            #later, we will check distances and things
        else:
            print "ERROR - moving unselected unit."
            # TODO - make this real error handling
    # not sure if I will actually need these
    def select(self):
        self.selected = True
        # self.highlight = Highlight(self.x, self.y)
    def deselect(self):
        self.selected = False
        # self.highlight = []
    def update(self):
        pygame.event.pump()
    def draw(self, surface):
        print "drawing units"
        # if self.selected:
        # self.highlight.draw(surface)
|
UTF-8
|
Python
| false | false | 2,012 |
5,815,385,751,458 |
67aefa0f7cc8e34ed4c3624c286043110f2d5e4b
|
80bb5d89914abc8e5635b3b000fa8cea18bd0a3e
|
/python/program_refinement.py
|
5c62b14a4191acb0b93a7724c5a5db9cafada24e
|
[] |
no_license
|
fritzo/sandbox
|
https://github.com/fritzo/sandbox
|
4ccb7ee5464a5b9e399c3d759615fd236800a9e4
|
546f78577dac55b26fc6cf5e28c25aecc0d17f7e
|
refs/heads/master
| 2020-05-04T14:55:43.818720 | 2012-08-22T18:17:48 | 2012-08-22T18:17:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def argmin(items, objFun):
    """Return the item of items minimizing objFun(item), or None if empty.

    Ties are broken in favour of the earliest item (strict < comparison).
    """
    # The original used the bare name `inf`, which is never defined in
    # this module and raised NameError on the first call.
    minVal = float('inf')
    minItem = None
    for item in items:
        val = objFun(item)
        if val < minVal:
            minVal = val
            minItem = item
    return minItem
class Lattice:
    # NOTE(review): design pseudocode — the `???` bodies are unimplemented
    # placeholders, so this class (and file) does not parse as real Python.
    def __init__(self): ???
    def insert(self, item): ???
    def remove(self, item): ???
    def merge(self, dep, rep): ???
    #algebraic operations
    def bot(self): ???
    def top(self): ???
    def getInterval(self, LB, UB): ???
    def getLowerSet(self, UB): ???
    def getUpperSet(self, LB): ???
class Type:
    # NOTE(review): design pseudocode; methods below are mostly stubs.
    lattice = Lattice()
    def __init__(self, down, up, ):
        #order structure
        assert isinstance(down, set), "down must be a set"
        # NOTE(review): message says "down" but this checks `up` — likely a
        # copy-paste slip in the assertion text.
        assert isinstance(up, set), "down must be a set"
        self.down = down
        self.up = up
        #applicative structure
        # NOTE(review): self.LRA is assigned three times; presumably the
        # last two were meant to be other tables (e.g. RLA variants).
        self.LRA = {}
        self.RLA = {}
        self.LRA = {}
        self.LRA = {}
    #structural operations
    def apply(self,arg):
        "constructs an app type; possibly fails; notation: F{X}"
    def abstract(self,arg):
        "constructs an abstraction type, possibly empty (failing); notation: DR"
    def mapsto(self,range):
        "the traditional dom->rng notation"
        # NOTE(review): `arg` is undefined here; presumably `range` was meant.
        return arg.abstract(self)
    def equateWith(self,other):
        "merging operation; allows recursion"
        ???
    #approximate operations
    def __and__(self,other):
        "intersection operation, returns a minimal set"
        """This is the trick: finding a reasonable set of types.
        The set of types is an order-theoretic lattice, but the algebraic
        meet operation is not representable, and in a finite approximation
        must thus be approximated.
        """
class Estimator:
    # NOTE(review): design pseudocode — `???` placeholders make this class
    # non-executable; comments record apparent intent only.
    def __init__(self, numTypes):
        self.numTypes = numTypes
        #define type structure
        self.Bot = ???
        self.Top = ???
        #define basic types
        self.B = ??? #booleans, two inhabitants
        self.S = ??? #(o)stream type, i.e., infinite binary sequences
        self.T = ??? #true, a singleton type
        self.F = ??? #false, a singleton type
        #initialize state
        # NOTE(review): bare `S` here; presumably self.S was intended.
        self.X = S
    def advance(self, output):
        "advances estimator by one bit"
        assert output <= self.B, "invalid output type"
        self.X &= self.F.mapsto(output)
        # NOTE(review): self.state is never defined; likely self.X.
        self.X = self.state.applyto(self.T)
    def estimate(self):
        "finds maximum likelihood estimate at current time"
        support = self.__getLatticeInterval(self.Bot, self.X)
        # NOTE(review): inverted comprehension syntax; likely
        # [type for type in support if type.isSingleton()].
        points = [type in support if type.isSingleton()]
        result = argmin(points, (lambda p: p.getCompMeas()))
        return result
    def compress(self):
        "conservatively compresses current state"
        support = self.__getLatticeInterval(self.X, self.S)
        objFun = ???
        self.X = argmin(support, objFun)
    def reshape(self):
        "reshapes type enumeration structure based on current data"
    #========== internal functions ==========
    def __createRandomApp(self):
        # Draw random (lhs, rhs) pairs until an unseen combination appears.
        while True:
            lhs = random_choice(self.types)
            rhs = random_choice(self.types)
            if (lhs,rhs) not in self.appTable: break
        app = self.__makeApp(lhs,rhs)
    def __makeApp(self, lhs, rhs):
        app = Type()
        self.appTable[lhs,rhs] = app
        for iter in lhs.LRA():
            r2 = iter.rhs
            raise LATER
    def __getLatticeInterval(self, LB, UB): ???
"""
(N1) what if there is no compression;
what if 'meet' is included as an algebraic operation?
Then a string of words supporting X will be created,
and rather than compress, one can simple erase part of the string,
keeping only partial information about X.
Information can be retained based on its relevance.
What _will_ be known of X is:
* its order position WRT other points,
* its app position WRT other points
(N1) The new update step would be
"""
def advance(self,output):
self.X.applyTo(self.F).equateTo(output)
self.X = self.X.applyTo(T)
"""
and the following rules axiomatize app-order interaction:
t1 < t2 <==> (s t1) < (s t2) #covariant
s1 < s2 <==> (s1 t) > (s2 t) #contravariant
(N2) A more general database might have the following operations:
"""
class InferenceSystem:
def buildApp(self,lhs,rhs): ...
def assumeEquiv(self,lhs,rhs,app): ...
def estimate(self,type): ...
def reshape(self): ...
def getPMF(self):
"returns pmf over type's inhabitants, modulo some theory"
"""
together with some relevance calculation tools based on the current
working terms.
This system could transcend the simple applications of dynamic
estimation & control, and support an entire style of programming.
In control applications, one specifies an objective function phi(x,y), and
tries to minimize the expected value bar{phi}(y) = E_x phi(x,y).
Estimation is thus the special case of phi(x,y) = delta(x,y) (dirac).
The general system supports the following programming constructs
* data (X,Y) is in the form of types
* define constants, e.g., B, S, T, F
* define the state X (possibly in terms of its previous value)
* assume equations (or inequalities <=, >=)
* define the response Y (")
* define the joint likelihood phi(x,y) (the objective function)
More generally, one could simply define a prior likelihood function
phi(x) and let X represent a (world,controller,response) triple.
(Q1) what form should the likelihood function take?
(N1) The control should be a "corrective intervention" whose complexity
is to be minimized. (this idea was mentioned months-years ago).
(A1)
"""
|
UTF-8
|
Python
| false | false | 2,012 |
2,010,044,722,677 |
bbaccc936a522ee0ee1c4280d23ca39636864860
|
5703c743ded04c199a5fcda93fc6f0d689d1d20a
|
/phase4/st_test.py
|
4ad5572511906140596b67c66f1c72bb31928537
|
[] |
no_license
|
213374school/thesis
|
https://github.com/213374school/thesis
|
af5b5b48850dd5882469f75541c09cc876349080
|
0d3bdbc9631b7fb0fb9fa8b51ad31abaa917210d
|
refs/heads/master
| 2021-05-26T13:44:01.319375 | 2012-09-15T14:45:27 | 2012-09-15T14:45:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import numpy as np
import numpy
import pylab
import random
import json
import matplotlib
import math
exp = np.exp
plt = matplotlib.pyplot
def smoothTriangle(lst=[], degree=1):
    """Smooth `lst` with a triangular moving-average window.

    The window weights are [1, 2, ..., degree+1, ..., 2, 1]. Near the
    edges the window is clipped to the available neighbours and the sum
    is renormalised, so the output has the same length as the input.

    Args:
        lst: sequence of numbers to smooth.
        degree: half-width of the triangular window; must be >= 1.

    Returns:
        list of floats, same length as `lst`.

    Raises:
        ValueError: if degree < 1. (The original printed a message and
        returned None; raising is explicit and the message now matches
        the actual check, which allows degree == 1.)
    """
    if degree < 1:
        raise ValueError('degree must be >= 1')
    # list(range(...)) keeps this valid on both Python 2 and 3;
    # the original concatenated bare range() results (Python 2 only).
    triangle = numpy.array(list(range(degree)) + [degree] + list(range(degree))[::-1]) + 1
    lst = numpy.array(lst)
    lst_length = len(lst)
    _max = lst_length - degree
    window_total = float(sum(triangle))
    smoothed = []
    for i in range(lst_length):
        if degree < i < _max:
            # Interior point: the full window fits.
            value = sum(triangle * lst[i - degree:i + degree + 1]) / window_total
        else:
            # Edge point: clip the window and renormalise by its weight.
            left = degree - min(i, degree)
            right = degree + min(degree, lst_length - 1 - i) + 1
            window = triangle[left:right]
            clipped_total = sum(window)
            value = 0.0
            for j in range(len(window)):
                value += window[j] * lst[j + i + left - degree]
            value /= clipped_total
        smoothed.append(value)
    return smoothed
def main():
    # Plot ln(x) and exp(x) (normalised to [0,1]) and their differences
    # from y=x in a 2x2 grid, then show the figure interactively.
    #
    # NOTE: a fully commented-out triangle-smoothing demo (four subplots,
    # saved as 'triangle_smooth.png') occupied ~45 lines here in the
    # original; condensed to this note for readability.
    xs = np.linspace(0,1,250)
    # NOTE(review): xs[0] == 0, so np.log emits -inf (and a runtime
    # warning) for the first sample before normalisation.
    ys = np.log(xs) + 10
    def normalize(a, factor=255.0):
        # Rescale `a` linearly into [0, factor].
        # convert to floating point
        if not type(a).__name__ == 'numpy.ndarray':
            # NOTE(review): type(x).__name__ for an ndarray is 'ndarray',
            # never 'numpy.ndarray', so this branch is taken for EVERY
            # input. It still works because np.array() accepts ndarrays.
            # a is not a numpy array
            b = np.float64(np.array(a)).copy()
        else:
            b = np.float64(a.copy())
        bmin = np.min(b)
        if bmin > 0.0:
            # no point in substracting 0
            b -= np.min(b)
        bmax = np.max(b)
        if bmax > 0.0:
            # to avoid dividing by zero
            b *= factor / bmax
        else:
            return a.copy()
        # convert back to integer
        return b # np.int64(b)
    ys = normalize(ys, 1.0)
    pylab.figure()
    pylab.suptitle('TITEL', fontsize=16)
    # Top-left: normalised natural log.
    pylab.subplot(2,2,1, title='ln(x)')
    pylab.plot(xs,ys,".k")
    pylab.plot(xs,ys,"-k")
    pylab.axis()
    pylab.grid(True)
    # Top-right: deviation of the normalised log from the identity line.
    ys = ys - xs
    pylab.subplot(2,2,2, title='ln(x)-x')
    pylab.plot(xs,ys,".k")
    pylab.plot(xs,ys,"-k")
    pylab.axis()
    pylab.grid(True)
    # Bottom-left: normalised exponential.
    ys = np.exp(xs)
    ys = normalize(ys, 1.0)
    pylab.subplot(2,2,3, title='exp(x)')
    pylab.plot(xs,ys,".k")
    pylab.plot(xs,ys,"-k")
    pylab.axis()
    pylab.grid(True)
    # Bottom-right: deviation of the normalised exponential from identity.
    ys = ys - xs
    pylab.subplot(2,2,4, title='exp(x)-x')
    pylab.plot(xs,ys,".k")
    pylab.plot(xs,ys,"-k")
    pylab.axis()
    pylab.grid(True)
    pylab.show()
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
8,993,661,531,577 |
18e431d87049b043baf7f91a200de5592883b1a8
|
300b0a873bcc398b6da87b97e4be0b09c19a8408
|
/models/sql_models.py
|
f40bbd8619c64bff41b5313050073e054c1d4336
|
[] |
no_license
|
sibblegp/fastlane
|
https://github.com/sibblegp/fastlane
|
ed87ce4350223e8514442d03c766c545b9c11bb3
|
e4299dd3548d7d013654f0163d9a8aea117d554e
|
refs/heads/master
| 2021-01-21T00:43:40.041780 | 2014-11-04T21:25:30 | 2014-11-04T21:25:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'gsibble'
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, backref, scoped_session
from sqlalchemy import Column, Integer, String, DECIMAL, ForeignKey, DateTime
from sqlalchemy import create_engine
from config import settings
import datetime
ENGINE = create_engine(settings.SQL_URI)
SESSION_MAKER = sessionmaker()
SESSION_MAKER.configure(bind=ENGINE)
BASE = declarative_base()
SESSION = SESSION_MAKER(autocommit=False,
autoflush=False,
bind=ENGINE)
def add(objects):
    """Stage one object, or every object in a list, on the shared session.

    Nothing is committed here; call commit() to flush.
    """
    pending = objects if type(objects) == list else [objects]
    for item in pending:
        SESSION.add(item)
def commit():
    # Flush all changes staged on the shared module-level session.
    SESSION.commit()
class User(BASE):
    """An end user, addressable for push notifications via an Urban
    Airship alias."""
    __tablename__ = 'users'
    #Columns
    id = Column(Integer, primary_key=True)
    urban_airship_alias = Column(String(50), nullable=False)
    first_name = Column(String(30))
    last_name = Column(String(30))
    @classmethod
    def get(cls, user_id):
        # Primary-key lookup through the module-level session.
        return SESSION.query(cls).get(user_id)
class Device(BASE):
    """A user's registered device (push-notification channel)."""
    __tablename__ = 'devices'
    #Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False)
    urban_airship_channel = Column(String(50), nullable=False)
    #Foreign Keys
    user_id = Column(Integer, ForeignKey('users.id'))
    #Relationships
    # Reverse accessor: user.devices
    user = relationship('User', backref=backref('devices'))
class Merchant(BASE):
    """A merchant that records transactions for users to claim."""
    __tablename__ = 'merchants'
    #Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False)
    @classmethod
    def get(cls, merchant_id):
        # Primary-key lookup through the module-level session.
        return SESSION.query(cls).get(merchant_id)
class Transaction(BASE):
    """A merchant-recorded purchase that a user can later claim."""
    __tablename__ = 'transactions'
    #Columns
    id = Column(Integer, primary_key=True)
    hash = Column(String(50))
    merchant_transaction_id = Column(String(50), nullable=False)
    amount = Column(DECIMAL, nullable=False)
    # Fix: pass the callable, not its result. The original used
    # utcnow() — evaluated once at import time — so every row got the
    # module-load timestamp instead of its insert time.
    created = Column(DateTime, default=datetime.datetime.utcnow)
    completed = Column(DateTime)
    #Foreign Keys
    merchant_id = Column(Integer, ForeignKey('merchants.id'), nullable=False)
    user_id = Column(Integer, ForeignKey('users.id'), nullable=True)
    #Relationships
    merchant = relationship('Merchant', backref=backref('transactions', order_by=created))
    user = relationship('User', backref=backref('transactions', order_by=created))
    @classmethod
    def create(cls, merchant, amount, merchant_transaction_id):
        """Insert and commit a new transaction for `merchant`."""
        new_transaction = cls(amount=amount,
                              merchant=merchant,
                              merchant_transaction_id=merchant_transaction_id)
        add(new_transaction)
        commit()
        return new_transaction
    @classmethod
    def get(cls, transaction_id):
        # Primary-key lookup through the module-level session.
        return SESSION.query(cls).get(transaction_id)
    def claim_for_user(self, user):
        """Attach this transaction to `user` and commit immediately."""
        self.user = user
        add(self)
        commit()
def reset_db():
    """Drop and recreate every table, then seed a demo user and merchant."""
    session = SESSION_MAKER()
    BASE.metadata.drop_all(ENGINE)
    BASE.metadata.create_all(ENGINE)
    # Seed data so a fresh database is immediately usable for testing.
    demo_user = User()
    demo_user.first_name = 'George'
    demo_user.last_name = 'Sibble'
    demo_user.urban_airship_alias = 'georgesibble'
    demo_merchant = Merchant()
    demo_merchant.name = 'Test Merchant'
    session.add(demo_user)
    session.add(demo_merchant)
    session.commit()
    session.close()
|
UTF-8
|
Python
| false | false | 2,014 |
17,763,984,743,916 |
57477bdbb531a15ac28d5ceb677e1157b2b793d0
|
b1a8abd067d6423de3eab74d42fea6a996db7283
|
/blog/models.py
|
9aea821fe0ae483b391cbe67704c892de00fc2a0
|
[] |
no_license
|
Fiakumah-Kofi/django-blog-project
|
https://github.com/Fiakumah-Kofi/django-blog-project
|
5178f213c1412ec0c77d46e6f7378182ea682e95
|
6ed2629ee8640edbb2a08a4a8ab0209c9d911d36
|
refs/heads/master
| 2021-01-17T22:51:21.727398 | 2012-07-23T14:23:29 | 2012-07-23T14:23:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.contrib import admin
# Create your models here.
class Post(models.Model):
    # A single blog entry.
    title = models.CharField(max_length =60) # short headline (60 characters)
    body = models.TextField() # full article text (large text)
    created = models.DateField (auto_now_add = True) # set once on insert (date created)
    updated = models.DateField (auto_now = True) # refreshed on every save (date updated)
    def __unicode__(self):
        return self.title
    @models.permalink
    def get_absolute_url(self):
        # Resolves the 'post_detail' URL pattern; comments shown by default.
        return ('post_detail',(),{'id':self.id,'showComments':'true/'})
class Comment(models.Model):
    """A reader comment attached to a Post."""
    body = models.TextField()  # comment text (large text)
    author = models.CharField(max_length=60)  # (60 characters)
    created = models.DateField(auto_now_add=True)  # set once on insert
    updated = models.DateField(auto_now=True)  # refreshed on every save
    # Fix: related_name was 'message posted', which contains a space and is
    # not a valid Python identifier, so the reverse accessor on Post was
    # unusable (and fails Django's model checks). 'comments' gives the
    # conventional post.comments accessor.
    post = models.ForeignKey(Post, related_name='comments')
    def body_60(self):
        # Truncated body used as a column in the admin change list.
        return self.body[:60]
    def __unicode__(self):
        return self.author
class CommentInline(admin.TabularInline):
    # Allows comments to be edited inline on their parent post's admin page.
    model=Comment
class PostAdmin(admin.ModelAdmin):
    # Admin options for Post: list columns, full-text search fields,
    # creation-date filter, and inline comment editing.
    list_display =('title','created','updated')
    search_fields=('title','body')
    list_filter=('created',)
    inlines = [CommentInline]
class CommentAdmin(admin.ModelAdmin):
    # Admin options for Comment; body_60 shows a truncated preview column.
    list_display = ('post','author','body_60','created','updated')
    list_filter =('created','updated')
# Expose both models in the Django admin with their customised options.
admin.site.register(Post,PostAdmin)
admin.site.register(Comment,CommentAdmin)
|
UTF-8
|
Python
| false | false | 2,012 |
16,045,997,863,130 |
779d9f58903a3d497854d1d4fc1bcec29625152a
|
e82e932ed716e09462b57fcf3f83ca85b8843dc9
|
/heranca_multipla.py
|
8ca7a0fa7a2beace6f4e43f3fcba284d1bc46fca
|
[] |
no_license
|
guicarvalho/Python-Fundamental-Tempo-Real-Eventos
|
https://github.com/guicarvalho/Python-Fundamental-Tempo-Real-Eventos
|
2b71802cb557d35b0c74ef54aeb821122c0d0f5d
|
5a213f01c85c0fa6cf1d1acf063c87d89ab7292b
|
refs/heads/master
| 2020-04-24T18:09:49.249912 | 2014-09-26T02:50:24 | 2014-09-26T02:50:24 | 23,373,285 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
"""
Criando herança múltipla em Python.
"""
class Ave(object):
    # "Bird" base class (Python 2 code).
    # Class-private attribute: name mangling stores it as _Ave__name.
    __name = 'My Ave'
    def voar(self):
        # "Fly". Trailing comma suppresses the newline (py2 print) so
        # Fogo.pegar_fogo() can continue the sentence on the same line.
        print 'Voando, ',
    @property
    def name(self):
        return self.__name
class Fogo(object):
    # "Fire" base class (Python 2 code).
    # Class-private attribute: name mangling stores it as _Fogo__name.
    __name = 'My Fogo'
    def pegar_fogo(self):
        # "Catch fire".
        print 'em chamas!'
    @property
    def name(self):
        return self.__name
class Fenix(Ave, Fogo):
def __init__(self):
super(Ave, self).__init__()
super(Fogo, self).__init__()
def voar(self):
print u'Voando em chamas like a Fênix!'
@property
def name(self):
return self.__name
# Demo: the phoenix overrides flying and inherits catching fire.
f = Fenix()
f.voar()
f.pegar_fogo()
print f.name
|
UTF-8
|
Python
| false | false | 2,014 |
10,814,727,691,172 |
87ffd6a3865b1c5ed354478b3d975cf3a1b23245
|
9db05afab6221b9880b04570e8482f3db7b07ece
|
/desktop/opera_variables.gypi
|
f0c249e2abfff5626fef29253e0047e3e2de5a9a
|
[
"BSD-2-Clause"
] |
permissive
|
duminda111/opera
|
https://github.com/duminda111/opera
|
675b0bf72b14ed8dd409caf077ac1d98512a6194
|
d1970c444b229d793fdcd7f30960a7640de7e090
|
refs/heads/master
| 2021-01-18T07:26:01.058340 | 2013-09-27T13:34:06 | 2013-09-27T13:34:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- Mode: c++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
#
# Copyright (C) 2013 Opera Software ASA. All rights reserved.
#
# This file is an original work developed by Opera Software ASA
{
'variables': {
'opera_desktop_src_dir': '.',
'conditions': [
['OS=="mac"', {
'apply_locales_cmd': ['python', '<(DEPTH)/build/apply_locales.py'],
'mac_app_icon_name': '<!(python common/mkversion.py --mac-app-icon-name)',
'mac_bundle_id': '<!(python common/mkversion.py --mac-bundle-identifier)',
'mac_dylib_version': '<!(python common/mkversion.py --mac-dylib-version)',
'mac_full_version': '<!(python common/mkversion.py --mac-full-version)',
'mac_short_version': '<!(python common/mkversion.py --mac-short-version)',
'mac_version': '<!(python common/mkversion.py --mac-version)',
'opera_plist_file': 'mac/resources/app-Info.plist',
'opera_framework_plist_file': 'mac/resources/framework-Info.plist',
}], # OS=="mac"
],
},
}
|
UTF-8
|
Python
| false | false | 2,013 |
14,044,543,105,115 |
f818b470f9c62c3bc4a2ec4b2be6c46242edee88
|
f6cf439439e586ccb172e32eaf13ee5b0f13cfed
|
/helper_functions.py
|
6a635fe406ded45d5298b09e8d279351b3051778
|
[
"MIT"
] |
permissive
|
rishabhsixfeet/Dock-
|
https://github.com/rishabhsixfeet/Dock-
|
1630c6e79cb77eafcae15552230b5c130084b78c
|
682aae9b5199fdbfea74064f5ee4d35543b2d31d
|
refs/heads/master
| 2021-01-20T00:57:18.203632 | 2014-11-24T16:49:39 | 2014-11-24T16:49:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#General helper functions
from django.contrib import auth
from django.utils import simplejson
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
import os
import StringIO
from PIL import Image
import hashlib
import django
import re
import datetime
from boto.s3.connection import S3Connection
#Define AWS keys here
# SECURITY(review): live-looking AWS credentials are hard-coded and
# committed to source control. They should be rotated immediately and
# loaded from the environment or a secrets store instead.
AWS_ACCESS_KEY_ID = 'AKIAIEX5CZIM6WWDY67Q'
AWS_SECRET_ACCESS_KEY = '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'
def get_request_dict(request, type_requested):
    """Extract the posted payload from `request`.

    Returns None for GET requests; for 'rest' clients the raw JSON body
    is parsed, otherwise the form-encoded POST dict is returned.
    """
    if request.method == 'GET':
        return None
    if type_requested == 'rest':
        return simplejson.loads(request.raw_post_data)
    return request.POST
def dict_to_json_response(response):
    """Serialize a dictionary to JSON and wrap it in an HttpResponse."""
    payload = simplejson.dumps(response)
    return HttpResponse(payload, mimetype='application/json')
def login_required_view(request, type_requested, method, template, redirect=None):
    """Generic view that enforces authentication before delegating.

    Calls `method(profile, request_dict)` when the user is logged in,
    then renders the response according to `type_requested`:
    'rest' -> JSON; 'desktop' -> redirect, given template, or base.html.
    """
    request_dict = get_request_dict(request, type_requested)
    #Make sure user is logged in
    response = None
    if not request.user.is_authenticated():
        response = { 'success' : 0, 'error' : 'You are not logged in.' }
    if not response:
        response = method(request.user.get_profile(), request_dict)
    #Switch rendering based on type
    if type_requested == 'rest':
        return dict_to_json_response(response)
    if type_requested == 'desktop':
        if redirect is not None:
            return HttpResponseRedirect(redirect)
        elif template is not None:
            # Fix: keyword was misspelled 'conext_instance', which made
            # render_to_response raise TypeError on every desktop render.
            return render_to_response(template, response, context_instance=RequestContext(request))
        else:
            return render_to_response('base.html', response, context_instance=RequestContext(request))
def my_strftime(date_time):
    """Format a datetime uniformly; empty string for None."""
    if date_time is None:
        return ''
    return date_time.strftime("%A, %d %B %Y %I:%M%p")
def inverse_my_strftime(date_string):
    """Parse a string produced by my_strftime back into a datetime.

    Returns None when the string does not match the expected format.
    """
    fmt = "%A, %d %B %Y %I:%M%p"
    try:
        return datetime.datetime.strptime(date_string, fmt)
    except ValueError:
        return None
def relative_timestamp(date):
    """Human-readable age of `date` relative to now, e.g. '5 minutes ago'.

    Thresholds (in order): weeks, days, 'just now' (<=1s), seconds,
    minutes, hours. `date` is assumed naive local time like now().
    """
    delta = datetime.datetime.now() - date
    secs = delta.seconds
    if delta.days > 7:
        unit = 'weeks' if delta.days > 13 else 'week'
        return '{0} {1} ago'.format(delta.days/7, unit)
    if delta.days == 1:
        return '1 day ago'
    if delta.days > 1:
        return '{0} days ago'.format(delta.days)
    if secs <= 1:
        return 'just now'
    if secs < 60:
        return '{0} seconds ago'.format(secs)
    if secs < 120:
        return '1 minute ago'
    if secs < 3600:
        return '{0} minutes ago'.format(secs/60)
    if secs < 7200:
        return '1 hour ago'
    return '{0} hours ago'.format(secs/3600)
def validate_email(email):
    """
    Checks if the email address given is valid.

    Returns the re.Match object (truthy) on success, None otherwise.
    Fix: the dot before the TLD is now escaped — the original unescaped
    `.` matched ANY character, so strings like 'a@bXcom' passed. The
    pattern is also a raw string per regex convention.
    """
    return re.match(r"^[a-zA-Z0-9._%-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6}$", email)
def handleUploadedImage(image, user):
    """
    Saves image object along with a thumbnail of the image
    Returns the image object created

    Uploads two public-read renditions to the 'theblock' S3 bucket:
    images/<id>.jpg (longest edge 700px) and
    images/Thumbnails/<id>.jpg (longest edge 45px), where <id> is the
    ImageHolder row id. Temp files are written under /tmp and not
    cleaned up afterwards.
    """
    # Local import — presumably avoids a circular import with
    # userInfo.models at module load time; confirm before moving.
    from userInfo.models import ImageHolder
    image_file = StringIO.StringIO(image.read())
    real_image = Image.open(image_file)
    #TODO test this
    #Get connection to theblock bucket
    s3 = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = s3.get_bucket('theblock')
    #Get dimensions of image
    (width, height) = real_image.size
    #Calculate new, scaled dimensions (i.e. maximum dimensions for full size image)
    (width, height) = scaleDimensions(width, height, longest_side=700)
    #Convert it to RGB to make it look better
    real_image = real_image.convert("RGB")
    resized_image = real_image.resize((width, height))
    image_file = StringIO.StringIO()
    resized_image.save(image_file, 'JPEG')
    # Content-addressed temp filename: md5 of the encoded JPEG bytes.
    filename = hashlib.md5(image_file.getvalue()).hexdigest() + '.jpg'
    #Save to disk
    image_file = open(os.path.join('/tmp', filename), 'w')
    resized_image.save(image_file, 'JPEG')
    #image_file = open(os.path.join('/tmp', filename), 'r')
    #content = django.core.files.File(image_file)
    prof_pic = ImageHolder(creator=user, handle=filename)
    prof_pic.save() #Just to generate id
    #Save the full size image in "Images" directory with filename = <prof_pic.id>.jpg
    key = bucket.new_key('images/' + str(prof_pic.id) + '.jpg')
    key.set_contents_from_filename(os.path.join('/tmp', filename))
    key.set_acl('public-read')
    #Do this all again for thumbnail
    #Get dimensions of image
    (width, height) = real_image.size
    #Calculate new, scaled dimensions (i.e. maximum dimensions for full size image)
    (width, height) = scaleDimensions(width, height, longest_side=45)
    #Convert it to RGB to make it look better
    real_image = real_image.convert("RGB")
    resized_image = real_image.resize((width, height), Image.ANTIALIAS)
    image_file = StringIO.StringIO()
    resized_image.save(image_file, 'JPEG')
    filename = hashlib.md5(image_file.getvalue()).hexdigest() + '.jpg'
    #Save to disk
    image_file = open(os.path.join('/tmp', filename), 'w')
    resized_image.save(image_file, 'JPEG')
    #image_file = open(os.path.join('/tmp', filename), 'r')
    #content = django.core.files.File(image_file)
    #Save thumbnail in "Images/Thumbnails" directory with same name as fullsize
    key = bucket.new_key('images/Thumbnails/' + str(prof_pic.id) + '.jpg')
    key.set_contents_from_filename(os.path.join('/tmp', filename))
    key.set_acl('public-read')
    # Re-point the handle at the id-based S3 object name and persist it.
    prof_pic.handle = str(prof_pic.id) + '.jpg'
    prof_pic.save()
    return prof_pic
def scaleDimensions(width, height, longest_side):
    """Shrink (width, height) so the longer edge fits within longest_side.

    Aspect ratio is preserved (integer truncation); dimensions already
    within the limit are returned unchanged. Never upscales.
    """
    longest = max(width, height)
    if longest > longest_side:
        ratio = longest_side * 1. / longest
        return (int(width * ratio), int(height * ratio))
    return (width, height)
def computeXY(latitude, longitude):
    """
    Calculates the x and y coordinates for the block containing given coordinates
    Returns a tuple of the form (x_coord, y_coord)

    Assumes 90 km per degree of longitude and 111 km per degree of
    latitude, with 400 m x 400 m blocks (hence the 225 and 277.5 factors).
    """
    return (int(225 * longitude), int(277.5 * latitude))
|
UTF-8
|
Python
| false | false | 2,014 |
17,051,020,179,588 |
80a3e748782479069d66b9d9cc9a29c745d9e7ac
|
8c6f978a53bdb6207d6c04321433d3f645af1db3
|
/includes/guifunctions.py
|
36970cd90c721f6f85c7ffa0e363e43baddb7d32
|
[] |
no_license
|
ProfDrLuigi/rewired-gui
|
https://github.com/ProfDrLuigi/rewired-gui
|
83bbae36b35b4cbd723779665b7d669d332a3236
|
ed6f37ead1167ecbc005f8b7034662ffbbd65f9e
|
refs/heads/master
| 2017-05-09T09:50:44.965626 | 2013-05-15T10:48:09 | 2013-05-15T10:48:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import platform
from Tkinter import END
from time import sleep
from os.path import expanduser, join, exists
from os import mkdir, sep, getcwd
from shutil import copyfile, copytree
from logging import getLogger
from rewiredserver.includes import wiredfunctions, wireddb, wiredcertificate
def getPlatformString(parent):
    """Return '<OS> <version> - Python <py-version>' for the running host.

    `parent` is accepted for call-site symmetry but unused here.
    """
    system = platform.system()
    version = "Unknown Version"
    if system == 'Darwin':
        system = "OS X"
        version = platform.mac_ver()[0]
    elif system == 'Linux':
        try:
            distro = platform.linux_distribution()
            version = distro[0] + " " + distro[1]
        except:
            # linux_distribution() is missing on newer Pythons.
            version = "None"
    elif system == 'Windows':
        version = platform.win32_ver()[0]
    return system + " " + version + " - Python " + platform.python_version()
def checkPlatform(name):
    """Return 1 if `name` matches the current OS (case-insensitive), else 0."""
    return 1 if name.upper() == str(platform.system()).upper() else 0
def initConfig(parent):
    """Locate or create ~/.rewired and its server.conf.

    Returns the parsed config mapping, or 0 on unrecoverable failure.
    Side effects: may create the config dir, a default self-signed cert,
    banner, config file, empty SQLite db file, default file root and a
    blank log file; also sets parent.configFile and parent.logger.
    """
    home = False
    config = 0
    home = expanduser("~")
    if not home:
        debugLog(parent, "Unable to find user home dir!")
        return 0
    debugLog(parent, "User Home is: " + home)
    configdir = join(home, ".rewired")
    configfile = join(configdir, "server.conf")
    debugLog(parent, "Config Folder is: " + configdir)
    debugLog(parent, "Config File is: " + configfile)
    if not exists(configdir):
        debugLog(parent, "Creating the configdir")
        try:
            mkdir(configdir)
        except:
            debugLog(parent, "failed to create the configdir!")
            return 0
    else:
        debugLog(parent, "ConfigDir already exists")
    # Install a default certificate named after this host (best effort).
    if not exists(join(configdir, 'cert.pem')):
        from socket import gethostname
        if not createCert(join(configdir, 'cert.pem'), gethostname()):
            debugLog(parent, "failed to install default cert!")
    if not exists(join(configdir, 'banner.png')):
        if not saveCopy('data/banner.png', join(configdir, 'banner.png')):
            debugLog(parent, "failed to install default Banner!")
    if not exists(join(configdir, 'server.conf')):
        # First run: build a config with platform-specific defaults.
        debugLog(parent, "creating new config file")
        config = wiredfunctions.loadConfig(configfile)
        config['cert'] = join(configdir, 'cert.pem')
        config['serverBanner'] = join(configdir, 'banner.png')
        config['logFile'] = join(configdir, 'server.log')
        config['dbFile'] = join(configdir, 'rewiredDB.db')
        if checkPlatform("Darwin"):
            config['fileRoot'] = "/Users/Shared/Rewired Files"
        if checkPlatform("Linux"):
            config['fileRoot'] = "/opt/rewired/Files"
        if checkPlatform("Windows"):
            config['fileRoot'] = join(home + sep + "Rewired Files")
        config['serverPidFile'] = join(configdir, "server.pid")
        rewriteConfig(config)
    else:
        config = wiredfunctions.loadConfig(configfile)
    parent.configFile = configfile
    # Presumably loadConfig encodes an unset value as the literal string
    # '""' — TODO confirm against wiredfunctions.loadConfig.
    if config['trackerDNS'] == '""':
        config['trackerDNS'] = ""
    if not exists(config['dbFile']):
        file = open(config['dbFile'], 'w')
        file.close()
    if not exists(config['fileRoot']):
        try:
            copytree("data/files", config['fileRoot'])
        except:
            debugLog(parent, "Failed to create default File Root")
    parent.logger = getLogger("none")
    debugLog(parent, "initConfig done")
    if not exists(config['logFile']):
        file = open(config['logFile'], "w")
        file.write("Blank Logfile\n")
        file.close()
    # Prefer the packaged git revision (if any) as the reported version.
    git = gitVersion()
    if git:
        config['appVersion'] = git
    return config
def loadData(parent, type):
    """Load groups (type 0) or users (nonzero type) from the wired DB."""
    db = wireddb.wiredDB(parent.config, parent.logger)
    return db.loadUsers() if int(type) else db.loadGroups()
def updateData(parent, data, type):
    """Persist `data` of the given type; 1 on success, 0 on failure."""
    db = wireddb.wiredDB(parent.config, parent.logger)
    return 1 if db.updateElement(data, type) else 0
def deleteData(parent, name, type):
    """Delete the named element of the given type; 1 on success, 0 on failure."""
    db = wireddb.wiredDB(parent.config, parent.logger)
    return 1 if db.deleteElement(name, type) else 0
def rewriteConfig(config):
    """Write `config` to disk without its runtime-only keys, then restore
    the app identity keys in memory. Always returns 1.

    Fixes vs. original: 'appName' was popped twice, so the second pop
    raised KeyError and 'banner'/'serverStarted' were never stripped;
    and a missing 'appVersion' left `version` unbound, crashing with
    NameError when it was restored at the end.
    """
    # Strip keys that must not be persisted; defaults make this safe on
    # a config that never held them.
    version = str(config.pop('appVersion', ''))
    name = str(config.pop('appName', ''))
    config.pop('banner', None)
    config.pop('serverStarted', None)  # absent until the server has run
    try:
        # Original py2 intent: persist unicode values as UTF-8.
        config['serverDesc'] = config['serverDesc'].encode('UTF-8')
        config['serverName'] = config['serverName'].encode('UTF-8')
    except Exception:
        pass
    config.write()
    # Restore the in-memory identity keys for the rest of the session.
    config['appVersion'] = version
    config['appName'] = name
    return 1
def gitVersion():
    """Return the stripped git revision recorded in
    rewiredserver/includes/.gitversion (relative to the cwd), or 0 when
    the file is missing or unreadable.

    Fixes vs. original: removed the leftover `print "YUP"` debug
    statement (py2-only syntax) and the dead `version = 0` initialiser.
    """
    path = join(getcwd(), "rewiredserver/includes/.gitversion")
    if not exists(path):
        return 0
    try:
        with open(path, 'r') as f:
            version = f.readline()
    except (IOError, OSError):
        return 0
    return version.strip()
def saveCopy(src, dst):
    """Copy src to dst; return 1 on success, 0 on any failure."""
    try:
        copyfile(src, dst)
        return 1
    except:
        return 0
def getCertName(certpath):
    # Load a PEM certificate from disk and return its common name (CN).
    check = wiredcertificate.reWiredCertificate("")
    check.loadPem(certpath)
    return check.getCommonName()
def createCert(certpath, cname):
    """Create a self-signed certificate for `cname` and write it as PEM.

    Returns 1 on success, 0 on any failure (broad except by design —
    callers only log the failure).
    """
    try:
        cert = wiredcertificate.reWiredCertificate(str(cname))
        cert.createSignedCert()
        cert.safeAsPem(str(certpath))
    except:
        return 0
    return 1
def debugLog(parent, log):
    """Append `log` to the GUI debug box when debugging is enabled.

    Returns 0 only when a write to the debug box fails; 1 otherwise
    (including when debugging is off and nothing is written).
    """
    if not parent.debug:
        return 1
    try:
        parent.debugbox.insert("1.0", log + "\n")
    except:
        return 0
    return 1
|
UTF-8
|
Python
| false | false | 2,013 |
13,649,406,082,699 |
b63d9c2a64f662f72910010e3181270d50781a0d
|
3a1749d1c574cc9317b30bc656a5b351f6057c60
|
/SSL/SSLclient.py
|
cf33fd041e78949d4e373ef0303c6752e49723a8
|
[] |
no_license
|
Trietptm-on-Security/KoolimRezah-antiMalware
|
https://github.com/Trietptm-on-Security/KoolimRezah-antiMalware
|
6b28cb0fca92e992b85dfd21414b091847c07861
|
735e957bc65b94eccb54741c02fcf73b32003ee7
|
refs/heads/master
| 2018-05-13T05:56:01.028219 | 2014-08-30T22:47:55 | 2014-08-30T22:47:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#client
from socket import *
import ssl
import pprint
def main():
sock = socket(AF_INET,SOCK_STREAM)
ssl_sock = ssl.wrap_socket(s,
ca_certs="C:\Program Files(x86) \fileSend\SSL\server\filesmycertfile",
cert_reqs=ssl.CERT_REQUIRED)
print "Wraped!"
SSL_sock.connect(('10.0.0.7',10058))
print "Connected!"
pprint.pprint(ssl_sock.getpeercert())
SSL_sock.close()
if __name__ == '__main__':
    try:
        main()
    # Python 2 except syntax; broad catch keeps the demo from tracebacking.
    except Exception, e:
        print "Failed to execute main:", e
|
UTF-8
|
Python
| false | false | 2,014 |
2,070,174,238,092 |
d43ae777eb0a533beabee15513af0b50328d759a
|
ad56a0bdea178ec77875bcea40cad3bb33e9a190
|
/sentiment.py
|
28bdafc76ffaf7a29461261737bc6aaf3b96583e
|
[
"Apache-2.0"
] |
permissive
|
wlamond/sentiment_classifier
|
https://github.com/wlamond/sentiment_classifier
|
004788a026d680ee8525216d2eeef24991bee91e
|
aa4d186e9f3789921ecbb68f8e26eda0aaa25e75
|
refs/heads/master
| 2021-01-19T14:35:01.906513 | 2014-08-14T01:20:04 | 2014-08-14T01:20:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import argparse
import string
import numpy
import sys
import re
import os
from sklearn.externals import joblib
from sklearn.multiclass import OneVsRestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.feature_extraction.text import TfidfVectorizer
from scipy import sparse
from nltk.stem.snowball import SnowballStemmer
training_file = 'data/cleaned_train.tsv'
test_file = 'data/cleaned_test.tsv'
test_output_header = 'PhraseId,Sentiment\n'
validate_training_file = 'data/xaa'
validate_test_file = 'data/xab'
class Sentiment_Classifier:
    """TF-IDF n-gram + one-vs-rest logistic-regression sentiment model.

    Reads tab-separated phrase files (Kaggle movie-review layout),
    supports train / validate / test / interactive classification, and
    pickles the fitted model and vectorizer for later reuse.
    Python 2 code (py2 print statement in validate()).
    """
    def __init__(self, training=False, validate=False, require_dense=False, ncores=-2):
        # Expected column counts for the two TSV layouts.
        self.columns_per_training_example = 4
        self.columns_per_test_example = 3
        self.require_dense = require_dense
        self.ncores = ncores
        self.model_pickle_file = None
        self.transformer_pickle_file = None
        self.kernel_sampler_file = None
        self._setup_pickle_files()
        # used to filter
        self._stemmer = SnowballStemmer('english')
        if (training or validate):
            self._setup_classifier_and_transformer()
        else:
            # Inference mode: load the previously pickled model/vectorizer.
            self._load_model_and_transformer()
    def _setup_pickle_files(self):
        """ set up the directory for the model and transformer to be stored in once trained. """
        pickle_dir = 'pickled_objects'
        # NOTE(review): `d` is computed but never used.
        d = os.path.dirname(pickle_dir)
        if not os.path.exists(pickle_dir):
            os.makedirs(pickle_dir)
        self.model_pickle_file = pickle_dir + '/model.pkl'
        self.transformer_pickle_file = pickle_dir + '/transformer.pkl'
    def _store_model_and_transformer(self):
        # Persist the fitted classifier and vectorizer for inference runs.
        joblib.dump(self.classifier, self.model_pickle_file)
        joblib.dump(self.transformer, self.transformer_pickle_file)
    def _load_model_and_transformer(self):
        self.classifier = joblib.load(self.model_pickle_file)
        self.transformer = joblib.load(self.transformer_pickle_file)
    def _setup_classifier_and_transformer(self):
        # Term-frequency vectorizer over 1-3 grams (idf disabled).
        self.transformer = TfidfVectorizer(use_idf=False, decode_error='ignore', ngram_range=(1,3))
        self.classifier = OneVsRestClassifier(LogisticRegression(), n_jobs=self.ncores)
    def _write_message(self, msg):
        # Progress messages go to stderr so stdout stays parseable.
        sys.stderr.write(msg + '\n')
    def _filter(self, sentence):
        # Snowball-stem every token; rejoin with single spaces.
        sentence_list = sentence.split()
        sentence_list = map(lambda x: self._stemmer.stem(x), sentence_list)
        return ' '.join(sentence_list)
    def _fit_transform(self, X):
        return self.transformer.fit_transform(X)
    def _transform(self, X):
        return self.transformer.transform(X)
    ''' Get features related to word lengths. Counts of words of each length.
    Max word length, min word length, ratio of words to sentence length '''
    def _word_len_features(self, sentence):
        word_lengths = [len(word) for word in sentence.split()]
        if len(word_lengths) == 0:
            # return 0 for each feature
            return [0] * 12
        else:
            # add arbitrary counts up to size 10 (up to 20 is actually better,
            # but we should probably come up with a better way than arbitrary counts,
            # larger range buckets perhaps)
            len_counts = [0] * 9
            for i in range(1,10):
                len_counts[i-1] = word_lengths.count(i)
            len_counts.extend([sum(word_lengths)/float(len(sentence)), \
                max(word_lengths), min(word_lengths)])
            return len_counts
    def _get_extra_features(self, sentence):
        # Hand-crafted surface features appended to the n-gram matrix.
        sentence_len = float(len(sentence))
        get_count = lambda l1, l2: len(list(filter(lambda c: c in l2, l1)))
        digits_count = get_count(sentence, '0123456789')
        # punctuation count didn't help, pehaps indvidual punctuation count will
        #punct_count = get_count(sentence, string.punctuation)
        features = [sentence_len,
            sum(1 for c in sentence if c.isupper())/float(sentence_len),
            digits_count/sentence_len]
        features.extend(self._word_len_features(sentence))
        return features
    def get_features_and_labels(self, training_file):
        """Read a labelled TSV; return (sentences, extra_features, labels)."""
        self._write_message('reading data')
        training_examples = [(phrase_id, sentence_id, self._filter(sentence),
                              self._get_extra_features(sentence),
                              sentiment)
                             for phrase_id, sentence_id, sentence, sentiment
                             in self._read_file(training_file, self.columns_per_training_example)]
        self._write_message('generating mapped data')
        phrase_ids, sentence_ids, sentences, extra_features, y = zip(*training_examples)
        return sentences, extra_features, y
    def get_features_and_ids(self, data_file):
        """Read an unlabelled TSV; return (phrase_ids, vectorized X)."""
        self._write_message('reading data')
        examples = [(phrase_id, sentence_id, self._filter(sentence))
                    for phrase_id, sentence_id, sentence
                    in self._read_file(data_file, self.columns_per_test_example)]
        self._write_message('generating mapped data')
        phrase_ids, sentence_ids, sentences = zip(*examples)
        X = self._transform(sentences)
        return phrase_ids, X
    def _train(self, X, y):
        self.classifier.fit(X, y)
    def train(self, training_file):
        """ train the model """
        X, extra_features, y = self.get_features_and_labels(training_file)
        X = self._fit_transform(X)
        # Append the hand-crafted features as extra sparse columns.
        sparse_features = sparse.csr_matrix(numpy.array(extra_features))
        X = sparse.hstack((X, sparse_features))
        if self.require_dense:
            X = X.toarray()
        #X = self.kernel.fit_transform(X)
        self._write_message('training model')
        self._train(X, y)
        # save the classifier for later!
        self._store_model_and_transformer()
    def validate(self, validate_file):
        # Score the trained model on a held-out labelled file.
        X, extra_features, y = self.get_features_and_labels(validate_file)
        X = self._transform(X)
        sparse_features = sparse.csr_matrix(numpy.array(extra_features))
        X = sparse.hstack((X, sparse_features))
        if self.require_dense:
            X = X.toarray()
        #X = self.kernel.transform(X)
        self._write_message('validate model')
        print self._score(X, y)
    def _score(self, X, y):
        """ score the model """
        score = self.classifier.score(X, y)
        return score
    def _predict(self, X):
        """ predict a single example """
        y = self.classifier.predict(X)
        return y
    def test(self, test_file):
        """ generate the submission file. """
        self._write_message('predicting test outcomes')
        ids, X = self.get_features_and_ids(test_file)
        if self.require_dense:
            X = X.toarray()
        #X = self.kernel.transform(X)
        y = self._predict(X)
        self.write_output(ids, y)
    def classify_string(self):
        """ Classify lines from stdin """
        for s in sys.stdin:
            X = self.transformer.transform([s])
            self._write_line(self._predict(X)[0])
    def _write_line(self, s):
        sys.stdout.write(str(s) + '\n')
    def write_output(self, ids, y):
        """ write the result of the test method. """
        # write the new predictions and the IDs to stdout
        sys.stdout.write(test_output_header)
        for i in xrange(len(ids)):
            self._write_line(str(ids[i]) + ',' + str(y[i]))
    def _read_file(self, filename, expected_elements):
        """ generator that reads lines from the given file
        and appends missing data as needed """
        with open(filename, 'r') as f:
            for line in f:
                t = tuple(line.strip().split('\t'))
                if len(t) != expected_elements:
                    # Pad short rows (e.g. empty phrases) with an empty field.
                    t = t + ('',)
                yield t
def main():
    """Command-line entry point: train, test, validate and/or classify.

    Flags are not mutually exclusive; e.g. --train --test trains a
    model and then produces the submission file in a single run.
    """
    args = argparse.ArgumentParser()
    args.add_argument('--train', action='store_true')
    args.add_argument('--test', action='store_true')
    args.add_argument('--validate', action='store_true')
    args.add_argument('--sample', action='store_true')
    # -2 presumably means "all cores but one" (joblib convention),
    # forwarded to the underlying estimator -- TODO confirm.
    args.add_argument('--ncores', type=int, default=-2)
    args = args.parse_args()
    # pass test flag in so the constructer can load the
    # model and transformer. It doesn't need to do that for training
    model = Sentiment_Classifier(training=args.train, validate=args.validate, ncores=args.ncores)
    if args.train:
        # training_file etc. are module-level constants defined elsewhere
        # in this file.
        model.train(training_file)
    if args.test:
        model.test(test_file)
    if args.validate:
        # validation retrains on a held-out split before scoring
        model.train(validate_training_file)
        model.validate(validate_test_file)
    if args.sample:
        # interactive mode: classify lines from stdin
        model.classify_string()


if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
11,209,864,681,254 |
f3310834598e941c0cb31ae5313aaf96044f5dd5
|
d18ed72d6f8d27dd8a13eab5c6366f9dca48aa6b
|
/utils/elast/StrainGenerator.py
|
a43b79f72b63254d51688cefc679cc93fa48508b
|
[] |
no_license
|
danse-inelastic/AbInitio
|
https://github.com/danse-inelastic/AbInitio
|
6f1dcdd26a8163fa3026883fb3c40f63d1105b0c
|
401e8d5fa16b9d5ce42852b002bc2e4274afab84
|
refs/heads/master
| 2021-01-10T19:16:35.770411 | 2011-04-12T11:04:52 | 2011-04-12T11:04:52 | 34,972,670 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import numpy as np
import scipy.linalg as la
try:
from p4vasp.SystemPM import *
from p4vasp.Structure import Structure
import p4vasp.matrix as p4mat
except ImportError:
print "P4Vasp could not be imported in Python."
from UnitCell import *
import CubicStandardStrainTensors as cubic
class StrainGenerator():
    """Component to generate strained unit cells.

    Holds a reference unit cell, a strain tensor and a list of strain
    amplitudes, and produces volume-conserving strained structures
    either as VASP POSCAR files (via P4vasp) or as new UnitCell objects.
    """

    def __init__(self, unitcell=None, tensor=None, strains=None):
        # Fixes: 'self' was missing from the original signature (the
        # class could not be instantiated); 'is Note' was a typo for
        # 'is None'; the mutable default strains=[0.01] is replaced by
        # a None sentinel.
        if unitcell is None:
            unitcell = UnitCell()
        if tensor is None:
            tensor = cubic.C11
        if strains is None:
            strains = [0.01]
        self._uc = unitcell        # reference (unstrained) unit cell
        self._tensor = tensor      # strain tensor to apply
        self._strains = strains    # strain amplitudes
        return

    def getUnitCell(self):
        """Return the reference unit cell."""
        return self._uc

    def setUnitCell(self, unitcell):
        """Replace the reference unit cell."""
        self._uc = unitcell
        return

    def getStrainTensor(self):
        """Return the strain tensor."""
        return self._tensor

    def setStrainTensor(self, tensor):
        """Replace the strain tensor."""
        self._tensor = tensor
        return

    def getStrains(self):
        """Return the list of strain amplitudes."""
        return self._strains

    def setStrains(self, strains):
        """Replace the list of strain amplitudes."""
        self._strains = strains
        return

    def genStrainedPoscar(self, name="CXXstrain", struct=None, tensor=None):
        """Write one strained POSCAR file per amplitude in self._strains.

        struct is a P4vasp Structure (parsed from vasprun.xml); tensor
        defaults to this component's strain tensor.  Each output is
        rescaled so the cell volume is conserved.
        """
        if tensor is None:
            tensor = self._tensor
        # Parse the cell volume and basis vectors for the structure
        # with the P4vasp parser for the vasprun.xml.
        try:
            V0 = struct.getCellVolume()
            print("Original volume= %s" % V0)
        except:
            print("Could not access structure.")
        try:
            basis = struct.basis
        except:
            print("Could not access structure.")
        b0 = basis[0]
        b1 = basis[1]
        b2 = basis[2]
        # Generate the strained structures from the stored amplitudes.
        # (The original iterated over an undefined name 'strains'.)
        for x in self._strains:
            scale = p4mat.Matrix([[x, 0.0, 0.0], [0.0, x, 0.0], [0.0, 0.0, x]])
            try:
                tmp1 = tensor.__rmul__(scale)
            except:
                print("Tensor is not a valid P4vasp matrix.")
            tmp2 = p4mat.Matrix(3, 3)
            tmp2.identity()
            tmp = tmp1.__add__(tmp2)
            print(tmp)
            # Apply the tensor deformation to the unit-cell vectors.
            struct.basis[0] = tmp.__mul__(b0)
            print("b0= %s" % struct.basis[0])
            struct.basis[1] = tmp.__mul__(b1)
            print("b1= %s" % struct.basis[1])
            struct.basis[2] = tmp.__mul__(b2)
            print("b2= %s" % struct.basis[2])
            # The transformation needs to be volume-conserving,
            # so rescale the basis back to the original volume.
            V = struct.getCellVolume()
            print("New volume= %s" % V)
            y = float(np.power((V0 / V), 1 / 3.))
            struct.scaleBasis(y, y, y)
            # Write the strained structure in VASP POSCAR format.
            struct.write("pos_" + name + '_' + repr(x), newformat=0)
            # Reset to the unstrained basis before the next amplitude.
            struct.basis[0] = b0
            struct.basis[1] = b1
            struct.basis[2] = b2
            continue  # end of loop on strains
        return  # end of genStrainedPoscar

    def getNormStrainUnitCell(self, strain=0.01, tensor=None):
        """Return a volume-conserving strained copy of the unit cell.

        Applies (1 + strain) * tensor to the cell vectors and rescales
        the result back to the original volume.
        """
        if tensor is None:
            tensor = self._tensor
        straintensor = np.array(tensor) * (1.0 + strain)  # element-wise multiply
        cellvecs = np.array(self._uc.getCellVectors())
        # (original referenced the undefined name 'cellvectors')
        newvecs = np.dot(straintensor, cellvecs)  # matrix-like multiply on arrays
        # normalize the volume
        volume = self._uc.getVolume()
        newvol = abs(la.det(newvecs))
        cuberescale = np.power(volume / newvol, 1. / 3.)
        newvecs = newvecs * cuberescale  # << should check that this is what we want - O.D. 06/07
        atoms = self._uc.getAtoms()
        positions = self._uc.getPositions()  # original had self._uv (typo)
        uc = create_unitcell(newvecs, atoms, positions)
        return uc

    pass  # end of class StrainGenerator
|
UTF-8
|
Python
| false | false | 2,011 |
7,129,645,718,154 |
fc471558dd16c0d8e53821cb3e26e35c03eba1b0
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_6/ntsten002/question2.py
|
b3a7aa44bce0b7da2522c7151db4b0771e13ed58
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
https://github.com/MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#question 2
#Tendani Netshitenzhe
#25 April 2014
from math import *

# Read the two 3-component vectors from the user and wrap each token
# list so it can be indexed as vector[0][i].
vectorA = [input("Enter vector A:\n").split()]
vectorB = [input("Enter vector B:\n").split()]

# Component-wise sum, dot product, and squared norms of both vectors.
addition = [int(vectorA[0][i]) + int(vectorB[0][i]) for i in range(3)]
dot_product = sum(int(vectorA[0][i]) * int(vectorB[0][i]) for i in range(3))
norm_A = sum(int(vectorA[0][i]) ** 2 for i in range(3))
norm_B = sum(int(vectorB[0][i]) ** 2 for i in range(3))

# Display addition, dot product and the two vector norms.
print("A+B =", addition)
print("A.B =", dot_product)
print("|A| =", ("%.2f" % sqrt(norm_A)))
print("|B| =", ("%.2f" % sqrt(norm_B)))
|
UTF-8
|
Python
| false | false | 2,014 |
12,025,908,438,963 |
13afcadfdfcc9d3943f3904144ce9537f8947697
|
fa1b7c87b686a21ded717c4a6767336322bb6d2c
|
/core/server.py
|
9349ab7100612d925f3e6f4673b606fd3214b50f
|
[] |
no_license
|
Sjekk/core
|
https://github.com/Sjekk/core
|
ec3ec00376c2d7513b6678d323349f0fc7689588
|
a4babfb76ff721da6a92a54b4f540090ba4256ed
|
refs/heads/master
| 2020-05-21T13:05:30.692415 | 2012-07-24T14:00:54 | 2012-07-24T14:00:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
import os
import ConfigParser
import time
import threading
import sys
import SocketServer
class serverThread(threading.Thread):
    # Runs a SocketServer.ThreadingTCPServer in a background thread.
    # NOTE: set() must be called before run()/start(), since set() is
    # what actually creates the underlying server instance.

    def run(self):
        # Blocks until shutdown() is requested from another thread.
        self.server.serve_forever()

    def stop(self):
        print "Stop Server Thread"
        self.server.shutdown()

    def set(self, status):
        # Build a handler factory that closes over the shared status,
        # and bind the TCP server to all interfaces on port 61855.
        handlerFactroy = MyRequestHandlerFactory(status)
        self.server = SocketServer.ThreadingTCPServer(("", 61855), handlerFactroy)
        #self.server.set(status)
class ChatRequestHandler(SocketServer.BaseRequestHandler):
    """Serves the current check status as a JSON list over TCP.

    Status is shared through the module-level global ``_status``
    because SocketServer instantiates one handler object per request.
    """
    global _status
    _status = "No STATUS"

    def __init__(self, request, client_address, server, status):
        global _status
        # Fix: BaseRequestHandler.__init__ runs setup()/handle()/finish()
        # synchronously, so the global must be updated *before* the
        # base-class call -- the original assigned afterwards, leaving
        # the current request with the previous status.
        _status = status
        SocketServer.BaseRequestHandler.__init__(self, request, client_address, server)

    def log(self, msg):
        # Fix: the original referenced an undefined name ``ip``;
        # use the peer address of this connection instead.
        ip = self.client_address[0]
        print("[" + str(time.strftime("%d.%m.%Y %H:%M:%S")) + "][" + ip + "] " + str(msg))

    def handle(self):
        # Read (and ignore) the client's request payload.
        data = self.request.recv(1024)
        antwort = list()
        antwort.append("Error")
        if _status != "No STATUS":
            # Serialize every check result into a plain dict.
            antwort = list()
            for check in _status:
                checkData = {}
                checkData["server"] = check.getServerName()
                checkData["plugin"] = check.getPluginName()
                checkData["status"] = check.getStatus()
                checkData["msg"] = check.getMsg()
                antwort.append(checkData)
        self.request.send(json.dumps(antwort))
class MyRequestHandlerFactory(object):
    """Callable factory that injects a shared status into each handler.

    SocketServer invokes its "handler class" as handler(request,
    client_address, server); this object forwards that call and adds
    the status as a fourth constructor argument.
    """

    def __init__(self, status):
        self.status = status

    def __call__(self, a, b, c):
        handler = ChatRequestHandler(a, b, c, self.status)
        # Fix: return the constructed handler instead of discarding it
        # (SocketServer ignores the return value, but direct callers
        # get the instance back).
        return handler
|
UTF-8
|
Python
| false | false | 2,012 |
12,395,275,648,104 |
d712c5937c7b53038099c720325288c7aefce80f
|
9bbcd1814455be0068dc98fefb011701a3605afb
|
/modules/photosender.py
|
40c513a99265d2b12f3f7e92c133aed788222a64
|
[
"MIT"
] |
permissive
|
javiersantos/python-whatsapp-bot
|
https://github.com/javiersantos/python-whatsapp-bot
|
0d9391611f31e82f571861c19b2ff84b38ff2fab
|
1d5b9371b283c02c48abe598295e650c3291849e
|
refs/heads/master
| 2021-01-21T09:30:12.017360 | 2014-09-09T01:14:48 | 2014-09-09T01:14:48 | 24,796,446 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Yowsup.Media.downloader import MediaDownloader
from Yowsup.Media.uploader import MediaUploader
from sys import stdout
import os
import hashlib
import base64
import time
import Image
import StringIO
# Shared module state -- this module is a singleton-style plugin wired
# to a bot instance via setup().
bot=None                 # the owning bot (set by setup())
outbox=[]                # queued (recipient, path) tuples awaiting upload
outboxprocessing=False   # True while one queued photo is in flight
outboxempty=True
gotMediaReceipt = False  # set once the server answers media_requestUpload
done = False
# Per-upload scratch state used by the callback chain:
pathtoimage=""
jid=""
hashimage=""
def onImageReceived(messageId, jid, preview, url, size, wantsReceipt, isBroadcast):
    """Yowsup callback: an incoming image message arrived.

    Downloads the image via MediaDownloader and acks the message when
    the bot has receipts enabled.
    """
    print("Image received: Id:%s Jid:%s Url:%s size:%s" %(messageId, jid, url, size))
    print preview
    downloader = MediaDownloader(onDlsuccess, onDlerror, onDlprogress)
    downloader.download(url)
    global bot
    if wantsReceipt and bot.sendReceipts:
        bot.methodsInterface.call("message_ack", (jid, messageId))
    # NOTE(review): this loop just sleeps ~5s after starting the
    # download with no exit condition other than the counter; it looks
    # redundant -- confirm whether MediaDownloader needs the delay.
    timeout = 10
    t = 0;
    while t < timeout:
        time.sleep(0.5)
        t+=1
def onDlsuccess(path):
    """Announce where the downloaded image ended up."""
    msg = "Image downloded to %s" % path
    print(msg)
def onDlerror():
    """Break the in-place progress line and report a failed download."""
    stdout.write("\n")
    stdout.flush()
    print("Download Error")
def onDlprogress(value):
    """Redraw the in-place download progress indicator."""
    stdout.write("\r Progress: %s" % value)
    stdout.flush()
def createThumb(path):
    """Return a base64-encoded 100x100 JPEG thumbnail of *path*.

    Implicitly returns None if PIL cannot open the file.
    """
    try:
        size = 100, 100
        im = Image.open(path)
        # thumbnail() resizes in place, preserving aspect ratio.
        im.thumbnail(size, Image.ANTIALIAS)
        output = StringIO.StringIO()
        im.save(output,"JPEG")
        contents = output.getvalue()
        output.close()
        #im.save(outfile, "JPEG")
        return base64.b64encode(contents)
    except IOError:
        print "cannot create thumbnail for '%s'" % path
def onUploadSuccess(url):
    """Upload finished; send the image message pointing at *url*."""
    print("Upload Succ: url: %s " % (url))
    doSendImage(url)
def onError():
    """Upload failed; release the outbox so the queue can continue."""
    global outboxprocessing
    print("Upload Fail:")
    outboxprocessing = False
def onProgressUpdated(value):
    """Redraw the in-place upload progress indicator."""
    stdout.write("\r Progress: %s" % value)
### send image
def doSendImage(url):
    """Send the queued image (module globals pathtoimage/jid) via *url*.

    *url* is the media URL previously negotiated with the server.
    """
    global pathtoimage,jid
    print("Sending message_image")
    statinfo = os.stat(pathtoimage)
    name=os.path.basename(pathtoimage)
    global bot
    msgId = bot.methodsInterface.call("message_imageSend", (jid, url, name,str(statinfo.st_size), createThumb(pathtoimage)))
    global outboxprocessing
    # Mark the outbox free so photo_process() can pick up the next item.
    outboxprocessing=False
    #sentCache[msgId] = [int(time.time()), path]
## repeated upload
def onmedia_uploadRequestDuplicate(_hash, url):
    """The server already has this media; reuse the existing URL."""
    global gotMediaReceipt
    print("Request Dublicate: hash: %s url: %s "%(_hash, url))
    doSendImage(url)
    gotMediaReceipt = True
### upload
def uploadImage(url):
    """Start the actual media upload of pathtoimage to *url*."""
    global jid,pathtoimage
    # Progress/success/error are reported through the module callbacks.
    uploader = MediaUploader(jid, bot.username, onUploadSuccess, onError, onProgressUpdated)
    print "going to upload",pathtoimage
    uploader.upload(pathtoimage,url)
### upload request ##
def onmedia_uploadRequestSuccess(_hash, url, resumeFrom):
    """The server granted an upload slot; begin uploading."""
    global gotMediaReceipt
    print("Request Succ: hash: %s url: %s resume: %s"%(_hash, url, resumeFrom))
    uploadImage(url)
    gotMediaReceipt = True
def onmedia_uploadRequestFailed(_hash):
    """The server rejected the upload request; unblock the queue.

    Fix: the original assigned a *local* ``gotReceipt = True`` which
    had no effect -- sendPicture() polls the module-level
    ``gotMediaReceipt`` flag (set by the success/duplicate callbacks),
    so a failed request forced the wait loop to run its full timeout.
    """
    print("Request Fail: hash: %s"%(_hash))
    global gotMediaReceipt
    gotMediaReceipt = True
    global outboxprocessing
    outboxprocessing=False
### First step. Get ###
def sendPicture(path):
    """Request an upload slot for *path* and wait for the server's answer.

    Returns 1 on error (missing file or receipt timeout), None
    otherwise.  The actual upload/send continues asynchronously via
    the media_uploadRequest* callbacks registered in setup().
    """
    if not os.path.isfile(path):
        print("File %s does not exists" % path)
        return 1
    statinfo = os.stat(path)
    name=os.path.basename(path)
    print("Sending picture %s of size %s with name %s" %(path, statinfo.st_size, name))
    mtype = "image"
    # NOTE(review): the variable is named sha1 but computes SHA-256.
    sha1 = hashlib.sha256()
    fp = open(path, 'rb')
    try:
        sha1.update(fp.read())
        hsh = base64.b64encode(sha1.digest())
        print("Sending media_requestUpload")
        global bot
        global hashjid,hashpath
        a=bot.methodsInterface.call("media_requestUpload", (hsh, mtype, os.path.getsize(path)))
        print "a is",a
    finally:
        fp.close()
    # Poll up to ~50s (100 * 0.5s) for one of the upload-request
    # callbacks to set gotMediaReceipt.
    timeout = 100
    t = 0;
    global gotMediaReceipt
    while t < timeout and not gotMediaReceipt:
        time.sleep(0.5)
        t+=1
    if not gotMediaReceipt:
        print("MediaReceipt print timedout!")
        global outboxprocessing
        # Release the outbox so the queue does not dead-lock.
        outboxprocessing=False
        return 1
    else:
        print("Got request MediaReceipt")
    return
def photo_process():
    """Pop the next queued photo and start its upload, if idle.

    Recurses after each item so the whole queue drains (recursion depth
    equals queue length).  No-op while another upload is in flight.
    """
    global outboxprocessing
    global outbox
    if ((len(outbox)==0) or outboxprocessing):
        return
    else:
        outboxprocessing=True
        x=outbox.pop(0)
        recepient,path=x
        global pathtoimage, jid
        # Stash the current item in module globals for the callbacks.
        pathtoimage=path
        jid=recepient
        global bot
        bot.methodsInterface.call("typing_send",(recepient,))
        sendPicture(pathtoimage)
        # NOTE(review): sendPicture's callbacks (or its timeout path)
        # reset outboxprocessing before this recursive call -- confirm
        # the flag is always cleared, otherwise the queue stalls.
        photo_process()
def photo_queue(recepient, path):
    """Queue a photo for *recepient* and kick the outbox processor."""
    global outbox
    global outboxprocessing
    outbox.append((recepient, path))
    photo_process()
def setup(parent):
    """Register this module's callbacks on the bot's signal interface.

    Must be called once before any photo is queued; it also stores the
    bot reference the other functions use.
    """
    parent.signalsInterface.registerListener("media_uploadRequestSuccess", onmedia_uploadRequestSuccess)
    parent.signalsInterface.registerListener("media_uploadRequestFailed", onmedia_uploadRequestFailed)
    parent.signalsInterface.registerListener("media_uploadRequestDuplicate", onmedia_uploadRequestDuplicate)
    parent.signalsInterface.registerListener("image_received",onImageReceived)
    # NOTE(review): 'path' and 'done' are declared global but never
    # assigned here; likely leftovers.
    global path, gotMediaReceipt, done
    global bot
    bot=parent
|
UTF-8
|
Python
| false | false | 2,014 |
4,844,723,157,882 |
5f1b9dfabd86acb0b1054960213435d5229ec188
|
276b789971bc3e2a9dda69be75c87794faed9c98
|
/servidor/henry/wsgi.py
|
7032e715a751b6a86076abb61d6966e9a5ae87f9
|
[] |
no_license
|
qihqi/henryFACT
|
https://github.com/qihqi/henryFACT
|
fc0c62e4dbbfa886d68bbb0669decde6a9caa9a0
|
f1bb94a3c320319ec379bc583c2d89143074e0aa
|
refs/heads/master
| 2016-09-05T22:30:54.890928 | 2014-12-30T20:36:37 | 2014-12-30T20:36:37 | 4,302,067 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
# Make the project package importable for the WSGI container.
sys.path.append('/var/servidor/henry')
import os
# HOME must point somewhere writable for the service user.
os.environ["HOME"] = "/home/servidor/"
# Django must know its settings module before the application object
# is created.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by the development server
# as well as any WSGI server configured to use this file.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
UTF-8
|
Python
| false | false | 2,014 |
5,995,774,386,670 |
913e2ea0a320a83581e685e8e15d5583feca3fb7
|
a821bc19bfa8da55224e18b0c13ee7695760b70d
|
/apartmentscom.py
|
7fd4f18522dcd6a131df5ac0180cc61dbf2c3569
|
[] |
no_license
|
shewu/househunting
|
https://github.com/shewu/househunting
|
f6eb3b7487e3ae30295b59688f8ad2d7d2dd2776
|
99fed914a529ba8ec1fa582d9dc84157895145d6
|
refs/heads/master
| 2021-01-22T13:46:55.795274 | 2014-02-23T02:00:08 | 2014-02-23T02:00:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import apartment
from bs4 import BeautifulSoup
import urllib
URL_BASE = 'http://www.apartments.com'
def search_city(city):
    """Search apartments.com for *city*; return a list of Apartment objects.

    Fetches the search page, then follows every result link to scrape
    the listing's name and address.  Network-bound: one HTTP request
    per search result.
    """
    # Fix: the original replaced spaces with '%%20'.  Percent-doubling
    # only matters inside the *format string*; in a substituted value
    # it stays as two literal percent signs, producing URLs like
    # 'New%%20York'.  A single '%20' is the correct escape.
    f = urllib.urlopen('%(base)s/search/?query=%(city)s' % {
        'base': URL_BASE,
        'city': city.replace(' ', '%20')
    })
    bs = BeautifulSoup(f.read())
    search_results = bs.find_all(itemprop='url', href=True)
    apartments = []
    for search_result in search_results:
        follow_link = search_result['href']
        f2 = urllib.urlopen(URL_BASE + follow_link)
        bs2 = BeautifulSoup(f2.read())
        address = apartment.Address(
            street=bs2.find(itemprop='streetAddress').string,
            city=bs2.find(itemprop='addressLocality').string,
            state=bs2.find(itemprop='addressRegion').string,
            zipcode=bs2.find(itemprop='postalCode').string
        )
        # A listing's name may live in the tag text or a 'content' attr;
        # prefer 'content' when present, then the text, then a default.
        name = '(No Name)'
        name_string = bs2.find(itemprop='name').string
        name = name_string if name_string is not None else name
        name_content = bs2.find(itemprop='name').get('content')
        name = name_content if name_content is not None else name
        apartments.append(apartment.Apartment(
            name,
            URL_BASE + follow_link,
            address
        ))
    return apartments
|
UTF-8
|
Python
| false | false | 2,014 |
4,982,162,075,442 |
04ab3b55ae008d0cdd9dbba4c88f37a88aa593d3
|
eee13a0a04b45d4f53d3187a523632ffb70cd750
|
/sphinxcontrib/issuetracker/resolvers.py
|
7ddd0d2fe291a4346a1e209be05f07e25258008a
|
[
"BSD-2-Clause"
] |
permissive
|
ignatenkobrain/sphinxcontrib-issuetracker
|
https://github.com/ignatenkobrain/sphinxcontrib-issuetracker
|
21260b4fd65065d1ab18da58e0336c9a531488ab
|
fa19ff536b078c9d83fe59c329d48dc867403707
|
refs/heads/master
| 2022-09-27T04:12:49.515431 | 2014-07-24T20:32:10 | 2014-07-24T20:32:10 | 4,700,821 | 6 | 7 |
BSD-2-Clause
| false | 2022-08-27T03:18:47 | 2012-06-18T12:51:09 | 2022-08-21T13:19:12 | 2022-08-27T03:16:57 | 387 | 15 | 11 | 15 |
Python
| false | false |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, 2011, 2012, 2013 Sebastian Wiesner <[email protected]>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
sphinxcontrib.issuetracker.resolvers
====================================
Builtin resolvers for :mod:`sphinxcontrib.issuetracker`.
.. moduleauthor:: Sebastian Wiesner <[email protected]>
"""
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import time
import requests
from xml.etree import ElementTree as etree
from sphinxcontrib.issuetracker import Issue, __version__
# URL templates for the supported trackers.  '{0}' is the tracker
# configuration object and '{1}' the issue id, except where noted.
GITHUB_API_URL = 'https://api.github.com/repos/{0.project}/issues/{1}'
BITBUCKET_URL = 'https://bitbucket.org/{0.project}/issue/{1}/'
BITBUCKET_API_URL = ('https://api.bitbucket.org/1.0/repositories/'
                     '{0.project}/issues/{1}/')
DEBIAN_URL = 'http://bugs.debian.org/cgi-bin/bugreport.cgi?bug={0}'
LAUNCHPAD_URL = 'https://bugs.launchpad.net/bugs/{0}'
GOOGLE_CODE_URL = 'http://code.google.com/p/{0.project}/issues/detail?id={1}'
GOOGLE_CODE_API_URL = ('http://code.google.com/feeds/issues/p/'
                       '{0.project}/issues/full/{1}')
# namespaces required to parse XML returned by Google Code
GOOGLE_ISSUE_NS = '{http://schemas.google.com/projecthosting/issues/2009}'
ATOM_NS = '{http://www.w3.org/2005/Atom}'
JIRA_API_URL = ('{0.url}/si/jira.issueviews:issue-xml/{1}/{1}.xml?'
                # only request the required fields
                'field=link&field=resolution&field=summary&field=project')
def check_project_with_username(tracker_config):
    """Raise ValueError unless the project is given as 'username/name'."""
    project = tracker_config.project
    if '/' not in project:
        message = 'username missing in project name: {0.project}'
        raise ValueError(message.format(tracker_config))
# Sent with every tracker request so remote services can identify this
# client by name and version.
HEADERS = {
    'User-Agent': 'sphinxcontrib-issuetracker v{0}'.format(__version__)
}
def get(app, url):
    """GET *url* and return the requests Response on HTTP 200.

    Returns None for any other status; statuses other than 200/404 are
    additionally reported as a Sphinx warning via *app*.
    """
    response = requests.get(url, headers=HEADERS)
    status = response.status_code
    if status == requests.codes.ok:
        return response
    if status != requests.codes.not_found:
        app.warn('GET {0.url} failed with code {0.status_code}'.format(response))
    return None
def lookup_github_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against the GitHub API.

    Returns an Issue, or None if the issue does not exist or the
    hourly API rate limit is currently exhausted.
    """
    check_project_with_username(tracker_config)
    # Rate-limit bookkeeping is kept on the Sphinx build environment.
    timestamp, limit_hit = getattr(app.env, 'github_rate_limit', (0, False))
    if limit_hit and time.time() - timestamp > 3600:
        # Github limits applications hourly
        limit_hit = False
    if limit_hit:
        app.warn('Github rate limit exceeded, not resolving issue {0}'.format(
            issue_id))
        return None
    url = GITHUB_API_URL.format(tracker_config, issue_id)
    response = get(app, url)
    if response:
        rate_remaining = response.headers.get('X-RateLimit-Remaining')
        # Fix: the header may be absent (None), which previously raised
        # AttributeError on .isdigit().
        if rate_remaining and rate_remaining.isdigit() \
                and int(rate_remaining) == 0:
            app.warn('Github rate limit hit')
            app.env.github_rate_limit = (time.time(), True)
        issue = response.json()
        closed = issue['state'] == 'closed'
        return Issue(id=issue_id, title=issue['title'], closed=closed,
                     url=issue['html_url'])
    return None
def lookup_bitbucket_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against the Bitbucket API; None if not found."""
    check_project_with_username(tracker_config)
    api_url = BITBUCKET_API_URL.format(tracker_config, issue_id)
    response = get(app, api_url)
    if not response:
        return None
    issue = response.json()
    # Anything past 'new'/'open' counts as closed.
    closed = issue['status'] not in ('new', 'open')
    web_url = BITBUCKET_URL.format(tracker_config, issue_id)
    return Issue(id=issue_id, title=issue['title'], closed=closed, url=web_url)
def lookup_debian_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against the Debian BTS via python-debianbts."""
    import debianbts
    try:
        # get the bug
        bug = debianbts.get_status(issue_id)[0]
    except IndexError:
        # unknown bug number -> empty result list
        return None
    # check if issue matches project
    if tracker_config.project not in (bug.package, bug.source):
        return None
    return Issue(id=issue_id, title=bug.subject, closed=bug.done,
                 url=DEBIAN_URL.format(issue_id))
def lookup_launchpad_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against Launchpad via launchpadlib."""
    from launchpadlib.launchpad import Launchpad
    launchpad = Launchpad.login_anonymously('sphinxcontrib.issuetracker')
    try:
        # get the bug
        bug = launchpad.bugs[issue_id]
    except KeyError:
        return None
    # Only tasks targeting the configured project are relevant.
    project_tasks = [task for task in bug.bug_tasks
                     if task.bug_target_name == tracker_config.project]
    if not project_tasks:
        # no matching task found
        return None
    # The bug counts as closed once every matching task is complete.
    is_complete = all(t.is_complete for t in project_tasks)
    return Issue(id=issue_id, title=bug.title, closed=is_complete,
                 url=LAUNCHPAD_URL.format(issue_id))
def lookup_google_code_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against the Google Code issues Atom feed."""
    url = GOOGLE_CODE_API_URL.format(tracker_config, issue_id)
    response = get(app, url)
    if response:
        issue = etree.fromstring(response.content)
        state = issue.find('{0}state'.format(GOOGLE_ISSUE_NS))
        title_node = issue.find('{0}title'.format(ATOM_NS))
        # the title element may legitimately be missing from the entry
        title = title_node.text if title_node is not None else None
        closed = state is not None and state.text == 'closed'
        return Issue(id=issue_id, title=title, closed=closed,
                     url=GOOGLE_CODE_URL.format(tracker_config, issue_id))
def lookup_jira_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against a JIRA instance via its XML issue view.

    Requires tracker_config.url to point at the JIRA installation;
    raises ValueError otherwise.
    """
    if not tracker_config.url:
        raise ValueError('URL required')
    url = JIRA_API_URL.format(tracker_config, issue_id)
    response = get(app, url)
    if response:
        issue = etree.fromstring(response.content)
        project = issue.find('*/item/project').text
        if project != tracker_config.project:
            # issue belongs to another project on the same server
            return None
        url = issue.find('*/item/link').text
        state = issue.find('*/item/resolution').text
        # summary contains the title without the issue id
        title = issue.find('*/item/summary').text
        closed = state.lower() != 'unresolved'
        return Issue(id=issue_id, title=title, closed=closed, url=url)
def lookup_redmine_issue(app, tracker_config, issue_id):
    """Resolve *issue_id* against a Redmine instance via python-redmine.

    Requires tracker_config.url; credentials come from the
    issuetracker_redmine_* config values.
    """
    from redmine import Redmine
    if not tracker_config.url:
        raise ValueError('URL required')
    redmine = Redmine(tracker_config.url,
                      key=app.config.issuetracker_redmine_key,
                      username=app.config.issuetracker_redmine_username,
                      password=app.config.issuetracker_redmine_password,
                      requests=app.config.issuetracker_redmine_requests)
    if redmine:
        issue = redmine.issue.get(issue_id)
        # Fix: the original used ``issue.status is "Closed"`` -- an
        # identity comparison against a string literal, which is never
        # reliable.  Compare the status by value instead.
        return Issue(id=issue_id, title=issue.subject,
                     closed=str(issue.status) == "Closed",
                     url=issue.url)
# Maps the ``issuetracker`` config value to the corresponding lookup
# callback; each takes (app, tracker_config, issue_id) and returns an
# Issue or None.
BUILTIN_ISSUE_TRACKERS = {
    'github': lookup_github_issue,
    'bitbucket': lookup_bitbucket_issue,
    'debian': lookup_debian_issue,
    'launchpad': lookup_launchpad_issue,
    'google code': lookup_google_code_issue,
    'jira': lookup_jira_issue,
    'redmine': lookup_redmine_issue,
}
|
UTF-8
|
Python
| false | false | 2,014 |
8,091,718,424,582 |
4cfaa01ccaabaf5ef0f6fceae09c4fb59e781085
|
38ae8f85abc137bf0cd3cee5292292afa2acafdb
|
/TCG Pricing Pull/GetCardPricing.py
|
3676fec7b146889cfb4f53fd788a36403f43b75f
|
[] |
no_license
|
nwilson75/Nolans-Projects
|
https://github.com/nwilson75/Nolans-Projects
|
ed738485d59ab632a45fe9eff7c267f383afb235
|
4a9d37c32feb0e257b484956af9087d7c4b861a5
|
refs/heads/master
| 2016-02-26T19:36:06.803604 | 2014-09-11T22:10:46 | 2014-09-11T22:10:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import os
import csv
import time
from TCGInfo import TCGInfo
# Batch job: for every CSV of cards under 'folder', look up current TCG
# pricing and write an augmented copy into a per-day output directory.
folder = "path_to_folder"
print (os.listdir (folder))

current_date = time.strftime("%B %d %Y")
#make new folder for each day
new_folder = "path_to_folder" + current_date
if not os.path.isdir(new_folder):
    os.mkdir(new_folder)

for filename in os.listdir (folder):
    #Once "Price-at-a-glace" implemented, open text file for price change flag
    if ".csv" in filename:
        #if file doesn't already exist in output path, continue
        #if not os.path.isfile("./" + current_date + "/" + filename + '.csv'):
        print (filename)
        input_file = filename
        # NOTE(review): this uses a literal base path rather than the
        # 'folder' variable above -- confirm they are meant to match.
        open_string = './path_to_base_files/' + input_file
        input_cards_file = open(open_string,'r')
        output_string = "./" + current_date + "/" + filename
        # NOTE(review): neither file handle is ever closed; consider
        # 'with open(...)' blocks.
        output_card_file = open(output_string, 'w')
        csv_read = csv.reader(input_cards_file)
        csv_write = csv.writer(output_card_file, delimiter=',', quotechar='"')
        output_file_set_name = ''
        for row in csv_read:
            if row[0] == 'ID':
                # header row: copy through unchanged
                csv_write.writerow([row[0],row[1],row[2],row[3]])
            else:
                set_name = row[2] #fix for iteration
                card_name = row[1] #remove arrows and price information once "Price-at-a-glance" is implemented
                print(card_name)
                # Look up current pricing and append it (HTML-joined)
                # to the existing price column.
                card = TCGInfo(card_name,set_name)
                price_info = card.get_pricing()
                price_string = row[3] + '<br>' + price_info
                csv_write.writerow([row[0],row[1],row[2],price_string])

#Price-at-a-glance
# The following triple-quoted strings are inert design notes, not code.
"""
priceChangeFlag = false
Open up two iterables, one of the file created, one of file created from previous day
for row in file
If price info is different,
set priceChangeFlag to true
indicate higher or lower with arrow
calculate price difference
newTitle = old title (stripped of arrows and price), plus new arrows and price
If price change flag true, write to external text file + line break
"""
#OLD INFO
"""
try:
range(len(data[input_set]['cards']))
except KeyError:
print('CARDNOTFOUND')
else:
for n in range(len(data[input_set]['cards'])):
if input_card in data[input_set]['cards'][n]['name']:
print('Card Found')
card_number = n
if card_number == -1:
csv_write.writerow([input_card,input_set,'AAA1: Card Not Found'])
else:
try:
data[input_set]['cards'][card_number]['text']
except KeyError:
csv_write.writerow([input_card,input_set,'AAA2: No Card Text for'])
print('AAA2: No Card Text for', input_card, 'in ', input_set)
else:
csv_write.writerow([input_card,input_set,(data[input_set]['cards'][card_number]['text'])])
print(data[input_set]['cards'][card_number]['text'])
"""
|
UTF-8
|
Python
| false | false | 2,014 |
11,166,914,994,187 |
f4209a33f826a2e9e8ae81af179d78609002d287
|
340a0e239dbcbca19814d5dbd89c3bbe8968e7f3
|
/awsspotmonitor/test_capturelog.py
|
443fa3200a09c264ba1a7d0dc2883b475c3be839
|
[
"BSD-3-Clause"
] |
permissive
|
waxkinetic/awsspotmonitor
|
https://github.com/waxkinetic/awsspotmonitor
|
78c8574649c520c7dcb7615ceb705d3989012cb0
|
6ae8020898e1ab96c9f14ab62e96324bd8e60fbc
|
refs/heads/master
| 2020-07-24T05:51:22.275542 | 2013-03-20T20:19:58 | 2013-03-20T20:19:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import absolute_import
# standard
from mock import patch
import unittest
# package
from .capturelog import *
class CaptureLog_test(unittest.TestCase):
    """Unit tests for CaptureLog's start/end capture mail flow."""

    def test_create_msg(self):
        # Minimal mail configuration the log needs to build an email.
        mail_config = dict(
            subject = 'email subject',
            sender = ('Rick', '[email protected]'),
            recipients = ['[email protected]', '[email protected]']
        )
        log = CaptureLog(mail_config)
        # Patch the module-level sender so no mail actually goes out.
        with patch('awsspotmonitor.capturelog.send_plaintext_msg') as send_msg:
            log.start_capture()
            log.write('first message')
            log.write('second message')
            log.end_capture()
            # NOTE(review): on old mock versions unknown assert_* names
            # were silently auto-created attributes; confirm the pinned
            # mock supports assert_called_once().
            send_msg.assert_called_once()
            args, _ = send_msg.call_args
            # args[1]/args[2] are the formatted sender and recipients --
            # presumably positional params of send_plaintext_msg; verify
            # against its signature.
            self.assertEqual(args[1], 'Rick <[email protected]>')
            self.assertEqual(args[2], mail_config['recipients'])


if __name__ == '__main__':
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
19,086,834,688,439 |
0e250cb76bfad802a092e119f564fa0f90ec9129
|
b4892213c65e31621d6436001dbf2b028588065d
|
/shinken/modules/livestatus_broker/livestatus_wait_query.py
|
2df834a8b10ae568b50ac6e77cd70a3ff53ffb03
|
[
"AGPL-3.0-only"
] |
non_permissive
|
mleinart/shinken
|
https://github.com/mleinart/shinken
|
4229b91b5026bec89b4552dd607bb8c4d99fdce2
|
ae12442262ff1b501266d888d6f008d5f628051c
|
refs/heads/master
| 2021-01-16T19:41:46.714995 | 2011-12-29T15:21:18 | 2011-12-29T15:21:18 | 3,069,808 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#Copyright (C) 2009 Gabes Jean, [email protected]
#
#This file is part of Shinken.
#
#Shinken is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#Shinken is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#
#You should have received a copy of the GNU Affero General Public License
#along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import re
import time
import os
try:
import sqlite3
except ImportError:
try:
import pysqlite2.dbapi2 as sqlite3
except ImportError:
import sqlite as sqlite3
from livestatus_query import LiveStatusQuery
from livestatus_response import LiveStatusResponse
from livestatus_constraints import LiveStatusConstraints
class LiveStatusWaitQuery(LiveStatusQuery):
    # Presumably a query that waits for a trigger/timeout before the
    # actual query is answered -- see parse_input for the WaitObject
    # handling.
    my_type = 'wait'

    def __init__(self, *args, **kwargs):
        """Initialize like a LiveStatusQuery, plus wait bookkeeping."""
        #super(LiveStatusWaitQuery, self).__init__(*args, **kwargs)
        LiveStatusQuery.__init__(self, *args, **kwargs)
        # Wait queries answer as plain csv without headers/keepalive.
        self.response = LiveStatusResponse(responseheader = 'off', outputformat = 'csv', keepalive = 'off', columnheaders = 'off', separators = LiveStatusResponse.separators)
        self.wait_start = time.time()  # when the wait began
        self.wait_timeout = 0          # no timeout configured yet
        self.wait_trigger = 'all'      # default trigger condition
def parse_input(self, data):
"""Parse the lines of a livestatus request.
This function looks for keywords in input lines and
sets the attributes of the request object
"""
for line in data.splitlines():
line = line.strip()
# Tools like NagVis send KEYWORK:option, and we prefer to have
# a space following the :
if ':' in line and not ' ' in line:
line = line.replace(':', ': ')
keyword = line.split(' ')[0].rstrip(':')
if keyword == 'GET': # Get the name of the base table
cmd, self.table = self.split_command(line)
self.set_default_out_map_name()
elif keyword == 'WaitObject': # Pick a specific object by name
cmd, object = self.split_option(line)
# It's like Filter: name = %s
# Only for services it's host<blank>servicedesc
if self.table == 'services':
host_name, service_description = object.split(';', 1)
self.filtercolumns.append('host_name')
self.prefiltercolumns.append('host_name')
self.filter_stack.put(self.make_filter('=', 'host_name', host_name))
self.filtercolumns.append('description')
self.prefiltercolumns.append('description')
self.filter_stack.put(self.make_filter('=', 'description', service_description))
try:
# A WaitQuery works like an ordinary Query. But if
# we already know which object we're watching for
# changes, instead of scanning the entire list and
# applying a Filter:, we simply reduce the list
# so it has just one element.
self.services = { host_name + service_description : self.services[host_name + service_description] }
except:
pass
elif self.table == 'hosts':
attribute = self.strip_table_from_column('name')
self.filtercolumns.append('name')
self.prefiltercolumns.append('name')
self.filter_stack.put(self.make_filter('=', 'name', object))
try:
self.hosts = { host_name : self.hosts[host_name] }
except:
pass
else:
attribute = self.strip_table_from_column('name')
self.filtercolumns.append('name')
self.prefiltercolumns.append('name')
self.filter_stack.put(self.make_filter('=', 'name', object))
# For the other tables this works like an ordinary query.
# In the future there might be more lookup-tables
elif keyword == 'WaitTrigger':
cmd, self.wait_trigger = self.split_option(line)
if self.wait_trigger not in ['check', 'state', 'log', 'downtime', 'comment', 'command']:
self.wait_trigger = 'all'
elif keyword == 'WaitCondition':
try:
cmd, attribute, operator, reference = self.split_option(line, 3)
except:
cmd, attribute, operator, reference = self.split_option(line, 2) + ['']
if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
# We need to set columns, if not columnheaders will be set to "on"
self.columns.append(attribute)
# Cut off the table name
attribute = self.strip_table_from_column(attribute)
# Some operators can simply be negated
if operator in ['!>', '!>=', '!<', '!<=']:
operator = { '!>' : '<=', '!>=' : '<', '!<' : '>=', '!<=' : '>' }[operator]
# Put a function on top of the filter_stack which implements
# the desired operation
self.filtercolumns.append(attribute)
self.prefiltercolumns.append(attribute)
self.filter_stack.put(self.make_filter(operator, attribute, reference))
if self.table == 'log':
if attribute == 'time':
self.sql_filter_stack.put(self.make_sql_filter(operator, attribute, reference))
else:
print "illegal operation", operator
pass # illegal operation
elif keyword == 'WaitConditionAnd':
cmd, andnum = self.split_option(line)
# Take the last andnum functions from the stack
# Construct a new function which makes a logical and
# Put the function back onto the stack
self.filter_stack.and_elements(andnum)
elif keyword == 'WaitConditionOr':
cmd, ornum = self.split_option(line)
# Take the last ornum functions from the stack
# Construct a new function which makes a logical or
# Put the function back onto the stack
self.filter_stack.or_elements(ornum)
elif keyword == 'WaitTimeout':
cmd, self.wait_timeout = self.split_option(line)
self.wait_timeout = int(self.wait_timeout) / 1000
else:
# This line is not valid or not implemented
print "Received a line of input which i can't handle : '%s'" % line
pass
# Make columns unique
self.filtercolumns = list(set(self.filtercolumns))
self.prefiltercolumns = list(set(self.prefiltercolumns))
# Make one big filter where the single filters are anded
self.filter_stack.and_elements(self.filter_stack.qsize())
if self.table == 'log':
self.sql_filter_stack.and_elements(self.sql_filter_stack.qsize())
def launch_query(self):
""" Prepare the request object's filter stacks """
print "."
# A minimal integrity check
if not self.table:
return []
try:
# Remember the number of stats filters. We need these numbers as columns later.
# But we need to ask now, because get_live_data() will empty the stack
if self.table == 'log':
result = self.get_live_data_log()
else:
# If the pnpgraph_present column is involved, then check
# with each request if the pnp perfdata path exists
if 'pnpgraph_present' in self.columns + self.filtercolumns + self.prefiltercolumns and self.pnp_path and os.access(self.pnp_path, os.R_OK):
self.pnp_path_readable = True
else:
self.pnp_path_readable = False
# Apply the filters on the broker's host/service/etc elements
result = self.get_live_data()
except Exception, e:
import traceback
print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
print e
traceback.print_exc(32)
print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
result = []
return result
def get_live_data(self):
"""Find the objects which match the request.
This function scans a list of objects (hosts, services, etc.) and
applies the filter functions first. The remaining objects are
converted to simple dicts which have only the keys that were
requested through Column: attributes. """
# We will use prefiltercolumns here for some serious speedup.
# For example, if nagvis wants Filter: host_groups >= hgxy
# we don't have to use the while list of hostgroups in
# the innermost loop
# Filter: host_groups >= linux-servers
# host_groups is a service attribute
# We can get all services of all hosts of all hostgroups and filter at the end
# But it would save a lot of time to already filter the hostgroups. This means host_groups must be hard-coded
# Also host_name, but then we must filter the second step.
# And a mixture host_groups/host_name with FilterAnd/Or? Must have several filter functions
handler = self.objects_get_handlers.get(self.table, None)
if not handler:
print("Got unhandled table: %s" % (self.table))
return []
# Get the function which implements the Filter: statements
filter_func = self.filter_stack.get_stack()
out_map = self.out_map[self.out_map_name]
filter_map = dict([(k, out_map.get(k)) for k in self.filtercolumns])
output_map = dict([(k, out_map.get(k)) for k in self.columns]) or out_map
without_filter = len(self.filtercolumns) == 0
cs = LiveStatusConstraints(filter_func, out_map, filter_map, output_map, without_filter)
res = handler(self, cs)
# A LiveStatusWaitQuery is launched several times, so we need to
# put back the big filter function
self.filter_stack.put_stack(filter_func)
return res
def get_live_data_log(self):
"""Like get_live_data, but for log objects"""
filter_func = self.filter_stack.get_stack()
sql_filter_func = self.sql_filter_stack.get_stack()
out_map = self.out_map[self.out_map_name]
filter_map = dict([(k, out_map.get(k)) for k in self.filtercolumns])
output_map = dict([(k, out_map.get(k)) for k in self.columns]) or out_map
without_filter = len(self.filtercolumns) == 0
result = []
# We can apply the filterstack here as well. we have columns and filtercolumns.
# the only additional step is to enrich log lines with host/service-attributes
# A timerange can be useful for a faster preselection of lines
filter_clause, filter_values = sql_filter_func()
full_filter_clause = filter_clause
matchcount = 0
for m in re.finditer(r"\?", full_filter_clause):
full_filter_clause = re.sub('\\?', str(filter_values[matchcount]), full_filter_clause, 1)
matchcount += 1
fromtime = 0
totime = int(time.time()) + 1
gtpat = re.search(r'^(\(*time|(.*\s+time))\s+>\s+(\d+)', full_filter_clause)
gepat = re.search(r'^(\(*time|(.*\s+time))\s+>=\s+(\d+)', full_filter_clause)
ltpat = re.search(r'^(\(*time|(.*\s+time))\s+<\s+(\d+)', full_filter_clause)
lepat = re.search(r'^(\(*time|(.*\s+time))\s+<=\s+(\d+)', full_filter_clause)
if gtpat != None:
fromtime = int(gtpat.group(3)) + 1
if gepat != None:
fromtime = int(gepat.group(3))
if ltpat != None:
totime = int(ltpat.group(3)) - 1
if lepat != None:
totime = int(lepat.group(3))
# now find the list of datafiles
filtresult = []
for dateobj, handle, archive, fromtime, totime in self.db.log_db_relevant_files(fromtime, totime):
dbresult = self.select_live_data_log(filter_clause, filter_values, handle, archive, fromtime, totime)
prefiltresult = [y for y in (x.fill(self.hosts, self.services, set(self.columns + self.filtercolumns)) for x in dbresult) if (without_filter or filter_func(self.create_output(filter_map, y)))]
filtresult.extend([self.create_output(output_map, x) for x in prefiltresult])
result = filtresult
self.filter_stack.put_stack(filter_func)
self.sql_filter_stack.put_stack(sql_filter_func)
#print "result is", result
return result
def condition_fulfilled(self):
result = self.launch_query()
response = self.response
response.format_live_data(result, self.columns, self.aliases)
output, keepalive = response.respond()
return output.strip()
|
UTF-8
|
Python
| false | false | 2,011 |
17,102,559,784,949 |
908f91d4a4eb7ab31bc04aa93569e20285804a58
|
a66a9461e2d691ac753e1306c9e64bf6bfb9f93f
|
/src/python/ProdCommon/CMSConfigTools/__init__.py
|
82bc6f8d60278db24858642f1ab609d7070e05bd
|
[] |
no_license
|
giffels/PRODCOMMON
|
https://github.com/giffels/PRODCOMMON
|
680008e738e9fa88c9b373ce7f1e158e51567a5c
|
1b102fa6f087a19a87127a0c73ac39e15f2f56ef
|
refs/heads/master
| 2021-01-23T15:32:02.457845 | 2013-06-11T13:44:38 | 2013-06-11T13:44:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""
_CMSConfigTools_

Python API for dealing with Python format CMSSW PSet Config Files
"""
# CVS keyword-substituted version identifiers -- do not edit by hand.
__version__ = "$Revision: 1.1 $"
__revision__ = "$Id: __init__.py,v 1.1 2006/04/10 16:56:50 evansde Exp $"

# Nothing is exported from the package level; import submodules explicitly.
__all__ = []
|
UTF-8
|
Python
| false | false | 2,013 |
19,447,611,948,111 |
ce9efb028c41db60aee2c92963def179f34bd563
|
f3b178037412c0224dae1bf6c54b4ee6abf04aa9
|
/test/TestAddServiceCharge.py
|
43e5dc52cdad936569c068818d23daac26bc0c66
|
[] |
no_license
|
atabishm2f/payroll-in-python
|
https://github.com/atabishm2f/payroll-in-python
|
8ff9c6bc41b875055a1cc044f5c80f75f5e29081
|
4c92ed823bf7d30d5a0cc92c36c8844a86599131
|
refs/heads/master
| 2021-12-31T09:49:24.812017 | 2012-12-08T18:50:34 | 2012-12-08T18:50:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from memory_db import MemoryDB
from change_union_member import ChangeUnionMember
from add_salaried_employee import AddSalariedEmployee
from add_service_charge import AddServiceCharge
from datetime import date
def test_adding_a_service_charge_to_a_member():
    """A service charge posted today must be retrievable from the member's affiliation."""
    database = MemoryDB()
    employee_id = AddSalariedEmployee("Bill", "Home", 1000, database).execute()
    employee = database.get_employee(employee_id)
    # Make the employee a union member so an affiliation exists.
    ChangeUnionMember(employee_id, 10.0, database).execute()
    AddServiceCharge(employee.memberId, date.today(), 12.95, database).execute()
    charge = employee.affiliation.get_service_charge(date.today())
    assert charge.charge == 12.95
    assert charge.date == date.today()
|
UTF-8
|
Python
| false | false | 2,012 |
12,189,117,236,125 |
c16b73680e50e379011ce9762092be88f347de99
|
0d6aab7ff42a40836d0123b1e38b8b853626dc09
|
/tornadotoad/__init__.py
|
c0caf3039a68624de8f91f4c2c0c5ba5701b1568
|
[
"MIT"
] |
permissive
|
karpitsky/tornadotoad
|
https://github.com/karpitsky/tornadotoad
|
6a3d630c288830e8a9555ed327ee54d0aecc0ae0
|
bd302954a8d520730e866559693e4ab4ace0bb38
|
refs/heads/master
| 2021-01-17T10:46:11.211028 | 2013-05-23T08:57:15 | 2013-05-23T08:57:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from tornadotoad import my
from tornadotoad import mixin
from tornadotoad import api
def register(api_key=None, environment="production", log_403=False,
             log_404=False, log_405=False, use_ssl=False, host="hoptoadapp.com"):
    """Register a Hoptoad API key along with other run-time options.

    Needs to be called early, usually when creating Tornado Application.
    """
    # Copy every option onto the shared `my` settings module in one sweep.
    options = {
        "registered": True,
        "api_key": api_key,
        "environment": environment,
        "log_403": log_403,
        "log_404": log_404,
        "log_405": log_405,
        "use_ssl": use_ssl,
        "host": host,
    }
    for name, value in options.items():
        setattr(my, name, value)
|
UTF-8
|
Python
| false | false | 2,013 |
4,612,794,899,710 |
f9e76d4186fd5269c7ab85e24e6c8d68c80b6e57
|
68c589b47b4e8150f3f92b06cccb69c0cc862a30
|
/Computational Investing/Homework/hw1.py
|
c800776750b1761301b34c418eeca2fe3b95be62
|
[] |
no_license
|
Will-NU/AptanaStudio
|
https://github.com/Will-NU/AptanaStudio
|
fab32b0408faa260572e2ea72931e794c9569902
|
c7d00dd03d0ef6b0f6f54d511090956152f7ee17
|
refs/heads/master
| 2021-01-10T20:19:35.681062 | 2014-08-19T17:42:10 | 2014-08-19T17:42:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Computational Investing Part I
# Homework 1
# Peng (Will) Chen
# QSTK Imports
import QSTK.qstkutil.qsdateutil as du
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkutil.DataAccess as da
# Third Party Imports
import datetime as dt
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
# Given start date, end date, symbols and allocations,
# return volatility, averate daily return, sharpe ratio and cumulative return
def simulate(startdate, enddate, ls_symbols, ls_alloc):
    """Simulate a buy-and-hold portfolio over a date range.

    startdate, enddate -- datetime bounds of the simulation
    ls_symbols         -- list of ticker symbols
    ls_alloc           -- allocation fraction per symbol (expected to sum to 1)

    Returns (volatility, average daily return, Sharpe ratio, cumulative return).
    """
    # Timestamps for 16:00 (market close) of every NYSE trading day in range.
    dt_timeofday = dt.timedelta(hours = 16)
    ldt_timestamps = du.getNYSEdays(startdate, enddate, dt_timeofday)
    c_dataobj = da.DataAccess('Yahoo', cachestalltime = 0)
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']
    ldf_data = c_dataobj.get_data(ldt_timestamps, ls_symbols, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))
    # Plug data holes: forward-fill, then back-fill, then a constant 1.0.
    for s_key in ls_keys:
        d_data[s_key] = d_data[s_key].fillna(method='ffill')
        d_data[s_key] = d_data[s_key].fillna(method='bfill')
        d_data[s_key] = d_data[s_key].fillna(1.0)
    na_price = d_data['close'].values
    # Normalize each symbol by its first-day close.
    na_normalized_price = na_price / na_price[0,:]
    # Allocation-weighted sum across symbols -> normalized portfolio value/day.
    na_port_price = np.sum(ls_alloc * na_normalized_price, 1)
    na_daily_rets = na_port_price.copy()
    tsu.returnize0(na_daily_rets)  # convert prices to daily returns in place
    vol = np.std(na_daily_rets)
    daily_ret = np.average(na_daily_rets)
    # Annualized Sharpe ratio: 252 trading days, risk-free rate assumed 0.
    sharpe_ratio = np.sqrt(252) * daily_ret / vol
    cum_ret = na_port_price[-1]
    return vol, daily_ret, sharpe_ratio, cum_ret
# bruce force optimization of portfolio
def optimize(startdate, enddate, ls_symbols):
delta = 0.1 # increment by 0.1
num_of_symbols = len(ls_symbols)
# set optimal sharpe ratio to infinity
opt_sharpe = 0
trial_alloc = np.zeros(num_of_symbols)
while odo_increment(trial_alloc, num_of_symbols, delta):
if np.sum(trial_alloc) == 1.0:
vol, daily_ret, sharpe, cum_ret = simulate(startdate, enddate, ls_symbols, trial_alloc)
if sharpe > opt_sharpe:
opt_sharpe = sharpe
opt_alloc = trial_alloc.copy()
opt_vol = vol
opt_daily_ret = daily_ret
opt_cum_ret = cum_ret
print 'Start Date:',
print startdate.date()
print 'End Date:',
print enddate.date()
print 'Symbols:',
print ls_symbols
print 'Optimal Allocations: ',
print opt_alloc
print 'Sharpe Ratio: ' + repr(opt_sharpe)
print 'Volatility (stdev of daily return): ' + repr(opt_vol)
print 'Average Daily Return: ' + repr(opt_daily_ret)
print 'Cumulative Return: ' + repr(opt_cum_ret)
return opt_alloc, opt_vol, opt_daily_ret, opt_sharpe, opt_cum_ret
def odo_increment(odo, num_of_digits, delta):
    """Advance an odometer of float digits in place.

    The rightmost digit is increased by `delta`; digits that exceed 1.0
    reset to 0 and carry into the digit to their left.  Returns 1 while
    more combinations remain, 0 once the odometer has wrapped around.
    """
    position = num_of_digits - 1
    while position >= 0:
        odo[position] = odo[position] + delta
        if odo[position] <= 1.0:
            return 1
        # Overflowed this digit: reset and carry left.
        odo[position] = 0
        position -= 1
    return 0
# compare a portfolio with benchmarks
def benchmark_comparison(startdate, enddate, ls_symbols, ls_alloc, ls_benchmarks):
    """Plot a portfolio's normalized value against benchmark symbols.

    startdate, enddate -- datetime bounds
    ls_symbols         -- portfolio ticker symbols
    ls_alloc           -- allocation fraction per symbol
    ls_benchmarks      -- benchmark symbols to plot alongside

    Saves the chart to 'Bechmarking.pdf' in the working directory.
    """
    dt_timeofday = dt.timedelta(hours = 16)
    ldt_timestamps = du.getNYSEdays(startdate, enddate, dt_timeofday)
    c_dataobj = da.DataAccess('Yahoo', cachestalltime = 0)
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']
    # Fetch portfolio and benchmark price frames separately.
    ldf_data = c_dataobj.get_data(ldt_timestamps, ls_symbols, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))
    ldf_data_bm = c_dataobj.get_data(ldt_timestamps, ls_benchmarks, ls_keys)
    d_data_bm = dict(zip(ls_keys, ldf_data_bm))
    # Plug data holes in both datasets: ffill, then bfill, then constant.
    for s_key in ls_keys:
        d_data[s_key] = d_data[s_key].fillna(method='ffill')
        d_data[s_key] = d_data[s_key].fillna(method='bfill')
        d_data[s_key] = d_data[s_key].fillna(1.0)
        d_data_bm[s_key] = d_data_bm[s_key].fillna(method='ffill')
        d_data_bm[s_key] = d_data_bm[s_key].fillna(method='bfill')
        d_data_bm[s_key] = d_data_bm[s_key].fillna(1.0)
    na_price = d_data['close'].values
    na_normalized_price = na_price / na_price[0,:]
    # Collapse the portfolio to one normalized price series (column vector).
    na_port_price = np.sum(ls_alloc * na_normalized_price, 1)
    na_port_price = na_port_price.reshape(len(ldt_timestamps), 1)
    na_bm_price = d_data_bm['close'].values
    na_normalized_bm_price = na_bm_price / na_bm_price[0,:]
    # Benchmarks first, portfolio as the last column.
    na_all_price = np.append(na_normalized_bm_price, na_port_price, 1)
    ls_all_symbols = list(ls_benchmarks)
    ls_all_symbols.append('Portfolio')
    plt.clf()
    plt.plot(ldt_timestamps, na_all_price)
    plt.legend(ls_all_symbols)
    plt.ylabel('Normalized Price')
    plt.xlabel('Date')
    plt.savefig('Bechmarking.pdf', format = 'pdf')
def test(a):
    # Scratch helper demonstrating that list arguments are shared by
    # reference: the caller's list gains the appended element.
    a.append(5)
def main():
    """Driver: benchmark the fixed 2011 four-asset portfolio against $SPX and GOOG."""
    start_date = dt.datetime(2011, 1, 1)
    end_date = dt.datetime(2011, 12, 31)
    symbols = ['AAPL','GLD','GOOG','XOM']
    # Optimization step kept for reference:
    #opt_alloc, opt_vol, opt_daily_ret, opt_sharpe, opt_cum_ret = \
    #    optimize(start_date, end_date, symbols)
    allocations = [0.4, 0.4, 0, 0.2]
    benchmarks = ['$SPX','GOOG']
    benchmark_comparison(start_date, end_date, symbols, allocations, benchmarks)


if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
1,606,317,789,524 |
551f4728b9f5d7f6fe3e61129f9a431450a42359
|
da4c380cc2cd9e27de858024d891e5143be16999
|
/utilities.py
|
31d8d0c183ecb20ba909f4c453537bf252193330
|
[] |
no_license
|
jlehtoma/ztests
|
https://github.com/jlehtoma/ztests
|
cc4a461adfd3ef7dadfff2fa5344308a8fb483ef
|
fd7b5f4791e523829237d0ae66c8c2e532fa424b
|
refs/heads/master
| 2016-09-06T15:00:07.391496 | 2013-06-29T08:24:50 | 2013-06-29T08:24:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import os
import platform
from subprocess import Popen, PIPE
def check_output_name(filename):
    ''' Checks for the existance of a given filename and creates a new and unused one
    if the file already exists.

    A numeric suffix (_1, _2, ...) is inserted before the extension until an
    unused name is found.  Uses os.path.splitext, so filenames without an
    extension or with extra dots in the basename are handled correctly (the
    original split('.') crashed on the former and truncated the latter).

    @param filename String filename (abspath)
    @return filename String possibly altered filename (abspath)
    '''
    suffix = 1
    while os.path.exists(filename):
        dirname = os.path.dirname(filename)
        base, ext = os.path.splitext(os.path.basename(filename))
        # Strip the suffix added on the previous iteration before adding
        # the next one, so we get name_2 rather than name_1_2.
        previous = '_{0}'.format(suffix - 1)
        if suffix > 1 and base.endswith(previous):
            base = base[:-len(previous)]
        filename = os.path.join(dirname, '{0}_{1}{2}'.format(base, suffix, ext))
        suffix += 1
    return filename
def get_system_info():
    ''' Function to retrieve system related information.

    @return list of single-key dicts: report time, uname tuple, OS version
    '''
    sys_info = []
    sys_info.append({'Report time': datetime.datetime.now().isoformat()})
    sys_info.append({'Uname': platform.uname()})
    if platform.system() == 'Linux':
        try:
            version = platform.linux_distribution()
        except AttributeError:
            # Bug fix: platform.linux_distribution() was removed in
            # Python 3.8; fall back to the generic platform description.
            version = platform.platform()
        sys_info.append({'Version': version})
    else:
        sys_info.append({'Version': platform.win32_ver()})
    return sys_info
def get_zonation_info():
    ''' Function to retrieve Zonation version info.

    NOTE: Zonation must be in PATH.

    @return tuple Zonation version number components as strings
    '''
    # "zig3 -v" prints e.g. "...: 3.1.0" on its first line.
    process = Popen(['zig3', '-v'], stdout=PIPE)
    stdout = process.communicate()[0]
    first_line = stdout.split('\n')[0].strip()
    version_field = first_line.split(':')[1].strip()
    return tuple(version_field.split('.'))
def pad_header(msg, print_width):
    """Frame *msg* as a banner line padded with stars to print_width chars.

    The result starts with a newline followed by '** msg ***...'.
    """
    # 4 accounts for the two leading stars plus the two surrounding spaces.
    trailing_stars = '*' * (print_width - len(msg) - 4)
    return '\n** {0} {1}'.format(msg, trailing_stars)
|
UTF-8
|
Python
| false | false | 2,013 |
7,876,970,063,915 |
54a49dea7d8b3233266970585c638c5f056490d6
|
3bb94274045f563a20a12d74bc9405d83eccd862
|
/MainMenu.py
|
0e55145249783ef273570ae6fe57cd4d24d82e55
|
[] |
no_license
|
IsakUlstrup/PandaRunner
|
https://github.com/IsakUlstrup/PandaRunner
|
b0c90b02e072846a4d600ba1d4e1afccd3ece61b
|
7d8425f76bc73fbad6e54c59d4bfeff6a87ef19e
|
refs/heads/master
| 2019-02-03T01:16:39.776872 | 2013-02-11T11:33:42 | 2013-02-11T11:33:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from State import State
class MainMenu(State):
    """Main menu state"""
    def __init__(self, gameManager):
        # gameManager: shared game-manager object; forwarded to the base
        # State constructor and also kept directly on this instance.
        super(MainMenu, self).__init__(gameManager)
        # NOTE(review): State.__init__ may already store gameManager -- this
        # explicit reference could be redundant; confirm against State.
        self.gameManager = gameManager
|
UTF-8
|
Python
| false | false | 2,013 |
7,653,631,745,591 |
55dce8ea18a44615e11533288829c462fb58392f
|
569e08914df0598247290cf337d9d514c00dc4fb
|
/Models.py
|
dcce4b409a3021723efb87cf02b1f6eb16aeb8f7
|
[] |
no_license
|
Tasignotas/CSLP
|
https://github.com/Tasignotas/CSLP
|
dd37bec18f75b892fda67498ee466b8eb6f675bd
|
21c64c626c741f98d36c9906aac4b98facc85cba
|
refs/heads/master
| 2016-09-06T02:31:08.917881 | 2014-11-07T14:52:47 | 2014-11-07T14:52:47 | 13,334,678 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
This file contains class descriptions for all kinds of objects
used in the simulation to mimic the real world: stops, roads, passengers and etc.
'''
import random
class Passenger:
    """A passenger in the bus network, identified purely by destination."""

    def __init__(self, destStopID):
        # Stop ID where this passenger wants to get off.
        self.destStopID = destStopID

    def __eq__(self, another):
        # Two passengers are interchangeable when they share a destination.
        return another.destStopID == self.destStopID
class Bus:
    """A bus serving one route of the bus network."""

    def __init__(self, routeID, busNumber, capacity, location):
        self.routeID = routeID
        self.busNumber = busNumber      # index of the bus within its route
        self.capacity = capacity        # maximum passengers on board
        self.status = 'Queueing'        # new buses start queueing at a stop
        self.location = location        # stop ID the bus is at / departing from
        self.passengers = []            # passengers currently on board
        # Statistics accumulated over the simulation:
        self.numberOfStops = 0
        self.averagePassengersTravelling = 0.0

    def _state(self):
        # Tuple of every attribute that participates in equality.
        return (self.routeID, self.busNumber, self.capacity, self.status,
                self.location, self.passengers, self.numberOfStops,
                self.averagePassengersTravelling)

    def __eq__(self, another):
        return self._state() == another._state()
class Stop:
    ''' A class representing a bus stop in the bus network'''

    def __init__(self, stopID):
        self.stopID = stopID
        self.qOfBuses = []        # buses currently queueing at this stop
        self.passengers = []      # passengers waiting at this stop
        self.reachableStops = []  # stop IDs reachable from here without changing bus
        self.missedPassengers = 0
        # Attributes for average bus queueing time:
        self.totalQueueingTime = 0.0
        self.busQChangeTime = 0.0
        self.numberOfBusesQueued = 0

    def __eq__(self, another):
        # Bug fix: the original compared self.reachableStops with *itself*
        # (always True), so two stops with different reachable-stop lists
        # incorrectly compared equal.
        return ((self.stopID == another.stopID) and (self.qOfBuses == another.qOfBuses) and
                (self.passengers == another.passengers) and
                (self.reachableStops == another.reachableStops) and
                (self.missedPassengers == another.missedPassengers) and
                (self.totalQueueingTime == another.totalQueueingTime) and
                (self.busQChangeTime == another.busQChangeTime) and
                (self.numberOfBusesQueued == another.numberOfBusesQueued))

    def addReachableStops(self, reachableStops):
        ''' Method that adds new stops to the set of reachable stops
        (duplicates and this stop's own ID are skipped).'''
        for stop in reachableStops:
            if not (stop in self.reachableStops) and not (self.stopID == stop):
                self.reachableStops.append(stop)

    def addBus(self, bus):
        ''' Method that adds a bus to the stop's queue and counts it
        towards the queueing statistics.'''
        self.qOfBuses.append(bus)
        self.numberOfBusesQueued += 1
class Route:
    """One bus route: a cyclic sequence of stops served by a fleet of buses."""

    def __init__(self, stopSequence, routeID, capacity):
        self.routeID = routeID
        self.stopSequence = stopSequence  # ordered, cyclic list of stop IDs
        self.capacity = capacity          # capacity for buses created on this route
        self.buses = []
        self.missedPassengers = 0

    def __eq__(self, another):
        mine = (self.routeID, self.stopSequence, self.capacity,
                self.buses, self.missedPassengers)
        theirs = (another.routeID, another.stopSequence, another.capacity,
                  another.buses, another.missedPassengers)
        return mine == theirs

    def addBus(self, bus):
        """Attach an existing bus to this route."""
        self.buses.append(bus)

    def getNewBus(self):
        """Create (without attaching) a new bus, spread evenly over the stops."""
        start_stop = self.stopSequence[len(self.buses) % len(self.stopSequence)]
        return Bus(self.routeID, len(self.buses), self.capacity, start_stop)

    def getNextStop(self, currentStopID):
        """Return the stop that follows currentStopID on this cyclic route."""
        position = self.stopSequence.index(currentStopID)
        return self.stopSequence[(position + 1) % len(self.stopSequence)]
class Network:
''' A class representing the entire bus network'''
def __init__(self):
self.routes = {}
self.stops = {}
self.roads = {}
self.params = {}
#Uncomment the following line in order to make the simulation deterministic(ish)
#random.seed(0)
def __eq__(self, another):
return ((self.routes == another.routes) and (self.stops == another.stops) and (self.roads == another.roads))
def changeGeneralParams(self, paramDict):
''' Method that changes the given network parameters'''
for key in paramDict:
self.params[key] = paramDict[key]
def changeRoadParams(self, paramDict):
''' Method that changes the road params with those specified in the dictionary'''
self.roads = paramDict
def changeRouteParams(self, routeDict):
''' Method that changes the route parameters with those specified in the dictionary'''
for route in routeDict:
for x in range(len(self.routes[route['routeID']].buses), route['buses']):
self.routes[route['routeID']].getNewBus()
for bus in self.routes[route['routeID']].buses:
bus.capacity = route['capacity']
def finishTakingStatistics(self, stopTime):
''' This method goes through all stops and makes them finish counting the bus queueing statistics'''
for stop in self.stops.values():
self.calculateQueueingTime(stop, stopTime)
def addRoute(self, routeID, stopIDs, busCount, capacity):
''' This method adds a route with its buses and stops to the network'''
# Adding new stops to the network:
for i in stopIDs:
if not (i in self.stops.keys()):
self.stops[i] = Stop(i)
self.stops[i].addReachableStops(stopIDs)
# Adding new route:
if routeID in self.routes:
raise Exception('A route with a duplicate route id has been entered')
else:
self.routes[routeID] = Route(stopIDs, routeID, capacity)
# Adding buses to the route:
for i in range(0, busCount):
bus = self.routes[routeID].getNewBus()
self.routes[routeID].addBus(bus)
self.stops[bus.location].addBus(bus)
def addPassenger(self, time, outputEvent):
''' This method adds a passenger to the bus network'''
originID = self.stops.keys()[random.randint(0, len(self.stops)-1)]
destID = random.choice(self.stops[originID].reachableStops)
self.stops[originID].passengers.append(Passenger(destID))
if outputEvent:
print 'A new passenger enters at stop {0} with destination {1} at time {2}'.format(originID, destID, time)
def getThroughput(self, bus):
''' This method gets the throughput of the road segment
that the bus is currently on '''
originStopID = bus.location
destinationStopID = self.routes[bus.routeID].getNextStop(originStopID)
return self.roads[(originStopID, destinationStopID)]
def getPaxRTB(self):
''' This method gets all passengers that are in a stop, the bus
at the front of the bus queue suits them and is not full'''
paxRTB = []
for stop in self.stops.values():
if stop.qOfBuses:
firstBus = stop.qOfBuses[0]
if len(firstBus.passengers) < firstBus.capacity:
for pax in stop.passengers:
if (pax.destStopID in self.routes[firstBus.routeID].stopSequence):
paxRTB.append((pax, firstBus))
return paxRTB
def getPaxRTD(self):
''' This method gets all passengers that are in a bus, but would like
to get off the bus. Also, the bus is at a bus stop'''
paxRTD = []
for stop in self.stops.values():
for bus in stop.qOfBuses:
for pax in bus.passengers:
if (pax.destStopID == bus.location) and (bus.status == 'Queueing'):
paxRTD.append((pax, bus))
return paxRTD
def getBusesRTD(self):
''' This method gets all of the buses that are ready to depart from
the stop that they are located'''
busesRTD = []
for stop in self.stops.values():
for bus in stop.qOfBuses:
noneToDisembark = True
noneToBoard = True
# Checking if there is any passenger that wants to get onboard:
if len(bus.passengers) < bus.capacity:
for pax in stop.passengers:
if (pax.destStopID in self.routes[bus.routeID].stopSequence):
noneToBoard = False
break
# Checking if there is any passenger that wants to disembark:
if noneToBoard:
for pax in bus.passengers:
if (pax.destStopID == bus.location) and (bus.status == 'Queueing'):
noneToDisembark = False
break
if noneToBoard and noneToDisembark:
busesRTD.append((bus, stop))
return busesRTD
def getBusesRTA(self):
''' This method gets all of the buses that are ready to arrive at
the stop that they are located at'''
busesRTA = []
for route in self.routes.values():
for bus in route.buses:
if bus.status == 'Moving':
busesRTA.append((bus, route))
return busesRTA
def boardPassenger(self, time, outputEvent):
''' This method adds a random passenger to the bus
that he wishes to board'''
(rand_pax, rand_bus) = random.choice(self.getPaxRTB())
rand_bus.passengers.append(rand_pax)
self.stops[rand_bus.location].passengers.pop(self.stops[rand_bus.location].passengers.index(rand_pax))
if outputEvent:
print 'Passenger boards bus {0} at stop {1} with destination {2} at time {3}'.format(str(rand_bus.routeID) + '.' + str(rand_bus.busNumber), rand_bus.location, rand_pax.destStopID, time)
def disembarkPassenger(self, time, outputEvent):
''' This method disembarks a random passenger from the bus that he's in'''
(rand_pax, rand_bus) = random.choice(self.getPaxRTD())
rand_bus.passengers.pop(rand_bus.passengers.index(rand_pax))
if outputEvent:
print 'Passenger disembarks bus {0} at stop {1} at time {2}'.format(str(rand_bus.routeID) + '.' + str(rand_bus.busNumber), rand_bus.location, time)
def departBus(self, time, outputEvent):
''' This method departs a random bus that's ready to depart'''
(rand_bus, rand_stop) = random.choice(self.getBusesRTD())
busPositionInQ = rand_stop.qOfBuses.index(rand_bus)
self.calculateQueueingTime(rand_stop, time)
rand_stop.busQChangeTime = time
rand_stop.qOfBuses.pop(busPositionInQ)
rand_bus.status = 'Moving'
self.calculateMissedPassengers(rand_bus, rand_stop)
self.calculateTravellingPassengers(rand_bus)
if outputEvent:
print 'Bus {0} leaves stop {1} at time {2}'.format(str(rand_bus.routeID) + '.' + str(rand_bus.busNumber), rand_bus.location, time)
def arriveBus(self, time, outputEvent):
''' This method makes a random bus that's ready to arrive to arrive'''
(rand_bus, rand_route) = random.choice(self.getBusesRTA())
next_stop_id = rand_route.getNextStop(rand_bus.location)
rand_bus.location = next_stop_id
rand_bus.status = 'Queueing'
self.calculateQueueingTime(self.stops[next_stop_id], time)
self.stops[next_stop_id].qOfBuses.append(rand_bus)
self.stops[next_stop_id].busQChangeTime = time
self.stops[next_stop_id].numberOfBusesQueued += 1
if outputEvent:
print 'Bus {0} arrives at stop {1} at time {2}'.format(str(rand_bus.routeID) + '.' + str(rand_bus.busNumber), next_stop_id, time)
def calculateMissedPassengers(self, bus, stop):
''' This method calculates and adds the missed passengers to the stop and route'''
missed = 0
stopSequence = self.routes[bus.routeID].stopSequence
for pax in stop.passengers:
if (pax.destStopID in stopSequence):
missed += 1
stop.missedPassengers += missed
self.routes[bus.routeID].missedPassengers += missed
def calculateTravellingPassengers(self, bus):
''' This method calculates the average number of passengers traveling on a given bus'''
bus.averagePassengersTravelling = (bus.averagePassengersTravelling * bus.numberOfStops + len(bus.passengers))/(bus.numberOfStops + 1.0)
bus.numberOfStops += 1
def calculateQueueingTime(self, stop, time):
    """Accrue onto the stop's total the waiting time of every queued bus
    except the one at the head, since the queue last changed."""
    queued = len(stop.qOfBuses)
    if queued:
        waiting_buses = queued - 1
        stop.totalQueueingTime += (time - stop.busQChangeTime) * waiting_buses
|
UTF-8
|
Python
| false | false | 2,014 |
11,905,649,383,106 |
c50714ba7ef12d84ec356b00a503e127da7921b5
|
909263214c9bd6454e54a29893dc0a89f17f05d6
|
/September 4 Assignment.py
|
2a7481e265abdd81d3720f3e61191cd22f1356c9
|
[] |
no_license
|
sankumsek/assorted-python
|
https://github.com/sankumsek/assorted-python
|
91b046cfb584d32327d481d6556e00e2feddf288
|
a78206577aff1ce29a66591095e698e965ed3ce8
|
refs/heads/master
| 2020-05-09T14:52:25.013661 | 2014-09-24T23:00:36 | 2014-09-24T23:00:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# NOTE(review): this file is a pasted interactive-interpreter transcript
# (">>>" prompts mixed with plain statements), not a runnable script.
# The "#2.29" style comments label textbook exercise numbers.
#2.29
>>> import turtle
>>> s = turtle.Screen()
>>> t = turtle.Turtle()
>>> t.circle(100,90)
>>> t.circle(100,90)
>>> t.circle(100,90)
>>> t.circle(100,90)
t.right(135)
>>> t.circle(100,90)
>>> t.circle(100,90)
>>> t.circle(100,90)
>>> t.circle(100,90)
t.right(90)
>>> t.circle(100,90)
>>> t.circle(100,90)
>>> t.circle(100,90)
>>> t.circle(100,90)
#transition
t.clear()
t.right(45)
#2.30
t.dot(50)
t.penup()
t.backward(100)
t.pendown()
t.right(90)
t.circle(100)
t.left(90)
t.penup()
t.backward(100)
t.pendown()
t.right(90)
t.circle(200)
t.left(90)
t.penup()
t.backward(100)
t.pendown()
t.right(90)
t.circle(300)
t.left(90)
#transition
t.clear()
t.penup()
t.forward(600)
t.pendown()
#2.32
import turtle
>>> s = turtle.Screen()
>>> t = turtle.Turtle()
>>> t.circle(1)
>>> t.penup()
t.forward(200)
t.pendown()
t.circle(109)
|
UTF-8
|
Python
| false | false | 2,014 |
10,754,598,158,112 |
eb1d50b34c5f6c7e223233174f8479e294148623
|
0eb1f56434c29981fa7343b22b90dd7b15a123d9
|
/mmb.pyw
|
0fb315e09aea2ea7871d10a01b6fd6406174075c
|
[] |
no_license
|
ar7em/Multiple-Monitor-Background
|
https://github.com/ar7em/Multiple-Monitor-Background
|
850683ed8217ceaab906b522eeeaa2c93df0a75f
|
0bedb1e1a1edb09075f0356419cab031870f4af5
|
refs/heads/master
| 2021-01-23T11:56:09.380799 | 2012-10-17T13:50:54 | 2012-10-17T13:50:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
####################################################################
## This script is written for educational purposes by Molokov Artem,
## display icon kindly provided by Rimshotdesign:
## http://rimshotdesign.com (see icon/readme.txt for more info)
## Feel free to use any and all parts of this program (except icon).
####################################################################
import os.path
import tempfile
import pythoncom
from win32com.shell import shell, shellcon
from PyQt4 import QtCore, QtGui
import qrc_resources
class Window(QtGui.QMainWindow):
    """Main window for the multi-monitor wallpaper tool.

    Lets the user either stretch one image across every attached display or
    assign a separate image per display, composes the final bitmap, and
    installs it as the Windows wallpaper through IActiveDesktop.
    """

    def __init__(self):
        super(Window, self).__init__()
        self.setWindowTitle("Multiple Monitors Background")
        self.setWindowIcon(QtGui.QIcon(":/display.png"))
        # Central widget holding everything.
        grid = QtGui.QWidget()
        self.setCentralWidget(grid)
        layout = QtGui.QGridLayout()
        self.detectDisplays()
        self.createModeGroupBox()
        self.createSingleGroupBox()
        self.createMultipleGroupBox()
        layout.addWidget(self.modeGroupBox, 0, 0)
        # Both mode panes share cell (1, 0); setMode() toggles visibility.
        layout.addWidget(self.singleBox, 1, 0)
        layout.addWidget(self.multipleBox, 1, 0)
        applyButton = QtGui.QPushButton("Apply")
        applyButton.setFixedSize(100, applyButton.sizeHint().height())
        self.connect(applyButton, QtCore.SIGNAL("clicked()"),
                     self.applyBackground)
        layout.addWidget(applyButton, 2, 0)
        layout.setRowStretch(4, 1)
        layout.setAlignment(applyButton, QtCore.Qt.AlignHCenter)
        grid.setLayout(layout)
        self.adjustSize()
        self.setMode(False)  # start in per-display mode

    def detectDisplays(self):
        """Record (width, height) of every attached monitor in
        self.resolutions, in screen-index order."""
        self.resolutions = []
        numOfScreens = app.desktop().screenCount()
        for screen in range(numOfScreens):
            screenSize = app.desktop().screenGeometry(screen)
            self.resolutions.append((screenSize.width(), screenSize.height()))

    def setMode(self, singleMode):
        """Show the single-wallpaper pane when singleMode is True,
        otherwise show the per-display pane."""
        self.singleBox.setVisible(singleMode)
        self.multipleBox.setVisible(not singleMode)
        self.singleMode = singleMode

    def createModeGroupBox(self):
        """Build the radio-button group that picks the wallpaper mode."""
        self.modeGroupBox = QtGui.QGroupBox("Background mode")
        layout = QtGui.QVBoxLayout()
        self.modeGroupBox.setSizePolicy(QtGui.QSizePolicy.Expanding,
                                        QtGui.QSizePolicy.Expanding)
        multipleRButton = QtGui.QRadioButton("Set separate wallpaper "
                                             "for each display")
        multipleRButton.setChecked(True)
        self.connect(multipleRButton, QtCore.SIGNAL("toggled(bool)"),
                     lambda singleMode: self.setMode(not singleMode))
        singleRButton = QtGui.QRadioButton("Stretch single wallpaper "
                                           "across all displays")
        self.connect(singleRButton, QtCore.SIGNAL("toggled(bool)"),
                     self.setMode)
        layout.addWidget(multipleRButton, 0)
        layout.addWidget(singleRButton, 1)
        self.modeGroupBox.setLayout(layout)

    def createSingleGroupBox(self):
        """Build the single-wallpaper pane: a preview image, combo boxes
        describing the left-to-right display order, and a file picker."""
        self.singleBox = QtGui.QGroupBox("Single wallpaper")
        layout = QtGui.QGridLayout()
        displays = QtGui.QWidget()
        displaysLayout = QtGui.QHBoxLayout()
        # Preview image scaled to the combined desktop's aspect ratio.
        screenImage = QtGui.QLabel()
        image = QtGui.QPixmap(":/display.png")
        self.overallWidth = sum([width for (width, height) in self.resolutions])
        self.overallHeight = max([height for (width, height) in self.resolutions])
        # NOTE(review): Python 2 integer division -- presumably intentional
        # to obtain whole pixel sizes; confirm before porting to Python 3.
        heightStretch = self.overallHeight / image.size().height()
        screenImage.setPixmap(image.scaled(self.overallWidth / heightStretch,
                                           image.rect().height()))
        self.displayOrder = []
        displayList = ["{0} ({1}x{2}) ".format(i + 1, width, height)
                       for i, (width, height)
                       in zip(range(len(self.resolutions)), self.resolutions)]
        # One combo box per display lets the user describe the physical
        # left-to-right arrangement of the monitors.
        for i, screen in zip(range(len(self.resolutions)), self.resolutions):
            j = i * 2
            displayLabel = QtGui.QLabel('Screens from left to right:'
                                        if i == 0 else ', ')
            displayBox = QtGui.QComboBox()
            displayBox.addItems(displayList)
            displayBox.setCurrentIndex(i)
            self.connect(displayBox,
                         QtCore.SIGNAL("highlighted(int)"),
                         self.prepareToSwapDisplays)
            self.connect(displayBox,
                         QtCore.SIGNAL("currentIndexChanged(int)"),
                         self.swapDisplays)
            self.displayOrder.append(displayBox)
            displaysLayout.addWidget(displayLabel, j)
            displaysLayout.addWidget(displayBox, j + 1)
        overallLabel = QtGui.QLabel('. Overall resolution: '
                                    '{0}x{1}.'.format(self.overallWidth, self.overallHeight))
        displaysLayout.addWidget(overallLabel, j)
        displays.setLayout(displaysLayout)
        layout.addWidget(screenImage, 0, 0, 1, 3)
        layout.addWidget(displays, 1, 0, 1, 3)
        layout.setAlignment(screenImage, QtCore.Qt.AlignHCenter)
        layout.setAlignment(displays, QtCore.Qt.AlignHCenter)
        openFileLabel = QtGui.QLabel("Background image:")
        openFileButton = QtGui.QPushButton("Browse...")
        self.singleBackgroundPath = QtGui.QLineEdit()
        setOpenFile = self.createOpenFileFunction(self.singleBackgroundPath)
        openFileButton.clicked.connect(setOpenFile)
        layout.addWidget(openFileLabel, 2, 0)
        layout.addWidget(self.singleBackgroundPath, 2, 1)
        layout.addWidget(openFileButton, 2, 2)
        self.singleBox.setLayout(layout)

    def prepareToSwapDisplays(self, num):
        '''Find the combobox that might swap it's value with active combobox'''
        for swapDisplay in self.displayOrder:
            if swapDisplay.currentIndex() == num:
                self.swap = swapDisplay
                return

    def swapDisplays(self, num):
        """After a combo box changes to `num`, assign the now-missing index
        to the remembered partner box so the ordering stays a permutation."""
        if self.swap:
            for swapDisplay in self.displayOrder:
                if (swapDisplay.currentIndex() == num and
                        swapDisplay == self.swap):
                    # Clear first so the recursive currentIndexChanged
                    # signal triggered below becomes a no-op.
                    self.swap = None
                    for i in range(len(self.displayOrder)):
                        flag = False
                        for missingDisplay in self.displayOrder:
                            if missingDisplay.currentIndex() == i:
                                flag = True
                        if not flag:
                            swapDisplay.setCurrentIndex(i)
                            return

    def createMultipleGroupBox(self):
        """Build the per-display pane: one preview, label and file picker
        for every attached monitor."""
        self.multipleBox = QtGui.QGroupBox("Separate wallpapers")
        layout = QtGui.QGridLayout()
        displays = QtGui.QWidget()
        displaysLayout = QtGui.QGridLayout()
        openFileLabel = QtGui.QLabel("Background images:")
        self.separateWallpapers = []
        displaysLayout.addWidget(openFileLabel, 2, 0)
        for i, screen in zip(range(len(self.resolutions)), self.resolutions):
            screenLabel = QtGui.QLabel("Screen {0} ({1}x{2})".format(i + 1,
                                                                     screen[0],
                                                                     screen[1]))
            screenImage = QtGui.QLabel()
            # Scale the stock monitor image to this screen's aspect ratio.
            image = QtGui.QPixmap(":/display.png")
            stretchRatio = screen[1] / image.size().height()
            screenImage.setPixmap(image.scaled(screen[0] / stretchRatio,
                                               image.size().height()))
            j = i * 3 + 1
            displaysLayout.addWidget(screenImage, 0, j, 1, 2)
            displaysLayout.addWidget(screenLabel, 1, j, 1, 2)
            displaysLayout.setAlignment(screenImage, QtCore.Qt.AlignHCenter)
            if i < (len(self.resolutions) - 1):
                # Spacer column between neighbouring screens.
                displaysLayout.setColumnMinimumWidth(j + 2, 50)
            displaysLayout.setAlignment(screenLabel, QtCore.Qt.AlignHCenter)
            backgroundPath = QtGui.QLineEdit()
            openFileButton = QtGui.QPushButton("...")
            openFileButton.setFixedSize(30, backgroundPath.sizeHint().height())
            setOpenFile = self.createOpenFileFunction(backgroundPath)
            openFileButton.clicked.connect(setOpenFile)
            displaysLayout.addWidget(backgroundPath, 2, j)
            displaysLayout.addWidget(openFileButton, 2, j + 1)
            self.separateWallpapers.append(backgroundPath)
        displays.setLayout(displaysLayout)
        layout.addWidget(displays, 0, 0)
        self.multipleBox.setLayout(layout)

    def createOpenFileFunction(self, lineEdit):
        """Return a slot that opens a file dialog and stores the selected
        image path into `lineEdit`."""
        def setOpenFile():
            fileName = QtGui.QFileDialog.getOpenFileName(self,
                    "Select background image",
                    lineEdit.text(),
                    "Images (*.jpeg *.jpg *.png *bmp);;All Files (*)")
            if fileName:
                lineEdit.setText(fileName)
        return setOpenFile

    def applyBackground(self):
        """Compose the wallpaper for the current mode, save it to a temp
        file, and install it through the IActiveDesktop COM interface."""
        if self.singleMode:
            background = self.getSingleBackground()
        else:
            background = self.getMultipleBackgrounds()
        # BUG FIX: the original concatenated gettempdir() and the file name
        # without a separator (producing e.g. "C:\Tempbackground").
        backgroundPath = os.path.join(tempfile.gettempdir(), 'background')
        background.save(backgroundPath, "JPG", 100)
        iad = pythoncom.CoCreateInstance(shell.CLSID_ActiveDesktop, None,
                pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IActiveDesktop)
        iad.SetWallpaper(backgroundPath, 0)
        iad.ApplyChanges(shellcon.AD_APPLY_ALL)

    def getSingleBackground(self):
        """Load the chosen image and scale it to span all screens."""
        imagePath = os.path.normpath(self.singleBackgroundPath.text())
        image = QtGui.QPixmap(imagePath)
        # NOTE(review): Python 2 integer division; a source image wider than
        # the desktop yields stretchFactor 0 -- confirm intended behaviour.
        stretchFactor = self.overallWidth / image.size().width()
        newSize = QtCore.QSize(self.overallWidth,
                               self.overallHeight * stretchFactor)
        background = QtGui.QPixmap(imagePath).scaled(newSize)
        return background

    def getMultipleBackgrounds(self):
        """Paint each per-display image side by side into one QImage
        covering the combined desktop and return it."""
        newSize = QtCore.QSize(self.overallWidth, self.overallHeight)
        background = QtGui.QImage(newSize, QtGui.QImage.Format_ARGB32_Premultiplied)
        painter = QtGui.QPainter(background)
        xPos = 0.0
        yPos = 0.0
        for i in range(len(self.resolutions)):
            image = QtGui.QImage(self.separateWallpapers[i].text())
            sourceSize = QtCore.QRectF(0.0, 0.0,
                                       image.size().width(),
                                       image.size().height())
            targetSize = QtCore.QRectF(xPos,
                                       yPos,
                                       self.resolutions[i][0],
                                       self.resolutions[i][1])
            painter.drawImage(targetSize, image, sourceSize)
            xPos += self.resolutions[i][0]
        return background
if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    # get stylesheet
    # NOTE(review): relative path -- loading fails unless the program is
    # launched from the directory containing stylesheet.qss.
    with open('stylesheet.qss', 'r') as content_file:
        styleSheet = content_file.read()
    app.setStyleSheet(styleSheet)
    window = Window()
    window.show()
    sys.exit(app.exec_())
|
UTF-8
|
Python
| false | false | 2,012 |
16,415,365,030,627 |
c9c493b4dd9da30e3531334fbee21915966052ea
|
93d5b8f8668ee7cb7f84103ad0ac7bcd38977846
|
/project2/kNearestNeighbor.py
|
365fb2ee06a33e1986ce284226543eebc0e562c2
|
[] |
no_license
|
freeznet/machine_learning_project2
|
https://github.com/freeznet/machine_learning_project2
|
6f0e2950f4c2df1a316a316a951c99e7739c7d30
|
98437f52f4039c64e23a61bcff30d2df0bfafd6e
|
refs/heads/master
| 2020-05-29T15:18:50.598534 | 2013-11-27T10:11:40 | 2013-11-27T10:11:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Rui'
import sys, operator
from PrincipalComponentAnalysis import *
# Pick the highest-resolution wall-clock timer for the platform
# (time.clock on Windows, time.time elsewhere); `time` presumably comes
# from the star-import above -- TODO confirm.
if sys.platform == 'win32':
    default_timer = time.clock
else:
    default_timer = time.time
import random
# Silence numpy floating-point warnings (e.g. overflow in distance sums).
np.seterr(all='ignore')
from dataloader import *
class KNN:
    """k-nearest-neighbour classifier over numpy arrays.

    Distances are Euclidean; when labels tie in the vote, the label
    encountered first among the k nearest samples wins.
    """
    def __init__(self, xTrain, yTrain, xTest, yTest, pca=False):
        self.set_data(xTrain, yTrain, xTest, yTest)
        self.PCApreform = pca  # whether PCA preprocessing was requested

    def set_data(self, x_train, y_train, x_test, y_test):
        """Store the train/test arrays and cache their sample counts."""
        self.x_train = x_train
        self.y_train = y_train
        self.x_test = x_test
        self.y_test = y_test
        self.n = y_train.shape[0]     # num of samples / training
        self.testn = y_test.shape[0]  # num of samples / testing

    def predict(self, X, x, y, k):
        """Predict a label for each row of X by majority vote among the k
        rows of x closest in Euclidean distance.

        X -- query samples, shape (q, n_features)
        x -- reference samples, shape (m, n_features)
        y -- reference labels, length m
        k -- number of neighbours to vote
        Returns a (q, 1) array of predicted labels.
        """
        m = x.shape[0]  # number of reference samples
        prediction = np.zeros((X.shape[0], 1))
        for i in range(0, X.shape[0]):
            # Tile the i-th query across all m reference rows.
            query_tiled = (np.reshape(X[i, :], (X.shape[1], 1)).dot(np.ones((1, m)))).T
            distances = np.sum((x - query_tiled) ** 2, axis=1) ** 0.5
            nearest = np.argsort(distances, axis=0)[:k]  # indices of k nearest
            nearest_labels = y[nearest]
            # Majority vote. BUG FIX: the vote counter was named `max`,
            # shadowing the builtin -- renamed to best_votes.
            best_votes = 0
            predicted_label = None
            for label in np.unique(nearest_labels):
                votes = np.sum(nearest_labels == label)
                if votes > best_votes:
                    best_votes = votes
                    predicted_label = label
            prediction[i] = predicted_label
        return prediction

    def training_reconstruction(self, predict):
        """Accuracy (%) of `predict` against the stored training labels.

        NOTE(review): `predict` must be shaped like y_train; an (n,1) vs
        (n,) mismatch would broadcast to (n,n) -- confirm caller shapes.
        """
        return (predict == self.y_train).mean()*100.0

    def test_predictions(self, predict):
        """Accuracy (%) of `predict` against the stored test labels."""
        return (predict == self.y_test).mean()*100.0

    def analysis(self, predict, labels):
        """Accuracy (%) of `predict` against the supplied `labels`."""
        return (predict == labels).mean()*100.0
def getRandomData(xdata, ydata, train_size):
    """Randomly split (xdata, ydata) into a training portion holding the
    fraction `train_size` of the rows and a test portion with the rest."""
    total = xdata.shape[0]
    indices = list(range(0, total))
    random.shuffle(indices)  # randomise the row order in place
    cut = int(total * train_size)
    train_idx = indices[:cut]
    test_idx = indices[cut:total]
    return (xdata[train_idx], ydata[train_idx],
            xdata[test_idx], ydata[test_idx])
if __name__ == "__main__":
dataset = Dataset()
results = {}
krange = [x for x in range(1,11)] # 1 .. 10
for one in dataset.database:
kset = {} # store cross-validation result to choose best K
result = {}
print 'Current dataset:',one
initRuntime = default_timer()
currentTime = default_timer()
data = dataset.load(one,0)
print ' Load Testing data done. (%0.3fs)'%(default_timer() - currentTime)
result['t_load_test'] = default_timer() - currentTime
XTest = data.features
YTest = data.labels
currentTime = default_timer()
data = dataset.load(one,1)
print ' Load Training data done. (%0.3fs)'%(default_timer() - currentTime)
result['t_load_train'] = default_timer() - currentTime
XTrain = data.features
YTrain = data.labels
currentTime = default_timer()
print ' Create KNN classifier...'
knn = KNN(xTrain=XTrain, yTrain=YTrain, xTest=XTest, yTest=YTest)
print ' Start training ...'
currentTime = default_timer()
for k in krange:
for i in range(0,10): # preform 10 times cross-validation
randomTrainX, randomTrainY, randomTestX, randomTestY = getRandomData(XTrain, YTrain, 0.8)
trainPredict = knn.predict(randomTestX, randomTrainX, randomTrainY, k) # predict for test data with training data model
trainResult = knn.analysis(trainPredict, randomTestY)
kset[k] = kset.get(k, 0.0) + trainResult
kset[k] = kset.get(k, 0.0) / 10.0 # -> get even value
if kset.get(k, 0.0) >= 100.0: # reach maximum, no need anymore cross-validation
break
selectK = max(kset.iteritems(), key=operator.itemgetter(1))[0]
result['t_select_k'] = default_timer() - currentTime
print ' Get best k: %d with accuracy %f. (%0.3fs)'%(selectK, kset[selectK], default_timer() - currentTime)
result['select_k'] = selectK
currentTime = default_timer()
testPredict = knn.predict(XTest, XTrain, YTrain, selectK)
result['t_test'] = default_timer() - currentTime
print ' Test data predict done. (%0.3fs)'%(default_timer() - currentTime)
currentTime = default_timer()
TrainPredict = knn.predict(XTrain, XTrain, YTrain, selectK)
result['t_train'] = default_timer() - currentTime
print ' Train data predict done. (%0.3fs)'%(default_timer() - currentTime)
p_train = knn.training_reconstruction(TrainPredict)
p_test = knn.test_predictions(testPredict)
print ' %d-NN done. (%0.3fs)'%(selectK, default_timer() - currentTime)
print ' [*] Accuracy on training set: %g' % p_train
print ' [*] Accuracy on test set: %g' % p_test
result['p_train'] = p_train
result['p_test'] = p_test
### PCA
currentTime = default_timer()
print ' Estimating best parameter for PCA...'
pca = PCA(XTrain) # using training data set
bestPCA = pca.dim
result['select_dim'] = bestPCA
result['t_select_dim'] = default_timer() - currentTime
print ' Dim reduce from %d to %d.(%0.3fs)'%(XTrain.shape[1], bestPCA, default_timer() - currentTime)
XDimReducedTrain = pca.currentFeature
XDimReducedTest = pca.DimReduce(XTest, bestPCA)
currentTime = default_timer()
currentTime = default_timer()
testPCAPredict = knn.predict(XDimReducedTest, XDimReducedTrain, YTrain, selectK)
result['t_pca_test'] = default_timer() - currentTime
print ' Test data predict done. (%0.3fs)'%(default_timer() - currentTime)
currentTime = default_timer()
TrainPCAPredict = knn.predict(XDimReducedTrain, XDimReducedTrain, YTrain, selectK)
result['t_pca_train'] = default_timer() - currentTime
print ' Train data predict done. (%0.3fs)'%(default_timer() - currentTime)
p_train = knn.training_reconstruction(TrainPCAPredict)
p_test = knn.test_predictions(testPCAPredict)
result['p_pca_train'] = p_train
result['p_pca_test'] = p_test
print ' %d-NN with PCA (dim reduce to %d) done. (%0.3fs)'%(selectK, bestPCA, default_timer() - currentTime)
print ' [*] Accuracy on training set: %g' % p_train
print ' [*] Accuracy on test set: %g' % p_test
result['t_overall'] = default_timer() - initRuntime
print ' Total runtime: %0.3fs'%(default_timer() - initRuntime)
print
results[one] = result
print 'Dumping result data...'
f = file('kNearestNeighbor.sav', 'wb')
parameters = (results)
cPickle.dump(parameters, f, protocol=cPickle.HIGHEST_PROTOCOL)
f.close()
print 'done.'
|
UTF-8
|
Python
| false | false | 2,013 |
7,301,444,427,900 |
a32c0f099844cf9612b2dd32be07035a59c4cd79
|
1a406cd2c2e974aefc19f8ea4f77b9fcbd733c74
|
/apps/sso/urls.py
|
4ca95f445b254290811ca1c050ec3961de2a8757
|
[] |
no_license
|
ivansugi/django-single-sign-on--soekarno-
|
https://github.com/ivansugi/django-single-sign-on--soekarno-
|
09eb00d383c32ac61dd3ddf536ffe4d77e13547d
|
9b6763f51dd556b06e335ce70c63ec0c650f530a
|
refs/heads/master
| 2021-01-22T03:50:05.967612 | 2011-02-07T08:25:56 | 2011-02-07T08:25:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
from openid_consumer.views import complete, signout
from django.views.generic.simple import direct_to_template
#Login Views
# URL routes for the single-sign-on app. Judging by the pattern names:
# account lifecycle (register/login/logout), auth-key generation, a
# user-id/token check endpoint, and a per-client hand-off -- confirm
# semantics against sso.views.
urlpatterns = patterns('',
    url(r'^connect/$', 'sso.views.connect'),
    url(r'^login/$', 'sso.views.login_server'),
    url(r'^register/$', 'sso.views.register'),
    url(r'^generate/$', 'sso.generate.key'),
    url(r'^logout/$', 'sso.views.logout'),
    url(r'^check/(?P<user_id>\d+)/(?P<token>\w+)/$', 'sso.views.check'),
    url(r'^send/(?P<client>\w+)/$', 'sso.views.send'),
)
|
UTF-8
|
Python
| false | false | 2,011 |
11,295,763,994,275 |
44ff30caa860c32d359813a780ae1ed8ca06b514
|
4f4dd3af2e13ef549cbd7e9af77da66a1e58d607
|
/src/tgcm/contrib/freedesktopnet/modemmanager/utils/tool/commands/Info.py
|
83dade702c1b22317112d694ce7213fce914e84a
|
[
"GPL-2.0-only"
] |
non_permissive
|
tgcmteam/tgcmlinux
|
https://github.com/tgcmteam/tgcmlinux
|
b03f0773db52981472d634777c24baf12d10ea13
|
2690bba9d4d81921ad0c74f305b954f2a5769c9b
|
refs/heads/master
| 2020-05-18T10:05:29.237724 | 2014-05-13T13:30:01 | 2014-05-13T13:30:01 | 8,928,918 | 1 | 1 | null | false | 2014-05-13T13:30:01 | 2013-03-21T13:07:59 | 2014-05-13T13:30:01 | 2014-05-13T13:30:01 | 11,650 | 4 | 3 | 1 | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Authors : Roberto Majadas <[email protected]>
#
# Copyright (c) 2010, Telefonica Móviles España S.A.U.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
import os
import sys
from freedesktopnet.modemmanager.utils.tool import Command, make_option
from freedesktopnet.modemmanager.modemmanager import ModemManager
class Info(Command):
    """`info` subcommand: lists the modems ModemManager currently knows
    about, with per-modem details when --verbose is given."""

    def config_cmd(self):
        # Command metadata consumed by the surrounding tool framework.
        self.name = "info"
        self.description = "Show information about MManager"
        self.description_full = "Show information about MManager"
        self.options_list = [
            make_option("-v", "--verbose",
                action="store_true", dest="verbose", default=False)
        ]

    def run_cmd(self):
        print " "
        print " Modems Availables"
        print " -----------------"
        mm = ModemManager()
        try:
            modems = mm.EnumerateDevices()
        except:
            # NOTE(review): bare except swallows the real (likely DBus)
            # error; catching a specific exception would aid debugging.
            print "Are you sure that MManager is running ??"
            sys.exit(0)
        if len(modems) == 0:
            print " No modems availables"
        if self.options.verbose == False:
            for m in modems :
                print " * %s" % m
        else:
            for m in modems :
                print " * %s" % m
                # Static properties read from the modem object's mapping.
                for prop in ["Device", "Driver", "Type", "UnlockRequired", "UnLockRetries"]:
                    print " - %s -> %s" % (prop, m[prop])
                # SIM identifiers require calls on the gsm.card interface.
                print " - Imsi -> %s" % m.iface["gsm.card"].GetImsi()
                print " - Imei -> %s" % m.iface["gsm.card"].GetImei()
        print ""
        print ""
|
UTF-8
|
Python
| false | false | 2,014 |
15,427,522,540,326 |
e0af99337520097fc2b4d724a182c83118762901
|
99647da58045c86f73a5ac9a2adf758cd5aa2712
|
/docopt.c.py
|
af6cf56574c7d84eeb67dda33c070ca0d299f550
|
[
"MIT"
] |
permissive
|
kblomqvist/docopt.c
|
https://github.com/kblomqvist/docopt.c
|
d2c11ee1321867c666c9844d15dc75a30a0d79ea
|
e6782ff2eeea60c9257ff4f17992be1a8d6fcf8a
|
refs/heads/master
| 2020-12-01T01:04:33.637477 | 2013-05-25T16:50:54 | 2013-05-25T16:50:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
r"""#ifdef __cplusplus
#include <cstdio>
#include <cstdlib>
#include <cstring>
#else
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <string.h>
#endif
typedef enum {Option, Argument, Command, None} ElementType;
typedef struct {
ElementType type;
/*
* Should probably be union for storage efficiency, but during development
* it's easier to work without it.
*/
/* union { */
struct {
const char *oshort;
const char *olong;
bool argcount;
bool value;
char *argument;
} option;
struct {
char *name;
bool repeating;
char *value;
char **array;
} argument;
struct {
char *name;
bool value;
} command;
/* } data; */
} Element;
/*
* Tokens
*/
typedef struct Tokens Tokens;
struct Tokens {
int i;
int argc;
char **argv;
char *current;
};
Tokens tokens_create(int argc, char **argv)
{
Tokens ts = { 0, argc, argv, argv[0] };
return ts;
}
Tokens* tokens_move(Tokens *ts)
{
if (ts->i < ts->argc) {
ts->current = ts->argv[++ts->i];
}
if (ts->i == ts->argc) {
ts->current = NULL;
}
return ts;
}
/*
* parse_shorts
*/
Tokens* parse_shorts(Tokens *ts, Element options[]) {
char *raw = &ts->current[1];
tokens_move(ts);
while (raw[0] != '\0') {
int i = 0;
Element *o = &options[i];
while (o->type != None) {
if (o->type == Option && o->option.oshort != NULL
&& o->option.oshort[1] == raw[0]) {
break;
}
o = &options[++i];
}
if (o->type == None) { // TODO -%s is specified ambiguously %d times
printf("-%c is not recognized", raw[0]);
exit(1);
}
raw++;
if (!o->option.argcount) {
o->option.value = true;
} else {
if (raw[0] == '\0') {
if (ts->current == NULL) {
printf("%s requires argument", o->option.oshort);
exit(1);
}
raw = ts->current;
tokens_move(ts);
}
o->option.argument = raw;
break;
}
}
return ts;
}
/*
* parse_long
*/
Tokens* parse_long(Tokens *ts, Element options[]) {
char *eq = strchr(ts->current, '=');
char *argument = NULL;
if (eq != NULL) {
*eq = '\0'; // "--option=value\0" => "--option\0value\0"
argument = eq + 1;
}
int i = 0;
Element *o = &options[i];
while (o->type != None) {
if (o->type == Option &&
strncmp(ts->current, o->option.olong, strlen(ts->current)) == 0) {
break;
}
o = &options[++i];
}
if (o->type == None) { // TODO '%s is not a unique prefix
printf("%s is not recognized", ts->current);
exit(1);
}
tokens_move(ts);
if (o->option.argcount) {
if (argument == NULL) {
if (ts->current == NULL) {
printf("%s requires argument", o->option.olong);
exit(1);
}
o->option.argument = ts->current;
tokens_move(ts);
} else {
o->option.argument = argument;
}
} else {
if (argument != NULL) {
printf("%s must not have an argument", o->option.olong);
exit(1);
}
o->option.value = true;
}
return ts;
}
/*
* parse_args
*/
Tokens* parse_args(Tokens *ts, Element options[]) {
while (ts->current != NULL) {
if (strcmp(ts->current, "--") == 0) {
// not implemented yet
return ts;
//return parsed + [Argument(None, v) for v in tokens]
} else if (ts->current[0] == '-' && ts->current[1] == '-') {
parse_long(ts, options);
} else if (ts->current[0] == '-' ) {
parse_shorts(ts, options);
} else {
// not implemented yet
tokens_move(ts); // just skip for now
//parsed.append(Argument(None, tokens.move()))
}
}
return ts;
}
/* This is how the generated struct may look like */
typedef struct {
/* flag options */
<<<flag_options>>>;
/* options with arguments */
<<<options_with_arguments>>>;
/* special */
const char *usage_pattern;
const char *help_message;
} DocoptArgs;
const char help_message[] =
<<<help_message>>>;
const char usage_pattern[] =
<<<usage_pattern>>>;
DocoptArgs docopt(int argc, char *argv[], bool help, const char *version) {
DocoptArgs args = {
<<<defaults>>>,
usage_pattern, help_message
};
Element options[] = {
<<<options>>>,
{None}
};
Tokens ts = tokens_create(argc, argv);
parse_args(&ts, options);
int i = 0;
Element *o = &options[i];
while (o->type != None) {
if (help && o->option.value
&& strcmp(o->option.olong, "--help") == 0) {
printf("%s", args.help_message);
exit(0);
} else if (version && o->option.value
&& strcmp(o->option.olong, "--version") == 0) {
printf("%s", version);
exit(0);
}<<<if_flag>>> <<<if_not_flag>>>
o = &options[++i];
}
return args;
}
"""
import sys
import re
import docopt
def to_c(s):
    """Render a Python value as a C literal: strings are quoted and
    escaped, numbers pass through, booleans become 1/0, None becomes NULL.

    Raises ValueError for any other type.
    """
    if type(s) is str:
        escaped = s.replace('\\', r'\\')
        escaped = escaped.replace('"', r'\"')
        # Newlines split the C literal across adjacent quoted lines.
        escaped = escaped.replace('\n', '\\n"\n"')
        return '"%s"' % escaped
    if type(s) in [int, long, float]:
        return str(s)
    if s is True:
        return '1'
    if s is False:
        return '0'
    if s is None:
        return 'NULL'
    raise ValueError("can't convert to c type: %r" % s)
def c_option(o):
    """Emit the C initializer for one Element of type Option."""
    fields = (o.short, o.long, o.argcount, False, None)
    rendered = ', '.join(to_c(v) for v in fields)
    return '{Option, {%s}}' % rendered
def c_name(s):
    """Turn an option name into a C identifier: every non-alphanumeric
    character becomes an underscore, and edge underscores are trimmed."""
    sanitized = ''.join(ch if ch.isalnum() else '_' for ch in s)
    return sanitized.strip('_')
def c_if_flag(o):
    # Emit the generated-C branch that copies a parsed boolean flag into
    # the args struct, matching on the long name when one exists.
    # NOTE(review): the template's interior indentation appears to have
    # been lost in this copy of the file; it only affects the cosmetics
    # of the generated C code.
    t = """ else if (strcmp(o->option.o%s, %s) == 0) {
args.%s = o->option.value;\n }"""
    return t % (('long' if o.long else 'short'),
                to_c(o.long or o.short),
                c_name(o.long or o.short))
def c_if_not_flag(o):
    # Emit the generated-C branch that copies an option's string argument
    # into the args struct (guarded on the argument being present).
    # NOTE(review): the template's interior indentation appears to have
    # been lost in this copy of the file; cosmetic only.
    t = """ else if (o->option.argument && strcmp(o->option.o%s, %s) == 0) {
args.%s = o->option.argument;\n }"""
    return t % (('long' if o.long else 'short'),
                to_c(o.long or o.short),
                c_name(o.long or o.short))
if __name__ == '__main__':
    # Read a docopt help text on stdin and print the generated C source
    # to stdout; this module's docstring is the C template.
    doc = sys.stdin.read()
    usage_sections = docopt.parse_section('usage:', doc)
    if len(usage_sections) == 0:
        raise docopt.DocoptLanguageError('"usage:" (case-insensitive) not found.')
    if len(usage_sections) > 1:
        raise docopt.DocoptLanguageError('More than one "usage:" (case-insensitive).')
    usage = usage_sections[0]
    options = docopt.parse_defaults(doc)
    # NOTE(review): `pattern` is computed but never used below.
    pattern = docopt.parse_pattern(docopt.formal_usage(usage), options)
    out = __doc__
    # Substitute each <<<placeholder>>> in the C template.
    out = out.replace('<<<flag_options>>>',
                      ';\n '.join('int %s' % c_name(o.long or o.short)
                                  for o in options if o.argcount == 0))
    out = out.replace('<<<options_with_arguments>>>',
                      ';\n '.join('char *%s' % c_name(o.long or o.short)
                                  for o in options if o.argcount == 1))
    out = out.replace('<<<help_message>>>', to_c(doc))
    out = out.replace('<<<usage_pattern>>>', to_c(usage))
    # Flags (argcount 0) sort before valued options, matching the struct.
    defaults = ', '.join(to_c(o.value) for o in sorted(options, key=lambda o: o.argcount))
    # Cast C string literals so they can initialise char* members.
    defaults = re.sub(r'"(.*?)"', r'(char*) "\1"', defaults)
    out = out.replace('<<<defaults>>>', defaults)
    out = out.replace('<<<options>>>',
                      ',\n '.join(c_option(o) for o in options))
    out = out.replace('<<<if_flag>>>',
                      ''.join(c_if_flag(o) for o in options if o.argcount == 0))
    out = out.replace('<<<if_not_flag>>>',
                      ''.join(c_if_not_flag(o) for o in options if o.argcount == 1))
    print(out.strip())
|
UTF-8
|
Python
| false | false | 2,013 |
15,350,213,130,411 |
7ec40a563d6b411861dcf937e77af5bca064d3a3
|
09fc99d3f47bca5188971fd45212cc7a55819061
|
/GradientDescent/pkg/algorithm.py
|
1319cde1067c278012a243834e7077a09f220190
|
[] |
no_license
|
guyo14/IAPractica1
|
https://github.com/guyo14/IAPractica1
|
4f4bbe3310aa1274eab8055590c2f6b170e14155
|
81b9bdb5d2ef962bbf5c977c9f9ba4dc0394265e
|
refs/heads/master
| 2021-01-21T08:15:06.774935 | 2014-10-28T06:30:17 | 2014-10-28T06:30:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Oct 27, 2014
@author: alejandro
'''
import random
import files
from wxPython._wx import NULL
def gradient_descent(xs, ys, alpha, tolerance, iterations):
    """Batch gradient descent for linear regression.

    xs -- list of feature rows (MUTATED: a leading bias 1 is inserted
          into every row)
    ys -- list of single-element target rows ([[y0], [y1], ...])
    alpha -- learning rate
    tolerance -- stop once the cost improves by no more than this
    iterations -- maximum number of passes
    Returns the fitted theta list, or NULL when len(xs) != len(ys).
    Side effect: writes the cost history via files.writeFile.
    """
    m = len(xs)
    if m == len(ys):
        for row in xs:
            row.insert(0,1)  # prepend the bias feature x0 = 1
        # NOTE(review): `ranges` is computed but never used -- feature
        # scaling appears unfinished.
        ranges = get_ranges(xs)
        r = len(xs[0])
        thetas = [None] * r
        list_cost_functions = []
        cost_function = NULL  # NULL imported from wxPython (None-like sentinel)
        # Random initialisation of each theta in [-1, 1).
        for n in range(0, r):
            thetas[n] = random.uniform(-1, 1)
        for n in range(0, iterations):
            new_cost_function = 0
            # NOTE(review): thetas are updated one j at a time, so later
            # gradients see already-updated earlier thetas (not a strictly
            # simultaneous update) -- confirm this is intended.
            for j in range(0, r):
                theta = 0
                for i in range(0, m):
                    # Hypothesis for sample i under the current thetas.
                    sumh = 0
                    for h in range(0, r):
                        sumh += xs[i][h] * thetas[h]
                    theta += ((sumh - ys[i][0]) * xs[i][j])
                theta = alpha * theta / m
                thetas[j] = thetas[j] - theta
            new_cost_function = get_cost_function(thetas, xs, ys)
            list_cost_functions.append(str(new_cost_function))
            # Converged: cost change fell within tolerance.
            if cost_function != NULL and abs(cost_function - new_cost_function) <= tolerance:
                break
            cost_function = new_cost_function;
        files.writeFile("costFunction", list_cost_functions)
        return thetas
    return NULL
def get_cost_function(thetas, xs, ys):
    """Mean-squared-error cost J(theta) = (1/2m) * sum_i (h(x_i) - y_i)^2,
    where h(x_i) = sum_j thetas[j] * xs[i][j].

    xs rows are expected to already carry the leading bias term, and each
    ys row holds its target at index 0 (as prepared by gradient_descent).
    """
    m = len(xs)
    r = len(xs[0])
    cost_function = 0
    for i in range(0, m):
        # Hypothesis value for sample i.
        # BUG FIX: the original assigned (=) instead of accumulating (+=)
        # inside the j-loop, so only the LAST feature contributed.
        hypothesis = 0
        for j in range(0, r):
            hypothesis += thetas[j] * xs[i][j]
        residual = hypothesis - ys[i][0]
        cost_function += residual * residual
    cost_function = cost_function / (2 * m)
    return cost_function
def get_ranges(rows):
    """Return a [min, max] pair for each column of `rows` (a non-empty
    list of equal-length numeric rows).

    BUG FIX: the original compared elements against the BUILTIN functions
    `max`/`min` instead of the running extrema `xmax`/`xmin`, so the
    ranges never moved off the first row (and the comparison raises
    TypeError on Python 3).
    """
    result = []
    r = len(rows[0])
    for x in range(0, r):
        xmax = rows[0][x]
        xmin = rows[0][x]
        for row in rows:
            if row[x] > xmax:
                xmax = row[x]
            elif row[x] < xmin:
                xmin = row[x]
        result.append([xmin, xmax])
    return result
|
UTF-8
|
Python
| false | false | 2,014 |
16,149,077,053,932 |
39e4e46e20baa795e6747851e40f7e3f3ed48247
|
65357070601df30c719241e119d27e7c2debf90c
|
/ServerSide/wetalk/view_user.py
|
37aa173d0391f55c93845e1cb225178e0411bded
|
[] |
no_license
|
yzf/WeTalk
|
https://github.com/yzf/WeTalk
|
bb536552bc036581312ef6959e3f53ee6d3733c7
|
73a4d5b77f4c297586c6f80871ef1a76f3a4fbe4
|
refs/heads/master
| 2016-08-07T08:12:47.905959 | 2014-06-18T15:25:38 | 2014-06-18T15:25:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: UTF-8
from django.shortcuts import render
from django.http import HttpResponse
from django.core import serializers
from models import *
import json, string, random, util
import base64
def register(request):
    """Register a new user account and return a fresh auth key.

    Parameters (via request.REQUEST):
        username: account name
        password: password
        create_time: timestamp attached to the system welcome message
    Returns an HttpResponse whose JSON body is
        {'status': 1, 'info': 'ok', 'authkey': <32-char key>} on success, or
        {'status': 0, 'info': <error message>} on any failure.
    """
    data = {'status': 0, 'info': 'error'}
    try:
        user = User()
        user.username = request.REQUEST['username']
        user.password = request.REQUEST['password']
        user.icon = Image.objects.get(id=1)  # set the default avatar
        if user.username.strip() == '' or user.password.strip() == '':
            # NOTE(review): bare `raise` with no active exception raises a
            # RuntimeError/TypeError here; the handler below turns it into
            # an error response, so it works, but an explicit exception
            # would be clearer.
            raise  # username or password is empty
        user.save()  # insert the new account row
        user_data = user.toJsonFormat()
        # Issue a session token that caches the serialized user record.
        auth = Auth()
        auth.key = util.generate_random_string(32)
        auth.data = json.dumps(user_data)
        auth.save()
        create_time = request.REQUEST['create_time']
        # system send a welcome message to user
        system_user = User.objects.get(username="[email protected]")
        welcome = Message(to_user=user, from_user=system_user, is_read=False, content="Welcome to WeTalk!", create_time=create_time)
        welcome.save()
        data['authkey'] = auth.key
        data['status'] = 1
        data['info'] = 'ok'
    except Exception as e:
        data['info'] = util.get_exception_message(e)
        print e
    return HttpResponse(json.dumps(data))
def login(request):
'''
登陆系统
参数:
request.REQUEST['username']: 账号
request.REQUEST['password']: 密码
返回值:
如果成功,则返回
{'status': 1,
'info': 'ok',
否则
{'status': 0,
'info': 'error'}
其他:
'''
data = {'status': 0, 'info': 'error'}
try:
username = request.REQUEST['username']
password = request.REQUEST['password']
user = User.objects.get(username=username, password=password)
user_data = user.toJsonFormat()
auth = Auth()
auth.key = util.generate_random_string(32)
auth.data = json.dumps(user_data)
auth.save()
data['authkey'] = auth.key
data['status'] = 1
data['info'] = 'ok'
except Exception as e:
data['info'] = util.get_exception_message(e)
print e
return HttpResponse(json.dumps(data))
def logout(request):
'''
登出
'''
data = {'status': 0, 'info': 'error'}
try:
authkey = request.REQUEST['authkey']
auth = Auth.objects.get(key=authkey)
auth.delete()
data['status'] = 1
data['info'] = 'ok'
except Exception as e:
data['info'] = util.get_exception_message(e)
print e
return HttpResponse(json.dumps(data))
def user(request):
'''
获取个人信息,必须登陆后才行
参数:
返回值:
没有任何异常,则
{'status': 1,
'info': 'ok',
'data': 用户的信息}
否则
{‘status': 0,
'info': 'error'}
'''
data = {'status': 0, 'info': 'error'}
try:
authkey = request.REQUEST['authkey']
auth = Auth.objects.get(key=authkey)
cur_user = json.loads(auth.data) # this is the json format User, not a class type User
user_ = User.objects.get(id=cur_user['id'])
data['data'] = user_.toJsonFormat()
data['status'] = 1
data['info'] = 'ok'
except Exception as e:
data['info'] = util.get_exception_message(e)
print e
return HttpResponse(json.dumps(data))
# update user info
def user_update(request):
    """Update a single profile field of the authenticated user.

    request.REQUEST['infoType'] selects the field to change:
        '0' name, '1' password, '2' intro, '3' interest,
        '4' avatar (infoText is then base64-encoded image data).
    Returns JSON {'status': 1, 'info': 'ok', 'user': <profile>} on success,
    or {'status': 0, 'info': <error message>} on failure.
    """
    data = {'status': 0, 'info': 'error'}
    try:
        authkey = request.REQUEST['authkey']
        auth = Auth.objects.get(key=authkey)
        cur_user = json.loads(auth.data)  # this is the json format User, not a class type User
        user_ = User.objects.get(id=cur_user['id'])
        infoType = request.REQUEST['infoType']
        infoText = request.REQUEST['infoText']
        if infoType == '0':
            user_.name = infoText
        if infoType == '1':
            user_.password = infoText
        if infoType == '2':
            user_.intro = infoText
        if infoType == '3':
            user_.interest = infoText
        if infoType == '4':
            # Random 15-character file name for the uploaded avatar.
            img_name = ''.join([random.choice(string.ascii_letters + string.digits) \
                for i in range(15)])
            img_url = 'resource/images/' + img_name + '.jpeg'
            #pic = cStringIO.StringIO()
            #image_string = cStringIO.StringIO(base64.b64decode(infoText))
            # Decode the image, save it to disk/DB, and link it to user_.
            imgData = base64.b64decode(infoText)
            tmp = open(img_url,'wb')
            tmp.write(imgData)
            tmp.close()
            image = Image(url=img_url)
            image.save()
            user_.icon = image
        user_.save()
        data['user'] = user_.toJsonFormat()
        data['status'] = 1
        data['info'] = 'ok'
    except Exception as e:
        data['info'] = util.get_exception_message(e)
        print e
    return HttpResponse(json.dumps(data))
# get all the message to user --message.js and message.html
def get_user_message(request):
data = {'status': 0, 'info': 'error'}
try:
authkey = request.REQUEST['authkey']
auth = Auth.objects.get(key=authkey)
cur_user = json.loads(auth.data) # this is the json format User, not a class type User
user_ = User.objects.get(id=cur_user['id'])
unread_messages = Message.objects.filter(to_user=user_, is_read=False)
unread_messages_count = unread_messages.count()
data['unread_messages_count'] = unread_messages_count
data['unread_messages'] = []
for urm in unread_messages:
data['unread_messages'].append(urm.toJsonFormat())
read_messages = Message.objects.filter(to_user=user_, is_read=True)
read_messages_count = read_messages.count()
data['read_messages_count'] = read_messages_count
data['read_messages'] = []
for rm in read_messages:
data['read_messages'].append(rm.toJsonFormat())
data['status'] = 1
data['info'] = 'ok'
except Exception as e:
data['info'] = util.get_exception_message(e)
print e
return HttpResponse(json.dumps(data))
# return the detail of the message --messageDetail.js and messageDetail.html
def get_message_detail(request):
data = {'status': 0, 'info': 'error'}
try:
messageID = request.REQUEST['messageID']
message = Message.objects.get(id=messageID)
message.is_read = True
message.save()
data['message'] = message.toJsonFormat()
data['status'] = 1
data['info'] = 'ok'
except Exception as e:
data['info'] = util.get_exception_message(e)
print e
return HttpResponse(json.dumps(data))
|
UTF-8
|
Python
| false | false | 2,014 |
5,231,270,183,481 |
0499d52a2242a5f6c476cf5734a3acbb3c14d35e
|
44acdaa8ef6f2e10a4bdc901deb01923d995a99b
|
/neural_network/ml_util.py
|
4f6039d12e993ff6fe8cc67e7cc040c1ce161a80
|
[] |
no_license
|
jeromewu/ml_learn
|
https://github.com/jeromewu/ml_learn
|
dbfaf921cccd98b689cb9ec4e91245f49391d56c
|
8ea03124c99114c7c6157d73e6358d9b577685e6
|
refs/heads/master
| 2016-09-02T00:33:54.057895 | 2014-07-14T21:25:11 | 2014-07-14T21:25:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2.7
import numpy as np
import theano
import theano.tensor as T
def softmax(w):
    """Row-wise softmax of a 2-D array.

    Subtracts each row's maximum before exponentiating for numerical
    stability; every output row sums to 1.
    """
    w = np.array(w)
    shifted = w - np.amax(w, axis=1).reshape(-1, 1)
    e = np.exp(shifted)
    return (e.T / np.sum(e, axis=1)).T
def p_y_given_x(X, w, b):
    """Class probabilities of a linear model: softmax(X . w + b)."""
    return softmax(np.dot(X, w) + b)
def svm2numpy(file_name, n_dim):
    """Load a LIBSVM-format file into dense numpy arrays.

    Each line is "<label> <idx>:<val> ..." with 1-based feature indices.
    file_name: path to the LIBSVM text file.
    n_dim: number of feature columns in the dense output.
    Returns (feature, label): feature is an (n_samples, n_dim) float32
    array, label an int32 vector.
    """
    label = np.array([], dtype=np.int32)
    feature = []
    # FIX: open via a context manager so the handle is closed
    # deterministically (the original `for line in open(...)` leaked it
    # until garbage collection).
    with open(file_name, 'r') as f:
        for line in f:
            row = np.zeros(n_dim, dtype=np.float32)
            tokens = line.rstrip().split(' ')
            # First token is the label; the rest are idx:val pairs.
            label = np.append(label, int(tokens[0]))
            for pair in tokens[1:]:
                key, val = pair.split(':')
                row[int(key) - 1] = float(val)  # 1-based -> 0-based index
            feature.append(row)
    feature = np.array(feature, dtype=np.float32)
    return (feature, label)
def numpy2svm(x, y, file_name):
    """Write features x and labels y to file_name in LIBSVM format.

    x: 2-D numpy array of features; zero-valued entries are omitted and
       indices are written 1-based.
    y: sequence of labels, one per row of x.
    file_name: destination path (overwritten).
    """
    # FIX: use a context manager so the file is flushed and closed even on
    # error — the original never called close(). `range` (not `xrange`)
    # iterates identically and also works on Python 3.
    with open(file_name, 'w') as f:
        for n in range(len(x)):
            output_str = str(y[n])
            for idx in range(len(x[n])):
                if x[n, idx] != 0.0:
                    output_str = ('%s %i:%f') % (output_str, idx + 1, x[n, idx])
            f.write(output_str + '\n')
def load_data(train_file_name, test_file_name, n_dim):
    """Read LIBSVM train/test files and wrap them in theano shared variables.

    Returns (train_x, train_y, test_x, test_y). Labels are stored as floatX
    shared variables and cast back to int32, matching theano classifier
    conventions.
    """
    def as_shared(pair, borrow=True):
        # One shared variable per array; keep floatX storage so the data
        # can live on the GPU, then cast the labels back to int32.
        x, y = pair
        sx = theano.shared(np.asarray(x, dtype=theano.config.floatX), borrow=borrow)
        sy = theano.shared(np.asarray(y, dtype=theano.config.floatX), borrow=borrow)
        return sx, T.cast(sy, 'int32')

    train_x, train_y = as_shared(svm2numpy(train_file_name, n_dim))
    test_x, test_y = as_shared(svm2numpy(test_file_name, n_dim))
    return train_x, train_y, test_x, test_y
|
UTF-8
|
Python
| false | false | 2,014 |
11,871,289,633,413 |
10ecb3b7175cfc175a70cddaa24d5c81c28b9b81
|
bce8c8b0835f1b6fdf33a1aff76cb7e790ae8486
|
/test/test_sparql/sparql/ConstructTests/Test10_22.py
|
a3b7ac8b8d2a4dab3fa0f4d527f588a05e35643e
|
[
"BSD-3-Clause"
] |
permissive
|
RDFLib/rdfextras
|
https://github.com/RDFLib/rdfextras
|
9e1ab6369ea8b04421cc13843bbb1a5ce0300176
|
c66b30de4a3b9cb67090add06cb8a9cf05d2c545
|
refs/heads/master
| 2023-08-16T04:57:26.503211 | 2013-05-19T14:10:27 | 2013-05-19T14:10:27 | 3,342,174 | 6 | 5 | null | false | 2015-11-05T15:09:16 | 2012-02-03T06:16:14 | 2015-05-15T02:18:21 | 2015-11-05T07:34:15 | 2,557 | 20 | 12 | 1 |
Python
| null | null |
#!/d/Bin/Python/python.exe
# -*- coding: utf-8 -*-
#
#
# $Date: 2005/04/02 07:30:02 $, by $Author: ivan $, $Revision: 1.1 $
#
from testSPARQL import ns_rdf
from testSPARQL import ns_rdfs
from testSPARQL import ns_dc0
from testSPARQL import ns_foaf
from testSPARQL import ns_ns
from testSPARQL import ns_book
from testSPARQL import ns_vcard
from testSPARQL import ns_person
from rdflib.Literal import Literal
from rdflib import BNode
from rdfextras.sparql.sparql import PatternBNode
from rdfextras.sparql.sparqlOperators import lt, ge
import datetime
from rdfextras.sparql.graphPattern import GraphPattern
# Date threshold available to the (external) test driver; note the Python 2
# leading-zero literals.
thresholdDate = datetime.date(2005,01,01)
# Input graph: two FOAF descriptions sharing the family name "Hacker".
rdfData = """<?xml version="1.0" encoding="UTF-8"?>
<rdf:RDF
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:foaf="http://xmlns.com/foaf/0.1/"
xmlns:ns = "http://example.org/ns#"
>
<rdf:Description>
<foaf:givenname>Alice</foaf:givenname>
<foaf:family_name>Hacker</foaf:family_name>
</rdf:Description>
<rdf:Description>
<foaf:givenname>Bob</foaf:givenname>
<foaf:family_name>Hacker</foaf:family_name>
</rdf:Description>
</rdf:RDF>
"""
# No SELECT variables: this test exercises CONSTRUCT only.
select = []
# WHERE pattern: bind ?name/?fname from each FOAF description.
pattern = GraphPattern([("?x",ns_foaf["givenname"],"?name"),("?x",ns_foaf["family_name"],"?fname")])
optional = []
# Blank node shared by the CONSTRUCT triples below.
bnode = BNode("v") #PatternBNode("")
# CONSTRUCT template: build a vCard N node per matched person.
construct = GraphPattern([("?x", ns_vcard["N"],bnode),(bnode,ns_vcard["givenName"],"?name"),(bnode,ns_vcard["familyName"],"?fname")])
# Populated by the test harness, not here.
tripleStore = None
|
UTF-8
|
Python
| false | false | 2,013 |
5,342,939,366,089 |
00300940c728913d5ef52ad092f04ad39c7c5835
|
1d6f8d503ca29a0a313389dc118beef93c548739
|
/bill/prognosis_model_test.py
|
6128ca43359ca4639d057d6a07a9de2d803c38ed
|
[] |
no_license
|
coderich/govtrack.us-web
|
https://github.com/coderich/govtrack.us-web
|
8b24a2fa9b8c1bac5b436438d1b4a02615d710fc
|
ccca9c138f9bcc7c0640c57d4b788157b0df4a84
|
refs/heads/master
| 2021-01-16T00:46:29.685913 | 2013-04-24T12:09:50 | 2013-04-24T12:09:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# this file was automatically generated by prognosis.py
train_congress = 111
test_congress = 112
model_test_results = {(1, 0): {'bins': [(2.0602487823132929, 81, 0.012345679012345678),
(7.0120781239268881, 65, 0.015384615384615385),
(10.430755022078882, 96, 0.052083333333333336),
(20.604774211196634, 95, 0.031578947368421054),
(37.3781022971719, 78, 0.10256410256410256),
(44.842682701130457, 176, 0.3181818181818182),
(70.065818755397316, 84, 0.13095238095238096),
(93.478607562488563, 53, 0.5471698113207547)],
'count': 845,
'max_fscore': {'accuracy': 0.7798816568047338,
'fscore': 0.5981375358166188,
'fscore_beta': 0.5,
'precision': 0.571917808219178,
'recall': 0.7324561403508771,
'table': {'fn': 61,
'fp': 125,
'tn': 492,
'tp': 167},
'threshold': 52.18609865470852},
'overall': 52.18609865470852,
'precision_recall': [{'accuracy': 0.3502958579881657,
'fscore': 0.3410456730769231,
'fscore_beta': 0.5,
'precision': 0.2929032258064516,
'recall': 0.9956140350877193,
'table': {'fn': 1,
'fp': 548,
'tn': 69,
'tp': 227},
'threshold': 2.7323722447292558},
{'accuracy': 0.3585798816568047,
'fscore': 0.343939393939394,
'fscore_beta': 0.5,
'precision': 0.2955729166666667,
'recall': 0.9956140350877193,
'table': {'fn': 1,
'fp': 541,
'tn': 76,
'tp': 227},
'threshold': 3.337326996032608},
{'accuracy': 0.36923076923076925,
'fscore': 0.3477328431372549,
'fscore_beta': 0.5,
'precision': 0.29907773386034253,
'recall': 0.9956140350877193,
'table': {'fn': 1,
'fp': 532,
'tn': 85,
'tp': 227},
'threshold': 4.076220397836621},
{'accuracy': 0.37751479289940826,
'fscore': 0.3507416563658838,
'fscore_beta': 0.5,
'precision': 0.30186170212765956,
'recall': 0.9956140350877193,
'table': {'fn': 1,
'fp': 525,
'tn': 92,
'tp': 227},
'threshold': 4.978706836786395},
{'accuracy': 0.3893491124260355,
'fscore': 0.35513141426783484,
'fscore_beta': 0.5,
'precision': 0.3059299191374663,
'recall': 0.9956140350877193,
'table': {'fn': 1,
'fp': 515,
'tn': 102,
'tp': 227},
'threshold': 6.0810062625217975},
{'accuracy': 0.40236686390532544,
'fscore': 0.36008883248730966,
'fscore_beta': 0.5,
'precision': 0.31053351573187415,
'recall': 0.9956140350877193,
'table': {'fn': 1,
'fp': 504,
'tn': 113,
'tp': 227},
'threshold': 7.427357821433388},
{'accuracy': 0.4378698224852071,
'fscore': 0.37367724867724866,
'fscore_beta': 0.5,
'precision': 0.32331902718168815,
'recall': 0.9912280701754386,
'table': {'fn': 2,
'fp': 473,
'tn': 144,
'tp': 226},
'threshold': 9.071795328941251},
{'accuracy': 0.5005917159763313,
'fscore': 0.3986291486291486,
'fscore_beta': 0.5,
'precision': 0.3474842767295597,
'recall': 0.9692982456140351,
'table': {'fn': 7,
'fp': 415,
'tn': 202,
'tp': 221},
'threshold': 11.080315836233387},
{'accuracy': 0.5207100591715976,
'fscore': 0.40865384615384615,
'fscore_beta': 0.5,
'precision': 0.3570274636510501,
'recall': 0.9692982456140351,
'table': {'fn': 7,
'fp': 398,
'tn': 219,
'tp': 221},
'threshold': 13.53352832366127},
{'accuracy': 0.5360946745562131,
'fscore': 0.41666666666666663,
'fscore_beta': 0.5,
'precision': 0.36468646864686466,
'recall': 0.9692982456140351,
'table': {'fn': 7,
'fp': 385,
'tn': 232,
'tp': 221},
'threshold': 16.529888822158654},
{'accuracy': 0.5846153846153846,
'fscore': 0.44354838709677424,
'fscore_beta': 0.5,
'precision': 0.3907637655417407,
'recall': 0.9649122807017544,
'table': {'fn': 8,
'fp': 343,
'tn': 274,
'tp': 220},
'threshold': 20.189651799465537},
{'accuracy': 0.6071005917159763,
'fscore': 0.4570116861435726,
'fscore_beta': 0.5,
'precision': 0.4040590405904059,
'recall': 0.9605263157894737,
'table': {'fn': 9,
'fp': 323,
'tn': 294,
'tp': 219},
'threshold': 24.659696394160648},
{'accuracy': 0.6568047337278107,
'fscore': 0.4909909909909911,
'fscore_beta': 0.5,
'precision': 0.43775100401606426,
'recall': 0.956140350877193,
'table': {'fn': 10,
'fp': 280,
'tn': 337,
'tp': 218},
'threshold': 30.119421191220212},
{'accuracy': 0.6674556213017752,
'fscore': 0.49625468164794007,
'fscore_beta': 0.5,
'precision': 0.4444444444444444,
'recall': 0.9298245614035088,
'table': {'fn': 16,
'fp': 265,
'tn': 352,
'tp': 212},
'threshold': 36.787944117144235},
{'accuracy': 0.7656804733727811,
'fscore': 0.5787671232876712,
'fscore_beta': 0.5,
'precision': 0.5487012987012987,
'recall': 0.7412280701754386,
'table': {'fn': 59,
'fp': 139,
'tn': 478,
'tp': 169},
'threshold': 44.932896411722155},
{'accuracy': 0.7881656804733728,
'fscore': 0.6103801169590642,
'fscore_beta': 0.5,
'precision': 0.5859649122807018,
'recall': 0.7324561403508771,
'table': {'fn': 61,
'fp': 118,
'tn': 499,
'tp': 167},
'threshold': 54.88116360940264},
{'accuracy': 0.7952662721893491,
'fscore': 0.6209415584415585,
'fscore_beta': 0.5,
'precision': 0.6095617529880478,
'recall': 0.6710526315789473,
'table': {'fn': 75,
'fp': 98,
'tn': 519,
'tp': 153},
'threshold': 67.03200460356393},
{'accuracy': 0.8497041420118343,
'fscore': 0.732421875,
'fscore_beta': 0.5,
'precision': 0.7537688442211056,
'recall': 0.6578947368421053,
'table': {'fn': 78,
'fp': 49,
'tn': 568,
'tp': 150},
'threshold': 81.87307530779819}]},
(1, 1): {'bins': [(81.180994044082468, 22, 1.0),
(87.356887528680573, 2, 1.0),
(95.087324708647486, 181, 0.9723756906077348)],
'count': 228,
'max_fscore': {'accuracy': 0.10087719298245613,
'fscore': 0.29032258064516125,
'fscore_beta': 0.5,
'precision': 0.782608695652174,
'recall': 0.08256880733944955,
'table': {'fn': 200, 'fp': 5, 'tn': 5, 'tp': 18},
'threshold': 95.70354457572503},
'overall': 95.70354457572503,
'precision_recall': [{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 2.7323722447292558},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 3.337326996032608},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 4.076220397836621},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 4.978706836786395},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 6.0810062625217975},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 7.427357821433388},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 9.071795328941251},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 11.080315836233387},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 13.53352832366127},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 16.529888822158654},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 20.189651799465537},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 24.659696394160648},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 30.119421191220212},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 36.787944117144235},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 44.932896411722155},
{'accuracy': 0.956140350877193,
'fscore': 0.9646017699115045,
'fscore_beta': 0.5,
'precision': 0.956140350877193,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 0,
'tp': 218},
'threshold': 54.88116360940264},
{'accuracy': 0.9517543859649122,
'fscore': 0.9635879218472468,
'fscore_beta': 0.5,
'precision': 0.9559471365638766,
'recall': 0.9954128440366973,
'table': {'fn': 1,
'fp': 10,
'tn': 0,
'tp': 217},
'threshold': 67.03200460356393},
{'accuracy': 0.8596491228070176,
'fscore': 0.9404990403071016,
'fscore_beta': 0.5,
'precision': 0.9514563106796117,
'recall': 0.8990825688073395,
'table': {'fn': 22,
'fp': 10,
'tn': 0,
'tp': 196},
'threshold': 81.87307530779819}]},
(2, 0): {'bins': [(1.5914354601600561e-14, 371, 0.0),
(0.32959758546205947, 243, 0.0),
(0.46641245376223495, 418, 0.0023923444976076554),
(0.90718806150691045, 441, 0.0022675736961451248),
(2.5295395505799769, 376, 0.02925531914893617),
(7.1739674240573024, 313, 0.08306709265175719),
(12.097757596946392, 435, 0.15402298850574714),
(16.281498008201069, 374, 0.16310160427807488),
(28.688716506836151, 372, 0.260752688172043)],
'count': 3716,
'max_fscore': {'accuracy': 0.7564585575888052,
'fscore': 0.34452122408687064,
'fscore_beta': 0.5,
'precision': 0.3024263431542461,
'recall': 0.77728285077951,
'table': {'fn': 100,
'fp': 805,
'tn': 2462,
'tp': 349},
'threshold': 12.884947031288494},
'overall': 12.884947031288494,
'precision_recall': [{'accuracy': 0.5688912809472552,
'fscore': 0.2560363933278899,
'fscore_beta': 0.5,
'precision': 0.21614967996061055,
'recall': 0.977728285077951,
'table': {'fn': 10,
'fp': 1592,
'tn': 1675,
'tp': 439},
'threshold': 2.7323722447292558},
{'accuracy': 0.5952637244348762,
'fscore': 0.26795546311024104,
'fscore_beta': 0.5,
'precision': 0.22682547902641118,
'recall': 0.9755011135857461,
'table': {'fn': 11,
'fp': 1493,
'tn': 1774,
'tp': 438},
'threshold': 3.337326996032608},
{'accuracy': 0.6033369214208827,
'fscore': 0.2719483422327083,
'fscore_beta': 0.5,
'precision': 0.23040504997369807,
'recall': 0.9755011135857461,
'table': {'fn': 11,
'fp': 1463,
'tn': 1804,
'tp': 438},
'threshold': 4.076220397836621},
{'accuracy': 0.6170613562970937,
'fscore': 0.27688757851557494,
'fscore_beta': 0.5,
'precision': 0.235038084874864,
'recall': 0.9621380846325167,
'table': {'fn': 17,
'fp': 1406,
'tn': 1861,
'tp': 432},
'threshold': 4.978706836786395},
{'accuracy': 0.6278256189451022,
'fscore': 0.2805215329909127,
'fscore_beta': 0.5,
'precision': 0.23852183650615902,
'recall': 0.9487750556792873,
'table': {'fn': 23,
'fp': 1360,
'tn': 1907,
'tp': 426},
'threshold': 6.0810062625217975},
{'accuracy': 0.645048439181916,
'fscore': 0.2881053642474962,
'fscore_beta': 0.5,
'precision': 0.24561403508771928,
'recall': 0.9354120267260579,
'table': {'fn': 29,
'fp': 1290,
'tn': 1977,
'tp': 420},
'threshold': 7.427357821433388},
{'accuracy': 0.6585037674919269,
'fscore': 0.293992330634853,
'fscore_beta': 0.5,
'precision': 0.2512135922330097,
'recall': 0.9220489977728286,
'table': {'fn': 35,
'fp': 1234,
'tn': 2033,
'tp': 414},
'threshold': 9.071795328941251},
{'accuracy': 0.7042518837459634,
'fscore': 0.3136742362359092,
'fscore_beta': 0.5,
'precision': 0.27080394922425954,
'recall': 0.8552338530066815,
'table': {'fn': 65,
'fp': 1034,
'tn': 2233,
'tp': 384},
'threshold': 11.080315836233387},
{'accuracy': 0.7580731969860065,
'fscore': 0.34616147589763935,
'fscore_beta': 0.5,
'precision': 0.304006968641115,
'recall': 0.77728285077951,
'table': {'fn': 100,
'fp': 799,
'tn': 2468,
'tp': 349},
'threshold': 13.53352832366127},
{'accuracy': 0.7995156081808397,
'fscore': 0.3778154516832163,
'fscore_beta': 0.5,
'precision': 0.3391304347826087,
'recall': 0.6948775055679287,
'table': {'fn': 137,
'fp': 608,
'tn': 2659,
'tp': 312},
'threshold': 16.529888822158654},
{'accuracy': 0.8231969860064585,
'fscore': 0.401673640167364,
'fscore_beta': 0.5,
'precision': 0.3673469387755102,
'recall': 0.6414253897550112,
'table': {'fn': 161,
'fp': 496,
'tn': 2771,
'tp': 288},
'threshold': 20.189651799465537},
{'accuracy': 0.8479547900968784,
'fscore': 0.4296473810338925,
'fscore_beta': 0.5,
'precision': 0.40614886731391586,
'recall': 0.5590200445434298,
'table': {'fn': 198,
'fp': 367,
'tn': 2900,
'tp': 251},
'threshold': 24.659696394160648},
{'accuracy': 0.8678686759956943,
'fscore': 0.469065911847958,
'fscore_beta': 0.5,
'precision': 0.45849802371541504,
'recall': 0.5167037861915368,
'table': {'fn': 217,
'fp': 274,
'tn': 2993,
'tp': 232},
'threshold': 30.119421191220212},
{'accuracy': 0.8764800861141012,
'fscore': 0.47525722684958355,
'fscore_beta': 0.5,
'precision': 0.48743718592964824,
'recall': 0.43207126948775054,
'table': {'fn': 255,
'fp': 204,
'tn': 3063,
'tp': 194},
'threshold': 36.787944117144235},
{'accuracy': 0.8840150699677072,
'fscore': 0.4748247291268323,
'fscore_beta': 0.5,
'precision': 0.5321428571428571,
'recall': 0.33184855233853006,
'table': {'fn': 300,
'fp': 131,
'tn': 3136,
'tp': 149},
'threshold': 44.932896411722155},
{'accuracy': 0.8891280947255114,
'fscore': 0.47768206734534063,
'fscore_beta': 0.5,
'precision': 0.5893719806763285,
'recall': 0.2717149220489978,
'table': {'fn': 327,
'fp': 85,
'tn': 3182,
'tp': 122},
'threshold': 54.88116360940264},
{'accuracy': 0.891280947255113,
'fscore': 0.4507772020725389,
'fscore_beta': 0.5,
'precision': 0.6744186046511628,
'recall': 0.19376391982182628,
'table': {'fn': 362,
'fp': 42,
'tn': 3225,
'tp': 87},
'threshold': 67.03200460356393},
{'accuracy': 0.885091496232508,
'fscore': 0.28974739970282315,
'fscore_beta': 0.5,
'precision': 0.6964285714285714,
'recall': 0.08685968819599109,
'table': {'fn': 410,
'fp': 17,
'tn': 3250,
'tp': 39},
'threshold': 81.87307530779819}]},
(2, 1): {'bins': [(1.9675462624954902e-14, 14, 0.0),
(3.6051104498585147, 68, 0.16176470588235295),
(7.5822686562794752, 11, 0.45454545454545453),
(13.360195846300552, 78, 0.1282051282051282)],
'count': 449,
'max_fscore': {'accuracy': 0.42761692650334077,
'fscore': 0.19831223628691985,
'fscore_beta': 0.5,
'precision': 0.16906474820143885,
'recall': 0.6438356164383562,
'table': {'fn': 26, 'fp': 231, 'tn': 145, 'tp': 47},
'threshold': 21.414913957934992},
'overall': 21.414913957934992,
'precision_recall': [{'accuracy': 0.23162583518930957,
'fscore': 0.1933372992266508,
'fscore_beta': 0.5,
'precision': 0.16169154228855723,
'recall': 0.8904109589041096,
'table': {'fn': 8,
'fp': 337,
'tn': 39,
'tp': 65},
'threshold': 2.7323722447292558},
{'accuracy': 0.23162583518930957,
'fscore': 0.1933372992266508,
'fscore_beta': 0.5,
'precision': 0.16169154228855723,
'recall': 0.8904109589041096,
'table': {'fn': 8,
'fp': 337,
'tn': 39,
'tp': 65},
'threshold': 3.337326996032608},
{'accuracy': 0.2383073496659243,
'fscore': 0.19472738166566805,
'fscore_beta': 0.5,
'precision': 0.16290726817042606,
'recall': 0.8904109589041096,
'table': {'fn': 8,
'fp': 334,
'tn': 42,
'tp': 65},
'threshold': 4.076220397836621},
{'accuracy': 0.2962138084632517,
'fscore': 0.2011680726800779,
'fscore_beta': 0.5,
'precision': 0.16893732970027248,
'recall': 0.8493150684931506,
'table': {'fn': 11,
'fp': 305,
'tn': 71,
'tp': 62},
'threshold': 4.978706836786395},
{'accuracy': 0.2962138084632517,
'fscore': 0.2011680726800779,
'fscore_beta': 0.5,
'precision': 0.16893732970027248,
'recall': 0.8493150684931506,
'table': {'fn': 11,
'fp': 305,
'tn': 71,
'tp': 62},
'threshold': 6.0810062625217975},
{'accuracy': 0.2962138084632517,
'fscore': 0.2011680726800779,
'fscore_beta': 0.5,
'precision': 0.16893732970027248,
'recall': 0.8493150684931506,
'table': {'fn': 11,
'fp': 305,
'tn': 71,
'tp': 62},
'threshold': 7.427357821433388},
{'accuracy': 0.2984409799554566,
'fscore': 0.19038076152304612,
'fscore_beta': 0.5,
'precision': 0.1601123595505618,
'recall': 0.7808219178082192,
'table': {'fn': 16,
'fp': 299,
'tn': 77,
'tp': 57},
'threshold': 9.071795328941251},
{'accuracy': 0.2984409799554566,
'fscore': 0.19038076152304612,
'fscore_beta': 0.5,
'precision': 0.1601123595505618,
'recall': 0.7808219178082192,
'table': {'fn': 16,
'fp': 299,
'tn': 77,
'tp': 57},
'threshold': 11.080315836233387},
{'accuracy': 0.42761692650334077,
'fscore': 0.19831223628691985,
'fscore_beta': 0.5,
'precision': 0.16906474820143885,
'recall': 0.6438356164383562,
'table': {'fn': 26,
'fp': 231,
'tn': 145,
'tp': 47},
'threshold': 13.53352832366127},
{'accuracy': 0.42761692650334077,
'fscore': 0.19831223628691985,
'fscore_beta': 0.5,
'precision': 0.16906474820143885,
'recall': 0.6438356164383562,
'table': {'fn': 26,
'fp': 231,
'tn': 145,
'tp': 47},
'threshold': 16.529888822158654},
{'accuracy': 0.42761692650334077,
'fscore': 0.19831223628691985,
'fscore_beta': 0.5,
'precision': 0.16906474820143885,
'recall': 0.6438356164383562,
'table': {'fn': 26,
'fp': 231,
'tn': 145,
'tp': 47},
'threshold': 20.189651799465537},
{'accuracy': 0.42761692650334077,
'fscore': 0.19831223628691985,
'fscore_beta': 0.5,
'precision': 0.16906474820143885,
'recall': 0.6438356164383562,
'table': {'fn': 26,
'fp': 231,
'tn': 145,
'tp': 47},
'threshold': 24.659696394160648},
{'accuracy': 0.7817371937639198,
'fscore': 0.1687763713080169,
'fscore_beta': 0.5,
'precision': 0.1951219512195122,
'recall': 0.1095890410958904,
'table': {'fn': 65,
'fp': 33,
'tn': 343,
'tp': 8},
'threshold': 30.119421191220212},
{'accuracy': 0.7861915367483296,
'fscore': 0.17467248908296945,
'fscore_beta': 0.5,
'precision': 0.20512820512820512,
'recall': 0.1095890410958904,
'table': {'fn': 65,
'fp': 31,
'tn': 345,
'tp': 8},
'threshold': 36.787944117144235},
{'accuracy': 0.7839643652561247,
'fscore': 0.15555555555555556,
'fscore_beta': 0.5,
'precision': 0.18421052631578946,
'recall': 0.0958904109589041,
'table': {'fn': 66,
'fp': 31,
'tn': 345,
'tp': 7},
'threshold': 44.932896411722155},
{'accuracy': 0.8017817371937639,
'fscore': 0.034482758620689655,
'fscore_beta': 0.5,
'precision': 0.05555555555555555,
'recall': 0.0136986301369863,
'table': {'fn': 72,
'fp': 17,
'tn': 359,
'tp': 1},
'threshold': 54.88116360940264}]},
(3, 0): {'bins': [(2.3229315434227933e-14, 201, 0.03980099502487562),
(4.5481268415587326e-13, 1123, 0.008014247551202136),
(1.9033476646711809, 514, 0.009727626459143969),
(2.2945565225899078, 839, 0.003575685339690107),
(2.9592738794807296, 573, 0.027923211169284468),
(4.8147601604921206, 578, 0.04325259515570934),
(6.1914073083068741, 877, 0.04446978335233751),
(12.515641680139366, 589, 0.11714770797962648),
(17.784985053030216, 750, 0.22266666666666668)],
'count': 6723,
'max_fscore': {'accuracy': 0.8528930536962666,
'fscore': 0.43097585855151305,
'fscore_beta': 0.5,
'precision': 0.39211136890951276,
'recall': 0.7140845070422536,
'table': {'fn': 203,
'fp': 786,
'tn': 5227,
'tp': 507},
'threshold': 13.19634703196347},
'overall': 13.19634703196347,
'precision_recall': [{'accuracy': 0.5153949129852744,
'fscore': 0.20907093151019415,
'fscore_beta': 0.5,
'precision': 0.1748340990301174,
'recall': 0.9647887323943662,
'table': {'fn': 25,
'fp': 3233,
'tn': 2780,
'tp': 685},
'threshold': 2.7323722447292558},
{'accuracy': 0.5585304179681689,
'fscore': 0.22331442142762897,
'fscore_beta': 0.5,
'precision': 0.18743078626799556,
'recall': 0.9535211267605633,
'table': {'fn': 33,
'fp': 2935,
'tn': 3078,
'tp': 677},
'threshold': 3.337326996032608},
{'accuracy': 0.5728097575487133,
'fscore': 0.22804397991041128,
'fscore_beta': 0.5,
'precision': 0.19167142042213348,
'recall': 0.9464788732394366,
'table': {'fn': 38,
'fp': 2834,
'tn': 3179,
'tp': 672},
'threshold': 4.076220397836621},
{'accuracy': 0.6381079875055778,
'fscore': 0.254561047871511,
'fscore_beta': 0.5,
'precision': 0.21558270056124135,
'recall': 0.9197183098591549,
'table': {'fn': 57,
'fp': 2376,
'tn': 3637,
'tp': 653},
'threshold': 4.978706836786395},
{'accuracy': 0.6523873270861222,
'fscore': 0.2603071948261924,
'fscore_beta': 0.5,
'precision': 0.220926243567753,
'recall': 0.9070422535211268,
'table': {'fn': 66,
'fp': 2271,
'tn': 3742,
'tp': 644},
'threshold': 6.0810062625217975},
{'accuracy': 0.7443105756358769,
'fscore': 0.31939006799917574,
'fscore_beta': 0.5,
'precision': 0.2756780791462872,
'recall': 0.8732394366197183,
'table': {'fn': 90,
'fp': 1629,
'tn': 4384,
'tp': 620},
'threshold': 7.427357821433388},
{'accuracy': 0.7751004016064257,
'fscore': 0.3453984928065768,
'fscore_beta': 0.5,
'precision': 0.30069582504970177,
'recall': 0.852112676056338,
'table': {'fn': 105,
'fp': 1407,
'tn': 4606,
'tp': 605},
'threshold': 9.071795328941251},
{'accuracy': 0.7950319797709356,
'fscore': 0.363849765258216,
'fscore_beta': 0.5,
'precision': 0.319068255687974,
'recall': 0.8295774647887324,
'table': {'fn': 121,
'fp': 1257,
'tn': 4756,
'tp': 589},
'threshold': 11.080315836233387},
{'accuracy': 0.854975457385096,
'fscore': 0.4340697273041077,
'fscore_beta': 0.5,
'precision': 0.3957513768686074,
'recall': 0.7084507042253522,
'table': {'fn': 207,
'fp': 768,
'tn': 5245,
'tp': 503},
'threshold': 13.53352832366127},
{'accuracy': 0.8634538152610441,
'fscore': 0.4480352552331987,
'fscore_beta': 0.5,
'precision': 0.41216216216216217,
'recall': 0.6873239436619718,
'table': {'fn': 222,
'fp': 696,
'tn': 5317,
'tp': 488},
'threshold': 16.529888822158654},
{'accuracy': 0.878923099806634,
'fscore': 0.478568456096546,
'fscore_beta': 0.5,
'precision': 0.44921875,
'recall': 0.647887323943662,
'table': {'fn': 250,
'fp': 564,
'tn': 5449,
'tp': 460},
'threshold': 20.189651799465537},
{'accuracy': 0.8978134761267291,
'fscore': 0.5218900160170848,
'fscore_beta': 0.5,
'precision': 0.5151515151515151,
'recall': 0.5507042253521127,
'table': {'fn': 319,
'fp': 368,
'tn': 5645,
'tp': 391},
'threshold': 24.659696394160648},
{'accuracy': 0.9045069165551093,
'fscore': 0.5418943533697631,
'fscore_beta': 0.5,
'precision': 0.5526315789473685,
'recall': 0.5028169014084507,
'table': {'fn': 353,
'fp': 289,
'tn': 5724,
'tp': 357},
'threshold': 30.119421191220212},
{'accuracy': 0.9101591551390749,
'fscore': 0.560931899641577,
'fscore_beta': 0.5,
'precision': 0.6019230769230769,
'recall': 0.44084507042253523,
'table': {'fn': 397,
'fp': 207,
'tn': 5806,
'tp': 313},
'threshold': 36.787944117144235},
{'accuracy': 0.9131340175516882,
'fscore': 0.5657773689052438,
'fscore_beta': 0.5,
'precision': 0.6721311475409836,
'recall': 0.3464788732394366,
'table': {'fn': 464,
'fp': 120,
'tn': 5893,
'tp': 246},
'threshold': 44.932896411722155},
{'accuracy': 0.9125390450691655,
'fscore': 0.5523560209424083,
'fscore_beta': 0.5,
'precision': 0.7033333333333334,
'recall': 0.2971830985915493,
'table': {'fn': 499,
'fp': 89,
'tn': 5924,
'tp': 211},
'threshold': 54.88116360940264},
{'accuracy': 0.9104566413803361,
'fscore': 0.5014224751066856,
'fscore_beta': 0.5,
'precision': 0.8103448275862069,
'recall': 0.19859154929577466,
'table': {'fn': 569,
'fp': 33,
'tn': 5980,
'tp': 141},
'threshold': 67.03200460356393},
{'accuracy': 0.9012345679012346,
'fscore': 0.2818371607515658,
'fscore_beta': 0.5,
'precision': 0.8709677419354839,
'recall': 0.07605633802816901,
'table': {'fn': 656,
'fp': 8,
'tn': 6005,
'tp': 54},
'threshold': 81.87307530779819}]},
(3, 1): {'bins': [(6.8250378327521313, 69, 0.21739130434782608),
(11.647895709817996, 6, 0.5),
(12.430562547412462, 109, 0.13761467889908258),
(14.598499569456211, 63, 0.19047619047619047),
(23.527374240394661, 219, 0.1963470319634703),
(34.637338251606899, 88, 0.3409090909090909),
(61.615868639928159, 69, 0.391304347826087)],
'count': 710,
'max_fscore': {'accuracy': 0.6873239436619718,
'fscore': 0.47073791348600513,
'fscore_beta': 0.5,
'precision': 0.4530612244897959,
'recall': 0.5577889447236181,
'table': {'fn': 88,
'fp': 134,
'tn': 377,
'tp': 111},
'threshold': 29.296424452133795},
'overall': 29.296424452133795,
'precision_recall': [{'accuracy': 0.28169014084507044,
'fscore': 0.32784184514003295,
'fscore_beta': 0.5,
'precision': 0.28067700987306066,
'recall': 1.0,
'table': {'fn': 0,
'fp': 510,
'tn': 1,
'tp': 199},
'threshold': 2.7323722447292558},
{'accuracy': 0.2830985915492958,
'fscore': 0.3282744968657209,
'fscore_beta': 0.5,
'precision': 0.2810734463276836,
'recall': 1.0,
'table': {'fn': 0,
'fp': 509,
'tn': 2,
'tp': 199},
'threshold': 3.337326996032608},
{'accuracy': 0.28732394366197184,
'fscore': 0.329579330904273,
'fscore_beta': 0.5,
'precision': 0.2822695035460993,
'recall': 1.0,
'table': {'fn': 0,
'fp': 506,
'tn': 5,
'tp': 199},
'threshold': 4.076220397836621},
{'accuracy': 0.28732394366197184,
'fscore': 0.329579330904273,
'fscore_beta': 0.5,
'precision': 0.2822695035460993,
'recall': 1.0,
'table': {'fn': 0,
'fp': 506,
'tn': 5,
'tp': 199},
'threshold': 4.978706836786395},
{'accuracy': 0.3112676056338028,
'fscore': 0.3332188251459979,
'fscore_beta': 0.5,
'precision': 0.2861356932153392,
'recall': 0.9748743718592965,
'table': {'fn': 5,
'fp': 484,
'tn': 27,
'tp': 194},
'threshold': 6.0810062625217975},
{'accuracy': 0.3211267605633803,
'fscore': 0.3356521739130435,
'fscore_beta': 0.5,
'precision': 0.2884902840059791,
'recall': 0.9698492462311558,
'table': {'fn': 6,
'fp': 476,
'tn': 35,
'tp': 193},
'threshold': 7.427357821433388},
{'accuracy': 0.33098591549295775,
'fscore': 0.3332139018273021,
'fscore_beta': 0.5,
'precision': 0.28703703703703703,
'recall': 0.9346733668341709,
'table': {'fn': 13,
'fp': 462,
'tn': 49,
'tp': 186},
'threshold': 9.071795328941251},
{'accuracy': 0.3352112676056338,
'fscore': 0.33297140788997465,
'fscore_beta': 0.5,
'precision': 0.2870514820592824,
'recall': 0.9246231155778895,
'table': {'fn': 15,
'fp': 457,
'tn': 54,
'tp': 184},
'threshold': 11.080315836233387},
{'accuracy': 0.44366197183098594,
'fscore': 0.35915188230203376,
'fscore_beta': 0.5,
'precision': 0.3143939393939394,
'recall': 0.8341708542713567,
'table': {'fn': 33,
'fp': 362,
'tn': 149,
'tp': 166},
'threshold': 13.53352832366127},
{'accuracy': 0.49577464788732395,
'fscore': 0.3782505910165485,
'fscore_beta': 0.5,
'precision': 0.33402922755741127,
'recall': 0.8040201005025126,
'table': {'fn': 39,
'fp': 319,
'tn': 192,
'tp': 160},
'threshold': 16.529888822158654},
{'accuracy': 0.49859154929577465,
'fscore': 0.378751786565031,
'fscore_beta': 0.5,
'precision': 0.33473684210526317,
'recall': 0.7989949748743719,
'table': {'fn': 40,
'fp': 316,
'tn': 195,
'tp': 159},
'threshold': 20.189651799465537},
{'accuracy': 0.6901408450704225,
'fscore': 0.4759898904802022,
'fscore_beta': 0.5,
'precision': 0.4574898785425101,
'recall': 0.5678391959798995,
'table': {'fn': 86,
'fp': 134,
'tn': 377,
'tp': 113},
'threshold': 24.659696394160648},
{'accuracy': 0.6873239436619718,
'fscore': 0.47073791348600513,
'fscore_beta': 0.5,
'precision': 0.4530612244897959,
'recall': 0.5577889447236181,
'table': {'fn': 88,
'fp': 134,
'tn': 377,
'tp': 111},
'threshold': 30.119421191220212},
{'accuracy': 0.7211267605633803,
'fscore': 0.4857142857142857,
'fscore_beta': 0.5,
'precision': 0.5029585798816568,
'recall': 0.4271356783919598,
'table': {'fn': 114,
'fp': 84,
'tn': 427,
'tp': 85},
'threshold': 36.787944117144235},
{'accuracy': 0.7225352112676057,
'fscore': 0.48794489092996546,
'fscore_beta': 0.5,
'precision': 0.5059523809523809,
'recall': 0.4271356783919598,
'table': {'fn': 114,
'fp': 83,
'tn': 428,
'tp': 85},
'threshold': 44.932896411722155},
{'accuracy': 0.7352112676056338,
'fscore': 0.49391069012178607,
'fscore_beta': 0.5,
'precision': 0.5407407407407407,
'recall': 0.36683417085427134,
'table': {'fn': 126,
'fp': 62,
'tn': 449,
'tp': 73},
'threshold': 54.88116360940264},
{'accuracy': 0.752112676056338,
'fscore': 0.5078809106830122,
'fscore_beta': 0.5,
'precision': 0.6236559139784946,
'recall': 0.2914572864321608,
'table': {'fn': 141,
'fp': 35,
'tn': 476,
'tp': 58},
'threshold': 67.03200460356393},
{'accuracy': 0.7380281690140845,
'fscore': 0.31772575250836116,
'fscore_beta': 0.5,
'precision': 0.76,
'recall': 0.09547738693467336,
'table': {'fn': 180,
'fp': 6,
'tn': 505,
'tp': 19},
'threshold': 81.87307530779819}]},
(4, 0): {'bins': [(5.9886024280399663, 61, 0.06557377049180328),
(33.302691296690412, 63, 0.3492063492063492),
(60.642511644087314, 55, 0.5272727272727272),
(80.262875351936714, 50, 0.54),
(88.557382239521388, 211, 0.9241706161137441)],
'count': 630,
'max_fscore': {'accuracy': 0.8428571428571429,
'fscore': 0.8980582524271845,
'fscore_beta': 0.5,
'precision': 0.9024390243902439,
'recall': 0.8809523809523809,
'table': {'fn': 55, 'fp': 44, 'tn': 124, 'tp': 407},
'threshold': 70.57991513437058},
'overall': 70.57991513437058,
'precision_recall': [{'accuracy': 0.7507936507936508,
'fscore': 0.7862491490810075,
'fscore_beta': 0.5,
'precision': 0.7463651050080775,
'recall': 1.0,
'table': {'fn': 0,
'fp': 157,
'tn': 11,
'tp': 462},
'threshold': 2.7323722447292558},
{'accuracy': 0.7507936507936508,
'fscore': 0.7862491490810075,
'fscore_beta': 0.5,
'precision': 0.7463651050080775,
'recall': 1.0,
'table': {'fn': 0,
'fp': 157,
'tn': 11,
'tp': 462},
'threshold': 3.337326996032608},
{'accuracy': 0.7682539682539683,
'fscore': 0.7991660875608062,
'fscore_beta': 0.5,
'precision': 0.7615894039735099,
'recall': 0.9956709956709957,
'table': {'fn': 2,
'fp': 144,
'tn': 24,
'tp': 460},
'threshold': 4.076220397836621},
{'accuracy': 0.7682539682539683,
'fscore': 0.7991660875608062,
'fscore_beta': 0.5,
'precision': 0.7615894039735099,
'recall': 0.9956709956709957,
'table': {'fn': 2,
'fp': 144,
'tn': 24,
'tp': 460},
'threshold': 4.978706836786395},
{'accuracy': 0.7904761904761904,
'fscore': 0.8150248051027641,
'fscore_beta': 0.5,
'precision': 0.7796610169491526,
'recall': 0.9956709956709957,
'table': {'fn': 2,
'fp': 130,
'tn': 38,
'tp': 460},
'threshold': 6.0810062625217975},
{'accuracy': 0.792063492063492,
'fscore': 0.816181689141235,
'fscore_beta': 0.5,
'precision': 0.7809847198641766,
'recall': 0.9956709956709957,
'table': {'fn': 2,
'fp': 129,
'tn': 39,
'tp': 460},
'threshold': 7.427357821433388},
{'accuracy': 0.792063492063492,
'fscore': 0.816181689141235,
'fscore_beta': 0.5,
'precision': 0.7809847198641766,
'recall': 0.9956709956709957,
'table': {'fn': 2,
'fp': 129,
'tn': 39,
'tp': 460},
'threshold': 9.071795328941251},
{'accuracy': 0.8,
'fscore': 0.8220157255182273,
'fscore_beta': 0.5,
'precision': 0.7876712328767124,
'recall': 0.9956709956709957,
'table': {'fn': 2,
'fp': 124,
'tn': 44,
'tp': 460},
'threshold': 11.080315836233387},
{'accuracy': 0.7984126984126985,
'fscore': 0.8214030064423765,
'fscore_beta': 0.5,
'precision': 0.7873070325900514,
'recall': 0.9935064935064936,
'table': {'fn': 3,
'fp': 124,
'tn': 44,
'tp': 459},
'threshold': 13.53352832366127},
{'accuracy': 0.8063492063492064,
'fscore': 0.8273251622206199,
'fscore_beta': 0.5,
'precision': 0.7941176470588235,
'recall': 0.9935064935064936,
'table': {'fn': 3,
'fp': 119,
'tn': 49,
'tp': 459},
'threshold': 16.529888822158654},
{'accuracy': 0.8174603174603174,
'fscore': 0.8363769174579985,
'fscore_beta': 0.5,
'precision': 0.804920913884007,
'recall': 0.9913419913419913,
'table': {'fn': 4,
'fp': 111,
'tn': 57,
'tp': 458},
'threshold': 20.189651799465537},
{'accuracy': 0.8253968253968254,
'fscore': 0.8444691907943579,
'fscore_beta': 0.5,
'precision': 0.8154121863799283,
'recall': 0.9848484848484849,
'table': {'fn': 7,
'fp': 103,
'tn': 65,
'tp': 455},
'threshold': 24.659696394160648},
{'accuracy': 0.8301587301587302,
'fscore': 0.8509433962264152,
'fscore_beta': 0.5,
'precision': 0.8244972577696527,
'recall': 0.9761904761904762,
'table': {'fn': 11,
'fp': 96,
'tn': 72,
'tp': 451},
'threshold': 30.119421191220212},
{'accuracy': 0.8507936507936508,
'fscore': 0.8756949960285942,
'fscore_beta': 0.5,
'precision': 0.857976653696498,
'recall': 0.9545454545454546,
'table': {'fn': 21,
'fp': 73,
'tn': 95,
'tp': 441},
'threshold': 36.787944117144235},
{'accuracy': 0.8523809523809524,
'fscore': 0.8870967741935485,
'fscore_beta': 0.5,
'precision': 0.8773006134969326,
'recall': 0.9285714285714286,
'table': {'fn': 33,
'fp': 60,
'tn': 108,
'tp': 429},
'threshold': 44.932896411722155},
{'accuracy': 0.8523809523809524,
'fscore': 0.8888426311407162,
'fscore_beta': 0.5,
'precision': 0.8804123711340206,
'recall': 0.9242424242424242,
'table': {'fn': 35,
'fp': 58,
'tn': 110,
'tp': 427},
'threshold': 54.88116360940264},
{'accuracy': 0.846031746031746,
'fscore': 0.892703862660944,
'fscore_beta': 0.5,
'precision': 0.8907922912205567,
'recall': 0.9004329004329005,
'table': {'fn': 46,
'fp': 51,
'tn': 117,
'tp': 416},
'threshold': 67.03200460356393},
{'accuracy': 0.8444444444444444,
'fscore': 0.9228187919463087,
'fscore_beta': 0.5,
'precision': 0.9482758620689655,
'recall': 0.8333333333333334,
'table': {'fn': 77,
'fp': 21,
'tn': 147,
'tp': 385},
'threshold': 81.87307530779819}]},
(4, 1): {'bins': [(68.594531225947023, 30, 0.8666666666666667)],
'count': 462,
'max_fscore': {'accuracy': 0.9155844155844156,
'fscore': 0.9815078236130867,
'fscore_beta': 0.5,
'precision': 1.0,
'recall': 0.9139072847682119,
'table': {'fn': 39, 'fp': 0, 'tn': 9, 'tp': 414},
'threshold': 98.59719438877755},
'overall': 98.59719438877755,
'precision_recall': [{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 2.7323722447292558},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 3.337326996032608},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 4.076220397836621},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 4.978706836786395},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 6.0810062625217975},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 7.427357821433388},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 9.071795328941251},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 11.080315836233387},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 13.53352832366127},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 16.529888822158654},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 20.189651799465537},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 24.659696394160648},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 30.119421191220212},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 36.787944117144235},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 44.932896411722155},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 54.88116360940264},
{'accuracy': 0.9805194805194806,
'fscore': 0.984354628422425,
'fscore_beta': 0.5,
'precision': 0.9805194805194806,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 0,
'tp': 453},
'threshold': 67.03200460356393},
{'accuracy': 0.9588744588744589,
'fscore': 0.9834599910594546,
'fscore_beta': 0.5,
'precision': 0.9865470852017937,
'recall': 0.9713024282560706,
'table': {'fn': 13,
'fp': 6,
'tn': 3,
'tp': 440},
'threshold': 81.87307530779819}]},
(5, 0): {'bins': [(6.9370667821626751e-15, 10, 0.0),
(4.8769148139900132e-14, 19, 0.0),
(5.9736511210626162, 13, 0.0),
(16.673734882812528, 13, 0.3076923076923077),
(28.262158296512474, 16, 0.1875),
(41.674628730879022, 52, 0.5384615384615384)],
'count': 147,
'max_fscore': {'accuracy': 0.673469387755102,
'fscore': 0.5128205128205128,
'fscore_beta': 0.5,
'precision': 0.4675324675324675,
'recall': 0.8372093023255814,
'table': {'fn': 7, 'fp': 41, 'tn': 63, 'tp': 36},
'threshold': 36.01190476190476},
'overall': 36.01190476190476,
'precision_recall': [{'accuracy': 0.4897959183673469,
'fscore': 0.41747572815533973,
'fscore_beta': 0.5,
'precision': 0.3644067796610169,
'recall': 1.0,
'table': {'fn': 0,
'fp': 75,
'tn': 29,
'tp': 43},
'threshold': 2.7323722447292558},
{'accuracy': 0.5034013605442177,
'fscore': 0.4240631163708087,
'fscore_beta': 0.5,
'precision': 0.3706896551724138,
'recall': 1.0,
'table': {'fn': 0,
'fp': 73,
'tn': 31,
'tp': 43},
'threshold': 3.337326996032608},
{'accuracy': 0.5034013605442177,
'fscore': 0.4240631163708087,
'fscore_beta': 0.5,
'precision': 0.3706896551724138,
'recall': 1.0,
'table': {'fn': 0,
'fp': 73,
'tn': 31,
'tp': 43},
'threshold': 4.076220397836621},
{'accuracy': 0.5238095238095238,
'fscore': 0.43434343434343425,
'fscore_beta': 0.5,
'precision': 0.3805309734513274,
'recall': 1.0,
'table': {'fn': 0,
'fp': 70,
'tn': 34,
'tp': 43},
'threshold': 4.978706836786395},
{'accuracy': 0.5374149659863946,
'fscore': 0.4414784394250513,
'fscore_beta': 0.5,
'precision': 0.38738738738738737,
'recall': 1.0,
'table': {'fn': 0,
'fp': 68,
'tn': 36,
'tp': 43},
'threshold': 6.0810062625217975},
{'accuracy': 0.5374149659863946,
'fscore': 0.4414784394250513,
'fscore_beta': 0.5,
'precision': 0.38738738738738737,
'recall': 1.0,
'table': {'fn': 0,
'fp': 68,
'tn': 36,
'tp': 43},
'threshold': 7.427357821433388},
{'accuracy': 0.54421768707483,
'fscore': 0.4451345755693582,
'fscore_beta': 0.5,
'precision': 0.39090909090909093,
'recall': 1.0,
'table': {'fn': 0,
'fp': 67,
'tn': 37,
'tp': 43},
'threshold': 9.071795328941251},
{'accuracy': 0.564625850340136,
'fscore': 0.4564755838641189,
'fscore_beta': 0.5,
'precision': 0.40186915887850466,
'recall': 1.0,
'table': {'fn': 0,
'fp': 64,
'tn': 40,
'tp': 43},
'threshold': 11.080315836233387},
{'accuracy': 0.564625850340136,
'fscore': 0.4564755838641189,
'fscore_beta': 0.5,
'precision': 0.40186915887850466,
'recall': 1.0,
'table': {'fn': 0,
'fp': 64,
'tn': 40,
'tp': 43},
'threshold': 13.53352832366127},
{'accuracy': 0.5782312925170068,
'fscore': 0.46436285097192215,
'fscore_beta': 0.5,
'precision': 0.4095238095238095,
'recall': 1.0,
'table': {'fn': 0,
'fp': 62,
'tn': 42,
'tp': 43},
'threshold': 16.529888822158654},
{'accuracy': 0.5918367346938775,
'fscore': 0.4609929078014184,
'fscore_beta': 0.5,
'precision': 0.4105263157894737,
'recall': 0.9069767441860465,
'table': {'fn': 4,
'fp': 56,
'tn': 48,
'tp': 39},
'threshold': 20.189651799465537},
{'accuracy': 0.5918367346938775,
'fscore': 0.4609929078014184,
'fscore_beta': 0.5,
'precision': 0.4105263157894737,
'recall': 0.9069767441860465,
'table': {'fn': 4,
'fp': 56,
'tn': 48,
'tp': 39},
'threshold': 24.659696394160648},
{'accuracy': 0.6462585034013606,
'fscore': 0.4904632152588556,
'fscore_beta': 0.5,
'precision': 0.4444444444444444,
'recall': 0.8372093023255814,
'table': {'fn': 7,
'fp': 45,
'tn': 59,
'tp': 36},
'threshold': 30.119421191220212},
{'accuracy': 0.673469387755102,
'fscore': 0.5128205128205128,
'fscore_beta': 0.5,
'precision': 0.4675324675324675,
'recall': 0.8372093023255814,
'table': {'fn': 7,
'fp': 41,
'tn': 63,
'tp': 36},
'threshold': 36.787944117144235},
{'accuracy': 0.6462585034013606,
'fscore': 0.2797202797202797,
'fscore_beta': 0.5,
'precision': 0.32,
'recall': 0.18604651162790697,
'table': {'fn': 35,
'fp': 17,
'tn': 87,
'tp': 8},
'threshold': 44.932896411722155},
{'accuracy': 0.6530612244897959,
'fscore': 0.2877697841726619,
'fscore_beta': 0.5,
'precision': 0.3333333333333333,
'recall': 0.18604651162790697,
'table': {'fn': 35,
'fp': 16,
'tn': 88,
'tp': 8},
'threshold': 54.88116360940264},
{'accuracy': 0.6870748299319728,
'fscore': 0.07936507936507936,
'fscore_beta': 0.5,
'precision': 0.2,
'recall': 0.023255813953488372,
'table': {'fn': 42,
'fp': 4,
'tn': 100,
'tp': 1},
'threshold': 67.03200460356393}]},
(5, 1): {'bins': [(21.904560321371815, 4, 0.0)],
'count': 43,
'max_fscore': {'accuracy': 0.6511627906976745,
'fscore': 0.7467532467532468,
'fscore_beta': 0.5,
'precision': 0.7419354838709677,
'recall': 0.7666666666666667,
'table': {'fn': 7, 'fp': 8, 'tn': 5, 'tp': 23},
'threshold': 52.892561983471076},
'overall': 52.892561983471076,
'precision_recall': [{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 2.7323722447292558},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 3.337326996032608},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 4.076220397836621},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 4.978706836786395},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 6.0810062625217975},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 7.427357821433388},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 9.071795328941251},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 11.080315836233387},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 13.53352832366127},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 16.529888822158654},
{'accuracy': 0.6976744186046512,
'fscore': 0.7425742574257426,
'fscore_beta': 0.5,
'precision': 0.6976744186046512,
'recall': 1.0,
'table': {'fn': 0,
'fp': 13,
'tn': 0,
'tp': 30},
'threshold': 20.189651799465537},
{'accuracy': 0.7674418604651163,
'fscore': 0.7894736842105263,
'fscore_beta': 0.5,
'precision': 0.75,
'recall': 1.0,
'table': {'fn': 0,
'fp': 10,
'tn': 3,
'tp': 30},
'threshold': 24.659696394160648},
{'accuracy': 0.7906976744186046,
'fscore': 0.8064516129032259,
'fscore_beta': 0.5,
'precision': 0.7692307692307693,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 4,
'tp': 30},
'threshold': 30.119421191220212},
{'accuracy': 0.7906976744186046,
'fscore': 0.8064516129032259,
'fscore_beta': 0.5,
'precision': 0.7692307692307693,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 4,
'tp': 30},
'threshold': 36.787944117144235},
{'accuracy': 0.7906976744186046,
'fscore': 0.8064516129032259,
'fscore_beta': 0.5,
'precision': 0.7692307692307693,
'recall': 1.0,
'table': {'fn': 0,
'fp': 9,
'tn': 4,
'tp': 30},
'threshold': 44.932896411722155},
{'accuracy': 0.6511627906976745,
'fscore': 0.7467532467532468,
'fscore_beta': 0.5,
'precision': 0.7419354838709677,
'recall': 0.7666666666666667,
'table': {'fn': 7,
'fp': 8,
'tn': 5,
'tp': 23},
'threshold': 54.88116360940264},
{'accuracy': 0.6511627906976745,
'fscore': 0.7467532467532468,
'fscore_beta': 0.5,
'precision': 0.7419354838709677,
'recall': 0.7666666666666667,
'table': {'fn': 7,
'fp': 8,
'tn': 5,
'tp': 23},
'threshold': 67.03200460356393},
{'accuracy': 0.6511627906976745,
'fscore': 0.7467532467532468,
'fscore_beta': 0.5,
'precision': 0.7419354838709677,
'recall': 0.7666666666666667,
'table': {'fn': 7,
'fp': 8,
'tn': 5,
'tp': 23},
'threshold': 81.87307530779819}]},
(6, 1): {'bins': [],
'count': 42,
'max_fscore': {'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3, 'fp': 15, 'tn': 7, 'tp': 17},
'threshold': 48.648648648648646},
'overall': 48.648648648648646,
'precision_recall': [{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 2.7323722447292558},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 3.337326996032608},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 4.076220397836621},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 4.978706836786395},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 6.0810062625217975},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 7.427357821433388},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 9.071795328941251},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 11.080315836233387},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 13.53352832366127},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 16.529888822158654},
{'accuracy': 0.47619047619047616,
'fscore': 0.5319148936170213,
'fscore_beta': 0.5,
'precision': 0.47619047619047616,
'recall': 1.0,
'table': {'fn': 0,
'fp': 22,
'tn': 0,
'tp': 20},
'threshold': 20.189651799465537},
{'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3,
'fp': 15,
'tn': 7,
'tp': 17},
'threshold': 24.659696394160648},
{'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3,
'fp': 15,
'tn': 7,
'tp': 17},
'threshold': 30.119421191220212},
{'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3,
'fp': 15,
'tn': 7,
'tp': 17},
'threshold': 36.787944117144235},
{'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3,
'fp': 15,
'tn': 7,
'tp': 17},
'threshold': 44.932896411722155},
{'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3,
'fp': 15,
'tn': 7,
'tp': 17},
'threshold': 54.88116360940264},
{'accuracy': 0.5714285714285714,
'fscore': 0.5743243243243243,
'fscore_beta': 0.5,
'precision': 0.53125,
'recall': 0.85,
'table': {'fn': 3,
'fp': 15,
'tn': 7,
'tp': 17},
'threshold': 67.03200460356393}]},
(7, 0): {'bins': [(1.7008371888318106e-16, 26, 0.0),
(2.3294074542696538e-15, 36, 0.0)],
'count': 122,
'max_fscore': {'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3, 'fp': 28, 'tn': 79, 'tp': 12},
'threshold': 14.953271028037383},
'overall': 14.953271028037383,
'precision_recall': [{'accuracy': 0.6311475409836066,
'fscore': 0.29411764705882354,
'fscore_beta': 0.5,
'precision': 0.25,
'recall': 1.0,
'table': {'fn': 0,
'fp': 45,
'tn': 62,
'tp': 15},
'threshold': 2.7323722447292558},
{'accuracy': 0.6311475409836066,
'fscore': 0.29411764705882354,
'fscore_beta': 0.5,
'precision': 0.25,
'recall': 1.0,
'table': {'fn': 0,
'fp': 45,
'tn': 62,
'tp': 15},
'threshold': 3.337326996032608},
{'accuracy': 0.6311475409836066,
'fscore': 0.29411764705882354,
'fscore_beta': 0.5,
'precision': 0.25,
'recall': 1.0,
'table': {'fn': 0,
'fp': 45,
'tn': 62,
'tp': 15},
'threshold': 4.076220397836621},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 4.978706836786395},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 6.0810062625217975},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 7.427357821433388},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 9.071795328941251},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 11.080315836233387},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 13.53352832366127},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 16.529888822158654},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 20.189651799465537},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 24.659696394160648},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 30.119421191220212},
{'accuracy': 0.7459016393442623,
'fscore': 0.34285714285714286,
'fscore_beta': 0.5,
'precision': 0.3,
'recall': 0.8,
'table': {'fn': 3,
'fp': 28,
'tn': 79,
'tp': 12},
'threshold': 36.787944117144235}]}}
|
UTF-8
|
Python
| false | false | 2,013 |
10,041,633,549,446 |
cec007980fd4d4552cb19bf811e7f3bb212f2952
|
0e6ed2d91475b653fa18e664efc38d036e257296
|
/text/font-text-1.py
|
59d2faf7afa0f8929ee6647e522ed051a49c361a
|
[] |
no_license
|
roboDocs/drawBotExamples
|
https://github.com/roboDocs/drawBotExamples
|
69d13cabe1b663070db11441bcf27dd11d770568
|
c3bed16ccd38110f83d508914b14413dd971dfbd
|
refs/heads/master
| 2021-01-18T18:25:17.722745 | 2014-12-10T03:05:48 | 2014-12-10T03:05:48 | 27,799,784 | 6 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# a simple font / text example
size(220, 170)
x, y = 30, 110
fill(1, 0, 0)
font("Verdana", 24)
text("hello world", (x, y))
y -= 40
fill(0, 1, 0)
font("Verdana Bold Italic", 24)
text(u"olá mundo", (x, y))
y -= 40
fill(0, 0, 1)
font("Verdana Italic", 24)
text("hola mundo", (x, y))
|
UTF-8
|
Python
| false | false | 2,014 |
5,789,615,932,166 |
f5138b0927a6a505a2e40e5a0fb08d4283483e53
|
c50db5dc8997d15e63ae1a1d308111fbca79d0e5
|
/uliweb/contrib/orm/install.py
|
38ac1711b9ce3801af638015eef0d881f2ddfb18
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
arikachen/uliweb
|
https://github.com/arikachen/uliweb
|
509aef8865cba511837ffe8b9de6ac347ad6aa5e
|
d3fd01edac00d2404bbe182e7f58d11d8bf1d253
|
refs/heads/master
| 2021-05-28T00:34:29.735204 | 2013-11-07T02:27:12 | 2013-11-07T02:27:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from uliweb.utils.common import install
# Install this uliweb ORM contrib app's runtime dependencies.
install(['SQLAlchemy>=0.6', 'pytz'])
|
UTF-8
|
Python
| false | false | 2,013 |
19,335,942,801,734 |
e462b2236719cd2b09264b6c699e9880422112ab
|
609eb72e6f9fefe18ebe806c2aed24bb5b0562c1
|
/apps/invoices/admin.py
|
135a33fa87396b8140fe5f9b7e7a3c60b3a105c2
|
[
"MIT"
] |
permissive
|
PocketGM/django-htk
|
https://github.com/PocketGM/django-htk
|
68b0f780e9f748932e857bf66f3e0ffdf9fb2fa2
|
371ce2c68bc825df174e11d0f6f4c489a8184d9f
|
refs/heads/master
| 2020-12-27T15:26:31.946007 | 2014-12-12T10:45:45 | 2014-12-12T10:45:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from htk.utils import htk_setting
from htk.utils.general import resolve_model_dynamically
# Resolve the concrete invoice models at import time from htk settings,
# so deployments can plug in their own model classes.
InvoiceModel = resolve_model_dynamically(htk_setting('HTK_INVOICE_MODEL'))
InvoiceLineItemModel = resolve_model_dynamically(htk_setting('HTK_INVOICE_LINE_ITEM_MODEL'))
class InvoiceLineItemInline(admin.TabularInline):
    """Inline (tabular) editor for an invoice's line items in the admin."""
    model = InvoiceLineItemModel
    extra = 0  # no blank extra rows by default
    can_delete = True
class InvoiceAdmin(admin.ModelAdmin):
    """Admin for invoices; line items are edited inline on the invoice page."""
    list_display = (
        'id',
        'customer',
        'notes',
        'total',
        'date',
        'paid',
        'invoice_type',
        'payment_terms',
        'view_invoice_link',
    )
    inlines = (
        InvoiceLineItemInline,
    )
    def total(self, obj):
        """Computed list_display column: invoice total formatted as '$<amount>'."""
        value = '$%s' % obj.get_total()
        return value
    def view_invoice_link(self, obj):
        """Computed list_display column: HTML link opening the invoice page."""
        value = u'<a href="%s" target="_blank">View Invoice</a>' % obj.get_url()
        return value
    # allow_tags lets the raw <a> markup through Django's admin escaping
    view_invoice_link.allow_tags = True
    view_invoice_link.short_description = 'View Invoice'
#admin.site.register(InvoiceModel, InvoiceAdmin)
|
UTF-8
|
Python
| false | false | 2,014 |
12,171,937,338,254 |
b3a58000ae1672dc5cccd9efc9d405da144981eb
|
19ee3cf77c85a4454e5ffde5c9cd626a6201329d
|
/test_project/test_runner.py
|
24e7c9150372012147e0fc6bf8ee80296b7115d2
|
[
"BSD-3-Clause"
] |
permissive
|
civiccc/django-pindb
|
https://github.com/civiccc/django-pindb
|
6fde797f694f8904c93d6354262f50fe0508aa37
|
3807ce1d8e60eeef58f0a506c0cfec8520c48527
|
refs/heads/master
| 2021-05-26T20:11:25.804441 | 2012-09-19T22:56:23 | 2012-09-19T22:56:23 | 7,551,675 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.test.simple import DjangoTestSuiteRunner
class PinDbTestSuiteRunner(DjangoTestSuiteRunner):
# Hacking out db creation here so the test cases
# can handle it themselves.
def setup_databases(self, **kwargs):
pass
def teardown_databases(self, old_config, **kwargs):
pass
|
UTF-8
|
Python
| false | false | 2,012 |
2,748,779,083,204 |
18c8218885396f9ef5910f8bde1b02b1798e2e73
|
3bea21645566abb2d77c6d760994d1dcdf921b36
|
/background.py
|
1330a9176dd02badcd79a6fa51e46fad18a14054
|
[
"GPL-3.0-only",
"GPL-1.0-or-later"
] |
non_permissive
|
darabos/pyweek-nemesis
|
https://github.com/darabos/pyweek-nemesis
|
18f23d4755dd94d68595423d534e2f66fd6026a8
|
645d052c355b3a7e66e825e08d1c016c1d58ac08
|
refs/heads/master
| 2021-01-18T20:21:34.393912 | 2013-04-21T15:04:04 | 2013-04-21T15:04:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import OpenGL
import math
from OpenGL.GL import *
import assets
class BackGround(object):
    """Background quad rendered with the animated wave shader.

    `x` and `y` are two-element (min, max) extents of the quad in world
    coordinates; `color` is stored but Draw() currently uses a
    hard-coded blue tint instead.
    """
    def __init__(self, x, y, color):
        self.x = x
        self.y = y
        self.color = color  # NOTE(review): unused by Draw(); tint is hard-coded there
    def Draw(self, time, second_pass):
        # Bind the background shader and its 1D wave texture on unit 0.
        glUseProgram(assets.BACKGROUND_PROGRAM)
        glActiveTexture(GL_TEXTURE0)
        glBindTexture(GL_TEXTURE_1D, assets.WAVE_TEXTURE)
        location = glGetUniformLocation(assets.BACKGROUND_PROGRAM, 'tex')
        glUniform1i(location, 0)
        # Scroll the wave texture over time; modulo keeps the float small.
        location = glGetUniformLocation(assets.BACKGROUND_PROGRAM, 'offset')
        glUniform1f(location, time%100)
        # Semi-transparent blue tint (RGBA uniform).
        location = glGetUniformLocation(assets.BACKGROUND_PROGRAM, 'color')
        glUniform4f(location, 0., 0.3, 0.75, 0.7)
        if second_pass:
            # Second pass: blend over existing geometry with normal depth test.
            glDepthFunc(GL_LESS)
            glEnable(GL_BLEND)
        else:
            # First pass: opaque fill, depth test and blending off.
            glDisable(GL_DEPTH_TEST)
            glDisable(GL_BLEND)
        # Quad sits slightly behind z=0 so foreground geometry wins depth.
        glBegin(GL_QUADS)
        glVertex(self.x[0], self.y[1], -0.01)
        glVertex(self.x[0], self.y[0], -0.01)
        glVertex(self.x[1], self.y[0], -0.01)
        glVertex(self.x[1], self.y[1], -0.01)
        glEnd()
        # Restore the GL state expected by the rest of the renderer.
        if second_pass:
            glDepthFunc(GL_ALWAYS)
        else:
            glEnable(GL_DEPTH_TEST)
        glUseProgram(0)
        glBindTexture(GL_TEXTURE_1D,0)
|
UTF-8
|
Python
| false | false | 2,013 |
7,980,049,252,842 |
ec5c7f5154275874ec8ad1777a1f7c3efac41831
|
0b9802d039ffee38fd666659719034cf7e42c04b
|
/faker/providers/fr_FR/Person.py
|
5602c986c4adfba95315bb1a2cc7ddfe4dcc8896
|
[
"MIT"
] |
permissive
|
SysGrove/faker
|
https://github.com/SysGrove/faker
|
e1f633f3231ee94fdb82a15518ec8ecf899c5385
|
61c1aa3eeece341c0984e95cd3128bcdf3797a78
|
refs/heads/master
| 2021-01-16T18:42:56.837210 | 2013-07-23T14:46:09 | 2013-07-23T14:46:09 | 11,422,041 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
from ..Person import Provider as PersonProvider
class Provider(PersonProvider):
    """fr_FR person-name provider: French given names, surnames and particles."""
    # Name patterns; the plain '{{firstName}} {{lastName}}' entry is repeated
    # to weight it more heavily than the rarer hyphenated/particle forms.
    formats = (
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{lastName}}',
        '{{firstName}} {{prefix}} {{lastName}}',
        '{{firstName}} {{lastName}}-{{lastName}}',
        '{{firstName}}-{{firstName}} {{lastName}}',
    )
    # First entry: male given names; second: female; third: unisex.
    firstNames = (
        'Adrien','Aimé','Alain','Alexandre','Alfred','Alphonse','André','Antoine','Arthur','Auguste','Augustin','Benjamin','Benoît','Bernard','Bertrand','Charles','Christophe','Daniel','David','Denis','Édouard','Émile','Emmanuel','Éric','Étienne','Eugène','François','Franck','Frédéric','Gabriel','Georges','Gérard','Gilbert','Gilles','Grégoire','Guillaume','Guy','William','Henri','Honoré','Hugues','Isaac','Jacques','Jean','Jérôme','Joseph','Jules','Julien','Laurent','Léon','Louis','Luc','Lucas','Marc','Marcel','Martin','Matthieu','Maurice','Michel','Nicolas','Noël','Olivier','Patrick','Paul','Philippe','Pierre','Raymond','Rémy','René','Richard','Robert','Roger','Roland','Sébastien','Stéphane','Théodore','Théophile','Thibaut','Thibault','Thierry','Thomas','Timothée','Tristan','Victor','Vincent','Xavier','Yves','Zacharie',
        'Adélaïde','Adèle','Adrienne','Agathe','Agnès','Aimée','Alexandrie','Alix','Alexandria','Alex','Alice','Amélie','Anaïs','Anastasie','Andrée','Anne','Anouk','Antoinette','Arnaude','Astrid','Audrey','Aurélie','Aurore','Bernadette','Brigitte','Capucine','Caroline','Catherine','Cécile','Céline','Célina','Chantal','Charlotte','Christelle','Christiane','Christine','Claire','Claudine','Clémence','Colette','Constance','Corinne','Danielle','Denise','Diane','Dorothée','Édith','Éléonore','Élisabeth','Élise','Élodie','Émilie','Emmanuelle','Françoise','Frédérique','Gabrielle','Geneviève','Hélène','Henriette','Hortense','Inès','Isabelle','Jacqueline','Jeanne','Jeannine','Joséphine','Josette','Julie','Juliette','Laetitia','Laure','Laurence','Lorraine','Louise','Luce','Lucie','Lucy','Madeleine','Manon','Marcelle','Margaux','Margaud','Margot','Marguerite','Margot','Margaret','Maggie','daisy','Marianne','Marie','Marine','Marthe','Martine','Maryse','Mathilde','Michèle','Michelle','Michelle','Monique','Nathalie','Nath','Nathalie','Nicole','Noémi','Océane','Odette','Olivie','Patricia','Paulette','Pauline','Pénélope','Philippine','Renée','Sabine','Simone','Sophie','Stéphanie','Susanne','Suzanne','Susan','Suzanne','Sylvie','Thérèse','Valentine','Valérie','Véronique','Victoire','Virginie','Zoé',
        'Camille','Claude','Dominique'
    )
    lastNames = (
        'Martin','Bernard','Thomas','Robert','Petit','Dubois','Richard','Garcia','Durand','Moreau','Lefebvre','Simon','Laurent','Michel','Leroy','Martinez','David','Fontaine','Da Silva','Morel','Fournier','Dupont','Bertrand','Lambert','Rousseau','Girard','Roux','Vincent','Lefevre','Boyer','Lopez','Bonnet','Andre','Francois','Mercier','Muller','Guerin','Legrand','Sanchez','Garnier','Chevalier','Faure','Perez','Clement','Fernandez','Blanc','Robin','Morin','Gauthier','Pereira','Perrin','Roussel','Henry','Duval','Gautier','Nicolas','Masson','Marie','Noel','Ferreira','Lemaire','Mathieu','Riviere','Denis','Marchand','Rodriguez','Dumont','Payet','Lucas','Dufour','Dos Santos','Joly','Blanchard','Meunier','Rodrigues','Caron','Gerard','Fernandes','Brunet','Meyer','Barbier','Leroux','Renard','Goncalves','Gaillard','Brun','Roy','Picard','Giraud','Roger','Schmitt','Colin','Arnaud','Vidal','Gonzalez','Lemoine','Roche','Aubert','Olivier','Leclercq','Pierre','Philippe','Bourgeois','Renaud','Martins','Leclerc','Guillaume','Lacroix','Lecomte','Benoit','Fabre','Carpentier','Vasseur','Louis','Hubert','Jean','Dumas','Rolland','Grondin','Rey','Huet','Gomez','Dupuis','Guillot','Berger','Moulin','Hoarau','Menard','Deschamps','Fleury','Adam','Boucher','Poirier','Bertin','Charles','Aubry','Da Costa','Royer','Dupuy','Maillard','Paris','Baron','Lopes','Guyot','Carre','Jacquet','Renault','Herve','Charpentier','Klein','Cousin','Collet','Leger','Ribeiro','Hernandez','Bailly','Schneider','Le Gall','Ruiz','Langlois','Bouvier','Gomes','Prevost','Julien','Lebrun','Breton','Germain','Millet','Boulanger','Remy','Le Roux','Daniel','Marques','Maillot','Leblanc','Le Goff','Barre','Perrot','Leveque','Marty','Benard','Monnier','Hamon','Pelletier','Alves','Etienne','Marchal','Poulain','Tessier','Lemaitre','Guichard','Besson','Mallet','Hoareau','Gillet','Weber','Jacob','Collin','Chevallier','Perrier','Michaud','Carlier','Delaunay','Chauvin','Alexandre','Marechal','Antoine','Lebon','Cordier','Lejeune','Bouchet','Pasquier','Legros','Delattre','Humbert','De Oliveira','Briand','Lamy','Launay','Gilbert','Perret','Lesage','Gay','Nguyen','Navarro','Besnard','Pichon','Hebert','Cohen','Pons','Lebreton','Sauvage','De Sousa','Pineau','Albert','Jacques','Pinto','Barthelemy','Turpin','Bigot','Lelievre','Georges','Reynaud','Ollivier','Martel','Voisin','Leduc','Guillet','Vallee','Coulon','Camus','Marin','Teixeira','Costa','Mahe','Didier','Charrier','Gaudin','Bodin','Guillou','Gregoire','Gros','Blanchet','Buisson','Blondel','Paul','Dijoux','Barbe','Hardy','Laine','Evrard','Laporte','Rossi','Joubert','Regnier','Tanguy','Gimenez','Allard','Devaux','Morvan','Levy','Dias','Courtois','Lenoir','Berthelot','Pascal','Vaillant','Guilbert','Thibault','Moreno','Duhamel','Colas','Masse','Baudry','Bruneau','Verdier','Delorme','Blin','Guillon','Mary','Coste','Pruvost','Maury','Allain','Valentin','Godard','Joseph','Brunel','Marion','Texier','Seguin','Raynaud','Bourdon','Raymond','Bonneau','Chauvet','Maurice','Legendre','Loiseau','Ferrand','Toussaint','Techer','Lombard','Lefort','Couturier','Bousquet','Diaz','Riou','Clerc','Weiss','Imbert','Jourdan','Delahaye','Gilles','Guibert','Begue','Descamps','Delmas','Peltier','Dupre','Chartier','Martineau','Laroche','Leconte','Maillet','Parent','Labbe','Potier','Bazin','Normand','Pottier','Torres','Lagarde','Blot','Jacquot','Lemonnier','Grenier','Rocher','Bonnin','Boutin','Fischer','Munoz','Neveu','Lacombe','Mendes','Delannoy','Auger','Wagner','Fouquet','Mace','Ramos','Pages','Petitjean','Chauveau','Foucher','Peron','Guyon','Gallet','Rousset','Traore','Bernier','Vallet','Letellier','Bouvet','Hamel','Chretien','Faivre','Boulay','Thierry','Samson','Ledoux','Salmon','Gosselin','Lecoq','Pires','Leleu','Becker','Diallo','Merle','Valette'
    )
    # Nobiliary/usual particles inserted by the '{{prefix}}' format above.
    prefixes = ('de', 'de la', 'Le', 'du')
    @classmethod
    def prefix(cls):
        # randomElement is inherited from the base Provider machinery.
        return cls.randomElement( cls.prefixes )
|
UTF-8
|
Python
| false | false | 2,013 |
5,480,378,316,506 |
5693183d27c9c3da79ce30ea98cfc44899e6bf69
|
47fa74f40985b2256969ff25c6469712541306e1
|
/UDPserver.py
|
c942d675a16339faf5e11ac1e002357eef78d7e7
|
[
"GPL-2.0-only"
] |
non_permissive
|
espenbo/Loxone-UDPserver
|
https://github.com/espenbo/Loxone-UDPserver
|
a5ceaf2b4db1560a5a29d28405b4a79296b9790a
|
bc378b9264907eb2c8f8d06d38a5a56b8597e878
|
refs/heads/master
| 2020-12-29T01:32:19.762441 | 2014-08-14T13:14:01 | 2014-08-14T13:14:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#/usr/bin/python
# NOTE(review): Python 2 script (print statements).
# UDP command server for Loxone: loads (function, name, command) triples
# from commands.xml, then listens on localhost:5005 for datagrams of the
# form "<function> <name> [<argument>]" and runs the mapped shell command.
import socket
import os
import re
import xml.dom.minidom
#Script path + name
#print os.path.realpath(__file__)
#Script working Dir
#print os.getcwd()
#Define lists
functionlist = []
namelist = []
commandlist = []
#Open xml file with variables
doc = xml.dom.minidom.parse("commands.xml");
tags = doc.getElementsByTagName("tag")
for tag in tags:
    #print tag.getAttribute("function")
    functionlist.append(tag.getAttribute("function"))
    #print tag.getAttribute("name")
    namelist.append(tag.getAttribute("name"))
    #print tag.getAttribute("command")
    commandlist.append(tag.getAttribute("command"))
#remove double names from list
functionlist = list(set(functionlist))
#print functionlist
#print namelist
#print commandlist
#Set port for running server
UDP_IP = "127.0.0.1"
UDP_PORT = 5005
#Configure socket
sock = socket.socket(socket.AF_INET, # Internet
                     socket.SOCK_DGRAM) # UDP
sock.bind((UDP_IP, UDP_PORT))
#run UDP server
while True:
    data, addr = sock.recvfrom(1024) # buffer size is 1024 bytes
    #print "received message:", data
    # Strip everything except alphanumerics and spaces before splitting;
    # this also sanitises the optional argument passed to the shell below.
    data = re.sub("[^a-z0-9 ]+","", data, flags=re.IGNORECASE)
    datalist = re.split('\s+', data)
    # NOTE(review): datalist[1] raises IndexError for one-word datagrams
    # whose first word matches a known function — confirm intended.
    for j, function in enumerate(functionlist):
        #print "d is ", repr(function)
        #print "data is ", repr(datalist[0])
        if function == datalist[0]:
            #print "running ", datalist[0], function
            for i, name in enumerate(namelist):
                if name == datalist[1]:
                    if len(datalist) == 3:
                        # Command takes one argument: append it.
                        print "execute ", datalist[0], datalist[1], commandlist[i], datalist[2]
                        cmd = (commandlist[i] + " " + datalist[2])
                        #print str(cmd)
                        os.system(str(cmd))
                        break
                    else:
                        # Command takes no argument.
                        print "execute ", datalist[0], datalist[1], commandlist[i]
                        os.system(commandlist[i])
                        break
                else:
                    #print "not found"
                    pass
        else:
            #print "not found"
            pass
|
UTF-8
|
Python
| false | false | 2,014 |
1,039,382,092,428 |
00412b1570b83856f8ae7a8051286bcdc6ee3a61
|
c30f5f618cab47c46f00a995a59a761cf57de19d
|
/penguin/bin/penguin.py
|
407a04bc495f0a0bd778e8e53747e23959ae2fd9
|
[] |
no_license
|
yindashan/aquatic
|
https://github.com/yindashan/aquatic
|
fd8b7a3582b6414f9dd9e13ebb2519d9dc910c29
|
7745d6be9447f1dc47c88f64158d6944e8bc297e
|
refs/heads/master
| 2016-09-05T22:09:42.051712 | 2014-10-20T06:09:07 | 2014-10-20T06:09:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding:utf-8 -*-
###########################################################
# penguin
# 每个机房部署一套
# 1) 检查URL返回状态码
# 2) 检查URL响应时间
# 3) 检查URL返回内容
###########################################################
import urllib,urllib2
import xml.etree.ElementTree as ET
import socket
import httplib
import os
import sys
import urlparse
import time, random
import datetime
import json, logging
from threading import Thread, Event
import subprocess
from xml.dom import minidom
import Queue
# our own lib
from settings import NOCID_LIST, MONITOR_TYPE, HOST, PORT
from daemon import Daemon
from log_record import initlog
from common import QueueItem
from common import LoopTimer
from common import send_info
from async_task.tasks import app_monitor
# In-process cache of AppConfig objects keyed by app name; a dict is used
# mainly because assignment conveniently overwrites stale entries.
appconfig_dict = {}
# 应用配置信息类
class AppConfig(object):
    """Configuration snapshot for one monitored application.

    Bundles everything the checker needs for a single app: the URL
    descriptors to probe, the servers to probe them on, the monitoring
    server endpoint to report to, and the config timestamp used for
    cache invalidation.
    """

    def __init__(self, appname, url_list, server_list, host, port, timestamp):
        # Application identity and the version stamp of this config.
        self.appname = appname
        self.timestamp = timestamp
        # What to check (URL descriptors) and where (target servers).
        self.url_list = url_list
        self.server_list = server_list
        # Monitoring server endpoint that results are reported to.
        self.host = host
        self.port = port
def getAppConfig(appname):
    """Return the cached AppConfig for *appname*, or None if not cached.

    Looks the name up in the module-level ``appconfig_dict`` cache.
    """
    # dict.get performs a single lookup and avoids has_key(), which is
    # deprecated in Python 2 and removed in Python 3; behavior is
    # identical (missing key -> None).
    return appconfig_dict.get(appname)
def setAppConfig(appconfig):
    """
    Store *appconfig* in the module-level cache, keyed by its (stripped)
    app name; an existing entry for the same name is replaced.
    """
    appconfig_dict[appconfig.appname.strip()] = appconfig
class ThreadHttp(Thread):
    """Worker thread: pulls QueueItems off the queue and pushes each one
    to redis as an async check task (see async_task.tasks.app_monitor)."""
    def __init__(self, project_path, queue):
        Thread.__init__(self)
        self.project_path = project_path
        self.queue = queue
    def run(self):
        logger = logging.getLogger('monitor')
        logger.debug(u'守护线程开始执行,当前线程名:%s', self.getName())
        while True:
            try:
                #host = self.queue.get(1, 5) # get(self, block=True, timeout=None): 1 means block, 5 is a 5s timeout
                # Queue.get() removes and returns an item from the head of the
                # queue; with block=True (the default) it suspends the calling
                # thread until an item is available.
                # queueItem = self.queue.get(1, 5)
                # 1) take a task (blocks until one is available)
                queueItem = self.queue.get()
                logger.debug(u'当前执行线程:%s, 当前应用:%s, 当前Server:%s', self.getName(), queueItem.appname, queueItem.server)
                # 2) push an async task onto redis
                app_monitor.apply_async((queueItem,), queue='_check_url_task')
                logger.info(u'【获取任务】%s,推送至redis.', unicode(queueItem))
                #signals to queue job is done
                #self.queue.task_done() # kept disabled: with task_done() the workers stop once the queue drains; without it they keep listening
            except Exception, e:
                # NOTE(review): if get() itself raised, queueItem is unbound
                # here and this log line raises NameError — confirm intended.
                logger.error(u'执行队列中当前元素出错:%s,当前执行线程:%s, 当前应用:%s, 当前Server:%s', e, self.getName(), queueItem.appname, queueItem.server)
                #break
def parseXmlData(project_path, timestamp, xml_data):
    """Parse one application's config XML string, persist it to disk via
    createXml() and return the resulting AppConfig (None on any error)."""
    try:
        root = ET.fromstring(xml_data)
        # Application name.
        appname_tmp = root.find('appname').text.strip()
        url_list = []
        urllist_node = root.find('urlList')
        server_list = []
        serverlist_node = root.find('serverList')
        # Monitoring server address.
        monitor_node = root.find('monitor')
        host = monitor_node.find('host').text.strip()
        port = monitor_node.find('port').text.strip()
        for node in serverlist_node.findall('server'):
            server_list.append(node.text.strip())
        for urlnode in urllist_node.findall('url'):
            item = {}
            url_id = urlnode.find('url_id').text.strip()
            url_value = urlnode.find('url_value').text.strip()
            item['url_id'] = url_id
            item['url_value'] = url_value
            # Optional response-time threshold.
            responsetime = urlnode.find('responsetime')
            if responsetime is not None:
                responsetime = responsetime.text.strip()
                item['responsetime'] = responsetime
            # Optional content-check triple: only stored when all three
            # of type/target/value are present.
            type_tmp = urlnode.find('type')
            target = urlnode.find('target')
            value = urlnode.find('value')
            if type_tmp is not None and target is not None and value is not None:
                type_tmp = type_tmp.text.strip()
                target = target.text.strip()
                value = value.text.strip()
                item['type'] = type_tmp
                item['target'] = target
                item['value'] = value
            url_list.append(item)
        # Write the parsed config back out as an XML file under <project>/config.
        createXml(project_path, timestamp, appname_tmp, server_list, url_list)
        # Build the config object; the caller stores it in the module cache.
        appconfig = AppConfig(appname = appname_tmp, url_list = url_list, server_list = server_list, host = host, port = port, timestamp = timestamp)
        return appconfig
    except Exception, e:
        print u'配置文件解析失败:%s.' % (e)
        return None
def createXml(project_path, timestamp, appname, server_list, url_list):
    """Serialize an application's monitoring config to
    <project_path>/config/<appname>_<timestamp>.xml using minidom.

    The monitor host/port written to the file come from settings
    (HOST/PORT), not from the caller's parsed data. Errors are printed
    and swallowed (best-effort persistence).
    """
    try:
        # Monitoring server host and port from settings.
        host = HOST
        port = str(PORT)
        doc = minidom.Document()
        rootNode = doc.createElement("config")
        doc.appendChild(rootNode)
        node_appname = doc.createElement('appname')
        text_node_appname = doc.createTextNode(appname) # write element text
        node_appname.appendChild(text_node_appname)
        rootNode.appendChild(node_appname)
        node_urlList = doc.createElement('urlList')
        for url_dict in url_list:
            node_url = doc.createElement('url')
            node_url_id = doc.createElement('url_id')
            text_node_url_id = doc.createTextNode(str(url_dict['url_id']))
            node_url_id.appendChild(text_node_url_id)
            node_url.appendChild(node_url_id)
            node_url_value = doc.createElement('url_value')
            text_node_url_value = doc.createTextNode(url_dict['url_value'])
            node_url_value.appendChild(text_node_url_value)
            node_url.appendChild(node_url_value)
            # Optional response-time threshold element.
            if url_dict.has_key('responsetime'):
                node_responsetime = doc.createElement('responsetime')
                text_node_responsetime = doc.createTextNode(str(url_dict['responsetime']))
                node_responsetime.appendChild(text_node_responsetime)
                node_url.appendChild(node_responsetime)
            # Optional content-check triple (all three or none).
            if url_dict.has_key('type') and url_dict.has_key('target') and url_dict.has_key('value'):
                node_type = doc.createElement('type')
                text_node_type = doc.createTextNode(url_dict['type'])
                node_type.appendChild(text_node_type)
                node_url.appendChild(node_type)
                node_target = doc.createElement('target')
                text_node_target = doc.createTextNode(url_dict['target'])
                node_target.appendChild(text_node_target)
                node_url.appendChild(node_target)
                node_value = doc.createElement('value')
                text_node_value = doc.createTextNode(url_dict['value'])
                node_value.appendChild(text_node_value)
                node_url.appendChild(node_value)
            node_urlList.appendChild(node_url)
        rootNode.appendChild(node_urlList)
        node_monitor = doc.createElement('monitor')
        node_monitor_host = doc.createElement('host')
        text_node_monitor_host = doc.createTextNode(host)
        node_monitor_host.appendChild(text_node_monitor_host)
        node_monitor.appendChild(node_monitor_host)
        node_monitor_port = doc.createElement('port')
        text_node_monitor_port = doc.createTextNode(port)
        node_monitor_port.appendChild(text_node_monitor_port)
        node_monitor.appendChild(node_monitor_port)
        rootNode.appendChild(node_monitor)
        node_serverList = doc.createElement('serverList')
        for server in server_list:
            node_server = doc.createElement('server')
            text_node_server = doc.createTextNode(server)
            node_server.appendChild(text_node_server)
            node_serverList.appendChild(node_server)
        rootNode.appendChild(node_serverList)
        # xmlfile = doc.toxml("UTF-8")
        # print xmlfile
        # xmlfile_format = doc.toprettyxml(indent = "\t", newl="\n", encoding="UTF-8")
        # print xmlfile_format
        # path = os.path.dirname(os.path.abspath(__file__))
        # project_path = os.path.dirname(path)
        config_path = os.path.join(project_path, 'config')
        filename = appname + '_' + timestamp + '.xml'
        config_file = os.path.join(config_path, filename)
        # Write out the XML file.
        fd = open(config_file, "w")
        doc.writexml(fd, indent = "", addindent="\t", newl="\n", encoding="UTF-8")
        fd.close()
    except Exception, e:
        print e
def createQueueItem(queue):
    """
    Seed the work queue with one QueueItem per (application, server)
    pair, built from the module-level appconfig_dict cache.
    """
    logger = logging.getLogger('monitor')
    try:
        d1 = datetime.datetime.now()
        for appconfig in appconfig_dict.itervalues():
            for server in appconfig.server_list:
                item = QueueItem(appname = appconfig.appname,
                                 url_list = appconfig.url_list,
                                 server = server,
                                 host = appconfig.host,
                                 port = appconfig.port,
                                 timestamp = appconfig.timestamp)
                queue.put(item)
        d2 = datetime.datetime.now()
        # d is the elapsed wall time, logged for diagnostics.
        d = d2 - d1
        logger.debug(u'初始化队列元素成功,队列中元素个数为:%s,所花时间为:%s', str(queue.qsize()), str(d))
    except Exception, e:
        logger.error(u'初始化队列元素失败:%s', e)
def cron_job(project_path, queue):
    """Periodic task: sync application configs with the monitor server
    (using per-app timestamps for cache invalidation) and enqueue checks."""
    logger = logging.getLogger('monitor')
    logger.debug(u'开始执行cron_job!')
    nocid_list = NOCID_LIST.split(',')
    dd = {'nocid_list':nocid_list, 'monitor_type':MONITOR_TYPE}
    data = json.dumps(dd)
    app_timestamp_data = send_info(HOST, PORT, 'getAppTimeStamp', data)
    if app_timestamp_data is not None:
        # Parse the {appname: timestamp} mapping.
        app_timestamp_dict = json.loads(app_timestamp_data)
        for key, value in app_timestamp_dict.iteritems():
            execute_appconfig = None
            appconfig = getAppConfig(key)
            if appconfig: # this app's config is already cached locally
                if appconfig.timestamp == value: # same timestamp: reuse the cached config
                    logger.debug(u'从临时缓存中获取数据!')
                    execute_appconfig = appconfig
                    # run the check
                    # app_monitor(project_path, execute_appconfig)
                else: # timestamp changed: re-fetch the config from the monitor server
                    post_data = {'nocid_list':nocid_list, 'appname':key}
                    post_data = json.dumps(post_data)
                    logger.debug(u'向监控服务器发送请求获取数据!')
                    appname_config = send_info(HOST, PORT, 'getAppConfig', post_data)
                    if appname_config is not None:
                        execute_appconfig = parseXmlData(project_path, value, appname_config)
                        setAppConfig(execute_appconfig)
                        # run the check
                        # app_monitor(project_path, execute_appconfig)
            else: # not cached yet: fetch the config from the monitor server
                post_data = {'nocid_list':nocid_list, 'appname':key}
                post_data = json.dumps(post_data)
                logger.debug(u'向监控服务器发送请求获取数据!')
                appname_config = send_info(HOST, PORT, 'getAppConfig', post_data)
                if appname_config is not None:
                    execute_appconfig = parseXmlData(project_path, value, appname_config)
                    setAppConfig(execute_appconfig)
                    # run the check
                    # app_monitor(project_path, execute_appconfig)
        # Seed the queue from everything now in the local appconfig_dict cache.
        createQueueItem(queue)
class Agent(Daemon):
    """Daemonized monitoring agent: a pool of worker threads plus a 30s
    loop timer that keeps configs fresh and the work queue filled."""
    def __init__(self, project_path):
        # pid file and captured stdout/stderr live under <project>/tmp.
        path = os.path.join(project_path, 'tmp')
        pidfile = os.path.join(path, 'monitor.pid')
        stdout_path = os.path.join(path, 'monitor.out')
        super(Agent, self).__init__(pidfile=pidfile, stdout=stdout_path,
                stderr=stdout_path)
        self.project_path = project_path
    def run(self):
        logger = logging.getLogger('monitor')
        logger.debug('Agent start.')
        # 1) create the work queue
        queue = Queue.Queue()
        logger.debug(u'创建队列')
        # 2) spawn a pool of daemon worker threads sharing the queue
        for i in range(5):
            t = ThreadHttp(self.project_path, queue)
            t.setDaemon(True)
            t.start()
        logger.debug(u'线程初始化成功')
        # 3) run one config sync immediately
        cron_job(self.project_path, queue)
        # 4) start the periodic timer: active monitoring at a 30s interval
        t = LoopTimer(30, cron_job, [self.project_path, queue])
        t.start()
        # 5) join the queue — effectively blocks the main thread so the
        #    daemon keeps running (workers never call task_done()).
        queue.join()
        logger.debug(u'此打印语句在queue.join()函数之后')
def main():
    """Entry point: set up logging and (re)start the daemonized agent."""
    # Project root is the parent of this script's directory.
    path = os.path.dirname(os.path.abspath(__file__))
    project_path = os.path.dirname(path)
    log_path = os.path.join(project_path, 'log')
    # Logger initialisation: monitoring/reporting log file.
    initlog('monitor', log_path, 'monitor.log', logging.DEBUG)
    logger = logging.getLogger('monitor')
    logger.info('monitor start.')
    # Restart the daemon (stop a running instance, then start fresh).
    agent = Agent(project_path)
    agent.restart()
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
7,980,049,283,928 |
38f098e755afb9140701efe4860445418f33e922
|
bfae1f4693cd39e228c1720b010fe06e1c84fa18
|
/pyramid/lib/python2.7/site-packages/pyramid_debugtoolbar-0.9.7-py2.7.egg/pyramid_debugtoolbar/panels/settings.py
|
897f9adf50e12dfbd96a40d5a3ba7244d88d50e5
|
[
"BSD-2-Clause"
] |
permissive
|
bennihepp/sandbox
|
https://github.com/bennihepp/sandbox
|
1fa8daf6f0bfd79a436ea7b84d1f7ed84036c8a9
|
926ee0d3857573274e53bbfaa7648a2b77da3fd9
|
refs/heads/master
| 2016-09-06T05:21:57.765583 | 2012-01-24T15:20:43 | 2012-01-24T15:20:43 | 2,981,055 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from operator import itemgetter
from pyramid_debugtoolbar.panels import DebugPanel
_ = lambda x: x
class SettingsDebugPanel(DebugPanel):
    """
    A panel to display Pyramid deployment settings for your application (the
    values in ``registry.settings``).
    """
    name = 'Settings'
    has_content = True
    # Old unprefixed setting names that newer Pyramid duplicates under the
    # 'pyramid.' prefix; filtered out to avoid showing each twice.
    filter_old_settings = [
        'debug_authorization',
        'debug_notfound',
        'debug_routematch',
        'debug_templates',
        'reload_templates',
        'reload_resources',
        'reload_assets',
        'default_locale_name',
        'prevent_http_cache',
    ]
    def __init__(self, request):
        # always repr this stuff before it's sent to the template to appease
        # dumbass stuff like MongoDB's __getattr__ that always returns a
        # Collection, which fails when Jinja tries to look up __html__ on it.
        self.request = request
        settings = request.registry.settings
        # filter out non-pyramid prefixed settings to avoid duplication
        # (the prefixed variant existing signals a newer Pyramid)
        if 'pyramid.default_locale_name' in settings:
            reprs = [(k, repr(v)) for k, v in settings.items()
                     if k not in self.filter_old_settings]
        else:
            reprs = [(k, repr(v)) for k, v in settings.items()]
        # Sort alphabetically by setting name for stable display.
        self.settings = sorted(reprs, key=itemgetter(0))
    def nav_title(self):
        # Label shown in the toolbar navigation.
        return _('Settings')
    def title(self):
        # Heading shown on the panel page.
        return _('Settings')
    def url(self):
        # This panel has no external URL.
        return ''
    def content(self):
        # Render the sorted (name, repr) pairs through the Mako template.
        vars = {
            'settings': self.settings
        }
        return self.render(
            'pyramid_debugtoolbar.panels:templates/settings.mako',
            vars, self.request)
|
UTF-8
|
Python
| false | false | 2,012 |
14,602,888,811,328 |
552a458897c383a8b487ddf6187302f9c289f9a5
|
33d37008ba3a543da8a13b45c8235c0bff8de492
|
/veta/api.py
|
384ad630a67f9ee2c74b4edc71ace0a1ccdc7700
|
[] |
no_license
|
gridcentric/veta
|
https://github.com/gridcentric/veta
|
1f7fada85bc658d4f776c0ed2d3eba725716667d
|
aea3df80568802fc70e6b87104bb91c75a2af827
|
refs/heads/master
| 2020-04-06T06:44:45.425335 | 2013-10-23T18:34:23 | 2013-10-23T18:39:38 | 13,076,006 | 1 | 0 | null | false | 2017-01-16T20:38:51 | 2013-09-24T20:37:13 | 2016-01-14T15:42:32 | 2013-10-23T19:18:49 | 180 | 1 | 2 | 0 |
Python
| null | null |
# Copyright 2013 Gridcentric Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles all requests relating to Veta Backup functionality."""
import sys
from nova import exception
from nova import utils as novautils
from nova.db import base
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from . import driver
from . import meta
from . import utils
LOG = logging.getLogger('nova.veta.api')
class API(base.Base):
"""API for interacting with the Veta backup manager."""
# Allow passing in dummy image_service, but normally use the default
def __init__(self, **kwargs):
super(API, self).__init__(**kwargs)
self.driver = driver.load_snapshot_driver()
def backup_schedule_list(self, context, instance_uuid):
return self.driver.instance_backup_schedule(context, instance_uuid)
def backup_schedule_add(self, context, instance_uuid, params):
if not 'frequency' in params:
raise exception.NovaException(
"Backup schedule is missing 'frequency'")
if not 'retention' in params:
raise exception.NovaException(
"Backup schedule is missing 'retention'")
frequency = int(params['frequency'])
retention = int(params['retention'])
if retention < frequency:
raise exception.NovaException(
"Invalid backup schedule: retention < frequency")
schedule = self.driver.instance_backup_schedule(context, instance_uuid)
# Make sure we're not already full
if len(schedule) >= meta.MAX_SCHEDULE_ITEMS:
raise exception.NovaException(
"Maximum number of schedules (%d) already reached" % \
meta.MAX_SCHEDULE_ITEMS)
# Make sure we don't have any conflicts
conflict = utils.schedule_has_conflict(schedule, frequency, retention)
if conflict:
raise exception.NovaException(
"Schedule conflicts with existing schedule %s" % \
conflict[meta.SCHEDULE_ID_KEY])
# Good to go
schedule_id = novautils.generate_uid('b')
new_item = { meta.SCHEDULE_ID_KEY : schedule_id,
meta.SCHEDULE_FREQUENCY_KEY : frequency,
meta.SCHEDULE_RETENTION_KEY : retention,
meta.SCHEDULE_ACTIVE_KEY : 1 }
schedule.append(new_item)
return self.driver.instance_backup_schedule_update(context,
instance_uuid,
schedule)
def backup_schedule_update(self, context, instance_uuid, params):
if not 'schedule_id' in params:
raise exception.NovaException(
"Backup schedule is missing")
if not 'frequency' in params:
raise exception.NovaException(
"Backup schedule is missing 'frequency'")
if not 'retention' in params:
raise exception.NovaException(
"Backup schedule is missing 'retention'")
schedule_id = params['schedule_id']
frequency = int(params['frequency'])
retention = int(params['retention'])
if retention < frequency:
raise exception.NovaException(
"Invalid backup schedule: retention < frequency")
schedule = self.driver.instance_backup_schedule(context, instance_uuid)
# Make sure we don't have any conflicts
conflict = utils.schedule_has_conflict(schedule, frequency, retention)
if conflict and conflict[meta.SCHEDULE_ID_KEY] != schedule_id:
raise exception.NovaException(
"Schedule conflicts with existing schedule %s" % \
conflict[meta.SCHEDULE_ID_KEY])
# Update item
item = utils.find_schedule_item(schedule, schedule_id)
if not item:
raise exception.NovaException("Backup schedule not found: %s" % \
schedule_id)
item[meta.SCHEDULE_FREQUENCY_KEY] = frequency
item[meta.SCHEDULE_RETENTION_KEY] = retention
return self.driver.instance_backup_schedule_update(context,
instance_uuid,
schedule)
def backup_schedule_del(self, context, instance_uuid, params):
if not 'schedule_id' in params:
raise exception.NovaException(
"Backup schedule is missing")
schedule_id = params['schedule_id']
schedule = self.driver.instance_backup_schedule(context, instance_uuid)
item = utils.find_schedule_item(schedule, schedule_id)
if item:
schedule.remove(item)
else:
raise exception.NovaException("Backup schedule not found: %s" % \
schedule_id)
return self.driver.instance_backup_schedule_update(context,
instance_uuid,
schedule)
def backup_schedule_set_active(self, context, instance_uuid, params):
        """Set the 'active' flag on one backup-schedule item.

        ``params`` must carry 'schedule_id' and 'active' (coerced to int).
        Raises NovaException on missing arguments or unknown id; returns
        the driver's updated schedule.
        """
        if 'schedule_id' not in params:
            raise exception.NovaException("Backup schedule is missing")
        if 'active' not in params:
            raise exception.NovaException("Missing argument 'active'")
        schedule_id = params['schedule_id']
        active = int(params['active'])
        schedule = self.driver.instance_backup_schedule(context, instance_uuid)
        # Guard clause instead of if/else: fail fast when the item is absent.
        item = utils.find_schedule_item(schedule, schedule_id)
        if not item:
            raise exception.NovaException("Backup schedule not found: %s" %
                                          schedule_id)
        item[meta.SCHEDULE_ACTIVE_KEY] = active
        return self.driver.instance_backup_schedule_update(context,
                                                           instance_uuid,
                                                           schedule)
def backup_schedule_clear(self, context, instance_uuid):
        """Remove every backup-schedule entry for the given instance."""
        # Passing None as the schedule tells the driver to wipe the stored value.
        return self.driver.instance_backup_schedule_update(
            context, instance_uuid, None)
def backup_schedule_list_backups(self, context, instance_uuid, params):
        """List backups for the instance, optionally scoped to one schedule.

        When params carries a truthy 'schedule_id' it is validated against
        the instance's current schedule first (NovaException if unknown).
        """
        schedule_id = params.get('schedule_id')
        if schedule_id:
            current = self.driver.instance_backup_schedule(context,
                                                           instance_uuid)
            if not utils.find_schedule_item(current, schedule_id):
                raise exception.NovaException(
                    "Backup schedule not found: %s" % schedule_id)
        return self.driver.instance_backups(context, instance_uuid,
                                            schedule_id)
|
UTF-8
|
Python
| false | false | 2,013 |
14,018,773,288,784 |
41a00eb009a36bbc5d974429a347e6c18b031ec7
|
310064f8a415d8cdc85d7cbc6ba56d349c16cd60
|
/wime/apps/agenda/management/commands/import_by_pages.py
|
477b214e8b03d29653909edc3fff63f503cb5383
|
[] |
no_license
|
harmo/Wime
|
https://github.com/harmo/Wime
|
b9c9b851502bc144fb7cbe069bb18ab80880cf74
|
a44cb905e47d81874a6a3a35587bafb1d110a1de
|
refs/heads/master
| 2018-05-13T03:24:06.528395 | 2014-05-12T23:05:54 | 2014-05-12T23:05:54 | 13,846,702 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import re
import time
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Import Facebook pages and their upcoming events per known city.

    For every (city, active wimetag) pair the Graph API is searched for
    pages; new pages, their categories and geolocated future events are
    filled into the manager objects injected through handle().
    """
    help = 'Manage import by pages'
    # Collaborators injected at runtime via handle(options['arguments']).
    Debug = None
    Graph = None
    Trigger = None
    CityManager = None
    WimetagManager = None
    EventManager = None
    PageManager = None
    def search(self):
        """Run the whole import: one Graph search pass per city/wimetag pair."""
        self.Debug.show('\nSTARTING IMPORT BY PAGES...')
        for city_pk, city in self.CityManager.cities.iteritems():
            events_found = []
            for wimetag_fb_id, wimetag in self.WimetagManager.actives_wimetags.iteritems():
                # Search pages matching "<city>+<term>" (and "+<tag>" when a
                # distinct tag exists).
                term_search = self.Graph.search(
                    term=city.name+'+'+wimetag.term,
                    page=True,
                    type='page',
                    limit=1000
                )
                if wimetag.tag != '' and wimetag.tag is not None and wimetag.term.lower() != wimetag.tag.lower():
                    tag_search = self.Graph.search(
                        term=city.name+'+'+wimetag.tag,
                        page=True,
                        type='page',
                        limit=1000
                    )
                    pages = [s['data'] for s in term_search][0] + [s['data'] for s in tag_search][0]
                else:
                    pages = [s['data'] for s in term_search][0]
                self.Debug.show('\nFound {pages} pages in {city} with term {term}'.format(pages=len(pages), city=city.name, term=wimetag))
                pages_found = []
                categories_found = []
                for page in pages:
                    # Only fetch details for pages not yet registered.
                    if page['id'] not in self.PageManager.pages:
                        """ Loading pages """
                        search = self.Graph.fql(
                            'SELECT {fields}\
                            FROM page\
                            WHERE page_id={page_id}'
                            .format(fields=self.Trigger.page_fields, page_id=page['id'])
                        )
                        if len(search['data']) > 0:
                            page_found = search['data'][0]
                            if self.is_page(page_found['page_url']):
                                pages_found.append(page_found)
                                """ Search for categories in results """
                                if len(page_found['categories']) > 0:
                                    for cat in page_found['categories']:
                                        categories_found.append(cat)
                        """ Loading events """
                        # Future events created by this page, restricted to the
                        # city name or a lat/long box around the city centre.
                        events = self.Graph.fql(
                            'SELECT {fields}\
                            FROM event\
                            WHERE creator={creator}\
                            AND start_time>={start_time}\
                            AND (\
                                strpos(lower(location), lower("{city}")) >= 0 AND venue.city="{city}"\
                                OR (venue.latitude>"{min_lat}" AND venue.latitude<"{max_lat}" AND venue.longitude>"{min_long}" AND venue.longitude<"{max_long}")\
                            )'.format(
                                fields=self.Trigger.events_fields,
                                creator=page['id'],
                                start_time=str(int(time.time())),
                                city=city.name,
                                min_lat=float(city.latitude) - self.Trigger.location_offset,
                                max_lat=float(city.latitude) + self.Trigger.location_offset,
                                min_long=float(city.longitude) - self.Trigger.location_offset,
                                max_long=float(city.longitude) + self.Trigger.location_offset)
                        )
                        if len(events['data']) > 0:
                            for e in events['data']:
                                events_found.append(e)
                # Persist what this pass found and keep running totals.
                self.Debug.show('Filling pages...')
                pages_filled = self.PageManager.fill(pages_found)
                self.Trigger.pages_filled += pages_filled
                self.Debug.show('\t⇨ Filled {places} new pages to register.'.format(places=pages_filled))
                self.Debug.show('Filling categories...')
                categories_filled = self.WimetagManager.fill(categories_found)
                self.Debug.show('\t⇨ Filled {places} categories.'.format(places=categories_filled))
                self.Debug.show('Filling events...')
                events_filled = self.EventManager.fill(events_found, city_pk, wimetag.pk)
                self.Trigger.events_filled += events_filled
                self.Debug.show('\t⇨ Filled {events} events potentially registerable.'.format(events=events_filled))
    def is_page(self, url):
        """Return True when *url* contains a '/pages/' path segment."""
        match = re.search('\/(pages)\/', url)
        if hasattr(match, 'group') and match.group(0):
            return True
        return False
    def handle(self, *args, **options):
        """Entry point: receive collaborator objects and run the search."""
        self.Debug = options['arguments']['Debug']
        self.Graph = options['arguments']['Graph']
        self.Trigger = options['arguments']['Trigger']
        self.CityManager = options['arguments']['CityManager']
        self.WimetagManager = options['arguments']['WimetagManager']
        self.EventManager = options['arguments']['EventManager']
        self.PageManager = options['arguments']['PageManager']
        self.search()
|
UTF-8
|
Python
| false | false | 2,014 |
7,576,322,348,604 |
6f2fe01c40c7473f92c2a00cb3fc6f1743ef554c
|
7a5413089fb135601f73c398a7697fa6f013c216
|
/story/models.py
|
53e289557eb876c82e3a16c59e90da2954aa41ca
|
[] |
no_license
|
viperasi/onlinegalgame
|
https://github.com/viperasi/onlinegalgame
|
9825c6894499720db42fb6c6e94d3ef636516629
|
a71b96b0b8a4992246e29ae17a066d6c4cb013b5
|
refs/heads/master
| 2020-12-30T17:33:00.622327 | 2011-08-25T11:31:24 | 2011-08-25T11:31:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
# Create your models here.
class UserStory(models.Model):
    """A user-authored story; saves feed an activity stream of StoryEvent."""
    title = models.CharField(max_length=32)
    cdate = models.DateField(blank=True)
    author = models.ForeignKey(User)
    summary = models.TextField(blank=True)
    process = models.TextField()
    sort = models.IntegerField(default=0)
    image = models.ImageField(upload_to='static/story/%Y/%m', blank=True)
    # Reverse generic relation to the StoryEvent entries that point here.
    events = generic.GenericRelation('StoryEvent')
    def __unicode__(self):
        # NOTE(review): trailing space in the representation looks
        # accidental — confirm before removing.
        return u'%s ' % (self.title)
class StoryEvent(models.Model):
    """Activity-stream entry pointing at an arbitrary model via contenttypes."""
    user = models.ForeignKey(User)
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    # Generic FK composed from content_type + object_id.
    event = generic.GenericForeignKey('content_type', 'object_id')
    created = models.DateTimeField(auto_now_add = True)
from django.db.models.signals import post_save
def story_save(sender, instance, created, *args, **kwargs):
    """post_save receiver: record a StoryEvent the first time a UserStory is saved."""
    if created:
        event = StoryEvent(user = instance.author, event = instance)
        event.save()
# Only UserStory saves trigger the receiver.
post_save.connect(story_save, sender = UserStory)
|
UTF-8
|
Python
| false | false | 2,011 |
10,101,763,108,962 |
212805cbfa12cb14efd9ffe666defe62727dab37
|
a66479d1ee45e6667cc45a7f908bc8c34cab8cc5
|
/banana/maya/extensions/OpenMayaFX/__init__.py
|
05f8d785b149227357e99d7400f6bbc13f65eda4
|
[
"MIT"
] |
permissive
|
jonntd/banana.maya
|
https://github.com/jonntd/banana.maya
|
f83b08f1cf3410e635ca4fc1d78260c75325213f
|
f0a0b7e58b542e989b7aa465386e411dacb4449c
|
refs/heads/master
| 2020-12-26T04:04:27.739630 | 2014-06-22T21:22:27 | 2014-06-22T22:04:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
banana.maya.extensions.OpenMayaFX
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenMayaFX extensions.
:copyright: Copyright 2014 by Christopher Crouzet.
:license: MIT, see LICENSE for details.
"""
# No public names are re-exported yet; the explicit empty list keeps the
# package's public API deliberate.
__all__ = [
]
|
UTF-8
|
Python
| false | false | 2,014 |
10,316,511,485,718 |
a08a01cf74fdfb1567fab7018c3a83ae6425fdce
|
ce6adf4b87ff9b9378146128ca01c908eb4955bd
|
/mpesafinder/database.py
|
6452106b59e6eb86c4919af04d37c7de3dd39c5a
|
[] |
no_license
|
wasonganelson/app_engine_py_j2me_code_samples
|
https://github.com/wasonganelson/app_engine_py_j2me_code_samples
|
c8bee8b14f9c8fd1d30baf5a63221c847b2215f0
|
f4c34fb3921952b3a1f79af5e01106e82b97eb36
|
refs/heads/master
| 2021-05-26T12:39:06.112427 | 2013-03-27T10:58:59 | 2013-03-27T10:58:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import db
class Agentatmtable(db.Model):
    """App Engine datastore model describing an M-Pesa agent/ATM location."""
    primaryid = db.IntegerProperty()
    businessname = db.StringProperty()
    # NOTE(review): coordinates are stored as strings — confirm callers
    # expect strings rather than FloatProperty/GeoPt.
    latitude = db.StringProperty()
    longitude = db.StringProperty()
    businesstype = db.StringProperty()
    verified = db.StringProperty()
#end class Agentatmtable ...
|
UTF-8
|
Python
| false | false | 2,013 |
14,353,780,709,304 |
9e28c3da80c293252d2b1d183ae53afa9b4a6b70
|
a1ea0faba9f2a0933764d3167f723bc41b0d689a
|
/analysis/nonFunctionalSgRNAs/count_cases_where_second_best_is_non_obvious.py
|
4d8109021bece54e75ef1333b9b2707a83dea4e6
|
[] |
no_license
|
webhappy/parseRegulonDB
|
https://github.com/webhappy/parseRegulonDB
|
305404c82bcd18b44cfd3c715e4e04f98ecc347c
|
e544c77dc569215d3c10d40248b96b2cde01b396
|
refs/heads/master
| 2021-01-19T21:25:33.435093 | 2014-06-21T01:02:17 | 2014-06-21T01:02:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Count how often I get a different result if I opt to sort by p-value versus LR
import mysql.connector
from GenericUtils import *
import csv
DIST_FROM_ATG_CUTOFF = 200 # Can't just use the gene label from All data since some sgRNAs refer to the gene with a different name
outfile=csv.writer(open('bad_sgRNAs.csv','wb'))
outfile.writerow(['Gene name','# sgRNA','Mean LR','Median LR','sgRNA LR','sgRNA p-val','position', 'sgRNA counts sum'])
genes_with_problem = [] # append names of genes to here
cnx = mysql.connector.connect(user='root', host='127.0.0.1', database='t')
cursor = cnx.cursor(buffered=True)
cursor.execute('select gene_id, gene_name, gene_posleft, gene_posright, gene_strand from gene')
for (gene_id,name,left,right,strand) in cursor:
gene_has_problem = False
cursorT=cnx.cursor(buffered=True)
if strand == 'forward':
# it's on the positive orientation, so want to let right-side by smaller of right and left+100
right = min(right,DIST_FROM_ATG_CUTOFF + left)
else:
left = max(left,right - DIST_FROM_ATG_CUTOFF)
flipped=convert_strand_to_minus_plus(flipStrand(strand))
cursorT.execute("select s.pos, s.seq,p.pval,p.logFC, p.countsSum from sgRNA s LEFT JOIN aerobic_PVALS p on s.seq=p.seq"+
" where strand=%s and s.pos<=%s and s.pos>=%s",(flipped,right,left))
items = []
for (pos,seq,pval,logFC, countsSum) in cursorT:
try:
pval = float(pval)
logFC = float(logFC)
#dist=getDistanceForSgRNAFromStart(pos,strand,left,right)
items.append((pos,seq,countsSum,pval,logFC))
except:
print seq,'has no pVal'
if len(items) == 0:
print name,'has no sgRNAs'
continue
if len(items) <= 2:
print name,'has only',len(items),'sgRNAs, skipping'
continue
sgRNA_logFC = [ x[-1] for x in items ] # extract only logFC
gene_mean = meanOfList(sgRNA_logFC)
gene_median = medianOfList(sgRNA_logFC)
sorted_by_p_val = sorted(items, key=lambda x: x[-2]) # sorted by pvalue
sorted_by_logFC = sorted(items, key=lambda x:x[-1]) # sorted by logFC
if sorted_by_logFC[1] != sorted_by_p_val[1]:
print name,'is tricky!'
if gene_has_problem:
genes_with_problem.append(name)
print len(genes_with_problem),'genes have at least one problematic sgRNA!'
|
UTF-8
|
Python
| false | false | 2,014 |
8,272,107,051,227 |
370d7985d05ea7c89d9ea1dafa84a36261bc008f
|
e506dae22ac5ae1b81660c3c39519e9440e679c4
|
/custom_apps/editorials/urls.py
|
9c62487d001389a1ce028405f19d8dac3bd30049
|
[] |
no_license
|
antonyant/elb
|
https://github.com/antonyant/elb
|
57e8375f242581265d3b3b428ca58bcaae538bf1
|
b77a52d4aadcbe3ba6a0e8f3e4b9e79468dd7289
|
refs/heads/master
| 2021-01-19T18:10:21.116813 | 2014-05-28T08:06:57 | 2014-05-28T08:06:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include, url
# URL routes for the editorials app (Django 1.x string-view patterns).
urlpatterns = patterns('',
    # Landing page: current editorials.
    url(r'^$', 'editorials.views.editorial_list', name="editorial_list"),
    # Archive of past editorials.
    url(r'^archive/$', 'editorials.views.editorial_archive', name="editorial_archive"),
    # Single editorial addressed by primary key.
    url(r'^(?P<pk>[-\d]+)/', 'editorials.views.editorial_detail', name="editorial_detail"),
)
|
UTF-8
|
Python
| false | false | 2,014 |
8,667,244,019,048 |
4407a38813fd53a8c718220e8b6e941cc2ad97a2
|
b7e6abfce08477b33ae85365df4b872b66a47e10
|
/tcp_to_arduino/nodes/sensor_broadcast.py
|
de8ba5ccfcf3bb29f821bdd1366016ab8f0eacae
|
[] |
no_license
|
florisvb/flybot
|
https://github.com/florisvb/flybot
|
3db0588c4598b29ad669e88d2b94bc793dc83972
|
1f1ab131d32c9aa7c4d68bc25bd030d617bc9bc1
|
refs/heads/master
| 2021-01-13T02:02:08.473016 | 2012-11-27T02:20:33 | 2012-11-27T02:20:33 | 5,059,028 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import roslib
roslib.load_manifest('tcp_to_arduino')
import rospy
from tcp_to_arduino.msg import ArduinoSensorData
import socket
class Arduino_Sensor_Broadcaster:
def __init__ (self, TCP_IP, TCP_PORT, BUFFER_SIZE, sensor_list, rate):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print 'socket acquired, connecting...'
self.sock.connect((TCP_IP, TCP_PORT))
print 'socket connected to: ', TCP_IP, 'port: ', TCP_PORT
self.sensor_list = sensor_list
self.arduino_publisher = rospy.Publisher("arduino", ArduinoSensorData)
rospy.init_node('arduino_publisher', anonymous=True)
self.main(rate)
def main(self, rate):
try:
rospy.Timer( rospy.Duration(1/float(rate)), self.publish_sensor_data )
rospy.spin()
except KeyboardInterrupt:
print "Shutting Down"
self.sock.close()
def publish_sensor_data(self, timer):
ros_msg = self.get_sensor_data()
self.arduino_publisher.publish(ros_msg)
def get_sensor_data(self):
ros_msg = ArduinoSensorData()
for sensor in sensor_list:
msg_to_arduino = 'get:' + sensor + '\n'
self.sock.send(msg_to_arduino)
data = ''
collect_data = True
while collect_data:
data += self.sock.recv(BUFFER_SIZE)
if data[-1] == '\n':
collect_data = False
#print 'data: ', data
ros_msg.__setattr__(sensor, int(data) )
return ros_msg
if __name__ == '__main__':
    # Standalone configuration for manual runs; edit for your network.
    TCP_IP = '192.168.0.104'
    TCP_PORT = 8888
    BUFFER_SIZE = 8
    sensor_list = ['sensor1', 'sensor2']
    rosrate = 10
    # The constructor connects, registers the ROS node and blocks in spin().
    arduino_sensor_broadcaster = Arduino_Sensor_Broadcaster(TCP_IP, TCP_PORT, BUFFER_SIZE, sensor_list, rosrate)
|
UTF-8
|
Python
| false | false | 2,012 |
9,251,359,573,503 |
20b83bae62e2079e7abeac11dc82de07aa62e531
|
60a687bf84b6086c462c953a808250046e7c7396
|
/gridmonitor/model/sft/name_mapping.py
|
5a5bbd1959d8e10c4478dde9edfc8a73248f2d86
|
[
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"MIT",
"GPL-1.0-or-later"
] |
non_permissive
|
placiflury/gridmonitor-arc
|
https://github.com/placiflury/gridmonitor-arc
|
79f0a6df1bbaab6dfbfcedb14a47aba516f80387
|
42832ea5c1a3c258900c08fae3483a0dd63cb9fd
|
refs/heads/master
| 2021-01-01T19:51:25.815320 | 2012-10-04T06:51:47 | 2012-10-04T06:51:47 | 33,272,006 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""
Module for String mappings of database columns
"""
# Column-name -> display-label mappings; each *_ORDER list fixes the order
# in which columns are presented.
SFT_KEYMAP = {
        'name' : ['Name:'],
        'cluster_group' : ['Cluster group:'],
        'vo_group' : ['VO group:'],
        'test_suit' : ['Test suit:'],
        'minute' : ['Minute:'],
        'hour' : ['Hour:'],
        'day' : ['Day:'],
        'month' : ['Month:'],
        'day_of_week' : ['Day of Week:']
}
SFT_KEYMAP_ORDER = ['name', 'cluster_group', 'vo_group', 'test_suit', 'month', 'day_of_week', 'day', 'hour', 'minute']
# Cluster hosts and their display aliases.
CLUSTER_KEYMAP = {
    'hostname' : ['Hostname:'],
    'alias' : ['Alias:']
}
CLUSTER_KEYMAP_ORDER = ['hostname', 'alias']
CLUSTER_GR_KEYMAP = {
    'name' : ['Name:']
}
CLUSTER_GR_KEYMAP_ORDER = ['name']
# Virtual organisations and VO groups.
VO_KEYMAP = {
    'name' : ['Name:'],
    'server' : ['Server:']
}
VO_KEYMAP_ORDER = ['name', 'server']
VO_GR_KEYMAP = {
    'name' : ['Name:']
}
VO_GR_KEYMAP_ORDER = ['name']
# Users (grid certificate DN + display name) and user groups.
USER_KEYMAP = {
    'DN' : ['DN:'],
    'display_name' : ['Display Name:']
}
USER_KEYMAP_ORDER = ['display_name', 'DN']
USER_GR_KEYMAP = {
    'name' : ['Name:']
}
USER_GR_KEYMAP_ORDER = ['name']
# Individual tests (with their XRSL job description) and test suites.
TEST_KEYMAP = {
    'name' : ['Name:'],
    'xrsl' : ['XRSL:']
}
TEST_KEYMAP_ORDER = ['name', 'xrsl']
TEST_SUIT_KEYMAP = {
    'name' : ['Name:']
}
TEST_SUIT_KEYMAP_ORDER = ['name']
|
UTF-8
|
Python
| false | false | 2,012 |
12,232,066,863,798 |
41819ac81996998d2a1da1137a7087c30bdbe681
|
49e3f60730a2116906e182a402deca9c68551c0a
|
/wxtest/wxplatform/MessageConfigParser.py
|
caeafca1023bffd1288f352d29204954c8d96afd
|
[] |
no_license
|
FashtimeDotCom/wxtest
|
https://github.com/FashtimeDotCom/wxtest
|
7ad64c7bc17d48e54a24f5f6e3b87311200b3329
|
0067914cfdeb09815d472f29c1675584567c3319
|
refs/heads/master
| 2020-04-06T07:11:03.257657 | 2013-08-07T04:37:48 | 2013-08-07T04:37:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding:utf-8 -*-
"""
@author: U{jinlei<mailto:[email protected]>}
@date: 2013-08-02 09:12:45
@Copyright: asiainfo-linkage 2013
@version: $Revision$
"""
import sys
import ConfigParser
from global_settings import *
from logger import sm_logger
class MessageConfigParser:
    """Loads and parses the message configuration file (MESSAGE_CONFIG_FILE)."""

    def __init__(self):
        # Path is supplied by global_settings.
        self.configFile = MESSAGE_CONFIG_FILE

    def config(self):
        """Parse the configuration file and return a ConfigParser instance.

        Exits the process when the file cannot be opened; logs (but does not
        raise) on a malformed file.
        """
        sm_logger.debug("****")
        try:
            configData = open(self.configFile, "r")
        except Exception as e:
            # Bug fix: the original concatenated the exception object onto a
            # str ("..." + e), which itself raises TypeError; wrap in str().
            sm_logger.debug("没有找到" + self.configFile + ":" + str(e))
            sys.exit()
        try:
            config = ConfigParser.ConfigParser()
            config.readfp(configData)
        except ConfigParser.Error as e:
            # Same str-concatenation fix as above.
            sm_logger.debug("配置文件格式错误:" + str(e))
        finally:
            # Bug fix: the original leaked the file handle on a parse error.
            configData.close()
        return config
|
UTF-8
|
Python
| false | false | 2,013 |
18,726,057,423,330 |
d83a9190192cda6c31ad95661555d3fa6db3cfd3
|
8bffc34074d6f6328efcc0d1639e1407a147c3db
|
/fujitsu-usb-touchscreen/fujitsu_touchscreen_calibration.py
|
cd8e6f69c3c1c9bbbcd6e7b06198e960e8dbed8c
|
[
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"GPL-2.0-only",
"GPL-3.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-or-later",
"GPL-3.0-only",
"LGPL-2.0-or-later"
] |
non_permissive
|
mapmot/fujitsu-u810
|
https://github.com/mapmot/fujitsu-u810
|
d3bc01f4b3e68d8fec21d9d805d4c51daf975713
|
3f0ae9e6260d08ad7e59ed9a1cb9e62c7b8b5aa2
|
refs/heads/master
| 2021-01-25T09:59:51.314578 | 2013-10-03T15:49:48 | 2013-10-03T15:49:48 | 13,302,292 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import subprocess
import curses, os, sys, traceback
# global variables
fujitsu_utility_helper="/usr/bin/fujitsu_touchscreen_helper"
MODULE = "fujitsu_usb_touchscreen"
MODULE_PATH = "/sys/module/"+MODULE+"/parameters/"
scrn = None
o_minx = o_maxx = o_miny = o_maxy = -1
l_minx = l_maxx = l_miny = l_maxy = -1
c_minx = c_maxx = c_miny = c_maxy = -1
def runpsax():
    """Run ``ps ax`` and return its output lines, truncated to screen width.

    Bug fix: the original stored results on an undefined ``gb`` object and
    raised NameError when called; the lines are now returned instead.
    NOTE(review): requires curses to be initialised (curses.COLS exists
    only after initscr()).
    """
    p = os.popen('ps ax','r')
    cmdoutlines = []
    for ln in p:
        # don't allow line wraparound, so truncate long lines
        ln = ln[:curses.COLS]
        # remove EOLN if it is still there
        if ln[-1] == '\n': ln = ln[:-1]
        cmdoutlines.append(ln)
    p.close()
    return cmdoutlines
def replaceline(l,tx,att=curses.A_NORMAL):
    """Overwrite screen row *l* with text *tx* rendered with attribute *att*."""
    global scrn
    scrn.move(l,0)
    scrn.clrtoeol()
    scrn.addstr(l,0,tx,att)
    scrn.refresh()
def get_module_param(pname):
    """Read and return the stripped value of a kernel-module parameter.

    Reads MODULE_PATH + pname (a sysfs parameters file).  The ``with``
    block fixes the original's leaked handle when read() raised.
    """
    with open(MODULE_PATH + pname) as p:
        return p.read().strip()
def get_calib_coordinates():
    """Refresh the current ('this read') calibration bounds from sysfs."""
    global c_minx, c_maxx, c_miny, c_maxy
    c_minx=get_module_param("calib_minx")
    c_maxx=get_module_param("calib_maxx")
    c_miny=get_module_param("calib_miny")
    c_maxy=get_module_param("calib_maxy")
def get_calib_initial():
    """Snapshot the pre-existing ('old') touch calibration bounds from sysfs."""
    global o_minx, o_maxx, o_miny, o_maxy
    o_minx=get_module_param("touch_minx")
    o_maxx=get_module_param("touch_maxx")
    o_miny=get_module_param("touch_miny")
    o_maxy=get_module_param("touch_maxy")
def set_values():
    """Write the current calibration bounds via the setuid helper binary."""
    global c_minx, c_maxx, c_miny, c_maxy
    cmdo=subprocess.Popen([fujitsu_utility_helper, "writecalibrate", c_minx, c_miny, c_maxx, c_maxy],stdout=subprocess.PIPE)
def reset_calib():
    """Ask the helper binary to reset the accumulated calibration values."""
    cmdo=subprocess.Popen([fujitsu_utility_helper, "resetcalibrate"],stdout=subprocess.PIPE)
def set_calibrationmode(m):
    """Switch the driver's calibration mode on ("ON") or off (anything else)."""
    # Map the symbolic mode onto the helper binary's numeric flag.
    flag = "1" if m == "ON" else "0"
    cmdo = subprocess.Popen([fujitsu_utility_helper, "calibrate", flag],
                            stdout=subprocess.PIPE)
def main():
    """Interactive loop: show live calibration reads until the user quits or saves."""
    global scrn
    curses_set()
    display_screen()
    set_calibrationmode("ON")
    get_calib_initial()
    reset_calib()
    while True:
        get_calib_coordinates()
        display_values()
        swap_c2l()
        # getch() is non-blocking (nodelay was set); -1 means no key pressed.
        c = scrn.getch()
        if c==-1: curses.napms(350)
        elif c==113 or c==81: break   # 'q'/'Q': quit without saving
        elif c==114 or c==82:
            reset_calib()             # 'r'/'R': reset calibration values
        elif c==115 or c==83:
            set_values()              # 's'/'S': save displayed values, quit
            break
    set_calibrationmode("OFF")
    curses_reset()
def swap_c2l():
    """Record the current calibration read as the 'last read' values."""
    # Tuple assignment keeps the four copies atomic and compact.
    global l_minx, l_maxx, l_miny, l_maxy
    l_minx, l_maxx, l_miny, l_maxy = c_minx, c_maxx, c_miny, c_maxy
def display_screen():
    """Draw the static title bar and the key-help footer."""
    global MODULE
    replaceline(0,MODULE+" module calibration tool",curses.A_REVERSE)
    replaceline(curses.LINES-3,"Press Q to quit")
    replaceline(curses.LINES-2,"      S to set calibration to displayed values and quit")
    replaceline(curses.LINES-1,"      R to reset calibration values")
def display_values():
    """Render the old, last-read and current calibration ranges."""
    replaceline(2,"Old Calibration: x:["+str(o_minx)+","+str(o_maxx)+"] y:["+str(o_miny)+","+str(o_maxy)+"]")
    replaceline(3,"New: last read:  x:["+str(l_minx)+","+str(l_maxx)+"] y:["+str(l_miny)+","+str(l_maxy)+"]")
    replaceline(4,"New: this read:  x:["+str(c_minx)+","+str(c_maxx)+"] y:["+str(c_miny)+","+str(c_maxy)+"]")
def curses_set():
    """Initialise curses: raw-ish input, no echo, non-blocking getch()."""
    global scrn
    scrn = curses.initscr()
    curses.noecho()
    curses.cbreak()
    curses.flushinp()
    scrn.keypad(1)
    scrn.nodelay(1)
def curses_reset():
    """Restore the terminal to its pre-curses state."""
    global scrn
    scrn.keypad(0)
    curses.nocbreak()
    curses.echo()
    curses.endwin()
if __name__ =='__main__':
    try:
        main()
    except:
        # Bare except is deliberate: whatever went wrong (including
        # KeyboardInterrupt), calibration mode must be switched off and the
        # terminal restored before the error is reported.
        set_calibrationmode("OFF")
        # Idiom fix: identity comparison with None ('is not', not '!=').
        if scrn is not None:
            curses_reset()
        # print error message re exception
        traceback.print_exc()
|
UTF-8
|
Python
| false | false | 2,013 |
4,492,535,801,900 |
eea05e45e8ae3c0a5087e2944cce794c0c000346
|
d8067808014ab941de5ee5a33b9c5658fe4b0c3e
|
/Level2.py
|
6d4c1d363734a9e9f73379e0984652657d6c6099
|
[] |
no_license
|
BurakM/PythonChallenge
|
https://github.com/BurakM/PythonChallenge
|
f05eafef6c1d839fce2cd64bfc892871afbb582b
|
0f86f4322c54fe9599f597e215c6282d2d665874
|
refs/heads/master
| 2016-09-06T15:33:45.147319 | 2014-10-19T02:19:21 | 2014-10-19T02:19:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'BurakM'

# http://www.pythonchallenge.com/pc/def/ocr.html
# Level 2 consists of finding latin characters in a huge set characters. Since the set is too large, we need to
# separately treat the string.

# Need to use regular expressions to solve the problem
import re

# Bug fix: the original opened the file without ever closing it; the
# 'with' block guarantees the handle is released.
with open('Level2string.txt', 'r') as MyFile:
    # Reads the file
    data = MyFile.read()
#print(data)

# For every latin letter, print each position where it occurs.
for i in 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ':
    for m in re.finditer(i, data):
        print(i, m.start())

# We obtain the key: equality
# Next URL: http://www.pythonchallenge.com/pc/def/equality.html
|
UTF-8
|
Python
| false | false | 2,014 |
11,424,613,034,626 |
a8c08b9e0e9e890375b14f3a19b845f225ecdd07
|
c80b3cc6a8a144e9858f993c10a0e11e633cb348
|
/components/ally-http/ally/http/impl/processor/internal_error.py
|
2784145cd36703ed18689237aae1ed93991bbc0b
|
[] |
no_license
|
cristidomsa/Ally-Py
|
https://github.com/cristidomsa/Ally-Py
|
e08d80b67ea5b39b5504f4ac048108f23445f850
|
e0b3466b34d31548996d57be4a9dac134d904380
|
refs/heads/master
| 2021-01-18T08:41:13.140590 | 2013-11-06T09:51:56 | 2013-11-06T09:51:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Jun 22, 2012
@package: ally http
@copyright: 2011 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
Provide the internal error representation. This is usually when the server fails badly.
'''
from ally.container.ioc import injected
from ally.design.processor.attribute import defines
from ally.design.processor.context import Context
from ally.design.processor.execution import Chain, Error
from ally.design.processor.handler import Handler
from ally.design.processor.processor import Processor
from ally.http.spec.codes import INTERNAL_ERROR, CodedHTTP
from ally.support.util_io import convertToBytes, IInputStream
from collections import Iterable
from io import StringIO, BytesIO
import logging
import traceback
# --------------------------------------------------------------------
log = logging.getLogger(__name__)
# --------------------------------------------------------------------
class Response(CodedHTTP):
    '''
    The response context.
    '''
    # ---------------------------------------------------------------- Defined
    # HTTP header map installed when an internal error is generated.
    headers = defines(dict)
class ResponseContent(Context):
    '''
    The response content context.
    '''
    # ---------------------------------------------------------------- Optional
    # Body payload: either a raw stream or an iterable of byte chunks.
    # NOTE(review): the section marker says 'Optional' but the attribute is
    # declared with defines(...) — confirm which was intended.
    source = defines(IInputStream, Iterable)
# --------------------------------------------------------------------
@injected
class InternalErrorHandler(Handler):
    '''
    Implementation for a processor that provides the handling of internal errors.
    '''
    errorHeaders = {'Content-Type':'text;charset=UTF-8'}
    # The headers that will be placed on the response.
    # NOTE(review): 'text' is not a registered MIME type — likely meant
    # 'text/plain'; confirm client expectations before changing.
    def __init__(self, response=Response, responseCnt=ResponseContent, **contexts):
        '''
        Construct the internal error handler.
        '''
        assert isinstance(self.errorHeaders, dict), 'Invalid error headers %s' % self.errorHeaders
        super().__init__(Processor(dict(response=Response, responseCnt=ResponseContent, **contexts), self.process))
    def process(self, chain, **keyargs):
        '''
        Provides the additional arguments by type to be populated.
        '''
        assert isinstance(chain, Chain), 'Invalid processors chain %s' % chain
        # Register the error hook for any failure further down the chain.
        chain.onError(self.processError)
        if __debug__:
            # If in debug mode and the response content has a source generator then we will try to read that
            # in order to catch any exception before the actual streaming.
            chain.onFinalize(self.processFinalization)
    def processFinalization(self, final, response, responseCnt, **keyargs):
        '''
        Process the finalization.
        '''
        assert isinstance(response, Response), 'Invalid response %s' % response
        assert isinstance(responseCnt, ResponseContent), 'Invalid response content %s' % responseCnt
        if isinstance(responseCnt.source, Iterable):
            # Pre-drain the generator into a buffer so that a failure here
            # can still be converted into an internal-error response.
            content = BytesIO()
            try:
                for bytes in responseCnt.source: content.write(bytes)
            except:
                log.exception('Exception occurred while processing the content')
                error = StringIO()
                traceback.print_exc(file=error)
                INTERNAL_ERROR.set(response)
                response.headers = dict(self.errorHeaders)
                responseCnt.source = convertToBytes(self.errorResponse(error), 'UTF-8', 'backslashreplace')
            else:
                # Success: replace the generator with the rewound buffer.
                content.seek(0)
                responseCnt.source = content
    def processError(self, error, response, responseCnt, **keyargs):
        '''
        Process the error.
        '''
        assert isinstance(error, Error), 'Invalid error execution %s' % error
        assert isinstance(response, Response), 'Invalid response %s' % response
        assert isinstance(responseCnt, ResponseContent), 'Invalid response content %s' % responseCnt
        assert isinstance(error.exception, Exception), 'Invalid error exception %s' % error.exception
        if error.isRetrying: return # Maybe next time
        error.suppress()
        excInfo = (type(error.exception), error.exception, error.exception.__traceback__)
        log.error('Exception occurred while processing the execution', exc_info=excInfo)
        # If there is an explanation for the error occurred, we do not need to make another one
        if responseCnt.source is not None: return
        ferror = StringIO()
        traceback.print_exception(*excInfo, file=ferror)
        INTERNAL_ERROR.set(response)
        response.headers = dict(self.errorHeaders)
        responseCnt.source = convertToBytes(self.errorResponse(ferror), 'UTF-8', 'backslashreplace')
    def errorResponse(self, error):
        '''
        Generates the error response.

        @param error: StringIO
            The error stream that contains the stack info.
        '''
        assert isinstance(error, IInputStream), 'Invalid error stream %s' % error
        yield 'Internal server error occurred, this is a major issue so please contact your administrator\n\n'
        error.seek(0)
        yield error.read()
|
UTF-8
|
Python
| false | false | 2,013 |
6,880,537,658,830 |
0070db00cb75fb3ad22debb518978378e0fbaaa4
|
b45ddc577a7946a5925de2735fcdc617384d253c
|
/core/models/migrations/versions/5a62a9de050_dodanie_pola_adres.py
|
a9086a803cb4add048a1182047fd0a18d40ef300
|
[] |
no_license
|
toudi/kwit
|
https://github.com/toudi/kwit
|
8e45729c1703f5d1b410e95b2277e52fc2980112
|
87b41ae51c4aac14f25f0e5fa8d79b161c1f5112
|
refs/heads/master
| 2021-01-06T20:41:36.362389 | 2014-11-30T02:08:01 | 2014-11-30T02:08:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Dodanie pola adres
Revision ID: 5a62a9de050
Revises: 4717277a51e
Create Date: 2014-11-30 01:13:28.074204
"""
# revision identifiers, used by Alembic.
revision = '5a62a9de050'
down_revision = '4717277a51e'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import core
def upgrade():
    """Apply the migration: drop the obsolete kwit_kontrahenci table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('kwit_kontrahenci')
    ### end Alembic commands ###
def downgrade():
    """Revert the migration: recreate the kwit_kontrahenci table (data is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('kwit_kontrahenci',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('nazwa', sa.VARCHAR(), nullable=True),
    sa.Column('nip', sa.VARCHAR(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
|
UTF-8
|
Python
| false | false | 2,014 |
6,098,853,594,881 |
0c05a107638776df57248855344ad74a5dbca8a2
|
35b32ecd513b292c79f3fe3d37499fde07bbc386
|
/peyotl/test/test_nexson_validation.py
|
1acf008deb19fd02119a929a2ad348fa54dc2398
|
[
"Python-2.0",
"BSD-2-Clause"
] |
permissive
|
pombredanne/peyotl
|
https://github.com/pombredanne/peyotl
|
26c6bf1228e1cac3ec6bbc35acc6fadc1ce089f8
|
bb3b074054f30039b1045873225624e3afa85f36
|
refs/heads/master
| 2020-05-20T19:28:21.676380 | 2014-12-13T10:56:55 | 2014-12-13T10:56:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
from peyotl.nexson_syntax import detect_nexson_version, get_empty_nexson
from peyotl.utility.str_util import UNICODE, is_str_type
from peyotl.nexson_validation import validate_nexson
from peyotl.test.support import pathmap
from peyotl.utility import get_logger
import unittest
import codecs
import json
import os
_LOG = get_logger(__name__)
# round trip filename tuples
VALID_NEXSON_DIRS = ['9', 'otu', ]
def read_json(fp):
    """Parse and return the JSON document stored at path *fp* (UTF-8)."""
    with codecs.open(fp, 'r', encoding='utf-8') as src:
        return json.loads(src.read())
def write_json(o, fp):
    """Write *o* to *fp* as pretty-printed, key-sorted JSON plus a newline."""
    serialized = json.dumps(o, indent=2, sort_keys=True) + '\n'
    with codecs.open(fp, 'w', encoding='utf-8') as dest:
        dest.write(serialized)
def through_json(d):
    """Round-trip *d* through a JSON encode/decode cycle (normalises types)."""
    serialized = json.dumps(d)
    return json.loads(serialized)
def dict_eq(a, b):
    """Return True when *a* equals *b*.

    Kept as a named hook (handy as a breakpoint when comparing validation
    summaries); the original if/return ladder reduces to one expression.
    """
    return a == b
def conv_key_unicode_literal(d):
    """Recursively replace the literal value 'unicode' with 'str' in dict values.

    Non-dict inputs are returned unchanged.  Nested dicts are converted
    recursively; list elements are passed back through this function (so a
    bare 'unicode' string *inside a list* is deliberately left alone, just
    like the original).  Used to normalise py2/py3 type names in expected
    validation output.
    """
    if not isinstance(d, dict):
        return d
    def _conv(v):
        # Convert one dict value according to the rules above.
        if isinstance(v, dict):
            return conv_key_unicode_literal(v)
        if isinstance(v, list):
            return [conv_key_unicode_literal(i) for i in v]
        if is_str_type(v) and v == 'unicode':
            return 'str'
        return v
    return dict((k, _conv(v)) for k, v in d.items())
class TestConvert(unittest.TestCase):
def testDetectVersion(self):
        """Version sniffing should report 1.3.1 for the bad_version fixture."""
        o = pathmap.nexson_obj('invalid/bad_version.json.input')
        v = detect_nexson_version(o)
        self.assertEqual(v, '1.3.1')
def testValidFilesPass(self):
        """Every NexSON fixture in VALID_NEXSON_DIRS must validate with no errors."""
        format_list = ['1.2']
        msg = ''
        for d in VALID_NEXSON_DIRS:
            for nf in format_list:
                frag = os.path.join(d, 'v{f}.json'.format(f=nf))
                nexson = pathmap.nexson_obj(frag)
                aa = validate_nexson(nexson)
                annot = aa[0]
                for e in annot.errors:
                    _LOG.debug('unexpected error from {f}: {m}'.format(f=frag, m=UNICODE(e)))
                if len(annot.errors) > 0:
                    # Persist the error/warning summary so a failure can be inspected.
                    ofn = pathmap.nexson_source_path(frag + '.output')
                    ew_dict = annot.get_err_warn_summary_dict()
                    write_json(ew_dict, ofn)
                    msg = "File failed to validate cleanly. See {o}".format(o=ofn)
                self.assertEqual(len(annot.errors), 0, msg)
def testInvalidFilesFail(self):
msg = ''
for fn in pathmap.all_files(os.path.join('nexson', 'invalid')):
if fn.endswith('.input'):
frag = fn[:-len('.input')]
inp = read_json(fn)
try:
aa = validate_nexson(inp)
except:
continue
annot = aa[0]
if len(annot.errors) == 0:
ofn = pathmap.nexson_source_path(frag + '.output')
ew_dict = annot.get_err_warn_summary_dict()
write_json(ew_dict, ofn)
msg = "Failed to reject file. See {o}".format(o=str(msg))
self.assertTrue(False, msg)
def testExpectedWarnings(self):
msg = ''
for fn in pathmap.all_files(os.path.join('nexson', 'warn_err')):
if fn.endswith('.input'):
frag = fn[:-len('.input')]
efn = frag + '.expected'
if os.path.exists(efn):
inp = read_json(fn)
aa = validate_nexson(inp)
annot = aa[0]
ew_dict = annot.get_err_warn_summary_dict()
ew_dict = conv_key_unicode_literal(through_json(ew_dict))
exp = conv_key_unicode_literal(read_json(efn))
if not dict_eq(ew_dict, exp):
ofn = frag + '.output'
write_json(ew_dict, ofn)
msg = "Validation failed to produce expected outcome. Compare {o} and {e}".format(o=ofn, e=efn)
self.assertDictEqual(exp, ew_dict, msg)
else:
_LOG.warn('Expected output file "{f}" not found'.format(f=efn))
def testOldExpectedWarnings(self):
msg = ''
for fn in pathmap.all_files(os.path.join('nexson', 'old-tests')):
if fn.endswith('.input'):
frag = fn[:-len('.input')]
efn = frag + '.expected'
if os.path.exists(efn):
inp = read_json(fn)
aa = validate_nexson(inp)
annot = aa[0]
ew_dict = annot.get_err_warn_summary_dict()
ew_dict = through_json(ew_dict)
exp = read_json(efn)
if not dict_eq(ew_dict, exp):
ofn = frag + '.output'
write_json(ew_dict, ofn)
msg = "Validation failed to produce expected outcome. Compare {o} and {e}".format(o=ofn, e=efn)
self.assertDictEqual(exp, ew_dict, msg)
else:
_LOG.warn('Expected output file "{f}" not found'.format(f=efn))
def testCreated(self):
b = get_empty_nexson()
aa = validate_nexson(b)
annot = aa[0]
self.assertFalse(annot.has_error())
b = get_empty_nexson(include_cc0=True)
aa = validate_nexson(b)
annot = aa[0]
self.assertFalse(annot.has_error())
# Allow running this test module directly, outside the test harness.
if __name__ == "__main__":
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
7,224,134,996,124 |
d919599d29d07a23733c4ecfd2eebc9d38ddc24a
|
747f759311d404af31c0f80029e88098193f6269
|
/addons/document_sftp/__init__.py
|
43aadc8064b2b5183ff96268408d7886a3123669
|
[] |
no_license
|
sgeerish/sirr_production
|
https://github.com/sgeerish/sirr_production
|
9b0d0f7804a928c0c582ddb4ccb7fcc084469a18
|
1081f3a5ff8864a31b2dcd89406fac076a908e78
|
refs/heads/master
| 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
/home/openerp/production/extra-addons/document_sftp/__init__.py
|
UTF-8
|
Python
| false | false | 2,013 |
1,005,022,347,408 |
1ae3582489296531d4c880326ccdfa8725cb3cac
|
077bcb719c6c95a0dffa2756fe7f1a8142091218
|
/easyLeap.py
|
25c053a386109e87ed0230457a9b633866e4af90
|
[] |
no_license
|
centime/easyLeap
|
https://github.com/centime/easyLeap
|
88babaad8882ec18aad617eba254eabffab1934b
|
89a0f0ecc5ccb112c92a1e249840d14ca976d4b4
|
refs/heads/master
| 2021-01-19T11:45:34.543459 | 2013-12-14T03:15:02 | 2013-12-14T03:15:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from leap import Leap
from math import atan
import default_setup
GLOBALS = default_setup.GLOBALS
def init(globals):
    """Merge the caller-supplied settings dict into the module-level GLOBALS defaults."""
    GLOBALS.update(globals)
class Listener(Leap.Listener): #The Listener that we attach to the controller.
def __init__(self, actor):
super(Listener, self).__init__()
self.actor = actor
self.center = Leap.Vector(GLOBALS['CENTER_X'],GLOBALS['CENTER_Y'],GLOBALS['CENTER_Z'])
def on_init(self, controller):
pass
def on_connect(self, controller):
self.setup_gestures(controller)
self.actor.on_connect(controller)
def setup_gestures(self,controller):
if GLOBALS['ENABLE_GESTURE_SWIPE'] :
controller.enable_gesture(Leap.Gesture.TYPE_SWIPE)
if GLOBALS['ENABLE_GESTURE_CIRCLE'] :
controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE)
if GLOBALS['ENABLE_GESTURE_KEY_TAP'] :
controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP)
if GLOBALS['ENABLE_GESTURE_SCREEN_TAP'] :
controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP)
if(controller.config.set("Gesture.Swipe.MinLength", GLOBALS['GESTURE_SWIPE_MIN_LEN'])
and controller.config.set("Gesture.Swipe.MinVelocity", GLOBALS['GESTURE_SWIPE_MIN_VEL'])):
controller.config().save()
if controller.config.set("Gesture.Circle.MinRadius", GLOBALS['GESTURE_CIRCLE_MIN_RADIUS']):
controller.config.save()
if(controller.config.set("Gesture.KeyTap.MinDownVelocity", GLOBALS['GESTURE_KEY_TAP_MIN_VEL'])
and controller.config.set("Gesture.KeyTap.MinDistance", GLOBALS['GESTURE_KEY_TAP_MIN_LEN'])):
controller.config.save()
if(controller.config.set("Gesture.ScreenTap.MinForwardVelocity", GLOBALS['GESTURE_SCREEN_TAP_MIN_VEL'])
and controller.config.set("Gesture.ScreenTap.MinDistance", GLOBALS['GESTURE_SCREEN_TAP_MIN_LEN'])):
controller.config.save()
def on_disconnect(self, controller):
self.actor.on_disconnect(controller)
def on_exit(self, controller):
self.actor.on_exit(controller)
def on_frame(self, controller):
frame = controller.frame() # Get latest information from the leapmotion
if not frame.hands.empty: # Only process if hands present
if GLOBALS['MAIN_HAND'] == 'right' :
hand = frame.hands.rightmost
if GLOBALS['MAIN_HAND'] == 'left' :
hand = frame.hands.leftmost
if GLOBALS['ENABLE_HANDS'] :
self.watch_params_in_hand(hand) # Watch after every parameter specified in WATCHED_PARAMS_IN_HAND
gestures = frame.gestures()
if not gestures.empty : #FIX hands ?
for gesture in gestures :
self.watch_params_in_gesture(gesture)
def watch_params_in_gesture(self,gesture):
if len(gesture.hands) > 0 : #FIX hands ?
gesture_params = {}
if gesture.type == 1 :
gesture= Leap.SwipeGesture(gesture)
gesture.direction_name = self.get_direction_name(gesture.direction)
gesture.current_position_name=self.get_position_name(gesture.hands[0].palm_position)
gesture.start_position_name=self.get_position_name(gesture.start_position)
watched_params = GLOBALS['WATCHED_PARAMS_IN_SWIPE']
if gesture.type == 4 :
gesture= Leap.CircleGesture(gesture)
if gesture.pointable.direction.angle_to(gesture.normal) <= Leap.PI/2:
gesture.clockwiseness = "clockwise"
else:
gesture.clockwiseness = "counterclockwise"
gesture.position_name=self.get_position_name(gesture.center)
watched_params = GLOBALS['WATCHED_PARAMS_IN_CIRCLE']
if gesture.type == 5 :
gesture.activated = True
watched_params = GLOBALS['WATCHED_PARAMS_IN_SCREEN_TAP']
if gesture.type == 6 :
gesture.activated = True
watched_params = GLOBALS['WATCHED_PARAMS_IN_KEY_TAP']
for path, api in watched_params.items():
try :
gesture_params[path] = eval('gesture.'+api )
except:
print "%s\t%s not found. Valid api ?" %(path,api)
if gesture.type == 1 :
self.actor.on_swipe(gesture_params)
if gesture.type == 4 :
self.actor.on_circle(gesture_params)
if gesture.type == 5 :
self.actor.on_screen_tap(gesture_params)
if gesture.type == 6 :
self.actor.on_key_tap(gesture_params)
def watch_params_in_hand(self,hand):
# Watch after every parameter specified in WATCHED_PARAMS_IN_HAND
hand_params = {}
hand.openess = hand.palm_position.distance_to(hand.sphere_center)
hand.upside_down = hand.palm_position.y-hand.sphere_center.y < 0
hand.pitch = atan(1.0*hand.palm_normal.z/hand.palm_normal.y)
hand.roll = atan(1.0*hand.palm_normal.x/hand.palm_normal.y)
hand.dist_fingers_lr = hand.fingers.leftmost.stabilized_tip_position.distance_to(hand.fingers.rightmost.stabilized_tip_position)
hand.angle_fingers_lr = hand.fingers.leftmost.direction.angle_to(hand.fingers.rightmost.direction)
lm_fing_tip_pos = hand.fingers.leftmost.tip_position
hand.lm_fing_tip_rel_pos_x = hand.palm_position.x - lm_fing_tip_pos.x
hand.lm_fing_tip_rel_pos_y = hand.palm_position.y - lm_fing_tip_pos.y
hand.lm_fing_tip_rel_pos_z = hand.palm_position.z - lm_fing_tip_pos.z
rm_fing_tip_pos = hand.fingers.leftmost.tip_position
hand.rm_fing_tip_rel_pos_x = hand.palm_position.x - rm_fing_tip_pos.x
hand.rm_fing_tip_rel_pos_y = hand.palm_position.y - rm_fing_tip_pos.y
hand.rm_fing_tip_rel_pos_z = hand.palm_position.z - rm_fing_tip_pos.z
avg_angle = 0
avg_dist = 0
count = 0
for finger in hand.fingers :
others = [ f for f in hand.fingers ]
others.remove(finger)
for other in others :
avg_dist += finger.tip_position.distance_to(other.tip_position)
avg_angle += finger.direction.angle_to(other.direction)
count += 1
if count > 0 :
avg_angle /= count
avg_dist /= count
hand.fingers_avg_dist = avg_dist
hand.fingers_avg_angle = avg_angle
hand.position_name = self.get_position_name(hand.palm_position)
for path, api in GLOBALS['WATCHED_PARAMS_IN_HAND'].items():
try :
hand_params[path] = eval('hand.'+api )
except:
print "%s\t%s not found. Valid api ?" %(path,api)
self.actor.on_hand(hand_params)
def get_position_name(self, position):
position_name = ''
if position.x-self.center.x > GLOBALS['POSITION_THRSLD'] : position_name += ' right'
if position.x-self.center.x <= -GLOBALS['POSITION_THRSLD'] : position_name += ' left'
if position.y-self.center.y > GLOBALS['POSITION_THRSLD'] : position_name += ' top'
if position.y-self.center.y <= -GLOBALS['POSITION_THRSLD'] : position_name += ' bottom'
if position.z-self.center.z > GLOBALS['POSITION_THRSLD'] : position_name += ' back'
if position.z-self.center.z <= -GLOBALS['POSITION_THRSLD'] : position_name += ' front'
return position_name
def get_direction_name(self, direction):
direction_name = ''
if direction.x > GLOBALS['DIRECTION_THRSLD'] : direction_name += ' right'
if direction.x <= -GLOBALS['DIRECTION_THRSLD'] : direction_name += ' left'
if direction.y > GLOBALS['DIRECTION_THRSLD'] : direction_name += ' top'
if direction.y <= -GLOBALS['DIRECTION_THRSLD'] : direction_name += ' bottom'
if direction.z > GLOBALS['DIRECTION_THRSLD'] : direction_name += ' back'
if direction.z <= -GLOBALS['DIRECTION_THRSLD'] : direction_name += ' front'
return direction_name
class Actor():
def on_connect(self, leap_controller):
print 'Leap connected'
print
def on_disconnect(self, leap_controller):
print 'Leap disconnected'
print
def on_swipe(self, swipe_params):
for k,v in swipe_params.items():
print '%s\t\t\t%s' %(k,v)
print
def on_circle(self, circle_params):
print 'o'
for k,v in circle_params.items():
print '%s\t\t\t%s' %(k,v)
print
def on_screen_tap(self, screen_tap_params):
for k,v in screen_tap_params.items():
print '%s\t\t\t%s' %(k,v)
print
def on_key_tap(self, key_tap_params):
for k,v in key_tap_params.items():
print '%s\t\t\t%s' %(k,v)
print
def on_hand(self, hand_params):
for k,v in hand_params.items():
print '%s\t\t\t%s' %(k,v)
print
def on_exit(self, leap_controller):
print 'Exit.'
print
|
UTF-8
|
Python
| false | false | 2,013 |
11,914,239,292,374 |
f0e23fe85c30b9953a5950b23423a094e630db5a
|
1225067976fc63f5c63adafe13034f1fd4621559
|
/wifite/runengine.py
|
b3090d00e3e83f52c5d897525a46aaf58234af38
|
[
"GPL-2.0-only"
] |
non_permissive
|
MartijnBraam/wifite
|
https://github.com/MartijnBraam/wifite
|
2eafb9279c7df5a33b17c408752c1e8f3baf0498
|
d0d8115ed7c00fc0c1c25dcf0db7402fa98600e2
|
refs/heads/master
| 2021-01-17T03:42:21.233374 | 2014-07-03T21:32:16 | 2014-07-03T21:32:31 | 21,476,930 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class RunEngine:
    def __init__(self, run_config):
        # Keep a two-way link between the engine and its configuration object.
        self.RUN_CONFIG = run_config
        self.RUN_CONFIG.RUN_ENGINE = self
    def initial_check(self):
        """
        Ensures required programs are installed.

        Exits (via exit_gracefully) when any aircrack-ng suite tool or `iw`
        is missing; missing reaver disables WPS attacks; the remaining tools
        only produce a recommendation notice.
        """
        # Hard requirement: the aircrack-ng suite.
        airs = ['aircrack-ng', 'airodump-ng', 'aireplay-ng', 'airmon-ng', 'packetforge-ng']
        for air in airs:
            if program_exists(air): continue
            print R+' [!]'+O+' required program not found: %s' % (R+air+W)
            print R+' [!]'+O+' this program is bundled with the aircrack-ng suite:'+W
            print R+' [!]'+O+'        '+C+'http://www.aircrack-ng.org/'+W
            print R+' [!]'+O+' or: '+W+'sudo apt-get install aircrack-ng\n'+W
            self.RUN_CONFIG.exit_gracefully(1)
        # airmon-ng needs `iw` to manage wireless interfaces.
        if not program_exists('iw'):
            print R+' [!]'+O+' airmon-ng requires the program %s\n' % (R+'iw'+W)
            self.RUN_CONFIG.exit_gracefully(1)
        printed = False
        # Check reaver -- missing reaver turns WPS attacks off entirely.
        if not program_exists('reaver'):
            printed = True
            print R+' [!]'+O+' the program '+R+'reaver'+O+' is required for WPS attacks'+W
            print R+'    '+O+'   available at '+C+'http://code.google.com/p/reaver-wps'+W
            self.RUN_CONFIG.WPS_DISABLE = True
        elif not program_exists('walsh') and not program_exists('wash'):
            printed = True
            print R+' [!]'+O+' reaver\'s scanning tool '+R+'walsh'+O+' (or '+R+'wash'+O+') was not found'+W
            print R+' [!]'+O+' please re-install reaver or install walsh/wash separately'+W
        # Check handshake-checking apps (optional, improve handshake detection).
        recs = ['tshark', 'pyrit', 'cowpatty']
        for rec in recs:
            if program_exists(rec): continue
            printed = True
            print R+' [!]'+O+' the program %s is not required, but is recommended%s' % (R+rec+O, W)
        if printed: print ''
def enable_monitor_mode(self, iface):
"""
First attempts to anonymize the MAC if requested; MACs cannot
be anonymized if they're already in monitor mode.
Uses airmon-ng to put a device into Monitor Mode.
Then uses the get_iface() method to retrieve the new interface's name.
Sets global variable IFACE_TO_TAKE_DOWN as well.
Returns the name of the interface in monitor mode.
"""
mac_anonymize(iface)
print GR+' [+]'+W+' enabling monitor mode on %s...' % (G+iface+W),
stdout.flush()
call(['airmon-ng', 'start', iface], stdout=DN, stderr=DN)
print 'done'
self.RUN_CONFIG.WIRELESS_IFACE = '' # remove this reference as we've started its monitoring counterpart
self.RUN_CONFIG.IFACE_TO_TAKE_DOWN = self.get_iface()
if self.RUN_CONFIG.TX_POWER > 0:
print GR+' [+]'+W+' setting Tx power to %s%s%s...' % (G, self.RUN_CONFIG.TX_POWER, W),
call(['iw', 'reg', 'set', 'BO'], stdout=OUTLOG, stderr=ERRLOG)
call(['iwconfig', iface, 'txpower', self.RUN_CONFIG.TX_POWER], stdout=OUTLOG, stderr=ERRLOG)
print 'done'
return self.RUN_CONFIG.IFACE_TO_TAKE_DOWN
def disable_monitor_mode(self):
"""
The program may have enabled monitor mode on a wireless interface.
We want to disable this before we exit, so we will do that.
"""
if self.RUN_CONFIG.IFACE_TO_TAKE_DOWN == '': return
print GR+' [+]'+W+' disabling monitor mode on %s...' % (G+self.RUN_CONFIG.IFACE_TO_TAKE_DOWN+W),
stdout.flush()
call(['airmon-ng', 'stop', self.RUN_CONFIG.IFACE_TO_TAKE_DOWN], stdout=DN, stderr=DN)
print 'done'
def rtl8187_fix(self, iface):
"""
Attempts to solve "Unknown error 132" common with RTL8187 devices.
Puts down interface, unloads/reloads driver module, then puts iface back up.
Returns True if fix was attempted, False otherwise.
"""
# Check if current interface is using the RTL8187 chipset
proc_airmon = Popen(['airmon-ng'], stdout=PIPE, stderr=DN)
proc_airmon.wait()
using_rtl8187 = False
for line in proc_airmon.communicate()[0].split():
line = line.upper()
if line.strip() == '' or line.startswith('INTERFACE'): continue
if line.find(iface.upper()) and line.find('RTL8187') != -1: using_rtl8187 = True
if not using_rtl8187:
# Display error message and exit
print R+' [!]'+O+' unable to generate airodump-ng CSV file'+W
print R+' [!]'+O+' you may want to disconnect/reconnect your wifi device'+W
self.RUN_CONFIG.exit_gracefully(1)
print O+" [!]"+W+" attempting "+O+"RTL8187 'Unknown Error 132'"+W+" fix..."
original_iface = iface
# Take device out of monitor mode
airmon = Popen(['airmon-ng', 'stop', iface], stdout=PIPE, stderr=DN)
airmon.wait()
for line in airmon.communicate()[0].split('\n'):
if line.strip() == '' or \
line.startswith("Interface") or \
line.find('(removed)') != -1:
continue
original_iface = line.split()[0] # line[:line.find('\t')]
# Remove drive modules, block/unblock ifaces, probe new modules.
print_and_exec(['ifconfig', original_iface, 'down'])
print_and_exec(['rmmod', 'rtl8187'])
print_and_exec(['rfkill', 'block', 'all'])
print_and_exec(['rfkill', 'unblock', 'all'])
print_and_exec(['modprobe', 'rtl8187'])
print_and_exec(['ifconfig', original_iface, 'up'])
print_and_exec(['airmon-ng', 'start', original_iface])
print '\r \r',
print O+' [!] '+W+'restarting scan...\n'
return True
def get_iface(self):
"""
Get the wireless interface in monitor mode.
Defaults to only device in monitor mode if found.
Otherwise, enumerates list of possible wifi devices
and asks user to select one to put into monitor mode (if multiple).
Uses airmon-ng to put device in monitor mode if needed.
Returns the name (string) of the interface chosen in monitor mode.
"""
if not self.RUN_CONFIG.PRINTED_SCANNING:
print GR+' [+]'+W+' scanning for wireless devices...'
self.RUN_CONFIG.PRINTED_SCANNING = True
proc = Popen(['iwconfig'], stdout=PIPE, stderr=DN)
iface = ''
monitors = []
adapters = []
for line in proc.communicate()[0].split('\n'):
if len(line) == 0: continue
if ord(line[0]) != 32: # Doesn't start with space
iface = line[:line.find(' ')] # is the interface
if line.find('Mode:Monitor') != -1:
monitors.append(iface)
else: adapters.append(iface)
if self.RUN_CONFIG.WIRELESS_IFACE != '':
if monitors.count(self.RUN_CONFIG.WIRELESS_IFACE): return self.RUN_CONFIG.WIRELESS_IFACE
else:
if self.RUN_CONFIG.WIRELESS_IFACE in adapters:
# valid adapter, enable monitor mode
print R+' [!]'+O+' could not find wireless interface %s in monitor mode' % (R+'"'+R+self.RUN_CONFIG.WIRELESS_IFACE+'"'+O)
return self.enable_monitor_mode(self.RUN_CONFIG.WIRELESS_IFACE)
else:
# couldnt find the requested adapter
print R+' [!]'+O+' could not find wireless interface %s' % ('"'+R+self.RUN_CONFIG.WIRELESS_IFACE+O+'"'+W)
self.RUN_CONFIG.exit_gracefully(0)
if len(monitors) == 1:
return monitors[0] # Default to only device in monitor mode
elif len(monitors) > 1:
print GR+" [+]"+W+" interfaces in "+G+"monitor mode:"+W
for i, monitor in enumerate(monitors):
print " %s. %s" % (G+str(i+1)+W, G+monitor+W)
ri = raw_input("%s [+]%s select %snumber%s of interface to use for capturing (%s1-%d%s): %s" % \
(GR, W, G, W, G, len(monitors), W, G))
while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors):
ri = raw_input("%s [+]%s select number of interface to use for capturing (%s1-%d%s): %s" % \
(GR, W, G, len(monitors), W, G))
i = int(ri)
return monitors[i - 1]
proc = Popen(['airmon-ng'], stdout=PIPE, stderr=DN)
for line in proc.communicate()[0].split('\n'):
if len(line) == 0 or line.startswith('Interface'): continue
monitors.append(line)
if len(monitors) == 0:
print R+' [!]'+O+" no wireless interfaces were found."+W
print R+' [!]'+O+" you need to plug in a wifi device or install drivers.\n"+W
self.RUN_CONFIG.exit_gracefully(0)
elif self.RUN_CONFIG.WIRELESS_IFACE != '' and monitors.count(self.RUN_CONFIG.WIRELESS_IFACE) > 0:
return self.enable_monitor_mode(monitor)
elif len(monitors) == 1:
monitor = monitors[0][:monitors[0].find('\t')]
return self.enable_monitor_mode(monitor)
print GR+" [+]"+W+" available wireless devices:"
for i, monitor in enumerate(monitors):
print " %s%d%s. %s" % (G, i + 1, W, monitor)
ri = raw_input(GR+" [+]"+W+" select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W))
while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors):
ri = raw_input(" [+] select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W))
i = int(ri)
monitor = monitors[i-1][:monitors[i-1].find('\t')]
return self.enable_monitor_mode(monitor)
    def scan(self, channel=0, iface='', tried_rtl8187_fix=False):
        """
        Scans for access points. Asks user to select target(s).
            "channel" - the channel to scan on, 0 scans all channels.
            "iface"   - the interface to scan on. must be a real interface.
            "tried_rtl8187_fix" - We have already attempted to fix "Unknown error 132"
        Returns list of selected targets and list of clients.
        """
        remove_airodump_files(self.RUN_CONFIG.temp + 'wifite')
        command = ['airodump-ng',
                   '-a', # only show associated clients
                   '-w', self.RUN_CONFIG.temp + 'wifite'] # output file
        if channel != 0:
            command.append('-c')
            command.append(str(channel))
        command.append(iface)
        proc = Popen(command, stdout=DN, stderr=DN)
        time_started = time.time()
        print GR+' [+] '+G+'initializing scan'+W+' ('+G+iface+W+'), updates at 5 sec intervals, '+G+'CTRL+C'+W+' when ready.'
        (targets, clients) = ([], [])
        try:
            deauth_sent = 0.0
            old_targets = []
            stop_scanning = False
            # Poll airodump's CSV output until the user interrupts or a
            # targeting condition below sets stop_scanning.
            while True:
                time.sleep(0.3)
                if not os.path.exists(self.RUN_CONFIG.temp + 'wifite-01.csv') and time.time() - time_started > 1.0:
                    print R+'\n [!] ERROR!'+W
                    # RTL8187 Unknown Error 132 FIX
                    if proc.poll() != None: # Check if process has finished
                        proc = Popen(['airodump-ng', iface], stdout=DN, stderr=PIPE)
                        if not tried_rtl8187_fix and proc.communicate()[1].find('failed: Unknown error 132') != -1:
                            send_interrupt(proc)
                            if self.rtl8187_fix(iface):
                                # Retry the scan once after the driver reload.
                                return self.scan(channel=channel, iface=iface, tried_rtl8187_fix=True)
                    print R+' [!]'+O+' wifite is unable to generate airodump-ng output files'+W
                    print R+' [!]'+O+' you may want to disconnect/reconnect your wifi device'+W
                    self.RUN_CONFIG.exit_gracefully(1)
                (targets, clients) = self.parse_csv(self.RUN_CONFIG.temp + 'wifite-01.csv')
                # Remove any already cracked networks if configured to do so
                if self.RUN_CONFIG.SHOW_ALREADY_CRACKED == False:
                    index = 0
                    while index < len(targets):
                        already = False
                        for cracked in self.RUN_CONFIG.CRACKED_TARGETS:
                            if targets[index].ssid.lower() == cracked.ssid.lower():
                                already = True
                            if targets[index].bssid.lower() == cracked.bssid.lower():
                                already = True
                        if already == True:
                            targets.pop(index)
                            index -= 1
                        index += 1
                # If we are targeting a specific ESSID/BSSID, skip the scan once we find it.
                if self.RUN_CONFIG.TARGET_ESSID != '':
                    for t in targets:
                        if t.ssid.lower() == self.RUN_CONFIG.TARGET_ESSID.lower():
                            send_interrupt(proc)
                            try: os.kill(proc.pid, SIGTERM)
                            except OSError: pass
                            except UnboundLocalError: pass
                            targets = [t]
                            stop_scanning = True
                            break
                if self.RUN_CONFIG.TARGET_BSSID != '':
                    for t in targets:
                        if t.bssid.lower() == self.RUN_CONFIG.TARGET_BSSID.lower():
                            send_interrupt(proc)
                            try: os.kill(proc.pid, SIGTERM)
                            except OSError: pass
                            except UnboundLocalError: pass
                            targets = [t]
                            stop_scanning = True
                            break
                # If user has chosen to target all access points, wait 10 seconds, then return all
                if self.RUN_CONFIG.ATTACK_ALL_TARGETS and time.time() - time_started > 10:
                    print GR+'\n [+]'+W+' auto-targeted %s%d%s access point%s' % (G, len(targets), W, '' if len(targets) == 1 else 's')
                    stop_scanning = True
                if self.RUN_CONFIG.ATTACK_MIN_POWER > 0 and time.time() - time_started > 10:
                    # Remove targets with power < threshold
                    i = 0
                    before_count = len(targets)
                    while i < len(targets):
                        if targets[i].power < self.RUN_CONFIG.ATTACK_MIN_POWER:
                            targets.pop(i)
                        else: i += 1
                    print GR+'\n [+]'+W+' removed %s targets with power < %ddB, %s remain' % \
                            (G+str(before_count - len(targets))+W, self.RUN_CONFIG.ATTACK_MIN_POWER, G+str(len(targets))+W)
                    stop_scanning = True
                if stop_scanning: break
                # If there are unknown SSIDs, send deauths to them to decloak
                # hidden access points (at most once every 5 seconds).
                if channel != 0 and time.time() - deauth_sent > 5:
                    deauth_sent = time.time()
                    for t in targets:
                        if t.ssid == '':
                            print "\r %s deauthing hidden access point (%s)               \r" % \
                                (GR+sec_to_hms(time.time() - time_started)+W, G+t.bssid+W),
                            stdout.flush()
                            # Time to deauth
                            cmd = ['aireplay-ng',
                                   '--ignore-negative-one',
                                   '--deauth', str(self.RUN_CONFIG.WPA_DEAUTH_COUNT),
                                   '-a', t.bssid]
                            for c in clients:
                                if c.station == t.bssid:
                                    cmd.append('-c')
                                    cmd.append(c.bssid)
                                    break
                            cmd.append(iface)
                            proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
                            proc_aireplay.wait()
                            time.sleep(0.5)
                        else:
                            # SSID became visible: report decloak if it was hidden before.
                            for ot in old_targets:
                                if ot.ssid == '' and ot.bssid == t.bssid:
                                    print '\r %s successfully decloaked "%s"                     ' % \
                                            (GR+sec_to_hms(time.time() - time_started)+W, G+t.ssid+W)
                    old_targets = targets[:]
                if self.RUN_CONFIG.VERBOSE_APS and len(targets) > 0:
                    targets = sorted(targets, key=lambda t: t.power, reverse=True)
                    if not self.RUN_CONFIG.WPS_DISABLE:
                        wps_check_targets(targets, self.RUN_CONFIG.temp + 'wifite-01.cap', verbose=False)
                    os.system('clear')
                    print GR+'\n [+] '+G+'scanning'+W+' ('+G+iface+W+'), updates at 5 sec intervals, '+G+'CTRL+C'+W+' when ready.\n'
                    print "   NUM ESSID                 %sCH  ENCR  POWER  WPS?  CLIENT" % ('BSSID              ' if self.RUN_CONFIG.SHOW_MAC_IN_SCAN else '')
                    print '   --- --------------------  %s--  ----  -----  ----  ------' % ('-----------------  ' if self.RUN_CONFIG.SHOW_MAC_IN_SCAN else '')
                    for i, target in enumerate(targets):
                        print "   %s%2d%s " % (G, i + 1, W),
                        # SSID
                        if target.ssid == '':
                            p = O+'('+target.bssid+')'+GR+' '+W
                            print '%s' % p.ljust(20),
                        elif ( target.ssid.count('\x00') == len(target.ssid) ):
                            p = '<Length '+str(len(target.ssid))+'>'
                            print '%s' % C+p.ljust(20)+W,
                        elif len(target.ssid) <= 20:
                            print "%s" % C+target.ssid.ljust(20)+W,
                        else:
                            print "%s" % C+target.ssid[0:17] + '...'+W,
                        # BSSID
                        if self.RUN_CONFIG.SHOW_MAC_IN_SCAN:
                            print O,target.bssid+W,
                        # Channel
                        print G+target.channel.rjust(3),W,
                        # Encryption
                        if target.encryption.find("WEP") != -1: print G,
                        else: print O,
                        print "\b%3s" % target.encryption.strip().ljust(4) + W,
                        # Power
                        if target.power >= 55: col = G
                        elif target.power >= 40: col = O
                        else: col = R
                        print "%s%3ddb%s" % (col,target.power, W),
                        # WPS
                        if self.RUN_CONFIG.WPS_DISABLE:
                            print "  %3s" % (O+'n/a'+W),
                        else:
                            print "  %3s" % (G+'wps'+W if target.wps else R+' no'+W),
                        # Clients
                        client_text = ''
                        for c in clients:
                            if c.station == target.bssid:
                                if client_text == '': client_text = 'client'
                                elif client_text[-1] != "s": client_text += "s"
                        if client_text != '': print '  %s' % (G+client_text+W)
                        else: print ''
                    print ''
                print ' %s %s wireless networks. %s target%s and %s client%s found   \r' % (
                    GR+sec_to_hms(time.time() - time_started)+W, G+'scanning'+W,
                    G+str(len(targets))+W, '' if len(targets) == 1 else 's',
                    G+str(len(clients))+W, '' if len(clients) == 1 else 's'),
                stdout.flush()
        except KeyboardInterrupt:
            # CTRL+C ends the scan loop; fall through to target selection.
            pass
        print ''
        send_interrupt(proc)
        try: os.kill(proc.pid, SIGTERM)
        except OSError: pass
        except UnboundLocalError: pass
        # Use "wash" program to check for WPS compatibility
        if not self.RUN_CONFIG.WPS_DISABLE:
            wps_check_targets(targets, self.RUN_CONFIG.temp + 'wifite-01.cap')
        remove_airodump_files(self.RUN_CONFIG.temp + 'wifite')
        if stop_scanning: return (targets, clients)
        print ''
        if len(targets) == 0:
            print R+' [!]'+O+' no targets found!'+W
            print R+' [!]'+O+' you may need to wait for targets to show up.'+W
            print ''
            self.RUN_CONFIG.exit_gracefully(1)
        if self.RUN_CONFIG.VERBOSE_APS: os.system('clear')
        # Sort by Power
        targets = sorted(targets, key=lambda t: t.power, reverse=True)
        victims = []
        print "   NUM ESSID                 %sCH  ENCR  POWER  WPS?  CLIENT" % ('BSSID              ' if self.RUN_CONFIG.SHOW_MAC_IN_SCAN else '')
        print '   --- --------------------  %s--  ----  -----  ----  ------' % ('-----------------  ' if self.RUN_CONFIG.SHOW_MAC_IN_SCAN else '')
        for i, target in enumerate(targets):
            print "   %s%2d%s " % (G, i + 1, W),
            # SSID
            if target.ssid == '':
                p = O+'('+target.bssid+')'+GR+' '+W
                print '%s' % p.ljust(20),
            elif ( target.ssid.count('\x00') == len(target.ssid) ):
                p = '<Length '+str(len(target.ssid))+'>'
                print '%s' % C+p.ljust(20)+W,
            elif len(target.ssid) <= 20:
                print "%s" % C+target.ssid.ljust(20)+W,
            else:
                print "%s" % C+target.ssid[0:17] + '...'+W,
            # BSSID
            if self.RUN_CONFIG.SHOW_MAC_IN_SCAN:
                print O,target.bssid+W,
            # Channel
            print G+target.channel.rjust(3),W,
            # Encryption
            if target.encryption.find("WEP") != -1: print G,
            else: print O,
            print "\b%3s" % target.encryption.strip().ljust(4) + W,
            # Power
            if target.power >= 55: col = G
            elif target.power >= 40: col = O
            else: col = R
            print "%s%3ddb%s" % (col,target.power, W),
            # WPS
            if self.RUN_CONFIG.WPS_DISABLE:
                print "  %3s" % (O+'n/a'+W),
            else:
                print "  %3s" % (G+'wps'+W if target.wps else R+' no'+W),
            # Clients
            client_text = ''
            for c in clients:
                if c.station == target.bssid:
                    if client_text == '': client_text = 'client'
                    elif client_text[-1] != "s": client_text += "s"
            if client_text != '': print '  %s' % (G+client_text+W)
            else: print ''
        # Accept 'all', comma-separated indices, and dashed ranges (e.g. 2-5).
        ri = raw_input(GR+"\n [+]"+W+" select "+G+"target numbers"+W+" ("+G+"1-%s)" % (str(len(targets))+W) + \
                       " separated by commas, or '%s': " % (G+'all'+W))
        if ri.strip().lower() == 'all':
            victims = targets[:]
        else:
            for r in ri.split(','):
                r = r.strip()
                if r.find('-') != -1:
                    (sx, sy) = r.split('-')
                    if sx.isdigit() and sy.isdigit():
                        x = int(sx)
                        y = int(sy) + 1
                        for v in xrange(x, y):
                            victims.append(targets[v - 1])
                elif not r.isdigit() and r.strip() != '':
                    print O+" [!]"+R+" not a number: %s " % (O+r+W)
                elif r != '':
                    victims.append(targets[int(r) - 1])
        if len(victims) == 0:
            print O+'\n [!] '+R+'no targets selected.\n'+W
            self.RUN_CONFIG.exit_gracefully(0)
        print ''
        print ' [+] %s%d%s target%s selected.' % (G, len(victims), W, '' if len(victims) == 1 else 's')
        return (victims, clients)
    def Start(self):
        """Main entry point: set up the environment, scan, attack each selected
        target, then summarize results and crack any captured WPA handshakes."""
        self.RUN_CONFIG.CreateTempFolder()
        self.RUN_CONFIG.handle_args()
        self.RUN_CONFIG.ConfirmRunningAsRoot()
        self.RUN_CONFIG.ConfirmCorrectPlatform()
        self.initial_check() # Ensure required programs are installed.
        # The "get_iface" method anonymizes the MAC address (if needed)
        # and puts the interface into monitor mode.
        iface = self.get_iface()
        self.RUN_CONFIG.THIS_MAC = get_mac_address(iface) # Store current MAC address
        (targets, clients) = self.scan(iface=iface, channel=self.RUN_CONFIG.TARGET_CHANNEL)
        try:
            index = 0
            while index < len(targets):
                target = targets[index]
                # Check if we have already cracked this target
                # NOTE(review): the module-level `RUN_CONFIG` global is used in this
                # loop instead of `self.RUN_CONFIG` -- confirm the global alias exists.
                for already in RUN_CONFIG.CRACKED_TARGETS:
                    if already.bssid == targets[index].bssid:
                        if RUN_CONFIG.SHOW_ALREADY_CRACKED == True:
                            print R+'\n [!]'+O+' you have already cracked this access point\'s key!'+W
                            print R+' [!] %s' % (C+already.ssid+W+': "'+G+already.key+W+'"')
                            ri = raw_input(GR+' [+] '+W+'do you want to crack this access point again? ('+G+'y/'+O+'n'+W+'): ')
                            if ri.lower() == 'n':
                                targets.pop(index)
                                index -= 1
                        else:
                            targets.pop(index)
                            index -= 1
                        break
                # Check if handshakes already exist, ask user whether to skip targets or save new handshakes
                handshake_file = RUN_CONFIG.WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
                                 + '_' + target.bssid.replace(':', '-') + '.cap'
                if os.path.exists(handshake_file):
                    print R+'\n [!] '+O+'you already have a handshake file for %s:' % (C+target.ssid+W)
                    print '        %s\n' % (G+handshake_file+W)
                    print GR+' [+]'+W+' do you want to '+G+'[s]kip'+W+', '+O+'[c]apture again'+W+', or '+R+'[o]verwrite'+W+'?'
                    ri = 'x'
                    while ri != 's' and ri != 'c' and ri != 'o':
                        ri = raw_input(GR+' [+] '+W+'enter '+G+'s'+W+', '+O+'c,'+W+' or '+R+'o'+W+': '+G).lower()
                        print W+"\b",
                    if ri == 's':
                        targets.pop(index)
                        index -= 1
                    elif ri == 'o':
                        remove_file(handshake_file)
                        continue
                index += 1
        except KeyboardInterrupt:
            print '\n '+R+'(^C)'+O+' interrupted\n'
            self.RUN_CONFIG.exit_gracefully(0)
        wpa_success = 0
        wep_success = 0
        wpa_total = 0
        wep_total = 0
        self.RUN_CONFIG.TARGETS_REMAINING = len(targets)
        # Attack each selected target in turn; WPS first (when available),
        # then a WPA handshake capture, or a WEP attack.
        for t in targets:
            self.RUN_CONFIG.TARGETS_REMAINING -= 1
            # Build list of clients connected to target
            ts_clients = []
            for c in clients:
                if c.station == t.bssid:
                    ts_clients.append(c)
            print ''
            if t.encryption.find('WPA') != -1:
                need_handshake = True
                if not self.RUN_CONFIG.WPS_DISABLE and t.wps:
                    wps_attack = WPSAttack(iface, t, self.RUN_CONFIG)
                    need_handshake = not wps_attack.RunAttack()
                    wpa_total += 1
                if not need_handshake: wpa_success += 1
                if self.RUN_CONFIG.TARGETS_REMAINING < 0: break
                if not self.RUN_CONFIG.WPA_DISABLE and need_handshake:
                    wpa_total += 1
                    wpa_attack = WPAAttack(iface, t, ts_clients, self.RUN_CONFIG)
                    if wpa_attack.RunAttack():
                        wpa_success += 1
            elif t.encryption.find('WEP') != -1:
                wep_total += 1
                wep_attack = WEPAttack(iface, t, ts_clients, self.RUN_CONFIG)
                if wep_attack.RunAttack():
                    wep_success += 1
            else: print R+' unknown encryption:',t.encryption,W
            # If user wants to stop attacking
            if self.RUN_CONFIG.TARGETS_REMAINING <= 0: break
        if wpa_total + wep_total > 0:
            # Attacks are done! Show results to user
            print ''
            print GR+' [+] %s%d attack%s completed:%s' % (G, wpa_total + wep_total, '' if wpa_total+wep_total == 1 else 's', W)
            print ''
            if wpa_total > 0:
                if wpa_success == 0: print GR+' [+]'+R,
                elif wpa_success == wpa_total: print GR+' [+]'+G,
                else: print GR+' [+]'+O,
                print '%d/%d%s WPA attacks succeeded' % (wpa_success, wpa_total, W)
                for finding in self.RUN_CONFIG.WPA_FINDINGS:
                    print '        ' + C+finding+W
            if wep_total > 0:
                if wep_success == 0: print GR+' [+]'+R,
                elif wep_success == wep_total: print GR+' [+]'+G,
                else: print GR+' [+]'+O,
                print '%d/%d%s WEP attacks succeeded' % (wep_success, wep_total, W)
                for finding in self.RUN_CONFIG.WEP_FINDINGS:
                    print '        ' + C+finding+W
            caps = len(self.RUN_CONFIG.WPA_CAPS_TO_CRACK)
            if caps > 0 and not self.RUN_CONFIG.WPA_DONT_CRACK:
                print GR+' [+]'+W+' starting '+G+'WPA cracker'+W+' on %s%d handshake%s' % (G, caps, W if caps == 1 else 's'+W)
                for cap in self.RUN_CONFIG.WPA_CAPS_TO_CRACK:
                    wpa_crack(cap)
        print ''
        self.RUN_CONFIG.exit_gracefully(0)
def parse_csv(self, filename):
"""
Parses given lines from airodump-ng CSV file.
Returns tuple: List of targets and list of clients.
"""
if not os.path.exists(filename): return ([], [])
targets = []
clients = []
try:
hit_clients = False
with open(filename, 'rb') as csvfile:
targetreader = csv.reader((line.replace('\0','') for line in csvfile), delimiter=',')
for row in targetreader:
if len(row) < 2:
continue
if not hit_clients:
if len(row) < 14:
continue
if row[0].strip() == 'Station MAC':
hit_clients = True
if row[0].strip() == 'BSSID' or row[0].strip() == 'Station Mac': continue
enc = row[5].strip()
wps = False
if enc.find('WPA') == -1 and enc.find('WEP') == -1: continue
if self.RUN_CONFIG.WEP_DISABLE and enc.find('WEP') != -1: continue
if self.RUN_CONFIG.WPA_DISABLE and self.RUN_CONFIG.WPS_DISABLE and enc.find('WPA') != -1: continue
if enc == "WPA2WPA":
enc = "WPA2"
wps = True
power = int(row[8].strip())
ssid = row[13].strip()
ssidlen = int(row[12].strip())
ssid = ssid[:ssidlen]
if power < 0: power += 100
t = Target(row[0].strip(), power, row[10].strip(), row[3].strip(), enc, ssid)
t.wps = wps
targets.append(t)
else:
if len(row) < 6:
continue
bssid = re.sub(r'[^a-zA-Z0-9:]', '', row[0].strip())
station = re.sub(r'[^a-zA-Z0-9:]', '', row[5].strip())
power = row[3].strip()
if station != 'notassociated':
c = Client(bssid, station, power)
clients.append(c)
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
return ([], [])
return (targets, clients)
def analyze_capfile(self, capfile):
"""
Analyzes given capfile for handshakes using various programs.
Prints results to console.
"""
# we're not running an attack
wpa_attack = WPAAttack(None, None, None)
if self.RUN_CONFIG.TARGET_ESSID == '' and self.RUN_CONFIG.TARGET_BSSID == '':
print R+' [!]'+O+' target ssid and bssid are required to check for handshakes'
print R+' [!]'+O+' please enter essid (access point name) using -e <name>'
print R+' [!]'+O+' and/or target bssid (mac address) using -b <mac>\n'
# exit_gracefully(1)
if self.UN_CONFIG.TARGET_BSSID == '':
# Get the first BSSID found in tshark!
self.RUN_CONFIG.TARGET_BSSID = get_bssid_from_cap(self.RUN_CONFIG.TARGET_ESSID, capfile)
# if TARGET_BSSID.find('->') != -1: TARGET_BSSID == ''
if self.RUN_CONFIG.TARGET_BSSID == '':
print R+' [!]'+O+' unable to guess BSSID from ESSID!'
else:
print GR+' [+]'+W+' guessed bssid: %s' % (G+self.RUN_CONFIG.TARGET_BSSID+W)
if self.RUN_CONFIG.TARGET_BSSID != '' and self.RUN_CONFIG.TARGET_ESSID == '':
self.RUN_CONFIG.TARGET_ESSID = get_essid_from_cap(self.RUN_CONFIG.TARGET_BSSID, capfile)
print GR+'\n [+]'+W+' checking for handshakes in %s' % (G+capfile+W)
t = Target(self.RUN_CONFIG.TARGET_BSSID, '', '', '', 'WPA', self.RUN_CONFIG.TARGET_ESSID)
if program_exists('pyrit'):
result = wpa_attack.has_handshake_pyrit(t, capfile)
print GR+' [+]'+W+' '+G+'pyrit'+W+':\t\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: pyrit'
if program_exists('cowpatty'):
result = wpa_attack.has_handshake_cowpatty(t, capfile, nonstrict=True)
print GR+' [+]'+W+' '+G+'cowpatty'+W+' (nonstrict):\t %s' % (G+'found!'+W if result else O+'not found'+W)
result = wpa_attack.has_handshake_cowpatty(t, capfile, nonstrict=False)
print GR+' [+]'+W+' '+G+'cowpatty'+W+' (strict):\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: cowpatty'
if program_exists('tshark'):
result = wpa_attack.has_handshake_tshark(t, capfile)
print GR+' [+]'+W+' '+G+'tshark'+W+':\t\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: tshark'
if program_exists('aircrack-ng'):
result = wpa_attack.has_handshake_aircrack(t, capfile)
print GR+' [+]'+W+' '+G+'aircrack-ng'+W+':\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: aircrack-ng'
print ''
self.RUN_CONFIG.exit_gracefully(0)
|
UTF-8
|
Python
| false | false | 2,014 |
3,135,326,169,267 |
57db8f030014ca15623a5a3cfdf271c881604199
|
757f1bc702c80b3fc6bc563e3d8da9a306592883
|
/attachment/__init__.py
|
2afcfaa60ff5995107e6890401fe6df34880a28b
|
[
"BSD-3-Clause"
] |
permissive
|
KKBOX/trac-attachment-notify-plugin
|
https://github.com/KKBOX/trac-attachment-notify-plugin
|
401d53bec594f8ed24c3f556ad5e7f34e05af192
|
24f2f291fad8628a9802d1456a8f6f2e47598dfc
|
refs/heads/master
| 2020-12-24T13:28:21.226673 | 2013-01-24T05:13:09 | 2013-01-24T05:13:09 | 7,534,207 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# Copyright (C) 2013 KKBOX Technologies Limited
# Copyright (C) 2013 Gasol Wu <[email protected]>
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from attachment.notify import AttachmentNotify
|
UTF-8
|
Python
| false | false | 2,013 |
11,974,368,842,210 |
81308e0278488812e5a7804c46eddfdce310983c
|
82553a24bdefe86a02ce493eb659ad9d940d05ca
|
/phpfpm_fcgi_status
|
0d8da9bd1d6b2394b5ff23f794a460e20bf071f2
|
[] |
no_license
|
vader666/munin-plugins
|
https://github.com/vader666/munin-plugins
|
69b3803f1ecd8d80cf94af461d1d030ba0fbceae
|
bbe40a35c5f65b1d2214e20fc5ce9376ea0fd50e
|
refs/heads/master
| 2020-04-16T13:15:11.250469 | 2012-06-17T14:53:32 | 2012-06-17T14:53:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'vader666'
"""
Requirements
You need install libfcgi:
in Ubuntu "apt-get install libfcgi0ldbl"
in Gentoo "emerge dev-libs/fcgi"
How to configure:
Environment variable:
env.connectto: Set a connection string as ip:port or socket. Default 127.0.0.1:9000
You can use a named connections if you have more than one php-fpm:
1. Create simlink: ln -s /path/to/plugins/dir/phpfpm_fcgi_status /etc/munin/plugins/phpfpm_fcgi_status_<name>
2. Edit in /etc/munin/plugin-conf.d/munin-node environment variables: env.connectto.<name> <host:port or socket>
"""
import os, sys, commands
import json
graph_name = ""
# When installed as a symlink named 'phpfpm_fcgi_status_<name>', extract
# <name> so several php-fpm pools can be monitored by the same plugin.
if os.path.basename(__file__) != "phpfpm_fcgi_status":
    if "phpfpm_fcgi_status_" in os.path.basename(__file__):
        graph_name = os.path.basename(__file__).replace("phpfpm_fcgi_status_", "")
# Munin graph/field configuration, emitted verbatim by the 'config' command.
config = {
    "graph_title" : 'PHP5-FPM Status %s' % (graph_name),
    "graph_args" : '--base 1000 -l 0',
    "graph_vlabel" : 'Connections',
    "graph_category" : 'PHP',
    "graph_order" : 'Idle Active Total',
    "graph_info" : 'Plugin created by vader666 based on TJ Stein',
    "graph_printf" : '%6.0lf',
    "idle.label" : 'Idle',
    "idle.draw" : 'AREASTACK',
    "active.label" : 'Active',
    "active.draw" : 'AREASTACK',
    "total.label" : 'Total',
    "total.draw" : 'LINE2',
}
def get_config():
for (key, value) in config.items():
print "%s %s" % (key, value)
def get_connectionString():
    # Placeholder: connection-string resolution currently lives inline in
    # run(); this helper is unused and returns None.
    pass
def run():
if graph_name and "connectto.%s" % graph_name in os.environ:
connectionString = os.environ["connectto.%s" % graph_name]
elif "connectto" in os.environ:
connectionString = os.environ["connectto"]
else:
connectionString = "127.0.0.1:9000"
replay = commands.getstatusoutput('SCRIPT_NAME=/status SCRIPT_FILENAME=/status QUERY_STRING="json" REQUEST_METHOD=GET cgi-fcgi -bind -connect %s' % connectionString)
if replay[1]:
try:
fpmjson = replay[1].split('\r\n\r\n')[1]
except:
exit_with_error("Unexpected response from php. Check php-fpm configuration.")
else:
exit_with_error("Error. Can't connect to php5-fpm.")
json_object = json.loads(fpmjson)
print "idle.value %s" % json_object['idle processes']
print "active.value %s" % json_object['active processes']
print "total.value %s" % json_object['total processes']
sys.exit(0)
def exit_with_error(msg):
    # Print the error message and terminate with a non-zero exit status
    # so munin-node records the plugin run as failed.
    print msg
    sys.exit(1)
if __name__ == "__main__":
if not os.path.exists(r'/usr/bin/cgi-fcgi'):
exit_with_error("The \"cgi-fcgi\" not found. You must install \"cgi-fcgi\":\n" \
" in Ubuntu \"apt-get install libfcgi0ldbl\"\n" \
" in Gentoo \"emerge dev-libs/fcgi\"")
if len(sys.argv) > 1:
cmd_name = sys.argv[1]
else:
cmd_name = None
if cmd_name and cmd_name == "autoconfig":
print "no"
sys.exit(1)
elif cmd_name and cmd_name == "suggest":
print ""
sys.exit(0)
elif cmd_name and cmd_name == "config":
get_config()
sys.exit(0)
else:
run()
sys.exit(0)
exit_with_error("Unsupported command")
|
UTF-8
|
Python
| false | false | 2,012 |
1,443,109,038,174 |
ba6107fba3ded68d5444116fc47b2d13b3727211
|
fa701c6a9c9b9256c894af0e352e1657ef18aca7
|
/config/errors.py
|
4e7d683d24221831b8ff7bfa74cc466f413bc034
|
[] |
no_license
|
wfalkwallace/wingit
|
https://github.com/wfalkwallace/wingit
|
dbfe33e4a70336555ff291b06b8ffd42507f5008
|
30ca9a1f03a53cbd0311730f1069a53abf366fb9
|
refs/heads/master
| 2016-09-05T11:06:00.356054 | 2014-03-03T06:41:23 | 2014-03-03T06:41:23 | 16,854,906 | 1 | 0 | null | false | 2014-02-15T17:19:32 | 2014-02-15T02:17:42 | 2014-02-15T17:15:40 | 2014-02-15T17:19:31 | 0 | 0 | 1 | 0 |
Python
| null | null |
import json
from flask import make_response
# Error catalogue keyed by language, then by symbolic error name.
# Each entry carries the user-facing message and the HTTP status code
# used by construct_err()/make_error().
# BUG FIX: corrected message typos ('compay' -> 'company',
# 'passowrd' -> 'password').
errors = {
    'english': {
        'DEFAULT': {
            'message': 'unspecified error on server',
            'status_code': 500
        },
        'NO_DB_CONN': {
            'message': 'a database connection could not be established',
            'status_code': 503
        },
        'DATASET_NOT_FOUND': {
            'message': 'that dataset could not be found',
            'status_code': 404
        },
        'DATABASE_ERROR': {
            'message': 'there was an error accessing our database',
            'status_code': 503
        },
        'NOT_LOGGED_IN': {
            'message': "sorry, you're not logged in yet",
            'status_code': 401
        },
        'DATA_NEEDED_FOR_REQUEST': {
            'message': 'this endpoint requires data to operate',
            'status_code': 400
        },
        'PASSWORDS_UNMATCHED': {
            'message': 'passwords passed do not match',
            'status_code': 400
        },
        'INCORRECT_PASSWORD': {
            'message': 'password incorrect',
            'status_code': 401
        },
        'IMPROPER_EMAIL': {
            'message': 'email is invalid',
            'status_code': 400
        },
        'EMAIL_IN_USE': {
            'message': 'email already in use',
            'status_code': 400
        },
        'MISSING_LOGIN_DATA': {
            'message': 'an email and password are needed for login',
            'status_code': 400
        },
        # COMPANIES
        'COMPANY_NOT_CREATED': {
            'message': 'error creating company',
            'status_code': 400
        },
        'COMPANY_NOT_FOUND': {
            'message': 'company was not found',
            'status_code': 404
        },
        # REVIEWS
        'REVIEW_NOT_CREATED': {
            'message': 'error creating review',
            'status_code': 400
        },
        'REVIEW_APPROVAL_FAILURE' : {
            'message': 'error updating review',
            'status_code': 400
        },
        'REVIEW_NOT_FOUND': {
            'message': 'review was not found',
            'status_code': 404
        },
        # ADMIN
        'ADMIN_REQUIRED': {
            'message': 'this endpoint requires admin credentials',
            'status_code': 403
        },
        'ADMIN_DATA_NEEDED': {
            'message': 'data is missing to create an admin',
            'status_code': 400
        },
        'ADMIN_NOT_CREATED': {
            'message': 'error creating admin',
            'status_code': 400
        },
        'ADMIN_DNE': {
            'message': 'the specified admin does not exist',
            'status_code': 404
        }
    }
}
def make_error(err='DEFAULT', language='english'):
    """
    Build a Flask response for the named error.

    Unknown error names (or languages) fall back to the generic
    500 DEFAULT entry via construct_err().
    """
    body, status = construct_err(err_name=err, err_language=language)
    return make_response(body, status)
def construct_err(err_name='DEFAULT', err_language='english'):
    """
    Build the JSON payload for the named error.

    Unknown languages fall back to 'english' and unknown error names to
    'DEFAULT', so a response can always be produced.

    Returns (json_string, status_code).
    """
    # Idiom fix: membership tests go directly on the dicts; calling
    # .keys() builds an intermediate list for no benefit.
    if err_language not in errors:
        err_language = 'english'
    if err_name not in errors[err_language]:
        err_name = 'DEFAULT'
    error_obj = errors[err_language][err_name]
    return json.dumps({
        'message': error_obj['message'],
        'status_code': error_obj['status_code']
    }), error_obj['status_code']
|
UTF-8
|
Python
| false | false | 2,014 |
4,320,737,131,806 |
128ec097b871acf60c84c5783ff1d906d417f5b2
|
8d1acd939b7a81031e4d274b6d3d42f2afdcc509
|
/create_binary.py
|
a8cb779f7f79aa40a46df8b3fbd2fc4c6ae5ec79
|
[] |
no_license
|
svakulenko/gvim_extention
|
https://github.com/svakulenko/gvim_extention
|
1b7799376548f3ec59da09664d79fff363252344
|
30b8727dd9316407f1411f93e10a6924aeddc5f2
|
refs/heads/master
| 2016-09-11T11:30:24.482045 | 2012-08-22T19:25:17 | 2012-08-22T19:25:17 | 958,557 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os
import sys
import re
dir_binary = 'binaries'
dir_vimfiles = '.vim'
descr_file = 'binaries_description.txt'
linux = True
debug = False
def deletePackages():
    """Remove every previously built .zip package from the binaries directory."""
    os.system('rm ' + dir_binary + '/' + '*.zip')
def createPackage(p_name, p_files):
    # Build <p_name>.zip in the binaries directory, containing every
    # file listed in p_files (paths relative to the vimfiles directory).
    s_exe = 'zip ' + '..' + "\\" + dir_binary + '/' + p_name + '.zip ' + " ".join(p_files) # " " - separate each list element by space
    if linux:
        # The command is assembled with a Windows separator; convert on Linux.
        s_exe = s_exe.replace('\\','/')
    print "exe str: " + s_exe
    #print 'yahoo, you in createPackage!\n'
    if debug:
        print 'p_name=' + p_name,'\nfiles:'
        for i in p_files:
            print i
        print 'exe str=' + s_exe
        print '\n'
    # Run zip from inside the vimfiles directory so the archived paths
    # stay relative; restore the working directory afterwards.
    curr_path = os.getcwd()
    os.chdir(curr_path + '/' + dir_vimfiles)
    os.system(s_exe) # exe command
    os.chdir(curr_path)
    print '\n'
# Read the package description file and build one zip per package entry.
# Entries are 'package=<name>' followed by file paths; a blank line
# terminates an entry and triggers the build.
packages_descr = open(dir_binary + '/' + descr_file,'r')
if not packages_descr:
    # NOTE(review): open() never returns a falsy value -- it raises
    # IOError on failure -- so this branch looks unreachable; confirm intent.
    print 'sorry, cant open file:',descr_file
else:
    deletePackages()
    pack_name = ''
    pack_files = []
    for i in packages_descr.readlines():
        if not re.match('^\#',i): # skip '#' comment lines
            if re.match('^$',i) and pack_name != '' and len(pack_files) > 0: #create package on each empty line, if list is full and package name
                createPackage(pack_name,pack_files)
                pack_files[:] = [] #clear list
                pack_name = ''
            i = i.rstrip() #remove \n from end of line
            res = re.search('^package=(.*)$',i)
            if res:
                pack_name = res.group(1)
            elif not re.match('^$',i):
                #print 'i=',i
                pack_files.append(i)
    # NOTE(review): a package not followed by a blank line before EOF is
    # never built -- confirm the description file always ends with one.
    #print 'pack_files len=',len(pack_files)
    #print i
|
UTF-8
|
Python
| false | false | 2,012 |
17,386,027,628,023 |
d999efca625942a7a2d8233be218e515510980c8
|
b4b56221f6f3bcf2b6eed39ffcd9a1489d8271da
|
/psidialogs/api/tkfiledialog_api.py
|
d50a29cd26a344d26d044af0151f2fef0b792deb
|
[
"BSD-2-Clause"
] |
permissive
|
gregwjacobs/psidialogs
|
https://github.com/gregwjacobs/psidialogs
|
4d0b80cdc13212df3c6b26130ffc6dc377a9e031
|
4d61bd0e996b6e88c1241f21aca0ed3f05a76ba7
|
refs/heads/master
| 2021-01-18T10:42:04.804589 | 2012-09-13T15:02:06 | 2012-09-13T15:02:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import psidialogs
def askopenfile(mode = "r", **options):
    """Ask the user for a filename and return it opened in *mode*, or None on cancel."""
    chosen = askopenfilename(**options)
    return open(chosen, mode) if chosen else None
def askopenfiles(mode = "r", **options):
    """Ask for multiple filenames and return the open file
    objects.

    Returns a list of open file objects, or an empty list if
    cancel was selected.
    """
    filenames = askopenfilenames(**options)
    if not filenames:
        return filenames
    # Idiom fix: a comprehension replaces the original append loop,
    # which also reused one variable for both filenames and file objects.
    return [open(filename, mode) for filename in filenames]
def asksaveasfile(mode = "w", **options):
    """Ask for a filename to save as and return it opened in *mode*, or None on cancel."""
    target = asksaveasfilename(**options)
    if not target:
        return None
    return open(target, mode)
def askopenfilenames(**options):
    """Original doc: Ask for multiple filenames to open.

    Would return a list of filenames (empty if cancel was selected),
    but multiple selection is not supported by this backend.
    """
    raise NotImplementedError()
def askopenfilename(**options):
    """Original doc: Ask for a filename to open"""
    # Delegates to psidialogs; extra tkFileDialog-style options are ignored.
    return psidialogs.ask_file(save=False)
def askdirectory(**options):
    """Original doc: Ask for a directory, and return the file name"""
    # Delegates to psidialogs; extra tkFileDialog-style options are ignored.
    return psidialogs.ask_folder()
def asksaveasfilename(**options):
    """Original doc: Ask for a filename to save as"""
    # Delegates to psidialogs; extra tkFileDialog-style options are ignored.
    return psidialogs.ask_file(save=True)
|
UTF-8
|
Python
| false | false | 2,012 |
9,285,719,306,317 |
4bcb68d16fa088162ca5444fa0468d08d6e5f7b5
|
1105db82254697fe0e783776e3fab08a36b688c3
|
/identityprovider/tests/test_xrds.py
|
1559afa29bec8c62a330a13c1b3d53d065f25706
|
[
"AGPL-3.0-only"
] |
non_permissive
|
miing/mci_migo
|
https://github.com/miing/mci_migo
|
8b2dbbfc0d08dd59b5158be01dd1477d43522d3e
|
f54a0ba62d488cb320deae2ceb9c9589845a05ce
|
refs/heads/master
| 2021-01-19T16:51:01.455271 | 2013-05-25T16:20:55 | 2013-05-25T16:20:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
from django.core.urlresolvers import reverse
from identityprovider.tests.utils import SSOBaseTestCase
class XRDSTest(SSOBaseTestCase):
    """OpenID identity-page tests: HTML vs XRDS content negotiation and
    404 behaviour for suspended accounts."""

    openid_identifier = 'mark_oid'
    url = reverse('server-identity', kwargs=dict(identifier='mark_oid'))

    def setUp(self):
        super(XRDSTest, self).setUp()
        self.account = self.factory.make_account(
            openid_identifier=self.openid_identifier)

    def test_id_html(self):
        # A plain GET serves the XHTML identity page.
        response = self.client.get(self.url)
        self.assertContains(
            response, '<html xmlns="http://www.w3.org/1999/xhtml">')
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_id_html_with_inactive_account(self):
        # Suspended accounts must not expose an identity page.
        self.account.suspend()
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 404)

    def test_id_accept_xrds(self):
        # An XRDS Accept header yields the XRDS discovery document.
        response = self.client.get(self.url,
                                   HTTP_ACCEPT='application/xrds+xml')
        self.assertTrue(response.content.startswith('<?xml version="1.0"?>'))
        self.assertEqual(response['Content-Type'], 'application/xrds+xml')

    def test_id_accept_xrds_with_inactive_account(self):
        self.account.suspend()
        response = self.client.get(self.url,
                                   HTTP_ACCEPT='application/xrds+xml')
        self.assertEqual(response.status_code, 404)

    def test_default_discovery_response(self):
        # The generic server endpoint advertises its XRDS location.
        response = self.client.get(reverse('server-openid'),
                                   HTTP_ACCEPT='application/xrds+xml')
        self.assertTrue(response.has_header('X-XRDS-Location'))
|
UTF-8
|
Python
| false | false | 2,013 |
7,310,034,349,140 |
7b0c3ec3b0d63830cd7a010b4fa719cbb8c0f14b
|
30707b94aec16b1e9b62165bf199f0653eb74055
|
/gscripts/output_parsers/rna_star_collector.py
|
4ec7867141f7bf8c30398479ac5a494044b99b08
|
[] |
no_license
|
lmckinney2020/gscripts
|
https://github.com/lmckinney2020/gscripts
|
e16fd7efe9af67809f80d8a1dab89bcd2d9bfdef
|
ccd5a54b9c14eaa65f4eaddd9dce104582a47682
|
refs/heads/master
| 2020-07-05T19:00:32.115197 | 2013-08-31T22:14:47 | 2013-08-31T22:14:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Collector(object):
    """Parses an RNA-STAR 'Log.final.out'-style metrics file into a dict."""

    def __init__(self, base_name=None):
        # base_name: optional sample identifier, kept for API compatibility.
        self.base_name = base_name
        self.tool_name = ''
        self.version = 0
        return

    def parse(self, file_path, descriptor=None):
        """
        Read the metrics file at *file_path* and return {metric_name: value}.

        Metric names are lower-cased, punctuation-stripped, words joined
        with underscores, '%' spelled out as 'percent' and '_of' dropped.
        Timestamp entries are removed from the result.
        """
        metrics_dict = {}
        # Close the file deterministically (the original leaked the handle).
        with open(file_path, 'r') as metrics_file:
            for line in metrics_file:
                line = line.lower()
                try:
                    key, value = line.split('|')
                except ValueError:
                    # Lines without exactly one '|' separator carry no metric
                    # (the original used a bare 'except' that hid real errors).
                    continue
                key = key.lstrip().strip()
                key = key.replace('%', 'percent')
                key = key.replace(' ', '_')
                key = key.replace(':', '')
                key = key.replace('(', '')
                key = key.replace(')', '')
                key = key.replace('_of', '')
                # BUG FIX: was "key.strip(',')[0]", which reduced every key to
                # its first character and made the pops below never match.
                key = key.strip(',')
                value = value.lstrip().strip()
                value = value.replace('%', '')
                metrics_dict[key] = value
        # Timestamps are not metrics; drop them when present.
        metrics_dict.pop('started_mapping_on', None)
        metrics_dict.pop('started_job_on', None)
        metrics_dict.pop('finished_on', None)
        return metrics_dict

    def record_metrics(self):
        # Placeholder for persisting parsed metrics.
        pass
if __name__ == '__main__':
import sys
collector = Collector()
for key in collector.parse(sys.argv[0]):
print key
|
UTF-8
|
Python
| false | false | 2,013 |
15,152,644,656,812 |
265da85182445ff0e4c876761a5c7f8ffa2cf620
|
7644fdd9b27fe137e0f46762c549808abe8ae7be
|
/bin/search.py
|
7b5a9065c27d26b5198f98c47fbc8b05778a9382
|
[] |
no_license
|
jamwalla/AllMusic
|
https://github.com/jamwalla/AllMusic
|
ff13533fc049e7a50225838e739e264ea817037b
|
4dc9b60f2ffc417216fc02e1fb1ffe4baeb5c84c
|
refs/heads/master
| 2015-08-01T02:28:53 | 2012-11-29T17:26:52 | 2012-11-29T17:58:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# vim: ts=4:sw=4:expandtab:
import sys, os
_loc = os.path.dirname(__file__)
sys.path.append(_loc+'/../lib/python')
import allmusic
def parse_options(cli_args):
    """
    Search options.

    cli_args: the argument list to parse (typically sys.argv[1:]).
    Returns the argparse namespace with 'type', 'version' and 'what'.
    Exits immediately after printing the version when -v is given.
    """
    import argparse
    parser = argparse.ArgumentParser(description='Search AllMusic entries')
    # Object type filter; defaults to searching across all types.
    parser.add_argument('-t', '--type', dest='type',
        metavar='(%s)' % '|'.join(allmusic.TYPES), type=str,
        help="Type of the looked up object", required=False, default='all')
    parser.add_argument('-v', '--version', help="Displays 'version: %s'" % allmusic.__version__,
        required=False, action='store_true', default=False)
    # Positional argument: the search term.
    parser.add_argument(dest='what', metavar='WHAT', help="Object to search for")
    args = parser.parse_args(cli_args)
    if args.version:
        print "version: %s" % ( allmusic.__version__ )
        sys.exit(0)
    return args
if __name__ == '__main__':
    # Parse CLI options and print every matching AllMusic entry.
    args = parse_options(sys.argv[1:])
    for res in allmusic.search(args.what, type=args.type):
        print res
|
UTF-8
|
Python
| false | false | 2,012 |
5,643,587,075,719 |
d94257f88d3a535742d30106cf5e2e440d02eeaa
|
8aa0271478a66e62cf2cffc2f4ed255299c86bcf
|
/pyofwave_server/pyofwave/operations/wavelet.py
|
b1985340fd80a451704dc714d32636aa1fffde55
|
[
"GPL-2.0-only",
"LGPL-2.1-or-later",
"AGPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference",
"MPL-2.0"
] |
non_permissive
|
ImaginationForPeople/PyOfWave
|
https://github.com/ImaginationForPeople/PyOfWave
|
01539be250ab0d84f6616c2cdfb4551225a593fc
|
89762b848899c0e4365afe6a196294962ee0ac49
|
refs/heads/master
| 2020-04-08T20:35:09.942649 | 2011-12-05T03:45:09 | 2011-12-05T03:45:09 | 2,715,668 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Manage wavelets.
"""
NS = "pyofwave.info/2012/dtd/wavelet.dtd"
|
UTF-8
|
Python
| false | false | 2,011 |
7,095,285,992,255 |
6a5916ad9e444e840213121aec9b50dffc0f01e1
|
8e626f2b30826af00c9ed2d87cbd075e8b9a76b0
|
/gtc/src/gtc/settings.py
|
88804d4eb0bb4c6cf329d4020e7aba5c6fae7612
|
[] |
no_license
|
RREYESAL/RPC_HVScan
|
https://github.com/RREYESAL/RPC_HVScan
|
0ce7345fe8a9ff8aaa55e4537f131db40274c803
|
b6a99f2e2c4e50fb508a9a50969629353bb97a9d
|
refs/heads/master
| 2021-01-21T15:53:09.740113 | 2014-01-23T14:45:02 | 2014-01-23T14:45:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Django settings for gtc project.
from os.path import abspath, dirname, join
import socket
import sys
import json
import secrets
SETTINGS_ROOT = abspath(join(dirname(__file__)))
SOURCE_ROOT = abspath(join(dirname(__file__), ".."))
SERVICE_ROOT = abspath(join(dirname(__file__), "..", ".."))
settings_from_keeper_file = abspath(join(SETTINGS_ROOT, "keeper_settings.json"))
f = open(settings_from_keeper_file,"rb")
SETTINGS_FROM_KEEPER = json.load(f)
f.close()
PRODUCTION_LEVEL = SETTINGS_FROM_KEEPER["productionLevel"]
DEBUG = True #TODO set debug mode according settings from keeper
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
if PRODUCTION_LEVEL == "private":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': abspath(join(SERVICE_ROOT, "var", "db", "test.db")),
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
elif PRODUCTION_LEVEL == "dev" :
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': secrets.secrets["gtc"]["connections"]["dev"]["owner"]["db_name"],
'USER': secrets.secrets["gtc"]["connections"]["dev"]["owner"]["user"],
'PASSWORD': secrets.secrets["gtc"]["connections"]["dev"]["owner"]["password"],
'HOST': '',
'PORT': '',
}
}
elif (PRODUCTION_LEVEL == "int") or (PRODUCTION_LEVEL == "pro"):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': secrets.secrets["gtc"]["connections"]["pro"]["owner"]["db_name"],
'USER': secrets.secrets["gtc"]["connections"]["pro"]["owner"]["user"],
'PASSWORD': secrets.secrets["gtc"]["connections"]["pro"]["owner"]["password"],
'HOST': '',
'PORT': '',
}
}
else:
raise Exception("Correct settings could not be detected")
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Zurich'# 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/gtc/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 's5hsm*ghd@fnsudj8t8hrjgi5u=ex6p1a00t8e#%-0*m-s3rxi'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
)
ROOT_URLCONF = 'gtc.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'gtc.wsgi.application'
AUTHENTICATION_BACKENDS = (
'GlobalTagCollector.views.ShibbolethBackend', #custom
'django.contrib.auth.backends.ModelBackend', #default
)
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'GlobalTagCollector',
'django.contrib.humanize',
'django.contrib.sites',
'django.contrib.flatpages',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '[%(levelname)s] %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console':{
'level':'DEBUG',
'class':'logging.StreamHandler',
'formatter': 'verbose',
'strm': sys.stdout,
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins', 'console'],
'level': 'INFO',
'propagate': True,
},
'django.db.backends':{
'level': 'DEBUG',
'handlers': [],
'propagate': False,
},
'':{
'handlers': ['console'],
'level': 'INFO',
'propagate': True,
}
}
}
#custom
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages",
#from here manually added. Before -default
'django.core.context_processors.request',
)
#=======================================================================================================================
#SERVICE_ACCOUNT_NAMES = 'http://webcondvm2:8083/get_connectionName'
##SERVICE_TAGS_FOR_ACCOUNT = 'http://cms-conddb.cern.ch/payload_inspector_1.1/?getTAGvsContainer='
#SERVICE_TAGS_FOR_ACCOUNT = 'https://cms-conddb-dev.cern.ch/payloadInspector/get_tagsVScontainer?dbName='
#SERVICE_FOR_RECORDS = 'https://kostas-conddev5.cern.ch:8088/recordsProvider/record_container_map'#?hardware_architecture_name=slc5_amd64_gcc323&software_release_name=CMSSW_5_1_0
#SERVICE_GLOBAL_TAG_LIST = 'http://webcondvm2.cern.ch:8081/get_list_GT'
#SERVICE_GT_INFO = 'http://webcondvm2.cern.ch:8081/getGTinfo?GT_name='
##SERVICE_GT_INFO = 'https://kostas-conddev5.cern.ch/gtList/getGTinfo?GT_name='
#SERVICE_GT_INFO_UPDATE = 'http://webcondvm2.cern.ch:8081/uploadGT?tag='
#RELEASES_PATH = "/afs/cern.ch/cms/{hardware_architecture}/cms/cmssw"
#SOFTWARE_RELEASE_NAME_PATTERN = "^CMSSW_(\d+)_(\d+)_(\d+)(?:_pre(\d+))?$"
#DATABASES_LIST = "https://cms-conddb-dev.cern.ch/payloadInspector/get_dbs"
#SCHEMAS_LIST = "https://cms-conddb-dev.cern.ch/payloadInspector/get_schemas?"
def getHostname():
    '''Returns the 'official' hostname where services are run.

    In private deployments, this is the current hostname. However,
    in official ones, could be, for instance, a DNS alias.
    e.g. cms-conddb-dev.cern.ch
    '''
    if PRODUCTION_LEVEL == 'private':
        return socket.getfqdn()
    # Official deployments map the production level to a fixed DNS alias;
    # an unknown level raises KeyError, as before.
    return {
        'pro': 'cms-conddb-prod.cern.ch',
        'int': 'cms-conddb-int.cern.ch',
        'dev': 'cms-conddb-dev.cern.ch',
    }[PRODUCTION_LEVEL]
HOSTNAME = getHostname()

# Upstream service endpoints; most are built from the deployment hostname.
SERVICE_ACCOUNT_NAMES = 'http://webcondvm2:8083/get_connectionName'
SERVICE_TAGS_FOR_ACCOUNT = 'https://%s/payloadInspector/get_tagsVScontainer?dbName=' % HOSTNAME
RECORDS_FIXTURE = abspath(join(SOURCE_ROOT, "GlobalTagCollector/fixtures/records_fixture_list.json"))
SERVICE_FOR_RECORDS = 'https://%s/recordsProvider/record_container_map' % HOSTNAME#?hardware_architecture_name=slc5_amd64_gcc323&software_release_name=CMSSW_5_1_0
SERVICE_GLOBAL_TAG_LIST = 'https://%s/gtList/getGTList' % HOSTNAME
SERVICE_GT_INFO = 'https://%s/gtList/getGTInfo?GT_name=' % HOSTNAME
RELEASES_PATH = "/afs/cern.ch/cms/{hardware_architecture}/cms/cmssw"
# BUGFIX: made this a raw string -- "\d" in a plain literal is an invalid
# escape sequence (DeprecationWarning today, a SyntaxError in future Python).
# The runtime value is unchanged.
SOFTWARE_RELEASE_NAME_PATTERN = r"^CMSSW_(\d+)_(\d+)_(\d+)(?:_pre(\d+))?$"
DATABASES_LIST = "https://%s/payloadInspector/get_dbs" % HOSTNAME
SCHEMAS_DICT = secrets.secrets["payloadInspector"]["connections"]
HARDWARE_ARCHITECTURES_LIST = 'https://cmstags.cern.ch/tc/public/py_getActiveArchitectures'
ADMIN_GROUP_NAME = 'global-tag-administrators'
#custom
#SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')
LOGIN_URL = '/gtc/accounts/login/'

# Outgoing mail; credentials come from the secrets store.
EMAIL_HOST_USER = secrets.secrets["gtc"]["email"]["sender"]
EMAIL_HOST_PASSWORD = secrets.secrets["gtc"]["email"]["password"]
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_HOST = "smtp.cern.ch"
|
UTF-8
|
Python
| false | false | 2,014 |
6,631,429,530,628 |
bb0fcd9918e5876d20d4ac831d5caae4c7096283
|
f30d09693ae0bcfa46f3d93166b4d2ec2ea28ef1
|
/Tabs.py
|
7b8c9589a91a6010c68398c38168b2c8048e9bc7
|
[] |
no_license
|
pysmath/editmenu
|
https://github.com/pysmath/editmenu
|
6d19ab7658435ade9e39b7cd78f2788fa35a542d
|
4ef0400d097c1403eb5f4a7056a6ef779a9efe28
|
refs/heads/master
| 2021-01-17T21:08:36.748243 | 2014-10-16T05:04:29 | 2014-10-16T05:04:29 | 25,087,051 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Allows for tabbed editing within the pythonista editor
import ui
import editor
import sqlite3 as sql
import console
import os
from math import pi
import webbrowser
# Shared layout geometry and bookkeeping for the sidebar tabs.
available_width = 500
# BUGFIX: was initialised to {} (a dict), but add_file() calls
# open_tabs.append(name) -- a list matches every use site.
open_tabs = []
num_of_tabs = 0
tab_height = 45   # button height in points
tab_y = 5
count = 0         # number of tab buttons currently in the sidebar
tab_width = 150   # button width; also the vertical step between tabs
def edit_menu(sender):
    """Relaunch the editmenu script through Pythonista's URL scheme."""
    target = 'pythonista://site-packages%2Feditmenu%2Feditmenu.py?action=run'
    webbrowser.open(target)
@ui.in_background
def check_tab():
    """Highlight the tab matching the file open in the editor; reset the rest."""
    active_name = os.path.split(editor.get_path())[1]
    for tab in view.subviews:
        if tab.name == active_name:
            tab.background_color = 'orange'
        elif tab.background_color != 'white':
            tab.background_color = 'white'
def add_new_button(name, new = False):
    """Create a sidebar tab button for *name* and append it to the view.

    When new is True the fresh tab is highlighted orange and every other
    tab is reset to white.  Increments the module-level tab counter.
    """
    b = ui.Button(title = str(name))
    b.height = tab_height
    b.width = tab_width
    b.border_width = 0.5
    b.corner_radius = 10
    if new == True:
        # Newly added tab becomes the active (orange) one.
        for r in range(len(view.subviews)):
            view.subviews[r].background_color = 'white'
        b.background_color = 'orange'
    else:
        b.background_color = 'white'
    b.border_color = 'grey'
    b.image = ui.Image.named('_blank')
    b.tint_color = 'black'
    b.action = open_url
    # Rotate 90 degrees so the tab reads sideways in the sidebar.
    b.transform = ui.Transform.rotation(pi/2)
    global count
    # Stack tabs vertically; 1.05 leaves a small gap between them.
    b.y = tab_width*count*1.05 + 120
    b.x = -10
    b.name = str(name)
    close_title = name + '_close'
    # Small close glyph in the tab's corner.
    c = ui.Button()
    c.width = 15
    c.height = 15
    c.x = 3
    c.y = 3
    #c.corner_radius = c.height/2
    #c.border_width = 1
    c.image = ui.Image.named('ionicons-close-24')
    c.action = close_button
    b.add_subview(c)
    view.add_subview(b)
    count += 1
def close_button(sender):
    """Remove a tab when its close glyph is tapped.

    Drops the tab's button from the sidebar, slides the tabs below it up
    into the gap, and deletes the matching row from the database.
    """
    marker = sender.superview.y
    tab = sender.superview
    tab_name = sender.superview.title
    view.remove_subview(tab)
    def move():
        # Slide every tab that sat below the removed one up by one slot.
        for subview in view.subviews:
            if subview.y > marker:
                subview.y -= tab_width*1.05
    ui.animate(move, duration = 0.3)
    global count
    count -=1
    # Persist the removal.  BUGFIX: the original never closed this
    # connection; close it even if the DELETE raises.
    conn = sql.connect('tabs.db')
    try:
        conn.text_factory = str
        c = conn.cursor()
        c.execute('DELETE FROM files WHERE name = ?', (tab_name,))
        conn.commit()
    finally:
        conn.close()
# Create tab for current file
@ui.in_background
def add_file(sender):
    """Create a tab for the file currently open in the editor.

    Refuses duplicates (by file name), persists the (name, path) pair to
    the database, then adds a highlighted button to the sidebar.
    """
    current_path = str(editor.get_path())
    name = os.path.split(current_path)[1]
    # BUGFIX: the original leaked the connection on the duplicate path;
    # close it on every exit.
    conn = sql.connect('tabs.db')
    try:
        c = conn.cursor()
        c.execute('''select url from files where name = ?''', (name,))
        is_open = c.fetchall()
        if is_open:
            console.hud_alert('There is already a tab for this file', duration = 1)
            return None
        c.execute('''INSERT INTO files VALUES (?, ?)''', (name, current_path))
        conn.commit()
    finally:
        conn.close()
    open_tabs.append(name)
    add_new_button(name, new = True)
# Open file when tab is pressed
def open_url(sender):
    """Open the file behind a tapped tab in the editor.

    De-highlights the tab of the currently open file, looks up the tapped
    tab's path, and either opens it or -- if the file no longer exists --
    removes the tab and its database row.
    """
    current_path = editor.get_path()
    conn = sql.connect('tabs.db')
    conn.text_factory = str
    c = conn.cursor()
    button_title = sender.title
    # Find the tab of the file currently in the editor so it can be reset.
    c.execute('''select name from files where url = ?''', (current_path,))
    current_tab = c.fetchall()
    if current_tab:
        current_tab = current_tab[0][0]
        view[current_tab].background_color = 'white'
    c.execute('''SELECT url FROM files WHERE name = ?''', (button_title,))
    path = c.fetchone()
    path = path[0]
    if not os.path.isfile(path):
        # File vanished: drop the tab, slide later tabs up, purge the row.
        console.hud_alert('The file for this tab has been moved, renamed, or deleted. the tab will now be removed.', icon = 'error', duration = 3)
        marker = sender.y
        view.remove_subview(sender)
        c.execute('''delete from files where name = ?''', (button_title,))
        global count
        count -= 1
        def move():
            # Animate the tabs below the removed one into the gap.
            for i in range(len(view.subviews)):
                if view.subviews[i].y > marker:
                    view.subviews[i].y -= tab_width*1.05
        ui.animate(move, duration = 0.3)
        conn.commit()
        check_tab()
    else:
        editor.open_file(path)
        sender.background_color = 'orange'
    conn.close()
# Startup: load the sidebar UI, ensure the database exists, and rebuild
# one tab per stored file.
view = ui.load_view('Tabs')
add_button = view['add_button']
remove = view['remove']
edit = view['edit']
# Create database and table on first run and make tabs for all files in database on start
first_time = False
current_path = editor.get_path()
if not os.path.isfile('tabs.db'):
    first_time = True
conn = sql.connect('tabs.db')
conn.text_factory = str
c = conn.cursor()
if first_time == True:
    c.execute('''CREATE TABLE files (name text, url text)''')
q = c.execute('''SELECT name FROM files''')
# NOTE: rebinds the module-level open_tabs to a list of 1-tuples (rows).
open_tabs = q.fetchall()
conn.close()
for i in range(len(open_tabs)):
    add_new_button(open_tabs[i][0])
view.present('sidebar', hide_title_bar = True)
check_tab()
import clipboard
clipboard.set(editor.get_path())
|
UTF-8
|
Python
| false | false | 2,014 |
14,001,593,436,227 |
45f51d447912492e8932955a4787688a89c00566
|
9942ad2a2c7ee07470ad633c059d54122dde23f5
|
/movies/models.py
|
9237e3b69b5a589dbd1597398467201995224b7d
|
[] |
no_license
|
tigerjk/moviesf
|
https://github.com/tigerjk/moviesf
|
9c8396d1b9359c56dbb74f2c8a20df2bbc68d238
|
c2341ef08b4bae54e2ea1d91805f831cc5688236
|
refs/heads/master
| 2016-09-08T00:18:01.509144 | 2014-11-25T05:59:41 | 2014-11-25T05:59:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
# Create your models here.
class Movie(models.Model):
    """A film record: title, year, shooting locations, trivia and credits."""

    title = models.TextField()
    release_year = models.IntegerField()
    locations = models.TextField()
    fun_facts = models.TextField()
    production_company = models.TextField()
    distributor = models.TextField()
    director = models.TextField()
    writer = models.TextField()
    actor_1 = models.TextField()
    actor_2 = models.TextField()
    actor_3 = models.TextField()
|
UTF-8
|
Python
| false | false | 2,014 |
7,748,121,043,830 |
ed77b5b3586f27cd57f612b732fe835b94a26d81
|
3866b9f4357f4ecc8c58f6b3066776e8700aeecb
|
/py_utilities/context_managers.py
|
10a8b1a9a305ff3d5843049a3460b8269c384618
|
[
"MIT"
] |
permissive
|
ryankanno/py-utilities
|
https://github.com/ryankanno/py-utilities
|
973a7634e455a688b88aa99ed8cecff8ed07ea3c
|
68c83a358ef4a607bbbe85d910fb2eba5136dd8e
|
refs/heads/master
| 2020-04-02T12:55:22.618886 | 2014-08-29T04:07:49 | 2014-08-29T04:07:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from contextlib import contextmanager
import os
@contextmanager
def cd(path=None):
    """Temporarily change the working directory.

    Entering the block chdirs to *path* (a no-op when path is None); the
    previous working directory is always restored on exit, even when the
    body raises.
    """
    original = os.getcwd()
    try:
        if path is not None:
            os.chdir(path)
        yield
    finally:
        os.chdir(original)
# vim: filetype=python
|
UTF-8
|
Python
| false | false | 2,014 |
712,964,608,464 |
8fde1c9faa46f10d5d141b1c3d83f9de9b0ebd9b
|
4204b56cbfb2ac8a21cbd271a9d22e9cf1afd2f2
|
/Tools/GardeningServer/alerts.py
|
8679c118b65dddf655ceb1c8f5e8c7cc6d3c00a6
|
[
"BSD-3-Clause"
] |
permissive
|
jtg-gg/blink
|
https://github.com/jtg-gg/blink
|
9fd9effda9bc78b0db3cf8113d5de3d1770e4a03
|
c3ef97cc9aaaf903a9b2977d4a3799fe2a8584a9
|
refs/heads/development11
| 2023-03-07T10:30:16.588846 | 2014-08-19T06:54:16 | 2015-03-06T06:47:58 | 21,159,392 | 0 | 1 | null | true | 2015-07-31T12:42:46 | 2014-06-24T09:32:31 | 2015-03-06T06:51:36 | 2015-07-31T12:42:45 | 5,761,072 | 0 | 0 | 0 |
HTML
| null | null |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import calendar
import datetime
import json
import webapp2
import zlib
from google.appengine.api import memcache
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serialises datetimes as integer POSIX timestamps.

    The naive timetuple is interpreted as UTC via calendar.timegm.
    """

    def default(self, obj):
        if not isinstance(obj, datetime.datetime):
            # Defer to the base class, which raises TypeError for
            # anything it cannot encode.
            return super(DateTimeEncoder, self).default(obj)
        return calendar.timegm(obj.timetuple())
class AlertsHandler(webapp2.RequestHandler):
    """Stores and serves a single zlib-compressed alerts JSON blob in memcache."""

    MEMCACHE_ALERTS_KEY = 'alerts'

    def get(self):
        """Serve the cached alerts blob as JSON (empty body when none cached)."""
        # CORS header: the consumer is served from a different origin.
        self.response.headers.add_header('Access-Control-Allow-Origin', '*')
        self.response.headers['Content-Type'] = 'application/json'
        compressed = memcache.get(AlertsHandler.MEMCACHE_ALERTS_KEY)
        if not compressed:
            # Nothing cached yet: respond 200 with an empty body.
            return
        uncompressed = zlib.decompress(compressed)
        self.response.write(uncompressed)

    def post(self):
        """Accept an alerts JSON payload, stamp it, compress and cache it."""
        try:
            alerts = json.loads(self.request.get('content'))
        except ValueError:
            self.response.set_status(400, 'content field was not JSON')
            return
        # Stamp the upload time; DateTimeEncoder turns it into a timestamp.
        alerts.update({
            'date': datetime.datetime.utcnow(),
            'alerts': alerts['alerts']
        })
        uncompressed = json.dumps(alerts, cls=DateTimeEncoder, indent=1)
        # Level 1 = fastest / least compression.
        compression_level = 1
        compressed = zlib.compress(uncompressed, compression_level)
        memcache.set(AlertsHandler.MEMCACHE_ALERTS_KEY, compressed)
# WSGI route table: a single endpoint serving the cached alerts blob.
app = webapp2.WSGIApplication([
    ('/alerts', AlertsHandler)
])
|
UTF-8
|
Python
| false | false | 2,014 |
13,984,413,519,038 |
f4c469f3f36066b396cb23f40c2e231c8712649e
|
cebf659879e891915dc7382ee57af568991c83e9
|
/loniak_module/sources/rss.py
|
44476b6c39cfa08a5140c58bf08645cbda7666d2
|
[
"GPL-3.0-only"
] |
non_permissive
|
sniku/loniak
|
https://github.com/sniku/loniak
|
b4274526c606a604d8e1d2d5562cf21fbfb80ba0
|
7fcc26a3351a6f7966a56c99b55a139057d88fc9
|
refs/heads/master
| 2020-06-04T19:39:17.791464 | 2014-02-11T21:34:16 | 2014-02-11T21:34:16 | 16,463,586 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import urllib2
from loniak_module.sources.base import SourceBase
from loniak_module.tr.torrent import Torrent
try:
import feedparser
except ImportError:
from loniak_module.libs import feedparser
try:
from dateutil import parser
except ImportError:
from loniak_module.libs.dateutil import parser
class RssSource(SourceBase):
def __unicode__(self):
return 'RSS'
def extract_torrent_urls(self, source_url):
"""
TODO: try/except on url fetching
TODO: try/except on bs parsing
"""
try:
response = urllib2.urlopen(source_url)
headers = response.info()
data = response.read()
except urllib2.URLError:
print "{0} is temporarily unavailable. No torrents downloaded.".format(source_url)
return []
root = feedparser.parse(data)
torrents = []
for entry in root['entries']:
torrent_links = set()
for x in entry['links']:
if 'type' in x and 'href' in x and x['type'] == 'application/x-bittorrent':
torrent_links.add(x['href'])
elif 'href' in x and x['href'].endswith('.torrent'):
torrent_links.add(x['href'])
if 'link' in entry:
torrent_links.add(entry['link'])
if 'magneturi' in entry:
torrent_links.add(entry['magneturi'])
try:
publication_date = parser.parse(entry['published']).replace(tzinfo=None)
except:
publication_date = None
description = entry['description'] if 'description' in entry else ''
guid = entry['id'] if 'id' in entry else torrent_links[0]
title = entry['title'] if 'id' in entry else ''
t = Torrent(torrent_links, title=title, guid=guid, description=description, publication_date=publication_date)
torrents.append(t)
return torrents
|
UTF-8
|
Python
| false | false | 2,014 |
1,142,461,340,377 |
8ebbbbb436bcb560717612efe92804996397f5d9
|
44e1cbd2554419851258b982c468c7ba55c7acda
|
/convener/schedule/views.py
|
124639effca67ce2dd584e25d27dfda899d90e70
|
[
"MIT"
] |
permissive
|
keningle/convener
|
https://github.com/keningle/convener
|
b88ab2e419a4c4a1aa44f0a7e4b6d3210289338e
|
00c1e3a09e903b275dd0f09f6014ad16bb2f0c9a
|
refs/heads/master
| 2016-09-10T09:55:07.019827 | 2013-07-27T02:20:15 | 2013-07-27T02:20:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from rest_framework import viewsets, permissions
from schedule.models import Presenter, Location, Track, Session
from schedule.serializers import (PresenterSerializer, LocationSerializer,
TrackSerializer, SessionSerializer)
class PresenterViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Presenter objects.

    Reads are public; writes require an authenticated user.
    """

    serializer_class = PresenterSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
    queryset = Presenter.objects.all()
class LocationViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Location objects.

    Reads are public; writes require an authenticated user.
    """

    serializer_class = LocationSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
    queryset = Location.objects.all()
class TrackViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Track objects.

    Reads are public; writes require an authenticated user.
    """

    serializer_class = TrackSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
    queryset = Track.objects.all()
class SessionViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Session objects.

    Reads are public; writes require an authenticated user.
    """

    serializer_class = SessionSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
    queryset = Session.objects.all()
|
UTF-8
|
Python
| false | false | 2,013 |
17,334,488,028,033 |
c4f298ee42679f33bea0e95464bb0811b5ef3c9e
|
f1fb1dc7a0cdebfb63fecf65b1552bf3082653a8
|
/opennews/views.py
|
b522ad230001fd80e15ab750f50a2bf7cebbb7fc
|
[] |
no_license
|
alxmhe/kicknews
|
https://github.com/alxmhe/kicknews
|
d0777950a957941de6a12cfad588a488bea75eed
|
d8624a556d64723c000b26f0efcd2d828f62f47a
|
refs/heads/master
| 2020-02-26T16:15:00.257660 | 2013-03-28T00:35:04 | 2013-03-28T00:35:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Import django libs
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response,render
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.db.models import Q
from django.template import RequestContext
from django.contrib.sessions.models import Session
# Import tools
from itertools import chain
from haystack.query import SearchQuerySet
from datetime import datetime
import mimetypes
from unicodedata import normalize
import feedparser
from dateutil import parser
# Import openNews datas
from forms import *
from models import *
from tools import *
# Define your views here
def home(request):
    """Render the landing page.

    The template context is locals(), so the local variable names here
    are part of the template contract and must not be renamed.
    """
    # Get the category and put the name in a list.
    # CONSISTENCY FIX: every sibling view copies the queryset into
    # `categories`; the original left the list empty here.
    categoriesQuerySet = Category.objects.all()
    categories = []
    for cat in categoriesQuerySet:
        categories.append(cat)
    # user = request.user
    return render(request, "index.html", locals())
def view_rss_feed(request, rssID):
    """Render the entries of the RSS feed with primary key *rssID*.

    Feeds that do not exist, or whose moderation mark is below 5, are
    rendered as unavailable (entries=None).  Entries are cached in
    FeedEntry and refreshed from the remote feed on each visit.
    """
    # Get the rss by its ID
    qs = RssFeed.objects.filter(id=rssID)
    # If doesn't exist, or if too bad, return empty entries for error
    if not qs or qs[0].mark < 5:
        return render(request, "viewrss.html", {'entries': None})
    # if exist and accepted, get entries
    else:
        rss = qs[0]
        entries = FeedEntry.objects.filter(rssfeed=rss)
        # if entries doesn't exist, add all the entries
        if not entries:
            feed = feedparser.parse(rss.url)
            entries = feed['entries']
            for x in entries:
                # Feed dates arrive as strings; stored naive (tz stripped).
                x['published'] = parser.parse(x['published']).replace(tzinfo=None)
                entry = FeedEntry(rssfeed=rss, title=x['title'], date=x['published'], link=x['link'], summary=x['summary'])
                entry.save()
        # if entries already exist, check updated date of rss feed and add only news entries
        else:
            feed = feedparser.parse(rss.url)
            entries = feed['entries']
            for x in entries:
                x['published'] = parser.parse(x['published']).replace(tzinfo=None)
                # Persist only entries newer than the last recorded update.
                if x['published'] > rss.updatedDate:
                    entry = FeedEntry(rssfeed=rss, title=x['title'], date=x['published'], link=x['link'], summary=x['summary'])
                    entry.save()
            # Update the rss update date
            rss.updatedDate = parser.parse(feed['feed']['updated']).replace(tzinfo=None)
            rss.save()
        return render(request, "viewrss.html", {'rss':rss, 'entries':entries})
@login_required(login_url='/login/') # You need to be logged for this page
def add_rss_feed(request):
    """View to submit a new RSS feed for moderation.

    A submitted feed starts with mark=0 and must be approved (mark >= 5)
    before its entries are shown; duplicates (same URL) are rejected.
    """
    # Check if POST datas had been sent
    if len(request.POST):
        # make a add rss form with the POST values
        form = add_rss_feed_form(request.POST)
        if form.is_valid():
            # If form is valid, get the url of the rss feed
            rss_feed = form.cleaned_data['rss_feed']
            # Try to find an existing rss feed
            qs = RssFeed.objects.filter(url=rss_feed)
            if not qs:
                # If doesn't exist, add it
                feed = feedparser.parse(rss_feed)
                rss = RssFeed(name=feed['feed']['title'], url=feed['href'], updatedDate=parser.parse(feed['feed']['updated']).replace(tzinfo=None), mark=0)
                rss.save()
                # Clean the form and send it again
                form = add_rss_feed_form()
                return render_to_response("add_rss.html", {'success': "Félicitation, votre flux rss est soumis. Veuillez attendre que les admins le modère.", 'form': form}, context_instance=RequestContext(request))
            else:
                return render_to_response("add_rss.html", {'error': "Ce flux a déjà été soumis. Veuillez attendre son acceptation", 'form': form}, context_instance=RequestContext(request))
        else:
            return render_to_response("add_rss.html", {'form': form}, context_instance=RequestContext(request))
    else:
        # First visit: create an empty form and send it.
        # BUGFIX: the original rendered this branch with the
        # "already submitted" error message even though nothing had
        # been submitted yet.
        form = add_rss_feed_form()
        return render_to_response("add_rss.html", {'form': form}, context_instance=RequestContext(request))
#### TODO ###
@login_required(login_url='/login/') # You need to be logged for this page
def rss_validator(request, id):
    """Staff-only moderation page for pending RSS feeds.

    Lists feeds with mark < 5 the current staff member has not voted on.
    When *id* and a ?choice= parameter are given, records an 'ok' vote
    (incrementing the feed's mark) or a 'trash' vote (vote only).
    """
    if not request.user.is_staff:
        error = "Désolé, vous ne faites pas partie du staff, vous ne pouvez pas accéder à cette page. Un mail contenant votre identifiant a été envoyé aux modérateurs pour vérifier vos accès."
        return render_to_response("rss_validator.html", {'error': error}, context_instance=RequestContext(request))
    # Get all the pending rss feeds and this user's past votes.
    qsFeed = RssFeed.objects.filter(mark__lt=5).order_by('name')
    qsVote = AdminVote.objects.filter(userId=request.user.id).values_list('feedId', flat=True)
    # Keep only the feeds the logged-in user has NOT voted on yet.
    rss_feeds = [rss for rss in qsFeed if rss.id not in qsVote]
    if id:
        qs = RssFeed.objects.filter(id=id)
        qsVote = AdminVote.objects.filter(userId=request.user.id).values_list('feedId', flat=True)
        if qs and (int(id) not in qsVote):
            rssfeed = qs[0]
            if request.GET.get('choice') == 'ok':
                # Approve: bump the mark and record the vote.
                rssfeed.mark += 1
                rssfeed.save()
                vote = AdminVote()
                vote.userId = request.user.id
                vote.feedId = id
                vote.save()
                return HttpResponseRedirect("/rss_validator")
            elif request.GET.get('choice') == 'trash':
                # Reject: record the vote without touching the mark.
                vote = AdminVote()
                vote.userId = request.user.id
                vote.feedId = id
                vote.save()
                return HttpResponseRedirect("/rss_validator")
            elif request.GET.get('choice') not in ['trash', 'ok']:
                error = "Désolé, ce choix n'existe pas. Veuillez vous contenter des boutons de vote."
                return render_to_response("rss_validator.html", {'rss_feeds': rss_feeds, 'error': error}, context_instance=RequestContext(request))
        else:
            error = "Désolé, vous ne pouvez pas voter pour ce flux. Veuillez utiliser le tableau."
            return render_to_response("rss_validator.html", {'rss_feeds': rss_feeds, 'error': error}, context_instance=RequestContext(request))
    return render_to_response("rss_validator.html", {'rss_feeds': rss_feeds}, context_instance=RequestContext(request))
def comment(request):
    """Attach a new comment (from POST data) to an article and render it.

    The template context is locals(), so the local names below (article,
    member, comment) are part of the template contract.
    """
    article = Article.objects.get(id=request.POST.get('articleId'))
    member = Member.objects.get(id=request.POST.get('memberId'))
    comment = Comment(text=request.POST.get('commentText'), memberId=member, articleId=article)
    comment.save()
    return render(request, "comment.html", locals())
def login_user(request):
    """Authenticate a user and redirect to ?next= (or home) on success.

    GET shows an empty login form; POST validates the credentials.
    Already-authenticated users are bounced straight to the home page.
    """
    # Get the category and put the name in a list
    categoriesQuerySet = Category.objects.all()
    categories = []
    for cat in categoriesQuerySet:
        categories.append(cat)
    # Already logged In ? => go Home
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    # If you come from login required page, get the page url in "next"
    next = request.GET.get('next')
    # If form had been send
    if len(request.POST) > 0:
        # make a login form with the POST values
        form = login_form(request.POST)
        if form.is_valid():
            # If form is valid, try to authenticate the user with the POST datas
            s_user = authenticate(username=form.cleaned_data['username'], password=form.cleaned_data['password'])
            if s_user is not None:
                # If the user exist, log him
                login(request, s_user)
                request.session['user_id'] = s_user.id
                if next is not None:
                    # If you come from a login required page, redirect to it
                    return HttpResponseRedirect(next)
                else:
                    # Else go Home
                    return HttpResponseRedirect("/")
            else:
                # If user does not exist, return to the login page & send the next params et the formular
                return render_to_response("login.html", {'categories': categories, 'form': form, 'next':next}, context_instance=RequestContext(request))
        else:
            # If form is not valid, return to the login page & send the next params et the formular
            return render_to_response("login.html", {'categories': categories, 'form': form, 'next':next}, context_instance=RequestContext(request))
    else:
        # If form is not send, it's the first visit.
        # Make an empty login form and send it to login template
        form = login_form()
        return render_to_response("login.html", {'categories': categories, 'form': form, 'next':next}, context_instance=RequestContext(request))
def logout_user(request):
    """Log the current user out, then bounce back to ?next= or home."""
    logout(request)
    destination = request.GET.get('next')
    if destination is None:
        # No origin page recorded: land on the home page.
        return HttpResponseRedirect("/")
    # Came from a login-required page: go back there.
    return HttpResponseRedirect(destination)
def register(request):
    """Create a new user account; on success, log in and go to preferences.

    GET shows an empty registration form; POST creates the User, then
    authenticates with the just-entered password.
    """
    # Get the category and put the name in a list
    categoriesQuerySet = Category.objects.all()
    categories = []
    for cat in categoriesQuerySet:
        categories.append(cat)
    # If form had been send
    if len(request.POST) > 0:
        # make a user registration form with the POST values
        form = user_create_form(request.POST)
        if form.is_valid():
            # If form is valid, create and try to authenticate the user with the POST datas
            user = form.save()
            # Get the password from the POST values
            pwd = form.cleaned_data['password1']
            # Try to authenticate the user
            s_user = authenticate(username=user.username, password=pwd)
            if s_user is not None:
                # If user exist, log him and go to his account management panel
                login(request, s_user)
                return HttpResponseRedirect('preferences')
            else:
                # if he does not exist, return to user registration page with form filled by the POST values
                return render_to_response("register.html", {'categories': categories, 'form': form}, context_instance=RequestContext(request))
        else:
            # if form is not valid, return to registration page
            return render_to_response("register.html", {'categories': categories, 'form': form}, context_instance=RequestContext(request))
    else:
        # if its you first visit, make an empty user registration form and send it
        form = user_create_form()
        return render_to_response("register.html", {'categories': categories, 'form': form}, context_instance=RequestContext(request))
@login_required(login_url='/login/') # You need to be logged for this page
def preferences(request):
    """Let the logged-in user edit their Member profile.

    GET shows the form pre-filled from the user's Member record (empty
    when none exists yet); POST saves the preferences and goes home.
    """
    # Get the category and put the name in a list
    categoriesQuerySet = Category.objects.all()
    categories = []
    for cat in categoriesQuerySet:
        categories.append(cat)
    # If form had been send
    if len(request.POST) > 0:
        # make a user preference form with the POST values
        form = user_preferences_form(request.POST)
        if form.is_valid():
            # If form is valid, save the user preferences and go Home
            form.save(request.user)
            return HttpResponseRedirect('/')
        else:
            # If not, send the preference form and the post datas
            return render_to_response("preferences.html", {'categories': categories, 'form': form}, context_instance=RequestContext(request))
    else:
        # if the form is not send try to find the member from the logged user
        try:
            member = request.user.member
        except Member.DoesNotExist:
            member = None
        if member is not None:
            # if member is not none, create preference form with user's datas
            form = user_preferences_form(instance=request.user.member)
            return render_to_response("preferences.html", {'categories': categories, 'form': form}, context_instance=RequestContext(request))
        else:
            # If member does not exist, send an empty form
            form = user_preferences_form()
            return render_to_response("preferences.html", {'categories': categories, 'form': form}, context_instance=RequestContext(request))
def get_profile(request, userId):
    """Render the public profile page of the user with id *userId*.

    NOTE(review): keeps the original filter(...)[0] lookup, which raises
    IndexError for an unknown id -- confirm that is the intended contract.
    """
    # Categories feed the navigation bar shown on every page.
    categories = list(Category.objects.all())
    user = User.objects.filter(id=userId)[0]
    return render_to_response("public_profile.html", {'categories': categories, 'user': user})
def read_article(request, IDarticle):
    """Render a single article with its votes, average marks, tags and media."""
    # Get the category and put the name in a list
    categoriesQuerySet = Category.objects.all()
    categories = []
    for cat in categoriesQuerySet:
        categories.append(cat)
    # Get the article from the IDarticle params
    article = Article.objects.get(id=IDarticle)
    # Set the article category as active category
    catActive = article.category.url
    # Get the current user votes to know if he has already voted
    user_f_vote_qs = FiabilityVote.objects.filter(userId=request.user.id, articleId=IDarticle)
    if user_f_vote_qs: user_f_vote = user_f_vote_qs[0]
    else: user_f_vote = None
    user_q_vote_qs = QualityVote.objects.filter(userId=request.user.id, articleId=IDarticle)
    if user_q_vote_qs: user_q_vote = user_q_vote_qs[0]
    else: user_q_vote = None
    # Average marks, rounded to 2 decimals; 0 when nobody has voted.
    article_f_vote_qs = FiabilityVote.objects.filter(articleId=IDarticle).values_list('vote', flat=True)
    if article_f_vote_qs:
        article_f_note = round(float(sum(article_f_vote_qs))/float(len(article_f_vote_qs)),2)
    else:
        article_f_note = 0
    article_q_vote_qs = QualityVote.objects.filter(articleId=IDarticle).values_list('vote', flat=True)
    if article_q_vote_qs:
        article_q_note = round(float(sum(article_q_vote_qs))/float(len(article_q_vote_qs)),2)
    else:
        article_q_note = 0
    # Get the tags of the article
    tags = article.tags.all()
    if article.media:
        # If there is a media linked to the article, get the mime of it and the type of media
        # (mediaType is the mime prefix, e.g. 'ima'/'vid'/'aud').
        mime = mimetypes.guess_type(article.media.url)[0]
        mediaType = mime[0:3]
    else:
        # If there is not, set False to mime et mediaType
        mime = False
        mediaType = False
    return render_to_response("article.html", {'article_f_note':article_f_note, 'article_q_note':article_q_note, 'user_f_vote':user_f_vote,'user_q_vote':user_q_vote, 'catActive':catActive, 'categories': categories,'article': article, 'mediaType': mediaType, 'mime': mime, 'tags': tags}, context_instance=RequestContext(request))
@login_required(login_url='/login/') # You need to be logged for this page
def article_quality_vote_ajax(request):
    """AJAX endpoint: record the logged user's quality vote for an article.

    Renders the refreshed average mark.  The template context is
    locals(), so the local variable names are part of the template
    contract.
    """
    if len(request.POST) > 0:
        # NOTE(review): Model.save() returns None, so user_q_vote is
        # always None in the template context.
        user_q_vote = QualityVote(articleId=request.POST.get('articleId'), userId=request.user.id, vote=request.POST.get('vote')).save()
        # Recompute the average quality mark, 0 when there are no votes.
        article_q_vote_qs = QualityVote.objects.filter(articleId=request.POST.get('articleId')).values_list('vote', flat=True)
        if article_q_vote_qs:
            article_q_note = round(float(sum(article_q_vote_qs))/float(len(article_q_vote_qs)),2)
        else:
            article_q_note = 0
    return render(request, "q_or_f_vote.html", locals())
@login_required(login_url='/login/') # You need to be logged for this page
def article_fiability_vote_ajax(request):
    """AJAX endpoint: record the logged user's reliability vote for an article.

    Renders the refreshed average mark.  The template context is
    locals(), so the local variable names are part of the template
    contract.
    """
    if len(request.POST) > 0:
        # NOTE(review): Model.save() returns None, so user_f_vote is
        # always None in the template context.
        user_f_vote = FiabilityVote(articleId=request.POST.get('articleId'), userId=request.user.id, vote=request.POST.get('vote')).save()
        # Recompute the average reliability mark, 0 when there are no votes.
        article_f_vote_qs = FiabilityVote.objects.filter(articleId=request.POST.get('articleId')).values_list('vote', flat=True)
        if article_f_vote_qs:
            article_f_note = round(float(sum(article_f_vote_qs))/float(len(article_f_vote_qs)),2)
        else:
            article_f_note = 0
    return render(request, "q_or_f_vote.html", locals())
@login_required(login_url='/login/') # You need to be logged for this page
def write_article(request):
    """Create a new article from the submitted form.

    Saves the article (with coordinates when the author enabled
    geolocation), then attaches its tags: numeric tokens reference
    existing Tag ids, other tokens create new Tag rows.
    """
    # Get the member from the request user
    member = Member.objects.get(user=request.user)
    # If form had been send
    if len(request.POST) > 0:
        # make a article form with the POST values
        form = article_form(request.POST, request.FILES)
        if form.is_valid():
            # Comma-separated tag tokens from the hidden tag input.
            # (CLEANUP: the original computed this list and never used it,
            # re-splitting the POST field in the loop below.)
            tags = request.POST['tagInput'].split(',')
            # If the form is correctly filled, check the geoloc status of the author
            if member.geoloc is not False:
                # Get coord from POST (an hidden input from template, filled by js)
                coordonnee = request.POST['coordonnee']
                # Save the article with the coord
                article = form.save(m_member=member, coord=coordonnee)
            else:
                # Save the article without the coord
                article = form.save(m_member=member)
            for tag in tags:
                if tag.isdigit():
                    # Numeric token: link an existing tag.
                    tagQuery = Tag.objects.get(id=tag)
                    article.tags.add(tagQuery)
                else:
                    # Free-text token: create the tag first.
                    qs = Tag(tag=tag)
                    qs.save()
                    article.tags.add(qs)
            article.save()
            return HttpResponseRedirect('/categories')
        else:
            # If it's not valid, send the form with POST datas
            return render_to_response("write.html", {'form': form, 'member':member}, context_instance=RequestContext(request))
    else:
        # If it's not valid, send an empty form
        form = article_form()
        return render_to_response("write.html", {'form': form, 'member':member}, context_instance=RequestContext(request))
def list_article(request, categorie):
    """List the articles of *categorie* ('all' for every category),
    split across three roughly equal columns for the template."""
    # Get the category and put the name in a list
    categoriesQuerySet = Category.objects.all()
    categories = []
    for cat in categoriesQuerySet:
        categories.append(cat)
    if not Category.objects.filter(url=categorie) and categorie != "all":
        return render_to_response("liste.html", {'categories': categories, 'error': "Cette catégorie n'existe pas"})
    # Filter articles by category name
    if categorie == "all":
        articles = Article.objects.all()
        catActive = False
    else:
        articles = Article.objects.filter(category=Category.objects.filter(url=categorie)) # Here, .title() is to put the first letter in upperCase
        catActive = categorie
    # Get the size of each columns
    nbArticlePerCol = len(articles)/3
    # Init columns
    articlesCol1, articlesCol2, articlesCol3 = [], [], []
    # Fill each columns with articles
    counter = 1
    # Get logged member (False when anonymous or without a Member record)
    member = False
    if request.user.is_authenticated():
        qs = Member.objects.filter(user = request.user)
        if qs:
            member = qs[0]
    for article in articles:
        # Add the comments relatives to the current article
        article.comments = Comment.objects.filter(articleId=article.id)
        # Route the article to column 1, 2 or 3 by its running index.
        if counter <= nbArticlePerCol+1:
            articlesCol1.append(article)
        elif (counter > nbArticlePerCol+1) & (counter <= 2*nbArticlePerCol+2):
            articlesCol2.append(article)
        else:
            articlesCol3.append(article)
        counter += 1
    # Return the articles list, the categories list and the active categorie
    return render_to_response("liste.html", {'member': member, 'articles': articles, 'articlesCol1': articlesCol1, 'articlesCol2': articlesCol2, 'articlesCol3': articlesCol3, 'categories': categories, 'catActive': categorie}, context_instance=RequestContext(request))
# def search(request, words, categorie):
# """The search view"""
# categoriesList = Category.objects.all()
# categories = []
# for cat in categoriesList:
# categories.append(cat.name)
# if len(request.POST) > 0:
# form = searchForm(request.POST)
# if form.is_valid():
# words = form.cleaned_data['searchWords'].split(' ')
# else:
# return render_to_response("search.html", {'form': form, 'categories': categories, 'catActive': categorie.title()})
# else:
# form = searchForm()
# words = words.split('_')
# articles = []
# if categorie == "all":
# for word in words:
# articles = list(chain(articles, Article.objects.filter(Q(title__contains = word) | Q(text__contains = word))))
# tmp = Tag.objects.filter(tag = word )
# if len(tmp) is not 0:
# articles += tmp[0].article_set.all()
# else:
# for word in words:
# articles = list(chain(articles, Article.objects.filter(Q(category=Category.objects.filter(name=categorie.title())) & (Q(title__contains = word) | Q(text__contains = word)) )))
# tmp = Tag.objects.filter(tag = word)
# if len(tmp) is not 0:
# articles += tmp[0].article_set.all()
# return render_to_response("search.html", {'form': form, 'words': words, 'articles': list(set(articles)), 'categories': categories, 'catActive': categorie.title()})
|
UTF-8
|
Python
| false | false | 2,013 |
3,659,312,175,420 |
34ea9f16d5ab4843bf40805c7a9eb35db7317169
|
9c268112e35e4508ebb9672eb6f6ec4e2e586b13
|
/PersistLabPlugins/persistlabplugins/data/__init__.py
|
aeb2a018d4fff6d546e96070fb5ae1b2c392a4c8
|
[] |
no_license
|
ushiro/persistlab
|
https://github.com/ushiro/persistlab
|
f7952bf5da4f2c5826a755d3e59e55e91cb2b01f
|
cb124a9303c86b8ba007858c2589e63df9d7e9e5
|
refs/heads/master
| 2016-09-01T20:50:41.671111 | 2014-07-22T13:30:42 | 2014-07-22T13:30:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! python
"""Lists of data-file paths bundled next to this package, grouped by kind."""
import os
import glob

DATADIR = os.path.dirname(os.path.abspath(__file__))


def _glob_here(pattern):
    # Expand *pattern* relative to this package's data directory.
    return glob.glob(os.path.join(DATADIR, pattern))

# !!! SOME TESTS MIGHT BE CREATING FIGURES IN mydata directory
LIFNTRANSIENT = _glob_here('*transient*.txt')
LIFNPROFILE = _glob_here('*profile*')
LIFNCV = _glob_here('*cv*')
# OCP
LIFN_OCP_CHI = _glob_here('ocp_chi*')
LIFN_OCP_NI = _glob_here('ocp_ni*')
LIFN_OCP_C = _glob_here('ocpc*')
|
UTF-8
|
Python
| false | false | 2,014 |
4,329,327,051,912 |
80e69f58c8b7c6e9f9d54096367b3799a7ed2656
|
bb4d57ab9401d92a66afdacd3936c2631b3ad4a8
|
/vivisect/visgraph/unittests/basictest.py
|
2cda85fb4ede0078e0d457f60cef36f38f981b81
|
[] |
no_license
|
Fitblip/SocketSniff
|
https://github.com/Fitblip/SocketSniff
|
1fb23b2593bb51930aba1630eec1b677871ecba8
|
4b6e72642962413ec4ced2172a0ec1a04849768c
|
refs/heads/master
| 2021-01-11T19:43:55.446819 | 2014-07-21T07:11:29 | 2014-07-21T07:11:29 | 22,054,688 | 5 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import visgraph.graphcore as vg_graphcore
def vgtest_basic_graph():
    """Smoke test: build a tiny hierarchical graph and print its roots.

    Creates four named nodes, wires edges so that n1 has no incoming edge,
    attaches an extra info key to n1, then prints the root node set and the
    stored info value.  (Python 2 print statements; depends on
    visgraph.graphcore.)
    """
    g = vg_graphcore.HierarchicalGraph()
    n1 = g.addNode(ninfo={'name':'woot'})
    n2 = g.addNode(ninfo={'name':'baz'})
    n3 = g.addNode(ninfo={'name':'faz'})
    n4 = g.addNode(ninfo={'name':'foo'})
    # Arbitrary per-node metadata attached after creation.
    g.setNodeInfo(n1,'hi', 'oh hai!')
    g.addEdge(n1,n2)
    g.addEdge(n1,n3)
    g.addEdge(n1,n4)
    g.addEdge(n3,n4)
    print 'ROOTS',g.getRootNodes()
    print g.getNodeInfo(n1, 'hi')
|
UTF-8
|
Python
| false | false | 2,014 |
16,149,077,048,701 |
2fa47aab0a83fd3ba669270511095dd322a067cc
|
90548a047a1eb6dd8ec25a50d37d356483f92d7a
|
/calcPower.py
|
297e7aa53a0d456410cb4557a135a188dfde95aa
|
[] |
no_license
|
bataille16/sim-scripts
|
https://github.com/bataille16/sim-scripts
|
d32511f95f9999ed9b37e6c2246ee18a32e94ed5
|
c10e6ebaa0908cad6f6fe03ff8d5eb36eae73420
|
refs/heads/master
| 2016-09-03T06:51:34.301372 | 2013-06-05T03:08:29 | 2013-06-05T03:08:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os, sys
import subprocess
import argparse
"""
#Python script to process marss86 yaml stats file to McPat to generate power and area statistics
- xml file with configurations for processor is already available
- yaml stats file(s) from marss86 simulation(s) is/are available
Script take yaml stats file along with configuration xml file, process it through marss2mcpat.py,
generates temp.xml and input temp.xml to mcpat binary to output power and area numbers
"""
# Can process single or multiple marss86 yaml stats files
# All stats files in a directory must end with the '.stats' extension'
#hardcoded paths to marss2mcpat and mcpat binary
MARSS_2_MCPAT_PATH = "/home/prism/marss.dramsim/scripts/marss2mcpat.py"
MCPAT_PATH = "/home/prism/mcpat/mcpat"
"""
Arguments:
--num_core # Number of cores simulated
--cpu_mode # Mode to collect [kernel, user, total]
--freq # Processor frequency
--machine # OoO or In-Order
--xml_in machine confiuration
--marss Path to marss file(s)
--out Path to output power file(s)
"""
simStats = list() #global var of lis of marss files to process
def processOptions():
    """Parse command-line options for the MARSS -> McPAT pipeline.

    Returns the argparse namespace with: marss (stats file or directory),
    xml_in (McPAT machine config), cpu_mode (user/kernel/total), num_core,
    freq (MHz, restricted to known values), machine (0 = out-of-order,
    1 = in-order) and out (output directory for power files).
    All options are required; argparse exits on invalid input.
    """
    argparser = argparse.ArgumentParser(description= \
        "Parse Marss results to mcpat input")
    input_group = argparser.add_argument_group('Input')
    input_group.add_argument('--marss', required=True, metavar='FILE/DIR',
                             help='Statistics output by a MARSS simulation run')
    input_group.add_argument('--xml_in', required=True, metavar='FILE',
                             help='McPAT configuration for a processor')
    input_group.add_argument('--cpu_mode', required=True, metavar='MODE',
                             help='Mode for stats {user, kernel, total}',
                             choices=['user', 'kernel', 'total'])
    input_group.add_argument('--num_core', required=True, metavar='NCORE',
                             help='Number of cores')
    input_group.add_argument('--freq', required=True, metavar='FREQ',
                             help='Clock rate',
                             choices=['1000', '1600','2000', '3333', '4000'])
    input_group.add_argument('--machine', required=True, metavar='TYPE',
                             help='Machine type (ooo 0; inorder 1)',
                             choices=['0', '1'])
    input_group.add_argument('--out', required=True, metavar='DIR',
                             help='Directory of output power file(s)')
    args = argparser.parse_args()
    return args
def fillSimStats(BENCH_PATH):
    """Populate the global simStats list with MARSS stats files to process.

    BENCH_PATH may be a single stats file (stored as-is) or a directory, in
    which case only entries ending in '.stats' are collected as bare
    filenames -- processFiles later joins them back onto BENCH_PATH.
    Exits with status -1 if the path is neither a file nor a directory.
    """
    if os.path.isfile(BENCH_PATH):
        simStats.append(BENCH_PATH)
    elif os.path.isdir(BENCH_PATH):
        for filename in os.listdir(BENCH_PATH):
            if filename.endswith(".stats"):
                simStats.append(filename)
    else:
        print "Invalid marss stat files\n"
        sys.exit(-1)
#Main function
def processFiles(BENCH_PATH,cpu_mode,num_core,freq,machine,xml_in,OUTPUT_PATH):
#remove any outstanding temp.xml
if os.path.isfile("temp.xml"):
os.remove("temp.xml")
#for each file in simStats, call marss2mcpat to createe temp.xml
for i in range(0,len(simStats)):
print "Creating XML for ", simStats[i]
try:
stats = os.path.join(BENCH_PATH,simStats[i])
makeXML = subprocess.Popen(["python",str(MARSS_2_MCPAT_PATH), '--cpu_mode', str(cpu_mode), '--num_core', str(num_core),\
'--freq', str(freq),'--machine',str(machine), '--marss', str(stats), '--xml_in', str(xml_in), '-o', 'temp.xml'])
makeXML.wait()
#By now, temp.xml is created, now send it to McPat for processing
except:
print "Could not call marss2mcpat.py for ", simStats[i]
#sys.exit(-1)
continue
#for each temp.xml file create, call mcpat to create power file
print "\nCalling mcpath for", simStats[i]
try:
outFileName = list()
temp = simStats[i].split('/') #hack to remove absolute path in case of single file
temp = temp[len(temp) -1]
outFileName.append(temp.split('.')[0])
outFileName.append("_power")
outFileName = ''.join(outFileName)
outFileName = os.path.join(OUTPUT_PATH,outFileName)
outFile = open(outFileName, 'w')
makeMcPat = subprocess.Popen([MCPAT_PATH, '-infile','temp.xml','-print_level','1','-opt_for_clk','1'], \
stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
for line in makeMcPat.stdout:
outFile.write(line)
makeMcPat.wait()
outFile.close()
except:
print "Coulnd not call Mcpat for ", simStats[i]
#sys.exit(-1)
print "\nComplete\n"
if __name__ == "__main__":
    # Entry point: parse CLI options, gather the stats files, bail out if
    # nothing was found, then run the MARSS -> McPAT pipeline.
    args = processOptions()
    fillSimStats(args.marss)
    if len(simStats) == 0:
        print "No stats to process.. Exiting!"
        sys.exit(-1)
    processFiles(args.marss,args.cpu_mode,args.num_core,args.freq,args.machine,args.xml_in, args.out)
|
UTF-8
|
Python
| false | false | 2,013 |
6,502,580,515,194 |
9243f4e0014f750c9628a4fe84bfd3bda644ebdb
|
f700037f9e44df482040da96d8a922a7fc053292
|
/floppy_charcount/widgets.py
|
46255ce2f1ffca69c219b7b3f0cf0f07dfd01cd3
|
[
"MIT"
] |
permissive
|
ashwoods/django-floppy-charcount
|
https://github.com/ashwoods/django-floppy-charcount
|
5909319ca40e9fcc10955ca345e76d7bf3f92c39
|
164b38ace20f4422e241fc44daf1e8c62e6010d0
|
refs/heads/master
| 2016-10-18T03:55:24.766526 | 2014-05-07T16:35:00 | 2014-05-07T16:35:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from floppyforms.widgets import TextInput, Textarea
class CountInput(TextInput):
    """Floppyforms text input whose template adds a JS character counter."""
    template_name = "floppy_charcount/input.html"
class CountTextArea(Textarea):
    """Floppyforms textarea whose template adds a JS character counter."""
    template_name = "floppy_charcount/textarea.html"

    class Media:
        # Static assets implementing and styling the counter widget.
        css = {'screen': ('floppy_charcount/css/charsleft.css',)}
        js = ('floppy_charcount/js/charsleft.js',)
|
UTF-8
|
Python
| false | false | 2,014 |
16,140,487,134,277 |
b11ab376aac512855ac3f6a7578936a24f688357
|
d091989782182b3e8e886fe848d46463c3814c93
|
/projects/models.py
|
f19cfcabe5dd0370cfecb81faad8d01f0976d90c
|
[
"MIT"
] |
permissive
|
wapcaplet/readthedocs.org
|
https://github.com/wapcaplet/readthedocs.org
|
75163f328ee19020d7ff816ea6668d4d291e3620
|
a0eadad2d1c4c61c0b72a2cd801e2aca32291e49
|
refs/heads/master
| 2020-04-02T22:04:30.762420 | 2011-02-08T01:28:13 | 2011-02-08T01:28:13 | 870,395 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.template.defaultfilters import slugify
from django.template.loader import render_to_string
from django.utils.functional import memoize
from projects import constants
from projects.utils import diff, dmp, safe_write
from taggit.managers import TaggableManager
from vcs_support.base import get_backend
from vcs_support.utils import Lock
import fnmatch
import os
import fnmatch
import re
class ProjectManager(models.Manager):
    """Manager exposing only projects that are not marked as skipped."""

    def live(self, *args, **kwargs):
        """Return non-skipped projects, further narrowed by the given lookups."""
        return self.filter(skip=False).filter(*args, **kwargs)
class Project(models.Model):
    """A documentation project: either manually edited (File objects) or
    imported from a VCS repository and built with Sphinx."""
    #Auto fields
    pub_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)
    #Generally from conf.py
    user = models.ForeignKey(User, related_name='projects')
    name = models.CharField(max_length=255)
    slug = models.SlugField()
    description = models.TextField(blank=True,
        help_text='restructuredtext description of the project')
    repo = models.CharField(max_length=100, blank=True,
        help_text='URL for your code (hg or git). Ex. http://github.com/ericholscher/django-kong.git')
    repo_type = models.CharField(max_length=10, choices=constants.REPO_CHOICES, default='git')
    project_url = models.URLField(blank=True, help_text='the project\'s homepage')
    version = models.CharField(max_length=100, blank=True,
        help_text='project version these docs apply to, i.e. 1.0a')
    copyright = models.CharField(max_length=255, blank=True,
        help_text='project copyright information')
    theme = models.CharField(max_length=20,
        choices=constants.DEFAULT_THEME_CHOICES, default=constants.THEME_DEFAULT,
        help_text='<a href="http://sphinx.pocoo.org/theming.html#builtin-themes" target="_blank">Examples</a>')
    suffix = models.CharField(max_length=10, editable=False, default='.rst')
    default_version = models.CharField(max_length=255, default='latest')
    # In default_branch, None means the backend should choose the appropraite branch. Eg 'master' for git
    default_branch = models.CharField(max_length=255, default=None, null=True,
        blank=True, help_text='Leave empty to use the default value for your VCS or if your VCS does not support branches.')
    #Other model data.
    path = models.CharField(max_length=255, editable=False)
    featured = models.BooleanField()
    skip = models.BooleanField()
    build_pdf = models.BooleanField()
    django_packages_url = models.CharField(max_length=255, blank=True)
    tags = TaggableManager(blank=True)
    objects = ProjectManager()

    class Meta:
        ordering = ('slug',)

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Auto-derive the slug from the name on first save.
        if not self.slug:
            self.slug = slugify(self.name)
        super(Project, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('projects_detail', args=[self.slug])

    def get_docs_url(self, version_slug=None):
        """URL of the served docs; falls back to the project's default version."""
        version = version_slug or self.get_default_version()
        return reverse('docs_detail', kwargs={
            'project_slug': self.slug,
            'version_slug': version,
            'filename': '',
        })

    def get_builds_url(self):
        return reverse('builds_project_list', kwargs={
            'project_slug': self.slug,
        })

    def get_pdf_url(self, version_slug='latest'):
        """Public (MEDIA_URL-based) URL of the built PDF for a version."""
        path = os.path.join(settings.MEDIA_URL,
                            'pdf',
                            self.slug,
                            version_slug,
                            '%s.pdf' % self.slug)
        return path

    @property
    def user_doc_path(self):
        # Per-user, per-project root under DOCROOT.
        return os.path.join(settings.DOCROOT, self.user.username, self.slug)

    @property
    def full_doc_path(self):
        """
        The path to the documentation root in the project.
        """
        doc_base = os.path.join(self.user_doc_path, self.slug)
        for possible_path in ['docs', 'doc', 'Doc']:
            if os.path.exists(os.path.join(doc_base, '%s' % possible_path)):
                return os.path.join(doc_base, '%s' % possible_path)
        #No docs directory, assume a full docs checkout
        return doc_base

    @property
    def full_build_path(self):
        """
        The path to the build html docs in the project.
        """
        doc_path = self.full_doc_path
        for pos_build in ['build', '_build', '.build']:
            if os.path.exists(os.path.join(doc_path, '%s/html' % pos_build)):
                return os.path.join(doc_path, '%s/html' % pos_build)
        #No standard path? Hack one.
        # NOTE: returns None implicitly if no index.html is found either.
        for pos_build in ['index.html']:
            matches = self.find(pos_build)
            if len(matches) > 0:
                return os.path.dirname(matches[0])

    @property
    def rtd_build_path(self):
        """
        The path to the build html docs in the project.
        """
        return os.path.join(self.user_doc_path, 'rtd-builds')

    @property
    def conf_filename(self):
        # Raises IOError when no checkout path has been recorded yet.
        if self.path:
            return os.path.join(self.path, 'conf.py')
        raise IOError

    @property
    def is_imported(self):
        # A project is "imported" when it has a VCS repo URL.
        return bool(self.repo)

    @property
    def has_good_build(self):
        return any([build.success for build in self.builds.all()])

    @property
    def has_versions(self):
        return self.versions.exists()

    @property
    def has_pdf(self, version_slug='latest'):
        # NOTE(review): properties cannot receive arguments through attribute
        # access, so version_slug is always 'latest' in practice -- confirm
        # before relying on other versions here.
        return os.path.exists(self.get_pdf_path(version_slug))

    @property
    def sponsored(self):
        # Hard-coded allow list plus anything with a 'django' slug prefix.
        non_django_projects = ['fabric', 'easy-thumbnails',
                               'python-storymarket', 'virtualenv',
                               'virtualenvwrapper', 'varnish',
                               'pip']
        if self.slug in non_django_projects \
            or self.slug.startswith('django'):
            return True
        return False

    @property
    def working_dir(self):
        return os.path.join(self.user_doc_path, self.slug)

    @property
    def vcs_repo(self):
        # Lazily instantiate and cache the VCS backend for this project.
        if hasattr(self, '_vcs_repo'):
            return self._vcs_repo
        backend = get_backend(self.repo_type)
        if not backend:
            repo = None
        else:
            repo = backend(self)
        self._vcs_repo = repo
        return repo

    @property
    def contribution_backend(self):
        # Lazily instantiate and cache the contribution backend, if any.
        if hasattr(self, '_contribution_backend'):
            return self._contribution_backend
        if not self.vcs_repo:
            cb = None
        else:
            cb = self.vcs_repo.get_contribution_backend()
        self._contribution_backend = cb
        return cb

    def repo_lock(self, timeout=5, polling_interval=0.2):
        """Acquire the per-project build lock (keyed by slug)."""
        return Lock(self.slug, timeout, polling_interval)

    def find(self, file):
        """
        A balla API to find files inside of a projects dir.
        """
        matches = []
        for root, dirnames, filenames in os.walk(self.full_doc_path):
            for filename in fnmatch.filter(filenames, file):
                matches.append(os.path.join(root, filename))
        return matches
    # Memoize find() results for 2 seconds to avoid repeated directory walks.
    find = memoize(find, {}, 2)

    def get_latest_build(self):
        try:
            return self.builds.all()[0]
        except IndexError:
            return None

    def active_versions(self):
        # Active versions are those either built locally or uploaded.
        return self.versions.filter(built=True, active=True) | self.versions.filter(active=True, uploaded=True)

    def get_pdf_path(self, version_slug='latest'):
        """Filesystem (MEDIA_ROOT-based) path of the built PDF for a version."""
        path = os.path.join(settings.MEDIA_ROOT,
                            'pdf',
                            self.slug,
                            version_slug,
                            '%s.pdf' % self.slug)
        return path

    #File Building stuff.
    #Not sure if this is used
    def get_top_level_files(self):
        return self.files.live(parent__isnull=True).order_by('ordering')

    def get_index_filename(self):
        return os.path.join(self.path, 'index.rst')

    def get_rendered_index(self):
        return render_to_string('projects/index.rst.html', {'project': self})

    def write_index(self):
        # Only manually-edited projects get a generated index written to disk.
        if not self.is_imported:
            safe_write(self.get_index_filename(), self.get_rendered_index())

    def get_latest_revisions(self):
        revision_qs = FileRevision.objects.filter(file__project=self,
            file__status=constants.LIVE_STATUS)
        return revision_qs.order_by('-created_date')

    def get_default_version(self):
        """
        Get the default version (slug).

        Returns self.default_version if the version with that slug actually
        exists (is built and published). Otherwise returns 'latest'.
        """
        # latest is a special case where we don't have to check if it exists
        if self.default_version == 'latest':
            return self.default_version
        # check if the default_version exists
        version_qs = self.versions.filter(
            slug=self.default_version,
            active=True,
            built=True
        )
        if version_qs.exists():
            return self.default_version
        return 'latest'
class FileManager(models.Manager):
    """Manager exposing only files whose status is LIVE."""

    def live(self, *args, **kwargs):
        """Return live files, further narrowed by the given lookups."""
        return self.filter(status=constants.LIVE_STATUS).filter(*args, **kwargs)
class File(models.Model):
    """A single editable documentation page inside a Project's page tree."""
    project = models.ForeignKey(Project, related_name='files')
    parent = models.ForeignKey('self', null=True, blank=True, related_name='children')
    heading = models.CharField(max_length=255)
    slug = models.SlugField()
    content = models.TextField()
    # Cached slash-joined chain of slugs from the root; maintained in save().
    denormalized_path = models.CharField(max_length=255, editable=False)
    ordering = models.PositiveSmallIntegerField(default=1)
    status = models.PositiveSmallIntegerField(choices=constants.STATUS_CHOICES,
        default=constants.LIVE_STATUS)
    objects = FileManager()

    class Meta:
        ordering = ('denormalized_path',)

    def __unicode__(self):
        return '%s: %s' % (self.project.name, self.heading)

    def save(self, *args, **kwargs):
        """Slugify if needed, refresh denormalized_path, cascade the path
        update to descendants, and touch the project's modified time."""
        if not self.slug:
            self.slug = slugify(self.heading)
        if self.parent:
            path = '%s/%s' % (self.parent.denormalized_path, self.slug)
        else:
            path = self.slug
        self.denormalized_path = path
        super(File, self).save(*args, **kwargs)
        # NOTE(review): self.children is a related manager, so this truth
        # test is always True; the recursive re-save keeps descendant
        # denormalized_path values in sync with this node's new path.
        if self.children:
            def update_children(children):
                for child in children:
                    child.save()
                    update_children(child.children.all())
            update_children(self.children.all())
        #Update modified time on project.
        self.project.save()

    @property
    def depth(self):
        # Number of path components, i.e. 1 for a root page.
        return len(self.denormalized_path.split('/'))

    def create_revision(self, old_content, comment):
        """Record a revision holding the diff from the current content back
        to old_content."""
        FileRevision.objects.create(
            file=self,
            comment=comment,
            diff=diff(self.content, old_content)
        )

    @property
    def current_revision(self):
        # Most recent non-reverted revision (ordering is -revision_number).
        return self.revisions.filter(is_reverted=False)[0]

    def get_html_diff(self, rev_from, rev_to):
        """Return an HTML-rendered diff between two revision numbers."""
        rev_from = self.revisions.get(revision_number=rev_from)
        rev_to = self.revisions.get(revision_number=rev_to)
        diffs = dmp.diff_main(rev_from.diff, rev_to.diff)
        return dmp.diff_prettyHtml(diffs)

    def revert_to(self, revision_number):
        """Restore the file's content to the state at revision_number."""
        revision = self.revisions.get(revision_number=revision_number)
        revision.apply()

    @property
    def filename(self):
        # On-disk .rst path mirroring the denormalized page path.
        return os.path.join(
            self.project.path,
            '%s.rst' % self.denormalized_path
        )

    def get_rendered(self):
        return render_to_string('projects/doc_file.rst.html', {'file': self})

    def write_to_disk(self):
        safe_write(self.filename, self.get_rendered())

    @models.permalink
    def get_absolute_url(self):
        return ('docs_detail', [self.project.slug, 'en', 'latest', self.denormalized_path + '.html'])
class FileRevision(models.Model):
    """One entry in a File's edit history.

    Stores only the diff needed to transform the newer content back toward
    this revision (diff-match-patch text format); full content is
    reconstructed on demand in get_file_content().
    """
    file = models.ForeignKey(File, related_name='revisions')
    comment = models.TextField(blank=True)
    diff = models.TextField(blank=True)
    created_date = models.DateTimeField(auto_now_add=True)
    # Monotonically increasing per-file counter, assigned in save().
    revision_number = models.IntegerField()
    is_reverted = models.BooleanField(default=False)

    class Meta:
        ordering = ('-revision_number',)

    def __unicode__(self):
        return self.comment or '%s #%s' % (self.file.heading, self.revision_number)

    def get_file_content(self):
        """
        Apply the series of diffs after this revision in reverse order,
        bringing the content back to the state it was in this revision
        """
        after = self.file.revisions.filter(revision_number__gt=self.revision_number)
        content = self.file.content
        for revision in after:
            patch = dmp.patch_fromText(revision.diff)
            content = dmp.patch_apply(patch, content)[0]
        return content

    def apply(self):
        """Revert the file to this revision, marking newer revisions as
        reverted and recording the revert itself as a new revision."""
        original_content = self.file.content
        # store the old content on the file
        self.file.content = self.get_file_content()
        self.file.save()
        # mark reverted changesets
        reverted_qs = self.file.revisions.filter(revision_number__gt=self.revision_number)
        reverted_qs.update(is_reverted=True)
        # create a new revision
        FileRevision.objects.create(
            file=self.file,
            comment='Reverted to #%s' % self.revision_number,
            diff=diff(self.file.content, original_content)
        )

    def save(self, *args, **kwargs):
        # On first save only, assign the next revision number for this file.
        if not self.pk:
            max_rev = self.file.revisions.aggregate(max=models.Max('revision_number'))
            if max_rev['max'] is None:
                self.revision_number = 1
            else:
                self.revision_number = max_rev['max'] + 1
        super(FileRevision, self).save(*args, **kwargs)
class ImportedFile(models.Model):
    """A built documentation file discovered in an imported project's output,
    tracked with an md5 so unchanged files can be detected."""
    project = models.ForeignKey(Project, related_name='imported_files')
    name = models.CharField(max_length=255)
    slug = models.SlugField()
    path = models.CharField(max_length=255)
    md5 = models.CharField(max_length=255)

    @models.permalink
    def get_absolute_url(self):
        return ('docs_detail', [self.project.slug, 'en', 'latest', self.path])

    def __unicode__(self):
        return '%s: %s' % (self.name, self.project)
|
UTF-8
|
Python
| false | false | 2,011 |
2,130,303,818,125 |
f1d46db43f34a9a8b09a600a1c0ed1df37dcf3bd
|
7feaf597e2ebe6f0b2ac8398e9f43a4c820182f6
|
/src/experiments/python/timing.py
|
61a94713d0b8af9784ae323ff952f73d025480af
|
[] |
no_license
|
edechter/strappy
|
https://github.com/edechter/strappy
|
485f2b272995c52107790552597a45d073eb05c9
|
e81714ef13ea0f2cc6148b548392cf354dd16cfb
|
refs/heads/master
| 2021-01-10T20:04:17.545527 | 2014-05-29T17:04:24 | 2014-05-29T17:04:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Load wall-clock timing results for experiment runs of varying size into a
# {size: seconds} dict for later analysis/plotting.  (Python 2: iteritems.)
from os.path import join, exists
import pandas as pd
import matplotlib as plt
datadir = "../data"
# Map from run size to the relative path of that run's recorded time file.
D = { 1000 : "R1_1000_wjittertiming_2013-01-20_12-45-31.810308_EST/time.txt",
      500 : "R1_500_wjittertiming_2013-01-20_12-43-23.900711_EST/time.txt",
      600: "R1_600_wjittertiming_2013-01-20_12-43-41.697841_EST/time.txt",
      700: "R1_700_wjittertiming_2013-01-20_12-44-01.5959_EST/time.txt",
      800: "R1_800_wjittertiming_2013-01-20_12-44-25.596314_EST/time.txt",
      900: "R1_900_wjittertiming_2013-01-20_12-44-56.474792_EST/time.txt"}
d = {}
for k, v in D.iteritems():
    filename = join(datadir, v)
    assert(exists(filename))
    # Each time.txt holds a single float followed by a trailing newline.
    n = float(open(filename, 'r').read()[:-1])
    d[k] = n
|
UTF-8
|
Python
| false | false | 2,014 |
12,695,923,336,791 |
a9e269d35fb9ee6a8152f8bc750a0d596ea83265
|
147c0538e8283f4de0a941bcf4786f470e2ea4ca
|
/media.py
|
8c4ea134e261e06236d2479b0724619bdee68327
|
[] |
no_license
|
zoejane/CS
|
https://github.com/zoejane/CS
|
4a3cc7a130cb387d8c2a64bfb8e0f448e0927b90
|
491013f86d558d6aaadfdb9dfb5600577e453047
|
refs/heads/master
| 2020-06-02T03:20:29.856161 | 2014-04-26T05:05:03 | 2014-04-26T05:05:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import webbrowser
class Movie():
    """A movie with its storyline, poster artwork, and a YouTube trailer."""

    def __init__(self, movie_title, movie_storyline, poster_image,
                 trailer_youtube):
        self.title = movie_title
        self.storyline = movie_storyline
        self.poster_image_url = poster_image
        self.trailer_youtube_url = trailer_youtube

    def show_trailer(self):
        """Open the trailer URL in the default web browser."""
        webbrowser.open(self.trailer_youtube_url)
class Music():
    """A song with album/artist metadata and links to artwork and audio.

    Mirrors the sibling Movie class: constructor arguments are stored
    directly as instance attributes.
    """

    # Bug fix: the constructor was misspelled '__inti__', so it was never
    # invoked and instances ended up without any of these attributes.
    def __init__(self,
                 music_title, music_album, music_artist,
                 artist_photo, album_photo,
                 song_url):
        self.title = music_title
        self.album = music_album
        self.artist = music_artist
        self.artist_photo = artist_photo
        self.album_photo = album_photo
        self.song_url = song_url
|
UTF-8
|
Python
| false | false | 2,014 |
18,382,460,049,270 |
4cf333c3c91089cd17d6ee87e7c464a52bc9e3eb
|
2be2393362128514fb50045b1766eb83ef976fd1
|
/docgen/ondemand/models.py
|
a47b11f2dd87db48bdefa52b20960b78d95d9a5d
|
[] |
no_license
|
Open-MBEE/DocWeb-Django
|
https://github.com/Open-MBEE/DocWeb-Django
|
bb04f9306c800e86ebe1200071fcb30fad3e446d
|
c6965a1d42274eb55f97e86391aed1b82c035b29
|
refs/heads/master
| 2021-01-16T19:21:06.608329 | 2013-09-23T04:45:42 | 2013-09-23T04:45:42 | 19,901,813 | 0 | 0 | null | false | 2020-04-06T00:57:41 | 2014-05-18T03:17:24 | 2014-06-09T15:34:02 | 2020-04-06T00:33:49 | 25,772 | 0 | 0 | 1 |
XSLT
| false | false |
from django.db import models
from django.contrib.auth.models import User, Group
from django.core.urlresolvers import reverse
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from favorite.models import Favorite
from comment.models import Comment
import urllib
from django.conf import settings
MAX_LENGTH = 100
## Categories stored on TeamWork
class TW_server(models.Model):
    """A Teamwork server, identified by name and reachable at address:twport."""
    name = models.CharField(max_length=MAX_LENGTH)
    address = models.CharField(max_length=MAX_LENGTH)
    twport = models.CharField(max_length=MAX_LENGTH)

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name
class TW_category(models.Model):
    """A Teamwork category mirrored from a TW_server.

    Categories that have disappeared from Teamwork are kept but flagged
    'deprecated' rather than deleted.
    """
    name = models.CharField(max_length=MAX_LENGTH)
    tw_server = models.ForeignKey(TW_server)
    deprecated = models.BooleanField() #true if no longer in teamwork

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        # Keeps the historical (misspelled) suffix for display compatibility.
        if self.deprecated:
            return self.name + "_DEPRICATED"
        else:
            return self.name

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(TW_category).id

    def favorited(self, user):
        """Whether *user* has favorited this category."""
        # Bug fix: type_id was previously passed as a bound method instead of
        # being called (every other use in this module calls self.type_id()).
        return Favorite.exists(user, self.type_id(), self.id)

    def tags(self):
        from tag.models import Tag_instance
        return Tag_instance.objects.filter(object_id=self.id,content_type__pk=self.type_id())

    def url(self):
        # Fall back to '' when the view URL cannot be reversed.
        try:
            r = reverse('twcategory.views.twcategory_view',kwargs={'category_str':self.id})
        except:
            r = ''
        return r

    def href(self):
        # (Removed an unused urlencode(self.name) local that was never used.)
        return '<a class="favorite" href="%s">%s</a>' % (self.url(), str(self))
## Projects stored on TeamWork
## Projects stored on TeamWork
class TW_project(models.Model):
    """A Teamwork project mirrored from a TW_server."""
    name = models.CharField(max_length=MAX_LENGTH)
    tw_server = models.ForeignKey(TW_server)

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(TW_project).id

    def tags(self):
        from tag.models import Tag_instance
        return Tag_instance.objects.filter(object_id=self.id,content_type__pk=self.type_id())

    def url(self):
        # Fall back to '' when the view URL cannot be reversed.
        try:
            r = reverse('twproject.views.twproject_view',kwargs={'project_str':self.id})
        except:
            r = ''
        return r

    def href(self):
        # (Removed an unused urlencode(self.name) local that was never used.)
        return '<a href="%s">%s</a>' % (self.url(), str(self))
class Project(models.Model):
    """A DocWeb project; access is granted through the associated auth groups."""
    name = models.CharField(max_length=MAX_LENGTH)
    groups = models.ManyToManyField(Group, related_name='projects')

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(Project).id

    def url(self):
        return reverse('views.homePage_view') + '?project=' + str(self.id)

    def href(self):
        return '<a href="%s">%s</a>' % (self.url(), str(self))

    def groupNames(self):
        """Comma-separated names of all groups with access to this project."""
        return ', '.join(group.name for group in self.groups.all())
class Category(models.Model):
    """A grouping of documents within a (web-side) Project."""
    name = models.CharField(max_length=MAX_LENGTH)
    project = models.ForeignKey(Project)
    parent = models.ForeignKey('self', null=True, blank=True, related_name='children') #this is not used, can be deleted or changed

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(Category).id

    def tags(self):
        from tag.models import Tag_instance
        return Tag_instance.objects.filter(object_id=self.id,content_type__pk=self.type_id())

    def getProject(self):
        return self.project

    def url(self):
        # Fall back to '' when the view URL cannot be reversed.
        try:
            r = reverse('ondemand.views.category_view',kwargs={'cid':self.id})
        except:
            r = ''
        return r

    def href(self):
        return '<a href="%s">%s</a>' % (self.url(), str(self))
class Stylesheet(models.Model):
    """A named DocGen stylesheet; 'file' is its path/identifier."""
    name = models.CharField(max_length=MAX_LENGTH, default = 1)
    file = models.CharField(max_length=MAX_LENGTH)

    class Meta:
        ordering = ['name']

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(Stylesheet).id

    def __unicode__(self):
        return self.name
class Document(models.Model):
    """A Teamwork package whose generated documentation can be requested,
    browsed, and downloaded in several formats."""
    tw_package_id = models.CharField(max_length=MAX_LENGTH,null=True,blank=True)
    tw_package = models.CharField(max_length=MAX_LENGTH)
    tw_project = models.ForeignKey(TW_project)
    category = models.ForeignKey(Category)
    comments = generic.GenericRelation(Comment)

    class Meta:
        ordering = ['tw_package']

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(Document).id

    def tags(self):
        from tag.models import Tag_instance
        return Tag_instance.objects.filter(object_id=self.id,content_type__pk=self.type_id())

    def link_helper(self,ext):
        """Reverse the document link view for a given artifact extension;
        returns '' if reversing fails."""
        try:
            r = reverse('ondemand.views.link_view',kwargs={'package':self.id,'extension':ext})
        except:
            r = ''
        return r

    # Shortcuts for each generated-artifact link.
    def link(self):
        return self.link_helper('')
    def nav(self):
        return self.link_helper('.nav')
    def pdf(self):
        return self.link_helper('.pdf')
    def html(self):
        return self.link_helper('.html')
    def log(self):
        return self.link_helper('.log')
    def pdfslide(self):
        return self.link_helper('.pdfslide')
    def html2(self):
        return self.link_helper('.html2')
    def zip(self):
        return self.link_helper('.zip')

    def getLatestRequest(self):
        """Most recent Request for this document that has a completed Job,
        or None if no such request exists."""
        try:
            ret = Request.objects.filter(document=self).exclude(job=None).latest('job__started')
        except Request.DoesNotExist:
            ret = None
        return ret

    def getLatestJob(self):
        """Job attached to the latest request, or None."""
        r = self.getLatestRequest()
        if r is not None:
            return r.job
        else:
            return None

    def getProject(self):
        return self.category.getProject()

    def __unicode__(self):
        return str(self.tw_package)

    def url(self):
        return self.link()

    def href(self):
        return '<a href="%s">%s</a>' % (self.url(), str(self))
class Job(models.Model):
    """A single document-generation run and the artifacts it produced.

    The log/html/html2/pdf/pdfslide fields store strings of the form
    '<ticket>/<relative-path>' which the *_url methods split apart to build
    file-serving URLs.
    """
    received = models.DateTimeField()
    started = models.DateTimeField()
    finished = models.DateTimeField()
    log = models.CharField(max_length=1000)
    html = models.CharField(max_length=1000)
    html2 = models.CharField(max_length=1000,null=True)
    pdf = models.CharField(max_length=1000)
    pdfslide = models.CharField(max_length=1000,null=True)

    class Meta:
        ordering = ['started']
        get_latest_by="started"

    # Returns the time to generate the document
    def type_id(self):
        return ContentType.objects.get_for_model(Job).id

    def tags(self):
        from tag.models import Tag_instance
        return Tag_instance.objects.filter(object_id=self.id,content_type__pk=self.type_id())

    def generate(self):
        """Elapsed generation time as a timedelta (finished - started)."""
        return self.finished-self.started

    # HTML anchor helpers for each artifact.
    def log_href(self):
        return '<a href="%s">Log</a>' % self.log_url()
    def html_href(self):
        return '<a href="%s">HTML</a>' % self.url('.html')
    def html2_href(self):
        return '<a href="%s">Model</a>' % self.url('.html2')
    def pdf_href(self):
        return '<a href="%s">PDF</a>' % self.pdf_url()
    def pdfslide_href(self):
        return '<a href="%s">PDF Slide</a>' % self.pdfslide_url()

    def html_url(self):
        return self.url('.html')
    def zip_url(self):
        return self.url('.zip')
    def html2_url(self):
        return self.url('.html2')

    def pdf_url(self):
        # Split '<ticket>/<path>' into its two URL components.
        ticket = self.pdf.split('/')[0]
        path = self.pdf[len(ticket)+1:]
        return reverse('ondemand.views.files', kwargs={'ticket':ticket, 'path':path})

    def pdfslide_url(self):
        ticket = self.pdfslide.split('/')[0]
        path = self.pdfslide[len(ticket)+1:]
        return reverse('ondemand.views.files', kwargs={'ticket':ticket, 'path':path})

    def log_url(self):
        ticket = self.log.split('/')[0]
        path = self.log[len(ticket)+1:]
        return reverse('ondemand.views.files', kwargs={'ticket':ticket, 'path':path})

    def html_url_real(self):
        ticket = self.html.split('/')[0]
        path = self.html[len(ticket)+1:]
        return reverse('ondemand.views.files', kwargs={'ticket':ticket, 'path':path})

    def html2_url_real(self):
        ticket = self.html2.split('/')[0]
        path = self.html2[len(ticket)+1:]
        return reverse('ondemand.views.files', kwargs={'ticket':ticket, 'path':path})

    def zip_url_real(self):
        # Zip archive always lives at 'html.zip' under the html ticket.
        ticket = self.html.split('/')[0]
        return reverse('ondemand.views.files', kwargs={'ticket':ticket, 'path':'html.zip'})

    def request(self):
        return self.request_set.all()[0] #this is stupid, job should have a foreign key to request, but it'll all be changed in db refactor anyways

    def nav(self):
        return self.url('.nav')

    def url(self, extension=''):
        return reverse('ondemand.views.generation_id_view',kwargs={'ID':self.id, 'extension':extension})

    def href(self):
        return '<a href="%s">%s</a>' % (self.url(''), str(self))

    def getProject(self):
        return self.request().getProject()

    def __unicode__(self):
        return '%s (%s)' % (str(self.request().document), str(self.started))
class Request(models.Model):
    """A user's request to generate a Document with a given Stylesheet.

    'ticket' identifies the generation job remotely; 'job' is filled in once
    generation completes.  'email'/'web20' record how the user wants to be
    notified; 'preview' marks preview-only runs.
    """
    document = models.ForeignKey(Document)
    stylesheet = models.ForeignKey(Stylesheet)
    user = models.ForeignKey(User)
    time = models.DateTimeField(auto_now_add=True) #Sets time/date when first created
    ticket = models.CharField(max_length=MAX_LENGTH, blank=True, null=True, unique=True)
    sent = models.BooleanField()
    job = models.ForeignKey(Job, null=True, blank=True, unique=True)
    email = models.BooleanField()
    web20 = models.BooleanField()
    preview = models.BooleanField(default=False)

    class Meta:
        ordering = ['time']

    def __unicode__(self):
        return "Request:"+str(self.user)+":"+str(self.document)

    def type_id(self):
        """Content-type id of this model, used for generic relations."""
        return ContentType.objects.get_for_model(Request).id

    def tags(self):
        from tag.models import Tag_instance
        return Tag_instance.objects.filter(object_id=self.id,content_type__pk=self.type_id())

    def getProject(self):
        return self.document.getProject()
def urlencode(url):
    """Percent-encode *url* for use in a query string (spaces become '+').
    Python 2 urllib API."""
    return urllib.quote_plus(url)
def urlunencode(url):
    """Inverse of urlencode: decode %xx escapes and '+' back to spaces.
    Python 2 urllib API."""
    return urllib.unquote_plus(url)
|
UTF-8
|
Python
| false | false | 2,013 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.