__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
13,013,750,940,114 |
9aa52325d3a607e58c0f20445e1edf38336729d7
|
76aaac038926734b39e7f37665ada75c33746b6e
|
/emulation/mesh0/ns3CCN.py
|
18e85febde7bdf7d9105a089bf0a503fcfda57b9
|
[] |
no_license
|
cl4u2/clone
|
https://github.com/cl4u2/clone
|
0b0eeb915d2da8fb4e818d3f739a4bc49203bdf8
|
2e4789f49ee281dc68e1f2e074a8346d60b6c010
|
refs/heads/master
| 2021-01-18T02:40:25.780140 | 2013-10-10T10:32:03 | 2013-10-10T10:32:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python -i
# Copyright (c)2011-2013 the Boeing Company.
# See the LICENSE file included in this distribution.
#
# author: Jeff Ahrenholz <[email protected]>
#
'''
ns3wifi.py - This script demonstrates using CORE with the ns-3 Wifi model.
How to run this:
pushd ~/ns-allinone-3.16/ns-3.16
sudo ./waf shell
popd
python -i ns3wifi.py
To run with the CORE GUI:
pushd ~/ns-allinone-3.16/ns-3.16
sudo ./waf shell
cored
# in another terminal
cored -e ./ns3wifi.py
# in a third terminal
core
# now select the running session
'''
import os, sys, time, optparse, datetime, math
try:
from core import pycore
except ImportError:
# hack for Fedora autoconf that uses the following pythondir:
if "/usr/lib/python2.6/site-packages" in sys.path:
sys.path.append("/usr/local/lib/python2.6/site-packages")
if "/usr/lib64/python2.6/site-packages" in sys.path:
sys.path.append("/usr/local/lib64/python2.6/site-packages")
if "/usr/lib/python2.7/site-packages" in sys.path:
sys.path.append("/usr/local/lib/python2.7/site-packages")
if "/usr/lib64/python2.7/site-packages" in sys.path:
sys.path.append("/usr/local/lib64/python2.7/site-packages")
from core import pycore
import ns.core
from core.misc import ipaddr
from core.misc.ipaddr import MacAddr
from corens3.obj import Ns3Session, Ns3WifiNet, CoreNs3Net
# python interactive shell tab autocompletion
import rlcompleter, readline
readline.parse_and_bind('tab:complete')
def add_to_server(session):
    '''Register *session* with the cored server, when one exists.

    The cored server injects a global ``server`` into this script's
    namespace when it executes the script; when the script runs
    stand-alone that global is absent.  Returns True if the session was
    registered, False otherwise.
    '''
    global server
    try:
        server.addsession(session)
    except NameError:
        # no global 'server': running outside of cored
        return False
    return True
def wifisession(opt):
    ''' Run a test wifi session.

    Builds a 4x4 grid of CORE nodes attached to one ns-3 wifi network,
    installs the CCN/OLSR service on each, starts the simulation and
    boots the node services.

    opt: parsed command-line options; only opt.duration is used here.
    Returns the started Ns3Session.
    '''
    #myservice = "Olsrd4Service"
    myservice = "CcnOlsrNS3Service"
    numWirelessNode=16;
    numWiredNode=0;
    # bound the wifi MAC-layer queue length before the session is created
    ns.core.Config.SetDefault("ns3::WifiMacQueue::MaxPacketNumber",ns.core.UintegerValue(100))
    session = Ns3Session(persistent=True, duration=opt.duration)
    # NOTE(review): hard-coded per-host paths to the CCNx / OLSR /
    # routing-by-name trees; adjust for the local machine
    session.cfg['ccnx_dir']='/home/clauz/clone-git/ccnx/'
    session.cfg['olsr_dir']='/home/clauz/clone-git/olsrd-ccninfo/'
    session.cfg['rbn_dir']='/home/clauz/clone-git/routingbyname/'
    session.name = "ns3ccn"
    session.filename = session.name + ".py"
    # node count advertised to the GUI: wireless + wired nodes + the wlan object
    session.node_count = str(numWirelessNode + numWiredNode + 1)
    session.services.importcustom("/home/clauz/.core/myservices")
    add_to_server(session)
    wifi = session.addobj(cls=Ns3WifiNet, name="wlan1", rate="OfdmRate54Mbps")
    wifi.setposition(150, 150, 0)
    wifi.phy.Set("RxGain", ns.core.DoubleValue(20.0))
    prefix = ipaddr.IPv4Prefix("10.0.0.0/16")
    def ourmacaddress(n):
        # deterministic locally-administered MAC address per node number
        return MacAddr.fromstring("02:02:00:00:00:%02x" % n)
    nodes = []
    # create nodes n1..n16 (numbered 1 + row*4 + col) on the wifi net
    for i in range(4):
        for j in range(4):
            k = 1 + i*4 + j
            node = session.addnode(name = "n%d" % k)
            node.newnetif(wifi, ["%s/%s" % (prefix.addr(k), prefix.prefixlen)], hwaddr=ourmacaddress(k))
            session.services.addservicestonode(node, "router", myservice, verbose=True)
            nodes.append(node)
    session.setupconstantmobility()
    # place the nodes on a 100-unit grid, both in ns-3 and in the CORE GUI
    for i in range(4):
        for j in range(4):
            k = i*4 + j
            nodes[k].setns3position(100 * j, 100 * i, 0)
            nodes[k].setposition(100 * j, 100 * i, 0)
    #wifi.usecorepositions()
    # PHY tracing
    #wifi.phy.EnableAsciiAll("ns3wifi")
    session.thread = session.run(vis=False)
    # services are started only after the simulation thread is running
    for node in nodes:
        session.services.bootnodeservices(node)
    return session
def main():
    ''' Main routine when running from command-line.

    Parses the -d/--duration and -v/--verbose options, warns about any
    stray positional arguments, and starts the wifi session.
    Returns the session created by wifisession().
    '''
    usagestr = "usage: %prog [-h] [options] [args]"
    parser = optparse.OptionParser(usage=usagestr)
    parser.set_defaults(duration=600, verbose=False)

    parser.add_option("-d", "--duration", dest="duration", type=int,
                      help="number of seconds to run the simulation")
    parser.add_option("-v", "--verbose", dest="verbose",
                      action="store_true", help="be more verbose")

    # fix: removed a dead nested usage() helper that was never called;
    # optparse already prints help via -h/--help
    (opt, args) = parser.parse_args()

    # positional arguments are not used by this script
    for a in args:
        sys.stderr.write("ignoring command line argument: '%s'\n" % a)

    return wifisession(opt)
# Run when executed directly, and also when executed by cored, which
# runs the script in the __builtin__ namespace.
if __name__ == "__main__" or __name__ == "__builtin__":
    session = main()
    print "\nsession =", session
|
UTF-8
|
Python
| false | false | 2,013 |
7,593,502,219,316 |
89e63b4d83ca2ed324efe494edcc1df3d457c238
|
1252814110dd89294afc48e9d23ff060203c39d9
|
/build/third_party/buildbot_8_4p1/buildbot/status/web/builder.py
|
4a4f19def5a957096326a02fbb4083706d89e989
|
[
"GPL-1.0-or-later",
"GPL-2.0-only",
"MIT",
"BSD-3-Clause",
"GPL-3.0-only"
] |
non_permissive
|
leiferikb/bitpop
|
https://github.com/leiferikb/bitpop
|
5a69c7afcde2ea0bd1d7b2e7c962a47ad41271eb
|
a9952d42790e3c7ea44137574623dcdae7ac148d
|
refs/heads/master
| 2021-05-15T02:07:17.069341 | 2014-10-05T12:53:32 | 2014-10-05T12:53:32 | 20,436,564 | 7 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.web import html
from twisted.web.util import Redirect
import re, urllib, time
from twisted.python import log
from twisted.internet import defer
from buildbot import interfaces
from buildbot.status.web.base import HtmlResource, BuildLineMixin, \
path_to_build, path_to_slave, path_to_builder, path_to_change, \
path_to_root, getAndCheckProperties, ICurrentBox, build_get_class, \
map_branches, path_to_authfail, ActionResource
from buildbot.status.web.build import BuildsResource, StatusResourceBuild
from buildbot import util
# /builders/$builder
class StatusResourceBuilder(HtmlResource, BuildLineMixin):
    """Web status page for a single builder (/builders/$builder).

    Renders the builder.html template with current/pending/recent builds
    and slave state, and handles the force/ping/cancel/stop actions.
    """
    addSlash = True

    def __init__(self, builder_status):
        HtmlResource.__init__(self)
        self.builder_status = builder_status

    def getPageTitle(self, request):
        return "Buildbot: %s" % self.builder_status.getName()

    def builder(self, build, req):
        """Summarize one in-progress build as a dict for the template."""
        b = {}

        b['num'] = build.getNumber()
        b['link'] = path_to_build(req, build)

        when = build.getETA()
        if when is not None:
            b['when'] = util.formatInterval(when)
            b['when_time'] = time.strftime("%H:%M:%S",
                                           time.localtime(time.time() + when))

        if build.started:
            b['delay'] = util.formatInterval(time.time() - build.started)

        step = build.getCurrentStep()
        # TODO: is this necessarily the case?
        if not step:
            b['current_step'] = "[waiting for Lock]"
        else:
            if step.isWaitingForLocks():
                b['current_step'] = "%s [waiting for Lock]" % step.getName()
            else:
                b['current_step'] = step.getName()

        b['stop_url'] = path_to_build(req, build) + '/stop'

        return b

    @defer.deferredGenerator
    def content(self, req, cxt):
        """Fill the template context; yields waitForDeferred wrappers per
        the twisted deferredGenerator protocol."""
        b = self.builder_status

        cxt['name'] = b.getName()
        req.setHeader('Cache-Control', 'no-cache')
        slaves = b.getSlaves()
        # NOTE(review): this list is dead code -- the name is rebound to
        # an integer counter further down before it is ever read
        connected_slaves = [s for s in slaves if s.isConnected()]

        cxt['current'] = [self.builder(x, req) for x in b.getCurrentBuilds()]

        cxt['pending'] = []
        wfd = defer.waitForDeferred(
            b.getPendingBuildRequestStatuses())
        yield wfd
        statuses = wfd.getResult()
        for pb in statuses:
            changes = []

            wfd = defer.waitForDeferred(
                pb.getSourceStamp())
            yield wfd
            source = wfd.getResult()

            wfd = defer.waitForDeferred(
                pb.getSubmitTime())
            yield wfd
            submitTime = wfd.getResult()

            if source.changes:
                for c in source.changes:
                    changes.append({ 'url' : path_to_change(req, c),
                                     'who' : c.who,
                                     'revision' : c.revision,
                                     'repo' : c.repository })

            cxt['pending'].append({
                'when': time.strftime("%b %d %H:%M:%S",
                                      time.localtime(submitTime)),
                'delay': util.formatInterval(util.now() - submitTime),
                'id': pb.brid,
                'changes' : changes,
                'num_changes' : len(changes),
                })

        # ?numbuilds=N caps the "recent builds" listing (default 20)
        numbuilds = int(req.args.get('numbuilds', ['20'])[0])
        recent = cxt['recent'] = []
        # NOTE(review): numbuilds is already an int; int(numbuilds) is redundant
        for build in b.generateFinishedBuilds(num_builds=int(numbuilds)):
            recent.append(self.get_line_values(req, build, False))

        sl = cxt['slaves'] = []
        connected_slaves = 0
        for slave in slaves:
            s = {}
            sl.append(s)
            s['link'] = path_to_slave(req, slave)
            s['name'] = slave.getName()
            c = s['connected'] = slave.isConnected()
            if c:
                s['admin'] = unicode(slave.getAdmin() or '', 'utf-8')
                connected_slaves += 1
        cxt['connected_slaves'] = connected_slaves

        cxt['authz'] = self.getAuthz(req)
        cxt['builder_url'] = path_to_builder(req, b)

        template = req.site.buildbot_service.templates.get_template("builder.html")
        # final yield is the rendered page (deferredGenerator return value)
        yield template.render(**cxt)

    def force(self, req, auth_ok=False):
        """Handle the 'force build' form: validate input, create a source
        stamp + buildset, and redirect back to the builder page.

        auth_ok=True skips the permission check (caller already checked).
        """
        name = req.args.get("username", ["<unknown>"])[0]
        reason = req.args.get("comments", ["<no reason specified>"])[0]
        branch = req.args.get("branch", [""])[0]
        revision = req.args.get("revision", [""])[0]
        repository = req.args.get("repository", [""])[0]
        project = req.args.get("project", [""])[0]

        log.msg("web forcebuild of builder '%s', branch='%s', revision='%s',"
                " repository='%s', project='%s' by user '%s'" % (
                self.builder_status.getName(), branch, revision, repository,
                project, name))

        # check if this is allowed
        if not auth_ok:
            if not self.getAuthz(req).actionAllowed('forceBuild', req, self.builder_status):
                log.msg("..but not authorized")
                return Redirect(path_to_authfail(req))

        # keep weird stuff out of the branch revision, and property strings.
        # TODO: centralize this somewhere.
        if not re.match(r'^[\w.+/~-]*$', branch):
            log.msg("bad branch '%s'" % branch)
            return Redirect(path_to_builder(req, self.builder_status))
        if not re.match(r'^[ \w\.\-\/]*$', revision):
            log.msg("bad revision '%s'" % revision)
            return Redirect(path_to_builder(req, self.builder_status))
        properties = getAndCheckProperties(req)
        if properties is None:
            return Redirect(path_to_builder(req, self.builder_status))
        # empty strings mean "default branch" / "latest revision"
        if not branch:
            branch = None
        if not revision:
            revision = None

        master = self.getBuildmaster(req)
        d = master.db.sourcestamps.addSourceStamp(branch=branch,
                revision=revision, project=project, repository=repository)
        def make_buildset(ssid):
            r = ("The web-page 'force build' button was pressed by '%s': %s\n"
                 % (html.escape(name), html.escape(reason)))
            return master.addBuildset(
                    builderNames=[self.builder_status.getName()],
                    ssid=ssid, reason=r, properties=properties.asDict())
        d.addCallback(make_buildset)
        # errors are logged but intentionally do not fail the request
        d.addErrback(log.err, "(ignored) while trying to force build")

        # send the user back to the builder page
        return Redirect(path_to_builder(req, self.builder_status))

    def ping(self, req):
        """Ping the builder's slave(s); redirect back to the builder page."""
        log.msg("web ping of builder '%s'" % self.builder_status.getName())
        if not self.getAuthz(req).actionAllowed('pingBuilder', req, self.builder_status):
            log.msg("..but not authorized")
            return Redirect(path_to_authfail(req))
        c = interfaces.IControl(self.getBuildmaster(req))
        bc = c.getBuilder(self.builder_status.getName())
        bc.ping()
        # send the user back to the builder page
        return Redirect(path_to_builder(req, self.builder_status))

    def getChild(self, path, req):
        """Route sub-URLs: action verbs and the /builds collection."""
        if path == "force":
            return self.force(req)
        if path == "ping":
            return self.ping(req)
        if path == "cancelbuild":
            return CancelChangeResource(self.builder_status)
        if path == "stopchange":
            return StopChangeResource(self.builder_status)
        if path == "builds":
            return BuildsResource(self.builder_status)

        return HtmlResource.getChild(self, path, req)
class CancelChangeResource(ActionResource):
    """Action resource: cancel one pending build request (?id=N) or all
    of them (?id=all) on a single builder."""

    def __init__(self, builder_status):
        ActionResource.__init__(self)
        self.builder_status = builder_status

    @defer.deferredGenerator
    def performAction(self, req):
        """Cancel the requested pending build(s); final yield is the URL
        to redirect the user to."""
        try:
            request_id = req.args.get("id", [None])[0]
            if request_id == "all":
                cancel_all = True
            else:
                cancel_all = False
                request_id = int(request_id)
        except:
            # missing or non-numeric id: fall through without cancelling
            request_id = None

        authz = self.getAuthz(req)
        if request_id:
            c = interfaces.IControl(self.getBuildmaster(req))
            builder_control = c.getBuilder(self.builder_status.getName())

            wfd = defer.waitForDeferred(
                builder_control.getPendingBuildRequestControls())
            yield wfd
            brcontrols = wfd.getResult()

            for build_req in brcontrols:
                if cancel_all or (build_req.brid == request_id):
                    log.msg("Cancelling %s" % build_req)
                    # permission is checked per request
                    if authz.actionAllowed('cancelPendingBuild',
                                           req, build_req):
                        build_req.cancel()
                    else:
                        yield path_to_authfail(req)
                        return
                    if not cancel_all:
                        break

        yield path_to_builder(req, self.builder_status)
class StopChangeMixin(object):
    """Shared logic for cancelling pending builds that contain a given
    change number (?change=N)."""

    @defer.deferredGenerator
    def stopChangeForBuilder(self, req, builder_status, auth_ok=False):
        """Cancel pending build requests on *builder_status* whose source
        stamp includes the requested change; final yield is False when
        authorization failed, True otherwise."""
        try:
            request_change = req.args.get("change", [None])[0]
            request_change = int(request_change)
        except:
            # missing or non-numeric change number: nothing to cancel
            request_change = None

        authz = self.getAuthz(req)
        if request_change:
            c = interfaces.IControl(self.getBuildmaster(req))
            builder_control = c.getBuilder(builder_status.getName())

            wfd = defer.waitForDeferred(
                builder_control.getPendingBuildRequestControls())
            yield wfd
            brcontrols = wfd.getResult()
            # map build request id -> its control object
            build_controls = dict((x.brid, x) for x in brcontrols)

            wfd = defer.waitForDeferred(
                builder_status.getPendingBuildRequestStatuses())
            yield wfd
            build_req_statuses = wfd.getResult()

            for build_req in build_req_statuses:
                wfd = defer.waitForDeferred(
                    build_req.getSourceStamp())
                yield wfd
                ss = wfd.getResult()

                if not ss.changes:
                    continue

                for change in ss.changes:
                    if change.number == request_change:
                        control = build_controls[build_req.brid]
                        log.msg("Cancelling %s" % control)
                        if (auth_ok or authz.actionAllowed('stopChange',
                                                           req, control)):
                            control.cancel()
                        else:
                            yield False
                            return
        yield True
class StopChangeResource(StopChangeMixin, ActionResource):
    """Action resource: cancel pending builds containing a given change
    on one specific builder."""

    def __init__(self, builder_status):
        ActionResource.__init__(self)
        self.builder_status = builder_status

    @defer.deferredGenerator
    def performAction(self, req):
        """Cancel all pending builds that include a given numbered change."""
        wfd = defer.waitForDeferred(
            self.stopChangeForBuilder(req, self.builder_status))
        yield wfd
        success = wfd.getResult()

        # final yield is the redirect URL
        if not success:
            yield path_to_authfail(req)
        else:
            yield path_to_builder(req, self.builder_status)
class StopChangeAllResource(StopChangeMixin, ActionResource):
    """Action resource: cancel pending builds containing a given change
    across every builder."""

    def __init__(self, status):
        ActionResource.__init__(self)
        self.status = status

    @defer.deferredGenerator
    def performAction(self, req):
        """Cancel all pending builds that include a given numbered change."""
        # check the permission once up front instead of per builder
        authz = self.getAuthz(req)
        if not authz.actionAllowed('stopChange', req):
            yield path_to_authfail(req)
            return

        for bname in self.status.getBuilderNames():
            builder_status = self.status.getBuilder(bname)
            # auth_ok=True: permission was already checked above
            wfd = defer.waitForDeferred(
                self.stopChangeForBuilder(req, builder_status, auth_ok=True))
            yield wfd
            if not wfd.getResult():
                yield path_to_authfail(req)
                return

        yield path_to_root(req)
# /builders/_all
class StatusResourceAllBuilders(HtmlResource, BuildLineMixin):
    """Resource behind /builders/_all: force or stop builds across every
    configured builder."""

    def __init__(self, status):
        HtmlResource.__init__(self)
        self.status = status

    def getChild(self, path, req):
        if path == "forceall":
            return self.forceall(req)
        if path == "stopall":
            return self.stopall(req)
        if path == "stopchangeall":
            return StopChangeAllResource(self.status)
        return HtmlResource.getChild(self, path, req)

    def forceall(self, req):
        """Force a build on every builder, then redirect to the root page."""
        if not self.getAuthz(req).actionAllowed('forceAllBuilds', req):
            return Redirect(path_to_authfail(req))

        for name in self.status.getBuilderNames():
            bs = self.status.getBuilder(name)
            # auth_ok: permission was already checked above
            StatusResourceBuilder(bs).force(req, auth_ok=True)
        # back to the welcome page
        return Redirect(path_to_root(req))

    def stopall(self, req):
        """Stop every currently-running build, then redirect to the root."""
        if not self.getAuthz(req).actionAllowed('stopAllBuilds', req):
            return Redirect(path_to_authfail(req))

        for name in self.status.getBuilderNames():
            bs = self.status.getBuilder(name)
            state, running = bs.getState()
            if state != "building":
                continue
            for cur in running:
                build_status = bs.getBuild(cur.number)
                if build_status:
                    StatusResourceBuild(build_status).stop(req, auth_ok=True)
        # go back to the welcome page
        return Redirect(path_to_root(req))
# /builders/_selected
class StatusResourceSelectedBuilders(HtmlResource, BuildLineMixin):
    """Resource behind /builders/_selected: force or stop builds on the
    builders the user ticked in the web UI ('selected' query args)."""

    def __init__(self, status):
        HtmlResource.__init__(self)
        self.status = status

    def getChild(self, path, req):
        if path == "forceselected":
            return self.forceselected(req)
        if path == "stopselected":
            return self.stopselected(req)
        return HtmlResource.getChild(self, path, req)

    def _selected_names(self, req):
        # non-empty builder names from the 'selected' form fields
        return [name for name in req.args.get("selected", []) if name]

    def forceselected(self, req):
        """Force a build on each selected builder, then redirect to root."""
        if not self.getAuthz(req).actionAllowed('forceAllBuilds', req):
            return Redirect(path_to_authfail(req))

        for name in self._selected_names(req):
            bs = self.status.getBuilder(name)
            # auth_ok: permission was already checked above
            StatusResourceBuilder(bs).force(req, auth_ok=True)
        # back to the welcome page
        return Redirect(path_to_root(req))

    def stopselected(self, req):
        """Stop running builds on each selected builder, then redirect."""
        if not self.getAuthz(req).actionAllowed('stopAllBuilds', req):
            return Redirect(path_to_authfail(req))

        for name in self._selected_names(req):
            bs = self.status.getBuilder(name)
            state, running = bs.getState()
            if state != "building":
                continue
            for cur in running:
                build_status = bs.getBuild(cur.number)
                if build_status:
                    StatusResourceBuild(build_status).stop(req, auth_ok=True)
        # go back to the welcome page
        return Redirect(path_to_root(req))
# /builders
class BuildersResource(HtmlResource):
    """Web status page listing all builders (/builders)."""
    pageTitle = "Builders"
    addSlash = True

    @defer.deferredGenerator
    def content(self, req, cxt):
        """Fill the builders.html template context: one summary row per
        builder, with pending-build counts gathered asynchronously."""
        status = self.getStatus(req)

        # optional filters: ?builder=...&branch=...
        builders = req.args.get("builder", status.getBuilderNames())
        branches = [b for b in req.args.get("branch", []) if b]

        # get counts of pending builds for each builder
        brstatus_ds = []
        brcounts = {}
        def keep_count(statuses, builderName):
            brcounts[builderName] = len(statuses)
        for builderName in builders:
            builder_status = status.getBuilder(builderName)
            d = builder_status.getPendingBuildRequestStatuses()
            d.addCallback(keep_count, builderName)
            brstatus_ds.append(d)
        # wait for all the pending-count callbacks to fire
        wfd = defer.waitForDeferred(
            defer.gatherResults(brstatus_ds))
        yield wfd
        wfd.getResult()

        cxt['branches'] = branches
        bs = cxt['builders'] = []

        building = 0
        online = 0
        base_builders_url = path_to_root(req) + "builders/"
        for bn in builders:
            bld = { 'link': base_builders_url + urllib.quote(bn, safe=''),
                    'name': bn }
            bs.append(bld)

            builder = status.getBuilder(bn)
            # only the most recent finished build on the selected branches
            builds = list(builder.generateFinishedBuilds(map_branches(branches),
                                                         num_builds=1))
            if builds:
                b = builds[0]
                bld['build_url'] = (bld['link'] + "/builds/%d" % b.getNumber())
                try:
                    label = b.getProperty("got_revision")
                except KeyError:
                    label = None
                # fall back to the build number for missing/overlong labels
                if not label or len(str(label)) > 20:
                    label = "#%d" % b.getNumber()
                bld['build_label'] = label
                bld['build_text'] = " ".join(b.getText())
                bld['build_css_class'] = build_get_class(b)

            current_box = ICurrentBox(builder).getBox(status, brcounts)
            bld['current_box'] = current_box.td()

            builder_status = builder.getState()[0]
            if builder_status == "building":
                building += 1
                online += 1
            elif builder_status != "offline":
                online += 1

        cxt['authz'] = self.getAuthz(req)
        cxt['num_building'] = building
        cxt['num_online'] = online

        template = req.site.buildbot_service.templates.get_template("builders.html")
        # final yield is the rendered page (deferredGenerator return value)
        yield template.render(**cxt)

    def getChild(self, path, req):
        """Route /builders/<name>, /builders/_all and /builders/_selected."""
        s = self.getStatus(req)
        if path in s.getBuilderNames():
            builder_status = s.getBuilder(path)
            return StatusResourceBuilder(builder_status)
        if path == "_all":
            return StatusResourceAllBuilders(self.getStatus(req))
        if path == "_selected":
            return StatusResourceSelectedBuilders(self.getStatus(req))

        return HtmlResource.getChild(self, path, req)
|
UTF-8
|
Python
| false | false | 2,014 |
12,025,908,476,545 |
1abbb5a0800add59c5bf0217da092d2a0eb84ee5
|
365d01ec96911e2c99fa2a9ca6276f7bb3d6b3a2
|
/core/lib/FileManifest.py
|
aeaf454844f988a98633fa9c0a25f3bd2c56214a
|
[] |
no_license
|
psbanka/bombardier
|
https://github.com/psbanka/bombardier
|
a2a364830766d8a058d93dd4b531522ff52b4c85
|
bb528eed464a63e0f6772fa27a9d472ef3a407aa
|
refs/heads/master
| 2021-01-13T14:27:57.478930 | 2013-03-11T03:39:18 | 2013-03-11T03:39:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""FileManifest.py: This is a class for automatically creating and
verifying file manifests for installed code."""
# BSD License
# Copyright (c) 2009, Shawn Sherwood
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the GE Security nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
from mini_utility import md5_sum, make_path, yaml_load, yaml_dump
class FileManifest:
    """Create and verify file manifests for installed code.

    A manifest maps each configured subdirectory of ``root_dir`` to a
    dictionary of {relative file path: md5 checksum or ''}.  Files whose
    extension appears in ``md5_extensions`` are checksummed; all other
    files are merely recorded as present (empty checksum string).
    """

    def __init__(self, root_dir, sub_dirs, manifest_path,
                 md5_extensions=None):
        """root_dir: top of the installed tree (backslashes normalized)
        sub_dirs: names of subdirectories of root_dir to include
        manifest_path: where the yaml manifest is read/written
        md5_extensions: list of file extensions that are md5summed as
        part of the file manifest
        """
        # fix: identity comparison for None (was '== None')
        if md5_extensions is None:
            md5_extensions = []
        self.md5_extensions = md5_extensions
        self.root_dir = root_dir.replace("\\", "/")
        self.manifest_dictionary = {}
        self.sub_dirs = sub_dirs
        self.manifest_path = manifest_path

    def create_manifest(self):
        """Loop through subdirectories, collecting manifest data."""
        for inode in os.listdir(self.root_dir):
            if inode in self.sub_dirs:
                temp_dictionary = {}
                full_path = make_path(self.root_dir, inode)
                if os.path.isdir(full_path):
                    self.manifest_dictionary[inode] = \
                        self.create_path_dictionary(full_path, temp_dictionary)

    def write_manifest_file(self):
        """Write out the yaml for the manifest to the expected path."""
        dump_string = yaml_dump(self.manifest_dictionary)
        # fix: context manager guarantees the handle is closed on error
        with open(self.manifest_path, "w") as handle:
            handle.write(dump_string)

    def create_path_dictionary(self, path, work_dict):
        """Recursively gather {relative path: checksum-or-''} into
        work_dict and return it."""
        for inode in os.listdir(path):
            full_path = make_path(path, inode)
            relative_path = self.get_relative_path(full_path)
            if os.path.isdir(full_path):
                self.create_path_dictionary(full_path, work_dict)
            elif os.path.isfile(full_path):
                if inode.split('.')[-1].lower() in self.md5_extensions:
                    # fix: close the data file even if md5_sum raises
                    with open(full_path, 'rb') as handle:
                        work_dict[relative_path] = md5_sum(handle.read())
                else:
                    work_dict[relative_path] = ''
        return work_dict

    def get_relative_path(self, full_path):
        """Return full_path relative to root_dir with the first path
        component (the subdirectory name) stripped off."""
        path_from_subdir = full_path.split(self.root_dir + '/')[-1]
        rel_path = '/'.join(path_from_subdir.split('/')[1:])
        return rel_path

    def load_manifest(self):
        """Initialize manifest_dictionary from the existing manifest_path."""
        # fix: close the manifest file (the original leaked the handle)
        with open(self.manifest_path, 'r') as handle:
            load_string = handle.read()
        self.manifest_dictionary = yaml_load(load_string)

    def verify_manifest(self, mapping_dict):
        """Loop through keys in manifest file, checking for files and
        md5sums as necessary.  The mapping dictionary maps directories
        from the manifest file to directories in the file system."""
        tuple_check_list = []
        for subdir in self.manifest_dictionary.keys():
            for inode in self.manifest_dictionary[subdir]:
                new_tuple = (make_path(mapping_dict[subdir], inode),
                             self.manifest_dictionary[subdir][inode])
                tuple_check_list.append(new_tuple)
        error_list = self.verify_file_md5_tuples(tuple_check_list)
        return error_list

    @classmethod
    def verify_file_md5_tuples(cls, file_md5_tuple_list):
        """Verify that a list of (filepath, md5) pairs match the disk.

        Returns a list of (filepath, error description) tuples; empty
        when everything checks out.  An empty md5 string means 'file
        must exist' with no checksum comparison.
        """
        error_list = []
        for filepath, md5sum in file_md5_tuple_list:
            # directory part including the trailing slash ('' if no slash,
            # which isdir() correctly reports as missing)
            lastslash = filepath.rfind('/') + 1
            base = filepath[0:lastslash]
            if not os.path.isdir(base):
                error_list.append((filepath, "missing directory"))
            elif not os.path.isfile(filepath):
                error_list.append((filepath, "missing file"))
            elif md5sum != '':
                # fix: close the file handle after reading
                with open(filepath, 'rb') as handle:
                    computed = md5_sum(handle.read())
                if md5sum != computed:
                    err_string = "invalid checksum: Actual: %s Expected %s" \
                                 % (computed, md5sum)
                    error_list.append((filepath, err_string))
        return error_list
|
UTF-8
|
Python
| false | false | 2,013 |
206,158,477,658 |
0715f184bdee63871833b2e5d6847186a992a0a9
|
570c5d41ab8d20b66870578c48a027d72c9a1b32
|
/server.py
|
3a10ed7932b88454a957ad44736102a68dedc32d
|
[] |
no_license
|
krammandrea/Mandelbrot
|
https://github.com/krammandrea/Mandelbrot
|
d218195ef40b29bb29e962ec28b138467957b5ba
|
7d802a7263d6407ca01b77aab0a6735dc56087fa
|
refs/heads/master
| 2021-01-02T08:19:36.048320 | 2012-10-09T23:57:53 | 2012-10-09T23:57:53 | 2,452,464 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import string,re,math,urlparse
import BaseHTTPServer
import mandelbrot,coloralg,imageAdministrator
HOST_NAME = '' # empty because using http://localhost
PORT_NUMBER = 8080
#TODO rename file, restructre if/else part and add comments what the user actions are
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """
    Interprets the user clicks on the website and relates the parameter to the
    Mandelbrot calculating class
    """
    # zoom factor applied when the user clicks directly into the image
    ZOOM_ON_CLICK = 2.0
    OFFSETFACTOR = 0.20  # image section moves by 20%
    # zoom factor for the explicit zoom in/out buttons
    ZOOMRELATIVE = 2.0

    def __init__(self, request, client_adress, server):
        # the server object carries the shared state for all requests;
        # grab references before the base class starts handling
        self.imageAdministrator = server.imageAdministrator
        self.colorAlg = server.colorAlg
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request, client_adress, server)
#TODO hash table for url path instead of if elif
    #TODO hash table for url path instead of if elif
    def do_GET(self):
        """Dispatch a GET request by substring-matching the URL path:
        serve static files, or apply the requested view change
        (color/iteration/size/section/zoom/offset), recalculate the
        fractal and re-serve the main page."""
        query = urlparse.parse_qs(urlparse.urlparse(self.path).query)
        #extract the requested url path and strip the first "/" for later use with open()
        url_path = string.lstrip(urlparse.urlparse(self.path).path, "/")
        if "index.html" in url_path:
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif self.path.endswith("/"):
            # bare root URL: same as index.html
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        #TODO only /images or /jscolor
        elif ".png" in url_path or ".gif" in url_path:
            self.get_image(url_path)
        elif "style" in url_path:
            self.get_css(url_path)
        elif ".js" in url_path:
            self.get_javascript(url_path)
        elif "change_color" in url_path:
            if(self.imageAdministrator.isColorInputValid(query['col'])):
                self.imageAdministrator.change_colorscheme(query['col'])
                self.colorAlg.initcolorscheme(query['col'][1:len(query['col'])])
                mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            else:
                pass #TODO put useralert on mainpage
            self.get_main_page()
        elif "change_iteration" in url_path:
            iterationString = query["iter"][0]
            if self.imageAdministrator.isIterationInputValid(iterationString):
                self.imageAdministrator.change_maxiteration(int(iterationString))
                mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            else:
                pass #TODO put useralert on mainpage
            self.get_main_page()
        elif "change_size" in url_path:
            if (self.imageAdministrator.isSizeInputValid(query['pxwidth'][0]) and
                    self.imageAdministrator.isSizeInputValid(query['pxheight'][0])):
                self.imageAdministrator.change_imagesize(int(query['pxwidth'][0]), int(query['pxheight'][0]))
                mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            else:
                pass #TODO put useralert on mainpage
            self.get_main_page()
        elif "section" in url_path:
            # user drew an explicit rectangle to view
            new_borderlines = self.get_borderlines(query)
            self.imageAdministrator.change_section(new_borderlines)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'zoom_offset' in url_path:
            """
            when clicking into the image the new image will be calculated centered
            around the clicked point
            """
            new_x, new_y = self.get_new_coordinate(query)
            self.imageAdministrator.change_offset_and_zoom(new_x, new_y, self.ZOOM_ON_CLICK)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
            """
            when clicking on arrow buttons the new image section will move to the
            corresponding direction
            """
        elif 'offset_right' in url_path:
            self.imageAdministrator.change_offset(self.OFFSETFACTOR, 0)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'offset_left' in url_path:
            self.imageAdministrator.change_offset(-self.OFFSETFACTOR, 0)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'offset_up' in url_path:
            self.imageAdministrator.change_offset(0, -self.OFFSETFACTOR)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'offset_down' in url_path:
            self.imageAdministrator.change_offset(0, self.OFFSETFACTOR)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'zoom_in' in url_path:
            self.imageAdministrator.change_zoom(self.ZOOMRELATIVE)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'zoom_out' in url_path:
            self.imageAdministrator.change_zoom(1/self.ZOOMRELATIVE)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif 'zoom' in url_path:
            # NOTE(review): unreachable in practice for zoom_in/zoom_out URLs,
            # which match the more specific branches above
            self.imageAdministrator.change_zoom(self.ZOOMRELATIVE)
            mandelbrot.calculate_mandelbrot(*self.imageAdministrator.get_parameters())
            self.get_main_page()
        elif "save" in url_path:
            self.download_fractal_param_dat()
        else:
            self.send_response(404, "not in alternative list")
            #TODO self.send_header("Content-type","application/x-download")
#TODO self.send_header("Content-type","application/x-download")
def get_main_page(self):
    """Respond to a GET request with the main HTML page.

    Sends a 200 response and streams the contents of ``main.html``
    (looked up relative to the server's working directory) to the client.
    """
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.end_headers()
    # 'with' guarantees the handle is closed even if the socket write
    # fails (the original leaked the file handle on every request).
    with open("main.html", "r") as main_page_html:
        self.wfile.write(main_page_html.read())
    # TODO: respond with an error page when main.html cannot be opened
def get_css(self, csspath):
    """Serve the stylesheet at *csspath* as text/css.

    Any path that does not end in ``.css`` is answered with a 404
    (and a diagnostic is printed to the server console).
    """
    if not csspath.endswith(".css"):
        self.send_response(404)
        print("no .css file found")
        return
    self.send_response(200)
    self.send_header("Content-type", "text/css")
    self.end_headers()
    handle = open(csspath, "rb")
    self.wfile.write(handle.read())
    handle.close()
def get_javascript(self, jscolorpath):
    """Serve the JavaScript file at *jscolorpath* as text/javascript.

    Paths without a ``.js`` extension get a 404.
    """
    if not jscolorpath.endswith(".js"):
        self.send_response(404)
        return
    self.send_response(200)
    self.send_header("Content-type", "text/javascript")
    self.end_headers()
    handle = open(jscolorpath, "rb")
    self.wfile.write(handle.read())
    handle.close()
def get_borderlines(self, querydict):
    """Extract the four fractal-section borders from the parsed query.

    Reads the first value of the ``xs``, ``xe``, ``ys`` and ``ye``
    parameters (in that order) and returns them as a list of four
    floats: [x_start, x_end, y_start, y_end].
    """
    return [float(querydict[key][0]) for key in ("xs", "xe", "ys", "ye")]
# TODO: send 404 only when the image name itself is unknown
def get_image(self, imagepath):
    """Serve the image file at *imagepath*.

    Supports ``.png`` and ``.gif``; any other extension gets a 404.
    """
    # One extension -> MIME table instead of two copy-pasted branches
    # (the original duplicated the whole send/open/write sequence).
    for extension, mime in ((".png", "image/png"), (".gif", "image/gif")):
        if imagepath.endswith(extension):
            self.send_response(200)
            self.send_header("Content-type", mime)
            self.end_headers()
            # 'with' closes the handle even if the socket write fails.
            with open(imagepath, "rb") as image_file:
                self.wfile.write(image_file.read())
            return
    self.send_response(404)
def download_fractal_param_dat(self):
    """Send the current fractal parameters as a downloadable file.

    Writes repr() of the image administrator's parameter tuple with
    headers that trigger a browser download dialog.
    """
    self.send_response(200)
    self.send_header("Content-type", "application/x-download")
    # Fixed: the header value was misspelled "attachement", which
    # browsers do not recognise, so no download prompt appeared.
    self.send_header("Content-disposition", "attachment; filename='filenametest'")
    self.end_headers()
    fractal_para = self.imageAdministrator.get_parameters()
    self.wfile.write(repr(fractal_para))
def get_new_coordinate(self, querydict):
    """Extract the clicked pixel coordinate from the query parameters.

    Expects ``zoom_offset.x`` and ``zoom_offset.y`` to hold decimal
    strings and returns them as an ``(x, y)`` int tuple.  On malformed
    input a 400 response is sent and the function implicitly returns
    None -- NOTE(review): callers that tuple-unpack the result will
    then raise; confirm this path is acceptable.
    """
    digits = re.compile(r"([0-9]+)")
    x_raw = querydict["zoom_offset.x"][0]
    y_raw = querydict["zoom_offset.y"][0]
    if digits.match(x_raw) is None or digits.match(y_raw) is None:
        self.send_response(400, "offset out of range")
        # TODO: what happens after a 400?
    else:
        return int(x_raw), int(y_raw)
if __name__ == '__main__':
    # Stand the HTTP server up on the module-level HOST_NAME/PORT_NUMBER.
    httpd = BaseHTTPServer.HTTPServer((HOST_NAME, PORT_NUMBER), MyHandler)
    # Piggyback the image administrator onto the server instance instead
    # of using a global; idea: this could be a singleton pattern.
    httpd.colorAlg = coloralg.ColorAlg()
    httpd.imageAdministrator = imageAdministrator.ImageAdministrator(httpd.colorAlg)
    try:
        # Serve until interrupted from the keyboard (Ctrl-C).
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
|
UTF-8
|
Python
| false | false | 2,012 |
12,970,801,260,964 |
bcdd9c229ab8703e886e555974c9414138751f0a
|
bf80e294ee2a8c55124de43e88d307f98f4a0fe3
|
/python/problem26.py
|
55b83122e19f52392817377e421f00d007914b6c
|
[
"GPL-3.0-or-later",
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-1.0-or-later",
"LGPL-2.1-or-later",
"GPL-3.0-only"
] |
non_permissive
|
Rentier/project-euler
|
https://github.com/Rentier/project-euler
|
59ee719432de79a50b9bc2806e68c7d5f97b08ec
|
0ce35325d62dd1741f403d36e8e591d28f5b4b34
|
refs/heads/master
| 2021-01-13T01:55:59.116270 | 2013-10-11T16:39:58 | 2013-10-11T16:39:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from decimal import *
A = 42
getcontext().prec = A
def max_pattern(x):
    """Return the longest prefix of *x* that immediately repeats.

    Scans candidate period lengths i and keeps the largest i for which
    ``x[0:i] == x[i:2*i]``, i.e. the prefix occurs twice back to back.
    Returns the empty string when no prefix repeats.
    """
    best = 0
    # Fixed off-by-one: range(1, len(x)//2) never tested the maximal
    # period len(x)//2, so e.g. "abcabc" returned "" instead of "abc".
    for i in range(1, len(x) // 2 + 1):
        if x[0:i] == x[i:2 * i]:
            best = i
    return x[0:best]
# Print the repeating block of the decimal expansion of 1/i for i = 1..9.
for i in range(1, 10):
    # Digits after "0." of 1/i at A digits of Decimal precision,
    # right-padded with zeros to exactly A characters.
    s = str(Decimal(1)/Decimal(i))[2:]
    s += "0"*(A-len(s))
    print(max_pattern(s))
|
UTF-8
|
Python
| false | false | 2,013 |
12,025,908,456,031 |
326a54b2554c5fa53535c5e85331d3f3a3498d08
|
c0642a72c5e936a5bc6e2333d183d638f8fbacfb
|
/CD_bundle/src/evolveIFS/Crossover.py
|
a2a50112c2da40f2be780b02e717233247474b0e
|
[] |
no_license
|
mirjamsk/DNAdrivenIFS
|
https://github.com/mirjamsk/DNAdrivenIFS
|
bc8b489b08b867997feea5fbb11f4165c7877f76
|
d6ea70ddf2a792fc10bc29c445bb54c244e19a70
|
refs/heads/master
| 2021-01-16T00:56:47.028511 | 2014-07-01T20:50:56 | 2014-07-01T20:50:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Individual import Individual
import random
class Crossover(object):
    """Genetic crossover operator for a population of Individuals.

    Produces two children from two parents using one-point crossover on
    the similitude lists and two-point crossover on the index lists.
    """
    @staticmethod
    def crossover( p, indexP1, indexP2):
        """Cross the individuals at positions *indexP1* and *indexP2* of
        population *p*; return a ``(child1, child2)`` tuple."""
        parent1 = p.getIndividual(indexP1)
        parent2 = p.getIndividual(indexP2)
        child1 = Individual()
        child2 = Individual()
        # One-point crossover; cut point drawn from [0, 8).
        crossPoint1 = random.randrange(0,8)
        ##similitudes one-point crossover
        tempList = parent1.getAllSimilitudes()[:crossPoint1]
        tempList.extend( parent2.getAllSimilitudes()[crossPoint1:] )
        child1.setAllSimilitudes(tempList)
        # NOTE(review): child1 is built from getAllSimilitudes() only,
        # while child2 mixes getAllSimilitudes() with
        # getAllSimilitudesCopy() -- confirm the asymmetry is intended
        # (possible unintended sharing of similitude objects).
        tempList = parent2.getAllSimilitudes()[:crossPoint1]
        tempList.extend( parent1.getAllSimilitudesCopy()[crossPoint1:] )
        child2.setAllSimilitudes(tempList)
        ##indexList 2 point crossover
        # Two cut points drawn from [0, 64), ordered so point1 <= point2.
        crossPoint1 = random.randrange(0,64)
        crossPoint2 = random.randrange(0,64)
        if crossPoint1 > crossPoint2:
            crossPoint1, crossPoint2 = crossPoint2, crossPoint1
        # child1: outer segments from parent1, middle from parent2.
        tempList = parent1.getIndexes()[:crossPoint1]
        tempList.extend( parent2.getIndexes()[crossPoint1:crossPoint2] )
        tempList.extend( parent1.getIndexes()[crossPoint2:] )
        child1.setAllIndexes(tempList)
        # child2: the mirror image of child1's segments.
        tempList = parent2.getIndexes()[:crossPoint1]
        tempList.extend( parent1.getIndexes()[crossPoint1:crossPoint2] )
        tempList.extend( parent2.getIndexes()[crossPoint2:] )
        child2.setAllIndexes(tempList)
        return (child1, child2)
|
UTF-8
|
Python
| false | false | 2,014 |
8,607,114,478,168 |
0836888427e03d490911e24af5ac2c8350448cde
|
702d1f57a2091a3ee23761cfdb7b066ae02b7230
|
/git.py
|
d2e230e574a46b255d2d824f872d7d5f9b0a9d73
|
[
"MIT"
] |
permissive
|
krismolendyke/GitCommand
|
https://github.com/krismolendyke/GitCommand
|
fd11030c55b86fa6eae8938d1141daf502b163b5
|
b7d54a38bd36e19cc7e6fbb7ea97676f23095d8e
|
refs/heads/master
| 2016-09-01T23:03:26.598280 | 2012-03-14T16:41:22 | 2012-03-14T16:41:22 | 2,328,710 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sublime, sublime_plugin
import os
import functools
class GitCommand(sublime_plugin.WindowCommand):
    """
    GitCommand is a WindowCommand for running simple git commands from within
    Sublime Text 2. Instead of using the Default exec.py implementation which
    outputs to the Build Results window, it creates a new scratch file in its
    own view and displays the results there. It will attempt to set a proper
    language syntax file when it makes sense too, as well.
    """
    # Menu of git invocations offered in the quick panel.  Each entry:
    # description (shown to the user), arguments (passed to git), and an
    # optional syntax_file for the result view ("based_on_extension"
    # means: look the syntax up in `syntaxes` by the active file's ext).
    commands = [
        {
            "description": "Log with patch",
            "arguments": ["log", "--stat", "--patch", "--max-count=2"],
            "syntax_file": "Packages/Diff/Diff.tmLanguage"
        },
        {
            "description": "Diff",
            "arguments": ["diff", "--patch-with-stat"],
            "syntax_file": "Packages/Diff/Diff.tmLanguage"
        },
        {
            "description": "Blame",
            "arguments": ["blame"],
            "syntax_file": "based_on_extension"
        },
        {
            "description": "Log",
            "arguments": ["log", "--stat"]
        }
    ]
    # File extension to language syntax file mapping.
    syntaxes = {
        ".js": "Packages/JavaScript/JavaScript.tmLanguage",
        ".py": "Packages/Python/Python.tmLanguage"
    }
    # NOTE: defined with `caller` instead of `self`; accessed as
    # self.quick_panel_callback it still binds the instance as the
    # first argument, so it behaves like a normal method.
    def quick_panel_callback(caller, index):
        """Run the git command the user picked in the quick panel.

        index is -1 when the panel was dismissed without a selection.
        """
        if index == -1: return
        self = caller
        # Active file name/extension and its directory (the working dir
        # for the git invocation).
        active_file = os.path.split(self.window.active_view().file_name())[1]
        active_file_ext = os.path.splitext(active_file)[1]
        working_dir = os.path.dirname(self.window.active_view().file_name())
        selected_command = GitCommand.commands[index]
        cmd = ["git"]
        cmd.extend(selected_command["arguments"])
        cmd.append(active_file)
        args = {
            "cmd": cmd,
            "working_dir": working_dir
        }
        try:
            # Resolve the syntax file; KeyError (no "syntax_file" key or
            # unknown extension) simply means: no syntax highlighting.
            if "based_on_extension" == selected_command["syntax_file"]:
                args["syntax_file"] = GitCommand.syntaxes[active_file_ext]
            else:
                args["syntax_file"] = selected_command["syntax_file"]
        except:
            pass
        self.window.run_command("exec_to_view", args)
    def run(self):
        """Show the quick panel with the available git commands."""
        quick_panel_items = []
        for command in GitCommand.commands:
            # Two-line panel entry: description plus the literal command.
            quick_panel_items.append(
                [
                    command["description"],
                    "git %s" % " ".join(command["arguments"])
                ]
            )
        self.window.show_quick_panel(quick_panel_items,
                                     self.quick_panel_callback)
class ExecToViewCommand(sublime_plugin.WindowCommand,
                        __import__("exec").ProcessListener):
    """
    Executes a command and prints its output to a new scratch file in its own
    view within Sublime Text 2. The majority of this code was swiped from
    exec.py in the Default plugin directory.
    """
    def run(self, cmd = [], file_regex = "", line_regex = "",
            working_dir = "", encoding = "utf-8", env = {}, quiet = True,
            kill = False, syntax_file = "",
            # Catches "path" and "shell"
            **kwargs):
        """Spawn *cmd* asynchronously and stream its output into a new
        scratch view.  kill=True aborts a previously started process."""
        if kill:
            if self.proc:
                self.proc.kill()
                self.proc = None
                self.append_data(None, "[Cancelled]")
            return
        # Fresh scratch view named after the command being run.
        self.output_view = self.window.new_file()
        self.output_view.set_name(" ".join(cmd))
        if len(syntax_file) > 0: self.output_view.set_syntax_file(syntax_file)
        self.output_view.set_scratch(True)
        # Default the to the current files directory if no working directory was given
        if (working_dir == "" and self.window.active_view()
                        and self.window.active_view().file_name() != ""):
            working_dir = os.path.dirname(self.window.active_view().file_name())
        # Result-navigation settings so next_result etc. work in the view.
        self.output_view.settings().set("result_file_regex", file_regex)
        self.output_view.settings().set("result_line_regex", line_regex)
        self.output_view.settings().set("result_base_dir", working_dir)
        # Call get_output_panel a second time after assigning the above
        # settings, so that it'll be picked up as a result buffer
        # self.window.get_output_panel("exec")
        self.encoding = encoding
        self.quiet = quiet
        self.proc = None
        if not self.quiet:
            print "Running " + " ".join(cmd)
        # self.window.run_command("show_panel", {"panel": "output.exec"})
        self.window.focus_view(self.output_view)
        # Per-project build environment overrides the passed-in env.
        merged_env = env.copy()
        if self.window.active_view():
            user_env = self.window.active_view().settings().get('build_env')
            if user_env:
                merged_env.update(user_env)
        # Change to the working dir, rather than spawning the process with it,
        # so that emitted working dir relative path names make sense
        if working_dir != "":
            os.chdir(working_dir)
        err_type = OSError
        if os.name == "nt":
            err_type = WindowsError
        try:
            # Forward kwargs to AsyncProcess
            self.proc = __import__("exec").AsyncProcess(cmd, merged_env, self, **kwargs)
        except err_type as e:
            self.append_data(None, str(e) + "\n")
            if not self.quiet:
                self.append_data(None, "[Finished]")
    def is_enabled(self, kill = False):
        """A kill invocation is only enabled while a process is running."""
        if kill:
            return hasattr(self, 'proc') and self.proc and self.proc.poll()
        else:
            return True
    def append_data(self, proc, data):
        """Decode *data* and append it to the output view (UI thread)."""
        if proc != self.proc:
            # a second call to exec has been made before the first one
            # finished, ignore it instead of intermingling the output.
            if proc:
                proc.kill()
            return
        # NOTE(review): local name `str` shadows the builtin for the
        # remainder of this method.
        try:
            str = data.decode(self.encoding)
        except:
            str = "[Decode error - output not " + self.encoding + "]"
            proc = None
        # Normalize newlines, Sublime Text always uses a single \n separator
        # in memory.
        str = str.replace('\r\n', '\n').replace('\r', '\n')
        selection_was_at_end = (len(self.output_view.sel()) == 1
            and self.output_view.sel()[0]
                == sublime.Region(self.output_view.size()))
        # Temporarily lift read-only to append, then restore it.
        self.output_view.set_read_only(False)
        edit = self.output_view.begin_edit()
        self.output_view.insert(edit, self.output_view.size(), str)
        # if selection_was_at_end:
        #     self.output_view.show(self.output_view.size())
        self.output_view.show(0)
        self.output_view.end_edit(edit)
        self.output_view.set_read_only(True)
    def finish(self, proc):
        """Finalize the view once the process exits (UI thread)."""
        if not self.quiet:
            self.append_data(proc, "[Finished]")
        if proc != self.proc:
            return
        # Set the selection to the start, so that next_result will work as expected
        edit = self.output_view.begin_edit()
        self.output_view.sel().clear()
        self.output_view.sel().add(sublime.Region(0))
        self.output_view.end_edit(edit)
    def on_data(self, proc, data):
        # Marshal process output onto the main (UI) thread.
        sublime.set_timeout(functools.partial(self.append_data, proc, data), 0)
    def on_finished(self, proc):
        # Marshal process completion onto the main (UI) thread.
        sublime.set_timeout(functools.partial(self.finish, proc), 0)
|
UTF-8
|
Python
| false | false | 2,012 |
8,804,682,984,607 |
6a611fd0a0567c0ce738bb42275f49c7dc3faee4
|
1c689210e132449bdb563d039029b525a0adb79e
|
/rename.py
|
8ad4881cd98d321204c0eb3b7a2b5813c0c57102
|
[] |
no_license
|
dsoto/python
|
https://github.com/dsoto/python
|
58724646948aa3136d31cd5af199589cfe39f082
|
20c81f6fb176c9ed819671798f2a2f3458db1fef
|
HEAD
| 2016-09-06T17:40:33.003099 | 2010-07-01T04:13:16 | 2010-07-01T04:13:16 | 155,868 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
'''
meant to rename a list of files based on strings in the filename
daniel soto
Tue Dec 11 02:58:17 PST 2007
'''
'''
playing with inline editing of github repo
'''
import re
import os
import glob
def renameFile ( searchString, replacementString ):
    """Rename every file in the current directory whose name contains
    *searchString*, replacing that substring with *replacementString*.

    NOTE(review): searchString is used both as a glob fragment and as a
    regular expression; regex metacharacters would behave differently in
    the two roles -- confirm only literal strings are ever passed.
    """
    regExp = re.compile( searchString )
    # get list of files containing search string
    globString = '*' + searchString + '*'
    fileNameList = glob.glob(globString)
    # iterate over list renaming files
    for fname in fileNameList:
        foutName = regExp.sub( replacementString, fname )
        os.rename( fname, foutName )
        print fname,
        print '\tchanged to\t',
        print foutName
# Rename files containing 'r1' -> 'S1', then files containing 'r2' -> 'S2'.
searchString = 'r1'
replacementString = 'S1'
renameFile( searchString, replacementString )
searchString = 'r2'
replacementString = 'S2'
renameFile( searchString, replacementString )
|
UTF-8
|
Python
| false | false | 2,010 |
7,524,782,702,657 |
09a14a02f58e958537da97c6383ff0dd9748014d
|
c6aebce18d1eea989e12d064739f0d5bceaedced
|
/django/db/models/sql/datastructures.py
|
f47e25208c2d174bb6359d79f3c342435967c6d7
|
[
"BSD-3-Clause"
] |
permissive
|
kuskumar/django
|
https://github.com/kuskumar/django
|
e48afa08be6921cbee02fb0521d3808021575476
|
4b25ebf112d8bf4fcee6ee8339913bffdc81035d
|
refs/heads/master
| 2021-01-18T10:00:33.439335 | 2014-06-09T16:00:12 | 2014-06-09T16:00:12 | 20,652,933 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Useful auxiliary data structures for query construction. Not useful outside
the SQL domain.
"""
class Col(object):
    """SQL reference to a single table column, rendered as alias.column.

    ``target`` supplies the column name; ``source`` is the field that
    determines the expression's output type (used for lookups and
    transforms).
    """

    def __init__(self, alias, target, source):
        self.alias = alias
        self.target = target
        self.source = source

    def as_sql(self, qn, connection):
        # Quote both the table alias and the column name; no parameters.
        sql = "%s.%s" % (qn(self.alias), qn(self.target.column))
        return sql, []

    @property
    def output_type(self):
        return self.source

    def relabeled_clone(self, relabels):
        # Swap the alias if it appears in the relabel map.
        new_alias = relabels.get(self.alias, self.alias)
        return self.__class__(new_alias, self.target, self.source)

    def get_group_by_cols(self):
        return [(self.alias, self.target.column)]

    def get_lookup(self, name):
        return self.output_type.get_lookup(name)

    def get_transform(self, name):
        return self.output_type.get_transform(name)

    def prepare(self):
        return self
class EmptyResultSet(Exception):
    # Raised internally when a query can be proven to match no rows,
    # so no SQL needs to be executed at all.
    pass
class MultiJoin(Exception):
    """
    Used by join construction code to indicate the point at which a
    multi-valued join was attempted (if the caller wants to treat that
    exceptionally).
    """
    def __init__(self, names_pos, path_with_names):
        # Position in the name chain at which the multi-valued join occurred.
        self.level = names_pos
        # The path travelled, this includes the path to the multijoin.
        self.names_with_path = path_with_names
class Empty(object):
    # Bare placeholder class; presumably used elsewhere to create blank
    # instances without running an __init__ -- confirm at call sites.
    pass
class Date(object):
    """
    Add a date selection column.

    ``col`` is either an (alias, column) pair or a raw SQL string;
    ``lookup_type`` is the truncation granularity (e.g. 'year', 'month').
    """
    def __init__(self, col, lookup_type):
        self.col = col
        self.lookup_type = lookup_type

    def relabeled_clone(self, change_map):
        """Return a copy with the table alias remapped via *change_map*."""
        # Fixed: the clone must also carry lookup_type -- __init__
        # requires it, so the original one-argument call raised TypeError.
        return self.__class__(
            (change_map.get(self.col[0], self.col[0]), self.col[1]),
            self.lookup_type)

    def as_sql(self, qn, connection):
        """Render backend-specific date-truncation SQL; no parameters."""
        if isinstance(self.col, (list, tuple)):
            col = '%s.%s' % tuple(qn(c) for c in self.col)
        else:
            col = self.col
        return connection.ops.date_trunc_sql(self.lookup_type, col), []
class DateTime(object):
    """
    Add a datetime selection column.

    Like Date, but also carries ``tzname`` so the backend can truncate
    in the right time zone.
    """
    def __init__(self, col, lookup_type, tzname):
        self.col = col
        self.lookup_type = lookup_type
        self.tzname = tzname

    def relabeled_clone(self, change_map):
        """Return a copy with the table alias remapped via *change_map*."""
        # Fixed: propagate lookup_type and tzname -- __init__ requires
        # both, so the original one-argument call raised TypeError.
        return self.__class__(
            (change_map.get(self.col[0], self.col[0]), self.col[1]),
            self.lookup_type, self.tzname)

    def as_sql(self, qn, connection):
        """Render backend-specific datetime-truncation SQL.

        The backend returns the (sql, params) pair itself here.
        """
        if isinstance(self.col, (list, tuple)):
            col = '%s.%s' % tuple(qn(c) for c in self.col)
        else:
            col = self.col
        return connection.ops.datetime_trunc_sql(self.lookup_type, col, self.tzname)
|
UTF-8
|
Python
| false | false | 2,014 |
2,869,038,184,628 |
b1e9f9c49bd22aa9bb69dc9627ffa5fa961b1a78
|
893ccb350cc32a36ad719fd6178002425c3185cc
|
/celltone/cellmidi.py
|
b93437fd50667183689100f9ddffe388d2dfe16a
|
[] |
no_license
|
andreasjansson/Celltone
|
https://github.com/andreasjansson/Celltone
|
2d20c00d49e18125324fdaa5d42be5a18d6a968a
|
3e5aefe992baf5934c8d99ddd80a379a83c79936
|
refs/heads/master
| 2021-03-12T23:17:08.293790 | 2013-04-17T19:30:41 | 2013-04-17T19:30:41 | 3,988,862 | 11 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Celltone - Generative music composition using cellular automata
# Copyright (C) 2012 [email protected]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import math
import time
import threading
import celltone
try:
import midi
except ImportError:
celltone.notice('No midi module, midi writing will not work')
try:
import pypm
except ImportError:
celltone.notice('No pypm module, midi playback will not work')
class Handler(object):
    """Common base for MIDI output back ends (live playback, file writing).

    Tracks tempo (bpm), subdivision (steps per whole note) and elapsed
    song time, and drives an OutputThread over a list of note groups.
    Subclasses implement noteon()/noteoff().
    """

    def __init__(self, bpm, subdivision):
        self.bpm = float(bpm)
        self.thread = None
        self.subdivision = int(subdivision)
        self.time = 0  # elapsed time in seconds; advanced by after_noteon()

    def set_tempo(self, tempo):
        """Change the tempo in beats per minute."""
        self.bpm = float(tempo)

    def set_subdivision(self, subdivision):
        """Change the number of steps per whole note."""
        self.subdivision = int(subdivision)

    def play(self, midi_notes):
        """Start emitting *midi_notes* on a background thread."""
        self.thread = OutputThread(self, midi_notes)
        self.thread.start()
        return self.thread

    def stop(self):
        """Stop playback and return the note groups never emitted."""
        leftover_midi_notes = self.thread.midi_notes
        self.thread.midi_notes = []
        return leftover_midi_notes

    def noteon(self, midi_note):
        raise NotImplementedError()

    def noteoff(self, midi_note):
        raise NotImplementedError()

    def after_noteon(self):
        """Advance logical song time by one subdivision step."""
        self.time += (60.0 / self.bpm) * ((1.0 / self.subdivision) * 4)

    # TODO: make this a @decorator
    def check_midi_note(self, midi_note):
        """Return True when pitch, velocity and channel are all in range,
        warning (via celltone) and returning False on the first bad field."""
        field_checks = (
            (midi_note.pitch, 127, "Bad note number %d"),
            (midi_note.velocity, 127, "Bad velocity %d"),
            (midi_note.channel, 15, "Bad channel number %d"),
        )
        for value, upper, message in field_checks:
            if value < 0 or value > upper:
                celltone.warning(message % value)
                return False
        return True
class Player(Handler):
    """Handler that plays notes live through PortMidi (pypm)."""

    def __init__(self, bpm, subdivision):
        Handler.__init__(self, bpm, subdivision)
        # Open the system default MIDI output device.
        pypm.Initialize()
        dev = pypm.GetDefaultOutputDeviceID()
        self.midi_out = pypm.Output(dev)

    def noteon(self, midi_note):
        """Send a MIDI note-on (status 0x90 + channel) if the note is valid."""
        if self.check_midi_note(midi_note):
            self.midi_out.WriteShort(0x90 + midi_note.channel,
                                     midi_note.pitch, midi_note.velocity)

    def noteoff(self, midi_note):
        """Send a MIDI note-off (status 0x80 + channel, velocity 0)."""
        if self.check_midi_note(midi_note):
            # NOTE(review): this 'or' condition is always true once
            # check_midi_note has passed -- looks redundant.
            if midi_note.pitch >= 0 or midi_note.pitch <= 127:
                self.midi_out.WriteShort(0x80 + midi_note.channel,
                                         midi_note.pitch, 0)

    def after_noteon(self):
        # Advance logical time, then sleep one subdivision in real time.
        Handler.after_noteon(self)
        seconds = (60.0 / self.bpm) * ((1.0 / self.subdivision) * 4)
        time.sleep(seconds)
class Writer(Handler):
    """Handler that records note events into a standard MIDI file.

    Keeps an absolute tick counter and converts it to the delta ticks
    that MIDI events carry when each event is appended.
    """

    def __init__(self, filename, bpm, subdivision):
        Handler.__init__(self, bpm, subdivision)
        self.filename = filename
        self.track = []      # accumulated NoteOn/NoteOff events
        self.tick = 0        # absolute tick position
        self.prev_tick = 0   # tick at which the last event was emitted
        self.subres = 12     # ticks per subdivision step

    def noteon(self, note):
        """Append a NoteOn event at the current (delta) tick."""
        if self.check_midi_note(note):
            on = midi.NoteOnEvent(
                tick = self.delta_tick(), channel = note.channel,
                data = [note.pitch, note.velocity])
            self.track.append(on)

    def noteoff(self, note):
        """Append a NoteOff event at the current (delta) tick."""
        if self.check_midi_note(note):
            off = midi.NoteOffEvent(
                tick = self.delta_tick(), channel = note.channel,
                data = [note.pitch, note.velocity])
            self.track.append(off)

    def after_noteon(self):
        # Advance logical time and the absolute tick counter.
        Handler.after_noteon(self)
        self.tick += self.subres

    def delta_tick(self):
        """Return ticks elapsed since the last emitted event.

        Only advances prev_tick when the delta is non-zero, so several
        simultaneous events all get delta 0.
        """
        delta = self.tick - self.prev_tick
        if delta:
            self.prev_tick = self.tick
        return delta

    def make_meta_track(self):
        """Build the meta track carrying the tempo and an end-of-track."""
        tempo_event = midi.SetTempoEvent(tick = 0)
        tempo_event.set_bpm(self.bpm)
        eot = midi.EndOfTrackEvent()
        eot.tick = 0
        return [tempo_event, eot]

    def write(self):
        """Terminate the note track and write the two-track MIDI file."""
        eot = midi.EndOfTrackEvent()
        eot.tick = self.delta_tick()
        self.track.append(eot)
        meta_track = self.make_meta_track()
        # Resolution in ticks per quarter note, derived from subres.
        pattern = midi.Pattern(tracks = [meta_track, self.track],
                               resolution = math.ceil(self.subres * self.subdivision / 4.0))
        midi.write_midifile(self.filename, pattern)
class OutputThread(threading.Thread):
    """Background thread that feeds note groups to a Handler.

    Consumes self.midi_notes one group at a time: noteon for every note
    in the group, one timing step (after_noteon), then noteoff for every
    note.  Clearing midi_notes from outside stops the loop early.
    """

    def __init__(self, handler, midi_notes):
        self.handler = handler
        self.midi_notes = midi_notes
        threading.Thread.__init__(self)

    def run(self):
        while self.midi_notes:
            # Pop the first group; re-reading self.midi_notes each pass
            # lets Handler.stop() clear it concurrently.
            group, self.midi_notes = self.midi_notes[0], self.midi_notes[1:]
            for note in group:
                self.handler.noteon(note)
            self.handler.after_noteon()
            for note in group:
                self.handler.noteoff(note)
class MidiNote(object):
    """A single MIDI note: pitch, channel and velocity, coerced to int."""

    def __init__(self, pitch, channel, velocity):
        # Coerce everything so strings/floats from upstream parsing work.
        self.pitch, self.channel, self.velocity = (
            int(pitch), int(channel), int(velocity))
|
UTF-8
|
Python
| false | false | 2,013 |
18,511,309,068,955 |
f8244ed65be4d37e5bc0699b53cad0e88f006f9f
|
647a51cdbb1cff6576ab30a20e32f4909341ac06
|
/spec/sample/name.yml.spec
|
de2d517e6d0ca7f9ad2986354aad2296dd1160aa
|
[] |
no_license
|
cruelwen/ymlex
|
https://github.com/cruelwen/ymlex
|
4028364a3d3694ce051297bc7d4bb01e26e6aac8
|
d9923db6f7b019c136b69a19b9474481d46b80e9
|
refs/heads/master
| 2020-05-18T00:33:06.658407 | 2014-08-01T07:16:52 | 2014-08-01T07:16:52 | 19,692,450 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
---
name: name_v
a: a_v
b: a_v/b
c:
d: d_v
e: d_v/e
f: name_v_c_d/e
|
UTF-8
|
Python
| false | false | 2,014 |
7,138,235,655,516 |
8b9a6ed9a77aefcf5195db4f644565f5b4cc139c
|
5b51bf14323b0949eba8e726acbebf889b35ccd6
|
/getNewEntities_test.py
|
edcccc734974704e46372826f7672f4ef9a9513e
|
[] |
no_license
|
dpgailey/sourcereader
|
https://github.com/dpgailey/sourcereader
|
32432e21696e5cd826749c4987cd6a6d5730d237
|
e494937fbd415e0bfbede4c591a6a4af74ee412b
|
refs/heads/master
| 2021-05-26T18:10:13.649556 | 2011-12-07T19:03:58 | 2011-12-07T19:03:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Manual smoke test for getNewEntities(): disables remote logging, points
# the body-extractor service at a known host/port, connects to a local
# MySQL server and reports pass/fail on stdout.
import infoModule
infoModule.info.site['remoteLogging'] = False
import mysql_tools
import _mysql
from getNewEntities import *
# Body-extractor service used when fetching article text.
infoModule.info.source['body_extractor_host'] = 'angelina.celebrifi.com'
infoModule.info.source['body_extractor_port'] = '1348'
# NOTE(review): hard-coded root credentials -- acceptable for a local
# throwaway test only; confirm this never ships.
link = mysql_tools.mysqlConnect('localhost', 'root', '4rf5tg')
if link == False :
    # NOTE(review): 'sys' is not imported here -- it must come in via the
    # star import above; verify.
    print "no connection"
    sys.exit(0)
infoModule.info.site['database'] = 'peepbuzz'
infoModule.info.site['dblink'] = link
if(getNewEntities()):
    print("hoorah")
else:
    print("boo")
|
UTF-8
|
Python
| false | false | 2,011 |
15,384,572,887,241 |
dbc4db9c5ac3eb4b74086831e784100f3abe0b9f
|
1ca08cb7d16808c4dd73216688d737605f500ba7
|
/baymodels/import_dummy_data.py
|
8c1a4f95fe8867f0b9fdf9ef76bf8f2b388cb6f4
|
[
"BSD-3-Clause",
"LGPL-3.0-only",
"LGPL-2.0-or-later",
"MIT",
"GPL-1.0-or-later",
"LGPL-2.1-or-later"
] |
non_permissive
|
rchaber/publishbay
|
https://github.com/rchaber/publishbay
|
b5b6bf2d153f0ce939bf5c94d66a343be69f5ccb
|
1952483ed0952a92b6dd0bb1f4b2c81a1b3c4849
|
refs/heads/master
| 2021-01-20T23:18:13.997812 | 2013-09-19T18:58:12 | 2013-09-19T18:58:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# import data into ProDetails
from webapp2_extras.appengine.auth.models import User
from google.appengine.ext import ndb
from baymodels import models as bmodels
from boilerplate import models as models
import csv
import random
joblist = ['Publisher',
'Manager',
'Editor',
'Professional Reader',
'Designer',
'Translator',
'Proofreader']
lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla scelerisque posuere tortor auctor sagittis. Nulla facilisi. Aliquam eget mauris eu ante convallis dapibus id eu eros. Nullam massa mauris, egestas nec ornare sit amet, consequat vel est. Aenean non lorem eu neque aliquam euismod ac id nulla. Aenean sit amet feugiat massa. Ut in lacus sit amet ipsum rhoncus accumsan. Integer molestie, lacus in sodales ultricies, augue tortor posuere eros, a feugiat libero libero in dolor. Proin porta nunc non purus posuere et rutrum orci semper. Mauris viverra ultricies posuere. Vestibulum placerat eros purus. Etiam diam ipsum, tempus et gravida non, malesuada at ipsum. Nulla facilisi. Sed viverra nisi ut odio rutrum iaculis. Suspendisse potenti. Cras semper lacinia dignissim. Ut dui risus, laoreet et luctus ac, blandit non erat. Pellentesque scelerisque purus non mauris malesuada in ultrices purus blandit. Etiam et urna nec quam hendrerit pretium vel eget metus. Nulla rhoncus bibendum enim, vel sollicitudin nibh sodales in. Nunc id massa a nisi vulputate posuere eget a risus. Nunc quis erat nibh, et varius mi. Nullam congue dictum sollicitudin. Vivamus consectetur, ligula a venenatis vehicula, massa nunc suscipit enim, in auctor orci elit nec tellus. Phasellus eleifend, nibh sit amet aliquam feugiat, dui sem luctus orci, nec condimentum lectus est a est. Aenean hendrerit gravida gravida. Morbi placerat vulputate tortor, eu suscipit erat pretium elementum. Sed sed risus ipsum, nec blandit lacus. Phasellus vel ipsum tellus. Cras laoreet facilisis tellus, ut accumsan sem cursus at. Pellentesque semper sem quis quam sodales in vehicula nunc volutpat. Vivamus imperdiet pellentesque lectus, eget ullamcorper quam congue sit amet. Fusce mi eros, malesuada at sollicitudin ut, bibendum ac tellus. Pellentesque ac nisi est, ut placerat ligula. Aenean sit amet porta sem. Proin porta pretium metus, sit amet tincidunt enim malesuada quis. 
Cras tempor lectus vitae nulla placerat quis dictum ipsum suscipit. Aliquam sit amet aliquam purus. Integer ut elit leo, non auctor quam. Suspendisse in leo non magna posuere rutrum. Nam velit metus, pulvinar id malesuada in, viverra sed neque. Maecenas volutpat nibh at tellus eleifend porttitor. Nullam lobortis bibendum mi, non aliquam ligula venenatis eu. Etiam eleifend fermentum enim non pellentesque. Phasellus at sem id velit blandit malesuada. Nullam sagittis justo eu dolor sodales et malesuada massa pulvinar. Vestibulum vitae condimentum augue."
lorem_split = lorem.replace(' .', ' ').replace(', ', ' ').split(' ')
def bulkdelete(mdl, number):
    """Delete up to *number* entities of ndb model *mdl* in one batch."""
    qry = mdl.query()
    # keys_only avoids fetching entity bodies just to delete them.
    ndb.delete_multi(qry.fetch(number, keys_only=True))
    print '%s entities deleted from %s' % (str(number), str(mdl))
def import_users():
    """Create one User entity per row of the fake-data CSV.

    NOTE(review): unlike import_users_prodetails(), the header row is not
    skipped here -- confirm which behaviour is intended.  The CSV path is
    hard-coded to a developer machine.
    """
    reader = csv.reader(open('/Users/richardhaber/Projects/publishbay/baymodels/fakedata.csv', 'rb'), delimiter=',', quotechar='"')
    count = 0
    for row in reader:
        # Columns used: 1=first name, 2=last name, 7=country, 8=email,
        # 9=username (also used to build the 'own:' auth id).
        entity = models.User(
            name=row[1],
            last_name=row[2],
            username=row[9],
            email=row[8],
            country=row[7],
            activated=True,
            auth_ids=['own:'+row[9]]
        )
        entity.put()
        count += 1
    print '%s users imported' % count
def import_prodetails():
    """Attach a randomly generated ProDetails entity to up to 400 existing
    Users, pairing each user with the next CSV row for address/phone data.

    NOTE(review): assumes users and CSV rows are in matching order --
    verify this holds after import_users().
    """
    qry = models.User.query().fetch(400)
    reader = csv.reader(open('/Users/richardhaber/Projects/publishbay/baymodels/fakedata.csv', 'rb'), delimiter=',', quotechar='"')
    count = 0
    for i in qry:
        row = reader.next()
        a = bmodels.ProDetails()
        a.user = i.key
        # Randomized profile content sampled from the lorem word list.
        a.display_full_name = random.choice([True, False])
        a.title = ' '.join(random.sample(lorem_split, random.randint(3, 10)))
        a.overview = ' '.join(lorem_split[random.randint(0, 15): random.randint(20, 60)])
        a.english_level = random.randint(0, 5)
        a.jobs = random.sample(joblist, random.randint(1, 7))
        a.profile_visibility = random.choice(['everyone', 'pb_users_only', 'hidden'])
        # Contact data from CSV columns 3-6 and 10.
        a.address1 = row[3]
        a.city = row[4]
        a.state = row[5]
        a.zipcode = row[6]
        a.phone = row[10]
        a.put()
        count += 1
    print '%s prodetails imported' % count
def import_users_prodetails():
    """Create a User plus a linked randomized ProDetails entity for every
    CSV row (header row skipped).

    NOTE(review): `count` is accumulated but never reported, unlike the
    other importers -- possibly a missing final print.
    """
    reader = csv.reader(open('/Users/richardhaber/Projects/publishbay/baymodels/fakedata.csv', 'rb'), delimiter=',', quotechar='"')
    reader.next()  # stripping header
    count = 0
    for row in reader:
        entity = models.User(
            name=row[1],
            last_name=row[2],
            username=row[9],
            email=row[8],
            country=row[7],
            activated=True,
            auth_ids=['own:'+row[9]]
        )
        entity.put()
        # Link the profile to the freshly stored user's key.
        k = entity.key
        # k = models.User.query(models.User.username == entity.username).get().key
        a = bmodels.ProDetails()
        a.user = k
        a.display_full_name = random.choice([True, False])
        a.title = ' '.join(random.sample(lorem_split, random.randint(3, 10)))
        a.overview = ' '.join(lorem_split[random.randint(0, 15): random.randint(20, 60)])
        a.english_level = random.randint(0, 5)
        a.jobs = random.sample(joblist, random.randint(1, 7))
        a.profile_visibility = random.choice(['everyone', 'pb_users_only', 'hidden'])
        a.address1 = row[3]
        a.city = row[4]
        a.state = row[5]
        a.zipcode = row[6]
        a.phone = row[10]
        a.put()
        count += 1
|
UTF-8
|
Python
| false | false | 2,013 |
5,153,960,774,998 |
88ff7a6eec126b69a34b964b3ba305dccd4d1f89
|
809751f4c7e1c176ebb2e8f48d45ec9cf9eaf0c8
|
/test/utils.py
|
c30e081d6444c30e613c2e2f116847d8ffe140a4
|
[
"GPL-3.0-only",
"GPL-3.0-or-later"
] |
non_permissive
|
andreav/git4f
|
https://github.com/andreav/git4f
|
a91377543b42eedd4312e1af63645a3a25cc77c9
|
bb63d66b6162561f0ee914e3e00dbdcba276fedf
|
refs/heads/master
| 2021-01-01T17:42:06.729046 | 2012-08-08T10:38:43 | 2012-08-08T10:38:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/user/bin/python
# Copyright (C) 2012 Andrea Valle
#
# This file is part of git4f.
#
# git4f is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# git4f is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with git4f. If not, see <http://www.gnu.org/licenses/>.
import os, shutil, subprocess, sys
import shlex
import nose.tools as nt
DIR_TEST_ROOT = os.path.abspath(os.path.dirname(__file__))
DIR_SANDBOX = os.path.join(DIR_TEST_ROOT, 'sandbox')
DIR_REPO_BASE = os.path.join(DIR_SANDBOX,'repo')
DIR_REPO_BASE_DELETE = os.path.join(DIR_SANDBOX,'repo.delete')
DIR_REPO_CLONE_INTBR = os.path.join(DIR_SANDBOX,'clone.intbr')
DIR_REPO_CLONE_FTRBR = os.path.join(DIR_SANDBOX,'clone.ftrbr')
DIR_REPO_CLONE_FTRBR_INTEGRATE = os.path.join(DIR_SANDBOX,'clone.ftrbr.integrate')
DIR_REPO_CLONE_FTRBR_UPDATE = os.path.join(DIR_SANDBOX,'clone.ftrbr.update')
DIR_REPO_CLONE_FTRBR_PUSH = os.path.join(DIR_SANDBOX,'clone.ftrbr.push')
FILE_REPO_BASE = 'a.txt'
CFG_FTR_PREFIX = '4f.ftrbr-prefix'
CFG_FTR_PULL_MERGE_OPT = '4f.ftrbr-pull-merge-opt'
CFG_FTR_PUSH_MERGE_OPT = '4f.ftrbr-push-merge-opt'
# EXCEPTIONS ##################
class FailedCommand(Exception): pass
class FileNotExists(Exception): pass
class FileNotInSandbox(Exception): pass
# UTILS #######################
def touch(fname, times = None):
    """Unix-style touch: create *fname* if missing and update its
    access/modification times (*times* as accepted by os.utime)."""
    with file(fname, 'a'):
        os.utime(fname, times)
def append(fname, cont = None):
    """Append *cont* (or a default message) plus a newline to *fname*,
    logging the action tagged with the containing directory's name."""
    with file(fname, 'a') as fobj:
        print (' -C- (%s)' % os.path.basename(os.path.dirname(fname))).ljust(18), 'append'
        fobj.write(cont+'\n' if cont else 'default msg\n')
def clone_makebr_edit_commit_repo(repodir, intbr='master', ftrbr='work'):
    # Clone the base repo into *repodir*, declare *intbr* as the integration
    # branch, then create feature branch *ftrbr* with one committed edit.
    # Returns (origin Repo, clone Repo).
    oriR, cloR = clone_arepo(DIR_REPO_BASE, repodir)
    so,se,rc = cloR.exe_cmd_succ('git intbr %s' % intbr)
    makebr_edit_commit(cloR, ftrbr)
    return oriR, cloR
def makebr_edit_commit(repoobj, brname='work', filecont='some content'):
    # Configure the feature-branch prefix, start feature branch *brname*, and
    # commit one edit on it.
    # NOTE(review): *filecont* is accepted but never used -- confirm whether it
    # should be forwarded to edit_commit() as the message/content.
    so,se,rc = repoobj.exe_cmd_succ('git config --local %s ftr/' % CFG_FTR_PREFIX )
    so,se,rc = repoobj.exe_cmd_succ('git ftrbr-start %s' % brname)
    edit_commit(repoobj)
def edit_commit(repoobj, msg = 'something', afile = FILE_REPO_BASE):
    # Append *msg* to *afile* in *repoobj* and commit it.  When *repoobj* is
    # the bare base repo, the edit is done through a throw-away clone and
    # pushed back, since a bare repo has no working tree.
    #bare repo must be edited by clone repo
    onori = False
    if os.path.abspath(repoobj.dir()) == os.path.abspath(DIR_REPO_BASE):
        onori = True
        shutil.rmtree(DIR_REPO_BASE_DELETE,ignore_errors=True)
        oriR, repoobj = clone_arepo(DIR_REPO_BASE, DIR_REPO_BASE_DELETE)
        repoobj.exe_cmd_succ('git checkout master')
    #edit, commit
    afile = os.path.join(repoobj.dir(), afile)
    append(afile, msg)
    repoobj.exe_cmd('git add %s' % afile)
    repoobj.exe_cmd('git commit -m "modified file"')
    #bare, must push
    if onori:
        repoobj.exe_cmd('git push origin master')
def check_fname(f):
    """Guard against destructive operations outside the sandbox.

    Asserts that *f* contains a '/sandbox' path component.  Helpers such as
    sandbox() and create_arepo() rmtree their targets, so this is the only
    safety net against wiping an arbitrary directory.
    """
    # (Removed long-dead commented-out existence check.)
    nt.assert_in('/sandbox', f)
# GIT MGT #####################
def exe_cmd(cmd, adir, mustsucc=None):
    # Run *cmd* (shlex-split, no shell) in directory *adir*, capturing output.
    # mustsucc: True -> assert rc == 0, False -> assert rc != 0, None -> no check.
    # Returns (stdout, stderr, returncode) with both streams whitespace-stripped.
    print (' -C- (%s)'%os.path.basename(adir)).ljust(18), cmd
    #sys.stdout.flush()
    #sys.stderr.flush()
    #return subprocess.call(shlex.split(cmd), stdout=None, cwd=adir)
    po = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, cwd=adir)
    so, se = po.communicate()
    rc = po.returncode
    if mustsucc == True:
        nt.eq_(rc, 0, 'Failed command: cd %s && %s' % (adir, cmd))
    if mustsucc == False:
        nt.assert_not_equal(rc, 0, 'Must fail command: cd %s && %s' % (adir, cmd))
    # Echo whatever the command printed, for test-log readability.
    if so.strip() != '': print ' -SO- ', so.strip()
    if se.strip() != '': print ' -SE- ', se.strip()
    return so.strip(), se.strip(), rc
class Repo:
    """Thin handle on a git directory inside the sandbox.

    Wraps the module-level exe_cmd() so commands run with cwd set to the
    repository root.
    """
    def __init__(self, root):
        check_fname(root)
        self._root = root

    def dir(self):
        """Return the repository root path."""
        return self._root

    def exe_cmd(self, cmd, mustsucc=None):
        # BUG FIX: *mustsucc* was previously dropped instead of being
        # forwarded, so Repo.exe_cmd(cmd, mustsucc=True) silently did no check.
        return exe_cmd(cmd, self._root, mustsucc=mustsucc)

    def exe_cmd_succ(self, cmd):
        # Run *cmd* and assert it succeeds (rc == 0).
        return exe_cmd(cmd, self._root, mustsucc=True)

    def exe_cmd_deny(self, cmd):
        # Run *cmd* and assert it fails (rc != 0).
        return exe_cmd(cmd, self._root, mustsucc=False)
# FUNCTIONS ###################
def sandbox():
    # Recreate the sandbox directory from scratch and seed it with the base
    # bare repository every test clones from.
    check_fname(DIR_SANDBOX)
    shutil.rmtree(DIR_SANDBOX,ignore_errors=True)
    os.mkdir(DIR_SANDBOX)
    create_arepo(DIR_REPO_BASE,FILE_REPO_BASE)
def create_arepo(adir, afile):
    # Create a bare repository at *adir* whose master branch holds a single
    # commit adding *afile*.  The commit is made through a throw-away clone
    # ('<adir>.tbd') which is deleted afterwards.
    check_fname(adir)
    #clean old, init bare
    origRepo = Repo(adir)
    shutil.rmtree(adir,ignore_errors=True)
    os.mkdir(adir)
    origRepo.exe_cmd('git init --bare')
    #clone, populate, push, destroy
    tbddir = adir + '.tbd'
    tbdfile = os.path.join(tbddir, afile)
    oriR, cloR = clone_arepo(adir, tbddir)
    touch(tbdfile)
    cloR.exe_cmd('git add %s' % afile)
    cloR.exe_cmd('git commit -m "first commit"')
    cloR.exe_cmd('git push origin master')
    shutil.rmtree(tbddir,ignore_errors=True)
def clone_arepo(adir, clonedir):
    # Clone repository *adir* into *clonedir* (wiping any previous clone)
    # and return (origin Repo, clone Repo).
    check_fname(clonedir)
    shutil.rmtree(clonedir,ignore_errors=True)
    cmd_clone = 'git clone %s %s' % (adir, clonedir)
    exe_cmd( cmd_clone, DIR_SANDBOX, mustsucc=True )
    nt.assert_true(os.path.exists(clonedir), 'Not cloned %s' % clonedir)
    return Repo(adir), Repo(clonedir)
|
UTF-8
|
Python
| false | false | 2,012 |
1,700,807,055,035 |
80703d9afa1b2c60fd8a6116650fd3fa46be932a
|
4b581c19d1dc7e82d09448b639bf811cb5b06b63
|
/parser/num.py
|
a56bd904440b1ec3c9bb5b693ba14c3733160136
|
[] |
no_license
|
fanfannothing/tpsr
|
https://github.com/fanfannothing/tpsr
|
1e4d8a5bc77467d88cb37178dd7711713d084634
|
257f6a84ab344b0f5d26fe2b9266b5dde2a09d18
|
refs/heads/master
| 2021-01-16T20:38:48.342045 | 2013-08-21T17:34:23 | 2013-08-21T17:34:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /bin/env python
# encoding=utf-8
# [email protected]
#
import re
from tpsr_core import *
class NumParser(BasicParser):
    """Extract numeric literals (plain and comma-grouped) from a TextObject.

    Results are attached to the text object under this parser's key ('NUM')
    as TextSpan instances whose .value is the parsed float.
    """
    def __init__(self):
        BasicParser.__init__(self, 'NUM')
        # e.g. "12", "-3.5"
        self.__num_pattern = re.compile('[+-]{0,1}[0-9]+(\.[0-9]+){0,1}')
        # e.g. "123,456.7"
        # NOTE(review): '{,}' is an unusual quantifier spelling -- verify it is
        # treated as "zero or more" by the targeted Python version.
        self.__num_pattern_with_comma = re.compile('[+-]{0,1}([0-9]{3},){,}[0-9]{3}(\.[0-9]+){0,1}')

    def find_all(self, text_object):
        """Find every number in text_object.text and store spans under self.key."""
        text = text_object.text
        spans = []
        temp_spans = []
        for m in self.__num_pattern.finditer(text):
            temp_spans.append( (m.span(), m.group()) )
        for m in self.__num_pattern_with_comma.finditer(text):
            temp_spans.append( (m.span(), m.group()) )
        # Sort by start ascending, end descending so an enclosing match comes
        # before the sub-matches it contains; contained spans are then dropped.
        temp_spans = sorted(temp_spans, key=lambda x:(x[0][0], -x[0][1]))
        if len(temp_spans)>0:
            spans.append(temp_spans[0])
        for i in range(1, len(temp_spans)):
            my_span = temp_spans[i][0]
            last_span = spans[-1][0]
            if my_span[0]>=last_span[0] and my_span[1]<=last_span[1]:
                # ignore span which is in last span.
                continue
            spans.append( temp_spans[i] )
        # make answer list.
        ans = []
        for s in spans:
            # BUG FIX: strip grouping commas before conversion --
            # float('123,456') raises ValueError.
            sp = TextSpan(s[0][0], s[0][1], float(s[1].replace(',', '')) )
            ans.append(sp)
        text_object.set(self.key, ans)
# Manual smoke test (Python 2: print statements / raw_input): read lines from
# stdin and print the numbers found; an empty line quits.
if __name__=='__main__':
    psr = NumParser()
    while 1:
        text = raw_input()
        if text=='': break
        to = TextObject(text)
        psr.find_all(to)
        ans = to.get('NUM')
        for s in ans:
            print '(%d,%d) : [%f]' % (s.begin, s.end, s.value)
|
UTF-8
|
Python
| false | false | 2,013 |
13,168,369,755,981 |
828b85ae13c85f92de8ceab40e5ba1dac46ddbe8
|
84e16db62fb6b242680d1a93e01f982c86c549b4
|
/tuning/r/no_confusion_gam.py
|
fe4e871ff3a58589b3c8b8d47c098e1a19691a4e
|
[] |
no_license
|
amcmorl/motorlab
|
https://github.com/amcmorl/motorlab
|
58193bfa3f0cfe570b696310ba91e0462a0047b2
|
e124530b369c827b385f6dc3b9be20e4b75b6256
|
refs/heads/master
| 2021-01-25T00:10:10.709281 | 2013-06-07T03:40:56 | 2013-06-07T03:40:56 | 3,430,317 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import rpy2.robjects as ro
import matplotlib as mpl
import matplotlib.pyplot as plt
# Fit the R models (via rpy2) on simulated "real kinematics" data and render
# the log SSE of every model-vs-model comparison as a grey-scale matrix.
ro.r('source("fit_noCV.R")')
ro.r('source("simulate_realkinematics.R")')
fit_noCV = ro.r('fit_noCV')
data = ro.r('simu.realpos(sd.factor=0.0001)')
out = fit_noCV(data)
# NOTE(review): `out.r[...]` is the legacy rpy2 access style -- confirm the
# installed rpy2 version still supports it.
sse = np.array(out.r['sse'][0])

# plot out to a postscript file
super_star = "$^*$"
# Model labels; the second half repeats them with a TeX asterisk suffix.
modellist = ["kd","kdp","kds","kdps","kv","kvp","kvs","kvps"]
modellist += [model + super_star for model in modellist]
#"kd*","kdp*","kds*","kdps*","kv*","kvp$^*$","kvs*","kvps*"]
fig = plt.figure(figsize = (4,3))
# Axes rectangle in figure fractions: left/bottom margins and derived size.
l, b = 0.15, 0.225
r, t = 0.00, 0.05
w, h = 1 - l - r, 1 - t - b
ax = fig.add_axes([l, b, w, h])
ax.imshow(np.log(sse), origin='lower', cmap=mpl.cm.Greys_r)
# Label both axes with the model names, rotated to fit.
axes = [ax.xaxis, ax.yaxis]
rots = ['vertical', 'horizontal']
for axis, rot in zip(axes, rots):
    axis.set_ticks(range(0, len(modellist)))
    axis.set_ticklabels(modellist, rotation=rot, fontsize='small')
    axis.set_ticks_position('none')
|
UTF-8
|
Python
| false | false | 2,013 |
3,942,779,987,195 |
8c8395304acfdcd3bd60bc03a53ab9bafafab7c5
|
04e282c43147e4d6e7dbf156d5735ab8c17ff79e
|
/scrapy_canadian_budgets/pipelines.py
|
9955e4f6ea71583f51794b44a2a225e76c884aa8
|
[] |
no_license
|
msukmanowsky/scrappy_canadian_budgets
|
https://github.com/msukmanowsky/scrappy_canadian_budgets
|
b3eb6fa8d94608d33421e87022e67850917e4991
|
7dbce934790306dfade91d148e4d754952384a41
|
refs/heads/master
| 2020-05-13T06:30:39.849878 | 2012-03-28T04:38:17 | 2012-03-28T04:38:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pymongo
import re
from scrapy.conf import settings
from scrapy_canadian_budgets.items import Table, Summary
from scrapy_canadian_budgets.spiders.ontario_2011_budget_spider import Ontario2011BudgetSpider
from scrapy_canadian_budgets.spiders.ontario_2012_budget_spider import Ontario2012BudgetSpider
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/topics/item-pipeline.html
class MongoDBPipeline(object):
    """Persist cleaned items in MongoDB, one collection per spider+item type.

    Collections from the previous crawl are dropped at startup.
    """
    def __init__(self):
        connection = pymongo.Connection(settings['MONGODB_SERVER'], settings['MONGODB_PORT'])
        self.db = connection[settings['MONGODB_DB']]
        self.db.authenticate(settings['MONGODB_USERNAME'], settings['MONGODB_PASSWORD'])
        # Start each crawl from a clean slate.
        # NOTE(review): process_item writes to '<spider.name>_<itemclass>'
        # collections; these hard-coded names (and the '_sable' spelling) may
        # not match what is actually written -- verify against the spiders.
        self.db.drop_collection('ontario_2012_budget_summary')
        self.db.drop_collection('ontario_2012_budget_sable')
        self.db.drop_collection('ontario_2011_budget_summary')
        self.db.drop_collection('ontario_2011_budget_sable')

    def process_item(self, item, spider):
        # Clean the item, insert it into a per-spider/per-type collection.
        cleaned_item = self.clean_item(item, spider)
        collection_name = '_'.join([spider.name, cleaned_item.__class__.__name__]).lower()
        collection = self.db[collection_name]
        collection.insert(dict(cleaned_item))
        # BUG FIX: Scrapy pipelines must return the item (or raise DropItem)
        # so later pipelines still receive it; previously None was returned.
        return cleaned_item

    def clean_item(self, item, spider):
        # Strip markup/whitespace from the fields known to carry HTML.
        if type(item) == Table:
            item["caption"] = self.clean_html(item["caption"])
            item["footnote"] = self.clean_html(item["footnote"])
        elif type(item) == Summary:
            item["heading"] = self.clean_html(item["heading"])
            item["bullets"] = [re.sub(r'\s+', r' ', s) for s in item["bullets"]]
        return item

    def clean_html(self, bad_html):
        # Remove tags and collapse runs of whitespace; plain str passes through.
        if type(bad_html) == str:
            return bad_html
        s = bad_html
        s = re.sub(r'<.*?>', r'', s)
        s = re.sub(r'\s+', r' ', s)
        return s
class OntarioBudgetPipeline(object):
    """Clean HTML from items produced by the Ontario budget spiders only;
    items from other spiders pass through untouched."""
    def process_item(self, item, spider):
        if type(spider) == Ontario2011BudgetSpider or type(spider) == Ontario2012BudgetSpider:
            return self.process_ontario(item, spider)
        else:
            return item

    def process_ontario(self, item, spider):
        # Same field cleaning as MongoDBPipeline.clean_item.
        if type(item) == Table:
            item["caption"] = self.clean_html(item["caption"])
            item["footnote"] = self.clean_html(item["footnote"])
        elif type(item) == Summary:
            item["heading"] = self.clean_html(item["heading"])
            item["bullets"] = [re.sub(r'\s+', r' ', s) for s in item["bullets"]]
        return item

    @staticmethod
    def clean_html(bad_html):
        # Remove tags and collapse whitespace; plain str passes through.
        if type(bad_html) == str:
            return bad_html
        s = bad_html
        s = re.sub(r'<.*?>', r'', s)
        s = re.sub(r'\s+', r' ', s)
        return s
|
UTF-8
|
Python
| false | false | 2,012 |
15,590,731,314,525 |
67f373e7de662599342ea11e130d992fdcd92e93
|
1a2f90f9fdeada93d65de83c0a48fa5f6a418458
|
/web_site/zabbix_center/screens/admin.py
|
afc967a738ad056d0bea1d1a2105d49246e70db9
|
[] |
no_license
|
stefanmonkey/zabbix_center
|
https://github.com/stefanmonkey/zabbix_center
|
3a94955a9da0b79d3d4c249f4888697330495e36
|
c2f15ec8da2aca052f8872e89f5ee0e8115c84b3
|
refs/heads/master
| 2016-09-03T07:01:40.362453 | 2014-04-04T03:38:46 | 2014-04-04T03:38:46 | 17,424,156 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from screens.models import Groups, HostsGroups
# Register your models here.
class HostsInline(admin.StackedInline):
    # Edit host<->group memberships inline on the Groups admin page.
    model = HostsGroups
    extra = 3  # number of empty membership forms shown
class GroupsAdmin(admin.ModelAdmin):
    # Admin configuration for Groups: only the name field, plus inline hosts.
    fieldsets = [
        (None, {'fields':['group_name']}),
    ]
    inlines = [HostsInline]
    list_display = ('group_name',)
# Expose group management (with inline host membership) in the Django admin.
admin.site.register(Groups, GroupsAdmin)
|
UTF-8
|
Python
| false | false | 2,014 |
13,864,154,479,151 |
13e524c4461eea61c3af7b36957df4e720ab69a2
|
59d15bd038d52eb9061bad5b347ebf9ef9ddd6e5
|
/modules/memebot.py
|
59307d4adbee0ab4aa832f89eda388941e60ddc3
|
[] |
no_license
|
gipi/Richie
|
https://github.com/gipi/Richie
|
f7238c0f943048bd93ce483e62f9636edae62c4d
|
ed00425b7d0e5203eb1e17d0f9e10e5bc5177a16
|
refs/heads/master
| 2021-01-01T05:32:43.012905 | 2011-12-30T18:22:30 | 2011-12-30T18:22:30 | 3,075,881 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""Watch URLs in channel, punish people for living under a rock"""
import re
import os
import urlparse
import datetime
from sqlobject import *
import random
from include.throttle import Throttle
from include.utils import Base, Module
import logging as log
# SQLObject schema for tracked URLs, authors, channels and comments.
# NOTE(review): the bare try/except presumably guards against redefinition
# errors (e.g. on module reload) -- verify; failures are silently ignored.
try:
    class url(SQLObject):
        # A URL sighting: original text, normalised form, who/where/when,
        # and how many times it has been reposted.
        url = StringCol()
        clean = StringCol()
        author = ForeignKey('author')
        channel = ForeignKey('channel')
        citations = IntCol(default=0)
        posted = DateTimeCol(default = datetime.datetime.now)
        comments = MultipleJoin('comments')

        def truncated_url(self):
            # Display form: long URLs are shortened to head ' ... ' tail.
            if (len(self.url) > 48):
                return self.url[:48] + ' ... ' + self.url[-4:]
            else:
                return self.url

        turl = property(truncated_url)

    class author(SQLObject):
        # A nick plus its meme-score counters (new posts, reposts made,
        # credit for being reposted).
        name = StringCol(alternateID=True, length=50)
        urls = MultipleJoin('url')
        comments = MultipleJoin('comments')
        pointsNew = IntCol(default=0)
        pointsOld = IntCol(default=0)
        pointsCredit = IntCol(default=0)

    class channel(SQLObject):
        # An IRC channel where URLs were seen.
        name = StringCol(alternateID=True, length=50)
        urls = MultipleJoin('url')

    class comments(SQLObject):
        # Text surrounding a posted URL, attributed to its author.
        text = StringCol()
        author = ForeignKey('author')
        url = ForeignKey('url')
except:
    pass
class Main(Module):
    """Madcow module: track URLs posted in channels and mock reposters.

    Every URL is normalised and stored with its author, channel and the
    comments surrounding it.  Reposting a known URL earns a random riff plus
    attribution; 'score' requests return a leaderboard.  Scoring per author:
    +1 per new URL, -2 per repost made, +2 each time one's URL is reposted.
    """
    pattern = Module._any
    allow_threading = False
    priority = 10
    terminate = False
    require_addressing = False
    help = 'score [name,range] - get memescore, empty for top10'
    matchURL = re.compile('(http://\S+)', re.I)
    # "score", "score <name>", "score <n>", "score <n>-<m>"
    scoreRequest = re.compile(r'^\s*score(?:(?:\s+|[:-]+\s*)(\S+?)(?:\s*-\s*(\S+))?)?\s*$', re.I)
    colonHeader = re.compile(r'^\s*(.*?)\s*:\s*$')
    # Canned insults for reposted (old) memes.
    riffs = [
        'OLD MEME ALERT!',
        'omg, SO OLD!',
        'Welcome to yesterday.',
        'been there, done that.',
        'you missed the mememobile.',
        'oldest. meme. EVAR.',
        'jesus christ you suck.',
        'you need a new memesource, bucko.',
        'that was funny the first time i saw it.',
        'new to the internet?',
        'i think that came installed with the internet',
    ]
    get_frag = re.compile(r'^(.*)#([^;/?:@=&]*)$')

    def __init__(self, madcow):
        # Build the SQLObject connection URI from the bot config and create
        # the tables on first run.  A bad URI disables the module.
        self.throttle = Throttle()
        config = madcow.config.memebot
        engine = config.db_engine
        uri = engine + '://'
        if engine == 'sqlite':
            uri += os.path.join(madcow.dir, 'data/db-%s-memes' % madcow.ns)
        elif engine == 'mysql':
            user = config.db_user
            if len(config.db_pass):
                user += ':' + config.db_pass
            host = config.db_host
            if not len(host):
                host = 'localhost'
            if len(config.db_port):
                host += ':' + config.db_port
            uri += '%s@%s/%s' % (user, host, config.db_name)
        try:
            sqlhub.processConnection = connectionForURI(uri)
        except Exception, e:
            log.warn('invalid uri: %s (%s)' % (uri, e))
            self.enabled = False
            return
        # show raw SQL being dispatched if loglevel is debug
        if log.root.level <= log.DEBUG:
            url._connection.debug = True
            author._connection.debug = True
            channel._connection.debug = True
            comments._connection.debug = True
        url.createTable(ifNotExists=True)
        author.createTable(ifNotExists=True)
        channel.createTable(ifNotExists=True)
        comments.createTable(ifNotExists=True)

    def cleanURL(self, url):
        """Normalise *url* so trivially-different links compare equal.

        Lowercases the host, strips a leading 'www.', guarantees a path,
        drops fragments and empty query parameters, and sorts the query
        string.
        """
        # stolen from urlparse.urlsplit(), which doesn't handle
        # splitting frags correctly
        netloc = query = fragment = ''
        i = url.find(':')
        scheme = url[:i].lower()
        url = url[i+1:]
        if url[:2] == '//':
            delim = len(url)
            for c in '/?#':
                wdelim = url.find(c, 2)
                if wdelim >= 0:
                    delim = min(delim, wdelim)
            netloc, url = url[2:delim], url[delim:]
        if '#' in url:
            try:
                url, fragment = self.get_frag.search(url).groups()
            except:
                pass
        if '?' in url:
            url, query = url.split('?', 1)
        ### now for memebots normalizing..
        # make hostname lowercase and remove www
        netloc = netloc.lower()
        if netloc.startswith('www.') and len(netloc) > 4:
            netloc = netloc[4:]
        # all urls have trailing slash
        if url == '':
            url = '/'
        # remove empty query settings, these are usually form artifacts
        # and put them in order
        try:
            query = query.split('&')
            query = [part.split('=') for part in query]
            query = [[x, y] for x, y in query if len(y)]
            query = ['='.join([x, y]) for x, y in query]
            query = sorted(query)
            query = '&'.join(query)
        except:
            # probably not valid query string, just "?newmeme"
            query = ''
        # ignore fragments
        fragment = ''
        return urlparse.urlunsplit([scheme, netloc, url, query, fragment])

    def getScoreForAuthor(self, a):
        # +1 per new URL, -2 per repost made, +2 per time reposted by others.
        return a.pointsNew + (a.pointsOld * -2) + (a.pointsCredit * 2)

    def getScores(self):
        # All authors as (name, score) tuples, highest score first.
        scores = [(a.name, self.getScoreForAuthor(a)) for a in author.select()]
        scores = sorted(scores, lambda x, y: cmp(y[1], x[1]))
        return scores

    def response(self, nick, args, kwargs):
        """Handle one channel line: answer score queries when addressed,
        otherwise record any URL seen and riff if it is a repost."""
        nick = nick.lower()
        chan = kwargs['channel'].lower()
        addressed = kwargs['addressed']
        message = args[0]
        if addressed:
            # Leaderboard request: no arg -> top 10, digit -> that rank,
            # n-m -> range, name -> that author's rank.  Any parse problem
            # falls through to URL handling.
            try:
                x, y = self.scoreRequest.search(message).groups()
                scores = self.getScores()
                size = len(scores)
                if x is None:
                    scores = scores[:10]
                    x = 1
                elif x.isdigit():
                    x = int(x)
                    if x == 0:
                        x = 1
                    if x > size:
                        x = size
                    if y is not None and y.isdigit():
                        y = int(y)
                        if y > size:
                            y = size
                        scores = scores[x-1:y]
                    else:
                        scores = [scores[x-1]]
                else:
                    for i, data in enumerate(scores):
                        name, score = data
                        if name.lower() == x.lower():
                            scores = [scores[i]]
                            x = i+1
                            break
                out = []
                for i, data in enumerate(scores):
                    name, score = data
                    out.append('#%s: %s (%s)' % (i + x, name, score))
                return ', '.join(out)
            except:
                pass
        match = self.matchURL.search(message)
        if match is None:
            return
        # Rate-limit URL processing per user.
        event = self.throttle.registerEvent(name='memebot', user=nick)
        if event.isThrottled():
            if event.warn():
                return '%s: Stop abusing me plz.' % nick
            else:
                return
        orig = match.group(1)
        clean = self.cleanURL(orig)
        # Text before/after the URL becomes attached comments; a trailing
        # colon header ("check this:") is stripped from the leading part.
        comment1, comment2 = re.split(re.escape(orig), message)
        try:
            comment1 = self.colonHeader.search(comment1).group(1)
        except:
            pass
        comment1 = comment1.strip()
        comment2 = comment2.strip()
        try:
            me = author.byName(nick)
        except SQLObjectNotFound:
            me = author(name=nick)
        try:
            # old meme
            try:
                old = url.select(url.q.clean == clean)[0]
            except:
                raise SQLObjectNotFound
            if len(comment1) > 0:
                comments(url=old, text=comment1, author=me)
            if len(comment2) > 0:
                comments(url=old, text=comment2, author=me)
            # chew them out unless its my own
            if old.author.name != nick:
                response = 'first posted by %s on %s' % (old.author.name,
                        old.posted)
                riff = random.choice(self.riffs)
                old.author.pointsCredit = old.author.pointsCredit + 1
                me.pointsOld = me.pointsOld + 1
                old.citations = old.citations + 1
                return '%s %s' % (riff, response)
        except SQLObjectNotFound:
            # New meme: record it (creating the channel row if needed).
            try:
                c = channel.byName(chan)
            except SQLObjectNotFound:
                c = channel(name=chan)
            urlid = url(url=orig, clean=clean, author=me, channel=c)
            if len(comment1) > 0:
                comments(url=urlid, text=comment1, author=me)
            if len(comment2) > 0:
                comments(url=urlid, text=comment2, author=me)
            me.pointsNew = me.pointsNew + 1
        except Exception, e:
            log.warn('error in %s: %s' % (self.__module__, e))
            log.exception(e)
|
UTF-8
|
Python
| false | false | 2,011 |
17,712,445,160,513 |
2fadeabba4a7613df0bd8740c7a81c94ac3ff898
|
a5327693d9d3813549e71d0433771bff888e2e12
|
/exampleInstaller/MyModules/Mark.py
|
3f9a7f10a6c4c2e5619afb2dce0723550ac7a9c2
|
[
"BSD-3-Clause"
] |
permissive
|
MrBrezina/buildInstaller
|
https://github.com/MrBrezina/buildInstaller
|
c6b40b20ec32773fb26e4da5350dc2e6d59363fd
|
bd46e31c894888f0fde6b5312bf00ec1a8e045c3
|
refs/heads/master
| 2021-01-10T21:39:42.863253 | 2011-05-13T06:46:27 | 2011-05-13T06:46:27 | 1,742,114 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__copyright__ = """
Copyright (c) David Brezina, 2008. All rights reserved.
Redistribution and use of the script is limited by the BSD licence (http://creativecommons.org/licenses/BSD/).
"""
# Default mark colour components.  NOTE(review): presumably RGB values used
# by the (FontLab-style) editor; nothing in this file consumes the dict.
colour = {"Red":1, "Green":80, "Blue":170}
def Mark(glyphs, colourCode):
    """Set the mark colour of every glyph in *glyphs* to *colourCode*."""
    for glyph in glyphs:
        glyph.mark = colourCode
|
UTF-8
|
Python
| false | false | 2,011 |
4,561,255,305,105 |
83cceb6bc62204a54e911580df07976dc946d18e
|
4442d5529f2be82110df4bf4bfffaaf6babce47a
|
/links/views.py
|
4e68af82d47f4df05482183b24dd6e100afbd0cc
|
[
"MIT"
] |
permissive
|
mdotson/MMHackathon
|
https://github.com/mdotson/MMHackathon
|
864863b3e47c975aa9b88eeb0ca5c308000f3103
|
7e28eda02bdea919b71b6435308059d27a37f88e
|
refs/heads/master
| 2016-10-24T08:36:07.742331 | 2014-08-08T16:32:42 | 2014-08-08T16:32:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Copyright Varun Kumar
import os
import webapp2
import re
import jinja2
import random
import string
import hashlib
import hmac
import json
import logging
import time
from google.appengine.api import memcache
from models import *
template_dir = os.path.join(os.path.dirname(__file__), '../templates')
jinja_env= jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir), autoescape = True)
SECRET = 'imsosecret'
# --------------------------------------
# FUNCTIONS
# --------------------------------------
def elapsed_time(cache_time):
    """Return a human-readable 'N second(s)' age string for *cache_time*
    (a time.time() timestamp), with correct singular/plural form."""
    seconds = int(time.time() - cache_time)
    if seconds == 1:
        suffix = ''
    else:
        suffix = 's'
    return '{0} second{1}'.format(seconds, suffix)
# Cookie Hash Functions
def hash_str(val):
    # HMAC digest of *val* keyed with the server-side SECRET (hex string).
    return hmac.new(SECRET, val).hexdigest()
def make_secure_val(val):
    # Produce a tamper-evident "value|hmac" string for use as a cookie value.
    return "%s|%s" % (val, hash_str(val))
def check_secure_val(user_id_str):
    """Return the value part of a "value|hmac" cookie string if the hash
    verifies, else None (implicit).

    Fix: removed leftover debug print statements that wrote every cookie
    value to stdout on each request.
    """
    val = user_id_str.split('|')[0]
    if user_id_str == make_secure_val(val):
        return val
# JSON functions
def create_post_dict(post):
    # Flatten a Post entity into a JSON-serialisable dict.
    post_dict = {}
    post_dict['id'] = post.key().id()
    post_dict['title'] = post.title
    post_dict['url'] = post.url
    post_dict['created'] = post.created.strftime("%c")
    return post_dict
# Memcache functions
def top_posts(update = False):
    # Memcached newest-10 posts as (posts, cached_at_timestamp).  Pass
    # update=True after writing a new Post to force a fresh datastore query.
    key_post = "top"
    posts = memcache.get(key_post)
    if posts is None or update:
        logging.error("DB QUERY")  # logged loudly to make cache misses visible
        posts = db.GqlQuery("SELECT * FROM Post ORDER BY created DESC LIMIT 10")
        posts = list(posts)
        posts = (posts, time.time())
        memcache.set(key_post, posts)
    return posts
def single_post(post_id):
    # Memcached single post as (post, cached_at_timestamp), keyed per id.
    # leaving out update param because we never edit the post
    key_post = "post_" + post_id
    post = memcache.get(key_post)
    if post is None:
        logging.error("DB QUERY")
        post = Post.get_by_id(int(post_id))
        post = (post, time.time())
        memcache.set(key_post, post)
    return post
# --------------------------------------
# HANDLERS
# --------------------------------------
class Handler(webapp2.RequestHandler):
    """Base request handler adding jinja2 template rendering helpers."""
    def write(self, *a, **kw):
        # Write raw output to the response body.
        self.response.out.write(*a, **kw)

    def render_str(self, template, **params):
        # Render *template* with *params* and return the resulting string.
        t = jinja_env.get_template(template)
        return t.render(params)

    def render(self, template, **kw):
        # Render *template* directly into the response.
        self.write(self.render_str(template, **kw))
class MainHandler(Handler):
    """Root handler: bounce to the /links listing."""
    def get(self):
        # TODO: not sure if this is "correct" way to do this, but links.html won't render with posts
        # unless we hit the /links route first:
        self.redirect("/links");
        # NOTE(review): rendering after redirect() writes a body onto a 302
        # response -- probably only one of these two calls should remain.
        self.render("links.html");
class WelcomeHandler(Handler):
    """Render the links page for a signed-in user, else redirect to signup."""
    def get(self):
        # Validate the signed 'user_id' cookie ("<id>|<hmac>"); -1 = no user.
        user_id = -1
        user_id_str = self.request.cookies.get('user_id')
        if user_id_str:
            cookie_val = check_secure_val(user_id_str)
            if cookie_val:
                user_id = int(cookie_val)
        if user_id != -1:
            user = User.get_by_id(int(user_id))
            if user:
                self.render("links.html", user=user, user_id = user_id)
            else:
                self.redirect("/links/signup")
        else:
            self.redirect("/links/signup")
class LinkHandler(Handler):
    """Front page: cached top posts plus a cache-age indicator."""
    def render_links(self):
        # Resolve the signed 'user_id' cookie, fetch the cached top posts and
        # render, passing the user object only when somebody is logged in.
        user_id = -1
        user_id_str = self.request.cookies.get('user_id')
        if user_id_str:
            cookie_val = check_secure_val(user_id_str)
            if cookie_val:
                user_id = int(cookie_val)
        posts, cache_time = top_posts()
        cache_timer = elapsed_time(cache_time)
        if user_id != -1:
            user = User.get_by_id(int(user_id))
            self.render("links.html", posts=posts, cache_timer=cache_timer, user=user)
        else:
            self.render("links.html", posts=posts, cache_timer=cache_timer)

    def get(self):
        self.render_links()
class JSONLinkHandler(Handler):
    """Serve the newest posts as a JSON array."""
    def get(self, limit = 10, skip = 0):
        # NOTE(review): *limit* and *skip* are accepted but never used -- the
        # query is hard-coded to the latest 10 posts.
        posts = db.GqlQuery("SELECT * FROM Post ORDER BY created DESC LIMIT 10")
        posts_list = []
        for post in posts:
            posts_list.append(create_post_dict(post))
        self.response.headers['Content-Type'] = 'application/json' #add charset here
        self.write(json.dumps(posts_list))
class PostHandler(Handler):
    """Single-post page: GET renders the post with comments, POST adds a comment.

    Fixes: the session lookup previously raised NameError when the
    'puppylynx_session' cookie was absent (the *session* local was only
    assigned inside the cookie branch), and a leftover debug print was removed.
    The repeated cookie->session->user dance is factored into helpers.
    """
    def getComments(self, post):
        # Newest-first comments for *post*.
        # NOTE(review): GQL built by string concatenation; the id comes from
        # the URL route, but bound parameters would be cleaner.
        comments = db.GqlQuery("SELECT * FROM Comment WHERE post_id='" + str(post.key().id()) + "' ORDER BY created DESC" )
        return comments

    def _get_session(self):
        # Resolve the session cookie to a Session entity, or None.
        session_uuid = self.request.cookies.get('puppylynx_session')
        if session_uuid is None:
            return None
        rows = list(db.GqlQuery("SELECT * FROM Session WHERE uuid = '"+str(session_uuid)+"' LIMIT 1"))
        if len(rows) > 0:
            return rows[0]
        return None

    def _get_user(self, session):
        # Look up the User owning *session*, or None.
        if session is None:
            return None
        rows = list(db.GqlQuery("SELECT * FROM User WHERE username = '"+str(session.username)+"' LIMIT 1"))
        if len(rows) > 0:
            return rows[0]
        return None

    def get(self, post_id):
        post, cache_time = single_post(post_id)
        comments = list(self.getComments(post))
        logged_in_user = self._get_session() is not None
        if post:
            self.render("post.html", post=post, comments=comments, logged_in_user=logged_in_user)
        else:
            # NOTE(review): render_post() is not defined on this class; a
            # missing post currently raises AttributeError -- confirm intent.
            self.render_post(error="links post %s not found!" % post_id)

    def post(self, post_id):
        # fix for multiline input and unicode characters
        comment = self.request.get("comment").encode('ascii', 'ignore')
        user = self._get_user(self._get_session())
        if user != None:
            if comment:
                username = user.username
                comment = Comment(comment=comment, username=username, post_id=post_id)
                new_comment = comment.put()
                #TODO top_posts(True)
                post, cache_time = single_post(post_id)
                comments = list(self.getComments(post))
                the_one = Comment.get_by_id(new_comment.id())
                #comments.insert(0, the_one)
                self.render("post.html", post=post, comments=comments, logged_in_user=True)
            else:
                error = "comment needed dude!"
                self.render_post(error=error)
class JSONPostHandler(Handler):
    """Serve one post (by id) as JSON."""
    def get(self, post_id):
        post = Post.get_by_id(int(post_id))
        post_dict = create_post_dict(post)
        self.response.headers['Content-Type'] = 'application/json'
        self.write(json.dumps(post_dict))
class PaginationHandler(Handler):
    """Paged link listing (GET) and new-post submission (POST).

    Fixes: *page* is coerced to int before computing the offset (route
    captures arrive as strings, and ``"2" * 10`` repeats the digits instead
    of multiplying), and the session lookup no longer raises NameError when
    the 'puppylynx_session' cookie is missing.
    """
    def render_newpost(self, title="", url="", error=""):
        # Re-render the submission form, optionally with an error message.
        self.render("newpost.html", title=title, url=url, error=error)

    def _get_session(self):
        # Resolve the session cookie to a Session entity, or None.
        session_uuid = self.request.cookies.get('puppylynx_session')
        if session_uuid is None:
            return None
        rows = list(db.GqlQuery("SELECT * FROM Session WHERE uuid = '"+str(session_uuid)+"' LIMIT 1"))
        if len(rows) > 0:
            return rows[0]
        return None

    def _get_user(self, session):
        # Look up the User owning *session*, or None.
        if session is None:
            return None
        rows = list(db.GqlQuery("SELECT * FROM User WHERE username = '"+str(session.username)+"' LIMIT 1"))
        if len(rows) > 0:
            return rows[0]
        return None

    def get(self, page = 0):
        # Ten posts per page; int() guards against a str page from the route.
        offset = int(page) * 10
        posts = db.GqlQuery("SELECT * FROM Post ORDER BY created DESC LIMIT 10 OFFSET " + str(offset))
        posts = list(posts)
        logged_in_user = self._get_session() is not None
        self.render("links.html", posts=posts, logged_in_user=logged_in_user)

    def post(self, page = 0):
        title = self.request.get("title")
        url = self.request.get("url")
        user = self._get_user(self._get_session())
        if user != None:
            if title and url:
                username = user.username
                p = Post(title=title, url=url, username=username)
                p.put()
                # rerun the query and update the cache
                top_posts(True)
                self.redirect("/links/item/%s" % p.key().id())
            else:
                error = "title and url needed!"
                self.render_newpost(title, url, error)
class FlushHandler(Handler):
    """Clear all of memcache (forcing fresh datastore queries), then go home."""
    def get(self):
        memcache.flush_all()
        self.redirect('/links')
class NewHandler(Handler):
    """Placeholder for the 'new post' page (not implemented yet)."""
    def get(self):
        # Fix: removed a leftover profane debug print; the handler
        # intentionally does nothing until the page is implemented.
        pass
|
UTF-8
|
Python
| false | false | 2,014 |
6,244,882,456,318 |
18e40d15b02f80a23523941f8bc5e0f0c783ed36
|
42b84b02e64d21234372501a20bf820e0bcbf281
|
/site/threath/apps/registration/utils.py
|
564b0279f6f1c50b1010fc7cad26160fd3fc8fc5
|
[] |
no_license
|
gage/proto
|
https://github.com/gage/proto
|
861d1e1190770b0cc74f51a6fe140157cc0ac12e
|
e13ac7d0ee5c6acce2557dcf71a00a941543c006
|
refs/heads/master
| 2020-04-06T06:44:01.712532 | 2013-06-28T06:30:59 | 2013-06-28T06:30:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import threading
import string
from django.conf import settings
from django.contrib.auth.models import User
from django.core.mail import EmailMultiAlternatives
from django.utils.translation import ugettext_lazy as _
from django.template.loader import get_template
from django.template import Context
from slug.models import Slug
from globals.utils import random_generator
def send_invite_email(from_user, to_email, use_thread=True):
    """Send a site-invitation email from *from_user* to *to_email*.

    When *use_thread* is true the SMTP send happens on a background thread.
    Returns True on success, False on any failure -- errors are swallowed
    (best-effort delivery).
    """
    try:
        site_url = settings.SITE_DOMAIN
        account_name = from_user.get_profile().get_display_name()
        title = _('%(account)s is inviting you to example.com!!') % {'account':account_name}
        msg = _('Join and have fun!!<br/> Just check <a href="http://%(addr)s">example.com</a>')% {'addr': site_url}
        htmly = get_template('email_templates/inc_email_invite_site.html')
        d = Context({'title':title, 'msg': msg, 'site_url':settings.SITE_DOMAIN, 'account_name':account_name })
        subject, from_email, to = title, '[email protected]', to_email
        html_content = htmly.render(d)
        # Plain-text body plus an HTML alternative.
        mail = EmailMultiAlternatives(subject, msg, from_email, [to])
        mail.attach_alternative(html_content, "text/html")
        if use_thread:
            threading.Thread(target=mail.send).start()
        else:
            mail.send()
        return True
    except:
        # Deliberate catch-all: an invite failure must never break the caller.
        print 'Failed to send invitation mail'
        return False
def sign_up(username, password, request):
    """ Processes a new signup. And returns the created user object. """
    # The user starts inactive; a Registration row drives the activation flow.
    # NOTE(review): *request* is accepted but unused -- confirm.
    from registration.models import Registration
    user = User(
        username=username,
    )
    user.set_password(password)
    user.is_active = False
    user.save()
    registration = Registration.objects.create_registration_obj_for_user(user=user)
    return user
def check_username(name):
    """ Returns True if a username is valid and available, False otherwise. """
    return Slug.objects.validate(name)
def make_username(seed="user"):
    """ Generates a valid, unique username from `seed`. """
    # Slugify the seed, then append an increasing numeric suffix until a
    # free name is found.
    from globals.utils import u_slugify
    seed = u_slugify(seed)
    if check_username(seed):
        return seed
    postfix = 1
    while not check_username("%s%s" % (seed, postfix)):
        postfix += 1
    return "%s%s" % (seed, postfix)
def create_new_user():
    """Create an inactive user with a random '_xxxxxxx' username, a synthetic
    @example.com address, and a system-generated password stored on the
    profile (presumably for later delivery to the user)."""
    def _gen_username():
        # '_' prefix plus 7 random lowercase/digit characters.
        username = '_' + random_generator(size=7, chars=string.lowercase + string.digits)
        return username
    def _gen_password():
        password = random_generator(size=6, chars=string.lowercase + string.digits)
        return password
    username = _gen_username()
    # Retry on the (unlikely) username collision.
    while User.objects.filter(username=username).exists():
        username = _gen_username()
    email = username + '@example.com'
    system_gen_password = _gen_password()
    user = User.objects.create_user(username, email, system_gen_password)
    user.is_active = False
    user.save()
    user.get_profile().system_gen_password = system_gen_password
    user.get_profile().save()
    return user
def get_or_create_user_by_phone(phone_sms_e164, country_code):
    """Return (user, created) for an E.164 phone number, creating a fresh
    inactive account bound to that number when no profile matches."""
    from user_profiles.models import UserProfile
    created = False
    # User already exists
    if UserProfile.objects.filter(phone_sms_e164=phone_sms_e164).exists():
        user_profile = UserProfile.objects.get(phone_sms_e164=phone_sms_e164)
        created = False
        return user_profile.user, created
    created = True
    user = create_new_user()
    profile = user.get_profile()
    profile.phone_sms_e164 = phone_sms_e164
    profile.phone_sms = phone_sms_e164
    profile.country_code = country_code
    # TODO: Gen password
    profile.save()
    return user, created
|
UTF-8
|
Python
| false | false | 2,013 |
6,743,098,667,858 |
08472118ac8015af5e1957c6db4443a134a58c4d
|
75d46817a45380adb8bb68a76a66e965932dd544
|
/maybe/Maybe.py
|
a64cf9986fbe69aa5685ed3f1bce0d8d563fff72
|
[] |
no_license
|
codemiller/lambdaland
|
https://github.com/codemiller/lambdaland
|
edbe4fac8b861c5164fa0ed840b2383fe70f8b67
|
9e1975b03ff246eb7c2969fec3815aa7d97e4896
|
refs/heads/master
| 2020-05-19T07:28:27.187646 | 2014-05-12T23:14:32 | 2014-05-12T23:14:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Example by Fraser Tweedale: http://www.gitorious.org/frasertweedale/talks
# Using a Maybe monad for division, where there may be a failure due to division by zero
class Maybe(object):
    # Monad base type: ret (Haskell's 'return'/'unit') lifts a value into Just.
    @classmethod
    def ret(cls, x): return Just(x)
class Nothing(Maybe):
    # Failure case: bind (>>) short-circuits, ignoring the function entirely.
    def __init__(self): pass
    def __rshift__(self, f): return self
    def __repr__(self): return 'Nothing()'
class Just(Maybe):
    # Success case wrapping a value; bind (>>) applies f to the wrapped value.
    def __init__(self, x): self._x = x
    def __rshift__(self, f):
        return f(self._x)
    def __repr__(self): return 'Just({!r})'.format(self._x)
def mdiv(n, d):
    # Safe division: Nothing() for a zero divisor, otherwise Just(n / d).
    # NOTE: on Python 2, '/' is floor division for int operands.
    return Nothing() if not d else Just(n / d)
print Just(10) >> (lambda x: Just(2) >> (lambda y: mdiv(x, y)))
print Just(10) >> (lambda x: Just(0) >> (lambda y: mdiv(x, y)))
def divby(d):
    """Return a one-argument function that divides its input by d via mdiv."""
    def _apply(n):
        return mdiv(n, d)
    return _apply
# divby builds a partially-applied divisor; chaining stops at the first Nothing.
print Just(10) >> divby(2)
print Just(10) >> divby(0)
print Just(10) >> divby(0) >> divby(2)
print Just(16) >> divby(2) >> divby(2)
|
UTF-8
|
Python
| false | false | 2,014 |
16,131,897,175,614 |
43cec6084a2f829c7a6752380634a906db39f734
|
1252814110dd89294afc48e9d23ff060203c39d9
|
/build/third_party/buildbot_8_4p1/buildbot/status/web/build.py
|
b22a04bef58969b5bc4087f4f1339691e02f46fe
|
[
"MIT",
"GPL-1.0-or-later",
"BSD-3-Clause",
"GPL-3.0-only",
"GPL-2.0-only"
] |
non_permissive
|
leiferikb/bitpop
|
https://github.com/leiferikb/bitpop
|
5a69c7afcde2ea0bd1d7b2e7c962a47ad41271eb
|
a9952d42790e3c7ea44137574623dcdae7ac148d
|
refs/heads/master
| 2021-05-15T02:07:17.069341 | 2014-10-05T12:53:32 | 2014-10-05T12:53:32 | 20,436,564 | 7 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.web import html
from twisted.web.util import Redirect, DeferredResource
from twisted.internet import defer, reactor
import urllib, time
from twisted.python import log
from buildbot.status.web.base import HtmlResource, \
css_classes, path_to_build, path_to_builder, path_to_slave, \
getAndCheckProperties, path_to_authfail
from buildbot.status.web.step import StepsResource
from buildbot.status.web.tests import TestsResource
from buildbot import util, interfaces
# /builders/$builder/builds/$buildnum
class StatusResourceBuild(HtmlResource):
    """Web status page for a single build
    (/builders/$builder/builds/$buildnum), plus its stop/rebuild actions.
    """
    addSlash = True
    def __init__(self, build_status):
        HtmlResource.__init__(self)
        self.build_status = build_status
    def getPageTitle(self, request):
        """Return the browser title: builder name plus build number."""
        return ("Buildbot: %s Build #%d" %
                (self.build_status.getBuilder().getName(),
                 self.build_status.getNumber()))
    def content(self, req, cxt):
        """Fill the template context for build.html and render it."""
        b = self.build_status
        status = self.getStatus(req)
        # Build pages change as the build progresses; don't let clients cache.
        req.setHeader('Cache-Control', 'no-cache')
        cxt['b'] = b
        cxt['path_to_builder'] = path_to_builder(req, b.getBuilder())
        if not b.isFinished():
            # In-progress build: show the current step and an ETA if known.
            step = b.getCurrentStep()
            if not step:
                cxt['current_step'] = "[waiting for Lock]"
            else:
                if step.isWaitingForLocks():
                    cxt['current_step'] = "%s [waiting for Lock]" % step.getName()
                else:
                    cxt['current_step'] = step.getName()
            when = b.getETA()
            if when is not None:
                cxt['when'] = util.formatInterval(when)
                cxt['when_time'] = time.strftime("%H:%M:%S",
                                                time.localtime(time.time() + when))
        else:
            # Finished build: result CSS class and (optionally) test results.
            cxt['result_css'] = css_classes[b.getResults()]
            if b.getTestResults():
                cxt['tests_link'] = req.childLink("tests")
        ss = cxt['ss'] = b.getSourceStamp()
        if ss.branch is None and ss.revision is None and ss.patch is None and not ss.changes:
            cxt['most_recent_rev_build'] = True
        got_revision = None
        try:
            got_revision = b.getProperty("got_revision")
        except KeyError:
            # Property not set for this build; simply omit it from the page.
            pass
        if got_revision:
            cxt['got_revision'] = str(got_revision)
        try:
            cxt['slave_url'] = path_to_slave(req, status.getSlave(b.getSlavename()))
        except KeyError:
            pass
        # One dict per visible step: css class, timing, links, and logs.
        cxt['steps'] = []
        for s in b.getSteps():
            step = {'name': s.getName() }
            if s.isHidden():
                continue
            if s.isFinished():
                step['css_class'] = css_classes[s.getResults()[0]]
                (start, end) = s.getTimes()
                step['time_to_run'] = util.formatInterval(end - start)
            elif s.isStarted():
                if s.isWaitingForLocks():
                    step['css_class'] = "waiting"
                    step['time_to_run'] = "waiting for locks"
                else:
                    step['css_class'] = "running"
                    step['time_to_run'] = "running"
            else:
                step['css_class'] = "not_started"
                step['time_to_run'] = ""
            # The dict is appended first, then filled in; it is the same
            # object, so the later assignments are still visible in cxt.
            cxt['steps'].append(step)
            step['link'] = req.childLink("steps/%s" % urllib.quote(s.getName(),
                                                                   safe=''))
            step['text'] = " ".join(s.getText())
            step['urls'] = map(lambda x:dict(url=x[1],logname=x[0]), s.getURLs().items())
            step['logs']= []
            for l in s.getLogs():
                logname = l.getName()
                step['logs'].append({ 'link': req.childLink("steps/%s/logs/%s" %
                                           (urllib.quote(s.getName(), safe=''),
                                            urllib.quote(logname, safe=''))),
                                      'name': logname })
        # Build properties, with long values truncated for display.
        ps = cxt['properties'] = []
        for name, value, source in b.getProperties().asList():
            value = str(value)
            p = { 'name': name, 'value': value, 'source': source}
            if len(value) > 500:
                p['short_value'] = value[:500]
            ps.append(p)
        cxt['responsible_users'] = list(b.getResponsibleUsers())
        (start, end) = b.getTimes()
        cxt['start'] = time.ctime(start)
        if end:
            cxt['end'] = time.ctime(end)
            cxt['elapsed'] = util.formatInterval(end - start)
        else:
            # Still running: show elapsed time so far.
            now = util.now()
            cxt['elapsed'] = util.formatInterval(now - start)
        cxt['exactly'] = (ss.revision is not None) or b.getChanges()
        cxt['build_url'] = path_to_build(req, b)
        cxt['authz'] = self.getAuthz(req)
        template = req.site.buildbot_service.templates.get_template("build.html")
        return template.render(**cxt)
    def stop(self, req, auth_ok=False):
        """Handle the 'stop build' button; redirects back to the builder page.

        auth_ok=True skips the authorization check (caller already checked).
        """
        # check if this is allowed
        if not auth_ok:
            if not self.getAuthz(req).actionAllowed('stopBuild', req, self.build_status):
                return Redirect(path_to_authfail(req))
        b = self.build_status
        log.msg("web stopBuild of build %s:%s" % \
                    (b.getBuilder().getName(), b.getNumber()))
        name = req.args.get("username", ["<unknown>"])[0]
        comments = req.args.get("comments", ["<no reason specified>"])[0]
        # html-quote both the username and comments, just to be safe
        reason = ("The web-page 'stop build' button was pressed by "
                  "'%s': %s\n" % (html.escape(name), html.escape(comments)))
        c = interfaces.IControl(self.getBuildmaster(req))
        bldrc = c.getBuilder(self.build_status.getBuilder().getName())
        if bldrc:
            bldc = bldrc.getBuild(self.build_status.getNumber())
            if bldc:
                bldc.stopBuild(reason)
        # we're at http://localhost:8080/svn-hello/builds/5/stop?[args] and
        # we want to go to: http://localhost:8080/svn-hello
        r = Redirect(path_to_builder(req, self.build_status.getBuilder()))
        d = defer.Deferred()
        # Delay the redirect one second so the stop has a chance to register.
        reactor.callLater(1, d.callback, r)
        return DeferredResource(d)
    def rebuild(self, req):
        """Handle the 'rebuild' button: resubmit this build, then redirect."""
        # check auth
        if not self.getAuthz(req).actionAllowed('forceBuild', req, self.build_status.getBuilder()):
            return Redirect(path_to_authfail(req))
        # get a control object
        c = interfaces.IControl(self.getBuildmaster(req))
        bc = c.getBuilder(self.build_status.getBuilder().getName())
        b = self.build_status
        builder_name = b.getBuilder().getName()
        log.msg("web rebuild of build %s:%s" % (builder_name, b.getNumber()))
        name = req.args.get("username", ["<unknown>"])[0]
        comments = req.args.get("comments", ["<no reason specified>"])[0]
        reason = ("The web-page 'rebuild' button was pressed by "
                  "'%s': %s\n" % (name, comments))
        extraProperties = getAndCheckProperties(req)
        if not bc or not b.isFinished() or extraProperties is None:
            log.msg("could not rebuild: bc=%s, isFinished=%s"
                    % (bc, b.isFinished()))
            # TODO: indicate an error
        else:
            d = bc.rebuildBuild(b, reason, extraProperties)
            d.addErrback(log.err, "while rebuilding a build")
        # we're at
        # http://localhost:8080/builders/NAME/builds/5/rebuild?[args]
        # Where should we send them?
        #
        # Ideally it would be to the per-build page that they just started,
        # but we don't know the build number for it yet (besides, it might
        # have to wait for a current build to finish). The next-most
        # preferred place is somewhere that the user can see tangible
        # evidence of their build starting (or to see the reason that it
        # didn't start). This should be the Builder page.
        r = Redirect(path_to_builder(req, self.build_status.getBuilder()))
        d = defer.Deferred()
        reactor.callLater(1, d.callback, r)
        return DeferredResource(d)
    def getChild(self, path, req):
        """Dispatch sub-URLs: stop, rebuild, steps, and tests."""
        if path == "stop":
            return self.stop(req)
        if path == "rebuild":
            return self.rebuild(req)
        if path == "steps":
            return StepsResource(self.build_status)
        if path == "tests":
            return TestsResource(self.build_status)
        return HtmlResource.getChild(self, path, req)
# /builders/$builder/builds
class BuildsResource(HtmlResource):
    """Parent resource for one builder's builds (/builders/$builder/builds).

    Numeric child paths map to StatusResourceBuild pages for the
    corresponding build number.
    """
    addSlash = True

    def __init__(self, builder_status):
        HtmlResource.__init__(self)
        self.builder_status = builder_status

    def content(self, req, cxt):
        return "subpages shows data for each build"

    def getChild(self, path, req):
        # Only a path that parses as an integer can name a build.
        try:
            build_number = int(path)
        except ValueError:
            pass
        else:
            build_status = self.builder_status.getBuild(build_number)
            if build_status:
                return StatusResourceBuild(build_status)
        return HtmlResource.getChild(self, path, req)
|
UTF-8
|
Python
| false | false | 2,014 |
18,923,625,910,013 |
f2a4452a6b5c3f56d9fde8dc4ad23d7d67bbce67
|
e9283172ba7a5726291c39cf15de70adc34dc49f
|
/python/pandas/nyc-housing-master/lib/python/statistics/cost_functions.py
|
6998e51ba503208fd5149ab87f34b197bb3bbb84
|
[] |
no_license
|
elyerandio/TestScripts
|
https://github.com/elyerandio/TestScripts
|
123c969c9a5c14f2e73fe730029b94cf8f81f1ff
|
576a55c6a42d6fb0547b16d50c1686833f4eb7de
|
refs/heads/master
| 2021-01-10T20:35:52.577083 | 2014-02-20T07:41:10 | 2014-02-20T07:41:10 | 17,012,516 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
class CostFunction:
    """Collection of regression cost/error metrics over a predictions frame.

    NOTE(review): the metric methods add helper columns (e.g. 'residual',
    'error', 'mape') to self.df in place as a side effect — callers should
    not assume the frame is left untouched.
    """
    def __init__(self, df, y, yhat, group_by, min_group_by_n, std_ol=None, std_oh=None):
        """
        CostFunction class contains all relevant cost functions.
        df: Pandas DataFrame that has actual and predicted values
        y: Actual/Dependent variable
        yhat: Predicted variable
        group_by: If computing volume-weighted metrics, specify the group you'd like to subset by.
        min_group_by: if computing volume-weighted metrics, specify the minimum number of observations per group.
        std_ol: If you'd like to remove outliers from your predictions, specify the min standard deviation.
        std_oh: Max standard deviation for outlier removal
        """
        self.df = df
        self.y = y
        self.yhat = yhat
        self.group_by = group_by
        self.min_group_by_n = min_group_by_n
        self.std_ol = std_ol
        self.std_oh = std_oh
    def cf_rsquared(self):
        """
        Computes the R-Squared of the actual vs predicted values.

        NOTE(review): this returns SSR/SST (explained-variance form); the
        1 - SSE/SST form is commented out below. The two only agree for an
        unbiased least-squares fit — confirm which definition is intended.
        """
        df = self.df
        y_mean = self.df[self.y].mean()
        n = len(df)
        df['ss_tot'] = (df[self.y] - y_mean) ** 2
        ss_tot = df['ss_tot'].sum()
        df['ss_err'] = (df[self.y] - df[self.yhat]) ** 2
        ss_err = df['ss_err'].sum()
        ss_reg = ((df[self.yhat] - y_mean)**2).sum()
        #rsq = 1 - (ss_err / ss_tot)
        rsq = ss_reg / ss_tot
        return rsq
    def cf_mpe(self):
        """
        Computes the Mean Percent Error (MPE), optionally trimming outliers
        beyond std_ol/std_oh standard deviations of the residual.
        """
        df = self.df
        df['residual'] = (df[self.yhat] - df[self.y]) / df[self.y]
        if self.std_ol is not None and self.std_oh is not None:
            #Remove Outliers
            error_mean = df['residual'].describe()['mean']
            error_std = df['residual'].describe()['std']
            # self.df and df reference the same frame here, so this filter
            # is applied to the frame that just received the residual column.
            df = self.df[(df['residual'] < error_mean + error_std * self.std_oh) &
                         (df['residual'] > error_mean - error_std * self.std_ol)]
        mpe = df['residual'].mean()
        return mpe
    def cf_rmse(self):
        """
        Computes Root-Mean-Squared-Error (RMSE)
        df: pandas data frame
        y: dependent variable
        yhat: predicted variable
        std_ol: Outlier (max) of error
        std_oh: Outlier (min) of error
        """
        #Compute the error
        df = self.df
        if self.std_ol is not None and self.std_oh is not None:
            df['error'] = df[self.y] - df[self.yhat]
            mean_error = df['error'].describe()['mean']
            std_error = df['error'].describe()['std']
            df = df[(df['error'] < mean_error + std_error * self.std_oh) &
                    (df['error'] > mean_error - std_error * self.std_ol)]
        df['sq_error'] = (df[self.y] - df[self.yhat]) ** 2
        rmse = np.sqrt(df['sq_error'].describe()['mean'])
        return rmse
    def cf_mape(self):
        """
        Compute Mean Absolute Percent Error.
        Only the high-side outlier bound (std_oh) is applied, since the
        absolute residual is non-negative.
        """
        df = self.df
        df['abs_residual'] = np.abs((df[self.y] - df[self.yhat]) / df[self.y])
        if self.std_oh is not None:
            #Remove Outliers
            error_mean = df['abs_residual'].describe()['mean']
            error_std = df['abs_residual'].describe()['std']
            df = df[df['abs_residual'] < error_mean + error_std * self.std_oh]
        mape = df['abs_residual'].mean()
        return mape
    def cf_vw_mape(self):
        """
        Computes volume weighted MAPE: per-group mean APE weighted by group
        size, for groups with at least min_group_by_n observations.
        """
        df = self.df
        df['mape'] = np.abs((df[self.y] - df[self.yhat]) / df[self.y])
        if self.std_oh is not None:
            error_mean = df['mape'].describe()['mean']
            error_std = df['mape'].describe()['std']
            df = df[df['mape'] < error_mean + error_std * self.std_oh]
        df_grouped = df[[self.group_by, self.y, self.yhat, 'mape']].groupby(self.group_by).agg([np.mean, np.std, np.size])
        # Drop groups that are too small to give a stable mean.
        df_grouped = df_grouped[df_grouped['mape']['size'] >= self.min_group_by_n]
        df_grouped['mean_sum'] = df_grouped['mape']['size'] * df_grouped['mape']['mean']
        vw_mape = df_grouped.sum()['mean_sum'] / df_grouped.sum()['mape']['size']
        return vw_mape
    def cf_mpsd(self):
        """
        Computes Mean Percent Standard Deviation (std-dev of percent error).
        """
        df = self.df
        df['mpe'] = (df[self.yhat] - df[self.y]) / df[self.y]
        if self.std_ol is not None and self.std_oh is not None:
            #Remove Outliers
            error_mean = df['mpe'].describe()['mean']
            error_std = df['mpe'].describe()['std']
            df = df[(df['mpe'] < error_mean + error_std * self.std_oh) &
                    (df['mpe'] > error_mean - error_std * self.std_ol)]
        mpsd = df['mpe'].describe()['std']
        return mpsd
    def all_glm(self):
        """
        Returns all cost functions relevant to a GLM model, as a dict keyed
        by metric name.
        """
        rsq = self.cf_rsquared()
        mpe = self.cf_mpe()
        mape = self.cf_mape()
        vw_mape = self.cf_vw_mape()
        mpsd = self.cf_mpsd()
        rmse = self.cf_rmse()
        cost_functions = {'r_squared': rsq,
                          'mpe': mpe,
                          'mape': mape,
                          'vw_mape': vw_mape,
                          'mpsd': mpsd,
                          'rmse': rmse
                          }
        return cost_functions
|
UTF-8
|
Python
| false | false | 2,014 |
6,262,062,357,026 |
a616cdcfed33bfa586b77d6314dcb4bcd761fd6b
|
acf8177a1f1ee68825d14ba5e8b7003b54fe9d32
|
/chat/server/protocol/rest.py
|
4019072b66d923e35817c62e7b79b4b00d329ab1
|
[] |
no_license
|
yrunts/python-chat
|
https://github.com/yrunts/python-chat
|
fffe718bfd464fd95a802d4ffaf27706502a5efe
|
aaabf3152287d3a62e666febbf47171b57dd1659
|
refs/heads/master
| 2016-09-10T18:26:01.600831 | 2014-08-27T20:13:06 | 2014-08-27T20:13:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from chat.server.protocol import base
class RESTProtocol(base.Protocol):
    """REST protocol stub; inherits all behavior from base.Protocol for now."""
    pass
|
UTF-8
|
Python
| false | false | 2,014 |
5,437,428,642,929 |
52303934b6e4a343bbefdd84537ce9c4217bb9cb
|
2b1dd880d67a44ea43deb5a4a2a8462e0791c4e3
|
/setup.py
|
23f27f2f800d9edcd50ba9fa75f56a7804ca4012
|
[] |
no_license
|
marciopocebon/cuda-profiler-tools
|
https://github.com/marciopocebon/cuda-profiler-tools
|
7136d2119144becf96c02aa2d584b06e3fb41548
|
abc4388606fdfcd90543e516d69392e42037909b
|
refs/heads/master
| 2020-07-22T07:39:15.907388 | 2013-07-10T12:22:49 | 2013-07-10T12:22:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from distutils.core import setup
# Distutils packaging metadata: the cudaprof package (plus its GUI subpackage)
# lives under tools/cudaprof, and a single launcher script is installed.
setup(name='cudaprof',
      version='0.5.1',
      description='CUDA Profiler Tools',
      author='Javier Cabezas',
      author_email='[email protected]',
      url='https://code.google.com/p/cuda-profiler-tools/',
      packages=[ 'cudaprof', 'cudaprof.gui' ],
      package_dir={'cudaprof': 'tools/cudaprof'},
      scripts=['tools/cuda-profiler']
      )
# vim:set backspace=2 tabstop=4 shiftwidth=4 textwidth=120 foldmethod=marker expandtab:
|
UTF-8
|
Python
| false | false | 2,013 |
17,386,027,633,076 |
d87b1e731ded9d0000a489944efc65d370af972e
|
f301ff75ca798d6c740587e18d6a1753bd1776de
|
/models.py
|
eabdbb10a6a281995ac81667a010e14c33b29d63
|
[] |
no_license
|
galgeek/testdayserver
|
https://github.com/galgeek/testdayserver
|
fec7d6344d9846a8570eb486049248698951db22
|
12c9a105482c82dc9a4ef0c05b1dbf1279c385d0
|
refs/heads/master
| 2021-01-24T20:01:54.961725 | 2012-07-20T09:41:16 | 2012-07-20T09:41:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import db
class BotResults(db.Model):
    """App Engine entity: per-testday aggregate of IRC bot observations."""
    testday = db.StringProperty(required=True)  # which test day this row covers
    greetedName = db.StringListProperty()       # names the bot greeted
    greetedNumber = db.IntegerProperty()        # count of greeted users
    firebotBugs = db.StringListProperty()       # bug references seen from firebot
    usersTalks = db.StringListProperty()        # users who spoke during the day
class Speakers(db.Model):
    """App Engine entity: message count for one speaker, linked to BotResults."""
    botresults = db.ReferenceProperty()  # parent BotResults record
    speaker = db.StringProperty()        # speaker's nick
    spoke = db.IntegerProperty()         # number of messages spoken
|
UTF-8
|
Python
| false | false | 2,012 |
1,305,670,078,588 |
de4f6d2af4c09c15b599e43ecaaae8317f7e15ef
|
7b07ff022770053274ab13476b7495f7e791e0e7
|
/Credits.py
|
c35da68aa67e4ee553ea446bf705ac73eb27d462
|
[] |
no_license
|
Andy3189/PyGladiator
|
https://github.com/Andy3189/PyGladiator
|
01d3d05b5e96cb661e7457348ed1c40c755a294c
|
0b4395930c49bebf5c1dfc69fbc605927a4a44c7
|
refs/heads/master
| 2016-09-06T05:14:37.490695 | 2012-08-14T21:37:17 | 2012-08-14T21:37:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import time
# Scrolling ASCII-art credits screen: each line is printed and followed by a
# short delay to produce a line-by-line reveal effect.
print()
print()
print()
print()
print()
print()
print()
print()
# Logo banner ("PyGladiator") followed by the credits box.
print(' --------------------------------------------------------- ')
time.sleep(0.16)
print("│ _____ _____ _ _ _ _ │")
time.sleep(0.16)
print("│ | __ \ / ____| | | (_) | | │")
time.sleep(0.16)
print("│ | |__) | _| | __| | __ _ __| |_ __ _| |_ ___ _ __ │")
time.sleep(0.16)
print("│ | ___/ | | | | |_ | |/ _` |/ _` | |/ _` | __/ _ \| '__|│")
time.sleep(0.16)
print("│ | | | |_| | |__| | | (_| | (_| | | (_| | || (_) | | │")
time.sleep(0.16)
print("│ |_| \__, |\_____|_|\__,_|\__,_|_|\__,_|\__\___/|_| │")
time.sleep(0.16)
print("│ __/ | │")
time.sleep(0.16)
print("│ |___/ │")
time.sleep(0.16)
print('│---------------------------------------------------------│')
time.sleep(0.16)
print('│ ---------------------------- │')
time.sleep(0.16)
print('│ Credits │')
time.sleep(0.16)
print('│ ---------------------------- │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ --------------------------- │')
time.sleep(0.16)
print('│ Main Coding │')
time.sleep(0.16)
print('│ --------------------------- │')
time.sleep(0.16)
print('│ Andy │')
time.sleep(0.16)
print('│ Swum │')
time.sleep(0.16)
print('│ Jonathan │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ --------------------------- │')
time.sleep(0.16)
print('│ Art&Animations │')
time.sleep(0.16)
print('│ --------------------------- │')
time.sleep(0.16)
print('│ Andy │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ --------------------------- │')
time.sleep(0.16)
print('│ Special Thanks to │')
time.sleep(0.16)
print('│ --------------------------- │')
time.sleep(0.16)
print('│ Tom W. │')
time.sleep(0.16)
print('│ All the testers │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
# Second copy of the logo banner, then the closing copyright box.
print(' --------------------------------------------------------- ')
time.sleep(0.16)
print("│ _____ _____ _ _ _ _ │")
time.sleep(0.16)
print("│ | __ \ / ____| | | (_) | | │")
time.sleep(0.16)
print("│ | |__) | _| | __| | __ _ __| |_ __ _| |_ ___ _ __ │")
time.sleep(0.16)
print("│ | ___/ | | | | |_ | |/ _` |/ _` | |/ _` | __/ _ \| '__|│")
time.sleep(0.16)
print("│ | | | |_| | |__| | | (_| | (_| | | (_| | || (_) | | │")
time.sleep(0.16)
print("│ |_| \__, |\_____|_|\__,_|\__,_|_|\__,_|\__\___/|_| │")
time.sleep(0.16)
print("│ __/ | │")
time.sleep(0.16)
print("│ |___/ │")
time.sleep(0.16)
print('│---------------------------------------------------------│')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print('│ PyPet 2012 All rights reserved │')
time.sleep(0.16)
print('│ │')
time.sleep(0.16)
print(' -------------------------------------------- ')
time.sleep(0.16)
|
UTF-8
|
Python
| false | false | 2,012 |
7,395,933,699,364 |
43a3449a2567a361073d1dcc86e0d55b019e92e6
|
5283e906febc4e42c0b0d0455de93d8df4f68108
|
/srv/src/sipaiSampleServer_old.py
|
c454b117ae163b871fee93c597d4053fbbf0f5d2
|
[
"Apache-2.0"
] |
permissive
|
zepheir/pySrv_sipai
|
https://github.com/zepheir/pySrv_sipai
|
2fc61f25b8cac55960e4a926ffeadc3c3e917e93
|
dd013d2211bf97ff296c881fa00e50fa38464e87
|
refs/heads/master
| 2020-04-10T19:18:34.504467 | 2014-10-12T16:47:04 | 2014-10-12T16:47:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2012-2-5
@author: zepheir
'''
#import pymongo
#from pymongo import Connection
from binascii import b2a_hex
from twisted.internet import reactor
from twisted.python import log
from twisted.application import service
from zhyDB import ZhyDB
import Zoro
from ussop import sipai as Sipai
import time
import config
from config import *
# ECHO = True
# DEBUG = True
# class SipaiDB(ZhyDB):
# ''''''
# def __init__(self):
# if ECHO: print " ** Start Initialization"
# # if DEBUG:ZhyDB.__init__(self, ip='127.0.0.1')
# ZhyDB.__init__(self, ip=config.serverIP)
# # # setup the conneciton of the mongodb
# # try:
# # conn = Connection('127.0.0.1', 27017)
# # except:
# # print 'db connection error!'
# # conn = None
# #
# # if conn != None:
# # # print the dbs in the mongodb
# # print conn.database_names()
# #
# # # create database object from connecting database name
# # self.db = conn[u'hyegdb']
# self.loadMods()
# records = self.db.results.find()
# self.record = records[0]
# if DEBUG: print self.collectionNames()
# if ECHO: print " ** End Initialization"
# #----------------------------------------------------------
# # Sipai Module functions
# #----------------------------------------------------------
# # def readSipaiMods(self):
# # ''' Read Sipai Modules '''
# # if self.db:
# # mods = list(self.db.devices.find({"manufacturer":IDFSipai}))
# # if DEBUG: print dbgIDF+"SipaiModules:",mods
# # return mods
# # else:
# # if DEBUG: print dbgIDF+"SipaiModules:",False
# # return False
# # def readSipaiVars(self):
# # ''' Read Sipai Module Vars '''
# # if self.db:
# # _vars = list(self.db.vars.find({"manufacturer":IDFSipai}))
# # if DEBUG: print dbgIDF+"SipaiVars:",_vars
# # return _vars
# # else:
# # if DEBUG: print dbgIDF+"SipaiVars:",False
# # return False
# def loadMods(self):
# # mods = self.db.SipaiMods.find()
# mods = self.readSipaiMods()
# self.modDict = {}
# for mod in mods:
# _ip = mod['ip']
# _port = mod['port']
# _addr = mod['addr']
# _type = mod['type']
# try:
# self.modDict[_ip, _port].append((_type, _addr))
# except:
# self.modDict[_ip, _port] = [(_type, _addr), ]
# # if DEBUG:
# # self.modDict = {#(u'192.168.192.100', u'6020'): [(u'1040', u'5')],
# # (u'192.168.192.100', u'6021'): [(u'1001', u'4'), (u'1030', u'7'), (u'1020', u'8')],
# # }
# # else:
# # for mod in mods:
# # ip = mod['IP']
# # port = mod['PORT']
# # addr = mod['ADDRESS']
# # type = mod['MODTYPE']
# # try:
# # self.modDict[ip, port].append((type, addr))
# # except:
# # self.modDict[ip, port] = [(type, addr), ]
# self.devices = self.modDict.keys()
# if DEBUG: print dbgIDF+"modules:",self.modDict
def ReceiveData(*data):
    """Callback handed to the Modbus factory; invoked with received frames.

    Currently only dumps the raw arguments when DEBUG is on — the parsing
    and database-save logic below is commented out.
    """
    if DEBUG: print 'print data----------------', data
# Pick the module for this (host, port) pair
# mod = modules[(data[0], data[1])]
# deal with data
# result = mod.dealdata(data[2])
# print result
# # get string data
# # save to mongodb
# key = str(data[1][0].split('.')[-1]) + '-' + str(data[1][1]) + '-' + str(data[0])
# # records = ZDB.db.temp.find()
# # for record in records:
# # for ch, _result in enumerate(result):
# # _key = key + '-' + str(ch)
# # record[_key] = _result
# # ZDB.db.temp.save(record)
# for ch, _result in enumerate(result):
# _key = key + '-' + str(ch)
# ZDB.record[_key] = _result
# # ZDB.db.temp.save(ZDB.record)
# Module-level constants / shared state
# ZDB = SipaiDB()
zhy = ZhyDB()  # database facade used to look up the configured Sipai modules
#ZDB.loadMods()
# devices = ZDB.devices
# devices = ZDB.readSipaiMods()
#result = ZDB.readResults()
SipaiModsDict = zhy.listSipaiMods(None)  # {(ip, port): [module info, ...]}
factoryDict = {}  # currently unused; kept from the multi-gateway variant
modules = {}      # currently unused; kept from the multi-gateway variant
# alltimes = 1 * 60 / 3 - 1 # 10 minutes * 60 seconds / 3 seconds
# global nowtimes
nowtimes = 1  # poll-cycle counter (only used by the commented-out update())
# def update():
# # global nowtimes
# if ECHO: print " -- Now Times: %04d --"%nowtimes
# t1 = time.clock()
# # ZDB.db.temp.save(ZDB.record)
# factorys = factoryDict.keys()
# # for factory in factorys:
# # for i, mod in enumerate(factoryDict[factory]):
# # _cmd = mod.readdata()
# # reactor.callLater(factory.spendtime * i, factory.protocol.SendCmd, _cmd)
# t2 = time.clock()
# print t2 - t1
# nowtimes+=1
# reactor.callLater(3, update)
# # if nowtimes < alltimes:
# # reactor.callLater(3, update)
# # nowtimes += 1
# # else:
# # reactor.stop()
# reactor.callLater(0, reactor.stop)
def main():
    ''' Initialization: connect to the Modbus gateway and start polling. '''
    # Initial states
    # print devices
    # Hard-coded single gateway; the loop over SipaiModsDict below is the
    # multi-gateway variant, currently commented out.
    factory = Zoro.SetupModbusConnect('130.139.200.50', int(6021), ReceiveData,
                                      reConnectMode=False)
    factory.modules += SipaiModsDict['130.139.200.50','6021']
    factory.spendtime = 0.2
    # for sds in SipaiModsDict:
    #     factoryDict[sds[0],sds[1]] = Zoro.SetupModbusConnect(sds[0], int(sds[1]), ReceiveData,
    #                                                          reConnectMode=False)
    #     factoryDict[sds[0],sds[1]].modules += SipaiModsDict[sds[0],sds[1]]
    #     factoryDict[sds[0],sds[1]].spendtime = 0.2
    #     if DEBUG: print "===> factory modules: ", factoryDict[sds[0],sds[1]].modules
    # for dev in SipaiModsDict:
    #     # print dev
    #     # factory = Zoro.SetupModbusConnect(dev[0], int(dev[1]), ReceiveData,
    #     #                                   reConnectMode=False)
    #     factoryDict[factory] = []
    #     # for mod in ZDB.modDict[dev]:
    #     #     _mod = Sipai.createspm(None, type=mod[0], address=mod[1])
    #     #     factoryDict[factory].append(_mod)
    #     #     # define modules
    #     #     modules[int(mod[1]), (dev[0], int(mod[1]))] = _mod
    # print factoryDict
    # print modules
    # Main loop
    if ECHO: print " -- Main loop running! --"
    def upd():
        # Poll one module per call, rescheduling itself until the module
        # list is drained; then disconnect and restart the whole cycle
        # by scheduling main() again.
        print 'in upd program:',factory
        if len(factory.modules)>0:
            modinfo = factory.modules.pop(0)
            # modinfo = factory.modules[0]
            # mod = Sipai.SIPAIModule(address=modinfo['addr'])
            mod = Sipai.createspm(type=modinfo['type'], address=modinfo['addr'])
            _cmd = mod.cmd(mod.CMD_READDATA)
            if DEBUG: print "===> Output command:",b2a_hex(_cmd)
            reactor.callLater(0, factory.protocol.SendCmd, _cmd)
            reactor.callLater(factory.spendtime,upd)
        else:
            # _host,_port = factory.connection.host, str(factory.connection.port)
            reactor.callLater(factory.spendtime,factory.connection.disconnect)
            # factory.modules += SipaiModsDict[_host, _port]
            # print factory
            reactor.callLater(factory.spendtime+1, main)
            # reactor.callLater(factory.spendtime+1,upd)
    # def update(_factory):
    #     if len(_factory.modules)>0:
    #         modinfo = _factory.modules.pop(0)
    #         # modinfo = _factory.modules[0]
    #         # mod = Sipai.SIPAIModule(address=modinfo['addr'])
    #         mod = Sipai.createspm(type=modinfo['type'], address=modinfo['addr'])
    #         _cmd = mod.cmd(mod.CMD_READDATA)
    #         if DEBUG: print "===> Output command:",b2a_hex(_cmd)
    #         # reactor.callLater(0, _factory.protocol.SendCmd, _cmd)
    #         reactor.callLater(_factory.spendtime,update,_factory)
    #     else:
    #         _host,_port = _factory.connection.host, int(_factory.connection.port)
    #         reactor.callLater(_factory.spendtime,_factory.connection.disconnect)
    #         reactor.callLater(_factory.spendtime+1,reload, _host,_port)
    #         # reactor.callLater(_factory.spendtime+1, main)
    # for key in factoryDict:
    #     reactor.callLater(1,update, factoryDict[key])
    reactor.callLater(1, upd)
# reactor.run()
if __name__ == '__main__':
    # Standalone execution: schedule the polling and run the reactor until
    # it is stopped externally.
    import sys
    main()
    reactor.run()
    print 'reactor stopped!'
    sys.exit(1)
elif __name__ =="__builtin__":
    # Loaded by twistd as a .tac file — presumably __builtin__ is the module
    # name under that loader (TODO confirm); build the Application object
    # instead of running the reactor ourselves.
    import sys
    main()
    application = service.Application("SIPAI")
|
UTF-8
|
Python
| false | false | 2,014 |
9,285,719,336,965 |
8a9a051b581dcc956ca6acf03c3d4abd2ce81f5c
|
c4bead2f19440f44171e2b5e7cf044f75160466b
|
/gittkt/GitTktCache.py
|
67d20577d49189baa2b61ad284d3fadac0cbcc2d
|
[
"GPL-3.0-only"
] |
non_permissive
|
tstone2077/git-tkt
|
https://github.com/tstone2077/git-tkt
|
bc77723a6856dad7d0a88d52d4ece858adc897df
|
2b748655a023aa4cd27df7ce2ff2ddac49351084
|
refs/heads/master
| 2016-09-05T23:44:12.031909 | 2013-04-23T04:14:15 | 2013-04-23T04:14:15 | 9,404,710 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from GitTktFolder import GitTktFolder
import sys
GITTKT_DEFAULT_FOLDER = 'active'
class GitTktCache(object):
    """Cache of GitTktFolder objects, keyed by folder name.

    Bug fix: the original populated a *class-level* dict, so every
    GitTktCache instance shared (and overwrote) the same folder cache.
    The cache is now a per-instance attribute; the empty class attribute
    is kept only for backward compatibility with any class-level access.
    """
    gitTktFolders = {}

    def __init__(self, folders, fields, branch, outstream=None):
        """Build a GitTktFolder for each requested folder.

        folders: iterable of folder names, or None for the default folder.
        fields/branch: passed through to each GitTktFolder.
        outstream: stream for folder output; defaults to sys.stdout.
        """
        if outstream is None:
            outstream = sys.stdout
        if folders is None:
            folders = [GITTKT_DEFAULT_FOLDER]
        # Per-instance cache shadows the shared class attribute.
        self.gitTktFolders = {}
        for folder in folders:
            self.gitTktFolders[folder] = GitTktFolder(folder, fields, branch,
                                                      outstream)

    def Add(self, ticketData, folder=None):
        """Add ticketData to the named folder (default: GITTKT_DEFAULT_FOLDER)."""
        if folder is None:
            folder = GITTKT_DEFAULT_FOLDER
        return self.gitTktFolders[folder].Add(ticketData)
|
UTF-8
|
Python
| false | false | 2,013 |
11,081,015,640,077 |
612d0c08e4a8b27e88fd2302d3f44b692ccc779b
|
5cf1f9ead167d43ea4bd4c2b10765f9ae9fae53b
|
/kv_tests/NameScreen.py
|
ccf306c1bcd894d1af501dd505de7ddf54cb285a
|
[] |
no_license
|
ericgarbar/KivyClient_MotorSensorControlServer
|
https://github.com/ericgarbar/KivyClient_MotorSensorControlServer
|
24bec0c65e0a3a7f5ffaf0dae5aaa55a71237ad9
|
e0a219627beb4edac3157443f4d149faac441665
|
refs/heads/master
| 2021-11-24T20:02:31.027513 | 2014-11-28T17:37:39 | 2014-11-28T17:37:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Eric'
from kivy.app import App
from kivy.uix.screenmanager import Screen
from kivy.properties import ObjectProperty
from kivy.uix.widget import Widget
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.button import Button
class NameScreen(RelativeLayout):
    """Root layout for the name-entry screen (widget tree defined in kv)."""
    # Bound from the kv rule — presumably the widget holding the entered
    # name; verify against the .kv file.
    new_name = ObjectProperty(None)
    def set_name(self, new_name):
        """Handler stub for the entered name; currently only a debug print."""
        print 'hi'
class NameScreenApp(App):
    """Kivy application wrapper; uses NameScreen as the root widget."""
    def build(self):
        return NameScreen()
NameScreenApp().run()
|
UTF-8
|
Python
| false | false | 2,014 |
3,513,283,260,192 |
354767c2a414370485206c1f84de8f53013f9744
|
10ac17df6b003c8fd0b3cf153903c960b8a0b3c6
|
/cummplotter.py
|
67e2895c8a1f8414491ecc7f8a52560b79291d2d
|
[] |
no_license
|
nippoo/astroimageprocessing
|
https://github.com/nippoo/astroimageprocessing
|
9b732e63ddfce24cabe0f220fb18796762a47e0e
|
16c1a154a743e3500a976a152305c2cbda46cf09
|
refs/heads/master
| 2016-09-06T02:09:07.914230 | 2014-03-28T12:06:36 | 2014-03-28T12:06:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
from starprocessor import StarProcessor
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.path as path
import itertools
import csv
# Loads the galaxy catalogue, builds a cumulative flux histogram with x/y
# errors, writes the data (linear and log10) to CSV, and plots it.
s = StarProcessor()
stars = np.load("catalogue.npy")
#print stars
# Column 0: flux; column 1: flux error.
fluxlist = np.array([[i['flux'], i['fluxerror']] for i in stars])
b = 80 #specifies the number of bins for the histogram
values, base = np.histogram(fluxlist[:,0], bins=b)
fluxlist = np.sort(fluxlist,axis=1)
# print np.where(fluxlist[:,0] == 0, fluxlist[:,0], fluxlist[:,1])
# x error per bin: std-dev of fluxes whose *error* falls inside the bin edges.
x_error = []
for first, second in itertools.izip(base, base[1:]):
    x_error.append(np.std([t[0] for t in fluxlist if ((t[1] >= first) & (t[1] < second))]))
#fluxerror = np.array([[np.where(fluxlist[0] > i[0])] for i in base])
#[t for t in fluxlist if (fluxlist[0] > base[0][0])]
print x_error
cumulative = np.cumsum(values)
#x_error
# Poisson (sqrt-N) counting error on the cumulative counts.
y_error = cumulative**0.5
cumulative_log=np.log10(cumulative)
# Error propagation for log10: d(log10 N) = dN / (N ln 10); the factor of 2
# here halves that — NOTE(review): confirm the intended convention.
y_error_log=y_error*(2*cumulative*np.log(10))**-1
with open('cumplot_data_inc_errors.csv', 'wb') as csvfile:
    spamwriter = csv.writer(csvfile, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)
    spamwriter.writerow(['base','x_error','cumulative','y_error','log10(N)','y_error_log'])
    for i in range(len(base)-1):
        spamwriter.writerow([base[i],x_error[i], cumulative[i], y_error[i], cumulative_log[i], y_error_log[i]])
#print cumulative
#print y_error
plt.errorbar(base[:-1], cumulative,y_error,x_error,c='blue')
plt.semilogy()
plt.show()
|
UTF-8
|
Python
| false | false | 2,014 |
14,173,392,087,983 |
a5a974ad2e8091422ba9be334612c8b217cb5bda
|
b78c4371c79e5f1074c185aed56b3d627c0f16cf
|
/Oving8/odd.py
|
3d0ed23443e0764bcae86fc740eb6105328ef7b4
|
[] |
no_license
|
aydex/TDT4120-AlgDat
|
https://github.com/aydex/TDT4120-AlgDat
|
813f2f818377f4bc2610942fbc5406025be90a23
|
830315c874d2081eeefe520bf11ddba5cbf3388c
|
refs/heads/master
| 2021-01-23T13:45:06.791158 | 2014-11-07T13:39:38 | 2014-11-07T13:39:38 | 23,576,537 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from sys import stdin, stderr
def he(A, i, j):
    """Sift A[i] down so the subtree rooted at i satisfies the max-heap
    property, considering only the first j elements of A.  Mutates A in
    place; children of i are 2*i+1 and 2*i+2."""
    while True:
        largest = i
        left = 2 * i + 1
        right = 2 * i + 2
        # Pick the largest of the node and its (in-range) children.
        if left < j and A[left] > A[largest]:
            largest = left
        if right < j and A[right] > A[largest]:
            largest = right
        if largest == i:
            # Heap property holds here; nothing more to sift.
            return
        A[i], A[largest] = A[largest], A[i]
        i = largest
def dj(d, p, sans, n):
    """Dijkstra-like best-path search maximising the product of per-node
    probabilities, using a hand-rolled binary max-heap.  Verbose: prints
    its state on every relaxation (debug build).

    d    -- adjacency dict: node index -> list of successor indices.
    p    -- dict: node index -> accumulated path list; mutated in place.
    sans -- per-node probabilities (floats).
    n    -- number of nodes.

    Returns p[n-1], the best path found to the last node.
    """
    val = [0]*(n+1)        # best value reached per node so far
    val[0] = 1             # source node starts with probability 1
    h = [(sans[0], 0)]     # heap of (value, node) tuples
    j = 1                  # logical heap size
    s = set([])
    s.add(0)               # visited set (only ever added to)
    while j > 0:
        # Pop the max element: move last to root, shrink, sift down.
        u = h[0]
        h[0] = h[j-1]
        j -= 1
        h.pop()
        he(h, 0, j)
        for g in d[u[1]]:
            b = 0          # flag: did this edge improve val[g]?
            print "true"
            print h
            print "Val: " + str(val)
            print "Node " + str(u[1]) + ": " + str(d[u[1]])
            print "Edge:" + str(u)
            print val[u[1]],
            print " * ",
            print sans[u[1]],
            print "(" + str(g) + ")"
            # Candidate value for g via u.
            new = val[u[1]]*sans[u[1]]
            print new,
            print " - ",
            print val[g]
            if new > val[g]:
                val[g] = new
                b = 1
            print h
            print "----------"
            if b:
                # Record the improved path ('-' is a separator token)
                # and push g onto the heap with a manual sift-up.
                p[g] = list(p[u[1]]) + ['-'] + [g]
                j += 1
                h.append((val[g], g))
                i = j - 1
                k = (val[g], g)
                # NOTE(review): this sift-up looks inconsistent with he(),
                # which uses children 2i+1/2i+2 (so the parent should be
                # (i-1)//2, not i/2); also `k > 0` compares a tuple to an
                # int (always True under Python 2 mixed-type ordering) --
                # presumably `i > 0` was intended.  Verify before reuse.
                while k > 0 and h[i/2] < h[i]:
                    h[i], h[i/2] = h[i/2], h[i]
                    i = i/2
                s.add(g)
    print val
    return p[n-1]
def ma():
    """Read a problem instance from stdin and return the answer string.

    Input format: first line node count n, second line n probabilities,
    then one adjacency line (successor indices) per node.  Runs dj() and
    concatenates the resulting path elements into a single string.
    """
    re = stdin.readline            # NOTE(review): shadows the `re` module name
    n = int(re())
    sans = map(float, re().split())
    nm = []                        # NOTE(review): unused
    i = 0
    d = {}                         # adjacency: node -> successor list
    p = {}                         # per-node path accumulator for dj()
    for linje in stdin:
        d[i] = map(int, linje.split())
        p[i] = []
        i += 1
    p[0] = [0]                     # path to the source is just [0]
    w = dj(d, p, sans, n)
    s = ''
    for v in w:
        s += str(v)
    if not w:
        # Empty path: emit "0" so the output is never blank.
        s += str(0)
    return s
# Script entry: solve the instance on stdin and print the result.
print ma()
|
UTF-8
|
Python
| false | false | 2,014 |
19,121,194,421,128 |
05c167ea8a7735b400dd3bfb8e93cb747966dac4
|
7d9f92fba6af53bd385e0b4173134241c9998fff
|
/global_presence/admin.py
|
2697d45f6857a40be5d06783bfde3ce57be63ee0
|
[] |
no_license
|
ljarufe/intifil
|
https://github.com/ljarufe/intifil
|
856f77c6ece7f444fd331a3eff3c35260201f78f
|
d478a8a1309d526a2508ca7b559e16de03aaa384
|
refs/heads/master
| 2021-01-02T09:09:13.613026 | 2013-10-21T17:00:03 | 2013-10-21T17:00:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
""" global_precence's admin """
from django.contrib import admin
from common.admin import BasePermissionAdmin, BasePermissionTabularAdmin
from .models import Item, Country, Team
from .forms import ItemAdminForm
class CountryInline(BasePermissionTabularAdmin):
    """ Country inline model admin: lets related Country rows be edited
    inline on the parent (Item) admin change page. """
    model = Country
class ItemAdmin(BasePermissionAdmin):
    """ Item model admin """
    # Change-list columns; `order` is editable directly in the list.
    list_display = ('__unicode__', 'team_name', 'category', 'order')
    list_display_links = ('__unicode__', 'team_name', 'category')
    list_editable = ('order', )
    form = ItemAdminForm
    # Countries belonging to the item are edited inline.
    inlines = [ CountryInline ]
class TeamAdmin(BasePermissionAdmin):
    """ Team model admin """
    # changing order of the fields
    fields = (
        'name', 'address_line_1', 'address_line_2','address_line_3',
        'address_line_4', 'phone', 'email', 'image', 'country'
    )
    # for some reason the change of the order on the form is not working
    # maybe due to the tool used to show the django admin
    # form = TeamAdminForm
    list_display = ('__unicode__', 'country')
    # Every list column links to the change page.
    list_display_links = list_display
# Register both admins with the Django admin site (Country has no
# standalone admin; it is edited inline on Item).
admin.site.register(Item, ItemAdmin)
admin.site.register(Team, TeamAdmin)
|
UTF-8
|
Python
| false | false | 2,013 |
12,713,103,199,979 |
4b2ac897739894d427e6a38d0421a4d3b80a84ea
|
03167e589311de414624fae2361873c8ce3ebdc3
|
/rest_json_helper.py
|
b72ccc9acbc0640e410d65ff6fdcd16e2292324f
|
[
"BSD-3-Clause"
] |
permissive
|
vpyshnenko/xbee_temp_sensor
|
https://github.com/vpyshnenko/xbee_temp_sensor
|
6ebb100ce67a893e5b4d758fc3cf8b6290d30ce7
|
1b1dd275687c2aea2f22a4feb9db5f87a18ad598
|
refs/heads/master
| 2020-04-04T19:38:07.082207 | 2014-02-06T18:34:58 | 2014-02-06T18:34:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Simple helper function to do HTTP request to give URL and parse response
as a JSON document.
The main reason for this module is to isloate code working with urllib2.
In python 2.7 there is a connection leak in urllib2 which could cause
some long-term running REST API pollers to stop working.
See https://github.com/vzaliva/xbee_temp_sensor/issues/1 for details.
"""
import urllib2
import json
import subprocess
USE_URLLIB2 = False
def json_GET(endpoint, timeout):
    """HTTP-GET `endpoint` and return the response parsed as JSON.

    endpoint -- URL to fetch.
    timeout  -- timeout in seconds (read timeout for urllib2, connect
                timeout for the curl fallback).

    Uses curl via subprocess by default (USE_URLLIB2 is False) to work
    around the urllib2 connection leak described in the module docstring.
    Raises on transport failure (urllib2.URLError /
    subprocess.CalledProcessError) and ValueError on malformed JSON.
    """
    if USE_URLLIB2:
        # BUG FIX: `body` was an undefined name here (NameError as soon as
        # this path ran); pass None as the data argument so urlopen issues
        # a plain GET with the given timeout.
        f = urllib2.urlopen(endpoint, None, timeout)
        try:
            json_string = f.read()
        finally:
            f.close()
    else:
        # BUG FIX: curl's connect-timeout option is the long option
        # "--connect-timeout <seconds>"; the original "-connect-timeout=%d"
        # would be parsed as a bundle of unknown single-letter flags.
        json_string = subprocess.check_output(
            ["curl", "-s", "--connect-timeout", "%d" % timeout, endpoint])
    return json.loads(json_string)
|
UTF-8
|
Python
| false | false | 2,014 |
11,287,174,103,520 |
5849e7cc42a61afba188b39f2cbb2d5e2030d528
|
34b31e6de0366f1af4510b8a446e0fa57a598057
|
/plugins/amires/src/amires/modules/common_render.py
|
816d5acde21c80d7cf7ab0560a61f0c2179759dc
|
[
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-python-cwi",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"GPL-2.0-or-later",
"LGPL-2.1-or-later",
"HPND",
"LicenseRef-scancode-other-copyleft",
"OpenSSL",
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.1-only",
"LicenseRef-scancode-llnl",
"BSD-4-Clause",
"MIT",
"LicenseRef-scancode-other-permissive",
"Python-2.0",
"LicenseRef-scancode-mit-veillard-variant",
"LicenseRef-scancode-x11-xconsortium-veillard",
"LicenseRef-scancode-public-domain-disclaimer",
"LicenseRef-scancode-secret-labs-2011",
"GPL-2.0-only",
"ZPL-2.1",
"Apache-2.0",
"LGPL-2.0-only",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown",
"BSD-2-Clause"
] |
non_permissive
|
mehulsbhatt/clacks
|
https://github.com/mehulsbhatt/clacks
|
8b740fef4f6ca6df0cfcc9ad00a47e1e71436422
|
da579f0acc4e48cf2e9451417ac6792282cf7ab6
|
refs/heads/master
| 2018-05-30T00:13:14.012349 | 2013-03-14T14:36:19 | 2013-03-14T14:36:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This file is part of the clacks framework.
#
# http://clacks-project.org
#
# Copyright:
# (C) 2010-2012 GONICUS GmbH, Germany, http://www.gonicus.de
#
# License:
# GPL-2: http://www.gnu.org/licenses/gpl-2.0.html
#
# See the LICENSE file in the project's top-level directory for details.
import cgi
import pkg_resources
import gettext
from amires.render import BaseRenderer, mr
from clacks.common import Environment
# Set locale domain
# Load the package's translation catalogue from its bundled locale dir.
# fallback=False makes a missing catalogue raise instead of silently
# using untranslated strings.  `_` is the conventional gettext shortcut.
t = gettext.translation('messages', pkg_resources.resource_filename("amires", "locale"),
    fallback=False)
_ = t.ugettext
class CommonRenderer(BaseRenderer):

    # Renderer ordering hint used by the framework.
    priority = 1

    def __init__(self):
        pass

    def getHTML(self, info, selfInfo, event):
        """Render the "Attendee" block for a call event as HTML.

        info     -- dict with 'company_name'/'contact_name' and optional
                    '*_detail_url' keys; names/URLs are HTML-escaped and
                    passed through mr() before use.
        selfInfo -- forwarded to the base class unchanged.
        event    -- call event dict; an optional 'Duration' (seconds)
                    is rendered as minutes'seconds".
        """
        super(CommonRenderer, self).getHTML(info, selfInfo, event)

        # build html for company name
        comp = u""
        if info['company_name']:
            if 'company_detail_url' in info and info['company_detail_url']:
                comp += "<a href='%s'>%s</a>" % (
                    cgi.escape(mr(info['company_detail_url'])),
                    cgi.escape(mr(info['company_name'])))
            else:
                comp += cgi.escape(mr(info['company_name']))

        # build html for contact name
        cont = u""
        if info['contact_name']:
            if 'contact_detail_url' in info and info['contact_detail_url']:
                cont += "<a href='%s'>%s</a>" %(
                    cgi.escape(mr(info['contact_detail_url'])),
                    cgi.escape(mr(info['contact_name'])))
            else:
                cont += cgi.escape(mr(info['contact_name']))

        # build actual html section: "Contact (Company)" when both exist,
        # otherwise whichever one is available.
        html = u"<b>%s:</b>\n" % cgi.escape(_("Attendee"))
        if cont:
            html += cont
            if comp:
                html += " (" + comp + ")"
        elif comp:
            html += comp

        if 'Duration' in event:
            duration = int(float(event['Duration']))
            html += "\n\n<b>%s:</b> " % cgi.escape(_("Duration"))
            html += "%d\'%02d\"\n" % (duration / 60, duration % 60)

        return html
|
UTF-8
|
Python
| false | false | 2,013 |
13,228,499,287,054 |
189eac13f7d0683c997dc212e866aac7e48ab657
|
d54cfca77bf8f5c6d5b13a14fce1ab811b27fdac
|
/feedReader.py
|
15b18b5efdd084d73d3970ba1bf0786498c67191
|
[] |
no_license
|
alabid/Python-Feed-Reader
|
https://github.com/alabid/Python-Feed-Reader
|
5d167dd2ce44c214986f2b5092c1c9e55193e641
|
db221b9c6579b711b991eed64866ed123ed63ef0
|
refs/heads/master
| 2020-05-17T18:36:16.646972 | 2012-03-02T08:17:42 | 2012-03-02T08:17:42 | 3,600,366 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
feedReader.py
By Daniel Alabi and Michael Domingues
A basic RSS feed reader.
Usage: python feedReader.py feeds [options]
Requires the feedparser module.
'feeds' is a mandatory text file containing a list of feed URLs,
and the options are as follows:
--date Display the story links in order of date posted, from
newest to oldest (not grouped by feed).
--alpha Display the story links ordered by feed, with the feeds
ordered alphabetically.
--n NUM Display the latest NUM story links in each feed, where
NUM is an integer value.
--since DATE Display the story links in each feed published on or
after the specified date. DATE must be in the form
YYYY-MM-DD.
--title REGEX Display the stories whose titles match the regular
expression REGEX.
--description {on|off} Display the description associated with a story.
Defaults to off.
--newest Display only the stories that have been posted or
updated since the last time this program was run.
By default, this program displays items grouped by feed, in the order
that the feeds were listed in the input file.
Note that multiple command line options can be triggered; they will be
processed and displayed sequentially. For example, if you were to run:
'python feedReader.py feedsList.txt --date --alpha'
the program would first present the date view, followed by the alpha view.
For the purposes of this program, our dummy time is 1000-01-01 00:00:00.
For more information, see README.txt
"""
import feedparser
class Controller(object):
    """
    The Controller class is the center of execution of commands and views.
    Constructing it runs the whole program: it loads the feed list, parses
    every feed, then renders the views selected on the command line.
    """
    def __init__(self, options, args):
        # options is a 'Values' object, the
        # output from the optparser.
        self.options = options
        self.args = args
        self.feeds = []
        self.lastSeenTime = None
        self.setLastSeenTime()
        # Begin execution of the program
        self.run()

    def setLastSeenTime(self):
        """
        Sets the instance variable self.lastSeenTime to the last time
        this program was run. If this is the first run of the program or
        the data file lastSeen.txt does not exist / was deleted, sets
        self.lastSeenTime to our standard dummy time.
        """
        import glob
        from time import gmtime, strftime
        dateFile = "lastSeen.txt"
        if not glob.glob(dateFile):
            # First run: use the dummy sentinel time and record "now".
            self.lastSeenTime = PubTime((1000, 1, 1, 0, 0, 0))
            lastSeenFile = open(dateFile, 'w')
            timeString = strftime("%Y-%m-%d %H:%M:%S", gmtime())
            lastSeenFile.write(timeString)
            lastSeenFile.close()
        else:
            # Read from the dateFile.
            lastSeenFile = open(dateFile, 'r')
            timeString = lastSeenFile.read()
            self.lastSeenTime = self.timeStringToPubTime(timeString)
            lastSeenFile.close()
            # Re-open and then write the current time to the dateFile.
            lastSeenFile = open(dateFile, 'w')
            timeString = strftime("%Y-%m-%d %H:%M:%S", gmtime())
            lastSeenFile.write(timeString)
            lastSeenFile.close()

    def getFeedsFromFile(self, feedFile):
        """
        Given a file containing a list of URLs (one per line),
        returns a list of the URLs.
        """
        feedList = []
        try:
            with open(feedFile, "r") as openFile:
                possibleFeeds = openFile.readlines()
            # The following list comprehension makes possibleFeeds a list
            # of links stripped of newlines.
            possibleFeeds = [link.strip() for link in possibleFeeds if \
                             link != '\n']
            for link in possibleFeeds:
                feedList.append(self.appendHTTP(link))
            return feedList
        # NOTE(review): bare except masks the real failure; narrowing to
        # IOError/OSError would be safer.
        except:
            errorMessage = "The file " + feedFile + \
                           " does not exist or is not valid."
            raise FeedReaderError(errorMessage)

    def makeFeed(self, parserData):
        """
        Given the output of feedparser.parse(), validate that
        we do have a feed, and return a Feed object. If the feed is invalid,
        returns a feed object with the title 'ERROR'.
        """
        # Import classes for error type checking.
        from urllib2 import URLError
        from xml.sax._exceptions import SAXParseException as SynErr
        # parserData.bozo will equal 1 if the feed is malformed, or
        # 0 if it is well formed.
        if parserData.bozo == 1:
            if isinstance(parserData.bozo_exception, URLError) or \
               isinstance(parserData.bozo_exception, SynErr):
                return Feed("ERROR", "")
        # Make the feed object.
        title = ""
        pubDate = ""
        hasTitle = parserData.feed.has_key("title")
        hasPubDate = parserData.feed.has_key("updated_parsed")
        if hasPubDate:
            pubDate = parserData.feed.updated_parsed
        if hasTitle:
            title = parserData.feed.title
        # A feed with neither a title nor a date is treated as invalid.
        if hasPubDate or hasTitle:
            feed = Feed(title, pubDate)
            if parserData.has_key("entries"):
                for entry in parserData.entries:
                    feed.addItem(Item(entry))
            return feed
        else:
            return Feed("ERROR", "")

    ############### Accessors and Mutators ###############

    def getFeeds(self):
        """
        Returns a copy of the internal feeds list.
        """
        return self.feeds[:]

    def addFeed(self, feedObject):
        """
        Appends the given Feed object to the internal list
        of feeds.
        """
        self.feeds.append(feedObject)

    ############### Helper Methods ###############

    def appendHTTP(self, url):
        """
        Appends http:// to the beginning of a potential url
        if it is not already present and returns the url.
        """
        import re
        if not re.match("^http[s]?://(?!/)", url):
            url = "http://" + url
        return url

    def timeStringToPubTime(self, timeString):
        """
        This method takes in a timeString in format
        YYYY-MM-DD HH:MM:SS and converts it to a PubTime object which it
        returns.
        """
        date, time = timeString.split(' ')
        year, month, day = date.split('-')
        hour, minute, sec = time.split(':')
        timeArray = [year, month, day, hour, minute, sec]
        timeTuple = tuple(map(int, timeArray))
        return PubTime(timeTuple)

    def setFeedDescriptionFlag(self, feed):
        # Turn on per-feed description output when --description on.
        if self.options.description.lower() == "on":
            feed.descOn = True

    def setItemDescriptionFlag(self, item):
        # Turn on per-item description output when --description on.
        if self.options.description.lower() == "on":
            item.descOn = True

    ############### Views ###############

    def defaultView(self):
        """
        Displays feeds in the same order as they are listed in the
        input file.
        """
        for feed in self.getFeeds():
            self.setFeedDescriptionFlag(feed)
            feed.printFeed()

    def dateView(self):
        """
        Displays the items in order by date, from newest to oldest.
        Entries with no date are not displayed.
        """
        printList = []
        for feed in self.getFeeds():
            for entry in feed.getItems():
                printList.append(entry)
        printList.sort(key=lambda entry: entry.getPubDate())
        printList.reverse()
        for entry in printList:
            self.setItemDescriptionFlag(entry)
            # Skip entries carrying the dummy sentinel date (no real date).
            if entry.getPubDate() != PubTime((1000, 1, 1, 0, 0, 0)):
                print entry

    def alphaView(self):
        """
        Displays the feeds in alphabetical order.
        """
        feedsList = self.getFeeds()
        feedsList.sort(key=lambda feed: feed.getTitle())
        for feed in feedsList:
            self.setFeedDescriptionFlag(feed)
            feed.printFeed()

    def numView(self, n):
        """
        Displays the latest n items in each feed.
        """
        for feed in self.getFeeds():
            self.setFeedDescriptionFlag(feed)
            feed.printFeed(numItems=n)

    def sinceView(self, withLastSeen=False):
        """
        Displays the items published or updated since date specified at
        the command line. If withLastSeen == True, displays the items
        published or updated since the last time this program was run.
        """
        if not withLastSeen:
            # --since is pre-validated as YYYY-MM-DD by validateCmdLineArgs.
            sinceDate = self.options.since.split("-")
            year = int(sinceDate[0])
            month = int(sinceDate[1])
            day = int(sinceDate[2])
            date = PubTime((year, month, day, 0, 0, 0))
        else:
            date = self.lastSeenTime
        for feed in self.getFeeds():
            self.setFeedDescriptionFlag(feed)
            feed.printFeed(sinceDate=date)

    def titleView(self):
        """
        Displays the items whose titles are matched by the regular
        expression specified at the command line.
        """
        import re
        printList = []
        for feed in self.getFeeds():
            for entry in feed.getItems():
                if re.search(self.options.title, entry.getTitle()):
                    printList.append(entry)
        for entry in printList:
            self.setItemDescriptionFlag(entry)
            print entry

    def newestView(self):
        """
        Wrapper for sinceView. Displays items that have been updated or
        published since the last time the program was run.
        """
        self.sinceView(withLastSeen=True)

    ############### Execution ###############

    def run(self):
        """
        Sets up the program, and sequentially executes the views specified
        at the command line.
        """
        # Process the input. self.args[0] is the input file.
        feedList = self.getFeedsFromFile(self.args[0])
        for feed in feedList:
            feedObject = self.makeFeed(feedparser.parse(feed))
            if feedObject.getTitle() != "ERROR":
                self.addFeed(feedObject)
        # Set up the views
        optionsAreTriggered = (self.options.date or self.options.alpha or \
                               (self.options.n > -1) or self.options.since or \
                               self.options.title or self.options.newest)
        # Run the view(s)
        if optionsAreTriggered:
            if self.options.date:
                print "\n###### DATE VIEW ######\n"
                self.dateView()
            if self.options.alpha:
                print "\n###### ALPHA VIEW ######\n"
                self.alphaView()
            if (self.options.n > -1):
                print "\n###### NUM VIEW ######\n"
                self.numView(self.options.n)
            if self.options.since:
                print "\n###### SINCE VIEW ######\n"
                self.sinceView()
            if self.options.title:
                print "\n###### TITLE VIEW ######\n"
                self.titleView()
            if self.options.newest:
                print "\n###### NEWEST VIEW ######\n"
                self.newestView()
        else:
            self.defaultView()
class Feed(object):
    """
    A Feed stores title, publication date (pubDate), and the stories associated
    with the feed it comes from.
    """
    def __init__(self, feedTitle, feedPubDate):
        # Some of this data might not exist; specifically feedPubDate.
        self.title = str(feedTitle)
        self.pubDate = feedPubDate
        self.items = []
        # self.descOn is false by default. It is a flag set by the Controller
        # that specifies if the user wants to display the description associated
        # with a story link.
        self.descOn = False

    def getTitle(self):
        return self.title

    def getPubDate(self):
        return self.pubDate

    def getItems(self):
        return self.items

    def addItem(self, item):
        self.items.append(item)

    def printFeed(self, numItems=-1, sinceDate=-1):
        """
        Prints the feed and its contained items according to the options
        specified.

        numItems  -- show only the first numItems stories (-1 = all).
        sinceDate -- a PubTime; only stories at or after it are shown
                     (any non-PubTime value means "no date filter").
        """
        # If numItems is not specified, print all stories.
        if numItems == -1:
            numItems = len(self.items)
        # If sinceDate is not specified, print all stories.
        if not isinstance(sinceDate, PubTime):
            sinceDate = PubTime((1000, 1, 1, 0, 0, 0))
        string = "Feed: " + self.getTitle() \
                 + "\n===============================\n"
        for entry in self.getItems()[0:numItems]:
            # Propagate the description flag down to each item.
            if self.descOn:
                entry.descOn = True
            if entry.getPubDate() >= sinceDate:
                string += str(entry) + '\n'
        print string
class Item(object):
    """
    A single story formed from an feedparser entry. Instance variables are
    initialized to empty strings, and are then populated.
    (Python 2 code: uses dict.has_key and str/unicode .encode.)
    """
    def __init__(self, entry):
        # Some of this data might not exist; specifically itemPubDate
        # self.content will be a unicode string.
        self.title = ""
        self.url = ""
        self.pubDate = PubTime((1000, 1, 1, 0, 0, 0)) # Dummy time
        self.content = u"" # A unicode string
        self.descOn = False
        self.populate(entry)

    def getTitle(self):
        return self.title

    def getURL(self):
        return self.url

    def getPubDate(self):
        return self.pubDate

    def getContent(self):
        return self.content

    def stripTags(self, summary):
        """A helper method to strip html tags and entities from a string."""
        import re
        return re.sub("</?[\w]*.*?/?>|&#?\w+;","", summary)

    def populate(self, entry):
        """
        This populates the Item object with properties extracted from
        the entry parameter.
        """
        if entry.has_key("title"):
            titleString = entry.title.encode('UTF-8')
            self.title = self.stripTags(titleString)
        if entry.has_key('link'):
            self.url = str(entry.link)
        if entry.has_key("summary"):
            self.content = self.stripTags(entry.summary)
        # If published_parsed exists, use it to set self.pubDate. Otherwise, use
        # updated_parsed.
        if entry.has_key("published_parsed"):
            self.pubDate = PubTime(entry.published_parsed)
        elif entry.has_key("updated_parsed"):
            self.pubDate = PubTime(entry.updated_parsed)

    def __str__(self):
        string = self.getTitle() + "\n----------------------"
        # Year 1000 is the dummy sentinel meaning "no date available".
        if str(self.getPubDate().getYear()) != "1000":
            # If the entry has a publication date, add it to the string.
            string += "\nPublished: " + str(self.getPubDate())
        if str(self.getURL()) != "":
            # If the entry has a URL, add it to the string.
            string += "\nLink: " + self.getURL()
        if self.descOn:
            string += "\nDescription: " + self.content.encode('UTF-8')
        string += "\n"
        return string
class PubTime(object):
    """Publication-time wrapper: stores the individual calendar fields and
    a datetime.datetime built from the same tuple, so instances compare
    and print easily."""

    def __init__(self, time_tuple):
        import datetime
        # Unpack the first six fields: (year, month, day, hour, min, sec).
        (self.year, self.month, self.day,
         self.hour, self.min, self.sec) = time_tuple[0:6]
        self.dateTime = datetime.datetime(*time_tuple[0:6])

    def getYear(self):
        return self.year

    def getMonth(self):
        return self.month

    def getDay(self):
        return self.day

    def getHour(self):
        return self.hour

    def getMin(self):
        return self.min

    def getSec(self):
        return self.sec

    def __cmp__(self, other):
        """
        Compares two PubTime objects to each other. Returns -1 if this PubTime
        is earlier than the other PubTime object, returns 0 if they are equal,
        and 1 if this object is later than the other object.
        """
        if self.dateTime < other.dateTime:
            return -1
        if self.dateTime > other.dateTime:
            return 1
        return 0

    def __str__(self):
        """Returns the time in YYYY-MM-DD HH:MM:SS format."""
        return self.dateTime.isoformat(' ')
class FeedReaderError(Exception):
    """Application-specific exception carrying a human-readable message;
    raised to report a fatal problem and stop cleanly."""

    def __init__(self, errorMessage):
        # Stored under this name because external code reads the attribute.
        self.errorMessage = errorMessage

    def __str__(self):
        # %r mirrors the original repr() of the stored message.
        return "%r" % (self.errorMessage,)
############### Helper Functions ###############
def processCmdLineArgs():
    """Build the optparse parser for this program and return the parsed
    (options, args) pair from sys.argv."""
    from optparse import OptionParser
    parser = OptionParser()
    # (option names, add_option keyword arguments), in display order.
    option_specs = [
        (("--date",), dict(action="store_true")),
        (("--alpha",), dict(action="store_true")),
        (("-n",), dict(type="int")),
        (("--since",), dict(type="string")),
        (("--title",), dict(type="string")),
        (("--description",), dict(type="string", default="off")),
        (("--newest",), dict(action="store_true")),
    ]
    for names, kwargs in option_specs:
        parser.add_option(*names, **kwargs)
    return parser.parse_args()
def validateCmdLineArgs(options, args):
    """Validate command-line options and arguments, raising FeedReaderError
    on the first problem found; returns None when everything checks out."""
    descriptionFlag = options.description.lower()
    # Exactly one positional argument (the feeds file) is required.
    if len(args) != 1:
        raise FeedReaderError("Check commandline parameters.")
    # --since, when given, must be a well-formed YYYY-MM-DD date.
    if not isDateValid(options.since):
        raise FeedReaderError("Date must be in this format: 'YYYY-MM-DD'")
    # --description only accepts "on" or "off" (case-insensitive).
    if descriptionFlag not in ("on", "off"):
        raise FeedReaderError("--description must be either 'on' or 'off'.")
def isDateValid(date):
    """Return True when *date* is empty/None (option not given) or matches
    YYYY-MM-DD with month 01-12 and day 01-31; False otherwise."""
    import re
    if not date:
        # An absent --since option counts as valid.
        return True
    pattern = r"^\d{4}-([0][1-9]|[1][0-2])-([0][1-9]|[1][0-9]|[2][0-9]|[3][0-1])$"
    return bool(re.match(pattern, date))
############### Execution ###############
def main():
    """Program entry: parse and validate argv, then run everything by
    constructing Controller (its __init__ drives the whole program).
    FeedReaderError is the only expected failure; it is printed, not
    re-raised."""
    try:
        options, args = processCmdLineArgs()
        validateCmdLineArgs(options, args)
        feedReader = Controller(options, args)
    except FeedReaderError as feedError:
        print "FeedReaderError:", feedError
# Standard script guard: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
13,443,247,650,271 |
248c9916a95856ba3efd625e2b3f1334b66de5c6
|
82022d0f6aafdf35c09619a812865ca62d3d6c8c
|
/hist1.py
|
134e19a9a8604abde06adfc955220089afb296e0
|
[] |
no_license
|
efloehr/cohpy-oct2013
|
https://github.com/efloehr/cohpy-oct2013
|
93aeee6bbda62ed0c6752c2164405d32da278626
|
0df84bd68a84f21b53ebc4590f8ffc4d2b121958
|
refs/heads/master
| 2021-01-20T02:13:16.678493 | 2013-10-29T14:51:56 | 2013-10-29T14:51:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#coding:utf-8
# Introduce PIL, show properties (mode) and methods (like histogram, show) on Image object
from PIL import Image
def get_histogram(image):
    """Split a PIL RGB image's histogram into per-channel count lists.

    image -- a PIL Image in "RGB" mode (any object exposing `mode` and a
             PIL-style `histogram()` returning 768 counts works).

    Returns a (red, green, blue) tuple of three 256-element lists.
    Raises Exception when the image is not in RGB mode.
    """
    # Histogram will only work on RGB type.
    # BUG FIX: the original used the `<>` operator, which was removed in
    # Python 3; `!=` is equivalent and valid in both Python 2 and 3.
    if image.mode != "RGB":
        raise Exception("Not RGB mode")
    hg = image.histogram()
    # Returns a 768 member array with counts of R, G, B values:
    # indices 0-255 are red, 256-511 green, 512-767 blue.
    rhg = hg[0:256]
    ghg = hg[256:512]
    bhg = hg[512:]
    return rhg, ghg, bhg
if __name__ == '__main__':
    # Open the image
    skyimg = Image.open("2013-10-01-16-17-12.jpg")
    # Need ImageMagick to view
    skyimg.show()
    # Demo call; the returned (r, g, b) lists are discarded here.
    get_histogram(skyimg)
|
UTF-8
|
Python
| false | false | 2,013 |
8,194,797,602,919 |
42006163dd03cfc24b5d84ae7854027bb8364f7c
|
88eab302a47e6a7ac3c97c37debfc67d3b593121
|
/catalog/__init__.py
|
c0da183cf981a6111652a3401e162a460eb70727
|
[] |
no_license
|
Marilyna/promua-test
|
https://github.com/Marilyna/promua-test
|
aec15330a44d2d5ab550e6d388441ec16fb398ed
|
3017429646325aae80699f1e91de8026261cb7c2
|
refs/heads/master
| 2021-01-19T05:43:11.189465 | 2014-01-22T15:05:39 | 2014-01-22T15:05:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flaskext.bcrypt import Bcrypt
# Module-level singletons shared across the package: the Flask app, its
# SQLAlchemy handle, and the bcrypt password-hashing helper.
app = Flask('catalog')
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
def create(config):
    """Finish configuring the module-level `app` with the given config
    mapping: secret key, static-file route, login manager, CSRF
    protection, and the views module (imported for its route side
    effects).  Returns None; callers then use `catalog.app`."""
    app.config.update(config)
    # NOTE(review): hard-coded secret key; move into `config` for
    # anything beyond local development.
    app.secret_key = 'Drugs are bad! okay?'
    app.static_folder = 'static' # Enable is back, but the URL rule is
    app.add_url_rule('/static/<path:filename>', endpoint='static', view_func=app.send_static_file)
    from catalog.login import login_manager
    login_manager.init_app(app)
    from flask_wtf.csrf import CsrfProtect
    CsrfProtect(app)
    # Imported last so routes register against the fully configured app.
    import catalog.views
|
UTF-8
|
Python
| false | false | 2,014 |
17,016,660,468,330 |
b8b5917de3e5592f4df4fe8211fff9e3428735db
|
a2ff618aa71332bdf22d960100b721cbbda32387
|
/timechess/chess/board.py
|
7a425b7ac9fc94e830e2151064c5034796ed65bc
|
[
"CC-BY-SA-3.0"
] |
non_permissive
|
atomopawn/timechess
|
https://github.com/atomopawn/timechess
|
888a8150a35ec58a92278a7c13b4d0ad11c71f2f
|
47d25e09776a2c5a5ab1bcd5ca3298bfdb42859a
|
refs/heads/master
| 2018-05-29T22:02:10.292079 | 2014-07-10T17:54:01 | 2014-07-10T17:54:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from dfiance import SchemaObj, Boolean, List
from timechess.boardgame import Board, Piece, PieceRegistry, PointType
# Registry mapping piece class names to single-character codes
# (lower-case 'p' for pawns, upper-case letters for the other pieces).
ChessPieces = PieceRegistry(dict(
    Pawn = 'p',
    Knight = 'N',
    Bishop = 'B',
    Rook = 'R',
    Queen = 'Q',
    King = 'K'
))
class CastleState(SchemaObj):
    '''Indicates whether a player can castle.
    kingside is True if the kingside rook or the king has moved.
    queenside is True if the queenside rook or the king has moved.
    '''
    field_types = dict(
        kingside = Boolean(),
        queenside = Boolean(),
    )
    def __init__(self, kingside=False, queenside=False):
        # Both default to False: nothing has moved yet, castling allowed.
        self.kingside = kingside
        self.queenside = queenside
    def clear(self):
        # Forfeit all castling rights by marking both flags as "moved".
        # NOTE(review): despite the name, clear() sets the flags to True --
        # confirm callers expect "clear castling rights", not "reset".
        self.kingside = self.queenside = True
class EPState(SchemaObj):
    '''Indicates an En Passant capture point'''
    field_types = dict(
        capture_point = PointType(),
        target = PointType(),
    )
    def __init__(self, capture_point=None, target=None):
        # Both None means no en-passant capture is currently legal.
        # NOTE(review): capture_point vs. target semantics inferred from
        # the names only -- confirm which is the destination square and
        # which the captured pawn.
        self.capture_point=capture_point
        self.target = target
class ChessBoard(Board):
    '''A Chess Board, plus castling state and legal En Passant capture point.'''
    field_types = dict(
        castling = List(CastleState),
        ep = EPState.dfier(),
    )
    def __init__(self, cells=None, wcstate=None, bcstate=None, ep=None, size=None):
        super(ChessBoard, self).__init__(cells, size)
        # castling[0] holds white's state (wcstate), castling[1] black's;
        # fresh (all-False) states are created when none are supplied.
        self.castling = [wcstate or CastleState(), bcstate or CastleState()]
        self.ep = ep or EPState()
|
UTF-8
|
Python
| false | false | 2,014 |
2,980,707,309,097 |
5fefa3ee0336e023fe7eaf21043d7979cda0dd8a
|
7b45f4aa9de0ab591336a0204eb5a8a85354fb01
|
/LeaderBoard/views.py
|
b8fa707fcb38e9714b8537650109ef21d4b96a17
|
[] |
no_license
|
PeterLiao/CityHunt
|
https://github.com/PeterLiao/CityHunt
|
ecf7db04c82e4061032950b7f77e69a859b3257a
|
4f8dae142329ec3c33129f96d41f3b34eb71ad76
|
refs/heads/master
| 2021-01-19T22:33:54.798849 | 2014-06-30T05:46:03 | 2014-06-30T05:46:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render
import datetime
import json
from datetime import date, timedelta
from django.http import Http404
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render_to_response
from LeaderBoard.forms import *
from LeaderBoard.common import *
from LeaderBoard.models import *
# Create your views here.
def add_user(request):
    """AJAX endpoint: create a User from a POSTed UserForm.

    Responds with JSON {"result": "ok"} on success and
    {"result": "fail"} on a non-POST request or invalid form.
    """
    result = 'fail'
    if request.method == 'POST':
        form = UserForm(request.POST)
        if form.is_valid():
            user = User(name=form.cleaned_data['name'],
                        fb_id=form.cleaned_data['user_id'],
                        email=form.cleaned_data['email'],
                        pub_date=get_utc_now())
            user.save()
            result = 'ok'
    response_data = {}
    response_data['result'] = result
    return HttpResponse(json.dumps(response_data), content_type="application/json")
def add_post_page(request):
    """Create a Post from a POSTed PostForm, then redirect to /hottest/.

    Invalid forms are only logged (form.errors printed); the redirect to
    the listing happens either way.
    """
    if request.method == 'POST':
        form = PostForm(request.POST)
        if form.is_valid():
            # Resolve the referenced city and author.
            # NOTE(review): [0] raises IndexError when no match exists.
            city = City.objects.filter(id=form.cleaned_data['city_id'])[0]
            user = User.objects.filter(fb_id=form.cleaned_data['user_id'])[0]
            post = Post(post_text=form.cleaned_data['post_text'], post_title=form.cleaned_data['post_title'], city=city, user=user, pub_date=get_utc_now())
            post.save()
        else:
            print form.errors
    return HttpResponseRedirect('/hottest/')
def add_comment_page(request):
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
post = Post.objects.filter(id=form.cleaned_data['post_id'])[0]
user = User.objects.filter(fb_id=form.cleaned_data['user_id'])[0]
comment = Comment(comment_text=form.cleaned_data['comment_text'], post=post, user=user, pub_date=get_utc_now())
comment.save()
else:
print form.errors
return HttpResponseRedirect("/hottest/%d" % post.id)
def add_post_like_page(request):
    """AJAX endpoint toggling the PostLike for (post, user).

    Creates the like when absent (JSON result "like"), deletes it when
    present (result "not_like"), and answers "fail" on non-POST or
    invalid form.
    """
    result = 'fail'
    if request.method == 'POST':
        form = PostLikeForm(request.POST)
        if form.is_valid():
            post = Post.objects.filter(id=form.cleaned_data['post_id'])[0]
            user = User.objects.filter(fb_id=form.cleaned_data['user_id'])[0]
            post_like = PostLike.objects.filter(post=post, user=user)
            if post_like.count() == 0:
                # No existing like: create one.
                post_like = PostLike(post=post, user=user, pub_date=get_utc_now())
                post_like.save()
                result = 'like'
            elif post_like.count() == 1:
                # Exactly one existing like: remove it (toggle off).
                PostLike.objects.filter(post=post, user=user).delete()
                result = 'not_like'
    response_data = {}
    response_data['result'] = result
    return HttpResponse(json.dumps(response_data), content_type="application/json")
def post_detail_page(request, post_id):
    """Render the detail page for one post: its comments, likes, and
    whether the cookie-identified user has liked it.  Raises Http404 when
    the post does not exist (IndexError from the [0] lookup)."""
    try:
        # The logged-in user is identified by a 'user_id' cookie; 0 means
        # anonymous (matches no User row).
        user_id = 0
        if 'user_id' in request.COOKIES:
            user_id = request.COOKIES.get('user_id')
        post = Post.objects.filter(id=post_id)[0]
        comment_list = Comment.objects.filter(post=post)
        like_list = PostLike.objects.filter(post=post)
        logged_user = User.objects.filter(fb_id=user_id)
        # 1 when the viewing user already liked this post, else 0.
        liked = PostLike.objects.filter(post=post, user=logged_user).count()
        return render_to_response("detail.html", {"current_page": "hottest",
                                                  "post_obj": post,
                                                  "comment_list": comment_list,
                                                  "like_list": like_list,
                                                  "liked": liked,
                                                  "user_form": UserForm(),
                                                  "post_like_form": PostLikeForm()},
                                  context_instance = RequestContext(request))
    except IndexError:
        raise Http404
def get_post_data_by_date(start_date, end_date):
    """Return [{"post": Post, "liked": 0}, ...] for posts published in
    [start_date, end_date], ordered most-liked first.  `liked` is always
    0 here; see get_post_data_by_date_ex for the per-user variant."""
    data_list = []
    post_list = Post.objects.filter(pub_date__gte=start_date, pub_date__lte=end_date)
    post_list = sorted(post_list, key=lambda x: x.like_count, reverse=True)
    for post in post_list:
        data_list.append({"post": post, "liked": 0})
    return data_list
def get_post_data_by_date_ex(start_date, end_date, user_id):
    """Like get_post_data_by_date, but each entry's "liked" is 1/0 for
    whether the user identified by fb_id=user_id has liked that post."""
    data_list = []
    post_list = Post.objects.filter(pub_date__gte=start_date, pub_date__lte=end_date)
    post_list = sorted(post_list, key=lambda x: x.like_count, reverse=True)
    logged_user = User.objects.filter(fb_id=user_id)
    for post in post_list:
        liked = PostLike.objects.filter(post=post, user=logged_user).count()
        data_list.append({"post": post, "liked": liked})
    return data_list
def hottest_page(request):
    """Render the four-day "hottest posts" listing.

    Each while-loop walks backwards (up to 30 days total) until it finds
    a day with posts; `days_ago` accumulates ACROSS the loops, so the
    four lists are four consecutive days relative to the first non-empty
    one, not fixed calendar days.  TODO(review): the four loops are
    near-identical -- factor into a helper.
    """
    # Viewer identity comes from the 'user_id' cookie; 0 = anonymous.
    user_id = 0
    if 'user_id' in request.COOKIES:
        user_id = request.COOKIES.get('user_id')
    days_ago = 0
    while True:
        post_list_in_today = get_post_data_by_date_ex(datetime.date.today()-timedelta(days_ago),
                                                      datetime.date.today()+timedelta(1)-timedelta(days_ago),
                                                      user_id)
        if len(post_list_in_today) > 0 or days_ago >= 30:
            break
        else:
            days_ago += 1
    while True:
        post_list_in_yesterday = get_post_data_by_date_ex(datetime.date.today()-timedelta(1)-timedelta(days_ago),
                                                          datetime.date.today()-timedelta(days_ago),
                                                          user_id)
        if len(post_list_in_yesterday) > 0 or days_ago >= 30:
            break
        else:
            days_ago += 1
    while True:
        post_list_in_two_days_ago = get_post_data_by_date_ex(datetime.date.today()-timedelta(2)-timedelta(days_ago),
                                                             datetime.date.today()-timedelta(1)-timedelta(days_ago),
                                                             user_id)
        if len(post_list_in_two_days_ago) > 0 or days_ago >= 30:
            break
        else:
            days_ago += 1
    while True:
        post_list_in_three_days_ago = get_post_data_by_date_ex(datetime.date.today()-timedelta(3)-timedelta(days_ago),
                                                               datetime.date.today()-timedelta(2)-timedelta(days_ago),
                                                               user_id)
        if len(post_list_in_three_days_ago) > 0 or days_ago >= 30:
            break
        else:
            days_ago += 1
    # NOTE(review): the date labels below do not account for `days_ago`,
    # so the headings may not match the actual days shown -- verify.
    return render_to_response("hottest.html",
                              {"current_page": "hottest",
                               "today": get_formatted_date(datetime.date.today()),
                               "today_list": post_list_in_today,
                               "yesterday": get_formatted_date(datetime.date.today()-timedelta(1)),
                               "yesterday_list": post_list_in_yesterday,
                               "two_days_ago_list": post_list_in_two_days_ago,
                               "two_days_ago": get_formatted_date(datetime.date.today()-timedelta(2)),
                               "three_days_ago_list": post_list_in_three_days_ago,
                               "three_days_ago": get_formatted_date(datetime.date.today()-timedelta(3)),
                               "user_form": UserForm(),
                               "post_like_form": PostLikeForm()},
                              context_instance = RequestContext(request))
def hottest_logged_page(request, user_id):
    """Render the "hottest" page for a logged-in user (today and yesterday)."""
    today = datetime.date.today()
    todays_posts = get_post_data_by_date_ex(today, today + timedelta(1), user_id)
    yesterdays_posts = get_post_data_by_date_ex(today - timedelta(1), today, user_id)
    context = {
        "current_page": "hottest",
        "today": get_formatted_date(today),
        "today_list": todays_posts,
        "yesterday": get_formatted_date(today - timedelta(1)),
        "yesterday_list": yesterdays_posts,
        "user_form": UserForm(),
        "post_like_form": PostLikeForm(),
    }
    return render_to_response("hottest.html", context,
                              context_instance=RequestContext(request))
def about_page(request):
    """Render the static "about" page."""
    context = {'current_page': 'about'}
    return render_to_response("about.html", context)
def login_page(request):
    """Render the static login page."""
    context = {'current_page': 'login'}
    return render_to_response("login.html", context)
def fb_page(request):
    """Render the Facebook helper page (no extra context)."""
    return render_to_response("fb.html")
|
UTF-8
|
Python
| false | false | 2,014 |
12,481,175,004,896 |
2f10d0aa6209407989efc7b1a8f6f93842430256
|
36560b159924eadfaf31cd137a7023b54ef6a92a
|
/youtube_dl/extractor/motherless.py
|
6229b21732b70525b832ab2f3370594736cae8df
|
[
"LicenseRef-scancode-public-domain",
"Unlicense"
] |
permissive
|
HarukoXChan/youtube-dl
|
https://github.com/HarukoXChan/youtube-dl
|
ad38c5e1a94dcc92677a2c93a51eb2a58f07629b
|
f83dda12ad37d1b83142e2821e72f8e6c0b4405e
|
refs/heads/master
| 2016-10-31T14:52:45.504120 | 2014-08-20T13:30:29 | 2014-08-20T13:30:29 | 173,237,040 | 0 | 0 |
Unlicense
| true | 2019-03-01T04:54:39 | 2019-03-01T04:54:39 | 2019-03-01T04:42:13 | 2019-02-28T18:05:56 | 54,436 | 0 | 0 | 0 | null | false | null |
from __future__ import unicode_literals
import datetime
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
unified_strdate,
)
class MotherlessIE(InfoExtractor):
    """youtube-dl extractor for motherless.com video pages.

    All metadata is scraped from the HTML page with regexes; there is no
    JSON API involved.
    """
    _VALID_URL = r'http://(?:www\.)?motherless\.com/(?P<id>[A-Z0-9]+)'
    _TESTS = [
        {
            'url': 'http://motherless.com/AC3FFE1',
            'md5': '5527fef81d2e529215dad3c2d744a7d9',
            'info_dict': {
                'id': 'AC3FFE1',
                'ext': 'flv',
                'title': 'Fucked in the ass while playing PS3',
                'categories': ['Gaming', 'anal', 'reluctant', 'rough', 'Wife'],
                'upload_date': '20100913',
                'uploader_id': 'famouslyfuckedup',
                'thumbnail': 're:http://.*\.jpg',
                'age_limit': 18,
            }
        },
        {
            'url': 'http://motherless.com/532291B',
            'md5': 'bc59a6b47d1f958e61fbd38a4d31b131',
            'info_dict': {
                'id': '532291B',
                'ext': 'mp4',
                'title': 'Amazing girl playing the omegle game, PERFECT!',
                'categories': ['Amateur', 'webcam', 'omegle', 'pink', 'young', 'masturbate', 'teen', 'game', 'hairy'],
                'upload_date': '20140622',
                'uploader_id': 'Sulivana7x',
                'thumbnail': 're:http://.*\.jpg',
                'age_limit': 18,
            }
        }
    ]
    def _real_extract(self,url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        title = self._html_search_regex(r'id="view-upload-title">\s+([^<]+)<', webpage, 'title')
        video_url = self._html_search_regex(r'setup\(\{\s+"file".+: "([^"]+)",', webpage, 'video_url')
        age_limit = self._rta_search(webpage)
        view_count = self._html_search_regex(r'<strong>Views</strong>\s+([^<]+)<', webpage, 'view_count')
        upload_date = self._html_search_regex(r'<strong>Uploaded</strong>\s+([^<]+)<', webpage, 'upload_date')
        # Relative dates such as "3 days Ago" are converted to an absolute
        # YYYYMMDD stamp relative to "now"; absolute dates go through
        # unified_strdate().
        if 'Ago' in upload_date:
            days = int(re.search(r'([0-9]+)', upload_date).group(1))
            upload_date = (datetime.datetime.now() - datetime.timedelta(days=days)).strftime('%Y%m%d')
        else:
            upload_date = unified_strdate(upload_date)
        like_count = self._html_search_regex(r'<strong>Favorited</strong>\s+([^<]+)<', webpage, 'like_count')
        # No comment count is exposed directly; count occurrences of the
        # per-comment markup class instead.
        comment_count = webpage.count('class="media-comment-contents"')
        uploader_id = self._html_search_regex(r'"thumb-member-username">\s+<a href="/m/([^"]+)"', webpage, 'uploader_id')
        categories = self._html_search_meta('keywords', webpage)
        if categories:
            categories = [cat.strip() for cat in categories.split(',')]
        return {
            'id': video_id,
            'title': title,
            'upload_date': upload_date,
            'uploader_id': uploader_id,
            'thumbnail': self._og_search_thumbnail(webpage),
            'categories': categories,
            # Counts are rendered with thousands separators; strip the commas
            # before the int conversion.
            'view_count': int_or_none(view_count.replace(',', '')),
            'like_count': int_or_none(like_count.replace(',', '')),
            'comment_count': comment_count,
            'age_limit': age_limit,
            'url': video_url,
        }
|
UTF-8
|
Python
| false | false | 2,014 |
12,790,412,624,173 |
b5bfce31842ffe4ffe17322aa0970b0c2e9eb77a
|
f785e6c4986700d791870face98efb155cdf4c5b
|
/tests/features/steps.py
|
07d81d98a0f5108e1cb0ba43aa803400b06a1524
|
[
"BSD-3-Clause"
] |
permissive
|
mariamota/sqa2014tennis
|
https://github.com/mariamota/sqa2014tennis
|
d436305e043fd11569b8df450519939806f56ee7
|
ec65af72d82a9570c82151223ac4a879c961ea0c
|
refs/heads/master
| 2021-01-22T13:52:14.484684 | 2014-09-25T20:48:13 | 2014-09-25T20:48:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from lettuce import *
import app.match as m
# Lettuce step definitions for the tennis-match scoring features.  State is
# shared between steps through lettuce's `world` object; `m.Match` is the
# system under test.

@step('Given: "([^"]*)" and "([^"]*)" start a match to "([^"]*)" sets')
def given_group1_and_group2_start_a_match_to_group3_sets(step, p1, p2, s):
    # Create the match under test: players p1/p2, best of `s` sets.
    world.match = m.Match(p1, p2, s)

@step('Then: I see score: "([^"]*)"')
def then_i_see_score_group1(step, set):
    # Assert the current score string equals the expected one.
    assert world.match.score() == set, \
        "Got %s" % world.match.score()

@step('When: "([^"]*)" won the "([^"]*)" set "([^"]*)"-"([^"]*)"')
def when_group1_won_the_group2_set_group3_group4(step, w, s, s1, s2):
    # Record that player `w` won set number `s` with a games score of s1-s2.
    world.match.won_set(w, s, s1, s2)

@step('And: "([^"]*)" won the "([^"]*)" set "([^"]*)"-"([^"]*)"')
def and_group1_won_the_group2_set_group3_group4(step, w, s, s1, s2):
    # Same behaviour as the "When:" step; lettuce matches "And:" separately.
    world.match.won_set(w, s, s1, s2)

@step('Then: The match score is: "([^"]*)"')
def then_the_match_score_is_group1(step, res):
    # Assert the final match result string equals the expected one.
    assert world.match.resultados() == res, \
        "Got %s" % world.match.resultados()
|
UTF-8
|
Python
| false | false | 2,014 |
16,595,753,633,552 |
cc22d8e6cc8663cbe347d586bb78778aa4cdbf29
|
eab00469f2c29d4747e8f93ee9dde776184e583b
|
/bearded_web/apps/dashboard/views.py
|
4b418a1e743e7deb4c583d97940951b3c3676e06
|
[] |
no_license
|
slonoed/bearded
|
https://github.com/slonoed/bearded
|
a36a2b00ba2ae598a181a308d242cde44a520424
|
db94c88b0d4b91f728b51d6fbea82acd48ba84fe
|
refs/heads/master
| 2021-01-20T23:47:31.226165 | 2013-06-09T10:59:11 | 2013-06-09T10:59:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse_lazy
from django.shortcuts import render
from django.utils.translation import gettext as _
from plugin.models import Plugin
from plugin.api import PluginResource
# Navigation entries rendered on every dashboard page, in display order.
# URLs are resolved lazily (reverse_lazy) since this module is imported at
# startup; 'appName' identifies the client-side app bound to each page.
DASHBOARD_MENUS = (
    {'url': reverse_lazy('dashboard:target'), 'appName': 'targetApp', 'title': _(u'Targets')},
    {'url': reverse_lazy('dashboard:template'), 'appName': 'templateApp', 'title': _(u'Templates')},
    {'url': reverse_lazy('dashboard:job'), 'appName': 'jobApp', 'title': _(u'Jobs')},
    {'url': reverse_lazy('dashboard:report'), 'appName': 'reportApp', 'title': _(u'Reports')},
    {'url': reverse_lazy('dashboard:vuln_explorer'), 'appName': 'vulnApp', 'title': _(u'Vuln explorer')},
)
@login_required
def target(request):
    """Render the Targets dashboard page."""
    ctx = {'dashboard_menus': DASHBOARD_MENUS}
    return render(request, 'dashboard/target.html', ctx)

@login_required
def template(request):
    """Render the Templates page, with every Plugin serialized to JSON.

    Plugins are pushed through PluginResource so the embedded JSON matches
    what the REST API itself would return.
    """
    plugin_resource = PluginResource()
    plugins = Plugin.objects.all()
    plugin_bundles = [plugin_resource.full_dehydrate(plugin_resource.build_bundle(obj=plugin)) for plugin in plugins]
    plugins_serialized = plugin_resource.serialize(None, plugin_bundles, 'application/json')
    ctx = {
        'dashboard_menus': DASHBOARD_MENUS,
        'tool_plugins': plugins_serialized,
    }
    return render(request, 'dashboard/template.html', ctx)

@login_required
def job(request):
    """Render the Jobs dashboard page."""
    ctx = {'dashboard_menus': DASHBOARD_MENUS}
    return render(request, 'dashboard/job.html', ctx)

@login_required
def report(request):
    """Render the Reports dashboard page."""
    ctx = {'dashboard_menus': DASHBOARD_MENUS}
    return render(request, 'dashboard/report.html', ctx)

@login_required
def vuln_explorer(request):
    """Render the Vulnerability explorer dashboard page."""
    ctx = {'dashboard_menus': DASHBOARD_MENUS}
    return render(request, 'dashboard/vuln_explorer.html', ctx)
|
UTF-8
|
Python
| false | false | 2,013 |
16,011,638,116,193 |
9a5cc8a5b8757c085f0148374313c5e2d6d8d5cb
|
850c3b3a987da542f7d7b674bcaf64094991f382
|
/Service/appfog/wsi/sites/pub/api-center/index.py
|
24b5a5a76f4ccffdedbcd035bf9576be4455a4d9
|
[
"LGPL-2.0-or-later"
] |
non_permissive
|
inbei/nnt
|
https://github.com/inbei/nnt
|
87c48e9af72778fb6eda7bf5991b7a6de4ae4140
|
360d339a2725b0bc13c4172c33e011dc36840b1a
|
refs/heads/master
| 2022-01-20T02:41:21.028570 | 2013-11-27T01:48:06 | 2013-11-27T01:48:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from api.sites import mvc
from api.sapi import manager
class View(mvc.View):
    """API-center index page: lists every registered API."""
    def render(self, req):
        # Fetch all APIs through the sapi manager and hand them to the
        # template.  (manager.All() presumably builds a query object whose
        # run() executes it -- confirm against api.sapi.)
        apis = manager.All().run()
        return self.load_template('index.djhtml').fill({'apis':apis})

def Controller(req):
    # Entry point: dispatch the request to View through the mvc loader.
    return mvc.load(View, req)
|
UTF-8
|
Python
| false | false | 2,013 |
14,224,931,719,720 |
d26235f96f9395b7f393ad60f2aead4256aa9088
|
ac932039d403bc9e83c590550499e1abe6ededcd
|
/IRC/client.py
|
eceda9b12c4243a0070472473760b30413cad246
|
[
"Apache-2.0"
] |
permissive
|
IsmaeRLGV/UserBot
|
https://github.com/IsmaeRLGV/UserBot
|
520f7c0e29c8dad4ef509022ba7eb03d0ac485a2
|
96cfed58c7728a7d63bd662b5c4ee5a6bd4e2190
|
refs/heads/master
| 2020-05-17T06:54:24.476502 | 2014-08-07T21:18:58 | 2014-08-07T21:18:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import time
import logger
class cliente:
    """Thin wrapper around a connected IRC socket.

    Each method formats one IRC protocol command, sends it over the socket
    and records the outgoing line via logger.log(...).LogSend().  Most
    commands sleep one second first -- presumably crude flood protection;
    confirm against the server's rate limits.
    """
    def __init__(self, Param00):
        # Param00: a connected socket-like object exposing send().
        self.s = Param00
    def send_msg(self, msg):
        """Send a raw protocol line."""
        # NOTE(review): this method terminates the line with "\r\n" while
        # every other method uses a bare "\n" -- confirm which the target
        # server expects before unifying.
        self.s.send("%s\r\n" % msg)
        logger.log(msg).LogSend()
    def privmsg(self, target, text):
        """Send a PRIVMSG command."""
        time.sleep(1)
        self.s.send("PRIVMSG %s :%s\n" % (target, text))
        logger.log("PRIVMSG %s :%s" % (target, text)).LogSend()
    def notice(self, target, text):
        """Send a NOTICE command."""
        time.sleep(1)
        self.s.send("NOTICE %s :%s\n" % (target, text))
        logger.log("NOTICE %s :%s" % (target, text)).LogSend()
    def Join(self, channel, key=""):
        """Send a JOIN command (channel "0" is ignored)."""
        if channel != "0":
            time.sleep(1)
            self.s.send("JOIN %s%s\n" % (channel, (key and (" " + key))))
            logger.log("JOIN %s%s" % (channel, (key and (" " + key)))).LogSend()
    def part(self, channel, message=""):
        """Send a PART command with an optional parting message."""
        time.sleep(1)
        self.s.send("PART %s%s\n" % (channel, (message and (" :" + message))))
        logger.log("PART %s%s" % (channel, (message and (" :" + message)))).LogSend()
    def kick(self, channel, nick, comment=""):
        """Send a KICK command with an optional comment."""
        time.sleep(1)
        self.s.send("KICK %s %s%s\n" % (channel, nick, (comment and (" :" + comment))))
        logger.log("KICK %s %s%s" % (channel, nick, (comment and (" :" + comment)))).LogSend()
    def remove(self, channel, nick, comment=""):
        """Send a REMOVE command with an optional comment."""
        time.sleep(1)
        self.s.send("REMOVE %s %s%s\n" % (channel, nick, (comment and (" :" + comment))))
        logger.log("REMOVE %s %s%s" % (channel, nick, (comment and (" :" + comment)))).LogSend()
    def mode(self, channel, target, command=""):
        """Send a MODE command."""
        time.sleep(1)
        self.s.send("MODE %s %s%s\n" % (channel, target, (command and (" " + command))))
        logger.log("MODE %s %s%s" % (channel, target, (command and (" " + command)))).LogSend()
    def topic(self, channel, new_topic=None):
        """Send a TOPIC command: query the topic when new_topic is None,
        otherwise set it."""
        if new_topic is None:
            time.sleep(1)
            self.s.send("TOPIC %s\n" % channel)
            logger.log("TOPIC %s" % channel).LogSend()
        else:
            time.sleep(1)
            self.s.send("TOPIC %s :%s\n" % (channel, new_topic))
            logger.log("TOPIC %s :%s" % (channel, new_topic)).LogSend()
    def ctcp_version(self, user):
        # Reply to a CTCP VERSION request with the bot's identification.
        time.sleep(1)
        self.s.send("NOTICE %s :IRCBot. UserBot by Kwargs.\n" % user)
        logger.log("NOTICE %s :IRCBot. UserBot by Kwargs." % user).LogSend()
    def ctcp_ping(self, user, target):
        # Reply to a CTCP PING request (no throttling sleep here).
        self.s.send("NOTICE %s :PING %s\n" % (user, target))
        logger.log("NOTICE %s :PING %s" % (user, target)).LogSend()
|
UTF-8
|
Python
| false | false | 2,014 |
18,511,309,060,599 |
91f8b059bb073e00011b14aebf24a27a056effdc
|
43e99e75b4569ee06db646af305029bd8ae49c20
|
/1/test.py
|
514fda1aafb3b29e4001ecf3bba9671f7b937673
|
[] |
no_license
|
xi4nyu/sae
|
https://github.com/xi4nyu/sae
|
5cc44121381405f983902f525766eb7bee898bc3
|
021f6211f530b1aab57e03cd253af5413f288f4c
|
refs/heads/master
| 2020-04-12T12:08:58.130057 | 2012-06-01T09:39:14 | 2012-06-01T09:39:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from main import settings, urls
from tornado.web import Application
from tornado.ioloop import IOLoop
from tornado.wsgi import WSGIApplication
from wsgiref import simple_server
from setting import HTTP_PORT
from werkzeug.serving import run_simple
from pprint import pprint
def run():
    """Serve the application with Tornado's native IOLoop on HTTP_PORT."""
    pprint(sorted(urls))  # dump the routing table at startup for debugging
    app = Application(urls, **settings)
    app.listen(HTTP_PORT)
    IOLoop.instance().start()

def run_wsgi():
    """Serve the application through WSGI using werkzeug's dev server
    (localhost:8080, auto-reload and debugger enabled)."""
    pprint(sorted(urls))
    app = WSGIApplication(urls, **settings)
    run_simple('localhost', 8080, app, use_reloader = True, use_debugger = True, extra_files = [])

def main():
    # Default entry point uses the native Tornado server.
    run()

if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
12,919,261,632,276 |
7016b94979f00ff31bb32da32fca7bee51cbd0b7
|
88b336fb422efac7e44ce52f2d465c20310060f6
|
/ceo/urwid/positions.py
|
6f0f6e075fe9762ff8a44664087433a9a3bfb5e6
|
[] |
no_license
|
jeremyroman/pyceo
|
https://github.com/jeremyroman/pyceo
|
8a861d7d8b7539dca721a82ff88bfe288606fc91
|
beef8f4abd29f1bf436bd9e16113d693b58ffa4f
|
refs/heads/master
| 2020-06-05T17:54:02.351493 | 2013-09-07T16:07:45 | 2013-09-07T16:07:45 | 2,263,985 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import urwid
from ceo import members
from ceo.urwid.widgets import *
from ceo.urwid.window import *
# (position key, human-readable title) pairs, in the order the wizard
# displays them.  The keys match the names used by members.list_positions().
position_data = [
    ('president',       'President'),
    ('vice-president',  'Vice-president'),
    ('treasurer',       'Treasurer'),
    ('secretary',       'Secretary'),
    ('sysadmin',        'System Administrator'),
    ('cro',             'Chief Returning Officer'),
    ('librarian',       'Librarian'),
    ('imapd',           'Imapd'),
    ('webmaster',       'Web Master'),
    ('offsck',          'Office Manager'),
]
class IntroPage(WizardPanel):
    """Read-only first wizard panel explaining how to fill in positions."""
    def init_widgets(self):
        self.widgets = [
            urwid.Text( "Managing Positions" ),
            urwid.Divider(),
            urwid.Text( "Enter a username for each position. If a position is "
                        "held by multiple people, enter a comma-separated "
                        "list of usernames. If a position is held by nobody "
                        "leave the username blank." ),
        ]
    def focusable(self):
        # Purely informational: the wizard should not give this page focus.
        return False
class InfoPage(WizardPanel):
    """Wizard panel where the operator enters the holder(s) of each position."""
    def init_widgets(self):
        self.widgets = [
            urwid.Text( "Positions" ),
            urwid.Divider(),
        ]
        positions = members.list_positions()
        self.position_widgets = {}
        for (position, text) in position_data:
            widget = LdapWordEdit(csclub_uri, csclub_base, 'uid',
                "%s: " % text)
            # Pre-fill each field with the current holders, comma-separated.
            if position in positions:
                widget.set_edit_text(','.join(positions[position].keys()))
            else:
                widget.set_edit_text('')
            self.position_widgets[position] = widget
            self.widgets.append(widget)
    def parse(self, entry):
        # Split a comma-separated username list; empty input means nobody.
        if len(entry) == 0:
            return []
        return entry.split(',')
    def check(self):
        # Collect the edited usernames into self.state['positions'] and
        # reject any username unknown to the members directory (returning
        # True keeps the wizard on this page).
        self.state['positions'] = {}
        for (position, widget) in self.position_widgets.iteritems():
            self.state['positions'][position] = \
                self.parse(widget.get_edit_text())
            for p in self.state['positions'][position]:
                if members.get(p) == None:
                    self.focus_widget(widget)
                    set_status( "Invalid username: '%s'" % p )
                    return True
        clear_status()
class EndPage(WizardPanel):
    """Final wizard panel: applies the position changes and reports results.

    Fixed: the failure branch used the undefined name ``join(failed)``,
    which raised NameError whenever any update failed; it now uses
    ``", ".join(failed)``.  The unused ``old = members.list_positions()``
    lookup in init_widgets was dropped (its result was never read).
    """
    def init_widgets(self):
        self.headtext = urwid.Text("")
        self.midtext = urwid.Text("")
        self.widgets = [
            self.headtext,
            urwid.Divider(),
            self.midtext,
        ]
    def focusable(self):
        # Status-only page; nothing to focus.
        return False
    def activate(self):
        # Apply every position -> holders mapping, collecting failures.
        failed = []
        for (position, info) in self.state['positions'].iteritems():
            try:
                members.set_position(position, info)
            # NOTE(review): `ldap` does not appear in this file's visible
            # imports -- presumably provided by a star import; confirm.
            except ldap.LDAPError:
                failed.append(position)
        if len(failed) == 0:
            self.headtext.set_text("Positions Updated")
            self.midtext.set_text("Congratulations, positions have been "
                "updated. You should rebuild the website in order to update "
                "the Positions page.")
        else:
            self.headtext.set_text("Positions Results")
            self.midtext.set_text("Failed to update the following positions: "
                "%s." % ", ".join(failed))
    def check(self):
        pop_window()
|
UTF-8
|
Python
| false | false | 2,013 |
8,650,064,179,690 |
61e9842a75d5528750586d620cefa41270f91ae7
|
5aecfe6959c4fafebdb669dae08159d0390768cf
|
/app/members/forms.py
|
0c423e287547cc2b0ea42631783d343662905187
|
[] |
no_license
|
cadu-leite/associados
|
https://github.com/cadu-leite/associados
|
3522d4700c239acbaec791ebb3410492e30f9078
|
71decd5c4fb4d517c5f0d5bab346eb3892fd83b6
|
refs/heads/master
| 2021-01-18T05:57:40.832222 | 2013-01-30T16:15:44 | 2013-01-30T16:15:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
from django import forms
from django.contrib.auth.models import User
from django.contrib.localflavor.br.forms import BRCPFField, BRPhoneNumberField, BRStateSelect
from django.utils.translation import gettext_lazy as _
from app.members.models import City, Organization, Member
class UserForm(forms.ModelForm):
    """Edit form for the built-in User: names and e-mail only (no username)."""
    class Meta:
        model = User
        exclude = ('username', )
        fields = ('first_name', 'last_name', 'email')
class MemberForm(forms.ModelForm):
    """Member profile form with Brazilian locale fields (CPF, phone, state).

    Organization and city are entered as free text and resolved to (or
    created as) model instances in the clean_* hooks below.
    """
    cpf = BRCPFField(label=_("CPF"), required=True)
    phone = BRPhoneNumberField(label=_("Phone"), required=False)
    organization = forms.CharField(label=_("Organization"))
    city = forms.CharField(label=_("City"))
    state = forms.CharField(label=_("State"), widget=BRStateSelect())
    class Meta:
        model = Member
        exclude = ('user', )
        fields = ('category', 'organization', 'cpf', 'phone', 'address', 'city', 'state', 'relation_with_community', 'mailing', 'partner')
    def clean_organization(self):
        # Map the free-text organization name to an Organization instance,
        # creating it on first use; empty input yields None.
        organization = self.cleaned_data['organization']
        if organization:
            organization_instance, created = Organization.objects.get_or_create(name=organization)
            return organization_instance
        return None
    def clean_city(self):
        # Map the (city, state) pair to a City instance, creating it on
        # first use.  State is read from raw form data since clean order
        # across fields is not guaranteed.
        city = self.cleaned_data['city']
        state = self.data.get('state')
        if city:
            city_instance, created = City.objects.get_or_create(name=city, state=state)
            return city_instance
        return None
    def save(self, user, commit=True):
        # Attach the owning User before delegating to ModelForm.save().
        self.instance.user = user
        return super(MemberForm, self).save(commit)
UTF-8
|
Python
| false | false | 2,013 |
2,276,332,668,077 |
b8afd3d226de2a3c9199b62de9f9eda1b5ef7951
|
7e168a6067300f686be8b1209970c87ccb3efe26
|
/src/eval.py
|
48ed8c33bc39a544dbb599871182475f5b846fd0
|
[] |
no_license
|
nh0815/SearchEngineEvaluation
|
https://github.com/nh0815/SearchEngineEvaluation
|
a50501dba7c61cbf4c435312cb1eb71733e805ca
|
8942bce885cbb6634c3637aa020813cdb56405d0
|
refs/heads/master
| 2016-09-02T09:42:37.698059 | 2014-08-09T15:20:58 | 2014-08-09T15:20:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import division
from collections import OrderedDict
from math import log
__author__ = 'Nicholas Hirakawa'
def get_judgement(result, relevance):
    """Map each query in `result` to a list of binary relevance judgements.

    A retrieved docid scores 1 when `relevance` marks it relevant for that
    query, else 0.  Queries with no retrieved documents are omitted from
    the output (matching the original behaviour).

    result    -- ordered mapping of query -> sequence of retrieved docids
    relevance -- mapping of query -> container of relevant docids

    Fixed: uses .items() instead of the Python-2-only .iteritems(), so the
    function works on both Python 2 and 3; the duplicated if/else append
    logic is collapsed via setdefault().
    """
    judgements = OrderedDict()
    for query, docids in result.items():
        for docid in docids:
            relevant = query in relevance and docid in relevance[query]
            judgements.setdefault(query, []).append(1 if relevant else 0)
    return judgements
def get_precision_values(judge):
    """Return, per query, the precision-at-k curve of its judgement list.

    Element i of each inner list is the mean of the first i+1 binary
    judgements, i.e. precision after seeing i+1 results.

    Fixed: uses .values() instead of the Python-2-only .iteritems() (the
    keys were never used), and a running hit count instead of the
    index-juggling recurrence.  The first element is now a float rather
    than a bare int; values compare equal.
    """
    curves = []
    for judgements in judge.values():
        running = []
        hits = 0
        for rank, judgement in enumerate(judgements, 1):
            hits += judgement
            # True division: the module does `from __future__ import division`.
            running.append(hits / rank)
        curves.append(running)
    return curves
def average_precision(result, relevance):
    """Return the average precision of each query's result list."""
    judgements = get_judgement(result, relevance)
    return [sum(curve) / len(curve)
            for curve in get_precision_values(judgements)]
def NDCG(result, relevance, rank=5):
    """Return the normalised DCG of each query's result list.

    The ideal DCG is computed from only the relevant (1) judgements; a
    query with no relevant results scores 0.0.

    NOTE(review): `rank` is accepted but unused, exactly as in the
    original; kept for interface compatibility.

    Fixed: uses .values() instead of the Python-2-only .iteritems()
    (keys were never used), so this works on both Python 2 and 3.
    """
    scores = []
    judge = get_judgement(result, relevance)
    for judgements in judge.values():
        actual = DCG(judgements)
        ideal = DCG([j for j in judgements if j == 1])
        if ideal == 0:
            scores.append(0.0)
        else:
            scores.append(actual / ideal)
    return scores
def DCG(judge):
    """Return the Discounted Cumulative Gain of a list of binary judgements.

    DCG = sum over ranks r (1-based) of (2**gain - 1) / log2(r + 1).

    Fixed: no longer shadows the builtin `sum`; the log argument
    `1 + index + 1` is written as `rank + 1` with a 1-based enumerate.
    """
    total = 0
    for rank, gain in enumerate(judge, 1):
        total += ((2 ** gain) - 1) / log(rank + 1, 2)
    return total
def precision_rank_n(result, relevance, rank=10):
    """Return each query's precision at rank `rank` (default 10).

    NOTE(review): like the original, this assumes every query returned at
    least `rank` results; shorter lists raise IndexError.

    Fixed: a list comprehension replaces map(), which returns a lazy
    iterator on Python 3; on Python 2 the behaviour is identical.
    """
    judge = get_judgement(result, relevance)
    precision = get_precision_values(judge)
    return [curve[rank - 1] for curve in precision]
|
UTF-8
|
Python
| false | false | 2,014 |
1,451,698,981,521 |
1564ae71dc2d5191e2e8bfb2eaee5b51f8c1adea
|
d65fd1fa1b4ef4b63e4e6ec97836cac98d15fc96
|
/examples/cli/event.py
|
89b5f602476d0c5eb1ae5cc5d3491e3a20785011
|
[
"GPL-3.0-only",
"GPL-3.0-or-later"
] |
non_permissive
|
bjornarg/metadoc
|
https://github.com/bjornarg/metadoc
|
3e69efb7c7c974470ec595a3d8f3c170ce044c5a
|
f08bc7ff2b95298817f9248375d5cec3196bfc5a
|
refs/heads/master
| 2021-01-24T03:09:09.686196 | 2010-08-13T14:10:53 | 2010-08-13T14:10:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# event.py is part of MetaDoc (Client).
#
# All of MetaDoc is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# MetaDoc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MetaDoc. If not, see <http://www.gnu.org/licenses/>.
"""Registers an event through a command line interface that can be sent
through MetaDoc client.
events.py makes use of the MetaDoc client's caching system to add events to
the cache. This way, any events added will be sent the next time mapi.py is
run without the --no-cache or -n handles.
mapi.py should be run afterwards in order to send data to server.
Please remember to enclose argument strings with spaces in quotes, such as
--datedown='2010-06-29 9:39'.
Usage:
--help Displays this help message.
--up Sets event type to resource up(1).
--down Sets event type to resource down(1).
--reason=<reason> Sets the reason for the event(2,5)
--dateup=<date> Sets the date for the system comming up(2,3).
Defaults to now if nothing is given.
--datedown=<date> Sets the date for the system going down(2,3,5).
--sharedown=<share> Sets the percentage of the system being down during
the event(2,5).
--dateformat=<format> Sets the format of the dates that are passed in python
format(4)
--remarks=<file> File path for a text file containing remarks. This may
be a longer description than reason.
(1) Either --up or --down must be passed.
(2) Required if --down is passed.
(3) Must follow <format>. If --dateformat is not passed, <format> is set to
"%Y-%m-%d %H:%M"
(4) http://docs.python.org/library/datetime.html#strftime-and-strptime-behavior
(5) Only used by --down.
Examples:
System back up now, no reason given:
./event.py --up
System going down 10:00 the 20th august 2010 due to equipment replacement. The
file /tmp/systemshutdown contains more detailed information. Excepted downtime
two hours:
./event.py --down --reason='Equipment replacement' --datedown='2010-08-20 10:00'
--dateup='2010-08-20 12:00' --remarks=/tmp/systemshutdown
"""
import getopt
import sys
import os
import ConfigParser
from datetime import datetime
from events.definition import Events
from events.entries import ResourceUpEntry, ResourceDownEntry
from metadoc import MetaDoc
from cacher import Cacher
def main():
    """Parse command-line options, validate them, and register a
    resource-up or resource-down event in the MetaDoc events cache.

    See the module docstring for the full option reference.  Exits with
    status 2 on any validation or configuration error.
    """
    optlist = ['help', 'up', 'down', 'reason=', 'dateup=', 'datedown=',
                'sharedown=', 'dateformat=', 'remarks=']
    event_type = None
    reason = None
    # Defaults to "now" if --dateup is not given.
    date_up = datetime.now()
    date_down = None
    share_down = None
    date_format = "%Y-%m-%d %H:%M"
    remarks = None
    try:
        opts, args = getopt.getopt(sys.argv[1:], "", optlist)
    except getopt.GetoptError, goe:
        print str(goe)
        print __doc__
        sys.exit(2)
    # NOTE(review): each `opt in ('--help')` below tests substring
    # membership in a *string*, not tuple membership -- getopt only yields
    # exact option names so it works, but `('--help',)` was likely meant.
    for opt, arg in opts:
        if opt in ('--help'):
            print __doc__
            sys.exit()
        elif opt in ('--up'):
            event_type = "resourceUp"
        elif opt in ('--down'):
            event_type = "resourceDown"
        elif opt in ('--reason'):
            reason = arg
        elif opt in ('--dateup'):
            date_up = arg
        elif opt in ('--datedown'):
            date_down = arg
        elif opt in ('--sharedown'):
            share_down = arg
        elif opt in ('--dateformat'):
            date_format = arg
        elif opt in ('--remarks'):
            # Remarks are read from a file; a missing/unreadable file is fatal.
            try:
                rfile = open(arg, "r")
            except IOError, e:
                print "Could not open file containing remarks."
                print "Got error: %s" % str(e)
                print "Halting."
                sys.exit(2)
            else:
                remarks = rfile.read()
                rfile.close()
    # Load site_name from metadoc.conf next to this script.
    SCRIPT_PATH = os.path.abspath(os.path.dirname(sys.argv[0]))
    conf = ConfigParser.ConfigParser()
    conf.read("%s/%s" % (SCRIPT_PATH, "metadoc.conf"))
    try:
        v = conf.items("MetaDoc")
    except ConfigParser.NoSectionError as nose:
        print "Missing configuration file. Please make sure you have a MetaDoc"
        print "configuration file in %s before continuing." % SCRIPT_PATH
        sys.exit(2)
    vals = dict(v)
    site_name = vals.get("site_name")
    # date_up is a datetime already when --dateup was not passed; otherwise
    # it is the raw option string and must be parsed.
    if not isinstance(date_up, datetime):
        if date_up is not None:
            try:
                date_up = datetime.strptime(date_up, date_format)
            except ValueError, e:
                print "Date up recieved, but format does not match."
                print str(e)
                print __doc__
                sys.exit(2)
    if date_down is not None:
        try:
            date_down = datetime.strptime(date_down, date_format)
        except ValueError, e:
            print "Date down recieved, but format does not match."
            print str(e)
            print __doc__
            sys.exit(2)
    if event_type is None:
        print "Missing event type."
        print __doc__
        sys.exit(2)
    else:
        # Resource-down events require a reason, share and date.
        if event_type == "resourceDown":
            if reason is None:
                print "Recieved resource down handle, but missing reason."
                print __doc__
                sys.exit(2)
            if share_down is None:
                print "Recieved resource down handle, but missing share down."
                print __doc__
                sys.exit(2)
            if date_down is None:
                # NOTE(review): unlike the two checks above this one does
                # not sys.exit(2); execution continues and the entry is
                # built with date_down=None -- confirm whether intentional.
                print "Recieved resource down handle, but missing date down."
    # We have everything we require to create an event.
    # Attempt to find already cached data:
    m = MetaDoc(site_name)
    c = Cacher("events")
    cached_data = c.get_cache()
    if cached_data is not None:
        processor = Events.from_xml_element(cached_data, Events)
        if processor is None:
            # Corrupt cache is fatal rather than silently discarded.
            print "Found previous event cache, but could not load. Please check "
            print "\"%s\" for errors. " % c.file_path
            print "Halting."
            sys.exit(2)
        else:
            c.remove_cache()
    else:
        processor = Events()
    if event_type == "resourceUp":
        e = ResourceUpEntry(date_up, reason, remarks)
    else:
        e = ResourceDownEntry(reason, date_down, date_up, share_down, remarks)
    processor.add_element(e)
    m.reg_meta_element(processor)
    # Re-cache so the event is sent on the next mapi.py run.
    Cacher(Events.xml_tag_name, m)
    print "Event has been registered. Run mapi.py to send to server."
if __name__=='__main__':
main()
|
UTF-8
|
Python
| false | false | 2,010 |
18,622,978,229,314 |
2a33f1d519a5bc8d2f12b95586d50fe53e349907
|
c18b73295c69501c25dd95337cbcca970e5452ca
|
/dp/edit_distance/main.py
|
0f15bbf1383498bee7c7f6cff6ee17cd84a17d54
|
[] |
no_license
|
XianYuBO/Algorithm
|
https://github.com/XianYuBO/Algorithm
|
35c2828216440dc38432d894130e7b24cc68b58a
|
56587b36334a0176ba77ce18eb8617476165ea6a
|
refs/heads/master
| 2016-09-06T14:33:51.164475 | 2014-04-18T03:25:15 | 2014-04-18T03:25:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def get_edit_distance(a, b):
    """Return the Levenshtein (edit) distance between sequences a and b.

    Classic dynamic programme: r[x][y] is the distance between the first
    x elements of a and the first y elements of b.

    Fixed: range() replaces the Python-2-only xrange(); the float('inf')
    sentinel was unnecessary since every cell is assigned, so the table is
    zero-initialised.
    """
    r = [[0] * (len(b) + 1) for _ in range(len(a) + 1)]
    # Distance between any prefix and the empty sequence is its length.
    for x in range(len(a) + 1):
        r[x][0] = x
    for y in range(len(b) + 1):
        r[0][y] = y
    for x, a_item in enumerate(a, 1):
        for y, b_item in enumerate(b, 1):
            if a_item == b_item:
                # Matching elements cost nothing.
                r[x][y] = r[x - 1][y - 1]
            else:
                # 1 + cheapest of substitute / delete / insert.
                r[x][y] = 1 + min(r[x - 1][y - 1], r[x - 1][y], r[x][y - 1])
    return r[len(a)][len(b)]
if __name__ == "__main__":
print get_edit_distance("kitten", "sitting")
|
UTF-8
|
Python
| false | false | 2,014 |
16,716,012,720,026 |
860402e5afc24240d856b1f48da40406e5707bb5
|
e39877626ebc1036dd17939bd78f1939f7937f32
|
/wikipediabase/api.py
|
7a60117fee6ceb25c4e3e858ca960ac5008ce966
|
[
"BSD-3-Clause"
] |
permissive
|
fakedrake/WikipediaBase-skinz
|
https://github.com/fakedrake/WikipediaBase-skinz
|
5e53207af6edc4a3648eda7653dc08b084161346
|
d3cd2a187e49ef5b3d1c337abdb16093bf94df95
|
refs/heads/master
| 2021-01-25T12:02:03.258149 | 2013-12-19T20:15:21 | 2013-12-19T20:15:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Advertising means registering a function to the 'functions'
domain. From a user's standpoint in that domain lives a dictionary of
'name' -> *fn-obj* that is used by the front end to provide functions.
"""
from context import Context
from default import DEFAULTS_DOMAIN
from functions import MetaAdvert
from skin import Skin, DictSkinConfig
import types
def set(domain, value, function=False):
    # Store `value` under `domain` in the current skin; function=True
    # targets the callables skin.  (Intentionally shadows the builtin
    # `set`; callers rely on this name.)
    Context.get_skin(function=function)[domain] = value

def get(domain, function=False, **kwargs):
    """
    Get a single piece of data. `function` needs to be true if you want
    a callable.  Extra kwargs are forwarded to the skin's get().
    """
    return Context.get_skin(function=function).get(domain, **kwargs)

def append(domain, value, function=False, **kwargs):
    # Append `value` to whatever collection the skin stores under `domain`.
    return Context.get_skin(function=function).append(domain, value, **kwargs)

def defaults_decorator(fn):
    """
    Make the decorated function fall back, for any argument the caller did
    not supply, to the defaults stored in the context's DEFAULTS_DOMAIN.
    """
    def wrap(*args, **kwargs):
        # Convert all positional arguments to kwargs
        argdic = dict(zip(fn.__code__.co_varnames, args))
        # Precedence: positional args > explicit kwargs > context defaults.
        kw = (Context.get_skin(function=False).get(DEFAULTS_DOMAIN) or {}).copy()
        kw.update(kwargs)
        kw.update(argdic)
        return fn(**kw)
    return wrap

@defaults_decorator
def get_fn(name, domain=None, **kw):
    """
    Access functions in a domain.  With no domain, `name` doubles as the
    domain key.
    """
    d = Context.get_skin(function=True)[domain or name]
    try:
        return d[name]
    except TypeError:
        # The slot holds a single callable rather than a dict of them.
        return d

def setdict(dic):
    """
    Creates a new skin with config dict.
    """
    Context.set_skin(Skin(DictSkinConfig(dic)))

def domaincall(domain, name, *args, **kwargs):
    # Call the function stored as `name` inside `domain`.
    return get_fn(name, domain=domain)(*args, **kwargs)

def freecall(name, *args, **kwargs):
    """
    Call a function saved in a 'name' domain.
    """
    return get_fn(name, domain=None)(*args, **kwargs)

def call(name, *args, **kwargs):
    """
    Call a function from the 'functions' domain.
    """
    return get_fn(name)(*args, **kwargs)

@defaults_decorator
def advertise_fn(func, **kwargs):
    # Register `func` with the context and hand it back so the caller can
    # keep using it (decorator-friendly).
    Context.register_function(func, **kwargs)
    return func

@defaults_decorator
def advertise(name=None, domain=None, append=None, **kw):
    """
    To decorate methods of a class it needs to subclass
    `Advertisable`. Also this decorator implies `@staticmethod`.
    Decorator for advertising functions using their name as key, or
    provide a name and you may decorate with parameters. Default
    parameters are in DEFAULTS_DOMAIN of the context. You may see what
    params you can pass by looking at `Context.register_function`.
    Provide domain and not name to put the vanilla function in the
    slot.
    """
    def real_dec(fn): return advertise_fn(fn, name=name,
                                          domain=domain,
                                          append=append, **kw)
    return real_dec

def jsondump():
    # Serialize the current (data) skin via its dump() method.
    return Context.get_skin(function=False).dump()
def attribute_resolvers():
    """
    Return the list of the attribute resolvers available.

    Fixed: the original evaluated the lookup but never returned it, so the
    function always returned None despite its docstring.
    """
    return Context.get_skin(function=True)["resolvers"]
class Advertisable(object):
    """
    Subclassing this will make your methods advertisable.
    """
    # MetaAdvert hooks class creation so that @advertise'd methods are
    # registered with the context when the class body is executed.
    __metaclass__ = MetaAdvert
|
UTF-8
|
Python
| false | false | 2,013 |
6,459,630,853,741 |
f98e20a4fea2e79e2a4c366f60a45f989820591e
|
4d5e3fb0bbeb2965ed937b7168d4bb44dc8bf5fc
|
/PM_app/views.py
|
073e206a06d4fbe6644d88237ab4b9d46b8e559e
|
[] |
no_license
|
MichelleGlauser/django-project-manager
|
https://github.com/MichelleGlauser/django-project-manager
|
afc7d6215eac4911ca8dfa899100647591603a4b
|
8b3338c3be19f1089ff34a4e44b1221cb7e443df
|
refs/heads/master
| 2020-05-03T06:02:38.643514 | 2014-12-20T03:54:23 | 2014-12-20T03:54:23 | 28,153,939 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render, get_object_or_404, get_list_or_404, redirect
from django.views.generic.edit import UpdateView
from django.http import HttpResponse
from django.core.serializers import serialize
from PM_app.models import Project, Task
from .forms import ProjectForm, TaskForm
# def index(request):
# return render(request, 'index.html')
def list_projects(request):
    """Render the home page listing every project in the database."""
    all_projects = Project.objects.all()
    context = {'projects': all_projects}
    return render(request, 'index.html', context)
def create_project(request):
    """
    Show the new-project form (GET) or create a project from the
    submitted data (POST), redirecting to the project list on success.
    An invalid POST re-renders the bound form with its errors.
    """
    if request.method != "POST":
        return render(request, 'create_project.html', {'form': ProjectForm()})
    form = ProjectForm(request.POST)
    if not form.is_valid():
        return render(request, 'create_project.html', {'form': form})
    new_project = form.save(commit=False)
    new_project.save()
    return redirect('list_projects')
def show_project(request, project_pk):
    """Show a single project (404 on bad pk) together with its tasks."""
    project = get_object_or_404(Project, pk=project_pk)
    project_tasks = Task.objects.filter(project=project_pk)
    context = {'project': project, 'tasks': project_tasks}
    return render(request, 'show_project.html', context)
def create_task(request, project_pk):
    """
    Show the new-task form (GET) or attach a new task to the given
    project (POST), redirecting back to the project page on success.
    """
    project = get_object_or_404(Project, pk=project_pk)
    if request.method == "POST":
        form = TaskForm(request.POST)
        if form.is_valid():
            new_task = form.save(commit=False)
            new_task.project_id = project_pk  # link the task to its project
            new_task.save()
            return redirect('show_project', project_pk=project.pk)
    else:
        form = TaskForm()
    return render(request, 'create_task.html', {'form': form})
class TaskUpdate(UpdateView):
    """
    Generic class-based edit view for Task.

    NOTE(review): the function-based edit_task view in this module covers
    the same case — verify which one the URLconf actually routes to.
    """
    model = Task
    fields = ['name', 'description', 'difficulty_level']
    template_name = 'edit_task.html'
    form_class = TaskForm
def edit_task(request, task_pk, project_pk):
    """
    Edit an existing task, pre-filling the form with its current values,
    and redirect back to the project page on a successful save.
    """
    # FIX: use get_object_or_404 for consistency with the other views in
    # this module — the original used Task.objects.get, which raises an
    # unhandled Task.DoesNotExist (HTTP 500) for a bad pk instead of 404.
    instance = get_object_or_404(Task, pk=task_pk)
    if request.method == "POST":
        form = TaskForm(request.POST, instance=instance)
        if form.is_valid():
            task = form.save(commit=False)
            task.save()
            return redirect('show_project', project_pk=project_pk)
    else:
        # GET: bind the form to the existing task so previous values show.
        form = TaskForm(instance=instance)
    return render(request, 'edit_task.html', {'form': form})
def list_tasks(request, project_pk):
    """Return all tasks of the given project serialized as JSON."""
    project = get_object_or_404(Project, pk=project_pk)
    payload = serialize('json', project.task_set.all())
    return HttpResponse(payload, content_type='application/json')
|
UTF-8
|
Python
| false | false | 2,014 |
171,798,715,568 |
46c7504099375ae6d41163dbb2c8f6261f0fd99f
|
da614d20650d0962134be95912243648b6b65e19
|
/userprofile/urls.py
|
83f35fbb21abca3ae50f3057a14aea63545f49c0
|
[
"GPL-2.0-only"
] |
non_permissive
|
The-WebOps-Club/project-management-portal
|
https://github.com/The-WebOps-Club/project-management-portal
|
38833531bd3364e16e922607ce33a1e2bbd312b0
|
15c50c2aef76416edf3bb5255a996aec019634f4
|
refs/heads/master
| 2021-01-22T22:57:51.288143 | 2014-05-27T16:30:57 | 2014-05-27T16:30:57 | 17,944,485 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include, url
from django.contrib.auth import views as auth_views

# URL routes for account management: profile editing, the standard
# django.contrib.auth password change/reset flow, and django-registration.
# NOTE(review): r'^password/' below is unanchored, so it also matches every
# password/change/... and password/reset/... path and will shadow the more
# specific routes that follow — confirm this ordering is intended.
urlpatterns = patterns('',
    url(r'^test/', 'userprofile.views.test'),
    url(r'^user/', 'userprofile.views.account', name='edit_account'),
    url(r'^password/', 'userprofile.views.password', name='edit_password'),
    url(r'^upload/', 'userprofile.views.upload', name='save_account'),
    # Password change (for authenticated users)
    url(r'^password/change/$',
        auth_views.password_change,
        name='password_change'),
    url(r'^password/change/done/$',
        auth_views.password_change_done,
        name='password_change_done'),
    # Password reset (token sent by e-mail)
    url(r'^password/reset/$',
        auth_views.password_reset,
        name='password_reset'),
    url(r'^password/reset/done/$',
        auth_views.password_reset_done,
        name='password_reset_done'),
    url(r'^password/reset/complete/$',
        auth_views.password_reset_complete,
        name='password_reset_complete'),
    url(r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$',
        auth_views.password_reset_confirm,
        name='password_reset_confirm'),
    # Everything else falls through to django-registration's default URLs.
    url(r'^', include('registration.backends.default.urls')),
)
|
UTF-8
|
Python
| false | false | 2,014 |
3,100,966,394,847 |
361ab402702c149373ede76e97d60e4ec6d95c28
|
18b0f6bb472f2de01ce46fdcef3848e3f47b28cb
|
/wsgi/openshift/agot/tests/test_models.py
|
9e6f164dfadd87b5dfc86ffc6e544aa4816aad96
|
[] |
no_license
|
mebusw/lotr-django-rest
|
https://github.com/mebusw/lotr-django-rest
|
20942889a9c9400fe6a1eb858587c578069929dc
|
f05b21385f57429beeb95a4d4fc674e4e6278adf
|
refs/heads/master
| 2016-09-05T12:48:42.112042 | 2012-10-14T13:20:37 | 2012-10-14T13:20:37 | 5,112,429 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.test import TestCase
from django.utils import timezone
from datetime import datetime, time, date, timedelta
from poll.models import *
from agot.models import *
from django.db.models import Q, F
import logging
logger = logging.getLogger('myproject.custom')
class CycleModelTest(TestCase):
    """Unit tests for the Cycle model."""

    def setUp(self):
        # Single cycle fixture shared by every test method.
        fixture = Cycle(name='cycle1')
        fixture.save()

    def test_creating_and_saving(self):
        cycles = Cycle.objects.all()
        self.assertEqual(1, len(cycles))
        self.assertEqual('cycle1', cycles[0].name)

    def test_package_count(self):
        # A fresh cycle starts with no related packages.
        cycles = Cycle.objects.all()
        self.assertEqual(0, cycles[0].package_set.count())

    def test_unicode_display(self):
        cycles = Cycle.objects.all()
        self.assertEqual('cycle1', unicode(cycles[0]))
class CardModelTest(TestCase):
    """Unit tests for the Card model and its package/house relations."""

    def setUp(self):
        # Minimal fixture: one package, one house, one card linked to both.
        self.package = Package.objects.create(name='abc', cycle=None, pub_date=timezone.now(), type=u'基础')
        self.house = House.objects.create(name='Stark')
        self.card = Card.objects.create(name='sword', package=self.package, type=u'附属牌', cost=2)
        self.card.house.add(self.house)

    def test_creating_and_saving(self):
        cards = Card.objects.all()
        self.assertEqual('sword', cards[0].name)
        self.assertEqual(2, cards[0].cost)
        # strength was not set, so the model default of 0 applies.
        self.assertEqual(0, cards[0].strength)
        self.assertIn(self.house, cards[0].house.all())
|
UTF-8
|
Python
| false | false | 2,012 |
11,553,462,068,510 |
1612fde326130e1e15d0e3dd1878fc959bda2d33
|
36ff64692e50a720f66340a226f368fd7602f587
|
/extract-features/InterestRegion.py
|
2669d66f08d6389a2fe1dbf0f5312081d91f670c
|
[] |
no_license
|
balasanjeevi/bazinga
|
https://github.com/balasanjeevi/bazinga
|
996eeae53510311d68ce109f120ee0ccb206784c
|
39068369a5f5a1610f6d958ff2d54b479e5b80a6
|
refs/heads/master
| 2018-01-07T05:58:19.435512 | 2013-08-15T17:41:41 | 2013-08-15T17:41:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
def compute(pmtrs, ix_image):
    """
    Run the external feature-extraction binary on image *ix_image*.

    Builds the command line from *pmtrs* (interest-point type, image
    path, thresholds, output feature path) and executes it via os.system.
    """
    # FIX: removed two hard-coded, never-used local path strings
    # (str_fi_image / str_fi_feat) that the original left behind.
    cmd_args = '-' + pmtrs.data['intpt']['type'] + ' '
    cmd_args += ' -i ' + pmtrs.image_path(ix_image)
    cmd_args += pmtrs.intpt_thresholds()
    cmd_args += ' -o1 ' + pmtrs.feat_path(ix_image)
    cmd_args += ' -DE '
    # NOTE(review): os.system with string-concatenated paths is shell-injection
    # prone if any path is untrusted; consider subprocess with an argument list.
    os.system(pmtrs.data['folders']['extract-feat'] + ' ' + cmd_args)
def read(pmtrs, ix_image):
    """
    Parse the feature file written for image *ix_image*.

    File format: line 1 = descriptor dimension, line 2 = feature count,
    then one whitespace-separated row of floats per feature.
    Returns a list of float lists (one per feature).
    """
    # FIX: `with` guarantees the file is closed even if parsing raises
    # (the original only closed on the success path).
    with open(pmtrs.feat_path(ix_image)) as fp:
        feat_dim = int(fp.readline().strip('\n'))    # descriptor dimension (not used here)
        feat_count = int(fp.readline().strip('\n'))
        feat = []
        for _ in range(feat_count):
            str_line = fp.readline()
            # FIX: split() instead of split(' ') tolerates repeated or
            # trailing spaces, which made float('') crash in the original.
            feat.append([float(tok) for tok in str_line.split()])
    return feat
|
UTF-8
|
Python
| false | false | 2,013 |
9,629,316,719,007 |
8b21133eb2cbea08bf1fc28a4f7df1b5247d5472
|
597f7e2104f0fa54b439e0872bae3744ac90c505
|
/sandbox/get_ctrl.py
|
e3123c8d6258274d76d31a65b63b0294fbb7e387
|
[] |
no_license
|
bashburn/agietst
|
https://github.com/bashburn/agietst
|
4481e2869bb991567c647f59c1eccdf48abb1ab5
|
f6863546ee0bb6b6be670b0bef13daa0c217a287
|
refs/heads/master
| 2021-01-17T22:15:56.675279 | 2013-03-14T23:06:25 | 2013-03-14T23:06:25 | 8,787,118 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import sys
from qpid.messaging import *
#global vars
broker_local = "localhost:5672"  # local qpid broker endpoint
addr_control = "agie_inbound/agie_inbound_control"  # control address (receiver below uses the bare queue name)
intf_table = []  # shared interface table, mutated by intf_up()/intf_down()
title = ['status', 'intf_name', 'intf_ip']  # field names for incoming messages
def intf_up(msg_list):
    """
    Record a newly-up interface in the global interface table.

    msg_list is [status, intf_name, intf_ip] (matching the global
    `title`).  Duplicate interface names are ignored.  Returns the
    shared table.
    """
    # NOTE: tmp_tbl aliases the global list, so the append below mutates
    # intf_table in place.
    tmp_tbl = intf_table
    tmp_entry = dict(zip(title,msg_list))
    print 'tmp_entry', tmp_entry
    intf_tmp = tmp_entry.get('intf_name')
    print 'intf', intf_tmp
    # Linear scan for an existing entry with the same interface name.
    exist = [ iface for iface in tmp_tbl if iface.get('intf_name') == intf_tmp ]
    if exist:
        print "already exists"
    else:
        tmp_tbl.append(tmp_entry)
        print 'Added inteface on ', msg_list[1]
        print tmp_tbl
    return tmp_tbl
def intf_down(msg_list):
    """
    Remove the interface named in *msg_list* from the global table.

    msg_list is [status, intf_name, intf_ip] (matching the global
    `title`).  Returns the updated shared table.
    """
    entry = dict(zip(title, msg_list))
    intf_to_rm = entry['intf_name']
    # BUG FIX: the original built this filtered list, printed it, and then
    # returned the table UNCHANGED — interfaces were never actually removed.
    remaining = [iface for iface in intf_table if iface.get('intf_name') != intf_to_rm]
    del intf_table[:]            # mutate the shared global list in place
    intf_table.extend(remaining)
    print(remaining)
    print('Inteface removed ')
    return intf_table
def broker_conn():
    """
    Connect to the local qpid broker and consume control messages forever.

    Blocks in receiver.fetch(); each message is printed and acknowledged.
    The connection is always closed on error or interrupt.
    (Python 2 syntax: `except MessagingError,m`.)
    """
    # create connection to local broker
    lb_connection = Connection(broker_local)
    try:
        lb_connection.open()
        session = lb_connection.session()
        receiver = session.receiver("agie_inbound_control")
        while True:
            message = receiver.fetch()
            received = message.content
            print 'received', received
            # ack after each fetch so the broker can release the message
            session.acknowledge()
    except MessagingError,m:
        print m
    finally:
        lb_connection.close()
# Script entry point: start consuming immediately when the module runs.
broker_conn()
|
UTF-8
|
Python
| false | false | 2,013 |
6,098,853,574,449 |
ed4a37a5520ae53f90ce2cfe617268d9b70d7f35
|
932bc2ab6e910eddb0e1b3a12bc78f8685a0b6ad
|
/setup.py
|
ca606ddec494b53ddf6bf672aed1b81a98114f85
|
[] |
no_license
|
haridsv/logss
|
https://github.com/haridsv/logss
|
a85f9dc4034d0183668223c934b80e82a9062729
|
386c623052a62031129340c2e1c51a8b5cfdfc35
|
refs/heads/master
| 2020-12-25T02:01:08.726994 | 2011-11-06T01:42:34 | 2011-11-06T01:42:34 | 2,600,197 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from setuptools import setup, find_packages
import sys, os

# The long description shown on PyPI is the README plus release notes.
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
NEWS = open(os.path.join(here, 'NEWS.txt')).read()

version = '0.1'

# Runtime dependency: Google's gdata client for Spreadsheets access.
install_requires = [
    'gdata>=2.0.0',
]

setup(name='logtogss',
    version=version,
    description="Log rows to a Google SpreadSheet",
    long_description=README + '\n\n' + NEWS,
    classifiers=[
      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    ],
    keywords='gdata google spreadsheet csv import',
    author='Hari Dara',
    author_email='[email protected]',
    url='https://github.com/haridsv/logss',
    license='BSD 2-Clause',
    # Package sources live under src/ (src-layout).
    packages=find_packages('src'),
    package_dir = {'': 'src'},include_package_data=True,
    zip_safe=False,
    install_requires=install_requires,
    # Installs a `logtogss` console command that calls logtogss.main().
    entry_points={
        'console_scripts':
            ['logtogss=logtogss:main']
    }
)
|
UTF-8
|
Python
| false | false | 2,011 |
12,558,484,420,213 |
8a5653a89039e7e058379ffdbd8e5b89bb64c146
|
1df999796f13595e1cb6741fb5324c4d40f2d417
|
/restapi/settings.py
|
37557a058814bb177852b1c444c60a55a4c79554
|
[] |
no_license
|
nrabe/t8_restapi_py
|
https://github.com/nrabe/t8_restapi_py
|
55802443925e74a89859121d7a2bb2dad594613e
|
60263bfa353ee976f74b28a4d82b41169fb7cc9a
|
refs/heads/master
| 2016-09-06T16:26:57.994484 | 2014-05-20T18:33:16 | 2014-05-20T18:33:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Development configuration values.
DEBUG = True  # NOTE(review): must be False in production
SECRET_KEY = 'dev_key_h8hfne89vm'  # dev-only key; override outside source control in production
CSRF_ENABLED = False  # NOTE(review): disabling CSRF protection is unsafe outside local testing
|
UTF-8
|
Python
| false | false | 2,014 |
9,019,431,334,395 |
92367e41b966aa411d87eb16b27b5ac38c8a1427
|
5c55dafa27b617b62076c02e83f62f401ae56120
|
/bundle/CryptoPlusLite/Util/__init__.py
|
b676d089f48db05142c6f9ad5680f1dc4a1a30b6
|
[
"MIT"
] |
permissive
|
bluele/spm
|
https://github.com/bluele/spm
|
ad9cde1f1b95556b230a39a32d7b13bc1daf5366
|
3be3d4879a829c93115eb928b67a0881a1af9697
|
refs/heads/master
| 2023-05-11T23:41:09.831056 | 2013-04-06T14:23:17 | 2013-04-06T14:23:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Util initialization
makes the Util modules from Crypto AND CryptoPlus available here
"""
#import Crypto
#from Crypto.Util import number, randpool, RFC1751
import util
from pkg_resources import parse_version
__all__ = ["util"]
#if parse_version(Crypto.__version__) > parse_version("2.0.1"):
# from Crypto.Util import python_compat
# __all__.append("python_compat")
#del Crypto
|
UTF-8
|
Python
| false | false | 2,013 |
7,095,285,997,483 |
82133c16470cc28995fe49dd923e80fb94f466a4
|
de488b9e8e3e81b4821d6ab66ce4bc6a04164989
|
/tools/__init__.py
|
c3bdc8faf5e132af4e91a673329ec4110c09658b
|
[] |
no_license
|
surchs/cpac_netmat
|
https://github.com/surchs/cpac_netmat
|
799fa828c740d2d56ed431e53e6464f05274d78d
|
00ab023780d90d598736200e037c58ac0ccc800a
|
refs/heads/master
| 2021-01-19T09:44:08.103244 | 2013-03-28T14:11:28 | 2013-03-28T14:11:28 | 6,661,647 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Nov 1, 2012
@author: sebastian
'''
import phenowave
import waveselector
import meisterlein
import subjectMaker
|
UTF-8
|
Python
| false | false | 2,013 |
11,682,311,063,276 |
ef86d8549cb4220fd1a159f5843dbc8f6dadd070
|
26315a51e9662499fc7d58e930d9580c34e76d3a
|
/config/configfilehandler.py
|
8569406245486b566188d44518c16fddd5fa30d1
|
[] |
no_license
|
PawelPamula/geosynoptic
|
https://github.com/PawelPamula/geosynoptic
|
b620e886f9679cfdf774cb344b6d482c917bd84b
|
8c004522ce373c541f06d6b85c2b0216748e2c25
|
refs/heads/master
| 2018-12-28T09:31:37.363819 | 2013-10-30T11:20:29 | 2013-10-30T11:20:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
from lxml import etree
class InvalidPathException(Exception):
    """Exception for invalid config-file paths (declared but not raised in this module)."""
    pass
class ConfigFileHandler(object):
    """
    Read/write helper for the application's XML configuration file.

    The file stores a default icon path plus a <groups> tree of device
    groups; a missing file is created with defaults on construction.
    """

    def __init__(self, configFilePath='config.xml'):
        """
        Creates configFile under specified path if one does not exist.
        Creates lxml.etree structure for handling config info.

        Raises Exception("Invalid XML File") when an existing file does
        not parse as XML.
        """
        self.configFilePath = configFilePath
        self.configFileName = os.path.basename(self.configFilePath)
        if not os.path.isfile(self.configFilePath):
            try:
                print("config.xml was not found and it's being created...")
                # creates new default config
                self.root = etree.Element("config")
                defaultIconPath = etree.SubElement(self.root, "defaultIconPath")
                defaultIconPath.text = os.path.join(os.path.dirname(__file__), 'resource/icons/default.png')
                self.root.append(etree.Element("groups"))
                self._saveConfigToFile()
            except OSError:
                print('Error: Config file was not created')
        else:
            print("config.xml was found!")
            try:
                self.root = etree.parse(self.configFilePath).getroot()
            except etree.XMLSyntaxError:
                print("%s is not valid xml file" % self.configFileName)
                raise Exception("Invalid XML File")

    def getDefaultIconPath(self):
        """Return the <defaultIconPath> text, or None when the element is absent."""
        for element in self.root.iter():
            if element.tag == 'defaultIconPath':
                return element.text

    def printConfig(self):
        """Pretty-print the whole config tree to stdout (debug helper)."""
        print(etree.tostring(self.root, pretty_print=True))

    def _saveConfigToFile(self):
        """
        Write the XML tree to the configured file path.
        """
        # BUG FIX: write to the full configFilePath — the original opened the
        # bare basename, so a config located in another directory was silently
        # saved into the current working directory instead.
        with open(self.configFilePath, 'w') as configFile:
            doc = etree.ElementTree(self.root)
            doc.write(configFile, pretty_print=True)
        # (the explicit close() inside the with-block was redundant; removed)

    def editGroupName(self, old_name, new_name, new_path):
        """
        Rename group *old_name* to *new_name*; also update its icon path
        when *new_path* is non-empty.  Persists the change on success.
        """
        element = self.root.xpath("//groups/group[@name='%s']" % old_name)
        if element:
            print(element[0].attrib['name'])
            element[0].attrib['name'] = new_name
            if new_path != '':
                element[0].attrib['icon_path'] = new_path
            self._saveConfigToFile()
        else:
            # BUG FIX: the original had a bare string expression here,
            # which is a no-op; actually report the miss.
            print('NOT FOUND')

    def addGroup(self, group_name, icon_path=''):
        """
        Insert a new <group> element; no-op when the name already exists.

        BUG FIX: icon_path now defaults to '' (backward compatible) —
        addDevice() calls this method with only a group name, which used
        to raise TypeError for the missing positional argument.
        """
        newGroup = etree.Element("group")
        newGroup.attrib['name'] = group_name
        newGroup.attrib['icon_path'] = icon_path
        if not self.root.xpath("//groups/group[@name='%s']" % group_name):
            self.root.xpath('//groups')[0].insert(0, newGroup)
            self._saveConfigToFile()

    def addDevice(self, device_name, group_name):
        """
        Add *device_name* under *group_name*, creating the group if needed.

        Returns True if adding device was successful, False when the
        device is already present in the group.
        """
        self.addGroup(group_name=group_name)
        element = self.root.xpath("//groups/group[@name='%s']" % group_name)
        for dev in element[0]:
            if dev.text == device_name:
                return False
        newDevice = etree.Element("device")
        newDevice.text = device_name
        element[0].insert(-1, newDevice)
        self._saveConfigToFile()
        return True

    def groupExists(self, group_name):
        """Return True when a group named *group_name* exists in the config."""
        element = self.root.xpath("//groups/group[@name='%s']" % group_name)
        if element:
            return True
        return False

    def getGroups(self):
        """Return the list of all group names."""
        groups = self.root.xpath("//groups/group")
        return [g.attrib['name'] for g in groups]

    def getGroupsWithIcons(self):
        """Return (name, icon_path) tuples for every group."""
        groups = self.root.xpath("//groups/group")
        return [(g.attrib['name'], g.attrib['icon_path']) for g in groups]
# Manual smoke test: build (or load) a config and dump it to stdout.
if __name__ == '__main__':
    c = ConfigFileHandler()
    c.printConfig()
|
UTF-8
|
Python
| false | false | 2,013 |
19,198,503,821,420 |
d751459a66483be1c5f9df680d3bb445659328d2
|
dd949f215d968f2ee69bf85571fd63e4f085a869
|
/subarchitectures/planner.sa/branches/post-review-yr2-11531/src/python/standalone/pddl/partial_observability.py
|
5acfd696d12aea4467ac361d78d3e63cb52b4f50
|
[] |
no_license
|
marc-hanheide/cogx
|
https://github.com/marc-hanheide/cogx
|
a3fd395805f1b0ad7d713a05b9256312757b37a9
|
cb9a9c9cdfeba02afac6a83d03b7c6bb778edb95
|
refs/heads/master
| 2022-03-16T23:36:21.951317 | 2013-12-10T23:49:07 | 2013-12-10T23:49:07 | 219,460,352 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from dtpddl import *
|
UTF-8
|
Python
| false | false | 2,013 |
1,511,828,496,211 |
a6d364b65b5f8c80a03d907752414ea7252fd86c
|
26a64aa244661b334f9546ec9ecfd5d358c2703f
|
/challenge5.py
|
964133101d28cd16f4d25fe8cbc8acb6d43deb6f
|
[] |
no_license
|
DmitryPodvyaznikov/python_challenge
|
https://github.com/DmitryPodvyaznikov/python_challenge
|
ba88a949193691f900a89621c23a94b59dee2118
|
58e0e78f34bc9f69c8a70d3038064bd0e88ab448
|
refs/heads/master
| 2016-09-05T11:28:19.358625 | 2014-09-21T19:18:31 | 2014-09-21T19:18:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def challenge5(adress):
    """
    Print the picture hidden in a pickled file.

    The file contains a pickled list of rows, each row a list of
    (character, repeat_count) pairs; each row is expanded and printed
    as one line.
    """
    import pickle
    # NOTE(security): unpickling can execute arbitrary code — only use
    # this on trusted challenge data.
    # FIX: open in binary mode inside a with-block (the original leaked
    # the file handle and text-mode reads break pickle on Python 3).
    with open(adress, 'rb') as f:
        rows = pickle.load(f)
    for row in rows:
        print(''.join(ch * count for ch, count in row))
|
UTF-8
|
Python
| false | false | 2,014 |
2,224,793,104,700 |
c896a60766a96d02c6e6d1e7af51626c29210151
|
d93b391cb6f75ae12b128b6240bd72df17997a2d
|
/src/media_item.py
|
1547ae46516ca2215538f5563f55f157c23b8007
|
[
"GPL-2.0-only"
] |
non_permissive
|
maloep/heimdall
|
https://github.com/maloep/heimdall
|
61892475ced6235c8e3fafe62ecd53fd56d48b7f
|
f28b559ee93532557cca7b042c33c76cd0782315
|
refs/heads/master
| 2021-01-18T07:14:25.744776 | 2013-07-05T05:29:52 | 2013-07-05T05:29:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import heimdall
from heimdall import tasks
from heimdall import resources
from heimdall import supplies, demands
from heimdall.predicates import *
from pymediainfo import MediaInfo
import json
from urlparse import urlparse
class ExtractStreamDetails(tasks.SubjectTask):
    """
    Heimdall task: probe a local media file with MediaInfo and emit its
    audio/video stream details, reclassifying the subject as item.video
    or item.audio accordingly.
    """
    demand = [
        demands.required(dc.identifier, "^file://"),
        demands.requiredClass("item", True)
    ]
    supply = [
        supplies.replace(rdf.Class, "item.audio"),
        supplies.replace(rdf.Class, "item.video"),
        supplies.emit("video_stream"),
        supplies.emit("audio_stream")
    ]

    def run(self):
        uri = urlparse(self.subject[dc.identifier]).path
        mime_type = self.subject[dc.format]  # unused, kept: lookup may be intentional
        if not uri:
            return
        media_info = MediaInfo.parse(uri)
        video_streams = []
        audio_streams = []
        for track in media_info.tracks:
            if track.track_type == 'General' and track.duration:
                # MediaInfo reports milliseconds; emit seconds.
                self.subject.emit("duration", track.duration / 1000.0)
            elif track.track_type == 'Video':
                stream = dict()
                if track.frame_rate:
                    stream["framerate"] = float(track.frame_rate)
                if track.codec:
                    stream["codec"] = track.codec
                if track.height:
                    stream["height"] = int(track.height)
                if track.width:
                    stream["width"] = int(track.width)
                video_streams.append(stream)
            elif track.track_type == "Audio":
                stream = dict()
                if track.sampling_rate:
                    stream["samplerate"] = int(track.sampling_rate)
                if track.codec:
                    stream["codec"] = track.codec
                if track.channel_s:
                    stream["channels"] = int(track.channel_s)
                audio_streams.append(stream)
        for stream in video_streams:
            self.subject.emit("video_stream", stream)
        for stream in audio_streams:
            self.subject.emit("audio_stream", stream)
        # Any video stream wins the classification; otherwise audio.
        if len(video_streams) > 0:
            self.subject.extendClass("item.video")
        elif len(audio_streams) > 0:
            self.subject.extendClass("item.audio")
# Tasks exported by this heimdall plugin module.
module = [ ExtractStreamDetails ]
|
UTF-8
|
Python
| false | false | 2,013 |
10,591,389,363,951 |
263609ddf3046cf12259ac18494eb4dca635a7c2
|
189aaa0db5d757821b14748853e0bf896beab601
|
/flexlmlogparser.py
|
f25950c2eed53cd5b867e8f1f4ec18be9997a9ed
|
[] |
no_license
|
soukhoi47/flexlmlogparser
|
https://github.com/soukhoi47/flexlmlogparser
|
f49b321166af3ff021e737cc4e80e1e56bc7ad47
|
becc792be7de150a4a49c835c6ae3f70fe75e45d
|
refs/heads/master
| 2020-05-20T23:52:05.447363 | 2013-05-22T13:42:53 | 2013-05-22T13:46:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
FLEXLMLOGPARSER
===============
VERSION
-------
1.2
add user name in csv file
1.1
init
'''
import sys
import re
import time

# Running totals maintained while scanning the log (Python 2 script:
# print statements and backtick-repr syntax below).
usage_top_old = 0      # usage value at the last CSV row written
usage = 0              # licenses currently checked out
usage_top = 0          # peak concurrent usage observed
out_of_license = 0     # DENIED events (license pool exhausted)
queued = 0             # QUEUED events
user_name = ''         # ", "-joined names of users currently holding the feature

if len(sys.argv) < 4:
    print "USAGE: flexlmlogparser feature input.log output.csv"
    exit(1)

feature = sys.argv[1]
cvsfile = open(sys.argv[3], 'w+')

print "FEATURE: " + feature
print "COUTING... please wait"
time.sleep(1)

with open(sys.argv[2], 'r') as f:
    for line in f:
        # Echo timestamp lines so progress is visible while scanning.
        if re.search('TIMESTAMP',line):
            print line,
        # Checkout: bump usage and append the user name.
        if re.search('OUT: "' + feature + '"', line):
            usage += 1
            user_name += ", " + re.search('OUT: "' + feature + '" (.*) ', line).group(1)
        # Checkin: drop usage and remove one occurrence of the user name.
        if re.search('IN: "' + feature + '"', line):
            usage -= 1
            sub_name = ", " + re.search('IN: "' + feature + '" (.*) ', line).group(1)
            user_name = user_name.replace(sub_name , '', 1)
        if re.search('DENIED: "' + feature + '" .* already reached', line):
            out_of_license += 1
        if re.search('QUEUED: "' + feature + '"', line):
            queued += 1
        # Write a CSV row each time the usage level changes.
        if usage != usage_top_old:
            usage_top_old = usage # sync
            cvsfile.write("\"" + `usage` + "\",\"" + re.sub('^, ', '', user_name) + "\"\n")
        if usage >= usage_top:
            usage_top = usage

cvsfile.close()

print "\n\n"
print "**********************************************"
print "Report:"
print " Max usage = " + `usage_top`
print " License Runs out = " + `out_of_license`
print " Queued = " + `queued`
print "**********************************************"
|
UTF-8
|
Python
| false | false | 2,013 |
5,609,227,304,218 |
00b803f5e493058e86d2ed8ffe628cd435473bbb
|
02fcefc7506918caaf4dabd1fb29ad2140c3efe3
|
/footyNN/footyNN.py
|
0ab0c1da22fdd66ff017ef012d487b36cc5a9811
|
[] |
no_license
|
lmNt/footyNN
|
https://github.com/lmNt/footyNN
|
a6f197d3f80bd233af9e5ab2be3c07052427a632
|
b1b933a343d57466e6059969237f277eee6274a7
|
refs/heads/master
| 2016-09-05T09:21:53.541501 | 2014-08-13T10:08:14 | 2014-08-13T10:08:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from random import random
import csv
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from IPython.display import SVG
from pybrain.structure import SigmoidLayer, TanhLayer, SoftmaxLayer
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import ClassificationDataSet, SupervisedDataSet
from pybrain.utilities import percentError
import utils
###################################################################################################
# Initialization
###################################################################################################
# Settings set by user
data_dir = "C:/Users/lmnt/coding/footyNN/data"
league = "bundesliga"
train_seasons = [2012]
features = ["result", "goals", "shots", "shots_on_target", "comeback"]
test_seasons = [2013]
feat_normalizing = True

# Constants (18-team league: 17 home + 17 away fixtures per team)
no_of_teams = 18
no_of_home_matches = 17
no_of_away_matches = 17
no_of_matches = no_of_home_matches + no_of_away_matches

# Auxilary stuff like iterators — column/outcome codes used when indexing
# the match CSVs; presumably football-data.co.uk-style columns (FTHG =
# full-time home goals, HS = home shots, etc.) — TODO confirm source.
home_res_idx = ["H", "A", "D"]
away_res_idx = ["A", "H", "D"]
home_goal_idx = ["FTHG", "FTAG"]
away_goal_idx = ["FTAG", "FTHG"]
home_shots_idx = ["HS", "AS"]
away_shots_idx = ["AS", "HS"]
home_shots_target_idx = ["HST", "AST"]
away_shots_target_idx = ["AST", "HST"]
home_shots_wood_idx = ["HHW", "AHW"]
away_shots_wood_idx = ["AHW", "HHW"]

# Basic methods to get relevant information from the folder structure:
# data/<league>/matches/<year>.csv, one file per season.
data_dir = os.path.abspath(data_dir)
league_dir = os.path.join(data_dir, league)
matches_dir = os.path.join(league_dir, "matches")
seasons = [int(os.path.splitext(x)[0]) for x in os.listdir(matches_dir)]
season_dirs = [os.path.join(matches_dir, x) for x in os.listdir(matches_dir)]
season_dict = dict(zip(seasons, season_dirs))

# Get list of all teams
teams = pd.read_csv(os.path.join(league_dir, "teams.csv"))

# Read season results from all the years specified in @train_seasons
seasons_train = [pd.read_csv(x) for x in
                 [season_dict.get(key) for key in
                  list(set(seasons).intersection(set(train_seasons)))]]
seasons_test = [pd.read_csv(x) for x in
                [season_dict.get(key) for key in
                 list(set(seasons).intersection(set(test_seasons)))]]
###################################################################################################
# Extract team statistics
###################################################################################################
# Build one flat feature vector per (team, training season):
# result rates, goal/shot differentials, and comeback rates.
teams = teams['TeamNameList'].values
team_features = []
for team in teams:
    for season in seasons_train:
        # Find occurances of the current team playing as home team and away team
        home_team = season[season['HomeTeam'] == team].index.tolist()
        away_team = season[season['AwayTeam'] == team].index.tolist()
        # Process full time results ###############################################################
        home_res = season['FTR'].loc[home_team]
        away_res = season['FTR'].loc[away_team]
        home_res_ft = home_res.values.tolist()
        away_res_ft = away_res.values.tolist()
        # Count win/loss/draw from the team's perspective (note the
        # swapped H/A order in away_res_idx).
        home_res = np.array([home_res_ft.count(x) for x in home_res_idx])
        away_res = np.array([away_res_ft.count(x) for x in away_res_idx])
        avg_home_res = np.true_divide(home_res, no_of_home_matches)
        # NOTE(review): divides by no_of_home_matches, not
        # no_of_away_matches — harmless while both are 17, but looks like
        # a copy-paste slip; confirm.
        avg_away_res = np.true_divide(away_res, no_of_home_matches)
        #avg_home_res = [float(x)/float(no_of_home_matches) for x in home_res]
        #avg_away_res = [float(x)/float(no_of_away_matches) for x in away_res]
        # Full time goals #########################################################################
        home_goals = np.array([sum(season[x].loc[home_team].values) for x in home_goal_idx])
        away_goals = np.array([sum(season[x].loc[away_team].values) for x in away_goal_idx])
        total_goals = np.add(home_goals, away_goals)
        home_goal_diff = home_goals[0]-home_goals[1]
        away_goal_diff = away_goals[0]-away_goals[1]
        total_goal_diff = home_goal_diff+away_goal_diff
        # Shots ###################################################################################
        # Shots
        home_shots = np.array([sum(season[x].loc[home_team].values) for x in home_shots_idx])
        away_shots = np.array([sum(season[x].loc[away_team].values) for x in away_shots_idx])
        total_shots = np.add(home_shots, away_shots)
        home_shot_diff = home_shots[0]-home_shots[1]
        away_shot_diff = away_shots[0]-away_shots[1]
        total_shot_diff = home_shot_diff+away_shot_diff
        # Shots on target
        home_shots_target = np.array([sum(season[x].loc[home_team].values) for x in home_shots_target_idx])
        away_shots_target = np.array([sum(season[x].loc[away_team].values) for x in away_shots_target_idx])
        total_shots_target = np.add(home_shots_target, away_shots_target)
        home_shot_target_diff = home_shots_target[0]-home_shots_target[1]
        away_shot_target_diff = away_shots_target[0]-away_shots_target[1]
        total_shot_target_diff = home_shot_target_diff+away_shot_target_diff
        # Shots on woodwork (computed but not included in feat_vec below)
        if features.count("shots_on_wood")>0:
            home_shots_wood = np.array([sum(season[x].loc[home_team].values) for x in home_shots_wood_idx])
            away_shots_wood = np.array([sum(season[x].loc[away_team].values) for x in away_shots_wood_idx])
            total_shots_wood = np.add(home_shots_wood, away_shots_wood)
            home_shot_wood_diff = home_shots_wood[0]-home_shots_wood[1]
            away_shot_wood_diff = away_shots_wood[0]-away_shots_wood[1]
            total_shot_wood_diff = home_shot_wood_diff+away_shot_wood_diff
        # Comebacks ###############################################################################
        home_res_ht = season['HTR'].loc[home_team]
        away_res_ht = season['HTR'].loc[away_team]
        home_res_ht = home_res_ht.values.tolist()
        away_res_ht = away_res_ht.values.tolist()
        [home_adrift_comeback_rate,
         home_draw_comeback_rate,
         home_no_ht_adrifts,
         home_no_ht_draws] = utils.calcComebackRates(home_res_ht, home_res_ft, True)
        [away_adrift_comeback_rate,
         away_draw_comeback_rate,
         away_no_ht_adrifts,
         away_no_ht_draws] = utils.calcComebackRates(away_res_ht, away_res_ft, False)
        feat_vec = np.array([avg_home_res, avg_away_res,
                             home_goal_diff, away_goal_diff, total_goal_diff,
                             home_shot_diff, away_shot_diff, total_shot_diff,
                             home_shot_target_diff, away_shot_target_diff, total_shot_target_diff,
                             home_draw_comeback_rate, away_draw_comeback_rate,
                             home_adrift_comeback_rate, away_adrift_comeback_rate,
                             home_no_ht_adrifts, away_no_ht_adrifts,
                             home_no_ht_draws, away_no_ht_draws])
        #feat_vec = np.array([avg_home_res, avg_away_res,
        #                     home_goal_diff, away_goal_diff, total_goal_diff,
        #                     home_shot_diff, away_shot_diff, total_shot_diff,
        #                     home_shot_target_diff, away_shot_target_diff, total_shot_target_diff])
        team_features.append(np.hstack(feat_vec.flatten()))
###################################################################################################
# Get match results for training set
###################################################################################################
# Build the supervised dataset: input = home + away team feature vectors,
# target = 0 (away win) or 2 (home win); draws are skipped.
train_matches_string = []
scaler_list = []  # NOTE(review): appears unused below — confirm
input_layer_size = team_features[0].shape[0]*2
ds = SupervisedDataSet(input_layer_size, 1)
for season in seasons_train:
    train_matches_string.extend(zip(season['HomeTeam'], season['AwayTeam'], season['FTR']))
for match in train_matches_string:
    if match[2] == "D":
        # Draws are excluded from training.
        continue
        #output_NN = 1
    elif match[2] == "H":
        output_NN = 2
    else:
        output_NN = 0
    home_team_feat = team_features[np.where(teams==match[0])[0][0]]
    away_team_feat = team_features[np.where(teams==match[1])[0][0]]
    input_NN = np.array([home_team_feat, away_team_feat]).flatten()
    input_NN_tuple = tuple(input_NN)
    ds.addSample(input_NN_tuple, (output_NN,))
inp = ds['input']
targ = ds['target']
if feat_normalizing:
    # Normalize the inputs and rebuild the dataset with the scaled values;
    # `scaler` is reused later to transform the test inputs.
    scaler, input_NN = utils.normalize(inp[:,:])
    ds.clear()
    for input, output in zip(input_NN,targ):
        ds.addSample(input, (output,))
###################################################################################################
# Init neural network and trainer
###################################################################################################
# Feed-forward net: one hidden layer of 30 units, single output neuron
# trained to regress toward 0 (away win) / 2 (home win).
net = buildNetwork(input_layer_size, 30, 1)
trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(5)
#trainer.testOnData()
###################################################################################################
# Get matches for testing
###################################################################################################
correct_pred = 0
incorrect_pred = 0
test_matches_string = []
for season in seasons_test:
    test_matches_string.extend(zip(season['HomeTeam'], season['AwayTeam'], season['FTR']))
for match in test_matches_string:
    # Skip matches involving teams without a feature vector
    # (e.g. promoted teams absent from the training seasons).
    idx = np.where(teams==match[0])[0]
    if idx.size == 0:
        continue
    home_team_feat = team_features[idx[0]]
    idx = np.where(teams==match[1])[0]
    if idx.size == 0:
        continue
    away_team_feat = team_features[idx[0]]
    input_NN = np.array([home_team_feat, away_team_feat]).flatten()
    if feat_normalizing:
        # Apply the scaler fitted on the training inputs.
        input_NN = scaler.transform(input_NN)
    input_NN_tuple = tuple(input_NN)
    result = net.activate(input_NN_tuple)
    if match[2]=="H":
        winner = match[0]
    elif match[2]=="A":
        winner = match[1]
    else:
        winner = "DRAW"
    # Threshold the single regression output at 1 (midpoint of 0/2).
    if result < 1:
        winner_pred = match[1]
    elif result >= 1:
        winner_pred = match[0]
    else:
        # NOTE(review): unreachable — the two branches above cover all
        # values, so "DRAW" is never predicted; confirm intent.
        winner_pred = "DRAW"
    if winner==winner_pred:
        correct_pred += 1
    else:
        incorrect_pred += 1
    #print match[0] + ' vs ' + match[1] + " = " + winner_pred + " (" + winner + ")" + " " + str(result)
print "correct pred: " + str(correct_pred)
print "incorrect pred: " + str(incorrect_pred)
|
UTF-8
|
Python
| false | false | 2,014 |
11,321,533,832,666 |
c2e36d8aa65f48ac1343f463d31a21e0697adc0b
|
f229ee935ea39d37d95bca3ae344002e7de99c81
|
/MDWISEAlerts/__init__.py
|
1fa17f2dd14b52e2c58c136cc3f8664f9c07ae8e
|
[] |
no_license
|
RohitKumar13/mdwise_jiva
|
https://github.com/RohitKumar13/mdwise_jiva
|
96f0048439126d5ca74a8bf1e91fa6bc76224d6f
|
362fba642fbe7e046d420f84fdabbdc17129f205
|
refs/heads/master
| 2015-08-19T15:52:05.371325 | 2014-12-02T17:08:45 | 2014-12-02T17:08:45 | 29,470,476 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Products.CMFCore.DirectoryView import registerDirectory
from Products.ZeAlerts.ZeAlertsCtrl import ZeAlertsCtrl, manage_addZeAlertsCtrl
from MDWISEAlertsCtrl import MDWISEAlertsCtrl, manage_addMDWISEAlertsCtrlForm, manage_addMDWISEAlertsCtrl
# Skin/layer directories shipped with this product; each must be
# registered with CMFCore so Zope can serve them as filesystem views.
components = ['mdwise_views', 'mdwise_zsqls', 'www','mdwise_dtml']
for dir in components:
    # NOTE(review): 'dir' shadows the builtin of the same name.
    registerDirectory(dir, globals())
def initialize(context):
    """Zope product entry point: register the MDWISEAlertsCtrl class
    and its add-form/add-method constructors with the product context."""
    context.registerClass(
        MDWISEAlertsCtrl,
        constructors=(manage_addMDWISEAlertsCtrlForm,
                      manage_addMDWISEAlertsCtrl),
    )
|
UTF-8
|
Python
| false | false | 2,014 |
15,161,234,598,774 |
bcf7711d9ca78a48fc1bbc35a8a385c2c00ff214
|
8b4ab8cb5ef3ebb79203906d5d18ed6300100a81
|
/gui/wxpython/icons/__init__.py
|
02800108f0b3666df82c0901fc4f88b9128e1f9a
|
[
"GPL-2.0-only",
"GPL-1.0-or-later",
"GPL-2.0-or-later",
"LicenseRef-scancode-philippe-de-muyter"
] |
non_permissive
|
imincik/pkg-grass
|
https://github.com/imincik/pkg-grass
|
f4fa784dc39329cc5c6e00b9800325e0a3a5e25b
|
39287a8c507e8dd38b4169ff95afa9b18c93dc7f
|
refs/heads/master
| 2021-01-19T09:42:21.886052 | 2014-03-27T15:22:21 | 2014-03-27T15:22:21 | 18,230,795 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
all = [ "icon", "silk" ]
|
UTF-8
|
Python
| false | false | 2,014 |
6,416,681,167,477 |
03ee4f350950508d202ce72c3363d5b5cd684022
|
50c548b35d86e3405c5e9def60b3e5954886f9f7
|
/1/python/problem1.py
|
a9e223407676bdb51b170f2dbddb354d6b8e75ea
|
[] |
no_license
|
hrkeni/Project-Euler
|
https://github.com/hrkeni/Project-Euler
|
2d1b9a09d8e362a6cbba674fab3d8e1c0d81cecd
|
eacc0237e153892e08180dd55d977c1fc92c7a72
|
refs/heads/master
| 2018-03-10T02:41:26.600009 | 2012-08-19T19:28:42 | 2012-08-19T19:28:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Project Euler problem 1: sum every natural number below 1000 that is
# a multiple of 3 or 5.
#
# The original filled a set via two explicit loops and combined it with
# reduce(); a set comprehension plus the built-in sum() is the idiomatic
# (and Python 3 compatible -- reduce is no longer a builtin) equivalent.
num = {x for x in range(3, 1000) if x % 3 == 0 or x % 5 == 0}
total = sum(num)
# print(...) with a single argument behaves identically on Python 2 and 3.
print(total)
|
UTF-8
|
Python
| false | false | 2,012 |
16,827,681,900,095 |
2cad4c1705868c30fc3ca497243a26dce32d9680
|
e95b40bc78c8c25b80db446294759b99ad5fd988
|
/Easy Python/37 Swap Elements/swap.py2
|
ef24c36f93a897a9bfe6c4996607a7738d75a997
|
[] |
no_license
|
taibhse/Code-Eval
|
https://github.com/taibhse/Code-Eval
|
194678ed3451aa3e112d1af9ab52df3d14227c6e
|
5942f65afd3e5e036e5019d3340333226d262c53
|
refs/heads/master
| 2020-02-22T06:53:16.280772 | 2014-12-28T15:06:15 | 2014-12-28T15:06:15 | 10,776,593 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
def swap_elements(line,swaps):
    """
    This function takes in two lists and swaps elements in the first based off the second
    Input:
    A list of numbers in string format '1 2 3 4 5' separated by whitespace
    A list of swaps to make, separated by ',' between swaps and '-' between
    the two indices of one swap pair (e.g. '0-2, 1-3')
    Output:
    Prints the resulting number list to standard output, space separated

    NOTE: Python 2 only -- relies on the print statement's trailing-comma
    form and on map() returning an indexable list.
    """
    line = line.split()
    swaps = swaps.strip().split(',')
    tmp = ''
    run = ''
    for x in swaps:
        # 'a-b' -> [a, b] as ints, then swap the two positions in place.
        run = x.strip().split('-')
        run = map(int,run)
        tmp = line[run[0]]
        line[run[0]] = line[run[1]]
        line[run[1]] = tmp
    # Trailing comma keeps the values on one line, space separated;
    # the final bare print terminates the line.
    for value in line:
        print value,
    print ''
def main():
    """Process the input file named on the command line.

    Each line holds a number list and a swap list separated by ':'.
    """
    with open(sys.argv[1]) as src:
        for raw in src:
            parts = raw.strip().split(':')
            swap_elements(parts[0], parts[1])


if __name__=="__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
16,930,761,093,492 |
855b5d06106dc3afe0fd1e7811b83d6528e7e1ae
|
26fa3a1731b17f2dcbd35effe2c2f90ced9c7733
|
/getent-report.py
|
639a51aefdc257ec70df768f6f139a3983acb096
|
[] |
no_license
|
RaymiiOrg/getent-report
|
https://github.com/RaymiiOrg/getent-report
|
a75b37edf206af982c02057462b3c3ea54d834f2
|
de97c759cb118a231ddb5870b7edd361f407b08a
|
refs/heads/master
| 2020-04-02T01:08:37.571198 | 2013-08-12T11:24:13 | 2013-08-12T11:24:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2
import subprocess, sys, operator
domain = "example.com"
allUsers = subprocess.Popen(["getent passwd"], stdout=subprocess.PIPE, shell=True)
(outUser, errGroup) = allUsers.communicate()
allGroup = subprocess.Popen(["getent group"], stdout=subprocess.PIPE, shell=True)
(outGroup, errGroup) = allGroup.communicate()
users = {}
usernames = []
groups = {}
groupnames = []
Filter = ["sync", "guest", "nobody", "sshd", "sm_cb01120b539244b7b", "sm_a831d1c051744f3aa", "read-only domain controllers", "samba", "lp", "sudo", "lpadmin", "adm", "admins", "dip", "plugdev", "cdrom", "sambashare", "organization management", "exchange windows permissions", "utmp", "schema admins", "view-only organization management", "tty", "mail", "ircd", "ntp", "proxy", "news", "winbindd_priv", "disk", "staff", "tape", "mail", "bin", "fax", "kmem", "enterprise read-only domain controllers", "ssh", "shadow", "delegated setup", "daemon", "list", "receipient management", "domain controllers", "read only domain controllers", "syslog", "crontab", "video", "um management", "public folder management", "denied rodc password replication group", "irc", "ntp", "group policy creator owners", "news", "proxy", "src", "netdev", "libuuid", "games", "backup", "ssl-cert", "cert publishers", "records management", "operator", "gnats", "landscape", "server management", "enterprise admins", "system", "ump", "exchange trusted subsystem", "domain users", "domain guests", "whoopsie", "dialout", "ras and ias servers", "cdrom", "exchange servers", "utempter", "munin", "voice", "root", "nagios", "exchangelegacyinterop", "logcheck", "uucp", "floppy", "users", "exchange all hosted organizations", "sys", "postdrop", "man", "dnsupdateproxy", "audio", "nogroup", "postfix", "discovery management", "www-data", "allowed rodc password replication group", "sasl", "help desk", "domain computers", "recipient management", "dnsadmins"]
# Parse 'getent passwd' output: 7 colon-separated fields per entry
# (name:passwd:uid:gid:gecos:home:shell); field 4 is the full name.
for line in outUser.split("\n"):
    if len(line.split(":")) == 7:
        username = line.split(":")[0]
        if username not in Filter:
            usernames.append(username)
            users[username] = {"fullname": line.split(":")[4], "email": username + "@" + domain, "function":"", "telephone":"", "groups":[]}
# Parse 'getent group' output: 4 fields, the last a comma-separated
# member list.
for line in outGroup.split("\n"):
    if len(line.split(":")) == 4:
        groupname = line.split(":")[0]
        members = line.split(":")[3].split(",")
        if groupname and groupname not in Filter:
            groupnames.append(groupname)
            groups[groupname] = {"members":members}
# Attach each user to every non-filtered group that lists them.
for user in users:
    for group in groups:
        if group and group not in Filter:
            if user in groups[group]['members']:
                if user:
                    users[user]['groups'].append(group)
usernames.sort()
groupnames.sort()
# Report: users with their groups, then groups with their members,
# then summary counts.
print("# Users")
for user in usernames:
    print(("Name: %s") % users[user]['fullname'])
    print(("Email: %s") % users[user]['email'] )
    sys.stdout.write(("Groups:"))
    for group in users[user]['groups']:
        sys.stdout.write(("%s, ") % group)
    print("\n\n")
print("# Groups")
for group in groupnames:
    print(("Group: %s") % group)
    sys.stdout.write(("Members: "))
    for user in groups[group]["members"]:
        sys.stdout.write(("%s, ") % user)
    print("\n\n")
print("# Statistics")
print(("Number of Users: %i") % len(usernames))
print(("Number of Groups: %i") % len(groupnames))
|
UTF-8
|
Python
| false | false | 2,013 |
8,564,164,788,974 |
bcd921cced0d4e0f9f33b145d9fbcc7b8e479bcf
|
d2b5e5039b2fb0556ebaadf68c491b7f35cadc32
|
/django_temporal/db/backends/util.py
|
42b996f6c86b0918f7458dafee30105470c2f244
|
[
"BSD-3-Clause"
] |
permissive
|
hwinkel/django_temporal
|
https://github.com/hwinkel/django_temporal
|
65d4bac57bbd8ffc92789b8260524ece7f957b74
|
c192a4f2bd052d14ae43795962e2451be22985d2
|
refs/heads/master
| 2021-01-17T12:22:43.948709 | 2014-10-24T21:24:49 | 2014-10-24T21:24:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class TemporalOperation(object):
    """Renders an SQL fragment applying a temporal operator to a column.

    The default template produces ``<column> <operator> <period>``,
    where the period placeholder defaults to the ``%s`` bind marker.
    """

    sql_template = '%(temporal_col)s %(operator)s %(period)s'

    def __init__(self, function='', operator='', result='', **kwargs):
        self.function = function
        self.operator = operator
        self.result = result
        # Extra keys are merged into the template parameters as-is.
        self.extra = kwargs

    def as_sql(self, temporal_col, period='%s'):
        """Return the SQL string for *temporal_col* against *period*."""
        return self.sql_template % self.params(temporal_col, period)

    def params(self, temporal_col, period='%s'):
        """Build the substitution dict consumed by ``sql_template``."""
        merged = dict(
            function=self.function,
            temporal_col=temporal_col,
            period=period,
            operator=self.operator,
            result=self.result,
        )
        merged.update(self.extra)
        return merged
class TemporalFunction(TemporalOperation):
    """Renders ``function(column, period)`` style SQL."""

    sql_template = '%(function)s(%(temporal_col)s, %(period)s)'

    def __init__(self, func, result='', operator='', **kwargs):
        # Force the function name (plus operator/result) into the kwargs
        # forwarded to TemporalOperation; these keys win over any caller
        # supplied duplicates, matching the original behaviour.
        kwargs.update({'function': func,
                       'operator': operator,
                       'result': result})
        super(TemporalFunction, self).__init__(**kwargs)
# rework as temporal attribute?
class TemporalFunctionTS(TemporalFunction):
    """Renders ``function(column) = period``: the function takes only the
    column and its result is compared for equality against the period."""
    sql_template = '%(function)s(%(temporal_col)s) = %(period)s'
|
UTF-8
|
Python
| false | false | 2,014 |
16,922,171,168,086 |
d28ec6d6e4902a8bc8fad8bbac33bce3008d5640
|
0fe847d83cc6b7962b92a79c8aad91d9e6ed8407
|
/project/src/jason_code/earthMine.py
|
d7712be56ee19ec350e78542e06e8e572c8752e7
|
[
"AGPL-3.0-only"
] |
non_permissive
|
jasonzliang/image_retreival
|
https://github.com/jasonzliang/image_retreival
|
f2d365d39299eeab52189a1f3e6c0ff11e17c694
|
00b92a50d73d90fbc39d1f818d283ec7d4903baf
|
refs/heads/master
| 2020-05-18T20:41:41.080591 | 2012-09-21T18:06:13 | 2012-09-21T18:06:13 | 5,578,973 | 0 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# An implementation of the EarthMine DirectData API using their
# HTTP Request Protocol
import httplib2
import urllib
import time
import json
import hashlib
import math
import numpy
import os
from PIL import Image
#DD_KEY="v12ypuyn9a2renzw9ivvu5fg"
#DD_SECRET="Q4xfUF0vqO"
#Joe's
#DD_KEY = "bv5tdq3uw9mjudeqf6kjctm3"
#DD_SECRET = "M3jXvKKTKg"
DD_KEY = "sc9v12b8f6gs5kj8e52x1grl"
DD_SECRET = "pdX4Z5YvaY"
DD_MAX_IMAGE_SIZE = 2048
# DEFWIDTH = 512
# DEFHEIGHT = 512
#DEFWIDTH = 768
#DEFHEIGHT = 512
DEFWIDTH = 2048
DEFHEIGHT = 1371
DEFFOV = 60
DD_DELAY = 0.20
DD_VIEWPIXELMAX = 400
DD_VIEWPIXELSIDE = 20
EARTHMINE_URL = "http://cloud.earthmine.com/service"
DEBUG=0
class ddError(Exception):
    """Exception raised for all DirectData (dd*) failures.

    Carries a numeric status code plus a human-readable reason.
    """

    def __init__(self, status, reason):
        self.status = status
        self.reason = reason

    def __str__(self):
        # %r mirrors the original repr() of the status code.
        return "STATUS: %r REASON:%s" % (self.status, self.reason)
class ddLocation(dict):
    """A dict holding a geodetic position: 'lat', 'lon' and optional 'alt'.

    Values are validated on construction and on item assignment:
    lat in [-90, 90], lon in [-180, 180], alt >= 0 (meters).
    Raises ddError on any violation.
    """

    def __init__(self, lat, lon, alt=None):
        dict.__init__(self)
        # Validate all ranges before storing anything.
        try:
            if lat > 90.0 or lat < -90.0:
                raise ddError(1, "Latitude out of range [90.0,-90.0]")
            elif lon > 180.0 or lon < -180.0:
                raise ddError(2, "Longitude out of range [180.0,-180.0]")
            elif alt is not None and alt < 0:
                raise ddError(3, "Altitude less than zero")
        except (TypeError, ValueError):
            # Fixed: comparisons raise TypeError (not ValueError) for
            # non-numbers on Python 3; the original only caught
            # ValueError, which comparisons never raise.
            raise ddError(4, "ddLocation initialized with non-numerical parameter")
        else:
            self["lat"] = lat
            self["lon"] = lon
            if alt is not None:
                self["alt"] = alt

    def __setitem__(self, key, value):
        # Fixed: the original compared keys with 'is', which only works
        # because CPython happens to intern short string literals; '=='
        # is the correct comparison.
        if key not in ["lat", "lon", "alt"]:
            raise ddError(5, "Can only modify lat, lon, alt")
        elif key == "lat" and (value > 90.0 or value < -90.0):
            raise ddError(1, "Latitude out of range [90.0,-90.0]")
        elif key == "lon" and (value > 180.0 or value < -180.0):
            raise ddError(2, "Longitude out of range [180.0,-180.0]")
        elif key == "alt" and value < 0:
            raise ddError(3, "Altitude less than zero")
        else:
            dict.__setitem__(self, key, value)
class ddImageSize(dict):
    """A dict holding an image 'width' and 'height', each validated to
    lie in [0, DD_MAX_IMAGE_SIZE]. Raises ddError on violation."""

    def __init__(self, width, height):
        dict.__init__(self)
        try:
            if width < 0 or width > DD_MAX_IMAGE_SIZE:
                raise ddError(5, "Image width out of bounds [0,{max}]".format(max=DD_MAX_IMAGE_SIZE))
            elif height < 0 or height > DD_MAX_IMAGE_SIZE:
                raise ddError(6, "Image height out of bounds [0,{max}]".format(max=DD_MAX_IMAGE_SIZE))
        except (TypeError, ValueError):
            # Fixed: comparisons raise TypeError (not ValueError) for
            # non-numbers on Python 3; the original only caught ValueError.
            raise ddError(7, "ddImageSize initialized with bad parameter")
        else:
            self["width"]=width
            self["height"]=height

    def __setitem__(self, key, value):
        # Fixed: 'is' string comparisons replaced with '==' -- identity
        # comparison on string literals is an interning accident.
        if key not in ["width", "height"]:
            raise ddError(5, "Can only modify width, height")
        elif key == "width" and (value < 0 or value > DD_MAX_IMAGE_SIZE):
            raise ddError(5, "Image width out of bounds [0,{max}]".format(max=DD_MAX_IMAGE_SIZE))
        elif key == "height" and (value < 0 or value > DD_MAX_IMAGE_SIZE):
            raise ddError(6, "Image height out of bounds [0,{max}]".format(max=DD_MAX_IMAGE_SIZE))
        else:
            dict.__setitem__(self, key, value)
class ddView(dict):
    """Dict describing one earthmine view: image size, camera location,
    field of view, occlusion flag, image URL and view id."""

    def __init__(self, imageSize, fieldOfView, isKnownOccluded,
                 url, viewLocation, viewId):
        dict.__init__(self)
        self["image-size"] = imageSize
        self["view-location"] = viewLocation
        self["field-of-view"] = fieldOfView
        self["is-known-occluded"] = isKnownOccluded
        self["url"] = url
        self["id"] = viewId

    def __setitem__(self, key, value):
        # Only the fixed set of view keys may be stored.
        allowed = ("image-size", "view-location", "field-of-view",
                   "url", "id", "is-known-occluded")
        if key in allowed:
            dict.__setitem__(self, key, value)
        else:
            raise ddError(0, "Fail")
class ddViewRequest(dict):
    """Dict of parameters for a 'get-views' request: image size, subject
    location, field of view, plus result-count and radius limits."""

    def __init__(self, imageSize, viewSubject, FOV,
                 maxResults = 12, searchRadius = 60):
        dict.__init__(self)
        self["image-size"] = imageSize
        self["view-subject"] = viewSubject
        self["field-of-view"] = FOV
        self["max-results"] = maxResults
        self["search-radius"] = searchRadius

    def __setitem__(self, key, value):
        # Only the fixed request keys may be stored.
        if key in ("image-size", "view-subject", "field-of-view",
                   "max-results", "search-radius"):
            dict.__setitem__(self, key, value)
        else:
            raise ddError(0, "Fail")
class ddViewPixel(dict):
    """Dict holding a single pixel coordinate under keys 'x' and 'y'."""

    def __init__(self, x, y):
        dict.__init__(self)
        self["x"] = x
        self["y"] = y

    def __setitem__(self, key, value):
        # Only the two coordinate keys may be stored.
        if key not in ["x", "y"]:
            raise ddError(0, "Fail")
        else:
            dict.__setitem__(self, key, value)

    def imprt(self, data):
        """Copy 'x'/'y' from a plain dict into this pixel.

        Fixed: the original signature was ``imprt(data)`` -- missing
        ``self`` -- so calling it on an instance raised TypeError and its
        body referenced an undefined ``self`` name. Raises ddError(32)
        if *data* lacks either key.
        """
        try:
            self["x"] = data["x"]
            self["y"] = data["y"]
        except KeyError:
            raise ddError(32, "Import failed because of incorrect dictionary.")
class ddViewLocationForViewPixelsRequest(dict):
    """Request body mapping a view id to the pixels to be located.

    ``viewID`` is the earthmine view-id string; ``viewPixels`` is a list
    of ddViewPixel objects.
    """

    def __init__(self, viewID, viewPixels):
        dict.__init__(self)
        self["view-id"] = viewID
        self["view-pixels"] = viewPixels

    def __setitem__(self, key, value):
        # Only the two request keys may be stored.
        if key in ("view-id", "view-pixels"):
            dict.__setitem__(self, key, value)
        else:
            raise ddError(0, "Fail")
class ddObject():
    """Handles connections to the earthmine Direct Data API.
    Supports all Direct Data Request types.

    General workflow::

        dd = ddObject()
        dd.buildXYZRequest(PARAMS)
        try:
            content = dd.sendRequest()
        except ddError:
            HANDLE
    """

    def __init__(self):
        # Construct request URL, headers and JSON (de)serializers.
        self.requestURL = EARTHMINE_URL
        self.httpObject = httplib2.Http()
        self.headers = {"x-earthmine-auth-id": DD_KEY}
        self.JSONData = ""
        self.JSONEncoder = json.JSONEncoder(ensure_ascii=False)
        self.JSONDecoder = json.JSONDecoder()

    def signRequest(self):
        """Return the service URL with signature and timestamp appended.

        The signature is md5(key + secret + unix-time), per the
        earthmine request-signing scheme.
        """
        currTime = int(time.time())
        sig = hashlib.md5(DD_KEY + DD_SECRET + str(currTime)).hexdigest()
        # FIX: this previously read '"?sig="+sig+"×tamp="+...' -- an
        # HTML-escaping artifact ('&times' + 'tamp' for '&timestamp')
        # that produced a corrupt query string.
        fullURL = self.requestURL + "?sig=" + sig + "&timestamp=" + str(currTime)
        return fullURL

    def buildGetViews(self, width, height, FOV=DEFFOV,
                      lat=0.0, lon=0.0, alt=None,
                      maxResults=12, searchRadius=60.0):
        """Stage a 'get-views' request body in self.JSONData."""
        requestDict = {"operation": "get-views",
                       "parameters": {
                           "request": ddViewRequest(ddImageSize(width, height),
                                                    ddLocation(lat, lon, alt),
                                                    FOV, maxResults, searchRadius)
                       }
                       }
        self.JSONData = self.JSONEncoder.encode(requestDict)

    def buildGetPano(self, lat=0.0, lon=0.0, maxResults=12, searchRadius=60.0):
        """Stage a 'get-panoramas' request body in self.JSONData."""
        requestDict = {"operation": "get-panoramas",
                       "parameters": {
                           "request": {
                               "subject-location": {
                                   "lat": lat,
                                   "lon": lon
                               },
                               "max-results": maxResults,
                               "search-radius": searchRadius
                           }
                       }
                       }
        self.JSONData = self.JSONEncoder.encode(requestDict)

    def processViewSearchResult():
        # Placeholder -- never implemented (note: missing 'self').
        pass

    def adjustView(self, view, FOV=DEFFOV, pan=0, tilt=0, width=False, height=False):
        """Adjust any view passed to the object and return the new view.
        The old view is not kept. Pan and tilt are in degrees."""
        if width == False:
            width = view["image-size"]["width"]
        if height == False:
            height = view["image-size"]["height"]
        request = {"operation": "adjust-view",
                   "parameters": {
                       "request": {
                           "view-id": view["id"],
                           "field-of-view": FOV,
                           "image-size": {"width": width, "height": height},
                           "pan": pan,
                           "tilt": tilt
                       }
                   }
                   }
        self.JSONData = self.JSONEncoder.encode(request)
        return self.sendRequest()

    def getdepthurl(self, view):
        """Retrieve a url of a depth image for the given view."""
        request = {"operation": "get-depth-image",
                   "parameters": {
                       "request": {
                           "view-id": view["id"]
                       }
                   }
                   }
        self.JSONData = self.JSONEncoder.encode(request)
        return self.sendRequest()

    def getLocationsForViewPixels(self, viewID, viewPixels):
        """Resolve 3D locations for *viewPixels* of view *viewID*.

        Returns a list of (pixel, location) pairs in request order.
        """
        request = {
            "operation": "get-locations-from-view",
            "parameters": {
                "request":
                    ddViewLocationForViewPixelsRequest(viewID, viewPixels)
            }
        }
        self.JSONData = self.JSONEncoder.encode(request)
        # print() form works on Python 2 and 3 for a single argument.
        print(self.JSONData)
        response = self.sendRequest()
        # Correlate the pixels and locations pairwise.
        return [(request["parameters"]["request"]["view-pixels"][i], response["locations"][i]) for i in range(len(response["locations"]))]

    def processResult(self, content):
        """Generic handler for processing all earthmine responses.

        Checks for service-side exceptions and returns the 'result'
        dictionary; raises ddError(13) when the response carries an
        exception.
        """
        resultDict = self.JSONDecoder.decode(content)
        if resultDict["exception"] is None:
            return resultDict["result"]
        else:
            raise ddError(13,
                          "Exception in response: {resp}".format(resp=
                                                                 repr(resultDict["exception"])))

    def sendRequest(self):
        """POST the staged JSON body; retry once on transport error and
        indefinitely on HTTP 504; raise ddError for other statuses."""
        if self.JSONData == "":
            pass
        else:
            fullURL = self.signRequest()
            self.httpObject = httplib2.Http()
            try:
                resp, content = self.httpObject.request(fullURL, 'POST',
                                                        headers=self.headers,
                                                        body=self.JSONData)
            except:  # Try again once for a transient transport error
                resp, content = self.httpObject.request(fullURL, 'POST',
                                                        headers=self.headers,
                                                        body=self.JSONData)
            # Process header errors here.
            while True:
                if resp.status == 200:
                    return self.processResult(content)
                elif resp.status == 504:  # gateway timeout: retry
                    resp, content = self.httpObject.request(fullURL, 'POST',
                                                            headers=self.headers,
                                                            body=self.JSONData)
                else:
                    raise ddError(resp.status, resp.reason + content)
#Higher Level functions
def LocationSubtract(view1, view2):
    """Return the great-circle distance in meters between two locations,
    using the spherical law of cosines.

    Both arguments must be mappings with 'lat' and 'lon' keys in
    degrees. Raises ddError(3) when either is missing those keys.
    """
    R = 6.3781e6  # Earth radius, meters
    try:
        lat1 = math.radians(view1["lat"])
        lon1 = math.radians(view1["lon"])
    except KeyError:
        raise ddError(3, "view1 did not have members 'lat' or 'lon'")
    try:
        lat2 = math.radians(view2["lat"])
        lon2 = math.radians(view2["lon"])
    except KeyError:
        # Fixed: the original error message blamed view1 here as well.
        raise ddError(3, "view2 did not have members 'lat' or 'lon'")
    d = math.acos(math.sin(lat1)*math.sin(lat2) \
                  + math.cos(lat1)*math.cos(lat2) \
                  * math.cos(lon2 - lon1)) * R
    return d
#def LocationSubtractHaver(view1, view2):
# """Subtracts two ddLocations and returns the distance between them
# using the Haversine formula"""
#
# R = 6.3781e6 #Earth's radius in meters
# dLat = math.radians(view1["lat"]-view2["lat"])
# dLon = math.radians(view1["lon"]-view2["lon"])
# a = math.sin(dLat/2)*math.sin(dLat/2) \
# + math.cos(math.radians(view1["lat"])) \
# * math.cos(math.radians(view2["lat"])) \
# * math.sin(dLon/2)*math.sin(dLon/2)
# c = math.atan2(math.sqrt(a), math.sqrt(1-a))
# return R * c
#def tobearing(rad):
# return (math.degrees(rad) + 360) % 360
def getbearing(view1, view2):
    """Initial great-circle bearing, in degrees, from view1 to view2.

    0 is north, measured clockwise; the result lies in (-180, 180]
    (raw atan2 output -- not normalized to [0, 360)).
    """
    lat1 = math.radians(view1["lat"])
    lat2 = math.radians(view2["lat"])
    delta = math.radians(view2["lon"] - view1["lon"])
    y = math.cos(lat2) * math.sin(delta)
    x = (math.cos(lat1) * math.sin(lat2)
         - math.sin(lat1) * math.cos(lat2) * math.cos(delta))
    return math.degrees(math.atan2(y, x))
def moveLocation(view1, d, bearingDegrees):
    """Return a ddLocation *d* meters from view1 along the great circle
    with initial bearing *bearingDegrees* (0 == north).

    Raises ddError(3) when view1 lacks 'lat'/'lon' keys.
    """
    R = 6.3781e6  # Earth radius, meters
    try:
        lat1 = math.radians(view1["lat"])
        lon1 = math.radians(view1["lon"])
    except KeyError:
        raise ddError(3, "Passed view was not a ddLocation...")
    theta = math.radians(bearingDegrees)
    frac = d / R  # angular distance
    lat2 = math.asin(math.sin(lat1) * math.cos(frac)
                     + math.cos(lat1) * math.sin(frac) * math.cos(theta))
    lon2 = lon1 + math.atan2(
        math.sin(theta) * math.sin(frac) * math.cos(lat1),
        math.cos(frac) - math.sin(lat1) * math.sin(lat2))
    return ddLocation(math.degrees(lat2), math.degrees(lon2))
def moveLocation4(lat1, lon1, d, bearingDegrees):
    """Like moveLocation, but takes and returns bare (lat, lon) degrees.

    Returns the (lat, lon) tuple *d* meters away along the great circle
    with initial bearing *bearingDegrees* (0 == north).
    """
    R = 6.3781e6  # Earth radius, meters
    phi1 = math.radians(lat1)
    lam1 = math.radians(lon1)
    theta = math.radians(bearingDegrees)
    frac = d / R  # angular distance
    phi2 = math.asin(math.sin(phi1) * math.cos(frac)
                     + math.cos(phi1) * math.sin(frac) * math.cos(theta))
    lam2 = lam1 + math.atan2(
        math.sin(theta) * math.sin(frac) * math.cos(phi1),
        math.cos(frac) - math.sin(phi1) * math.sin(phi2))
    return math.degrees(phi2), math.degrees(lam2)
def getCell(ddConnection, lat, lon, radius):
    """Fetch up to 100 views within *radius* meters of (lat, lon).

    Fixed: the original ignored the passed connection (it built a fresh
    ddObject), referenced the module through an undefined ``earthMine``
    name, and discarded its result. It now uses *ddConnection* and
    returns the view list.
    """
    return ddGetViews(ddConnection, lat, lon, radius, maxResults=100)
def ddGetViews(ddConnection, lat, lon, radius=60, maxResults=12, FOV=DEFFOV, width=DEFWIDTH, height=DEFHEIGHT):
    """Gets a list of views within *radius* meters of (lat, lon).

    Returns None when the service response carries no 'views' entry;
    wraps any connection failure in ddError(0).
    """
    try:
        ddConnection.buildGetViews(width, height, searchRadius=radius,
                                   maxResults=maxResults, FOV=FOV,
                                   lat=lat, lon=lon)
        if DEBUG > 0:
            print(ddConnection.JSONData)
        result = ddConnection.sendRequest()
    except Exception as msg:
        print("sent: {0}".format(ddConnection.JSONData))
        raise ddError(0, "ddConnection object raised an exception: {msg}".format(msg=msg))
    try:
        return result["views"]
    except KeyError:
        return None
def ddGetPanos(ddConnection, lat, lon, radius=60, maxResults=12):
    """Gets a list of panoramas within *radius* meters of (lat, lon).

    Returns None when the service response carries no 'panoramas' entry;
    wraps any connection failure in ddError(0).
    """
    try:
        ddConnection.buildGetPano(searchRadius=radius, maxResults=maxResults,
                                  lat=lat, lon=lon)
        if DEBUG > 0:
            print(ddConnection.JSONData)
        result = ddConnection.sendRequest()
    except Exception as msg:
        print("sent: {0}".format(ddConnection.JSONData))
        raise ddError(0, "ddConnection object raised an exception: {msg}".format(msg=msg))
    try:
        return result["panoramas"]
    except KeyError:
        return None
def getFrontalViews(ddConnection, lat, lon, radius=60, maxResults=12, FOV=DEFFOV, width=DEFWIDTH, height=DEFHEIGHT):
    """Return one building-facing ('frontal') view for every panorama
    found near (lat, lon)."""
    nearby = ddGetPanos(ddConnection, lat, lon, radius=radius, maxResults=maxResults)
    return [getFrontalView(ddConnection, pano, FOV, width, height)
            for pano in nearby]
def getFrontalView(ddConnection, pano, FOV, width, height):
    """Return the view that looks along the panorama's yaw heading.

    Steps 0.25 m from the panorama location along its yaw and asks for
    the single closest view there (10 m search radius).
    """
    lat, lon = moveLocation4(pano['location']['lat'], pano['location']['lon'],
                             .25, pano['pano-orientation']['yaw'])
    views = ddGetViews(ddConnection, lat, lon, 10, maxResults=1,
                       FOV=FOV, width=width, height=height)
    return views[0]
def saveViews(views, outputDir, getDepth=False):
    """Save each view's image (and, when *getDepth* is set, its depth
    data) under *outputDir*; files are named '<lat>,<lon>-<seq>'.

    Fixed: the depth branch referenced helpers through an undefined
    ``earthMine.`` module prefix (this *is* the earthMine module); the
    helpers are now called directly.
    """
    if not os.path.exists(outputDir):
        try:
            os.makedirs(outputDir)
        except Exception:
            print("Error making directory...quitting...")
            return
    count = 0
    for view in views:
        error = 0
        while error < 10:  # retry a flaky download up to 10 times
            try:
                fname = os.path.join(outputDir, str(view["view-location"]["lat"])+","+str(view["view-location"]["lon"])+"-"+"{0:04}".format(count))
                urllib.urlretrieve(view["url"]["href"], fname+".jpg")
                # Optionally derive and save the per-pixel depth data.
                if getDepth:
                    conn = ddObject()
                    locs = ddGetImageLocs(view, conn)
                    ThreeDData = ddImageLocstoLPT(view, locs)
                    ddWriteLocsFile(locs, fname+".locs")
                    ddWriteLPTFile(ThreeDData, fname+".lpt")
                    ddWriteDepthFile(ThreeDData, fname+".depth")
                    ddMakeDepthImage(view, ThreeDData, fname+"depth.tif")
                error = 10  # success: leave the retry loop
            except IOError:
                error = error + 1
        # Persist the view metadata alongside the image.
        f = open(fname+".info", "w")
        f.write(repr(view))
        f.close()
        count += 1
def buildCylinderFromViewNoStreet(conn, spot, imagesPerCylinder):
    """Pan around *spot* and collect the other views of its cylinder,
    skipping index imagesPerCylinder/2 (the view opposite the spot,
    i.e. facing back down the street)."""
    FOV = max(360 / imagesPerCylinder + 20, 60)  # overlap; 110 max for 4 views
    panAmount = 360 / imagesPerCylinder / 2
    cylinder = []
    for step in range(1, imagesPerCylinder):
        if step != imagesPerCylinder / 2:
            time.sleep(DD_DELAY)  # stay under the API rate limit
            cylinder.append(conn.adjustView(spot, FOV, panAmount * step)["view"])
    return cylinder
def buildCylinderFromView(conn, spot, imagesPerCylinder):
    """Return *spot* followed by the panned views completing its
    image cylinder."""
    FOV = max(360 / imagesPerCylinder + 20, 60)  # overlap; 110 max for 4 views
    panAmount = 360 / imagesPerCylinder / 2
    cylinder = [spot]
    for step in range(1, imagesPerCylinder):
        time.sleep(DD_DELAY)  # stay under the API rate limit
        cylinder.append(conn.adjustView(spot, FOV, panAmount * step)["view"])
    return cylinder
def ddMakeImageCyl(ddConnection, lat, lon, numImages, width=DEFWIDTH, height=DEFHEIGHT):
    """Given a latitude and longitude, get closest view
    and derive an image polygon from around this view. FOV
    is set automatically to overlap views in the polygon.
    The number of images must be at least 4"""
    # NOTE(review): for non-numeric input the '+1-1' trick raises
    # TypeError before the comparison is made, so ddError(0) is only
    # actually reached for unusual numeric values (e.g. NaN); confirm
    # whether a plain isinstance check was intended.
    if numImages+1-1 != numImages:
        raise ddError(0, "numImages must be a number")
    if numImages < 4:
        raise ddError(0, "Not enough views specified")
    views = []
    FOV = max(360 / numImages + 20, 60) #For 4 views, FOV is 110, max allowed
    panAmount = 360 / numImages / 2 #Some unkown bug makes this division necessary
    nView = ddGetViews(ddConnection, lat, lon, FOV=FOV, width=width, height=height)[0]
    views.append(nView)
    # Each subsequent view is derived by panning the previous one.
    for i in range(1, numImages):
        time.sleep(DD_DELAY) #Don't exceed my rate limit
        views.append(ddConnection.adjustView(views[i-1], FOV, panAmount)["view"])
    return views
class coord(tuple):
    """Immutable 3-component coordinate: an (x, y, z) tuple subclass.

    Fixed: the original ``__new__`` bound a plain tuple to a local name
    and implicitly returned None, so ``coord(...)`` always evaluated to
    None instead of a coord instance.
    """
    def __new__(cls, x=0, y=0, z=0):
        return tuple.__new__(cls, (x, y, z))
def ddGetImageLocs(view, conn):
    """Given an earthmine view, get 3D points for every pixel in the
    image; the camera is assumed to sit at world point (0,0,0).

    Pixels are requested in DD_VIEWPIXELSIDE x DD_VIEWPIXELSIDE tiles to
    stay under the per-request pixel limit. Returns the concatenated
    list of (pixel, location) pairs.
    """
    width = view["image-size"]["width"]
    height = view["image-size"]["height"]
    locs = []
    for x0 in range(0, width, DD_VIEWPIXELSIDE):
        for y0 in range(0, height, DD_VIEWPIXELSIDE):
            x1 = min(x0 + DD_VIEWPIXELSIDE, width)
            y1 = min(y0 + DD_VIEWPIXELSIDE, height)
            tile = [ddViewPixel(i, j)
                    for i in range(x0, x1)
                    for j in range(y0, y1)]
            print("x={0}, y={1}, {2} pixels.".format(x0, y0, len(tile)))
            locs.extend(conn.getLocationsForViewPixels(view["id"], tile))
    return locs
def ddImageLocstoLPT(view, locs):
    """Convert (pixel, geodetic-location) pairs into (pixel, local-
    tangent-plane point) pairs centred on the view's camera position.

    The camera location becomes the origin and the tangent-plane axes
    are east, north, up; see
    http://psas.pdx.edu/CoordinateSystem/Latitude_to_LocalTangent.pdf
    Pairs whose location is None are kept with a None point.
    """
    origin = ddViewLocationToECEFR(view["view-location"])
    lat = math.radians(view["view-location"]["lat"])
    lon = math.radians(view["view-location"]["lon"])
    # ECEF -> local tangent plane rotation at the origin.
    rot = numpy.matrix([
        [-1*math.sin(lon), math.cos(lon), 0],
        [-1*math.cos(lon)*math.sin(lat), -1*math.sin(lon)*math.sin(lat), math.cos(lat)],
        [math.cos(lon)*math.cos(lat), math.cos(lat)*math.sin(lon), math.sin(lat)]])
    converted = []
    for pixel, geo in locs:
        key = (pixel["x"], pixel["y"])
        if geo is None:
            converted.append((key, None))
        else:
            point = rot * (ddViewLocationToECEFR(geo) - origin)
            converted.append((key, point))
    return converted
def ddMakeDepthImage(view, ThreeDData, filename):
    """Render per-pixel depth into a 32-bit integer ('I' mode) image.

    Depth is the Euclidean norm of the local-tangent-plane point, scaled
    to centimeters; pixels with no 3D data are written as 0.
    """
    im = Image.new("I", (view["image-size"]["width"],
                         view["image-size"]["height"]))
    for xy, point in ThreeDData:
        if point is None:
            im.putpixel(xy, 0)
        else:
            depth_cm = numpy.sqrt(numpy.dot(point.T, point)) * 100
            im.putpixel(xy, depth_cm)
    im.save(filename)
def ddWriteLocsFile(locs, filename):
    """Write one 'x, y, lat, lon, alt' line per (pixel, location) pair;
    pixels with no mapping get NULL placeholders for the geo fields."""
    out = open(filename, "w")
    for pixel, geo in locs:
        if geo is None:
            out.write("{0}, {1}, NULL, NULL, NULL\n".format(pixel["x"], pixel["y"]))
        else:
            out.write("{0}, {1}, {2}, {3}, {4}\n".format(
                pixel["x"], pixel["y"], geo["lat"], geo["lon"], geo["alt"]))
    out.close()
def ddWriteLPTFile(ThreeDData, filename):
f = open(filename,"w")
for d in ThreeDData:
if d[1] is not None:
f.write("{0}, {1}, {2}, {3}, {4}\n".format(d[0][0], d[0][1], d[1][0], d[1][1], d[1][2]).replace("[[","").replace("]]",""))
#else:
#f.write("{0}, {1}, {2}\n".format(d[0][0], d[0][1], "NULL"))
f.close()
def ddWriteDepthFile(ThreeDData, filename):
    """Write one 'x, y, depth' line per pixel.

    Depth is the Euclidean norm of the local-tangent-plane point in
    centimeters (matrix brackets stripped from the formatted value), or
    a literal 0 when the pixel has no 3D point.
    """
    out = open(filename, "w")
    for xy, point in ThreeDData:
        if point is None:
            out.write("{0}, {1}, 0\n".format(xy[0], xy[1]))
        else:
            depth = numpy.sqrt(numpy.dot(point.T, point)) * 100
            out.write("{0}, {1}, {2}\n".format(xy[0], xy[1], depth).replace("[[", "").replace("]]", ""))
    out.close()
def ddViewLocationToECEFR(viewLocation):
    """Convert a geodetic location (lat/lon in degrees, alt in meters)
    into earth-centered earth-fixed rectangular coordinates.

    Returns a 3x1 numpy matrix (column vector), in meters. The radius
    is the latitude-dependent geocentric earth radius (built from the
    equatorial and polar radii) plus the location's altitude.
    """
    equatorial = 6378137.0  # meters
    polar = 6356752.3       # meters
    lat = math.radians(viewLocation["lat"])
    lon = math.radians(viewLocation["lon"])
    surface = math.sqrt(
        ((equatorial * equatorial * math.cos(lat)) ** 2
         + (polar * polar * math.sin(lat)) ** 2)
        / ((equatorial * math.cos(lat)) ** 2
           + (polar * math.sin(lat)) ** 2))
    radius = surface + viewLocation["alt"]
    return numpy.matrix([radius * math.cos(lon) * math.cos(lat),
                         radius * math.sin(lon) * math.cos(lat),
                         radius * math.sin(lat)]).T
def ddMakeDepthMap(lat, lon, conn, name, width=DEFWIDTH, height=DEFHEIGHT):
    """Build a 12-image cylinder around (lat, lon) and, for every view,
    save the image plus its .locs/.lpt/.depth files and a depth TIFF,
    all named '<name><index>'."""
    views = ddMakeImageCyl(conn, lat, lon, 12, width, height)
    print("Done getting views!")
    for index, view in enumerate(views):
        stem = name + repr(index)
        locs = ddGetImageLocs(view, conn)
        ThreeDData = ddImageLocstoLPT(view, locs)
        ddWriteLocsFile(locs, stem + ".locs")
        ddWriteLPTFile(ThreeDData, stem + ".lpt")
        ddWriteDepthFile(ThreeDData, stem + ".depth")
        ddMakeDepthImage(view, ThreeDData, stem + "depth.tif")
        urllib.urlretrieve(view["url"]["href"], stem + ".jpg")
    return
def ddGetAllPixels(pixels, viewId, keep_None=False):
    """fetches an arbitrary amount of pixels from EarthMine Direct Data

    Batches requests in chunks of 490 pixels (API limit) and returns a
    dict mapping (x, y) tuples to their resolved locations. Unmapped
    pixels are dropped unless keep_None is set.

    NOTE: Python 2 only ('except Exception, e' and print-statement
    syntax).
    """
    conn = ddObject()
    viewPixels = [ddViewPixel(p[0], p[1]) for p in pixels]
    locs = {}
    while viewPixels:
        response = None
        # Up to 3 attempts per batch; a bare Exception is raised when
        # all attempts fail.
        for retry in range(3):
            try:
                if retry:
                    print 'try %d' % retry
                response = conn.getLocationsForViewPixels(viewId, viewPixels[:490])
                break
            except Exception, e:
                print e
        if response is None:
            raise Exception
        viewPixels = viewPixels[490:] # limit for api
        for pixel, loc in response:
            if loc or keep_None: # has valid mapping
                locs[(pixel['x'], pixel['y'])] = loc
    return locs # map of (x,y) to coords
|
UTF-8
|
Python
| false | false | 2,012 |
3,478,923,540,083 |
f6a6cb6499a388b528a0ddd6125499d7d5753573
|
6d9266cb235a116545aaaba8054343c5611ccd3f
|
/lib/book_scan.py
|
a24b2584862ded04ff239e1a3267b33afbef6a4a
|
[] |
no_license
|
pombredanne/flatfile
|
https://github.com/pombredanne/flatfile
|
f0a6e23850edaddb21cfe06672bd6cdfff69b9a4
|
07023bae37006bfee55f754f43b2b32cb8d37b37
|
refs/heads/master
| 2018-06-03T01:29:59.200203 | 2014-04-06T09:39:34 | 2014-04-06T09:39:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8-*-
"""load books to repo
"""
import os
import time
import shutil
if __name__ == "__main__":
import sys
sys.path.append('..') # flatfile package
from lib.util import md5_for_file, is_hiden
import model as model
from settings import media_path, ignore_seq
class BookScan():
    """Recursively scans a directory tree for book files (selected by
    extension), copies each file into media_path under an md5-derived
    name and upserts its metadata into the 'book' collection."""
    def __init__(self, src_path, output, ext_pool='.pdf', ignore_hidden=True):
        # output: a callable receiving progress strings (e.g. sys.stdout.write)
        self.db = model.connect()
        if not os.path.exists(media_path):
            os.makedirs(media_path)
        self.ext_pool = ext_pool
        self.ignore_hidden = ignore_hidden
        self.src_path = src_path
        self.output = output
        # NOTE(review): flag is toggled by StopScan() but never consulted
        # in add_path, so a running scan cannot actually be interrupted.
        self.flag = True
    def start(self):
        # Kick off the recursive scan and report how many books were added.
        self.output('%s books added\n' % self.add_path(self.src_path))
    def add_path(self, src_path):
        """add file/dir and copy files.
        @src_path: unicode encoding is required
        @ext_pool: startswith . and separated by ,
        @ignore_hidden: boolen

        Returns the number of files added (0 for a skipped file), or
        None when src_path does not exist.
        """
        if not os.path.exists(src_path): # not exists
            return None
        # common check
        # sensitive information
        if os.path.isfile(src_path): # file
            rawname, ext = os.path.splitext(os.path.basename(src_path))
            if not ext or ext not in self.ext_pool: # file extension check
                return 0
            file_meta = {'rawname': [rawname],
                         'ext': ext
                         }
            self._add_file(src_path, file_meta)
            return 1
        else: # dir
            added = 0
            # ignore log/.git etc
            tar_path = set(os.listdir(src_path)) - ignore_seq
            for rel_path in tar_path:
                abs_path = os.path.join(src_path, rel_path)
                if not self.ignore_hidden or not is_hiden(abs_path):
                    # ignore hidden
                    added += self.add_path(abs_path) or 0
            return added
    def StopScan(self):
        # Request scan stop (see NOTE in __init__: currently has no effect).
        self.flag = False
    def _add_file(self, src_path, file_meta):
        # Upsert metadata keyed on the file's md5 and copy the file into
        # media_path as '<md5><ext>' unless it already exists there.
        if 'md5' not in file_meta:
            file_meta['md5'] = md5_for_file(src_path)
        if "rawname" in file_meta:
            rawname = file_meta.pop("rawname").pop()
            self.output("add %s\n" % rawname)
        # NOTE(review): if 'rawname' is absent from file_meta, the
        # $addToSet below references an unbound 'rawname' name -- all
        # current callers do supply it; confirm before relying on that.
        file_meta.update({'sizeInBytes': os.path.getsize(src_path),
                          'create_time': time.time()
                          })
        matcher = {'md5': file_meta['md5']}
        # upsert=True: insert a new document when no md5 match exists.
        setter = {"$set": file_meta, "$addToSet": {"rawname": rawname}}
        self.db.book.update(matcher, setter, True)
        filename = '%s%s' % (file_meta['md5'], file_meta['ext'])
        dst_file = os.path.join(media_path, filename)
        if not os.path.exists(dst_file):
            shutil.copy(src_path, dst_file)
if __name__ == "__main__":
    import sys
    # Scan the local books directory, reporting progress to stdout.
    scanner = BookScan(u'/media/document/books', sys.stdout.write)
    # When daemonic is True, the worker thread exits when the main thread ends.
    #scanner.setDaemon(True)
    scanner.start()
|
UTF-8
|
Python
| false | false | 2,014 |
7,876,970,061,303 |
ba9bd909d68cdc6526acaa288a3a6da291d78bbb
|
6703e02500b0c6133b88a6cdcb7d697aab548a43
|
/game_objects/weapons.py
|
478e23ebd458f2f4bf962b4a22ca57235f748504
|
[] |
no_license
|
volrath/aigames
|
https://github.com/volrath/aigames
|
f127b852efedd161f5bac257215e19f4d1c157a8
|
385d243c44d2c967ed3f6df10e44886f332745c2
|
refs/heads/master
| 2020-06-04T20:28:22.175879 | 2009-04-08T09:19:02 | 2009-04-08T09:19:02 | 196,714 | 4 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from projectiles import *
class Weapon(object):
    """Base weapon: tracks shooting force and a pulsing hit animation size."""

    def __init__(self):
        # hit_state: +1 while the weapon is growing, -1 while shrinking back.
        self.hit_state = 1
        self.shooting_force = 25

    def _dynamic_size(self):
        # Current animated size; subclasses define hit_size / original_size.
        return self.hit_size

    size = property(_dynamic_size)

    def hit(self, increase_size):
        """Advance the hit animation by one step.

        The size pulses up to original_size * increase_size and then back
        down.  Returns True while the animation is still running, False
        once the weapon has settled back to its original size.
        """
        lower = self.original_size
        upper = self.original_size * increase_size
        if lower <= self.hit_size <= upper:
            # Inside the animation band: keep moving in the current direction.
            self.hit_size += self.hit_state * .4
            return True
        if self.hit_size > upper:
            # Overshot the peak: clamp and start shrinking.
            self.hit_size = upper
            self.hit_state = -1
            return True
        if self.hit_size < lower:
            # Undershot the rest size: snap back, animation finished.
            self.hit_size = self.original_size
            self.hit_state = 1
            return False
        return True
class SlashWeapon(Weapon):
    # Standard player weapon; fires SlashNormalBullet projectiles.
    shooting_force = 20
    orientation = 5
    max_shooting_force = 50
    bullet_class = SlashNormalBullet
    color = 1., 234/255., 0.  # RGB floats in [0, 1] (yellow)
    original_size = 3.2
    hit_size = 3.2
class SlashSuperWeapon(Weapon):
    # Charged player weapon; much higher force, fires SlashSuperBullet.
    shooting_force = 440
    orientation = 2
    max_shooting_force = 440
    bullet_class = SlashSuperBullet
    color = 1., 234/255., 0.  # RGB floats in [0, 1] (yellow)
    original_size = 3.8
    hit_size = 3.8
class EnemyNormalWeapon(Weapon):
    # Default enemy weapon; fires EnemyBullet projectiles.
    shooting_force = 40
    orientation = 10
    max_shooting_force = 40
    bullet_class = EnemyBullet
    color = 84/255., 212/255., 244/255.  # RGB floats in [0, 1] (light blue)
    original_size = 3.2
    hit_size = 3.2
|
UTF-8
|
Python
| false | false | 2,009 |
10,342,281,289,403 |
f3d1fc748baea28790b19aa40ebb324a58c8ddf6
|
178d09d591559956f3216c612de85ef83eaf5072
|
/omxerr.py
|
895d340014a010d0141560082d5309782ad5963d
|
[] |
no_license
|
zhang-peter/py_omxilc
|
https://github.com/zhang-peter/py_omxilc
|
0ec676a1a4bd7e83cc4049ad1715376e80f3ccfa
|
f03f2cec234b9341dc05c015048945f3ead30a0f
|
refs/heads/master
| 2020-12-02T22:46:22.742347 | 2014-10-19T12:48:47 | 2014-10-19T12:48:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Module Name: omxerr.py
Python Version: 2.7.6
Standard OMX Error Class
Copyright (c) 2014 Binh Bui
Redistribution and use in source and binary forms, with or without
modification, are permitted.
"""
#===============================================================================
# Copyright (c) 2008 The Khronos Group Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject
# to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#===============================================================================
class OMXError(object):
    """
    Standard OMX Error Class
    """
    def __init__(self):
        """
        Initialize a dictionary of standard OMX errors indexed by codes.
        For the definition of these errors, see the OMX_ERRORTYPE enumeration
        in OMX_Core.h (or omx_core.py).
        """
        # Maps 32-bit OMX_ERRORTYPE codes to their symbolic names.
        self.dict = {
            0x00000000: 'OMX_ErrorNone',
            0x80001000: 'OMX_ErrorInsufficientResources',
            0x80001001: 'OMX_ErrorUndefined',
            0x80001002: 'OMX_ErrorInvalidComponentName',
            0x80001003: 'OMX_ErrorComponentNotFound',
            0x80001004: 'OMX_ErrorInvalidComponent',
            0x80001005: 'OMX_ErrorBadParameter',
            0x80001006: 'OMX_ErrorNotImplemented',
            0x80001007: 'OMX_ErrorUnderflow',
            0x80001008: 'OMX_ErrorOverflow',
            0x80001009: 'OMX_ErrorHardware',
            0x8000100A: 'OMX_ErrorInvalidState',
            0x8000100B: 'OMX_ErrorStreamCorrupt',
            0x8000100C: 'OMX_ErrorPortsNotCompatible',
            0x8000100D: 'OMX_ErrorResourcesLost',
            0x8000100E: 'OMX_ErrorNoMore',
            0x8000100F: 'OMX_ErrorVersionMismatch',
            0x80001010: 'OMX_ErrorNotReady',
            0x80001011: 'OMX_ErrorTimeout',
            0x80001012: 'OMX_ErrorSameState',
            0x80001013: 'OMX_ErrorResourcesPreempted',
            0x80001014: 'OMX_ErrorPortUnresponsiveDuringAllocation',
            0x80001015: 'OMX_ErrorPortUnresponsiveDuringDeallocation',
            0x80001016: 'OMX_ErrorPortUnresponsiveDuringStop',
            0x80001017: 'OMX_ErrorIncorrectStateTransition',
            0x80001018: 'OMX_ErrorIncorrectStateOperation',
            0x80001019: 'OMX_ErrorUnsupportedSetting',
            0x8000101A: 'OMX_ErrorUnsupportedIndex',
            0x8000101B: 'OMX_ErrorBadPortIndex',
            0x8000101C: 'OMX_ErrorPortUnpopulated',
            0x8000101D: 'OMX_ErrorComponentSuspended',
            0x8000101E: 'OMX_ErrorDynamicResourcesUnavailable',
            0x8000101F: 'OMX_ErrorMbErrorsInFrame',
            0x80001020: 'OMX_ErrorFormatNotDetected',
            0x80001021: 'OMX_ErrorContentPipeOpenFailed',
            0x80001022: 'OMX_ErrorContentPipeCreationFailed',
            0x80001023: 'OMX_ErrorSeperateTablesUsed',
            0x80001024: 'OMX_ErrorTunnelingUnsupported',
            0x8F000000: 'OMX_ErrorKhronosExtensions',
            0x90000000: 'OMX_ErrorVendorStartUnused',
            0x90000001: 'OMX_ErrorDiskFull',
            0x90000002: 'OMX_ErrorMaxFileSize',
            0x90000003: 'OMX_ErrorDrmUnauthorised',
            0x90000004: 'OMX_ErrorDrmExpired',
            0x90000005: 'OMX_ErrorDrmGeneral'
            }
    #---------------------------------------------------------------------------
    def Print(self, error, comp_name='', method=''):
        """
        Method to print the name of an error detected by a component, or
        resulted from the unsuccessful execution of a method or function.
        Parameters:
            error       <int>   error code from OMX_ERRORTYPE enumeration
            comp_name   <str>   name of component (optional)
            method      <str>   name of method or function (optional)
        Return values:
            None
        """
        if error:
            # Mask to 32 bits in case the code arrived sign-extended.
            e = error & 0xffffffff
            # Build the "component.method failed:" prefix from the optional
            # context arguments.
            s = ''
            if comp_name:
                s += comp_name + '.'
            if method:
                s += method + ' failed:'
            l = len(s)
            if l > 0:
                # Replace the trailing separator ('.' or ':') with one ':'.
                s = s[:-1] + ':'
            else:
                # No context given: prefix with the hex error code instead.
                s = '%s:' % hex(e)
            if e in self.dict:
                error_name = self.dict[e]
            else:
                error_name = 'Undefined Error Code'
            if l > 0:
                # Append the raw code when it is not already in the prefix.
                error_name += ' (%s)' % hex(e)
            print s, error_name
|
UTF-8
|
Python
| false | false | 2,014 |
14,405,320,311,253 |
400aba642c43e2b31ec12e2601f3bd06dfbe3548
|
c6ac3bade87a886b413e80b163893e5a9cc38c3b
|
/TestIosLogParser/TestScanner.py
|
65cb1d13250747780d177564a165ccdc94f6024d
|
[] |
no_license
|
fstakem/CockroachKungfu
|
https://github.com/fstakem/CockroachKungfu
|
8eacedb97c8a108e2370076cba578ae580ae8606
|
5689c12639e2c0c8986094fe7b7cc5ed5b0eb92e
|
refs/heads/master
| 2021-01-01T19:16:05.065207 | 2013-06-19T15:16:21 | 2013-06-19T15:16:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ------------------------------------------------------
#
# TestScanner.py
# By: Fred Stakem
# Created: 3.30.13
#
# ------------------------------------------------------
# Libs
import unittest
# User defined
from Globals import *
from Utilities import *
from LogParser import LogSource
from IosLogParser import TokenType
from IosLogParser import Token
from IosLogParser import Scanner
from IosLogParser import ScannerState
class ScannerTest(unittest.TestCase):
    """Unit tests for the iOS log Scanner tokenizer."""

    # Setup logging
    logger = Utilities.getLogger('ScannerTest')
    data_file = '../data/iphone_log_1.txt'

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        # A fresh source/scanner per test; each test reset()s it with input.
        self.source = LogSource('iOS Test Source', None)
        self.scanner = Scanner('iOS Test Scanner', self.source)

    def tearDown(self):
        pass

    @log_test(logger, globals.log_separator)
    def testScannerTimestamp(self):
        """Scanner emits a TIMESTAMP token from the start of a line."""
        timestamp_str = '2013-03-22 22:17:52.317'
        input_symbols = timestamp_str + ' MobileCalculator'
        expected_token = Token(TokenType.TIMESTAMP, timestamp_str)
        # Setup
        self.scanner.reset(input_symbols)
        ScannerTest.logger.debug('Input data: %s' % (input_symbols))
        # Timestamp
        self.getNextTokenAndTestEquality(expected_token, 'Timestamp')
        ScannerTest.logger.debug('Test succeeded!')

    @log_test(logger, globals.log_separator)
    def testScannerSource(self):
        """Scanner emits a SOURCE token after the timestamp state."""
        source_str = 'MobileCalculator'
        input_symbols = source_str + '[22262:c07]'
        expected_token = Token(TokenType.SOURCE, source_str)
        # Setup
        self.scanner.reset(input_symbols)
        self.scanner.state = ScannerState.SCANNED_DATETIME
        ScannerTest.logger.debug('Input data: %s' % (input_symbols))
        # Source
        self.getNextTokenAndTestEquality(expected_token, 'Source')
        ScannerTest.logger.debug('Test succeeded!')

    @log_test(logger, globals.log_separator)
    def testScannerPid(self):
        """Scanner emits a PID token from the bracketed pid:port pair."""
        pid_str = '22262'
        input_symbols = 'MobileCalculator[' + pid_str + ':c07]'
        expected_token = Token(TokenType.PID, pid_str)
        # Setup
        self.scanner.reset(input_symbols)
        self.scanner.state = ScannerState.SCANNED_DATETIME
        ScannerTest.logger.debug('Input data: %s' % (input_symbols))
        # Source
        self.getNextToken()
        # PID
        self.getNextTokenAndTestEquality(expected_token, 'PID')
        ScannerTest.logger.debug('Test succeeded!')

    @log_test(logger, globals.log_separator)
    def testScannerMachPort(self):
        """Scanner emits a MACH_PORT token from the bracketed pid:port pair."""
        mach_port_str = 'c07'
        input_symbols = 'MobileCalculator[22262:' + mach_port_str + ']'
        expected_token = Token(TokenType.MACH_PORT, mach_port_str)
        # Setup
        self.scanner.reset(input_symbols)
        self.scanner.state = ScannerState.SCANNED_DATETIME
        ScannerTest.logger.debug('Input data: %s' % (input_symbols))
        # Source
        self.getNextToken()
        # PID
        self.getNextToken()
        # Mach port
        self.getNextTokenAndTestEquality(expected_token, 'Mach port')
        ScannerTest.logger.debug('Test succeeded!')

    @log_test(logger, globals.log_separator)
    def testScannerMsg(self):
        """Scanner emits the trailing MSG token up to the newline."""
        msg_str = 'Digit pressed: 8'
        input_symbols = msg_str + '\n'
        expected_token = Token(TokenType.MSG, msg_str)
        # Setup
        self.scanner.reset(input_symbols)
        self.scanner.state = ScannerState.SCANNED_MACH_PORT
        ScannerTest.logger.debug('Input data: %s' % (input_symbols[:-1]))
        # Msg
        self.getNextTokenAndTestEquality(expected_token, 'Message')
        ScannerTest.logger.debug('Test succeeded!')

    @log_test(logger, globals.log_separator)
    def testScannerLine(self):
        """Scanner tokenizes a complete log line into all five tokens."""
        timestamp_str = '2013-03-22 22:17:52.317'
        source_str = 'MobileCalculator'
        pid_str = '22262'
        mach_port_str = 'c07'
        msg_str = 'Digit pressed: 8'
        input_symbols = '%s %s[%s:%s] %s\n' % (timestamp_str, source_str, pid_str, mach_port_str, msg_str)
        # Expected tokens
        timestamp_token = Token(TokenType.TIMESTAMP, timestamp_str)
        source_token = Token(TokenType.SOURCE, source_str)
        pid_token = Token(TokenType.PID, pid_str)
        mach_port_token = Token(TokenType.MACH_PORT, mach_port_str)
        msg_token = Token(TokenType.MSG, msg_str)
        # Setup
        self.scanner.reset(input_symbols)
        ScannerTest.logger.debug('Input data: %s' % (input_symbols[:-1]))
        # Timestamp
        self.getNextTokenAndTestEquality(timestamp_token, 'Timestamp')
        # Source
        self.getNextTokenAndTestEquality(source_token, 'Source')
        # PID
        self.getNextTokenAndTestEquality(pid_token, 'PID')
        # Mach port
        self.getNextTokenAndTestEquality(mach_port_token, 'Mach port')
        # Msg
        self.getNextTokenAndTestEquality(msg_token, 'Message')
        ScannerTest.logger.debug('Test succeeded!')

    @log_test(logger, globals.log_separator)
    def testScannerRealData(self):
        """Scan every line of a real captured log file without errors."""
        ScannerTest.logger.debug('Testing data in the file: ' + ScannerTest.data_file)
        lines = readLinesFromFile(ScannerTest.data_file)
        # Re-append newlines.  The previous `for line in lines: line += '\n'`
        # was a no-op: it rebound the loop variable, not the list entries.
        lines = [line + '\n' for line in lines]
        errors = []
        for i, line in enumerate(lines):
            token = 1
            self.scanner.reset(line)
            ScannerTest.logger.debug('Scanning line %d' % (i+1))
            ScannerTest.logger.debug('Input data: %s' % (line[:-1]))
            while token is not None:
                token, error = self.getNextToken()
                if error is not None:
                    errors.append(error)
        if len(errors) != 0:
            ScannerTest.logger.debug('Found the following errors scanning the file:')
            for i, error in enumerate(errors):
                start_position = error[0]
                current_position = error[1]
                error_except = error[2]
                output = 'Error: %s  Start position: %d  End position: %d' % (str(error_except), start_position, current_position)
                ScannerTest.logger.debug(output)
            # %d needs an int; the previous code passed str(...) (TypeError)
            # and over-counted the errors by one.
            assert False, 'Found %d errors scanning the file.' % len(errors)
        ScannerTest.logger.debug('Test succeeded!')

    def getNextTokenAndTestEquality(self, expected_token, type_str):
        """Fetch the next token and assert it equals *expected_token*."""
        ScannerTest.logger.debug('Expected token: %s' % (str(expected_token)))
        token, error = self.getNextToken()
        output = '%s string was incorrectly scanned.' % (type_str)
        assert token == expected_token, output
        return token

    def getNextToken(self):
        """Run one scanner step; assert no error and return (token, error).

        NOTE(review): the assert fires before errors can be collected by
        testScannerRealData's error list -- confirm which behavior is wanted.
        """
        token, current_symbol, state, error = self.scanner.scan()
        assert error is None, error[2]
        ScannerTest.logger.debug('Scanned token: %s' % (str(token)))
        return (token, error)
|
UTF-8
|
Python
| false | false | 2,013 |
19,396,072,341,861 |
878b5a816fedf821ee3facdfb2f726d6e604d36d
|
21b5c9dfbf150cc654007c4a2a57d72334d5c82c
|
/utils/bingscraper.py
|
93c61f5229254c66d656af26ddae33d2aeb16af6
|
[] |
no_license
|
irtefa/skoop
|
https://github.com/irtefa/skoop
|
6e7cb2b4611a641ecea9ba19be0d32caf262cf53
|
138eba54c2b8f73bca76738d146e0c7ac698fbb4
|
refs/heads/master
| 2016-09-06T07:09:08.843375 | 2013-05-07T18:40:30 | 2013-05-07T18:40:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#from bs4 import BeautifulSoup
from BeautifulSoup import BeautifulSoup
import urllib2
def get_urls(q, pages):
## Fetch the results
query = q
pageStart = pages # The number of the result to start at
url = "http://www.bing.com/search?q={0}&first={1}".format(query, pageStart)
print url
page = urllib2.urlopen(url)
## Parse results
soup = BeautifulSoup(page.read())
## Find usable results, in sa_wr class list items
results = soup.findAll('li', attrs={'class': 'sa_wr'})
resultList = []
## Return dictionaries of results
for r in results:
header = r.find('h3').find('a')
link = header['href']
title = header.text
content = r.find('p').text
resultList.append({'url': link, 'title': title, 'content': content})
return resultList
|
UTF-8
|
Python
| false | false | 2,013 |
5,093,831,214,066 |
5c0ae0530a791aa8a68775c1699fd2034b3b7d91
|
708b6bc22f9b40d4b5c989967951ea0af006fcb9
|
/gui.py
|
914d72a235af827f64c9b67c6f5ccb8210ea45a1
|
[] |
no_license
|
AndreMiras/repartiteur-de-mises
|
https://github.com/AndreMiras/repartiteur-de-mises
|
cb473e29c7c441c6bd33d5bb35924339baaaa060
|
33e7ea6d6311e477bf97d364b2377c15d1f0c71d
|
refs/heads/master
| 2020-04-18T21:50:53.329373 | 2011-12-28T01:07:33 | 2011-12-28T01:07:33 | 33,007,640 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui.ui'
#
# Created: Tue Dec 27 20:37:33 2011
# by: PyQt4 UI code generator 4.8.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Use QString.fromUtf8 when available; otherwise (AttributeError on PyQt
# builds without QString) fall back to the identity function.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class Ui_Form(object):
    """Auto-generated UI class for the bet-splitter form (pyuic4 from
    gui.ui).  Regenerate from the .ui file rather than editing by hand --
    see the warning in the file header."""

    def setupUi(self, Form):
        """Build all widgets and layouts and attach them to *Form*."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(472, 298)
        self.gridLayout_2 = QtGui.QGridLayout(Form)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.label = QtGui.QLabel(Form)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.targetedProfitSpinBox = QtGui.QSpinBox(Form)
        self.targetedProfitSpinBox.setMaximum(999)
        self.targetedProfitSpinBox.setProperty(_fromUtf8("value"), 10)
        self.targetedProfitSpinBox.setObjectName(_fromUtf8("targetedProfitSpinBox"))
        self.gridLayout.addWidget(self.targetedProfitSpinBox, 0, 1, 1, 1)
        self.label_2 = QtGui.QLabel(Form)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        self.totalBetLabel = QtGui.QLabel(Form)
        self.totalBetLabel.setObjectName(_fromUtf8("totalBetLabel"))
        self.gridLayout.addWidget(self.totalBetLabel, 1, 1, 1, 1)
        self.gridLayout_2.addLayout(self.gridLayout, 0, 0, 1, 1)
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem, 0, 1, 1, 1)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_2.addItem(spacerItem1, 1, 0, 1, 1)
        # Main table: 4 fixed rows (PMU number, odds, stake, effective
        # winnings) by a variable number of bet columns (6 initially).
        self.tableWidget = QtGui.QTableWidget(Form)
        self.tableWidget.setObjectName(_fromUtf8("tableWidget"))
        self.tableWidget.setColumnCount(6)
        self.tableWidget.setRowCount(4)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(2, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(3, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(2, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(3, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(4, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(5, item)
        self.tableWidget.horizontalHeader().setVisible(False)
        self.gridLayout_2.addWidget(self.tableWidget, 3, 0, 1, 5)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem2, 5, 0, 1, 3)
        spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem3, 5, 4, 1, 2)
        spacerItem4 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_2.addItem(spacerItem4, 4, 0, 1, 1)
        # Side column with +/- buttons to add or remove bet columns.
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.addColumnPushButton = QtGui.QPushButton(Form)
        self.addColumnPushButton.setObjectName(_fromUtf8("addColumnPushButton"))
        self.verticalLayout.addWidget(self.addColumnPushButton)
        self.removeColumnPushButton = QtGui.QPushButton(Form)
        self.removeColumnPushButton.setObjectName(_fromUtf8("removeColumnPushButton"))
        self.verticalLayout.addWidget(self.removeColumnPushButton)
        spacerItem5 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem5)
        self.gridLayout_2.addLayout(self.verticalLayout, 3, 5, 1, 1)
        self.okCancelButtonBox = QtGui.QDialogButtonBox(Form)
        self.okCancelButtonBox.setStandardButtons(QtGui.QDialogButtonBox.Ok|QtGui.QDialogButtonBox.Reset)
        self.okCancelButtonBox.setObjectName(_fromUtf8("okCancelButtonBox"))
        self.gridLayout_2.addWidget(self.okCancelButtonBox, 5, 3, 1, 1)
        self.integerBetCheckBox = QtGui.QCheckBox(Form)
        self.integerBetCheckBox.setObjectName(_fromUtf8("integerBetCheckBox"))
        self.gridLayout_2.addWidget(self.integerBetCheckBox, 0, 3, 1, 1)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Set all user-visible (French) strings via Qt's translation layer."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Repartiteur de mises", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Form", "Gain vise", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Form", "Mise totale effective:", None, QtGui.QApplication.UnicodeUTF8))
        self.totalBetLabel.setText(QtGui.QApplication.translate("Form", "0", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.verticalHeaderItem(0).setText(QtGui.QApplication.translate("Form", "N° PMU", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.verticalHeaderItem(1).setText(QtGui.QApplication.translate("Form", "Cote", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.verticalHeaderItem(2).setText(QtGui.QApplication.translate("Form", "Mise", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.verticalHeaderItem(3).setText(QtGui.QApplication.translate("Form", "Gain effectifs", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(0).setText(QtGui.QApplication.translate("Form", "Col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(1).setText(QtGui.QApplication.translate("Form", "Col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(2).setText(QtGui.QApplication.translate("Form", "Col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(3).setText(QtGui.QApplication.translate("Form", "Col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(4).setText(QtGui.QApplication.translate("Form", "Col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(5).setText(QtGui.QApplication.translate("Form", "Col", None, QtGui.QApplication.UnicodeUTF8))
        self.addColumnPushButton.setText(QtGui.QApplication.translate("Form", "+", None, QtGui.QApplication.UnicodeUTF8))
        self.removeColumnPushButton.setText(QtGui.QApplication.translate("Form", "-", None, QtGui.QApplication.UnicodeUTF8))
        self.integerBetCheckBox.setText(QtGui.QApplication.translate("Form", "Mises entieres", None, QtGui.QApplication.UnicodeUTF8))
|
UTF-8
|
Python
| false | false | 2,011 |
15,401,752,763,401 |
9bc77dd78d504de82879d9fc49e8cd86e13291c0
|
661919e4d718798b9d758229062e077d645e8ba0
|
/dashboard/services/bqclient.py
|
668077d02e092d941808557f861eed7f99ffdbf1
|
[] |
no_license
|
cparks0225/old_dashboard
|
https://github.com/cparks0225/old_dashboard
|
9468b219ca5aff7f98a80cfe299c65dc10c9ee32
|
54f96acd552d357328d33c51c18ecd966a60e7ed
|
refs/heads/master
| 2016-08-07T03:43:38.888207 | 2014-08-30T18:39:00 | 2014-08-30T18:39:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os, sys
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials, AccessTokenRefreshError
import httplib2
import socks
from query import QueryError
from datetime import datetime, date, timedelta
import time
import json
# initialize logging
from dashboard.logger.ttdiag import Logger
# Module-level diagnostic logger.  When Flask is importable, the logger is
# rebound with the Flask session -- presumably so entries can carry
# per-session context (TODO confirm Logger's second-argument semantics).
log = Logger('bq')
try:
    from flask import session
    log = Logger('bq', session)
except ImportError:
    pass

# Google BigQuery project id for the ttdiag project.
TTDIAG_PROJECT = '1021946877460'
# WARNING! ONLY TO BE GENERATED ONCE
# Use this one time token generation when the server starts.
# Use the same token for multiple bqclient instances
def generateToken():
    """
    generates OAuth2.0 token/credentials for login to google big query

    NOTE(review): this mutates process-wide state (sys.path and the current
    working directory) so the private-key file can be opened by a relative
    path -- confirm no caller depends on the previous cwd.
    """
    abspath = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(abspath)
    os.chdir(abspath)
    # Read the service-account private key, closing the file handle promptly.
    with open("f93b9d0fddadbeb5903ccf176bf196b0bd7e7b7d-privatekey.p12", 'rb') as key_file:
        key_bytes = key_file.read()
    credentials = SignedJwtAssertionCredentials(
        "[email protected]",
        key_bytes,
        "https://www.googleapis.com/auth/bigquery")
    return credentials
class BigQueryClient(object):
def authenticate(self, credentials):
http = httplib2.Http(proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, '119.0.200.81', 31280))
http = credentials.authorize(http)
return http
def __init__(self, credentials, project):
http = self.authenticate(credentials)
self.service = build('bigquery', 'v2', http=http)
self._project = project
def delete_table(self, dataset, table_root, start_date, end_date=None):
"""
function to delete bq tables
arguments
dataset - project dataset (required)
table_root - table root name excluding the dates (required)
start_date - delete table start date [mm-dd-yyyy] (required)
end_date - delete table end date [mm-dd-yyyy] (required)
* the time range is [start_date - end_date) end_date excluded
"""
if end_date is None:
end_date = start_date
begin = time.mktime(time.strptime(start_date, '%m-%d-%Y'))
end = time.mktime(time.strptime(end_date , '%m-%d-%Y'))
begin = date.fromtimestamp(begin)
end = date.fromtimestamp(end)
# We have the start and end dates. Iterate to get the daily tables
day = timedelta(days=1)
tables = []
while begin != end :
table = "".join([
table_root,
"%04d" % begin.year,
"%02d" % begin.month,
"%02d" % begin.day
])
tables.append(table)
begin = begin + day
errors = []
for table in tables:
try:
delete_job = self.service.tables().delete(projectId=self._project, datasetId=dataset, tableId=table)
delete_job.execute()
except HttpError:
errors.append(table)
if errors:
print "tables not deleted (probably missing)"
for missing_table in errors:
print missing_table
def query(self, query):
query_str = " ".join(query.split('\n'))
params = {'query': query, 'timeoutMs': 1000*30}
try:
query = self.service.jobs().query(projectId=self._project, body=params)
result = query.execute()
except Exception as err:
log.error(" | ".join([str(e), query_str]))
else:
log.info("Query Executed: {0}".format(query_str))
# TODO - error handling for timeouts (using getQuery interface)
return result
def data_table(self, assembled_query):
"""
responsible for returning a google datatable formatted data for the each user query
takes a dictionary input which contains the a list of query and the handler for the
result of the query
"""
raw_data = []
for query_str in assembled_query['query']:
try:
result = self.query(query_str)
raw_data.append(result)
handler = assembled_query['handler']
except HttpError as err:
return {'errorcode': -1, 'error': err._get_reason(), 'query': assembled_query['query']} # BQError(err, assembled_query['query']))}
except Exception as err:
return {'errorcode': -1, 'error': err, 'query': assembled_query['query']} # BQError(err, assembled_query['query']))}
try:
data = handler.data_table(raw_data, assembled_query)
return data
except KeyError as kerr:
print kerr
return {'errorcode': -3, 'error': 'No Data was returned for your Query'}
except QueryError as qerr:
print qerr
return {'errorcode': -3, 'error': qerr.html()}
except Exception as err:
print err
return {'errorcode':-1, 'error': "Internal Error!"}
class BQError(Exception):
    """Error class for BigQuery failures: pairs a message ("stringified"
    error) with the query that produced it."""

    def __init__(self, msg, query):
        self.msg, self.query = msg, query

    def __str__(self):
        return "BQ Error: \n Message: {0} \n Associated Query: {1}".format(
            self.msg, self.query)
if __name__ == '__main__':
    # Ad-hoc maintenance entry point: print the module directory, then drop
    # one week of daily 'fa_round_trip' tables (end date excluded).
    print os.path.dirname(os.path.abspath(__file__))
    bqclient = BigQueryClient(generateToken(), TTDIAG_PROJECT)
    bqclient.delete_table('latency_data', 'fa_round_trip', '09-03-2013', '09-10-2013')
|
UTF-8
|
Python
| false | false | 2,014 |
4,569,845,229,360 |
9a4617e8ac2bde5a0ee697a0ad9e3558878eb0b6
|
f3762dca5e4956144f430f423340bdcd6604dfea
|
/scripts/Define_1_vector_layer_properties.py
|
831cde2c01582d97383fa3866e7d4971539ef640
|
[] |
no_license
|
sindile/QGIS-Processing
|
https://github.com/sindile/QGIS-Processing
|
7ba7a6e5eda79d86589770b423ae4d00528d0bf9
|
9cd18fa13ab7b74c24de0da3653aa252ec055de7
|
refs/heads/master
| 2020-12-25T03:49:19.966042 | 2014-06-02T20:33:24 | 2014-06-02T20:33:24 | 20,726,685 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##[3liz - Vector]=group
##Define 1 vector layer properties=name
##Vector_layer=vector
##QML_file=file
##Coordinate_Reference_System=crs None
##Create_spatial_index=boolean False
##Calculate_extent=boolean False
##Layer_title=string
##Layer_abstract=longstring
##Save_layer_style_as_default=boolean False
from qgis.core import *
from qgis.utils import iface
import os

# NOTE: the '##' lines above are parsed by QGIS Processing as this script's
# input declarations -- they are functional, not ordinary comments.

# rename inputs
qml = QML_file
crs = Coordinate_Reference_System
csi = Create_spatial_index
ce = Calculate_extent
lt = Layer_title
la = Layer_abstract
ss = Save_layer_style_as_default

# Get layer object
layer = processing.getObject(Vector_layer)
provider = layer.dataProvider()

# Set style from QML
if os.path.exists(qml):
    layer.loadNamedStyle(qml)
    iface.legendInterface().refreshLayerSymbology(layer)

# Set CRS
if Coordinate_Reference_System:
    qcrs = QgsCoordinateReferenceSystem()
    qcrs.createFromOgcWmsCrs(crs)
    layer.setCrs(qcrs)

# Create spatial index
# NOTE(review): this tests the truthiness of capabilities() and the flag
# independently; a provider capability check is normally written as
# `provider.capabilities() & QgsVectorDataProvider.CreateSpatialIndex` --
# confirm intent before relying on it.
if csi and provider.capabilities() and QgsVectorDataProvider.CreateSpatialIndex:
    if not provider.createSpatialIndex():
        progress.setText(u'Cannot create spatial index for layer : %s' % layer.name())

# Calculate extent
if ce:
    layer.updateExtents()

# Set layer metadata
if lt:
    layer.setTitle(lt)
if la:
    layer.setAbstract(la)

# Save style as default
if ss:
    layer.saveDefaultStyle()
|
UTF-8
|
Python
| false | false | 2,014 |
1,236,950,595,375 |
50f0b12ed632eb168f7f82da8e53854d2771b009
|
585ae7cc811433953f518891d745242891072b3a
|
/src/nox/apps/coreui/authui.py
|
d9c284684bee62f119bd34e10fb8903ee630c387
|
[
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-other-permissive",
"GPL-3.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
florence-amaddine/snac-nox
|
https://github.com/florence-amaddine/snac-nox
|
2a33939ba128f47f8ad35bf0b793e3457f51c5c0
|
e45bf1f0f35821c447f288701be7851d941b4c2e
|
refs/heads/master
| 2020-05-30T11:54:00.108330 | 2011-03-25T15:25:18 | 2011-03-25T15:25:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2008 (C) Nicira, Inc.
#
# This file is part of NOX.
#
# NOX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NOX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NOX. If not, see <http://www.gnu.org/licenses/>.
from nox.apps.pyrt.pycomponent import *
from nox.lib.core import *
from nox.lib.directory import AuthResult
from nox.lib import config
from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web import server
from twisted.web.util import redirectTo
from twisted.python import log
from twisted.python.failure import Failure
from mako.template import Template
from mako.lookup import TemplateLookup
from nox.apps.directory.directorymanager import directorymanager
import os
import types
import urllib
import coreui
# Twisted hardcodes the string TWISTED_SESSION as the cookie name.
# This hack let's us append a Nicira string to that cookie name.
# In the future, we may want to override Request.getSession()
# as implemented in twisted/web/server.py to completely remove
# TWISTED_SESSION from the cookie name.
# NOTE: spaces in the cookie name are incompatible with Opera
def get_current_session(request):
    """Return the Twisted session for *request* under a Nicira cookie name.

    Twisted derives the session cookie name from request.sitepath, so the
    sitepath is swapped for a fixed marker while getSession() runs and then
    put back, giving the UI its own cookie namespace.
    """
    saved_sitepath = request.sitepath
    request.sitepath = ["Nicira_Management_Interface"]
    session = request.getSession()
    request.sitepath = saved_sitepath
    return session
# Canonical names of the built-in ("immutable") roles.  They are created
# and registered at bootstrap (see authui.setup_immutable_roles) and cannot
# be edited through the UI; capability registrations reference them by name.
all_immutable_roles = ["Superuser",
                       "Policy Administrator",
                       "Network Operator",
                       "Security Operator",
                       "Viewer",
                       "No Access" ]

class UnknownCapabilityError(Exception):
    """Raised when a capability name has not been registered with Capabilities."""
    pass

class InvalidRoleError(Exception):
    """Raised for an unknown role name or a role not allowed in context."""
    pass
class authui(Component):
    """NOX component wiring authentication/authorization into the web UI.

    On bootstrap completion it installs the login/logout/denied/server_error
    resources into coreui and registers the built-in immutable roles.
    """
    def __init__(self, ctxt):
        Component.__init__(self, ctxt)
        self.coreui = None  # resolved later, in install()
        self.directorymanager = self.resolve(directorymanager)
    def setup_immutable_roles(self):
        """Create and register the immutable roles listed in all_immutable_roles."""
        # Create and register the immutable roles
        for name in all_immutable_roles:
            if name == "Superuser":
                r = SuperuserRole(name)
            elif name == "No Access":
                r = NoAccessRole(name)
            else:
                r = Role(name, immutable=True)
            Roles.register(r)
        self.coreui.authui_initialized = True
    def bootstrap_complete_callback(self, *args):
        # if these resources are installed within install(),
        # they are not able to resolve authui
        self.coreui.install_resource("/login", LoginRes(self))
        self.coreui.install_resource("/logout", LogoutRes(self))
        self.coreui.install_resource("/denied", DeniedRes(self))
        self.coreui.install_resource("/server_error", ServerErrRes(self))
        self.setup_immutable_roles()
        return CONTINUE
    def install(self):
        self.coreui = self.resolve(str(coreui.coreui))
        self.register_for_bootstrap_complete(self.bootstrap_complete_callback)
    def requestIsAuthenticated(self, request):
        """Return True if the request's session carries authenticated roles.

        When the directory manager cannot authenticate at all, a synthetic
        Superuser session ("assumed-admin") is installed so the UI stays
        usable without a login backend.
        """
        session = get_current_session(request)
        if hasattr(session, "roles"):
            return True
        if not self.directorymanager.supports_authentication():
            session.requestIsAllowed = requestIsAllowed
            user = User("assumed-admin", set(("Superuser",)))
            session.user = user
            roles = [ Roles.get(r) for r in user.role_names ]
            session.roles = roles
            session.language = "en"
            return True
        return False
    def getInterface(self):
        return str(authui)
class Capabilities:
"""Stores and provides info on entire set of defined capabilities"""
def __init__(self):
self._dict = {}
def register(self, name, description, immutable_roles=None):
"""Register a capability.
Capabilities used to control visibity and actions in the UI should
be registered using this method in the component's install() method.
Arguments are:
name: The name of the capability. This is the string that will
be used to refer to the capability subsequently in tests,
etc.
description: A user-readable description of the capability.
This will be displayed in the role definition UI to
assist the user in determining the appropriate capabilities
to give to the role.
immutable_roles: A list of the names of immutable roles that
should have this capability. Immutable roles are a default
set of roles provided by Nicira which the user can not
edit. The capabilities for each of those roles are built
from these lists. This is needed because the capability
set may change over time and the editable roles will always
assume a role does not have a capability if the user did
not specifically set it. Note it is not neccesary to
include the 'Superuser' role in this list as the
implementation gurantees that role will have all
capabilities."""
if immutable_roles == None:
immutable_roles = []
else:
for r in immutable_roles:
if r not in all_immutable_roles:
raise InvalidRoleError, "Only roles in authui.all_immutable_roles are appropiate."
self._dict[name] = (description, immutable_roles)
def has_registered(self, name):
return self._dict.has_key(name)
def list(self):
return self._dict.keys()
def describe(self, name):
try:
return self._dict[name][0]
except KeyError, e:
raise UnknownCapabilityError, str(name)
def immutable_roles(self, name):
try:
return self._dict[name][1]
except KeyError, e:
raise UnknownCapabilityError, str(name)
# Following ensures there is only ever one capabilities manager...
Capabilities = Capabilities()
class Role:
"""Named set of capabilities"""
def __init__(self, name, immutable=False):
self.name = name
self._capabilities = set()
if immutable:
self._immutable = False # is overridden below...
if not self.name in all_immutable_roles:
raise InvalidRoleError, "Only roles in authui.all_immutable_roles can be set immutable."
for c in Capabilities.list():
if name in Capabilities.immutable_roles(c):
self.add_capability(c)
self._immutable = immutable
def capabilities(self):
return self._capabilities
def has_capability(self, name):
return name in self._capabilities
def has_all_capabilities(self, capability_set):
return len(capability_set.difference(self._capabilities)) == 0
def has_anyof_capabilities(self, capability_set):
return len(capability_set.intersection(self._capabilities)) != 0
def is_immutable(self):
return self._immutable
def add_capability(self, name):
if not self._immutable:
if Capabilities.has_registered(name):
self._capabilities.add(name)
else:
raise UnknownCapabilityError, "Name=%s" % name
def remove_capability(self, name):
if not self._immutable:
try:
self._capabilities.remove(name)
except KeyError, e:
pass
class SuperuserRole(Role):
    """Role guaranteed to always have all capabilities.

    Overrides every query to answer affirmatively regardless of the
    stored capability set, so newly registered capabilities are covered
    automatically.
    """
    def __init__(self, name):
        Role.__init__(self, name, True)
    def capabilities(self):
        # Report the full registry, not the (stale) stored subset.
        return Capabilities.list()
    def has_capability(self, name):
        return True
    def has_all_capabilities(self, capability_set):
        return True
    def has_anyof_capabilities(self, capability_set):
        return True

class NoAccessRole(Role):
    """Role guaranteed to never have any capabilities.

    Mirror image of SuperuserRole: every query answers negatively.
    """
    def __init__(self, name):
        Role.__init__(self, name, True)
    def capabilities(self):
        return []
    def has_capability(self, name):
        return False
    def has_all_capabilities(self, capability_set):
        return False
    def has_anyof_capabilities(self, capability_set):
        return False
class Roles:
    """Registry of all Role instances, keyed by role name."""
    def __init__(self):
        self._roles = {}
    def register(self, role):
        self._roles[role.name] = role
    def has_registered(self, role_name):
        return role_name in self._roles
    def get(self, role_name):
        """Return the Role named *role_name*; raise InvalidRoleError if absent."""
        if role_name not in self._roles:
            raise InvalidRoleError(role_name)
        return self._roles[role_name]
    def names(self):
        return [role.name for role in self._roles.values()]
    def instances(self):
        return self._roles.values()

# Following ensures there is only ever one roles manager...
Roles = Roles()
class User:
    """Authenticated user record: username, role names, preferred language.

    Fix: the previous signature used the mutable default ``role_names=set()``,
    which is evaluated once and shared by every instance constructed without
    an explicit argument — mutating one user's role set would have changed
    them all.  A ``None`` sentinel with a per-instance fresh set removes the
    aliasing while keeping the same call signature for existing callers.
    """
    def __init__(self, username=None, role_names=None, language=None):
        # username: login name string, or None for an anonymous record.
        self.username = username
        # language: preferred UI language code (e.g. "en"), or None.
        self.language = language
        # role_names: set of role-name strings; a new set per instance.
        self.role_names = set() if role_names is None else role_names
class InvaidAuthSystemError(Exception):
    """Apparently intended for an unrecognized authentication backend.

    NOTE(review): the class name is misspelled ("Invaid"); it is kept
    as-is because external code may catch it by this exact name.  It is
    not raised anywhere in this file.
    """
    pass

def redirect(request, uri):
    """Issue an HTTP redirect of *request* to *uri* via twisted's redirectTo."""
    # TBD: make handle child links automatically, normalize URI, etc.
    return redirectTo(uri, request)
def requestIsAllowed(request, cap):
    """Return True if the session behind *request* may exercise *cap*.

    cap is either None (no capability required) or a set of capability
    names; access is granted when any single role of the session holds
    all of them.  A session without roles is always denied.
    """
    session = get_current_session(request)
    _missing = object()
    roles = getattr(session, "roles", _missing)
    if roles is _missing:
        log.err("Forbidding access due to unknown role in requestIsAllowed()",
                system="authui")
        return False
    if cap is None:
        return True
    for role in roles:
        if role.has_all_capabilities(cap):
            return True
    return False
class MissingTemplateError(Exception):
pass
class UIResource(Resource):
    """UI resource class handling template search and authentication.

    This is a subclass of twisted.web.resource.Resource that ensures that
    the current session is associated with a user with the required
    capabilities set to interact with the resource.  It is intended to be
    subclassed in the same way as its twisted parent class.  Similar to
    the way the Twisted Resource class uses the isLeaf class variable,
    subclasses of this class can use two class variables to control
    authentication:

        noUser: (default=False) if True, no authentication will be
            done for this resource.

        required_capabilities: (default=set()) a set object of capabilities
            the user must hold to interact with this resource.
            Capabilities in the list are supplied as strings naming the
            capability and must also be registered with the capabilities
            manager (authui.Capabilities).  Alternatively, can be a
            dictionary keyed by request method containing a set of
            capabilities for each request method implemented by the
            resource.  If a method is implemented but has no entry
            in the dictionary, it is assumed that no capabilities
            are required.

    Note that the capability checking is primarily a convenience to
    handle the most common cases for simple resources.  For more
    complex situations such as a resource that parses request.postpath
    and thus supports many different URIs, it may be appropriate for the
    method specific render methods to check capabilities directly.

    This class also sets up component specific template search paths,
    and provides a convenience function to render templates with global
    site configuration information passed into the template using the
    contents of the coreui component siteConfig dictionary."""
    noUser = False
    required_capabilities = set()
    template_search_path = [ coreui.coreui ]
    def _tmpl_paths(self, component, path_type):
        """Derive template/module search paths from a component's interface name."""
        # TBD: come up with the right solution for supporting finding
        # TBD: templates after install.  Install dir templates should
        # TBD: be lower priority than build dir templates so we use
        # TBD: buildir templates in preference.
        interface = component.getInterface().split(".")
        pkgpath = interface[0]
        for p in interface[1:-2]:
            pkgpath = os.path.join(pkgpath, p)
        return [ os.path.join(pkgpath, path_type, interface[-1]) ]
    def __init__(self, component):
        Resource.__init__(self)
        self.component = component
        self.coreui = component.resolve(str(coreui.coreui))
        self.authui = component.resolve(str(authui))
        # TBD: select base_module_dir based on whether started in build
        # TBD: directory or not.
        started_in_build_dir = True
        if started_in_build_dir:
            i = 0
        else:
            i = 1
        base_module_dir = self._tmpl_paths(self.component, "mako_modules")[i]
        base_template_dirs = []
        base_template_dirs.extend(self._tmpl_paths(self.component, "templates"))
        for o in self.template_search_path:
            if str(o) == self.component.getInterface():
                continue
            c = self.component.resolve(str(o))
            base_template_dirs.extend(self._tmpl_paths(c, "templates"))
        # One TemplateLookup per supported language; each search path lists
        # the language-specific dir, then an English fallback, then the
        # bare template dir.
        self.tlookups = {}
        for l in coreui.supported_languages:
            template_dirs = []
            module_dir = os.path.join(base_module_dir, l)
            for d in base_template_dirs:
                template_dirs.append(os.path.join(d, l))
                if l != "en":
                    template_dirs.append(os.path.join(d, "en"))
                template_dirs.append(d)
            self.tlookups[l] = TemplateLookup(directories=template_dirs,
                                              module_directory=module_dir,
                                              output_encoding='utf-8',
                                              encoding_errors='replace')
    def _lang_from_request(self, request):
        """Pick a language code from the request's Accept-Language header.

        Falls back to "en" when no header is present or the best match is
        not in coreui.supported_languages.
        """
        languages = []
        if request.getHeader("Accept-Language") is None:
            return "en"
        for l in request.getHeader("Accept-Language").split(","):
            t = l.split(";q=")
            name = t[0].strip()
            if len(t) > 1:
                qvalue = float(t[1])
            else:
                qvalue = 1.0
            languages.append((name, qvalue))
        # Highest quality value first.
        languages.sort(key=lambda x: x[1], reverse=True)
        lang = "en"
        base_lang = lang
        if len(languages) > 0:
            t = languages[0][0].split("-")
            if len(t) > 1:
                lang = t[0].lower() + "_" + t[1].upper()
            else:
                lang = t[0].lower()
            base_lang = t[0].lower()
        if base_lang not in coreui.supported_languages:
            lang = "en" # This had better always be supported.
        return lang
    def _lookup_template(self, lang, name):
        """Find template *name* for *lang*, retrying the base language
        (e.g. "pt" for "pt_BR") before raising MissingTemplateError."""
        if self.tlookups.has_key(lang):
            return self.tlookups[lang].get_template(name)
        else:
            t = lang.split("_")
            if len(t) > 1:
                return self._lookup_template(t[0], name)
            raise MissingTemplateError, "lang=%s, name=%s" % (lang, name)
    def pp_empty(self, pcstr, argd):
        """Verify a null string at a location in the postpath.

        No argd is required.  Typically this would be used at
        the end of a rule to verify a path ends with a trailing
        slash."""
        if pcstr == "":
            return True
        return None
    def pp_opt_empty(self, pcstr, argd):
        """Verify an optional null string at a location in the postpath.

        This is exactly the same as pp_empty() except that it still
        succeeds if pcstr is None, indicating the path component
        does not exist.  It returns False in this case.  This can be
        used to allow a single rule to cover both trailing slash and
        non-trailing slash cases."""
        if pcstr == None:
            return False
        if pcstr == "":
            return True
        return None
    def pp_static_str(self, pcstr, argd):
        """Verify a static string at a location in the postpath.

        The argd dictionary must contain the following argument:
            str: The string to match against
        The argd dictionary may contain the following argument:
            foldcase: If true, both str in argd and the pcstr are
                converted to lowercase before comparison.
        The value of the string as it existed in the path component
        is returned on success."""
        if pcstr == None:
            return None
        try:
            foldcase = argd["foldcase"]
        except KeyError:
            foldcase = False
        if foldcase:
            if pcstr.lower() == argd["str"].lower():
                return pcstr
        else:
            if pcstr == argd["str"]:
                return pcstr
        return None
    def parse_postpath(self, request, rules):
        """Parse the postpath according to a set of rules.

        NOTE: This is only intended for use by subclasses!

        The rules are lists of lists of the form:
            [ <rulename>, [[<path component name>, <check method>, <argd>]...]]
        The <check method> should have the signature:
            method(self, path_component_str, argd)
        The method should return the validated value for this component of
        the path or None, indicating the test failed.  The argd
        parameter is an additional dictionary of arguments for the method.
        The method can use this as required.  Its docstring should indicate
        the valid values a caller can set in it.  If it doesn't require any
        additional arguments, the rule can specify it as None.

        This method will test each rule in the order they are specified
        and return a 2-tuple consisting of the name of the first rule
        that matched (or None if no rule matched) and a dictionary
        containing the validated values from each path component keyed
        by the path component name (or a failure message to be passed to
        badRequest() if no rule matched).

        If there are more path component checks than actual path components,
        the remaining checks will be called with a path_component_str
        of None.  The check method should be prepared for this.  If
        all path component checks pass but there are still path component
        values remaining the test will be considered to have failed."""
        failure_msg = ["The received request was:\n\n ",
                       request.method, " ", request.path,
                       "\n\nIt was matched against the following possibilities",
                       " and each failed for the reason given.\n"]
        for r in rules:
            rulename, checks = r
            i = 0
            resultd = {}
            failed_component = None
            failure_msg.append("\n - /")
            failure_msg.append("/".join(request.prepath))
            for c in checks:
                pcname, check_method, argd = c
                # Build the human-readable path for the failure report.
                if check_method == self.pp_empty:
                    failure_msg.append("/")
                elif check_method == self.pp_opt_empty:
                    pass
                else:
                    failure_msg.append("/")
                    failure_msg.append(pcname)
                # Once a component failed, keep looping only to finish
                # the reconstructed path string.
                if failed_component != None:
                    continue
                try:
                    pcstr = request.postpath[i]
                except IndexError:
                    pcstr = None
                pcvalue = check_method(pcstr, argd)
                if pcvalue == None:
                    if check_method == self.pp_opt_empty:
                        failed_component = "end of expected request."
                    elif check_method == self.pp_empty:
                        failed_component = "at required trailing slash of request."
                    else:
                        failed_component = pcname
                    continue
                else:
                    resultd[pcname] = pcvalue
                    i += 1
            if failed_component != None:
                failure_msg.append("\n Failed at ")
                failure_msg.append(failed_component)
                failure_msg.append("\n")
            elif i >= len(request.postpath):
                # Every check passed and the postpath is fully consumed.
                return (rulename, resultd)
            else:
                failure_msg.append("\n Failed due to contents beyond the end of the expected request.")
        return (None, "".join(failure_msg))
    def render_tmpl(self, request, name, *arg, **data):
        """Render template *name* in the session's language, passing the
        site configuration, request and session to the template."""
        session = get_current_session(request)
        lang = getattr(session, "language", None)
        if lang == None:
            lang = self._lang_from_request(request)
            # This may be overridden after login based on user preferences
            session.language = lang
        tmpl = self._lookup_template(lang, name)
        return tmpl.render(siteConfig=self.coreui.siteConfig,
                           request=request, session=session, *arg, **data)
    def getChild(self, name, request):
        # Treat a trailing slash ('' child) as the resource itself.
        if name == '':
            return self
        return Resource.getChild(self, name, request)
    def _authredirect(self, request):
        """Return a URI to redirect to when access is not allowed, else None."""
        if self.noUser: # If resource doesn't require user at all...
            get_current_session(request).requestIsAllowed = requestIsAllowed
            return None
        if not self.authui.requestIsAuthenticated(request):
            get_current_session(request).requestIsAllowed = requestIsAllowed
            return "/login?last_page=" + urllib.quote(request.uri)
        if type(self.required_capabilities) == types.DictionaryType:
            try:
                cs = self.required_capabilities[request.method]
            except KeyError, e:
                cs = None
        else:
            cs = self.required_capabilities
        if cs != None and not isinstance(cs, set):
            e = "Invalid required_capabilities on object: %s" % repr(self)
            log.err(e, system="authui")
            # NOTE(review): this returns a tuple, while every other exit
            # returns a string or None; render() passes the value straight
            # to redirect(), so this branch likely misbehaves — probably
            # meant to be:  return "/server_error"
            return (False, "/server_error")
        if not requestIsAllowed(request, cs):
            return "/denied"
        return None
    def render(self, request):
        # Gate every render through the auth check; remember where the
        # user was heading so login can return them there.
        session = get_current_session(request)
        redirect_uri = self._authredirect(request)
        if redirect_uri != None:
            session.return_uri = request.uri
            return redirect(request, redirect_uri)
        else:
            return Resource.render(self, request)
class UISection(UIResource):
    """Class representing a top-level section of the site.

    A top-level section of the site is one that is available on the toolbar
    under the main site banner.  Typically this class will be subclassed
    for each top-level section and an instance of that subclass will be
    registered to show up in the UI with the coreui component
    install_section() method."""
    def __init__(self, component, name, icon="fixmeButtonIcon"):
        """Initialize a top-level section.

        Arguments are:
            name: the name of the section.  This should be a single word
                and will be used as the text on the toolbar (if shown)
                and the base URI path for the section.
            icon: uri path to an icon to be shown in the toolbar.  This is
                not currently used but is very likely to be used in the
                near future.  If no icon is specified a default one will
                be used.

        A subclass is free to register additional resources as children
        of itself before and after registering the section with the coreui
        code using the coreui component install_section() method."""
        UIResource.__init__(self, component)
        self.section_name = name
        self.section_icon = icon
        self.redirect_subpath = None  # set via set_default_subpath()
    def set_default_subpath(self, subpath):
        """Specify the default URI subpath under this section.

        By default, if the user performs a GET request on the section
        top-level URI, this is the page where the user will be redirected."""
        self.redirect_subpath = subpath
    def redirect_URI(self, request):
        # Subclasses can override this if they want to specify an
        # alternative method of determining the URI to redirect to.
        if self.redirect_subpath != None:
            return request.childLink(self.redirect_subpath)
        else:
            return "/server_error"
    def render_GET(self, request):
        # Subclasses can override this if they have an alternative
        # method of providing data for a GET request on the top-level
        # section URI.
        return redirect(request, self.redirect_URI(request))
class ServerErrRes(UIResource):
    """Renders the static /server_error page; requires no authenticated user."""
    isLeaf = True
    noUser = True
    def __init__(self, component):
        UIResource.__init__(self, component)
    def render_GET(self, request):
        return self.render_tmpl(request, "server_error.mako")

class DeniedRes(UIResource):
    """Renders the static /denied (access denied) page; requires no user."""
    isLeaf = True
    noUser = True
    def __init__(self, component):
        UIResource.__init__(self, component)
    def render_GET(self, request):
        return self.render_tmpl(request, "denied.mako", last_page="")
class LoginRes(UIResource):
    """The /login resource: shows the login form and performs authentication.

    POSTs are authenticated asynchronously through the directory manager
    (a twisted Deferred); the response is produced in _auth_callback /
    _auth_errback.  When the directory manager cannot authenticate at all,
    both GET and POST simply redirect onward.
    """
    isLeaf = True
    noUser = True
    def __init__(self, component):
        UIResource.__init__(self, component)
        self.coreui = self.component.resolve(str(coreui.coreui))
        self.directorymanager = self.component.resolve(str(directorymanager))
        if self.directorymanager is None:
            raise Exception("Unable to resolve required component '%s'"
                            %str(directorymanager))
    def render_GET(self, request):
        if not self.directorymanager.supports_authentication():
            # No auth backend: skip the form and bounce to the target page.
            # NOTE(review): this writes the redirect body then returns None
            # from render_GET rather than a string — verify twisted accepts
            # this; request.finish() is never called on this path.
            uri = self._return_uri(request,get_current_session(request))
            request.write(redirect(request,uri))
            return
        # Expire any existing session so a fresh login starts clean.
        get_current_session(request).expire()
        return self.render_tmpl(request, "login.mako", login_failed=False, last_page="")
    def _return_uri(self, request, session):
        """Pick the post-login destination: the session's saved return_uri,
        else a sane last_page argument, else the site default."""
        try:
            return_uri = session.return_uri
            del(session.return_uri)
        except AttributeError, e:
            last_page = urllib.unquote(request.args.get("last_page", [""])[0])
            # Never bounce back into the auth pages themselves.
            if last_page not in ("", "/login", "/logout", "/denied", "/server_error"):
                return_uri = last_page
            else:
                return_uri = self.coreui.default_uri
        return return_uri
    def render_POST(self, request):
        if not self.directorymanager.supports_authentication():
            uri = self._return_uri(request,get_current_session(request))
            request.write(redirect(request,uri))
            return
        username = request.args["username"][0]
        password = request.args["password"][0]
        # Asynchronous credential check; response written by the callbacks.
        d = self.directorymanager.simple_auth(username, password)
        d.addCallback(self._auth_callback, request)
        d.addErrback(self._auth_errback, request)
        return server.NOT_DONE_YET
    def _auth_errback(self, failure, request):
        # Backend failure: treat as a failed login and re-show the form.
        log.err("Failure during authentication: %s" %failure)
        get_current_session(request).expire()
        request.write(self.render_tmpl(request, "login.mako", login_failed=True, last_page=request.args.get("last_page", [""])[0]))
        request.finish()
    def _auth_callback(self, res, request):
        """Handle an AuthResult: populate the session on success, else
        re-render the login form with an error."""
        if res.status == AuthResult.SUCCESS:
            session = get_current_session(request)
            session.user = User(res.username, set(res.nox_roles))
            try:
                session.roles = [Roles.get(r) for r in session.user.role_names]
            except InvalidRoleError, e:
                log.err("Failed to resolve user role: %s" %e)
                request.write(self.render_tmpl(request, "server_error.mako"))
                request.finish()
                return
            if session.user.language != None:
                session.language = session.user.language
            else:
                session.language = self._lang_from_request(request)
            request.write(redirect(request, self._return_uri(request,session)))
        else:
            request.write(self.render_tmpl(request, "login.mako", login_failed=True, last_page=request.args.get("last_page", [""])[0]))
        request.finish()
class LogoutRes(UIResource):
    """The /logout resource: expires the session and shows the logout page."""
    isLeaf = True
    noUser = True
    def __init__(self, component):
        UIResource.__init__(self, component)
    def render_GET(self, request):
        # Dropping the session is what actually logs the user out.
        session = get_current_session(request)
        session.expire()
        return self.render_tmpl(request, "logout.mako", last_page="")
def getFactory():
    """Factory entry point used by the NOX runtime to create this component."""
    class Factory:
        def instance(self, ctxt):
            # Called by NOX with the container context for the component.
            return authui(ctxt)
    return Factory()
|
UTF-8
|
Python
| false | false | 2,011 |
5,849,745,483,485 |
7236f0bc92f890b71bfca19a72c4444df66091cb
|
98962a2a3ab97ecdff576af2092bc7dcaf654bff
|
/src/pyvision/data/__init__.py
|
428d39b10ffab3d567b2d9a48807664f03bf9637
|
[
"BSD-3-Clause"
] |
permissive
|
Pictobar/pyvision
|
https://github.com/Pictobar/pyvision
|
6112d213f78ee0c41d55f93f3f4d83935ca27574
|
0d6d95d0d518dd4535f9472e5be1622a11427278
|
refs/heads/master
| 2021-01-16T19:01:16.397508 | 2013-10-13T21:00:19 | 2013-10-13T21:00:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
This module contains some standard tests that can be performed with
the test data distributed with pyvision.
'''
import pyvision as pv
import os.path
from pyvision.analysis.FaceAnalysis.EyesFile import EyesFile
# Ground-truth eye coordinates for the csuScrapShots images, used by
# genderClassifier() to geometrically normalize faces before training.
SCRAPS_EYES = EyesFile(os.path.join(pv.__path__[0],'data','csuScrapShots','coords.txt'))

# Common test images
AIRPLANE = os.path.join(pv.__path__[0],'data','misc','airplane.jpg')
BABOON = os.path.join(pv.__path__[0],'data','misc','baboon.jpg')
FRUITS = os.path.join(pv.__path__[0],'data','misc','fruits.jpg')
LENA = os.path.join(pv.__path__[0],'data','misc','lena.jpg')
LOGO = os.path.join(pv.__path__[0],'data','misc','logo.jpg')
# Single test frame and short sample video shipped with pyvision.
TAZ_IMAGE = os.path.join(pv.__path__[0],'data','test','TAZ_0010.jpg')
TAZ_VIDEO = os.path.join(pv.__path__[0],'data','test','TazSample.m4v')
def genderClassifier(clsfy, ilog=None):
    '''
    genderClassifier takes a classifier as an argument and will use the
    csuScrapShot data to perform a gender classification test on that
    classifier.

    These three functions will be called::

        for im in training_images:
            clsfy.addTraining(label,im,ilog=ilog)

        clsfy.train(ilog=ilog)

        for im in testing_images:
            clsfy.predict(im,ilog=ilog)

    label = 0 or 1 (0=Female,1=Male)
    im is a 64x64 pyvision image that is normalized to crop the face

    Output of predict should be a class label (0 or 1)

    @returns: the success rate for the testing set.
    '''
    filename = os.path.join(pv.__path__[0],'data','csuScrapShots','gender.txt')
    f = open(filename,'r')
    image_cache = []
    examples = []
    # Each line of gender.txt is "<image_name> <F|M>".
    for line in f:
        im_name, class_name = line.split()
        if class_name == 'F':
            class_name = 0
        else:
            class_name = 1
        long_name = os.path.join(pv.__path__[0],'data','csuScrapShots',im_name)
        leye,reye = SCRAPS_EYES.getEyes(im_name)[0]
        im = pv.Image(long_name)
        image_cache.append(im)
        # Align the face so the eyes land at fixed points in a 64x64 crop.
        im = pv.AffineFromPoints(leye,reye,pv.Point(22,27),pv.Point(42,27),(64,64)).transformImage(im)
        #im = pv.Image(im.asPIL().resize((64,64)))
        examples.append([class_name,im,im_name])
    # Fixed 103/remainder train/test split (no shuffling).
    training = examples[:103]
    testing = examples[103:]
    # NOTE(review): training is already examples[:103]; the extra [:103]
    # slice below is redundant.
    for each in training[:103]:
        clsfy.addTraining(each[0],each[1],ilog=ilog)
    clsfy.train(ilog=ilog)
    # NOTE(review): `table` and `values` are never populated — the table
    # logged at the end is always empty; dead code left for reference.
    table = pv.Table()
    values = {0:[],1:[]}
    correct = 0
    total = 0
    for each in testing:
        label = clsfy.predict(each[1],ilog=ilog)
        total += 1
        if label == each[0]:
            correct += 1
    rate = float(correct)/total
    if ilog: ilog.table(table)
    return rate
if __name__ == "__main__":
    # Smoke test: run the gender benchmark with a linear and an RBF SVM.
    from pyvision.vector.SVM import SVM
    svm = SVM(kernel='LINEAR',random_seed=30)
    ilog = pv.ImageLog()
    # NOTE(review): an ImageLog is created but ilog=None is passed, so no
    # log entries are recorded for either run; pass ilog=ilog to capture them.
    print "SVM rate:",genderClassifier(svm,ilog=None)
    svm = SVM(kernel='RBF',random_seed=30)
    ilog = pv.ImageLog()
    print "SVM rate:",genderClassifier(svm,ilog=None)
    ilog.show()
|
UTF-8
|
Python
| false | false | 2,013 |
16,398,185,166,263 |
9ff1b31ef230fc157bfb9006c984f4c9c7f833bf
|
0d06c301a7eb936830b629d7400d0cc523e8e621
|
/month_stats.py
|
bf5fd64bcf851e10bb56a2549a2a5547b79b52e4
|
[] |
no_license
|
ramalho/isarch
|
https://github.com/ramalho/isarch
|
59e732c3420cdeea38c31ce14d96c910562f1e14
|
7db251d7a8c902b08cd2e5e6409727c27a0deb4f
|
refs/heads/master
| 2020-12-29T01:42:17.952985 | 2011-04-18T18:46:01 | 2011-04-18T18:46:01 | 1,626,544 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Directory containing one "YYYY-MM.email" mail dump per month.
DEST_DIR = '../data/logs/'

import os
import glob
from datetime import datetime

import logging
logging.basicConfig(filename='month_stats.log',level=logging.DEBUG)

# Examples of the Date: header shapes this script must cope with:
'''
Date: Mon, 22 Jun 92 20:37:48 -0400
Date: Wed, 22 Jul 92 06:28:49 GMT
Tue, 13 Apr 1993 22:40:00 +0000
'''
# Prefixes of the two ValueError messages datetime.strptime can raise.
NO_MATCH_MSG = 'does not match format'
REMAINS_MSG = 'unconverted data remains:'

def parse_date_format(s, format):
    """Parse *s* with strptime *format*, tolerating trailing junk.

    When strptime complains about unconverted trailing data (for example a
    timezone suffix the format does not cover), the remainder quoted in the
    error message is stripped from the string and parsing is retried once.
    Any other parse failure propagates unchanged.
    """
    try:
        return datetime.strptime(s, format)
    except ValueError as err:
        message = str(err)
        if not message.startswith(REMAINS_MSG):
            raise
        trailing = message.replace(REMAINS_MSG, '').strip()
        return datetime.strptime(s.replace(trailing, '').strip(), format)
def parse_date(s):
    """Parse a cleaned-up Date: header string against a long list of
    observed formats, returning the first successful datetime.

    The formats are tried in order, so more specific patterns come first;
    parse_date_format() additionally tolerates trailing junk such as
    timezone suffixes.  Raises ValueError when nothing matches.
    """
    formats = [ '%a, %d %b %y %H:%M:%S', # Mon, 22 Jun 92 20:37:48 -0400
                '%a, %b %d, %Y at %I:%M %p', # Wed, May 12, 2010 at 9:55 AM
                '%a, %d %b %y %H:%M', # Thu, 2 Sep 93 14:59 GMT
                '%a %b %d, %Y %H:%M%p', #Sun Jul 3, 2005 0:59pm
                '%a, %d %b %Y %H:%M:%S', # Tue, 13 Apr 1993 22:40:00 +0000
                                         # Mon, 11 Jul 1994 07:29:00 +0000
                '%a %d %b %Y %H:%M:%S', # Mon 1 Nov 2010 20:55:22 -0700
                '%a, %d %b %Y', #Wed, 3 Sep 2003
                '%a, %d %b %Y %H:%M', # Tue, 31 Jan 1995 09:15 +0530 (GMT +0530)
                '%a, %B %d, %Y %H:%M', # Thu, October 28, 2010 14:16
                '%d %B %y, %H:%M:%S', # 16 September 92, 12:55:59 ITA
                '%d. %B %Y %H:%M', # 8. september 1999 12:18
                '%d %B %Y', # 12 October 2000
                '%d %b %y %H:%M:%S', # 24 Jan 95 10:02:57
                '%d %b %Y %H:%M', # 01 Jun 1993 11:44 +1000
                '%d %b %Y, %H:%M', # 07 OCT 1994, 12:19 CET
                '%Y-%m-%d %H:%M:%S', # 1995-02-28 09:46:14
                '%Y/%m/%d', # 2010/3/15
                '%Y%m%d', # 20010511
                '%A, %B %d, %Y %I:%M%p', # Tuesday, April 25, 1995 9:08AM
                '%A, %d %B, %Y, %H:%M', # Thursday, 28 October, 2010, 8:34
                '%A, %B %d, %Y %I:%M %p', # Monday, December 08, 1997 10:54 PM
                '%A, %B %d, %Y, %I:%M %p', # Thursday, October 28, 2010, 10:19 AM
                '%A, %B %d, %Y %H:%M:%S', # Friday, September 1, 1995 9:12:00 CET
                '%A %d %B %Y %H.%M', # Friday 19 November 1999 12.46
                '%A, %d. %B %Y %H:%S', # Wednesday, 8. January 1997 09:40
                '%A %d %b, %Y %H', # Wednesday 13 May, 1998 05:09
                '%A, %d-%b-%y %I:%M %p', # Tuesday, 16-Sep-97 04:43 PM
                '%m/%d/%y %I:%M %p', # 10/17/95 3:41 PM
                '%m/%d/%y %H:%M:%S', # 06/22/04 18:26:25=3D0D
                '%m/%d/%y %I:%M%p', # 6/14/96 9:31PM
                '%d/%m/%y %I:%M', #14/06/96 9:31 a
                '%d/%m/%Y %H:%M', #01/06/1999 06:16 ^YS
                '%d.%m.%Y %H:%M:%S', # 12.12.2003 11:41:21=3D0D
              ]
    for i, format in enumerate(formats):
        try:
            dt = parse_date_format(s, format)
        except ValueError as err:
            #logging.debug('TRIED: '+repr(err))
            # Only give up after the last candidate format has failed.
            if i == len(formats) - 1:
                msg = '"{0}" does not match any of the {1} expected date formats'
                raise ValueError(msg.format(s, len(formats)))
        else:
            #logging.debug('SOLVED: '+repr(dt))
            return dt
# Known-garbage Date: header prefixes that should be skipped rather than
# parsed: quoted-printable fragments (=XX escapes), localized weekday names,
# impossible years (01xx), and strings that are probably not dates at all.
DATE_IGNORE_LIST = ['1/1/4', 'Signature:', '---', '5 =F4=E5=E2=',
                    '15 =EE=EA=', 'maandag 22',
                    'Jeudi 14', 'Wed =3D', 'S=3D',
                    'Jueves 7', '5. j=FAl 2000', '6. j=FAl',
                    '13 Januari', '21 Desember', '19. j?l 1999',
                    '=3D20', 'M=3D', '=3D',
                    'Tue, 26 Jun 0103 07:14:06 +0000',
                    'Wed, 10 Nov 0100 13:59:46 +0100',
                    'Wed, 23 Jun 0100 04:19:24 PDT',
                    '15 =EC=E0=F0=F2=E0 1999',
                    # probably not date headers:
                    '29-31 October 2007', 'October 13th - 16th',
                    'November 5 to 9, 2007', 'November 26 to 30, 2007',
                    '11 - 12 April 2007', 'May 3 to 5, 2005', '6th-8th November 2000',
                    ' ', '"v934})',
                    ]
# 'Viernes', 'Lunes', 'quinta-feira', 'Mercredi',
m_stats = {}
for filename in sorted(glob.glob(DEST_DIR+'????-??.email')):
ym_file = os.path.split(filename)[1][:7]
with open(filename) as file:
print '*'*40, filename
for lin in file:
if lin.startswith('Date:'):
if '<' in lin or 'Apr 2011' in lin: continue
m_stats[ym_file] = m_stats.get(ym_file, 0) + 1
ds = lin[len('Date:'):]
ds = ds.replace('=3D2C', ' ') # what is this gremlin?
ds = ds.replace('=C2=A0 =C2=A0 =C2=A0 =C2=A0', ' ')
ds = ds.replace('marec', 'March')
ds = ds.replace('maart', 'March')
ds = ds.replace('apr=EDl', 'April')
ds = ds.replace('m=E1j', 'May')
ds = ds.replace('j=FAn', 'June')
ds = ds.replace('maandag', '')
ds = ds.replace('donderdag', '')
ds = ds.replace('Viernes', '')
ds = ds.replace('Mercredi', '')
ds = ds.replace('Lunes', '')
ds = ds.replace('Mardi', '')
ds = ds.replace('Lundi', '')
ds = ds.replace('Jeudi', '')
ds = ds.replace('Jueves', '')
ds = ds.replace('quinta-feira,', '')
ds = ds.replace(' de ', ' ')
ds = ds.replace('janu_r', 'January')
ds = ds.replace('Janvier', 'January')
ds = ds.replace('Januari', 'January')
ds = ds.replace('febru_r', 'February')
ds = ds.replace('februari', 'February')
ds = ds.replace('Julio', 'July')
ds = ds.replace('Juin', 'June')
ds = ds.replace('julho', 'July')
ds = ds.replace('Agosto', 'August')
ds = ds.replace('Mars', 'March')
ds = ds.replace('Octobre', 'October')
ds = ds.replace('okt=F3ber', 'October')
ds = ds.replace('Septembre', 'September')
ds = ds.replace('D=3DC3=3DA9cembre', 'December')
ds = ds.replace('Desember', 'December')
ds = ds.replace('Diciembre', 'December')
while ' ' in ds:
ds = ds.replace(' ',' ')
ds = ds.strip()
if ds == '' or ds == 'Mon, 11' or any(ds.startswith(garbage)
for garbage
in DATE_IGNORE_LIST):
logging.debug('UNABLE TO PARSE: '+lin.strip())
continue
dt = parse_date(ds)
assert dt.year > 1900, lin
ym = dt.strftime('%Y-%m')
flag = '=' if ym_file == ym else '!'
#print ym_file, flag, ds
for ym in sorted(m_stats):
print '{0}\t{1}'.format(ym, m_stats[ym])
|
UTF-8
|
Python
| false | false | 2,011 |
8,469,675,516,266 |
1250929b3cc0cccd56339ee9de67340eee62854c
|
6dd24190c3c2bc1261e1290b5e4e643d52750cb9
|
/simpleProgress.py
|
a981ad3d8c21f8e4165e420bfc088e2a45d5f934
|
[] |
no_license
|
wwward/simpleProgress
|
https://github.com/wwward/simpleProgress
|
536defb99d1e958d2187104479d54d2d36d279aa
|
62c7fa36b23cf7b602680bfcc85e873fe3e4ab05
|
refs/heads/master
| 2020-06-02T10:56:15.481438 | 2012-01-05T12:46:16 | 2012-01-05T12:46:16 | 3,109,905 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Jan 5, 2012
@author: wwward
'''
import serial, sys
class display:
def __init__(self, usb='/dev/ttyUSB0', baud=9600, debug=True):
try:
self.comm = serial.Serial(usb, baud)
if debug: print "serial init" + usb + baud
except serial.SerialException:
sys.exit("error initializing port")
self.comm.setRTS(0)
def clear(self):
self.comm.write(chr(0xFE)+chr(0x01))
def write(self,string):
self.comm.write(string)
def close(self):
print "Terminating: Closing serial port."
self.comm.close()
if __name__ == '__main__':
    # Usage: simpleProgress.py <port> <baud>; writes a test string.
    port, baud = sys.argv[1], sys.argv[2]
    lcd = display(port, baud)
    lcd.clear()
    lcd.write('foo')
    sys.exit(0)
|
UTF-8
|
Python
| false | false | 2,012 |
13,365,938,231,967 |
3948ced4bd84b3e5db1ef894e1bc505c3ea82cbd
|
0ee9d9bb2237e1452b40dad6babcbaf1b4ea866e
|
/services/sqlimport/service.py
|
7c8436570de44f8e1958abc7fbfe895aa7e4fa37
|
[
"MIT"
] |
permissive
|
UQ-UQx/old_injestor
|
https://github.com/UQ-UQx/old_injestor
|
50c92e2e58111c8716ec44505176f8e5eb635b19
|
e4add6d08239875af7b2669a29814e8679fffcb0
|
refs/heads/master
| 2021-06-10T08:45:33.888632 | 2014-09-04T06:48:26 | 2014-09-04T06:48:26 | 18,049,879 | 0 | 0 |
MIT
| false | 2021-06-01T21:53:16 | 2014-03-24T02:48:09 | 2015-04-20T05:58:07 | 2021-06-01T21:53:13 | 10,638 | 0 | 0 | 2 |
Python
| false | false |
#!/usr/bin/python
import hashlib
import os
import baseservice
basepath = os.path.dirname(__file__)
class SQLImport(baseservice.BaseService):
    """Service that ingests 'UQx-...-prod-analytics.sql' TSV dumps into SQL tables."""

    # Most recently constructed instance; read by the module-level status() helper.
    inst = None

    def __init__(self):
        SQLImport.inst = self
        super(SQLImport, self).__init__()
        self.status['name'] = "SQL Importer"
        self.sql_enabled = True
        self.sql_dbname = 'uqxdump'
        # Table currently being loaded; set by createtableandvalid().
        self.sql_tablename = ""
        self.initialize()

    def setup(self):
        # No setup beyond what BaseService.initialize() already does.
        pass

    def run(self):
        """Main loop: for each incoming file, create its table and load every row."""
        self.setaction('test running')
        self.status['status'] = 'running'
        #load a file
        while self.load_incoming_file():
            # Edge-environment dumps are ignored outright.
            if self.filename.find("prod-edge") > -1:
                self.movetofinish()
                continue
            columns = []
            # The first line of the dump is the tab-separated header row.
            for line in self.file:
                columns = line.split("\t")
                break
            self.setaction("creating table for "+self.filename)
            if self.createtableandvalid(self.filename,columns):
                self.setaction("loading data from "+self.filename)
                self.status['progress']['total'] = self.numlines()
                self.status['progress']['current'] = 0
                self.parselines(self.parseline)
            self.movetofinish()
            self.reset_cache('sql')

    def createtableandvalid(self, tablename, columns=[]):
        """Validate the dump filename, derive db/table names, and CREATE TABLE.

        Returns True only for filenames matching 'UQx-*-prod-analytics.sql'.
        NOTE(review): `columns=[]` is a mutable default (never mutated here,
        but fragile), and column names from the dump header are interpolated
        into the DDL unescaped -- assumes trusted input files; confirm.
        """
        isvalid = False
        if self.filename.find('UQx-') > -1 and self.filename.find('-prod-analytics.sql') > -1:
            isvalid = True
        if not isvalid:
            return isvalid
        # Remove date
        tablename = tablename[tablename.find('UQx-'):]
        # Remove Prod analytics
        tablename = tablename[:tablename.find('-prod-analytics.sql')]
        # Find the last dash, assume its the database, figure out the tablename
        usedb = tablename.split("-")
        tablename = usedb[len(usedb)-1]
        usedb = '_'.join(usedb)
        usedb = usedb.replace("_"+tablename,"")
        # Change to the database
        self.connect_to_sql(usedb)
        query = ""
        query += "CREATE TABLE IF NOT EXISTS "
        query += tablename
        query += " ( "
        for column in columns:
            # Default every column to varchar, widen known large/special columns.
            coltype = "varchar(255)"
            if column == "id":
                coltype = "int NOT NULL UNIQUE"
            if column == "key":
                # 'key' is a reserved word; store it as '_key'.
                column = "_key"
            if column == "state" or column == "content" or column == "meta":
                coltype = "longtext"
            if column == "goals" or column == "mailing_address":
                coltype = "text"
            query += column.replace("\n","")+" "+coltype+", "
        # Extra sha256-of-line column used to deduplicate re-imported rows.
        query += " xhash varchar(200) "
        query += ", UNIQUE (xhash)"
        query += " );"
        self.sql_query(query)
        self.sql_tablename = tablename
        return isvalid

    def parseline(self,line):
        """Insert one TSV data row; skips repeated header lines.

        NOTE(review): values are quoted by doubling '\"' and wrapping in
        double quotes rather than using parameterized queries -- assumes
        trusted dump content; confirm before exposing to untrusted input.
        """
        if line[:2] == 'id' or line[:4] == 'hash':
            return
        # Hash of the raw line (before cleanup) keys the dedup column.
        datahash = hashlib.sha256(line).hexdigest()
        line = line.replace("\n","")
        line = line.replace('"',"''")
        data = line.split("\t")
        data.append(datahash)
        insertdata = '"'+'","'.join(data)+'"'
        # REPLACE INTO so re-importing the same dump is idempotent per xhash.
        self.sql_query("REPLACE INTO "+self.sql_tablename+" VALUES ( "+insertdata+" );",True)
        self.status['progress']['current'] += 1
def name():
    """Registry name of this service module."""
    # The original wrapped the literal in a redundant str() call.
    return "sqlimport"


def status():
    """Return the live status dict of the most recent SQLImport instance."""
    return SQLImport.inst.status


def runservice():
    """Construct (and thereby start) the importer service."""
    return SQLImport()
|
UTF-8
|
Python
| false | false | 2,014 |
3,152,506,016,781 |
e522189ab9108ca439537e2c0660f6d37650ffb0
|
511bd641d8882963d9753d5b648200cb802497b8
|
/distributions/dbg/models/dd.py
|
047f6efae79cc28704abf8495680dab91d73956b
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
ericmjonas/distributions
|
https://github.com/ericmjonas/distributions
|
2b3b7c4f5526e6fb570d72ebf3910eb059a5e9ae
|
2fe47de8974961b17e9e4f1ae9fd854b72bd9f9f
|
refs/heads/master
| 2021-01-15T18:26:13.460388 | 2014-03-17T06:17:05 | 2014-03-17T06:17:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (c) 2014, Salesforce.com, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of Salesforce.com nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy
from distributions.dbg.special import log, gammaln
from distributions.dbg.random import sample_discrete, sample_dirichlet
from distributions.mixins import ComponentModel, Serializable
class DirichletDiscrete(ComponentModel, Serializable):
    """Dirichlet-discrete component model.

    Conjugate pair: a Dirichlet prior (pseudo-count vector ``alphas``)
    over the parameters of a discrete/categorical likelihood.  Each
    group's sufficient statistic is its vector of observed counts.
    """

    def __init__(self):
        # Prior pseudo-counts; populated by load().
        self.alphas = None

    @property
    def dim(self):
        """Number of discrete outcomes (length of the alpha vector)."""
        return len(self.alphas)

    def load(self, raw):
        self.alphas = numpy.array(raw['alphas'], dtype=numpy.float)

    def dump(self):
        return {'alphas': self.alphas.tolist()}

    #-------------------------------------------------------------------------
    # Datatypes

    Value = int

    class Group(object):
        """Per-component sufficient statistics: one count per outcome."""

        def __init__(self):
            self.counts = None

        def load(self, raw):
            self.counts = numpy.array(raw['counts'], dtype=numpy.int)

        def dump(self):
            return {'counts': self.counts.tolist()}

    #-------------------------------------------------------------------------
    # Mutation

    def group_init(self, group):
        group.counts = numpy.zeros(self.dim, dtype=numpy.int)

    def group_add_value(self, group, value):
        group.counts[value] += 1

    def group_remove_value(self, group, value):
        group.counts[value] -= 1

    def group_merge(self, destin, source):
        destin.counts += source.counts

    #-------------------------------------------------------------------------
    # Sampling

    def sampler_create(self, group=None):
        # Prior draw when no group is given, posterior draw otherwise.
        concentration = self.alphas if group is None else group.counts + self.alphas
        return sample_dirichlet(concentration)

    def sampler_eval(self, sampler):
        return sample_discrete(sampler)

    def sample_value(self, group):
        return self.sampler_eval(self.sampler_create(group))

    def sample_group(self, size):
        sampler = self.sampler_create()
        return [self.sampler_eval(sampler) for _ in xrange(size)]

    #-------------------------------------------------------------------------
    # Scoring

    def score_value(self, group, value):
        """Posterior predictive log-probability of *value* given the group.

        \cite{wallach2009rethinking} Eqn 4.
        McCallum, et. al, 'Rething LDA: Why Priors Matter'
        """
        numer = group.counts[value] + self.alphas[value]
        denom = group.counts.sum() + self.alphas.sum()
        return log(numer / denom)

    def score_group(self, group):
        """Log marginal likelihood of the group's counts under the prior.

        \cite{jordan2001more} Eqn 22.
        Michael Jordan's CS281B/Stat241B
        Advanced Topics in Learning and Decision Making course,
        'More on Marginal Likelihood'
        """
        a = self.alphas
        m = group.counts
        total = sum(gammaln(a[k] + m[k]) - gammaln(a[k]) for k in xrange(self.dim))
        return total + gammaln(a.sum()) - gammaln(a.sum() + m.sum())

    #-------------------------------------------------------------------------
    # Examples

    EXAMPLES = [
        {
            'model': {'alphas': [1.0, 4.0]},
            'values': [0, 1, 1, 1, 1, 0, 1],
        },
        {
            'model': {'alphas': [0.5] * 4},
            'values': [0, 1, 0, 2, 0, 1, 0],
        },
        #{
        #    'model': {'alphas': [0.5] * 256},
        #    'values': [0, 1, 3, 7, 15, 31, 63, 127, 255],
        #},
    ]


Model = DirichletDiscrete
|
UTF-8
|
Python
| false | false | 2,014 |
10,239,202,047,661 |
72f2e53ea072312849806ca93fd74360aae8e0da
|
362aa14a87c945ab26dad12bf69547cce79a83b4
|
/test.py
|
756bf0a921c8c5ae856768ba259a85b286b8a045
|
[] |
no_license
|
stormxuwz/ParkingLot
|
https://github.com/stormxuwz/ParkingLot
|
984749d71d8811b04cef82b7b498b228f3c1ed88
|
e5cbd4cc832fc9e910da0b5f35ed64a12a379686
|
refs/heads/master
| 2016-09-06T01:14:24.931016 | 2014-05-25T22:52:45 | 2014-05-25T22:52:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python
import sys
sys.path.append('/Users/XuWenzhao/w/rh/tinyos-main/support/sdk/python')
from TOSSIM import *
from ParkingLot import *
from random import *
from tinyos.tossim.TossimApp import *
def injection(node=3, Msgtype=0, data=10):
    """Deliver a ParkingLot AM packet carrying *data* of *Msgtype* to *node*."""
    payload = ParkingLot()
    payload.set_counter(data)
    payload.set_type(Msgtype)
    packet = t.newPacket()
    packet.setData(payload.data)
    packet.setType(payload.get_amType())
    packet.setDestination(node)
    # Schedule delivery slightly in the simulated future.
    packet.deliver(node, t.time() + 2)
def injectionList(NodeList, dataList):
    """Inject dataList[i] into NodeList[i], then run the sim to settle."""
    for idx, target in enumerate(NodeList):
        injection(node=target, data=dataList[idx])
    for _ in range(20000):
        t.runNextEvent()
def MoniCommNode():
    # Initialize the nodes that might be monitorred often.
    # NOTE(review): the variable handles fetched below are discarded each
    # iteration -- presumably merely calling getVariable() primes TOSSIM's
    # state for the nodes watched by getInfo(); confirm against TOSSIM docs.
    for i in [37,40,54,77,80]:
        m = t.getNode(i)
        v_Counterup = m.getVariable("ParkingLotC.counterUp")
        v_Counterdown = m.getVariable("ParkingLotC.counterDown")
        v_forward = m.getVariable("ParkingLotC.NodeForward")
        v_Nodeup=m.getVariable("ParkingLotC.NodeUp")
        v_Nodedown=m.getVariable("ParkingLotC.NodeDown")
def getInfo(i):
# Get the node information
m = t.getNode(i)
v_Counterup = m.getVariable("ParkingLotC.counterUp")
v_Counterdown = m.getVariable("ParkingLotC.counterDown")
v_forward = m.getVariable("ParkingLotC.NodeForward")
v_Nodeup=m.getVariable("ParkingLotC.NodeUp")
v_Nodedown=m.getVariable("ParkingLotC.NodeDown")
counter=[v_Counterup.getData()+v_Nodeup.getData(),v_Counterdown.getData()+v_Nodedown.getData(),v_forward.getData()]
print "Node",i,"UP,DOWN,FORWARD",counter;
def Initialize():
    """Push initial configuration into the network via packet injection."""
    # Wake the boundary/config nodes first (message type 3).
    for boundary in (20, 40, 60, 80):
        injection(node=boundary, Msgtype=3)
    # Wait until boundary nodes initial up.
    for _ in range(10000):
        t.runNextEvent()

    # Zero the first row of sensor nodes; inject twice so the packet is
    # received even if the first copy is lost.
    for _ in range(2):
        for sensor in (3, 5, 6, 8, 9):
            injection(node=sensor, data=0)
    # Wait until all nodes initial up.
    for _ in range(30000):
        t.runNextEvent()

    # Same double-injection for the far row of sensor nodes.
    for _ in range(2):
        for sensor in (83, 85, 86, 88, 89):
            injection(node=sensor, data=0)
    for _ in range(10000):
        t.runNextEvent()
### Initialize the system
n = NescApp()
vars = n.variables.variables()
# t = Tossim([])
t = Tossim(vars);
r = t.radio()
t.addChannel("ParkingLotC", sys.stdout)

N = 100


def _advance(steps):
    # Drain *steps* pending simulator events.
    for _ in range(steps):
        t.runNextEvent()


# Boot all nodes with slightly staggered start times.
for i in range(1, N):
    t.getNode(i).bootAtTime(i + 100)

# Radio topology file: "src dst gain" triples.
f = open("text.txt", "r")
for line in f:
    s = line.split()
    if s:
        r.add(int(s[0]), int(s[1]), float(s[2]))

# Build each node's noise model from the meyer-heavy trace, offset by +25.
noise = open("meyer-heavy.txt", "r")
for line in noise:
    s = line.strip()
    if s:
        val = int(s) + 25
        for i in range(1, N):
            t.getNode(i).addNoiseTraceReading(val)
for i in range(1, N):
    t.getNode(i).createNoiseModel()

# Wait entil all nodes start up.
_advance(10000)

# All Initialized
Initialize()
Initialize()  # Double initialize to ensure all nodes are correctly set up
_advance(40000)

MoniCommNode()
# getInfo(54);
raw_input("All Set Up")
getInfo(37);

# Scripted parking events: (prompt, node, counter delta, settle steps).
for prompt, target, delta, steps in [
        ("A car parked at 48", 48, -1, 50000),
        ("A car parked at 6", 6, -1, 50000),
        ("A car parked at 26", 26, -1, 50000),
        ("A car left at 6", 6, 1, 50000),
        ("A car parked at 86", 86, -1, 80000)]:
    raw_input(prompt)
    injection(node=target, data=delta)
    _advance(steps)

# Bulk arrivals along one column of the lot.
raw_input("cars parked at 9,29,49,69,89")
for target, steps in [(9, 40000), (29, 40000), (49, 40000),
                      (69, 40000), (89, 80000)]:
    injection(node=target, data=-5)
    _advance(steps)
|
UTF-8
|
Python
| false | false | 2,014 |
1,692,217,128,382 |
2ce5f8f85231e873de5f4c57785bc823bf533a72
|
63c89d672cb4df85e61d3ba9433f4c3ca39810c8
|
/python/testdata/launchpad/lib/lp/services/database/tests/test_sqlbase.py
|
8256258ab77c1b7d5620828a1f73d8c30975126e
|
[
"AGPL-3.0-only",
"AGPL-3.0-or-later"
] |
non_permissive
|
abramhindle/UnnaturalCodeFork
|
https://github.com/abramhindle/UnnaturalCodeFork
|
de32d2f31ed90519fd4918a48ce94310cef4be97
|
e205b94b2c66672d264a08a10bb7d94820c9c5ca
|
refs/heads/master
| 2021-01-19T10:21:36.093911 | 2014-03-13T02:37:14 | 2014-03-13T02:37:14 | 17,692,378 | 1 | 3 |
AGPL-3.0
| false | 2020-07-24T05:39:10 | 2014-03-13T02:52:20 | 2018-01-05T07:03:31 | 2014-03-13T02:53:59 | 24,904 | 0 | 3 | 1 |
Python
| false | false |
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import doctest
from doctest import (
ELLIPSIS,
NORMALIZE_WHITESPACE,
REPORT_NDIFF,
)
import unittest
from lp.services.database import sqlbase
def test_suite():
    """Build a suite running sqlbase's doctests with lenient matching."""
    flags = ELLIPSIS | NORMALIZE_WHITESPACE | REPORT_NDIFF
    doctests = doctest.DocTestSuite(sqlbase, optionflags=flags)
    return unittest.TestSuite((doctests,))
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    unittest.TextTestRunner().run(test_suite())
|
UTF-8
|
Python
| false | false | 2,014 |
18,614,388,272,084 |
8175b22e6a7a9688ce1c562d672a6f128d7ca698
|
2cb2e9bf0fb22586ee5e591ffb9330389c7aabf5
|
/stockmapp-cli.py
|
4e59213d6f3101b0820eca768eeb337be4972cfc
|
[
"GPL-3.0-only"
] |
non_permissive
|
sthesing/Stockmapp
|
https://github.com/sthesing/Stockmapp
|
8105bb670c0ec0595dbe4a0aa063a8451b83934c
|
617421138f66323d77e4c8e595b912ddb1addcfa
|
refs/heads/master
| 2021-01-23T03:05:26.622351 | 2014-01-09T08:14:59 | 2014-01-09T08:14:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##! /usr/bin/python
# -*- coding: utf8 -*-
## Copyright (c) 2014 Stefan Thesing
##
##This file is part of Stockmapp.
##
##Stockmapp is free software: you can redistribute it and/or modify
##it under the terms of the GNU General Public License as published by
##the Free Software Foundation, either version 3 of the License, or
##(at your option) any later version.
##
##Stockmapp is distributed in the hope that it will be useful,
##but WITHOUT ANY WARRANTY; without even the implied warranty of
##MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##GNU General Public License for more details.
##
##You should have received a copy of the GNU General Public License
##along with Stockmapp. If not, see http://www.gnu.org/licenses/.
__author__ = "Stefan Thesing <[email protected]>"
__version__ = "0.1.1alpha"
__date__ = "Date: 2014/01/09"
__copyright__ = "Copyright (c) 2014 Stefan Thesing"
__license__ = "GPL"
import sys
import argparse
from classes.stockmapp import Stockmapp
from qrtools import QR
if __name__ == "__main__":
    """
    Stockmapp is a little tool to catalogue arbitrary items and to check them into and out of containers.
    """
    #################################################
    #    Define command line arguments              #
    #################################################

    # Define the parser
    parser = argparse.ArgumentParser(description=
        'Stockmapp is a little tool to catalogue arbitrary items \
        and to check them into and out of containers.')
    # General arguments
    # Positional: path to the JSON collection file.
    parser.add_argument('collection', help='relative or absolute \
        path to the collection file, e.g. "foo/bar.json"')
    # Optional: alternative settings file (None -> Stockmapp default).
    parser.add_argument('--settings', help='relative or absolute \
        path to a settings file, Default: "settings.json"')

    #################################################
    #    Parse and process command line arguments   #
    #################################################
    # Well... parse them.
    args = parser.parse_args()
    # Init ia Stockmapp with the settings
    stockmapp = Stockmapp(args.settings)
    collection = stockmapp.load_collection(args.collection)
    # Let's see if we can add an item by fetching the ID by scanning a QR code
    stockmapp.add_item(collection, stockmapp.scan_qr(), container="another container", tags=["gescannt", "obswohlklappt?"])
    print collection['items']
|
UTF-8
|
Python
| false | false | 2,014 |
146,028,922,138 |
173fc38d33b4a81edd126930cfd252d9cb19a84f
|
385a8d743feb238fb0d939c58b564232aa5f5291
|
/tekton-master/backend/apps/membro_app/model.py
|
6d80a125b3f0e0c70c3b6e873201fa29a4e8b6a1
|
[
"MIT"
] |
permissive
|
lucasgcampos/app-engine-learning
|
https://github.com/lucasgcampos/app-engine-learning
|
7189439e9e431f738f05e0463b6dce8bf6601d8f
|
0c582d6150be152e55464b6bdfb5c6ab1d5c26fb
|
refs/heads/master
| 2016-08-02T22:02:31.816654 | 2014-11-14T03:36:01 | 2014-11-14T03:36:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.ext import ndb
from celula_app.model import Celula
from gaegraph.model import Node, origins_cache_key, Arc
class Membro(Node):
    """Datastore model for a member ('membro') belonging to a 'celula'."""
    # Member's name.
    nome = ndb.StringProperty(required=True)
    # Phone number ('celular'), stored as free-form text.
    celular = ndb.StringProperty(required=True)
    # Contact e-mail address.
    email = ndb.StringProperty(required=True)
    # Key of the Celula this member belongs to.
    celula = ndb.KeyProperty(Celula, required=True)
    # Copy of the cell's name -- presumably denormalized for cheap
    # listings; TODO confirm it is kept in sync by the application layer.
    celulaNome = ndb.StringProperty()
|
UTF-8
|
Python
| false | false | 2,014 |
6,820,408,083,267 |
2b50d2e720f2440fc048a8f6d704bf027c3c96cf
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_9/cvdsha002/question1.py
|
e7e496cf16be15b02cb7c581a77cc1fd1aac7d2a
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
https://github.com/MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Shahrain Coovadia"""
import math
def marks(files):
    """Return names of students scoring more than one std deviation below the mean.

    files -- path to a CSV marks file with lines of the form "name,score".

    Bug fixes vs. the original: it read from the builtin `file` instead of
    the `files` parameter, called an undefined `average()` helper, and called
    `standard_deviation()` without its required argument.  The statistics are
    now computed directly from the file contents.
    """
    with open(files) as f:
        rows = [line.strip().split(",") for line in f if line.strip()]
    scores = [float(r[1]) for r in rows]
    avg = sum(scores) / len(scores)
    # Population standard deviation, rounded to 2 decimals as elsewhere.
    std = round(math.sqrt(sum((s - avg) ** 2 for s in scores) / len(scores)), 2)
    return [r[0] for r in rows if float(r[1]) < avg - std]
def standard_deviation(file):
    """Return the population standard deviation of the marks in *file*.

    file -- path to a CSV marks file ("name,score" per line).
    Returns the deviation rounded to 2 decimal places, as a string
    (preserving the original return type used by main()).

    Bug fix vs. the original: it depended on an undefined `average()`
    helper; the mean is now computed locally.
    """
    with open(file) as f:
        scores = [float(line.strip().split(",")[1]) for line in f if line.strip()]
    avg = sum(scores) / len(scores)
    variance = sum((s - avg) ** 2 for s in scores) / len(scores)
    return str(round(math.sqrt(variance), 2))
def main():
    """Prompt for a marks file, report mean and std deviation, and list
    students who should see an advisor.

    Bug fixes vs. the original: the format specs "{0:0,5}" and "{{0:0<4}"
    were invalid, and the mean came from an undefined `average()` helper
    (now computed inline).
    """
    filename = input("Enter the marks filename:\n")
    with open(filename) as f:
        scores = [float(line.strip().split(",")[1]) for line in f if line.strip()]
    avg = sum(scores) / len(scores)
    print("The average is:", "{0:.2f}".format(avg))
    print("The std deviation is:", "{0:0<4}".format(standard_deviation(filename)))
    struggling = marks(filename)
    if struggling:
        print("List of students who need to see an advisor:")
        for name in struggling:
            print(name)

main()
|
UTF-8
|
Python
| false | false | 2,014 |
6,588,479,863,399 |
8a3fd722ab8ec1d54fca6ea318f09e2b70f1c3bc
|
55fb435d5e67af46dec62648ebf4ae035a52b1f1
|
/DjangoUeditor/urls.py
|
fa8cbdf7bdc3bbe2495a07beb91f6a2ae8acfc7c
|
[] |
no_license
|
xiongharry/Django-Ueditor
|
https://github.com/xiongharry/Django-Ueditor
|
a48ff38f31075fec42761954b86d727afc245e70
|
377e0a32b1156dd044abc345c456cb79702e27b0
|
refs/heads/master
| 2016-02-26T02:19:39.087894 | 2013-01-28T07:50:42 | 2013-01-28T07:50:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#coding:utf-8
from django import VERSION
# Django moved patterns/url out of conf.urls.defaults in 1.4; keep
# compatibility with older releases.
if VERSION[0:2]>(1,3):
    from django.conf.urls import patterns, url
else:
    from django.conf.urls.defaults import patterns, url

from DjangoUeditor.views import upload_file_view, image_manager_view, catche_remote_image_view, \
    search_moview_view, scrawlup_view

# Server-side endpoints used by the Ueditor rich-text widget.  The captured
# path tails are passed to the views as upload_path / image_path kwargs.
urlpatterns = patterns('',
    url(r'^ImageUp/(?P<upload_path>.*)', upload_file_view, {'upload_type':'image'}),
    url(r'^FileUp/(?P<upload_path>.*)', upload_file_view, {'upload_type':'file'}),
    url(r'^scrawlUp/(?P<upload_path>.*)$', scrawlup_view),
    url(r'^ImageManager/(?P<image_path>.*)$', image_manager_view),
    url(r'^RemoteCatchImage/(?P<image_path>.*)$', catche_remote_image_view),
    url(r'^SearchMovie/$', search_moview_view),
)
|
UTF-8
|
Python
| false | false | 2,013 |
17,626,545,787,617 |
4fe1715adfa9726d7cce938b60e03380d51f57cb
|
3861161d66573939212337424d8da404fabccab3
|
/xpa.py
|
ec1d2f29e92adeb3848fd6374c58fb21802803ec
|
[] |
no_license
|
andreww5au/Prosp
|
https://github.com/andreww5au/Prosp
|
cddb3928fe8f98fb8d8b5ccbd82ec05b492bb2e2
|
849cdbdf0cebfbd76c5b9484cdb76e12a0a7556d
|
refs/heads/master
| 2021-01-15T23:07:39.851177 | 2013-12-17T02:11:15 | 2013-12-17T02:11:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#XPA (X Public Transport Access) in Python. (C) Andrew Williams, 2000, 2001
# [email protected]
#
# or
#
# [email protected]
#
import commands
import string
import os
from subprocess import Popen,PIPE,STDOUT
import globals
import fits
viewer='ds9' #Default to ds9 unless otherwise overriden
class marker:
    """An image-coordinate region marker for SAOtng/ds9.

    Should work for any region type defined by x, y and a size (e.g.
    'circle'); only tested for 'point' markers.
    Create an instance with: m=xpa.marker(123.4, 345.6, 'lens'), for example.
    """

    def __init__(self, x=0, y=0, label='', type='point', size=0):
        self.x = x
        self.y = y
        self.label = label
        self.type = type
        self.size = size

    def display(self):
        """Send an XPA message asking the current viewer to draw this marker.

        NOTE(review): label/coords are interpolated into a shell command
        unquoted -- assumes trusted marker contents.
        """
        if self.type == 'point':  # Points don't have a size attribute
            if viewer == 'SAOtng':
                cmd = ("regions '#" + self.label + "; " + self.type + " "
                       + repr(self.x) + " " + repr(self.y) + "'")
                commands.getoutput('echo "' + cmd + '" | xpaset ' + viewer)
            else:
                cmd = ("'" + self.type + " " + repr(self.x) + " " + repr(self.y)
                       + " # text={" + self.label + "} '")
                commands.getoutput('echo ' + cmd + ' | xpaset ' + viewer + ' regions')
                print(cmd)
class wcsmarker:
    """A WCS-coordinate region marker for SAOtng/ds9.

    x and y are strings holding sexagesimal RA and Dec in FK5, J2000.
    Size, where the marker type uses one (e.g. circle), can be for
    example 5" (5 arcseconds) or 0.5' (half an arcminute).
    Create an instance with: m=xpa.marker('12:34:56', '-32:10:54, 'lens'),
    for example.
    """

    def __init__(self, x='', y='', label='', type='point', size=''):
        self.x = x
        self.y = y
        self.label = label
        self.type = type
        self.size = size

    def display(self):
        """Send an XPA message asking the viewer to draw this marker in WCS."""
        if self.type == 'point':  # Points don't have a size attribute
            cmd = ("'wcs; " + self.type + " " + self.x + " " + self.y
                   + " # text={" + self.label + "} '")
            commands.getoutput('echo ' + cmd + ' | xpaset ' + viewer + ' regions')
            print(cmd)
def deleteregions():
    """Send an XPA message to the viewer to delete all markers."""
    # SAOtng and ds9 spell the bulk-delete verb differently.
    verb = 'delete' if viewer == 'SAOtng' else 'deleteall'
    commands.getoutput('echo "regions ' + verb + '" | xpaset ' + viewer)
def showmlist(mlist=None):
    """Call the display method for each marker in *mlist*.

    mlist -- iterable of marker-like objects (anything with .display());
             defaults to no markers.

    Fix: the original used a mutable default argument (mlist=[]); the
    None sentinel is backward-compatible and avoids the shared-default trap.
    """
    for m in (mlist or []):
        m.display()
def getregions():
    """Ask the viewer what regions are defined, parse the output, and
    return a list of marker objects.

    NOTE(review): region tuples are parsed with eval() on text returned
    by the viewer process -- assumes a trusted local viewer; confirm.
    """
    #Call the xpaget command, get the output, and split it into a list of lines
    if viewer=='ds9':
        out=string.split(commands.getoutput('xpaget ds9 regions'),'\n')
        label=''
        mlist=[]
        for r in out: #For each line
            if r.find('(') <= 0:
                # Header/comment lines carry no coordinates; skip them.
                pass
            else:
                print r
                hs=string.find(r,'#')
                ob=string.find(r,'(')
                cb=string.find(r,')')
                try:
                    # Region type is everything before the open parenthesis.
                    type = r[:ob]
                    print type
                except:
                    print "region type unknown: "+r
                    type='point'
                ocb=string.find(r,'{')
                ccb=string.find(r,'}')
                if ocb > 0: #There is a label
                    label=r[ocb+1:ccb] #The label is between curly brackets
                else:
                    label = '' #No label for this point
                if type=='point':
                    x,y=eval(r[ob+1:cb]) #Grab the X and Y values for a point
                    m=marker(x,y,label,type) #Create a marker object
                else:
                    x,y,size=eval(r[ob+1:cb]) #Grab X,Y, and Size for a circle, etc
                    m=marker(x,y,label,type,size) #Create a marker object
                mlist.append(m) #Add the object to the list
        return mlist
    else:
        # SAOtng format: a '#label' line precedes each '+type(...)' line.
        out=string.split(commands.getoutput('xpaget SAOtng regions'),'\n')
        label=''
        mlist=[]
        for r in out: #For each line
            if r[0]=='#': #This line is a label that refers to the next region
                label=r[1:]
            else:
                ob=string.find(r,'(')
                cb=string.find(r,')')
                type=r[1:ob] #type is the string between the leading + and the (
                if type=='point':
                    x,y=eval(r[ob+1:cb]) #Grab the X and Y values for a point
                    m=marker(x,y,label,type) #Create a marker object
                else:
                    x,y,size=eval(r[ob+1:cb]) #Grab X,Y, and Size for a circle, etc
                    m=marker(x,y,label,type,size) #Create a marker object
                label='' #Clear label so it won't get attached to the next marker
                mlist.append(m) #Add the object to the list
        return mlist
def _displayfile(fname, iraf=False):
    """Usage: display(fname, iraf=0)
    Send an image to SAOtng or DS9 for display - if iraf=True then send in
    8-bit IRAF format via the external 'display' helper.
    """
    fullfilename = os.path.abspath(os.path.expanduser(fname))
    if iraf:
        os.system('/home/dts/bin/display ' + fullfilename)
    else:
        os.system('echo file ' + fullfilename + ' | xpaset ' + viewer)
        # ds9 and SAOtng take different autoscaling commands.
        scale = 'scale mode zscale' if viewer == 'ds9' else 'scale histeq'
        os.system('echo ' + scale + ' | xpaset ' + viewer)
        os.system('echo saveimage jpeg /tmp/ds9.jpeg | xpaset ' + viewer)
def displayimage(im, iraf=False):
    """Stream a FITS image array directly into ds9 via XPA (no disk file)."""
    width = int(im.headers['NAXIS1'])
    height = int(im.headers['NAXIS2'])
    spec = '[xdim=%d,ydim=%d,bitpix=-32,arch=littleendian]' % (width, height)
    pipe = Popen(['/usr/local/bin/xpaset', 'ds9', 'array', spec], stdin=PIPE)
    pipe.communicate(input=im.data.astype(fits.Float32).tostring())
    os.system('echo scale mode zscale | xpaset ds9')
def display(fpat, iraf=False):
    """Display the specified file/s on the viewer program (eg SAOtng).
    If no filename is given, display the last image taken.
    if iraf=1 then the image is displayed in 8-bit IRAF format (so 'imexam'
    will work)
    eg: display('/data/junk001.fits')
        display('/data/junk002.fits',iraf=1)
        display( reduce('/data/comet*.fits') )
        display()
    """
    # Fan the file pattern out to _displayfile via the globals dispatcher.
    globals.distribute(fpat, _displayfile, iraf)
|
UTF-8
|
Python
| false | false | 2,013 |
5,583,457,532,047 |
da63a3d1e0e2fa17310cfce71fb64b81f8df5ccd
|
45be844b2f055d201f8cee36640d3532e33c83bc
|
/data/createdata.py
|
ec45cf05534141077992e202a11a334b7154011e
|
[] |
no_license
|
sgurjar/hashfun-benchmark
|
https://github.com/sgurjar/hashfun-benchmark
|
4e0d13d26a499149788017d74613ef2846438927
|
a83a77b6a5e90ab9ff7e78a124b59f8d91bc9e63
|
refs/heads/master
| 2016-09-06T03:23:37.596474 | 2014-04-04T15:42:15 | 2014-04-04T15:42:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os
# Sizes (in MiB) of the benchmark input files to generate.
file_sizes_in_mb = (2, 4, 8, 16, 32, 64, 128, 256,
                    512, 640, 768, 896, 1024)
def create_rnd_file(size_in_mb, filename):
    """Write *size_in_mb* mebibytes of random data to *filename*.

    size_in_mb -- integer number of MiB to write
    filename   -- destination path (overwritten if it exists)

    Fix: the original built the whole payload with a single os.urandom()
    call, holding up to 1 GiB in memory; write 1 MiB chunks instead.
    """
    chunk = 1024 * 1024
    with open(filename, 'wb') as f:
        for _ in range(size_in_mb):
            f.write(os.urandom(chunk))
# Generate one data file per configured size, e.g. '0064mb.dat'.
for size in file_sizes_in_mb:
    create_rnd_file(size, '%04dmb.dat' % size)
|
UTF-8
|
Python
| false | false | 2,014 |
1,752,346,691,564 |
0323dd1879da5b01275db56e28643760cb525b78
|
0f3ef2aaeb94e7ad4d6fa288abb45427ae1d8213
|
/applications/init/controllers/account.py
|
63f4daef90986c9bd9b1abcd3e25ac36efe5b1c4
|
[
"LGPL-3.0-only",
"LicenseRef-scancode-free-unknown",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"MIT",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-2.0-only",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
bigdaddycade/web2py
|
https://github.com/bigdaddycade/web2py
|
3d787ea8c044007b2cc99604eff812aa1c515fe9
|
7ee12073bc8839f8452d4980090501f5400b8d70
|
refs/heads/master
| 2021-01-19T06:20:04.444227 | 2014-09-07T20:36:35 | 2014-09-07T20:36:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
apiVersion = '0.9'
@auth.requires_login()
def index():
    """Account settings controller.

    Dispatches on request.vars.view:
      - no view:      overview page with the account record and its users
      - 'default' / 'location' / 'personal': an SQLFORM editing the
                      corresponding subset of account fields
      - 'moderator':  static moderator page
    Returns the dict rendered by the matching view template.

    Fixes vs. the original: the overview branch initialized `user={}` but
    returned the undefined name `users` when no account existed (NameError);
    and the three near-identical SQLFORM branches are now driven by one
    field table instead of three copies of the same code.
    """
    view = request.vars.view or None
    title = 'Account Settings'

    from AccountUser import Account
    accountObj = Account(db)

    # Look up (or lazily create) the account for the logged-in user.
    accountID = accountObj.get_account_id_by_user_id(session.auth.user.id)
    if not accountID:
        accountID = accountObj.create_account_and_add_user_data_and_profile(session.auth.user.id)
        if accountID:
            redirect(URL('account', 'index'))

    if not view:
        # Overview page: account record plus its users.
        account = {}
        users = {}
        if accountID:
            account = accountObj.get(accountID)
            users = accountObj.get_user_by_account(accountID)
        return dict(
            view=view,
            account=account,
            users=users,
            title=title)

    # Editable field subsets, keyed by the view name.
    fields_by_view = {
        'default': ['default_friend', 'default_blog', 'default_album'],
        'location': ['phone', 'address', 'city', 'state_id', 'zip_code', 'country_id'],
        'personal': ['gender', 'email', 'show_me_filter'],
    }
    if view in fields_by_view:
        record = db.account(db.account.id == accountID)
        form = SQLFORM(db.account, record, submit_button='Update Account',
                       deletable=False, showid=False,
                       fields=fields_by_view[view])
        if form.process().accepted:
            redirect(URL('account', 'index'))
        elif form.errors:
            response.flash = 'The form contains errors.'
        return dict(
            view=view,
            title=title,
            form=form)

    if view == 'moderator':
        return dict(
            view=view,
            title=title)
|
UTF-8
|
Python
| false | false | 2,014 |
13,623,636,283,164 |
f123a5134c94dacd2cb7fa2acf9b0bc245c8cde7
|
0711465ddb2fd49111047165a9aea49b83b2fa0c
|
/addons/web_graph/controllers/graph.py
|
21ec7d6e973ffaeb3f5bbe79554fc6928e822cef
|
[
"AGPL-3.0-only"
] |
non_permissive
|
daqing15/openerp-web
|
https://github.com/daqing15/openerp-web
|
b56433ebfea52fa62383f5d3f312d083310f9d5b
|
abd39f35b02bce17b48db65e45f7c7209036b100
|
refs/heads/master
| 2021-01-19T05:36:58.143230 | 2012-10-12T14:49:43 | 2012-10-12T14:49:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import openerp
from lxml import etree
class GraphView(openerp.addons.web.controllers.main.View):
    """JSON-RPC controller feeding the ``web_graph`` chart widget.

    Exposes ``data_get``, which aggregates records of an arbitrary model
    (via the ORM's ``read_group``) into the series/ticks structure the
    client-side graphing code expects.
    """
    _cp_path = '/web_graph/graph'

    @openerp.addons.web.http.jsonrequest
    def data_get(self, req, model=None, domain=None, context=None, group_by=None, view_id=False, orientation=False, stacked=False, mode="bar", **kwargs):
        """Return chart data for *model*.

        :param req: web request wrapping the OpenERP session
        :param model: name of the model to aggregate
        :param domain: search domain restricting the aggregated records
        :param context: evaluation context forwarded to the ORM
        :param group_by: field names overriding the view's X-axis fields
        :param view_id: id of the ``graph`` view to use (False = default)
        :param orientation: truthy to swap X and Y in every data point
        :param stacked: truthy to build one series per value of one X
            field, grouped along the other (needs at least two X fields)
        :param mode: "bar" (default) or "pie"
        :return: dict with ``data`` (list of series dicts) and ``ticks``
            (list of ``(index, label)`` pairs)
        """
        # The original signature used mutable defaults ([], {}), which are
        # shared across calls in Python; normalize None here instead.
        domain = domain or []
        context = context or {}
        group_by = group_by or []

        obj = req.session.model(model)
        res = obj.fields_view_get(view_id, 'graph')
        fields = res['fields']
        # Fetch metadata for group-by fields the view does not declare.
        toload = [f for f in group_by if f not in fields]
        if toload:
            fields.update(obj.fields_get(toload, context))

        # Walk the view architecture to assign fields to axes: the first
        # field (and any flagged 'group') goes to X, the others to Y.
        # An explicit group_by overrides the X fields from the view.
        tree = etree.fromstring(res['arch'])
        pos = 0
        xaxis = group_by or []
        yaxis = []
        # iter(tag='field') only yields <field> elements, so no extra tag
        # check is needed; nameless fields are simply skipped.
        for field in tree.iter(tag='field'):
            if not field.get('name'):
                continue
            if (not group_by) and ((not pos) or field.get('group')):
                xaxis.append(field.get('name'))
            if pos and not field.get('group'):
                yaxis.append(field.get('name'))
            pos += 1
        assert len(xaxis), "No field for the X axis!"
        assert len(yaxis), "No field for the Y axis!"

        # Display values are mapped to numeric tick indexes so categorical
        # data can be plotted; ``ticks`` remembers index -> label.
        ticks = {}

        def _convert_key(field, data):
            # read_group returns many2one values as (id, name); keep the
            # id so it can be reused inside a search domain.
            if fields[field]['type'] == 'many2one':
                data = data and data[0]
            return data

        def _convert(field, data, tick=True):
            # Turn a raw read_group value into its display label and,
            # unless tick=False, into its numeric tick index.
            if fields[field]['type'] == 'many2one':
                data = data and data[1]
            elif (fields[field]['type'] == 'selection') and (type(fields[field]['selection']) in (list, tuple)):
                d = dict(fields[field]['selection'])
                data = d[data]
            if tick:
                return ticks.setdefault(data, len(ticks))
            return data or 0

        def _orientation(x, y):
            # Swap axes when a horizontal orientation is requested.
            if not orientation:
                return (x, y)
            return (y, x)

        result = []
        if mode == "pie":
            # One slice per value of the first X field.
            res = obj.read_group(domain, yaxis + [xaxis[0]], [xaxis[0]], context=context)
            for record in res:
                result.append({
                    'data': [(_convert(xaxis[0], record[xaxis[0]]), record[yaxis[0]])],
                    'label': _convert(xaxis[0], record[xaxis[0]], tick=False),
                })
        elif (not stacked) or (len(xaxis) < 2):
            # One independent series per X field.
            for x in xaxis:
                res = obj.read_group(domain, yaxis + [x], [x], context=context)
                result.append({
                    'data': [_orientation(_convert(x, record[x]), record[yaxis[0]] or 0) for record in res],
                    'label': fields[x]['string'],
                })
        else:
            # Stacked mode: after reversing, xaxis[0] selects the series
            # (one per distinct value) and xaxis[1] the groups within it.
            xaxis.reverse()
            axis = obj.read_group(domain, yaxis + xaxis[0:1], xaxis[0:1], context=context)
            for x in axis:
                key = x[xaxis[0]]
                res = obj.read_group(domain + [(xaxis[0], '=', _convert_key(xaxis[0], key))], yaxis + xaxis[1:2], xaxis[1:2], context=context)
                result.append({
                    'data': [_orientation(_convert(xaxis[1], record[xaxis[1]]), record[yaxis[0]] or 0) for record in res],
                    'label': _convert(xaxis[0], key, tick=False),
                })

        return {
            'data': result,
            'ticks': [(index, label) for (label, index) in ticks.items()],
        }
|
UTF-8
|
Python
| false | false | 2,012 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.