__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
15,513,421,903,353 |
9b2a8dfbf7716c1cf8ed56392cba4bce0090a825
|
f1a9ee1318e80d833b6a8cf3bff5a93448c6aa03
|
/tests/test_queue.py
|
d73b508af7d9da8795cb3d80d0bee1a9ca363b44
|
[] |
no_license
|
trezorg/stack2queue
|
https://github.com/trezorg/stack2queue
|
8394468d52d85eba39d7a52639e3cd5c16aae8e2
|
8b5ad4c20c4e140a493f97399738bf683ddb3588
|
refs/heads/master
| 2018-12-28T02:51:58.805625 | 2013-11-05T17:26:55 | 2013-11-06T06:50:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from stack2queue.queue import StackQueue
class QueueTests(unittest.TestCase):
    """Behavioural tests for StackQueue (a FIFO queue built on stacks)."""

    def test_dequeue_on_empty_queue(self):
        """Dequeueing an empty queue must raise ValueError."""
        q = StackQueue()
        self.assertTrue(q.isEmpty())
        with self.assertRaises(ValueError):
            q.dequeue()

    def test_size(self):
        """size() reports the number of queued items."""
        q = StackQueue(*range(100))
        self.assertEqual(q.size(), 100)

    def test_enqueue_with_init(self):
        """Constructor arguments come back out in FIFO order."""
        q = StackQueue(1, 2, 3)
        self.assertFalse(q.isEmpty())
        for expected in (1, 2, 3):
            self.assertEqual(q.dequeue(), expected)

    def test_get(self):
        """get() peeks at the head without removing it."""
        q = StackQueue(1, 2, 3)
        self.assertEqual(q.size(), 3)
        self.assertEqual(q.get(), 1)
        # Peeking must not shrink the queue.
        self.assertEqual(q.size(), 3)
        q.dequeue()
        q.dequeue()
        q.enqueue(4, 5)
        q.dequeue()
        self.assertEqual(q.get(), 4)

    def test_enqueue_dequeue(self):
        """FIFO order holds across interleaved enqueues and dequeues."""
        q = StackQueue(1, 2)
        self.assertEqual(q.dequeue(), 1)
        self.assertEqual(q.dequeue(), 2)
        self.assertTrue(q.isEmpty())
        with self.assertRaises(ValueError):
            q.dequeue()
        q.enqueue(3, 4, 5)
        for expected in (3, 4, 5):
            self.assertEqual(q.dequeue(), expected)
        self.assertTrue(q.isEmpty())
        with self.assertRaises(ValueError):
            q.dequeue()

    def test_iter(self):
        """Iteration yields the remaining items in FIFO order."""
        self.assertEqual(list(StackQueue()), [])
        q = StackQueue(1, 2, 3, 4, 100)
        q.dequeue()
        q.dequeue()
        q.enqueue(3, 4, 5)
        q.dequeue()
        self.assertEqual(list(q), [4, 100, 3, 4, 5])

    def test_iter_with_one_element(self):
        """A single-element queue iterates to a one-item list."""
        q = StackQueue(1)
        self.assertEqual(list(q), [1])
def main():
    """Entry point: discover and run this module's test cases."""
    unittest.main()


if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
9,981,504,010,439 |
98e2860be53ab3980574992cb2aa26fe98f3b6db
|
46eb5ed5d7f823df355d94af9f519acf755e09e7
|
/prefs.py
|
ef57522cebabdb04e46e356a11b039e54ed7f879
|
[] |
no_license
|
scottwallacesh/smughost
|
https://github.com/scottwallacesh/smughost
|
ef1d0c392a3481d213f9bbe684afffe47213c925
|
27ec07fd2bbd39aa282dfbc5b0a41fe16369632b
|
refs/heads/master
| 2021-01-01T19:02:22.202988 | 2014-01-25T17:32:29 | 2014-01-25T17:32:29 | 6,640,710 | 1 | 1 | null | false | 2014-01-25T17:32:31 | 2012-11-11T17:08:09 | 2014-01-25T17:32:30 | 2014-01-25T17:32:30 | 236 | 0 | 0 | 0 |
JavaScript
| null | null |
#!/usr/bin/env python
from google.appengine.ext import db,webapp
from google.appengine.ext.webapp import util
from google.appengine.api import users
import xhtml
class AppPrefs(db.Model):
    """Datastore model holding the application's singleton preferences."""

    api_key = db.StringProperty(default="")
    nickname = db.StringProperty(default="")
    app_name = db.StringProperty(default="")
    category = db.IntegerProperty(default=0)
    title = db.StringProperty(default="smughost")

    def fetch(self):
        """Return the singleton AppPrefs entity, creating it if absent.

        The entity lives under a single well-known key name; a freshly
        created entity is NOT saved here — callers persist it via put().
        """
        prefs = db.get(db.Key.from_path("AppPrefs", "AppPrefs"))
        if prefs is None:
            # First run: build an (unsaved) entity under the fixed key.
            prefs = AppPrefs(key_name="AppPrefs")
        return prefs
class PrefHandler(webapp.RequestHandler):
    """Preferences handler: GET renders the edit form, POST stores it."""

    def get(self):
        # Render the preferences form pre-filled with the stored values.
        html = xhtml.HTML(self)
        # Fetch any existing preferences.
        prefs = AppPrefs().fetch()
        # Display a form to add/update the application settings.
        html.header(title="Preferences")
        self.response.out.write("""
<h1>Edit preferences</h1>
<form action="/prefs" method="post">
<label for="api_key">SmugMug API Key</label>
<input type="text" id="api_key" name="api_key" value="%s" /> <br/>
<label for="nickname">SmugMug Username</label>
<input type="text" id="nickname" name="nickname" value="%s" /> <br/>
<label for="app_name">SmugMug API App Name</label>
<input type="text" id="app_name" name="app_name" value="%s" /> <br/>
<label for="category">SmugMug Category</label>
<input type="text" id="category" name="category" value="%s" /> <br/>
<label for="title">Site Title</label>
<input type="text" id="title" name="title" value="%s" /> <br/>
<input type="submit" value="submit" />
</form>
""" % (prefs.api_key, prefs.nickname, prefs.app_name, prefs.category, prefs.title))
        html.footer()

    def post(self):
        """Function to store the settings provided in the <form ...> in the get() function."""
        # Fetch our preferences object
        prefs = AppPrefs().fetch()
        try:
            # Use the variables from the form
            prefs.api_key = self.request.get("api_key")
            prefs.nickname = self.request.get("nickname")
            prefs.app_name = self.request.get("app_name")
            # category must be numeric: int() raises ValueError on junk,
            # which the broad except below turns into a user-visible message.
            prefs.category = int(self.request.get("category"))
            prefs.title = self.request.get("title")
            # Push the changes to the DB.
            prefs.put()
        except Exception, e:  # Python 2 syntax; broad catch reports any failure.
            # Oops. An error.
            self.response.out.write("There was an error storing the preferences: %s" % e)
            return
        # Back to the main page
        self.redirect("/")
def main():
    """Main function for executing the script: wire up and serve /prefs."""
    app = webapp.WSGIApplication([('/prefs', PrefHandler)], debug=True)
    util.run_wsgi_app(app)


if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
16,045,997,844,160 |
3d05adc79fb9290bce30e3d7d32819fdd5fb39ac
|
643dd6ccf67599fbfaca386d1500146b32dd85ae
|
/thunder/testutils.py
|
8e8b98a13fcf6451d4a7559168914ba01982e040
|
[
"LGPL-2.1-only"
] |
non_permissive
|
jdahlin/thunder
|
https://github.com/jdahlin/thunder
|
a95bd34f13ad0467762f15eb24e3b965dc15f72d
|
1c1a6d1374dc4c0787f1f82fbc494b7f171c2d65
|
refs/heads/master
| 2020-05-17T01:24:47.072163 | 2012-01-06T16:05:55 | 2012-01-06T16:05:55 | 3,094,954 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from thunder.info import get_cls_info
from thunder.store import Store
class StoreTest(unittest.TestCase):
    """Base TestCase providing a traced Store and op-queue assertions.

    Trace mode records every operation each collection performs; tests
    consume them with assertOp(), and tearDown fails if any are left.
    """

    def setUp(self):
        self.store = Store('localhost', 'thunder-test')
        self.store.trace = True

    def tearDown(self):
        self.store.drop_collections()
        for collection in self.store.collections:
            # Drain the recorded ops, then fail if any went unconsumed.
            pending, collection.ops = collection.ops, []
            if pending:
                self.fail("%r still has %d ops: %r" % (
                    collection, len(pending), pending))

    def assertOp(self, cls, **kwargs):
        """Pop the newest op for *cls* and check each given attribute."""
        op = self.getCollection(cls).ops.pop()
        for attr, expected in kwargs.items():
            actual = getattr(op, attr)
            if actual != expected:
                self.fail("op %s: %r != %r" % (attr, actual, expected))

    def getCollection(self, cls):
        """Return the store collection registered for *cls*."""
        return get_cls_info(cls).get_collection(self.store)
|
UTF-8
|
Python
| false | false | 2,012 |
4,922,032,552,780 |
c3c96ca7a260455e027e5f65fe9ab309c577f54a
|
b50eae95be1ccc22569efd2493f071747a0697fe
|
/shaire/user_profile/views.py
|
bd68d70ad5cfb1d973a33578e6685fe775a4a461
|
[
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
FrodoTheTrue/shaire
|
https://github.com/FrodoTheTrue/shaire
|
642faeab1a68f9b7c0c877945bd8dc7b8e5c868e
|
33272a6f335f03ee90496d1886d2d0a151da30af
|
refs/heads/master
| 2020-04-23T14:38:15.873208 | 2014-12-12T11:45:49 | 2014-12-12T11:45:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.shortcuts import render_to_response, redirect
from django.contrib import auth
from loginsys.models import Users, Alerts, Accepted_Alerts
from django.contrib.auth.models import User
import datetime
from user_profile.forms import ChangeAvatar, ChangeName, ChangeSecondName, ChangeSex, ChangeCity, ChangeBirthday, ChangeVK
from django.core.context_processors import csrf
# Create your views here.
def profile(request, username):
    """Render the profile page for *username*.

    Anonymous visitors are bounced to the front page.  ``my_page`` marks
    whether the viewer is looking at their own profile.
    """
    if not request.user.is_authenticated():
        return redirect('/')
    args = {}
    args.update(csrf(request))
    args['form_change_avatar'] = ChangeAvatar()
    args['my_page'] = False
    user = auth.get_user(request)
    args['username'] = user.username
    if args['username'] == username:
        args['my_page'] = True
    # The profile being viewed (may differ from the logged-in user).
    args['browsable_user'] = User.objects.get(username=username)
    args['user_profile'] = args['browsable_user'].users
    birthday = args['user_profile'].user_birthday
    # Invite counter shown in the page header belongs to the viewer.
    all_invites = list(Accepted_Alerts.objects.filter(users=user))
    args['invites_count'] = len(all_invites)
    # find_age returns (age, declined Russian word for "years").
    age = find_age(birthday)
    args['user_age'] = age[0]
    args['user_age_word'] = age[1]
    # Newest invites first.
    args['invites'] = Alerts.objects.filter(alert_to_user=args['browsable_user'].id)
    args['invites'] = list(args['invites'])
    args['invites'].reverse()
    return render_to_response('profile.html', args)
def settings(request):
    """Render the account-settings page with one form per editable field."""
    user = auth.get_user(request)
    args = {}
    args.update(csrf(request))
    args['invites_count'] = len(list(Accepted_Alerts.objects.filter(users=user)))
    args['username'] = user.username
    args['user'] = user
    args['user_profile'] = user.users
    # One small, unbound form per editable profile field.
    per_field_forms = {
        'change_name_form': ChangeName,
        'change_second_name_form': ChangeSecondName,
        'change_sex_form': ChangeSex,
        'change_city_form': ChangeCity,
        'change_birthday_form': ChangeBirthday,
        'change_vk_form': ChangeVK,
    }
    for key, form_cls in per_field_forms.items():
        args[key] = form_cls()
    return render_to_response('settings.html', args)
def find_age(birthday):
cur_date = str()
now = str(datetime.datetime.now())
for s in now:
if not s == ' ':
cur_date += (s)
else:
break
birthday = str(birthday)
temp = []
for letter in range(len(cur_date)):
if cur_date[letter] == '-':
temp.append(letter)
cur_year = int(cur_date[:temp[0]])
cur_month = int(cur_date[temp[0] + 1:temp[1]])
cur_day = int(cur_date[temp[1] + 1:temp[1] + 3])
temp = []
for letter in range(len(birthday)):
if birthday[letter] == '-':
temp.append(letter)
birthday_year = int(birthday[:temp[0]])
birthday_month = int(birthday[temp[0] + 1:temp[1]])
birthday_day = int(birthday[temp[1] + 1:temp[1] + 3])
age = cur_year - birthday_year
if cur_month < birthday_month:
age -= 1
elif cur_month == birthday_month:
if cur_day < birthday_day:
age -= 1
age_last_number = int(str(age)[len(str(age)) - 1])
if age_last_number >= 5 or age_last_number == 0 or (age in {11, 12, 13, 14}):
age_word = "лет"
elif age_last_number == 1:
age_word = "год"
else:
age_word = "года"
return (age, age_word)
def change_avatar(request):
    """Replace the user's avatar with the uploaded file (POST only)."""
    if request.method != 'POST':
        return None
    # Form is constructed but not validated before use.
    form = ChangeAvatar(request.POST, request.FILES)
    profile_row = auth.get_user(request).users
    # Keep the shared default image; delete any previously uploaded one.
    if "default_avatar.jpg" not in str(profile_row.user_photo):
        profile_row.user_photo.delete()
    profile_row.user_photo = request.FILES['user_photo']
    profile_row.save()
    return redirect('/')
def change_name(request):
    """Update the profile first name from the ChangeName form (POST only),
    then return to the settings page."""
    if request.method != 'POST':
        return None
    form = ChangeName(request.POST)
    if not form.is_valid():
        return redirect('/profile/settings/')
    current = User.objects.get(username=auth.get_user(request).username)
    profile_row = current.users
    profile_row.user_firstname = request.POST['user_firstname']
    profile_row.save()
    return redirect('/profile/settings/')
def change_secondname(request):
    """Update the profile surname from the ChangeSecondName form (POST
    only), then return to the settings page."""
    if request.method != 'POST':
        return None
    form = ChangeSecondName(request.POST)
    if not form.is_valid():
        return redirect('/profile/settings/')
    current = User.objects.get(username=auth.get_user(request).username)
    profile_row = current.users
    profile_row.user_surname = request.POST['user_surname']
    profile_row.save()
    return redirect('/profile/settings/')
def change_sex(request):
    """Update the profile sex field from the ChangeSex form (POST only),
    then return to the settings page."""
    if request.method != 'POST':
        return None
    form = ChangeSex(request.POST)
    if not form.is_valid():
        return redirect('/profile/settings/')
    current = User.objects.get(username=auth.get_user(request).username)
    profile_row = current.users
    profile_row.user_sex = request.POST['user_sex']
    profile_row.save()
    return redirect('/profile/settings/')
def change_city(request):
    """Update the profile city from the ChangeCity form (POST only),
    then return to the settings page."""
    if request.method != 'POST':
        return None
    form = ChangeCity(request.POST)
    if not form.is_valid():
        return redirect('/profile/settings/')
    current = User.objects.get(username=auth.get_user(request).username)
    profile_row = current.users
    profile_row.user_city = request.POST['user_city']
    profile_row.save()
    return redirect('/profile/settings/')
def change_vk(request):
    """Update the profile VK link from the ChangeVK form (POST only),
    then return to the settings page."""
    if request.method != 'POST':
        return None
    form = ChangeVK(request.POST)
    if not form.is_valid():
        return redirect('/profile/settings/')
    current = User.objects.get(username=auth.get_user(request).username)
    profile_row = current.users
    profile_row.user_vk = request.POST['user_vk']
    profile_row.save()
    return redirect('/profile/settings/')
def change_birthday(request):
    """Birthday editing is not implemented; just return to settings."""
    return redirect('/profile/settings/')
|
UTF-8
|
Python
| false | false | 2,014 |
11,407,433,183,769 |
f8b29540bd640891b8051bc5b79746c27c059a91
|
a1ad03a074f1dd9a7ea486fa7bb2594f37d680fa
|
/HomeBase.py
|
fe5ad614480dc07bf38c54db03597c16284645b8
|
[
"GPL-3.0-only",
"GPL-3.0-or-later"
] |
non_permissive
|
jseibel/Hieratic-Attack
|
https://github.com/jseibel/Hieratic-Attack
|
8583924c30f1b1955e073eb93ccde8c23f1bfa43
|
ae6f553c0a0a588849e5aaf3f59ce8efe6a6dede
|
refs/heads/master
| 2021-03-12T20:41:41.615482 | 2010-05-17T15:29:21 | 2010-05-17T15:29:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Home Base for defense, end of enemy movement path
from Tile import Tile
import pygame
from pygame.locals import *
class HomeBase(Tile):
    """Home-base tile: the end of the enemy movement path on defense maps."""

    def __init__(self, i, j):
        Tile.__init__(self)
        self.kind = 'home base'
        self.next = None  # no tile follows the base on the path
        self.dir = ''
        self.loc = (i, j)
        # Dark-green 30x30 background with the hut sprite blitted on top.
        surface = pygame.Surface((30, 30)).convert()
        surface.fill((0, 105, 40))
        self.pic = surface
        self.pic1 = pygame.image.load('IMG/hut.png').convert_alpha()
        self.pic.blit(self.pic1, (0, 0))

    def update_pic(self, a, b):
        """Static tile: nothing to animate."""
        return None
|
UTF-8
|
Python
| false | false | 2,010 |
18,373,870,096,438 |
296bd7cbe6c29f3a489e770935de15fa85ecc21f
|
89735e50dad9d49c582c098463fc06e8b9786c8d
|
/src/twirem/reporter/reporting.py
|
15dd6db984e29e22037bf0836d2f457ef79a42ae
|
[] |
no_license
|
sowcod/Twirem
|
https://github.com/sowcod/Twirem
|
342150be65853d5e48ffe70f42f35f215d2fb020
|
d2bea254e2d935e8801187d50353c86f0cc7097a
|
refs/heads/master
| 2016-09-05T22:23:32.941261 | 2011-06-29T14:35:58 | 2011-06-29T14:35:58 | 1,951,427 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*- coding: utf-8 -*-
from models import Report, DiffScreenName, DiffIcon
from twirem.main.models import UserProfile, q_inner
from twirem.main.models import UserScreenName, UserIcon
from twirem.arrayutil import Marge
import time
def create_new_report(user_id, end=None):
    """Create a report running from the latest stored report's end date
    up to *end* (default: now, as a Unix timestamp).

    With no prior report the window collapses to [end, end].
    """
    if end is None:
        end = time.time()
    try:
        latest = Report.objects.select_related('user').latest('start_date')
        start = latest.end_date
    except Report.DoesNotExist:
        # First report ever: zero-length window ending at *end*.
        start = end
    return create_report(user_id, start, end)
def create_report(user_id, start, end):
    """Fetch or build the Report for *user_id* over [start, end], attach
    every diff section (followers, friends, screen name, icon), save and
    return it."""
    user = UserProfile.objects.get_or_create(user_id=user_id)[0]
    report, _created = Report.objects.select_related('user').get_or_create(
        user=user,
        start_date=start,
        end_date=end)
    for attach in (attach_followers, attach_friends,
                   attach_screen_name, attach_icon):
        attach(report)
    report.save()
    return report
def attach_followers(report):
    """Record follower changes over the report window.

    Users present only in the start snapshot are stored with remove=True,
    users present only in the end snapshot with remove=False.
    """
    followers = report.followers
    user = report.user
    # Follower snapshots at both report boundaries, sorted for the merge.
    oldf = user.followers.filter(q_inner(report.start_date), unfollow = False).order_by('user')
    newf = user.followers.filter(q_inner(report.end_date), unfollow = False).order_by('user')
    # Python 2 cmp(); Marge appears to walk both sorted sequences in
    # lockstep, invoking left/right for one-sided entries — TODO confirm
    # against twirem.arrayutil.Marge.
    m = Marge(oldf, newf, lambda a,b: cmp(a.user_id, b.user_id))
    m.full(left = lambda l: followers.create(user = l.user, remove = True),
           right = lambda r: followers.create(user = r.user, remove = False))
    return followers
def attach_friends(report):
    """Record friend (following) changes over the report window.

    Friends present only in the start snapshot are stored with remove=True,
    friends present only in the end snapshot with remove=False.
    """
    friends = report.friends
    user = report.user
    # Friend snapshots at both report boundaries, sorted for the merge.
    oldf = user.friends.filter(q_inner(report.start_date), unfollow = False).order_by('friend')
    newf = user.friends.filter(q_inner(report.end_date), unfollow = False).order_by('friend')
    # Python 2 cmp(); same lockstep merge as attach_followers.
    m = Marge(oldf, newf, lambda a,b: cmp(a.friend_id, b.friend_id))
    m.full(left = lambda l: friends.create(user = l.friend, remove = True),
           right = lambda r: friends.create(user = r.friend, remove = False))
    return friends
def attach_screen_name(report):
    """Attach a DiffScreenName describing the screen-name change over the
    report window: 'N' none recorded at the end, 'E' newly appeared or
    unchanged, 'C' changed."""
    def snapshot(when):
        # Screen name in effect at *when*, or None when none is recorded.
        try:
            return report.user.screen_names.get(q_inner(when))
        except UserScreenName.DoesNotExist:
            return None

    new = snapshot(report.end_date)
    old = snapshot(report.start_date)
    if new is None:
        names = DiffScreenName(report=report, diff_type='N')
    elif old is None:
        names = DiffScreenName(report=report, new=new, diff_type='E')
    elif old.screen_name == new.screen_name:
        names = DiffScreenName(report=report, old=old, new=new, diff_type='E')
    else:
        names = DiffScreenName(report=report, old=old, new=new, diff_type='C')
    names.save()
    report.screen_names = names
    return names
def attach_icon(report):
    """Attach a DiffIcon describing the avatar change over the report
    window, compared by digest: 'N' none recorded at the end, 'E' newly
    appeared or unchanged, 'C' changed."""
    def snapshot(when):
        # Icon in effect at *when*, or None when none is recorded.
        try:
            return report.user.icons.get(q_inner(when))
        except UserIcon.DoesNotExist:
            return None

    newi = snapshot(report.end_date)
    oldi = snapshot(report.start_date)
    if newi is None:
        icons = DiffIcon(report=report, diff_type='N')
    elif oldi is None:
        icons = DiffIcon(report=report, new=newi, diff_type='E')
    elif oldi.digest == newi.digest:
        icons = DiffIcon(report=report, old=oldi, new=newi, diff_type='E')
    else:
        icons = DiffIcon(report=report, old=oldi, new=newi, diff_type='C')
    icons.save()
    report.icons = icons
    return icons
|
UTF-8
|
Python
| false | false | 2,011 |
8,950,711,847,756 |
6be7ec26ef5bdb5d841307c0d534170c2ba67803
|
cf3d764f74b5a02398634b268f3cf777f4064528
|
/terraform.py
|
5830f8ede959b776b1a79ed60654e617f259d5a7
|
[] |
no_license
|
aaronreba/Pangaea
|
https://github.com/aaronreba/Pangaea
|
db072133747a0f7a9434f3bcbe101d8a74f71db5
|
adf99670ada32d15b3c1d049b3089f873edebdbc
|
refs/heads/master
| 2021-01-25T05:21:33.629612 | 2014-02-10T23:37:20 | 2014-02-10T23:37:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
import common
#(0, 0) (1, 0) (2, 0)
# (0, 1) (1, 1) (2, 1)
#(0, 2) (1, 2) (2, 2)
#########
# World-generation tuning constants (distances are in chunk units).
#all chunks within this many distance units from the human must be
#generated whenever a generation is requested
#testing: 30 chunks
do_chunk_generate_distance = 3
#when a deletion is requested, chunks within this many distance units
#from the human must be deleted
do_chunk_delete_distance = 3
#a generation is requested when the human is within this many
#chunks from the edge of the world.
#when the human reaches the boundary of this region,
#another check for land is made. if that request can be carried out,
#it will.
#testing: 12 chunks
check_chunk_generate_distance = 3
# Edge length of a square chunk, in tiles.
chunk_size = 5
biome_graph = {}
# Keys are ((temp_lo, temp_hi), (precip_lo, precip_hi)) bands on the
# -4..4 chunk-climate scale; values are the biome assigned to tiles whose
# temperature falls in the first band AND precipitation in the second.
biome_graph[(-4, -2), (-4, -1)] = 'tundra'
biome_graph[(-4, -2), (0, 4)] = 'forest'
biome_graph[(-1, 1), (-4, -3)] = 'desert'
biome_graph[(-1, 1), (-2, -1)] = 'plains'
biome_graph[(-1, 1), (0, 2)] = 'grass'
biome_graph[(-1, 1), (3, 4)] = 'heavy_grass'
biome_graph[(2, 4), (-4, -1)] = 'desert'
biome_graph[(2, 4), (0, 2)] = 'grass'
biome_graph[(2, 4), (3, 4)] = 'jungle'

#to do: make this a subclass of a pygame sprite to make use of its rect
#and image field when drawing the map. it would make animations easier, and,
#more importantly, possible
class tile(object):
    """A single map tile: climate values, the biome derived from them,
    passability flags, plus the items and actors on the tile."""

    def __init__(self, information=None):
        """With *information*, populate every field via write(); without
        it, initialise all fields to None/empty placeholders."""
        if information != None:
            self.write(information)
        else:
            self.temperature = None
            self.precipitation = None
            self.psych = None
            self.terrain_image_name = None
            self.terrain_type = None
            self.walkable = None
            self.occupied = None
            self.seethrough = None
            self.interactable = None
            self.items = []
            self.actors = []

    def write(self, information):
        """Fill the tile from a nested tuple:
        ((temperature, precipitation, psych),
         (walkable, occupied, seethrough, interactable),
         items, images).
        """
        self.temperature = information[0][0]
        self.precipitation = information[0][1]
        self.psych = information[0][2]
        for biome in biome_graph:
            # Fix: the second band is the biome's precipitation range, so it
            # must be tested against self.precipitation — previously it
            # re-tested self.temperature, mis-classifying most tiles.
            if biome[0][0] <= self.temperature <= biome[0][1] and\
               biome[1][0] <= self.precipitation <= biome[1][1]:
                self.terrain_type = biome_graph[biome]
                break
        # NOTE(review): if no band matches, terrain_type is left unset on
        # this code path (the None default is only assigned in __init__'s
        # no-information branch).
        self.walkable = information[1][0]
        self.occupied = information[1][1]
        self.seethrough = information[1][2]
        self.interactable = information[1][3]
        self.items = information[2]
        self.images = information[3]
class landscape(object):
    """The world map for one level: tiles keyed by (x, y) hex coordinates,
    per-chunk climate bookkeeping, plus the cities and portals on it."""

    def __init__(self, level):
        self.landscape = {}              # (x, y) -> tile
        self.landscape_chunk_mask = {}   # (chunk_x, chunk_y) -> (temp, precip, psych)
        self.landscape_size = None       # ((min_x, max_x), (min_y, max_y)) bounds
        self.cities = []
        self.portals = []
        self.level = level
        #traversed_location is where the player entered a portal
        self.traversed_location = None

    #center is actor to center map on. accounts for sight of actor.
    def __str__(self, center_actor=None):
        # ASCII rendering of the map.  With a center actor, only tiles
        # within its sight_distance are drawn; everything else is blanked.
        # Cell glyphs: portal-direction initial, 'b' building, 'c' city
        # chunk, 'a' occupied, '.' empty.
        if center_actor != None:
            #get tiles in sight distance of center_actor
            center_actor_position = center_actor.position
            print_me = ''
            for y in xrange(self.landscape_size[1][0], self.landscape_size[1][1]):
                if y & 1 == 1:
                    # Odd rows are shifted half a cell (hex layout).
                    print_me += ' '
                for x in xrange(self.landscape_size[0][0], self.landscape_size[0][1]):
                    chunk_location = (int(x / chunk_size), int(y / chunk_size))
                    distance_to_actor = common.hex_distance(center_actor_position, (x, y))
                    if distance_to_actor <= center_actor.sight_distance:
                        if self.get_portal_direction_at_location((x, y)) != None:
                            print_me += self.get_portal_direction_at_location((x, y))[0] + ' '
                        elif self.location_has_building((x, y)):
                            print_me += 'b '
                        elif self.chunk_in_city_bounds((chunk_location)):
                            print_me += 'c '
                        elif self.landscape[x, y].occupied:
                            print_me += 'a '
                        else:
                            print_me += '. '
                    else:
                        print_me += '  '
                print_me += '\n'
            print_me += '\n'
        else:
            # No center actor: render the whole map without a sight check.
            print_me = ''
            for y in xrange(self.landscape_size[1][0], self.landscape_size[1][1]):
                if y & 1 == 1:
                    print_me += ' '
                for x in xrange(self.landscape_size[0][0], self.landscape_size[0][1]):
                    chunk_location = (int(x / chunk_size), int(y / chunk_size))
                    if self.get_portal_direction_at_location((x, y)) != None:
                        print_me += self.get_portal_direction_at_location((x, y))[0] + ' '
                    elif self.location_has_building((x, y)):
                        print_me += 'b '
                    elif self.chunk_in_city_bounds((chunk_location)):
                        print_me += 'c '
                    elif self.landscape[x, y].occupied:
                        print_me += 'a '
                    else:
                        print_me += '. '
                print_me += '\n'
            print_me += '\n'
        return print_me

    def __repr__(self):
        return self.landscape.__str__()

    def chunk_in_city_bounds(self, this_chunk):
        """True if *this_chunk* belongs to any city's footprint."""
        for this_city in self.cities:
            if this_chunk in this_city.chunk_locations:
                return True
        return False

    def location_has_building(self, this_location):
        """True if a city building sits exactly at *this_location*."""
        this_chunk = (int(this_location[0] / chunk_size), int(this_location[1] / chunk_size))
        for this_city in self.cities:
            if this_chunk in this_city.chunk_locations:
                for this_building in this_city.buildings:
                    if this_building.location == this_location:
                        return True
        return False

    def get_portal_direction_at_location(self, location):
        """Direction string of the portal at *location*, or None."""
        for this_portal in self.portals:
            if this_portal.location == location:
                return this_portal.direction
        return None

    def has_actor(self, this_actor):
        """True if *this_actor* stands on any tile of this map."""
        for this_tile in self.landscape:
            if this_actor in self.landscape[this_tile].actors:
                return True
        return False

    def print_actors(self):
        # Debug dump of every actor and its tile (Python 2 print statement).
        print 'land'
        for this_tile in self.landscape:
            for this_actor in self.landscape[this_tile].actors:
                print this_tile
                print this_actor

    def remove_actor(self, actor_id):
        """Remove the first actor whose id_number matches *actor_id*."""
        do_remove = False
        for location in self.landscape:
            for each_actor in self.landscape[location].actors:
                if each_actor.id_number == actor_id:
                    do_remove = True
                    break
            if do_remove:
                # Relies on each_actor leaking out of the inner for loop.
                self.landscape[location].actors.remove(each_actor)
                break

    ###################################
    # pathfinding and related goodies #
    ###################################
    def is_open_path(p0, p1):
        # NOTE(review): missing the *self* parameter — called as a method
        # this binds the instance to p0 and leaves `self` unresolved in the
        # body. Looks unfinished; confirm before use.
        #if a path is found from p0 to p1 that is equal to the distance of the
        #points, then there is an open path.
        return len(self.pathfind(p0, p1)) - 1 == common.hex_distance(p0, p1)

    #a* based
    def pathfind(self, p0, p1):
        """Greedy best-first search from p0 to p1 over walkable, unoccupied
        tiles. Returns the point list from p0 to p1 inclusive, or [] when
        no path is found.

        NOTE(review): open_list only ever grows, so the while condition can
        only be left via break; backtracking reads current_path[-1] without
        popping it, so repeated dead ends may revisit the same point.
        """
        open_list = [p0]
        closed_list = []
        closed_list.append(p0)
        current_point = p0
        current_path = []
        while len(open_list) != 0:
            closed_list.append(current_point)
            adjacent_points = common.get_adjacent(current_point)
            #append adjacent points that aren't in open
            for adjacent_point in adjacent_points:
                # NOTE(review): `and` binds tighter than `or`, so the goal
                # point bypasses the closed/occupied/walkable checks — that
                # matches "always allow reaching p1" but verify intent.
                if adjacent_point not in closed_list and\
                   not self.landscape[adjacent_point].occupied and\
                   self.landscape[adjacent_point].walkable or\
                   adjacent_point == p1:
                    open_list.append(adjacent_point)
            #get best f of adjacent
            best_f = None
            best_adjacent = None
            for adjacent_point in adjacent_points:
                if adjacent_point in open_list:
                    # f is the heuristic only (hex distance to the goal).
                    f_score = common.hex_distance(adjacent_point, p1)
                    if best_f == None or f_score < best_f:
                        best_f = f_score
                        best_adjacent = adjacent_point
            if best_f == None:
                if len(current_path) == 0:
                    #not possible to find path
                    path_found = False
                    break
                else:
                    # Dead end: back up one step along the path so far.
                    closed_list.append(current_point)
                    current_point = current_path[-1]
            elif best_adjacent == p1:
                current_path.append(current_point)
                path_found = True
                break
            else:
                #one more point towards the end (hopefully)
                current_path.append(current_point)
                current_point = best_adjacent
        if path_found:
            current_path.append(p1)
            return current_path
        else:
            return []
#each city has 3-4 (or more?) chunks designated to its boundaries.
#its building placements are stored. its terrain is not stored.
#building types: civilian (questing), item shop, trainers (each city has
#1-2 trainer types for skills (fighting/blocking/healing))
class city(object):
    """A city's footprint on the map: the chunks it occupies plus the
    buildings placed inside it."""

    def __init__(self, chunk_locations):
        #chunk_location is in format: ((x, y), (x, y), (x, y)...)
        self.chunk_locations = chunk_locations
        self.buildings = []
# The three kinds of city building an NPC can inhabit.
building_type_list = ['civilian', 'item', 'instructor']

class building(object):
    """A single city building with per-role NPC lists and a map location."""

    def __init__(self, location):
        self.civilian_list = []
        self.item_list = []
        self.instructor_list = []
        self.this_type = None
        self.location = location

    def populate(self, this_type=None):
        """Pick the building's type: *this_type* when given, otherwise a
        random entry from building_type_list.

        NOTE(review): the chosen value is bound to a local and then
        discarded — self.this_type is never updated, which looks
        unintended.
        """
        if this_type == None:
            building_type = building_type_list[random.randint(0, 2)]
        else:
            building_type = this_type
class portal(object):
    """A portal on the map: its tile location and travel direction."""

    def __init__(self, location, direction):
        self.location = location
        self.direction = direction
def make_terrain_test(scheme):
    """Build a small hand-crafted landscape for testing.

    Only 'basic_grass' is implemented: a 5x5 walkable map with one
    generated chunk, two three-building cities (one near the origin, one
    far outside the generated area) and a single portal.  Any other
    *scheme* returns None.
    """
    #10x15 map
    if scheme == 'basic_grass':
        this_landscape = landscape(1)
        this_landscape.landscape_size = ((0, 5), (0, 5))
        for x in xrange(5):
            for y in xrange(5):
                # climate (0,0,0); walkable, unoccupied, see-through, not interactable
                this_tile = tile(((0, 0, 0), [True, False, True, False], [], []))
                this_landscape.landscape[x, y] = this_tile
        for x in xrange(1):
            for y in xrange(1):
                # Mark only chunk (0, 0) as generated.
                this_landscape.landscape_chunk_mask[x, y] = (0, 0, 0)
        #generate 2 cities: in range, and out of range
        add_this_city = city(((1, 1), (2, 1), (1, 2), (2, 2)))
        add_this_building = building((5, 5))
        add_this_building.populate('civilian')
        add_this_city.buildings.append(add_this_building)
        add_this_building = building((6, 5))
        add_this_building.populate('item')
        add_this_city.buildings.append(add_this_building)
        add_this_building = building((7, 5))
        add_this_building.populate('instructor')
        add_this_city.buildings.append(add_this_building)
        this_landscape.cities.append(add_this_city)
        # Far city: chunks and buildings outside the generated area.
        add_this_city = city(((-4, 1), (-4, 0), (-5, 1), (-5, 0)))
        add_this_building = building((-20, 0))
        add_this_building.populate('civilian')
        add_this_city.buildings.append(add_this_building)
        add_this_building = building((-19, 0))
        add_this_building.populate('item')
        add_this_city.buildings.append(add_this_building)
        add_this_building = building((-18, 0))
        add_this_building.populate('instructor')
        add_this_city.buildings.append(add_this_building)
        this_landscape.cities.append(add_this_city)
        add_this_portal = portal((2, 0), 'up')
        this_landscape.portals.append(add_this_portal)
        return this_landscape
#get ungenerated in given distance
def find_immediate_ungenerated(
        this_landscape,
        center_position,
        given_distance):
    """Return the set of chunk coordinates within *given_distance* chunks
    (Chebyshev box) of *center_position* (a tile position) that have not
    yet been generated."""
    #if surrounding check_distance chunks in each direction are not generated,
    #generate them.
    # Python 2: `/` on ints is integer division here.
    center_chunk_position = (
        center_position[0] / chunk_size,
        center_position[1] / chunk_size)
    neg_x = center_chunk_position[0] - given_distance
    neg_y = center_chunk_position[1] - given_distance
    pos_x = center_chunk_position[0] + given_distance
    pos_y = center_chunk_position[1] + given_distance
    #determine generated
    generated_chunks = set(this_landscape.landscape_chunk_mask.keys())
    all_check_chunks = set()
    for x in xrange(neg_x, pos_x + 1):
        for y in xrange(neg_y, pos_y + 1):
            all_check_chunks.add((x, y))
    #and ungenerated
    ungenerated_chunks = all_check_chunks - generated_chunks
    return ungenerated_chunks
#ignorant of checking, assumes that terrain must be generated.
#assumes ungenerated_chunks are all the ungenerated_chunks to be generated
def extend_map_using_ungenerated(
        this_landscape,
        ungenerated_chunks):
    """Generate every chunk in *ungenerated_chunks* (the set is consumed),
    most-connected chunks first, then return the new landscape bounds as
    ((min_x, max_x + 1), (min_y, max_y + 1))."""
    ##################################
    # determine shape of ungenerated #
    ##################################
    #if it touches an ungenerated edge, generate from generated part
    #cases:
    #1. if 3 sides are ungenerated and one is,
    #generate from what is generated
    #2. if all 4 sides are generated and middle is ungenerated,
    #generate from random piece of generated territory.
    #if a chunk touches this many chunks or more (less than or equal
    #to 0 chunks), generate that chunk first
    generate_for_touch = 4
    while len(ungenerated_chunks) > 0:
        # Chunks whose count of already-generated 4-neighbours meets the
        # current threshold are generated this round.
        touching_threshold = []
        for ungenerated_chunk in ungenerated_chunks:
            border_chunk_positions = set([(ungenerated_chunk[0], ungenerated_chunk[1] - 1),
                                          (ungenerated_chunk[0], ungenerated_chunk[1] + 1),
                                          (ungenerated_chunk[0] - 1, ungenerated_chunk[1]),
                                          (ungenerated_chunk[0] + 1, ungenerated_chunk[1])])
            bordering = 0
            for border_chunk in border_chunk_positions:
                if border_chunk in this_landscape.landscape_chunk_mask:
                    bordering += 1
            if 4 >= bordering >= generate_for_touch:
                touching_threshold.append(ungenerated_chunk)
        for ungenerated_chunk in touching_threshold:
            generate_chunk(
                this_landscape,
                ungenerated_chunk)
            ungenerated_chunks.remove(ungenerated_chunk)
        if len(touching_threshold) == 0:
            # Nothing met the bar this round: relax the threshold.
            generate_for_touch -= 1
        else:
            generate_for_touch = 4
    # Recompute the bounding box of every tile now on the map.
    # Python 2: dict.keys() returns an indexable list.
    min_x = this_landscape.landscape.keys()[0][0]
    max_x = this_landscape.landscape.keys()[0][0]
    min_y = this_landscape.landscape.keys()[0][1]
    max_y = this_landscape.landscape.keys()[0][1]
    for position in this_landscape.landscape.keys():
        if min_x > position[0]:
            min_x = position[0]
        if max_x < position[0]:
            max_x = position[0]
        if min_y > position[1]:
            min_y = position[1]
        if max_y < position[1]:
            max_y = position[1]
    return ((min_x, max_x + 1), (min_y, max_y + 1))
#assumes actor's position, not in chunk form
def delete_map_at_position(
        this_landscape,
        position):
    """Drop every tile and chunk outside the retention box around
    *position* (a tile position); return True when any tile was removed."""
    chunk_position = (int(position[0] / chunk_size), int(position[1] / chunk_size))
    safe_chunk_positions = []
    # NOTE(review): the box uses do_chunk_delete_distance on the low side
    # but do_chunk_generate_distance on the high side — asymmetric if the
    # two module constants ever diverge (both are 3 today).
    for x_position in xrange(chunk_position[0] - do_chunk_delete_distance, chunk_position[0] + do_chunk_generate_distance + 1):
        for y_position in xrange(chunk_position[1] - do_chunk_delete_distance, chunk_position[1] + do_chunk_generate_distance + 1):
            safe_chunk_positions.append((x_position, y_position))
    retract_changed = False
    # Python 2: .keys() materialises a list, so deleting while iterating
    # over it is safe here.
    for map_position in this_landscape.landscape.keys():
        if (int(map_position[0] / chunk_size), int(map_position[1] / chunk_size)) not in safe_chunk_positions:
            del this_landscape.landscape[map_position]
            retract_changed = True
    for chunk_position in this_landscape.landscape_chunk_mask.keys():
        if chunk_position not in safe_chunk_positions:
            del this_landscape.landscape_chunk_mask[chunk_position]
    return retract_changed
def _maybe_mutate_field(values, rand_max, threshold):
    """Randomly nudge one field of *values* in place by +/-1, clamped to [-4, 4].

    A field index (randint(0, 2)) and a roll (randint(0, rand_max)) are always
    drawn -- in that order -- so the module-level random sequence matches the
    previously inlined code exactly; the nudge is only applied when the roll
    is below *threshold*.
    """
    change_field = random.randint(0, 2)
    random_change = random.randint(0, rand_max)
    if random_change < threshold:
        change = 0
        while change == 0:
            change = random.randint(-1, 1)
        new_value = values[change_field] + change
        if -4 <= new_value <= 4:
            values[change_field] = new_value

def generate_chunk(
    this_landscape,
    chunk_position):
    """Generate the terrain of one chunk from its already-generated neighbours.

    Averages the climate values of the generated chunks bordering
    *chunk_position* (with a drift chance that grows as the number of
    neighbours shrinks), records the result in landscape_chunk_mask, updates
    landscape_chunk_bounds as a side effect, and fills in every tile of the
    chunk with an individually micro-randomized copy of the chunk climate.
    """
    # The four orthogonal neighbours of this chunk.
    border_chunk_positions = set([(chunk_position[0], chunk_position[1] - 1),
        (chunk_position[0], chunk_position[1] + 1),
        (chunk_position[0] - 1, chunk_position[1]),
        (chunk_position[0] + 1, chunk_position[1])])
    touching_chunks = 0
    new_temp = 0
    new_precip = 0
    new_whatever = 0
    # Sum the climate of every neighbour that has already been generated.
    for border_chunk in border_chunk_positions:
        if border_chunk in this_landscape.landscape_chunk_mask:
            touching_chunks += 1
            new_temp += this_landscape.landscape_chunk_mask[border_chunk][0]
            new_precip += this_landscape.landscape_chunk_mask[border_chunk][1]
            new_whatever += this_landscape.landscape_chunk_mask[border_chunk][2]
    # Average the neighbouring climates (Python 2 integer division, as
    # before).  The fewer the neighbours, the higher the mutation chance.
    if touching_chunks == 4:
        new_chunk = [
            new_temp / 4,
            new_precip / 4,
            new_whatever / 4]
    elif touching_chunks == 3:
        new_chunk = [
            new_temp / 3,
            new_precip / 3,
            new_whatever / 3]
        _maybe_mutate_field(new_chunk, 9, 1)
    elif touching_chunks == 2:
        new_chunk = [
            new_temp / 2,
            new_precip / 2,
            new_whatever / 2]
        _maybe_mutate_field(new_chunk, 10, 3)
    elif touching_chunks == 1:
        new_chunk = [
            new_temp,
            new_precip,
            new_whatever]
        _maybe_mutate_field(new_chunk, 10, 5)
    else:
        # Isolated chunk: neutral climate.
        new_chunk = [0, 0, 0]
    this_landscape.landscape_chunk_mask[chunk_position] = tuple(new_chunk)
    # Side effect kept from the original code: record this chunk's tile bounds.
    this_landscape.landscape_chunk_bounds = (
        (chunk_position[0] * chunk_size,
        chunk_position[0] * chunk_size + chunk_size - 1),
        (chunk_position[1] * chunk_size,
        chunk_position[1] * chunk_size + chunk_size - 1)
        )
    for x in xrange(this_landscape.landscape_chunk_bounds[0][0], this_landscape.landscape_chunk_bounds[0][1] + 1):
        for y in xrange(this_landscape.landscape_chunk_bounds[1][0], this_landscape.landscape_chunk_bounds[1][1] + 1):
            new_tile = list(new_chunk)
            # Per-tile micro-randomization of the chunk climate.
            _maybe_mutate_field(new_tile, 10, 2)
            #map values:
            #(
            #('type', temp, precip, ??),
            #[walkable, empty, see-through, interactable],
            #[items],
            #[images in order of layering]
            #)
            this_landscape.landscape[x, y] = tile(((new_tile[0], new_tile[1], new_tile[2]), [True, False, True, False], [], []))
#post processing to add land features (cities, doodads, etc)
|
UTF-8
|
Python
| false | false | 2,014 |
13,271,448,991,015 |
5170a3e1e5ae3ff1a5b8f0b23ecfa99859f8cf61
|
c8efe88dac7b5e49c01222069e86882b553fe6bb
|
/reaction_centre_QHE/plots/scripts/highest_exciton_transfer.py
|
cbde43a9f0328283dfaa142c1ea0b7608d9f44aa
|
[] |
no_license
|
rstones/reaction_centre_QHE
|
https://github.com/rstones/reaction_centre_QHE
|
3c71d01471d8a7a6cc20de30e596f2a3f89f5077
|
9f7641d253acf00b5a2598a895568d2fac649ec6
|
refs/heads/master
| 2016-08-31T07:25:46.830592 | 2014-11-07T11:20:37 | 2014-11-07T11:20:37 | 11,796,222 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
current_scale = 1.24e-6

# One entry per curve, in load/plot order:
# (npz file, voltage key, current key, F2 key, current divisor or None, label)
_series = [
    ('../../data/highest_exciton_transfer4_data.npz',
     'ideal_voltage_values', 'ideal_current_values', 'F2',
     None, '4 level system'),
    ('../../data/highest_exciton_transfer5_data.npz',
     'ideal_voltage_values', 'ideal_current_values', 'F2',
     None, '4 level system 2'),
    ('../../data/ideal_case_coupling_data.npz',
     'ideal_voltage_values', 'ideal_current_values', 'F2',
     None, '5LS, J != 0'),
    ('../../data/ideal_case_coupling_data2.npz',
     'ideal_voltage_values', 'ideal_current_values', 'F2',
     None, '5LS 2, J != 0'),
    ('../../data/disorder_no_coupling_data.npz',
     'no_coupling_voltage_values', 'no_coupling_current_values',
     'no_coupling_F2', current_scale, '5LS, J = 0'),
]

# Load every dataset up front, rescaling the current where a divisor is given
# (only the no-coupling data is expressed in amps and needs e*gamma_h units).
_curves = []
for _path, _v_key, _c_key, _f_key, _divisor, _label in _series:
    _data = np.load(_path)
    _current = _data[_c_key]
    if _divisor is not None:
        _current = _current / _divisor
    _curves.append((_data[_v_key], _current, _data[_f_key], _label))

# Left panel: current-voltage characteristics.
plt.subplot(121)
for _voltage, _current, _f2, _label in _curves:
    plt.plot(_voltage, _current, label=_label)
plt.ylabel(r'$j / (e \gamma_h)$')
plt.xlabel("Voltage (V)")
plt.xlim(1.1,1.7)
plt.legend()

# Right panel: zero-frequency Fano factor.
plt.subplot(122)
for _voltage, _current, _f2, _label in _curves:
    plt.plot(_voltage, _f2, label=_label)
plt.ylabel("F2(0)")
plt.xlabel("Voltage (V)")
plt.xlim(1.1,1.7)
plt.legend()
plt.show()
|
UTF-8
|
Python
| false | false | 2,014 |
16,432,544,892,925 |
b6cecd76f28fc7bb382a568d9d594f4a7001d402
|
1e4959383b3b2afcaf30e3a532457c4db17ade8d
|
/main.py
|
667208c8e2abc3497e6500f3ed9d0d58e485b736
|
[] |
no_license
|
alokdidde/mongster
|
https://github.com/alokdidde/mongster
|
7ee76473f9e6eeaf72cd32d973027fc68815e2c5
|
00d1fcbf6c5a4b2edc5c17c863e85879ac0bf9ef
|
refs/heads/master
| 2021-01-20T07:02:53.964382 | 2013-10-09T17:48:50 | 2013-10-09T17:48:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 08-Oct-2013
@author: Alok
'''
import sys
from PyQt4 import QtGui
from gui import window
def main():
    """Create the Qt application, show the mongster window and run its loop."""
    application = QtGui.QApplication(sys.argv)
    mongster_window = window.Window()
    mongster_window.initUI()
    # exec_() blocks until the GUI exits; propagate its status code.
    sys.exit(application.exec_())

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
10,591,389,373,897 |
0ecd3923250e87330ce118ebd5e8effae96b2a77
|
2ef7fea6e1ef73ecaf1d1a61384c0939193a7da4
|
/docs/models.py
|
4aba9c761a6ecae33549668eb08d00d7803a5db6
|
[] |
no_license
|
keynes-foss/key-ness
|
https://github.com/keynes-foss/key-ness
|
af9e4f440c80b102a3fcef2e21db62a20b107e7f
|
a753d20de14f0bf0ade1350fb856dea0278cdd28
|
refs/heads/master
| 2021-03-12T22:55:29.949346 | 2011-11-09T10:38:44 | 2011-11-09T10:38:44 | 1,604,442 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from organization.models import *
# (Comment translated from Italian: "Tell me this works")
class Document(models.Model):
    """An uploaded file plus its metadata, access flags and tags."""
    uploader = models.ForeignKey(User)
    description = models.TextField()
    content = models.TextField()
    filename = models.TextField()
    mime_type = models.TextField()
    private = models.BooleanField(default = True)
    protected = models.BooleanField(default = True)
    # Creation timestamp: set exactly once (was auto_now, which re-stamps on
    # every save and therefore never preserved the upload time).
    uploaded_at = models.DateTimeField(auto_now_add = True, editable = False)
    # Modification timestamp: re-stamped on every save.  The original
    # ``auto_now_save`` is not a valid DateTimeField option and raised
    # TypeError as soon as this module was imported.
    last_modified = models.DateTimeField(auto_now = True, editable = False)
    tags = models.ManyToManyField(OrganizationTag)

    def __unicode__(self):
        return self.filename
class DocumentComment(models.Model):
    """A user-authored comment attached to a Document."""
    document = models.ForeignKey(Document)
    author = models.ForeignKey(User)
    content = models.TextField()
    # NOTE(review): auto_now re-stamps on every save; if this is meant to be
    # the comment's creation time, auto_now_add would be the usual choice --
    # confirm intent before changing.
    date = models.DateTimeField(auto_now=True)
|
UTF-8
|
Python
| false | false | 2,011 |
5,703,716,610,866 |
b48990fa9920003076b20bc0939f0b53ccb36629
|
1a831db46a43781b4239d9eefd00696f9a0d5447
|
/ninja_ide/tools/introspection.py
|
6f5ba13f3ec91bf947eea9039909a2d399e80270
|
[
"GPL-3.0-only"
] |
non_permissive
|
eteq/ninja-ide
|
https://github.com/eteq/ninja-ide
|
1543fc2062b63df86e186f0085b12e0171fa66f3
|
66c83f54b9a8f0ce8c7b030572ee05d3f16b87f9
|
refs/heads/master
| 2021-01-18T07:49:19.834991 | 2012-04-03T20:42:59 | 2012-04-03T20:42:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import ast
import logging
from ninja_ide.tools.completion import analyzer
logger_imports = logging.getLogger(
'ninja_ide.tools.introspection.obtaining_imports')
logger_symbols = logging.getLogger(
'ninja_ide.tools.introspection.obtainint_symbols')
_FILE_CONTENT = None
def _parse_assign(symbol):
assigns = {}
attributes = {}
for var in symbol.targets:
if var.__class__ == ast.Attribute:
attributes[var.attr] = var.lineno
elif var.__class__ == ast.Name:
assigns[var.id] = var.lineno
return (assigns, attributes)
def _parse_class(symbol, with_docstrings):
    """Summarize an ast.ClassDef node.

    Returns {'name': 'Class(bases)', 'attributes': {...}, 'functions': {...},
    'lineno': int, 'docstring': str}; when *with_docstrings* is true each
    function maps to a (lineno, docstring) pair instead of a bare lineno.
    """
    docstring = ''
    attributes = {}
    methods = {}
    # Display name: the class name followed by its expanded base classes.
    bases = ', '.join([analyzer.expand_attribute(base)
                       for base in symbol.bases])
    display_name = '%s(%s)' % (symbol.name, bases)
    for child in symbol.body:
        child_class = child.__class__
        if child_class is ast.Assign:
            names, attrs = _parse_assign(child)
            attributes.update(names)
            attributes.update(attrs)
        elif child_class is ast.FunctionDef:
            info = _parse_function(child, with_docstrings)
            # self.x assignments inside methods also count as attributes.
            attributes.update(info['attrs'])
            if with_docstrings:
                methods[info['name']] = (info['lineno'], info['docstring'])
            else:
                methods[info['name']] = info['lineno']
    if with_docstrings:
        docstring = ast.get_docstring(symbol, clean=True)
    return {'name': display_name, 'attributes': attributes,
        'functions': methods, 'lineno': symbol.lineno,
        'docstring': docstring}
def _parse_function(symbol, with_docstrings):
    """Summarize an ast.FunctionDef node.

    The displayed name is recovered from the raw source lines (module global
    _FILE_CONTENT, set by obtain_symbols) so the full signature -- possibly
    spanning several continuation lines -- is preserved; it falls back to
    "<name>()" when no 'def' is found on the line.
    Returns {'name', 'lineno', 'attrs', 'docstring'}.
    """
    docstring = ''
    attrs = {}
    global _FILE_CONTENT
    line_pos = symbol.lineno - 1
    line = _FILE_CONTENT[line_pos]
    # NOTE(review): find('def') matches the substring anywhere in the line,
    # not just the keyword -- verify this cannot misfire on code like
    # 'undefined' appearing before the def.
    index = line.find('def')
    if index != -1:
        func_name = line[index + 3:].strip()
        line_pos += 1
        # Append continuation lines until the signature's closing ':' appears
        # (or the file runs out).
        while not func_name.endswith(':') and (len(_FILE_CONTENT) > line_pos):
            func_name += ' ' + _FILE_CONTENT[line_pos].strip()
            line_pos += 1
        # Drop the trailing ':' and any explicit backslash continuations.
        func_name = func_name[:-1]
        func_name = func_name.replace('\\', '')
    else:
        func_name = symbol.name + '()'
    # Attribute assignments (self.x = ...) directly in the body feed 'attrs'.
    for sym in symbol.body:
        if sym.__class__ is ast.Assign:
            result = _parse_assign(sym)
            attrs.update(result[1])
    if with_docstrings:
        docstring = ast.get_docstring(symbol, clean=True)
    return {'name': func_name, 'lineno': symbol.lineno,
        'attrs': attrs, 'docstring': docstring}
def obtain_symbols(source, with_docstrings=False):
    """Parse a module source code to obtain: Classes, Functions and Assigns.

    Returns a dict with (at most) the keys 'attributes', 'functions',
    'classes' and 'docstrings'; sections that would be empty are omitted.
    An empty dict is returned when *source* cannot be parsed.
    """
    global _FILE_CONTENT
    try:
        module = ast.parse(source)
        _FILE_CONTENT = source.splitlines()
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; parse failures still just log and bail.
        logger_symbols.debug("A file contains syntax errors.")
        return {}
    symbols = {}
    globalAttributes = {}
    globalFunctions = {}
    classes = {}
    docstrings = {}
    for symbol in module.body:
        if symbol.__class__ is ast.Assign:
            result = _parse_assign(symbol)
            # Plain names and attribute targets both count as attributes.
            globalAttributes.update(result[0])
            globalAttributes.update(result[1])
        elif symbol.__class__ is ast.FunctionDef:
            result = _parse_function(symbol, with_docstrings)
            # The name->lineno mapping is identical in both modes; only the
            # docstring bookkeeping depends on with_docstrings (the original
            # duplicated this assignment across both branches).
            globalFunctions[result['name']] = result['lineno']
            if with_docstrings:
                docstrings[result['lineno']] = result['docstring']
        elif symbol.__class__ is ast.ClassDef:
            result = _parse_class(symbol, with_docstrings)
            classes[result['name']] = (result['lineno'],
                {'attributes': result['attributes'],
                'functions': result['functions']})
            docstrings[result['lineno']] = result['docstring']
    if globalAttributes:
        symbols['attributes'] = globalAttributes
    if globalFunctions:
        symbols['functions'] = globalFunctions
    if classes:
        symbols['classes'] = classes
    if docstrings and with_docstrings:
        symbols['docstrings'] = docstrings
    _FILE_CONTENT = None
    return symbols
def obtain_imports(source='', body=None):
    """Collect the import statements of a module.

    Accepts either raw *source* (parsed here) or an already-parsed *body*
    (list of ast nodes).  Returns::

        {'imports':     {name: {'asname': ..., 'lineno': ...}},
         'fromImports': {name: {'module': ..., 'asname': ..., 'lineno': ...}}}

    Both sections are empty when the source cannot be parsed (the original
    code crashed with TypeError iterating ``body=None`` in that case).
    """
    if source:
        try:
            module = ast.parse(source)
            body = module.body
        except Exception:
            # Narrowed from a bare ``except:``; log and fall through.
            logger_imports.debug("A file contains syntax errors.")
    if body is None:
        # Parse failure, or called with neither argument: nothing to scan.
        return {'imports': {}, 'fromImports': {}}
    imports = {}
    fromImports = {}
    for sym in body:
        if type(sym) is ast.Import:
            for item in sym.names:
                imports[item.name] = {'asname': item.asname,
                    'lineno': sym.lineno}
        if type(sym) is ast.ImportFrom:
            for item in sym.names:
                fromImports[item.name] = {'module': sym.module,
                    'asname': item.asname, 'lineno': sym.lineno}
    return {'imports': imports, 'fromImports': fromImports}
|
UTF-8
|
Python
| false | false | 2,012 |
2,422,361,606,273 |
43f56a2fc5a3068d84176addd4ef5f5cdfc5a319
|
cbd2b5c259ed66aa9ce36a928bac1eb19bc4e5ee
|
/everbook_dbus.py
|
8631156538c96f1cd0d44156017da06cb1de8c9b
|
[] |
no_license
|
jyeh/everbook
|
https://github.com/jyeh/everbook
|
47988cb244851b760f8868e5642cce41aac811d3
|
3b9fc59a56b9de507c5793df785e3f4569285f4b
|
refs/heads/master
| 2015-07-31T05:37:03 | 2012-06-08T00:45:25 | 2012-06-08T00:45:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import dbus
import gobject
from dbus.mainloop.glib import DBusGMainLoop
from pydispatch import dispatcher
UDiskPath = '/org/freedesktop/UDisks'
UDiskDevicesPath = '/org/freedesktop/UDisks/devices'
UDiskClass = 'org.freedesktop.UDisks'
UDiskInterfaceStr = 'org.freedesktop.UDisks'
UDiskDeviceInterfaceStr = 'org.freedesktop.UDisks.Device'
# Class which deals with DBus
class DbusMgr:
    """Watches UDisks on the system D-Bus for supported e-book readers being
    plugged in or removed, and broadcasts pydispatch signals about them."""

    # pydispatch signal names emitted on device connect/disconnect.
    DEVICE_CONNECT_SIG = 'SUPPORTED_DEVICE_CONNECTED'
    DEVICE_DISCONNECT_SIG = 'SUPPORTED_DEVICE_DISCONNECTED'

    def __init__(self):
        """Connect to UDisks, hook DeviceAdded/DeviceRemoved, and enumerate
        the devices that are already present."""
        # Drive models this application supports.
        self.supportDeviceList = ['PRS-T1']
        self.bus = dbus.SystemBus(mainloop=DBusGMainLoop())
        self.udManagerObj = self.bus.get_object(UDiskInterfaceStr, "/org/freedesktop/UDisks")
        self.udManager = dbus.Interface(self.udManagerObj, 'org.freedesktop.UDisks')
        self.udManagerObj.connect_to_signal('DeviceAdded', self.addHandler, dbus_interface=UDiskInterfaceStr)
        self.udManagerObj.connect_to_signal('DeviceRemoved', self.removeHandler, dbus_interface=UDiskInterfaceStr)
        self.enumerateDevices()

    def getDeviceInfo(self, device_props):
        """Extract vendor/model/serial/size/mount-path properties from a
        UDisks device properties interface into a plain dict."""
        print device_props.Get('org.freedesktop.UDisks.Device', "DriveVendor"), " ", device_props.Get('org.freedesktop.UDisks.Device', "DriveModel"), " supported"
        value = {}
        value['DriveVendor'] = device_props.Get('org.freedesktop.UDisks.Device', "DriveVendor")
        value['DriveModel'] = device_props.Get('org.freedesktop.UDisks.Device', "DriveModel")
        value['DriveSerial'] = device_props.Get('org.freedesktop.UDisks.Device', "DriveSerial")
        value['PartitionSize'] = device_props.Get('org.freedesktop.UDisks.Device', "PartitionSize")
        value['mounted_paths'] = device_props.Get('org.freedesktop.UDisks.Device', "DeviceMountPaths")
        return value

    def addHandler(self, args):
        """DeviceAdded callback: record supported devices and broadcast
        DEVICE_CONNECT_SIG with their info."""
        dev = args
        device_obj = self.bus.get_object("org.freedesktop.UDisks", dev)
        device_props = dbus.Interface(device_obj, dbus.PROPERTIES_IFACE)
        device_model = device_props.Get('org.freedesktop.UDisks.Device', "DriveModel")
        print "JYY addHandler receive model ", device_model
        if self.deviceSupported(device_model):
            device_info = self.getDeviceInfo(device_props)
            self.devices.append(device_info)
            dispatcher.send(signal = DbusMgr.DEVICE_CONNECT_SIG, sender = self, path = device_info)
        else:
            print device_props.Get('org.freedesktop.UDisks.Device', "DriveVendor"), " ", device_props.Get('org.freedesktop.UDisks.Device', "DriveModel"), " just plugged is not supported"

    def removeHandler(self, args):
        """DeviceRemoved callback: broadcast DEVICE_DISCONNECT_SIG with the
        removed object path (no supported-model filtering here)."""
        dispatcher.send(signal = DbusMgr.DEVICE_DISCONNECT_SIG, sender = self, path=args.__str__())

    def deviceSupported(self, model):
        """Return True when *model* is one of the supported drive models."""
        return model in self.supportDeviceList

    # Get a list of supported devices, use mount point as key, and properties as value
    def enumerateDevices(self):
        """Scan all UDisks devices, remember the supported ones in
        self.devices and broadcast DEVICE_CONNECT_SIG for each."""
        self.devices = []
        for dev in self.udManager.EnumerateDevices():
            print "dev = ", dev
            device_obj = self.bus.get_object("org.freedesktop.UDisks", dev)
            device_props = dbus.Interface(device_obj, dbus.PROPERTIES_IFACE)
            if self.deviceSupported(device_props.Get('org.freedesktop.UDisks.Device', "DriveModel")):
                print device_props.Get('org.freedesktop.UDisks.Device', "DriveVendor"), " ", device_props.Get('org.freedesktop.UDisks.Device', "DriveModel"), " supported"
                device_info = self.getDeviceInfo(device_props)
                self.devices.append(device_info)
                print "JYY device_info = ", device_info
                dispatcher.send(signal = DbusMgr.DEVICE_CONNECT_SIG, sender = self, **device_info)
            else:
                print device_props.Get('org.freedesktop.UDisks.Device', "DriveVendor"), " ", device_props.Get('org.freedesktop.UDisks.Device', "DriveModel"), " not supported"

    def getConnectedDevices(self):
        """Return the list of device-info dicts found so far."""
        return self.devices

    def wait_for_device(self):
        """Run the GLib main loop so D-Bus signals get delivered.

        NOTE(review): both branches below are identical -- presumably the
        ``else`` branch (devices already present) was meant to return without
        blocking; confirm intent before changing.
        """
        # Wait for a supported devices
        if len(self.devices) == 0:
            loop = gobject.MainLoop()
            loop.run()
        else:
            loop = gobject.MainLoop()
            loop.run()
|
UTF-8
|
Python
| false | false | 2,012 |
9,577,777,085,902 |
36fb8557903d4191532aec823db8b217b5912e45
|
2222da436df4f26f97d85316183295e8bc63cad4
|
/docker_scripts/docker_predict.py
|
5541fb4bd106aa8fc16dff102284b18e76bcdb03
|
[] |
no_license
|
stefanv/MLTP
|
https://github.com/stefanv/MLTP
|
1a567c4cb8ca12bc8d936aab8b57b51daa56a208
|
53a16084b0bf16ad3c0174f70abb38245b692721
|
refs/heads/master
| 2020-06-02T15:04:13.135331 | 2014-10-16T18:00:16 | 2014-10-16T18:00:16 | 25,461,928 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# docker_featurize.py
# to be run from INSIDE a docker container
import subprocess
import sys,os
sys.path.append("/home/mltp")
import custom_feature_tools as cft
import build_rf_model
import predict_class
from subprocess import Popen, PIPE, call
import cPickle
def predict():
    """Run class prediction from inside the Docker container.

    Loads the pickled call arguments from
    /home/mltp/copied_data_files/function_args.pkl, verifies that every file
    referenced there was actually copied into the container, calls
    predict_class.predict, and pickles the resulting dict to
    /tmp/<prediction_entry_key>_pred_results.pkl for the host to collect.

    Raises:
        Exception: when a required file is missing inside the container.
    """
    # load pickled ts_data and known features
    with open("/home/mltp/copied_data_files/function_args.pkl","rb") as f:
        function_args = cPickle.load(f)
    # ensure required files successfully copied into container:
    if "newpred_file_path" in function_args:
        newpred_file_path = str(function_args['newpred_file_path'])
        if os.path.isfile(newpred_file_path):
            pass
        else:
            raise Exception("ERROR - IN DOCKER CONTAINER predict - newpred_file_path = %s is not a file currently on disk."%newpred_file_path)
    else:
        raise Exception("ERROR - IN DOCKER CONTAINER predict - newpred_file_path not in function args.")
    # Optional custom-features script: only checked when set to a real value.
    if "custom_features_script" in function_args and function_args["custom_features_script"] not in [None,False,"None",""]:
        custom_features_script = str(function_args['custom_features_script'])
        if not os.path.isfile(custom_features_script):
            raise Exception("ERROR - (IN DOCKER CONTAINER) predict - custom_features_script = %s is not a file currently on disk."%custom_features_script)
    # Optional metadata file: same treatment.
    if "metadata_file" in function_args and function_args["metadata_file"] not in [None,False,"None",""]:
        metadata_file = str(function_args['metadata_file'])
        if not os.path.isfile(metadata_file):
            raise Exception("ERROR - (IN DOCKER CONTAINER) predict - metadata_file = %s is not a file currently on disk."%metadata_file)
    # NOTE(review): the raw function_args values (not the str()-converted
    # locals validated above) are forwarded here -- confirm intentional.
    results_dict = predict_class.predict(function_args["newpred_file_path"], function_args["model_name"], function_args["model_type"], featset_key=function_args["featset_key"], sepr=function_args["sep"], n_cols_html_table=function_args["n_cols_html_table"], features_already_extracted=function_args["features_already_extracted"], custom_features_script=function_args["custom_features_script"], metadata_file_path=function_args["metadata_file"], in_docker_container=True)
    with open("/tmp/%s_pred_results.pkl"%function_args["prediction_entry_key"], "wb") as f:
        cPickle.dump(results_dict, f)
    print "Done."
    return "Featurization and prediction complete."

if __name__=="__main__":
    results_str = predict()
    print results_str
|
UTF-8
|
Python
| false | false | 2,014 |
14,147,622,295,521 |
853984554f206562a37e11046d0b6a1bdeade699
|
0242b09c907aee7dff47e14b6e197689289bd824
|
/context/testurls.py
|
045be80289216a0e6c4ae0a0c009a2c3fd4bca21
|
[] |
no_license
|
mokelly2/specifyweb
|
https://github.com/mokelly2/specifyweb
|
dfc332f052aa6a5183998024441877ebc331281c
|
20279244d88d032625b125821f7fa02a31d9f132
|
refs/heads/master
| 2021-04-12T03:23:27.784769 | 2013-04-30T18:17:43 | 2013-04-30T18:17:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns
# URL routes used only by the context app's test suite; each pattern maps
# straight onto a stub view of the same name in context.testsviews.
# Order matters: Django resolves against the first matching pattern.
urlpatterns = patterns(
    'context.testsviews',
    (r'^collection/$', 'collection'),
    (r'^domain.json$', 'domain'),
    (r'^viewsets/(?P<level>\d+).xml$', 'viewsets'),
    (r'^schema_localization.json$', 'schema_localization'),
    (r'^app.resource$', 'app_resource'),
    (r'^available_related_searches.json$', 'available_related_searches'),
)
|
UTF-8
|
Python
| false | false | 2,013 |
13,262,859,042,068 |
351647575a2002c3235c0eb6f60534e5b04643d4
|
5b20918be71e1283956020ac9c30306d834fbe4a
|
/src/gui/InstallMediaWindow.py
|
7d8663632e66c8ded0cf27d6a7604080ebeb0a4a
|
[
"GPL-3.0-only"
] |
non_permissive
|
OlafRadicke/isar
|
https://github.com/OlafRadicke/isar
|
9dced9f1afedcb68b3e6f9b552ca61c188a9fd8c
|
0088a71082188a9590bc8db0edd7f8f7b848fb7e
|
refs/heads/master
| 2020-05-18T20:23:47.595625 | 2011-05-24T14:53:24 | 2011-05-24T14:53:24 | 1,560,093 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (C) 2011 by Olaf Radicke #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, see #
# http:#www.gnu.org/licenses/gpl.txt #
# #
###########################################################################
import sys
import logging
import sqlite3
import os.path
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import pyqtSlot
from VMinfoDB import VMinfoDB
from UserInfo import UserInfo
from GLOBALS import ICONDIR, FRAM_STYLE_SHEET
## @file installMediaWindow.py
# @author Olaf Radicke<[email protected]>
## The window that shows and edits info about installation ISOs.
class InstallMediaWindow(QtGui.QDialog):
    """Dialog listing installation ISO images stored in VMinfoDB; lets the
    user add, edit and delete the label/path entries."""

    ## Frame style
    __owneFramStyleSheet = FRAM_STYLE_SHEET
    ## Database binding.
    # NOTE(review): instantiated at class-definition time and shadowed by the
    # per-instance assignment in __init__ -- confirm the class-level instance
    # is really wanted.
    __vmInfoDB = VMinfoDB()
    ## Simple list widget (QTreeWidget once __init__ has run).
    listview = ""
    __isoPathName = ""
    ## path of install Media. Is a QLineEdit class.
    isoPathLineEdit = ""
    ## Button for selecting a path.
    isoPathPushButton = ""
    ## Save information.
    #vmInfoDB = VMinfoDB()

    ## Constructor: builds the whole dialog (list on the left, detail editor
    ## on the right, button row at the bottom) and fills the ISO list.
    def __init__(self, vmInfoDB, parent=None):
        logging.debug('init installMediaWindow....')
        self.__vmInfoDB = vmInfoDB
        QtGui.QDialog.__init__(self, parent)
        logging.debug('init installMediaWindow....')
        self.resize(800,480)
        self.setWindowTitle('Isar::Instalations medias')
        self.setStyleSheet(self.__owneFramStyleSheet)
        ## Main layout V
        vMainLayout = QtGui.QVBoxLayout()
        #centralWidget.setLayout(vMainLayout)
        self.setLayout(vMainLayout)
        ## Main layout H
        hMainLayout = QtGui.QHBoxLayout()
        vMainLayout.addLayout(hMainLayout)
        # ----------- Left box ---------------------------------
        # VBox left with GroupBox-frame
        listBox = QtGui.QGroupBox("list of install medias")
        listBox.setMaximumWidth(600)
        vListLayoutL = QtGui.QVBoxLayout()
        listBox.setLayout(vListLayoutL)
        hMainLayout.addWidget(listBox)
        # -------------- List --------------
        self.listview = QtGui.QTreeWidget()
        _haderList = ["ISOs"]
        self.listview.setColumnCount(len(_haderList))
        self.listview.setHeaderLabels(_haderList)
        vListLayoutL.addWidget(self.listview)
        # Selecting a row refreshes the detail editor on the right.
        self.connect \
            ( \
            self.listview, \
            QtCore.SIGNAL('itemSelectionChanged()'), \
            QtCore.SLOT('fillDetailView()') \
            )
        #self.connect(self.listview, QtCore.SIGNAL('clicked()'), QtCore.SLOT('fillDetailView()'))
        # ----------- right box ---------------------------------
        # VBox right with GroupBox-frame
        editBox = QtGui.QGroupBox("Details")
        editBox.setMaximumWidth(600)
        vEditLayoutR = QtGui.QVBoxLayout()
        editBox.setLayout(vEditLayoutR)
        hMainLayout.addWidget(editBox)
        # ISO path
        hLayoutUserDir = QtGui.QHBoxLayout()
        vEditLayoutR.addLayout(hLayoutUserDir)
        isoPathLabel = QtGui.QLabel("Path of install ISO:")
        hLayoutUserDir.addWidget(isoPathLabel)
        self.isoPathLineEdit = QtGui.QLineEdit()
        self.isoPathLineEdit.setReadOnly(True)
        hLayoutUserDir.addWidget(self.isoPathLineEdit)
        self.isoPathPushButton = QtGui.QPushButton() #":/")
        _icon = QtGui.QIcon(os.path.join(ICONDIR + 'search.png'))
        self.isoPathPushButton.setIcon(_icon)
        # self.isoPathPushButton.setReadOnly(True)
        self.connect(self.isoPathPushButton, QtCore.SIGNAL('clicked()'), QtCore.SLOT('selectISOpath()'))
        hLayoutUserDir.addWidget(self.isoPathPushButton)
        # Save button
        hSefeLayout = QtGui.QHBoxLayout()
        vEditLayoutR.addLayout(hSefeLayout)
        saveButton = QtGui.QPushButton("Save Edits")
        self.connect(saveButton, QtCore.SIGNAL('clicked()'), QtCore.SLOT('saveEdits()'))
        hSefeLayout.addWidget(saveButton)
        vEditLayoutR.insertStretch(10000, 0)
        # ---------- Bottom area --------------------
        # Bottom layout H
        hBottomLayout = QtGui.QHBoxLayout()
        vMainLayout.addLayout(hBottomLayout)
        closePushButton = QtGui.QPushButton("New")
        self.connect(closePushButton, QtCore.SIGNAL('clicked()'), QtCore.SLOT('newISOpathDialog()'))
        hBottomLayout.addWidget(closePushButton)
        closePushButton = QtGui.QPushButton("Delete")
        self.connect(closePushButton, QtCore.SIGNAL('clicked()'), QtCore.SLOT('deleteISOpath()'))
        hBottomLayout.addWidget(closePushButton)
        closePushButton = QtGui.QPushButton("Close")
        self.connect(closePushButton, QtCore.SIGNAL('clicked()'), QtCore.SLOT('close()'))
        hBottomLayout.addWidget(closePushButton)
        self.refreshISOList()

    ## Slot: delete the currently selected ISO entry (after confirmation).
    @pyqtSlot()
    def deleteISOpath(self):
        print "[delete ISO path...]"
        _name = ""
        ret = QtGui.QMessageBox.warning(self, \
            "Warning", \
            "Do you want to delete this entry?", \
            QtGui.QMessageBox.Cancel | QtGui.QMessageBox.Ok)
        if (ret == QtGui.QMessageBox.Cancel):
            print "...cencel"
            return
        elif (ret == QtGui.QMessageBox.Ok):
            print "...Ok"
        # The last selected row wins (single selection in practice).
        for item in self.listview.selectedItems():
            print ".." , item.text(0)
            _name = item.text(0)
        if str(_name) == "":
            infotext = "No user select!"
            QtGui.QMessageBox.critical(self, "Error",str(infotext))
            return
        else:
            try:
                self.__vmInfoDB.deleteISOpath(str(_name))
            except sqlite3.Error, e:
                infotext = "An error occurred:", e.args[0]
                QtGui.QMessageBox.critical(self, "Error",str(infotext))
                return
        self.refreshISOList()

    ## Slot: load the selected entry's path into the detail editor.
    @pyqtSlot()
    def fillDetailView(self):
        #print "[fillDetailView...]"
        _name = ""
        _listIsEmpty = True
        for item in self.listview.selectedItems():
            _listIsEmpty = False
            #print "[...]" , item.text(0)
            _name = item.text(0)
        if _listIsEmpty:
            return
        if str(_name) == "":
            infotext = "No entry select!"
            QtGui.QMessageBox.critical(self, "Error",str(infotext))
            return
        else:
            try:
                _path = self.__vmInfoDB.getISOpath(str(_name))
            except sqlite3.Error, e:
                infotext = "An error occurred:", e.args[0]
                QtGui.QMessageBox.critical(self, "Error",str(infotext))
                return
            if _path == -1 or _path == None:
                print "[20110405234854] _path.' ", _path
                infotext = "ISO name not found!"
                QtGui.QMessageBox.critical(self, "Error",str(infotext))
                return
            else:
                print "[] _path: ", _path
                self.isoPathLineEdit.setText( _path )
                # Unlock the editor for the selected entry.
                self.isoPathLineEdit.setReadOnly(False)
                self.isoPathLineEdit.setReadOnly(False)
                # self.isoPathPushButton.setReadOnly(False)

    ## Slot: ask the user for a label and create a new (empty) ISO entry.
    @pyqtSlot()
    def newISOpathDialog(self):
        text, ok = QtGui.QInputDialog.getText(self, "New ISO path", "Label name (not path):", 0)
        if ok != True :
            logging.debug("[201104] if: " + str(text) + str(ok))
            return
        else:
            logging.debug("[20110411] else: " + str(text) + str(ok))
            print "[2011042] else: " + str(text)
            try:
                self.__vmInfoDB.addISOpath(str(text))
            except sqlite3.Error, e:
                infotext = "An error occurred:", e.args[0]
                QtGui.QMessageBox.critical(self, "Error",str(infotext))
                return
            self.__isoPathName = str(text)
            self.refreshISOList()

    ## Refresh the list of ISO entries from the database and relock the editor.
    @pyqtSlot()
    def refreshISOList(self):
        print "[refreshISOList]"
        nameList = self.__vmInfoDB.getAllISOnames()
        self.listview.clear()
        for item in nameList:
            qStringList = QtCore.QStringList( [ str(item) ] )
            #twItem = QtGui.QTreeWidgetItem(qStringList)
            twItem = QtGui.QTreeWidgetItem(QtCore.QStringList(item))
            self.listview.addTopLevelItem(twItem)
        self.isoPathLineEdit.setReadOnly(True)
        # self.isoPathPushButton.setReadOnly(True)

    ## Slot: persist the edited path for the selected entry.
    @pyqtSlot()
    def saveEdits(self):
        print "[save edits...]"
        _name = ""
        for item in self.listview.selectedItems():
            print ".." , item.text(0)
            _name = item.text(0)
        if str(_name) == "":
            infotext = "No entry select!"
            QtGui.QMessageBox.critical(self, "Error",str(infotext))
            return
        try:
            self.__vmInfoDB.updateISOpath(str(_name), str(self.isoPathLineEdit.text()))
        except sqlite3.Error, e:
            infotext = "An error occurred:", e.args[0]
            QtGui.QMessageBox.critical(self, "Error",str(infotext))
            return
        infotext = "Ok, saved..."
        QtGui.QMessageBox.information(self, "OK",str(infotext))

    ## Slot with file dialog for selecting an ISO image file.
    @pyqtSlot()
    def selectISOpath(self):
        #print "selectISOpath()"
        dirname = QtGui.QFileDialog.getOpenFileName(self, "Select ISO image", "","ISO(*.iso *.ISO);; all(*.*)")
        self.isoPathLineEdit.setText(dirname)
|
UTF-8
|
Python
| false | false | 2,011 |
180,388,651,972 |
2b76a5406376c0460b5332a456a374af10d9b30d
|
79c92dd85ab83c553bba4f3c1cab0647e9af8ded
|
/tests/writebatch_test.py
|
0dbb84612ed46d3319e1f29cb2652788ef8ed30b
|
[
"MIT"
] |
permissive
|
onitu/py-elevator
|
https://github.com/onitu/py-elevator
|
20f7c70b70c7118ec10bd2b06a10b6479d9fb547
|
8ade3f3b830e64fbed769f6da358dfe42ea6a3e3
|
refs/heads/master
| 2021-01-20T11:35:08.009952 | 2013-03-28T09:42:02 | 2013-03-28T09:42:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest2
from pyelevator import Elevator, WriteBatch
from pyelevator.constants import *
from .fakers import TestDaemon
class WriteBatchElevator(unittest2.TestCase):
    """Integration tests for WriteBatch against a throw-away test daemon."""

    def setUp(self):
        """Start a fake Elevator daemon and connect a client plus a batch."""
        self.elevator_daemon = TestDaemon()
        self.elevator_daemon.start()
        self.endpoint = '{0}:{1}'.format(self.elevator_daemon.bind, self.elevator_daemon.port)
        self.client = Elevator(endpoint=self.endpoint)
        self.batch = WriteBatch(endpoint=self.endpoint)

    def tearDown(self):
        """Stop the daemon and drop every per-test object."""
        self.elevator_daemon.stop()
        del self.elevator_daemon
        del self.client
        del self.batch

    def _bootstrap_db(self):
        """Seed the store with the letters a-j mapped to themselves."""
        for letter in 'abcdefghij':
            self.batch.Put(letter, letter)
        self.batch.Write()

    def test_batch_put_only(self):
        """Queued Puts are applied to the store once Write() runs."""
        keys = [str(number) for number in xrange(10)]
        for key in keys:
            self.batch.Put(key, key)
        # All ten operations are queued as Put signals before the flush.
        self.assertEqual(len(self.batch.container), 10)
        for operation in self.batch.container:
            self.assertEqual(operation[0], SIGNAL_BATCH_PUT)
            self.assertIsInstance(operation[1], str)
        self.batch.Write()
        for key in keys:
            value = self.client.Get(key)
            self.assertIsNotNone(value)
            self.assertIsInstance(value, str)
            self.assertEqual(value, key)

    def test_batch_mixed_put_delete(self):
        """Operations replay in insertion order: delete, re-put, delete."""
        self.batch.Delete('a')
        self.batch.Put('a')
        self.batch.Delete('b')
        self.batch.Write()
        self.assertEqual(self.client.Get('a'), 'a')
        self.assertIsNone(self.client.Get('b'))

    def test_batch_with(self):
        """Using the batch as a context manager writes on scope exit."""
        with self.batch as batch:
            for number in xrange(10):
                key = str(number)
                batch.Put(key, key)
            self.assertEqual(len(batch.container), 10)
            for operation in batch.container:
                self.assertEqual(operation[0], SIGNAL_BATCH_PUT)
                self.assertIsInstance(operation[1], str)
        for number in xrange(10):
            key = str(number)
            value = self.client.Get(key)
            self.assertIsNotNone(value)
            self.assertIsInstance(value, str)
            self.assertEqual(value, key)
|
UTF-8
|
Python
| false | false | 2,013 |
13,168,369,770,643 |
29a414b6709fa19652cfce556da9d22d3e94f157
|
06283ebd66870627aecc230c4aa8b9606ff7c4f5
|
/hitranlbl/search_astrophysics.py
|
7c84ed6ad7b6b9bd00b848f81af8978cb9d88fc8
|
[] |
no_license
|
xnx/www_hitran
|
https://github.com/xnx/www_hitran
|
c0e26a87134b7e2aac435008dde39ea4713ef633
|
023eb6aa75541b530330d13601be27aceed926de
|
refs/heads/master
| 2015-08-01T23:56:22 | 2013-01-18T12:39:20 | 2013-01-18T12:39:20 | 5,718,406 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import os
import time
from django.conf import settings
from search_utils import make_simple_sql_query, get_filestem, hitranIDs,\
get_pfn_filenames
def do_search_astrophysics(form):
    """
    A search involving return of a fairly minimal set of parameters from the
    hitranlbl_trans table only, with no joins required. This set might well
    be enough for many astrophysical applications.

    Runs the raw SQL built from *form*, writes the transitions to a file via
    write_astrophysics, and returns (output_filenames, search_summary) where
    output_filenames are basenames only and search_summary carries the HTML
    summary, the transition count and the total elapsed time.
    """
    start_time = time.time()
    # Columns selected, in order: isotopologue id, wavenumber, Einstein-A,
    # lower-state energy, upper/lower state degeneracies.
    query_fields = ['iso_id', 'nu', 'a', 'Elower', 'gp', 'gpp']
    query = make_simple_sql_query(form, query_fields)
    search_summary = {'summary_html':
            '<p>Here are the results of the query in "astrophysics" format.'\
            ' Note that no data sources are output in this format</p>'}
    # here's where we do the rawest of the raw SQL query
    from django.db import connection, transaction
    cursor = connection.cursor()
    cursor.execute(query)
    rows = cursor.fetchall()
    ntrans = len(rows)
    te = time.time()
    print 'time to get %d transitions = %.1f secs' % (ntrans, (te-start_time))
    ts = time.time()
    filestem = get_filestem()
    output_files = write_astrophysics(filestem, rows, form)
    te = time.time()
    print 'time to write transitions = %.1f secs' % (te - ts)
    # strip path from output filenames:
    output_files = [os.path.basename(x) for x in output_files]
    if form.output_partition_function:
        # get the partition function filenames
        output_files.extend(get_pfn_filenames(form))
    end_time = time.time()
    search_summary['ntrans'] = ntrans
    search_summary['timed_at'] = '%.1f secs' % (end_time - start_time)
    return output_files, search_summary
def write_astrophysics(filestem, rows, form):
    """
    Write the output transitions file for the "astrophysics" output
    collection and return its path as a one-element list.

    The rows returned from the database query are, in order:
    iso_id, nu, A, Elower, gp, gpp
    (the original docstring listed only five columns; six are selected).
    """
    outpath = os.path.join(settings.RESULTSPATH, '%s-trans.txt' % filestem)
    fo = open(outpath, 'w')
    s_fmt = form.field_separator.join(
        ['%2d','%2d','%12.6f','%10.3e','%10s','%5s','%5s'])
    # Placeholders used when E" or a degeneracy is NULL in the database:
    # either "-1" sentinels or the user's default character repeated to
    # the field width (10 chars for E", 5 for g).
    if form.default_entry == '-1':
        default_Epp = '   -1.0000'
        default_g = '   -1'
    else:
        default_Epp = form.default_entry * 10
        default_g = form.default_entry * 5
    for row in rows:
        iso_id = row[0]
        # Map the global isotopologue id to (molecule_id, local_iso_id).
        molecule_id, local_iso_id = hitranIDs[iso_id]
        try:
            s_Epp = '%10.4f' % row[3]
        except TypeError:   # NULL Elower
            s_Epp = default_Epp
        try:
            s_gp = '%5d' % row[4]
        except TypeError:   # NULL gp
            s_gp = default_g
        try:
            # Bug fix: gpp is column 5; the original formatted row[4]
            # here, duplicating gp in the gpp output field.
            s_gpp = '%5d' % row[5]
        except TypeError:   # NULL gpp
            s_gpp = default_g
        print >>fo, s_fmt % (
            molecule_id,
            local_iso_id,
            row[1],     # nu
            row[2],     # A
            s_Epp,
            s_gp, s_gpp)
    fo.close()
    return [outpath,]
|
UTF-8
|
Python
| false | false | 2,013 |
17,660,905,544,205 |
8eff5ee51eb0b31e009d0fa1b0e34d403c550d8d
|
19f05c91b991f38eca19275bfcb8a2a27000bb45
|
/makahiki/apps/components/prizes/management/commands/pick_winners.py
|
9a850fbfb04d5bfc5e15ec877bae411109820a38
|
[] |
no_license
|
keokilee/makahiki
|
https://github.com/keokilee/makahiki
|
9c40576c73fef2bf11dc22194dbabf98f5e67e64
|
783db33ed0b38fb4dccc371c426265f7028a2d13
|
refs/heads/master
| 2020-04-30T03:44:52.309826 | 2012-03-17T01:36:19 | 2012-03-17T01:36:19 | 543,870 | 2 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
import datetime
from django.core import management
from components.prizes.models import RaffleDeadline
from components.makahiki_notifications.models import NoticeTemplate, UserNotification
class Command(management.base.BaseCommand):
    help = 'Picks winners for raffle deadlines that have passed.'

    def handle(self, *args, **options):
        """
        Picks winners for raffle deadlines that have passed.
        """
        deadlines = RaffleDeadline.objects.filter(end_date__lte=datetime.datetime.today())
        for deadline in deadlines:
            self.stdout.write("Picking winners for %s prizes\n" % deadline.round_name)
            # Only prizes that do not yet have a winner.
            self.__pick_winners(deadline.raffleprize_set.filter(winner__isnull=True))

    def __pick_winners(self, prizes):
        """
        Interactively pick a random ticket holder as the winner of each
        prize, confirm with the operator, save, and notify the winner.
        """
        for prize in prizes:
            if not prize.winner:
                # Randomly order the tickets and then pick a random ticket.
                while True:
                    tickets = prize.raffleticket_set.order_by("?").all()
                    if tickets.count() == 0:
                        self.stdout.write('No tickets for %s. Skipping.\n' % prize)
                        # Bug fix: the original fell through to
                        # random.randint(0, -1), which raises ValueError.
                        # Move on to the next prize instead.
                        break
                    ticket = random.randint(0, tickets.count() - 1)
                    user = tickets[ticket].user
                    self.stdout.write(str(prize) + ": " + user.username + '\n')
                    value = raw_input('Is this OK? [y/n] ')
                    if value.lower() == 'y':
                        prize.winner = user
                        prize.save()
                        self.stdout.write("Notifying %s\n" % user.username)
                        # Notify winner using the template.
                        try:
                            template = NoticeTemplate.objects.get(notice_type='raffle-winner')
                            message = template.render({'PRIZE': prize})
                            UserNotification.create_info_notification(user, message, True, prize)
                        except NoticeTemplate.DoesNotExist:
                            self.stdout.write("Could not find the raffle-winner template. User was not notified.\n")
                        break
|
UTF-8
|
Python
| false | false | 2,012 |
14,448,269,995,487 |
32b568c728f62461f4a5ae199c8d2db54dc8f4ce
|
50d3ba7b4d55226211d0b97e7c635a65fd9ffb25
|
/WIndows/nsUI/apps/Onlivedesktop.py
|
0e97f16278f3f0d63e0453fccbb315653b211580
|
[] |
no_license
|
jumboTest/test
|
https://github.com/jumboTest/test
|
c476f0e4447181fadce17f269b0cc1c1012b15ca
|
c58a031ee5b140e934ef507b969dc457f5d2ac9f
|
refs/heads/master
| 2021-01-23T08:56:29.892685 | 2014-04-05T16:56:12 | 2014-04-05T16:56:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
from nsUI.browser import Browser
from nsUI.actions import *
from nsUI.apps.OnlivedesktopConfig import *
logger = logging.getLogger('nsUI')
class Onlivedesktop(object):
    """Page-object style driver for the Onlive Desktop web UI.

    Relies on the locator constants and default credentials imported from
    OnlivedesktopConfig and on the browser actions star-imported from
    nsUI.actions (go_to, wait_for, click_element, ...).
    """
    def __init__(self):
        # One Browser instance backs all module-level browser actions.
        self.browser = Browser()
        set_window_size(1400,1000)
        set_base_url(baseURL)
    def login(self,user=username, passwd=password):
        """
        Logs in to Onlivedesktop using the provided username & password
        and navigates to the "My Files" view.

        :param user: username (email) to log in to Onlivedesktop,
            default: [email protected]
        :type user: str
        :param passwd: passwd for the provided username,
            default: neova123
        :type passwd: str
        """
        logger.debug('Logging in to Onlivedesktop...')
        go_to(url=loginURL)
        wait_for(assert_title_contains,text='Sign In')
        wait_for(assert_element,username_locator)
        write_textfield(get_element_by_css(username_locator),user)
        write_textfield(get_element_by_css(password_locator),passwd)
        click_element(get_element_by_css(login_button_locator))
        # Presence of the logout link is used as the "logged in" signal.
        wait_for(assert_element,tag=logout_link_locator)
        sleep(2)
        click_element(get_element_by_css("li>[href='/account/myfiles']"))
        sleep(2)
        logger.debug('Login Successfully on Onlivedesktop...')
    def logout(self):
        """
        Logout from Onlivedesktop
        """
        logger.debug('Logout from Onlivedesktop')
        wait_for(assert_element,tag=logout_link_locator)
        click_element(get_element_by_css(logout_link_locator))
        wait_for(assert_title_contains,text='Onlive Desktop')
        logger.debug('Logout Successful')
    def invalidLogin(self,errormessage=error_message,user=invalidusername, passwd=invalidpassword):
        """
        Attempts a login with invalid credentials and asserts that the
        expected error message is shown.

        :param errormessage: error text expected on failed login
        :type errormessage: str
        :param user: invalid username (email) to attempt login with
        :type user: str
        :param passwd: invalid password for the provided username
        :type passwd: str
        """
        logger.debug('Logging in to onlivedesktop...')
        go_to(url=loginURL)
        wait_for(assert_title_contains,text='Sign In')
        wait_for(assert_element,username_locator)
        write_textfield(get_element_by_css(username_locator),user)
        write_textfield(get_element_by_css(password_locator),passwd)
        click_element(get_element_by_css(login_button_locator))
        sleep(2)
        wait_for(assert_element,error_message_locator)
        assert_text_contains(get_element_by_css(error_message_locator),errormessage )
    def checkName(self,filename):
        """
        Check if entered FolderName or Filename exist or not in view Panel.
        Returns a string 'Exist' or 'NotExist' respectively.

        :param filename: Name of Folder or File
        :type filename: str
        :return Type:Str Values:Exist | NotExist
        """
        count = 0
        # Scan every row in the file-list panel for an exact name match.
        elements = get_elements_by_css(row_locator)
        for li in elements:
            name = get_text(get_child_element_by_css(li,selected_locator))
            if(name == filename):
                count = count+1
                break
        if(count == 0):
            return "NotExist"
        else:
            return "Exist"
    def deleteFile1(self,filename):
        """
        Deletes a File from Onlive Desktop via the per-row delete link.

        :param filename: name of the file to be deleted.
        :type filename: str
        """
        logger.debug('Delete a file...')
        elements = get_elements_by_css(row_locator)
        for li in elements:
            name = get_text(get_child_element_by_css(li,selected_locator))
            if(name == filename):
                click_element(get_child_element_by_css(li,delete_link_locator))
                break
        # Confirm the deletion dialog and wait for the status message.
        click_element(get_element_by_css(yes_button_locator))
        wait_for(assert_element,tag=message_locator)
        sleep(3)
    def renameFile1(self,filename1,filename2):
        """
        Rename file in Onlive Desktop

        :param filename1: Old file name
        :type filename1: str
        :param filename2: New file name
        :type filename2: str
        """
        logger.debug('Rename a file...')
        elements = get_elements_by_css(row_locator)
        for li in elements:
            name = get_text(get_child_element_by_css(li,selected_locator))
            if(name == filename1):
                # Open the inline rename field, type the new name, confirm.
                click_element(get_child_element_by_css(li,rename_link_locator))
                click_element(get_child_element_by_css(li,rename_file_input_locator))
                write_textfield(get_child_element_by_css(li,rename_file_input_locator),filename2)
                click_element(get_child_element_by_css(li,ok_button_locator))
                break
        wait_for(assert_element,tag=message_locator)
        sleep(5)
    def resultLogger(self, result):
        """
        Logs results in the log file
        """
        logger.debug(result)
    def uploadFile(self,filename):
        """
        Uploads a File to Onlive Desktop and asserts the success message.

        :param filename: name of the file to be uploaded.
        :type filename: str
        """
        logger.debug('Uploading file...')
        click_element(get_element_by_css(upload_button_locator))
        wait_for(assert_element,get_element_by_css(choosefile_div_locator))
        # check=False/clear=False: the file input is not a normal textfield.
        write_textfield(get_element_by_css(upload_file_input_locator), new_text=filename, check=False, clear=False)
        click_element(get_element_by_css(upload_file_button_locator))
        sleep(5)
        wait_for(assert_element,tag=message_locator)
        message = get_element_by_css(message_locator)
        assert_text_contains(message,"Successfully uploaded:")
    def deleteFile2(self,filename):
        """
        Deletes a File from Onlive Desktop via the row checkbox and the
        toolbar delete button (alternate path to deleteFile1).

        :param filename: name of the file to be deleted.
        :type filename: str
        """
        logger.debug('Delete a file...')
        elements = get_elements_by_css(row_locator)
        for li in elements:
            name = get_text(get_child_element_by_css(li,selected_locator))
            if(name == filename):
                click_element(get_child_element_by_css(li,checkbox_locator))
                break
        click_element(get_element_by_css(delete_button_locator))
        click_element(get_element_by_css(yes_button_locator))
        wait_for(assert_element,tag=message_locator)
        sleep(3)
|
UTF-8
|
Python
| false | false | 2,014 |
15,109,694,989,181 |
74414b183bb5b0bf0533fa73d3456178efcd7817
|
93577a61132a3234694959901c29b548626edc56
|
/qfault/counting/component/adapter.py
|
7d391f7caf41eb246322305bfa95c29d8b291c63
|
[] |
no_license
|
apaetz/qfault
|
https://github.com/apaetz/qfault
|
777b8f564e5ecb62f6087883a5ff8e07b1f4461b
|
4d4114824144049c53e8dd08c5ba202f9058540b
|
refs/heads/master
| 2021-01-22T11:42:06.311436 | 2012-09-06T15:37:28 | 2012-09-06T15:37:28 | 33,093,518 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2011-11-14
This file contains components that adapt the output from other components in some way.
@author: adam
'''
from qfault.circuit.block import Block
from qfault.counting.component.base import Filter, SequentialComponent, \
ParallelComponent
from qfault.counting.key import IdentityManipulator, KeyManipulator, \
SyndromeKeyDecoder, SyndromeKeyFilter
from qfault.qec.qecc import TrivialStablizerCode
class IdealDecoder(Filter):
    '''
    An ideal decoder.
    Input syndrome keys are decoded to syndrome keys for the trivial code.
    TODO: should the keys be decoded directly to Paulis?
    '''
    def __init__(self, code):
        super(IdealDecoder, self).__init__()
        # The QEC code whose syndrome keys this decoder consumes.
        self._code = code
    def inBlocks(self):
        # One input block, encoded with self._code.
        return (Block('', self._code),)
    def outBlocks(self):
        # Output is a single block of the trivial (unencoded) code.
        return (Block('', TrivialStablizerCode()),)
    def keyPropagator(self, subPropagator=IdentityManipulator()):
        # NOTE(review): the default IdentityManipulator() is evaluated once
        # and shared across calls — assumed stateless; confirm.
        return self.KeyDecoder(self._code, subPropagator)
    class KeyDecoder(KeyManipulator):
        # Decodes the first element of each key tuple, passing the rest
        # through unchanged.
        def __init__(self, code, manipulator):
            super(IdealDecoder.KeyDecoder, self).__init__(manipulator)
            self.decoder = SyndromeKeyDecoder(code)
        def _manipulate(self, key):
            decoded = self.decoder.decode(key[0])
            return (decoded,) + key[1:]
class FlaggingDecoder(IdealDecoder):
    '''
    An ideal decoder that also flags detectable errors. The syndrome key
    for each count is transformed into a (flag, error) tuple.
    '''
    def __init__(self, code, decode_as_pauli=False):
        # Consistency fix: initialize through the parent (which also sets
        # self._code) rather than bypassing IdealDecoder.__init__ /
        # Filter.__init__ as the original did.
        super(FlaggingDecoder, self).__init__(code)
        self._decode_as_pauli = decode_as_pauli
    class KeyDecoder(KeyManipulator):
        # Decodes key[0] into (detected-flag, decoded-error); the rest of
        # the key tuple passes through unchanged.
        def __init__(self, code, manipulator):
            super(FlaggingDecoder.KeyDecoder, self).__init__(manipulator)
            self._code = code
            self.decoder = SyndromeKeyDecoder(code)
        def _manipulate(self, key):
            syndrome = self.decoder.syndrome(key[0])
            # flag is True when the code detects the syndrome as an error.
            flag = self._code.detectSyndrome(syndrome)
            decoded = self.decoder.decode(key[0])
            return ((flag, decoded),) + key[1:]
class SyndromeFilter(Filter):
    # Filter that reduces syndrome keys of the given code to syndrome-only
    # form via SyndromeKeyFilter.
    def __init__(self, code):
        super(SyndromeFilter, self).__init__()
        self._code = code
    def inBlocks(self):
        # One input block, encoded with self._code.
        return (Block('', self._code),)
    def keyPropagator(self, subPropagator=IdentityManipulator()):
        # NOTE(review): shared default IdentityManipulator() instance —
        # assumed stateless; confirm.
        return SyndromeKeyFilter(self._code, subPropagator)
class DecodeAdapter(SequentialComponent):
    '''
    Applies ideal decoders to all output blocks of the given component.
    '''
    def __init__(self, component):
        # Build one IdealDecoder per output block and run them in parallel
        # after the wrapped component.
        decoder_stage = ParallelComponent(
            {},
            *(IdealDecoder(blk.get_code()) for blk in component.outBlocks()))
        super(DecodeAdapter, self).__init__(component.kGood,
                                            (component, decoder_stage))
class SyndromeAdapter(SequentialComponent):
    '''
    Applies syndrome filters to all output blocks of the given component.
    '''
    def __init__(self, component):
        # Mirror of DecodeAdapter, but with SyndromeFilter per block.
        filter_stage = ParallelComponent(
            {},
            *(SyndromeFilter(blk.get_code()) for blk in component.outBlocks()))
        super(SyndromeAdapter, self).__init__(component.kGood,
                                              (component, filter_stage))
|
UTF-8
|
Python
| false | false | 2,012 |
8,461,085,609,344 |
0e24c33f378fe0bf3a2371e6be4ec8f48131331b
|
14727c3b48d869d04688e370188370181f25b83e
|
/libs/icebergsdk/api.py
|
ea79ba4b2f65131db6cca4ea3dc04420df742b40
|
[] |
no_license
|
Rafkraft/twitter_bot
|
https://github.com/Rafkraft/twitter_bot
|
74047facfd0103235e591ff56b1d62b6f9ea4fab
|
8d409d8b0e3be7fff14beb316cb743c3da0e46f1
|
refs/heads/master
| 2021-01-10T21:14:58.157239 | 2014-09-30T08:28:49 | 2014-09-30T08:28:49 | 21,033,968 | 1 | 0 | null | false | 2014-07-21T13:05:11 | 2014-06-20T10:20:25 | 2014-07-07T12:36:25 | 2014-07-21T13:05:11 | 474 | 0 | 1 | 0 |
Python
| null | null |
# -*- coding: utf-8 -*-
import logging, urllib, json, time, hashlib, hmac
from icebergsdk.exceptions import IcebergAPIError, IcebergServerError, IcebergClientError
from icebergsdk.exceptions import IcebergClientUnauthorizedError, IcebergMissingApplicationSettingsError
from icebergsdk.exceptions import IcebergMissingSsoData
from icebergsdk.conf import ConfigurationSandbox
from icebergsdk import resources
from icebergsdk.json_utils import DateTimeAwareJSONEncoder
from google.appengine.api.urlfetch import fetch
logger = logging.getLogger('icebergsdk')
class IcebergAPI(object):
    """Python 2 client for the Iceberg e-commerce REST API, using Google
    App Engine's urlfetch for transport.

    Resource classes (Cart, Order, Product, ...) are bound to this handler
    in __init__ so that e.g. api.Cart queries go through self.request().
    """
    def __init__(self, username = None, access_token = None, lang = None, timeout = None, conf = None):
        """
        @conf:
            Configuration, ConfigurationSandbox or custom conf
        """
        # Conf
        self.conf = conf or ConfigurationSandbox
        self.username = username
        self.access_token = access_token
        self.timeout = timeout
        self.lang = lang or self.conf.ICEBERG_DEFAULT_LANG
        # Resources definition: each resource class gets this instance as
        # its request handler.
        self.Application = resources.Application.set_handler(self)
        self.Address = resources.Address.set_handler(self)
        self.Cart = resources.Cart.set_handler(self)
        self.Country = resources.Country.set_handler(self)
        self.MerchantOrder = resources.MerchantOrder.set_handler(self)
        self.Order = resources.Order.set_handler(self)
        self.ProductVariation = resources.ProductVariation.set_handler(self)
        self.ProductOffer = resources.ProductOffer.set_handler(self)
        self.Product = resources.Product.set_handler(self)
        self.Profile = resources.Profile.set_handler(self)
        self.Payment = resources.Payment.set_handler(self)
        self.Store = resources.Store.set_handler(self)
        self.User = resources.User.set_handler(self)
        self.Message = resources.Message.set_handler(self)
        self.Review = resources.Review.set_handler(self)
        self.MerchantReview = resources.MerchantReview.set_handler(self)
        self.UserShoppingPreference = resources.UserShoppingPreference.set_handler(self)
        self.Category = resources.Category.set_handler(self)
        ### Missing
        # Return
        # Store Reviews
        # Product Reviews
        # Invoices
        # Currencies
        # Webhooks
        # Feed Management
    def get_auth_token(self):
        """Build the Authorization header value; anonymous users also carry
        the application namespace."""
        if self.username == "Anonymous":
            return '%s %s:%s:%s' % (self.conf.ICEBERG_AUTH_HEADER, self.username, self.conf.ICEBERG_APPLICATION_NAMESPACE, self.access_token)
        else:
            return '%s %s:%s' % (self.conf.ICEBERG_AUTH_HEADER, self.username, self.access_token)
    def auth_user(self, username, email, first_name = '', last_name = '', is_staff = False, is_superuser = False):
        """
        Method for Iceberg Staff to get or create a user into the platform and get the access_token .
        For authentication, please use the SSO method.
        """
        if not self.conf.ICEBERG_API_PRIVATE_KEY:
            raise IcebergMissingApplicationSettingsError()
        timestamp = int(time.time())
        secret_key = self.conf.ICEBERG_API_PRIVATE_KEY
        # HMAC-SHA1 over the semicolon-joined fields proves knowledge of
        # the private key (Python 2 bytes formatting).
        to_compose = [username, email, first_name, last_name, is_staff, is_superuser, timestamp]
        hash_obj = hmac.new(b"%s" % secret_key, b";".join(str(x) for x in to_compose), digestmod = hashlib.sha1)
        message_auth = hash_obj.hexdigest()
        data = {
            'username': username,
            'email': email,
            'first_name': first_name,
            'last_name': last_name,
            'is_staff': is_staff,
            'is_superuser': is_superuser,
            'timestamp': timestamp,
            'message_auth': message_auth
        }
        response = self.request('user/auth/', args = data)
        self.username = username
        self.access_token = response['access_token']
        self._auth_response = response
        return self
    def generate_messages_auth(self, data):
        """Return the HMAC-SHA1 signature for an SSO payload, keyed with
        the application secret. NOTE: signs only email/first/last/timestamp
        even when the caller passes extra fields."""
        email = data['email']
        first_name = data['first_name']
        last_name = data['last_name']
        timestamp = data['timestamp']
        secret_key = self.conf.ICEBERG_APPLICATION_SECRET_KEY
        to_compose = [email, first_name, last_name, timestamp]
        hash_obj = hmac.new(b"%s" % secret_key, b";".join(str(x) for x in to_compose), digestmod = hashlib.sha1)
        message_auth = hash_obj.hexdigest()
        return message_auth
    # def sso(self, email, first_name, last_name):
    def sso(self, email, first_name, last_name):
        """
        Depreciated
        """
        if not self.conf.ICEBERG_APPLICATION_NAMESPACE or not self.conf.ICEBERG_APPLICATION_SECRET_KEY:
            raise IcebergMissingApplicationSettingsError()
        timestamp = int(time.time())
        data = {
            'application': self.conf.ICEBERG_APPLICATION_NAMESPACE,
            'email': email,
            'first_name': first_name,
            'last_name': last_name,
            'timestamp': timestamp,
            'message_auth': self.generate_messages_auth({
                'email': email,
                'first_name': first_name,
                'last_name': last_name,
                'timestamp': timestamp
            })
        }
        response = self.request('user/sso/', args = data)
        self.username = response['username']
        self.access_token = response['access_token']
        return response
    def sso_user(self, email = None, first_name = None, last_name = None, currency = "EUR", shipping_country = "FR", include_application_data = True):
        """Sign in (or create) a user through SSO and bind the returned
        credentials to this client. Returns self for chaining."""
        if not self.conf.ICEBERG_APPLICATION_NAMESPACE or not self.conf.ICEBERG_APPLICATION_SECRET_KEY:
            raise IcebergMissingApplicationSettingsError()
        print "sso_user %s on application %s" % (email, self.conf.ICEBERG_APPLICATION_NAMESPACE)
        timestamp = int(time.time())
        data = {
            'application': self.conf.ICEBERG_APPLICATION_NAMESPACE,
            'email': email,
            'first_name': first_name,
            'last_name': last_name,
            'timestamp': timestamp,
            'include_application_data': include_application_data,
            'message_auth': self.generate_messages_auth({
                'email': email,
                'first_name': first_name,
                'last_name': last_name,
                'timestamp': timestamp,
                'currency': currency,
                'shipping_country': shipping_country
            })
        }
        response = self.request('user/sso/', args = data)
        self.username = response['username']
        self.access_token = response['access_token']
        self._auth_response = response
        return self
        # return response
    def _sso_response():
        # Property-factory idiom: this function runs at class-definition
        # time and its locals() become the property's fget/fset/fdel.
        doc = "For compatibility matter, but now, should use _auth_response."
        def fget(self):
            return self._auth_response
        def fset(self, value):
            self._auth_response = value
        def fdel(self):
            del self._auth_response
        return locals()
    _sso_response = property(**_sso_response())
    def request(self, path, args = None, post_args = None, files = None, method = None):
        """Perform an HTTP request against the API and return the decoded
        JSON body. `args` become the query string; `post_args` are JSON-
        encoded into the request payload. `files` is currently unused."""
        args = args or {}
        method = method or "GET"
        headers = {
            'Content-Type': 'application/json',
            'Accept-Language': self.lang,
            'Authorization': self.get_auth_token()
        }
        # A path containing '//' is treated as an absolute URL.
        if '//' not in path:
            url = "%s:%s/%s/" % (self.conf.ICEBERG_API_URL, self.conf.ICEBERG_API_PORT, self.conf.ICEBERG_API_VERSION)
        else:
            url = ""
        url += path
        if post_args:
            post_args = json.dumps(post_args, cls=DateTimeAwareJSONEncoder, ensure_ascii=False)
        if args:
            url += "?%s" % urllib.urlencode(args)
        res = fetch(url,
            payload=post_args,
            method=method,
            headers=headers,
            follow_redirects=True
        )
        return json.loads(res.content)
    def get_element(self, resource, object_id):
        # Fetch a single resource instance by id.
        return self.request("%s/%s/" % (resource, object_id))
    def get_list(self, path, **kwargs):
        # Fetch a resource collection and unwrap the 'objects' envelope.
        if not path.endswith('/'):
            path = "%s/" % path
        result = self.request(path, **kwargs)
        return result['objects']
    def convert_to_register_user(self):
        raise NotImplementedError()
    def me(self):
        """
        Return User resource
        """
        if not hasattr(self, '_auth_response'):
            raise IcebergMissingSsoData()
        return self.User.findOrCreate(self._auth_response)
    #####
    #
    # Shortcuts.. Will be removed
    #
    #####
    # User
    def get_me(self):
        return self.request("user/me/")
    # Cart
    def get_my_cart(self):
        return self.request("cart/mine/")
    # Merchants
    def get_my_merchants(self):
        return self.get_list('merchant')
    def get_merchant(self, object_id):
        return self.get_element('merchant', object_id)
    # Applications
    def get_my_applications(self):
        return self.get_list('application')
    def get_application(self, object_id):
        return self.get_element('application', object_id)
|
UTF-8
|
Python
| false | false | 2,014 |
9,028,021,261,957 |
6e50a7c429ec982b9007bbf908e6d4470d298ebc
|
ccf3080c07cde269bba2d93d7bc532eaff2545f6
|
/languages/scripts/process_language.py
|
f4ab941361f3e445591e212f14cb8c890ffcf2a7
|
[] |
no_license
|
XiaogangHe/ADM_ML
|
https://github.com/XiaogangHe/ADM_ML
|
25131ec6d673f40f5a7d7f47af0af074ee27de86
|
0f0a64b2ef8c5f2d8ea40bebb5502c403e211413
|
refs/heads/master
| 2018-09-02T04:41:36.867986 | 2014-03-10T02:21:58 | 2014-03-10T02:21:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#convert .po files to .mo files for the server to read
|
UTF-8
|
Python
| false | false | 2,014 |
12,927,851,600,604 |
7eaa7e13a62532bdd86755fddda706a4ffa2e356
|
cafdb96e5013dc6bd750b1581255188c6912685d
|
/Mitch.py
|
46c89f7e6fa2ba6c7143cb9e9c7e81abd8088166
|
[] |
no_license
|
SilentShadow87/git
|
https://github.com/SilentShadow87/git
|
0a1ef9814741cb189be925caf55fd2b8361e250c
|
c066b5d114d3eb8f15698c8502428a594022e7f1
|
refs/heads/master
| 2019-05-29T06:53:10.651989 | 2014-10-27T12:12:17 | 2014-10-27T12:12:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Mitch:
    """Empty placeholder class — no attributes or behaviour defined yet."""
    pass
|
UTF-8
|
Python
| false | false | 2,014 |
12,352,325,969,069 |
208815144201730194be1b3786a19b70cb5fe69f
|
99925b5b9154c9b714eb0d942249d0b61fdfc1b2
|
/accounts/models.py
|
9ef01cbe1806c50fb50ad7491ad1b6706d7a461d
|
[] |
no_license
|
bdenne2/ps1auth
|
https://github.com/bdenne2/ps1auth
|
f26df64df852ec03c2bc9140ef828b6cfcf682f0
|
7ee9a5abb9ed09c578c1b53aba0d72db844481b5
|
refs/heads/master
| 2020-04-09T22:57:56.164807 | 2013-05-19T02:31:35 | 2013-05-19T02:31:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
import backends
import ldap
from django.conf import settings
from pprint import pprint
class PS1UserManager(BaseUserManager):
    """Manager for PS1User.

    NOTE(review): both methods are stubs — create_user does nothing, so
    create_superuser silently does nothing as well. Presumably account
    creation happens in the LDAP/AD directory instead; confirm.
    """
    def create_user(self, username, email, password):
        # Intentional no-op stub.
        pass
    def create_superuser(self, username, email, password):
        # Delegates to the no-op create_user stub.
        self.create_user(username, email, password)
class PS1User(AbstractBaseUser):
    """Custom Django user backed by an Active Directory / LDAP entry.

    Only the AD objectGUID is stored locally; all other attributes come
    from ``self.ldap_user`` (populated by the auth backend — presumably a
    dict of AD attribute lists; confirm against backends.py). Password
    checks and changes go straight to the directory, never to the DB.
    """
    objects = PS1UserManager()
    # AD objectGUID, used as the Django-side primary key.
    object_guid = models.CharField(
        max_length=48,
        primary_key=True,
        unique=True,
        db_index=True,
        editable=False,
    )
    USERNAME_FIELD = 'object_guid'
    def get_full_name(self):
        # Combines the AD 'name' and 'sn' (surname) attributes.
        first_name = self.ldap_user['name']
        last_name = self.ldap_user['sn']
        return ("{0} {1}").format(first_name, last_name)
    def get_short_name(self):
        return self.ldap_user['name']
    def check_password(self, raw_password):
        # HEFTODO strict check
        # Verifies the password by attempting a fresh simple bind against
        # AD as user@domain; TLS certificate checking is relaxed.
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)
        l = ldap.initialize(settings.AD_URL)
        l.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
        username = self.ldap_user['sAMAccountName'][0]
        binddn = "{0}@{1}".format(username, settings.AD_DOMAIN)
        try:
            l.simple_bind_s(binddn, raw_password)
            return True
        except ldap.INVALID_CREDENTIALS:
            return False
    def set_password(self, raw_password):
        """" HEFTODO: would prefer a non admin override
        That means we need the current password and the new password.
        Requiring those means that the change password form needs some
        rework."""
        l = backends.get_ldap_connection()
        #unicode_pass = unicode('"' + raw_password + '"', 'iso-8859-1')
        # AD requires the quoted password encoded as UTF-16-LE in the
        # unicodePwd attribute.
        unicode_pass = '"' + raw_password + '"'
        password_value = unicode_pass.encode('utf-16-le')
        add_pass = [(ldap.MOD_REPLACE, 'unicodePwd', [password_value])]
        user_dn = self.ldap_user['distinguishedName'][0]
        l.modify_s(user_dn, add_pass)
        print("password changed")
    def set_unusable_password(self):
        # NOTE(review): debug stub — does not actually disable the account.
        print("Set unusable password")
    def has_usable_password(self):
        # NOTE(review): always False; Django password fields are unused
        # because authentication is delegated to AD.
        print("has unusable password")
        return False
|
UTF-8
|
Python
| false | false | 2,013 |
16,149,077,035,295 |
4acc6dc1b83eccda4f2533022e4277341ec1d4e2
|
4148260054c2cf4605dacb8bdef3605c82eca470
|
/temboo/Library/Twitter/Trends/__init__.py
|
74440c61962db6ced93c131188d9ef9cfe1b861c
|
[] |
no_license
|
wimsy/actuarize-web
|
https://github.com/wimsy/actuarize-web
|
0f23d5f00afe3d36d430621cdb497d2e64998416
|
5f43af3019da6fb08cafeec9ff0a89df5196b864
|
refs/heads/master
| 2021-03-12T19:38:21.887681 | 2012-12-19T01:13:50 | 2012-12-19T01:13:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Weekly import *
from Available import *
from Daily import *
|
UTF-8
|
Python
| false | false | 2,012 |
17,008,070,506,164 |
cde85f82daa7462452c3581f4c034b5e3e46edfc
|
9dcf6a0c1cc82eb9fa956872e306133c5d9dc706
|
/exe/export/singlepageexport.py
|
cfab5559514a24ac1bb547258bdea6ec897e4b11
|
[
"GPL-2.0-only",
"LGPL-2.0-or-later",
"LicenseRef-scancode-secret-labs-2011",
"LGPL-2.1-only",
"MIT",
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"BSD-3-Clause",
"NPL-1.1",
"GPL-2.0-or-later",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"BSD-2-Clause"
] |
non_permissive
|
kohnle-lernmodule/palama
|
https://github.com/kohnle-lernmodule/palama
|
8a873d8e3b5576d82b4e33f34a163b2cffd9af47
|
6347d67363e0c4bbf8df6d985d6516079c0391f4
|
refs/heads/master
| 2021-01-10T08:31:51.545158 | 2013-02-19T00:11:23 | 2013-02-19T00:11:23 | 46,979,366 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
SinglePageExport will export a package as a website of HTML pages
"""
from cgi import escape
from exe.webui.blockfactory import g_blockFactory
from exe.engine.error import Error
from exe.engine.path import Path
from exe.export.singlepage import SinglePage
import logging
log = logging.getLogger(__name__)
# ===========================================================================
class SinglePageExport(object):
"""
SinglePageExport will export a package as a website of HTML pages
"""
def __init__(self, stylesDir, outputDir, imagesDir, scriptsDir, cssDir, templatesDir):
"""
'stylesDir' is the directory where we can copy the stylesheets from
'outputDir' is the directory that will be [over]written
with the website
"""
self.html = ""
self.style = None
self.name = None
self.stylesDir = Path(stylesDir)
self.outputDir = Path(outputDir)
self.imagesDir = Path(imagesDir)
self.scriptsDir = Path(scriptsDir)
self.cssDir = Path(cssDir)
self.templatesDir = Path(templatesDir)
self.page = None
# Create the output dir if it doesn't already exist
if not self.outputDir.exists():
self.outputDir.mkdir()
def export(self, package, for_print=0):
"""
Export web site
Cleans up the previous packages pages and performs the export
"""
self.style = package.style
self.page = SinglePage("index", 1, package.root)
self.page.save(self.outputDir/"index.html", for_print)
self.copyFiles(package)
def copyFiles(self, package):
"""
Copy all the files used by the website.
"""
# Copy the style sheet files to the output dir
# But not nav.css
styleFiles = [self.stylesDir/'..'/'base.css']
styleFiles += [self.stylesDir/'..'/'popup_bg.gif']
styleFiles += self.stylesDir.files("*.css")
if "nav.css" in styleFiles:
styleFiles.remove("nav.css")
styleFiles += self.stylesDir.files("*.jpg")
styleFiles += self.stylesDir.files("*.gif")
styleFiles += self.stylesDir.files("*.png")
styleFiles += self.stylesDir.files("*.js")
styleFiles += self.stylesDir.files("*.html")
styleFiles += self.stylesDir.files("*.ico")
styleFiles += self.stylesDir.files("*.ttf")
styleFiles += self.stylesDir.files("*.eot")
styleFiles += self.stylesDir.files("*.otf")
styleFiles += self.stylesDir.files("*.woff")
self.stylesDir.copylist(styleFiles, self.outputDir)
# copy the package's resource files
package.resourceDir.copyfiles(self.outputDir)
# copy script files. - with modification by lernmodule.net
self.scriptsDir.copylist(('libot_drag.js', 'common.js', 'lernmodule_net.js'),
self.outputDir)
#JR Metemos los reproductores necesarios
self.compruebaReproductores(self.page.node)
if package.license == "GNU Free Documentation License":
# include a copy of the GNU Free Documentation Licence
(self.templatesDir/'fdl.html').copyfile(self.outputDir/'fdl.html')
    def compruebaReproductores(self, node):
        """
        Check whether any media players must be shipped with the export and
        copy them into the output dir.  Scans the iDevices of *node*, then
        recurses into its children.
        (Original Spanish: "Comprobamos si hay que meter algun reproductor".)
        """
        # copy players for media idevices.
        hasFlowplayer = False
        hasMagnifier = False
        hasXspfplayer = False
        hasGallery = False
        for idevice in node.idevices:
            # Stop scanning once every player kind has already been found.
            if (hasFlowplayer and hasMagnifier and hasXspfplayer and hasGallery):
                break
            if not hasFlowplayer:
                if 'flowPlayer.swf' in idevice.systemResources:
                    hasFlowplayer = True
            if not hasMagnifier:
                if 'magnifier.swf' in idevice.systemResources:
                    hasMagnifier = True
            if not hasXspfplayer:
                if 'xspf_player.swf' in idevice.systemResources:
                    hasXspfplayer = True
            if not hasGallery:
                # Galleries are detected by iDevice class, not by resource.
                if 'GalleryIdevice' == idevice.klass:
                    hasGallery = True
        if hasFlowplayer:
            videofile = (self.templatesDir/'flowPlayer.swf')
            videofile.copyfile(self.outputDir/'flowPlayer.swf')
            controlsfile = (self.templatesDir/'flowplayer.controls.swf')
            controlsfile.copyfile(self.outputDir/'flowplayer.controls.swf')
        if hasMagnifier:
            videofile = (self.templatesDir/'magnifier.swf')
            videofile.copyfile(self.outputDir/'magnifier.swf')
        if hasXspfplayer:
            videofile = (self.templatesDir/'xspf_player.swf')
            videofile.copyfile(self.outputDir/'xspf_player.swf')
        if hasGallery:
            # Lightbox CSS/JS plus the two sprite images the gallery uses.
            imageGalleryCSS = (self.cssDir/'exe_lightbox.css')
            imageGalleryCSS.copyfile(self.outputDir/'exe_lightbox.css')
            imageGalleryJS = (self.scriptsDir/'exe_lightbox.js')
            imageGalleryJS.copyfile(self.outputDir/'exe_lightbox.js')
            self.imagesDir.copylist(('exeGallery_actions.png', 'exeGallery_loading.gif'), self.outputDir)
        # Recurse so players needed anywhere in the subtree are included.
        for child in node.children:
            self.compruebaReproductores(child)
# ===========================================================================
|
UTF-8
|
Python
| false | false | 2,013 |
14,671,608,292,239 |
f5799ae571a881c432d3dba57cf3efede0773d71
|
379934f86f2e7fce60c88222ed61bc106390271e
|
/glasslab/dataanalysis/misc/scripts/inbred_strains_pu_1_comparisons.py
|
d4cc335ed36909bd7c2e44bb1fbdec5021394e4d
|
[] |
no_license
|
karmel/glasslab
|
https://github.com/karmel/glasslab
|
a022fb3e1147382ba5f64c67d6db9b87b9bca2de
|
754774390f03852d1385c5fffeb32fcdab5cd7e4
|
refs/heads/master
| 2021-09-04T18:00:49.650817 | 2014-10-06T19:37:25 | 2014-10-06T19:37:25 | 5,957,226 | 1 | 1 | null | false | 2019-09-22T16:55:29 | 2012-09-25T21:56:42 | 2018-01-21T14:36:36 | 2018-01-20T21:50:42 | 614,032 | 1 | 1 | 0 |
Python
| false | false |
'''
Created on Mar 23, 2012
@author: karmel
'''
from __future__ import division
from glasslab.dataanalysis.graphing.seq_grapher import SeqGrapher
import os
from scipy.stats.stats import ttest_ind
if __name__ == '__main__':
    grapher = SeqGrapher()
    # Input: PU.1 peaks where C57Bl6 > BALBc, compared against NOD data.
    dirpath = '/Volumes/karmel/Desktop/Projects/GlassLab/Notes_and_Reports/Inbred strains/Peak comparisons/Compared with NOD/'
    filename = os.path.join(dirpath, 'bl6_gt_balb_with_nod_pu_1_unique.txt')
    data = grapher.import_file(filename)
    data = data.fillna(0) # For easy log graphing
    # Total peak counts per strain -- kept for reference only; the
    # normalizations below deliberately use a factor of 1 (no rescaling),
    # as the commented-out ratios show.
    wt_peaks, balb_peaks, nod_peaks, balb2_peaks = 67074, 79353, 107716, 94199
    data = grapher.normalize(data, 'balb_pu_1_tag_count', 1)#balb_peaks/wt_peaks)
    data = grapher.normalize(data, 'nod_pu_1_tag_count', 1)#nod_peaks/wt_peaks)
    data = grapher.normalize(data, 'balb2_pu_1_tag_count', 1)#balb2_peaks/wt_peaks)
    # Zero out low-count peaks (< 20 tags) to suppress noise.
    for i, row in data[data['wt_pu_1_tag_count'] < 20].iterrows(): data['wt_pu_1_tag_count'][i] = 0
    for i, row in data[data['balb_pu_1_tag_count_norm'] < 20].iterrows(): data['balb_pu_1_tag_count_norm'][i] = 0
    for i, row in data[data['nod_pu_1_tag_count_norm'] < 20].iterrows(): data['nod_pu_1_tag_count_norm'][i] = 0
    data['wt_to_balb'] = data['wt_pu_1_tag_count']/data['balb_pu_1_tag_count']
    data['nod_to_balb'] = data['nod_pu_1_tag_count']/data['balb2_pu_1_tag_count']
    # NOTE(review): nod_sv_id <= .1 is used to mean "NOD SNP matches C57Bl6"
    # -- confirm this threshold against the upstream SNP-calling pipeline.
    data['nod_with_bl6'] = data['nod_sv_id'] <= .1
    nod_with_bl6 = data[data['nod_with_bl6'] == True]
    nod_with_balb = data[data['nod_with_bl6'] == False]
    if False:  # Scatterplot branch, disabled by hand.
        ax = grapher.scatterplot(nod_with_bl6, 'wt_pu_1_tag_count', 'nod_pu_1_tag_count',
                        subplot=121, log=True, color='blue',
                        xlabel='C57Bl6 PU.1 tag counts', ylabel='NOD PU.1 tag counts',
                        title='C57Bl6 vs. NOD PU.1 peaks\nwhere C57Bl6 has a PU.1 motif and BALBc does not',
                        label='NOD SNP == C57Bl6 SNP',
                        add_noise=False,
                        show_2x_range=False, show_legend=True,
                        show_count=True, show_correlation=True, text_shift=False,
                        text_color=True, show_plot=False)
        #grapher.save_plot(os.path.join(dirpath, 'bl6_vs_nod_pu_1_peak_tag_counts_bl6_gt_balb_no_balb_motif_nod_eq_bl6.png'))
        #grapher.show_plot()
        # Overlay the NOD==BALBc subset on the same axes in red.
        ax = grapher.scatterplot(nod_with_balb, 'wt_pu_1_tag_count', 'nod_pu_1_tag_count',
                        subplot=122, log=True, color='red',
                        xlabel='C57Bl6 PU.1 tag counts', ylabel='NOD PU.1 tag counts',
                        title='C57Bl6 vs. NOD PU.1 peaks\nwhere C57Bl6 has a PU.1 motif and BALBc does not',
                        label='NOD SNP == BALBc SNP',
                        add_noise=False,
                        show_2x_range=False, show_legend=True, text_color=True,
                        show_count=True, show_correlation=True, show_plot=False, ax=ax)
        #ax.set_ylim(4,128)
        grapher.save_plot(os.path.join(dirpath, 'bl6_vs_nod_pu_1_peak_tag_counts_bl6_gt_balb.png'))
        grapher.show_plot()
    if True:
        # Boxplots: avg PU.1 in Bl6 for whole set; avg PU.1 in BALB for whole set;
        # avg PU.1 for NOD in whole set; avg PU.1 in NOD set with Bl6; avg PU.1 in NOD set with BALB
        ax = grapher.boxplot([data['wt_pu_1_tag_count'],data['balb_pu_1_tag_count_norm'],data['nod_pu_1_tag_count_norm'],
                              nod_with_bl6['nod_pu_1_tag_count_norm'],nod_with_balb['nod_pu_1_tag_count_norm']],
                             bar_names=['C57Bl6 Peaks', 'BALBc Peaks', 'NOD Peaks',
                                        'NOD Peaks\nwhere\nNOD == C57Bl6', 'NOD Peaks\nwhere\nNOD == BALBc',],
                             title='PU.1 peak tags where BALBc has a SNP that ruins its PU.1 Motif',
                             xlabel='', ylabel='Tags per PU.1 peak',
                             show_outliers=False, show_plot=False)
        grapher.save_plot(os.path.join(dirpath, 'peak_boxplots_no_balb_motif_filter_low_peaks.png'))
        grapher.show_plot()
        # Two-sample t-tests comparing each group against C57Bl6.
        print 'p-val that BALBc is different than C57Bl6: %g' % ttest_ind(data['wt_pu_1_tag_count'],data['balb_pu_1_tag_count_norm'])[1]
        print 'p-val that NOD (all) is different than C57Bl6: %g' % ttest_ind(data['wt_pu_1_tag_count'],data['nod_pu_1_tag_count_norm'])[1]
        print 'p-val that NOD == C57Bl6 is different than C57Bl6: %g' % ttest_ind(data['wt_pu_1_tag_count'],nod_with_bl6['nod_pu_1_tag_count_norm'])[1]
        print 'p-val that NOD == BALBc is different than C57Bl6: %g' % ttest_ind(data['wt_pu_1_tag_count'],nod_with_balb['nod_pu_1_tag_count_norm'])[1]
|
UTF-8
|
Python
| false | false | 2,014 |
6,622,839,622,055 |
83f982ede5f5e2b681b6873f01184a592ef1a767
|
5fbce3f3065a58725303ffc297933bbe0caadada
|
/greenlet_customer.py
|
9d6ea95ff260246e549be15027a25dd4a7cfbdde
|
[] |
no_license
|
pombredanne/motoboto_benchmark
|
https://github.com/pombredanne/motoboto_benchmark
|
8add982ac3f3a592479573daddf055fff4ccb658
|
48f249f4e0661ba20a79437d10af2fd4eb1b8b7a
|
refs/heads/master
| 2017-10-03T02:18:26.979619 | 2013-03-24T21:06:20 | 2013-03-24T21:06:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
greenlet_customer.py
A greenlet to represent a single nimbus.io customer
"""
from gevent.greenlet import Greenlet
from base_customer import BaseCustomer
class GreenletCustomer(Greenlet, BaseCustomer):
    """
    A greenlet object to represent a single nimbus.io customer.
    Combines gevent's Greenlet (for concurrency) with BaseCustomer
    (for the customer behaviour itself).
    """
    def __init__(self, halt_event, user_identity, test_script):
        # Initialise both bases explicitly: Greenlet takes no extra
        # arguments, BaseCustomer carries all customer state.
        Greenlet.__init__(self)
        BaseCustomer.__init__(self, halt_event, user_identity, test_script)
    def join(self, timeout=None):
        # Log before delegating so shutdown progress is visible.
        self._log.info("joining")
        Greenlet.join(self, timeout)
    def _run(self):
        # Greenlet entry point: delegate to BaseCustomer's main loop.
        self._main_loop()
    def __str__(self):
        # Used in log output; presumably user_name identifies the customer
        # uniquely -- confirm against BaseCustomer.
        return self._user_identity.user_name
|
UTF-8
|
Python
| false | false | 2,013 |
4,294,967,296,635 |
65ff436369e238376f2727669172d18cdda12ec9
|
e4a3e3587122312e1d23646f720288428a431355
|
/sphinx_multitheme_ext.py
|
055b9b4a53124a11548582c79d737a015c8333ef
|
[
"MIT"
] |
permissive
|
shimizukawa/sphinx-multitheme-sample
|
https://github.com/shimizukawa/sphinx-multitheme-sample
|
268115c75d54ce255789c34be6b7e336fab48982
|
c5bd0a4409827c9f63cb79258513f2a8efd23c28
|
refs/heads/master
| 2022-11-11T10:41:03.096134 | 2013-03-01T16:41:46 | 2013-03-01T16:41:46 | 277,073,887 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
#conf.py settings to use this extension.
extensions = ['sphinx_multitheme_ext']
html_theme = 'default'
html_multi_themes = {
# eg. 'regex-path', ('theme-name', theme_options),
r'^foo.*': ('haiku', {}),
r'^bar/.*': ('pyramid', {}),
}
html_page_template = {
# eg. 'regex-path', 'template-name',
r'^egg.*': 'egg.html',
}
"""
import re
from os import path, makedirs
import codecs
from sphinx.theming import Theme
from sphinx.builders import html
from sphinx.util import copy_static_entry
from sphinx.util.osutil import os_path, relative_uri, ensuredir, copyfile
def init_multi_templates(self):
    """Set up one (theme, options, template bridge) triple per
    ``html_multi_themes`` entry, keyed by the compiled path regex."""
    self.multithemes = {}
    for pattern, (themename, themeoptions) in self.config.html_multi_themes.items():
        theme = Theme(themename)
        bridge = create_template_bridge(self)
        bridge.init(self, theme)
        self.multithemes[re.compile(pattern)] = (
            theme, themeoptions.copy(), bridge)
def create_template_bridge(self):
    """Return the template bridge configured."""
    bridge_path = self.config.template_bridge
    if not bridge_path:
        # No custom bridge configured: fall back to Sphinx's builtin loader.
        from sphinx.jinja2glue import BuiltinTemplateLoader
        return BuiltinTemplateLoader()
    return self.app.import_object(bridge_path, 'template_bridge setting')()
def theme_context(theme, options):
    """Build the template-context entries contributed by *theme*.

    Returns a dict with the computed ``style`` stylesheet path plus every
    theme option exposed under a ``theme_<name>`` key.
    """
    ctx = {}
    stylename = theme.get_confstr('theme', 'stylesheet')
    ctx['style'] = theme.name + '/' + stylename
    # .items() instead of the Python-2-only .iteritems(): iteration result
    # is identical, and the helper stays portable.
    ctx.update(
        ('theme_' + key, val) for (key, val) in
        theme.get_options(options).items())
    return ctx
def handle_page(self, pagename, addctx, templatename='page.html',
                outfilename=None, event_arg=None):
    """Render one page, honouring per-page template and theme overrides.

    Replacement for StandaloneHTMLBuilder.handle_page: after building the
    standard rendering context it overrides the template name via the
    ``html_page_template`` regexes and the theme/template bridge via the
    ``html_multi_themes`` matchers before rendering and writing the file.
    """
    ctx = self.globalcontext.copy()
    # current_page_name is backwards compatibility
    ctx['pagename'] = ctx['current_page_name'] = pagename
    default_baseuri = self.get_target_uri(pagename)
    # in the singlehtml builder, default_baseuri still contains an #anchor
    # part, which relative_uri doesn't really like...
    default_baseuri = default_baseuri.rsplit('#', 1)[0]
    def pathto(otheruri, resource=False, baseuri=default_baseuri):
        # Resolve a document or resource URI relative to the current page.
        if resource and '://' in otheruri:
            # allow non-local resources given by scheme
            return otheruri
        elif not resource:
            otheruri = self.get_target_uri(otheruri)
        uri = relative_uri(baseuri, otheruri) or '#'
        return uri
    ctx['pathto'] = pathto
    ctx['hasdoc'] = lambda name: name in self.env.all_docs
    if self.name != 'htmlhelp':
        ctx['encoding'] = encoding = self.config.html_output_encoding
    else:
        # htmlhelp uses its own (non-configurable) output encoding.
        ctx['encoding'] = encoding = self.encoding
    ctx['toctree'] = lambda **kw: self._get_local_toctree(pagename, **kw)
    self.add_sidebars(pagename, ctx)
    ctx.update(addctx)
    # Per-page template override: the LAST matching regex wins (no break).
    for key, _templatename in self.config.html_page_template.items():
        matcher = re.compile(key)
        if matcher.match(pagename):
            templatename = _templatename
    self.app.emit('html-page-context', pagename, templatename,
                  ctx, event_arg)
    try:
        # Per-page theme override: the first matching html_multi_themes
        # pattern supplies both theme context and template bridge.
        for matcher in self.multithemes:
            if matcher.match(pagename):
                theme, options, templates = self.multithemes[matcher]
                ctx.update(theme_context(theme, options))
                break
        else:
            templates = self.templates
        output = templates.render(templatename, ctx)
    except UnicodeError:
        self.warn("a Unicode error occurred when rendering the page %s. "
                  "Please make sure all config values that contain "
                  "non-ASCII content are Unicode strings." % pagename)
        return
    if not outfilename:
        outfilename = self.get_outfilename(pagename)
    # outfilename's path is in general different from self.outdir
    ensuredir(path.dirname(outfilename))
    try:
        f = codecs.open(outfilename, 'w', encoding, 'xmlcharrefreplace')
        try:
            f.write(output)
        finally:
            f.close()
    except (IOError, OSError), err:
        self.warn("error writing file %s: %s" % (outfilename, err))
    if self.copysource and ctx.get('sourcename'):
        # copy the source file for the "show source" link
        source_name = path.join(self.outdir, '_sources',
                                os_path(ctx['sourcename']))
        ensuredir(path.dirname(source_name))
        copyfile(self.env.doc2path(pagename), source_name)
def copy_static_theme_files(self):
    """Copy the static files supplied by every registered multi-theme
    into ``_static/<theme name>`` inside the output directory."""
    for theme, options, _templates in self.multithemes.values():
        # Mirror the rendering context that handle_page would build.
        ctx = self.globalcontext.copy()
        ctx.update(self.indexer.context_for_searchtool())
        ctx.update(theme_context(theme, options))
        dest = path.join(self.outdir, '_static', theme.name)
        if not path.exists(dest):
            makedirs(dest)
        # Walk the theme inheritance chain base-first so derived themes
        # overwrite files inherited from their ancestors.
        for themepath in theme.get_dirchain()[::-1]:
            copy_static_entry(path.join(themepath, 'static'), dest, self, ctx)
def patch():
    """Monkey-patch StandaloneHTMLBuilder so template initialisation,
    page handling and static-file copying become multi-theme aware."""
    orig_init_templates = html.StandaloneHTMLBuilder.init_templates
    orig_copy_static = html.StandaloneHTMLBuilder.copy_static_files

    def patched_init(self):
        # Run the stock initialisation, then add our per-pattern bridges.
        orig_init_templates(self)
        init_multi_templates(self)

    def patched_copy(self):
        # Stock static copy first, then the per-theme static trees.
        orig_copy_static(self)
        copy_static_theme_files(self)

    html.StandaloneHTMLBuilder.init_templates = patched_init
    html.StandaloneHTMLBuilder.handle_page = handle_page
    html.StandaloneHTMLBuilder.copy_static_files = patched_copy
def setup(app):
    """Sphinx extension entry point: register the two config values and
    apply the builder monkey-patches."""
    for config_name in ('html_multi_themes', 'html_page_template'):
        app.add_config_value(config_name, {}, True)
    patch()
|
UTF-8
|
Python
| false | false | 2,013 |
3,204,045,615,576 |
9434a70f2af2bc6dec26a61a73f01a70a23ac1cd
|
e53bb0816bf91148647563cbb799bd17d019ec46
|
/client/gentoostats/packages.py
|
6d75ccc1713638b1909ba8783dd73700e421576e
|
[
"GPL-3.0-only"
] |
non_permissive
|
vikraman/gentoostats
|
https://github.com/vikraman/gentoostats
|
fcf31567ba29e915e13c000e733e5161909d3601
|
57d76a1e76d77b23ed3e616dae761bc8e6b7021c
|
refs/heads/master
| 2021-01-01T06:54:13.996569 | 2012-04-17T14:06:13 | 2012-04-17T14:06:13 | 2,025,303 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import portage
from gentoostats.dbapi import VARDB
class Packages(object):
    """
    Providers that read the set of installed packages out of portage's
    installed-package database (VARDB).
    """
    def getInstalledCPs(self, sort=False):
        """
        Return installed packages as category/packagename,
        optionally sorted.
        """
        cps = VARDB.cp_all()
        return sorted(cps) if sort else cps

    def getInstalledCPVs(self, sort=False):
        """
        Return installed packages as category/packagename-version,
        optionally sorted.
        """
        cpvs = VARDB.cpv_all()
        return sorted(cpvs) if sort else cpvs
|
UTF-8
|
Python
| false | false | 2,012 |
8,469,675,524,937 |
29ee222f7d0f28cd84dddc0962aae1c4db7d5af3
|
c9402425dc4c6e28eeb37ed0b65d39795eea339a
|
/crawler/create_movies_json.py
|
724cba49a0c4af2c9632dac1c7dec7dd989bcf48
|
[] |
no_license
|
yohoadam/actorsHITS-1
|
https://github.com/yohoadam/actorsHITS-1
|
cdbc3e455b057f4bc30ceb6f8cb3298b4741f419
|
e01e09609f6a7dd829fa018d0a2ca98c133dc098
|
refs/heads/master
| 2020-04-12T01:00:11.375714 | 2012-12-30T05:01:00 | 2012-12-30T05:01:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Script for outputting needed information about ALL movies to JSON files.
# Pulls movie information from a local PostgreSQL database.
#
# Author: Adam Yoho
# 15 November 2012
import json
import settings
import sys
from imdb import IMDb
from imdb import Person
from imdb import Movie
print 'getting database access object...'
# Local IMDb mirror in PostgreSQL; adult titles excluded.
ia = IMDb('sql', uri='postgres://localhost/imdb', adultSearch=0)
print 'connected!\n'
movie_dict = {}
OUTPUT_DIR = './'
INV_OUTPUT_DIR = './'
ERROR_OUTPUT_DIR = './'
PRINT_INTERVAL = 250 # After how many loop iterations should a status be output
RECORD_INTERVAL = 100 # How many movies should be recorded per file
MAX_MOVIE_ID = settings.MAX_TITLE_ID # The highest ID value in the 'title' database table
START_ID = 1 # Start gathering movie information at this ID
END_ID = MAX_MOVIE_ID
sub_file_string = '' # JSON-formatted string of all valid movies ready to be dumped to a file
invalid_string = '' # JSON-formatted string of all non-movies looked up
movies_recorded = 0
invalid_movie_count = 0 # Keep count of number of non-movies found
invalid_file_num = 1
id = START_ID # Initialize the id variable to where we wish to begin searching
start_id = id # Records which ID the current subset of movies ready to be dumped starts at
while id <= END_ID:
    try:
        movie = ia.get_movie(id)
        # Only keep title kinds that count as "movies" for this analysis.
        if movie == None or (movie.get('kind') != 'movie' and movie.get('kind') != 'tv movie' and movie.get('kind') != 'video movie' and movie.get('kind') != 'tv mini series'):
            # Not a valid movie
            inv_movie = {settings.ID_K:id}
            invalid_string += json.dumps(inv_movie) + '\n'
            invalid_movie_count += 1
            id += 1
            continue
        title = movie.get('long imdb title')
        if id % PRINT_INTERVAL == 0:
            print 'Looking up ID {0} of {1}\t({2:.1%}, {3:.3%} of total):\t'.format(id, END_ID, float(id-START_ID+1)/(END_ID-START_ID), float(id)/MAX_MOVIE_ID), u'{0}'.format(title)
        rating = movie.get(settings.RATING_K)
        votes = movie.get(settings.VOTE_K)
        genres = movie.get(settings.M_GENRES_K)
        director = movie.get(settings.DIRECTOR_K)
        director_dict = {}
        if director != None and director != []:
            for d in director:
                director_dict[d.getID()] = d.get(settings.NAME_K)
        if rating == None or votes == None or genres == None or genres == []:
            # We can only deal with movies that have this information
            inv_movie = {settings.ID_K:id}
            invalid_string += json.dumps(inv_movie) + '\n'
            invalid_movie_count += 1
            id += 1
            continue
        movie_dict[id] = {settings.ID_K:id, settings.TITLE_K:title, settings.RATING_K:rating, settings.VOTE_K:votes, settings.M_GENRES_K:genres, settings.DIRECTOR_K:director_dict}
        # Add movie info to string to be output to a file later
        sub_file_string += json.dumps(movie_dict[id]) + '\n'
        movies_recorded += 1
        # Write the current movie subset to a file, then continue
        if movies_recorded % RECORD_INTERVAL == 0:
            subfilename = OUTPUT_DIR + 'sub_movies_' + str(start_id) + '-' + str(id) + '.json'
            print '\nwriting', subfilename, '...'
            sub_file = open(subfilename, 'w')
            sub_file.write(sub_file_string)
            sub_file.close()
            print
            # Update which ID the next subset of movies to be dumped will start at
            start_id = id + 1
            sub_file_string = ''
        # Write the invalid titles found to a file
        # NOTE(review): 0 % RECORD_INTERVAL == 0, so this branch also fires
        # while invalid_movie_count is still 0, producing empty
        # sub_inv_movies files -- confirm whether that is intended.
        if invalid_movie_count % RECORD_INTERVAL == 0:
            subfilename = INV_OUTPUT_DIR + 'sub_inv_movies_' + str(invalid_file_num) + '.json'
            print '\nwriting', subfilename, '...'
            sub_file = open(subfilename, 'w')
            sub_file.write(invalid_string)
            sub_file.close()
            print
            invalid_movie_count = 0
            invalid_file_num += 1
            invalid_string = ''
        id += 1
    except:
        # Best-effort: log the failure and keep crawling the next ID.
        error = 'Error (ID:' + str(id) + '):' + str(sys.exc_info()) + '\n'
        err_file = open(ERROR_OUTPUT_DIR + 'movie_load_errors.txt', 'a')
        err_file.write(error)
        err_file.close()
        id += 1
        pass
# Write the residual movies to a file
if sub_file_string != '':
    subfilename = OUTPUT_DIR + 'sub_movies_' + str(start_id) + '-' + str(id-1) + '.json'
    print '\nwriting', subfilename, '...'
    sub_file = open(subfilename, 'w')
    sub_file.write(sub_file_string)
    sub_file.close()
    print
# Write the residual invalid titles to a file
if invalid_string != '':
    inv_filename = INV_OUTPUT_DIR + 'sub_inv_movies_' + str(invalid_file_num) + '.json'
    print 'dumping invalid movies', inv_filename, '...'
    invalid_file = open(inv_filename, 'w')
    invalid_file.write(invalid_string)
    invalid_file.close()
    print
|
UTF-8
|
Python
| false | false | 2,012 |
4,681,514,375,044 |
f659c00bd42161b98ae41a4214b65deaae5b6015
|
8b9d3fa48e87579a74b187abf781d5916b6b47df
|
/geoutils/tests/__init__.py
|
8910a02bf2eb5f998c0000988452c0de5bbe04ac
|
[] |
no_license
|
loum/geoutils
|
https://github.com/loum/geoutils
|
3f34b10bfaff8978af09f01de03723b71cd8be4f
|
034787d9a54856dac12988aaa05c366c5da4d7ec
|
refs/heads/master
| 2021-01-19T08:54:54.983763 | 2014-12-01T04:59:50 | 2014-12-01T04:59:50 | 22,494,847 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Support shorthand import of our classes into the namespace.
"""
from test_standard import TestStandard
from test_nitf import TestNITF
from test_metadata import TestMetadata
from test_geoimage import TestGeoImage
from test_datastore import TestDatastore
from test_modelbase import TestModelBase
from test_schema import TestSchema
from test_auditer import TestAuditer
from test_gdelt import TestGdelt
|
UTF-8
|
Python
| false | false | 2,014 |
3,934,190,068,820 |
f0bb47fcac68d0340dc1f982a49e40c32d312924
|
2a24dba82767419cf7d2269875bf0a297f41580c
|
/examples/basics/visuals/image_visual.py
|
dfe215215fe58c2c1cf70d25571f38bcb7cd10f7
|
[
"BSD-3-Clause",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
shjoshi/vispy
|
https://github.com/shjoshi/vispy
|
58b300d23486b7478b786977b3548dd7225de847
|
2f3d169aa60c738467e766c59096f51570483d6f
|
refs/heads/master
| 2020-12-25T12:40:36.545768 | 2014-08-06T22:59:35 | 2014-08-06T22:59:35 | 22,704,584 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Simple demonstration of ImageVisual.
"""
import numpy as np
import vispy.app
from vispy import gloo
from vispy.scene import visuals
# 100x100 RGB noise image: normal(mean=128, sd=50) cast to unsigned bytes.
# NOTE(review): .astype(np.ubyte) wraps out-of-range values rather than
# clipping them -- confirm that is acceptable for this demo.
image = np.random.normal(size=(100, 100, 3), loc=128,
                         scale=50).astype(np.ubyte)
class Canvas(vispy.scene.SceneCanvas):
    """Minimal SceneCanvas that draws a single ImageVisual."""
    def __init__(self):
        # Create the visual before the base-class __init__ so it exists by
        # the time any draw event can fire.
        self.image = visuals.Image(image)
        vispy.scene.SceneCanvas.__init__(self, close_keys='escape')
        self.size = (800, 800)
        self.show()
    def on_draw(self, ev):
        # Clear to black, set a full-window viewport, then draw the image.
        gloo.clear(color='black', depth=True)
        self.push_viewport((0, 0) + self.size)
        self.image.draw()
if __name__ == '__main__':
    win = Canvas()
    import sys
    # Only start the event loop when not running under `python -i`.
    if sys.flags.interactive != 1:
        vispy.app.run()
|
UTF-8
|
Python
| false | false | 2,014 |
17,849,884,096,818 |
2b8899f3bb11e239f230e1bf2ae53b0764b3d656
|
7223d7fe87f84b01f833b44f8201ee218168a34d
|
/config.py
|
db9437b611b339e4833fb53a52fea7eac1910365
|
[] |
no_license
|
faith0811/chart3
|
https://github.com/faith0811/chart3
|
434e425b2894bf29415b97fb7f390562d0658369
|
3e2fc3f6d72d32b175e7aa1331c83f6f245c4fd7
|
refs/heads/master
| 2016-09-05T09:44:33.711807 | 2014-09-29T13:45:17 | 2014-09-29T13:45:17 | 24,144,299 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# chart3/config.py
# Run with debugging enabled -- development setting; disable in production.
DEBUG = True
# Session-signing secret (presumably Flask's SECRET_KEY -- verify); should
# be loaded from the environment rather than committed to source control.
SECRET_KEY = 'j\x0f\x96t\xff\xab~@x\xec\xa1\xfc\n\x05\x11A\x1f\xd2\xa51\xaaL\xc6\x81'
|
UTF-8
|
Python
| false | false | 2,014 |
7,421,703,535,857 |
83775a56bd03f44790cecb53aa7bb5d48302f8d5
|
1be4d7c687ae6bd1e9b222f017e76dea25d21b8f
|
/src/minitage/core/makers/buildout.py
|
612eba55add2a6a4549ae8b2adddc7cfbde61743
|
[] |
no_license
|
minitage/minitage
|
https://github.com/minitage/minitage
|
27f38493e1facc0263bc57239fe23c742c99be72
|
532cfbfd103a84b8e5917af8e696bb9eb6e09f7e
|
refs/heads/master
| 2021-01-02T09:43:14.641604 | 2013-09-10T11:19:21 | 2013-09-10T11:19:21 | 9,155,272 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__docformat__ = 'restructuredtext en'
import os
import sys
import logging
import re
re_f = re.S|re.M|re.U
import copy
import pkg_resources
import urllib2
from minitage.core.makers import interfaces
from iniparse import ConfigParser
from minitage.core.unpackers import interfaces as uinterfaces
import minitage.core.core
import minitage.core.common
import subprocess
import traceback
from threading import Thread
import fcntl
try:
from Queue import Queue, Empty
except ImportError:
from queue import Queue, Empty # python 3.x
ON_POSIX = 'posix' in sys.builtin_module_names
def run_boot_setup():
    """Spawn ``bin/buildout setup .minitage/setup.py`` with all three
    standard streams piped; returns the Popen handle without waiting."""
    cmd = ['bin/buildout', 'setup', '.minitage/setup.py']
    return subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            stdin=subprocess.PIPE)
def enqueue_output(ret, queue):
    """Pump a subprocess's stdout/stderr into *queue* and echo them to this
    process's stdout, then close both pipes.

    Stops once both streams return '' (EOF) AND the process has a return
    code set -- returncode is populated by the poll()/wait() the caller
    performs concurrently.
    """
    while True:
        out = ret.stdout.read()
        eout = ret.stderr.read()
        # `is not None` instead of `!= None`: identity check is the correct
        # idiom and avoids any __eq__ surprises.
        if (eout == '') and (out == '') and (ret.returncode is not None):
            break
        queue.put(out)
        queue.put(eout)
        sys.stdout.write(out)
        sys.stdout.write(eout)
    ret.stdout.close()
    ret.stderr.close()
class BuildoutError(interfaces.IMakerError):
    """General Buildout Error."""

# Logger name shared by this module's classes.
__logger__ = 'minitage.makers.buildout'

class MrDeveloperError(BuildoutError):
    """Raised when a mr.developer invocation fails."""
    pass
def select_fl(fl):
    """Return the first directory in *fl* that contains a zc.buildout
    distribution archive alongside a setuptools or distribute one, or
    None when no directory qualifies."""
    archive_exts = ('gz', 'zip', 'egg')
    for candidate in fl:
        if not os.path.exists(candidate):
            continue
        entries = os.listdir(candidate)

        def holds(prefix):
            # True when the directory holds an archive for *prefix*.
            return any(name.startswith(prefix) and name.endswith(archive_exts)
                       for name in entries)

        if holds('zc.buildout') and (holds('setuptools') or holds('distribute')):
            return candidate
    return None
def get_offline(opts):
    """Return a truthy value when the build must run offline: either the
    ``offline`` option is set, or the attached minimerge instance is in
    offline mode.

    BUGFIX: the original expression ``(True=='minimerge' in opts)`` was a
    Python chained comparison -- ``(True == 'minimerge') and ('minimerge'
    in opts)`` -- which is always False, so minimerge's ``_offline`` flag
    was silently ignored.
    """
    minimerge = opts.get('minimerge')
    minimerge_offline = minimerge._offline if minimerge is not None else False
    offline = (opts.get('offline', False) or minimerge_offline)
    return offline
class BuildoutMaker(interfaces.IMaker):
"""Buildout Maker.
"""
def select_ds(self, distribute_setup_places, py = None):
mfile = 'distribute_setup.py'
if self.has_setuptools7(py):
mfile = 'ez_setup.py'
ds = None
for i in distribute_setup_places:
ds = os.path.join(i, mfile)
if os.path.exists(ds):
break
else:
ds = None
return ds
def __init__(self, config = None, verbose=False):
"""Init a buildout maker object.
Arguments
- config keys:
- options: cli args for buildout
"""
if not config:
config = {}
self.logger = logging.getLogger(__logger__)
self.config = config
self.cwd = os.getcwd()
self.buildout_config = 'buildout.cfg'
interfaces.IMaker.__init__(self)
def match(self, switch):
"""See interface."""
if switch == 'buildout':
return True
return False
    def has_setuptools7(self, py=None):
        """Return True when the python at *py* (default: this interpreter)
        has a 'distribute' distribution whose version is NOT 0.6.x --
        i.e. the setuptools>=0.7 merge line.

        Any failure (no distribute installed, interpreter missing, etc.)
        is treated as False.
        """
        new_st = False
        if not py:
            py = sys.executable
        try:
            # Probe the target interpreter in a subprocess so we test ITS
            # installed distributions, not ours.
            cmd = [py,
                   "-c",
                   "import pkg_resources;print not pkg_resources.get_distribution('distribute').version.startswith('0.6')"]
            #self.logger.debug('Run %s' % " ".join(cmd))
            ret = subprocess.Popen(
                cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE,)
            if ret.wait() == 0:
                # The probe prints True/False; parse it case-insensitively.
                if 'true' in ret.stdout.read().lower():
                    new_st = True
        except Exception, e:
            new_st = False
        return new_st
    def upgrade_code(self, opts):
        """Try to run a mr.developer checkout update ('bin/develop up');
        raises MrDeveloperError on failure.

        Only runs when not offline, when bin/develop exists and when
        minimerge says this package should be updated.
        """
        if (
            not opts['offline']
            and os.path.isfile('bin/develop')
            and opts['minimerge'].is_package_to_be_updated(
                opts['minibuild'])
        ):
            content = open('bin/develop').read()
            # Only treat bin/develop as mr.developer when its script body
            # mentions it (other tools may install a same-named script).
            if 'mr.developer' in content:
                self.logger.info(
                    'Running mr.developer update')
                # Probe the help output first to learn whether this
                # mr.developer version supports '--force'.
                helpret = subprocess.Popen(
                    ['bin/develop', 'up', '--help'],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    stdin=subprocess.PIPE,
                )
                outputs = []
                while helpret.poll() is None:
                    outputs.append(helpret.communicate())
                if helpret.wait() != 0:
                    raise MrDeveloperError('error with help')
                else:
                    content = ''.join([a[0] for a in outputs])
                    cmd = ['bin/develop', 'up', '-v']
                    if '--force' in content:
                        cmd.append('--force')
                    self.logger.info(
                        'Running %s' % ' '.join(cmd))
                    ret = subprocess.Popen(
                        cmd, bufsize=-1,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        stdin=subprocess.PIPE,)
                    #for i in (ret.stdout, ret.stderr):
                    #    fd = i.fileno()
                    #    fl = fcntl.fcntl(fd, fcntl.F_GETFL)
                    #    fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
                    # Drain stdout/stderr on a daemon thread so the child
                    # cannot dead-lock on a full pipe while we wait().
                    q = Queue()
                    t = Thread(target=enqueue_output, args=(ret, q,))
                    t.daemon = True
                    t.start()
                    outputs = []
                    ret.wait()
                    buf = ''
                    while not q.empty():
                        buf += q.get()
                    q.join()
                    if t and t.is_alive(): #wait for thread to finish
                        t.join()
                    if ret.returncode == 0:
                        self.logger.info('mr.developer successfuly updated code')
                    else:
                        raise MrDeveloperError('mr.developer failed to update code')
    def upgrade_bootstrap(self, minimerge, offline, directory=".", py=None):
        """Ensure *directory* has an up-to-date bootstrap.py.

        Scans the project's .cfg files to decide whether it is pinned to
        zc.buildout 1.x (then the v1 bootstrap URL is used, else v2),
        downloads a fresh bootstrap.py at most once (tracked by a marker
        file in minimerge's history dir) unless *offline*, injects a
        2-second socket default timeout, and restores the previous content
        on any failure.
        """
        buildout1 = False
        try:
            def findcfgs(path, cfgs=None):
                # Collect every .cfg under *path*, skipping var/ and parts/.
                ignored = ['var', 'parts']
                dirs = []
                if not cfgs: cfgs=[]
                for i in os.listdir(path):
                    fi = os.path.join(path, i)
                    if fi.endswith('.cfg') and os.path.isfile(fi):
                        cfgs.append(fi)
                    if os.path.isdir(fi) and (not i in ignored):
                        dirs.append(fi)
                for fpath in dirs:
                    for p, ids, ifs in os.walk(fpath):
                        for i in ifs:
                            if i.endswith('.cfg'):
                                cfgs.append(os.path.join(p, i))
                return cfgs
            files = findcfgs(directory)
            for f in files:
                fic = open(f)
                # A 'zc.buildout = 1' pin or a buildout.dumppick mention
                # marks the project as buildout-1 only.
                buildout1re = re.compile('^zc\.buildout\s*=\s*1', re_f)
                dfic = fic.read()
                if (
                    ('buildout.dumppick' in dfic)
                    or
                    (buildout1re.search(dfic))
                ):
                    buildout1 = True
                fic.close()
            adirectory = os.path.abspath(directory)
            # Dependency/egg build trees always use the v1 bootstrap.
            if (
                ('dependencies/' in adirectory)
                or ('eggs/' in adirectory)
            ):
                buildout1 = True
        except Exception, e:
            pass
        if buildout1:
            booturl = 'http://downloads.buildout.org/1/bootstrap.py'
        else:
            #if self.has_setuptools7(py=py):
            #    booturl = 'https://raw.github.com/tseaver/buildout/use-setuptools-0.7/bootstrap/bootstrap.py'
            #else:
            booturl = 'http://downloads.buildout.org/2/bootstrap.py'
        self.logger.debug('Using %s' % booturl)
        # Try to download an up-to-date bootstrap, set a socket default
        # timeout inside it, and write the possibly-modified content back.
        try:
            try:
                fic = open('bootstrap.py')
                oldcontent = fic.read()
                fic.close()
            except:
                oldcontent = ""
            data = oldcontent
            updated = False
            dled = False
            if not offline:
                try:
                    # Marker file: its presence means we already refreshed
                    # bootstrap.py once; only download when it is missing.
                    open(
                        os.path.join(minimerge.history_dir,
                                     'updated_bootstrap'))
                except:
                    self.logger.info('Bootstrap updated')
                    data = urllib2.urlopen(booturl).read()
                    updated = True
                    dled = True
            if not 'socket.setdefaulttimeout' in data:
                # Inject a short network timeout right after the shebang
                # so bootstrap runs don't hang on dead mirrors.
                updated = True
                ldata = data.splitlines()
                ldata.insert(1, 'import socket;socket.setdefaulttimeout(2)')
                data = '\n'.join(ldata)
            if updated:
                self.logger.info('Bootstrap updated')
                fic = open('bootstrap.py', 'w')
                fic.write(data)
                fic.close()
            if dled:
                # Record the successful download in the marker file.
                afic = open(os.path.join(
                    minimerge.history_dir, 'updated_bootstrap'), 'w')
                afic.write('foo')
                afic.close()
        except:
            # Best-effort rollback: restore the previous bootstrap.py.
            if oldcontent:
                fic = open('bootstrap.py', 'w')
                fic.write(oldcontent)
                fic.close()
def reinstall(self, directory, opts=None):
"""Rebuild a package.
Warning this will erase .installed.cfg forcing buildout to rebuild.
Problem is that underlying recipes must know how to handle the part
directory to be already there.
This will be fine for minitage recipes in there. But maybe that will
need boiler plate for other recipes.
Exceptions
- ReinstallError
Arguments
- directory : directory where the packge is
- opts : arguments for the maker
"""
mypath = os.path.join(
directory,
'.installed.cfg'
)
if os.path.exists(mypath):
os.remove(mypath)
self.install(directory, opts)
    def install(self, directory, opts=None):
        """Make a package by running buildout inside *directory*.

        Exceptions
        - BuildoutError (wrapping any underlying failure's traceback)
        Arguments
        - directory : directory where the package is
        - opts : arguments for the maker (parts, upgrade, minibuild, ...)
        """
        if opts is None:
            opts = {}
        self.logger.info(
            'Running buildout in %s (%s)' % (directory,
                                             self.buildout_config))
        os.chdir(directory)
        minibuild = opts.get('minibuild', None)
        installed_cfg = os.path.join(directory, '.installed.cfg')
        if not opts:
            opts = {}
        try:
            try:
                parts = opts.get('parts', False)
                # Allow parts to be given as a whitespace-separated string.
                if isinstance(parts, str):
                    parts = parts.split()
                category = ''
                if minibuild: category = minibuild.category
                # Skip the run entirely in no-update mode when a previous
                # install left a .installed.cfg (eggs are always rebuilt).
                if (not opts.get('upgrade', True)
                    and os.path.exists(installed_cfg)
                    and (not category=='eggs')):
                    self.logger.info(
                        'Buildout will not run in %s'
                        ' as there is a .installed.cfg file'
                        ' indicating us that the software is already'
                        ' installed but minimerge is running in'
                        ' no-update mode. If you want to try'
                        ' to update/rebuild it unconditionnaly,'
                        ' please relaunch with -uUR.' % directory)
                else:
                    # Update sources, refresh bootstrap, then run buildout.
                    self.upgrade_code(opts)
                    self.buildout_bootstrap(directory, opts)
                    self.buildout(directory, parts, opts)
            except Exception, instance:
                trace = traceback.format_exc()
                raise BuildoutError(
                    'Buildout failed:\n\t%s' % trace)
        finally:
            # Always restore the working directory we started from.
            os.chdir(self.cwd)
def buildout_bootstrap(self, directory, opts):
    """Bootstrap zc.buildout inside *directory*.

    Gathers local download caches (for offline use), inspects the local
    bootstrap.py to decide which flags it supports (--distribute,
    --setup-source, --find-links/--download-base, -c), runs it with the
    chosen python — retrying first without cache args, then with the
    alternate setuptools/distribute argument set — and finally unzips the
    eggs the bootstrap produced so they are usable unpacked.

    NOTE(review): the original indentation was lost in this copy; the
    nesting below was reconstructed from control-flow keywords — confirm
    against the upstream minitage source before relying on it.
    """
    offline = get_offline(opts)
    dcfg = os.path.expanduser('~/.buildout/default.cfg')
    minimerge = opts.get('minimerge', None)
    py = self.choose_python(directory, opts)
    new_st = self.has_setuptools7(py=py)
    # Candidate directories that may already hold downloaded dists/eggs.
    downloads_caches = [
        os.path.abspath('../../downloads/dist'),
        os.path.abspath('../../downloads/minitage/eggs'),
        os.path.abspath('../../downloads/minitage'),
        os.path.abspath('../../download/dist'),
        os.path.abspath('../../download/minitage/eggs'),
    ]
    # Also honour cache locations from the user's ~/.buildout/default.cfg.
    if os.path.exists(dcfg):
        try:
            cfg = ConfigParser()
            cfg.read(dcfg)
            buildout = dict(cfg.items('buildout'))
            for k in ['download-directory', 'download-cache']:
                if k in buildout:
                    places = [k,
                              '%s/dist' % k,
                              '%s/minitage/eggs'%k]
                    for k in places:
                        if not k in downloads_caches:
                            downloads_caches.append(k)
        except Exception, e:
            # best-effort: a broken default.cfg must not abort bootstrap
            pass
    find_links = []
    cache = os.path.abspath(
        os.path.join(directory, ('../../eggs/cache'))
    )
    # Keep only the cache directories that actually exist.
    for c in [cache] + downloads_caches:
        if os.path.exists(c) and not c in find_links:
            find_links.append(c)
    # Everywhere a distribute_setup.py / ez_setup.py might be found.
    distribute_setup_places = find_links[:] + downloads_caches + [
        os.path.join(directory, 'downloads/minitage/eggs'),
        os.path.join(directory, 'downloads/dist'),
        os.path.join(directory, 'downloads'),
        os.path.join(directory, 'download/minitage/eggs'),
        os.path.join(directory, 'download/dist'),
        os.path.join(directory, 'download'),
        os.path.join(directory),
        os.path.expanduser('~/.buildout'),
        os.path.expanduser('~/.buildout/download'),
        os.path.expanduser('~/.buildout/download/dist'),
        os.path.expanduser('~/.buildout/download/minitage'),
        os.path.expanduser('~/.buildout/download/minitage/eggs'),
        os.path.expanduser('~/.buildout/downloads'),
        os.path.expanduser('~/.buildout/downloads/dist'),
        os.path.expanduser('~/.buildout/downloads/minitage'),
        os.path.expanduser('~/.buildout/downloads/minitage/eggs'),
        os.path.expanduser('~/'),
    ]
    bootstrap_args = ''
    st_bootstrap_args = ''
    self.upgrade_bootstrap(minimerge, offline, py=py)
    # be sure which buildout bootstrap we have
    fic = open('bootstrap.py')
    content = fic.read()
    fic.close()
    if '--distribute' in content:
        if not new_st:
            bootstrap_args += ' %s' % '--distribute'
        if new_st:
            self.logger.warning('Forcing to use setuptools')
    has_buildout2, has_buildout1 = False, False
    if offline:
        if ' --accept-buildout-test-releases' in content:
            bootstrap_args += ' --accept-buildout-test-releases'
    if ('--setup-source' in content
        and not "--find-links" in content):
        ds = self.select_ds(distribute_setup_places, py=py)
        if not ds and offline:
            raise Exception(
                'Bootstrap failed, '
                'no distribute_setup.py '
                'found in %s '
                '' % ' '.join(
                    distribute_setup_places))
        if ds:
            # pick the first existing egg cache directory
            for eggc in (
                os.path.abspath(os.path.abspath('../../downloads/minitage/eggs')),
                os.path.abspath(os.path.abspath('../../eggs/cache')),
            ):
                if os.path.exists(eggc):
                    break
            bootstrap_args += ' --setup-source %s' % ds
            bootstrap_args += ' --eggs %s' % (eggc)
            # setuptools variant of the same arguments
            st_bootstrap_args += ' --setup-source %s' % ds.replace('distribute_setup.py', 'ez_setup.py')
            st_bootstrap_args += ' --eggs %s' % (eggc)
    try:
        eggs_base = select_fl(find_links)
    except:
        eggs_base = None
        if offline:
            raise Exception(
                'Missing either '
                'zc.buildout or distribute/setuptools source')
    # "bare" variants: same args minus the cache/find-links additions below.
    bare_bootstrap_args = bootstrap_args
    st_bare_bootstrap_args = st_bootstrap_args
    boot_can_continue = False
    if not os.path.isdir(".minitage"):
        os.makedirs(".minitage")
    # Be sure to have an unzipped eggs: this helper script (run after the
    # bootstrap) prints the install locations of setuptools & zc.buildout.
    SCRIPT = """
import pkg_resources
for i in ['setuptools', 'zc.buildout']:
    print pkg_resources.get_distribution(i).location
"""
    fic = open('.minitage/setup.py', 'w')
    fic.write(SCRIPT)
    fic.close()
    if eggs_base is not None:
        arg = ""
        if '--download-base' in content:
            arg = "--download-base"
        if '--find-links' in content:
            arg = "--find-links"
        if arg:
            bootstrap_args += ' %s "%s"' % (
                arg, eggs_base)
    if self.buildout_config and '"-c"' in content:
        bootstrap_args += " -c %s" % self.buildout_config
        bare_bootstrap_args += " -c %s" % self.buildout_config
        st_bootstrap_args += " -c %s" % self.buildout_config
        st_bare_bootstrap_args += " -c %s" % self.buildout_config
    # Try setuptools args first, then (if supported) the distribute args.
    BARGS = [(st_bootstrap_args, st_bare_bootstrap_args)]
    if '--distribute' in content:
        BARGS.append((bootstrap_args, bare_bootstrap_args))
    for ix, bargs in enumerate(BARGS):
        bootstrap_args, bare_bootstrap_args = bargs
        try:
            cmd = '%s bootstrap.py %s ' % (py, bootstrap_args,)
            self.logger.info('Running %s' % cmd)
            if '--distribute' in cmd:
                self.logger.warning('Using distribute !')
            minitage.core.common.Popen(cmd , opts.get('verbose', False))
            boot_setup = run_boot_setup()
            if boot_setup.wait() == 0:
                boot_can_continue = True
        except Exception, e:
            # offline/cached attempt failed: retry without cache args
            self.logger.error('Buildout bootstrap failed, trying online !')
            cmd = '%s bootstrap.py %s ' % (py, bare_bootstrap_args,)
            self.logger.info('Running %s' % cmd)
            try:
                if '--distribute' in cmd:
                    self.logger.warning('Using distribute !')
                minitage.core.common.Popen(cmd, opts.get('verbose', False))
                boot_setup = run_boot_setup()
                if boot_setup.wait() == 0:
                    boot_can_continue = True
            except Exception, ex:
                if ix < len(BARGS) -1:
                    continue
                else:
                    raise
        if boot_can_continue:
            break
    if boot_can_continue:
        # Every printed location that exists on disk is an installed egg.
        output = [a for a in boot_setup.stdout.read().splitlines()
                  if os.path.exists(a)]
        for a in output:
            if os.path.isfile(a):
                # unpack: move the zipped egg aside, then extract in place
                self.logger.info('Unpack to dir: %s' % a)
                f = uinterfaces.IUnpackerFactory()
                n = 1
                while True:
                    n += 1
                    orig = '%s.old.%s' % (a, n)
                    try:
                        os.rename(a, orig)
                        break
                    except:
                        # name taken: keep trying suffixes up to 100
                        if n > 100:
                            raise
                        else:
                            pass
                zipf = f(orig)
                zipf.unpack(orig, a,)
    else:
        raise BuildoutError(
            'Buildout not bootstrapped')
def buildout(self,
             directory=".",
             parts=None,
             opts=None):
    """Run ./bin/buildout in *directory*.

    Builds the buildout argument vector (-vvv/-N/-n/-o/-D) from *opts*,
    then either installs each part of *parts* individually or runs a full
    buildout with the configured config file.

    NOTE(review): opts.get() is called before the ``if not opts`` guard,
    so passing opts=None would raise AttributeError — callers in this
    module always pass a dict.
    """
    offline = get_offline(opts)
    bcmd = os.path.normpath('./bin/buildout')
    minibuild = opts.get('minibuild', None)
    # dependencies and eggs always run in "newest" mode (see below)
    dependency_or_egg = (getattr(minibuild, 'category', None)
                         in ['dependencies', 'eggs'])
    offline = get_offline(opts)
    argv = []
    if not parts:
        parts = []
    if not opts:
        opts = {}
    if opts.get('verbose', False):
        self.logger.debug(
            'Buildout is running in verbose mode!')
        argv.append('-vvvvvvv')
    installed_cfg = os.path.join(directory, '.installed.cfg')
    # -N (non-newest) only for plain packages that are not installed yet
    if (not opts.get('upgrade', True)
        and not dependency_or_egg
        and not os.path.exists(installed_cfg)):
        argv.append('-N')
    if opts.get('upgrade', False) or dependency_or_egg:
        self.logger.debug(
            'Buildout is running in newest mode!')
        argv.append('-n')
    if offline:
        self.logger.debug(
            'Buildout is running in offline mode!')
        argv.append('-o')
    if opts.get('debug', False):
        self.logger.debug(
            'Buildout is running in debug mode!')
        argv.append('-D')
    if parts:
        # Install each requested part with its own buildout invocation.
        for part in parts:
            self.logger.info(
                'Installing single part: %s' % part)
            minitage.core.common.Popen(
                '%s -c %s %s install %s ' % (
                    bcmd,
                    self.buildout_config,
                    ' '.join(argv),
                    part
                ),
                opts.get('verbose', False)
            )
    else:
        # No explicit parts: run the whole buildout.
        self.logger.debug('Installing parts')
        cmd = '%s -c %s %s ' % (
            bcmd,
            self.buildout_config,
            ' '.join(argv))
        minitage.core.common.Popen(
            cmd,
            opts.get('verbose', False)
        )
def choose_python(self, directory, opts):
    """Pick the python interpreter to run buildout with.

    Prefers the minibuild's dedicated python when it exists on disk,
    otherwise falls back to the interpreter running minimerge.
    """
    minibuild = opts.get('minibuild', None)
    if minibuild and os.path.exists(minibuild.python):
        return minibuild.python
    return sys.executable
def get_options(self, minimerge, minibuild, **kwargs):
    """Get python options according to the minibuild and minimerge instance.

    For eggs buildouts, we need to know which versions of python we
    will build site-packages for.
    For parts, we force to install only the 'part' buildout part.

    Arguments
        - we can force parts with settings 'buildout_parts' in minibuild
        - minimerge: a minitage.core.Minimerge instance
        - minibuild: a minitage.core.object.Minibuild instance
        - kwargs:
            - 'python_versions': list of major.minor versions of
              python to compile against.
    """
    options = {}
    # NOTE(review): this also stores the parts list into
    # self.buildout_config, which is overwritten with the config
    # *filename* a few lines below — looks accidental; confirm before
    # changing.
    parts = self.buildout_config = [
        a.strip()
        for a in minibuild.minibuild_config._sections[
            'minibuild'].get('buildout_parts', '').split()]
    if kwargs is None:
        kwargs = {}
    # if it s an egg, we must install just the needed
    # site-packages if selected
    if minibuild.category == 'eggs':
        vers = kwargs.get('python_versions', None)
        if not vers:
            vers = minitage.core.core.PYTHON_VERSIONS
        parts = ['site-packages-%s' % ver for ver in vers]
    self.buildout_config = minibuild.minibuild_config._sections[
        'minibuild'].get('buildout_config',
                         'buildout.cfg')
    content = ''
    if minibuild.category == 'eggs':
        # best-effort read of the buildout config to filter parts
        try:
            fic = open(os.path.join(minimerge.get_install_path(minibuild), self.buildout_config))
            content = fic.read()
            fic.close()
        except:
            pass
        # keep only parts that exist as [sections] in the config file
        parts = [p for p in parts+['site-packages'] if '[%s]'%p in content]
    options['parts'] = parts
    # prevent buildout from running if we have already installed stuff
    # and do not want to upgrade.
    options['upgrade'] = minimerge.getUpgrade()
    if minimerge.has_new_revision(minibuild):
        options['upgrade'] = True
    return options
# vim:set et sts=4 ts=4 tw=80:
|
UTF-8
|
Python
| false | false | 2,013 |
6,468,220,792,508 |
e10d4fd041120a95a745d8d16117f1948e2691b1
|
ab642369b649c053f43ed571e8890c3493d2b855
|
/intraday/settings/datasources.py
|
f39af5c8774301f638b118760f9195a74cf1e2bc
|
[
"MIT"
] |
permissive
|
steven-cutting/collectdata
|
https://github.com/steven-cutting/collectdata
|
7dc507e200068111b20713dd90076c97fbe68078
|
0c56eec3fb58cd8eb26b490d7541174b585ee431
|
refs/heads/master
| 2015-08-17T20:09:15.607086 | 2014-12-12T11:59:47 | 2014-12-12T11:59:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__title__ = 'collectdata'
__license__ = 'MIT'
__author__ = 'Steven Cutting'
__author_email__ = '[email protected]'
__created_on__ = '11/18/2014'

# (display name, Yahoo! Finance ticker, integer) triples.
# NOTE(review): the meaning of the third integer (0/10/15) is not evident
# from this module — confirm against the code that consumes `symbols`.
symbols = [('S&P 500', '^GSPC', 0),
           ('S&P 400', '^SP400', 0),
           ('S&P 600', '^SP600', 0),
           ('S&P 900', '^SP900', 0),
           ('S&P 1000', '^SPMIDSM', 0),
           ('NASDAQ', '^IXIC', 15),
           ('Russell 2000', '^RUT', 10),
           ('IWO', 'IWO', 15),
           ('IWZ', 'IWZ', 15),
           ('IWF', 'IWF', 15),
           ('IWB', 'IWB', 15),
           ]

# (human-readable label, Yahoo! Finance quotes-API format code) pairs.
datapoints = [('Name', 'n'),
              ('Ticker', 's'),
              ('LastTrade', 'l1'),
              ('Change', 'c6'),
              ('Change %', 'k2'),
              ]
|
UTF-8
|
Python
| false | false | 2,014 |
5,007,931,877,901 |
58dd6edcf3bde035ca28a72cef2b9050a7c2b99a
|
e59a2b18e979957f5e987c4c8ca9feeacf38465e
|
/arduirc.py
|
3eb7a22cc6e8587fa52f8f0b2d2e4b639a975a45
|
[
"MIT"
] |
permissive
|
skwintz813/ArduiRC
|
https://github.com/skwintz813/ArduiRC
|
a0548ac0e2884b7951b435e428af3c3139a81477
|
970ddd3ba9660ffca9796e7331237d117c6cd235
|
refs/heads/master
| 2021-12-02T18:05:26.114662 | 2014-01-28T11:22:44 | 2014-01-28T11:22:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""Usage:
arduirc.py [options] ( <section> <command> | <timings> )
Options:
-h --help show this help and exit
-v --version show version and exit
-t --timings-file=FILE specify the timings file to use [default: timings.yml]
-s --socket=SOCKET specify the socket to connect to [default: /dev/ttyACM0]
-w --wait wait two seconds for the Arduino to reset.
-b --baud-rate=RATE the baud rate to connect with [default: 9600]
-r --repeat=REPEAT repeat the command REPEAT times [default: 3]
-d --delay=DELAY delay between repeats (in usec) [default: 10000]
-p --pin=PIN which Arduino pin to write to [default: 3]
"""
import serial
import sys
import time
import yaml
from docopt import docopt
def main(arguments):
    """Send an IR timing sequence to the Arduino over the serial port.

    *arguments* is the dict produced by docopt; the usage pattern
    guarantees either <section>/<command> (looked up in the YAML timings
    file) or a literal <timings> string, so raw_timings is always bound.
    """
    if arguments.get("<section>") and arguments.get("<command>"):
        section = arguments["<section>"]
        command = arguments["<command>"]
        # NOTE(review): yaml.load without SafeLoader can execute arbitrary
        # tags; prefer yaml.safe_load if the timings file is untrusted.
        try:
            timing_dict = yaml.load(open(arguments["--timings-file"]))
        except IOError:
            sys.exit("Error opening timings file.")
        if section not in timing_dict:
            sys.exit("Unknown section.")
        if command not in timing_dict[section]:
            sys.exit("Unknown command.")
        raw_timings = timing_dict[section][command]
    elif arguments.get("<timings>"):
        raw_timings = arguments["<timings>"]
    timings = [int(timing) for timing in raw_timings.split()]
    # Quantize each timing into 25 usec units, one byte per timing.
    output = "".join([chr(int(round(timing / 25.0))) for timing in timings])
    com = serial.Serial(arguments["--socket"], int(arguments["--baud-rate"]), timeout=0.2)
    if arguments["--wait"]:
        print "Waiting for the Arduino to reset..."
        time.sleep(2)
    pin = int(arguments["--pin"])
    repeat = int(arguments["--repeat"])
    # Delay travels as one byte in 100 usec units.
    delay = int(round(int(arguments["--delay"]) / 100.0))
    if delay > 255 or delay < 1:
        sys.exit("Delay must be between 100 and 25500.")
    if pin > 13 or pin < 0:
        sys.exit("Pin must be between 0 and 13.")
    if repeat > 255 or repeat < 1:
        sys.exit("Repeat must be between 1 and 255.")
    # chr(1) is the command (command 1, send timings).
    com.write(chr(1) + chr(pin) + chr(repeat) + chr(delay) + output + chr(0))
    print com.read(1000)
if __name__ == "__main__":
arguments = docopt(__doc__, version="0.1.0")
main(arguments)
|
UTF-8
|
Python
| false | false | 2,014 |
2,903,397,902,845 |
6f1e6fc5fc6a6a7cf958cb83d52691a6b9b9c264
|
10a625c83ce522574d823dd50951e35b9ba38286
|
/octopus_basket/migrations/0002_auto__chg_field_cart_parent__chg_field_basket_parent.py
|
133a77f09d4dbb8a7e767e64eeece85fbe86d8d8
|
[] |
no_license
|
john-dwuarin/octopus_baskettt
|
https://github.com/john-dwuarin/octopus_baskettt
|
b3c9f795224a44bde6d8cca9fee891cf69b3bdee
|
d3c5ad972d89141cf68b74bbb3829b4e4e88d947
|
refs/heads/master
| 2020-04-15T11:10:34.987917 | 2014-05-30T19:34:02 | 2014-05-30T19:34:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: make Cart.parent and Basket.parent NULLable.

    forwards() drops the NOT NULL constraint on both self-referential
    foreign keys; backwards() restores it with a default of 0.
    """

    def forwards(self, orm):

        # Changing field 'Cart.parent'
        db.alter_column(u'octopus_basket_cart', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['octopus_basket.Cart'], null=True))

        # Changing field 'Basket.parent'
        db.alter_column(u'octopus_basket_basket', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['octopus_basket.Basket'], null=True))

    def backwards(self, orm):

        # Changing field 'Cart.parent'
        db.alter_column(u'octopus_basket_cart', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['octopus_basket.Cart']))

        # Changing field 'Basket.parent'
        db.alter_column(u'octopus_basket_basket', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['octopus_basket.Basket']))

    # Frozen ORM snapshot used by South; auto-generated — do not hand-edit.
    models = {
        u'octopus_basket.availabletag': {
            'Meta': {'object_name': 'AvailableTag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '150'})
        },
        u'octopus_basket.basket': {
            'Meta': {'object_name': 'Basket'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 5, 26, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'hash': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '60', 'primary_key': 'True', 'db_index': 'True'}),
            'is_browsable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['octopus_basket.Basket']", 'null': 'True', 'blank': 'True'}),
            'product_dict': ('django_hstore.fields.DictionaryField', [], {'db_index': 'True'}),
            'purchase_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 5, 26, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['octopus_user.OctopusUser']"})
        },
        u'octopus_basket.baskettag': {
            'Meta': {'object_name': 'BasketTag'},
            'basket': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['octopus_basket.Basket']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['octopus_basket.AvailableTag']"})
        },
        u'octopus_basket.cart': {
            'Meta': {'object_name': 'Cart'},
            'basket_list': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '60'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 5, 26, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'hash': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '60', 'db_index': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['octopus_basket.Cart']", 'null': 'True', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 5, 26, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['octopus_user.OctopusUser']"})
        },
        u'octopus_basket.usercarttag': {
            'Meta': {'object_name': 'UserCartTag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['octopus_basket.AvailableTag']"}),
            'user_cart': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['octopus_basket.Cart']"})
        },
        u'octopus_user.octopususer': {
            'Meta': {'object_name': 'OctopusUser'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        }
    }

    complete_apps = ['octopus_basket']
|
UTF-8
|
Python
| false | false | 2,014 |
16,209,206,616,831 |
e0b0d58961fcc21912fa59d8b559469f7af2337f
|
a07c50240888730c0dca7575ee87dc5c243e3c41
|
/2_Data_Structures/2.5.4构建一个多线程播客客户程序.py
|
38ada97c457a9f77151efe842d3f932ac6cd0ec1
|
[] |
no_license
|
CoryVegan/PythonSL
|
https://github.com/CoryVegan/PythonSL
|
190449bc783bbba2c4b62102f145fac2057e05f8
|
f4d281d2a0d2526364b62f16c3e6b48aa7b718f2
|
refs/heads/master
| 2020-03-08T12:13:17.588156 | 2014-07-06T02:38:45 | 2014-07-06T02:38:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
#本节构建一个播客客户程序,程序展示了如何用多个线程使用Queue类。这个程序要读入一个或多个RSS提要,对专辑排队来显示最新的5集以供下载,并使用线程并行地处理多个下载。这里没有提供足够的错误处理,所以不能在实际生产环境中使用,不过这个骨架实现可以作为一个很好的例子来说明如何使用Queue模块。
#首先要建立一些操作参数。正常情况下,这些参数来自用户输入(首选项,数据库等)。不过在这个例子中,线程数和要获取的URL列表都使用了硬编码值。
from Queue import Queue
from threading import Thread
import time
import urllib
import urlparse
import feedparser
#Set up some global variables
num_fetch_threads = 2
enclosure_queue = Queue()
# A real app wouldn't use hard-coded data...
feed_urls = ['http://advocacy.python.org/podcasts/littlebit.rss',]
#函数downloadEnclosures()在工作线程中运行,使用urllib来处理下载
def downloadEnclosures(i, q):
    """This is the worker thread function.

    It processes items in the queue one after another.  These daemon
    threads go into an infinite loop, and only exit when the main
    thread ends.
    """
    while True:
        print '%s: Looking for the next enclosure' % i
        # get() blocks until an enclosure URL is available.
        url = q.get()
        parsed_url = urlparse.urlparse(url)
        print '%s: Downloading:' % i, parsed_url.path
        response = urllib.urlopen(url)
        data = response.read()
        # Save the downloaded file to the current directory
        outfile_name = url.rpartition('/')[-1]
        with open(outfile_name, 'wb') as outfile:
            outfile.write(data)
        # Mark this work item as finished (pairs with enclosure_queue.join()).
        q.task_done()
# Once the worker target function is defined, start the worker threads.
# downloadEnclosures() blocks on q.get(), so it is safe to start the
# threads before anything has been queued.
# Set up some threads to fetch the enclosures
for i in range(num_fetch_threads):
    worker = Thread(target=downloadEnclosures, args=(i, enclosure_queue,))
    worker.setDaemon(True)
    worker.start()
# Fetch each feed with feedparser and enqueue the enclosure URLs of its
# five most recent entries; a worker starts downloading as soon as the
# first URL is queued.
# Download the feed(s) and put the enclosure URLs into the queue.
for url in feed_urls:
    response = feedparser.parse(url, agent='fetch_podcasts.py')
    for entry in response['entries'][-5:]:
        for enclosure in entry.get('enclosures', []):
            parsed_url = urlparse.urlparse(enclosure['url'])
            print 'Queuing:', parsed_url.path
            enclosure_queue.put(enclosure['url'])
# Block until every queued item has been marked done via task_done().
# Now wait for the queue to be empty, indicating that we have processed all the downloads.
print '*** Main thread waiting'
enclosure_queue.join()
print '*** Done'
|
UTF-8
|
Python
| false | false | 2,014 |
4,758,823,778,766 |
4788c2ef710b9594e015046d7578a11b8f1c0fb9
|
796247102c56fcaae363416f3161f709dff18f51
|
/problem001.py
|
cd239888db4f7d0b03aea153468d4fabfb156761
|
[] |
no_license
|
seongjaelee/ProjectEuler
|
https://github.com/seongjaelee/ProjectEuler
|
eadb562c3ac79a44627c2eb81657c8795bbca866
|
ed8d53ddbb72cfdbcc396f0de6ddac4b54bcc3bc
|
refs/heads/master
| 2021-01-15T23:07:55.207370 | 2012-02-26T13:47:31 | 2012-02-26T13:47:31 | 3,440,672 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import problem
class Problem(problem.Problem):
    """Project Euler problem 1: sum of multiples of 3 or 5 below 1000."""

    def __init__(self):
        number = 1
        question = 'Add all the natural numbers below one thousand that are multiples of 3 or 5.'
        problem.Problem.__init__(self, number, question)

    def getAnswer(self):
        """Return the answer in O(1) via inclusion-exclusion.

        Replaces the original O(n) loop (which also used the
        Python-2-only `xrange`); the closed form works on both
        Python 2 and 3.
        """
        def sum_of_multiples(k, limit):
            # Sum of positive multiples of k strictly below limit:
            # k * (1 + 2 + ... + m) where m = (limit-1) // k.
            m = (limit - 1) // k
            return k * m * (m + 1) // 2

        # Count multiples of 3 and of 5, subtracting multiples of 15
        # which would otherwise be counted twice.
        return (sum_of_multiples(3, 1000)
                + sum_of_multiples(5, 1000)
                - sum_of_multiples(15, 1000))
|
UTF-8
|
Python
| false | false | 2,012 |
7,748,121,018,345 |
bebbaf7531f0ec4900b726fe01a1549a14077f98
|
3c8c32107e2b3e8f274996d6631012d619b2bcb1
|
/server.py
|
81345a1c7441722b1e7674dfdecf1484eff525e4
|
[] |
no_license
|
vpavlenko/reinhardt
|
https://github.com/vpavlenko/reinhardt
|
7032d76ee359cdf8e253f54e30939a8b0dd30f4f
|
03f46021f9efae63505aedfb6308f373feea7df0
|
refs/heads/master
| 2020-05-31T10:55:13.183166 | 2014-02-07T10:58:12 | 2014-02-07T10:58:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
import mimetypes
import os
import re
import socket
import sys
import urllib.parse
VERSION = 0.2
HOST = '127.0.0.1'
MAX_CONNECTIONS = 100 # number of simultaneous connections waiting in a queue
BUFLEN = 1024 # read data from socket by chunks of size BUFLEN
FILESYSTEM_ENCODING = sys.getfilesystemencoding() # encoding for file names
class Request:
    '''
    Parsed form of an incoming HTTP request.

    Exposes 'method', 'path' (percent-decoded, without the leading
    slash), 'http_version' and a 'headers' dict.
    '''
    def __init__(self, data):
        request_line, *header_lines = re.split('\r?\n', data)
        self.method, raw_path, self.http_version = request_line.split(' ')
        # Drop the leading '/' and decode %-escapes with the filesystem
        # encoding so the path can be used directly for file lookups.
        self.path = urllib.parse.unquote(raw_path[1:],
                                         encoding=FILESYSTEM_ENCODING)
        self.headers = {}
        for header_line in header_lines:
            if not header_line:
                continue
            name, value = header_line.split(': ')
            self.headers[name] = value
def read_data_from_socket(sock):
    """Read from *sock* until the blank line ending the HTTP headers."""
    # The socket interface hands data back in arbitrary chunks, so keep
    # reading until the \r\n\r\n terminator has arrived.
    received = b''
    while b'\r\n\r\n' not in received:
        received += sock.recv(BUFLEN)
    return received.decode('ascii')
def compile_response(code, comment, mimetype, body):
    """Build an HTTP/1.0 response as a (head, body) pair of bytes.

    *body* must already be bytes; the head carries the status line plus
    Content-Type and Content-Length headers.
    """
    assert type(body) == bytes
    status_and_headers = (
        'HTTP/1.0 {0} {1}\r\n'
        'Content-Type: {2}\r\n'
        'Content-Length: {3}\r\n\r\n'
    ).format(code, comment, mimetype, len(body))
    return status_and_headers.encode('ascii'), body
def http_404(request, message):
    """Return a 404 response whose HTML body displays *message*."""
    html = '<h2>{0}</h2>'.format(message).encode('ascii')
    return compile_response(code=404, comment='Not Found',
                            mimetype='text/html', body=html)
def send_file(request, path):
    """Serve the file at *path*, guessing its MIME type from the name.

    Unknown types fall back to octet/stream so browsers offer a download.
    """
    # BUG FIX: the original left the file handle open until garbage
    # collection; 'with' closes it deterministically even if read() raises.
    with open(path, 'rb') as f:
        data = f.read()
    mimetype = mimetypes.guess_type(path)[0]
    if mimetype is None:
        mimetype = 'octet/stream'  # default action: propose to download
    return compile_response(code=200, comment='OK', mimetype=mimetype,
                            body=data)
def send_directory(request, path):
    """Render a simple HTML listing of the directory at *path*.

    Directories are listed first (with a trailing '/'), then files, each
    group alphabetically.
    """
    # Items are sorted in the following order: all directories, then all
    # files.  Alphabetical order inside each group.
    files = [(not os.path.isdir(os.path.join(path, filename)), filename)
             for filename in os.listdir(path)]
    files.sort()
    files = [filename + ('' if isfile else '/') for isfile, filename in files]
    encoding_line = '<meta charset="{encoding}">'
    # BUG FIX: this template had lost its format placeholders (every link
    # rendered as literal text) although .format(path=..., filename=...)
    # is still called below.  Restored the '{path}'/'{filename}' fields.
    # NOTE(review): confirm the exact href scheme against the original repo.
    file_line = '<div><a href="/{path}/{filename}">{filename}</a></div>'
    data = ([encoding_line.format(encoding=FILESYSTEM_ENCODING)] +
            [file_line.format(path=path, filename=filename)
             for filename in files])
    return compile_response(code=200, comment='OK', mimetype='text/html',
                            body='\n'.join(data).encode(FILESYSTEM_ENCODING))
def serve_static(request, directory):
    """Resolve request.path under *directory* and build the response.

    Missing paths yield a 404; directories are served via index.html when
    present, otherwise as a generated listing; files are sent directly.
    """
    path = os.path.join(directory, request.path)
    if not os.path.exists(path):
        return http_404(request=request,
                        message='Path {0} doesn\'t exist'.format(path))
    if not os.path.isdir(path):
        return send_file(request, path)
    # Directory: prefer an index.html, otherwise list the contents.
    index = os.path.join(path, 'index.html')
    if os.path.exists(index):
        return send_file(request, index)
    return send_directory(request, path)
def static_server(port, directory):
    """Serve *directory* over HTTP on 127.0.0.1:*port*, forever.

    Single-threaded: each connection is accepted, read, answered and
    closed in turn.
    """
    print('Reinhardt {0}'.format(VERSION))
    print('Starting a server on http://{0}:{1}/'.format(HOST, port))
    print()
    addr = (HOST, port)
    serversock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # prevent "Address already in use" error
    serversock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    serversock.bind(addr)
    serversock.listen(MAX_CONNECTIONS)
    while True:
        clientsock, addr = serversock.accept()
        print('Connected from:', addr)
        data = read_data_from_socket(clientsock)
        print(data)
        request = Request(data)
        head, body = serve_static(request=request, directory=directory)
        print(head.decode('ascii'))
        clientsock.sendall(head + body)
        clientsock.close()
def main():
    """Entry point: parse PORT and DIR from argv and run the server."""
    if len(sys.argv) != 3:
        # Wrong argument count: print usage and exit with an error code.
        print('Usage: {0} PORT DIR'.format(sys.argv[0]))
        sys.exit(1)
    port, directory = int(sys.argv[1]), sys.argv[2]
    static_server(port, directory)
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,014 |
13,108,240,205,588 |
c1e575fda7622b2f49fa84d50eaa258bd5d9b0e0
|
3638641c779d5d644805c23152229f29fc0284cb
|
/testing.py
|
a9ce3aa1dc7cf4f791231f7c880c75b1c9a65648
|
[] |
no_license
|
NilsNoreyson/FlaskServerToDocxLetter
|
https://github.com/NilsNoreyson/FlaskServerToDocxLetter
|
ff91293c334316eccfa8771f34c18d4b6af08b06
|
fc12176be44ebe33043ddffde2742b9cd457e644
|
refs/heads/master
| 2021-01-20T06:57:33.612833 | 2014-05-16T12:08:35 | 2014-05-16T12:08:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed May 14 08:22:51 2014

@author: peterb
"""
# Scratch script: renders an ODT letter template with sender/recipient data
# and writes the result to a temp file under static/.
# NOTE(review): Template, tempfile, os and topPath are used but never
# imported/defined in this snippet — it only runs pasted into a session
# where they already exist (Template appears to be py3o.template or similar).
letter_data={'date': '15.05.2014', 'recipient': {'fax': '', 'tel': u'+49 15782576830', 'firstname': u'Matthias', 'prefix': '', 'address': {}, 'department': '', 'organization': '', 'surname': u'B\xf6pple', 'email': u'[email protected]'}, 'sender': {'fax': [u'07071-29-5963'], 'tel': u'07071-29-77633', 'firstname': u'Peter', 'prefix': u'Dipl. Phys.', 'address': {u'country': u'Germany', u'region': None, u'street': u'Memelweg 9', u'zipcode': u'72072', u'locality': u'T\xfcbingen'}, 'department': u'IPC - AG Weimar', 'organization': u'Universit\xe4t T\xfcbingen', 'surname': u'Bonanati', 'email': u'[email protected]'}}
#letter_data={'date': '15.05.2014', 'recipient': {'fax': '', 'tel': u'A', 'firstname': u'Andreas', 'prefix': '', 'address': u'A', 'department': '', 'organization': '', 'surname': u'Fiedler', 'email': u'[email protected]'}, 'sender': {'fax': [u'07071-29-5963'], 'tel': u'07071-29-77633', 'firstname': u'Peter', 'prefix': u'Dipl. Phys.', 'address': {u'country': u'Germany', u'region': None, u'street': u'Memelweg 9', u'zipcode': u'72072', u'locality': u'T\xfcbingen'}, 'department': u'IPC - AG Weimar', 'organization': u'Universit\xe4t T\xfcbingen', 'surname': u'Bonanati', 'email': u'[email protected]'}}
#letter_data=jsonToLetterData(testData)
#print letter_data

basic = Template(source="IPC_DATA", filepath='IPC_ger_Letter.odt')
print('TemplateDone')
basic_generated = basic.generate(o=letter_data).render()
print('filled Done')
odt_data=basic_generated.getvalue()
print('generated Done')
# delete=False keeps the rendered file on disk so it can be served from
# the static/ directory afterwards.
f = tempfile.NamedTemporaryFile(delete=False,mode='wb',dir=os.path.join(topPath,'static'))
print("%s.odt"%f.name)
f.write(odt_data)
f.close()
|
UTF-8
|
Python
| false | false | 2,014 |
1,331,439,866,154 |
91583be76001e359672063e2995c5e7065793963
|
c02b07d6cc49b9fd3ab811e92eccb7ba319f853a
|
/pymes/admin.py
|
5581363a9af7037bebd48f6809cc4f7026ea68ac
|
[] |
no_license
|
jhcastel/WebServer
|
https://github.com/jhcastel/WebServer
|
ca38429fa049b38d01396cb63a86112002875a57
|
e98a69f760cae36db74ecf4e834f214d7ab0b1ac
|
refs/heads/master
| 2021-03-12T23:43:14.266456 | 2014-11-18T01:13:16 | 2014-11-18T01:13:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from django.contrib.admin.sites import AdminSite
#from pymes.models import LoanType, Client
from pymes.forms import LoanAdminAuthForm
admin.autodiscover()
#class ClientInline(admin.TabularInline):
# model = Client
# extra = 1
#class LoanTAdmin(admin.ModelAdmin):
# fieldsets = [
# ('Code Admin', {'fields': ['idadmin']}),
# ('Loan Type', {'fields': ['ltype']}),
# ('Rate', {'fields': ['rate']}),
# ]
# inlines = [ClientInline]
# list_display = ('idadmin', 'ltype', 'rate')
class LoanAdmin(AdminSite):
    """Custom admin site using LoanAdminAuthForm for login."""
    login_form = LoanAdminAuthForm
    def has_permission(self, request):
        # Any active account may enter this admin site (the stock
        # AdminSite.has_permission additionally requires is_staff).
        return request.user.is_active
loan_admin_site = LoanAdmin(name='loansadmin')
|
UTF-8
|
Python
| false | false | 2,014 |
13,134,009,999,575 |
189708c4621fa11f2259d1ac3561dd42fdecde0c
|
370f8e1278f21a218230240c69a391ba234b87ea
|
/admin.py
|
7271eb2fce90ca4b382802e05e5d632923efa5c0
|
[] |
no_license
|
mattapayne/ColorMatchR
|
https://github.com/mattapayne/ColorMatchR
|
a42b90ff96d62dab5d62d65953d7dde504df8979
|
39c6f558211e8c37bf39f74c0678c153acf3ce8a
|
refs/heads/master
| 2016-09-06T05:45:15.805795 | 2011-10-16T08:45:27 | 2011-10-16T08:45:27 | 2,582,491 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import webapp
from google.appengine.ext import db
from helpers import *
#This is a test
class AdminPage(webapp.RequestHandler):
    """Admin view: lists every BetaSignup entity via the admin.html template."""
    def get(self):
        # GqlQuery returns an iterable of BetaSignup entities; the
        # template receives them under the 'signups' key.
        signups = db.GqlQuery("SELECT * FROM BetaSignup")
        template_data = {'signups': signups}
        render_template(self, template_name='admin.html', template_values=template_data)
|
UTF-8
|
Python
| false | false | 2,011 |
8,667,244,010,378 |
1517407b54d8ce7c97aabb13dc3cf20ca6a7b2f8
|
eb4f1caf460f082e302b5621d600c386bdf54c76
|
/app/validator.py
|
881ac558bd1cd16f5cfca62f4ac465bffb4eb933
|
[] |
no_license
|
ageorgie/Phonebuzz
|
https://github.com/ageorgie/Phonebuzz
|
be70563ddeddbc7d3662a97b3528c17b3cca1a4a
|
9ccb5fae6992f60c4d629924a3d3fc49462b77ca
|
refs/heads/master
| 2016-09-10T22:00:58.074667 | 2014-06-27T01:58:18 | 2014-06-27T01:58:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from twilio.util import RequestValidator
import os
AUTH_TOKEN = ''
if AUTH_TOKEN == '':
AUTH_TOKEN = os.environ['TWILIO_TOKEN']
validator = RequestValidator(AUTH_TOKEN)
def isValid(url, signature, postVars=None):
    """Return True if *signature* proves the request came from Twilio.

    Arguments:
        url -- the full URL Twilio requested
        signature -- value of the X-Twilio-Signature header
        postVars -- POST parameters of the request (default: none)
    """
    # A mutable default argument ({}) is a classic Python pitfall; use a
    # None sentinel and substitute a fresh dict per call instead.
    if postVars is None:
        postVars = {}
    return validator.validate(url, postVars, signature)
|
UTF-8
|
Python
| false | false | 2,014 |
6,158,983,126,914 |
54c3636d0eefaffc12930b586cfc06e8274e262b
|
5de7cfe1ce2af6cd3b450c18a9519ca367b3a7b2
|
/varch/grub.py
|
bacf2f4ce632e0c43814e17e0be6b612c7ab1a66
|
[] |
no_license
|
thatch45/varch
|
https://github.com/thatch45/varch
|
62f48c77f65892d5e97f11aa7b89b3824accb2d1
|
e0d5c654aec7e152f2aee3712612c955b107485c
|
refs/heads/master
| 2020-05-16T21:22:27.435394 | 2011-08-16T19:28:22 | 2011-08-16T19:28:22 | 1,461,775 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Manage everything from the install to the grub-install
'''
import os
import shutil
import subprocess
class Grub:
    '''
    Prepare a mounted guest image for booting: rewrite the grub menu and
    fstab device names, copy in grub stage files, and run grub itself.
    '''
    def __init__(self, opts, target, nbd):
        # opts: options dict; keys read here are 'generic' and 'image'
        # target: path where the guest filesystem is mounted
        # nbd: the nbd device path whose partitions back the image
        self.opts = opts
        self.target = target
        self.nbd = nbd
    def _grub_conf(self):
        '''
        Edit boot/grub/menu.lst inside the target, rewriting kernel root
        devices, and return the grub root string (e.g. "(hd0,0)") found
        on the 'root' line for use by the grub install step.
        '''
        # TODO: Grow up and use a with statement
        lst = os.path.join(self.target, 'boot/grub/menu.lst')
        lines = open(lst, 'r').readlines()
        grub_root = ''
        for ind in range(0, len(lines)):
            if lines[ind].startswith('#'):
                continue
            if lines[ind].startswith('root'):
                # Keep everything from the '(' on, e.g. "(hd0,0)\n"
                grub_root = lines[ind][lines[ind].index('('):]
            if lines[ind].startswith('kernel'):
                s = lines[ind]
                if self.opts['generic']:
                    # Generic image: nbd partition names become /dev/sda
                    lines[ind] = s.replace(self.nbd + 'p', '/dev/sda')
                else:
                    # KVM/virtio image: sda becomes vda
                    lines[ind] = s.replace('/dev/sda', '/dev/vda')
        open(lst, 'w').writelines(lines)
        return grub_root
    def _fstab(self):
        '''
        Rewrite etc/fstab device names so the image mounts correctly
        at first boot (sda for generic images, vda for virtio).
        '''
        fstab = os.path.join(self.target, 'etc/fstab')
        lines = open(fstab, 'r').readlines()
        for ind in range(0, len(lines)):
            if lines[ind].startswith('#'):
                continue
            if lines[ind].startswith('/dev/mapper/loop'):
                s = lines[ind]
                if self.opts['generic']:
                    lines[ind] = s.replace(self.nbd + 'p', '/dev/sda')
                else:
                    lines[ind] = s.replace(self.nbd + 'p', '/dev/vda')
        open(fstab, 'w').writelines(lines)
    def _copy_stages(self):
        '''
        Copy the host's grub stage1/stage2 files into the target image.
        '''
        shutil.copy('/boot/grub/stage1',
                    os.path.join(self.target, 'boot/grub/'))
        shutil.copy('/boot/grub/stage2',
                    os.path.join(self.target, 'boot/grub/'))
    def _install_grub(self, grub_root):
        '''
        Drive the interactive `grub` shell in batch mode to install the
        bootloader into the image file, using the root discovered by
        _grub_conf().
        '''
        g_cmd = 'grub --batch --no-floppy --device-map=/dev/null'
        g_lines = 'device (hd0) ' + self.opts['image'] + '\n'\
                + 'root ' + grub_root + '\n'\
                + 'setup (hd0)\n'\
                + 'quit\n'
        g_lines = str.encode(g_lines)
        grub = subprocess.Popen(g_cmd, shell=True, stdin=subprocess.PIPE)
        grub.communicate(g_lines)
        # NOTE(review): the return code is captured but never checked —
        # a failed grub install is silently ignored.
        rc = grub.wait()
    def setup_boot(self):
        '''
        Run the routines that will setup booting the virtual machine.
        Order matters: menu.lst must be parsed before grub is installed.
        '''
        grub_root = self._grub_conf()
        self._fstab()
        self._copy_stages()
        self._install_grub(grub_root)
|
UTF-8
|
Python
| false | false | 2,011 |
13,237,089,245,285 |
5cb25ad62c0bd614cc348f4aff0ef1e0aa018653
|
d43e168a4f65e6ad6a055ab7b2470ac884dcac5e
|
/multimedia/mythtv/files/patch-bindings__python__setup.py
|
66c95ffa184ff7f90a58a6f72cf494828938a522
|
[
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license",
"GPL-3.0-only",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
non_permissive
|
surajrav/921ports
|
https://github.com/surajrav/921ports
|
d1d0e39c96ea1718dd0d3a1e0e6fd1822ba05a16
|
62fac1c0adb28a9198eda68a6e8508aa9cbe216d
|
refs/heads/master
| 2015-07-25T04:48:02 | 2013-04-04T13:50:32 | 2013-04-04T13:50:32 | 23,722,073 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
--- bindings/python/setup.py.orig 2012-04-10 07:29:22.000000000 +0200
+++ bindings/python/setup.py 2012-05-10 09:11:16.799272921 +0200
@@ -78,7 +78,7 @@
setup(
name='MythTV',
- version='0.25.0',
+ version='0.26.0',
description='MythTV Python bindings.',
long_description='Provides canned database and protocol access to the MythTV database, mythproto, mythxml, and frontend remote control.',
packages=['MythTV', 'MythTV/tmdb', 'MythTV/tmdb3', 'MythTV/ttvdb', 'MythTV/wikiscripts'],
|
UTF-8
|
Python
| false | false | 2,013 |
3,753,801,436,380 |
cc07c72bf9d73d4dda5afa828ca4d8b9847ad104
|
4180a6f4d3978b6012c6708c23cb1a126baf87df
|
/algorithms/graph.py
|
408dd4931fc74a7d81ba5813bff5419c92354fa9
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
rrane/jcvi
|
https://github.com/rrane/jcvi
|
20fbd8c433465849a8c1704c6b7bae6e65fec5f2
|
bc4fa3245cfcd90055b83f81ab3c3d51578829b8
|
refs/heads/master
| 2019-06-28T06:48:11.120589 | 2014-03-22T04:19:55 | 2014-03-22T04:19:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Wrapper for the common graph algorithms. Common usages are:
>>> edges = [(1, 2), (2, 3), (4, 5)]
>>> g = nx.DiGraph(edges)
>>> c = weakly_connected_components(g)
>>> print c
[[1, 2, 3], [4, 5]]
>>> sub = g.subgraph(c[0])
>>> topological_sort(sub)
[1, 2, 3]
"""
import sys
import logging
import networkx as nx
from collections import deque
from string import maketrans
from networkx.algorithms.dag import topological_sort
from networkx.algorithms.components.weakly_connected import \
weakly_connected_components
from networkx.algorithms.components.connected import connected_components
from networkx.algorithms.shortest_paths.generic import shortest_path
from jcvi.formats.base import must_open
from jcvi.apps.base import debug
debug()
"""
Bidirectional graph.
"""
dirs = (">", "<")
trans = maketrans("+?-", ">><")
class BiNode (object):
    """A vertex in a bidirectional graph, with incoming/outgoing edge lists."""

    def __init__(self, v):
        self.v = v
        self.ins = []
        self.outs = []

    def get_next(self, tag="<"):
        """Follow the single unambiguous edge away from this node.

        *tag* is the direction the current edge arrived from: an inward
        tag ('<') means we continue along the outgoing edges, and vice
        versa.  Returns ``(next_node, next_tag)``, or ``(None, None)``
        when the walk is ambiguous — either this node has more than one
        candidate edge, or the next node has multiple incoming branches.
        """
        candidates = self.outs if tag == "<" else self.ins
        if len(candidates) != 1:
            return None, None
        edge, = candidates
        if edge.v1.v == self.v:
            nxt = edge.v2
            # Arriving at the far end of the edge flips the orientation tag.
            ntag = "<" if edge.o2 == ">" else ">"
        else:
            nxt, ntag = edge.v1, edge.o1
        # Refuse to continue into a node that itself has multiple branches.
        branches = nxt.ins if ntag == "<" else nxt.outs
        if len(branches) > 1:
            return None, None
        return nxt, ntag

    def __str__(self):
        return str(self.v)

    __repr__ = __str__
class BiEdge (object):
    """An edge between two vertices carrying an orientation at each end.

    Orientations are normalized through the module-level ``trans`` table
    ('+'/'?' -> '>', '-' -> '<') and must end up in ``dirs``.  Edges are
    canonicalized so v1 <= v2 (flipping orientations as needed).
    """
    def __init__(self, v1, v2, o1, o2, color="black", length=None):
        self.v1 = v1
        self.v2 = v2
        # Normalize '+?-' orientation symbols to '>' / '<'.
        o1 = o1.translate(trans)
        o2 = o2.translate(trans)
        assert o1 in dirs and o2 in dirs
        self.o1 = o1
        self.o2 = o2
        # Canonical order: smaller vertex first.
        if v1 > v2:
            self.flip()
        self.color = color
        self.length = length
    def __str__(self):
        # e.g. "1>--<2"
        return "".join(str(x) for x in \
                 (self.v1, self.o1, "--", self.o2, self.v2))
    def flip(self):
        # Swap endpoints and invert both orientations.  Note the new o1
        # is derived from the old o2 and vice versa.
        self.v2, self.v1 = self.v1, self.v2
        o1, o2 = self.o1, self.o2
        self.o1 = ">" if o2 == "<" else "<"
        self.o2 = ">" if o1 == "<" else "<"
class BiGraph (object):
    """A bidirectional graph of BiNode vertices joined by BiEdge edges.

    Python 2 module (print statements below); nodes and edges are kept
    in dicts keyed by vertex value and (v1, v2) pair respectively.
    """
    def __init__(self):
        self.nodes = {}
        self.edges = {}
    def __str__(self):
        return "BiGraph with {0} nodes and {1} edges".\
                format(len(self.nodes), len(self.edges))
    def add_node(self, v):
        # Idempotent: only create a BiNode the first time v is seen.
        if v not in self.nodes:
            self.nodes[v] = BiNode(v)
    def add_edge(self, e):
        """Register edge *e*, creating its endpoint nodes as needed and
        rewriting e.v1/e.v2 from raw values to BiNode objects."""
        v1, v2 = e.v1, e.v2
        assert isinstance(e, BiEdge)
        for v in (v1, v2):
            self.add_node(v)
        n1 = self.nodes.get(v1)
        n2 = self.nodes.get(v2)
        # '>' at an end means the edge leaves that node; '<' means it enters.
        l = n1.outs if e.o1 == ">" else n1.ins
        r = n2.ins if e.o2 == ">" else n2.outs
        l.append(e)
        r.append(e)
        e.v1, e.v2 = n1, n2
        self.edges[(v1, v2)] = e
    def get_node(self, v):
        return self.nodes[v]
    def get_edge(self, av, bv):
        """Look up the edge between av and bv; edges are stored in
        canonical (smaller, larger) order, so flip on reversed lookup."""
        flip = False
        if av > bv:
            av, bv = bv, av
            flip = True
        e = self.edges[(av, bv)]
        if flip:
            # NOTE(review): this mutates the stored edge in place —
            # repeated reversed lookups toggle its orientation.
            e.flip()
        return e
    def iter_paths(self):
        """Yield maximal unambiguous paths (deques of BiNodes), walking
        backwards then forwards from each undiscovered vertex."""
        discovered = set()
        for v, vv in self.nodes.items():
            if v in discovered:
                continue
            path = deque([vv])
            #print "cur", v
            discovered.add(v)
            # Extend the path to the left (incoming direction)...
            prev, ptag = vv.get_next(tag=">")
            while prev:
                #print prev, ptag
                if prev.v in discovered:
                    break
                path.appendleft(prev)
                discovered.add(prev.v)
                prev, ptag = prev.get_next(tag=ptag)
            # ...then to the right (outgoing direction).
            next, ntag = vv.get_next(tag="<")
            while next:
                #print next, ntag
                if next.v in discovered:
                    break
                path.append(next)
                discovered.add(next.v)
                next, ntag = next.get_next(tag=ntag)
            #discovered |= set(x.v for x in path)
            yield path
    def path(self, path, flip=False):
        """Render *path* (sequence of BiNodes) as an edge string plus an
        ordered list of (vertex, forward?) tuples."""
        from jcvi.utils.iter import pairwise
        oo = []
        if len(path) == 1:
            m = "Singleton {0}".format(path[0])
            oo.append((path[0].v, True))
            return m, oo
        edges = []
        for a, b in pairwise(path):
            av, bv = a.v, b.v
            # NOTE(review): passes a tuple to get_edge, which takes two
            # positional args (av, bv) — looks like it should be
            # self.get_edge(av, bv); confirm against callers/tests.
            e = self.get_edge((av, bv))
            if not oo: # First edge imports two nodes
                oo.append((e.v1.v, e.o1 == ">"))
            last = oo[-1]
            assert last == (e.v1.v, e.o1 == ">")
            oo.append((e.v2.v, e.o2 == ">"))
            if flip:
                se = str(e)
                e.flip()
            else:
                se = str(e)
            edges.append(se)
        return "|".join(edges), oo
    def read(self, filename, color="black"):
        """Load edges from a file of '<a><dir>--<dir><b>' lines."""
        fp = open(filename)
        nedges = 0
        for row in fp:
            a, b = row.strip().split("--")
            oa = a[-1]
            ob = b[0]
            a, b = a.strip("<>"), b.strip("<>")
            self.add_edge(BiEdge(a, b, oa, ob, color=color))
            nedges += 1
        logging.debug("A total of {0} edges imported from `{1}` (color={2}).".
                      format(nedges, filename, color))
    def write(self, filename="stdout"):
        """Dump all edges, one per line (Python 2 print syntax)."""
        fw = must_open(filename, "w")
        for e in self.edges.values():
            print >> fw, e
        logging.debug("Graph written to `{0}`.".format(filename))
    def draw(self, pngfile, dpi=96, verbose=False, namestart=0,
                 nodehighlight=None, prog="circo"):
        """Render the graph to *pngfile* with pygraphviz; arrow shapes
        encode edge orientations."""
        import pygraphviz as pgv
        G = pgv.AGraph()
        for e in self.edges.values():
            arrowhead = (e.o1 == ">")
            arrowtail = (e.o2 == "<")
            if e.o1 != e.o2: # Not sure why this is necessary
                arrowhead = not arrowhead
                arrowtail = not arrowtail
            arrowhead = "normal" if arrowhead else "inv"
            arrowtail = "normal" if arrowtail else "inv"
            v1, v2 = e.v1, e.v2
            # namestart trims a common prefix from node labels.
            v1, v2 = str(v1)[namestart:], str(v2)[namestart:]
            G.add_edge(v1, v2, color=e.color,
                       arrowhead=arrowhead, arrowtail=arrowtail)
        if nodehighlight:
            for n in nodehighlight:
                n = n[namestart:]
                n = G.get_node(n)
                n.attr["shape"] = "box"
        G.graph_attr.update(dpi=str(dpi))
        if verbose:
            G.write(sys.stderr)
        G.draw(pngfile, prog=prog)
        logging.debug("Graph written to `{0}`.".format(pngfile))
    def get_next(self, node, tag="<"):
        # Convenience wrapper over BiNode.get_next for a raw vertex value.
        return self.get_node(node).get_next(tag)
    def get_path(self, n1, n2, tag="<"):
        # return all intermediate nodes on path n1 -> n2
        path = deque()
        next, ntag = self.get_next(n1, tag=tag)
        while next:
            if next.v == n2:
                return path
            path.append((next, ntag))
            next, ntag = next.get_next(tag=ntag)
        # With n2=None this returns the whole reachable tail; otherwise
        # None signals that n2 was never reached.
        return path if n2 is None else None
def bigraph_test():
    """Smoke test: build a small BiGraph by hand, dump it, and print the
    maximal paths found by iter_paths() (Python 2 print syntax)."""
    g = BiGraph()
    g.add_edge(BiEdge(1, 2, ">", "<"))
    # Two parallel edges between 2 and 3, distinguished by color.
    g.add_edge(BiEdge(2, 3, "<", "<", color="red"))
    g.add_edge(BiEdge(2, 3, ">", ">", color="blue"))
    g.add_edge(BiEdge(5, 3, ">", ">"))
    g.add_edge(BiEdge(4, 3, "<", ">"))
    g.add_edge(BiEdge(4, 6, ">", ">"))
    g.add_edge(BiEdge(7, 1, ">", ">"))
    g.add_edge(BiEdge(7, 5, "<", ">"))
    g.add_edge(BiEdge(8, 6, ">", "<"))
    print g
    g.write()
    for path in g.iter_paths():
        p, oo = g.path(path)
        print p
        print oo
    #g.draw("demo.png", verbose=True)
def merge_paths(paths):
    """
    Zip together sorted lists.
    >>> paths = [[1, 2, 3], [1, 3, 4], [2, 4, 5]]
    >>> merge_paths(paths)
    [1, 2, 3, 4, 5]
    """
    from jcvi.utils.iter import pairwise
    # Every consecutive pair in every path becomes a precedence edge;
    # a topological sort of the combined DAG merges the orderings.
    edges = [pair for path in paths for pair in pairwise(path)]
    g = nx.DiGraph(edges)
    return topological_sort(g)
if __name__ == '__main__':
import doctest
doctest.testmod()
bigraph_test()
|
UTF-8
|
Python
| false | false | 2,014 |
2,465,311,236,168 |
9776a5a6adc0875f7b61576aad39de7c80c00894
|
665d6d22fb1a0be38dc558ce68276a51e7568763
|
/cis/migrations/0002_auto_20140807_0739.py
|
5f7519bf8de697b5cf314ab42947b68421cad831
|
[] |
no_license
|
deepanshu-nickelfox/cis
|
https://github.com/deepanshu-nickelfox/cis
|
4e2afa39a2bbf874b31e79cb7c61fc13f826623f
|
dcd54412efb79b2459b89401b6ed9873d6ce0db6
|
refs/heads/master
| 2021-05-28T19:25:42.007437 | 2014-08-07T07:41:38 | 2014-08-07T07:41:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration: adds `position` and
    `user_permissions` fields to the cis User model.  Generated files
    should not be hand-restyled; only comments added here."""

    dependencies = [
        ('auth', '0001_initial'),
        ('cis', '0001_initial'),
        ('hr', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='position',
            # PROTECT: a Position with assigned users cannot be deleted.
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, blank=True, to='hr.Position', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='user',
            name='user_permissions',
            field=models.ManyToManyField(to='auth.Permission', verbose_name='user permissions', blank=True),
            preserve_default=True,
        ),
    ]
|
UTF-8
|
Python
| false | false | 2,014 |
15,753,940,079,168 |
73a0ef46740920db7da174c8965a5a0523347950
|
453e612020d4bedb11ae05c92c2a4c614daff389
|
/src/checksum/strategies/verhoeff.py
|
17aec845be788a83b34441a61a21eeed07d9fc11
|
[
"MIT"
] |
permissive
|
vaiski/checksum
|
https://github.com/vaiski/checksum
|
8a29849431a12b7ccbfcd749390b03bea05a4740
|
b88b76595922e182837ce42d49fd61bb2f053d77
|
refs/heads/master
| 2021-01-01T20:18:06.324443 | 2012-12-29T20:46:35 | 2012-12-29T20:46:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
'''
Verhoeff algorithm
~~~~~~~~~~~~~~~~~~
'''
from ..checksum import (
ChecksumStrategy,
)
class Verhoeff(ChecksumStrategy):
    '''
    Provides Verhoeff checksum algorithm.

    The scheme works in the dihedral group D5: each digit is first
    permuted according to its position (table ``p``), folded into an
    interim value via the group multiplication table ``d``, and the
    check digit is the group inverse (table ``inv``) of the result.
    '''

    name = 'verhoeff'

    # Multiplication table of the dihedral group D5.
    d = [
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        [1, 2, 3, 4, 0, 6, 7, 8, 9, 5],
        [2, 3, 4, 0, 1, 7, 8, 9, 5, 6],
        [3, 4, 0, 1, 2, 8, 9, 5, 6, 7],
        [4, 0, 1, 2, 3, 9, 5, 6, 7, 8],
        [5, 9, 8, 7, 6, 0, 4, 3, 2, 1],
        [6, 5, 9, 8, 7, 1, 0, 4, 3, 2],
        [7, 6, 5, 9, 8, 2, 1, 0, 4, 3],
        [8, 7, 6, 5, 9, 3, 2, 1, 0, 4],
        [9, 8, 7, 6, 5, 4, 3, 2, 1, 0],
    ]

    # Position-dependent permutation table (cycles with period 8).
    p = [
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        [1, 5, 7, 6, 2, 8, 3, 0, 9, 4],
        [5, 8, 0, 3, 7, 9, 6, 1, 4, 2],
        [8, 9, 1, 6, 0, 4, 3, 5, 2, 7],
        [9, 4, 5, 3, 1, 2, 6, 8, 7, 0],
        [4, 2, 8, 6, 5, 7, 3, 9, 0, 1],
        [2, 7, 9, 3, 8, 0, 6, 4, 1, 5],
        [7, 0, 4, 6, 9, 1, 3, 2, 5, 8],
    ]

    # Group inverse: maps the interim value to the final check digit.
    inv = [0, 4, 3, 2, 1, 5, 6, 7, 8, 9]

    def checksum(self, body):
        """Compute the Verhoeff check digit for *body*, as a string."""
        interim = 0
        for position, digit in enumerate(self._prepare(body)):
            permuted = self.p[(position + 1) % 8][int(digit)]
            interim = self.d[interim][permuted]
        return str(self.inv[interim])

    def split(self, value):
        """Separate *value* into (body, check digit)."""
        body, check = value[:-1], value[-1]
        return (body, check)

    def _prepare(self, body):
        """Reverse *body* and return its digits as a list of ints."""
        reversed_body = str(body[::-1])
        return [int(ch) for ch in reversed_body]
|
UTF-8
|
Python
| false | false | 2,012 |
13,864,154,459,707 |
98511312b8274a335fc39e8579e0d3a44020084d
|
1372d042330122d881989417329ac1d9f6b8647b
|
/lib/repo_gc.py
|
3cf8012e03237a044e22876b4572639f811f8367
|
[
"MIT"
] |
permissive
|
th0ma5w/polybius
|
https://github.com/th0ma5w/polybius
|
a9c08ee9fac447cc2a37c18ae0ace39734613887
|
a4c9d536b0c833d563a894cb0fa5b3f51a6b77c9
|
refs/heads/master
| 2020-03-26T08:27:00.623426 | 2013-08-15T18:23:21 | 2013-08-15T18:23:21 | 11,649,226 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import git
def garbage_collect(repo):
    """Run ``git gc`` in *repo* and return git's output.

    Same callable interface as the previous ``lambda`` form; PEP 8
    (E731) prefers ``def`` so tracebacks carry a useful name.
    """
    return git.Git(repo).gc()
|
UTF-8
|
Python
| false | false | 2,013 |
2,980,707,306,100 |
9707e1424565d7891fe6f9f3bdc231e81b76a117
|
f6c17672d64c0ce13fc8296ee7020b3fa30f2fe4
|
/src/util/Tool.py
|
bc628a985b1f4440acfd24e571d0209b1b7a0ef8
|
[
"MIT"
] |
permissive
|
wufulin/PyMusicPlayer
|
https://github.com/wufulin/PyMusicPlayer
|
3b6f0b7178caa302bc2211287183f7556baa4816
|
e0304d5abad623e2757cf55de5add0562ebc02e8
|
refs/heads/master
| 2020-06-01T09:25:24.041259 | 2013-12-03T02:51:21 | 2013-12-03T02:51:21 | 14,791,710 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#encoding=utf-8
'''
@author: wufulin
'''
CODEC='utf-8'
def ParseSongName(source):
    """Return the final component of a backslash-separated Windows path."""
    return source.rsplit('\\', 1)[-1]
def ParseSongNameEscapeFileExt(source):
    """Return the song's file name with its extension removed."""
    file_name = ParseSongName(source)
    # Everything before the first dot, like split('.')[0].
    return file_name.partition('.')[0]
def ms2time(ms):
    """Format a duration in milliseconds as an 'MM:SS' string.

    Returns '00:00' for non-positive input and '--:--' once the
    duration reaches one hour.  Fix: uses floor division (``//``) so
    the result is identical under Python 2 and Python 3 — the original
    ``/`` was integer division only under Python 2, and under Python 3
    the fractional hours made every value >= 1 minute print '--:--'.
    """
    if ms <= 0:
        return '00:00'
    time_sec, ms = ms // 1000, ms % 1000
    time_min, time_sec = time_sec // 60, time_sec % 60
    time_hor, time_min = time_min // 60, time_min % 60
    if time_hor == 0:
        return '%02d:%02d' % (time_min, time_sec)
    return '--:--'
|
UTF-8
|
Python
| false | false | 2,013 |
8,761,733,327,927 |
4bfd00aa9617d100ae9bd9e464d81538e27216cf
|
b4ce28e4be8841368f7ae669825c0ec12004834b
|
/dealfeaturedataForBetterResult-getLatter.py
|
86cd4974713b8754744337e894c0f347cba3361e
|
[] |
no_license
|
Youngyi/metaREC
|
https://github.com/Youngyi/metaREC
|
bccde93db8122c1ac9c6930ef4a5803b19c9fafd
|
0c0458b31b044e1cf03053fe06e59087a0292bda
|
refs/heads/master
| 2021-05-27T18:21:39.774547 | 2014-06-18T14:22:34 | 2014-06-18T14:22:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
f=open("E:\\checkin\\result\\foursquare_NewYork_featuredata_binary_test_12hours.txt",'r')
fout=open("E:\\checkin\\result\\foursquare_NewYork_featuredata_binary_test_12hours_out.txt",'w+')
i=0
begin = False
while True:
i=i+1
if i%10000000 ==0:
print i
newline=f.readline()
arr=newline.split(',');
userId=int(arr[0])
if userId ==31613218 and begin == False:
begin = True
print "out"
if begin:
fout.write(newline)
|
UTF-8
|
Python
| false | false | 2,014 |
6,571,300,004,677 |
d379e75841831b29ef7f90b3eba0f738cc7be5aa
|
cf0e99001e778a7160f7b975370f6c98196288ce
|
/lib/model/create.py
|
1f9c8d8df2fe400b8b7eb28fa646df8a03cf434c
|
[] |
no_license
|
j0hn/snipptr
|
https://github.com/j0hn/snipptr
|
112070985a8802fb928795f3b7cb1da06fe91125
|
3a3f2988e5e125fc6bb189fff8a4166b7343d598
|
refs/heads/master
| 2021-01-10T22:05:25.289154 | 2011-06-29T19:01:35 | 2011-06-29T19:01:35 | 1,974,123 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
Creates the database tables.
"""
import time
import peewee
from tag import Tag
from user import User
from base import database
from snippet import Snippet, SnippetTag
def create_tables():
    """
    Connect to the peewee database and create all model tables
    (Snippet, SnippetTag, Tag, User).  Raises if a table already
    exists — the __main__ block below relies on that.
    """
    database.connect()
    Snippet.create_table()
    SnippetTag.create_table()
    Tag.create_table()
    User.create_table()
def fill_with_content():
    """
    Seed the database with a demo user and two sample snippets.
    NOTE(review): creates a user with a hard-coded plaintext password —
    fine only for local sample data.
    """
    j0hn = User.create(username="j0hn", password="negros")
    create_snippet("Create os dir in python",
                   "import os\nos.mkdir('FOLDER_NAME')",
                   ["Python", "os"], j0hn)
    # No user given: create_snippet falls back to the anonymous user.
    create_snippet("Get command line arguments in C",
                   "int main(int *argc, char *argv[]){}",
                   ["C"])
def create_snippet(title, text, tags_names, user=None):
    """Create (or fetch) a snippet with the given title/text, owned by
    *user* (anonymous user when omitted), and attach one tag per name
    in *tags_names* (lower-cased)."""
    if not user:
        user = User().get_anon()
    snip = Snippet.get_or_create(title=title, text=text,
                                 date=str(time.time()), user=user)
    for tag_name in tags_names:
        tag = Tag.get_or_create(name=tag_name.lower())
        SnippetTag.get_or_create(snippet=snip, tag=tag)
if __name__ == "__main__":
try:
create_tables()
print "Tables created"
except:
print "Tables Allready created"
fill_with_content()
print "Tables filled with content"
|
UTF-8
|
Python
| false | false | 2,011 |
19,267,223,304,045 |
1b60a31b12d9377a40c87204a2f8e81113100268
|
1daaae31de3bb34c76594158e70a896fc64a2cef
|
/dyn_options.py
|
57cd60daf6fffa5d35eae80a3f9e6f11cdf4fefd
|
[
"BSD-3-Clause"
] |
permissive
|
fons/dyn_options
|
https://github.com/fons/dyn_options
|
dba2592b104b30c9d94d67545c3f4032ff629110
|
cc80d0fd7e9435f12a7b5149a9c84f465232e92b
|
refs/heads/master
| 2016-09-06T05:18:35.917743 | 2009-08-30T19:21:27 | 2009-08-30T19:21:27 | 290,487 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
import string
import types
class dyn_options :
    """Dynamic, write-once option bag (Python 2 code: has_key, types,
    string.join).

    Attributes are stored directly in __dict__; once set they cannot be
    overwritten, and after freeze() no new attributes can be added.
    Unknown attributes read as False rather than raising.  The special
    'defaults' attribute accepts a dict or a list of pairs and fills in
    only the keys not already present.
    """
    def __init__(self, d) :
        # Adopt *d* wholesale as the instance dictionary.
        self.__dict__ = d
        self._freeze = False
        # Bookkeeping names excluded from the str() listing.
        self._internals = ["_internals","_freeze", "defaults"]
    def __repr__(self) :
        # repr shows everything, including internals.
        return self.__make_str(self.__id)
    def __str__(self) :
        # str hides the internal bookkeeping attributes.
        return self.__make_str(self.__excl_internal_symbols)
    def __id(self, k) :
        # Filter predicate: accept every key.
        return True
    def __excl_internal_symbols(self, k) :
        # Filter predicate: drop internal bookkeeping keys.
        return k not in self._internals
    def __make_str(self, fn) :
        # Render 'options :' plus one indented line per accepted key.
        L = ["options :"]
        L = L + map(self.__make_rep, filter(fn, self.__dict__))
        return string.join(L,"\n")
    def __make_rep(self, key) :
        return "\t#) " + str(key) + " ==> " + str(self.__dict__[key])
    def __getattr__(self, attrname) :
        # Missing options read as False (so `if opt.flag :` just works).
        return False
    def freeze(self) :
        """Disallow adding any further options; returns True."""
        self._freeze = True
        return self._freeze
    def __setattr__(self, attr, value) :
        # Write-once semantics with special handling for 'defaults'.
        def _set_default(p) :
            # Install (key, value) only when the key is not present yet.
            if not self.__dict__.has_key(p[0]) :
                self.__dict__[p[0]] = p[1]
            return p
        def _is_frozen() :
            if self.__dict__.has_key("_freeze") and self.__dict__["_freeze"] == True :
                return True
            return False
        # _freeze=True is always allowed through (freeze() itself).
        if attr == "_freeze" and value == True :
            self.__dict__[attr] = value
            return
        # New ordinary attribute on an unfrozen instance: accept.
        if attr != "defaults" and not self.__dict__.has_key(attr) and not _is_frozen():
            self.__dict__[attr] = value
            return
        # Existing attributes are never overwritten.
        if self.__dict__.has_key(attr) :
            return
        # defaults given as a dict: merge the missing keys.
        if attr == "defaults" and type(value) == types.DictType :
            map(_set_default, value.iteritems())
            self.__dict__[attr] = value
        # defaults given as a list of pairs: convert via dict();
        # a malformed list silently clears any stored defaults.
        if attr == "defaults" and type(value) == types.ListType :
            try :
                map(_set_default, dict(value).iteritems())
                self.__dict__[attr] = dict(value)
            except :
                if self.__dict__.has_key("defaults") :
                    del self.__dict__["defaults"]
            return
def create_option(argv, defaults=[]) :
    """Parse a raw argv list into a frozen dyn_options instance.

    Tokens starting with '-'/'--' begin a new option; following plain
    tokens become that option's value (joined with spaces); a flag with
    no value becomes True.  *defaults* (dict or list of pairs) fills in
    unset options.  Python 2 code (reduce/string.join at module scope).
    """
    def parse_argv(l, r) :
        # Reducer: fold token *r* into the accumulated token groups *l*.
        def starts(s) :
            # True when s looks like an option ('-x' or '--x').
            def starts_with(pat, s) :
                return (pat == s[0:len(pat)])
            return starts_with("-", s) or starts_with("--", s)
        def start(l, r) :
            """
            start state
            """
            # The very first token (the program name) is stored under
            # the synthetic '--program' option.
            if len(l) == 0 :
                return ([["--program", r ]], "")
            if len(l) == 1 and len(l[0]) == 0 :
                return ([["--program", r ]], "")
            return (l, r)
        def pattern1(l, r) :
            """
            last argument started with a -- and the next one as well e.g [..] --opt1 --opt2 [..]
            """
            # The previous option had no value: record it as True.
            if (len(r) == 0) :
                return (l, r)
            le = len(l) - 1
            if (len(l[le]) == 1) and starts(l[le][0]) and starts(r) :
                l[le].append(True)
                l.append([r])
                return (l,"")
            return (l,r)
        def pattern2(l, r) :
            """
            last argument started with a -- and the next one doesn't [..] --opt1 value1 [..]
            or
            last argument didn't start with a -- and neither does the next one [..] value1 value2 [..]
            """
            if (len(r) == 0) :
                return (l, r)
            le = len(l) - 1
            if (len(l[le]) > 0) and starts(l[le][0]) and (not starts(r) ) :
                l[le].append(r)
                return (l,"")
            return (l,r)
        def pattern3(l, r) :
            """
            last argument didn't start with a -- and the next one does [..] value1 --opt2 [..]
            """
            if (len(r) == 0) :
                return (l, r)
            le = len(l) - 1
            if (len(l[le]) > 1) and starts(l[le][0]) and starts(r) :
                l.append([r])
                return (l,"")
            return (l,r)
        # Try each state transition in order; the first that consumes
        # the token blanks r so the rest are no-ops.
        (l, r) = start(l, r)
        (l, r) = pattern1(l, r)
        (l, r) = pattern2(l, r)
        (l, r) = pattern3(l, r)
        return l
    def clean_args(l) :
        # Normalize a token group: strip leading dashes from the option
        # name, default a bare flag to True, join multi-token values.
        def strip_dash(s) :
            if "--" == s[0:2] :
                return s[2:]
            if "-" == s[0:1] :
                return s[1:]
            return s
        if len(l) == 0 :
            return l
        if len(l) == 1 :
            l.append(True)
        if len(l) < 3:
            return [strip_dash(l[0])] + l[1:]
        return [strip_dash(l[0]), string.join(l[1:]," ")]
    # Fold argv into [option, value] pairs, build the bag, apply
    # defaults, then freeze so later attribute writes are ignored.
    opt = dyn_options( dict(map(clean_args, reduce(parse_argv, argv, [ [] ] ))))
    opt.defaults = defaults
    opt.freeze()
    return opt
|
UTF-8
|
Python
| false | false | 2,009 |
2,843,268,379,417 |
879ae8a9bbb27e6c34f6f4217b9db34fd6e41cd7
|
1db6c8dc22a15a77f80f23842cc7c68b58f34abd
|
/test_bin.py
|
0161289b8508394fb1c9a117938f632df135270e
|
[] |
no_license
|
gantony/roulette-simulator
|
https://github.com/gantony/roulette-simulator
|
19d1a7b80433d5e027f3665140dc48e036267393
|
01473b0abcd861261cadde3301ae43219079a25d
|
refs/heads/master
| 2021-01-23T22:42:55.403975 | 2014-10-23T22:14:51 | 2014-10-23T22:14:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from outcome import Outcome
from bin import Bin
class TestBin(unittest.TestCase):
    """Unit tests for Bin construction and string rendering."""
    def setUp(self):
        # Shared fixture outcomes; two share the same odds, two the
        # same color-ish name, to exercise set behavior in Bin.
        self.output = Outcome("Red", 11)
        self.other = Outcome("Red-ish", 10)
        self.different = Outcome("Black", 11)
    def testConstructBinWithOneOutcome(self):
        binSingleOutcome = Bin(self.output)
        self.assertEqual(len(binSingleOutcome.outcomes), 1)
    def testConstructBinWithMultipleOutcomes(self):
        binMultipleOutcome = Bin(self.output, self.other, self.different)
        self.assertEqual(len(binMultipleOutcome.outcomes), 3)
    def testCanAddOutcomeToBin(self):
        testBin = Bin()
        self.assertEqual(len(testBin.outcomes), 0)
        testBin.add(self.output)
        self.assertEqual(len(testBin.outcomes), 1)
    def testStrSingleOutcome(self):
        # NOTE(review): binSingleOutcome is built but unused — the
        # assertion checks Outcome.__str__, not the Bin's rendering.
        # Probably meant `"%s" % binSingleOutcome`; confirm intent.
        binSingleOutcome = Bin(self.output)
        self.assertEqual("%s" % self.output, "Red (11:1)")
    def testStrMultipleOutcomes(self):
        binMultipleOutcome = Bin(self.output, self.other, self.different)
        self.assertEqual("%s" % binMultipleOutcome, "[Black (11:1), Red-ish (10:1), Red (11:1)]")
if __name__ == '__main__':
unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
11,647,951,344,720 |
f80091446eeb88878d3ef41b755141d4547971b4
|
f15fd97aca7cb452be89c2028d30dd96e4f1d57f
|
/test2.py
|
96661af0f95bc5f27d3b78b2e5ae248be5424aad
|
[] |
no_license
|
ketilk/Atlas
|
https://github.com/ketilk/Atlas
|
d54e7cd59ee9eb5f2ea255a97242249ae3beadb2
|
94b21e4e0ff357f8429433f570ee28fa8825a99c
|
refs/heads/master
| 2021-01-21T11:15:03.483254 | 2014-11-29T20:01:28 | 2014-11-29T20:01:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import time
import atlas
from math import sin
from math import pi
from topic import Topic
class TestDaemon(atlas.AtlasDaemon):
    """Minimal AtlasDaemon that subscribes to a topic and logs it."""
    def _init(self):
        # Subscribe once at startup; hook called by the daemon base.
        self.topic = Topic("temperature", "tester")
        self.subscriber = self.get_subscriber(self.topic)
    def _loop(self):
        # Per-iteration hook: log the subscriber's current topic.
        self.logger.info(self.subscriber.get_topic())
import logging
import sys
import os
file_name = os.path.splitext(os.path.basename(__file__))[0]
if __name__ == '__main__':
    # Classic start/stop/restart daemon CLI (Python 2 print syntax).
    # Log and pid file names are derived from this script's file name.
    logging.basicConfig(filename='/var/log/' + file_name + '.log',
                        filemode='a',
                        format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
                        datefmt='%H:%M:%S',
                        level=logging.INFO)
    logger = logging.getLogger(__name__)
    daemon = TestDaemon('/var/run/' + file_name + '.pid')
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            daemon.start()
        elif 'stop' == sys.argv[1]:
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            daemon.restart()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart" % sys.argv[0]
        sys.exit(2)
|
UTF-8
|
Python
| false | false | 2,014 |
3,221,225,504,510 |
2feae35856bcbde685f9d608997af29b509d4b84
|
bc164190f6cf1d884c12848b46e08832bc2b2819
|
/gui/model/executor.py
|
6beea77f3dfe3d25e33cb570da6b9f50929ecf41
|
[
"GPL-1.0-or-later",
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-only"
] |
non_permissive
|
tokarthik/IE4unix
|
https://github.com/tokarthik/IE4unix
|
edb82bec2cbb3e1f9a61c896e26bee89f089140d
|
8a0f31c6cd49eb2d51f2b68f827528d52213a5eb
|
refs/heads/master
| 2021-05-27T15:20:57.703318 | 2014-09-24T06:50:45 | 2014-09-24T06:50:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from installer import _Button as _Button
# Defines a program executor
# Defines a program executor
class ExecutorDefinition:
    """Fluent builder describing how to execute a program: base command,
    widget-derived arguments, display metadata, and action buttons."""

    def __init__(self):
        self.buttons = []
        self.args = []

    def set_program(self, program):
        """Attach the program whose widget state feeds the arg list."""
        self.program = program
        self._do_command()
        return self

    def set_initial_command(self, command, args):
        """Record the base command and seed the argument list."""
        self.command = command
        self.args.extend(args)
        self._do_command()
        return self

    def _do_command(self):
        """Append arguments derived from the program's widgets (no-op
        until a program has been attached)."""
        if not hasattr(self, 'program'):
            return
        for checkbox in self.program.checkboxes():
            if checkbox.checked:
                self.args.append(checkbox.command)
        for combobox in self.program.comboboxes():
            self.args.extend((combobox.command, combobox.selected))
        for textfield in self.program.textfields():
            self.args.extend((textfield.command, textfield.value))

    def title(self, title):
        # NOTE: rebinds the instance attribute over the method name —
        # after one call, obj.title is the value (original behavior).
        self.title = title
        return self

    def subtitle(self, subtitle):
        self.subtitle = subtitle
        return self

    def logo(self, logo):
        #TODO validates file existence
        self.logo = logo
        return self

    def button(self, label, img, callback):
        """Add an action button built from label, image, and callback."""
        new_button = _Button(label, img, callback)
        self.buttons.append(new_button)
        return self
|
UTF-8
|
Python
| false | false | 2,014 |
18,863,496,377,259 |
081799eb0267e0bd2cf7530033fb3331c5d74c23
|
d5047ce01e3afab588bbf858de12512e133416ea
|
/basicOps.py
|
32fb27bd1ff898099cefd94f2df9d15ea7d77b0d
|
[] |
no_license
|
thomasmurphycodes/PythonFinance
|
https://github.com/thomasmurphycodes/PythonFinance
|
1b0126710d4b21323e0b4b82e252dcf1c29aa025
|
0a3b2ed936bdc5ecef6cb42cb0996c3f3ea7c6dd
|
refs/heads/master
| 2016-08-05T19:58:08.283916 | 2014-08-04T21:52:56 | 2014-08-04T21:52:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import math
__author__ = 'thomasmurphy'
# Relation between present value & future value where:
# PV = present value, FV = future value, and R = rate of return over n periods
def presentVal(futureVal, rate, periods):
    """Discount *futureVal* back *periods* periods at *rate* per period."""
    discount_factor = (1 + rate) ** periods
    return futureVal / discount_factor
print "Present Value:"
print presentVal(1000, 0.1, 4)
""" Future Value, where
PV = Present Value
R = interest rate
n = number of periods
"""
def futureVal(presentVal, rate, periods):
    """Compound *presentVal* forward *periods* periods at *rate*."""
    growth = (1 + rate) ** periods
    return presentVal * growth
print "Future Val:"
print futureVal(40000, 0.1, 10)
""" Perpetuity Model at present time where
c = constant periodic cash flow
R = periodic discount rate """
def perpetuity(cashFlow, discountRate):
    """Present value of a perpetuity: constant cash flow over the rate."""
    value = cashFlow / discountRate
    return value
print "Sample Perpetuity"
print perpetuity(1000,0.2)
""" Annuity where:
PMT = equal periodic payment
R = periodic discount rate
n = number of periods
"""
#Present Value Annuity
def annuity(PMT, R, n):
    """Present value of an ordinary annuity: n payments of PMT at rate R."""
    discount = 1 / (1 + R) ** n
    return (PMT / R) * (1 - discount)
print "annuity:"
print annuity(300, 0.3, 8)
#Present Value Annuity Due
def annuityDue(PMT, R, n):
    """Present value of an annuity due (payments at the start of periods)."""
    ordinary = (PMT / R) * (1 - (1 / (1 + R) ** n))
    return ordinary * (1 + R)
#Future Value Annuity
def fvAnnuity(PMT, R, n):
    """Future value of an ordinary annuity after n periods at rate R."""
    growth = (1 + R) ** n - 1
    return (PMT / R) * growth
#Future Value Annuity Due
def fvAnnuityDue(PMT, R, n):
    """Future value of an annuity due after n periods at rate R."""
    base = (PMT / R) * ((1 + R) ** n - 1)
    return base * (1 + R)
""" INTEREST RATES """
#Effective Annual rate for a given APR
"""
APR = Annual Percentage Rate
m = compounding frequency in one year
"""
def effectiveAnnualRate(APR, m):
    """Effective annual rate for *APR* compounded *m* times per year."""
    periodic = 1 + APR / m
    return periodic ** m - 1
print "Effective annual rate for 5.9 percent compounded quarterly"
print effectiveAnnualRate(0.059, 4);
#Estimated effective rate, used to convert between values of m
def estimatedEffectiveRate(APR, m):
    """Simple periodic rate: the APR divided across *m* periods."""
    per_period = APR / m
    return per_period
#Convert to different value of m
def periodConversion(APR, mone, mtwo):
    """Convert an APR from compounding frequency *mone* to *mtwo*."""
    per_period = 1 + APR / mone
    exponent = mone / mtwo
    return per_period ** exponent - 1
print "Effective monthly rate for a semiannually compounded 6% APR"
print periodConversion(12, 2, 12)
"""
Continuously Compounded Interest Rate
m = compounding frequency per year
APR is annual percentage rate
"""
def contCompInterest(m, APR):
    """Continuously compounded rate equivalent to *APR* compounded *m*
    times per year: r = m * ln(1 + APR/m).

    Fix: the original computed ``math.log(1*(APR/m))`` — i.e.
    m * ln(APR/m) — instead of ``1 + APR/m``, yielding large negative
    values (e.g. the 10%% bimonthly example below the function).
    """
    return m * math.log(1 + APR / m)
print "Continuously compounded interest rate on 10% at bimonthly compounding:"
print contCompInterest(0.1, 6)
"""
Net present value. Determine whether a transaction is strong based on discounted rate and
cash flows
"""
def netPresentValue(rate, cashflows):
    """Classify an investment by the sign of its net present value.

    Note: returns a human-readable verdict string, not the NPV number
    itself (the original behavior is preserved).
    """
    npv = sum(cf / (1 + rate) ** t for t, cf in enumerate(cashflows))
    if npv <= 0:
        return "That's a Bad Choice"
    return "That's a Good Choice!"
print "Net present value of initial 40000 investment with " \
"7000, 5000, 3000, 20000, and 10000 future cash flows and" \
"6% rate"
print netPresentValue(0.032, [-40000,7000,5000,3000,20000,10000])
"""
Internal Rate of Return: The annualized compound itnerest rate that makes
all cash flows in a project equal to 0
"""
def internalRateReturn(cashflows, iterations=100):
    """Estimate the internal rate of return of *cashflows* by fixed-point
    iteration, starting from rate = 1.0.

    cashflows[0] is the initial investment (negative by convention).

    Fix: the original multiplied by ``netPresentValue(rate, cashflows)``,
    which returns a verdict *string*, so every call raised TypeError.
    The NPV is now computed numerically in a local helper; the update
    rule and iteration count are unchanged.
    """
    def _npv(rate):
        # Numeric net present value (netPresentValue() returns a string).
        return sum(cf / (1 + rate) ** t for t, cf in enumerate(cashflows))

    rate = 1.0
    # Initial Investment, will be negative
    investment = cashflows[0]
    for _ in range(1, iterations + 1):
        rate *= (1 - _npv(rate) / investment)
    return rate
|
UTF-8
|
Python
| false | false | 2,014 |
240,518,194,556 |
f31834923e475ef5c0875b890792d14d605b93fc
|
470ef7d6c6bd99a58e3b7ca97c03ff084bf98160
|
/scripts/mfs.py
|
e0d886c4752180d904d17e867924c7203ec00875
|
[] |
no_license
|
rbaravalle/europeanfood
|
https://github.com/rbaravalle/europeanfood
|
80296a89eddf9768776be836ddad94008ae996dd
|
2421c3e7d0b430b78111875baad406c18322b6a9
|
refs/heads/master
| 2016-09-05T09:47:13.394278 | 2013-10-24T15:36:38 | 2013-10-24T15:36:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import Image
import numpy as np
from math import exp, log10
import scipy.ndimage.filters as sf
import matplotlib
from matplotlib import pyplot as plt
import scipy.signal
def gauss_kern(size, sizey):
    """ Returns a normalized 2D gauss kernel array for convolutions.

    sigma is picked by kernel size (1.5 for <=3, 2.5 for ==5, else 2).
    NOTE: this is Python 2 code — `map` returns lists here; under
    Python 3 np.sum/np.array over map iterators would not work.
    """
    m = np.float32(size)
    n = np.float32(sizey)
    sigma = 2; # ???
    if(size <= 3): sigma = 1.5;
    if(size == 5): sigma = 2.5;
    # Centered coordinate grids covering [-(m-1)/2, (m-1)/2].
    y, x = np.mgrid[-(m-1)/2:(m-1)/2+1, -(n-1)/2:(n-1)/2+1]
    b = 2*(sigma**2)
    # Element-wise squares via nested map (Python 2 list semantics).
    x2 = map(lambda i: map( lambda j: j**2,i), x)
    y2 = map(lambda i: map( lambda j: j**2,i), y)
    g = np.sum([x2,y2],axis=0).astype(np.float32)
    # exp(-(x^2+y^2) / (2*sigma^2)) per cell, then normalize to sum 1.
    g = np.array(map(lambda i: map( lambda j: exp(-j/b),i), g)).astype(np.float32)
    return g / g.sum()
def mfs(im,extra):
#mfs Computes the MFS vector for the input measurement image im
#
# parameters: ind_num -> determines how many levels are used when computing the density
# choose 1 for using directly the image measurement im or
# >= 6 for computing the density of im (quite stable for >=5)
# f_num----> determines the dimension of MFS vector
# ite_num ---> determines how many levels are used when computing MFS for each level set
# (quite stable for >= 3)
#
#MFS = mfs(im) computes the MFS for input im with default setting
#
#MFS = mfs(im,ind_num) computes the MFS with ind_num density levels
#
#MFS = mfs(im,ind_num, f_num) computes the MFS of dimension f_num for input im
# with ind_num density levels
#
#MFS = mfs(im, ind_num, f_num,ite_num) computes the MFS of dimension f_num for input measurement im
# using ite_num level iterations in the
# estimation of the fractal dimension and using ind_num level
# iterations in the density estimation.
#
#Author: Yong Xu, Hui Ji
#Date: Apr 24, 2007
#Code ported to python : Rodrigo Baravalle. December 2012
ind_num = 1
if(len(extra) == 1):
ind_num = extra[0] #density counting levels
f_num = 26 #the dimension of MFS
ite_num = 3 # iteration levels in estimating fractal dimension
if(len(extra) == 2):
ind_num = extra[0]
f_num = extra[1]
ite_num = 3
if(len(extra) >= 3):
ind_num = extra[0]
f_num = extra[1]
ite_num = extra[2]
# Extra[3] == True means what we are passing is a filename
# Extra[3] == False means what we are passing is an array
FILENAME = extra[3]
if(FILENAME):
im = Image.open(im)
# Preprocessing: if IM is a color image convert it to a gray image
im = im.convert("L")
im = np.array(im.getdata()).reshape(im.size)
#Using [0..255] to denote the intensity profile of the image
grayscale_box =[0, 255];
#Preprocessing: default intensity value of image ranges from 0 to 255
if(abs(im).max()< 1):
im = im * grayscale_box[1];
#######################
### Estimating density function of the image
### by solving least squares for D in the equation
### log10(bw) = D*log10(c) + b
r = 1.0/max(im.shape)
c = np.dot(range(1,ind_num+1),r)
c = map(lambda i: log10(i), c)
bw = np.zeros((ind_num,im.shape[0],im.shape[1])).astype(np.float32)
bw[0] = im + 1
k = 1
if(ind_num > 1):
bw[1] = scipy.signal.convolve2d(bw[0], gauss_kern(k+1,(k+1)),mode="full")[1:,1:]*((k+1)**2)
for k in range(2,ind_num):
temp = scipy.signal.convolve2d(bw[0], gauss_kern(k+1,(k+1)),mode="full")*((k+1)**2)
if(k==4):
bw[k] = temp[k-1-1:temp.shape[0]-(k/2),k-1-1:temp.shape[1]-(k/2)]
else:
bw[k] = temp[k-1:temp.shape[0]-(1),k-1:temp.shape[1]-(1)]
bw = np.log10(bw)
n1 = c[0]*c[0]
n2 = bw[0]*c[0]
for k in range(1,ind_num):
n1 = n1+c[k]*c[k]
n2 = n2 + bw[k]*c[k]
sum3 = bw[0]
for i in range(1,ind_num):
sum3 = sum3 + bw[i]
if(ind_num >1):
D = (n2*ind_num-sum(c)*sum3)/(n1*ind_num -sum(c)*sum(c));
if (ind_num > 1):
max_D = np.float32(4)
min_D = np.float32(1)
D = grayscale_box[1]*(D-min_D)/(max_D - min_D)+grayscale_box[0]
else:
D = im
#Partition the density
# throw away the boundary
D = D[ind_num-1:D.shape[0]-ind_num+1, ind_num-1:D.shape[1]-ind_num+1]
IM = np.zeros(D.shape)
gap = np.ceil((grayscale_box[1] - grayscale_box[0])/np.float32(f_num));
center = np.zeros(f_num);
for k in range(1,f_num+1):
bin_min = (k-1) * gap;
bin_max = k * gap - 1;
center[k-1] = round((bin_min + bin_max) / 2);
D = ((D <= bin_max) & (D >= bin_min)).choose(D,center[k-1])
D = ((D >= bin_max)).choose(D,0)
D = ((D < 0)).choose(D,0)
IM = D
#Constructing the filter for approximating log fitting
r = max(IM.shape)
c = np.zeros(ite_num)
c[0] = 1;
for k in range(1,ite_num):
c[k] = c[k-1]/(k+1)
c = c / sum(c);
#Construct level sets
Idx_IM = np.zeros(IM.shape);
for k in range(0,f_num):
IM = (IM == center[k]).choose(IM,k+1)
Idx_IM = IM
IM = np.zeros(IM.shape)
#Estimate MFS by box-counting
num = np.zeros(ite_num)
MFS = np.zeros(f_num)
for k in range(1,f_num+1):
IM = np.zeros(IM.shape)
IM = (Idx_IM==k).choose(Idx_IM,255+k)
IM = (IM<255+k).choose(IM,0)
IM = (IM>0).choose(IM,1)
temp = max(IM.sum(),1)
num[0] = log10(temp)/log10(r);
for j in range(2,ite_num+1):
mask = np.ones((j,j))
bw = scipy.signal.convolve2d(IM, mask,mode="full")[1:,1:]
indx = np.arange(0,IM.shape[0],j)
indy = np.arange(0,IM.shape[1],j)
bw = bw[np.ix_(indx,indy)]
idx = (bw>0).sum()
temp = max(idx,1)
num[j-1] = log10(temp)/log10(r/j)
MFS[k-1] = sum(c*num)
return MFS
|
UTF-8
|
Python
| false | false | 2,013 |
16,423,954,964,149 |
b5f8add556099f36d0ce2a89a4ef2b7845d5e794
|
5be29d9a515686ca5f880416b7901b1abd1f8e0e
|
/minos/tests/validators/test_with_validator.py
|
b36b2f1810d5eeda39834ec6b01095be6af53708
|
[
"MIT"
] |
permissive
|
jamiepg1/minos
|
https://github.com/jamiepg1/minos
|
fb4b19531cf0a8f2ef0c19ef5db8ca9a554051df
|
d2f099133d46b173b8dab8c7cbe1d1a1fad4c5b6
|
refs/heads/master
| 2018-03-12T06:00:12.502437 | 2013-10-07T20:07:00 | 2013-10-07T20:07:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import absolute_import
import unittest
from minos.errors import ValidationError
class testWithValidator(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_email_validator(self):
from minos.validators import WithValidator
class testClass:
fruit_info = None
def __init__(self, fruit_info):
self.fruit_info = fruit_info
def ripeness(self):
return self.fruit_info['ripe'] == True
def yellowness(self):
return self.fruit_info['fruit_type'] in ['banana', 'grapefruit', 'lemon']
foo = testClass(
fruit_info={
'fruit_type': 'apple',
'ripe': True
}
)
validator1 = WithValidator('fruit_info', with_='ripeness')
validator2 = WithValidator('fruit_info', with_='yellowness')
self.assertEqual(validator1.validate_wrapper(foo), None)
self.assertRaises(ValidationError, validator2.validate_wrapper, foo)
|
UTF-8
|
Python
| false | false | 2,013 |
4,063,039,091,169 |
2e0abefe5e346bae5f273ddd5cf58ff410c6d1ae
|
d18f7ba1ec51a231b84925d6efa5cfea057e4d36
|
/Algos/06/bucketsort.py
|
6cb1625df01f1b9d9522cdfe05ed8a777a6bd487
|
[] |
no_license
|
hanslovsky/SS_2012
|
https://github.com/hanslovsky/SS_2012
|
9ec64aee4ded377060715c3d1358e04d769fc1eb
|
814e4eacabc7fda44484b818f53c5ac0033ee437
|
refs/heads/master
| 2021-01-20T10:13:26.659119 | 2012-07-25T20:26:26 | 2012-07-25T20:26:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
import timeit
import random
import unittest
from pylab import polyfit, poly1d
import matplotlib.pyplot as plt
from math import sqrt, fabs
from os import sys
import numpy as np
from scipy.optimize import curve_fit
def insertionSort(a):
for i in xrange(len(a)):
item = a[i]
ihole = i
while ihole > 0 and a[ihole-1] > item:
a[ihole] = a[ihole-1]
ihole -= 1
a[ihole] = item
def createData(size):
a = []
while len(a) < size:
x, y = random.uniform(-1, 1), random.uniform(-1, 1)
r = sqrt(x**2 + y**2)
if r < 1.0:
a.append(r)
return a
def bucketMapLinear(r, M):
return int(r*M)
def bucketMapQuadratic(r, M):
return int(r**2*M)
#return int(r*M)
def returnBuckets(a, bucketMap, M):
buckets = [[] for k in xrange(M)]
for r in a:
index = bucketMap(r, M)
buckets[index].append(a)
return buckets
def testUniformity(buckets):
N = 0
for bucket in buckets:
N += len(bucket)
M = len(buckets)
c = float(N)/float(M)
chiSquared = 0
for bucket in buckets:
chiSquared += (len(bucket)-c)**2
chiSquared = chiSquared/c
tau = fabs(sqrt(2*chiSquared)-sqrt(2*M-3))
return tau
def bucketSort(a, bucketMap, c):
M = int(c*len(a))
b = [[] for k in xrange(M)]
for k in xrange(len(a)):
index = bucketMap(a[k], M)
b[index].append(a[k])
i=0
for k in xrange(M):
insertionSort(b[k])
a[i:i+len(b[k])] = b[k]
i += len(b[k])
class SortingTestCase(unittest.TestCase):
""" Check post conditions for sorting algorithm
"""
def setUp(self):
self.randomArray = [random.randint(0,1000) for r in xrange(100)]
self.sortedArray = self.randomArray[:]
#check the size
def runTest(self):
self.assertEqual(len(self.randomArray), len(self.sortedArray),
'incorrect size')
#check that arrays contain same elements
_randomArray = self.randomArray[:]
for element in self.sortedArray:
index = 0
length = len(_randomArray)
while index < len(_randomArray):
if element == _randomArray[index]:
_randomArray.pop(index)
break
index += 1
self.assertEqual(len(_randomArray), length,
'different elements in array')
#check that array is sorted
last = self.sortedArray[0]
for element in self.sortedArray[1:]:
self.assertTrue(last <= element,'not sorted')
last = element
#test bucketMapQuadratic
class BucketSortTestCase1(SortingTestCase):
def setUp(self):
self.randomArray = [random.random() for r in xrange(1000)]
self.sortedArray = self.randomArray[:]
bucketSort(self.sortedArray, bucketMapQuadratic, 0.2)
#test bucketMapLinear
class BucketSortTestCase2(SortingTestCase):
def setUp(self):
self.randomArray = [random.random() for r in xrange(1000)]
self.sortedArray = self.randomArray[:]
bucketSort(self.sortedArray, bucketMapLinear, 0.2)
if __name__ == "__main__":
print "takes about 2 minutes..."
array_length = [10**i for i in xrange(2,7)]
array_M = [2, 4, 8, 20, 100]
n_total = len(array_M)*len(array_length)
print "-"*40
print "run test to check uniformity for quadratic indexing:"
n_uniform_q = 0
for l in array_length:
a = createData(l)
for M in array_M:
# print "Length = ", l, " M = ",M
buckets_quad = returnBuckets(a, bucketMapQuadratic, M)
tau = testUniformity(buckets_quad)
# print 'tau = ', tau
if tau <= 3:
n_uniform_q += 1
print "-"*40
print "\n"
print "run test to check uniformity for linear indexing: "
n_uniform_l = 0
for l in array_length:
a = createData(l)
for M in array_M:
# print "Length = ", l, " M = ",M
buckets_lin = returnBuckets(a, bucketMapLinear, M)
tau = testUniformity(buckets_lin)
# print 'tau = ', tau
if tau <= 3:
n_uniform_l += 1
print "-"*79
print "\n"
print "test result for quadratic indexing:"
print n_uniform_q, " out of ", n_total, " tests passed"
print "\n"
print "test result for linear indexing:"
print n_uniform_l, " out of ", n_total, " tests passed"
print "\n"
print "running unittest to check correctness of bucketSort\n"
## testing bucketSort with both bucketMaps
sortingtestsuite = unittest.TestSuite()
sortingtestsuite.addTest(BucketSortTestCase1())
sortingtestsuite.addTest(BucketSortTestCase2())
unittest.TextTestRunner(verbosity=2).run(sortingtestsuite)
## checking if runtime grows linear with arraysize
print "compute runtime..."
n_length = [1000, 5000, 10000, 20000, 40000, 100000, 200000,
300000, 400000, 500000, 600000, 700000]
time_quad = []
time_lin = []
for n in n_length:
reps = 10
init="""
import bucketsort as bs
a = bs.createData(%d)
c=4.0
c=1.0/c
""" % n
prog_lin = """bs.bucketSort(a, bs.bucketMapLinear, c)"""
prog_quad = """bs.bucketSort(a, bs.bucketMapQuadratic, c)"""
timer_quad = timeit.Timer(prog_quad, init)
timer_lin = timeit.Timer(prog_lin, init)
time_quad.append(timer_quad.timeit(reps))
time_lin.append(timer_lin.timeit(reps))
# linear fit to data
fit = polyfit(n_length, time_quad, 1)
fit_fn = poly1d(fit)
x=range(n_length[-1])
p_quad = plt.plot(n_length, time_quad, 'bs')
p_lin = plt.plot(n_length, time_lin, 'yo')
p_fit = plt.plot(x, fit_fn(x), 'k')
plt.legend([p_quad, p_lin, p_fit], ["int(r**2*M)", "int(r*M", "linear fit"], loc = 2)
plt.xlabel('array length')
plt.ylabel('runtime')
plt.title('Runtime of bucketSort with different bucketMaps')
plt.axis([0, n_length[-1] + 10, 0, time_lin[-1] + 10])
plt.show()
|
UTF-8
|
Python
| false | false | 2,012 |
6,141,803,248,589 |
e3c0a239847ebb0265c0aed7cb66abec0e916418
|
795d0917f40f2a0cddf54867a165ba34daae0cca
|
/Robot/Susi.py
|
ecb3a94aa854ac207c9a1896421121a22d4933d7
|
[] |
no_license
|
ruudel/Soe-Raev
|
https://github.com/ruudel/Soe-Raev
|
a47f1bdc64ad01b18a8a49cb954239c8e87158fb
|
1d09c644663fe46b8f83a7749cdccdb2582f1ed7
|
refs/heads/master
| 2016-09-05T19:35:09.063627 | 2013-11-14T08:19:41 | 2013-11-14T08:19:41 | 12,954,474 | 1 | 0 | null | false | 2013-11-04T01:14:10 | 2013-09-19T17:19:05 | 2013-11-04T01:10:31 | 2013-11-04T01:09:55 | 256 | null | 2 | 0 |
Python
| null | null |
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import cv2.cv as cv
import serial
import time
from time import sleep
parem = serial.Serial('/dev/ttyACM2', timeout=1, parity=serial.PARITY_NONE, baudrate=115200)
vasak = serial.Serial('/dev/ttyACM1', timeout=1, parity=serial.PARITY_NONE, baudrate=115200)
coil = serial.Serial('/dev/ttyACM0', timeout=1, parity=serial.PARITY_NONE, baudrate=115200)
def saada(seade, sonum):
seade.write(sonum+'\n')
def saadaseadmetele(sonum):
saada(vasak, sonum)
saada(parem, sonum)
def stop():
saadaseadmetele('sd0')
def soidaedasi(kiirus):
saada(vasak,'sd'+str(kiirus))
saada(parem,'sd-'+str(kiirus))
def tagane(kiirus):
saada(vasak,'sd-'+str(kiirus))
saada(parem,'sd'+str(kiirus))
def ymberpoord():
saadaseadmetele('sd20')
sleep(0.7)
saadaseadmetele('sd0')
def soidaparemale(kiirus):
saada(vasak, 'sd'+str(kiirus-kiirus*0.3))
saada(parem, 'sd-'+str(kiirus))
def soidavasakule(kiirus):
saada(vasak, 'sd'+str(kiirus))
saada(parem, 'sd-'+str(kiirus-kiirus*0.3))
def loeseadmest(seade, sonum):
saada(seade,sonum)
print(seade.readline())
def kasSin():
for i in range(3):
parem.write('go\n')
v=parem.readline()
if '<0mine>' in v:
return True
else:
return False
return False
def kasKol():
for i in range(3):
parem.write('go\n')
v=parem.readline()
if '<1mine>' in v:
return True
else:
return False
return False
def kasB():
parem.write('gl\n')
v=parem.readline()
if v=='<0varav>\n':
return True
else:
return False
def kasPall():
for i in range(3):
parem.write('gb\n')
v=parem.readline()
if '<b:1>' in v:
return True
elif '<b:0>' in v:
return False
else:
pass
return False
def annatuld(tugevus):
saada(coil,'k'+str(tugevus))
def otsi():
joonemomendid = cv2.moments(dilatejoon)
#kui must joon ees on, siis edasi ei soida
if joonemomendid['m01'] < 180:
stop()
ymberpoord()
if joonemomendid['m01'] < 160:
soidavasakule(kiirus)
elif joonemomendid['m01'] >= 160:
soidaparemale(kiirus)
else:
soidaedasi(kiirus)
def leiaTsenter(contours):
x=0
y=0
maxArea = 0
for contour in contours:
moments = cv2.moments(contour, True)
#kui moment 0, siis eira
if (len(filter(lambda i: i==0, moments.values())) > 0):
continue
if moments['m00'] > maxArea:
maxArea = moments['m00']
x=moments['m10']/moments['m00']
y=moments['m01']/moments['m00']
center=(x,y)
center = map(lambda i:int(round(i)), center)
return center
else:
return None
def joonistaTsenter(center, image):
cv2.circle(image, tuple(center), 20, cv.RGB(255,0,0),2)
def kasJoon(center):
x = center[0]
y = center[1]
while y < 238:
y+=1
if dilatejoon[y, x]==255:
return True
return False
c = cv2.VideoCapture(0)
c.set(3, 320) #Pildi korgus
c.set(4, 240) #Laius
pall_min = [0,183,130]
pall_max = [9,255,255]
sinine_min = [107,154,54]
sinine_max = [115,231,133]
kollane_min = [19,121,125]
kollane_max = [34,175,211]
must_min = [24, 50, 67]
must_max = [54, 87, 106]
tume = pall_min
hele = pall_max
kernel = np.ones((5,5), "uint8") #dilate jaoks
dilatekernel = np.ones((5,5), "uint8") #dilate jaoks
kiirus = 40
while(1):
saada(coil, 'c')
saada(coil, 'p')
start=time.time()
_,f = c.read()
hsv = cv2.cvtColor(f,cv2.COLOR_BGR2HSV)
#Mis varvi on vaja taga ajada
if kasPall():
if kasSin():
tume = sinine_min
hele = sinine_max
elif kasKol():
tume = kollane_min
hele = kollane_max
else:
tume = pall_min
hele = pall_max
thresh = cv2.inRange(hsv,np.array(tume), np.array(hele))
jooned = cv2.inRange(hsv, np.array(must_min), np.array(must_max))
dilatejoon = cv2.dilate(jooned, dilatekernel)
dilate = cv2.dilate(thresh, kernel)
kontuurimaagia = np.zeros((240,320, 3), np.uint8)
contours, hierarchy = cv2.findContours(dilate, cv.CV_RETR_EXTERNAL, cv.CV_CHAIN_APPROX_NONE)
cv2.drawContours(kontuurimaagia, contours, -1, cv2.cv.RGB(0,255,0),-1)
center = leiaTsenter(contours)
#kui on joon ees, siis kohe ots ringi
if dilatejoon[210,160] == 255:
stop()
ymberpoord()
#Liikumise loogeka
elif center != None:
if kasJoon(center) == False:
joonistaTsenter(center, kontuurimaagia)
joonemomendid = cv2.moments(dilatejoon)
if joonemomendid['m01'] < center[1]:
continue
if center[0] > 180:
if kasPall():
vasak.write('sd-25\n')
else:
soidaparemale(kiirus)
elif center[0] < 140:
if kasPall():
vasak.write('sd25\n')
else:
soidavasakule(kiirus)
else:
if kasPall():
stop()
annatuld(32000)
else:
soidaedasi(kiirus)
else:
if kasPall():
joonistaTsenter(center, kontuurimaagia)
joonemomendid = cv2.moments(dilatejoon)
if joonemomendid['m01'] < center[1]:
continue
if center[0] > 180 and center[0] < 140:
stop()
annatuld()
elif center[0] > 180:
vasak.write('sd-25\n')
elif center[0] < 140:
vasak.write('sd25\n')
else:
ymberpoord()
else:
soidaedasi(kiirus)
if kasPall():
soidaparemale(kiirus)
else:
soidavasakule(kiirus)
## print("FPS: " + str(int(1/(time.time()-start))))
cv2.imshow("Susivisoon", kontuurimaagia)
## cv2.imshow("Joonevisioon", dilatejoon)
cv2.imshow("Reaalvisoon", f)
if cv2.waitKey(2) >= 0:
stop()
cv2.destroyAllWindows()
c.release()
break
|
UTF-8
|
Python
| false | false | 2,013 |
16,595,753,634,252 |
35dccf666c355a2519cac4b29e72b0fa1598d3db
|
e256758900b7521094dd54476afe33fdda9ca5bc
|
/bankapp/bank_app.py
|
33ac3dba54c6dff21cfa53ccb7936cdf3e15e891
|
[] |
no_license
|
ronzohan/bank_app
|
https://github.com/ronzohan/bank_app
|
00ddacd2f9841677229a4e917beb3c3f64f4d873
|
cf64efc8ca7ed87d1c1b0a31e98dfeb7dc94c65c
|
refs/heads/master
| 2021-01-21T01:59:49.050190 | 2014-12-12T10:06:14 | 2014-12-12T10:06:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Controller file
"""
from flask import Flask, render_template, request
from bankapp.bank import Bank
from bankapp.account import Account
APP = Flask(__name__)
BANK = Bank()
@APP.route('/')
def hello_world():
"""
View for the index.html
"""
account_number = request.args.get('account_number')
balance = BANK.get_account_balance(account_number)
return render_template('index.html', balance=balance)
if __name__ == '__main__':
ACCOUNT = Account('1111', 50)
BANK.add_account(ACCOUNT)
APP.run(debug=True)
|
UTF-8
|
Python
| false | false | 2,014 |
2,954,937,529,797 |
2c342a83e867266493aae75992f9915458ebf0b3
|
e0f504631e0771c05cf214b3e14e166a65f7efaf
|
/preSVM.py
|
057784ae372b6d4929afab71331588919f08c512
|
[
"BSD-2-Clause"
] |
permissive
|
parenthetical-e/pyBV
|
https://github.com/parenthetical-e/pyBV
|
89349c146d2c2df460d2effa1e2293f75abeb406
|
279f27c5e175110b947df2e0019f234532b32b31
|
refs/heads/master
| 2020-03-30T10:11:28.299900 | 2012-01-06T04:39:00 | 2012-01-06T04:39:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
A set of functions for preprocessing Brainvoyager data (in the .nii format)
in preperation for SVM analysis using PyML.
"""
import numpy as np
import nifti as nf
import Image as im
import os as os
import re as re
import PyML as ml
def readLabList(fname):
"""
Read in the labelList files created by prt2Label() from Brainvoyager
.prt files.
- Returns two arrays in a tuple, one for volumes (as int)
one for the labels (str).
"""
vols,labels = np.loadtxt(fname,delimiter=',',unpack=True,dtype=str)
vols = np.array(vols,dtype='int')
return vols, labels
def resize_worker(array_2d,dims=()):
"""
Does the heavy lifting for resizeVtc() and resizeVmr();
It is not desinged for external use.
- Returns a 2d array, resized to 'dims' by nearest neighbor
interpolation.
"""
dims_swapped = (dims[1],dims[0])
# for PIL compatibility
## convert to image, resize, and back to array
img = im.fromarray(np.uint32(array_2d),"I")
img_resize = img.resize(dims_swapped, im.NEAREST)
array_2d_resize = np.array(img_resize)
## Debug:
print("array_2d size: {0}".format(array_2d.shape))
print("Before resize as img: {0}".format(img.size))
print("Target dims: {0}".format(dims))
print("Target swapped dims: {0}".format(dims_swapped))
print("After resize as img: {0}".format(img_resize.size))
print("array_2d_resize size: {0}".format(array_2d_resize.shape))
return array_2d_resize
def resize3d(array3d,intial_dim,final_dim):
"""
Loops throuh z then x resizing a given 3d array
via resize_worker(), which is required.
Internal use only.
"""
print("Starting z loop...")
first_resize = np.zeros((final_dim[0:2]+intial_dim[2:]),dtype="uint32")
first_dim = final_dim[0:2]
for z in range(intial_dim[2]):
first_resize[:,:,z] = resize_worker(array3d[:,:,z],first_dim)
print("Starting y loop...")
second_resize = np.zeros(final_dim,dtype="uint32")
second_dim = final_dim[1:]
for x in range(final_dim[0]):
second_resize[x,:,:] = resize_worker(first_resize[x,:,:],second_dim)
print("[0] => {1}".format(intial_dim,final_dim))
return second_resize
def downsampleVmr(vmr, by):
"""
Uses PIL (python imaging library) to resize the x, y and z dimensions
of the given vmr to match that of vtc.
-Returns: a the resized vmr in a NiftiImage object.
"""
intial_dim = vmr.data.shape
# TODO test for int? How to deal with fractions here...
final_dim = (by[0]/intial_dim[0],by[1]/intial_dim[1],by[2]/intial_dim[0])
resized_vmr_data = resize3d(vmr.data,intial_dim,final_dim)
return nf.NiftiImage(resized_vmr_data,vmr.header)
def upsampleVtc(vtc, by, vol):
"""
[9/1/2011]: This function replaces roiReduce(), which has been depricated;
input/output data formats are changed between the two;
the volume in the vtc data to act on was added.
resizeVtc() takes a vmr and vtc NiftiImage() objects (converted to .nii
files of course), and alters the x, y, z, dimentions of the vtc to
match the vmr by nearest neighbor interpolation.
- Requires resize_worker()
NOTE: Upscaled vtc data can become VERY large. For example,
270 volumes at 256x256x256 occupies over 18 GBs.
Returns: the resolution altered volume for in vtc stored as a
NiftiImage object, the header is dropped.
"""
print("Vol: {0}".format(vol))
intial_dim = vtc.data.shape[1:]
final_dim = (intial_dim[0]*by[0], intial_dim[1]*by[1],intial_dim[2]*by[2])
resized_vtc_data = resize3d(vtc.data[vol,:,:,:],intial_dim,final_dim)
return nf.NiftiImage(resized_vtc_data,vtc.header)
def createRef(nii_data):
"""
Creates an reference space of sequential intergers simplifying
voxel/feature labeling in SVMLIB formatted data.
- Returns an numpy array populated with an index starting
at 0 at (0,0,0) and ending at N at (N,N,N).
"""
## Init a vtc.data(x,y,z) shaped array, then fill it with unique
## random numbers.
shapeXYZ = nii_data.shape[1:]
numCoord = np.size(nii_data[1,...])
refXYZ = np.arange(0,numCoord).reshape(shapeXYZ)
return refXYZ
def maskVtc(roi_vmr,vtc,vol):
"""
Creates a bool mask everywhere the roi data is greater than 2.
Roi needs to a nifti object resulting from imported vmr data. Then
uses the roiMask to extract that data at the appropriate voxels
from vtc (a 4D nifti object). It should probably be applied to
the vtcdata, the labelVtc, and refVtc.
- roiVmr and vtc(x,y,z) must have indentical dimensions.
- 'vol' is the volumne number of the vtc data you wish to mask
- NOTE: To convert a voi to vmr use the ROI window in Brainvoyager.
Select Options, then the 'VOI functions' tab, and select
'Create MSK...', give the file a name (it does not matter
what as this file is not needed). Once that is done go to
the File menu and select 'Save secondary VMR'. The result-
ing file is wh)t should be read in with NiftiImage().
- Returns a (t=1,x,y,z) nifit object if roi masked vtc data
w/ correct header info.
"""
roi_data = roiVmr.data
# roiData = np.round(roiData)
# VMR data contain very small non-zero deicmal entries
# (e.g. 235.000011 or 0.0000124) where there should be empty
# decimals (235.0000000).
masked_vtc = np.zeros(roi_vmr, dtype="uint32")
## create a ref (later upscaled) to find redundant voxels
ref = createRef(vtc.data[1,...])
ref_vtc = nf.NiftiImage(ref[np.newaxis,...]) # vtc NiftiImage obj
# needed for resizeVtc()
ref_resize = resizeVtc(roi_vmr,ref_vtc,1)
## Create roi_mask then falsify redundant
## entries; keep on the the first
roi_mask = roi_data > 2
for uni in np.unique(ref_resize.data):
ind = np.where(uni == ref_resize.data)
ind = (ind[0][1:],ind[1][1:],ind[2][1:])
roi_mask[ind] = False
## rescale the vtc vol to match the vmr
## mask that vols data and store in
## masked_vtc
vol_resize = resizeVtc(roi_vmr,vtc,vol)
masked_vtc = np.where(roi_mask,vol_resize,0)
# create t, set to 1
return nf.NiftiImage(masked_vtc, vtc.header)
def writePyML(vtc,labels,vol,fname):
"""
All incoming vtcs/vmrs should have the same spatial dimensions and
have been treated by identically (maskVtc)(), roiReduced(),...).
This script will flatten each of the vtcs and write them to fname
in the SVMLIB format. If fname exists data will be silently
APPENDED to it.
- Any voxels in the vtc data that are zero are not written.
[6/27/2011]: Added a filter so that as vols is iterated it does not exceed
vtc.data.shape[0] (i.e. the number of volumes in the vtc data). This is a
concern as vols was offset (during 'prt2Labels.py') to allow for the slow BOLD
response. For details see prt2Labels.py.
"""
fname = str(fname) # just in case...
outFile = open(fname,'a')
if os.path.exists(fname):
print('*Appending* SVMLIB formatted data to {0}'.format(fname))
else:
print('Writing SVMLIB formatted data to {0}'.format(fname))
## remove entries in 'vols' that would lead to
## 'vtc' dimensions being exceed as the result
## of the offseting for the BOLD delay
numVolsInVtc = vtc.data.shape[0]
volMask = vols <= numVolsInVtc
vols = vols[volMask]
for vol in range(0,np.size(vols)):
flatVtcVol = vtc.data[vol,...].flatten()
flatRefVol = refVtc.data.flatten()
## Remove empty voxels.
zeroMask = flatVtcVol != 0
flatVtcVol = flatVtcVol[zeroMask]
flatRefVol = flatRefVol[zeroMask]
#print(np.sum(zeroMask))
#print(np.size(flatVtcVol))
#print(np.size(flatRefVol))
## Build up the line for each vol/label.
## Format: 'label voxID:data voxID:data ...'
line = str(labels[vol]) + ' '
for pos in range(0,np.size(flatVtcVol)):
line = line + str(flatRefVol[pos])+ ':' + \
str(flatVtcVol[pos]) + ' '
## write the line and flush it.
line = line + '\n'
outFile.write(line)
outFile.flush()
outFile.close()
def nii_LabelListMatch(direc='.'):
"""
Finds all the .nii (i.e. nifti formatted vtc data) and labelList
files in the current or a specified directory, then sort them;
both file types should have the same prefix and thus will correctly
aligned.
- Returns a tuple of lists, nii in one, label files in the other
"""
files = os.listdir(direc)
niiFiles = []
labFiles = []
for entry in files:
if re.search('THPGLMF2c_TAL.nii$',entry):
niiFiles.append(entry)
elif re.search('_labelList.txt$',entry):
labFiles.append(entry)
if len(niiFiles) != len(labFiles):
print('Different number of nii and label files!')
print(niiFiles)
print(labFiles)
niiFiles.sort()
labFiles.sort()
return niiFiles, labFiles
def zSparse(fname):
"""
Converts a sparse formated SVMLIB data file to Vector/CSV format
and then znomralizes on a feature basis and writes out that file
as fname.
"""
znorm = ml.Standardizer()
sparse = ml.SparseDataSet(fname)
sparse.save('temp',format='csv')
vec = ml.VectorDataSet('temp',labelsColumn=1,idColumn=0)
znorm.train(vec)
vecName = 'vec_' + fname
# Verbal overwrite of priors
if os.path.exists(vecName):
print('Overwriting {0}.'.format(vecName))
os.remove(vecName)
vec.save(vecName)
def vecSplit(vecName='',fracTrain=0.3):
"""
Splits a vector/csv fotmatted SVMLIB file into training and test sets
by row according to numTrain and numTest and writes out the resulting
files. Existing files are overwritten.
q - numTest + numTrain should equal the number of lines in vecName
minus the one line header.
- Returns 'Done.'
[08/16/2011]: a major change - instead of taking numTest and numTrain
directly fracTrain (the fraction of trials destined for taining)
was added as a invocation arg. numTrain/numTest are now discovered.
An unmodified (commented out) version of the old function was left in
the source.
"""
## Calc numbers of features for
## training and testing data
vecData = ml.VectorDataSet(vecName,labelsColumn=1,idColumn=0)
numTrain = int(vecData.numFeatures * fracTrain)
numTest = vecData.numFeatures - numTrain
## Create filenames of train and test data taht will
## be written soon...
## Remove 'vec' from vecName so a more informative,
## less redundant, names can be created.
tmpName = str.split(vecName,'vec')
trainName = open('vec_train_{0}{1}'.format(numTrain,tmpName[-1]), 'w')
testName = open('vec_test_{0}{1}'.format(numTest,tmpName[-1]), 'w')
## Randomly select features for either
## training or testing.
sampler = np.asarray([1] * numTrain + [2] * numTest)
np.random.shuffle(sampler)
## Create indices from 'sampler'
featureIndex = np.arange(len(sampler))
invertTrainIndex = featureIndex[sampler == 2]
invertTestIndex = featureIndex[sampler == 1]
print('trainIndex: {0}'.format(invertTrainIndex))
print('testIndex: {0}'.format(invertTestIndex))
## Use trainIndex or testIndex to eliminate features,
## deepcopy the vecData first; eliminateFeatures()
## operates in place.
trainData = ml.VectorDataSet(vecData)
trainData.eliminateFeatures(invertTrainIndex.tolist())
trainData.save(trainName)
testData = ml.VectorDataSet(vecData)
testData.eliminateFeatures(invertTestIndex.tolist())
testData.save(testName)
#def vSplit(vecName='',numTrain=30,numTest=70):
#"""
#Splits a vector/csv fotmatted SVMLIB file into training and test sets
#by row according to numTrain and numTest and writes out the resulting
#files. Existing files are overwritten.
#- numTest + numTrain should equal the number of lines in vecName minus
#the one line header
#- Returns 'Done.'
#[08/16/2011]: major change, instead of taking numTest ad numTrain directly
#fracTrain was added as a invocation arg and numTrain/numTest are now discovered.
#The unmodified veriosn of the function is commented out below.
#"""
#import numpy as np
### 1 = train, 2 = test
### shuffle is in place
#sampler = [1] * numTrain + [2] * numTest
#sampler = np.array(sampler)
#np.random.shuffle(sampler)
#sampler = list(sampler)
#print('Sampler: ', sampler)
#vecFile = open(vecName)
#header = next(vecFile)
#trainFile = open('train_{0}_{1}.txt'.format(numTrain,vecName), 'w')
#testFile = open('test_{0}_{1}.txt'.format(numTest,vecName), 'w')
#trainFile.write(header)
#testFile.write(header)
#for samp in sampler:
#line = next(vecFile)
#if samp == 1:
#trainFile.write(line)
#trainFile.flush()
#else:
#testFile.write(line)
#testFile.flush()
#vecFile.close()
#trainFile.close()
#testFile.close()
#return 'Done.'
#def roiReduce(roiVmr, vtc):
#"""
#Pads a vmrROI with zeros, 1 in each dimension, so this
#data can be easily reduced from the vmr's 1mm^3 resolution to the
#vtc's 3mm^3 resolution. It then crops the excess so the roiVmr x,y,z
#dimensions are equal to the provided vtc.
#- Both roiVmr and vtc should be nifti objects.
#- Returns a roiVmr nifiti object w/ a correct header.
#"""
#import numpy as np
#import nifti as nf
## start over based on:
## (denoms are vtc dims x,y,z)
##In [191]: 256/46; 256/40; 256/58
##Out[191]: 5
##Out[191]: 6
##Out[191]: 4
## reshape based on these?
### Pad the roi w zeros
#padRoi = np.zeros((258,258,258))
#padRoi[1:257,1:257,1:257] = roiVmr.data
### Reduce roi size by factor of 3:
### Geedly includes any elements in the reduced ROI
### just as Brainvoyager does when doing a roi GLM.
### That is if only one element in the 1mm resolution
### is present in the 3mm resolution it is included.
#roi3mm = padRoi.reshape((258/3,3, 258/3,3, 258/3,3))
#roi3mm = roi3mm.mean(5).mean(3).mean(1)
### Crop the roi to vtc dimensions
#roi3mmShapeXYZ = np.array(roi3mm.shape)
#vtcShapeXYZ = np.array(vtc.data.shape[1:])
#excess = (roi3mmShapeXYZ - vtcShapeXYZ) / 2
#roiCrop = roi3mm[(excess[0]):(86-excess[0]),(excess[1]):(86-excess[1]),(excess[2]):(86-excess[2])]
#roiReduced = nf.NiftiImage(roiCrop,roiVmr.header)
#return roiReduced
|
UTF-8
|
Python
| false | false | 2,012 |
678,604,835,409 |
8c6781d105258f7ce892e0758bc534183193b08a
|
65ab4ea633f585410d57544e935cec658a25e1a1
|
/apps/masterindex/views.py
|
4561bb934932cf9848422ae8dab22dbf7425b482
|
[] |
no_license
|
commtrack/commtrack-core
|
https://github.com/commtrack/commtrack-core
|
50cc52bae0798072ecab149ad3076e13c43974bb
|
33ee0aaf1049c838cfae2a3b4416033ad9f093ee
|
refs/heads/master
| 2021-01-19T18:30:04.252281 | 2010-10-19T08:36:51 | 2010-10-19T08:36:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import resource
import logging
import hashlib
import settings
import traceback
import sys
import os
import uuid
import string
from datetime import timedelta
from graphing import dbhelper
from django.http import HttpResponse
from django.http import HttpResponseRedirect, Http404
from django.template import RequestContext
from django.core.exceptions import *
from django.views.decorators.http import require_http_methods
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.db import transaction
from django.db.models.query_utils import Q
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import redirect_to_login
from django.contrib.auth.forms import AdminPasswordChangeForm
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from rapidsms.webui.utils import render_to_response, paginated
from domain.decorators import login_and_domain_required
from xformmanager.models import *
from hq.models import *
from graphing.models import *
from receiver.models import *
import hq.utils as utils
import hq.reporter as reporter
import hq.reporter.custom as custom
import hq.reporter.metastats as metastats
import hq.reporter.inspector as repinspector
import hq.reporter.metadata as metadata
from reporters.utils import *
from reporters.views import message, check_reporter_form, update_reporter
from reporters.models import *
from resources.models import *
from masterindex.models import MasterIndex
resources_set = False
from reporters.utils import *
def index(req):
    """List master-index rows, optionally filtered by a search term.

    Reads 'q' (search) from the request and sort parameters via
    _get_sort_info; renders masterindex.html with a paginated queryset.
    """
    template_name = 'masterindex.html'
    columns = (
        ("resource", "Resource"),
        ("facility", "Facility"),
    )
    sort_column, sort_descending = _get_sort_info(
        req, default_sort_column="resource", default_sort_descending=True)
    # Django order_by uses a leading "-" for descending order.
    prefix = "-" if sort_descending else ""
    search_string = req.REQUEST.get("q", "")
    query = MasterIndex.objects.order_by(prefix + sort_column)
    if search_string:
        query = query.filter(
            Q(resource__name__icontains=search_string) |
            Q(facility__name__icontains=search_string))
    else:
        query = query.all()
    indexies = paginated(req, query)
    context = {
        "columns": columns,
        "indexies": indexies,
        "sort_column": sort_column,
        "sort_descending": sort_descending,
        "search_string": search_string,
    }
    return render_to_response(req, template_name, context)
def _get_sort_info(request, default_sort_column, default_sort_descending):
sort_column = default_sort_column
sort_descending = default_sort_descending
if "sort_column" in request.GET:
sort_column = request.GET["sort_column"]
if "sort_descending" in request.GET:
if request.GET["sort_descending"].startswith("f"):
sort_descending = False
else:
sort_descending = True
return (sort_column, sort_descending)
|
UTF-8
|
Python
| false | false | 2,010 |
7,395,933,703,177 |
844c0f8f29af17344efe8a2fc8e2d883d3847df8
|
921c416c908106fb00879855069ba10fa0534e9f
|
/btfAuthorization.py
|
f7bf20e2c5ce744c99f1d7a47fc2e4c982cfa889
|
[] |
no_license
|
BlueTarp/auth-python
|
https://github.com/BlueTarp/auth-python
|
b1b366253ce414439fe1db470e91dad1f9a84102
|
cfe2c9870b15ef2088a224bdc002fd371b60d98e
|
refs/heads/master
| 2020-03-01T23:55:53.139807 | 2013-11-15T20:31:41 | 2013-11-15T20:31:41 | 14,433,415 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##BTF auth example for Python 2.7.5
from btfAuthReqBuilder import *
from btfConstants import *
from btfConnector import getAuthResponseXML
from btfResponseParse import parseAuthXML
def authorizeSale(transactionId, clientId, authToken, amount, invoice="",
                  job=""):
    """Authorize a Sale transaction against BTF.

    Builds the sale request XML, posts it, and parses the response.
    Prints the outcome and returns the status string (e.g. "APPROVED").
    """
    saleXML = buildSaleXML(transactionId, clientId, authToken, amount,
                           invoice, job)
    #send xml doc to btf and get response
    responseXML = getAuthResponseXML(saleXML)
    authResponse = parseAuthXML(responseXML)
    message = authResponse[ "message" ]
    print "Sale Authorization details:"
    print "  Status(Approve/Decline): " + authResponse[ "message" ]
    if(message == "APPROVED"):
        # Sequence and approval code are only present on approval.
        print "  Auth Sequence " + authResponse[ "authseq"]
        print "  Approval code: " + authResponse[ "appcode" ]
    return message
def authorizeCredit(transactionId, clientId, authToken, amount, invoice="",
                    job="", oldInvoice=""):
    """Authorize a Credit (refund) referencing *oldInvoice*.

    Prints the outcome and returns the status string (e.g. "APPROVED").
    """
    creditXML = buildCreditXML(transactionId, clientId, authToken, amount,
                               invoice, job, oldInvoice)
    #send xml doc to btf and get response
    responseXML = getAuthResponseXML(creditXML)
    authResponse = parseAuthXML(responseXML)
    message = authResponse[ "message" ]
    print "Credit Authorization details:"
    print "  Status(Approve/Decline): " + authResponse[ "message" ]
    if(message == "APPROVED"):
        # Sequence and approval code are only present on approval.
        print "  Auth Sequence " + authResponse[ "authseq"]
        print "  Approval code: " + authResponse[ "appcode" ]
    return message
def authorizeHold(transactionId, clientId, authToken, amount, invoice="",
                  job=""):
    """Authorize a Hold (pre-authorization) for *amount*.

    Prints the outcome and returns the status string (e.g. "APPROVED").
    """
    holdXML = buildHoldXML(transactionId, clientId, authToken, amount,
                           invoice, job)
    #send xml doc to btf and get response
    responseXML = getAuthResponseXML(holdXML)
    authResponse = parseAuthXML(responseXML)
    message = authResponse[ "message" ]
    print "Hold Authorization details:"
    print "  Status(Approve/Decline): " + authResponse[ "message" ]
    if(message == "APPROVED"):
        # Sequence and approval code are only present on approval.
        print "  Auth Sequence " + authResponse[ "authseq"]
        print "  Approval code: " + authResponse[ "appcode" ]
    return message
def authorizeCollect(transactionId, clientId, authToken, amount, authSeq, invoice="",
                     job=""):
    """Collect (capture) a previously held authorization *authSeq*.

    Prints the outcome and returns the status string (e.g. "APPROVED").
    """
    collectXML = buildCollectXML(transactionId, clientId, authToken,
                                 amount, authSeq, invoice, job)
    #send xml doc to btf and get response
    responseXML = getAuthResponseXML(collectXML)
    authResponse = parseAuthXML(responseXML)
    message = authResponse[ "message" ]
    print "Collect Authorization details:"
    print "  Status(Approve/Decline): " + authResponse[ "message" ]
    if(message == "APPROVED"):
        # Sequence and approval code are only present on approval.
        print "  Auth Sequence " + authResponse[ "authseq"]
        print "  Approval code: " + authResponse[ "appcode" ]
    return message
def authorizeVoid(transactionID, clientId, authSeq, authToken):
voidXML = buildVoidXML(transactionID, clientId, authSeq, authToken)
#send xml doc to btf and get response
responseXML = getAuthResponseXML(voidXML)
authResponse = parseAuthXML(responseXML)
message = authResponse[ "message" ]
print "Collect Authorization details:"
print " Status(Approve/Decline): " + authResponse[ "message" ]
if(message == "APPROVED"):
print " Auth Sequence " + authResponse[ "authseq"]
print " Approval code: " + authResponse[ "appcode" ]
return message
|
UTF-8
|
Python
| false | false | 2,013 |
8,486,855,405,550 |
d33b1c942610e74089704850722a0b69067bf6cb
|
777eed1586c11263719d260562ae539f8c818e8d
|
/shortenersoda/shortener/test/test_views.py
|
e337debd4abb17bb1415d133f9024a56773b6c48
|
[] |
no_license
|
daniellbastos/soda
|
https://github.com/daniellbastos/soda
|
60f78160f56f2a9433ebc61b15961b3cace0355e
|
1d56fec2edf6d83a801d97cb702767956f99de07
|
refs/heads/master
| 2016-08-04T14:17:36.219126 | 2014-08-06T04:57:08 | 2014-08-06T04:57:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#coding: utf-8
from django.test import TestCase, Client
from django.core.urlresolvers import reverse as r
from shortener.models import ShortenerURL
__author__ = 'Daniel'
class TestView(TestCase):
    """GET the shortener home page; verify status, template, and form markup."""
    def setUp(self):
        self.cli = Client()
        self.resp = self.cli.get(r('shortener:home'))
    def test_status_code(self):
        'Check URL returning status code 200'
        self.assertEqual(200, self.resp.status_code)
    def test_template(self):
        'Check that template shortener/home.html was used'
        self.assertTemplateUsed('shortener/home.html', self.resp)
    def test_render_form(self):
        'Check the rendered HTML contains the URL submission form'
        self.assertContains(self.resp, '<form')
        self.assertContains(self.resp, 'csrfmiddlewaretoken')
        # Exactly two inputs: the URL field and the submit button.
        self.assertContains(self.resp, '<input', 2)
        self.assertContains(self.resp, 'type="url"')
        self.assertContains(self.resp, 'type="submit"')
class TestPostView(TestCase):
    """POST a URL to the shortener and expect a redirect to the detail page."""
    def setUp(self):
        self.data = {
            'url': 'http://daniellbastos.com.br'
        }
        self.cli = Client()
        self.resp = self.cli.post(r('shortener:post'), self.data)
    def test_status_code(self):
        'Check URL returning status code 302 (redirect)'
        self.assertEqual(302, self.resp.status_code)
class TestDetailView(TestCase):
    """GET the detail page of a saved ShortenerURL and verify its context."""
    def setUp(self):
        self.obj = ShortenerURL(
            url='http://daniellbastos.com.br'
        )
        self.obj.save()
        self.cli = Client()
        self.resp = self.cli.get(self.obj.get_absolute_url())
    def test_status_code(self):
        'Check URL returning status code 200'
        self.assertEqual(200, self.resp.status_code)
    def test_obj_context(self):
        'Check that a ShortenerURL instance is in the context'
        self.assertIsInstance(self.resp.context['obj'], ShortenerURL)
    def test_created_shortener_url(self):
        'Check that the created ShortenerURL object is the one rendered'
        obj = self.resp.context['obj']
        self.assertEqual(1, obj.id)
class TestRedirectingView(TestCase):
    """Resolve a shortened hash URL and verify the right object is served."""
    def setUp(self):
        obj = ShortenerURL(
            url='http://daniellbastos.com.br'
        )
        obj.save()
        self.hash = obj.hash_shortened
        self.cli = Client()
        self.resp = self.cli.get(r('shortener:redirecting', args=[self.hash]))
    def test_status_code(self):
        'Check URL returning status code 200'
        self.assertEqual(200, self.resp.status_code)
    def test_obj_context(self):
        'Check that a ShortenerURL instance is in the context'
        self.assertIsInstance(self.resp.context['obj'], ShortenerURL)
    def test_created_shortener_url(self):
        'Check that the object looked up by hash matches the created one'
        obj = self.resp.context['obj']
        self.assertEqual(self.hash, obj.hash_shortened)
|
UTF-8
|
Python
| false | false | 2,014 |
5,763,846,160,645 |
249910005cf21ccc97d4d881769980769665c6c4
|
ca94bf202c418ba6ccb09ed0d4f7fdc2617847c7
|
/cfg/cmdJoyPID.cfg
|
61e1fe578d696818eb5c56282a798052de2553bb
|
[
"GPL-2.0-only"
] |
non_permissive
|
omwdunkley/crazyflieROS
|
https://github.com/omwdunkley/crazyflieROS
|
5cb0515f0d5e6a78b532299f81d89537f90ab651
|
319c26541071c4a7c9281aa71ffc68db5322439c
|
refs/heads/master
| 2020-12-25T19:03:36.540213 | 2014-12-13T22:40:08 | 2014-12-13T22:40:08 | 16,998,058 | 83 | 39 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
PACKAGE='crazyflieROS'
import roslib
roslib.load_manifest(PACKAGE)
from dynamic_reconfigure.parameter_generator import *
gen = ParameterGenerator()
# Name Type Level Description Default Min Max
source_enum = gen.enum([ gen.const("Qualisys", int_t, 0, "A small constant"),
gen.const("Camera", int_t, 1, "A medium constant"),
gen.const("Synthetic", int_t, 2, "A large constant")],
"An enum to set size")
dist_enum = gen.enum([ gen.const("Linear", int_t, 0, "A small constant"),
gen.const("ATAN", int_t, 1, "A medium constant"),
gen.const("ASIN", int_t, 2, "A large constant")],
"An enum to set size")
preset_enum = gen.enum([ gen.const("Custom", int_t, 0, "A small constant"),
gen.const("Aggressive", int_t, 1, "A medium constant"),
gen.const("Passive", int_t, 2, "A large constant"),
gen.const("Linear", int_t, 3, "A large constant")],
"An enum to set size")
#JOYSTICK
gen.add("trim_roll", double_t, 0, "", 0, -20, 20)
gen.add("trim_pitch", double_t, 0, "", 0, -20, 20)
gen.add("max_angle", double_t, 0, "", 30, 10, 60)
gen.add("max_yawangle", double_t, 0, "", 200, 60, 2*360)
gen.add("max_thrust", double_t, 0, "", 100, 50, 100)
gen.add("min_thrust", double_t, 0, "", 25, 10, 50)
gen.add("slew_limit", double_t, 0, "", 45, 1, 100)
gen.add("slew_rate", double_t, 0, "", 30, 1, 100)
gen.add("yaw_joy", bool_t, 0, "Allow joystick use for yaw", True)
#SETTING
gen.add("Control", bool_t, 0, "Control instead of autohover", True)
gen.add("SetCurrent", bool_t, 0, "Set goal to most recent configuration", False)
gen.add("SetCurrentAuto", bool_t, 0, "When initiating auto control, reset goal to current configuration", False)
gen.add("LiveUpdate", bool_t, 0, "Update the goal as it changes here in real time vs only updating the goal with SET", True)
gen.add("Set" , bool_t, 0, "Set the goal to the current settings", False)
gen.add("Source", int_t, 0, "Chose source of location information", 0, 0, 2, edit_method=source_enum)
gen.add("Delay", double_t, 0, "Artificial delay in the position", 0, 0, 2)
gen.add("WandControl", bool_t, 0, "Control instead of autohover", False)
#GOAL
gen.add("PIDPreset", int_t, 0, "Chose source of location information", 0, 0, 10, edit_method=preset_enum)
gen.add("x", double_t, 0, "", 0, -3, 3)
gen.add("y", double_t, 0, "", 0, -3, 3)
gen.add("z", double_t, 0, "", 0, 0, 3)
gen.add("rz", double_t, 0, "", -90, -180, 180)
gen.add("xyControl", bool_t, 0, "", True)
gen.add("yawControl", bool_t, 0, "", False)
gen.add("thrustControl", bool_t, 0, "", True)
#GAINS
gen.add("DistFunc", int_t, 0, "Chose source of location information", 0, 0, 2, edit_method=dist_enum)
gen.add("Response", double_t, 0, "", 1.0, 0, 5)
gen.add("Pxy", double_t, 0, "", 17.7, 0, 30)
gen.add("Ixy", double_t, 0, "", 0.0, 0, 30)
gen.add("Dxy", double_t, 0, "", 5, 0, 30)
gen.add("Pyaw", double_t, 0, "", 3.3, 0, 30)
gen.add("Iyaw", double_t, 0, "", 0, 0, 30)
gen.add("Dyaw", double_t, 0, "", 0, 0, 30)
gen.add("Pz", double_t, 0, "", 20, 0, 50)
gen.add("Iz", double_t, 0, "", 5, 0, 50)
gen.add("Dz", double_t, 0, "", 8, 0, 50)
#LIMITS
gen.add("RP_maxAngle", double_t, 0, "", 25, 0, 45)
gen.add("Y_maxVel", double_t, 0, "", 200, 0, 360)
gen.add("z_minThrust", double_t, 0, "", 50, 0, 100)
gen.add("z_maxThrust", double_t, 0, "", 90, 0, 100)
gen.add("z_baseThrust", double_t, 0, "", 73, 0, 100)
# Experiment
#gen.add("XY_maxVel", double_t, 0, "", 0.8, 0, 2)
#gen.add("Z_maxVel", double_t, 0, "", 0.8, 0, 2)
exit(gen.generate(PACKAGE, "cmdJoyPID", "cmdJoyPID"))
|
UTF-8
|
Python
| false | false | 2,014 |
12,360,915,928,614 |
d18ef09978ec7bc34b9f4db3062ae5b55948d81e
|
289a727fb4cb6edb39ed3fc5dcf7eae5e3eb4b45
|
/src/applications/vppmgr/controllers/apps.py
|
bbf740438ce1db33bfe2d6dc6dc5130ec5eeef0b
|
[
"LicenseRef-scancode-public-domain",
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"LGPL-2.0-or-later",
"BSD-3-Clause",
"GPL-2.0-only",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-free-unknown"
] |
non_permissive
|
ksdtech/vppmgr
|
https://github.com/ksdtech/vppmgr
|
89f3ee601e8faa5eff650f48948a2d235f6bd50c
|
d9d6db2415499d919396adb26777c9103198faac
|
refs/heads/master
| 2016-09-05T21:27:38.059186 | 2012-02-02T04:35:16 | 2012-02-02T04:35:16 | 3,279,369 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import re
def _find_app():
    """Look up the app row named by the first URL argument; 404 when absent."""
    record = db.app(request.args(0))
    if record is not None:
        return record
    raise HTTP(404)
def _filtered_by_group():
    """Return (app_rows, group_rows, group_id, group_name) honoring the
    optional 'group' request var; group_id is reset to None when it does
    not match any known group."""
    group_id = request.vars['group']
    group_name = None
    app_rows = vpp_manager.select_apps(group_id)
    group_rows = vpp_manager.select_groups()
    if group_id is not None:
        wanted = int(group_id)
        group_name = next((g.name for g in group_rows if g.id == wanted), None)
        if group_name is None:
            # Unknown group id: fall back to "no filter".
            group_id = None
    return (app_rows, group_rows, group_id, group_name)
def index():
    """List apps, optionally filtered by the 'group' request var."""
    apps, groups, group, group_name = _filtered_by_group()
    return {'apps': apps, 'groups': groups,
            'group': group, 'group_name': group_name}
def show():
    """Read-only detail view for a single app (404 when not found)."""
    record = _find_app()
    form = SQLFORM(db.app, record, readonly=True)
    return dict(app=record, form=form)
def provision():
    """Assign selected apps to selected devices for a user.

    On POST: validates the user / device / app selections, then queues and
    sends the VPP provisioning message, flashing either the outcome or the
    accumulated validation errors.  Always re-renders the provisioning form.
    """
    if request.env.request_method == 'POST': # postback
        errors = []
        user_email = vpp_manager.domain_user(request.post_vars['user_email'])
        if user_email is None:
            errors.append('No user specified')
        elif db(db.auth_user.email == user_email).count() < 1:
            errors.append('No such user: %s' % (user_email))
        devices = _rows_from_selection(db.device, request.post_vars['devices'],
                                       'devices', errors)
        apps = _rows_from_selection(db.app, request.post_vars['apps'],
                                    'apps', errors)
        if len(errors) == 0:
            success = vpp_manager.queue_and_send_message(user_email, devices, apps)
            if success:
                response.flash = "Message sent"
            else:
                response.flash = "Problem sending message"
        else:
            response.flash = "\n".join(errors)
    app_rows, group_rows, group_id, group_name = _filtered_by_group()
    user_rows = db().select(db.auth_user.ALL, orderby=db.auth_user.email)
    device_rows = vpp_manager.select_devices()
    return dict(apps=app_rows, groups=group_rows, group=group_id, group_name=group_name, users=user_rows, devices=device_rows)

def _rows_from_selection(table, ids, noun, errors):
    """Resolve a multi-select post var into rows of *table*.

    Appends a 'No <noun> selected' / 'No <noun> found' message to *errors*
    on failure and returns None; otherwise returns the selected rows.
    A multiple select posts a single string when only one item is chosen,
    so normalize to a list first.  Also guards against a missing var
    (None), which previously raised TypeError on len().
    """
    if isinstance(ids, str):
        ids = [ids]
    if not ids:
        errors.append('No %s selected' % noun)
        return None
    rows = db(table.id.belongs(ids)).select()
    if len(rows) == 0:
        errors.append('No %s found' % noun)
        return None
    return rows
def import_one():
    """Refresh a single app's metadata from VPP, flash a summary, go home."""
    record = _find_app()
    counts = vpp_manager.update_apps([record])
    session.flash = "Updated %d free and %d vpp apps" % (counts['free'], counts['vpp'])
    redirect(URL('index'))
def import_all():
    """Repopulate the whole app table from VPP, flash a summary, go home."""
    counts = vpp_manager.populate_app_table()
    session.flash = "Updated %d free and %d vpp apps" % (counts['free'], counts['vpp'])
    redirect(URL('index'))
|
UTF-8
|
Python
| false | false | 2,012 |
5,823,975,670,339 |
71429cec80e3b903d6a8496c89eed95de3e7e426
|
c27dde629aa15b4912e77dd13bf601c27c1e7e54
|
/src/dtalk/conf/__init__.py
|
97c209c0f19e8da42388e5c098642ba55a15d541
|
[] |
no_license
|
lovesnow/deepin-talk
|
https://github.com/lovesnow/deepin-talk
|
77ebc4ee2bfc4c906c700a28cbf83ca8268aba4e
|
892baed8096219117eb41a3a947036235c3fe2d9
|
refs/heads/master
| 2021-01-22T17:57:30.071970 | 2014-04-23T01:04:26 | 2014-04-23T01:04:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 ~ 2014 Deepin, Inc.
# 2011 ~ 2014 Hou ShaoHui
#
# Author: Hou ShaoHui <[email protected]>
# Maintainer: Hou ShaoHui <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from functools import partial
from dtalk.utils import xdg
# from dtalk.utils import pyini
from dtalk.conf.ini import NotifyIni
# Path of the user's settings override; presumably None/empty when the file
# does not exist yet (check_exists=True) -- TODO confirm xdg.get_config_path.
user_settings_file = xdg.get_config_path("settings.ini", check_exists=True)
# Start from the packaged defaults shipped next to this module.
settings = NotifyIni(os.path.join(xdg.get_parent_dir(__file__), "default_settings.ini"))
if user_settings_file:
    # Layer the user's saved values over the defaults.
    settings.read(filename=user_settings_file)
# NOTE(review): when no user settings file exists, this binds write() to a
# falsy filename -- verify settings.save handles that case.
settings.write = partial(settings.save, filename=user_settings_file)
DEBUG = True
|
UTF-8
|
Python
| false | false | 2,014 |
11,055,245,829,831 |
da6707b55fd71a5b24792712b331f8b568e05bd7
|
e9db0ad719ddc778584c1d89c9fe300415aa424f
|
/hdf5_action.py
|
3af9baf875943c6b796972abc96b3c7aeeac8b3e
|
[] |
no_license
|
jgrebel1/cubereader
|
https://github.com/jgrebel1/cubereader
|
a07a306e9c37481eef7e2683c6dc48a6fd4a96e1
|
47ded2cc4635bb3f6383dd7c9b47077f52d6b36f
|
refs/heads/master
| 2021-01-23T00:06:34.817397 | 2014-11-06T19:34:02 | 2014-11-06T19:34:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 30 15:08:15 2013
@author: JG
"""
import shutil
import numpy as np
import os
import sys
#try:
from PySide import QtGui
#except:
# pass
import h5py
import threading
#project specific items
import generic_thread
class HDF5Action(object):
"""
Applies action to HDF5 file images from CubeReader or hyperspy.
The program starts the action in a new thread
"""
def __init__(self, filename, output_filename, action, images=True):
print 'Reading file %s'%filename
self.progress_bar = self.initialize_progress_bar(filename, images)
print 'Saving file %s'%output_filename
#self.start_in_new_thread(self.hdf5_images,
# output_filename,
# action,
# images)
self.threadPool = []
self.threadPool.append(generic_thread.GenericThread(self.hdf5_images,
filename,
output_filename,
action,
images))
self.threadPool[len(self.threadPool)-1].start()
def get_title(self, hdf5):
g = lambda x: x
title = str(hdf5['Experiments'].visit(g))
return title
def get_ycube(self,filename):
self.input_hdf5 = h5py.File(filename, 'r')
title = self.get_title(self.input_hdf5)
ycube = self.input_hdf5["Experiments/%s/data"%title]
return ycube
def get_dimensions(self, filename):
ycube = self.get_ycube(filename)
input_rows, input_columns, input_slices = np.shape(ycube[...])
return input_rows, input_columns, input_slices
def hdf5_images(self, filename, output_filename, action, images=True):
"""
makes a temporary hdf5 file, performs action on image slices into the file and
then uses the temp file as data for the final output.
"""
self.stop = False
ycube = self.get_ycube(filename)
print 'output_filename is', output_filename
self.temp_hdf5 = h5py.File(output_filename +'temporary','w')
self.read_into_temp_hdf5(self.temp_hdf5,
ycube,
action,
images)
if not self.stop:
self.generate_output(filename, output_filename, self.temp_hdf5)
self.temp_hdf5.close()
os.remove(output_filename +'temporary')
self.input_hdf5.close()
self.close()
def initialize_progress_bar(self, filename, images=True):
try:
columns, slices = self.get_dimensions(filename)[1:]
if images:
number = slices
else:
number = columns
progress_bar = self.progress_bar(number)
return progress_bar
except Exception as e:
print e
def progress_bar(self, maximum):
"""
progress bar window with stop button
"""
self.progress_window = QtGui.QWidget()
self.progress_window.setWindowTitle("Progress")
progress_bar = QtGui.QProgressBar()
progress_bar.setMaximum(maximum)
button_stop= QtGui.QPushButton("&Stop")
button_stop.clicked.connect(self.stop_now)
box = QtGui.QVBoxLayout()
box.addWidget(progress_bar)
box.addWidget(button_stop)
self.progress_window.setLayout(box)
self.progress_window.show()
return progress_bar
def read_into_temp_hdf5(self, temp_hdf5, ycube, action, images = True):
shape = self.get_new_shape(ycube, action, images)
temp_cube = temp_hdf5.create_dataset('cube', shape)
if images:
self.iterate_images(ycube, temp_cube, action)
else:
self.iterate_spectrums(ycube, temp_cube, action)
try:
self.progress_window.close()
except Exception as e:
print e
def get_new_shape(self,ycube, action, images):
rows,columns,specs = ycube.shape
if images:
test_array = ycube[:,:,0]
else:
test_array = ycube[0,0,:]
new_array = action(test_array)
shape = new_array.shape
return new_shape
def iterate_images(self,ycube, temp_cube, action):
slices = np.shape(ycube)[2]
for input_slice in np.arange(slices):
if self.stop:
return
image = ycube[:,:,input_slice]
new_image = action(image)
temp_cube[:,:,input_slice] = new_image
try:
self.update_progress(input_slice)
except Exception as e:
print e
def iterate_spectrums(self, ycube, temp_cube, action):
rows, columns = np.shape(ycube)[:2]
for row in np.arange(rows):
for column in np.arange(columns):
if self.stop:
return
spectrum = ycube[row, column,:]
new_spectrum = action(spectrum)
# shape, = np.shape(temp_cube[row,column,:])
if new_spectrum.shape ==():
temp_cube[row,column] = new_spectrum
else:
temp_cube[row,column,:] = new_spectrum #[:shape]
try:
self.update_progress(column)
except Exception as e:
print e
def generate_output(self,filename, output_file, temp_hdf5):
shutil.copy(filename, output_file)
output_hdf5 = h5py.File(output_file, 'r+')
title = self.get_title(output_hdf5)
del output_hdf5["Experiments/%s/data"%title]
output_hdf5.create_dataset("Experiments/%s/data"%title,
data = temp_hdf5['cube'])
output_hdf5.close()
def start_in_new_thread(self, function, *args):
self.threadPool = []
for count, thing in enumerate(args):
print '{}.{}'.format(count,thing)
self.threadPool.append(generic_thread.GenericThread(function,*args))
self.threadPool[len(self.threadPool)-1].start()
return self.threadPool
#threading.Thread(target=function, args=args)
def stop_now(self):
self.stop = True
self.input_hdf5.close()
self.temp_hdf5.close()
self.progress_window.close()
def update_progress(self, value):
#update less frequently if it slows down process
#if value%10 == 0:
self.progress_bar.setValue(value)
#main function to start up program
def main(filename=None):
    """Create (or reuse) the Qt application instance and launch HDF5Action.

    NOTE(review): HDF5Action.__init__ also requires output_filename and
    action; calling it with *filename* alone raises TypeError -- confirm
    the intended entry-point signature before relying on this function.
    """
    app = QtGui.QApplication.instance()
    if app is None:
        app = QtGui.QApplication(sys.argv)
    # Both branches of the original were identical once an application
    # instance existed, so they are merged here.
    form = HDF5Action(filename)
    app.exec_()
    return form
|
UTF-8
|
Python
| false | false | 2,014 |
7,937,099,603,904 |
29e1d49a6dcbcc0196a0b580fc0bf9788f6794ab
|
02a7d02b7bff39b72e8549f49b2c0c339150a76f
|
/tests/p3/while2.py
|
501faa6070190d3fce1b1ab3ff274f0fcb7296ca
|
[] |
no_license
|
scottpledger/compyler-csci4555
|
https://github.com/scottpledger/compyler-csci4555
|
b5a99e17aa4f020aafcc222c55ac98dae387f6d4
|
18e947271d275939e7f255f4d8372b285a1694a9
|
refs/heads/master
| 2021-01-23T13:17:43.103062 | 2012-12-21T09:20:11 | 2012-12-21T09:20:11 | 5,603,160 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Compiler test input (P3 subset): reads i, x, n, then loops until i == n.
# Each iteration reads w and prints either x or a freshly read value,
# then echoes the pre-increment value of i.
i = input()
x = input()
n = input()
while not (i == n):
    w = input()
    if w:
        print x
    else:
        y = input()
        print y
    z = i
    i = i + 1
    print z
|
UTF-8
|
Python
| false | false | 2,012 |
1,400,159,338,886 |
3e82d90d3ad9c294603da0298f8228201590cafb
|
49ce0b99a1bb43f363a1a1523dddb1a9a3fb6ae7
|
/client/cryptZoobar.py
|
1eac8fc754cec98f1cb4bff79603b6ad8032f34d
|
[] |
no_license
|
benpatsai/Searchable-Encryption
|
https://github.com/benpatsai/Searchable-Encryption
|
b5f4b303a01321876cb421be7671bffc6b1486bc
|
ca182828a2f7a8b2e437eb150d7fcc150eea67f8
|
refs/heads/master
| 2016-09-06T18:40:00.447575 | 2013-12-09T06:50:03 | 2013-12-09T06:50:03 | 14,760,365 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import requests
import cmd, os, shutil
import time
import json
from sa import*
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
from Crypto.Hash import MD5
from Crypto.Cipher import AES
from Crypto import Random
block_length = 100
s = requests.session()
ID = ''
PW = ''
MPW = ''
profile = {}
#salt=random_str(10)
#print salt
def search_hash(hashed_query, filename):
    """POST a salted-hash keyword lookup for *filename*; return the
    (encrypted) index from the server, or -1 when not present."""
    global s, profile, ID
    url = "http://128.30.93.9:8080/zoobar/index.cgi/enc_search"
    response = s.post(url, data={"keyword": hashed_query,
                                 "filename": filename})
    return json.loads(response.text)['idx']
def get_encrypted_blocks(filename, start, end):
    """Fetch the encrypted data blocks [start, end] of *filename* from the server."""
    global s, profile, ID
    url = "http://128.30.93.9:8080/zoobar/index.cgi/get_enc_block"
    response = s.post(url, data={"start": start,
                                 "end": end,
                                 "filename": filename})
    return json.loads(response.text)['enc_block']
def search_substring(query,salt,key_index,block_length,block_index_iv,key_blocks, filename):
    """Encrypted substring search over *filename*.

    First tries an exact salted-hash lookup of *query*.  Failing that,
    binary-searches over prefix lengths to find the longest prefix the
    server's index knows, decrypts its position, fetches the covering
    encrypted blocks, decrypts them locally, and verifies the full query
    actually occurs there.  Returns the match index, or -1.
    """
    global s, profile, ID
    # Exact hit: whole query is in the index.
    enc_index=search_hash(hash_with_salt(query,salt), filename)
    if enc_index!=-1:
        return decrypt_integer(enc_index,block_index_iv,key_index)
    # Binary search over prefix lengths; loop exits when prefix of length
    # prefix_len is NOT indexed but prefix of length prefix_len-1 IS.
    prefix_len=len(query)
    delta=next_greater_power_of_2(prefix_len)/2
    enc_shorter_index=search_hash(hash_with_salt(query[0:prefix_len-1],salt),filename)
    while not((enc_index==-1) and (enc_shorter_index!=-1)):
        if enc_shorter_index == -1:
            prefix_len-=delta
        if enc_index > -1:
            prefix_len+=delta
        delta=delta/2
        enc_index=search_hash(hash_with_salt(query[0:(0 if prefix_len<0 else prefix_len)],salt), filename)
        # A non-positive prefix length is treated as trivially indexed (0).
        enc_shorter_index=(0 if (prefix_len - 1 <= 0 ) else (search_hash(hash_with_salt(query[0:(prefix_len-1)],salt), filename)))
    # Decrypt the position of the longest indexed prefix.
    index=decrypt_integer(enc_shorter_index,block_index_iv,key_index)
    start_index=index
    end_index=start_index+len(query)
    start_block_index=start_index/block_length
    end_block_index=(end_index-1)/block_length
    encrypted_blocks=get_encrypted_blocks(filename, start_block_index,end_block_index)
    decrypted_string=''
    for (index1,encrypted_block) in enumerate(encrypted_blocks):
        block_id=start_block_index+index1
        decrypted_string+=decrypt_block(encrypted_block,block_id,block_index_iv,key_blocks)
    # Verify the candidate position really contains the full query.
    check_start_index=start_index-start_block_index*block_length
    check_end_index=check_start_index+len(query)
    to_compare=decrypted_string[check_start_index:check_end_index]
    if (to_compare==query):
        return index
    else:
        return -1
def RSA_key_gen():
    """Generate a fresh 2048-bit RSA key; return [n, e, d, p, q, u]."""
    key = RSA.generate(2048)
    components = ('n', 'e', 'd', 'p', 'q', 'u')
    return [getattr(key, name) for name in components]
def upload_profile(username):
    """Encrypt the in-memory profile and upload it as <username>profile.

    The AES-CFB IV is derived from MPW, the key from PW; the IV is prepended
    to the ciphertext so download_profile can strip it after decrypting.
    """
    global s, profile, ID, MPW, PW
    iv = MD5.new(MPW + 'iv').hexdigest()[:AES.block_size]
    key = MD5.new(PW).hexdigest()
    cipher = AES.new(key, AES.MODE_CFB, iv)
    enc_profile = iv + cipher.encrypt(json.dumps(profile))
    # BUG FIX: removed a dead round-trip decrypt here -- reusing the same
    # stateful CFB cipher object for decryption produced garbage, and the
    # result was never used.
    profile_name = username + 'profile'
    # Context managers so the handles are closed even if the POST fails.
    with open(profile_name, 'w') as f:
        f.write(enc_profile)
    url = "http://128.30.93.9:8080/zoobar/index.cgi/upload"
    with open(profile_name, 'rb') as fh:
        s.post(url, files={'file': (profile_name, fh)})
def download_profile(username):
    """Download <username>profile, decrypt it, and load it into *profile*.

    Mirrors upload_profile: IV derived from MPW, key from PW; the whole
    payload (IV prefix included) is decrypted and the first block stripped.
    """
    global s, profile, ID, MPW, PW
    iv = MD5.new(MPW + 'iv').hexdigest()[:AES.block_size]
    key = MD5.new(PW).hexdigest()
    cipher = AES.new(key, AES.MODE_CFB, iv)
    url = "http://128.30.93.9:8080/zoobar/index.cgi/download"
    profile_name = username + 'profile'
    r = s.get(url, params={"filename": profile_name})
    with open(profile_name, 'wb') as f:
        for chunk in r.iter_content(chunk_size=512 * 1024):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)
    with open(profile_name, 'r') as fh:
        data = fh.read()
    # BUG FIX: the cipher object is stateful -- the original decrypted the
    # same data twice, so the second result (the one actually used) was
    # garbage.  Decrypt once and strip the leading IV block.
    dec = cipher.decrypt(data)[AES.block_size:]
    profile = json.loads(dec)
def login(username, password, MkeyPW):
    """Log in to zoobar; on success record credentials and load the profile.

    Returns True on success, False otherwise.
    """
    # BUG FIX: PW must be declared global here -- previously the assignment
    # below created a function-local, so download_profile decrypted with the
    # stale module-level (empty) password.
    global s, profile, ID, MPW, PW
    payload = { "submit_login" : "1" ,
                "login_username": username ,
                "login_password": password }
    r = s.post("http://128.30.93.9:8080/zoobar/index.cgi/login", data=payload)
    if (username in r.text):
        ID = username
        MPW = MkeyPW
        PW = password
        download_profile(username)
        return True
    else:
        return False
def register(username, password, MkeyPW):
    """Create a zoobar account, upload an encrypted profile holding the RSA
    private components, and register the RSA public key with the server.

    Returns True on success, False otherwise.
    """
    # BUG FIX: MPW and PW must be declared global -- previously these
    # assignments were function-locals, leaving the module-level credentials
    # unset for upload_profile/download_profile.
    global s, profile, ID, MPW, PW
    [n, e, d, p, q, u] = RSA_key_gen()
    payload = { "submit_registration" : "1" ,
                "login_username": username ,
                "login_password": password }
    r = s.post("http://128.30.93.9:8080/zoobar/index.cgi/login", data=payload)
    if (username in r.text):
        ID = username
        MPW = MkeyPW
        PW = password
        pkey = {"n": n, "e": e}
        profile = {"d": str(d), "p": str(p), "q": str(q), "u": str(u)}
        upload_profile(username)
        s.get("http://128.30.93.9:8080/zoobar/index.cgi/registerPkey", params=pkey)
        return True
    else:
        return False
def upload(filename, path):
    """Upload the (plaintext) file at *path* to the server as *filename*."""
    global s, profile, ID
    url = "http://128.30.93.9:8080/zoobar/index.cgi/upload"
    # Context manager so the handle is closed even if the POST fails
    # (the original leaked the open file object).
    with open(path, 'rb') as fh:
        s.post(url, files={'file': (filename, fh)})
def enc_upload(filename, path):
    """Encrypt *path* for searchable storage and upload the index + blocks.

    Generates per-file salt/keys, records them in the user's profile,
    uploads the salted-hash substring index (<filename>hash) and the
    encrypted data blocks (<filename>block), then persists the updated
    profile.
    """
    global s, profile, ID
    with open(path, 'r') as fh:
        data = fh.read()
    salt = [ord(x) for x in Random.new().read(AES.block_size)]
    key_index = [ord(x) for x in Random.new().read(AES.block_size)]
    key_blocks = [ord(x) for x in Random.new().read(AES.block_size)]
    block_index_iv = random_str(15)
    profile[filename] = {'salt': salt, 'key_index': key_index,
                         'key_blocks': key_blocks,
                         'block_index_iv': block_index_iv}
    url = "http://128.30.93.9:8080/zoobar/index.cgi/enc_upload"
    # Salted-hash substring index (file handles closed via context managers;
    # the original leaked every handle passed to s.post).
    hashed = hash_substrings(data, salt, block_index_iv, key_index)
    with open(filename + 'hash', 'w') as h:
        h.write(json.dumps(hashed))
    with open(filename + 'hash', 'rb') as h:
        s.post(url, files={'file': (filename + 'hash', h)})
    # Encrypted data blocks.
    encrypted_blocks = encrypt_blocks(data, key_blocks, block_index_iv, block_length)
    with open(filename + 'block', 'w') as b:
        b.write(json.dumps(encrypted_blocks))
    with open(filename + 'block', 'rb') as b:
        s.post(url, files={'file': (filename + 'block', b)})
    # Persist the new per-file keys.
    upload_profile(ID)
def download(filename, local_path):
    """Download `filename` from the server and save it to `local_path`."""
    global s, profile, ID
    url = "http://128.30.93.9:8080/zoobar/index.cgi/download"
    payload = {"filename" : filename}
    r = s.get(url, params=payload)
    # Stream the response to disk in 512 KiB chunks; the context manager
    # guarantees the file is closed even if a chunk write fails.
    with open(local_path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=512 * 1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
def search(keyword, filename):
    """Server-side (plaintext) search of `filename` -- or 'ALL' -- for
    `keyword`; prints the decoded JSON result list."""
    global s, profile, ID
    url = "http://128.30.93.9:8080/zoobar/index.cgi/search"
    payload = {"keyword" : keyword, "filename": filename}
    r = s.post(url, data = payload)
    result = json.loads(r.text)
    print result
def enc_search(keyword, filename):
    """Search an encrypted file using the per-file keys recorded by
    enc_upload in the user profile; prints the result."""
    global s, profile, ID
    file_profile = profile[filename]
    salt = file_profile['salt']
    key_index = file_profile['key_index']
    key_blocks= file_profile['key_blocks']
    block_index_iv=file_profile['block_index_iv']
    print search_substring(keyword, salt, key_index, block_length,block_index_iv,key_blocks, filename)
def search_in_file(path, keyword):
    """Return True if `keyword` occurs as a whole space-separated word in
    the file at `path`, else False.

    Newlines are treated as word separators, matching the original split
    behavior.  (Original leaked the file handle and declared unused globals.)
    """
    with open(path, 'r') as fh:
        text = fh.read()
    words = text.replace('\n', ' ').split(' ')
    return keyword in words
def local_search( keyword, filename):
global s, profile, ID
if not os.path.exists('tmp'):
os.mkdir('tmp',0777)
if filename != "ALL":
download(s, filename, 'tmp/'+filename )
local_list = [filename]
else:
url = "http://128.30.93.9:8080/zoobar/index.cgi/getlist"
r = s.get(url)
file_list = json.loads(r.text)
for f in file_list:
download(s, f, 'tmp/'+f )
local_list = os.listdir('tmp/')
search_results = []
for f in local_list:
if search_in_file('tmp/'+f, keyword):
search_results.append(f)
print search_results
class cmdlineInterface(cmd.Cmd):
    """Interactive REPL for the cryptZooBar client.

    Each do_* method implements one shell command: parse the
    space-separated argument line, call the module-level helper of the
    same name, and (for transfer/search commands) report elapsed time.
    NOTE: where a method has two bare string literals, only the first is
    the docstring; the second is a no-op statement.
    """
    prompt = 'cryptZooBar>> '
    def do_login(self, line):
        "login $username $password $Mkeypassword"
        "Log into Zoobar as $username"
        args = line.split(' ')
        login(args[0], args[1], args[2])
    def do_register(self, line):
        "register $username $password $Mkeypassword"
        "Register in Zoobar as $username"
        args = line.split(' ')
        register( args[0], args[1], args[2])
    def do_shell(self, line):
        "Run a shell command"
        # SECURITY(review): passes raw user input to os.popen -- arbitrary
        # command execution by design; acceptable only in this local tool.
        print "running shell command:", line
        output = os.popen(line).read()
        print output
        self.last_output = output
    def do_upload(self, line):
        "upload $new_name $path"
        "Upload $path as $new_name to Zoobar, need log in first"
        start = time.time()
        args = line.split(' ')
        upload(args[0], args[1])
        end = time.time()
        timespend = end - start
        print "spent:" + str(timespend)
    def do_enc_upload(self, line):
        "enc_upload $new_name $path"
        "Upload encrypted $path as $new_name to Zoobar, need log in first"
        start = time.time()
        args = line.split(' ')
        enc_upload( args[0], args[1])
        end = time.time()
        timespend = end - start
        print "spent:" + str(timespend)
    def do_download(self, line):
        "download $remote_name $local_path"
        "Download $remote_name in server and save as $local_path, need log in first"
        start = time.time()
        args = line.split(' ')
        download(args[0], args[1])
        end = time.time()
        timespend = end - start
        print "spent:" + str(timespend)
    def do_search(self, line):
        "search $keyword"
        "Get files that contain $keyword"
        start = time.time()
        args = line.split(' ')
        if len(args) > 1:
            search(args[0], args[1])
        else:
            search(args[0], 'ALL')
        end = time.time()
        timespend = end - start
        print "spent:" + str(timespend)
    def do_enc_search(self, line):
        "search $keyword ($file)"
        "Get files that contain $keyword"
        start = time.time()
        args = line.split(' ')
        if len(args) > 1:
            enc_search(args[0], args[1])
        else:
            enc_search(args[0], 'ALL')
        end = time.time()
        timespend = end - start
        print "spent:" + str(timespend)
    def do_local_search(self, line):
        "search $keyword"
        "Get files that contain $keyword"
        start = time.time()
        args = line.split(' ')
        if len(args) > 1:
            local_search(args[0], args[1])
        else:
            local_search(args[0], 'ALL')
        end = time.time()
        timespend = end - start
        print "spent:" + str(timespend)
    def do_logout(self, line):
        # NOTE(review): assigns *instance* attributes, but the helpers all
        # use module globals (s, profile) -- this logout is likely
        # ineffective; verify intent.
        self.s = requests.session()
        Mkey = MD5.new()
        self.profile = {}
    def do_EOF(self, line):
        # Ctrl-D: print a newline and tell cmd.Cmd to stop the loop.
        print ""
        return True
# Script entry point: start the interactive cryptZooBar shell.
if __name__ == '__main__':
    Mkey = MD5.new()       # NOTE(review): never read afterwards -- verify intent
    loggedIn = False       # NOTE(review): never read elsewhere in view
    c = cmdlineInterface()
    # salt=random_str(10)
    #print salt
    c.cmdloop()
|
UTF-8
|
Python
| false | false | 2,013 |
13,967,233,660,095 |
ee170df56264056e0b5d012e6ce60e0ad5eb03fa
|
7034ed711ebe3477bce4183355ce905182f7b2c8
|
/MapUtils.py
|
47be54bfeb0478caa2510de3306ec5c7d1d5692b
|
[] |
no_license
|
wxmiked/RadarClustering
|
https://github.com/wxmiked/RadarClustering
|
d1a3efd165ef04c39702abc716419fefbfdc8e75
|
73c5fb4d9d8f54b7574d10ce9b536162301f82be
|
refs/heads/master
| 2021-05-27T08:16:14.579814 | 2012-01-12T22:28:24 | 2012-01-12T22:28:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os.path # for os.path.dirname(), os.path.abspath(), os.path.sep
# Default Map display options.
# Each entry is [layer_name, kwargs]; PlotMapLayers() forwards the kwargs
# to the matching Basemap draw*/readshapefile call.
mapLayers = [['states', {'linewidth':1.5, 'color':'k', 'zorder':0}],
             ['counties', {'linewidth':0.5, 'color':'k', 'zorder':0}],
             ['roads', {'linewidth':0.75, 'color':'r', 'zorder':0}],
             ['rivers', {'linewidth':0.5, 'color':'b', 'zorder':0}]]
def PlotMapLayers(map, layerOptions, axis=None):
    """Render each requested overlay onto a Basemap instance.

    `layerOptions` is a sequence of [name, kwargs] pairs; the kwargs are
    forwarded to the matching Basemap drawing call.  Shapefile-backed
    layers (counties, roads) are loaded from the 'shapefiles' directory
    next to this module.  Raises TypeError for an unrecognized layer name.
    """
    shapeDir = os.path.dirname(os.path.abspath(__file__))
    for layer in layerOptions:
        kind = layer[0]
        opts = layer[1]
        if kind == 'states':
            map.drawstates(ax=axis, **opts)
        elif kind == 'counties':
            shapePath = os.path.sep.join([shapeDir, 'shapefiles', 'countyp020'])
            map.readshapefile(shapePath, name='counties', ax=axis, **opts)
        elif kind == 'rivers':
            map.drawrivers(ax=axis, **opts)
        elif kind == 'roads':
            shapePath = os.path.sep.join([shapeDir, 'shapefiles', 'road_l'])
            map.readshapefile(shapePath, name='road', ax=axis, **opts)
        elif kind == 'countries':
            map.drawcountries(ax=axis, **opts)
        else:
            raise TypeError('Unknown map_layer type: ' + layer[0])
|
UTF-8
|
Python
| false | false | 2,012 |
15,582,141,363,781 |
5f31394d68503011588318055ad8ab7b3265d736
|
7181c8223accad153f4fefc3673cc5364010c2fe
|
/pyrely/distributions/ot_compatibility.py
|
d9a5943fb84c1d79752429f0bc0ab4a5afae2209
|
[] |
no_license
|
pyrely/pyrely
|
https://github.com/pyrely/pyrely
|
c99cb4384a633a1336274eb6d4997f98855d16e5
|
7dc8e38992e4cd7fffb2d12dd284c0232fe1b482
|
refs/heads/master
| 2021-01-01T05:49:38.356093 | 2011-03-13T06:58:31 | 2011-03-13T06:58:31 | 1,291,037 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#-*- coding: utf8 -*-
"""
Compatibility functions for OpenTURNS and Numpy.
"""
# Author(s): Vincent Dubourg <[email protected]>
# License: BSD style
import openturns as ot
import numpy as np
def NumericalSample_to_array(NS):
    """Convert an OpenTURNS NumericalSample into a 2-D numpy array.

    Parameters
    ----------
    NS: ot.NumericalSample
        A NumericalSample.

    Returns
    -------
    a: np.ndarray
        Two-dimensional array of shape (size, dimension).
    """
    size = NS.getSize()
    dim = NS.getDimension()
    a = np.zeros((size, dim))
    for row in range(size):
        for col in range(dim):
            a[row, col] = NS[row][col]
    return a
def array_to_NumericalSample(a):
    """Convert a numpy array into an OpenTURNS NumericalSample.

    One-dimensional (or scalar) input is promoted to a column, so each
    entry becomes its own one-dimensional sample point.
    """
    arr = np.array(a)
    if arr.ndim <= 1:
        # Promote a flat vector to an (n, 1) column.
        arr = np.atleast_2d(arr).T
    sample = ot.NumericalSample(1, ot.NumericalPoint(arr[0].tolist()))
    for row in arr[1:]:
        sample.add(ot.NumericalPoint(row.tolist()))
    return sample
def NumericalPoint_to_array(NP):
    """Convert an OpenTURNS NumericalPoint into a 1-D numpy array."""
    dim = NP.getDimension()
    a = np.zeros(dim)
    for k in range(dim):
        a[k] = NP[k]
    return a
def array_to_NumericalPoint(a):
    """Convert a numpy array (flattened first) into an OpenTURNS
    NumericalPoint of the same length."""
    flat = np.ravel(a)
    point = ot.NumericalPoint(flat.size)
    for k in range(point.getDimension()):
        point[k] = flat[k]
    return point
|
UTF-8
|
Python
| false | false | 2,011 |
4,423,816,317,400 |
caad9b9898ddc1ee761dd09dc9e6f141673f5654
|
bfdafc7c52c831e63e1bdbbdfa9be57d05dafe9f
|
/tipy/pyshell.py
|
05a0966c5020cb3de55d67b63414574d33b64b13
|
[] |
no_license
|
sdiehl/tipy
|
https://github.com/sdiehl/tipy
|
50b53f0d656f45f3698b71445232bd00334b69f5
|
fdee3a3079d66656c16a4f10ae270ec357030c9f
|
refs/heads/master
| 2016-09-05T08:46:42.292879 | 2012-08-06T20:01:12 | 2012-08-06T20:01:12 | 4,781,453 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
A modified version of the core interactive shell which handles
captures all execution output.
"""
import re
import sys
import argparse
import traceback
from code import InteractiveConsole, softspace
from pprint import pformat
from pygments.formatters import HtmlFormatter, get_formatter_by_name
from pygments.styles import get_style_by_name
from pygments.lexers import get_lexer_by_name
from pygments import highlight
# Pragma directive name -> renderer applied to the value produced by the
# following `>>>` line in exec_block.  Commented-out entries need optional
# dependencies (sympy/simplejson) this module does not import.
passthru_commands = {
    'dataframe' : lambda df: df.to_html(),
    #'tex' : lambda sym: '$$%s$$' % latex(sym),
    #'inlinetex' : lambda sym: '$%s$' % latex(sym),
    #'numpy' : lambda arr: '$$%s$$' % LatexPrinter()._print_ndarray(arr)[1:-2],
    'pprint1' : lambda obj: highlight_python(pformat(obj, width=1)),
    'pprint' : lambda obj: highlight_python(pformat(obj)),
    #'json' : lambda obj: highlight_json(simplejson.dumps(obj, indent=4))
}
def highlight_python(py_obj):
    """Syntax-highlight `py_obj` (source text) as Python.

    NOTE(review): relies on a module-global `formatter` that is never
    defined at module scope in this file (exec_block only creates a
    local one) -- calling this raises NameError; verify where
    `formatter` is meant to come from.
    """
    _lexer = get_lexer_by_name('python')
    return highlight(py_obj, _lexer, formatter)
def highlight_json(py_obj):
    """Syntax-highlight `py_obj` as JSON (same undefined-`formatter`
    caveat as highlight_python)."""
    _lexer = get_lexer_by_name('json')
    return highlight(py_obj, _lexer, formatter)
def highlight_shell(py_obj):
    """Syntax-highlight `py_obj` as bash (same undefined-`formatter`
    caveat as highlight_python)."""
    _lexer = get_lexer_by_name('bash')
    return highlight(py_obj, _lexer, formatter)
def filter_cat(args):
    """Join the non-empty strings in `args` with newlines."""
    nonempty = [piece for piece in args if len(piece) > 0]
    return '\n'.join(nonempty)
class FileCacher:
    """Minimal write-only file stand-in that accumulates text in memory.

    Used as a temporary sys.stdout replacement; flush() returns everything
    written so far and empties the buffer.
    """
    def __init__(self):
        self.reset()
    def reset(self):
        # Start a fresh capture buffer.
        self.out = []
    def write(self, line):
        self.out.append(line)
    def flush(self):
        captured = ''.join(self.out)
        self.reset()
        return captured
class Shell(InteractiveConsole):
    """InteractiveConsole subclass that captures all execution output.

    push() temporarily swaps sys.stdout for an in-memory FileCacher so
    the result of each interactive line can be returned as a string.
    (Python 2 `exec`/`print` syntax throughout.)
    """
    def __init__(self):
        self.stdout = sys.stdout
        self.cache = FileCacher()
        InteractiveConsole.__init__(self)
        return
    def get_output(self):
        # Redirect stdout into the cache.
        sys.stdout = self.cache
    def return_output(self):
        # Restore the stdout captured at construction time.
        sys.stdout = self.stdout
    def push(self,line):
        """Feed one source line to the console; return its captured output
        (trailing whitespace stripped)."""
        self.get_output()
        InteractiveConsole.push(self,line)
        self.return_output()
        output = self.cache.flush()
        return output.rstrip()
    def runcode(self, code):
        # Execute compiled code in this shell's namespace; tracebacks are
        # printed (to the redirected stdout) instead of propagating.
        try:
            exec code in self.locals
        except SystemExit:
            raise
        except Exception:
            print traceback.format_exc()
        else:
            if softspace(sys.stdout, 0):
                print
    def runsource(self, source, filename="<input>", symbol="single"):
        # Mirrors code.InteractiveInterpreter.runsource: False when the
        # input is complete (ran or failed to compile), True when more
        # input is needed.
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            print traceback.format_exc()
            return False
        #if code is not None and len(source) > 0:
        #print type(eval(source))
        if code is None:
            # Case 2
            return True
        # Case 3
        self.runcode(code)
        return False
def exec_block(source, style, formatter, shell=False, dohighlight=True):
    """Run a pycon-style block line by line through a capturing Shell.

    Lines starting with '>>>'/'...' are executed and interleaved with
    their output; 'pragma: <directive>' lines arm a passthru renderer
    (see passthru_commands) applied to the value of the next '>>>' line.
    Returns (session_text, passthru_text); session_text is
    pygments-highlighted when `dohighlight` is true.

    SECURITY(review): '>>>' lines from the document are eval()'d --
    only feed trusted input.
    """
    _lexer = get_lexer_by_name('pycon')
    # The `formatter` parameter (a name) is rebound to a formatter object.
    formatter = get_formatter_by_name(formatter)
    formatter.style = get_style_by_name(style)
    interactions = []
    interp = Shell()
    passthru_output = []
    passthru_mode = False
    passthru_f = None
    for line in source.split('\n'):
        if 'pragma' in line:
            # Everything after pragma
            rawrgs = line.split(':')[1:]
            args = [ a.strip() for a in rawrgs ]
            directive = args[0]
            passthru_f = passthru_commands.get(directive)
            if not passthru_f:
                raise RuntimeError('Pragma `%s` directive unknown.' % directive)
            passthru_mode = True
        elif line.startswith('>>>') and passthru_mode:
            # Armed by a pragma: evaluate the expression and render its
            # value with the passthru function instead of echoing output.
            inline = line.split('>>> ')[1]
            try:
                retval = eval(inline, {}, interp.locals)
            except SyntaxError:
                raise RuntimeError('Passthru line must return a value')
            interactions += [(line, '')]
            passthru_output += [passthru_f(retval)]
            # TODO: this turned off does cool stuff
            passthru_mode = False
        elif line.startswith('>>>'):
            inline = line.split('>>> ')[1]
            output = interp.push(inline)
            interactions += [(line, str(output))]
        elif line.startswith('...'):
            # Continuation line of a multi-line statement.
            inline = line.split('... ')[1]
            output = interp.push(inline)
            interactions += [(line, str(output))]
        else:
            inline = line
            # We're in shell mode so we can have hidden
            # "preperation lines", which can still execute side
            # effects and do output
            if shell:
                output = interp.push(inline)
                #interactions += [('', str(output))]
            else:
                output = interp.push(inline)
                interactions += [(line, str(output))]
    # TODO: interleave passthru output with the corresponding
    # caller, right now it just appends it on the end.
    show = filter_cat(passthru_output)
    output = '\n'.join(filter_cat([a,b]) for a,b in interactions)
    if dohighlight:
        return highlight(output, _lexer, formatter), show
    else:
        return output, show
def exec_source(source, style, formatter, shell=False, dohighlight=True):
    """Compile and exec `source` as one program, capturing its stdout.

    Returns (output, '') -- output pygments-highlighted when
    `dohighlight` is true.  The `shell` flag is accepted for signature
    parity with exec_block but unused.  (Python 2 `exec` syntax.)
    """
    _lexer = get_lexer_by_name('python')
    bc = compile(source, '<stdin>', 'exec')
    cache = FileCacher()
    ostdout = sys.stdout
    # Swap stdout for an in-memory buffer while the code runs.
    sys.stdout = cache
    exec bc in {}
    output = cache.flush()
    # NOTE(review): stdout is still the cache here, so this write is
    # captured and then discarded by the restore below -- verify intent.
    sys.stdout.write(output)
    sys.stdout = ostdout
    if dohighlight:
        return highlight(output, _lexer, formatter), ''
    else:
        return output, ''
def preprocess_source(rawsource, style, formatter):
    """Expand ```pycon / ```pyexec fenced blocks in a markdown document.

    pycon blocks are run through the capturing interactive shell and
    replaced with a fenced block of the interleaved session; pyexec
    blocks are kept and followed by a fenced block of their captured
    output.  Fences with any other language tag are left untouched.
    """
    CODE_REGEX = re.compile(r"```(?P<compiler>\w+)(?P<code>.*?)```", re.MULTILINE | re.DOTALL)
    CODE_SPAN = lambda s,a: """```python\n%s\n```\n\n%s""" % (s,a)
    def preprocess_block(matchobj):
        # Dispatch on the fence's language tag.
        match = matchobj.groupdict()
        compiler = match['compiler']
        source = match['code']
        if compiler == 'pycon':
            return CODE_SPAN(*exec_block(source, style, formatter, shell=True, dohighlight=False))
        elif compiler == 'pyexec':
            return (
                CODE_SPAN(source, '') + '\n\n' +
                CODE_SPAN(*exec_source(source, style, formatter, shell=False, dohighlight=False))
            )
        else:
            return matchobj.group()
    # BUG FIX: the original called re.sub(CODE_REGEX, ..., rawsource, re.U);
    # the fourth positional argument of re.sub is `count`, not `flags`, so
    # re.U (== 32) silently capped substitution at 32 code blocks.  The
    # flags already live in the compiled pattern, so none are needed here.
    return CODE_REGEX.sub(preprocess_block, rawsource)
def main():
    """CLI entry point: emit CSS, preprocess markdown from stdin, or run a
    pycon source file/stdin through the capturing shell."""
    parser = argparse.ArgumentParser()
    parser.add_argument('source', default='-', nargs='?', type=str)
    parser.add_argument('--style', default='colorful', type=str)
    parser.add_argument('--format', default='html', type=str)
    parser.add_argument('--css', action="store_true", help='Router service config')
    parser.add_argument('--preprocess', action="store_true", help='Preprocess a markdown file from stdin')
    args = parser.parse_args()
    if args.css:
        # Just emit the pygments stylesheet and quit.
        htmlformat = HtmlFormatter(style=args.style)
        sys.stdout.write(htmlformat.get_style_defs('.highlight'))
        sys.exit(0)
    if args.preprocess:
        source = sys.stdin.read()
        processed = preprocess_source(source, args.style, args.format)
        sys.stdout.write(processed)
        return
    if args.source == '-' or not args.source:
        source = sys.stdin.read()
    else:
        source = open(args.source).read()
    # BUG FIX: exec_block returns an (output, passthru) tuple; the original
    # passed the whole tuple to sys.stdout.write, raising TypeError.
    output, _ = exec_block(source, style=args.style, formatter=args.format)
    sys.stdout.write(output)
# Script entry point.
if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
14,456,859,948,311 |
d6804f77383131e72c011665f93c28dc34a81f9e
|
0ebddfea0da8d426cad886290b717827f19e31ad
|
/hmm/ghmm_enhancer_order2.py
|
e5b4259db3c132ebda1e9650f70378c5fcc20636
|
[] |
no_license
|
Guannan/genomics_project
|
https://github.com/Guannan/genomics_project
|
949d9d126d0b8991bf21da095a72b5064b0b6130
|
ea99f19fd9950534fe79e97817b0ed90defc78c6
|
refs/heads/master
| 2021-01-13T02:08:41.536725 | 2013-05-01T03:09:59 | 2013-05-01T03:09:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from ghmm import *
import sys
import os
import argparse
import itertools
import random
def extract_kmer(s, ln):
    """Return every length-`ln` substring (k-mer) of `s`, left to right.

    Returns an empty list when len(s) < ln.
    """
    # `range` behaves identically to the Python-2-only `xrange` here and
    # keeps the function usable under both Python 2 and Python 3.
    index = []
    for i in range(0, len(s) - ln + 1):
        index.append(s[i:i + ln])
    return index
# Script entry point: train a 2-state discrete HMM whose emission alphabet
# is all DNA 3-mers on the positive (enhancer) FASTA file, then Viterbi-score
# 20-base prefixes of each record in the negative file.  (Python 2: xrange,
# print statements; requires the ghmm library.)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Create feature vectors from kmer counts")
    parser.add_argument("pos", help="positive data set")
    parser.add_argument("neg", help="negative data set")
    # parser.add_argument("order", help="order of HMM")
    args = parser.parse_args()
    # Emission alphabet: all 4^3 = 64 DNA 3-mers.
    bp = ['A','C','T','G']
    dna = [bp,bp,bp]
    kmers = list(itertools.product(*dna))
    for i in xrange(len(kmers)):
        kmers[i] = ''.join(kmers[i])
    # print kmers
    sigma = Alphabet(kmers)
    # Uniform (unnormalized) initial emission, transition and start
    # probabilities for the two hidden states.
    normal = []
    enhancer = []
    for i in xrange(len(kmers)):
        normal.append(0.5)
        enhancer.append(0.5)
    E = [normal,enhancer]
    # print len(E)
    # print len(E[0])
    T = [[0.5, 0.5], [0.5, 0.5]]
    pi = [0.5] * 2
    m=HMMFromMatrices(sigma,DiscreteDistribution(sigma),T,E,pi)
    # print m
    temp_seq = ""
    # trained = False
    kmer_arr = []
    # Concatenate every positive sequence line into one training string.
    with open(args.pos, 'r') as fafh:
        input_str = fafh.readline() #get rid of initial >
        # while trained == False:
        while True:
            input_str = fafh.readline()
            if len(input_str) == 0: break
            input_str = input_str.strip().upper()
            if '>' in input_str:
                # temp_seq = ""
                trained = True
            else:
                temp_seq += input_str
    train_arr = extract_kmer(temp_seq,3)
    # for i in kmer_arr:
    # print i
    # print train_arr
    train_seq = EmissionSequence(sigma, train_arr)
    m.baumWelch(train_seq)
    # print m
    test_arr = []
    # Score each negative record's first 20 bases with the trained model.
    # NOTE(review): temp_seq is not cleared before this loop, so the first
    # record scored still holds training residue -- verify intent.
    with open(args.neg, 'r') as fafh:
        input_str = fafh.readline() #get rid of initial >
        for input_str in fafh:
            input_str = input_str.strip().upper()
            if '>' in input_str:
                test_arr = extract_kmer(temp_seq[:20],3)
                test_seq = EmissionSequence(sigma, test_arr)
                hidden_state,log = m.viterbi(test_seq)
                print log
                temp_seq = ""
            else:
                temp_seq += input_str
    # Score the final record (no trailing '>' to trigger it in the loop).
    test_arr = extract_kmer(temp_seq[:20],3)
    test_seq = EmissionSequence(sigma, test_arr)
    hidden_state,log = m.viterbi(test_seq)
    print log
|
UTF-8
|
Python
| false | false | 2,013 |
13,013,750,950,462 |
b3a15a076aa4b3ea38665d9c9f6d0f07ddaf895e
|
afcdca427b09fb60bb35f709b27dbaee1519b9c6
|
/RockPhysics/Gassman.py
|
b776abbe2340bd701484eb7576d613d9bf848b06
|
[] |
no_license
|
crimap/cccm
|
https://github.com/crimap/cccm
|
f7a157af9d8838c484e66daefa998488ea29ea8b
|
87f4f61ddcc0afc96c582a579ea83c7766597bb7
|
refs/heads/master
| 2021-01-21T01:10:45.119638 | 2013-02-22T05:12:21 | 2013-02-22T05:12:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import numpy as np
import math
from pylab import *
import numpy as np
import matplotlib.pyplot as plt
def kdry(ksatWell,phiWell,soWell,vsh,kbri,koil):
    """Invert Gassmann's equation sample by sample in the wells.

    Given saturated-rock bulk moduli, porosity, oil saturation and shale
    fraction (per sample) plus scalar brine/oil moduli, return the
    dry-rock bulk modulus for each sample as a numpy array.
    """
    # Mineral end-member constants (quartz / clay), in Pa.
    KQUARTZ = 37.0e9
    KCLAY = 25.0e09
    nSamples = len(ksatWell)
    out = np.zeros(nSamples)
    for i in range (nSamples):
        shale = vsh[i]
        # Hill average (mean of Voigt and Reuss mixes) for the mineral modulus.
        voigt = KQUARTZ*(1.0-shale)+KCLAY*shale
        reuss = (1.0-shale)/KQUARTZ+shale/KCLAY
        ks = 0.5*(voigt+1.0/reuss)
        # Fluid modulus: isostress (Reuss) mix of brine and oil.
        kfluid = 1.0/((1.0-soWell[i])/kbri+soWell[i]/koil)
        num = ksatWell[i]*(phiWell[i]*ks/kfluid+1.0-phiWell[i])-ks
        den = phiWell[i]*ks/kfluid+ksatWell[i]/ks-1.0-phiWell[i]
        out[i] = num/den
    return out
def kgassman(kdry,phi,so,vsh,kbri,koil,rhobri,rhooil):
    """Gassmann fluid substitution: dry-rock -> saturated-rock properties.

    kdry, phi, so, vsh are per-sample sequences; kbri/koil and
    rhobri/rhooil are scalar brine/oil bulk moduli and densities.
    Returns (ksat, rhosat, soR, kfluid) as numpy arrays of equal length.
    """
    n = len(kdry)
    ksat = np.zeros(n)
    rhosat = np.zeros(n)
    soR = np.zeros(n)
    kfluid = np.zeros(n)
    # Mineral end-member constants (quartz / clay).
    kquartz = 37.0e9
    kclay = 25.0e09
    rhoquartz = 2650
    rhoclay = 2550
    for i in range(n):
        soR[i] = so[i]
        # Hill average (mean of Voigt and Reuss mixes) for the mineral modulus.
        mv = kquartz*(1.0-vsh[i])+kclay*vsh[i]
        mr = (1.0-vsh[i])/kquartz+vsh[i]/kclay
        ks = 0.5*(mv+1.0/mr)
        rhos = rhoquartz*(1.0-vsh[i])+rhoclay*vsh[i]
        # Isostress (Reuss) brine/oil fluid mix.
        kfluid[i] = (1.0-so[i])/kbri+so[i]/koil
        kfluid[i] = 1.0/kfluid[i]
        rhofluid = (1.0-so[i])*rhobri+so[i]*rhooil
        tmp3 = (phi[i]/kfluid[i]+(1.0-phi[i])/ks-kdry[i]/(ks*ks))
        tmp1 = ((1.0-kdry[i]/ks)*(1.0-kdry[i]/ks))
        ksat[i] = kdry[i]+tmp1/tmp3
        # BUG FIX: saturated density must use the mixed-fluid density; the
        # original used rhooil and left `rhofluid` dead (the class method
        # fluidSub2_p in this file correctly uses rhofluid).
        rhosat[i] = rhofluid*phi[i]+(1.0-phi[i])*rhos
    return ksat,rhosat,soR,kfluid
class Gassman:
    """Gassmann fluid substitution over pressure / CO2-saturation grids.

    Construction fixes the mineral bulk modulus (`ks`), mineral density
    (`rhos`) and a porosity array (`phi`); each fluidSub2* method computes
    saturated moduli/densities for a grid of fluid mixes.
    """
    def __init__(self,ks,rhos,phi):
        self._ks = ks
        self._rhos = rhos
        self._phi = phi
    def fluidSub2_p(self,kbri,kco2,koil,rhobri,rhoco2,rhooil,kdry,co2,p,oilInit,brineInit):
        '''K sat calculation by using Gassman.
        This modules works for combining Bounds with fluid effect

        Fluid properties (kbri, kco2, koil, rho*) are indexed per pressure
        [i3]; kdry is indexed [pressure][porosity].  CO2 displaces brine
        and oil equally (co2/2 from each).  Returns (ksat, rhosat) with
        shape (len(p), len(co2), len(phi)).
        '''
        self._kbri = kbri
        self._rhobri = rhobri
        self._koil = koil
        self._rhooil = rhooil
        self._rhoco2 = rhoco2
        self._kco2 = kco2
        self._kdry = kdry
        self._co2 = co2
        self._p = p
        # NOTE(review): duplicate assignment of self._rhoco2 (also above).
        self._rhoco2 = rhoco2
        n1 = len(self._phi)
        n3 = len(self._p)
        n2 = len(self._co2)
        ksat = np.zeros(((n3,n2,n1)))
        rhosat = np.zeros(((n3,n2,n1)))
        for i3 in range(n3):
            for i2 in range(n2):
                tmp3=0.0
                for i1 in range (n1):
                    # Reuss mix of brine/oil/CO2, then Gassmann for ksat.
                    kfluid = (brineInit-self._co2[i2]/2.0)/self._kbri[i3]+(oilInit-self._co2[i2]/2.0)/self._koil[i3]+self._co2[i2]/self._kco2[i3]
                    kfluid = 1.0/kfluid
                    rhofluid = (brineInit-self._co2[i2]/2)*self._rhobri[i3]+(oilInit-self._co2[i2]/2)*self._rhooil[i3]+self._co2[i2]*self._rhoco2[i3]
                    tmp3=(self._phi[i1]/kfluid+(1.0-self._phi[i1])/self._ks-self._kdry[i3][i1]/(self._ks*self._ks))
                    tmp1=((1.0-self._kdry[i3][i1]/self._ks)*(1.0-self._kdry[i3][i1]/self._ks))
                    ksat[i3][i2][i1]= self._kdry[i3][i1]+tmp1/tmp3
                    rhosat[i3][i2][i1]= rhofluid*self._phi[i1]+(1.0-self._phi[i1])*self._rhos
        return (ksat,rhosat)
    def fluidSub2_st(self,kbri,kco2,koil,rhobri,rhoco2,rhooil,kdry,co2,p,oilInit,brineInit):
        '''K sat calculation by using Gassman.
        This modules works for combining Bounds with fluid effect

        Brine/CO2 (no oil) variant.  NOTE(review): `rhofluid` is computed
        but never used -- rhosat uses self._rhoco2[i3] instead; also
        rhofluid multiplies self._rhoco2 unindexed while rhosat indexes it
        per pressure.  Verify which density is intended (cf. fluidSub2_p,
        which uses rhofluid).
        '''
        self._kbri = kbri
        self._rhobri = rhobri
        self._koil = koil
        self._rhooil = rhooil
        self._rhoco2 = rhoco2
        self._kco2 = kco2
        self._kdry = kdry
        self._co2 = co2
        self._p = p
        self._rhoco2 = rhoco2
        n1 = len(self._phi)
        n3 = len(self._p)
        n2 = len(self._co2)
        ksat = np.zeros(((n3,n2,n1)))
        rhosat = np.zeros(((n3,n2,n1)))
        for i3 in range(n3):
            for i2 in range(n2):
                tmp3=0.0
                for i1 in range (n1):
                    kfluid =(1.0-self._co2[i2])/self._kbri[i3]+self._co2[i2]/self._kco2[i3]
                    kfluid = 1.0/kfluid
                    rhofluid = (1.0-self._co2[i2])*self._rhobri + self._co2[i2]*self._rhoco2
                    tmp3=(self._phi[i1]/kfluid+(1.0-self._phi[i1])/self._ks-self._kdry[i3][i1]/(self._ks*self._ks))
                    tmp1=((1.0-self._kdry[i3][i1]/self._ks)*(1.0-self._kdry[i3][i1]/self._ks))
                    ksat[i3][i2][i1]= self._kdry[i3][i1]+tmp1/tmp3
                    rhosat[i3][i2][i1]= self._rhoco2[i3]*self._phi[i1]+(1.0-self._phi[i1])*self._rhos
        return (ksat,rhosat)
    def fluidSub2(self,kbri,kco2,rhobri,rhoco2,kdry,co2):
        '''K sat calculation by using Gassman.
        This modules works for combining Bounds with fluid effect

        Scalar-fluid-property, single-pressure variant; returns arrays of
        shape (len(co2), len(phi)).  NOTE(review): reads self._p, which
        this method never sets -- it raises AttributeError unless
        fluidSub2_p/_st ran first on the same instance; `rhofluid` is
        again computed but unused (rhosat uses self._rhoco2).
        '''
        self._kbri = kbri
        self._rhobri = rhobri
        self._rhoco2 = rhoco2
        self._kco2 = kco2
        self._kdry = kdry
        self._co2 = co2
        self._rhoco2 = rhoco2
        n1 = len(self._phi)
        n3 = len(self._p)
        n2 = len(self._co2)
        ksat = np.zeros((n2,n1))
        rhosat = np.zeros((n2,n1))
        for i2 in range(n2):
            tmp3=0.0
            for i1 in range (n1):
                kfluid = (1.0-self._co2[i2])/self._kbri+self._co2[i2]/self._kco2
                kfluid = 1.0/kfluid
                rhofluid = (1.0-self._co2[i2])*self._rhobri+self._co2[i2]*self._rhoco2
                tmp3=(self._phi[i1]/kfluid+(1.0-self._phi[i1])/self._ks-self._kdry[i1]/(self._ks*self._ks))
                tmp1=((1.0-self._kdry[i1]/self._ks)*(1.0-self._kdry[i1]/self._ks))
                ksat[i2][i1]= self._kdry[i1]+tmp1/tmp3
                rhosat[i2][i1]= self._rhoco2*self._phi[i1]+(1.0-self._phi[i1])*self._rhos
        return (ksat,rhosat)
|
UTF-8
|
Python
| false | false | 2,013 |
3,083,786,552,602 |
e8f3170cd5d73236bc3590e32207da2244bf7f32
|
7b6c23555364db5b07c3f99e4545ff6d2fcb1d54
|
/galerka/postgres.py
|
e98fbe12d90fddf2fc15238fd86503f2664802a2
|
[
"MIT"
] |
permissive
|
encukou/galerka
|
https://github.com/encukou/galerka
|
9dcae0ededb9853217657c51518b5e2fa1a3a254
|
c5875b1caf29561babf94caccb9712c149860438
|
refs/heads/master
| 2021-01-13T01:26:49.058394 | 2014-06-15T18:51:57 | 2014-06-15T18:51:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import asyncio
from werkzeug.utils import call_maybe_yield, cached_property
import aiopg
from aiopg.sa import create_engine, dialect
import psycopg2
from sqlalchemy.schema import CreateTable
from sqlalchemy.sql import ClauseElement
from galerka.util import asyncached
def postgres_middleware(app):
    """Wrap a coroutine WSGI-style app with per-request Postgres handling.

    Each request gets a lazy SQLConnection in the environ; the transaction
    is committed when the app succeeds, rolled back on any exception, and
    the connection is always released.
    """
    @asyncio.coroutine
    def add_postgres(environ, start_response):
        connection = SQLConnection(environ['galerka.postgres.get_pool'])
        environ['galerka.postgres.connection'] = connection
        try:
            result = yield from call_maybe_yield(app, environ, start_response)
        except:
            yield from connection.rollback()
            raise
        else:
            # BUG FIX: the original had `return result` inside the try
            # block, which skips the else clause entirely -- commit() was
            # unreachable and every request's work was silently discarded
            # on close.  Commit first, then return.
            yield from connection.commit()
            return result
        finally:
            yield from connection.close()
    return add_postgres
class SQLConnection:
    """Lazy, transactional wrapper over an aiopg.sa connection.

    Nothing is acquired until the first execute(); that first call also
    begins the transaction that commit()/rollback() later finish.
    """
    def __init__(self, get_pool):
        self.get_pool = get_pool
        self.connection = None
        self.transaction = None
    @asyncached
    def pool(self):
        # Resolved once and cached by the project's asyncached decorator.
        return (yield from self.get_pool())
    @asyncio.coroutine
    def execute(self, query, *multiparams, **params):
        if not self.connection:
            # First use: acquire a connection and open the transaction.
            pool = yield from self.pool
            self.connection = yield from pool.acquire()
            self.transaction = yield from self.connection.begin()
        # NOTE(review): prints every statement -- consider real logging.
        if isinstance(query, ClauseElement):
            print(query.compile(dialect=dialect))
        else:
            print(repr(query), multiparams, params)
        result = yield from self.connection.execute(query,
                                                    *multiparams,
                                                    **params)
        return result
    @asyncio.coroutine
    def commit(self):
        # No-op when execute() was never called (no transaction opened).
        if self.transaction:
            yield from self.transaction.commit()
    @asyncio.coroutine
    def rollback(self):
        if self.transaction:
            yield from self.transaction.rollback()
    @asyncio.coroutine
    def close(self):
        if self.connection:
            yield from self.connection.close()
def postgres_pool_factory(dsn, tables):
    """Return a coroutine yielding a shared aiopg.sa engine for `dsn`.

    The one-time engine creation also creates any table from
    `tables.metadata` that does not yet exist.  The inner `get_pool` is
    deliberately defined twice: the first definition is consumed by the
    Task below, then the name is rebound to a coroutine that simply
    awaits that single shared Task.
    """
    @asyncio.coroutine
    def get_pool():
        pool = yield from create_engine(dsn)
        connection = yield from pool.acquire()
        try:
            result = yield from connection.execute(
                'SELECT tablename FROM pg_tables '
                'WHERE schemaname=%s', ('public', ))
            existing_table_names = {name[0] for name in result}
            print('Existing tables:', existing_table_names)
            for name, table in tables.metadata.tables.items():
                if name not in existing_table_names:
                    create_statement = CreateTable(table)
                    print(create_statement.compile(dialect=dialect))
                    yield from connection.execute(create_statement)
        finally:
            # NOTE(review): aiopg.sa SAConnection.close() is a coroutine --
            # this call without `yield from` may leak the connection; verify
            # against the aiopg version in use.
            connection.close()
        return pool
    # Kick off engine creation eagerly; all callers share this Task.
    pool_future = asyncio.Task(get_pool())
    @asyncio.coroutine
    def get_pool():
        return (yield from pool_future)
    return get_pool
class PostgresMixin:
    """Request-object mixin exposing the per-request Postgres helpers."""
    def execute_sql(self, *args, **k):
        # Delegate to the SQLConnection that postgres_middleware stashed
        # in the WSGI environ for this request.
        connection = self.environ['galerka.postgres.connection']
        return connection.execute(*args, **k)
    @cached_property
    def sql_tables(self, *args, **k):
        return self.environ['galerka.postgres.tables']
|
UTF-8
|
Python
| false | false | 2,014 |
7,524,782,712,294 |
c8dca07b61d9a3c57b693af7e15dcfacecc977af
|
dfab6798ece135946aebb08f93f162c37dd51791
|
/workflows/aokuang.workflows/aokuang.workflows/actors/feedback.py
|
12bcf78b628c265e25496c190e65680891822838
|
[] |
no_license
|
yxqd/luban
|
https://github.com/yxqd/luban
|
405f5f7dcf09015d214079fe7e23d644332be069
|
00f699d15c572c8bf160516d582fa37f84ac2023
|
refs/heads/master
| 2020-03-20T23:08:45.153471 | 2012-05-18T14:52:43 | 2012-05-18T14:52:43 | 137,831,650 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
"""
http://example.com/[email protected]
"""
import luban
from ..workflows.feedback import workflow; workflow=workflow()
class Actor(workflow.Actor):
expose = 1
@luban.decorators.frameHandler
def default(self, email, **kwds):
# Store user email in session. Later it will be used
# in constructing the feedback email.
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# The implementation here is a hack for demo purpose.
# For a real app, this should not come from arguments;
# it should be obtained from database, for example.
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
luban.session['email'] = email
# give this context a name
context = 'feedback-demo'
# build the action in case of successful sending the feedback
onsuccess = luban.a.load(actor = self.name, routine = 'onsuccess')
# and save the context in the session
luban.session[context] = {'onsuccess': onsuccess}
# create the feedback button
button = workflow.visuals.button(context=context)
# test frame
frame = luban.e.frame(title='test')
frame.append(button)
return frame
def onsuccess(self, **kwds):
return
pass
# End of file
|
UTF-8
|
Python
| false | false | 2,012 |
11,081,015,643,687 |
840f7561ae21b645da1a8b312d9ecf51113b9908
|
c2a358779b5f77dc3390b8cb6216860193be4b24
|
/cs8s14_lab05/KyleTests05.py
|
71c48666eaa4af491111be03ba547e1f97b002c5
|
[] |
no_license
|
kjorg50/grading_scripts
|
https://github.com/kjorg50/grading_scripts
|
1b04fca339f95b77da3ba660c0772cdb6a0d81b6
|
af0abcb223fb9506be92d2189a3d0c1e81352bf2
|
refs/heads/master
| 2020-12-14T09:02:16.440094 | 2014-05-19T23:13:18 | 2014-05-19T23:13:18 | 18,897,729 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Kyle's tests for grading lab05
import unittest
from lab05Funcs import *
class KyleTests05(unittest.TestCase):
    """Grading tests for lab05Funcs.

    Each function under test must return False for invalid (non-list /
    non-int / non-string) input, and otherwise report the smallest int,
    its index, the longest string, or the index of the shortest string.
    Assertion literals and method names are the grading contract, so the
    code is left untouched.
    """
    # tests for smallestInt
    def test_smallestInt_1(self):
        self.assertEqual( smallestInt([3.141592]), False )
    def test_smallestInt_2(self):
        self.assertEqual( smallestInt('Lannister'), False )
    def test_smallestInt_3(self):
        self.assertEqual( smallestInt([-999,-998,-997,999,1,0,-999,9]), -999 )
    # tests for indexOfSmallestInt
    def test_indexOfSmallestInt_1(self):
        self.assertEqual( indexOfSmallestInt([-3.141592]), False )
    def test_indexOfSmallestInt_2(self):
        self.assertEqual( indexOfSmallestInt(True), False )
    def test_indexOfSmallestInt_3(self):
        self.assertEqual( indexOfSmallestInt([300,30,0,390,50,0,1]), 2 )
    def test_indexOfSmallestInt_4(self):
        self.assertEqual( indexOfSmallestInt([1,2,3,4,5]), 0 )
    # tests for longestString
    def test_longestString_1(self):
        self.assertEqual( longestString([3.141592]), False)
    def test_longestString_2(self):
        self.assertEqual( longestString(42), False )
    # test to make sure they are checking the *length* of each string, not just ASCII ordering
    # in this case if they are only doing listOfStrings[i] > longest, then they would get 'wolf'
    def test_longestString_3(self):
        self.assertEqual( longestString(['aaaaaaaaaa','','university','wolf']), 'aaaaaaaaaa' )
    # tests for indexOfShortestString
    def test_indexOfShortestString_1(self):
        self.assertEqual( indexOfShortestString([[123,12],[-1,-2,-3],[77]]), False )
    def test_indexOfShortestString_2(self):
        self.assertEqual( indexOfShortestString(80085), False )
    # test to make sure they are checking the *length* of each string, not just ASCII ordering
    # in this case if they are only doing listOfStrings[i] > indexOfShortest, then they would get 'a'
    def test_indexOfShortestString_3(self):
        self.assertEqual( indexOfShortestString(['b','red','north','wolf','a']), 0 )
# Run the suite directly; exit=False keeps the interpreter alive afterwards.
if __name__ == '__main__':
    unittest.main(exit=False)
|
UTF-8
|
Python
| false | false | 2,014 |
10,797,547,801,857 |
9e559ea08f7eec1b5a66bc2f49f0cd9a760d4724
|
017ef5766521a71381591a2877138326cfeadc1e
|
/pymediaserv
|
c64b50d8d9c20fe76a09d18f54f8e73dccb7d16e
|
[
"MIT"
] |
permissive
|
intenso/PyMedS-ng
|
https://github.com/intenso/PyMedS-ng
|
e31826f0b5951b18e388c19655805edb8dc1a468
|
5ce56c4207bdc03956847be841496b81e0da25ed
|
refs/heads/master
| 2020-03-26T06:41:14.999416 | 2012-04-08T13:29:12 | 2012-04-08T13:29:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2005, Tim Potter <[email protected]>
# Copyright 2006 John-Mark Gurney <[email protected]>
__version__ = '$Change: 1230 $'
# $Id: //depot/python/pymeds/pymeds-0.5/pymediaserv#2 $
# make sure debugging is initalized first, other modules can be pulled in
# before the "real" debug stuff is setup. (hmm I could make this a two
# stage, where we simulate a namespace to either be thrown away when the
# time comes, or merge into the correct one)
import debug # my debugging module
debug.doDebugging(True) # open up debugging port
# Modules to import, maybe config file or something?
def tryloadmodule(mod):
try:
return __import__(mod)
except ImportError:
#import traceback
#traceback.print_exc()
pass
# ZipStorage w/ tar support should be last as it will gobble up empty files.
# These should be sorted by how much work they do, the least work the earlier.
# mpegtsmod can be really expensive.
modules = [
'shoutcast',
'pyvr',
'dvd',
'ZipStorage',
'mpegtsmod',
]
modmap = {}
for i in modules:
modmap[i] = tryloadmodule(i)
for i in modules:
debug.insertnamespace(i, modmap[i])
from DIDLLite import TextItem, AudioItem, VideoItem, ImageItem, Resource, StorageFolder
from FSStorage import FSDirectory
import os
import os.path
import random
import socket
import string
import sys
from twisted.python import log
from twisted.internet import reactor
def generateuuid():
    """Return a pseudo-random device identifier: ``uuid:`` followed by
    20 random ASCII letters.

    NOTE(review): this is not an RFC 4122 UUID -- collisions are merely
    improbable, which appears to be good enough for advertising this
    UPnP server instance.
    """
    # The original kept a hard-coded debug identifier behind an
    # ``if False:`` guard -- unreachable code, removed.
    return 'uuid:' + ''.join(random.choice(string.letters) for _ in xrange(20))
listenAddr = sys.argv[1]
if len(sys.argv) > 2:
listenPort = int(sys.argv[2])
if listenPort < 1024 or listenPort > 65535:
raise ValueError, 'port out of range'
else:
listenPort = random.randint(10000, 65000)
log.startLogging(sys.stdout)
# Create SSDP server
from SSDP import SSDPServer, SSDP_PORT, SSDP_ADDR
s = SSDPServer()
debug.insertnamespace('s', s)
port = reactor.listenMulticast(SSDP_PORT, s, listenMultiple=True)
port.joinGroup(SSDP_ADDR)
port.setLoopbackMode(0) # don't get our own sends
uuid = generateuuid()
urlbase = 'http://%s:%d/' % (listenAddr, listenPort)
# Create SOAP server
from twisted.web import server, resource, static
from ContentDirectory import ContentDirectoryServer
from ConnectionManager import ConnectionManagerServer
class WebServer(resource.Resource):
def __init__(self):
resource.Resource.__init__(self)
class RootDevice(static.Data):
def __init__(self):
r = {
'hostname': socket.gethostname(),
'uuid': uuid,
'urlbase': urlbase,
}
d = file('root-device.xml').read() % r
static.Data.__init__(self, d, 'text/xml')
root = WebServer()
debug.insertnamespace('root', root)
content = resource.Resource()
mediapath = 'media'
if not os.path.isdir(mediapath):
print >>sys.stderr, \
'Sorry, %s is not a directory, no content to serve.' % `mediapath`
sys.exit(1)
# This sets up the root to be the media dir so we don't have to
# enumerate the directory
cds = ContentDirectoryServer('My Media Server', klass=FSDirectory,
path=mediapath, urlbase=os.path.join(urlbase, 'content'), webbase=content)
debug.insertnamespace('cds', cds)
root.putChild('ContentDirectory', cds)
cds = cds.control
root.putChild('ConnectionManager', ConnectionManagerServer())
root.putChild('root-device.xml', RootDevice())
root.putChild('content', content)
# Purely to ensure some sane mime-types. On MacOSX I need these.
medianode = static.File('pymediaserv')
medianode.contentTypes.update( {
# From: http://support.microsoft.com/kb/288102
'.asf': 'video/x-ms-asf',
'.asx': 'video/x-ms-asf',
'.wma': 'audio/x-ms-wma',
'.wax': 'audio/x-ms-wax',
'.wmv': 'video/x-ms-wmv',
'.wvx': 'video/x-ms-wvx',
'.wm': 'video/x-ms-wm',
'.wmx': 'video/x-ms-wmx',
#'.ts': 'video/mp2t',
'.ts': 'video/mpeg', # we may want this instead of mp2t
'.m2t': 'video/mpeg',
'.m2ts': 'video/mpeg',
'.mp4': 'video/mp4',
#'.mp4': 'video/mpeg',
'.dat': 'video/mpeg', # VCD tracks
'.ogm': 'application/ogg',
'.vob': 'video/mpeg',
#'.m4a': 'audio/mp4', # D-Link can't seem to play AAC files.
})
del medianode
site = server.Site(root)
reactor.listenTCP(listenPort, site)
# we need to do this after the children are there, since we send notifies
s.register('%s::upnp:rootdevice' % uuid,
'upnp:rootdevice',
urlbase + 'root-device.xml')
s.register(uuid,
uuid,
urlbase + 'root-device.xml')
s.register('%s::urn:schemas-upnp-org:device:MediaServer:1' % uuid,
'urn:schemas-upnp-org:device:MediaServer:1',
urlbase + 'root-device.xml')
s.register('%s::urn:schemas-upnp-org:service:ConnectionManager:1' % uuid,
'urn:schemas-upnp-org:device:ConnectionManager:1',
urlbase + 'root-device.xml')
s.register('%s::urn:schemas-upnp-org:service:ContentDirectory:1' % uuid,
'urn:schemas-upnp-org:device:ContentDirectory:1',
urlbase + 'root-device.xml')
# Main loop
reactor.run()
|
UTF-8
|
Python
| false | false | 2,012 |
9,448,928,086,908 |
1409be2df3839b38a62493a9cdf2145cc8af1f79
|
5676e7b5a0c7b0cfef4d842e9192089a517d81b5
|
/code/chap14/14-2.py
|
62250ed0c99301691167851b11e21d22a58d3788
|
[] |
no_license
|
wmx3ng/codetest
|
https://github.com/wmx3ng/codetest
|
317f0dbcc6c195c8d047a7092d530e4d0996c63e
|
ab3782be05d49ac7100c3e0af6eff9f740aa1673
|
refs/heads/master
| 2016-09-06T04:00:44.071431 | 2014-06-28T08:32:56 | 2014-06-28T08:32:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
def test_exception(filename):
try:
fin=open(filename)
for line in fin:
print line
fin.close()
except:
print 'something went wrong.'
def sed(pattern, dest_str, src, dest):
    """Copy *src* to *dest*, replacing every occurrence of *pattern*
    with *dest_str* (akin to ``sed s/pattern/dest_str/g``).

    File errors are reported rather than raised, matching the original
    best-effort contract -- but unlike the original bare ``except``,
    unrelated bugs are no longer silently swallowed.
    """
    try:
        # Context managers close both files on every exit path; the
        # original leaked both handles when an exception fired mid-copy.
        with open(src) as fin, open(dest, 'w') as fout:
            for line in fin:
                fout.write(line.replace(pattern, dest_str))
    except EnvironmentError:
        print('something was wrong')
"""
filename=raw_input("input a filename:")
test_exception(filename)
"""
pattern=raw_input("input a pattern:")
dest_str=raw_input("input a alterstr:")
src=raw_input("input a src file:")
dest=raw_input("input a dest file:")
sed(pattern,dest_str,src,dest)
|
UTF-8
|
Python
| false | false | 2,014 |
14,645,838,508,377 |
49a281e6ba4d3fd62f5187dc25186869682a04d0
|
9c5b62315691b3943ff4facbb2c83f0b2633af4f
|
/Scripts/Red/Team1_Defensive.py
|
9fd5974fd749d1bafb489ad17c046b82662cbf9f
|
[] |
no_license
|
Baralabite/RoboCup-Soccer-Simulator-Old
|
https://github.com/Baralabite/RoboCup-Soccer-Simulator-Old
|
9204dfc7278683b683868064ce9dc66d03296112
|
760c50be2cf0d448d7e27b38c40de9162c3b1497
|
refs/heads/master
| 2021-01-22T15:11:29.139786 | 2013-10-17T00:35:51 | 2013-10-17T00:35:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class RobotAI:
    """Defensive team-1 robot: holds the simulator API handle plus two
    counters, but performs no action per tick."""

    def __init__(self, api):
        # Simulator interface and bookkeeping shared with other strategies.
        self.api = api
        self.cnt, self.dir = 0, 1

    def update(self):
        # Intentionally a no-op: this defensive robot never moves itself.
        pass
|
UTF-8
|
Python
| false | false | 2,013 |
13,872,744,401,712 |
270879737255da3accaae45ccc4a76cb45fe04da
|
6130537afbd7afc6abe11dc58cd9d20c4a5b279a
|
/defer_1.py
|
f037e4c725a6d9d75dc89a4cb26583c497c87e14
|
[] |
no_license
|
liqingqiya/code_repos
|
https://github.com/liqingqiya/code_repos
|
6b4c412bfde1ec187e350a81aaeb4c571cf11d04
|
427b2ba8e359117c9ecd9e9d2ee2f61686d409ac
|
refs/heads/master
| 2021-01-20T08:46:29.390164 | 2014-10-20T08:55:49 | 2014-10-20T08:55:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# !/usr/bin/env python
#-*- coding:utf-8 -*-
__author__ = 'liqing'
from twisted.internet import reactor, defer
def getDummyData(x):
    """Simulate a slow backend request.

    Returns a Deferred that the reactor fires roughly two seconds from
    now with ``x * 3`` as its result -- no blocking involved.
    """
    deferred = defer.Deferred()
    # Ask the event loop to deliver the "result" later instead of
    # computing it synchronously.
    reactor.callLater(2, deferred.callback, x * 3)
    return deferred
def printData(d):
    """Deferred callback: consume the delivered result by printing it."""
    print(d)
print "begin..."
d = getDummyData(3)
print "after d..."
d.addCallback(printData)
print "after callback..."
# manually set up the end of the process by asking the reactor to
# stop itself in 4 seconds time
reactor.callLater(10, reactor.stop)
# start up the Twisted reactor (event loop handler) manually
reactor.run()
|
UTF-8
|
Python
| false | false | 2,014 |
11,467,562,686,971 |
ed0bb1b11583fe504176717f493d5a9a4da0f83a
|
0cdda402da89621611e4356611651d33957e5285
|
/smsbot_django/profiles/admin.py
|
086fc80ef63c1a803b5d8230bed0ff93dd17b088
|
[
"GPL-1.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
audaciouscode/SMSBot
|
https://github.com/audaciouscode/SMSBot
|
a98a7cb75d625f06f07f92215210496b08aae5a3
|
b1546591bd123f8c011a6e094651b65795b51b10
|
refs/heads/master
| 2020-07-04T05:13:22.249557 | 2013-10-29T04:03:10 | 2013-10-29T04:03:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from profiles.models import *
from django.contrib.gis import admin
class UserProfileAdmin(admin.OSMGeoAdmin):
list_display = ('user', 'primary_language', 'active')
admin.site.register(UserProfile, UserProfileAdmin)
class PhoneNumberAdmin(admin.OSMGeoAdmin):
list_display = ('value', 'profile', 'priority', 'active', 'best_time')
admin.site.register(PhoneNumber, PhoneNumberAdmin)
|
UTF-8
|
Python
| false | false | 2,013 |
9,474,697,883,651 |
ea2cfb1287763badfcaa27a0db6ab790785160f6
|
d8d8737e16366a044eb6c4e7f9cec084cfdb5700
|
/tests/cigar_example/cigar_example/restapi/views.py
|
873fe1ca458bd060f5228f1a43c704194603f32c
|
[
"BSD-2-Clause"
] |
permissive
|
st4lk/django-rest-swagger
|
https://github.com/st4lk/django-rest-swagger
|
a5cfc95e9a063ef6e5ddba495c9d6038e35ecb29
|
acc02268d5e82f7f543b26038b4d4db9b6b38a75
|
refs/heads/master
| 2020-12-26T02:09:46.172062 | 2014-11-04T14:26:03 | 2014-11-04T14:26:03 | 26,171,685 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
"""API Views for example application."""
from rest_framework.views import Response, APIView
from rest_framework import viewsets, status
from rest_framework.decorators import action, link, api_view
from rest_framework.generics import ListCreateAPIView, \
RetrieveUpdateDestroyAPIView
from rest_framework_swagger.decorators import serializer_class
from cigar_example.app.models import Cigar, Manufacturer, Country
from .serializers import CigarSerializer, ManufacturerSerializer, \
CountrySerializer, JambalayaSerializer
class CigarViewSet(viewsets.ModelViewSet):
""" Cigar resource. """
serializer_class = CigarSerializer
model = Cigar
def list(self, request, *args, **kwargs):
"""
Return a list of objects.
"""
return super(CigarViewSet, self).list(request, *args, **kwargs)
@action()
def set_price(self, request, pk):
"""An example action to on the ViewSet."""
return Response('20$')
@link()
def get_price(self, request, pk):
"""Return the price of a cigar."""
return Response('20$')
class ManufacturerList(ListCreateAPIView):
"""Get the list of cigar manufacturers from the database."""
model = Manufacturer
serializer_class = ManufacturerSerializer
class ManufacturerDetails(RetrieveUpdateDestroyAPIView):
"""Return the details on a manufacturer."""
model = Manufacturer
serializer_class = ManufacturerSerializer
class CountryList(ListCreateAPIView):
"""Gets a list of countries. Allows the creation of a new country."""
model = Country
serializer_class = CountrySerializer
class CountryDetails(RetrieveUpdateDestroyAPIView):
"""Detailed view of the country."""
model = Country
serializer_class = CountrySerializer
def get_serializer_class(self):
self.serializer_class.context = {'request': self.request}
return self.serializer_class
class MyCustomView(APIView):
"""
This is a custom view that can be anything at all.
It's not using a serializer class, but I can define my own parameters.
Cet exemple démontre l'utilisation de caractères unicode
"""
def get(self, *args, **kwargs):
"""
Get the single object.
param1 -- my param
"""
return Response({'foo': 'bar'})
def post(self, request, *args, **kwargs):
"""
Post to see your horse's name.
horse -- the name of your horse
"""
return Response({'horse': request.GET.get('horse')})
@api_view(['POST', 'GET'])
def jambalaya(request):
"""
This is Sisko's jambalaya
---
serializer: JambalayaSerializer
"""
serializer = JambalayaSerializer(data=request.DATA)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@serializer_class(JambalayaSerializer)
@api_view(['POST'])
def jambalaya2(request):
"""
This is Sisko's jambalaya
"""
serializer = JambalayaSerializer(data=request.DATA)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
UTF-8
|
Python
| false | false | 2,014 |
14,070,312,886,135 |
9c8b06532d486b98c11b66f3b8f18e5676d8b636
|
46ed94f7b29cc5a9d3e14fe7fe743da3f244c1b5
|
/Protocols/tests.py
|
37b0148ebdead8d32f7f7851cbeba010cffcf9b9
|
[] |
no_license
|
yournex/nexPBX
|
https://github.com/yournex/nexPBX
|
6d31e8a42e9f8fa0c4881d99856be418abcfc113
|
f57e768d824950065b3a6e1761d89b5c9006a169
|
refs/heads/master
| 2016-09-11T08:50:25.238098 | 2011-07-23T01:54:31 | 2011-07-23T01:54:31 | 1,257,345 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_plugins(self):
"""Test Plugin system in Protocols"""
import Protocols.plugins as p
self.assertEqual(p.getPlugins()['SIP']['installed'], True )
self.assertEqual(p.getPlugins()['SIP']['installed'], True )
|
UTF-8
|
Python
| false | false | 2,011 |
1,640,677,520,291 |
deee84a174c77505269e73c5fdb708e36281afd2
|
31b6d834a764cc367b54bc4341ca74e1ebd1d124
|
/SRC/file_manager.py
|
d669aff5117e1e2f668f1efa577bb8d8c0f887ac
|
[] |
no_license
|
amalvezin/BEEF
|
https://github.com/amalvezin/BEEF
|
db60a9c36e5f097d5532409339c13e735311922b
|
7cb40189c09f8f663b18d1706e2bd04f3d75fad2
|
refs/heads/master
| 2021-01-18T07:37:39.706421 | 2013-07-25T12:14:34 | 2013-07-25T12:14:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import sys
from static import *
class MyFileManager():
""" Managing my file """
def __init__(self, platform=PLATFORM_WINDOWS):
self.wd = os.getcwd()
print("")
print("Working directory is :" + self.wd)
print("")
#setting repertory serapator
if platform == PLATFORM_LINUX:
directorySep = "/"
else:
directorySep = "\\"
#looking for param directory
self.hierachyDir = self.wd + directorySep + HIERARCHY_DIR
self.listFiles = []
print("changing directory to : " + self.hierachyDir)
try:
self.listFiles = os.listdir(self.hierachyDir)
if len(self.listFiles)>0:
print("Following hierachy files found :")
for file in self.listFiles:
print(file)
print("")
except :
print("Cannot access subdirectory - check installation")
def getHierarchies(self):
return self.listFiles
def getSublines(self, file):
""" read all lines in a specified file """
try:
#setting repertory serapator
if sys.platform == "linux2":
directorySep = "/"
else:
directorySep = "\\"
print("Opening : " + self.hierachyDir + directorySep + file)
fs = open(self.hierachyDir + directorySep + file, 'r')
sublines = []
while 1:
txt = fs.readline()
if txt == "###":
break
else:
sublines.append(txt[0:3])
fs.close
print("Closing : " + self.hierachyDir + directorySep + file)
#print(sublines)
return sublines
except:
print("Cannot open file : " + file)
return []
|
UTF-8
|
Python
| false | false | 2,013 |
19,344,532,723,423 |
9904f8287f1dbf1ada43f80529dace2b456df8df
|
ad80d9b1752209d424e5ebdf2fc8fcf913a3c6ae
|
/homework_02/03.py
|
2906690611dbbc1ec07ac3bd1b4b287dfe8684d1
|
[] |
no_license
|
adkozlov/python-2014
|
https://github.com/adkozlov/python-2014
|
32b884d4f6ccaeb4fb6ac4fecf659e1ae990d365
|
3db9bc116e75e79489dac9f25ebfb3bfc3c9f07f
|
refs/heads/master
| 2021-01-23T06:44:54.511664 | 2014-12-02T19:34:01 | 2014-12-02T19:34:01 | 25,054,848 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
__author__ = "Andrew Kozlov"
__copyright__ = "Copyright 2014, SPbAU"
import sys
import xml.etree.ElementTree as et
def print_element(element, indent):
    """Print *element* prefixed with the *indent* string."""
    print('{0}{1}'.format(indent, element))
def print_with_indent(element, indent, tab):
    """Recursively pretty-print an ElementTree node.

    Emits the tag, then each child subtree one *tab* deeper, then the
    tag again as a crude closing marker.
    """
    print_element(element.tag, indent)
    deeper = indent + tab
    for child in element:
        print_with_indent(child, deeper, tab)
        # NOTE(review): this prints the *parent's* text after every
        # child -- looks like ``child.text`` was intended; behaviour
        # preserved as-is pending confirmation.
        print_element(element.text, deeper)
    print_element(element.tag, indent)
def print_rss(argument, tab='\t'):
    """Parse the XML file named *argument* and pretty-print its tree,
    using *tab* as the per-level indent unit."""
    root = et.parse(argument).getroot()
    print_with_indent(root, '', tab)
if __name__ == '__main__':
print_rss(sys.argv[1], tab=' ')
|
UTF-8
|
Python
| false | false | 2,014 |
10,307,921,520,276 |
daebd5ed11acd17024fa08506e7b2ded4292a61f
|
b080a3842f89dd1b0fa4f43ecb9175a57b12ed8b
|
/adhocracy/controllers/event.py
|
0dbf602fa29ea48cb3aee1b523c40312e8921920
|
[
"AGPL-3.0-only"
] |
non_permissive
|
jedix/adhocracy
|
https://github.com/jedix/adhocracy
|
adb76627390dcea150016a53c85de78884caca84
|
ab4197aa2cd13823e20c7def5c3d4be97d8060ce
|
refs/heads/master
| 2020-12-25T10:08:49.025551 | 2012-10-08T00:16:28 | 2012-10-08T00:16:28 | 6,471,982 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
from pylons import tmpl_context as c
from pylons.i18n import _
from adhocracy import model
from adhocracy.lib import event, helpers as h, tiles
from adhocracy.lib.base import BaseController
from adhocracy.lib.pager import NamedPager
from adhocracy.lib.templating import render
log = logging.getLogger(__name__)
class EventController(BaseController):
def all(self, format='html'):
query = model.meta.Session.query(model.Event)
query = query.order_by(model.Event.time.desc())
query = query.limit(50)
if format == 'rss':
events = query.all()
return event.rss_feed(events,
_('%s News' % h.site.name()),
h.site.base_url(None),
_("News from %s") % h.site.name())
c.event_pager = NamedPager('events', query.all(),
tiles.event.row, count=50)
return render('/event/all.html')
|
UTF-8
|
Python
| false | false | 2,012 |
9,637,906,628,021 |
c42b49738b045d799ed0541ad3fdcdc594cb6dcc
|
044dd4ecae4ca1234811a04c081014ad58863932
|
/mumblr/templatetags/mumblr_tags.py
|
381586539d665f7a1421b0fdd67949cba8d6ee1c
|
[
"MIT"
] |
permissive
|
jimbattin/django-mumblr
|
https://github.com/jimbattin/django-mumblr
|
4bcb73cad0cc934c37b4c2b18a350591053a81a7
|
f92bca675f9216af47a8221b5672b898b5cd8047
|
refs/heads/master
| 2021-01-17T22:44:42.087940 | 2012-11-03T02:05:44 | 2012-11-03T02:05:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.template import Library, Node, TemplateSyntaxError
import re
from mumblr.entrytypes import EntryType
register = Library()
class LatestEntriesNode(Node):
def __init__(self, num, var_name):
self.num = int(num or 10)
self.var_name = var_name
def render(self, context):
context[self.var_name] = list(EntryType.live_entries()[:self.num])
return ''
@register.tag
def get_latest_entries(parser, token):
# Usage:
# {% get_latest_entries as entries %} (default 10 entries)
# (or {% get_latest_entries 7 as entries %} for 7 entries)
# {% for entry in entries %}
# <li>{{ entry.title }}</li>
# {% endfor %}
tag_name, contents = token.contents.split(None, 1)
match = re.search(r'(\d+\s+)?as\s+([A-z_][A-z0-9_]+)', contents)
if not match:
raise TemplateSyntaxError("%r tag syntax error" % tag_name)
num, var_name = match.groups()
return LatestEntriesNode(num, var_name)
|
UTF-8
|
Python
| false | false | 2,012 |
7,146,825,590,598 |
2396b8c21a5010b0124db743461d7a4e5cb2b877
|
1bf43d7515862b1878e835f506f502753c2f89d4
|
/scripts/drive_ifsmiles.py
|
d3cac6d45afd0e3eb8c487f0291e940adda6f85a
|
[] |
no_license
|
nerdymech/VisionProject
|
https://github.com/nerdymech/VisionProject
|
748f6ee9da55939ba22a6a9f51a4a15138d8fc8a
|
af9642526b7f919c79a8e0fbf74a22d49b59e0d3
|
refs/heads/master
| 2021-01-13T01:45:25.360483 | 2014-11-11T00:40:30 | 2014-11-11T00:40:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""
Code from CompRobo
Adela Wee and Michelle Sit"""
import pdb
from datetime import datetime
from webcam_modified_fixed import detectFaces
import rospy
from sensor_msgs.msg import Image
from geometry_msgs.msg import Twist, Vector3
from std_msgs.msg import String
def move_neato():
pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
self.image_pub = rospy.Publisher("image_topic_2",Image)
self.camera_listener = rospy.Subscriber("camera/image_raw",Image, self.convertingNumpy)
rospy.init_node('teleop', anonymous=True)
r = rospy.Rate(10) #run at 10 hz
#if time, add in a dist_buffer =
while not rospy.is_shutdown():
#move robot! this is a state machine
if faces >= 1:
if webcam.avg >= 1:
#Someone's smiling, so drive forwards
velocity_msg = Twist(Vector3((0.2), 0.0, 0.0), Vector3(0.0, 0.0, 0.0))
print (":D Hi there!")
elif webcam.avg <0.8:
#No webcam.avg detected, so drive backwards
velocity_msg = Twist(Vector3(-0.2, 0.0, 0.0), Vector3(0.0, 0.0, 0.0))
print (":( i'll go away now....")
else:
#move forwards at 0.2 m/s
velocity_msg = Twist(Vector3(0.0, 0.0, 0.0), Vector3(0.0, 0.0, 0.0))
print ("is anyone here? waiting for command")
pub.publish(velocity_msg)
r.sleep()
if __name__ == '__main__':
try:
webcam = detectFaces()
move_neato()
except rospy.ROSInterruptException: pass
|
UTF-8
|
Python
| false | false | 2,014 |
3,375,844,316,863 |
14f906b56bfe44065060bece7510ee57ad7bdbee
|
e3f5afc1f1cea165ee99c35ef8192311ce2ae3f0
|
/disorder_regions/disorder/DiseaseListView.py
|
ceeeccb44d543e7bb59b3f24092e33db9e0dda3d
|
[] |
no_license
|
steveandroulakis/disorder-regions
|
https://github.com/steveandroulakis/disorder-regions
|
1122ff5a6b8d3d1ae1cb255fb75c449b920be3af
|
e24ffcf5c41f408cee7bf245476f5cf914977f70
|
refs/heads/master
| 2021-01-10T20:33:25.842541 | 2012-04-04T05:30:27 | 2012-04-04T05:30:27 | 3,269,306 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.views.generic import ListView
from disorder_regions.disorder.models import Proteinmutationdisease
from django.db.models import Count
class DiseaseListView(ListView):
    """List diseases annotated with their distinct associated-protein
    count, ordered most-proteins-first.

    NOTE(review): the queryset expression is built once at
    class-definition time; querysets are lazy so this is evaluated per
    request, but confirm that is the intent.
    """

    # Annotate each disease with the number of distinct UniProt
    # accessions reachable through its mutant proteins.
    proteinmutationdisease_distinct = \
        Proteinmutationdisease.objects.annotate(num_proteins=Count('mutprotein__mutprotein_proteinuniprot_fr__protein_uniprotaccession', distinct=True))
    # Diseases with the most distinct proteins come first.
    proteinmutationdisease_distinct = proteinmutationdisease_distinct.\
        order_by('-num_proteins')
    queryset = proteinmutationdisease_distinct
    context_object_name = "proteinmutationdisease_list"  # default is object_list
    # Very large page size -- presumably meant to fit the whole list on
    # one page; verify against the dataset size.
    paginate_by = 3000
|
UTF-8
|
Python
| false | false | 2,012 |
7,206,955,147,571 |
ade120b21eb96f278f4532908135db8575b10ce0
|
ef03c14a90809758da070e28a97ef27f161eb236
|
/Mar25/kamili.py
|
3c86af0b0b7bf7041c98b4d774446252017af98e
|
[] |
no_license
|
mkeagar/cs598r
|
https://github.com/mkeagar/cs598r
|
0932656e11bf38ea39e58a0e58526613e76ea2fe
|
69a992ca7f71cdee80a5f2c1d0bd4a49a3a5f55c
|
refs/heads/master
| 2020-12-24T15:22:14.576389 | 2014-04-08T19:15:49 | 2014-04-08T19:15:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
for i in xrange(10):
line = raw_input();
print 2**sum(line.count(i)for i in'DFLT')
|
UTF-8
|
Python
| false | false | 2,014 |
3,075,196,594,988 |
83b4421180ea810710e312f9a3a906b5b03c3788
|
7dec3d39f16cd68c7c1afcf057052026a965a989
|
/setup.py
|
ce1766d0be468b5217e5a26b37b19f28532ed32a
|
[
"GPL-3.0-only"
] |
non_permissive
|
knutwalker/namespacetrie
|
https://github.com/knutwalker/namespacetrie
|
ac8e86029227c2dfcaa6b1a25dc204f7d9e8e07f
|
552fc2fe909a0f9c0c683ff90bef827caf4837bc
|
refs/heads/master
| 2020-06-07T19:34:21.914572 | 2012-05-07T19:59:10 | 2012-05-07T19:59:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Namespace Trie
# Copyright (C) 2012 Paul Horn
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__ = '[email protected] (Paul Horn)'
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='namespacetrie',
version='0.1',
description=('A Trie implementation that manages not the single characters'
' but treats its values as typical namespaces.'),
license='GPLv3',
author='Paul Horn',
author_email='[email protected]',
url='https://github.com/knutwalker/namespacetrie',
keywords = "namespace trie",
long_description="""
This is the Namespace Trie.
Namespace Trie is a implementation of a Trie data structure. Unlike typical
implementations, which are splitting its value into single characters,
Namespace Trie treats its values as namespaces. Namespaces are strings that
are delimited by a period. Such namespaces often occur in programming
languages, e.g. Java or Python and may also appear while using some libraries
for programming languages that itself do not offer namespacing (e.g. the
Google Closure library offers a namespace feature for JavaScript). The
Namespace Trie may help you find flaws in the namespace structure.
Namespace Trie is developed for use with Closure Depresolver and may at the
moment not be very useful as there is not standalone interface.
""".strip(),
install_requires=['weakrefset', 'ordereddict'],
package_dir={'namespacetrie': 'namespacetrie'},
packages=['namespacetrie'],
test_suite="namespacetrie.nstrie_test"
)
|
UTF-8
|
Python
| false | false | 2,012 |
206,158,462,975 |
518d4a3215bf7a6c045b5f7b6d03d7a11fc7e7bb
|
09b0eeda4871cdf4f2d7fa023becc77a4fea2b0c
|
/statsmile/handlers/user.py
|
27e8def71c8459dc095301f8aec8ef4247a9d60a
|
[] |
no_license
|
Lardjo/stml1
|
https://github.com/Lardjo/stml1
|
9efaac3e23478095f71b3b176e3dbd70e4d7a663
|
9f9e54a0eda2d19381a0bbf51d09912e675c58d4
|
refs/heads/master
| 2016-08-12T18:41:05.654374 | 2014-02-15T11:12:38 | 2014-02-15T11:12:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
import math
from motor import Op
from tornado.gen import engine
from tornado.web import asynchronous
from .base import BaseHandler
from bson import ObjectId
from statsmile.common import libs
class UserHandler(BaseHandler):
@asynchronous
@engine
def get(self, sid):
black_list = [7, 9, 15]
session = None
if self.current_user:
session = yield Op(self.db['users'].find_one, {'_id': self.current_user['userid']})
user = yield Op(self.db['users'].find_one, {'_id': ObjectId(sid)})
if user is None:
return self.send_error(404)
matches, match = yield [
Op(self.db['matches'].find({"players.account_id": user["steamid32"], "game_mode": {"$nin": black_list},
'players.hero_id': {'$nin': [0]}},
{"radiant_win": 1, "cluster": 1, "duration": 1, "start_time": 1, "game_mode": 1,
"lobby_type": 1, "match_id": 1,
"players": {"$elemMatch": {"account_id": user["steamid32"]}}},
sort=[('start_time', -1)], limit=10).to_list),
Op(self.db['matches'].find({"players.account_id": user["steamid32"],
"game_mode": {"$nin": black_list}, 'players.hero_id': {'$nin': [0]}},
sort=[('start_time', -1)], limit=1).to_list)]
self.render("user.html", title="Dashboard", user=user, session=session, matches=matches,
match=match, heroes=libs.heroes, cluster=libs.cluster, mode=libs.mode)
class UserMatchesHandler(BaseHandler):
@asynchronous
@engine
def get(self, sid):
black_list = [7, 9, 15]
pg = int(self.get_argument('page', 1))
hero = self.get_argument('hero', 'all')
if hero == 'all':
pass
else:
if not int(hero) in libs.heroes.keys():
return self.send_error(404)
session = None
if self.current_user:
session = yield Op(self.db['users'].find_one, {'_id': self.current_user['userid']})
user = yield Op(self.db['users'].find_one, {'_id': ObjectId(sid)})
if user is None:
return self.send_error(404)
pages = yield Op(self.db["matches"].find({"players.account_id": user['steamid32'],
"game_mode": {"$nin": black_list},
'players.hero_id': {'$nin': [0]}}).count)
max_pages = math.ceil(pages / 20)
if pg > max_pages:
return self.send_error(404)
if hero == 'all':
matches = yield Op(self.db["matches"].find(
{"players.account_id": user["steamid32"], "game_mode": {"$nin": black_list},
'players.hero_id': {'$nin': [0]}},
{"game_mode": 1, "start_time": 1, "duration": 1, "cluster": 1,
"match_id": 1, "radiant_win": 1, "lobby_type": 1,
"players": {"$elemMatch": {"account_id": user["steamid32"]}}},
sort=[('start_time', -1)], limit=20).skip((pg-1)*20).to_list)
else:
matches = yield Op(self.db['matches'].find(
{'game_mode': {'$nin': black_list},
'players': {'$elemMatch': {'account_id': user['steamid32'], 'hero_id': int(hero)}}},
{"game_mode": 1, "start_time": 1, "duration": 1, "cluster": 1,
"match_id": 1, "radiant_win": 1, "lobby_type": 1,
'players': {'$elemMatch': {'account_id': user['steamid32']}}},
sort=[('start_time', -1)], limit=20).skip((pg-1)*20).to_list)
self.render("user.html", title="Matches", user=user, session=session, matches=matches, max_pages=max_pages,
page=pg, hero_current=hero, heroes=libs.heroes, cluster=libs.cluster, mode=libs.mode)
class UserHeroesHandler(BaseHandler):
@asynchronous
@engine
def get(self, sid):
session = None
if self.current_user:
session = yield Op(self.db['users'].find_one, {'_id': self.current_user['userid']})
user = yield Op(self.db['users'].find_one, {'_id': ObjectId(sid)})
if user is None:
return self.send_error(404)
self.render("user.html", title="Heroes", user=user, session=session, heroes=libs.heroes)
class UserRecordsHandler(BaseHandler):
@asynchronous
@engine
def get(self, sid):
session = None
if self.current_user:
session = yield Op(self.db['users'].find_one, {'_id': self.current_user['userid']})
user = yield Op(self.db['users'].find_one, {'_id': ObjectId(sid)})
if user is None:
return self.send_error(404)
kills, deaths, assists, gpm = yield [
Op(self.db["matches"].aggregate,
[{"$match": {"players.account_id": user["steamid32"], "game_mode": {"$nin": [7, 9, 15]}}},
{"$project": {"match_id": 1, "radiant_win": 1, "start_time": 1, "players.kills": 1,
"players.account_id": 1, "players.player_slot": 1, "players.hero_id": 1}},
{"$unwind": "$players"},
{"$match": {"players.account_id": user["steamid32"]}},
{"$sort": {"players.kills": -1}},
{"$limit": 1}]),
Op(self.db["matches"].aggregate,
[{"$match": {"players.account_id": user["steamid32"], "game_mode": {"$nin": [7, 9, 15]}}},
{"$project": {"match_id": 1, "radiant_win": 1, "start_time": 1, "players.deaths": 1,
"players.account_id": 1, "players.player_slot": 1, "players.hero_id": 1}},
{"$unwind": "$players"},
{"$match": {"players.account_id": user["steamid32"]}},
{"$sort": {"players.deaths": -1}},
{"$limit": 1}]),
Op(self.db["matches"].aggregate,
[{"$match": {"players.account_id": user["steamid32"], "game_mode": {"$nin": [7, 9, 15]}}},
{"$project": {"match_id": 1, "radiant_win": 1, "start_time": 1, "players.assists": 1,
"players.account_id": 1, "players.player_slot": 1, "players.hero_id": 1}},
{"$unwind": "$players"},
{"$match": {"players.account_id": user["steamid32"]}},
{"$sort": {"players.assists": -1}},
{"$limit": 1}]),
Op(self.db["matches"].aggregate,
[{"$match": {"players.account_id": user["steamid32"], "game_mode": {"$nin": [7, 9, 15]}}},
{"$project": {"match_id": 1, "radiant_win": 1, "start_time": 1, "players.gold_per_min": 1,
"players.account_id": 1, "players.player_slot": 1, "players.hero_id": 1}},
{"$unwind": "$players"},
{"$match": {"players.account_id": user["steamid32"]}},
{"$sort": {"players.gold_per_min": -1}},
{"$limit": 1}])]
self.render("user.html", title="Records", user=user, session=session,
records=(kills['result'], deaths['result'], assists['result'], gpm['result']),
heroes=libs.heroes, cluster=libs.cluster, mode=libs.mode)
|
UTF-8
|
Python
| false | false | 2,014 |
1,949,915,189,775 |
24c3463f49e45db3d56bf13207c3240c40e8c0bd
|
658fb45e8a65ca0dbf87b23ae0b43b025d9b1a80
|
/client/modules/Music.py
|
db39f0fb166b82b1c119b4da94ad9fd18714acc4
|
[
"MIT",
"LicenseRef-scancode-other-permissive"
] |
non_permissive
|
mellonball/raz
|
https://github.com/mellonball/raz
|
526704bc9ae9369dc269737c96a6a831159952fa
|
f7965d4abf2bb8b7fef6f8e83d9ed90c9ae99b2b
|
refs/heads/master
| 2016-09-11T02:41:03.440503 | 2014-05-06T03:02:16 | 2014-05-06T03:02:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
#import hubmusic
WORDS = ["MUSIC"]
#veraIP = rasp.getIP()
def handle(text, mic, profile):
    """Respond to a recognised "music" command.

    Echoes what was heard, then reads the pre-fetched song-options file
    and speaks each entry. Hub-side song selection/playback is still
    TODO (see the commented-out ``hubmusic`` calls in this module).
    """
    # Confirm the trigger word back to the user.
    mic.say("You said " + text)
    # The hub is expected to have written numbered song choices into
    # songoptions.txt; read them back to the user one per line.
    with open("songoptions.txt", "r") as searchfile:
        for line in searchfile:
            mic.say(line.strip())
def isValid(text):
    """Return True when *text* contains the standalone word "music"
    (case-insensitive); also echoes the text for debugging."""
    print(text)
    match = re.search(r'\bmusic\b', text, re.IGNORECASE)
    return match is not None
|
UTF-8
|
Python
| false | false | 2,014 |
15,891,379,020,967 |
d2d33066adbc8d997c1272744f7575f76d35558a
|
1b94fb1b067af3349ac4d5eea1b123457f32362f
|
/3.py
|
f9dee4b7862c5a25e7bf11ac832e2aa437c20896
|
[] |
no_license
|
qszhuan/python-challenge-solution
|
https://github.com/qszhuan/python-challenge-solution
|
173d9d9c50e872ad30e3820b04fea61a9b9a1e14
|
74676d585053c727945b51d08236133d6305c92e
|
refs/heads/master
| 2020-04-09T22:01:34.572844 | 2013-05-24T14:23:10 | 2013-05-24T14:23:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#http://www.pythonchallenge.com/pc/def/equality.html
import urllib2
import re
resp = urllib2.urlopen('http://www.pythonchallenge.com/pc/def/equality.html').read()
start_index = resp.rindex('<!--')
print ''.join(re.findall('[^A-Z][A-Z]{3}([a-z])[A-Z]{3}[^A-Z]', resp[start_index:]))
|
UTF-8
|
Python
| false | false | 2,013 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.