__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
12,077,448,081,425 |
1b326be71daa74bec022dd2864e4b6709a58dc90
|
0fbb44b3ac42009f878e3b9460eeeacb371d3c3f
|
/achernyakov/core/forms.py
|
ada6eb0f08e51aed098836a1f03d289715da03b0
|
[] |
no_license
|
bobrynya/achernyakov
|
https://github.com/bobrynya/achernyakov
|
f6b93ffbfb426cef14f8ab120174668f28560e2e
|
bf62c446addd9619ed1afc96243a4e6fb03cf74e
|
refs/heads/master
| 2020-05-18T01:50:00.587148 | 2013-12-17T13:11:36 | 2013-12-17T13:11:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django import forms
from django.utils.translation import ugettext as _
class LoginForm(forms.Form):
    """Username/password form for the site's login view."""
    # NOTE: field declaration order defines rendering order — keep username first.
    username = forms.CharField(
        min_length=4,
        max_length=30,
        label=_("Username"),
    )
    password = forms.CharField(
        min_length=5,
        widget=forms.PasswordInput(),
        label=_("Password"),
    )
|
UTF-8
|
Python
| false | false | 2,013 |
3,959,959,856,758 |
a27deebb64143334ef7b2544f25397d54855dbd5
|
9b34b4bb1dccfd4442f0b758bcef3834b721f151
|
/news/models.py
|
0f7ce8d053765cfa17f8bfdfd8ecf71eb4152a1a
|
[] |
no_license
|
marekblaszczyk/finance
|
https://github.com/marekblaszczyk/finance
|
3ce3df50ae45fe221eda46c33df4e172cd9f87e0
|
d0fe89b2a82074ecc71030c11230a65fefff6009
|
refs/heads/master
| 2019-07-01T16:47:00.320473 | 2011-09-18T21:31:37 | 2011-09-18T21:31:37 | 1,495,369 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.db import models
from datetime import datetime
class Reklama300x250(models.Model):
    """Singleton row holding the raw markup for the 300x250 ("rectangle") ad slot."""
    class Admin:
        pass
    # raw HTML/markup of the ad, rendered verbatim by the templates
    reklama = models.TextField(verbose_name="Reklama rectangle")
    class Meta:
        verbose_name = "Reklama na sg rectangle"
        verbose_name_plural = "Reklamy na sg rectangle"
    def save(self, *args, **kwargs):
        # Force a fixed primary key so at most one row ever exists.
        # Accept and forward *args/**kwargs so Django-internal calls such as
        # save(force_insert=...) or save(using=...) no longer raise TypeError.
        self.id = 1
        super(Reklama300x250, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        # Deliberately a no-op: the singleton row must never be removed.
        pass
    def __str__(self):
        return "reklama"
    def __unicode__(self):
        return "reklama"
class PromoProduct(models.Model):
    """Singleton pointer to the currently promoted product (by external id)."""
    class Admin:
        pass
    promo_id = models.IntegerField()
    class Meta:
        verbose_name = "Produkt promo"
        verbose_name_plural = "Produkty promo"
    def save(self, *args, **kwargs):
        # Force a fixed primary key so at most one row ever exists; forward
        # *args/**kwargs so Django-internal save(force_insert=...)/save(using=...)
        # calls no longer raise TypeError.
        self.id = 1
        super(PromoProduct, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        # Deliberately a no-op: the singleton row must never be removed.
        pass
    def __str__(self):
        # BUG FIX: __str__ must return a string; promo_id is an IntegerField
        # value, so returning it directly raised TypeError.
        return str(self.promo_id)
    def __unicode__(self):
        # NOTE(review): returns the literal text "promo_id", not the value —
        # possibly meant to mirror __str__; confirm intent before changing.
        return "promo_id"
class Category(models.Model):
    """News category with a URL slug and an optional icon image."""
    name = models.CharField(max_length=255, verbose_name='Nazwa Kategorii')
    # unique slug — used in URLs to identify the category
    slug = models.SlugField(max_length=255, unique=True, verbose_name='Odnosnik')
    icon = models.ImageField(upload_to='icons', verbose_name='Ikonka Kategorii', blank=True)
    class Meta:
        verbose_name = "Kategoria"
        verbose_name_plural = "Kategorie"
    def __str__(self):
        return self.name
    def __unicode__(self):
        return self.name
class News(models.Model):
    """A news article that may belong to several categories."""
    category = models.ManyToManyField(Category, verbose_name='Kategorie')
    title = models.CharField(max_length=255, verbose_name='Tytul')
    # unique slug — drives get_absolute_url() below
    slug = models.SlugField(max_length=255, unique=True, verbose_name='Odnosnik')
    text = models.TextField(verbose_name='Tresc')
    date = models.DateTimeField(verbose_name='Data dodania')
    # presumably the article's wykop.pl link — optional; verify against templates
    wykop = models.CharField(max_length=255, verbose_name='Wykop', blank=True)
    class Meta:
        verbose_name = "Wiadomosc"
        verbose_name_plural = "Wiadomosci"
    def __str__(self):
        return self.title
    def __unicode__(self):
        return self.title
    def get_absolute_url(self):
        # slug is unique, so this path identifies the article
        return '/news/' + self.slug + '/'
class Comment(models.Model):
    """A reader comment, linked to its section by a free-text category label."""
    nick = models.CharField(max_length=255, verbose_name="Nick")
    text = models.TextField(max_length=4096, verbose_name="Treść")
    ip_address = models.IPAddressField(blank=True, null=True, verbose_name="Adres IP")
    # NOTE(review): auto_now_add takes precedence over default= — Django ignores
    # the default when auto_now_add is set; confirm which behaviour was intended.
    published = models.DateTimeField(auto_now_add=True, default=datetime.now, verbose_name="Data dodania")
    # False hides the comment from listings without deleting it
    status = models.BooleanField(verbose_name="Wyświetlany?", default=True)
    # free-text label rather than a foreign key — presumably matches a
    # Category name; verify against the views that set it
    which_category = models.CharField(max_length=100, verbose_name="Kategoria")
    class Meta:
        verbose_name = "Komentarz"
        verbose_name_plural = "Komentarze"
        # newest comments first
        ordering = ['-published']
    def __unicode__(self):
        return self.nick
class Banned(models.Model):
    """An IP address blocked from posting comments, with the reason recorded."""
    ip = models.IPAddressField(unique=True, verbose_name="Adres IP", help_text=u'Adres IP, który będzie mieć zablokowaną możliwość dodaania komentarzy')
    reason = models.CharField(max_length=255, verbose_name="Powód", help_text=u'Powód zablokowania adresu')
    # NOTE(review): "Data utorzenia" looks like a typo for "Data utworzenia";
    # it is a user-visible string, so it is left unchanged here.
    created = models.DateTimeField(auto_now_add=True, verbose_name="Data utorzenia")
    # bumped automatically on every save
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        verbose_name = "Zbanowany adres"
        verbose_name_plural = "Zbanowane adresy"
    def __unicode__(self):
        return self.ip
|
UTF-8
|
Python
| false | false | 2,011 |
14,705,968,028,751 |
c830ed2f7b588a9f4a5723aa4c8ffee1b37d39f2
|
d18ed72d6f8d27dd8a13eab5c6366f9dca48aa6b
|
/espresso/branches/experiment-to-support-matter/tests/calcs/test_import.py
|
787bf7b7b0abfbde3f4b4319e0ce770e22a01c39
|
[] |
no_license
|
danse-inelastic/AbInitio
|
https://github.com/danse-inelastic/AbInitio
|
6f1dcdd26a8163fa3026883fb3c40f63d1105b0c
|
401e8d5fa16b9d5ce42852b002bc2e4274afab84
|
refs/heads/master
| 2021-01-10T19:16:35.770411 | 2011-04-12T11:04:52 | 2011-04-12T11:04:52 | 34,972,670 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Alex Dementsov
# California Institute of Technology
# (C) 2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Dummy test: tries to import classes from matter
from matter.Structure import Structure
from matter.Lattice import cosd, Lattice
from matter.SymmetryUtilities import equalPositions
__date__ = "$Mar 31, 2011 1:29:06 PM$"
|
UTF-8
|
Python
| false | false | 2,011 |
5,411,658,811,917 |
168e016a6f63b8545a382f30e68f8c3c8df8f98f
|
a85ce9fdb57b8d32ccef69c54b7e5cfbd98e6d15
|
/article_coding/open_calais_article_coder.py
|
4d442702c88a3efd497c214766bb207f5333ed5e
|
[
"LGPL-3.0-or-later",
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"LGPL-3.0-only",
"GPL-3.0-only"
] |
non_permissive
|
byouloh/sourcenet
|
https://github.com/byouloh/sourcenet
|
5ccce89539b77ba4e7e280944aac9209c9aa40af
|
6b86b6e58ee45866734e3697b5402e249eca60df
|
refs/heads/master
| 2021-01-15T20:18:50.873274 | 2014-12-02T05:42:45 | 2014-12-02T05:42:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import unicode_literals
'''
Copyright 2010-2014 Jonathan Morgan
This file is part of http://github.com/jonathanmorgan/sourcenet.
sourcenet is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
sourcenet is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with http://github.com/jonathanmorgan/sourcenet. If not, see http://www.gnu.org/licenses/.
'''
'''
This code file contains a class that implements functions for interacting with
the online database of Newsbank. It mostly includes methods for building and
connecting to URLs that represent issues of newspapers and articles within an
issue, and then Beautiful Soup code for interacting with the HTML contents of
those pages once they are retrieved. The actual work of retrieving pages is
outside the scope of this class.
'''
#================================================================================
# Imports
#================================================================================
# parent abstract class.
from sourcenet.article_coding.article_coder import ArticleCoder
#================================================================================
# Package constants-ish
#================================================================================
#================================================================================
# OpenCalaisArticleCoder class
#================================================================================
# define OpenCalaisArticleCoder class.
class OpenCalaisArticleCoder( ArticleCoder ):
    '''
    Helper for collecting articles out of NewsBank.  Implements methods for
    building and connecting to URLs that represent issues of newspapers and
    the articles inside an issue, plus Beautiful Soup code for working with
    the HTML of those pages once retrieved.  Actually fetching the pages is
    outside the scope of this class.
    Preconditions: depends on cookielib, datetime, os, re, sys, urllib2 and
    BeautifulSoup 3.
    '''

    #============================================================================
    # Constants-ish
    #============================================================================

    # status value returned on success
    STATUS_SUCCESS = "Success!"

    #============================================================================
    # Instance variables
    #============================================================================

    # class-level debug switch; flip to True to echo debug messages
    debug = False

    #============================================================================
    # Instance methods
    #============================================================================

    def __init__( self ):
        # delegate all initialisation to the parent coder
        super( OpenCalaisArticleCoder, self ).__init__()
    #-- END method __init__() --#

    def output_debug( self, message_IN ):
        '''
        Accepts a message string and prints it when debugging is switched on;
        otherwise does nothing.
        '''
        # print only for a non-empty message while debug is enabled
        if ( message_IN ) and ( self.debug == True ):
            print( message_IN )
        #-- END message/debug check --#
    #-- END method output_debug() --#
#-- END class OpenCalaisArticleCoder --#
|
UTF-8
|
Python
| false | false | 2,014 |
9,216,999,824,033 |
3f1429713d38f079fae0cfe46034cad5e312028d
|
5ac1565005aa7edf08d7ca64f31d8cedf7ad30ef
|
/Entropy/scripts/counter.py
|
1ed9d5ef0bc53e6c09131622806c8bc0a65a8f22
|
[
"GPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
ulikoehler/entropy-analysis-tools
|
https://github.com/ulikoehler/entropy-analysis-tools
|
9b1ea4e915bdc074296d21cf7f9dc99c72a2016c
|
c4e45a8662bf64a716a6c5427db768911713bee7
|
refs/heads/master
| 2016-09-06T17:47:07.827256 | 2012-12-04T16:09:33 | 2012-12-04T16:09:33 | 183,564 | 6 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""Count how many times each distinct line occurs in the input file and write
"<key><separator><count>" rows, sorted by key, to the output file.

(Python 2 script: the ``long`` key-type option relies on the py2 builtin.)
"""
from __future__ import with_statement
from optparse import OptionParser
from decimal import Decimal
parser = OptionParser()
parser.enable_interspersed_args()
parser.add_option("-i","--in",dest="input",help="Data input file")
parser.add_option("-o", "--out", dest="output",help="Statistics output file")
parser.add_option("-s","--separator",dest="separator",help="CSV separator")
parser.add_option("--int",action="store_true",dest="int",help="Use int as key type (instead of string)")
parser.add_option("-l","--long",action="store_true",dest="long",help="Use long as key type (instead of string)")
# BUG FIX: help text said "Use string as key type" (copy-paste error).
parser.add_option("-f","--float",action="store_true",dest="float",help="Use float as key type (instead of string)")
parser.add_option("-d","--decimal",action="store_true",dest="decimal",help="Use Decimal as key type (instead of string)")
#Set the defaults
parser.set_defaults(output="counts.txt",separator=",")
#Do the actual parsing
(options,args) = parser.parse_args()
#Pick the key converter: the bare constructors replace the original
#`lambda x: int(x)` wrappers, which only added call overhead.
if options.int:
    fconv = int
elif options.long:
    fconv = long
elif options.float:
    fconv = float
elif options.decimal:
    fconv = Decimal
else: #string: keep the raw line as the key
    fconv = lambda x: x
#occurrences dictionary: key -> count
occ = {}
#read the data
with open(options.input) as fin:
    for line in fin:
        # BUG FIX: line[:-1] chopped the last character of a final line that
        # lacks a trailing newline; rstrip("\n") removes only the newline.
        key = fconv(line.rstrip("\n"))
        occ[key] = occ.get(key, 0) + 1
#Write the data into the output file, sorted by key.
#BUG FIX: the original did `occ.keys().sort()`, which sorts a throwaway list
#and left the output in arbitrary dict order despite the "Sort" comment.
with open(options.output, "w") as fout:
    for k in sorted(occ):
        fout.write(str(k) + options.separator + str(occ[k]) + "\n")
|
UTF-8
|
Python
| false | false | 2,012 |
4,355,096,868,256 |
a53d489c578a3a28312856f9b8bcc24a092c7c60
|
94bf5cf95865927ccb62d8590853bdccb581e6b3
|
/app/spiders.py
|
28eb523fdf1c30adca54a561c67545a6b490c183
|
[] |
no_license
|
madsaylor/parser_test
|
https://github.com/madsaylor/parser_test
|
15ddf3881bf3d55a48df0d8d28f070d7f2666c90
|
1b844ff343edabf2a84774765e28a77d1c6a122b
|
refs/heads/master
| 2020-06-03T13:04:29.948360 | 2014-01-06T00:45:33 | 2014-01-06T00:45:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from grab.spider import Spider, Task
import logging
import csv
from collections import OrderedDict
class EdamollSpider(Spider):
    """Grab-based spider that walks edamoll.ru category -> pagination ->
    product pages and appends one CSV row per product.  (Python 2: xrange,
    str.encode on the way out.)"""
    initial_urls = ['http://edamoll.ru']
    domain = 'http://edamoll.ru'
    def prepare(self):
        # NOTE(review): the file handle is never closed explicitly — buffered
        # rows could be lost if the process dies before interpreter shutdown.
        self.result_file = csv.writer(open('result.csv', 'w'))
        self.result_counter = 0
    def task_initial(self, grab, task):
        # one 'paginate' task per top-level category link on the front page
        for s in grab.doc.select('//*[@class="item-text "]//a/@href'):
            yield Task('paginate', url=grab.config['url'] + s.text())
    def task_paginate(self, grab, task):
        try:
            # the last numbered pager link tells us how many pages exist
            page = grab.doc.select('//div[@class="nav_top"]//div[@class="nav-pages flr"]/a[not(@class)]')[-1]
            pages_count = int(page.text())
            page_url = self.domain + page.attr('href').split('=')[0]
            for i in xrange(pages_count):
                yield Task('product_list', url=page_url + '=' + str(i + 1))
        except IndexError:
            # no pager element at all: the category fits on a single page
            yield Task('product_list', url=grab.response.url)
    def task_product_list(self, grab, task):
        # one 'product' task per catalog entry on this listing page
        for s in grab.doc.select('//div[@class="catalog_item"]/a/@href'):
            yield Task('product', url=self.domain + s.text())
    def task_product(self, grab, task):
        # integer and fractional price parts live in separate HTML elements,
        # so the price is reassembled as "<int>.<decimal>" before float()
        product = {
            'name' : grab.doc.select('//h3[@class="catalog-element-name"]').text(),
            'price' : float(grab.doc.select('//*[@class="item_price price"]/text()').text() + '.' + grab.doc.select('.//*[@class="decimal"]').text()),
            'image_url' : self.domain + grab.doc.select('//div[@class="catalog-element-image"]/img/@src').text()
        }
        # breadcrumb trail becomes category0, category1, ... columns
        breadcrumbs = grab.doc.select('//ul[@class="breadcrumb-navigation"]//a')
        for i, crumb in enumerate(breadcrumbs):
            product['category'+str(i)] = crumb.text()
        # sort keys so CSV columns come out in a stable order; the float price
        # is filtered out of the row below (only string values are written)
        result = OrderedDict(sorted(product.items(), key=lambda t: t[0]))
        self.result_file.writerow([v.encode('utf-8') for v in result.values() if not type(v) is float ])
        self.result_counter += 1
if __name__ == '__main__':
    # Run the crawl with two worker threads and verbose logging.
    logging.basicConfig(level=logging.DEBUG)
    bot = EdamollSpider(thread_number=2)
    bot.run()
|
UTF-8
|
Python
| false | false | 2,014 |
3,478,923,537,933 |
b208de1444ba76f627d2bec7d884821bd569ea94
|
12a8845697b630f9654502b06d31ed0180284936
|
/fb/modules/confluence.py
|
fae7fd1c459ad0daea9d3dc907d666fb2be61bbf
|
[] |
no_license
|
jburgar/Fritbot
|
https://github.com/jburgar/Fritbot
|
6a2444b307269514e2a1b1207809e25a5f8336a9
|
f333282eeeae4af5fb59e2ad5701909c88772c33
|
refs/heads/master
| 2021-01-18T07:19:20.353798 | 2013-06-26T22:57:50 | 2013-06-26T22:57:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import xmlrpclib
from twisted.python import log
import zope.interface
import config
import fb.intent as intent
from fb.modules.base import IModule, response
class ConfluenceModule:
    """Fritbot module exposing a 'confluence' command that searches a
    Confluence instance through its XML-RPC API."""
    zope.interface.implements(IModule)

    name = "Confluence"
    description = "Functionality for searching Confluence"
    author = "Michael Pratt ([email protected])"

    def register(self):
        # Hook the 'confluence' command into the bot's intent service.
        intent.service.registerCommand("confluence", self.confluence, self, "Confluence Search", "Returns confluence search result, use 'confluence more' to find up to 5 results.")

    @response
    def confluence(self, bot, room, user, args):
        """Search Confluence for the given words; 'confluence more <words>'
        returns up to 5 results instead of 1."""
        more = False
        results = 1
        # guard args before indexing: an empty command raised IndexError
        if args and args[0] == "more":
            more = True
            results = 5
            args = args[1:]
        try:
            s = xmlrpclib.Server(config.CONFLUENCE["url"])
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        except Exception:
            return "Confluence URL failure, cannot search Confluence. Contact your {0} admin.".format(config.CONFIG['name'])
        try:
            token = s.confluence1.login(config.CONFLUENCE["username"], config.CONFLUENCE["password"])
        except Exception:
            return "Login failure, cannot search Confluence. Contact your {0} admin.".format(config.CONFIG['name'])
        search = s.confluence1.search(token, ' '.join(args), results)
        if len(search) > 0:
            if more:
                # one "n: title - url" line per hit
                lines = []
                for line, l in enumerate(search, 1):
                    lines.append("{0}: {1} - {2}".format(line, unicode(l['title']), unicode(l['url'])))
                return '\n'.join(lines)
            else:
                return "{0} - {1}".format(unicode(search[0]['title']), unicode(search[0]['url']))
        else:
            return "Sorry {0}, Confluence reports no results for that query.".format(user['nick'])
# Module-level singleton instance picked up by the bot's module loader.
module = ConfluenceModule()
|
UTF-8
|
Python
| false | false | 2,013 |
16,398,185,175,537 |
20af3f67fb33ee905acbe269c65173f1c00bd584
|
a68b933c166d32967fbb0bcf8f99fd1da1628782
|
/src/object/stack.py
|
41455fab7cae0f312d292dd64c714205f4485686
|
[] |
no_license
|
berenm/gentulu
|
https://github.com/berenm/gentulu
|
4fc144b00f582b32a631bf1f5e4b4de3eb5c3e6e
|
6835efee74cadfea2cbb7e8df4699bf2c1bc73e7
|
refs/heads/master
| 2016-09-05T16:53:43.164782 | 2012-03-26T20:05:37 | 2012-03-26T20:05:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Distributed under the Boost Software License, Version 1.0.
# See accompanying file LICENSE or copy at http://www.boost.org/LICENSE
class stack:
    """Naming context for a generated binding: library / file / extension /
    category, plus an optional function name."""

    def __init__(self, library_name, file_name, extension_name, category_name, function_name=None):
        # function_name stays None for category-level (non-function) contexts
        self.library_name = library_name
        self.file_name = file_name
        self.extension_name = extension_name
        self.category_name = category_name
        self.function_name = function_name

    def values(self):
        """Return the context names in order, omitting any that are None."""
        ordered = (
            self.library_name,
            self.file_name,
            self.extension_name,
            self.category_name,
            self.function_name,
        )
        return [name for name in ordered if name is not None]
|
UTF-8
|
Python
| false | false | 2,012 |
18,992,345,394,691 |
99c1ec97681bcfb5cf1e8b955b994cc9d04a10f2
|
ae72bd630168ddaf1ec27ca871988bf1674e1b25
|
/pymol/modules/pymol/opengl/glu/__init__.py
|
dc5e56073a327a637e8d14c43a90f7383e28fb7e
|
[
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0",
"LicenseRef-scancode-free-unknown"
] |
non_permissive
|
Almad/pymol
|
https://github.com/Almad/pymol
|
2cfd957fdec4de04e9342832ca76b5ef3ed52eac
|
2d929ff774e4a258c6d51dfd584d60f284808ef4
|
refs/heads/master
| 2020-04-06T03:43:44.944248 | 2009-10-29T21:22:08 | 2009-10-29T21:22:08 | 386,874 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# $Id$
import sys
from pymol import opengl
# Choose the GLU binding: the Numeric-aware extension when PyMOL was built
# with Numeric support, the plain one otherwise.
if opengl._numeric:
    from Numeric import ArrayType
    try:
        import _glu_num
        _glu = _glu_num
    except ImportError:
        # Numeric build requested but _glu_num is unavailable — fall back.
        import _glu
    except SystemError:
        import _glu
else:
    import _glu
from gluconst import *
from pymol.opengl.gl import Error, CarefulFunction
# Snapshot the raw C bindings, export them from this package, and build a
# parallel dict of argument-checking wrappers for careful() to install.
origdict = _glu.__dict__.copy()
sys.modules['pymol.opengl.glu'].__dict__.update(origdict)
carefuldict = {}
for name, func in origdict.items():
    if callable(func):
        carefuldict[name] = CarefulFunction(name, func)
# These do the same sorts of things that the C versions would
def careful():
    """Install the argument-checking GLU wrappers into this module's namespace."""
    cd = carefuldict.copy()
    # Leave the module's existing 'error' member untouched.
    # (dict.has_key() was removed in Python 3; `in` works in both 2 and 3.)
    if 'error' in cd: del cd['error']
    sys.modules['pymol.opengl.glu'].__dict__.update(cd)
import string
def fast():
    """Restore the raw (unchecked) GLU entry points into this module's namespace."""
    cd = origdict.copy()
    # Leave the module's 'error' and 'glconst' members untouched.
    # (dict.has_key() was removed in Python 3; `in` works in both 2 and 3.)
    if 'error' in cd: del cd['error']
    if 'glconst' in cd: del cd['glconst']
    sys.modules['pymol.opengl.glu'].__dict__.update(cd)
|
UTF-8
|
Python
| false | false | 2,009 |
2,095,944,077,717 |
0a2c55592676d4e89a920576dadcada886ba7716
|
f9f466eb655e14f8a7a3bdb0ffa64efe3eea7d80
|
/python/phonenumbers/data/region_TD.py
|
b4dc32fcc2cb14b49fb30edf54327dfaf2b471c5
|
[] |
no_license
|
kijun/python-phonenumbers
|
https://github.com/kijun/python-phonenumbers
|
d3d5e20a3dce628edb6a6ccfab5aafc74713cf9c
|
41798f62075790696c70fa6da46fd9110caeec9f
|
refs/heads/master
| 2020-12-25T04:19:02.137384 | 2011-05-06T10:45:42 | 2011-05-06T10:45:42 | 1,710,606 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Auto-generated file, do not edit by hand. TD metadata"""
from phonenumbers import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Phone-number metadata for Chad (TD), country calling code +235.
# NOTE(review): this file is auto-generated (see module docstring) — change
# the upstream metadata and regenerate rather than editing patterns here.
PHONE_METADATA_TD = PhoneMetadata(id='TD', country_code=235, international_prefix='00|16',
    general_desc=PhoneNumberDesc(national_number_pattern=u'[2679]\d{7}', possible_number_pattern=u'\d{8}'),
    fixed_line=PhoneNumberDesc(national_number_pattern=u'22(?:[3789]0|5[0-5]|6[89])\d{4}', possible_number_pattern=u'\d{8}', example_number=u'22501234'),
    mobile=PhoneNumberDesc(national_number_pattern=u'(?:6(?:3[0-7]|6\d)|77\d|9(?:5[0-4]|9\d))\d{5}', possible_number_pattern=u'\d{8}', example_number=u'63012345'),
    toll_free=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    premium_rate=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    shared_cost=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    personal_number=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    voip=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    pager=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    uan=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern=u'NA', possible_number_pattern=u'NA'),
    preferred_international_prefix=u'00',
    number_format=[NumberFormat(pattern='(\d{2})(\d{2})(\d{2})(\d{2})', format=u'\\1 \\2 \\3 \\4')])
|
UTF-8
|
Python
| false | false | 2,011 |
5,746,666,277,166 |
d9775b5f891898b9a67bfcda6ed78bd607637347
|
aab187b369ef60fffeded975515e7bbbfa909614
|
/listcrawler.py
|
784013783aaaa0483e5599c0794921ccf801059a
|
[
"MIT"
] |
permissive
|
commonlisp/bearded-tribble
|
https://github.com/commonlisp/bearded-tribble
|
830d1e82c1cceec32facd41d4222f1fb79296e77
|
25e0a938a2fd8c1f177d2e11c6dd907dab6e97c1
|
refs/heads/master
| 2016-09-05T21:41:45.747558 | 2014-12-04T15:42:57 | 2014-12-04T15:42:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from pyspark import SparkContext
from lxml import etree, html
from collections import Counter
import time
import urllib2
import sys
import datetime
today = datetime.date.today()
# 'YYYYMM' archive buckets for every complete month of the current year
# (January up to, but not including, the current month)
dates = ['%d%02d'%(today.year, month) for month in range(1,today.month)]
baseurl = "http://mail-archives.apache.org/mod_mbox/"
# number of attempts per page fetch before giving up
maxTries = 5
def cleanName(t):
    """Strip the trailing ' (JIRA)' marker the mail archive puts on bot authors."""
    if not t.endswith('(JIRA)'):
        return t
    # drop the 6-char marker plus the separating space before it (7 chars total)
    return t[:-7]
def scanUrl(listname, date, n):
    """Fetch page *n* of the mbox date index for *listname*/*date* and return
    (subjects, authors) lists scraped from it.

    Retries up to maxTries times on IOError; returns ([], []) when every
    attempt fails or a non-IO error occurs.  (Python 2: print statements,
    map() returns lists.)
    """
    url = baseurl + listname +date+".mbox/date?"+str(n)
    for i in range(maxTries):
        try:
            doc = html.parse(url)
            r = doc.getroot()
            # one entry per message row in the archive listing
            subjects = map(lambda x:x[0].text, r.find_class('subject'))
            authors = map(lambda x:cleanName(x.text), r.find_class('author'))
            return subjects, authors
        except IOError as e:
            # transient network failure: back off briefly and retry
            print 'Retry %d for url %s: exception %s' % (i, url, str(e))
            time.sleep(0.1)
            continue
        except Exception as e:
            # anything else (e.g. a parse error) is fatal for this page
            print 'For url %s, exception %s' % (url, str(e))
            break
    return [], []
def participation(listname):
    """Return {month: Counter(author -> message count)} for each bucket in
    *dates*, paging through the list archive until an empty page is hit."""
    c = {}
    for d in dates:
        subjects = []
        authors = []
        n = 0
        while True:
            s, a = scanUrl(listname, d, n)
            # an empty subject list means we ran past the last page (or the
            # fetch ultimately failed) — stop paging this month
            if s == []:
                break
            n += 1
            # NOTE(review): subjects are collected but never used afterwards
            subjects.extend(s)
            authors.extend(a)
        c[d] = Counter(authors)
    return c
def counts(dateSeries):
    """Collapse {month: Counter} into a list of (month, distinct-author-count).

    len(counter) already gives the number of distinct keys; the original
    len(itm[1].items()) built a throwaway list just to measure it.
    Returns a list, matching the Python 2 map() behaviour callers expect.
    """
    return [(month, len(counter)) for month, counter in dateSeries.items()]
def listindex():
    """Scrape the mod_mbox front page and return a list of
    {'commit': href, 'dev': href} dicts, one per project that exposes both a
    commits and a dev mailing list.  (Python 2: filter() returns lists.)"""
    doc = html.parse(baseurl)
    r = doc.getroot()
    nodes = r.cssselect('td ul li')
    index = []
    for n in nodes:
        links = n.cssselect('ul li a[href]')
        if links is None:
            continue
        cmt = filter(lambda elem: elem.text == "commits", links)
        dev = filter(lambda elem: elem.text == "dev", links)
        # keep only projects that have both list types
        if len(cmt) > 0 and len(dev) > 0:
            index.append({'commit': cmt[0].get('href'), 'dev': dev[0].get('href')})
    return index
def processProject(linkPair):
    """Crawl one project's dev and commit lists and return their per-month
    distinct-participant counts, each sorted by month key."""
    dev = linkPair['dev']
    discussParticipants = counts(participation(dev))
    commitParticipants = counts(participation(linkPair['commit']))
    # sort the (month, count) pairs chronologically by their 'YYYYMM' key
    devSorted = sorted(discussParticipants, key=lambda x:x[0])
    commitSorted = sorted(commitParticipants, key=lambda x:x[0])
    print "Participation (discuss " + dev + "):\n" + str(devSorted)
    print "Participation (commit):\n" + str(commitSorted)
    return {'dev': devSorted, 'commit': commitSorted }
if __name__ == "__main__":
    # Fan the per-project crawl out across Spark workers and store the
    # results as a text file in HDFS.
    sc = SparkContext(appName="ListCrawler")
    if len(sys.argv) < 2:
        print "Usage: %s [hdfs namenode]" % (sys.argv[0])
        exit(1)
    namenode_host = sys.argv[1]
    links = listindex()
    distLinks = sc.parallelize(links)
    distLinks.map(processProject).saveAsTextFile('hdfs://' + namenode_host + '/links.txt')
    sc.stop()
|
UTF-8
|
Python
| false | false | 2,014 |
10,436,770,553,794 |
7863e041b9aea07c96e4d217078b290a269959ee
|
fe82cf8480c5b555873c3bdbe125aa505f94e4d4
|
/src/purchases/views.py
|
1aee715da3c51bb345acec4f0b0741722b374d39
|
[] |
no_license
|
drBradley/shiny-ninja
|
https://github.com/drBradley/shiny-ninja
|
fc124fb361841a0ac1cde83ee23df4f62284bf40
|
0875d0df66575f9dbeb814be6fb7b8846cc1d7b8
|
refs/heads/master
| 2021-01-10T20:24:48.992429 | 2014-01-09T11:25:16 | 2014-01-09T11:25:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from decimal import Decimal
from django.core.context_processors import csrf
from django.db.models import Sum
from django.shortcuts import render_to_response, redirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from products.models import Product, Shop, Price, Currency
from purchases.models import Purchase, Benefit, Balance
@login_required
def new_purchase(request):
    """Step one of recording a purchase: choose a product and a shop."""
    context = csrf(request)
    context['products'] = Product.objects.all()
    context['shops'] = Shop.objects.all()
    return render_to_response('purchases/new_purchase_form.html', context)
@login_required
def new_purchase_price(request):
    """Step two: show the current price for the chosen product/shop pair and
    let the user pick a currency."""
    context = csrf(request)
    product = context['product'] = Product.objects.get(id=request.POST['product_id'])
    shop = context['shop'] = Shop.objects.get(id=request.POST['shop_id'])
    context['price'] = product.current_price(shop)
    context['currencies'] = Currency.objects.all()
    return render_to_response('purchases/new_purchase_price_form.html', context)
@login_required
def handle_new_purchase(request):
    """Create a Purchase from the POSTed product/shop/price/amount fields.

    If the submitted price differs from the product's current price in that
    shop (or no price exists yet), a new current price is recorded first.
    """
    product = Product.objects.get(
        id=request.POST['product_id'])
    shop = Shop.objects.get(
        id=request.POST['shop_id'])
    currency = Currency.objects.get(
        id=request.POST['price_currency'])
    # Quick and dirty fix to accept commas in place of periods.
    value = Decimal(
        request.POST['price_value'].replace(
            ',', '.'))
    amount = Decimal(
        request.POST['purchased_amount'].replace(
            ',', '.'))
    price = product.current_price(shop)
    # record a new price whenever the submitted one disagrees with the stored one
    if ((not price) or price.currency.id != currency.id or
        price.value != value):
        product.change_current_price(
            shop, value, currency)
        price = product.current_price(shop)
    purchase = Purchase.objects.create(
        product_price=price,
        payer=request.user,
        amount=amount)
    return redirect(show_purchase, purchase.id)
def show_purchase(request, purchase_id):
    """Render one purchase, annotating each benefit with its effective share
    of the purchased amount (share / sum-of-shares * amount)."""
    ctx = csrf(request)
    purchase = ctx['purchase'] = Purchase.objects.get(
        id=purchase_id)
    ctx['users'] = User.objects.all()
    ctx['benefits'] = ctx['purchase'].benefits()
    # total of all shares, used to normalise each benefit's portion
    share_sum = Benefit.objects.filter(
        purchase=purchase).aggregate(Sum('share'))['share__sum']
    for benefit in ctx['benefits']:
        # plain attribute assignment instead of poking benefit.__dict__ directly
        benefit.template_share = (
            benefit.share * purchase.amount / share_sum)
    return render_to_response(
        'purchases/show_purchase.html',
        ctx)
@login_required
def add_beneficiary(request, purchase_id):
    """Attach a beneficiary with a share to a purchase.

    Only the purchase's payer may do this; anyone else gets a 401 page.
    CONSISTENCY FIX: every sibling view that touches request.user carries
    @login_required — this one mutates data but was left unprotected.
    """
    purchase = Purchase.objects.get(
        id=purchase_id)
    # only the payer may edit the beneficiary list
    if not request.user.id == purchase.payer.id:
        response = render_to_response(
            'purchases/not_payer.html',
            {'purchase_id': purchase_id})
        response.status_code = 401
        return response
    user = User.objects.get(
        id=request.POST['beneficiary_id'])
    share = request.POST['share']
    purchase.add_benefit(user, share)
    return redirect(show_purchase, purchase_id)
@login_required
def show_balances(request):
    """Show the current user's balances: self-balances, balances where they
    are the first party, and balances where they are the second party."""
    user = request.user
    ctx = {'me': user}
    ctx['own_balances'] = Balance.objects.filter(
        first_user=user, second_user=user)
    ctx['balances_me_first'] = (
        Balance.objects.filter(first_user=user)
        .exclude(second_user=user)
        .order_by('second_user'))
    ctx['balances_me_second'] = (
        Balance.objects.filter(second_user=user)
        .exclude(first_user=user)
        .order_by('first_user'))
    return render_to_response('purchases/my_balance.html', ctx)
@login_required
def debts(request, obligor_id):
    """Dispatch on HTTP method: GET shows outstanding debts, POST settles them."""
    handlers = {'GET': show_debts, 'POST': settle_debts}
    handler = handlers.get(request.method)
    if handler is not None:
        return handler(request, obligor_id)
def show_debts(request, obligor_id):
    """List the obligor's unpaid benefits on purchases paid by the current user."""
    ctx = csrf(request)
    obligor = ctx["obligor"] = User.objects.get(id=obligor_id)
    ctx["benefits"] = Benefit.objects.filter(
        purchase__payer=request.user,
        beneficiary=obligor,
        paid_off=False)
    return render_to_response("purchases/debts.html", ctx)
def settle_debts(request, obligor_id):
    """Mark the POSTed benefit ids as settled, then redirect back to the
    obligor's debt listing."""
    # The instance itself is unused; the lookup doubles as validation — an
    # unknown obligor_id raises User.DoesNotExist before anything is settled.
    obligor = User.objects.get(id=obligor_id)
    settled_debts = Benefit.objects.filter(
        id__in=request.POST.getlist('settled'))
    for benefit in settled_debts:
        benefit.purchase.settle_debt(benefit)
    return redirect(debts, obligor_id)
@login_required
def list_purchases(request):
    """List the current user's purchases ordered by date, newest id first
    within the same date."""
    mine = Purchase.objects.filter(payer=request.user).order_by('date', '-id')
    return render_to_response('purchases/list_purchases.html', {'purchases': mine})
@login_required
def delete_purchase(request, purchase_id):
    """Delete a purchase; only its payer may do so (401 page otherwise).

    CONSISTENCY FIX: every sibling view that touches request.user carries
    @login_required — this one mutates data but was left unprotected.
    """
    purchase = Purchase.objects.get(
        id=purchase_id)
    # only the payer may remove the purchase
    if not request.user.id == purchase.payer.id:
        response = render_to_response(
            'purchases/not_payer.html',
            {'purchase_id': purchase_id})
        response.status_code = 401
        return response
    purchase.delete()
    return redirect(list_purchases)
|
UTF-8
|
Python
| false | false | 2,014 |
18,193,481,476,901 |
be1b0c79dc823dc83af405d38f77581d5d5d9c30
|
25e6aceace81a2a29aebc8249c11d0f9c1fcafbf
|
/resourceprovider/routes/__init__.py
|
f99dada5520d74278ca9eccf2b534a3db55f7a8e
|
[] |
no_license
|
willholley/flask-azure
|
https://github.com/willholley/flask-azure
|
e9178f295ded252b4591978be96c3a57631bdfbb
|
fb35811ddc00ea0b7f0aad42a6a056f4bb57a820
|
refs/heads/master
| 2021-01-22T02:08:24.436127 | 2013-10-01T21:14:18 | 2013-10-01T21:14:18 | 13,322,133 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from resources import resources
from subscriptions import subscriptions
from cloudservices import cloudservices
from sso import sso
def add_routes_to(app):
    """Register every API blueprint on the given Flask application."""
    all_blueprints = (resources, subscriptions, cloudservices, sso)
    for bp in all_blueprints:
        app.register_blueprint(bp)
|
UTF-8
|
Python
| false | false | 2,013 |
9,732,395,911,574 |
f17236593c0fca539f099355187f63de98d7f177
|
cf3d1f3bc0da10b4ee2a7cc46a045670800ff5e3
|
/src/WordLadder/solution.py
|
052ea5403cca544493decc294901d75dc8f15f5d
|
[
"Apache-2.0"
] |
permissive
|
54lihaoxin/leetcode_python
|
https://github.com/54lihaoxin/leetcode_python
|
b9c2ecf9e755168e89cbc92f41c29fce5751599c
|
435484369abd6a86bb16e6dbfe2eddb4ba9edbd2
|
refs/heads/master
| 2016-09-06T13:35:01.311426 | 2014-10-21T07:41:34 | 2014-10-21T07:41:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Word Ladder
#
# Given two words (start and end), and a dictionary, find the length of shortest transformation sequence from start to end, such that:
#
# Only one letter can be changed at a time
# Each intermediate word must exist in the dictionary
# For example,
#
# Given:
# start = "hit"
# end = "cog"
# dict = ["hot","dot","dog","lot","log"]
# As one shortest transformation is "hit" -> "hot" -> "dot" -> "dog" -> "cog",
# return its length 5.
#
# Note:
# Return 0 if there is no such transformation sequence.
# All words have the same length.
# All words contain only lowercase alphabetic characters.
# Debug flag for local runs; the original toggled it by assigning True and
# then immediately False -- keep only the effective value (dead assignment
# removed).
debug = False

# from CommonClasses import *  # hxl: comment out this line for submission
class Solution:
    """BFS solver for the "Word Ladder" problem: length of the shortest
    one-letter-at-a-time transformation sequence between two words."""

    # @param start, a string
    # @param end, a string
    # @param dict, a set of string
    # @return an integer
    def ladderLength(self, start, end, dict):
        """Return the number of words in the shortest ladder from `start`
        to `end` (both endpoints counted), or 0 if no ladder exists.

        Note: `dict` is mutated -- `end` is added so it is always reachable
        as a final step, matching the problem statement.

        Fixes vs. the original: the leftover debug `print count` (which
        polluted stdout on every BFS level and is a syntax error on
        Python 3) was removed.
        """
        # Single-letter words can always morph directly; keep the original
        # shortcut of reporting a ladder of length 1.
        if len(start) == 1:
            return 1
        level = set([start])      # current BFS frontier
        visitedWords = set()      # words already expanded
        dict.add(end)
        map = {}                  # word -> set of one-letter-away neighbors
        self.buildMap(map, dict, visitedWords, set(), start)
        count = 0                 # ladder length so far (BFS depth)
        while len(level) != 0:
            count += 1
            nextLevel = set([])
            for curWord in level:
                if curWord == end:
                    return count
                elif curWord not in visitedWords:
                    visitedWords.add(curWord)
                    if curWord not in map:
                        # Neighbors are computed lazily on first visit.
                        self.buildMap(map, dict, visitedWords, level, curWord)
                    for nextWord in map[curWord]:
                        nextLevel.add(nextWord)
            level = nextLevel
        return 0

    def buildMap(self, map, words, visitedWords, thisLevel, targetWord):
        """Record in `map[targetWord]` every word from `words` that is one
        letter away and neither visited nor on the current BFS level."""
        map[targetWord] = set()
        for w in words:
            if (w not in visitedWords
                and w not in thisLevel
                and self.areNeighbor(targetWord, w)):
                map[targetWord].add(w)

    # hxl: check whether two words are different with only one character
    def areNeighbor(self, wordA, wordB):
        """Return True iff the (same-length) words differ in exactly one
        position.  Early-exit threshold fixed: the original only bailed at
        the third mismatch (`diff > 2`) instead of the second."""
        diff = 0
        i = 0
        while i < len(wordA):
            if wordA[i] != wordB[i]:
                diff += 1
                if diff > 1:
                    return False
            i += 1
        return diff == 1
|
UTF-8
|
Python
| false | false | 2,014 |
8,890,582,338,997 |
8804ca3a8edf4f4a626af716c22bff794c21efa0
|
d52aef31bbf4daacca3f8ff1cda5c5b3ca598268
|
/lino/modlib/polls/__init__.py
|
edbd386d869a313e5bd918433761ca8de824d6af
|
[
"BSD-2-Clause"
] |
permissive
|
ManuelWeidmann/lino
|
https://github.com/ManuelWeidmann/lino
|
d0ee5e3f55f742006afd502756655567aefb709b
|
7d97afeb6b98e205a02d6a44141d5520148ec59d
|
refs/heads/master
| 2020-12-31T03:55:23.030299 | 2014-10-21T10:29:00 | 2014-10-21T10:29:00 | 25,510,068 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2013 Luc Saffre
# License: BSD (see file COPYING for details)
"""
A polls app for Lino. This is the main app for :ref:`polly`.
"""
from lino import ad
from django.utils.translation import ugettext_lazy as _
class Plugin(ad.Plugin):
    # Plugin descriptor for the polls app; only the translatable display
    # name is customized here.
    verbose_name = _("Polls")
|
UTF-8
|
Python
| false | false | 2,014 |
7,593,502,219,798 |
d24436dd663e78c74129cb0ab93fe6b8bcbe09eb
|
a31be834d3fe193194c6b773ab53a14e00a8655d
|
/test.py
|
5ccb1facaa1face6d4c3654b6cc50c3e6468366c
|
[] |
no_license
|
hust1994/mytest
|
https://github.com/hust1994/mytest
|
f666723b7ed833c12c0ee0a5af0e57a24c06fc53
|
4ae767fee8b69dfe2954f54f51142af3ee4d63b0
|
refs/heads/master
| 2016-09-05T16:48:19.337977 | 2014-09-19T02:52:29 | 2014-09-19T02:52:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import re
import subprocess
# Find the currently checked-out branch: scan `git branch` output for the
# line marked with '*'.
branch = subprocess.Popen("git branch", stdout=subprocess.PIPE, shell=True)
for line in branch.stdout:
    if line.startswith('*'):
        branch_name = line.split()[1]
        print "<branch>: ", branch_name
        break

# First (and assumed only) configured remote name, e.g. "origin".
origin = subprocess.Popen("git remote", stdout=subprocess.PIPE, shell=True)
origin_name = origin.stdout.read().strip()
print "<origin>: ", origin_name

# Push HEAD to the refs/for/<branch> review ref (Gerrit convention) and
# echo the command plus its output.
command = "git push " + origin_name + " HEAD:refs/for/" + branch_name
print command
result = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
for line in result.stdout:
    print line
|
UTF-8
|
Python
| false | false | 2,014 |
11,416,023,122,446 |
5917b1bbd7a8ae81b65c234254550b4d1738e793
|
66e0ba0267d1d8e13497275b0291aa3b452fb98d
|
/api/v1/files/views.py
|
a56acdafcc76ae8344504bc3a8f4a4430c6f8523
|
[] |
no_license
|
TorinAsakura/lgoapp
|
https://github.com/TorinAsakura/lgoapp
|
8a492fa597cdaab63096cf3032024ea92679b5e4
|
a09b3e06112e7d604eaa6ee5d36d279b102505af
|
refs/heads/master
| 2016-05-30T09:10:32.342083 | 2014-10-31T04:20:10 | 2014-10-31T04:20:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from rest_framework.response import Response
from rest_framework import generics
from rest_framework import status
from . import serializers
from files import models
class UserFileListView(generics.ListCreateAPIView):
    # GET lists all user files; POST creates a new one.
    queryset = models.UserFile.objects.all()
    serializer_class = serializers.UserFileSerializer
class UserFileDetailView(generics.RetrieveUpdateDestroyAPIView):
    queryset = models.UserFile.objects.all()
    serializer_class = serializers.UserFileSerializer

    def get(self, request, *args, **kwargs):
        """Serve the file via nginx's X-Accel-Redirect header.

        NOTE(review): `if not True:` is a hard-coded placeholder -- as
        written the else branch always runs and every GET returns 401.
        The intended authorization predicate must be restored before the
        X-Accel-Redirect path can ever execute.
        """
        response = super(UserFileDetailView, self).get(request, *args, **kwargs)
        if not True:
            # Strip the serialized body's content type and delegate the
            # actual file transfer to the front-end web server.
            del response['content-type']
            response['X-Accel-Redirect'] = self.object.content.url
        else:
            return Response(status=status.HTTP_401_UNAUTHORIZED)
        return response
|
UTF-8
|
Python
| false | false | 2,014 |
790,273,987,481 |
fc58ccbfd0db46f35457ed75b34173b4c268fb2e
|
9f235b04ae25aa34f08232de11a1f2b924b7d959
|
/api/Models.py
|
c4e4ee5cae75fe366b9791ab65f5b9caa802fe66
|
[] |
no_license
|
a85/doublespeak
|
https://github.com/a85/doublespeak
|
b51c78a629104a19ad43b5944f442fd8ea3afab9
|
52ec8cbbd27783971f173ac4bb1da6c85f5d4070
|
refs/heads/master
| 2021-01-10T20:49:58.406558 | 2011-10-18T19:43:12 | 2011-10-18T19:43:12 | 2,302,749 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import db
class Link(db.Model):
    # A stored URL together with oEmbed-style presentation metadata.
    url = db.LinkProperty(required=True)
    title = db.StringProperty()
    description = db.StringProperty()
    provider_name = db.StringProperty()
    thumbnail_url = db.LinkProperty()
    created = db.DateProperty()

    def __str__(self):
        return self.url
class Topic(db.Model):
    # A discussion topic; may nest under a parent topic (SelfReference)
    # and may point at a primary Link.
    title = db.StringProperty()
    speaker = db.StringProperty()
    author = db.StringProperty()
    topic_link = db.ReferenceProperty(Link, collection_name='topic')
    parentTopic = db.SelfReferenceProperty()
    created = db.DateProperty()

    def __str__(self):
        return self.title
class TopicMembership(db.Model):
    # Join entity giving a many-to-many relation between topics and links.
    topic = db.ReferenceProperty(Topic, collection_name='link_memberships')
    link = db.ReferenceProperty(Link, collection_name='topic_memberships')
|
UTF-8
|
Python
| false | false | 2,011 |
6,356,551,623,931 |
74b234b6f63013c2d3995259fab0efa041cab621
|
b454b8c1eee9014f209e6ffb6e95a0d823c953f7
|
/ConfParser.py
|
39b97a0a0d5dd8ac800045d021b17dba1a9a4de2
|
[] |
no_license
|
ygliang2009/pysearch
|
https://github.com/ygliang2009/pysearch
|
fb4e4ee44fcccde6927bed74235c99059db20ff6
|
eab85e37c06673f71c6d980c1e0a99575d16c295
|
refs/heads/master
| 2021-01-19T05:28:51.499181 | 2013-04-10T09:55:41 | 2013-04-10T09:55:41 | 9,342,947 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/bin/python
#encoding=utf-8
#@author:[email protected]
#@version:1.0
#@date:2012-11-22
#@desc:ini格式的config文件解析器
from ConfigParser import ConfigParser
class confparser:
    """Thin wrapper around ConfigParser exposing one ini section as a dict."""

    def __init__(self):
        pass

    @staticmethod
    def confdictory(section, confname='conf/search.conf'):
        """Read the ini file `confname` and return the options of `section`
        as a plain {option: value} dictionary."""
        parser = ConfigParser()
        parser.read(confname)
        # items() yields (option, value) pairs, which dict() consumes directly.
        return dict(parser.items(section))
if __name__ == '__main__':
    # Smoke test: dump the [backinfo] section of the default config file.
    confdict = confparser.confdictory('backinfo')
    print confdict
|
UTF-8
|
Python
| false | false | 2,013 |
11,665,131,212,095 |
81df5b6ed70aef1f3674183060cd16424b788d0e
|
be99b597a5c0c7d2ace9e5008fdd80311a69724a
|
/ex33-for.py
|
277e34d461c252604e8d2a8b6570ae50534651fe
|
[] |
no_license
|
antiface/lpthw
|
https://github.com/antiface/lpthw
|
1e728321b50a9f0718eb181b3dd46751fa1feb64
|
220dd44d16fe9be21979a1385f66994b18eae6c8
|
refs/heads/master
| 2020-12-24T12:02:22.894767 | 2011-06-20T01:39:14 | 2011-06-20T01:39:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def count_numbers_while(end, inc):
    """Build and return the list 0, inc, 2*inc, ... of values below `end`,
    printing progress, using an explicit while loop."""
    i = 0
    numbers = []

    while i < end:
        print "At the top i is %d" % i
        numbers.append(i)

        i = i + inc
        print "Numbers now: ", numbers
        print "At the bottom i is %d" % i

    # note to self: it helps if you return
    # something if you want to see something
    # back in __main__
    return numbers
def count_numbers_for(end,inc):
    """Same result as count_numbers_while, but driven by range()'s step."""
    numbers = []

    # range() takes a third argument
    # which is the increment
    # pydoc is your friend
    for i in range (0, end, inc):
        print "Adding %d to the list." % i
        numbers.append(i)

    return numbers
# Exercise both implementations with identical arguments; the printed
# lists should match.
while_list = count_numbers_while(23, 2)
print "The numbers with while():"
for number in while_list:
    print number

for_list = count_numbers_for(23, 2)
print "The numbers with for():"
for number in for_list:
    print number
|
UTF-8
|
Python
| false | false | 2,011 |
17,832,704,230,481 |
6f913e2f8f590c9f7511b45e8e6e2e599793565e
|
79a023b00d09a18212bb3b9eb4ba82482bffdf6c
|
/squirrel/blog/models.py
|
f267ed51f7a77fd953d8eed6cd80f279add76120
|
[] |
no_license
|
shonenada/squirrel
|
https://github.com/shonenada/squirrel
|
eca07f04613b6aabb514bdbcdf00eb0243b1346f
|
3aa46c3192d3d271a641abb0a74fd76bb6868cf6
|
HEAD
| 2016-03-25T10:48:45.498124 | 2014-08-31T09:04:13 | 2014-08-31T09:04:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from datetime import datetime
from squirrel.exts import db
class Blog(db.Model):
    # Accepted source markups for `content`.
    BLOG_TYPE = ('rst', 'md', 'html', 'text')

    __tablename__ = 'blog'

    id = db.Column(db.Integer, primary_key=True)
    type = db.Column(db.Enum(*BLOG_TYPE))
    title = db.Column(db.String(128), nullable=False)
    date = db.Column(db.DateTime, default=datetime.utcnow)
    slug = db.Column(db.String(128))
    author = db.Column(db.String(32))
    summary = db.Column(db.String(256))
    content = db.Column(db.Text)
    # Soft-delete marker -- presumably filtered in queries; confirm usage.
    is_deleted = db.Column(db.Boolean(), default=False)
|
UTF-8
|
Python
| false | false | 2,014 |
12,850,542,178,843 |
11cecc1841a54e55e74280f04adac6bc1fe9f17a
|
62e4fe9cc41d6183c3a86c4a367093c8ef1158bb
|
/ega/views.py
|
0117a4e3afdba28b87a53f957453d492069cdbf0
|
[] |
no_license
|
pombredanne/fenics
|
https://github.com/pombredanne/fenics
|
c75ed9a9d3527a5ab62b973cd23c6438a2ce9f17
|
ae5f09ff7b11540762fd4bf6b313d154d650ecd8
|
refs/heads/master
| 2017-05-25T05:26:46.312995 | 2014-08-22T15:01:57 | 2014-08-22T15:01:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from allauth.account.models import EmailAddress
from django.contrib import auth, messages
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.forms.models import modelformset_factory
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.template.response import TemplateResponse
from django.views.decorators.http import require_GET, require_http_methods
from ega.constants import (
DEFAULT_TOURNAMENT,
EXACTLY_MATCH_POINTS,
INVITE_BODY,
INVITE_LEAGUE,
INVITE_SUBJECT,
RANKING_TEAMS_PER_PAGE,
)
from ega.forms import (
EgaUserForm,
InviteFriendsForm,
LeagueForm,
PredictionForm,
)
from ega.models import (
EgaUser,
League,
LeagueMember,
Match,
Prediction,
Tournament,
)
def get_absolute_url(url):
    """Prefix a site-relative URL with the current site's domain."""
    return Site.objects.get_current().domain + url
def logout(request):
    """Log the user out and redirect to home with a success notice."""
    auth.logout(request)
    messages.success(request, 'Cerraste sesión exitosamente!')
    return HttpResponseRedirect(reverse('home'))
@login_required
def home(request):
    """Dashboard for the default tournament: upcoming matches annotated
    with the user's predictions, count of pending predictions, top of the
    ranking, recent history and stats."""
    tournament = get_object_or_404(
        Tournament, slug=DEFAULT_TOURNAMENT, published=True)
    matches = tournament.next_matches()
    # Predictions the user already completed (both goal fields filled in).
    played = Prediction.objects.filter(user=request.user, match__in=matches,
                                       home_goals__isnull=False,
                                       away_goals__isnull=False )
    pending = matches.count() - played.count()
    # Only the next few matches are shown on the dashboard.
    matches = matches[:3]
    for m in matches:
        try:
            m.user_prediction = played.get(match=m)
        except Prediction.DoesNotExist:
            m.user_prediction = None
    top_ranking = tournament.ranking()[:7]
    history = request.user.history(tournament)[:3]
    stats = request.user.stats(tournament)
    return render(request, 'ega/home.html',
                  {'tournament': tournament, 'top_ranking': top_ranking,
                   'pending': pending, 'matches': matches,
                   'history': history, 'stats': stats})
@require_http_methods(('GET', 'POST'))
@login_required
def profile(request):
    """Show (GET) or update (POST) the current user's profile."""
    if request.method == 'POST':
        form = EgaUserForm(
            instance=request.user, data=request.POST, files=request.FILES)
        if form.is_valid():
            form.save()
            messages.success(
                request, 'Perfil actualizado.')
            return HttpResponseRedirect(reverse('profile'))
        # An invalid POST falls through and re-renders the bound form.
    else:
        form = EgaUserForm(instance=request.user)
    return render(request, 'ega/profile.html', dict(form=form))
@require_http_methods(('GET', 'POST'))
@login_required
def invite_friends(request, league_slug=None):
    """Send invitation emails, optionally tied to one of the user's
    leagues.  Only the league owner may send league invites (404
    otherwise)."""
    kwargs = dict(key=request.user.invite_key)
    league = None
    if league_slug:
        league = get_object_or_404(
            League, tournament__slug=DEFAULT_TOURNAMENT, slug=league_slug)
        if league.owner != request.user:
            raise Http404
        kwargs['league_slug'] = league.slug
    invite_url = get_absolute_url(reverse('join', kwargs=kwargs))
    if request.method == 'POST':
        form = InviteFriendsForm(request.POST)
        if form.is_valid():
            # form.invite returns the number of addresses mailed.
            emails = form.invite(sender=request.user)
            if emails > 1:
                msg = '%s amigos invitados!' % emails
            else:
                msg = '1 amigo invitado!'
            messages.success(request, msg)
            return HttpResponseRedirect(reverse('home'))
    else:
        # Pre-fill subject/body, mentioning the league when present.
        subject = INVITE_SUBJECT
        extra_text = ''
        if league:
            subject += ', jugando en mi liga de amigos %s' % league.name
            extra_text = INVITE_LEAGUE % dict(league_name=league.name)
        initial = dict(
            subject=subject,
            body=INVITE_BODY % dict(
                extra_text=extra_text, url=invite_url,
                inviter=request.user.visible_name()),
        )
        form = InviteFriendsForm(initial=initial)
    return render(request, 'ega/invite.html',
                  dict(form=form, league=league, invite_url=invite_url))
@require_GET
@login_required
def friend_join(request, key, league_slug=None):
    """Handle a friend's invite link; with a league slug, also join that
    league (idempotent -- re-joining only produces a warning)."""
    # The lookup validates the invite key; the inviter is not used further.
    friend = get_object_or_404(EgaUser, invite_key=key)
    if league_slug:
        league = get_object_or_404(
            League, tournament__slug=DEFAULT_TOURNAMENT, slug=league_slug)
        member, created = LeagueMember.objects.get_or_create(
            user=request.user, league=league)
        if created:
            messages.success(
                request, 'Te uniste a el Ega, en la liga %s!' % league)
        else:
            messages.warning(request, 'Ya sos miembro de la liga %s.' % league)
    else:
        messages.success(request, 'Te uniste a el Ega!')
    return HttpResponseRedirect(reverse('home'))
@require_http_methods(('GET', 'POST'))
@login_required
def leagues(request):
    """List the user's current leagues and allow creating a new one;
    the creator is recorded as the league owner."""
    if request.method == 'POST':
        form = LeagueForm(request.POST)
        if form.is_valid():
            league = form.save()
            LeagueMember.objects.create(
                user=request.user, league=league, is_owner=True)
            return HttpResponseRedirect(
                reverse('invite-league', kwargs=dict(league_slug=league.slug)))
        # Invalid POST falls through and re-renders the bound form.
    else:
        form = LeagueForm()
    leagues = League.objects.current().filter(members=request.user)
    return render(
        request, 'ega/leagues.html', dict(leagues=leagues, form=form))
@require_GET
@login_required
def league_home(request, slug, league_slug):
    """League dashboard: top-5 league ranking and the user's tournament
    stats.  404 unless the requesting user is a league member."""
    tournament = get_object_or_404(Tournament, slug=slug, published=True)
    league = get_object_or_404(
        League, tournament=tournament, slug=league_slug, members=request.user)
    top_ranking = league.ranking()[:5]
    stats = request.user.stats(tournament)
    return render(request, 'ega/league_home.html',
                  {'tournament': tournament, 'league': league,
                   'top_ranking': top_ranking, 'stats': stats})
@login_required
def next_matches(request, slug):
    """Return coming matches for the specified tournament.

    GET renders a formset of the user's predictions; POST saves them and
    reports any that could no longer be updated.
    """
    tournament = get_object_or_404(Tournament, slug=slug, published=True)
    matches = tournament.next_matches()
    for m in matches:
        # create prediction for user if missing
        Prediction.objects.get_or_create(
            user=request.user, match=m, defaults={'starred': m.starred})
    PredictionFormSet = modelformset_factory(
        Prediction, form=PredictionForm, extra=0)
    predictions = Prediction.objects.filter(
        user=request.user, match__in=matches)
    if request.method == 'POST':
        formset = PredictionFormSet(request.POST)
        if formset.is_valid():
            formset.save()
            messages.success(request, 'Pronósticos actualizados.')
            # Forms flagged as expired (presumably set by PredictionForm
            # when the match already started -- confirm) were not saved.
            expired_matches = [f.instance.match for f in formset if f.expired]
            for m in expired_matches:
                msg = "%s - %s: el partido expiró, pronóstico NO actualizado."
                messages.error(request, msg % (m.home.name, m.away.name))
            return HttpResponseRedirect(
                reverse('ega-next-matches', args=[slug]))
    else:
        formset = PredictionFormSet(queryset=predictions)
    return render(
        request, 'ega/next_matches.html',
        {'tournament': tournament, 'formset': formset})
@login_required
def match_details(request, slug, match_id):
    """Return specified match stats."""
    tournament = get_object_or_404(Tournament, slug=slug, published=True)
    match = get_object_or_404(Match, id=match_id, tournament=tournament)
    exacts = Prediction.objects.none()
    winners = Prediction.objects.none()
    # A match counts as finished once both goal counts are loaded.
    finished = match.home_goals is not None and match.away_goals is not None
    if finished:
        # Predictions that scored points but below the exact-result award.
        winners = Prediction.objects.filter(
            match=match, score__gt=0, score__lt=EXACTLY_MATCH_POINTS)
        # Predictions awarded at least the exact-result score.
        exacts = Prediction.objects.filter(
            match=match, score__gte=EXACTLY_MATCH_POINTS
        ).select_related('user')
    return render(
        request, 'ega/match_details.html',
        {'tournament': tournament, 'match': match,
         'finished': finished, 'exacts': exacts, 'winners': winners})
@login_required
def ranking(request, slug, league_slug=None, playoffs=False):
    """Return ranking and stats for the specified tournament."""
    tournament = get_object_or_404(Tournament, slug=slug, published=True)
    league = None
    if league_slug is not None:
        league = get_object_or_404(
            League, tournament=tournament, slug=league_slug)
    user = request.user
    # League ranking when given, global tournament ranking otherwise.
    scores = (league.ranking(playoffs) if league
              else tournament.ranking(playoffs))
    try:
        # 1-based position of the current user within the full ranking.
        position = ([r['username'] for r in scores]).index(user.username)
        position += 1
    except ValueError:
        position = None
    paginator = Paginator(scores, RANKING_TEAMS_PER_PAGE)
    page = request.GET.get('page')
    try:
        ranking = paginator.page(page)
    except PageNotAnInteger:
        # Non-numeric page parameter -> first page.
        ranking = paginator.page(1)
    except EmptyPage:
        # Out-of-range page -> last page.
        ranking = paginator.page(paginator.num_pages)
    stats = user.stats(tournament, playoffs)
    return render(
        request, 'ega/ranking.html',
        {'tournament': tournament, 'league': league,
         'ranking': ranking, 'user_position': position, 'stats': stats,
         'playoffs': playoffs})
@login_required
def history(request, slug):
    """Return history for the specified tournament."""
    tournament = get_object_or_404(Tournament, slug=slug, published=True)
    user_history = request.user.history(tournament)
    paginator = Paginator(user_history, RANKING_TEAMS_PER_PAGE)
    page = request.GET.get('page')
    try:
        predictions = paginator.page(page)
    except PageNotAnInteger:
        # Non-numeric page parameter -> first page.
        predictions = paginator.page(1)
    except EmptyPage:
        # Out-of-range page -> last page.
        predictions = paginator.page(paginator.num_pages)
    stats = request.user.stats(tournament)
    return render(
        request, 'ega/history.html',
        {'tournament': tournament, 'predictions': predictions, 'stats': stats})
@login_required
def verify_email(request, email):
    """Re-send the confirmation mail for one of the user's addresses;
    404 if the address does not belong to the current user."""
    email_address = get_object_or_404(
        EmailAddress, user=request.user, email=email)
    email_address.send_confirmation(request)
    messages.success(request, 'Email de verificación enviado a %s' % email)
    return HttpResponseRedirect(reverse('profile'))
|
UTF-8
|
Python
| false | false | 2,014 |
5,257,040,020,343 |
d368590b17ce0a4892c226be00d38e17dc6a10b2
|
8981417ce72c254d6769cadd1f8df0d68bd44c66
|
/ckanext/saml2/config/sp_config.py
|
1cbdc212e038c31d2d0b48c30f5f12039dc25e5a
|
[] |
no_license
|
kapucko/ckanext-saml2
|
https://github.com/kapucko/ckanext-saml2
|
95b28f7d9308624a43b811092965097bdda89c06
|
de029cb5a91bf71510b67416c2ba1d4da232e35a
|
refs/heads/master
| 2021-01-19T06:58:35.872331 | 2014-12-23T14:20:55 | 2014-12-23T14:20:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os.path
from saml2 import BINDING_HTTP_REDIRECT
from saml2.saml import NAME_FORMAT_BASIC
# Locate the xmlsec1 binary: prefer pysaml2's detection helper when
# available, fall back to the conventional install path otherwise.
try:
    from saml2.sigver import get_xmlsec_binary
except ImportError:
    get_xmlsec_binary = None

if get_xmlsec_binary:
    xmlsec_path = get_xmlsec_binary(["/opt/local/bin"])
else:
    xmlsec_path = '/usr/bin/xmlsec1'

#BASE = 'http://edem.microcomp.sk'
#entityid' : 'http://edem.microcomp.sk',
BASE = 'http://edem.microcomp.sk'

CONFIG_PATH = os.path.dirname(__file__)

# CKAN user field <- SAML attribute mapping.
USER_MAPPING = {
    'email': 'mail',
    'fullname': 'field_display_name',
}

#'idp': ['urn:mace:umu.se:saml:ckan:idp'],
CONFIG = {
    'entityid' : 'http://edem.microcomp.sk',
    'description': 'CKAN saml2 auth',
    'service': {
        'sp': {
            'name' : 'CKAN SP',
            'endpoints': {
                'assertion_consumer_service': [BASE],
                'single_logout_service' : [(BASE + '/slo',
                                            BINDING_HTTP_REDIRECT)],
            },
            'required_attributes': [
#                'sn',
                'uid',
#                'name',
#                'mail',
#                'status',
#                'roles',
#                'field_display_name',
#                'realname',
#                'groups',
#                'givenname',
#                'surname',
#                'edupersonaffiliation',
                ],
            'optional_attributes': [],
            "authn_assertions_signed": "true",
            "authn_requests_signed" : "true",
            "want_assertions_signed": "true",
            "logout_requests_signed": "true",
        }
    },
    'debug': 1,
    'key_file': CONFIG_PATH + '/pki/mod_key.pem',
    'cert_file': CONFIG_PATH + '/pki/mod_cert.pem',
    'attribute_map_dir': CONFIG_PATH + '/../attributemaps',
    'metadata': {
        'local': [CONFIG_PATH + '/idp.xml'],
    },
    # -- below used by make_metadata --
#    'organization': {
#        'name': 'Exempel AB',
#        'display_name': [('Exempel AB','se'),('Example Co.','en')],
#        'url':'http://www.example.com/ckan',
#        },
#    'contact_person': [{
#        'given_name':'John',
#        'sur_name': 'Smith',
#        'email_address': ['[email protected]'],
#        'contact_type': 'technical',
#        },
#    ],
    'name_form': NAME_FORMAT_BASIC,
    # Fix: use the binary detected above; previously `xmlsec_path` was
    # computed but ignored in favor of a hard-coded '/usr/bin/xmlsec1'
    # (identical on standard installs, correct everywhere else).
    "xmlsec_binary": xmlsec_path,
    'logger': {
        'rotating': {
            'filename': 'sp.log',
            'maxBytes': 100000,
            'backupCount': 5,
        },
        'loglevel': 'debug',
    }
}
|
UTF-8
|
Python
| false | false | 2,014 |
18,004,502,940,455 |
e3186f817794f99b28da0af85b7e697c60047444
|
96c26e13a7d0d006494de96b001ae5ecc9c11e64
|
/urls.py
|
286efad2ebb1c0895bc5c37ec004c8f847b5e2b2
|
[] |
no_license
|
rizafahmi/irocservices
|
https://github.com/rizafahmi/irocservices
|
e655b3680cb0602f84144245ddf55fbca8bcf11a
|
c0e8508e8827229237b726ff845268bd270eca70
|
refs/heads/master
| 2021-01-01T19:11:23.899264 | 2011-12-09T10:52:20 | 2011-12-09T10:52:20 | 2,938,482 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import patterns, include, url
from piston.resource import Resource
from irocservices.handlers import CoriHandler, CoriCountHandler
# Piston REST resources: comment retrieval and comment counting.
cori_resources = Resource(handler=CoriHandler)
cori_count_resources = Resource(handler=CoriCountHandler)

# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()

urlpatterns = patterns('',
    url(r'^api/', include('irocservices.cori.urls'), name='api_home'),

    # Get Comments
    # Default emitter is xml; an explicit format suffix overrides it.
    url(r'^apis/(?P<id_table>[^/]+)/(?P<id_news>[^/]+)/$', cori_resources, { 'emitter_format': 'xml' }),
    url(r'^apis/(?P<id_table>[^/]+)/(?P<id_news>[^/]+)/(?P<emitter_format>.+)$', cori_resources),

    # Count Comments
    url(r'^capis/(?P<id_table>[^/]+)/(?P<id_news>[^/]+)/$', cori_count_resources, { 'emitter_format': 'xml' }),
    url(r'^capis/(?P<id_table>[^/]+)/(?P<id_news>[^/]+)/(?P<emitter_format>.+)$', cori_count_resources),
)
|
UTF-8
|
Python
| false | false | 2,011 |
3,152,506,000,659 |
35992221628d545ae869d679e781117816a38f93
|
7ec2bba22c0b52b569cae8e11ba46d3ad4d6cca4
|
/urls.py
|
35bc3e73ded580176c27394b7970b9993a570e8b
|
[] |
no_license
|
patternleaf/atlas
|
https://github.com/patternleaf/atlas
|
7698d042794db03d352bbbe1d31d09806ed5c858
|
e991100231f42c718d9b4d4c5553440d9f489149
|
refs/heads/master
| 2021-01-18T09:07:34.804955 | 2013-04-12T14:51:38 | 2013-04-12T14:51:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import handler500, patterns, include, url
from django.contrib import admin
from django.conf import settings
from django.views.decorators.cache import cache_page
from tastypie.api import Api
from storybase.api import CreativeCommonsLicenseGetProxyView
from storybase.views import JSErrorHandlerView
from storybase_asset.urls import urlpatterns as asset_urlpatterns
from storybase_user.urls import urlpatterns as user_urlpatterns
from storybase_story.urls import urlpatterns as story_urlpatterns
from storybase_asset.api import AssetResource, DataSetResource
from storybase_geo.api import (GeocoderResource, GeoLevelResource,
LocationResource, PlaceResource)
from storybase_help.api import (HelpResource)
from storybase_story.api import StoryResource
from storybase_taxonomy.api import TagResource
# Override default error handler with one that uses RequestContext
handler500 = 'storybase.views.defaults.server_error'

admin.autodiscover()

# Start empty; app URL groups are appended below.
urlpatterns = patterns('')

# Set up Tastypie API resources
v0_1_api = Api(api_name='0.1')
v0_1_api.register(AssetResource())
v0_1_api.register(DataSetResource())
v0_1_api.register(StoryResource())
v0_1_api.register(GeocoderResource())
v0_1_api.register(GeoLevelResource())
v0_1_api.register(LocationResource())
v0_1_api.register(PlaceResource())
v0_1_api.register(HelpResource())
v0_1_api.register(TagResource())

urlpatterns += patterns('',
    # REST API
    (r'^api/', include(v0_1_api.urls)),
    # Proxy for Creative Commons endpoint
    # Cache responses for 24 hours
    url(r"^api/%s/license/get/" % v0_1_api.api_name,
        cache_page(CreativeCommonsLicenseGetProxyView.as_view(), 60 * 60 * 24),
        name="api_cc_license_get"),
)

# Include storybase_user URL patterns
# Use this pattern instead of include since we want to put the URLs
# at the top-level
urlpatterns += user_urlpatterns + story_urlpatterns + asset_urlpatterns

urlpatterns += patterns('',
    # Examples:
    # url(r'^$', 'atlas.views.home', name='home'),
    # url(r'^atlas/', include('atlas.foo.urls')),

    # StoryBase account management
    # This needs to come before the admin URLs in order to use
    # the custom login form
    (r'^accounts/', include('storybase_user.account_urls')),
    (r'^messaging/', include('storybase_messaging.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    #url(r'^admin/lookups/', include(ajax_select_urls)),
    url(r'^admin/', include(admin.site.urls)),

    # Make translations available in JavaScript
    (r'^jsi18n/$', 'django.views.i18n.javascript_catalog', {}),

    # JS errors
    url(r'^errors/', JSErrorHandlerView.as_view(), name="js_error_log"),

    # Comments
    (r'^comments/', include('django.contrib.comments.urls')),

    # Search via Haystack
    (r'^search/', include('search_urls')),

    # 3rd-party apps
    (r'^tinymce/', include('tinymce.urls')),
    (r'^accounts/', include('storybase_user.registration.backends.extrainfo.urls')),
    (r'^accounts/', include('social_auth.urls')),
    (r'^notices/', include('notification.urls')),

    # django CMS URLs
    url(r'^', include('cms.urls')),
)

# Serve media and static files from Django only in development.
if settings.DEBUG:
    urlpatterns = patterns('',
        url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
            {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
        url(r'', include('django.contrib.staticfiles.urls')),
    ) + urlpatterns
|
UTF-8
|
Python
| false | false | 2,013 |
3,427,383,906,049 |
86295b41dcdc032514c1887c778f4231f5f1a020
|
ad646d037f1591d22f7556c284451fd004da9ca4
|
/focus/environment/__init__.py
|
a2ca4aa5227e5b5b2fe0910127ce5cdbf2f9e268
|
[
"MIT"
] |
permissive
|
techdragon/focus
|
https://github.com/techdragon/focus
|
e85b5d326b94d7bebaf4ace1a01c7b228107f7d2
|
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
|
refs/heads/master
| 2021-01-16T17:57:54.624191 | 2012-11-23T04:50:57 | 2012-11-23T04:50:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" This package encapsulates the runtime environment, including pathing, the
current ``Task`` and ``IOStream`` instances, and command-line and daemon
interfaces to drive the system.
"""
import os
import re
import sys
from focus import errors
from focus.task import Task
from focus.environment.cli import CLI
from focus.environment.io import IOStream
# Public API of this package.
__all__ = ('Environment', 'CLI', 'IOStream')

# Matches .py/.pyc/.pyo extensions (used to spot python module files).
_RE_PY_EXT = re.compile(r'\.py[co]?$')
# Matches package __init__ files, which are excluded from module discovery.
_RE_INIT_PY = re.compile(r'__init__\.py[co]?$')
def _import_modules(dir_path):
""" Attempts to import modules in the specified directory path.
`dir_path`
Base directory path to attempt to import modules.
"""
def _import_module(module):
""" Imports the specified module.
"""
# already loaded, skip
if module in mods_loaded:
return False
__import__(module)
mods_loaded.append(module)
mods_loaded = []
# check if provided path exists
if not os.path.isdir(dir_path):
return
try:
# update import search path
sys.path.insert(0, dir_path)
# check for modules in the dir path
for entry in os.listdir(dir_path):
path = os.path.join(dir_path, entry)
if os.path.isdir(path): # directory
_import_module(entry)
elif _RE_PY_EXT.search(entry): # python file
if not _RE_INIT_PY.match(entry): # exclude init
name = _RE_PY_EXT.sub('', entry)
_import_module(name)
finally:
# remove inserted path
sys.path.pop(0)
class Environment(object):
    """ Basic container for a runtime environment.
        """

    DEF_DATA_DIR = '~/.focus'            # default path for data dir
    DATA_SUBDIRS = ('tasks', 'plugins')  # subdirectories within data dir

    def __init__(self, **kwargs):
        """ Initializes environment.

            `args`
                List of environment arguments. Default: ``None``
            `io`
                IO object for data streams. Default: ``None``
            `data_dir`
                Home directory for focus user data. Default: ~/.focus or value
                defined in $FOCUS_HOME env var.
            `task`
                ``Task`` instance. Default: ``None``
            """

        # argument list
        self._args = kwargs.get('args', list())

        # io stream
        self._io = kwargs.get('io', IOStream())

        # path to focus user data directory (for config files, plugins, etc.)
        # first: check provided arg, then: $FOCUS_HOME env variable,
        # finally: use default user homedir path
        self._data_dir = (
            kwargs.get('data_dir') or os.environ.get('FOCUS_HOME') or
            os.path.expanduser(self.DEF_DATA_DIR)  # ~/.focus
        )
        # normalize (resolves symlinks and relative segments)
        self._data_dir = os.path.realpath(self._data_dir)

        self._task = kwargs.get('task')
        self._loaded = False

    def _setup_directories(self):
        """ Creates data directory structure.

            * Raises a ``DirectorySetupFail`` exception if error occurs
              while creating directories.
            """
        # base data dir first, then its subdirectories
        dirs = [self._data_dir]
        dirs += [os.path.join(self._data_dir, name) for name
                 in self.DATA_SUBDIRS]

        for path in dirs:
            if not os.path.isdir(path):
                try:
                    os.makedirs(path)  # recursive mkdir
                    os.chmod(path, 0755)  # rwxr-xr-x (py2 octal literal)
                except OSError:
                    raise errors.DirectorySetupFail()

        return True

    def _setup_task(self, load):
        """ Sets up the ``Task`` object and loads active file for task.

            `load`
                Set to ``True`` to load task after setup.
            """
        # reuse an injected task if one was provided to __init__
        if not self._task:
            self._task = Task(self._data_dir)

        if load:
            self._task.load()

    def _load_plugins(self):
        """ Attempts to load plugin modules according to the order of available
            plugin directories.
            """

        # import base plugin modules (bundled with focus) first
        try:
            __import__('focus.plugin.modules')
            #import focus.plugin.modules

        except ImportError as exc:
            raise errors.PluginImport(unicode(exc))

        # load user defined plugin modules from the data dir
        try:
            user_plugin_dir = os.path.join(self._data_dir, 'plugins')
            _import_modules(user_plugin_dir)

        except Exception as exc:
            raise errors.UserPluginImport(unicode(exc))

    def load(self):
        """ Loads in resources needed for this environment, including loading a
            new or existing task, establishing directory structures, and
            importing plugin modules.
            """
        # order matters: directories must exist before plugins/task load
        self._setup_directories()
        self._load_plugins()
        self._setup_task(load=True)
        self._loaded = True

    @property
    def loaded(self):
        """ Returns if environment is loaded.
            """
        return self._loaded

    @property
    def args(self):
        """ Returns original arguments passed into environment.
            """
        return self._args

    @property
    def io(self):
        """ Returns ``IO`` object for environment.
            """
        return self._io

    @property
    def data_dir(self):
        """ Returns data directory path.
            """
        return self._data_dir

    @property
    def task(self):
        """ Returns task associated with environment.
            """
        return self._task
|
UTF-8
|
Python
| false | false | 2,012 |
18,184,891,558,396 |
05cfb9bba58443a750035630cfe5c50d923a349c
|
c349edef90d513814887434856f796063bb6a750
|
/portal.py
|
e86dcf8828286d4c3a8a205b33fbc14fb8e97087
|
[] |
no_license
|
whatwewant/portal
|
https://github.com/whatwewant/portal
|
4c44c0cc9c630df56b90b2f2fda90abc3f85dcb7
|
014066bb0492281f313f5aa6815e38deedc4f922
|
refs/heads/master
| 2016-08-05T18:33:49.603999 | 2014-10-20T02:13:42 | 2014-10-20T02:13:42 | 23,379,660 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import requests
import time
from base64 import b64encode
from bs4 import BeautifulSoup
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
# Default values for the portal login form fields; these mirror the
# hidden inputs submitted by the portal's web page.
userName = ""
userPwd = ""
serviceType = ""
isSavePwd = "on"
isQuickAuth = "false"
language = "English"
browserFinalUrl = ""
userip = "null"
# Cookie values mimicking what the portal's JavaScript stores client-side
# (original JS expressions kept in the trailing comments).
cookie_hello1 = userName # document.getElementById("is_userName").value
cookie_hello2 = False # document.getElementById("id_isSavePwd").checked
cookie_hello3 = "" # encrypt(document.getElementById("id_userPwd").value)
cookie_hello4 = "" # document.getElementById("id_serviceType").value
def encrypt(password):
    """XOR every character of *password* with 0xff and return the result.

    This reproduces the portal's trivial client-side password
    obfuscation (it is not real encryption).
    """
    return ''.join(unichr(ord(ch) ^ 0xff) for ch in password)
class Portal(object):
    """Client for the campus captive portal at 172.20.1.1.

    Wraps a ``requests.Session`` to log in/out against the portal's JSP
    endpoints and to send the periodic "heartbeat" requests that keep
    the session online.
    """
    HOST = '172.20.1.1'
    ROOT_URL = r'http://172.20.1.1/portal/'
    INDEX_URL = ROOT_URL + 'index_default.jsp'
    LOGIN_URL = ROOT_URL + 'login.jsp'
    LOGOUT_URL = ROOT_URL + 'logout.jsp'
    ONLINE_URL = ROOT_URL + 'online.jsp'
    ONLINE_HEARTBEAT_URL = ROOT_URL + 'online_heartBeat.jsp'
    ONLINE_SHOWTIMER_URL = ROOT_URL + 'online_showTimer.jsp'
    ONLINE_FUNCTIONBUTTON_URL = ROOT_URL + 'online_funcButton.jsp'
    # Default heartbeat period; overwritten from the server's
    # heartbeatCyc value in get_login_info() (milliseconds there).
    TIME_OUT = 24000
    def __init__(self):
        """Create a fresh, logged-out client with its own HTTP session."""
        self.__username = ''
        self.__password = ''
        self.__password_encrypt = ''
        self.__req = requests.Session()
        self.__req.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0'
        # self.get_index_info()
        self.__ip = 'null'
        self.__login_success_html = ''
        # record whether is logined
        self.__is_logined = False
        # per-request payloads, filled in by get_login_info()
        self.__heartBeat_data = ''
        self.__showTimer_data = ''
        self.__funcButton_data = ''
        self.__time_out = 24000 # disconnect interval; replaced by heartbeatCyc/1000 (seconds) in get_login_info()
        # cookie
        self.__cookies = ''
        # headers
        self.__headers = dict()
    def get_ip(self):
        """Return the local IP address used to reach the portal host.

        Opens a UDP socket "towards" the portal and reads the socket's
        own address; no traffic is actually sent.
        """
        from socket import socket
        from socket import AF_INET, SOCK_DGRAM
        s = socket(AF_INET, SOCK_DGRAM)
        s.connect((Portal.HOST, 0))
        return s.getsockname()[0]
    def checkLogin(self):
        """Placeholder — not implemented."""
        pass
    def logout(self):
        '''
            whatever it had logined, logout
            2 results:
                1. User is offline.
                2. Logged out successfully.
        '''
        source_html = self.__req.get(Portal.LOGOUT_URL).content
        beautiful_html = BeautifulSoup(source_html)
        # The portal reports the outcome in the first <div> of the page.
        message = beautiful_html.find('div').text.strip()
        self.__is_logined = False
        print message
        return message
    def get_index_info(self):
        """Fetch the portal index page and return its <input> elements
        (a list of BeautifulSoup tags)."""
        index_html = self.__req.get(Portal.INDEX_URL)
        #print index_html.cookies
        index_beautiful = BeautifulSoup(index_html.content)
        info = index_beautiful.findAll('input')
        return info
    def add_cookie_to_headers(self):
        """Build a browser-like Cookie header (reusing the session's
        JSESSIONID) plus the other headers the portal expects."""
        JSESSIONID = self.__req.cookies.get_dict()['JSESSIONID']
        #cookie = 'hello1={0}; hello2=true; hello3={1}; hello4=; JSESSIONID={2}'.format(self.__username, self.__password_encrypt, JSESSIONID)
        cookie = 'hello1={0}; hello2=false; hello3=; hello4=; JSESSIONID={1}'.format(self.__username, JSESSIONID)
        self.__req.headers['Referer'] = 'http://172.20.1.1/portal/index_default.jsp'
        self.__req.headers['Cookie'] = cookie
        self.__req.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0'
        self.__req.headers['Connection'] = 'keep-alive'
        self.__req.headers['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
        # Keep a dict copy of the cookie values as well.
        cookies = {
            'hello1': self.__username,
            'hello2': 'false',
            'hello3': '',
            'hello4': '',
            'JSESSIONID': JSESSIONID
        }
        self.__cookies = cookies
    def online_post_after_login(self):
        '''must after login successfully.'''
        # NOTE(review): uses the module-level ``requests``, not the
        # session, so no session cookies are sent here — confirm intended.
        html = requests.post(Portal.ONLINE_URL, data=self.__heartBeat_data)
        return html
    def login(self, username, password):
        '''
            Before login, you had better logout.
            Fortunately, we will do logout action instead of you self.
        '''
        self.__username = username
        self.__password = password
        self.__password_encrypt = encrypt(password)
        # whatever it had logined, logout
        self.logout()
        self.get_index_info()
        self.add_cookie_to_headers()
        # The portal expects the password base64-encoded in the form data.
        password = b64encode(password)
        if self.__ip == 'null':
            self.__ip = self.get_ip()
            if self.__ip == '127.0.0.1' or self.__ip == '0.0.0.0':
                self.__ip = 'null'
        payload = {
                'userName':username,
                'userPwd':password,
                'serviceType':serviceType,
                'isSavePwd':isSavePwd,
                'isQuickAuth':isQuickAuth,
                'language':language,
                'browserFinalUrl':browserFinalUrl,
                'userip':self.__ip
                }
        try :
            # html = self.__req.post(Portal.LOGIN_URL, params=payload, cookies=self.__cookies)
            self.__req.get(Portal.INDEX_URL)
            html = self.__req.post(Portal.LOGIN_URL, params=payload)
            #print 'in login: ', self.__req.cookies.get_dict()
            #print 'in login: ', self.__req.headers
        except requests.exceptions.ConnectionError:
            print "未接入网络,请检查网线是否接好或无线是否接入"
            # NOTE(review): bare ``exit`` (no call) is a no-op expression;
            # likely meant sys.exit() — execution falls through to the
            # BeautifulSoup call below with ``html`` undefined.
            exit
        beautiful_html = BeautifulSoup(html.content)
        login_message = beautiful_html.find('div').text.strip()
        if 'Logged in successfully.' == login_message:
            self.__is_logined = True
            self.__login_success_html = html.content
            # fetch session info (heartbeat payloads) from the response
            self.get_login_info()
            # go to the "online" page
            print 'Logged in successfully.'
            #
            self.online_post_after_login()
        else:
            print 'Logged in failed.\n' + login_message
            # NOTE(review): bare ``exit`` again — no-op, see above.
            exit
        # keep login success html
        return html
        # with open('t.html', 'w+') as t:
        #    t.write(html.content)
    def re_login_by_time_out(self, username, password):
        '''this may be a little silly, had better not do it'''
        # Re-login forever, sleeping three heartbeat periods between tries.
        while True:
            self.login(username, password)
            time.sleep(self.__time_out * 3)
    def get_login_info(self):
        """Parse the successful-login page's hidden <input> fields and
        build the payloads used by the heartbeat/timer/button requests.

        Must be called after a successful login (reads
        ``self.__login_success_html``). Returns the parsed info dict.
        """
        #
        # if not self.__is_logined:
        #    username = raw_input('Username: ')
        #    password = raw_input('Password: ')
        #    self.login(username, password)
        #    time.sleep(3)
        html = self.__login_success_html
        beautiful_html = BeautifulSoup(html)
        info = beautiful_html.findAll('input')
        info_list = list()
        for each in info:
            tmp = dict()
            tmp[each.attrs['name']] = each.attrs['value']
            info_list.append(tmp)
        # NOTE(review): relies on the server always emitting these inputs
        # in this exact order — confirm against the portal page.
        info_dict = {
                'language': info_list[0]['language'],
                'heartbeatCyc': info_list[1]['heartbeatCyc'],
                'heartBeatTimeoutMaxTime': info_list[2]['heartBeatTimeoutMaxTime'],
                'userDevPort': info_list[3]['userDevPort'],
                'userStatus': info_list[4]['userStatus'],
                'userip': self.__ip, #info_list[5]['userip'],
                'serialNo': info_list[6]['serialNo']
                }
        # keep heartbeatCyc
        startTime = str(int(float(time.time()) * 1000))# [:13]
        # heartbeatCyc is in milliseconds; store the period in seconds.
        self.__time_out = int(info_list[1]['heartbeatCyc']) / 1000
        self.__heartBeat_data = info_dict
        self.__showTimer_data = {
                'language': info_dict['language'],
                'startTime': startTime
                }
        self.__funcButton_data = {
                'language':info_dict['language'],
                'userip':info_dict['userip'],
                'serialNo':info_dict['serialNo']
                }
        return info_dict
    def do_heart_beat(self):
        """Prompt for credentials if needed, then loop sending a
        heartbeat once every ``self.__time_out`` seconds (never returns)."""
        if not self.__is_logined:
            username = raw_input('Username: ')
            password = raw_input('Password: ')
            self.login(username, password)
            time.sleep(3)
        #if self._heart_beat_data == '':
        #    self.get_login_info()
        while True:
            time.sleep(self.__time_out)
            self.online_heartBeat()
            # self.online_showTimer()
            # self.online_funcButton()
            #print 'heart beat'
        # time.sleep(3)
        # self.logout()
        # time.sleep(3)
        # html = self.__req.post(Portal.HEART_BEAT_URL, params=self._heart_beat_data)
        # html = requests.post(Portal.HEART_BEAT_URL, params=self._heart_beat_data)
        # print html.url
        # return html
    def online_heartBeat(self):
        """POST one keep-alive request; returns the response."""
        #print self.__req.cookies.get_dict()
        #print self.__req.headers
        html = self.__req.post(Portal.ONLINE_HEARTBEAT_URL, data=self.__heartBeat_data)
        # print self.__req.url
        #print html.url
        #print BeautifulSoup(html.content)
        return html
    def online_showTimer(self):
        """GET the portal's session-timer page with a fresh timestamp."""
        self.__showTimer_data['startTime'] = str(int(float(time.time()) * 1000))
        html = self.__req.get(Portal.ONLINE_SHOWTIMER_URL, params=self.__showTimer_data)
        return html
    def online_funcButton(self):
        """GET the portal's function-button page for this session."""
        html = self.__req.get(Portal.ONLINE_FUNCTIONBUTTON_URL, params=self.__funcButton_data)
        return html
if __name__ == '__main__':
    # Load (or create) credentials in /usr/bin/userinfo.txt, then keep
    # the portal session alive by re-logging in periodically.
    # NOTE(security/review): credentials are stored in plain text in a
    # chmod-766 (world-writable) file under /usr/bin — confirm intended.
    import os
    username = None
    password = None
    if not os.path.isfile('/usr/bin/userinfo.txt'):
        # First run: prompt for credentials and persist them.
        print "First Use This Script!!!"
        username = raw_input("UserName: ")
        password = raw_input("PassWord: ")
        if not os.path.exists('/usr/bin/userinfo.txt'):
            os.system('sudo touch /usr/bin/userinfo.txt && sudo chmod 766 /usr/bin/userinfo.txt')
        with open('/usr/bin/userinfo.txt', 'w') as uif:
            uif.write(username+'\n'+password+'\n')
    else:
        print "Make sure you UserName & PassWord in file `userinfo.txt` is Right"
        with open('/usr/bin/userinfo.txt', 'r') as uif:
            username = uif.readline().strip()
            password = uif.readline().strip()
        if username == "" or password == "":
            print "Username or Password cannot be empty."
            print "Please Check Your Input or File `userinfo.txt` "
            username = raw_input("UserName: ")
            password = raw_input("PassWord: ")
            with open('/usr/bin/userinfo.txt', 'w') as uif:
                uif.write(username+'\n'+password+'\n')
            # NOTE(review): exits right after re-writing the file, so the
            # freshly entered credentials are never used to log in this run.
            exit()
    client = Portal()
    client.re_login_by_time_out(username, password)
    #client.do_heart_beat()
|
UTF-8
|
Python
| false | false | 2,014 |
3,745,211,531,456 |
2627831339370cb5a1f787e4d300e8ce9948a157
|
4cc7bce721b096d5e7ba6ba02cdddf4bc87a3f8e
|
/Converters/ConvertGridtoWordLists.py
|
8f2a4d89aa75274c27a6880762d56095e480b6a6
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
non_permissive
|
willwade/AAC-Tools
|
https://github.com/willwade/AAC-Tools
|
dc04416a399b2b1af080d24b593c44ebf65af70d
|
6c8702efb03afb7e30c4d8594f59e1c2388e6ad4
|
refs/heads/master
| 2021-01-11T04:06:46.002079 | 2014-09-10T16:25:56 | 2014-09-10T16:25:56 | 4,106,441 | 1 | 0 | null | false | 2014-07-13T11:28:17 | 2012-04-22T19:24:31 | 2014-06-02T02:42:26 | 2014-07-13T11:28:17 | 1,872 | 1 | 0 | 0 |
Python
| null | null |
#!/usr/bin/env python
# -*- coding: cp1252 -*-
# Contact: Will Wade <[email protected]>
# Date: Jan 2012
# -*- coding: iso-8859-15 -*-
# Utils
import sys
sys.path.append('../utils')
""" Grid to Wordlist.
- Export Grids as CSV files for analysis. Seperate files or One file. set location of where
- Export grids as wordlist files - pass flag to rewrite all the grids as wordlist Grids
(NB Pass the -excludecommon and -excludewords to exclude common and your own wordlists to convert. Useful for template pages)
"""
import pdb
import os.path, errno, re
from lxml import etree
import getopt, sys, csv
from unicodecsv import UnicodeWriter
def make_dirs(path):
    """Ensure *path* exists as a directory (creating intermediates) and
    return it.

    Uses EAFP instead of the original exists()/makedirs() pair, which
    had a time-of-check/time-of-use race: a directory created by another
    process between the check and the mkdir would raise OSError.
    """
    try:
        os.makedirs(path)
    except OSError as e:
        # Already existing is fine; re-raise anything else (permissions,
        # read-only filesystem, ...). ``errno`` is imported at file top.
        if e.errno != errno.EEXIST:
            raise
    return path
def filetolist(file):
    """Read the text file at path *file* and return its non-empty lines
    as a list of strings (newlines stripped).

    The original opened the file without ever closing it, leaking the
    handle; a ``with`` block closes it deterministically. Splitting on
    '\n' and filtering falsy entries matches the original
    re.split + filter(None, ...) behaviour exactly.
    """
    with open(file, "r") as text_file:
        contents = text_file.read()
    return [line for line in contents.split('\n') if line]
def parse_grids(gridxml='grid.xml',outputpath='.',userdir='.',
                excludehidden=False,outputwordlists=True,
                ignoregrids=[],ignorecells=[], blackliststyles=[],singlefile=False, outputcsv=False):
    # gridxml,outputpath,userdir,excludehidden,outputwordlists, ignoregrids, ignorecells, singlefile, outputcsv
    # NOTE(review): the mutable default arguments ([]) are shared across
    # calls; they are only read here, but confirm no caller mutates them.
    '''
    Parse Grid.xml files recursively. Extract Vocabulary and store it out as CSV files and/or as Wordlist files
    '''
    # Get gridsetname
    for r,d,f in os.walk(userdir): # Parse directories to find Grids directory
        #pdb.set_trace()
        if "Grids" in d:
            gridsetname=os.path.split(os.path.normpath(r))[1]
    # outputing to Grid folders or other output folder?
    # Check to see if output directory specified, if not output to the Grid directories.
    if (outputpath == '.'):
        outinplace = True
    else:
        outputpath = outputpath + '/'
        outinplace=False
    # outputing to single file?
    if(singlefile):
        if(outputwordlists):
            file_out = open(outputpath + gridsetname +'.xml', 'wb')
            wordlist = etree.Element("wordlist")
        if(outputcsv):
            vocabWriter = UnicodeWriter(open(outputpath + 'vocab.csv', 'wb'), delimiter=',', quotechar='"')
    for r,d,f in os.walk(userdir): # Parse any directory, only picking up on grid.xml files.
        page = os.path.split(r)[1]
        if page not in ignoregrids:
            for files in f:
                if files.endswith("grid.xml"):
                    pth = os.path.join(r,files)
                    if (outinplace): # Check to see if output directory specified, if not output to the Grid directories.
                        outputpath = r + '/'
                    parser = etree.XMLParser(strip_cdata=False)
                    tree = etree.parse(pth, parser) # Parse the file
                    if(tree.xpath(".//licencekey") == []): # does it have a licencekey? Bugger if it has
                        readpictures = True
                    else:
                        readpictures = False # So this grid is licenced. Dont try and read the pictures
                    cells = tree.xpath(".//cell")
                    if(singlefile == False):
                        if(outputwordlists):
                            wordlist = etree.Element("wordlist")
                        if (outputcsv):
                            vocabWriter = UnicodeWriter(open(outputpath + page + '.csv', 'wb'), delimiter=',', quotechar='"')
                    # Add in data from any existing wordlists!
                    # NOTE(review): "\w" in the literal below is a literal
                    # backslash-w (not an escape), so this is Windows-only
                    # path joining — os.path.join would be portable.
                    wordlistpath = os.path.dirname(pth) + "\wordlist.xml"
                    if os.path.isfile(wordlistpath): # wordlist exists for this grid. Need to add the wordlist data to the uber wordlist.
                        wordlistwordlist = etree.parse(wordlistpath)
                        root = wordlistwordlist.getroot()
                        for wordx in root.iterfind("word"): # MORE EFFICIENT METHOD???
                            if outputwordlists:
                                wordlist.append(wordx) # HOW TO MAKE IT CDATA?
                            if outputcsv:
                                vocabWriter.writerow([pth,"wordlist","wordlist",str(wordx.findtext("wordtext")),str(wordx.findtext("picturefile"))])
                    for cell in cells:
                        tt = ''.join(cell.xpath("(.//caption)/text()"))
                        style = ''.join(cell.xpath(".//stylepreset/text()"))
                        command_id = cell.xpath(".//id/text()") # Check the /Paramter/ID value to check if 'type' - i.e. being sent to the text bar.
                        ## NOT PERFECT - need to grab text sent to text bar rahter than caption...
                        if "type" in command_id or "speaknow" in command_id: # We are only interested if text is being sent to the text bar or being spoken directly.
                            #if tt != '': # UNCOMMENT TO INCLUDE ALL CELLS WITH A CAPTION.
                            if style not in blackliststyles:
                                # Implement white list too?
                                if tt not in ignorecells:
                                    if ''.join(cell.xpath(".//hidden/text()")) != '1':
                                        if(outputwordlists):
                                            word = etree.SubElement(wordlist, "word")
                                        cellchildren = cell.getchildren()
                                        vocabtext = picture = ''
                                        for cellchild in cellchildren:
                                            # Check if the cell has a type of speak command and if so save the text(s).
                                            commands = cellchild.getchildren()
                                            for command in commands:
                                                # NOTE(review): ``id`` shadows the builtin of the same name.
                                                id = command.find("id")
                                                if id is not None:
                                                    # NOTE(review): `id.text == "type" or "speaknow"` is
                                                    # ALWAYS true ("speaknow" is truthy); probably meant
                                                    # id.text in ("type", "speaknow").
                                                    if id.text == "type" or "speaknow":
                                                        parameters = command.findall("parameter")
                                                        for parameter in parameters:
                                                            if "1" in parameter.xpath(".//@index"):
                                                                vocabtext = parameter.text.strip() # Grid seems to add Asquiggle charchters to the text if there is a space in the text output. Luckily python strip ditches them!
                                                                if(outputwordlists):
                                                                    wordtext = etree.SubElement(word, "wordtext")
                                                                    wordtext.text = etree.CDATA(vocabtext)
                                        # Check if the cell has a picture (symbol) and if so save the picture path.
                                        ## Potential for blank words, if cell has symbol, but no text. What to do about this???
                                        picture = ''.join(cell.xpath(".//picture/text()"))
                                        if ((readpictures==True) and (picture != [])):
                                            if(outputwordlists):
                                                picturefile = etree.SubElement(word, "picturefile")
                                                picturefile.text = picture
                                        if (outputcsv):
                                            vocabWriter.writerow([pth,cell.get('x'),cell.get('y'),vocabtext,picture])
                    if(singlefile == False):
                        if(outinplace):
                            if(outputwordlists):
                                # Writing multiple files to Grid folders
                                file_out = open( outputpath + 'wordlist.xml', 'wb')
                                file_out.write('<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(wordlist, pretty_print=True, encoding='utf-8'))
                        else:
                            if(outputwordlists):
                                # writing multiple files to output folder (make a folder for the grids, name them by the page).
                                try:
                                    os.mkdir(outputpath + '/'+ gridsetname)
                                except OSError, e:
                                    if e.errno != errno.EEXIST:
                                        raise
                                file_out = open(outputpath + '/' + gridsetname + '/' + page +'.xml', 'wb')
                                file_out.write('<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(wordlist, pretty_print=True, encoding='utf-8'))
    # Write out to a single file after itterating the loop
    if(singlefile == True):
        if(outputwordlists):
            file_out.write('<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(wordlist, pretty_print=True, encoding='utf-8'))
def usage():
print """
This program takes a Grid 2 User folder and spits
out seperate CSV files full of the vocab in the grids
Flags:
-h, --help - This screen
-v - Verbose (debug)
-o, --output - File path of where you would like the csv/wordlist files.
Set to SAME to be same directory of grid.xml files (default)
-u, --userdir= - File path of the user Folder you want to analyse
-c, --ignorecells= - Exclude cells listed from a text file (e.g, back, jump)
-g, --ignoregrids= - Exclude grids listed from a text file (e.g, home, dogs)
-b, --blackliststyles - Exclude styles listed from a text file (e.g. colours, jumpcells)
-x, --excludehidden - Exclude hidden cells from the analysis
-w, --wordlists - Output wordlists
-s, --singlefile - single file wordlist output into one file? Otherwise, will write to seperate files (in the name of the grid)
Example Usage:
ConvertGridtoWordLists.py --userdir="Path\To\Your\Grid2\User\Folder" --output="Path\To\Dump\Output" -w
Requirements:
Python 2.3, Lxml, unicodecsv
Author:
Will Wade, [email protected]
"""
def main():
    """Parse command-line options and run parse_grids() accordingly."""
    # Defaults (see usage() for the meaning of each flag).
    gridxml='grid.xml'
    outputpath ='.'
    userdir='.'
    excludehidden=False
    outputwordlists=False
    ignoregrids=[]
    ignorecells=[]
    blackliststyles=[]
    singlefile=False
    outputcsv=False
    # rewritegrids=False
    # NOTE(review): the short-option string has no ':' markers, so the
    # short forms -o/-u/-c/-g/-b cannot take arguments even though the
    # handlers below expect one; only the long "--opt=" forms work.
    try:
        opts, args = getopt.getopt(sys.argv[1:], "houcgbxwsdv", ["help", "output=", "userdir=","ignorecells=","ignoregrids=", "blackliststyles=","excludehidden","wordlists", "singlefile", "dataascsv"])
    except getopt.GetoptError, err:
        # print help information and exit:
        print str(err) # will print something like "option -a not recognized"
        usage()
        sys.exit(2)
    output = None
    verbose = False
    for o, a in opts:
        if o == "-v":
            verbose = True
        elif o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-o", "--output"):
            # NOTE(review): input validation via ``assert`` is stripped
            # under python -O; an explicit error would be safer.
            if os.path.exists(os.path.normpath(a) + '/'):
                outputpath = os.path.normpath(a) + '/'
            else:
                assert False, "non-existent output directory: " + os.path.normpath(a) + '/'
        elif o in ("-u", "--userdir"):
            if os.path.exists(os.path.normpath(a) + '/'):
                userdir = os.path.normpath(a) + '/'
            else:
                assert False, "non-existent user directory: " + os.path.normpath(a) + '/'
        elif o in ("-x", "--excludehidden"):
            excludehidden = True
        elif o in ("-w", "--wordlists"):
            outputwordlists = True
        elif o in ("-g", "--ignoregrids"):
            if os.path.exists(os.path.normpath(a)):
                ignoregrids = filetolist(os.path.normpath(a))
            else:
                assert False, "non-existent ignoregrids file: " + os.path.normpath(a)
        elif o in ("-c", "--ignorecells"):
            if os.path.exists(os.path.normpath(a)):
                ignorecells = filetolist(os.path.normpath(a))
            else:
                assert False, "non-existent ignorecells file: " + os.path.normpath(a)
        elif o in ("-b", "--blackliststyles"):
            if os.path.exists(os.path.normpath(a)):
                blackliststyles = filetolist(os.path.normpath(a))
            else:
                assert False, "non-existent blacklist styles file: " + os.path.normpath(a)
        elif o in ("-s", "--singlefile"):
            singlefile = True
        elif o in ("-d", "--dataascsv"):
            outputcsv = True
        else:
            assert False, "unhandled option"
    parse_grids(gridxml,outputpath,userdir,excludehidden,outputwordlists, ignoregrids, ignorecells, blackliststyles, singlefile, outputcsv)
    # gridxml,outputpath,userdir,excludehidden,outputwordlists, ignoregrids, ignorecells, singlefile
# gridxml,outputpath,userdir,excludehidden,outputwordlists, ignoregrids, ignorecells, singlefile
# Script entry point: parse CLI arguments and run the conversion.
if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
2,645,699,874,274 |
c54159e863d1c2bbbe10bae8642e512e1c5a6d97
|
c680f696d3decaea3f4d0c1b804d071448d44d6f
|
/utils/crypto/drill_21.py
|
dd93009afbc12d8313154775d3aad2fc4a8ad60f
|
[
"MIT"
] |
permissive
|
z3v2cicidi/CTFStuff
|
https://github.com/z3v2cicidi/CTFStuff
|
60108e49b1cbe7807d8ae1a503b47156b0b90f9a
|
de4a54a01d8e121a0d5068b5fb2d3e926acf319b
|
refs/heads/master
| 2021-01-14T14:35:13.233915 | 2014-12-10T22:39:13 | 2014-12-10T22:39:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Dictionary attack on a known AES-128-CBC ciphertext: each word from
# words.txt is space-padded to 16 bytes and tried as the key until the
# encryption of the known plaintext matches the target hex ciphertext.
from Crypto.Cipher import AES
# you may modify this script to accomplish drill 21
# make sure you pad the key using '\x20' to make it 16 bytes
IV = 16 * '\x00'
mode = AES.MODE_CBC
# !!!
# The text has to be padded as well sincie it is not a multiple of 16!!
# we use the PKCS5 padding scheme
# the length of plain text is 21, to make it 32 = 2*16, we need to pad 11 bytes
# in hex 11 = 0b
text = "This is a top secret." + 11*'\x0b'
target = "8d20e5056a8d24d0462ce74e4904c1b513e10d1df4a2ef2ad4540fae1ca0aaf9"
with open('words.txt', 'r') as fin:
    word_list = fin.read().split('\n')
    #print len(word_list)
# Try each candidate key; skip words longer than the 16-byte AES key size.
for word in word_list:
    if len(word) < 17:
        padsize = 16 - len(word)
        tmpkey = word  # keep the unpadded word for reporting
        word += padsize*'\x20'
        encryptor = AES.new(word, mode, IV=IV)
        ciphertext = encryptor.encrypt(text)
        if target == ciphertext.encode('hex'):
            print "got the key:\t" + tmpkey
        #if tmpkey == 'median':
        #    print IV
        #    print text
        #    print ciphertext.encode('hex')
        #    print target
|
UTF-8
|
Python
| false | false | 2,014 |
8,727,373,578,964 |
480a7504c9ed8ecb89f374b1055521d277dd6ebd
|
c174d0e6b06131ef901bb8486b300a11ac423b96
|
/apps/operations/migrations/0001_initial.py
|
d779fc6b6c40f78ca6a51dcf43ce4e556490697f
|
[] |
no_license
|
Marakuba/CloudStore
|
https://github.com/Marakuba/CloudStore
|
d13665256718c8cba25671819fc9f76d0bd164ea
|
2c1429496b0e04ee11fe7f64cbfad4741a5444bc
|
refs/heads/master
| 2021-01-23T07:15:33.623230 | 2011-01-27T20:41:32 | 2011-01-27T20:41:32 | 1,278,311 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South migration: creates the operations_invoice and
    operations_entry tables. Do not hand-edit the frozen ORM below."""
    def forwards(self, orm):
        """Apply the migration: create both tables."""
        # Adding model 'Invoice'
        # NOTE(review): the 'create' default is a frozen literal datetime
        # captured at generation time, as South emits for auto_now-style
        # defaults — it is not "now" at runtime.
        db.create_table('operations_invoice', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('create', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 1, 27, 23, 26, 20, 330888))),
            ('organization', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['classifiers.Organization'])),
            ('stock', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['classifiers.Stock'])),
            ('invoicetype', self.gf('django.db.models.fields.CharField')(max_length=1)),
            ('comment', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
        ))
        db.send_create_signal('operations', ['Invoice'])
        # Adding model 'Entry'
        db.create_table('operations_entry', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('invoice', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['operations.Invoice'])),
            ('service', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['classifiers.Service'])),
            ('count', self.gf('django.db.models.fields.FloatField')()),
            ('cost', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=10, decimal_places=2)),
        ))
        db.send_create_signal('operations', ['Entry'])
    def backwards(self, orm):
        """Reverse the migration: drop both tables."""
        # Deleting model 'Invoice'
        db.delete_table('operations_invoice')
        # Deleting model 'Entry'
        db.delete_table('operations_entry')
    # Frozen ORM snapshot used by South while running this migration.
    models = {
        'classifiers.organization': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Organization'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'})
        },
        'classifiers.service': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Service'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
            'servicegeneralgroup': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'services'", 'null': 'True', 'to': "orm['classifiers.ServiceGeneralGroup']"}),
            'unitmeas': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classifiers.Unitmeas']", 'null': 'True'})
        },
        'classifiers.servicegeneralgroup': {
            'Meta': {'ordering': "('name',)", 'object_name': 'ServiceGeneralGroup'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['classifiers.ServiceGeneralGroup']"}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'classifiers.stock': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Stock'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'})
        },
        'classifiers.unitmeas': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Unitmeas'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
        },
        'operations.entry': {
            'Meta': {'ordering': "('invoice',)", 'object_name': 'Entry'},
            'cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2'}),
            'count': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['operations.Invoice']"}),
            'service': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classifiers.Service']"})
        },
        'operations.invoice': {
            'Meta': {'ordering': "('create',)", 'object_name': 'Invoice'},
            'comment': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'create': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2011, 1, 27, 23, 26, 20, 330888)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoicetype': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classifiers.Organization']"}),
            'stock': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classifiers.Stock']"})
        }
    }
    complete_apps = ['operations']
|
UTF-8
|
Python
| false | false | 2,011 |
15,255,723,856,842 |
735e39b9fdb5c4c8bada58e577e90f70f5946fdb
|
c7faef6d4c0a965032a37953f34373816ce092c1
|
/Products/MetaPublisher2/bases/field/legacyfield.py
|
be4c7140fa1b2ff0285dae780df6bbd97e520af2
|
[
"ZPL-2.1"
] |
permissive
|
sfluehnsdorf/MetaPublisher2
|
https://github.com/sfluehnsdorf/MetaPublisher2
|
2d52a6baf1822a94ba12f66c86760953dd1cead4
|
4688baa9182919a8f8da8a0afbd68997e4453708
|
refs/heads/master
| 2019-01-01T23:22:04.677296 | 2013-06-23T12:03:46 | 2013-06-23T12:03:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: iso-8859-15 -*-
# ============================================================================
#
# M e t a P u b l i s h e r 2
#
# ----------------------------------------------------------------------------
# Copyright (c) 2002-2013, Sebastian Lühnsdorf - Web-Solutions and others
# For more information see the README.txt file or visit www.metapulisher.org
# ----------------------------------------------------------------------------
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL).
#
# A copy of the ZPL should accompany this distribution.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
# ============================================================================
__doc__ = """Legacy Field Plugin Base
!TXT! module info
$Id: bases/field/legacyfield.py 10 2013-05-09 17:22:57Z sfluehnsdorf $
"""
__version__ = '$Revision: 2.3 $'[11:-2]
# ============================================================================
# Module Imports
from Products.MetaPublisher2.bases.plugin.legacyplugin import LegacyPluginBase
from Products.MetaPublisher2.library.common import ClassSecurityInfo, DTMLFile, InitializeClass
from field import FieldPluginBase
# ============================================================================
# Module Exports
__all__ = [
'LegacyFieldPlugin',
]
# ============================================================================
# Legacy Field Plugin Base Class
class LegacyFieldPlugin(LegacyPluginBase, FieldPluginBase):
"""!TXT! Legacy Field Base Class"""
security = ClassSecurityInfo()
# --------------------------------------------------------------------------
# Field Attributes
isZMP2FieldPlugin = 1
# --------------------------------------------------------------------------
# Field Identity API
getFieldObject = FieldPluginBase.get_plugin_instance
getFieldId = FieldPluginBase.get_plugin_id
getFieldURL = FieldPluginBase.get_plugin_url
# --------------------------------------------------------------------------
def get_immutable_pluginflag_ids(self):
"""!TXT! Return list of Plugin flag ids, which are either constants or set by an external source and may not be altered by MetaPublisher2 or its users"""
# !!! bases/field/legacyfield.py - get_immutable_pluginflag_ids
return []
def get_mutable_pluginflag_ids(self):
"""!TXT! Return list of Plugin flag ids, which may be altered by MetaPublisher2 and its users"""
# !!! bases/field/legacyfield.py - get_mutable_pluginflag_ids
return []
# --------------------------------------------------------------------------
# Field Specification
def getFieldInfo(self):
"""!TXT! Return information about this Field if available"""
raise NotImplementedError
def get_plugin_infos(self):
"""!TXT! Return information about this Field if available"""
return self.getFieldInfo()
# --------------------------------------------------------------------------
# Field ZMI
manage_configureFieldForm = DTMLFile('fieldplugin_edit', globals())
def manage_configureField(self, REQUEST=None):
"""!TXT! Change Field's configuration parameters"""
self.title = REQUEST.get('title', '')
self.redirect(
REQUEST,
'fields_form',
message='!TXT! Changes saved',
storage_id=self.getStorageId()
)
def renderAdd(self):
"""!TXT! Return a html code for adding an Entry with this Field"""
raise NotImplementedError
def renderEdit(self, entry_id):
"""!TXT! Return a html code for editing an Entry with this Field"""
raise NotImplementedError
def renderView(self, entry_id):
"""!TXT! Return a html code for viewing an Entry with this Field"""
raise NotImplementedError
# --------------------------------------------------------------------------
# Field Retrieval API
def _getValue(self, entryId, default):
"""!TXT! Retrieve a value from an entry"""
raise NotImplementedError
def getValue(self, entryId, default=None):
"""!TXT! Wapper for retrieving a value from an entry"""
return self._getValue(entryId, default)
def _hasValue(self, entry_id):
"""!TXT! Return 1 if the Entry has a value stored for this Field, 0 otherwise"""
raise NotImplementedError
def hasValue(self, entry_id):
    """!TXT! Wrapper for testing existence of a value in an Entry"""
    # Public API; delegates to the subclass-provided _hasValue.
    return self._hasValue(entry_id)
def _testValue(self, value, options={}):
    """!TXT! Test a value for validity"""
    # NOTE(review): mutable default argument; harmless while this remains an
    # abstract stub, but overrides should avoid mutating `options`.
    raise NotImplementedError
def testValue(self, value, **options):
    """!TXT! Wrapper for testing a value's validity

    Delegates to the subclass-provided _testValue; any failure is
    re-raised as a TestError carrying the original exception info.
    """
    try:
        return self._testValue(value, options)
    except:
        # sys.exc_type / sys.exc_value are deprecated and not thread-safe;
        # sys.exc_info() is the supported, thread-local equivalent.
        error_type, error_value = sys.exc_info()[:2]
        raise TestError(errorType=error_type, errorValue=error_value)
# --------------------------------------------------------------------------
# Field Mutation API
def _setValue(self, entry_id, value):
    """!TXT! Store a value in an entry"""
    # Abstract storage hook: concrete Field plugins must override this.
    raise NotImplementedError
def setData(self, entry_id, data):
    """!TXT! Set the value inside data for this Field in the Entry"""
    # Abstract hook: concrete Field plugins must override this.
    raise NotImplementedError
def setDefault(self, entry_id):
    """!TXT! Set the default value for this Field in the Entry"""
    # Abstract hook: concrete Field plugins must override this.
    raise NotImplementedError
def setValue(self, entry_id, value):
    """!TXT! Wrapper for storing a value in an entry

    Validates the value via testValue first; a validation failure
    re-raises the original error captured inside the TestError.
    """
    try:
        result = self.testValue(value)
        self._setValue(entry_id, result)
    except TestError as error:
        # Portable replacement for the legacy two-argument
        # `raise type, value` form (Python 2 only syntax).
        raise error.errorType(error.errorValue)
# ------------------------------------------------------------------------------
# initialize class security
InitializeClass(LegacyFieldPlugin)
# !!! bases/field/legacyfield.py - revise and update legacy api
|
ISO-8859-3
|
Python
| false | false | 2,013 |
429,496,764,125 |
4c2435a2dded226ae134af77878a7aa1027a644e
|
d49cf28f65950ef03179c8465238f10426471007
|
/annoyances.py
|
e8343c08625550c7d7275e5fd4a12cce73605986
|
[] |
no_license
|
Make-Magazine/Dish-Detector
|
https://github.com/Make-Magazine/Dish-Detector
|
3f2307a847a8462b3c038f97821b6765f5aaa9d3
|
8d25b002b0481913579349163c08622ecfffb211
|
refs/heads/master
| 2021-01-18T04:20:08.707376 | 2012-01-18T14:27:18 | 2012-01-18T14:27:18 | 20,770,317 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import socket
import urllib, urllib2
'''
annoyances.py
Things to annoy people with.
Each function is in the table-o-annoyances and is called with the doAlarm method
THESE WILL NEED EDITING FOR YOUR CONFIGURATION. All of these are specific to our network
'''
class alarms:
    """Table of escalating 'annoyances' (traffic light + IRC nags) driven by doAlarm."""

    #change the status of our traffic light
    def sendTraffic(self, lev):
        # Levels 0-3 map to traffic light states; 0 means "all clear".
        if 0 <= lev < 4:
            message = urllib.quote("$rage" + str(lev))
            urllib2.urlopen("http://babbage:8020/%s" % message)
        else:
            print "invalid level"

    #first annoyance
    def first(self, state):
        if state:
            self.sendTraffic(1)
            self.ircSpeak("The sink has stuff in it")
        else:
            self.sendTraffic(0)

    #second annoyance
    def second(self, state):
        if state:
            self.sendTraffic(2)
            self.ircSpeak("The sink *still* has washing up in it")
        else:
            self.sendTraffic(0)

    #third annoyance
    def third(self,state):
        if state:
            self.sendTraffic(3)
            self.ircSpeak("FFS the sink needs cleaning, someone sort it out!")
        else:
            self.sendTraffic(0)

    #build a function list to call from doAlarm
    # NOTE: these are plain (unbound) functions, hence the explicit
    # `a(self, ...)` call style below.
    alarmList = [first, second, third]

    def __init__ (self):
        #I used to ping things to the serial port until I decided that the traffic lights were a better idea
        print "serial port not available, using IRC and traffic lights"

    #send a string through IRC, this depends on our network setup so will need rewriting for yours
    def ircSpeak(self, text):
        sc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sc.connect(("babbage",12345))
        sc.send(text)
        sc.close()

    #trigger an alarm, first stop all other alarms, then start our requested one
    def doAlarm(self, level):
        if 0 <= level < len(self.alarmList):
            for a in self.alarmList:
                a(self, False)
            self.alarmList[level](self, True)

    #cycle through all alarm methods and run the "stop" command
    def stopAllAlarms(self):
        for a in self.alarmList:
            a(self, False)
        #the next line got annoying
        # self.ircSpeak("The sink has been cleared, happy days")
|
UTF-8
|
Python
| false | false | 2,012 |
8,761,733,305,047 |
b997625316f5dda0ff2a57ff04b70cdf77f3e89d
|
f469459d44911aef94f50629dd550bcc0deb2f7a
|
/smap_hermes/generate_map.py
|
3914711d79646ab5f5d7c8541de70b9ec30c97a7
|
[] |
no_license
|
aconley/smap_hermes
|
https://github.com/aconley/smap_hermes
|
40b5ee9e6296ea0777039b07cfd5493b121a3b89
|
7e1291dcb279fbe94c3ba7189de380f487c9bbc4
|
refs/heads/master
| 2020-05-18T10:45:43.751451 | 2014-10-28T18:57:36 | 2014-10-28T18:57:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" Generate a SMAP map from an input catalog"""
from __future__ import print_function
from .smap_struct import smap_map
from .smap_beam import get_gauss_beam
import numpy as np
try:
from astropy.convolution import convolve
except ImportError:
from astropy.nddata import convolve
__all__ = ["cattomap_gauss"]
def cattomap_gauss(area, fluxes, wave=[250.0, 350, 500],
                   pixsize=[6.0, 8.33333, 12.0], racen=25.0, deccen=0.0,
                   fwhm=[17.6, 23.9, 35.2], nfwhm=5.0, bmoversamp=5,
                   sigma_inst=None, verbose=False):
    """ Generates simulated maps as SMAP structures using a Gaussian beam
    from an input catalog of flux densities.

    Parameters
    ----------
    area: float
      Area of generated maps, in deg^2
    fluxes: ndarray
      Array of flux densities in Jy; either 1D (single band) or
      nsources by nbands.
    wave: ndarray
      Wavelengths to generate maps at, in microns.  Must have at
      least as many elements as there are bands.
    pixsize: ndarray
      Pixel sizes of output maps, in arcsec.
    racen: float
      Right ascension of generated maps
    deccen: float
      Declination of generated maps
    fwhm: ndarray
      Beam FWHM values, in arcsec.
    nfwhm: float
      How far out, in units of FWHM, to generate the beams
    bmoversamp: int
      Currently unused; retained for interface compatibility.
    sigma_inst: ndarray or None
      Map instrument noise, in Jy.  If None, no instrument noise is added.
    verbose: bool
      Print informational messages as it runs.

    Returns
    -------
    A tuple (maps, xpos, ypos): the generated maps and the x/y
    positions of the sources in the first map.  Note that the maps
    are indexed in [y, x] order.
    """
    import math
    from numbers import Number

    # Check inputs
    if not isinstance(fluxes, np.ndarray):
        raise TypeError("Input fluxes not ndarray")

    # Get number of bands.  Promote 1D input to 2D so the per-band
    # column access (fluxes[:, i]) below also works in the 1 band
    # case -- previously that raised IndexError.
    if len(fluxes.shape) == 1:
        nbands = 1
        fluxes = fluxes.reshape(-1, 1)
    elif len(fluxes.shape) == 2:
        nbands = fluxes.shape[1]
    else:
        raise ValueError("Input fluxes of unexpected dimension")

    if len(wave) < nbands:
        raise ValueError("Number of wavelengths not the same as the"
                         " number of bands")
    if len(pixsize) < nbands:
        raise ValueError("Number of pixsize values not the same as the"
                         " number of bands")
    if len(fwhm) < nbands:
        raise ValueError("Number of FWHM values not the same as the"
                         " number of bands")

    nsrcs = fluxes.shape[0]

    # Normalize sigma_inst into a per-band float32 array (or mark absent).
    if sigma_inst is None:
        has_sigma = False
    elif isinstance(sigma_inst, Number):
        # Single value -- replicate it out
        has_sigma = True
        int_sigma = sigma_inst * np.ones(nbands, dtype=np.float32)
    else:
        has_sigma = True
        int_sigma = np.asarray(sigma_inst, dtype=np.float32)
        if len(int_sigma) == 1 and nbands > 1:
            int_sigma = int_sigma[0] * np.ones(nbands, dtype=np.float32)
        elif len(int_sigma) < nbands:
            raise ValueError("Not enough instrument sigma values for #bands")
    if has_sigma and int_sigma.min() < 0:
        raise ValueError("Invalid (negative) instrument sigma")

    # Create the empty maps
    nextent = np.empty(nbands, dtype=np.int32)
    truearea = np.empty(nbands, dtype=np.float32)
    maps = []
    for i in range(nbands):
        pixarea = (pixsize[i] / 3600.0)**2
        nextent[i] = math.ceil(math.sqrt(area / pixarea))
        truearea[i] = nextent[i]**2 * pixarea
        s_map = smap_map()
        if has_sigma:
            s_map.create(np.zeros((nextent[i], nextent[i]), dtype=np.float32),
                         pixsize[i], racen, deccen, wave=wave[i],
                         error=int_sigma[i]*np.ones((nextent[i], nextent[i]),
                                                    dtype=np.float32))
        else:
            s_map.create(np.zeros((nextent[i], nextent[i]), dtype=np.float32),
                         pixsize[i], racen, deccen, wave=wave[i])
        maps.append(s_map)

    # Generate positions in the first band (band-0 pixel units) for all sources
    xpos = nextent[0] * np.random.rand(nsrcs)
    ypos = nextent[0] * np.random.rand(nsrcs)

    # Construct maps
    for i in range(nbands):
        if verbose:
            print("Preparing {:0.1f} um map".format(wave[i]))

        # Add sources.  Positions are rescaled from band-0 pixels and
        # cast to integer indices -- float index arrays are an error
        # in modern numpy.
        if verbose:
            print(" Inserting sources")
        relpix = pixsize[i] / pixsize[0]
        xf = np.floor(xpos * relpix).astype(np.intp)
        yf = np.floor(ypos * relpix).astype(np.intp)
        cmap = maps[i].image
        nx, ny = cmap.shape
        # Clamp sources that land just past the map edge.
        np.place(xf, xf > nx-1, nx-1)
        np.place(yf, yf > ny-1, ny-1)
        for cx, cy, cf in zip(xf, yf, fluxes[:, i]):
            cmap[cy, cx] += cf  # Note y, x

        # Smooth with the Gaussian beam
        if verbose:
            print(" Smoothing")
        bm = get_gauss_beam(fwhm[i], pixsize[i], nfwhm)
        maps[i].image = convolve(cmap, bm, boundary='wrap')

        # Instrument noise; maps are mean-subtracted afterwards.
        if has_sigma:
            if verbose:
                print(" Adding noise")
            maps[i].image += np.random.normal(scale=int_sigma[i],
                                              size=maps[i].image.shape)
            maps[i].image -= maps[i].image.mean()

    return (maps, xpos, ypos)
|
UTF-8
|
Python
| false | false | 2,014 |
16,956,530,891,289 |
f2f0487acf9f825d2e039efb1fc7dd1d14f9fec2
|
1d372fab0d1dcc0845d21203bc6d3c8750f4acf2
|
/hal4000/camera/andorCameraControl.py
|
243967463ecb228a30925405986ca02ffaa7f14e
|
[] |
no_license
|
yongdengzhang/storm-control
|
https://github.com/yongdengzhang/storm-control
|
084b8e10f170e971e56c96a1053c1c880b3461f9
|
e8276fa9cf3f498500d7dd4ffc7888f94a8d96f9
|
refs/heads/master
| 2021-01-17T22:32:23.811515 | 2014-06-10T19:31:52 | 2014-06-10T19:31:52 | 20,910,954 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#
## @file
#
# Camera control specialized for a Andor camera.
#
# Hazen 10/13
#
from PyQt4 import QtCore
import numpy
import os
import platform
import traceback
# Debugging
import sc_library.hdebug as hdebug
import camera.frame as frame
import camera.cameraControl as cameraControl
import sc_hardware.andor.andorcontroller as andor
## ACameraControl
#
# The CameraControl class specialized to control a Andor camera.
#
class ACameraControl(cameraControl.CameraControl):
## __init__
#
# Create the CameraControl class.
#
# @param hardware Camera hardware settings.
# @param parent (Optional) The PyQt parent of this object.
#
@hdebug.debug
def __init__(self, hardware, parent = None):
    """Create the Andor camera control object."""
    cameraControl.CameraControl.__init__(self, hardware, parent)
    # An optional PCI card index selects among multiple Andor cards.
    if hasattr(hardware, "pci_card"):
        self.initCamera(hardware.pci_card)
    else:
        self.initCamera()
## closeShutter
#
# Stop the camera and close the shutter.
#
@hdebug.debug
def closeShutter(self):
    """Stop the camera and close the shutter."""
    self.shutter = False
    self.stopCamera()
    if self.got_camera:
        # Some cameras have inverted shutter wiring, so "close" may
        # require sending the open command.
        if self.reversed_shutter:
            self.camera.openShutter()
        else:
            self.camera.closeShutter()
## getAcquisitionTimings
#
# Stop the camera and get the acquisition timings (basically the frame rate)
#
@hdebug.debug
def getAcquisitionTimings(self):
    """Stop the camera and return its acquisition timings (frame rate)."""
    self.stopCamera()
    if self.got_camera:
        return self.camera.getAcquisitionTimings()
    else:
        # Fallback timings when no camera is connected.
        return [1.0, 1.0, 1.0]
## getTemperature
#
# Stop the camera and get the camera temperature.
#
@hdebug.debug
def getTemperature(self):
    """Stop the camera and return [temperature, stability-state]."""
    self.stopCamera()
    if self.got_camera:
        return self.camera.getTemperature()
    else:
        # Dummy reading when no camera is connected.
        return [50, "unstable"]
## haveEMCCD
#
# Returns that this is a EMCCD camera.
#
# @return True, this is a EMCCD camera.
#
@hdebug.debug
def haveEMCCD(self):
    """Capability flag: this is an EMCCD camera."""
    return True
## havePreamp
#
# Returns that the camera has a pre-amplifier.
#
# @return True, this camera has a pre-amplifier.
#
@hdebug.debug
def havePreamp(self):
    """Capability flag: this camera has a pre-amplifier."""
    return True
## haveShutter
#
# Returns that the camera has a shutter.
#
# @return True, this camera has a shutter.
#
@hdebug.debug
def haveShutter(self):
    """Capability flag: this camera has a shutter."""
    return True
## haveTemperature
#
# Returns that this camera can measure its sensor temperature.
#
# @return True, this camera can measure its sensor temperature.
#
@hdebug.debug
def haveTemperature(self):
    """Capability flag: this camera can measure its sensor temperature."""
    return True
## initCamera
#
# This tries to find the right driver file to operate the camera
# based on the OS type (32 or 64bit) and a search of the common
# Andor directory names.
#
# @param pci_card (Optional) The ID of the PC card to use.
#
@hdebug.debug
def initCamera(self, pci_card = 0):
    """Locate the Andor driver DLL and initialize the camera.

    Probes the common Andor install directories for the DLL matching
    the interpreter's bitness (32 vs 64 bit); the first existing
    candidate wins.  Logs a message if no driver is found.
    """
    if not self.camera:
        hdebug.logText("Initializing Andor Camera", False)
        # Candidate (path, driver) pairs by interpreter bitness.
        if (platform.architecture()[0] == "32bit"):
            candidates = [("c:/Program Files/Andor iXon/Drivers/", "atmcd32d.dll"),
                          ("c:/Program Files/Andor Solis/", "atmcd32d.dll")]
        else:
            candidates = [("c:/Program Files/Andor Solis/Drivers/", "atmcd64d.dll"),
                          ("c:/Program Files (x86)/Andor Solis/Drivers/", "atmcd64d.dll")]
        for path, driver in candidates:
            if os.path.exists(path + driver):
                self.initCameraHelperFn(path, driver, pci_card)
                return
        hdebug.logText("Can't find Andor Camera drivers")
## initCameraHelperFn
#
# Given the path, driver and pci_card ID this creates a Andor
# camera controller class.
#
# @param path The path to the Andor camera DLL.
# @param driver The name of the Andor camera DLL.
# @param pci_card The ID of the PCI card.
#
@hdebug.debug
def initCameraHelperFn(self, path, driver, pci_card):
    """Load the Andor DLL and create the camera controller for pci_card."""
    andor.loadAndorDLL(path + driver)
    handle = andor.getCameraHandles()[pci_card]
    self.camera = andor.AndorCamera(path, handle)
## newFilmSettings
#
# This is called at the start of a acquisition to get the camera
# running in the right mode (fixed length or run till abort) and
# to set the camera fan speed. Fixed length is only used for films
# that are less than 1000 frames in length, otherwise they are
# generally too large to easily store in RAM.
#
# @param parameters The current camera settings object.
# @param film_settings A film settings object or None.
#
@hdebug.debug
def newFilmSettings(self, parameters, film_settings):
    """Configure acquisition mode and fan speed for a new film (or live mode)."""
    self.stopCamera()
    self.mutex.lock()
    p = parameters
    if self.got_camera:
        self.reached_max_frames = False
        if film_settings:
            self.filming = True
            self.acq_mode = film_settings.acq_mode
            self.frames_to_take = film_settings.frames_to_take
            if (self.acq_mode == "fixed_length"):
                # If the film is really long then we use a software stop, otherwise
                # we tell the camera to take the number of frames that was requested.
                if (self.frames_to_take > 1000):
                    self.camera.setACQMode("run_till_abort")
                else:
                    self.camera.setACQMode("fixed_length", number_frames = self.frames_to_take)
            else:
                self.camera.setACQMode("run_till_abort")
        else:
            self.filming = False
            self.acq_mode = "run_till_abort"
            self.camera.setACQMode("run_till_abort")
        # Due to what I can only assume is a bug in some of the
        # older Andor software you need to reset the frame
        # transfer mode after setting the aquisition mode.
        self.camera.setFrameTransferMode(p.frame_transfer_mode)
        # Set camera fan to low. This is overriden by the off option
        if p.get("low_during_filming"):
            if self.filming:
                self.camera.setFanMode(1) # fan on low
            else:
                self.camera.setFanMode(0) # fan on full
        # This is for testing whether the camera fan is shaking the
        # the camera, adding noise to the images.
        if p.get("off_during_filming"):
            if self.filming:
                self.camera.setFanMode(2) # fan off
            else:
                self.camera.setFanMode(0) # fan on full
    self.mutex.unlock()
## newParameters
#
# Called when the user selects a new parameters file.
#
# @param parameters The new parameters object.
#
@hdebug.debug
def newParameters(self, parameters):
    """Apply a new parameters object to the camera hardware.

    Each setting is logged before it is applied so a failure in the
    SDK is easy to localize.  Any exception marks the camera unusable
    (got_camera = False) rather than crashing the UI.
    """
    #self.initCamera()
    p = parameters
    self.reversed_shutter = p.get("reversed_shutter")
    try:
        hdebug.logText("Setting Read Mode", False)
        self.camera.setReadMode(4)
        hdebug.logText("Setting Temperature", False)
        self.camera.setTemperature(p.get("temperature"))
        hdebug.logText("Setting Trigger Mode", False)
        self.camera.setTriggerMode(0)
        hdebug.logText("Setting ROI and Binning", False)
        self.camera.setROIAndBinning(p.get("ROI"), p.get("binning"))
        hdebug.logText("Setting Horizontal Shift Speed", False)
        self.camera.setHSSpeed(p.get("hsspeed"))
        hdebug.logText("Setting Vertical Shift Amplitude", False)
        self.camera.setVSAmplitude(p.get("vsamplitude"))
        hdebug.logText("Setting Vertical Shift Speed", False)
        self.camera.setVSSpeed(p.get("vsspeed"))
        hdebug.logText("Setting EM Gain Mode", False)
        self.camera.setEMGainMode(p.get("emgainmode"))
        hdebug.logText("Setting EM Gain", False)
        self.camera.setEMCCDGain(p.get("emccd_gain"))
        hdebug.logText("Setting Baseline Clamp", False)
        self.camera.setBaselineClamp(p.get("baselineclamp"))
        hdebug.logText("Setting Preamp Gain", False)
        self.camera.setPreAmpGain(p.get("preampgain"))
        hdebug.logText("Setting Acquisition Mode", False)
        self.camera.setACQMode("run_till_abort")
        hdebug.logText("Setting Frame Transfer Mode", False)
        self.camera.setFrameTransferMode(p.get("frame_transfer_mode"))
        hdebug.logText("Setting Exposure Time", False)
        self.camera.setExposureTime(p.get("exposure_time"))
        hdebug.logText("Setting Kinetic Cycle Time", False)
        self.camera.setKineticCycleTime(p.get("kinetic_cycle_time"))
        hdebug.logText("Setting ADChannel", False)
        self.camera.setADChannel(p.get("adchannel"))
        p.head_model = self.camera.getHeadModel()
        hdebug.logText("Camera Initialized", False)
        self.got_camera = True
    except:
        hdebug.logText("andorCameraControl: Bad camera settings")
        print traceback.format_exc()
        self.got_camera = False
    self.newFilmSettings(parameters, None)
    self.parameters = parameters
## openShutter
#
# Stops the camera and opens the camera shutter.
#
@hdebug.debug
def openShutter(self):
    """Stop the camera and open the camera shutter."""
    self.shutter = True
    self.stopCamera()
    if not self.got_camera:
        return
    # Honor hardware with inverted shutter wiring.
    if self.reversed_shutter:
        self.camera.closeShutter()
    else:
        self.camera.openShutter()
## quit
#
# Stops the camera thread and closes the connection to the camera.
#
@hdebug.debug
def quit(self):
    """Stop the acquisition thread, wait for it, then shut the camera down."""
    self.stopThread()
    self.wait()
    if self.got_camera:
        self.camera.shutdown()
## run
#
# This is the thread loop that handles getting the new frames from
# the camera, saving them in filming mode and signaling that the
# camera has new data, or that the camera is idle.
#
def run(self):
    """Acquisition thread loop.

    Fetches new frames from the camera, saves them to the dax file
    when filming, and emits newData / max-frames signals.  Signals
    idle when no acquisition is active.
    """
    while(self.running):
        self.mutex.lock()
        if self.acquire.amActive() and self.got_camera:
            # Get data from camera and create frame objects.
            [frames, frame_size, state] = self.camera.getImages16()
            # Check if we got new frame data.
            if (len(frames) > 0):
                # Create frame objects.
                frame_data = []
                for raw_frame in frames:
                    aframe = frame.Frame(numpy.fromstring(raw_frame, dtype = numpy.uint16),
                                         self.frame_number,
                                         frame_size[0],
                                         frame_size[1],
                                         "camera1",
                                         True)
                    frame_data.append(aframe)
                    self.frame_number += 1
                    if self.filming:
                        if self.daxfile:
                            # Fixed length films only save up to the requested count.
                            if (self.acq_mode == "fixed_length"):
                                if (self.frame_number <= self.frames_to_take):
                                    self.daxfile.saveFrame(aframe)
                            else:
                                self.daxfile.saveFrame(aframe)
                        if (self.acq_mode == "fixed_length") and (self.frame_number == self.frames_to_take):
                            self.reached_max_frames = True
                            break
                # Emit new data signal.
                self.newData.emit(frame_data, self.key)
                # Emit max frames signal.
                #
                # The signal is emitted here because if it is emitted before
                # newData then you never see that last frame in the movie, which
                # is particularly problematic for single frame movies.
                #
                if self.reached_max_frames:
                    self.max_frames_sig.emit()
        else:
            self.acquire.idle()
        self.mutex.unlock()
        self.msleep(5)
## setEMCCDGain
#
# Set the EMCCD gain of the camera.
#
# @param gain The desired EMCCD gain value.
#
@hdebug.debug
def setEMCCDGain(self, gain):
    """Stop the camera, then set the EMCCD gain."""
    self.stopCamera()
    if self.got_camera:
        self.camera.setEMCCDGain(gain)
## startCamera
#
# Start a new camera acquisition. The key parameter is to
# ensure that camera frames taken with older parameters
# are ignored. This can be a problem due to thread
# synchronization issues.
#
# @param key The ID to use for the frames from this acquisition series.
#
@hdebug.debug
def startCamera(self, key):
    """Start a new acquisition series.

    `key` tags the frames of this series so that frames taken with
    older parameters can be ignored (thread synchronization issue).
    """
    #if self.have_paused:
    self.mutex.lock()
    self.acquire.go()
    self.key = key
    self.frame_number = 0
    self.max_frames_sig.reset()
    if self.got_camera:
        self.camera.startAcquisition()
    self.mutex.unlock()
## stopCamera
#
# Stop the current acquisition series.
#
@hdebug.debug
def stopCamera(self):
    """Stop the current acquisition series and wait until the loop is idle."""
    if self.acquire.amActive():
        self.mutex.lock()
        if self.got_camera:
            self.camera.stopAcquisition()
        self.acquire.stop()
        self.mutex.unlock()
        # Busy-wait (50us steps) for the acquisition thread to go idle.
        while not self.acquire.amIdle():
            self.usleep(50)
#
# The MIT License
#
# Copyright (c) 2013 Zhuang Lab, Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
|
UTF-8
|
Python
| false | false | 2,014 |
1,168,231,136,518 |
cab568f02bf987a3cdd3db5a66ae4daf11288aec
|
92330c77628d99f80c28989f82b6fc191480c995
|
/ita/web/user.py
|
ebbf67c339947c61b387566aa3de79b6ed607b10
|
[
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] |
non_permissive
|
bopopescu/Assigment-generator
|
https://github.com/bopopescu/Assigment-generator
|
881eff8333ba775405ec50880ca284ac0572db86
|
2a3d0d10f1e3b215a79efc727e26d2ebbf1bb7a3
|
refs/heads/master
| 2022-11-23T18:17:48.631978 | 2013-05-21T08:45:08 | 2013-05-21T08:45:08 | 282,134,265 | 0 | 0 | null | true | 2020-07-24T05:48:47 | 2020-07-24T05:48:46 | 2013-09-08T10:20:16 | 2013-05-25T20:12:27 | 1,043 | 0 | 0 | 0 | null | false | false |
import database
from bottle import route, post, request, redirect, response, hook
from helpers import *
from decorator import decorator
from models import User
from exception import *
###############################################################################
# funkce pro komunikaci s vnějškem
def role( *allowed ):
    """Decorator restricting a view to users holding one of the *allowed* roles."""
    def wrapper(f, *args, **kwargs):
        usr = User.getCurrent();
        if not usr:
            # Not logged in -> bounce to the login page.
            msg("Pro přístup se musíte nejdříve přihlásit")
            redirect("/login")
        for role in usr.read('roles', tuple() ):
            if role in allowed:
                return f(*args, **kwargs)
        # Logged in but lacking every allowed role.
        msg("Nemáte dostatečná oprávnění", "error")
        return unauthorized()
    return decorator(wrapper)
def getUser():
    """Return the currently authenticated User, or None when logged out."""
    return User.getCurrent()
################################################################################
# stránky
@route("/chosenOne", method=['GET', 'POST'])
@role('master', 'lector')
def profil():
    """Profile page: lets the logged-in user change their password."""
    usr = getUser()
    psw = request.forms.get("psw")
    if psw:
        # Require the confirmation field to match before changing anything.
        pswControl = request.forms.get("pswControl")
        if psw == pswControl:
            usr.setPassword(psw)
            msg("Hesla nastaveno","success")
        else:
            msg("Hesla se neshodují","error")
        redirect("/chosenOne")
    return template("profil", {"user":usr} )
@route('/lectors', method=['GET', 'POST'])
@role('master')
def list():
    """Lector administration: promote/degrade masters and create new lectors."""
    # NOTE(review): shadows the builtin `list`; harmless here since the
    # builtin is not used in this module, but a rename would be cleaner.
    if request.params.get("promote"):
        lec = User.get( request.params.get("promote") )
        lec.addRole("master")
        msg("Lektor %s byl povýšen" % lec.login,"success")
        redirect(request.path)
    if request.params.get("degrade"):
        lec = User.get( request.params.get("degrade") )
        lec.removeRole("master")
        msg("Lektor %s byl ponížen :-)" % lec.login,"success")
        redirect(request.path)
    # Insert a new lector; the initial password defaults to the login.
    if request.forms.get("add"):
        login = request.forms.decode().get("add")
        usr = User.insertLector(login, psw = login )
        if usr:
            msg("Lektor '%s' vytvořen" % usr.login, "success")
            msg("Heslo pro nového lektora bylo nastaveno na '%s'" % usr.login, "info")
        else:
            msg("Chyba při vytváření lektora","error")
        redirect("/lectors")
    lectors = User.getLectors()
    return template("lectors", {"lectors" : lectors } )
@route('/lectors/delete/<login>', method=['GET', 'POST'])
def delete(login):
    """Delete a lector after a yes/no confirmation dialog."""
    # NOTE(review): unlike the other lector views this route has no @role
    # guard - presumably an oversight; confirm whether 'master' is required.
    usr = User.get( login )
    if login == getUser().login:
        # Self-deletion is forbidden.
        msg("Nelze smazat sama sebe", "error")
        redirect("/lectors")
    answer = request.forms.get("answer")
    if answer:
        if answer == "Ne": redirect("/lectors")
        if answer == "Ano":
            usr.remove()
            msg("Uživatel smazán", "success")
            redirect("/lectors")
    return template("question", {"question":"Skutečně chcete smazat lektora '%s'" % usr.login } )
############
# správa přihlášení atp
@route('/login')
def login():
    """Render the login form; already-authenticated users go to the front page."""
    if getUser(): redirect("/")
    # "lector" switches the form to the password-based lector variant.
    lectorLogin = request.params.get("lector")
    return template("login", {"lectorLogin" : lectorLogin } )
@post('/login-post')
def loginSubmit():
    """Process the login form; on failure redirect back to the login page."""
    data = request.forms
    usr = User.get( data["login"] )
    if not usr:
        msg("Uživatel '%s' nenalezen" % data["login"], "error")
        # Preserve the lector variant of the form when a password was submitted.
        redirect('/login' + ("?lector=1" if data.get("password") else "") )
    try:
        usr.authenticate( data.get("password") )
        #redirect
        msg("Úspěšně přihlášen", "success")
        redirect( "/" ) #request.path if request.path != "/login-post" else "/"
    except UserException as e:
        msg("Došlo k chybě při přihlašování - %s" % e, "error")
        redirect('/login' + ("?lector=1" if data.get("password") else "") )
@route('/logout')
def logout():
    """Log the current user out and return to the front page."""
    User.logout()
    msg("Odhlášení bylo úspěšné", "success")
    redirect("/")
@route('/unauthorized')
def unauthorized():
    """Render the 401 (unauthorized) page."""
    response.status = 401 # unauthorized
    return template("unauthorized")
###############################################################################
# callbacky
@hook("before_request")
def userMenu():
    """Populate the navigation menu for the current user before each request."""
    usr = getUser()
    if usr:
        addMenu("/logout","Odhlásit se (%s)" % usr.login, 100)
        # Everyone except students gets a profile link.
        if not usr.inRole("student"):
            addMenu("/chosenOne", "Profil", 97)
        if usr.inRole("master"):
            addMenu("/lectors", "Cvičící", 95)
    else:
        addMenu("/login","Přihlásit se", 100)
|
UTF-8
|
Python
| false | false | 2,013 |
1,829,656,110,154 |
c890036f1130c8bc54eb9162581ce07dc1bc1afd
|
93d6b66922d11f555353f21dfaa9052b78a0c9f5
|
/setup.py
|
62d5d44e344d1233cd2d0e9675bbc77d3233621c
|
[
"MIT"
] |
permissive
|
avanov/book2arrange
|
https://github.com/avanov/book2arrange
|
229c2b29034868655de2c332473c4290dd4ebae3
|
f63d8aa855a27d95b1b64b8fcbe62e6f6c9ae449
|
refs/heads/master
| 2021-01-20T05:54:58.346960 | 2013-08-10T10:17:23 | 2013-08-10T10:17:23 | 7,669,660 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import sys

from setuptools import find_packages
from setuptools import setup

# NOTE(review): PY3K is computed but never used below - candidate for removal.
PY3K = sys.version_info >= (3,0)

# Read the long description; the original bare open() leaked the file handle.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as _readme_file:
    readme = _readme_file.read()

setup(
    name='book2arrange',
    version='1.0.1',
    packages=find_packages(exclude=['tests']),
    install_requires=[],
    setup_requires=[],
    tests_require=[],
    package_data={
        # If any package contains listed files, include them
        '':['*.txt', '*.rst']
    },
    include_package_data=True,
    entry_points={
        'console_scripts': [
            'book2arrange = book2arrange:main',
        ]
    },
    # PyPI metadata
    # Read more on http://docs.python.org/distutils/setupscript.html#meta-data
    author="Maxim Avanov",
    author_email="[email protected]",
    maintainer="Maxim Avanov",
    maintainer_email="[email protected]",
    description="Arrange audio files from http://www.50languages.com/ in one "
                "convenient collection for better language acquisition.",
    long_description=readme,
    license="MIT",
    url="https://github.com/avanov/book2arrange",
    download_url="https://github.com/avanov/book2arrange",
    keywords="cli utils foreign language education",
    # See the full list on http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Education',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved',
        'License :: OSI Approved :: MIT License',
        'Operating System :: MacOS',
        'Operating System :: Microsoft',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Education',
    ]
)
|
UTF-8
|
Python
| false | false | 2,013 |
5,471,788,336,927 |
14855ec90acbfaf81aecc60189824a833ca3b284
|
4c564648faf5c67bee83950f96aa2d5d71773794
|
/week0/1-Python-simple-problems-set /14.Number_to_list.py
|
aa09f300c7f88824276138107d3baaa6ce0750c0
|
[] |
no_license
|
DesislavaDimitrova/HackBulgaria
|
https://github.com/DesislavaDimitrova/HackBulgaria
|
54d3d06dd22b456c6ba430c68adc275844bf2497
|
a3dbbba5bf722a956d8a6c64d6cbd4751952f150
|
refs/heads/master
| 2021-01-01T19:10:45.533168 | 2014-12-05T18:39:01 | 2014-12-05T18:39:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Turning a number into a list of digits
def number_to_list(n):
    """Return the decimal digits of *n* as a list of ints, most significant first."""
    digits = []
    for char in str(n):
        digits.append(int(char))
    return digits
|
UTF-8
|
Python
| false | false | 2,014 |
352,187,327,894 |
d174e6a84c2b2570d9517de8489057c91eb7899b
|
275b38b655dd79efeaba2339f27fe09439d6ad00
|
/src/constants.py
|
6f3ebc9f0b1df78b713f33995683e0d8e759efea
|
[] |
no_license
|
berli/unicorn
|
https://github.com/berli/unicorn
|
034fa5f165606b3ff58569028b5e0093202d54c9
|
c1a30b5fa1882c2c6d66722e391e5fe68017b292
|
refs/heads/master
| 2021-01-01T15:51:42.730766 | 2012-07-02T10:33:49 | 2012-07-02T10:33:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2012-6-28

@author: diracfang
'''
import os

# Absolute path of this module, normalized to forward slashes.
BASE_PATH = os.path.abspath(__file__).replace('\\', '/')

# Project root (two levels above this file); all resources live beneath it.
_PROJECT_ROOT = os.path.dirname(os.path.dirname(BASE_PATH))


def _resource_path(relative):
    """Return the absolute, forward-slash path of *relative* under the project root."""
    return os.path.join(_PROJECT_ROOT, relative).replace('\\', '/')


# Sogou corpus sample sets of increasing size.
TEXT_PATH_MINI = _resource_path('resource/SogouC.mini.20061102/Sample')
TEXT_PATH_REDUCED = _resource_path('resource/SogouC.reduced.20061102/Reduced')
TEXT_PATH_FULL = _resource_path('resource/SogouC/ClassFile')
CLASS_LIST_PATH = _resource_path('resource/SogouC.mini.20061102/ClassList.txt')
CLASS_WEIGHT_LIST_PATH = _resource_path('resource/SogouC.mini.20061102/ClassWeightList.txt')
COMMON_FREQ_PATH = _resource_path('resource/SogouW/Freq/SogouLabDic.dic')
DB_DUMP_PATH = _resource_path('resource/unicorn_db.zip')
DB_ARC_NAME = 'unicorn_db.json'

# Redis connection settings.
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_DB = 2
REDIS_PREFIX = 'un_'
# Key template for per-word frequency entries.
WORD_FREQ_KEY = 'freq:%s'
# Frequency assigned to words absent from the common-frequency dictionary.
RARE_FREQ = 0.1
|
UTF-8
|
Python
| false | false | 2,012 |
386,547,059,885 |
ed51d3acb0d7abbebfbe5582f2fff1ddf0961a76
|
28c614942558229bb9adca33070331b04d454015
|
/py/terranova-151.py
|
ffcc2e867dec1ebf31a71fa67c16016c6fe0f115
|
[] |
no_license
|
qdv/Colorly
|
https://github.com/qdv/Colorly
|
95827b077b888251dea3a2ed58e8a37e98837409
|
6891a2d550a66e374c5da441b452256abccaffad
|
refs/heads/gh-pages
| 2021-05-28T02:57:53.409957 | 2014-11-12T03:00:26 | 2014-11-12T03:00:26 | 100,415,084 | 1 | 0 | null | true | 2017-08-15T20:05:44 | 2017-08-15T20:05:44 | 2017-06-21T21:35:13 | 2014-11-12T03:00:29 | 8,707 | 0 | 0 | 0 | null | null | null |
# Terranova paint palette. Every entry follows the same shape --
# "Terranova <code>" / "terranova-<code>" / "#<hex>" -- so the list is
# derived from the compact (code, hex) table below instead of repeating
# 151 near-identical dict literals. The resulting PALETTE value is
# unchanged.
_TERRANOVA_COLORS = [
    (1000, 'f0eee4'), (1100, 'f6d50e'), (1102, 'f2eece'), (1110, 'f5ecb2'),
    (1125, 'f6e991'), (1150, 'f4de5a'), (1200, 'ffba1c'), (1202, 'f4e8ce'),
    (1210, 'fbe1b3'), (1225, 'fad58f'), (1250, 'ffcf6e'), (1300, 'e09d49'),
    (1302, 'f4ebd8'), (1310, 'f7e3c8'), (1325, 'f5d6b0'), (1350, 'edc28c'),
    (1400, 'daaa54'), (1402, 'f5eacf'), (1410, 'f4e3c3'), (1425, 'f2d7a6'),
    (1450, 'e8c483'), (1500, 'cf9540'), (1502, 'f2e3c8'), (1510, 'f4d8ac'),
    (1525, 'ebc287'), (1550, 'e3b166'), (1600, 'bd8045'), (1602, 'f3ddc3'),
    (1610, 'eccca7'), (1625, 'daaf83'), (1650, 'd49f6b'), (2000, '89754c'),
    (2002, 'eae3d4'), (2010, 'd8cdb8'), (2025, 'ccbfa4'), (2050, 'b5a381'),
    (2100, '807f4d'), (2102, 'e5e3ce'), (2110, 'd0cfb2'), (2125, 'bcbc92'),
    (2150, 'a8a779'), (2200, '7b7555'), (2202, 'e4e3d8'), (2210, 'd5d2c3'),
    (2225, 'c1bea9'), (2250, 'a39f86'), (2300, '607a5a'), (2302, 'e2e7dd'),
    (2310, 'cad6c8'), (2325, 'adbea7'), (2350, '91a68c'), (2400, '686a50'),
    (2402, 'dedfd3'), (2410, 'c5c8bc'), (2425, 'b3b5a6'), (2450, '949782'),
    (2500, '596655'), (2502, 'dbdfd4'), (2510, 'c5cac1'), (2525, 'abb3aa'),
    (2550, '8a998c'), (3000, '9c714a'), (3002, 'e6d5c5'), (3010, 'e1cab4'),
    (3025, 'd3b69a'), (3050, 'bc9a78'), (3100, '866346'), (3102, 'e6ddd2'),
    (3110, 'daccbd'), (3125, 'c6b19d'), (3150, 'b0947b'), (3200, '7f5e4c'),
    (3202, 'ece0d7'), (3210, 'd9c7bd'), (3225, 'cbb2a8'), (3250, 'b19082'),
    (3300, '7a533f'), (3302, 'e4d2c9'), (3310, 'cdb4ab'), (3325, 'b09185'),
    (3350, '9a7565'), (3400, 'b26f49'), (3402, 'f1dbd0'), (3410, 'ebc8b5'),
    (3425, 'e0b59e'), (3450, 'd09d82'), (3500, 'a4654e'), (3502, 'ebd8cf'),
    (3510, 'e1c1b5'), (3525, 'd7ac9c'), (3550, 'c28c7a'), (4000, 'e9743c'),
    (4002, 'f9e1cf'), (4010, 'f2ccb4'), (4025, 'f2b794'), (4050, 'ec9f75'),
    (4100, 'e15745'), (4102, 'f5d9d4'), (4110, 'f5c2be'), (4125, 'eea59f'),
    (4150, 'e7877f'), (4200, '864a42'), (4202, 'e3cac9'), (4210, 'cca7a8'),
    (4225, 'ba8a8b'), (4250, 'a76e6d'), (4300, '8d4a3e'), (4302, 'e8c9c3'),
    (4310, 'd8a59c'), (4325, 'c5867e'), (4350, 'ad6b62'), (4400, '965f5d'),
    (4402, 'ece0dc'), (4410, 'e0cccc'), (4425, 'cdaeaf'), (4450, 'bb9393'),
    (4500, '795b59'), (4502, 'e6dfdb'), (4510, 'd8cece'), (4525, 'c0b2b4'),
    (4550, 'a79598'), (4600, '6d4842'), (4602, 'ded2d2'), (4610, 'c6b0b1'),
    (4625, 'b19392'), (4650, '987475'), (5000, '5590c7'), (5002, 'e5eeeb'),
    (5010, 'cbdce6'), (5025, 'b6d1e7'), (5050, '97bcdd'), (5100, '647bbb'),
    (5102, 'e6eae7'), (5110, 'd3dce6'), (5125, 'c0cee6'), (5150, 'a9b9e0'),
    (6000, '535868'), (6002, 'e4e5e2'), (6010, 'ced3d7'), (6025, 'b2b9c2'),
    (6050, '979eac'), (6100, '51534f'), (6102, 'd8dcd8'), (6110, 'c4c7c5'),
    (6125, 'abadac'), (6150, '8a8d8c'), (6200, '464546'), (6202, 'dee0df'),
    (6210, 'c6cacc'), (6225, 'a3a6a9'), (6250, '76787b'),
]

PALETTE = [
    {
        'name': 'Terranova %d' % code,
        'label': 'terranova-%d' % code,
        'hex': '#' + hex_value,
    }
    for code, hex_value in _TERRANOVA_COLORS
]
|
UTF-8
|
Python
| false | false | 2,014 |
9,285,719,328,305 |
58531be155c464e11421ebf2eadc84a22e2824ec
|
e2d8f4ed659c162c4c7aef90d121c22a90848de5
|
/src/bibim/main/tests/test_reference_maker.py
|
6a3b876e1cad7bcf9b67d1b8cb974c1b60089fbc
|
[
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-only",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft"
] |
non_permissive
|
Alex-Linhares/bibtexIndexMaker
|
https://github.com/Alex-Linhares/bibtexIndexMaker
|
e279400cf66b0275bbeb47ac91585f5063f36873
|
cec15dd552b5c1a6928fba65d4e06291f42b91ec
|
refs/heads/master
| 2020-03-27T12:28:38.595359 | 2010-07-26T09:31:38 | 2010-07-26T09:31:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2010 Ramon Xuriguera
#
# This file is part of BibtexIndexMaker.
#
# BibtexIndexMaker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BibtexIndexMaker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BibtexIndexMaker. If not, see <http://www.gnu.org/licenses/>.
import unittest #@UnresolvedImport
from os.path import normpath, join, dirname
from bibim.main.refmaker import ReferenceMaker
from bibim.util.helpers import ReferenceFormat
class TestReferenceMaker(unittest.TestCase):
    """Smoke test: ReferenceMaker should process the bundled sample PDF."""

    def setUp(self):
        fixture = '../../../../tests/fixtures/extraction/article.pdf'
        self.file = normpath(join(dirname(__file__), fixture))
        self.rm = ReferenceMaker()

    def tearDown(self):
        pass

    def test_make_reference(self):
        # No assertion in the original either: this only checks that the
        # extraction runs without raising.
        references = self.rm.make_reference(self.file, ReferenceFormat.BIBTEX)
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    # Run this module's tests when executed directly.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,010 |
6,932,077,263,599 |
07365e6b478e69084fa94a62003e9888d89c2d8f
|
7680ce90bb8758ebc7e2573329745cb9ecc5821d
|
/scan-toc
|
0e5875b7ddcb6fd82a32ea08b2bcad2ceac01d75
|
[] |
no_license
|
andy128k/clhs-devhelp
|
https://github.com/andy128k/clhs-devhelp
|
8624a8160a7c7c7ba094394ed94f2722ee075ce2
|
da3acfc546ddc4aed29499aaf4511cb65b4638e7
|
refs/heads/master
| 2021-01-22T03:08:33.563882 | 2014-09-28T15:30:34 | 2014-09-28T15:30:34 | 24,562,490 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os
from links_parser import LinksParser
from xml.dom.minidom import Element
class TocParser(LinksParser):
    """LinksParser specialization that only keeps links found inside a list."""

    def take_link(self, attrs):
        # Links outside any list are dropped entirely.
        if not self.in_list:
            return None
        # Entries of an ordered list remember their running index so the
        # caller can number them.
        if self.in_list == 'ol':
            return {'no': self.index}
        return {}
def toc():
    """Parse the CLHS contents page and return a <chapters> DOM element.

    Every table-of-contents link becomes a <sub> child carrying the entry
    title (prefixed with its number for ordered-list entries) and a path
    relative to the HyperSpec root.
    """
    front = 'HyperSpec/Front'
    with open(os.path.join('clhs', front, 'Contents.htm')) as source:
        entries = TocParser.parse(source.read())
    chapters = Element('chapters')
    for entry in entries:
        title = entry['text']
        if 'no' in entry:
            title = str(entry['no']) + '. ' + title
        sub = Element('sub')
        sub.setAttribute('name', title)
        sub.setAttribute('link', os.path.normpath(os.path.join(front, entry['href'])))
        chapters.appendChild(sub)
    return chapters
if __name__ == '__main__':
    # Parenthesised single-argument print is valid on both Python 2
    # (expression in parens) and Python 3 (function call).
    print(toc().toprettyxml())
|
UTF-8
|
Python
| false | false | 2,014 |
10,067,403,381,402 |
86145a095c9f937ff64b8a424c8b87965ce7034c
|
d505f9c86625ace82603a010d6eba078e4d77581
|
/jackpoint/jack/scripts.py
|
44d042f1ba57a8c83354490a41d8f0f69b232539
|
[] |
no_license
|
bussiere/jackpoint
|
https://github.com/bussiere/jackpoint
|
b8407066105fa393037de1d0e4193eea3b283006
|
efdfe78f048d5a3fc39131351f9e5164e2fae71a
|
refs/heads/master
| 2020-05-30T10:00:20.064338 | 2013-01-08T15:48:32 | 2013-01-08T15:48:32 | 2,615,944 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Create your views here.
from django.http import HttpResponseRedirect
from invitation.forms import FirstInvitationForm,CreateInvitationForm
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import user_passes_test
from django.template import RequestContext
from invitation.models import Invitation
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.contrib.auth.decorators import login_required
from django.contrib import auth
from skill.models import Skill
from carac.models import Carac
from item.models import Item
from carac.forms import CaracForm
from skill.forms import SkillForm
from item.forms import ItemForm
from jack.forms import JackRegisterForm
from django.forms.formsets import formset_factory
from django.forms.formsets import BaseFormSet
from jack.models import CaracUser,SkillUser,ItemUser
from carac.models import Carac
from skill.models import Skill
from invitation.script import classer_invitation
@login_required
def enregistrementJack(request,jack,caracs,skills,items,invitation=None):
    """Persist the 'jack' profile form: base fields, caracs, skills, items.

    ``jack`` is a dict of form values (username, email, two passwords, bio);
    ``caracs`` and ``skills`` map names to [level, private-flag] pairs, and
    ``items`` maps names to a sequence whose first element is the
    private-flag.  Returns True on success, False when the two submitted
    passwords differ or are empty (in which case the password and the
    'Finished' flag are not updated).

    NOTE(review): indentation was reconstructed from a whitespace-stripped
    dump -- confirm the try/except bodies against the original file.
    """
    retour = True
    u = User.objects.get(id=request.user.id)
    # jack["jack_username"]=jack_username
    # jack["jack_email"] = jack_email
    # jack["jack_password1"]= jack_password1
    # jack["jack_password2"]=jack_password2
    # jack["jack_Bio"]=jack_Bio
    # Copy the simple profile fields.  NOTE(review): this relies on
    # u.get_profile() returning the same cached instance on every call
    # (Django's _profile_cache) -- verify, otherwise these writes land on
    # throw-away objects.
    u.get_profile().Pseudo = jack["jack_username"]
    u.get_profile().Email = jack["jack_email"]
    u.get_profile().Bio = jack["jack_Bio"]
    u.save()
    u.get_profile().save()
    # Drop the previous m2m links before re-adding the submitted ones.
    u.get_profile().Caracs.clear()
    u.get_profile().Skills.clear()
    u.get_profile().Items.clear()
    #TODO
    # dirty, needs rework (original comment: "crade a revoir")
    for carac in caracs.keys():
        caracdb = Carac.objects.get(Nom=carac)
        private = False
        if caracs[carac][1] == "1" :
            private = True
        # Reuse an identical CaracUser row when one exists, otherwise
        # create and configure a new one.
        try :
            result = CaracUser.objects.get(carac=caracdb,Level=int(caracs[carac][0]),Private=private)
        except :
            result = CaracUser.objects.create(Level=0)
            result.Carac.add(caracdb)
            result.Level = int(caracs[carac][0])
            result.Private = private
            result.save()
        u.get_profile().Caracs.add(result)
    for skill in skills.keys():
        skilldb = Skill.objects.get(Nom=skill)
        private = False
        if skills[skill][1] == "1" :
            private = True
        try :
            result = SkillUser.objects.get( Skills=skilldb,Level=int(skills[skill][0]),Private=private)
        except :
            result = SkillUser.objects.create(Level=0)
            result.Skill.add(skilldb)
            result.Private = private
            result.Level = int(skills[skill][0])
            result.save()
        u.get_profile().Skills.add(result)
    for item in items.keys():
        itemdb = Item.objects.get(Nom=item)
        private = False
        # NOTE(review): items reads the flag from index 0 while caracs and
        # skills read it from index 1 -- confirm this asymmetry is intended.
        if items[item][0] == "1" :
            private = True
        try :
            result = ItemUser.objects.get(Item=itemdb,Private=private)
        except :
            result = ItemUser.objects.create()
            result.Item.add(itemdb)
            result.Private = private
            result.save()
        u.get_profile().Items.add(result)
    u.get_profile().save()
    u.save()
    # check the passwords (original comment: "faire la verif des mdps")
    if (jack["jack_password1"]==jack["jack_password2"] and jack["jack_password1"] != ""):
        u.set_password(jack["jack_password1"])
        u.get_profile().Finished = True;
        classer_invitation(u.get_profile().InvitationAccepted)
    else :
        retour = False
    u.save()
    # check the invitation and mark it as used (original comment:
    # "faire la verif pour l'invitation et la passer a used.")
    return retour
|
UTF-8
|
Python
| false | false | 2,013 |
8,160,437,882,064 |
cc4301343fa5e071496f6752b1083066fe583666
|
45678b026058dd1f9117657e4c32e5633692a96a
|
/test/topcoder/srm/test_team_selection.py
|
00595393bf6d3cff905fe395e63b0b088301f5a0
|
[] |
no_license
|
strahinjakupusinac/python_playground
|
https://github.com/strahinjakupusinac/python_playground
|
694970565426d30a8d30f7c1b87f9dd42e8af20f
|
8cf5673aa1b049f343babf0ba7fb9cef7d518108
|
refs/heads/master
| 2021-01-13T02:10:29.889033 | 2013-12-21T11:55:35 | 2013-12-21T11:55:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Sep 29, 2013
@author: Strahinja
'''
import unittest
from src.topcoder.srm.team_selection import TeamSelection
class Test(unittest.TestCase):
    """Table-driven checks for TeamSelection.simulate."""

    def _check(self, preference1, preference2, expectedResult):
        # Shared driver: run the simulation, echo the result, compare.
        actualResult = TeamSelection().simulate(preference1, preference2)
        print(actualResult)
        self.assertEqual(actualResult, expectedResult, actualResult)

    def testCase1(self):
        self._check([1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 1, 2])

    def testCase2(self):
        self._check([3, 2, 1], [1, 3, 2], [2, 1, 1])

    def testCase3(self):
        self._check([6, 1, 5, 2, 3, 4], [1, 6, 3, 4, 5, 2], [2, 1, 2, 2, 1, 1])

    def testCase4(self):
        self._check([8, 7, 1, 2, 4, 5, 6, 3, 9],
                    [7, 2, 4, 8, 1, 5, 9, 6, 3],
                    [1, 2, 1, 1, 2, 1, 2, 1, 2])
if __name__ == "__main__":
    # import sys;sys.argv = ['', 'Test.testName']
    # Run this module's tests when executed directly.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
3,367,254,393,375 |
82de0a401b5f6941c4262cab66134a335ed8e675
|
85b1ce2b58d56701e039013fdeb90f1e55014077
|
/cqg_assignment2/ttt/eop/chapter3/compare_io.py
|
efa65d415630b78d6fc491b8e298cfc8b9048832
|
[] |
no_license
|
afable/cqg
|
https://github.com/afable/cqg
|
7dc7e4d19b7b853945476d2d477f93bbb997835b
|
99c07a7c5e698f3815a4d7cc6e1ff85d6a64cfd0
|
refs/heads/master
| 2019-04-13T18:20:40.313687 | 2013-11-06T04:28:36 | 2013-11-06T04:28:36 | 14,053,253 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# CQG 'input/output' question: the student predicts the stdout of a small
# C program that compares two integers.
question_type = 'input_output'
source_language = 'C'
# Placeholders substituted into the templates below; $out is the program
# output the student must supply.
hotspot_declarations = [
    ['$a','int'],['$b','int'],['$out','string']
]
# Each row after the group name gives one ($a, $b, $out) instantiation.
# NOTE(review): None for $out presumably means "derive the expected output
# by running the program" -- confirm against the CQG generator.
group_list = [
    ['compare_io_', [1,2,None], [2,1,None], [1,1,None]],
]
# Template lines carry variant flags ('d', 'x' or 'dx') in their first
# column; assumed to select which rendering of the question a line
# appears in -- verify in the code that consumes these templates.
global_code_template = '''\
d #include <stdio.h>
x #include <stdio.h>
dx
'''
main_code_template = '''\
dx int a,b;
dx
dx a = $a;
dx b = $b;
dx if (a < b)
dx printf("first smaller\\n");
dx if (a > b)
dx printf("first larger\\n");
dx if (a == b)
dx printf("equal\\n");
'''
# This question reads no argv and no stdin; stdout must match $out.
argv_template = ''
stdin_template = ''
stdout_template = '''\
$out
'''
|
UTF-8
|
Python
| false | false | 2,013 |
15,384,572,897,475 |
00c86b84fe6920ac30a167051705207c94022e09
|
1b4fef3f9e56654146c0cbba0f7c2b9c6df83b5e
|
/check_nameservers_are_in_sync_for_zone.py
|
54f014cb4401bfe1646ec4d019af0e14b01b4a71
|
[] |
no_license
|
dwt/monitoring_probes
|
https://github.com/dwt/monitoring_probes
|
afcfdb6ae10f184e608325f66e142fda439d26ae
|
cddaf97cdfae5dc31c7cb618b7f06b7722b37781
|
refs/heads/master
| 2016-08-05T16:14:42.713207 | 2014-10-01T10:10:04 | 2014-10-01T10:10:04 | 23,682,032 | 3 | 0 | null | false | 2014-09-29T18:52:35 | 2014-09-04T22:45:22 | 2014-09-04T22:47:10 | 2014-09-29T18:52:35 | 116 | 0 | 1 | 1 |
Python
| null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
Checks that all advertised nameservers for a domain are on the same soa version,
thus ensuring your customers will get consistent answers to their dns queries.
Will return the standard Icinga error codes.
See: https://www.monitoring-plugins.org/doc/guidelines.html#AEN78
Usage:
check_nameservers_are_in_sync_for_zone.py --domain=DOMAIN [--warning=WARNING_NAMESERVER_LIMIT]
[--critical=CRITICAL_NAMESERVER_LIMIT]
check_nameservers_are_in_sync_for_zone.py --selftest [<unittest-options>...]
Option:
-h, --help Show this screen and exit.
-d, --domain DOMAIN The domain to check.
-w, --warning WARNING_NAMESERVER_LIMIT Warn if less nameservers [default: 3]
-c, --critical CRITICAL_NAMESERVER_LIMIT Critical if less nameservers [default: 2]
--selftest Execute the unittests for this module
Copyright: Martin Häcker <spamfenger (at) gmx.de>
License AGPL: https://www.gnu.org/licenses/agpl-3.0.html
"""
"""
TODO:
* change dig calls to skip all caches
* add switches for ipv6, defaulting the same as ipv4
* add switch to disble ipv6 checks
* collect all ip addresses
* check ip addresses directly instead of names - but still provide names in error messages
"""
def main():
    """Parse CLI arguments, run the SOA check, exit with the Nagios code."""
    arguments = docopt(__doc__)
    if arguments['--selftest']:
        # unittest.main() exits the process by default, so execution only
        # continues past here when --selftest was not given.
        unittest.main(argv=sys.argv[1:])
    (return_code, label), message \
        = check_soas_equal_for_domain(
            domain_name=arguments['--domain'],
            warning_minimum_nameservers=int(arguments['--warning']),
            critical_minimum_nameservers=int(arguments['--critical']))
    # Monitoring-plugin protocol: one "LABEL: message" status line on
    # stdout, then the matching exit code.
    print("%s: %s" % (label, message))
    sys.exit(return_code)
from docopt import docopt # Only external requirement. Install via: pip install docopt
import sys
import subprocess
from StringIO import StringIO
def check_output(command):
    """Stub for subprocess.check_output, which only exists from Python 2.7 on.

    Runs *command*, returns its captured stdout, and raises
    CalledProcessError on a non-zero exit status.
    """
    process = subprocess.Popen(command, stdout=subprocess.PIPE)
    captured = [line for line in process.stdout]
    process.wait()
    if process.returncode != 0:
        raise subprocess.CalledProcessError(process.returncode, command)
    return "".join(captured)
class NAGIOS(object):
    """Standard monitoring-plugin exit statuses as (exit_code, label) pairs."""
    OK = (0, 'OK')
    WARNING = (1, 'WARNING')
    CRITICAL = (2, 'CRITICAL')
    UNKNOWN = (3, 'UNKNOWN')
def nameservers_for_domain(domain_name):
    """Return the NS names advertised for *domain_name*, trailing dots stripped."""
    raw = check_output(['dig', '+short', 'NS', domain_name])
    if not raw:
        return []
    return [server.rstrip('.') for server in raw.strip().split('\n')]
def soa_for_domain_with_dns_server(domain_name, dns_server_name):
    """Return the raw SOA record for the domain as served by one nameserver."""
    command = ['dig', '+short', 'SOA', domain_name, '@' + dns_server_name]
    return check_output(command).strip()
def check_soas_equal_for_domain(domain_name, warning_minimum_nameservers=2, critical_minimum_nameservers=1):
    """Check that every advertised nameserver serves the same SOA record.

    Returns a ((exit_code, label), message) pair for main().  Severity:
    no nameservers, an empty SOA answer, disagreeing SOAs, or fewer than
    critical_minimum_nameservers -> CRITICAL; fewer than
    warning_minimum_nameservers -> WARNING; any unexpected exception ->
    UNKNOWN; otherwise OK with the common SOA record as the message.
    """
    try:
        nameservers = nameservers_for_domain(domain_name)
        if len(nameservers) == 0:
            return (NAGIOS.CRITICAL, 'No nameserver for domain "%s", dns is unavailable.' % domain_name)
        # NOTE(review): soa_records is indexed and iterated several times
        # below, so map() must return a list here (Python 2); a Python 3
        # map iterator would be exhausted after the first pass.
        soa_records = map(lambda each: soa_for_domain_with_dns_server(domain_name, each), nameservers)
        # Servers that answered but returned no SOA record at all.
        empty_response_servers = [nameservers[index] for index, record in enumerate(soa_records) if 0 == len(record)]
        if len(empty_response_servers) >= 1:
            return (NAGIOS.CRITICAL,
                'Nameserver(s) %s did not return SOA record for domain "%s"' % (empty_response_servers, domain_name))
        are_all_soas_equal = all(map(lambda each: each == soa_records[0], soa_records))
    except Exception as error:
        # Any unexpected failure (dig missing, DNS timeout, ...) maps to UNKNOWN.
        return (NAGIOS.UNKNOWN, "%r" % error)
    if not are_all_soas_equal:
        return (NAGIOS.CRITICAL, 'Nameservers do not agree for domain "%s" %r' % (domain_name, soa_records))
    elif len(nameservers) < critical_minimum_nameservers:
        return (NAGIOS.CRITICAL, 'Less than %d nameservers for domain "%s", only %d available. %s' % (
            critical_minimum_nameservers, domain_name, len(nameservers), nameservers))
    elif len(nameservers) < warning_minimum_nameservers:
        return (NAGIOS.WARNING, 'Expected at least %d nameservers for domain "%s", but only found %d - %r' % (
            warning_minimum_nameservers, domain_name, len(nameservers), nameservers))
    else: # are_all_soas_equal
        return (NAGIOS.OK, soa_records[0])
import unittest
class SOATest(unittest.TestCase):
@property
def expect(self):
# provided as property to avoid dependency when installing on servers
from pyexpect import expect # Only testing requirement. Install via: pip install pyexpect
return expect
def setUp(self):
self._stubbed_commands = dict()
global check_output
self._original_check_output = check_output
check_output = self.check_output_mock
def tearDown(self):
global check_output
check_output = self._original_check_output
def check_output_mock(self, command):
normalized_command = ' '.join(command)
assert normalized_command in self._stubbed_commands, \
"Missing output for <%s>, only have output for <%s>" % (normalized_command, self._stubbed_commands)
return self._stubbed_commands[normalized_command]
def on_command(self, expected_command):
"Expects command as one string"
self._expected_command = expected_command
return self
def provide_output(self, stubbed_output):
"Outdents output"
command = self._expected_command
output = '\n'.join(map(lambda each: each.lstrip(), stubbed_output.split('\n')))
self._stubbed_commands[command] = output
del self._expected_command
## Tests
def test_get_nameservers_for_domain(self):
self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de
nsa1.schlundtech.de.
nsd1.schlundtech.de.""")
nameservers = nameservers_for_domain('yeepa.de')
self.expect(nameservers) == [
'nsc1.schlundtech.de',
'nsb1.schlundtech.de',
'nsa1.schlundtech.de',
'nsd1.schlundtech.de']
def test_get_soa_for_domain_from_nameserver(self):
self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("""\
nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600""")
soa = soa_for_domain_with_dns_server('yeepa.de', 'nsc1.schlundtech.de')
self.expect(soa) == 'nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600'
def test_should_compare_soas_from_all_web_servers(self):
self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de.""")
self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("""\
nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600""")
self.on_command('dig +short SOA yeepa.de @nsb1.schlundtech.de').provide_output("""\
nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600""")
self.expect(check_soas_equal_for_domain('yeepa.de')) == (
NAGIOS.OK, 'nsa1.schlundtech.de. sh.sntl-publishing.com. 2014090302 43200 7200 1209600 600')
def test_should_return_false_if_soas_differ(self):
self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de.""")
self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("not equal")
self.on_command('dig +short SOA yeepa.de @nsb1.schlundtech.de').provide_output("to this")
self.expect(check_soas_equal_for_domain('yeepa.de')) == (NAGIOS.CRITICAL, 'Nameservers do not agree for domain "yeepa.de" [\'not equal\', \'to this\']')
def test_should_erorr_if_nameservers_are_not_authoritative(self):
self.on_command('dig +short NS example.com').provide_output("""\
b.iana-servers.net.
a.iana-servers.net.""")
self.on_command('dig +short SOA example.com @a.iana-servers.net').provide_output("anything")
self.on_command('dig +short SOA example.com @b.iana-servers.net').provide_output("")
self.expect(check_soas_equal_for_domain('example.com')) == (NAGIOS.CRITICAL, 'Nameserver(s) [\'b.iana-servers.net\'] did not return SOA record for domain "example.com"')
def test_should_error_if_no_nameservers(self):
self.on_command('dig +short NS yeepa.de').provide_output("")
self.expect(check_soas_equal_for_domain('yeepa.de')) \
== (NAGIOS.CRITICAL, 'No nameserver for domain "yeepa.de", dns is unavailable.')
def test_should_allow_to_configure_warning_level_for_number_of_webservers(self):
    """Fewer nameservers than warning_minimum_nameservers -> WARNING, even when SOAs agree."""
    self.on_command('dig +short NS yeepa.de').provide_output("""\
nsc1.schlundtech.de.
nsb1.schlundtech.de.""")
    self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("equal")
    self.on_command('dig +short SOA yeepa.de @nsb1.schlundtech.de').provide_output("equal")
    self.expect(check_soas_equal_for_domain('yeepa.de', warning_minimum_nameservers=3)) \
        == (NAGIOS.WARNING, 'Expected at least 3 nameservers for domain "yeepa.de", but only found 2 - '
            "['nsc1.schlundtech.de', 'nsb1.schlundtech.de']")
def test_should_error_if_less_than_critical_nameservers(self):
    """Fewer nameservers than critical_minimum_nameservers -> CRITICAL."""
    self.on_command('dig +short NS yeepa.de').provide_output("""nsc1.schlundtech.de.""")
    self.on_command('dig +short SOA yeepa.de @nsc1.schlundtech.de').provide_output("good enough")
    self.expect(check_soas_equal_for_domain('yeepa.de', critical_minimum_nameservers=2)) \
        == (NAGIOS.CRITICAL, 'Less than 2 nameservers for domain "yeepa.de", only 1 available. [\'nsc1.schlundtech.de\']')
def test_should_catch_unexpected_errors(self):
    """Any unexpected exception is mapped to NAGIOS.UNKNOWN with its repr.

    Bug fix: the fake ``check_output`` is now restored in a ``finally``
    block so the monkey-patched module global cannot leak into later tests.
    """
    global check_output
    original_check_output = check_output
    def fail(*args): raise AssertionError('fnord')
    check_output = fail
    try:
        self.expect(check_soas_equal_for_domain('yeepa.de')) \
            == (NAGIOS.UNKNOWN, "AssertionError('fnord',)")
    finally:
        check_output = original_check_output
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,014 |
13,572,096,704,561 |
57a8410c2c885802e3d791668ee2e2cbd08d173b
|
fc1c391656923321ff6a8a5dc7c3dd4c2d484a30
|
/hazard/shared/gdocs_common.py
|
9f7bfac8c8e65dcae66bee80e747811576bf5b63
|
[] |
no_license
|
msgre/hazard
|
https://github.com/msgre/hazard
|
bb62e8fa7626f587abfbded0d5e00b4e2c6b6032
|
9820d98d8d79880fd5a754c361917b6e2694941e
|
refs/heads/master
| 2022-02-08T20:39:08.384636 | 2013-05-04T18:55:13 | 2013-05-04T18:55:13 | 1,639,212 | 2 | 0 | null | false | 2022-01-26T08:29:23 | 2011-04-20T06:49:14 | 2013-09-30T03:12:48 | 2022-01-26T08:29:22 | 13,537 | 4 | 1 | 6 |
Python
| false | false |
# -*- coding: utf-8 -*-
"""
Spolecne konstanty a funkce pro praci s GDocs tabulkami.
Podrobnejsi info viz shared/spreadsheet.py a shared/export.py
"""
import gdata.spreadsheet.service
from django.conf import settings
# Spreadsheet columns we work with (comments translated from Czech).
# The string values are the Czech column labels used in the GDocs sheet
# and are read at runtime, so they must stay unchanged.
KEY_TOWN = 'mesto'
KEY_STREET = 'ulice'
KEY_TYPE = 'typ cinnosti'
KEY_NUMBER = 'cislo jednaci'
KEY_COUNT = 'pocet'
KEY_NAME = 'nazev herny'
KEY_NOTE = 'poznamka'
KEY_GPS = 'gps'

# Order of the columns in the spreadsheet, driven by Django settings;
# each settings entry "foo" resolves to the module constant KEY_FOO above.
REAL_COLUMNS = [globals()["KEY_%s" % k.upper()] for k in settings.GDOCS_COLUMNS]

# String inserted into the GPS column when a position cannot be found.
GPS_NOT_FOUND = 'nenalezeno'
def open_spreadsheet(document_id):
    """
    Open a Google Docs spreadsheet and return a tuple of:
    * a client object for further work with the document
    * the feed of the individual rows
    (Docstring translated from Czech.)
    """
    client = gdata.spreadsheet.service.SpreadsheetsService()
    # NOTE(review): credentials come from Django settings; ClientLogin is a
    # long-deprecated Google authentication mechanism — verify it still works.
    client.ClientLogin(settings.GDOCS_USERNAME, settings.GDOCS_PASSWORD)
    return client, client.GetListFeed(document_id)
def get_column_order(entry):
    """
    Return the feed's column keys in the order they appear in the document.

    The feed returns columns as an (unordered) dict, but the position of
    each ``key:`` marker inside ``entry.content.text`` reveals the real
    column order, so we sort the keys by that position.
    (Docstring translated from Czech.)
    """
    # Decode once instead of once per key.
    text = entry.content.text.decode('utf-8')
    positions = [(key, text.find(u"%s:" % key)) for key in entry.custom.keys()]
    # key= replaces the Python-2-only cmp= comparator form of sorted().
    positions.sort(key=lambda pair: pair[1])
    return [key for key, _position in positions]
def map_to_dict(entry, key_order, real_columns):
    """
    Map one Google Docs row into a dict keyed by the real column names.

    The sheet may not carry a header row, so the meaning of each column is
    supplied explicitly via ``real_columns``; ``key_order`` gives the feed
    keys in document order. Columns missing from the row come back as None.
    """
    # Every real column defaults to None; present cells overwrite below.
    mapped = dict.fromkeys(real_columns)
    for position, feed_key in enumerate(key_order):
        cell = entry.custom.get(feed_key, None)
        # A missing cell or an empty text both map to None.
        mapped[real_columns[position]] = (cell.text or None) if cell else None
    return mapped
|
UTF-8
|
Python
| false | false | 2,013 |
16,707,422,800,087 |
0a7df6ec42742f966c348f20997ed0513f04a25b
|
3fad85298b007ed716fbd460afe82ade2d1ca93d
|
/astelytics/server.py
|
88defb5b7c172af225be8f1ea6f03ff80ed66963
|
[] |
no_license
|
jamesbeedy/SanDiegoHHSARealTimeD3VisualizationService
|
https://github.com/jamesbeedy/SanDiegoHHSARealTimeD3VisualizationService
|
3a3b59d6058919863a04d37c01284581a344733f
|
bfbdf3e2353f2fe3bd625f794a4bc7ee676a7e8b
|
refs/heads/master
| 2017-05-27T15:38:27.615863 | 2014-02-26T01:32:09 | 2014-02-26T01:32:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import collections
import functools
import copy
import re
import requests
import flask
from flask.ext.classy import FlaskView, route
import analysis
import json
def support_jsonp(json):
    '''Optionally enables support for jsonp, if requested.

    If the request carries a ``callback`` query parameter, the JSON payload
    is wrapped in a call to that function and returned with a JSON mimetype;
    otherwise the payload is returned unchanged.

    NOTE(review): the parameter shadows the imported ``json`` module, and
    the callback name is interpolated unvalidated — confirm callers sanitize
    it, otherwise this permits script injection via the callback parameter.
    '''
    callback = flask.request.args.get('callback', False)
    if callback:
        content = str(callback) + '(' + json + ')'
        return flask.current_app.response_class(content, mimetype='application/json')
    else:
        return json
def rank_words(words, n):
    """Return the ``n`` most common interesting words in ``words``.

    Lower-cases the text, turns punctuation into spaces, drops any remaining
    non-alphanumeric characters, filters out stop words, and returns
    ``Counter.most_common(n)`` pairs of (word, count).
    """
    boring = {'the', 'of', 'a', 'an', 'or', 'some', 'will', 'and', 'for',
              'should', 'would', 'did', 'does', 'do', 'but', 'yet', 'nor', 'it', 'was', '',
              'its', 'we', 'all', 'in', 'to', 'us', 'so'}
    # Bug fix: the old pattern '[.,-;:]' contained the accidental range
    # ',-;' (chars 0x2C..0x3B), which silently stripped digits — the second
    # substitution explicitly keeps 0-9, so digits are meant to survive.
    # The hyphen is now last, making it a literal.
    clean = re.sub(r'[.,;:-]', ' ', words.lower())
    clean = re.sub('[^a-z0-9 ]', '', clean)
    clean = [a for a in clean.split(' ') if a not in boring]
    # Keep duplicates here — Counter needs every occurrence.
    return collections.Counter(clean).most_common(n)
class SurveyView(FlaskView):
    """HTTP endpoints for survey submission, discovery, analytics and reports.

    Routes are registered by flask-classy from the decorated methods.
    """

    def __init__(self):
        # Bug fix: was super(FlaskView, self).__init__(), which skipped
        # FlaskView in the MRO instead of starting the chain at this class.
        super(SurveyView, self).__init__()

    def index(self):
        """Landing page placeholder."""
        return 'TODO: Write up instructions'

    @route('<survey_id>/')
    def get(self, survey_id):
        """Render the submission form for a survey."""
        return flask.render_template('submit.html')

    @route('<survey_id>/discover/')
    def discover(self, survey_id):
        """Proxy the raw survey documents from the backing datastore as JSON(P)."""
        response = requests.post(analysis.DB_URL + r"/dataset/allSurveys", data={"survey_id": survey_id})
        return support_jsonp(flask.json.dumps(response.json()))

    @route('<survey_id>/analytics/')
    def analytics(self, survey_id):
        """Return the analyzed survey results as JSON(P)."""
        survey = analysis.Survey(survey_id)
        results = analysis.ResultsView(survey)
        return support_jsonp(flask.json.dumps(results.json()))

    @route('<survey_id>/report/')
    def report(self, survey_id):
        """Render the full HTML report for a survey."""
        survey = analysis.Survey(survey_id)
        results = analysis.ResultsView(survey)
        return flask.render_template('report.html', results=results, survey_id=survey_id)

    @route('<survey_id>/question/<question_id>/')
    def question(self, survey_id, question_id):
        """Render the page for a single question.

        Bug fix: this method was also named ``get``, which silently
        overwrote the survey-form handler above so only one of the two
        routes was ever registered; renamed so both work. The unused
        ``similar`` computation was removed.
        """
        survey = analysis.Survey(survey_id)
        results = analysis.ResultsView(survey)
        master = results.find_question(question_id)
        return flask.render_template('question.html', result=master)

    @route('<survey_id>/question/<question_id>/suggest/')
    def suggest(self, survey_id, question_id):
        """Placeholder: suggest questions of the same type as ``question_id``."""
        survey = analysis.Survey(survey_id)
        results = analysis.ResultsView(survey)
        master = results.find_question(question_id)
        similar = [result for result in results.questions if result['type'] == master['type']]
        return 'Return similar questions to {0}'.format(question_id)

    @route('<survey_id>/question/<question_id>/<other_id>/')
    def combine(self, survey_id, question_id, other_id):
        """Placeholder: combined view of two questions."""
        return 'Return a combination of {0} and {1}'.format(question_id, other_id)
# WSGI entry point; "app" is a conventional alias some servers look for.
application = flask.Flask(__name__, static_folder='static')
app = application
# NOTE(review): secret key is hard-coded and checked into source control —
# it should come from configuration/environment in production.
app.secret_key = ',*\xee\xd6tJ1Ja\xc8D\x9d!-\xa2k\xb6K\x9e\xb8\xff\xd7z\xc3'
SurveyView.register(app)
if __name__ == '__main__':
    application.run(debug=True)
|
UTF-8
|
Python
| false | false | 2,014 |
12,833,362,293,948 |
64d2e56fa57ce19431a4971e25fc00a2ef10c1df
|
a0092d99e9818c7ef60990246489e80bb64cd1d5
|
/problem_83.py
|
e5a1ec2a96090a044b6dd7510f7c80f52d52750a
|
[] |
no_license
|
sfermoy/project_euler
|
https://github.com/sfermoy/project_euler
|
8a4d773aace6526c87a7cea60794fe5e95c67b59
|
3db07272466d834206741b2f0f89954c9b85a038
|
refs/heads/master
| 2016-09-09T21:05:51.489068 | 2013-02-23T23:51:58 | 2013-02-23T23:51:58 | 3,629,287 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from numpy import *
# Load the cost matrix for Project Euler problem 83 from the adjacent file.
mat = genfromtxt("prob_83_input.txt", delimiter=",")
# mat= genfromtxt("prob_83_test_input.txt")
def smallest_unvisited(values, visited):
    """Return ``[values, index]`` for the smallest unvisited cell.

    Already-visited minima are overwritten in place with a sentinel
    (20**12) that is strictly larger than the 10**12 "infinity" used to
    initialize the tentative distances in dikjstra(), so a visited cell can
    never tie with (and shadow) an unreachable unvisited cell.

    Rewritten iteratively: the old self-recursion could hit Python's
    recursion limit on large grids.
    """
    while True:
        min_index = unravel_index(values.argmin(), values.shape)
        if visited[min_index[0], min_index[1]] > 0:
            # Mask this visited cell above every unvisited value and retry.
            values[min_index[0], min_index[1]] = 20 ** 12
        else:
            return [values, min_index]
def dikjstra(matrix):
    """Dijkstra shortest-path over a 4-connected grid (up/down/left/right).

    Returns an int array of minimal path costs from cell (0, 0).
    NOTE(review): the function name keeps the original misspelling so
    callers keep working; *matrix* is mutated in place (cells overwritten
    with accumulated costs), and the code is Python 2 (xrange).
    """
    nrows = len(matrix)
    mcols = len(matrix[0])
    # Tentative distances start at "infinity" (10**12); smallest_unvisited()
    # masks visited cells with a strictly larger sentinel (20**12).
    tentitive = array([[10 ** 12 for x in xrange(mcols)] for x in xrange(nrows)])  # infinity
    # result doubles as the visited marker: 0 = unvisited, >0 = settled cost.
    result = array([[0 for x in xrange(mcols)] for x in xrange(nrows)])
    # source node in position (0,0)
    tentitive[0, 0] = matrix[0, 0]
    for z in xrange((nrows * mcols)):
        tentitive, index = smallest_unvisited(tentitive, result)
        i = index[0]
        j = index[1]
        # Relax the neighbours; which ones exist depends on the row position.
        if i == 0:
            # down
            option = matrix[i + 1, j] + tentitive[i, j]
            if option < tentitive[i + 1, j]:
                tentitive[i + 1, j] = option
        elif i == (nrows - 1):
            # up
            option = matrix[i - 1, j] + tentitive[i, j]
            if option < tentitive[i - 1, j]:
                tentitive[i - 1, j] = option
        else:
            # up
            option = matrix[i - 1, j] + tentitive[i, j]
            if option < tentitive[i - 1, j]:
                tentitive[i - 1, j] = option
            # down
            option = matrix[i + 1, j] + tentitive[i, j]
            if option < tentitive[i + 1, j]:
                tentitive[i + 1, j] = option
        if j < (mcols - 1):
            # right
            option = matrix[i, j + 1] + tentitive[i, j]
            if option < tentitive[i, j + 1]:
                tentitive[i, j + 1] = option
        if j > 0:
            # left
            option = matrix[i, j - 1] + tentitive[i, j]
            if option < tentitive[i, j - 1]:
                tentitive[i, j - 1] = option
        # Settle this cell: record its final cost in both arrays.
        matrix[i, j] = tentitive[i, j]
        result[i, j] = tentitive[i, j]
    return result
###### main #############
# Minimal path sum from the top-left to the bottom-right corner.
result = dikjstra(mat)
i = len(mat) - 1
print result[i, i]
|
UTF-8
|
Python
| false | false | 2,013 |
609,885,361,112 |
fcbd25fed58b3760bcec91a6345a572ad3b52bbd
|
6489a502fcef6d68a7c9e7ed55c1dda1f5745937
|
/game.py
|
6b14bd0bd9f7de14f475ee01f040a71ced9a66be
|
[] |
no_license
|
MegFord/Python_Practice_Code
|
https://github.com/MegFord/Python_Practice_Code
|
67db7c8bf04bd36e3fa7e50af0f77453e8a06ce7
|
eb21d7010f9e37273ce6f112cf4fbd6f277c698c
|
refs/heads/master
| 2021-01-01T18:37:04.433364 | 2014-09-23T13:15:47 | 2014-09-23T13:15:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tic-tac-toe game employing magic square win-checking algorithm
Include Unit tests
"""
__author__ = "Meg Ford"
__copyright__ = "Copyright 2014 Meg Ford"
__credits__ = []
__license__ = "GNU GPL v2+"
__version__ = "1.0"
__maintainer__ = "Meg Ford"
__email__ = ""
######################################################################
import sys
from board import Board
from players import Player
class Game(object):
"""
A :class:`Game` object which instantiates
the other objects and contains the business logic.
"""
def __init__(self):
"""
Initialize a :class:`Game` object.
"""
self.board = Board([1, 2, 3, 4, 5, 6, 7, 8, 9],
[8, 1, 6, 3, 5, 7, 4, 9, 2])
self.magic_square = self.board.magic_square
self.playerX = Player("Player X", "x")
self.playerO = Player("Player O", "o")
self._play = 0
self._win = 1
self._tie = 2
def start(self):
"""
Assign players based on user input.
"""
player = self.board.assign_player_input()
if player == 'x':
player = self.playerX
self.play(player)
elif player == 'o':
player = self.playerX
self.adjust_lists(player, 4)
self.board.print_board()
player = self.playerO
self.play(player)
def play(self, player):
"""
Start playing the game.
"""
do = True
while do is True:
space = self.board.move_input()
if type(space) == int:
move = self.eval_move(player, space)
self.board.print_board()
do = True
def eval_move(self, player, move):
"""
Figure out which player moves first.
"""
if player.name == "Player X":
return self.move_playerX(move)
else:
return self.move_playerO(move)
def move_playerX(self, move):
"""
Move the person-controlled player.
"""
self.adjust_lists(self.playerX, move)
result = self.playerX.check_game_over()
if result == self._play:
return self.move_ai(self.playerO)
else:
self.print_result(self.playerX.name)
def move_playerO(self, move):
"""
Move the person-controlled player.
"""
self.adjust_lists(self.playerO, move)
result = self.playerO.check_game_over()
if result == self._play:
return self.move_ai(self.playerX)
else:
self.print_result(self.playerO.name)
def move_ai(self, player):
"""
Move the ai-controlled player.
"""
opp_player = self.opp_player(player)
# First check to see if the ai player has a winning move
result, move = player.check_for_winning_move(self.magic_square)
# If the ai player has a winning move, make it and print results
if result == self._win and type(move) is int:
self.adjust_lists(player, move)
self.print_result(player.name)
# If there are no winning moves for the ai player,
# then check to see if there are any wins for the opponent.
elif result == self._play and move is None:
result, move =\
opp_player.check_for_winning_move(self.magic_square)
# If there are, then block the opponent's winning move
if result == self._win and type(move) is int:
self.adjust_lists(player, move)
# If that was the last move on the board, end game
if len(opp_player.player) + len(player.player) == 9:
self.print_result("Cat")
# Else keep playing
return self._play
# Otherwise, hand the player off to the non-winning logic
elif result == self._play and move is None:
result = self.strategy(player)
# If the ai player found a move and made it...
if result == self._play:
# Check to see if there are any more moves on the board...
if len(opp_player.player) + len(player.player) == 9:
self.print_result("Cat")
# If there are, keep playing
return self._play
else:
self.print_result("Cat")
else:
print "Error\n"
self.end_game()
else:
print "Error\n"
self.end_game()
def strategy(self, player):
"""
Blocking strategies for the ai player.
"""
# If the center square is empty, move there.
if type(self.board.values[4]) is int:
self.adjust_lists(player, 4)
return self._play
# Certain strategies are only relevent
# if the opposing player
# has made exactly two moves.
if len(self.opp_player(player).player) == 2:
# First check the diagonals.
if not self.diagonal(player):
return self._play
# Next check for opposing player moves in opposite corners.
if not self.check_opposite_corner(self.opp_player(player).player):
return self._play
# Next check for opposing player moves in checkerboard pattern
# on the edge squares.
if not self.check_edge(self.opp_player(player)):
return self._play
# Move to the first available even square.
if not self.even_squares(player):
return self._play
# Move to the first available off square.
if not self.odd_squares(player):
return self._play
# If we haven't found a move, then the cat won the game.
return 2
def even_squares(self, player):
"""
Find the first free even-indexed square and
add it to the ai player's list.
"""
for idx, val in enumerate(self.board.values[::2]):
if type(val) is int:
self.adjust_lists(player, idx*2)
return self._play
return 1
def odd_squares(self, player):
"""
Find the first free odd-indexed square and
add it to the ai player's list.
"""
for idx, val in enumerate(self.board.values[1::2]):
if type(val) is int:
self.adjust_lists(player, idx*2+1)
return self._play
return 1
def check_opposite_corner(self, player):
"""
Next check for opposing player moves in opposite corners.
"""
if player[0] % 2 == 0 and player[1] % 2 == 0:
if player[0] + player[1] == 10:
# If opposite corners are filled, then the ai player moves
# to an odd-indexed square.
return self.odd_squares(player)
return 1
def check_edge(self, player):
"""
Check for a checkboard pattern.
If that pattern is found, then the ai player moves
to a corner square.
"""
if player.player[0] % 2 != 0 and player.player[1] % 2 != 0:
if abs(player.player[0] - player.player[1]) == 2 or\
abs(player.player[0] - player.player[1]) == 6:
# If that pattern is found, then the ai player moves
# to corner square.
return self.corner_squares(player)
return 1
def diagonal(self, player):
"""
Check for an occupied diagonal.
"""
opp_player = self.opp_player(player).player
if player.player[0] == 5:
if player.player[0] + opp_player[0] + opp_player[1] == 15:
# If a diagonal is filled, then the ai player moves
# to an even-indexed square.
return self.odd_squares(player)
return 1
def corner_squares(self, player):
"""
Check for a checkboard pattern.
If that pattern is found, then the ai player looks for
the correct corner and moves there.
"""
opp_player = self.opp_player(player)
player = player.player
if player[0] + player[1] == 4 and type(self.board.values[0]) is int:
self.adjust_lists(opp_player, 0)
return self._play
elif player[0] + player[1] == 8 and type(self.board.values[2]) is int:
self.adjust_lists(opp_player, 2)
return self._play
elif player[0] + player[1] == 12 and type(self.board.values[6]) is int:
self.adjust_lists(opp_player, 6)
return self._play
elif player[0] + player[1] == 16 and type(self.board.values[8]) is int:
self.adjust_lists(opp_player, 8)
return self._play
else:
return 1
def adjust_lists(self, player, move):
"""
Add the values related to the square to the lists.
"""
player.add_move(self.magic_square[move])
self.board.add_move(move, player.move_type)
self.board.remove_value(move)
def print_result(self, name):
"""
Print the board, winner, and exit the program.
"""
self.board.print_board()
self.board.print_winner(name)
self.end_game()
def end_game(self):
"""
Exit the program.
"""
sys.exit(0)
def opp_player(self, player):
"""
Return the other player based on the current player's name.
"""
if player.name == "Player X":
return self.playerO
else:
return self.playerX
if __name__ == '__main__':
    # Script entry point: show the empty board, then run the game loop.
    game = Game()
    game.board.print_board()
    game.start()
|
UTF-8
|
Python
| false | false | 2,014 |
13,761,075,223,806 |
825384ea9b2bafc6d0fae8b5d31e8b7b012c4f7c
|
44139b785e71f006d1fe58d005f1e53df89685cd
|
/exercises/solutions/words.py
|
4b5e0489caf8b083c85a26dc6a8c6ebab5d87c54
|
[
"MIT"
] |
permissive
|
JoieTsun/codex-python-web
|
https://github.com/JoieTsun/codex-python-web
|
857bd51c1dfacfc29eea136ddfe5ea7157ce77c0
|
f83637fef85a3dd16dbd7e20bfd5a6b0fc680828
|
refs/heads/master
| 2023-03-16T09:54:33.114947 | 2014-07-31T11:55:50 | 2014-07-31T11:55:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def split(text, sep):
    '''
    Divides up a string 'text' at each occurrence of a separator 'sep'.
    Returns a list of parts of the text, with the separators removed.

    Raises ValueError for an empty separator (mirroring str.split), which
    previously made this loop forever because find('') always returns 0.
    '''
    if not sep:
        raise ValueError('empty separator')
    # Start with an empty list of parts.
    parts = []
    while True:
        # Find the next occurrence of the separator.
        idx = text.find(sep)
        if idx == -1:
            # No more. Add the rest of the text.
            parts.append(text)
            # And we're done.
            return parts
        else:
            # Found a separator. Take the text up to it.
            part = text[: idx]
            parts.append(part)
            # And skip over it.
            text = text[idx + len(sep) :]
def split_paragraphs(text):
    '''
    Splits text into a list of paragraphs.

    Consecutive non-blank lines form one paragraph, joined with single
    spaces; blank lines separate paragraphs and are discarded.
    '''
    paragraphs = []
    # Lines collected for the paragraph currently being built.
    current_lines = []
    for line in text.split('\n'):
        if line:
            current_lines.append(line)
        elif current_lines:
            # Blank line ends the current (non-empty) paragraph.
            paragraphs.append(' '.join(current_lines))
            current_lines = []
    # Flush the trailing paragraph, if any.
    if current_lines:
        paragraphs.append(' '.join(current_lines))
    return paragraphs
def count_words(text):
    '''
    Counts words in a text, and returns a dict from word to count.

    Paragraph structure is flattened first, then each paragraph is split
    on single spaces.
    '''
    counts = {}
    for paragraph in split_paragraphs(text):
        for word in paragraph.split(' '):
            # Default to zero for first sightings, then bump the tally.
            counts[word] = counts.get(word, 0) + 1
    return counts
|
UTF-8
|
Python
| false | false | 2,014 |
5,068,061,436,140 |
c12f6d75734a33117b2df08b83c9778e21e333f3
|
eaaa762724ae250e870ef3ec876ef1d5f5855457
|
/ocd/emulators.py
|
9fd72258601e03efc872b2c09619767b095bf07c
|
[] |
no_license
|
llenterak/odcdoctor
|
https://github.com/llenterak/odcdoctor
|
4db127d42cd15a91a24dff1c0b93b1c6e9cb41e1
|
b750ebe3a0413bef468dc22cdb5e2d65650d8403
|
refs/heads/master
| 2016-09-01T20:09:45.613186 | 2013-11-24T10:15:41 | 2013-11-24T10:15:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
class I2C_emulator(object):
    """Stub emulator for an I2C bus; currently only logs its startup."""
    def __init__(self):
        logging.info("i2c emulator started")
class SPI_emulator(object):
    """Stub emulator for an SPI bus; currently only logs its startup."""
    def __init__(self):
        logging.info("spi emulator started")
class OneWire_emulator(object):
    """Stub emulator for a 1-Wire bus; currently only logs its startup."""
    def __init__(self):
        # Bug fix: the startup message was copy-pasted from SPI_emulator
        # and wrongly said "spi emulator started".
        logging.info("onewire emulator started")
|
UTF-8
|
Python
| false | false | 2,013 |
7,138,235,652,107 |
95ad29ee167c969beedd0befca0fe67c9963e914
|
4dc77d1b08f612123599c9bfd3f7b3f7b17f7721
|
/cairosvg/css.py
|
12fc336f304c1f7b4bdc22867c309f7524d1d9bd
|
[] |
no_license
|
egradman/tweetcutter
|
https://github.com/egradman/tweetcutter
|
1efb9352b754386368f3297ce08aa9125b4c1c2a
|
54574aef82ccab7cb4f24b4555d561b4e7084bde
|
refs/heads/master
| 2021-01-25T10:29:37.228391 | 2012-03-07T04:03:49 | 2012-03-07T04:03:49 | 3,615,196 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# This file is part of CairoSVG
# Copyright © 2010-2012 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with CairoSVG. If not, see <http://www.gnu.org/licenses/>.
"""
Optionally handle CSS stylesheets.
"""
from .parser import HAS_LXML
try:
import cssutils
HAS_CSSUTILS = True
except ImportError:
HAS_CSSUTILS = False
try:
from lxml import cssselect
HAS_CSSSELECT = True
except ImportError:
HAS_CSSSELECT = False
CSS_CAPABLE = HAS_LXML and HAS_CSSUTILS and HAS_CSSSELECT
# Python 2/3 compat
iteritems = getattr(dict, 'iteritems', dict.items) # pylint: disable=C0103
def remove_svg_namespace(tree):
    """Remove the SVG namespace from ``tree`` tags.

    ``lxml.cssselect`` does not support empty/default namespaces, so any
    Clark-notation SVG prefix is stripped from every element tag. Non-string
    tags (comments, processing instructions) are left untouched.
    """
    svg_prefix = "{http://www.w3.org/2000/svg}"
    for node in tree.iter():
        tag = node.tag
        # Comments/PIs expose a callable tag, not a string — skip those.
        if hasattr(tag, "startswith") and tag.startswith(svg_prefix):
            node.tag = tag[len(svg_prefix):]
def find_stylesheets(tree):
    """Find the stylesheets included in ``tree``.

    Yields a parsed ``cssutils`` stylesheet for every <style> element whose
    ``type`` is (or defaults to) ``text/css``.
    """
    for element in tree.iter():
        # http://www.w3.org/TR/SVG/styling.html#StyleElement
        if (element.tag == "style" and
            # TODO: support contentStyleType on <svg>
            element.get("type", "text/css") == "text/css"):
            # TODO: pass href for relative URLs
            yield cssutils.parseString(element.text, validate=False)
def find_style_rules(tree):
    """Yield every style rule from the stylesheets embedded in ``tree``."""
    for sheet in find_stylesheets(tree):
        for candidate in sheet.cssRules:
            # Only plain style rules matter here (skip @media, @import, ...).
            if candidate.type == candidate.STYLE_RULE:
                yield candidate
def get_declarations(rule):
    """Yield ``(name, cssText, is_important)`` for each declaration in ``rule``.

    Declarations whose name starts with "-" (vendor-prefixed) are skipped.
    """
    for declaration in rule.style.getProperties(all=True):
        if declaration.name.startswith("-"):
            # Ignore properties prefixed by "-"
            continue
        # TODO: filter out invalid values
        yield (declaration.name,
               declaration.cssText,
               bool(declaration.priority))
def match_selector(rule, tree):
    """Yield the ``(element, specificity)`` in ``tree`` matching ``rule``.

    Selectors ``lxml.cssselect`` cannot compile are silently skipped.
    """
    for selector in rule.selectorList:
        specificity = selector.specificity
        try:
            matcher = cssselect.CSSSelector(selector.selectorText)
        except cssselect.ExpressionError:
            # Unsupported selector
            # TODO: warn
            continue
        for element in matcher(tree):
            yield element, specificity
def apply_stylesheets(tree):
    """Apply the stylesheets in ``tree`` to ``tree``.

    Collects declarations per element, keeps the winner by
    (importance, specificity) weight, and folds the result into each
    element's inline ``style`` attribute (which keeps highest priority by
    being appended last).
    """
    if not CSS_CAPABLE:
        # TODO: warn?
        return
    remove_svg_namespace(tree)
    style_by_element = {}
    for rule in find_style_rules(tree):
        declarations = list(get_declarations(rule))
        for element, specificity in match_selector(rule, tree):
            style = style_by_element.setdefault(element, {})
            for name, value, important in declarations:
                weight = important, specificity
                if name in style:
                    _old_value, old_weight = style[name]
                    # Existing declaration wins unless outweighed.
                    if old_weight > weight:
                        continue
                style[name] = value, weight
    for element, style in iteritems(style_by_element):
        # Bug fix: was style.itervalues(), which is Python-2-only even
        # though this module carries an explicit Py2/3 compat shim;
        # .values() works on both.
        values = [value for value, _weight in style.values()]
        values.append(element.get("style", ""))
        element.set("style", ";".join(values))
|
UTF-8
|
Python
| false | false | 2,012 |
111,669,175,566 |
378ec4d02f8b81e6c04107096b001f1a96921cb1
|
8e0d43033f7b232af89bb55bacc5e85848bbec6d
|
/RTTS(3).py
|
357660ef9f629cd8bcb532dc1b723faba2c326b7
|
[] |
no_license
|
tayoshan/RealTimeTrafficSpeedScript
|
https://github.com/tayoshan/RealTimeTrafficSpeedScript
|
f6e1f1dc8ada5c9c6efec3c0de3188a9211e3eab
|
620abba7546878885c0330bb24b2358c3634aa25
|
refs/heads/master
| 2016-09-06T16:13:19.015072 | 2013-10-04T02:15:10 | 2013-10-04T02:15:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import urllib2
import datetime as dt
#Important concepts:
#A) Must check for incomplete lines first (missing ending quotation) and then for whether lines are missing data entries.
#---The opposite order will cause the code to loop infinitely.
#B) Keep a separate counter of the length of the list of lines, since this is used to control the outer-most loop but
#---the list is changing dynamically within inner functions
def checkColumns(line, numFields):
    """Return True when *line* has at least *numFields* tab-separated entries.

    *numFields* is derived from the header row, so a short line indicates a
    record that was broken across physical lines.
    """
    return len(line.split('\t')) >= numFields
def fixLine(line, lines, count):
    """Repair *line* (the head of *lines*) so it forms one complete record.

    Joins continuation lines until the record ends with the expected closing
    quote and has the full number of tab-delimited fields, popping consumed
    entries from *lines* and decrementing *count* accordingly.
    Returns ``(fixed_line, lines, count)``.

    NOTE(review): reads the module-level global ``numFields``; also the
    quote-repair branch appends ``lines[1]`` but pops index 0 while the
    field-count loop pops index 1 — verify that asymmetry is intentional.
    """
    if line[-2] != '"':  # line lacks the proper "end-of-line" characters
        line = line + lines[1]  # add the next line in the list to this one
        lines.pop(0)  # remove the original line under revision from the list
        count -= 1  # remove one from the list count
    while checkColumns(line, numFields) == False:  # too few data entries?
        line = line + lines[1]  # keep appending the next line
        lines.pop(1)  # and remove the appended line from the list
        count -= 1  # update list count
    lines.pop(0)  # record is complete: remove it from the list
    count -= 1  # and update list count
    return line, lines, count
# Fetch the raw NYC real-time traffic speed feed and split into lines.
lines = urllib2.urlopen('http://207.251.86.229/nyc-links-cams/LinkSpeedQuery.txt').read().rsplit("\n") #Get text file from web service
numFields = len(lines[0].split("\t")) #Get number of fields (tab-delimited fields in first line)
newLines = [] #New list to store reformatted lines
count = len(lines) #Number of lines in original list
while count > 1: #While count is greater than 1 (should be 0 but for some reason there is a set of quotes that ends up being left over)
    line, lines, count = fixLine(lines[0], lines, count) #Check and fix the current line
    line = line.replace("\r", "") #Aferwards, remove extra carriage return markers
    newLines.append(line) #Add reformmated line to new list
columns = newLines[0].split("\t") #Columns will be the number of fields in first row
dataArray = np.ndarray((len(newLines),len(columns)), dtype = "object") #Final array should be the size of the number of rows by the nubmer of fields
rows,cols = dataArray.shape #row and column counts
for row in range(rows): #Break lines up into numpy array
    line = newLines[row].split("\t")
    for col in range(cols):
        dataArray[row,col] = line[col]
# Timestamp the output file name with the current date and time.
current = dt.datetime.now()
date = str(current.month) + "_" + str(current.day) + "_" + str(current.year)[2:]
time = current.strftime("%H.%M.%S")
np.savetxt("RTTS" + "_" + date + "_ " + time + ".csv", dataArray, fmt = "%s", delimiter = ",") #Save numpy array as a csv file
|
UTF-8
|
Python
| false | false | 2,013 |
9,947,144,307,119 |
412d54231d3c8d9aaa053c02be65098f251b9dd9
|
89bdc31ecd07f75f3ae2a9e8c48a5047e7dd4cd7
|
/src/AuditAndSecurity.py
|
c09a1351221b404675a7c63de0efdda9ad653996
|
[] |
no_license
|
jaeshleman/pyMAGE2RDF
|
https://github.com/jaeshleman/pyMAGE2RDF
|
11c214ce7986bd7eb788c70c088a62ada2a5b547
|
f1b44b2dd7a3d0c6b88fcf3f94764636ad6a95cf
|
refs/heads/master
| 2021-01-01T17:32:49.321308 | 2013-05-31T22:44:57 | 2013-05-31T22:44:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 09 May,2011
@author: Jason A. Eshleman -- [email protected] //[email protected]
'''
from Identifiable import Identifiable
from Person import Person
from Contact import Contact
from Organization import Organization
from Describable import Describable
import rdflib
class AuditAndSecurity(Identifiable):
'''
AuditAndSecurity section of MAGE-ML xml doc contains information about individuals and agencies
that ran the experiment(s) and processed the data.
'''
def __init__(self):
'''
Constructor
'''
Identifiable.__init__(self)
def processSubSection(self, subClass = None, subClassKey =None,nodes = None,
classDictionary = None, *args, **kwargs):
'''function to call to process AuditAndSecurity's subsections
'''
classDictionary = {"Contact_assnlist":Contact}#, "Organization":Organization,
for subClassKey, subClass in classDictionary.iteritems():
try:
Identifiable.processSubSection(self, subClass = subClass, subClassKey = subClassKey,
nodes = nodes, classDictionary = classDictionary)
except:
print "did not get through subsection %s within AuditAndSecurity"%subClassKey
def internalGraph(self, *args, **kwargs):
'''Empty method prevents instances from being created.
Instances of items within will be created if 'internalGraph'
method is enabled with subClasses.
'''
Describable.noInteralGraph(self)
def setClassNamespace(self, baseURI = 'http://io-informatics.com/rdf/'):
'''overrides self.classType assignment ---not needed if "noInternalGraph" selected
'''
self.ns = rdflib.Namespace(baseURI)
self.classType = self.ns['Audit']
|
UTF-8
|
Python
| false | false | 2,013 |
2,508,260,946,619 |
ef5490cbe4b06e4c572d412671bcfaa7047b81b1
|
27e78587d92a6b010fde8faa4110efec83c7861c
|
/finalists/Oleg/challenge_14/challenge14.py
|
43441b48361d7544560c04a6ebd66b9abca7d258
|
[] |
no_license
|
magmax/programming-challenge-2
|
https://github.com/magmax/programming-challenge-2
|
85f8916812af77bde3895b175a261041d279ff4b
|
62e8fd0d2d2b3a149d6d5c0a0364db366df85525
|
refs/heads/master
| 2020-12-24T21:44:57.840107 | 2013-02-15T15:48:41 | 2013-02-15T15:48:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys;
from Hamming import Hamming;
def bit(data, i):
    """Return the integer value of the character at index *i* of *data*."""
    return int(data[i])
# Just hamming 7,4
def hamming(data):
    """Decode one 7-bit Hamming(7,4) codeword string.

    Computes the three parity checks, corrects a single flipped data bit
    when the syndrome points at one, and returns the 4 data bits (at
    positions 2, 4, 5, 6) as a string.
    """
    bits = [int(ch) for ch in data]
    p1 = (bits[0] + bits[2] + bits[4] + bits[6]) & 1
    p2 = (bits[1] + bits[2] + bits[5] + bits[6]) & 1
    p3 = (bits[3] + bits[4] + bits[5] + bits[6]) & 1
    # Syndrome -> index of the flipped *data* bit. Syndromes pointing at a
    # parity bit need no correction, since parity bits are discarded anyway.
    syndrome_to_index = {
        (1, 1, 0): 2,
        (1, 0, 1): 4,
        (0, 1, 1): 5,
        (1, 1, 1): 6,
    }
    flipped = syndrome_to_index.get((p1, p2, p3))
    if flipped is not None:
        bits[flipped] ^= 1
    chars = [str(value) for value in bits]
    return chars[2] + chars[4] + chars[5] + chars[6]
def main():
s = 0;
for line in sys.stdin:
d = line.strip();
bits = '';
s = '';
# Incorrect length
if (len(d) % 7 != 0):
print "Error!";
elif (len(d) / 7 * 4 % 8 != 0):
print "Error!";
else:
error = False;
# Get hamming data
for i in range(len(d)/7):
bits += hamming(d[i*7:(i+1)*7]);
for i in range(len(bits)/8):
n = int(bits[i*8:(i+1)*8],2);
# Incorrect char
if (n>127 or n<32):
error = True;
s +=chr(n);
if error:
print "Error!";
else:
print s;
if __name__ == '__main__':
main();
|
UTF-8
|
Python
| false | false | 2,013 |
7,636,451,899,678 |
186d191bc4f48e83965098e90ab9dc0232ac5795
|
41619faa3a0c911760f002a9b64f05c113d28f3a
|
/server/upload-health.py
|
dbcc35fb100aeab3ea2da913fbe63fa5c6d8770f
|
[
"Apache-2.0"
] |
permissive
|
marcosgm/edeploy
|
https://github.com/marcosgm/edeploy
|
7c28e2a1b131322e199603b266c5a6f44209bc6b
|
7da51f956379de09e4a4d71e2c2a96286782874c
|
refs/heads/master
| 2019-04-01T08:25:26.781803 | 2014-10-03T13:04:23 | 2014-10-03T13:04:23 | 24,805,357 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Copyright (C) 2013-2014 eNovance SAS <[email protected]>
#
# Author: Erwan Velu <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''CGI script part of the eDeploy system.
It receives on its file form a file containing a Python dictionnary
with the hardware detected on the remote host. In return, it sends
a Python configuration script corresponding to the matched config.
If nothing matches, nothing is returned. So the system can abort
its configuration.
On the to be configured host, it is usually called like that:
$ curl -i -F name=test -F file=@/tmp/hw.lst http://localhost/cgi-bin/upload.py
'''
import ConfigParser
import cgi
import cgitb
import os
import pprint
import re
import sys
import time
import matcher
def log(msg):
    """Write *msg* to stderr with the eDeploy prefix for easy grepping."""
    line = 'eDeploy: %s\n' % msg
    sys.stderr.write(line)
def save_hw(items, name, hwdir):
'Save hw items for inspection on the server.'
try:
filename = os.path.join(hwdir, name + '.hw')
pprint.pprint(items, stream=open(filename, 'w'))
except Exception, xcpt:
log("exception while saving hw file: %s" % str(xcpt))
def generate_filename_and_macs(items):
    '''Generate a file name for a hardware using DMI information
    (product name and version) then if the DMI serial number is
    available we use it unless we lookup the first mac address.
    As a result, we do have a filename like :
    <dmi_product_name>-<dmi_product_version>-{dmi_serial_num|mac_address}

    Returns the ``sysvars`` dict; ``sysvars['sysname']`` carries the
    generated name and, when NICs were matched, ``sysvars['serial']``
    presumably holds the MAC list -- populated by ``matcher`` (verify
    against matcher's contract).
    '''
    # Duplicate items as it will be modified by match_* functions
    hw_items = list(items)
    sysvars = {}
    sysvars['sysname'] = ''
    matcher.match_spec(('system', 'product', 'name', '$sysprodname'),
                       hw_items, sysvars)
    # Each matched component is sanitised (\W stripped) and joined
    # with '-' into the file name.
    if 'sysprodname' in sysvars:
        sysvars['sysname'] = re.sub(r'\W+', '', sysvars['sysprodname']) + '-'
    matcher.match_spec(('system', 'product', 'vendor', '$sysprodvendor'),
                       hw_items, sysvars)
    if 'sysprodvendor' in sysvars:
        sysvars['sysname'] += re.sub(r'\W+', '', sysvars['sysprodvendor']) + \
            '-'
    matcher.match_spec(('system', 'product', 'serial', '$sysserial'),
                       hw_items, sysvars)
    # Let's use any existing DMI serial number or take the first mac address
    if 'sysserial' in sysvars:
        sysvars['sysname'] += re.sub(r'\W+', '', sysvars['sysserial']) + '-'
    # we always need to have the mac addresses for pxemngr
    if matcher.match_multiple(hw_items,
                              ('network', '$eth', 'serial', '$serial'),
                              sysvars):
        if 'sysserial' not in sysvars:
            sysvars['sysname'] += sysvars['serial'][0].replace(':', '-')
    else:
        log('unable to detect network macs')
    return sysvars
def fatal_error(error):
    '''Emit a shell script that prints *error*, log it to stderr, and
    exit the CGI with status 1.'''
    script = '''#!/bin/sh
cat <<EOF
%s
EOF
exit 1
''' % error
    print(script)
    sys.stderr.write('%s\n' % error)
    sys.exit(1)
def main():
    '''CGI entry point.

    Reads a hardware-description file (from ``-f PATH`` on the command
    line, or the ``file`` field of the CGI form), evaluates it to a
    Python list, and saves it under the configured health directory in
    a timestamped (optionally session-scoped) subdirectory.
    '''
    config = ConfigParser.ConfigParser()
    config.read('/etc/edeploy.conf')

    def config_get(section, name, default):
        'Secured config getter: falls back to *default* on missing keys.'
        try:
            return config.get(section, name)
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            return default

    # Destination root: HEALTHDIR from config, else ../health relative
    # to this script; a trailing '/' is always appended.
    cfg_dir = os.path.normpath(config_get('SERVER', 'HEALTHDIR', os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'health'))) + '/'
    # parse hw file given in argument or passed to cgi script
    if len(sys.argv) == 3 and sys.argv[1] == '-f':
        hw_file = open(sys.argv[2])
    else:
        cgitb.enable()
        form = cgi.FieldStorage()
        if 'file' not in form:
            fatal_error('No file passed to the CGI')
        fileitem = form['file']
        hw_file = fileitem.file
    # SECURITY NOTE(review): eval() on client-supplied data. Builtins
    # are stripped, but this is still not a safe parser for untrusted
    # input -- ast.literal_eval would be the robust replacement.
    try:
        hw_items = eval(hw_file.read(-1), {"__builtins__": None}, {})
    except Exception, excpt:
        fatal_error("'Invalid hardware file: %s'" % str(excpt))
    filename_and_macs = generate_filename_and_macs(hw_items)
    # One directory per upload, stamped to the minute.
    dirname = time.strftime("%Y_%m_%d-%Hh%M", time.localtime())
    if form.getvalue('session'):
        # basename() prevents path traversal via the session field.
        dest_dir = cfg_dir + os.path.basename(form.getvalue('session')) + \
            '/' + dirname
    else:
        dest_dir = cfg_dir + '/' + dirname
    try:
        if not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)
    except OSError, e:
        fatal_error("Cannot create %s directory (%s)" % (dest_dir, e.errno))
    save_hw(hw_items, filename_and_macs['sysname'], dest_dir)


if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
5,703,716,589,673 |
24412b2b504c09bb3e8550faa90d89befc7a6bb4
|
5aa882a51d7ecbb994b2a6e068382ef69c39b5d5
|
/flocker/node/gear.py
|
9838263d573c0701d1b8a4696387302238331803
|
[
"Apache-2.0"
] |
permissive
|
tombh/flocker
|
https://github.com/tombh/flocker
|
1c69d99ce64d5282c1ace8ccf53e5fa10106ee65
|
33c4056ad5f40d8a18a3915a7ec957d02db43130
|
refs/heads/master
| 2021-01-15T20:53:16.530524 | 2014-07-25T14:14:50 | 2014-07-25T14:14:50 | 22,288,394 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""Client implementation for talking to the geard daemon."""
import json
from zope.interface import Interface, implementer
from characteristic import attributes
from twisted.internet.defer import succeed, fail
from treq import request, content
GEAR_PORT = 43273
class AlreadyExists(Exception):
    """Raised when asked to create a unit whose name is already taken."""
class GearError(Exception):
    """The gear daemon returned an unexpected or error response."""
# ``characteristic.attributes`` generates __init__/__repr__/__eq__ for
# the listed fields; sub_state/container_image default to None and
# ports/links to empty tuples.
@attributes(["name", "activation_state", "sub_state", "container_image",
             "ports", "links"],
            defaults=dict(sub_state=None, container_image=None,
                          ports=(), links=()))
class Unit(object):
    """
    Information about a unit managed by geard/systemd.

    XXX: The container_image attribute defaults to `None` until we have a way
    to interrogate geard for the docker images associated with its
    containers. See https://github.com/ClusterHQ/flocker/issues/207

    :ivar unicode name: The name of the unit.

    :ivar unicode activation_state: The state of the unit in terms of
        systemd activation. Values indicate whether the unit is installed
        but not running (``u"inactive"``), starting (``u"activating"``),
        running (``u"active"``), failed (``u"failed"``) stopping
        (``u"deactivating"``) or stopped (either ``u"failed"`` or
        ``u"inactive"`` apparently). See
        https://github.com/ClusterHQ/flocker/issues/187 about using constants
        instead of strings.

    :ivar unicode sub_state: The systemd substate of the unit. Certain Unit
        types may have a number of additional substates, which are mapped to
        the five generalized activation states above. See
        http://www.freedesktop.org/software/systemd/man/systemd.html#Concepts

    :ivar unicode container_image: The docker image name associated with this
        gear unit

    :ivar list ports: The ``PortMap`` instances which define how connections to
        ports on the host are routed to ports exposed in the container.

    :ivar list links: The ``PortMap`` instances which define how connections to
        ports inside the container are routed to ports on the host.
    """
class IGearClient(Interface):
    """A client for the geard HTTP API.

    All methods are asynchronous and return Twisted ``Deferred``\ s.
    """

    def add(unit_name, image_name, ports=None, links=None):
        """Install and start a new unit.

        :param unicode unit_name: The name of the unit to create.

        :param unicode image_name: The Docker image to use for the unit.

        :param list ports: A list of ``PortMap``\ s mapping ports exposed in
            the container to ports exposed on the host. Default ``None`` means
            that no port mappings will be configured for this unit.

        :param list links: A list of ``PortMap``\ s mapping ports forwarded
            from the container to ports on the host.

        :return: ``Deferred`` that fires on success, or errbacks with
            :class:`AlreadyExists` if a unit by that name already exists.
        """

    def exists(unit_name):
        """Check whether the unit exists.

        :param unicode unit_name: The name of the unit to create.

        :return: ``Deferred`` that fires with ``True`` if unit exists,
            otherwise ``False``.
        """

    def remove(unit_name):
        """Stop and delete the given unit.

        This can be done multiple times in a row for the same unit.

        :param unicode unit_name: The name of the unit to stop.

        :return: ``Deferred`` that fires on success.
        """

    def list():
        """List all known units.

        :return: ``Deferred`` firing with ``set`` of :class:`Unit`.
        """
@implementer(IGearClient)
class GearClient(object):
    """Talk to the gear daemon over HTTP.

    :ivar bytes _base_url: Base URL for gear.
    """

    def __init__(self, hostname):
        """
        :param unicode hostname: Gear host to connect to.
        """
        self._base_url = b"http://%s:%d" % (hostname.encode("ascii"),
                                            GEAR_PORT)

    def _container_request(self, method, unit_name, operation=None, data=None):
        """Send HTTP request to gear for a specific container.

        :param bytes method: The HTTP method to send, e.g. ``b"GET"``.

        :param unicode unit_name: The name of the unit.

        :param operation: ``None``, or extra ``unicode`` path element to add to
            the request URL path.

        :param data: ``None``, or object with a body for the request that
            can be serialized to JSON.

        :return: A ``Deferred`` that fires with a response object.
        """
        path = b"/container/" + unit_name.encode("ascii")
        if operation is not None:
            path += b"/" + operation
        return self._request(method, path, data=data)

    def _request(self, method, path, data=None):
        """Send HTTP request to gear.

        :param bytes method: The HTTP method to send, e.g. ``b"GET"``.

        :param bytes path: Path to request.

        :param data: ``None``, or object with a body for the request that
            can be serialized to JSON.

        :return: A ``Deferred`` that fires with a response object.
        """
        url = self._base_url + path
        if data is not None:
            data = json.dumps(data)
        # persistent=False: one connection per request, no pooling.
        return request(method, url, data=data, persistent=False)

    def _ensure_ok(self, response):
        """Make sure response indicates success.

        Also reads the body to ensure connection is closed.

        :param response: Response from treq request,
            ``twisted.web.iweb.IResponse`` provider.

        :return: ``Deferred`` that errbacks with ``GearError`` if the response
            is not successful (2xx HTTP response code).
        """
        d = content(response)
        # geard uses a variety of 2xx response codes. Filed treq issue
        # about having "is this a success?" API:
        # https://github.com/dreid/treq/issues/62
        if response.code // 100 != 2:
            # Returning a failed Deferred from the callback propagates
            # the GearError down the chain.
            d.addCallback(lambda data: fail(GearError(response.code, data)))
        return d

    def add(self, unit_name, image_name, ports=None, links=None):
        """
        See ``IGearClient.add`` for base documentation.

        Gear `NetworkLinks` are currently fixed to destination localhost. This
        allows us to control the actual target of the link using proxy / nat
        rules on the host machine without having to restart the gear unit.

        XXX: If gear allowed us to reconfigure links this wouldn't be
        necessary. See https://github.com/openshift/geard/issues/223

        XXX: As long as we need to set the target as 127.0.0.1 its also worth
        noting that gear will actually route the traffic to a non-loopback
        address on the host. So if your service or NAT rule on the host is
        configured for 127.0.0.1 only, it won't receive any traffic. See
        https://github.com/openshift/geard/issues/224
        """
        if ports is None:
            ports = []
        if links is None:
            links = []
        data = {
            u"Image": image_name, u"Started": True, u'Ports': [],
            u'NetworkLinks': []}
        for port in ports:
            data['Ports'].append(
                {u'Internal': port.internal_port,
                 u'External': port.external_port})
        for link in links:
            data['NetworkLinks'].append(
                {u'FromHost': u'127.0.0.1',
                 u'FromPort': link.internal_port,
                 u'ToHost': u'127.0.0.1',
                 u'ToPort': link.external_port}
            )
        # Check-then-create: errback with AlreadyExists before issuing
        # the PUT (note: not atomic against concurrent creators).
        checked = self.exists(unit_name)
        checked.addCallback(
            lambda exists: fail(AlreadyExists(unit_name)) if exists else None)
        checked.addCallback(
            lambda _: self._container_request(b"PUT", unit_name, data=data))
        checked.addCallback(self._ensure_ok)
        return checked

    def exists(self, unit_name):
        # Implemented via list(): gear has no dedicated existence check.
        d = self.list()

        def got_units(units):
            return unit_name in [unit.name for unit in units]

        d.addCallback(got_units)
        return d

    def remove(self, unit_name):
        # Stop first, then delete; each step is checked for a 2xx reply.
        d = self._container_request(b"PUT", unit_name, operation=b"stopped")
        d.addCallback(self._ensure_ok)
        d.addCallback(lambda _: self._container_request(b"DELETE", unit_name))
        d.addCallback(self._ensure_ok)
        return d

    def list(self):
        d = self._request(b"GET", b"/containers?all=1")
        d.addCallback(content)

        def got_body(data):
            values = json.loads(data)[u"Containers"]
            # XXX: GearClient.list should also return container_image
            # information.
            # See https://github.com/ClusterHQ/flocker/issues/207
            # container_image=image_name,
            return set([Unit(name=unit[u"Id"],
                             activation_state=unit[u"ActiveState"],
                             sub_state=unit[u"SubState"],
                             container_image=None)
                        for unit in values])
        d.addCallback(got_body)
        return d
@implementer(IGearClient)
class FakeGearClient(object):
    """In-memory fake that simulates talking to a gear daemon.

    The state of the simulated units is stored in memory.

    :ivar dict _units: See ``units`` of ``__init__``\ .
    """

    def __init__(self, units=None):
        """
        :param dict units: A dictionary of canned ``Unit``\ s which will be
            manipulated and returned by the methods of this ``FakeGearClient``.
        :type units: ``dict`` mapping `unit_name` to ``Unit``\ .
        """
        if units is None:
            units = {}
        self._units = units

    def add(self, unit_name, image_name, ports=None, links=None):
        """See ``IGearClient.add``.

        Defaults are ``None`` to match the ``IGearClient`` declaration and
        ``GearClient.add``; ``None`` is normalised to an empty tuple, so
        callers that passed the previous ``()`` defaults are unaffected.
        """
        if ports is None:
            ports = ()
        if links is None:
            links = ()
        if unit_name in self._units:
            return fail(AlreadyExists(unit_name))
        self._units[unit_name] = Unit(
            name=unit_name,
            container_image=image_name,
            ports=ports,
            links=links,
            activation_state=u'active'
        )
        return succeed(None)

    def exists(self, unit_name):
        """Report whether *unit_name* is a known unit."""
        return succeed(unit_name in self._units)

    def remove(self, unit_name):
        """Forget *unit_name*; removing a missing unit is a no-op."""
        if unit_name in self._units:
            del self._units[unit_name]
        return succeed(None)

    def list(self):
        """Return the known units, stripped to name + activation state."""
        # XXX: This is a hack so that functional and unit tests that use
        # GearClient.list can pass until the real GearClient.list can also
        # return container_image information, ports and links.
        # See https://github.com/ClusterHQ/flocker/issues/207
        incomplete_units = set()
        for unit in self._units.values():
            incomplete_units.add(
                Unit(name=unit.name, activation_state=unit.activation_state))
        return succeed(incomplete_units)
# ``characteristic.attributes`` supplies __init__/__repr__/__eq__ for the
# two port fields; both are required (no defaults).
@attributes(['internal_port', 'external_port'])
class PortMap(object):
    """
    A record representing the mapping between a port exposed internally by a
    docker container and the corresponding external port on the host.

    :ivar int internal_port: The port number exposed by the container.
    :ivar int external_port: The port number exposed by the host.
    """
|
UTF-8
|
Python
| false | false | 2,014 |
19,542,101,205,999 |
e0727a7ec9043b4bfb63856b6885c0dd05a0b0a2
|
14b76667c4839fb3f5c177bbd983df70b231f898
|
/pypercube/time_utils.py
|
80b4b49642328e5dc1312491887978b99f7339c3
|
[
"BSD-3-Clause"
] |
permissive
|
kingishb/pypercube
|
https://github.com/kingishb/pypercube
|
3838a8e0fb9ef9f3abc2b349a43f69421900b85a
|
e9d2cca9c004b8bad6d1e0b68b080f887a186a22
|
refs/heads/master
| 2016-10-30T20:56:54.938329 | 2012-07-06T21:58:49 | 2012-07-06T21:58:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from datetime import datetime
from datetime import timedelta
# Cube query "step" (time resolution) constants, in milliseconds.
# NOTE: ``long`` exists only on Python 2 -- this module is Py2-only as written.
STEP_10_SEC = long(1e4)   # 10 s
STEP_1_MIN = long(6e4)    # 60 s
STEP_5_MIN = long(3e5)    # 300 s
STEP_1_HOUR = long(36e5)  # 3600 s
STEP_1_DAY = long(864e5)  # 86400 s

# (value, human-readable label) pairs, e.g. for choice fields.
STEP_CHOICES = (
    (STEP_10_SEC, "10 seconds"),
    (STEP_1_MIN, "1 minute"),
    (STEP_5_MIN, "5 minutes"),
    (STEP_1_HOUR, "1 hour"),
    (STEP_1_DAY, "1 day"))
def now():
    """Return the current UTC time as a naive ``datetime``."""
    current = datetime.utcnow()
    return current
def yesterday(start=None):
    """Return the instant 24 hours before *start* (default: ``now()``)."""
    reference = now() if start is None else start
    return reference - timedelta(days=1)
def last_week(start=None):
    """Return the instant seven days before *start* (default: ``now()``)."""
    reference = now() if start is None else start
    return reference - timedelta(days=7)
def start_of_month(timestamp=None):
    """Return midnight on the first day of *timestamp*'s month.

    :param timestamp: reference ``datetime``; defaults to ``now()``.
    """
    # ``is None`` (not truthiness) for consistency with the sibling
    # helpers; a datetime is always truthy, so behaviour is unchanged.
    if timestamp is None:
        timestamp = now()
    return datetime(year=timestamp.year, month=timestamp.month, day=1)
def floor(start, resolution):
    """Floor a datetime by a resolution.

    >>> now = datetime(2012, 7, 6, 20, 33, 16, 573225)
    >>> floor(now, STEP_1_HOUR)
    datetime.datetime(2012, 7, 6, 20, 0)
    """
    # Build the field list progressively: each coarser resolution keeps
    # one fewer component of *start*.
    fields = [start.year, start.month, start.day]
    if resolution == STEP_1_DAY:
        return datetime(*fields)
    fields.append(start.hour)
    if resolution == STEP_1_HOUR:
        return datetime(*fields)
    if resolution == STEP_1_MIN:
        fields.append(start.minute)
        return datetime(*fields)
    if resolution == STEP_5_MIN:
        fields.append(start.minute - (start.minute % 5))
        return datetime(*fields)
    if resolution == STEP_10_SEC:
        fields.extend([start.minute, start.second - (start.second % 10)])
        return datetime(*fields)
    raise ValueError("{resolution} is not a valid resolution. Valid choices "
            "are {choices}".format(
                resolution=resolution, choices=STEP_CHOICES))
|
UTF-8
|
Python
| false | false | 2,012 |
7,550,552,529,579 |
13785fd70e734801c5d0e061105a13a606999d70
|
fac07f2c62982ac8549fcfdce52a4edeae5052bd
|
/archlinux/aurprofile/views.py
|
eb4c07b9df718471adb17cbc4f3239277cf07c41
|
[] |
no_license
|
CMB/aur2
|
https://github.com/CMB/aur2
|
97192c2e86a30bea1ce9c1ea071dd044473204dd
|
b5bde5a4fbeb41c7f0e7640dc7ee2250eb1e81aa
|
refs/heads/master
| 2016-11-02T20:14:52.583296 | 2012-01-19T01:10:50 | 2012-01-19T01:12:01 | 3,213,859 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.template import RequestContext
from aurprofile.forms import ProfileUpdateForm
from aur.models import Package
@login_required
def profile(request):
    """Render the logged-in user's AUR profile page.

    GET shows the profile form pre-filled from the user; POST saves the
    submitted changes in place.  NOTE(review): there is no
    post/redirect/get, so refreshing after a successful POST resubmits
    the form.
    """
    packages = Package.objects.filter(maintainers=request.user)
    if request.method == 'POST':
        # Bind the submitted data to the current user's record.
        form = ProfileUpdateForm(request.POST, instance=request.user)
        if form.is_valid():
            form.save()
        # On validation failure the bound form (carrying errors) is
        # what gets rendered below.
    else:
        form = ProfileUpdateForm(instance=request.user)
    count_packages_ood = packages.filter(outdated=True).count()
    context = RequestContext(request, {
        'packages': packages,
        'form': form,
        'packages_out_of_date': count_packages_ood,
    })
    return render_to_response('aurprofile/profile.html', context)
|
UTF-8
|
Python
| false | false | 2,012 |
249,108,123,999 |
38436e95a25eef473fb4266474ed90cfe98484f7
|
09c32573797e268146f9dc96264f5919049abbe0
|
/videoscrapy/spiders/shows_spider.py
|
ac11237bc10474f445f89fb746201ec3dc797b37
|
[] |
no_license
|
cih315/videoscrapy
|
https://github.com/cih315/videoscrapy
|
d8d3f4624c48697b7a4170dfebb9c0176e5d4267
|
add0db2709d0187fdd3174d5834274d2b45b9b24
|
refs/heads/master
| 2021-01-12T22:16:36.829857 | 2012-12-06T12:50:47 | 2012-12-06T12:50:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from scrapy.http import Request
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from videoscrapy.items import VideoItem
class Spider(CrawlSpider):
    """Crawl v.360.cn variety-show ('zongyi') listings into VideoItems."""
    name = 'shows'
    allowed_domains = ['v.360.cn']
    start_urls = ['http://v.360.cn']
    # Only follow links into the variety-show index pages.
    rules = (
        Rule(SgmlLinkExtractor(allow=('zongyi/index\.php')), callback='parse_item'),
    )

    def parse_item(self, response):
        """Scrape the show list page; yield one detail-page Request per
        show, carrying a partially-filled item in request.meta."""
        self.log('Hi, this is an item page! %s' % response.url)
        hxs = HtmlXPathSelector(response)
        sites = hxs.select("//div[@id='bd']//div[@class='content gclearfix']/div[@class='video-list gclearfix']/dl[@class='section variety']")
        items = []
        for site in sites:
            item = VideoItem()
            item['name'] = site.select("dt[@class='video-title']/a/text()").extract()
            # Strip the trailing '.' from the ranking number, e.g. "3." -> "3".
            item['sort_index'] = site.select("dt[@class='video-title']/em/text()").extract()[0].replace('.','').strip()
            item['score'] = 0
            # Detail links are relative; prefix with the site root.
            detail_url =self.start_urls[0]+site.select("dt[@class='video-title']/a/@href").extract()[0]
            request = Request(detail_url,callback=self.parse_detail)
            request.meta['item'] = item
            items.append(request)
            #items.append(item)
        #link = hxs.select("//div[@id='gpage']/a[@class='page-next']/@href").extract()
        #if link:
        #    items.append(Request(link[0],callback=self.parse_item))
        return items

    def parse_detail(self,response):
        """Fill in the remaining VideoItem fields from a show detail page."""
        hxs = HtmlXPathSelector(response)
        item = response.meta['item']
        site = hxs.select("//div[@id='bd']")
        video_list = site.select("//div[@class='content']/div[@class='content-bd gclearfix']/dl")
        item['video_url'] = video_list.select("dd[@class='poster']/a[@class='play_btn']/@href").extract()
        item['video_name'] = video_list.select("dt/a/text()").extract()
        item['video_introduction'] = video_list.select("dd[@class='poster']/a[@class='play_btn']/div[2]").extract()
        item['video_thumbnail'] = video_list.select("dd[@class='poster']/a[@class='play_btn']/img/@src").extract()
        item['introduction'] = site.select("div[@class='span17']/div[@id='info']/div[2][@class='info-bd']/div[@class='intro gclearfix']/p[@id='part-intro']/text()").extract()
        item['thumbnail'] = site.select("div[@id='left_info']/div[@id='poster']/a[@class='play_btn']/img/@src").extract()
        item['publish_time'] = ""
        item['director'] = " ".join(site.select("div[@id='left_info']/div[@id='otherinfo']/p[2]/span[@class='text']/text()").extract())
        item['actors'] = ""
        item['area'] = site.select("div[@id='left_info']/div[@id='otherinfo']/p[4]/span[@class='text']/text()").extract()
        item['sec_classify_name'] = "".join(site.select("div[@id='left_info']/div[@id='otherinfo']/p[3]/span[@class='text']/text()").extract())
        item['video_view_cnt'] = 0
        return item
|
UTF-8
|
Python
| false | false | 2,012 |
6,210,522,730,754 |
0d5291de3e27dd232e7b93abf6529cee90823efa
|
1393cd37b91d684814dcb348b95c646034e15e77
|
/src/Partie.py
|
080bd1978088505cd7bda0eb5e830c1cb7d6e8b6
|
[] |
no_license
|
ThomasLeclerc/Projet-tut
|
https://github.com/ThomasLeclerc/Projet-tut
|
9db17c2f531f3c982a86a5625f600869de7f99d4
|
cc5e166adfd94a260ad790f6cead891192817dba
|
refs/heads/master
| 2020-12-30T09:51:36.891681 | 2012-10-24T09:01:42 | 2012-10-24T09:01:42 | 3,388,371 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''#####################
#'' '#
#' Classe Partie '#
#' Moteur de jeu '#
#' ''#
#####################'''
''' IMPORTS '''
import pygame
import pyganim
import sys
import Ennemi
import Ship
import Obstacle
import random
import Bonus
import Menu
import Bouton
import time
''' CLASSE '''
class Partie:
pygame.init()
''' FONCTION QUI INSTANCIE nombre DE SNAKE
'' le type de deplacement est tire aleatoirement
'' ainsi que les coefiscients pour un deplacement en droite '''
def __init__(self, player):
    """Set up a fresh game session for *player*: music tracks, sprite
    groups, and record-tracking state."""
    self.player = player
    # Background tracks: one before the record is beaten, one after.
    self.music = pygame.mixer.Sound("sounds/music_before_record.wav")
    self.musicAfterRecord = pygame.mixer.Sound("sounds/music_after_record.wav")
    ##### SPRITE GROUPS #####
    self.missiles = pygame.sprite.Group()        # player missiles
    self.snakes = pygame.sprite.Group()          # snake-type enemies
    self.shooters = pygame.sprite.Group()        # shooting enemies
    self.aleatoires = pygame.sprite.Group()      # random-movement enemies
    self.obstacles = pygame.sprite.Group()       # asteroids
    self.missilesShooter = pygame.sprite.Group() # enemy missiles
    self.bonus = pygame.sprite.Group()           # pickups (ammo/shield)
    self.coins = pygame.sprite.Group()           # dropped coins
    self.isRecordBattu = False
    # needed to display the ReachedRecord animation several times
    self.afficherReachedRecord = 0
def creerSnakes(self,width, height, nombre=0):
    """Spawn *nombre* Snake enemies just past the right screen edge.

    The movement pattern is drawn at random: 1 = snake chain,
    2 = straight line (random slope ``a`` / intercept ``b``),
    3 = fixed five-ship escadron (V formation; *nombre* ignored).
    """
    positionChaine = random.randint(100,height-180)
    # draw the movement type at random
    typeDeplacement = random.randint(1,3)
    # snake-chain type: ships staggered 20px apart horizontally
    if typeDeplacement == 1:
        while(nombre!=0):
            self.snakes.add(Ennemi.Snake(width+(nombre*20), 0, 1, positionChaine))
            nombre -= 1
    # straight-line type: y = a*x + b, slope sign picks the intercept band
    elif typeDeplacement == 2:
        a = random.uniform(-0.8,0.8)
        if a < 0:
            b = random.uniform(height/2,height-10)
        else:
            b = random.uniform(10,height-height/2)
        while nombre!=0:
            self.snakes.add(Ennemi.Snake(width+(nombre*40), positionChaine, typeDeplacement, positionChaine, a, b))
            nombre -= 1
    # escadron type: fixed V formation of five ships
    else:
        self.snakes.add(Ennemi.Snake(width+80, positionChaine-80, 3))
        self.snakes.add(Ennemi.Snake(width+50, positionChaine-40, 3))
        self.snakes.add(Ennemi.Snake(width+20, positionChaine, 3))
        self.snakes.add(Ennemi.Snake(width+50, positionChaine+40, 3))
        self.snakes.add(Ennemi.Snake(width+80, positionChaine+80, 3))
def creerShooters(self, width, height):
    """Spawn one Shooter enemy at the right edge, at a random height."""
    spawn_y = height - random.randint(40, height)
    self.shooters.add(Ennemi.Shooter(width, spawn_y))
def creerAleatoires(self, width, height):
    """Spawn one random-movement enemy at the right edge, random height."""
    spawn_y = height - random.randint(40, height)
    self.aleatoires.add(Ennemi.Aleatoire(width, spawn_y))
def creerBonus(self, ship, width, height):
    """Drop a bonus at (width, height): 50/50 ammo or shield."""
    kind = random.randint(1, 2)
    bonus_cls = Bonus.BonusAmmo if kind == 1 else Bonus.BonusShield
    self.bonus.add(bonus_cls(width, height, ship))
'''Handles the random spawning of every enemy type.'''
def creerEnnemi(self, width, height, level, monVaisseau):
    """Randomly spawn snakes, shooters and random-movers; a higher
    *level* raises every spawn probability.

    NOTE(review): ``level/4`` floors under Python 2 integer division;
    under Python 3 these thresholds become floats -- confirm the target
    interpreter before porting.  ``monVaisseau`` is accepted but unused.
    """
    if random.randint(0, level) > 2+level/4:
        self.creerSnakes(width, height, random.randint(6,10))
    if random.randint(0, level) > 4+level/4:
        self.creerShooters(width, height)
    if random.randint(0, level) > 2+level/4:
        self.creerAleatoires(width, height)
'''Random spawning of asteroids.'''
def creerObstacle(self, width, height, level):
    """Possibly spawn an obstacle at the right edge.

    ``level == -1`` spawns the special 'record' asteroid; otherwise one
    of the five ordinary asteroid sprites appears with a probability
    that grows with *level*.
    """
    if level==-1:
        self.obstacles.add(Obstacle.obstacleRecord(width, random.randint(10,height-200),"images/ingame/record/asteroid_crash_1.png"))
    else:
        y = random.randint(10, height)
        if random.randint(0, level) < level-int(level/4):
            # pick one of the five asteroid sprites
            typeObstacle = random.randint(1,5)
            self.obstacles.add(Obstacle.obstacle(width, y,"images/ingame/asteroids/asteroid"+str(typeObstacle)+".png"))
def gameOver(self, (x, y), screen, distance, height, monVaisseau):
    """Play the death explosion at (x, y), persist record/money, then
    loop on the game-over screen until the player restarts (RETURN),
    goes back to the menu (ESCAPE) or quits.

    NOTE(review): the tuple parameter ``(x, y)`` is Python-2-only
    syntax; the loop below never returns normally.
    """
    monVaisseau.son.stop()
    if self.player.soundOn:
        pygame.mixer.Sound("sounds/shipBoom.wav").play()
    # 8-frame explosion animation, 0.1 s per frame, played once.
    imagesTemp = [(pygame.image.load("images/ingame/explosion/explosion"+str(compt)+".png"), 0.1) for compt in range(1,9)]
    explosion = pyganim.PygAnimation(imagesTemp, loop=False)
    explosion.play()
    self.music.stop()
    self.musicAfterRecord.stop()
    # best-distance record
    if distance > self.player.record :
        self.player.record=distance
    # player's total money
    self.player.money += monVaisseau.money
    self.player.save()
    while(1):
        for event in pygame.event.get():
            if event.type == pygame.QUIT: sys.exit()
            elif event.type == pygame.KEYDOWN:
                # ESCAPE: rebuild and show the main menu
                if event.key == pygame.K_ESCAPE:
                    ecranAccueil = Menu.Menu("images/menu/menu.jpg", self.player)
                    ecranAccueil.addButton(Bouton.BoutonStartGame("images/menu/menu_principal/titles/play.png",0, 270, self.player, True))
                    ecranAccueil.addButton(Bouton.BoutonOption("images/menu/menu_principal/titles/option.png",0, 340, self.player))
                    ecranAccueil.addButton(Bouton.BoutonCredits("images/menu/menu_principal/titles/credits.png",0, 415))
                    ecranAccueil.addButton(Bouton.BoutonQuit("images/menu/menu_principal/titles/quit.png",0, 485))
                    ecranAccueil.afficher()
                # RETURN: start a brand-new game
                elif event.key == pygame.K_RETURN:
                    p = Partie(self.player)
                    p.jouer()
                    break
        background = pygame.image.load("images/background.jpg")
        screen.blit(background, (0,0))
        # blit GAME OVER title
        policeTitre = pygame.font.Font(None, 120)
        titre = policeTitre.render("GAME OVER",1,(254,0,0))
        screen.blit(titre,(200,100))
        # blit restart hint
        policeTitre = pygame.font.Font(None, 20)
        titre = policeTitre.render("ENTRER POUR RECOMMENCER",1,(50,254,50))
        screen.blit(titre,(800,700))
        # blit distance travelled and the record
        policeDistance = pygame.font.Font(None, 80)
        titre = policeDistance.render("distance : "+str(distance)+" m",1,(254,0,0))
        screen.blit(titre,(200,height/2))
        titreRec = policeDistance.render("record : "+str(self.player.record)+" m",1,(254,0,0))
        screen.blit(titreRec,(200,(height/2)+60))
        explosion.blit(screen, (x,y))
        pygame.display.update()
'''Collision handling.'''
def Collisions(self, monVaisseau, animObj, screen):
    """Resolve all collisions for one frame.

    Order: player missiles vs enemies; obstacles vs missiles/enemies/
    ship/coins; ship vs enemies, enemy missiles, bonuses and coins.
    Destroyed enemies drop coins (shooters may also drop a bonus),
    play their sound and the shared explosion animation; any hit on
    the ship clears ``monVaisseau.enVie`` unless a shield is active.

    NOTE(review): sprites are removed from their groups while those
    groups are iterated -- pygame's Group iterates a copy, so this
    works; verify before swapping in another container type.
    """
    # player missiles vs snakes / shooters / random-movers
    for monMissile in self.missiles:
        for snakeTemp in self.snakes:
            if snakeTemp.estTouche(monMissile):
                monVaisseau.raiseScore(1)
                (x,y) = snakeTemp.getPos()
                snakeTemp.creerCoin(self.coins)
                self.missiles.remove(monMissile)
                if self.player.soundOn:
                    snakeTemp.son.play()
                self.snakes.remove(snakeTemp)
                animObj.play()
                animObj.blit(screen, (x,y))
                break
        for shooterTemp in self.shooters:
            if shooterTemp.estTouche(monMissile):
                self.missiles.remove(monMissile)
                (x,y) = shooterTemp.getPos()
                # shooters take several hits; only the killing hit can
                # drop a bonus (40% chance) and a coin
                if shooterTemp.vie != 0:
                    shooterTemp.vie -= 1
                elif shooterTemp.vie == 0:
                    r = random.randint(0,100)
                    if 100-r < 40:
                        self.creerBonus(monVaisseau, x-10, y+20)
                    shooterTemp.creerCoin(self.coins)
                    if self.player.soundOn:
                        shooterTemp.son.play()
                    self.shooters.remove(shooterTemp)
                    monVaisseau.raiseScore(2)
                animObj.play()
                animObj.blit(screen, (x,y))
                break
        for aleaTemp in self.aleatoires:
            if aleaTemp.estTouche(monMissile):
                (x,y) = aleaTemp.getPos()
                aleaTemp.creerCoin(self.coins)
                monVaisseau.raiseScore(1)
                self.missiles.remove(monMissile)
                if self.player.soundOn:
                    aleaTemp.son.play()
                self.aleatoires.remove(aleaTemp)
                animObj.play()
                animObj.blit(screen, (x,y))
                break
    # obstacles absorb missiles (showing an impact sprite) and destroy
    # any enemy or coin they touch; they also kill an unshielded ship
    for obsTemp in self.obstacles:
        for monMissile in self.missilesShooter:
            if obsTemp.estTouche(monMissile):
                monMissile.setImg("images/ingame/impact.png")
                screen.blit(monMissile.image, monMissile.rect)
                self.missilesShooter.remove(monMissile)
        for monMissile in self.missiles:
            if obsTemp.estTouche(monMissile):
                monMissile.setImg("images/ingame/impact.png")
                screen.blit(monMissile.image, monMissile.rect)
                self.missiles.remove(monMissile)
        # snakes vs obstacle
        for snakeTemp in self.snakes:
            if obsTemp.estTouche(snakeTemp):
                (x,y) = snakeTemp.getPos()
                if self.player.soundOn:
                    snakeTemp.son.play()
                self.snakes.remove(snakeTemp)
                animObj.play()
                animObj.blit(screen, (x,y))
        # shooters vs obstacle
        for shooterTemp in self.shooters:
            if obsTemp.estTouche(shooterTemp):
                (x,y) = shooterTemp.getPos()
                if self.player.soundOn:
                    shooterTemp.son.play()
                self.shooters.remove(shooterTemp)
                animObj.play()
                animObj.blit(screen, (x,y))
        # random-movers vs obstacle
        for aleaTemp in self.aleatoires:
            if obsTemp.estTouche(aleaTemp):
                (x,y) = aleaTemp.getPos()
                if self.player.soundOn:
                    aleaTemp.son.play()
                self.aleatoires.remove(aleaTemp)
                animObj.play()
                animObj.blit(screen, (x,y))
        # ship vs obstacle (shield makes the ship invulnerable)
        if monVaisseau.isBonusShield != True:
            if monVaisseau.estTouche(obsTemp):
                monVaisseau.enVie = False
        # coins vs obstacle
        for coinTemp in self.coins:
            if obsTemp.estTouche(coinTemp):
                self.coins.remove(coinTemp)
    # ship ramming enemies: enemy dies (score/coins/sound/explosion)
    # and the ship dies too unless shielded
    for snakeTemp in self.snakes:
        if monVaisseau.estTouche(snakeTemp):
            monVaisseau.raiseScore(1)
            (x,y) = snakeTemp.getPos()
            snakeTemp.creerCoin(self.coins)
            if self.player.soundOn:
                snakeTemp.son.play()
            self.snakes.remove(snakeTemp)
            animObj.play()
            animObj.blit(screen, (x,y))
            if not monVaisseau.isBonusShield:
                monVaisseau.enVie = False
    for shooterTemp in self.shooters:
        if monVaisseau.estTouche(shooterTemp):
            (x,y) = shooterTemp.getPos()
            r = random.randint(0,100)
            if 100-r < 40:
                self.creerBonus(monVaisseau, x, y)
            shooterTemp.creerCoin(self.coins)
            if self.player.soundOn:
                shooterTemp.son.play()
            self.shooters.remove(shooterTemp)
            monVaisseau.raiseScore(2)
            animObj.play()
            animObj.blit(screen, (x,y))
            if not monVaisseau.isBonusShield:
                monVaisseau.enVie = False
    for aleaTemp in self.aleatoires:
        if monVaisseau.estTouche(aleaTemp):
            (x,y) = aleaTemp.getPos()
            aleaTemp.creerCoin(self.coins)
            monVaisseau.raiseScore(1)
            if self.player.soundOn:
                aleaTemp.son.play()
            self.aleatoires.remove(aleaTemp)
            animObj.play()
            animObj.blit(screen, (x,y))
            if not monVaisseau.isBonusShield:
                monVaisseau.enVie = False
    # enemy missiles vs ship
    # NOTE(review): the loop variable is bound to an *instance
    # attribute* (self.missileshooterTemp) -- almost certainly meant to
    # be a local; it leaks the last missile onto self.
    for self.missileshooterTemp in self.missilesShooter:
        if monVaisseau.estTouche(self.missileshooterTemp):
            if not monVaisseau.isBonusShield:
                monVaisseau.enVie = False
            self.missilesShooter.remove(self.missileshooterTemp)
    # ship vs bonuses: picking one up activates it for 10 s
    # NOTE(review): same instance-attribute loop-variable pattern here
    # (self.bonusTemp).
    for self.bonusTemp in self.bonus:
        if monVaisseau.estTouche(self.bonusTemp):
            self.bonusTemp.startTime=pygame.time.get_ticks()
            self.bonusTemp.stopTime=pygame.time.get_ticks()+10000
            self.bonusTemp.isActive=True
            self.bonusTemp.isVisible=False
            self.bonusTemp.action(self.bonus,pygame.time.get_ticks())
        else:
            self.bonusTemp.action(self.bonus,pygame.time.get_ticks())
    # ship vs coins
    for coinTemp in self.coins:
        if monVaisseau.estTouche(coinTemp):
            monVaisseau.money += 1
            if self.player.soundOn:
                coinTemp.son.play()
            self.coins.remove(coinTemp)
'''Fonction qui gere les mouvements de tous les objets'''
def Mouvements(self, screen, width, height, monVaisseau):
    """Advance every game object by one frame.

    Each sprite group's ``update`` receives the current tick so sprites can
    do time-based movement; the player ship is updated first.

    :param screen: the pygame display surface
    :param width: window width in pixels
    :param height: window height in pixels
    :param monVaisseau: the player's ship object
    """
    ##### PLAYER MOVEMENT #####
    monVaisseau.update(pygame.time.get_ticks(), height, screen)
    ##### SNAKE ENEMIES #####
    self.snakes.update(pygame.time.get_ticks(), self.snakes, width, height)
    ##### SHOOTER ENEMIES (may fire missiles at the player) #####
    self.shooters.update(pygame.time.get_ticks(), monVaisseau, self.shooters, self.missilesShooter, height)
    ##### RANDOM-PATH ENEMIES #####
    self.aleatoires.update(pygame.time.get_ticks(), self.obstacles, height)
    ##### OBSTACLES #####
    self.obstacles.update(pygame.time.get_ticks())
    ##### BONUSES #####
    self.bonus.update(pygame.time.get_ticks(),self.bonus)
    ##### PLAYER MISSILES #####
    self.missiles.update(pygame.time.get_ticks(), width, self.missiles)
    ##### ENEMY MISSILES #####
    self.missilesShooter.update(pygame.time.get_ticks(), self.missilesShooter)
    ##### COINS #####
    self.coins.update(pygame.time.get_ticks())
'''Fonction qui gere les blits de tous les objets'''
def Blits(self, width, height, screen, distance, monVaisseau, anim):
    """Draw one frame: HUD (charge gauge, bonus icons, record, money,
    distance, missile stock), every sprite group, the shield overlay and
    the "record reached" flash.

    NOTE(review): images and fonts are reloaded from disk on every frame;
    caching them once at init time would avoid per-frame disk I/O.
    """
    # Charge gauge background, plus a red fill proportional to the charge.
    imgJauge = pygame.image.load("images/ingame/gauge.png")
    screen.blit(imgJauge, (1,10))
    if (monVaisseau.inCharge):
        pygame.draw.rect(screen, (255, 0, 0), (32, 38, monVaisseau.charge*139/monVaisseau.chaleurMax, 23))
    screen.blit(monVaisseau.image, monVaisseau.rect)
    # Active-bonus icons in the top-left corner.
    if monVaisseau.isBonusAmmo:
        logoBonus = pygame.transform.scale(pygame.image.load("images/bonus/ammo.png"),(25,25))
        screen.blit(logoBonus,(10,50))
    if monVaisseau.isBonusShield:
        logoBonus = pygame.transform.scale(pygame.image.load("images/bonus/shield_icon.png"),(25,25))
        screen.blit(logoBonus,(40,50))
    # Every sprite group; bonuses only while still marked visible.
    for o in self.obstacles.sprites(): screen.blit(o.image, o.rect)
    for c in self.coins.sprites(): screen.blit(c.image,c.rect)
    for s in self.snakes.sprites(): screen.blit(s.image, s.rect)
    for s in self.shooters.sprites(): screen.blit(s.image, s.rect)
    for a in self.aleatoires.sprites(): screen.blit(a.image, a.rect)
    for m in self.missiles.sprites(): screen.blit(m.image, m.rect)
    for m in self.missilesShooter.sprites(): screen.blit(m.image, m.rect)
    for b in self.bonus.sprites():
        if b.isVisible:
            screen.blit(b.image,b.rect)
    # Best-distance display: red while unbeaten, green once beaten.
    police = pygame.font.Font(None, 40)
    if not self.isRecordBattu:
        texte = police.render("record : "+str(self.player.record), 1, (254,50,100))
    else:
        texte = police.render("record : "+str(distance), 1, (100,255,100))
    screen.blit(texte, (width - 250, height - 150))
    # Money counter (coin icon + amount).  One size-60 font serves both the
    # money and the distance counters (it was created twice before).
    screen.blit(pygame.image.load("images/ingame/Coin.png"), (width-200, height-100))
    police = pygame.font.Font(None, 60)
    texte = police.render(str(monVaisseau.money), 1, (210, 210, 1))
    screen.blit(texte, (width - 160, height - 110))
    # Distance counter.
    texte = police.render(str(distance) + " m", 1, (254, 0, 0))
    screen.blit(texte, (width - 200, height - 70))
    # Missile stock: one rocket icon per missile still available.
    # Rewritten with an explicit count: the previous version reused the
    # loop variable after the loop, which raised NameError when the range
    # was empty while chaleur == 0.
    image = pygame.image.load("images/rocket.png")
    nbRockets = ((monVaisseau.chaleurMax // monVaisseau.chaleurMissile) - 1) - (monVaisseau.chaleur // monVaisseau.chaleurMissile)
    if (monVaisseau.chaleur == 0):
        # A fully-cooled cannon shows one extra rocket.
        nbRockets += 1
    for l in range(nbRockets):
        screen.blit(image, (10 * (l + 1), 10))
    # Shield bubble drawn over the ship while the bonus is active.
    if monVaisseau.isBonusShield:
        imgShield = pygame.image.load("images/bonus/Shield.png")
        (x,y) = monVaisseau.getPos()
        screen.blit(imgShield, (x,y-10))
    # "Record reached" flash: shown while the countdown (started at 40)
    # is not a multiple of 10, which makes it blink as it counts down.
    if self.afficherReachedRecord%10:
        anim.play()
        anim.blit(screen, (312, 200))
    if self.afficherReachedRecord != 0:
        self.afficherReachedRecord -= 1
def supprimerObjets(self, width):
    """Remove every sprite that has scrolled off the playfield.

    Enemies, obstacles, coins and enemy missiles are dropped once they are
    well past the left edge (x < -200); the player's missiles travel right
    and are dropped past the right edge (x > width + 40).

    Fixes over the previous version: sprites are no longer removed from a
    pygame Group while iterating it directly (which can skip sprites), and
    the loop variable is no longer leaked as an instance attribute
    (``self.missileshooterTemp``).
    """
    def _purger(groupe, estDehors):
        # One shared pruning routine for all groups.  Iterate a snapshot
        # so removal cannot disturb the group's own iteration.
        for spriteTemp in list(groupe):
            (x, _) = spriteTemp.getPos()
            if estDehors(x):
                groupe.remove(spriteTemp)

    # Everything that scrolls leftward off screen.
    for groupe in (self.snakes, self.shooters, self.aleatoires,
                   self.obstacles, self.missilesShooter, self.coins):
        _purger(groupe, lambda x: x < -200)
    # Player missiles leave on the right-hand side.
    _purger(self.missiles, lambda x: x > width + 40)
def jouer(self):
    """Run one full game session: setup, then the per-frame main loop.

    The loop handles keyboard input, scrolls the background, spawns
    enemies/obstacles as the travelled distance grows, delegates per-frame
    work to Mouvements/Collisions/Blits/supprimerObjets and ends through
    gameOver() once the ship dies.
    """
    ##### WINDOW SETUP #####
    size = width, height = 1024,768
    screen = pygame.display.set_mode(size, pygame.FULLSCREEN)
    pygame.mouse.set_visible(False)
    ##### COUNTERS (distance in metres, difficulty level) #####
    distanceTemp = 0
    distance = 2
    distanceLevelTemp = 0
    level = 1
    ##### EXPLOSION / RECORD ANIMATIONS #####
    imagesTemp = [(pygame.transform.scale(pygame.image.load("images/ingame/explosion/explosion"+str(compt)+".png"), (70, 70)), 0.6) for compt in range(2,6)]
    animObj = pyganim.PygAnimation(imagesTemp, loop=False)
    imagesTemp = [(pygame.image.load("images/ingame/reachedRecord3.png"), 0.15), (pygame.image.load("images/ingame/reachedRecord2.png"),0.15), (pygame.image.load("images/ingame/reachedRecord1.png"),0.4)]
    animReachedRecord = pyganim.PygAnimation(imagesTemp, loop=False)
    ##### SCROLLING BACKGROUND IMAGE #####
    background = pygame.image.load("images/background/background.jpg")
    i=0
    isRecordBattu=False
    ##### PLAYER SHIP (gear depends on what was bought in the shop) #####
    monVaisseau = Ship.ship([20, 0], self.player)
    monVaisseau.raiseChaleurMax(self.player.additionalMissiles)
    '''self.player.updateShopStateList(3,-1)'''
    # Highest cannon upgrade owned wins.
    if self.player.shopStateList[3]==1:
        monVaisseau.versionCanon=4
    elif self.player.shopStateList[2]==1:
        monVaisseau.versionCanon=3
    elif self.player.shopStateList[1]==1:
        monVaisseau.versionCanon=2
    else:
        monVaisseau.versionCanon=1
    if self.player.shopStateList[4]==1:
        monVaisseau.boosterOn = True
    if self.player.shopStateList[5]==1:
        monVaisseau.spoilerOn = True
    ##### MUSIC #####
    if self.player.musicOn:
        self.music.play(-1)
    ##### START-MENU FLAGS #####
    menuStartOn=True
    musicAfterRecord = False
    '''################################################################## ''
    '' BOUCLE DE JEU ''
    '' (img par img) ''
    '' ##################################################################'''
    while 1:
        ''' VITESSE D'AFFICHAGE '''
        # Cap the frame rate at 35 FPS.
        clock = pygame.time.Clock()
        FRAMES_PER_SECOND = 35
        deltat = clock.tick(FRAMES_PER_SECOND)
        '''APPUYER SUR ENTRER POUR COMMENCER'''
        # Blocking "press Enter to start" splash; Escape returns to the
        # main menu instead.
        while menuStartOn:
            screen.blit(pygame.image.load("images/menu/menu_start_game/background_start_menu.jpeg"),(0,10))
            pygame.display.flip()
            for event in pygame.event.get():
                if event.type == pygame.QUIT: sys.exit()
                elif event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_RETURN:
                        menuStartOn=False
                    elif event.key == pygame.K_ESCAPE:
                        self.music.stop()
                        self.musicAfterRecord.stop()
                        ecranAccueil = Menu.Menu("images/menu/menu.jpg", self.player)
                        ecranAccueil.addButton(Bouton.BoutonStartGame("images/menu/menu_principal/titles/play.png",0, 270, self.player, True))
                        ecranAccueil.addButton(Bouton.BoutonOption("images/menu/menu_principal/titles/option.png",0, 340, self.player))
                        ecranAccueil.addButton(Bouton.BoutonCredits("images/menu/menu_principal/titles/credits.png",0, 415))
                        ecranAccueil.addButton(Bouton.BoutonQuit("images/menu/menu_principal/titles/quit.png",0, 485))
                        ecranAccueil.afficher()
        ''' COMMANDES CLAVIER '''
        for event in pygame.event.get():
            if event.type == pygame.QUIT: sys.exit()
            ##### KEY PRESSED #####
            elif event.type == pygame.KEYDOWN:
                # UP: start climbing (with engine sound)
                if event.key == pygame.K_UP:
                    if not monVaisseau.monte:
                        monVaisseau.monte=True
                        if self.player.soundOn:
                            monVaisseau.son.play(-1)
                # SPACE: start charging a shot
                elif event.key == pygame.K_SPACE:
                    if not monVaisseau.inCharge:
                        monVaisseau.inCharge=True
                # RIGHT: boost
                elif event.key == pygame.K_RIGHT:
                    if not monVaisseau.inBoost:
                        monVaisseau.inBoost=True
                # LEFT: brake
                elif event.key == pygame.K_LEFT:
                    if not monVaisseau.inBreak:
                        monVaisseau.inBreak=True
                # ESCAPE: pause menu
                elif event.key == pygame.K_ESCAPE:
                    menuPause = Menu.menuPause("images/menu/menu_pause/background_menu_pause.png", self.player)
                    menuPause.addButton(Bouton.BoutonReprendre("images/menu/menu_pause/reprendre.png", 445, 410, True))
                    menuPause.addButton(Bouton.BoutonMenuPrincipal("images/menu/menu_pause/menu_principal.png", 474, 447, self.player))
                    menuPause.afficher(screen, self)
            ##### KEY RELEASED #####
            elif event.type == pygame.KEYUP:
                # UP released: stop climbing
                if event.key == pygame.K_UP:
                    monVaisseau.monte=False
                    monVaisseau.son.stop()
                # SPACE released: fire the charged shot
                elif event.key == pygame.K_SPACE:
                    monVaisseau.tir(self.missiles);
                elif event.key == pygame.K_RIGHT:
                    monVaisseau.inBoost=False
                elif event.key == pygame.K_LEFT:
                    monVaisseau.inBreak=False
        ##### SCROLL THE BACKGROUND (two copies, 13262 px wide) #####
        screen.blit(background, (-i,0))
        screen.blit(background, (13262-i,0))
        i+=1
        if i > 13262:
            i=0
        ##### PREVIOUS RECORD MARKER #####
        # Drop a special obstacle (level -1) 35 m before the record point.
        if not isRecordBattu:
            if distance+35==self.player.record:
                self.creerObstacle(width, height, -1)
                isRecordBattu=True
        if not self.isRecordBattu:
            if distance>self.player.record:
                self.isRecordBattu=True
        # Exactly at the record distance: start the flash countdown and
        # switch to the "record beaten" music (once per session).
        if self.player.musicOn:
            if distance == self.player.record:
                self.afficherReachedRecord = 40
                self.music.stop()
                if musicAfterRecord == False:
                    pygame.mixer.Sound("sounds/record_beaten.wav").play()
                    self.musicAfterRecord.play(-1)
                    musicAfterRecord = True
        self.Mouvements(screen, width, height, monVaisseau)
        self.Collisions(monVaisseau, animObj, screen)
        self.Blits(width, height, screen, distance, monVaisseau, animReachedRecord)
        self.supprimerObjets(width)
        # Distance counter (1 m every 5 frames) plus enemy/obstacle
        # spawning as the level cycle progresses.
        if distanceTemp != 4:
            distanceTemp += 1
        else:
            distanceTemp = 0
            distance += 1
            if distanceLevelTemp != 60:
                distanceLevelTemp += 1
                if distanceLevelTemp == 10:
                    self.creerEnnemi(width, height, level, monVaisseau)
                    self.creerObstacle(width, height, level)
            else:
                distanceLevelTemp = 0
                level += 1
        if (monVaisseau.enVie == False):
            self.gameOver(monVaisseau.getPos(), screen, distance, height, monVaisseau)
        pygame.display.update()
|
UTF-8
|
Python
| false | false | 2,012 |
18,940,805,788,814 |
3c139f7d9e88ad20f630306fc8220b2779883ee7
|
15cd84c3dba9a14c2ef2dcc01a5b43bc72bc55db
|
/monitor/common/src/main/python/subscribe_pb2.py
|
cdd0b62cb713ac8ee39e64f0d865e76589b1e4fc
|
[] |
no_license
|
ktoso/dist-metrics
|
https://github.com/ktoso/dist-metrics
|
cb2caf7d0f8ad30306f9e638f4a656596078e90b
|
23b4b3f68250a2b2080b9a828b08a3cf7d3c595a
|
refs/heads/master
| 2021-01-25T08:59:53.047226 | 2012-10-15T22:40:51 | 2012-10-16T17:43:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
import common_pb2
DESCRIPTOR = descriptor.FileDescriptor(
name='subscribe.proto',
package='distmetrics',
serialized_pb='\n\x0fsubscribe.proto\x12\x0b\x64istmetrics\x1a\x0c\x63ommon.proto\"S\n\x10SubscribeRequest\x12\x12\n\nresourceId\x18\x01 \x02(\t\x12+\n\nmetricType\x18\x02 \x02(\x0e\x32\x17.distmetrics.MetricType\"J\n\x14SubscriptionResponse\x12\x16\n\x0esubscriptionId\x18\x01 \x02(\x03\x12\x0c\n\x04host\x18\x02 \x02(\t\x12\x0c\n\x04port\x18\x03 \x02(\x05\x42\'\n%pl.project13.distmetrics.common.proto')
_SUBSCRIBEREQUEST = descriptor.Descriptor(
name='SubscribeRequest',
full_name='distmetrics.SubscribeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='resourceId', full_name='distmetrics.SubscribeRequest.resourceId', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metricType', full_name='distmetrics.SubscribeRequest.metricType', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=46,
serialized_end=129,
)
_SUBSCRIPTIONRESPONSE = descriptor.Descriptor(
name='SubscriptionResponse',
full_name='distmetrics.SubscriptionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='subscriptionId', full_name='distmetrics.SubscriptionResponse.subscriptionId', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='host', full_name='distmetrics.SubscriptionResponse.host', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='port', full_name='distmetrics.SubscriptionResponse.port', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=131,
serialized_end=205,
)
_SUBSCRIBEREQUEST.fields_by_name['metricType'].enum_type = common_pb2._METRICTYPE
DESCRIPTOR.message_types_by_name['SubscribeRequest'] = _SUBSCRIBEREQUEST
DESCRIPTOR.message_types_by_name['SubscriptionResponse'] = _SUBSCRIPTIONRESPONSE
class SubscribeRequest(message.Message):
    """Generated protobuf message ``distmetrics.SubscribeRequest``.

    Required fields (per _SUBSCRIBEREQUEST): resourceId (string) and
    metricType (MetricType enum from common.proto).
    """
    __metaclass__ = reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _SUBSCRIBEREQUEST
    # @@protoc_insertion_point(class_scope:distmetrics.SubscribeRequest)
class SubscriptionResponse(message.Message):
    """Generated protobuf message ``distmetrics.SubscriptionResponse``.

    Required fields (per _SUBSCRIPTIONRESPONSE): subscriptionId (int64),
    host (string) and port (int32).
    """
    __metaclass__ = reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _SUBSCRIPTIONRESPONSE
    # @@protoc_insertion_point(class_scope:distmetrics.SubscriptionResponse)
# @@protoc_insertion_point(module_scope)
|
UTF-8
|
Python
| false | true | 2,012 |
10,746,008,189,943 |
1525a9595a78b167cd9f0d50ccd6284c17bb9191
|
e3695e7ba8f319df3c94927ee5cf5841486148f6
|
/plugins/datalogger.py
|
91d8b6e521b0a43d4f2622a55212e389df0445e7
|
[] |
no_license
|
dmsherazi/xbee-homeautomation
|
https://github.com/dmsherazi/xbee-homeautomation
|
e74556277c41e39ba11fde29a6699e835339f864
|
3b9cf1ed53d3badde588892011acc045ab29256b
|
refs/heads/master
| 2021-01-23T22:52:44.853895 | 2012-11-15T04:27:51 | 2012-11-15T04:27:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
import threading
import xh
from xh.protocol import Command, PIN
log = logging.getLogger('DataLogger')
class DataLogger(xh.Plugin):
    """
    Record every data-bearing frame via xh.datalogging.

    Covers automatically-sent samples (Data frames), explicit InputSample
    responses and InputVolts supply-voltage readings.  Values are logged
    raw (in volts or as booleans).  Supply voltage is not pushed by the
    radios, so it is polled actively on a timer.
    """

    _PRESENCE_PLUGIN_NAME = 'Presence'
    _POLL_INTERVAL_SEC = 5 * 60.0

    def __init__(self):
        xh.Plugin.__init__(self, receiveFrames=True)

    def activate(self):
        xh.Plugin.activate(self)
        # The Presence plugin tracks the local serial and remote radios.
        self.__presence = self.getPluginObjByName(
            self._PRESENCE_PLUGIN_NAME)
        self.__poll()

    def _frameReceived(self, frame):
        # Dispatch table; first matching type wins, mirroring the
        # precedence of an if/elif chain.
        handlers = (
            (xh.protocol.Data, self.__logData),
            (xh.protocol.InputSample, self.__logInputSample),
            (xh.protocol.InputVolts, self.__logInputVolts),
        )
        for frameType, handler in handlers:
            if isinstance(frame, frameType):
                handler(frame)
                break

    def __logData(self, frame):
        # Frames with no long source address come from the local radio.
        sourceSerial = (frame.getSourceAddressLong()
            or self.__presence.getLocalSerial())
        timestamp = frame.getTimestamp()
        for sample in frame.getSamples():
            xh.datalogging.logPinValue(sourceSerial, timestamp,
                sample.getPinName(), sample.getValue())

    def __logInputSample(self, frame):
        sourceSerial = (frame.getRemoteSerial()
            or self.__presence.getLocalSerial())
        for sample in frame.getSamples():
            xh.datalogging.logPinValue(sourceSerial, frame.getTimestamp(),
                sample.getPinName(), sample.getValue())

    def __logInputVolts(self, frame):
        sourceSerial = (frame.getRemoteSerial()
            or self.__presence.getLocalSerial())
        voltage = frame.getVolts()
        when = frame.getTimestamp()
        xh.datalogging.logPinValue(sourceSerial, when, PIN.VCC, voltage)

    def __poll(self):
        """
        Periodically request values that are not sent automatically.

        Actively polling supply voltage also works around a bug in
        xbee-python's parsing of Vcc voltages (see
        xh.protocol.data.VCC_BUG_URL).
        """
        # Re-arm the timer first so a send failure below cannot stop
        # future polls.
        nextPoll = threading.Timer(self._POLL_INTERVAL_SEC, self.__poll)
        nextPoll.daemon = True
        nextPoll.start()
        for serial in self.__presence.getRemoteSerials():
            xh.protocol.InputVolts(dest=serial).send()
|
UTF-8
|
Python
| false | false | 2,012 |
18,030,272,712,804 |
382ae3e24570d2ff88dbf6e2bb1ad89adfee352b
|
26e26e6fc5d542263321bc58031089a4637d9fea
|
/glymur/test/test_opj_suite_neg.py
|
75c9c6f7bf8b24a183f459cabe8558252e0f85bf
|
[
"MIT"
] |
permissive
|
wafels/glymur
|
https://github.com/wafels/glymur
|
8ab2ed322c5b504face4cd484a61dfbf08e3785e
|
938f40801d16c4bbfe9bedbf2d051ab3186a54de
|
refs/heads/master
| 2021-01-15T22:41:58.800462 | 2013-06-14T18:22:29 | 2013-06-14T18:22:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
The tests here do not correspond directly to the OpenJPEG test suite, but
seem like logical negative tests to add.
"""
import os
import sys
import tempfile
import unittest
import warnings
import numpy as np
import pkg_resources
from ..lib import openjp2 as opj2
# Need some combination of matplotlib, PIL, or scikits-image for reading
# other image formats.
no_read_backend = False
msg = "Either scikit-image with the freeimage backend or matplotlib "
msg += "with the PIL backend must be available in order to run the "
msg += "tests in this suite."
no_read_backend_msg = msg
try:
import skimage.io
try:
skimage.io.use_plugin('freeimage')
from skimage.io import imread
except ImportError:
try:
skimage.io.use_plugin('PIL')
from skimage.io import imread
except ImportError:
raise
except ImportError:
try:
from PIL import Image
from matplotlib.pyplot import imread
except ImportError:
no_read_backend = True
from glymur import Jp2k
import glymur
try:
data_root = os.environ['OPJ_DATA_ROOT']
except KeyError:
data_root = None
except:
raise
def read_image(infile):
    """Read *infile* into an array while silencing reader warnings.

    PIL issues warnings we do not care about, so they are suppressed for
    the duration of the read only.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        pixels = imread(infile)
    return pixels
@unittest.skipIf(no_read_backend, no_read_backend_msg)
@unittest.skipIf(data_root is None,
                 "OPJ_DATA_ROOT environment variable not set")
class TestSuiteNegative(unittest.TestCase):
    """Negative tests complementing the OpenJPEG suite.

    Skipped entirely unless an image-read backend is available and
    OPJ_DATA_ROOT points at the OpenJPEG test data.
    """

    def setUp(self):
        # Sample JP2 file shipped inside the glymur package itself.
        self.jp2file = pkg_resources.resource_filename(glymur.__name__,
                                                       "data/nemo.jp2")

    def tearDown(self):
        pass

    def test_negative_psnr_with_cratios(self):
        """psnr and cratios write options are mutually exclusive."""
        # Using psnr with cratios options is not allowed.
        # Not an OpenJPEG test, but close.
        infile = os.path.join(data_root, 'input/nonregression/Bretagne1.ppm')
        data = read_image(infile)
        with tempfile.NamedTemporaryFile(suffix='.j2k') as tfile:
            j = Jp2k(tfile.name, 'wb')
            with self.assertRaises(RuntimeError):
                j.write(data, psnr=[30, 35, 40], cratios=[2, 3, 4])

    @unittest.skipIf(sys.hexversion < 0x03020000,
                     "Uses features introduced in 3.2.")
    def test_NR_MarkerIsNotCompliant_j2k_dump(self):
        """A bad SOT offset should warn and stop codestream parsing."""
        # SOT marker gives bad offset.
        relpath = 'input/nonregression/MarkerIsNotCompliant.j2k'
        jfile = os.path.join(data_root, relpath)
        jp2k = Jp2k(jfile)
        with self.assertWarns(UserWarning) as cw:
            c = jp2k.get_codestream(header_only=False)
        # Verify that the last segment returned in the codestream is SOD,
        # not EOC.  Codestream parsing should stop when we try to jump to
        # the end of SOT.
        self.assertEqual(c.segment[-1].id, 'SOD')

    @unittest.skipIf(sys.hexversion < 0x03020000,
                     "Uses features introduced in 3.2.")
    def test_NR_illegalcolortransform_dump(self):
        """Same bad-SOT scenario with an illegal color transform file."""
        # SOT marker gives bad offset.
        relpath = 'input/nonregression/illegalcolortransform.j2k'
        jfile = os.path.join(data_root, relpath)
        jp2k = Jp2k(jfile)
        with self.assertWarns(UserWarning) as cw:
            c = jp2k.get_codestream(header_only=False)
        # Verify that the last segment returned in the codestream is SOD,
        # not EOC.  Codestream parsing should stop when we try to jump to
        # the end of SOT.
        self.assertEqual(c.segment[-1].id, 'SOD')

    @unittest.skipIf(sys.hexversion < 0x03020000,
                     "Uses features introduced in 3.2.")
    def test_NR_Cannotreaddatawithnosizeknown_j2k(self):
        """Same bad-SOT scenario with an unknown-size data file."""
        # SOT marker gives bad offset.
        relpath = 'input/nonregression/Cannotreaddatawithnosizeknown.j2k'
        jfile = os.path.join(data_root, relpath)
        jp2k = Jp2k(jfile)
        with self.assertWarns(UserWarning) as cw:
            c = jp2k.get_codestream(header_only=False)
        # Verify that the last segment returned in the codestream is SOD,
        # not EOC.  Codestream parsing should stop when we try to jump to
        # the end of SOT.
        self.assertEqual(c.segment[-1].id, 'SOD')

    def test_code_block_dimensions(self):
        """Code-block dimensions outside opj_compress limits are rejected."""
        # opj_compress doesn't allow the dimensions of a codeblock
        # to be too small or too big, so neither will we.
        data = np.zeros((256, 256), dtype=np.uint8)
        with tempfile.NamedTemporaryFile(suffix='.j2k') as tfile:
            j = Jp2k(tfile.name, 'wb')
            # opj_compress doesn't allow code block area to exceed 4096.
            with self.assertRaises(RuntimeError) as cr:
                j.write(data, cbsize=(256, 256))
            # opj_compress doesn't allow either dimension to be less than 4.
            with self.assertRaises(RuntimeError) as cr:
                j.write(data, cbsize=(2048, 2))
            with self.assertRaises(RuntimeError) as cr:
                j.write(data, cbsize=(2, 2048))

    @unittest.skipIf(sys.hexversion < 0x03020000,
                     "Uses features introduced in 3.2.")
    def test_exceeded_box(self):
        """Reading past the end of a box should issue a warning."""
        # Verify that a warning is issued if we read past the end of a box
        # This file has a palette (pclr) box whose length is impossibly
        # short.
        infile = os.path.join(data_root,
                              'input/nonregression/mem-b2ace68c-1381.jp2')
        with self.assertWarns(UserWarning) as cw:
            j = Jp2k(infile)

    def test_precinct_size_not_multiple_of_two(self):
        """Non-power-of-two precinct sizes are rejected."""
        # Seems like precinct sizes should be powers of two.
        ifile = Jp2k(self.jp2file)
        data = ifile.read(reduce=3)
        with tempfile.NamedTemporaryFile(suffix='.jp2') as tfile:
            ofile = Jp2k(tfile.name, 'wb')
            with self.assertRaises(IOError) as ce:
                ofile.write(data, psizes=[(13, 13)])

    def test_codeblock_size_not_multiple_of_two(self):
        """Non-power-of-two code-block sizes are rejected."""
        # Seems like code block sizes should be powers of two.
        ifile = Jp2k(self.jp2file)
        data = ifile.read(reduce=3)
        with tempfile.NamedTemporaryFile(suffix='.jp2') as tfile:
            ofile = Jp2k(tfile.name, 'wb')
            with self.assertRaises(IOError) as ce:
                ofile.write(data, cbsize=(13, 12))

    def test_codeblock_size_with_precinct_size(self):
        """Code blocks larger than half the precinct size are rejected."""
        # Seems like code block sizes should never exceed half that of
        # precinct size.
        ifile = Jp2k(self.jp2file)
        data = ifile.read(reduce=3)
        with tempfile.NamedTemporaryFile(suffix='.jp2') as tfile:
            ofile = Jp2k(tfile.name, 'wb')
            with self.assertRaises(IOError) as ce:
                ofile.write(data,
                            cbsize=(64, 64),
                            psizes=[(64, 64)])
if __name__ == "__main__":
unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
11,905,649,384,588 |
a4fd48d310c66eb4ad6d660da52601b4ed2efb0d
|
15b0f767a4a2893bbb0063dcc7f11df9f94e3d3a
|
/link_learning.py
|
16507c7b50a379ca5cb80d7f54773036299e40f9
|
[] |
no_license
|
jinpf/pox_custom_component
|
https://github.com/jinpf/pox_custom_component
|
0fefe6834d068681a75b3a60179fa3b204b86af7
|
9386012c06d94c6646591b5979ab063e6633a6ed
|
refs/heads/master
| 2021-01-25T07:35:07.281540 | 2014-06-28T01:16:50 | 2014-06-28T01:17:08 | 20,132,370 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: jinpf
# @Date: 2014-05-24 17:15:37
# @Last Modified by: jinpf
# @Last Modified time: 2014-05-26 15:47:36
# @Email: [email protected]
"""
# @comment here:
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.openflow.discovery import Discovery
from pox.lib.packet.arp import arp
from pox.lib.packet.ethernet import ethernet
from pox.lib.addresses import EthAddr
from pox.lib.recoco import Timer
import logging
Swich_Connect_Info={} #Swich_Connect_Info={dpid1:{dpid2:port1}}
IP_To_MAC={} #IP_To_MAC={IP:mac}
Host_Info={} #Host_Info={mac:(dpid,port,ip)} , record host direct connect switch
MAXINT=9999
Special_MAC=EthAddr('11:11:11:11:11:11')
#when don`t know ip->mac ,flood arp
def Arp_Flood(arp_packet):
    """Broadcast an ARP request on every connected switch.

    Used when the target IP of *arp_packet* is not yet in IP_To_MAC, so
    the real owner can answer and be learned.
    """
    # Rebuild the request with the original sender/target addresses.
    query = arp()
    query.hwsrc = arp_packet.hwsrc  # Special_MAC
    query.hwdst = arp_packet.hwdst  # EthAddr(b"\xff\xff\xff\xff\xff\xff")
    query.opcode = arp.REQUEST
    query.protosrc = arp_packet.protosrc
    query.protodst = arp_packet.protodst

    # Wrap it in a broadcast Ethernet frame marked with our special MAC
    # so our own floods can be recognized and ignored on packet-in.
    frame = ethernet()
    frame.type = ethernet.ARP_TYPE
    frame.dst = EthAddr(b"\xff\xff\xff\xff\xff\xff")
    frame.src = Special_MAC
    frame.payload = query

    # Flood it out of every switch.
    out = of.ofp_packet_out()
    out.data = frame.pack()
    out.actions.append(of.ofp_action_output(port=of.OFPP_FLOOD))
    for connection in core.openflow.connections:
        connection.send(out)
#when we know ip->mac ,reply
def Arp_Reply(event,arp_packet):
    """Answer an ARP request using the MAC already learned for the target IP.

    The reply is sent straight back out of the port the request arrived on.
    """
    learnedMac = IP_To_MAC[arp_packet.protodst]

    # Build the ARP reply with source/target swapped.
    answer = arp()
    answer.hwsrc = learnedMac
    answer.hwdst = arp_packet.hwsrc
    answer.opcode = arp.REPLY
    answer.protosrc = arp_packet.protodst
    answer.protodst = arp_packet.protosrc

    frame = ethernet()
    frame.type = ethernet.ARP_TYPE
    frame.dst = arp_packet.hwsrc
    frame.src = learnedMac
    frame.payload = answer

    # Return the created reply through the ingress port.
    out = of.ofp_packet_out()
    out.data = frame.pack()
    out.actions.append(of.ofp_action_output(port=of.OFPP_IN_PORT))
    out.in_port = event.port
    event.connection.send(out)
#compare rule,put in min(),data as tumple,data[1] stands for compare data,if data[0] in list L,data[1] won`t be counted when compare
def compare_rule(data,L):
    """min()-key helper: ``data[1]``, or MAXINT when ``data[0]`` is in *L*.

    Dijkstra uses this so that already-finalized switches (listed in *L*)
    can never win the min() over distances.
    """
    return MAXINT if data[0] in L else data[1]
#return start -> end path in list
def Dijkstra(start,end):
    """Shortest switch-to-switch path (every link costs 1).

    Reads the module-level Swich_Connect_Info adjacency map and returns
    the path from *start* to *end* as a list of switch dpids, endpoints
    included.
    """
    path={}     # shortest path (dpid list) from start to each node
    dist={}     # best known distance from start to each node
    Switches=Swich_Connect_Info.keys()
    Used_Sw=[start]     # nodes whose distance is final
    # initialise: everything unreachable except start itself
    for sw in Switches:
        path[sw]=[]
        dist[sw]=MAXINT
    mstart=start    # node whose edges are relaxed this iteration
    path[start]=[start]
    dist[start]=0
    while(len(Used_Sw)<len(Switches)):
        # relax every neighbour of the current node
        for sw in Swich_Connect_Info[mstart]:
            if sw not in Used_Sw:
                if dist[sw] >dist[mstart]+1:
                    dist[sw]=dist[mstart]+1
                    path[sw]=path[mstart]+[sw]
        # pick the closest not-yet-finalized node (compare_rule masks
        # finalized ones with MAXINT)
        min_sw=min(dist.items(), key=lambda x: compare_rule(x,Used_Sw)) [0]
        Used_Sw.append(min_sw)
        mstart=min_sw
    print 'path :',start,'->',end,' : ',path[end]
    return path[end]
#install flow on path switch
def Install_Path_Flow(src,dst,event=None):
    """Install bidirectional flows along the shortest path between two hosts.

    *src*/*dst* are host MACs already present in Host_Info.  When *event*
    (the triggering PacketIn) is given, the packet itself is also sent out
    along the path so it is not lost while the flows settle.
    """
    print 'Install path flow -----------------------------'
    start_sw=Host_Info[src][0]
    end_sw=Host_Info[dst][0]
    path=Dijkstra(start_sw,end_sw)
    # install a flow pair on every switch along the path
    for i in range(len(path)):
        # flow for src->dst
        msg1=of.ofp_flow_mod()
        msg1.match.dl_dst=dst
        msg1.match.dl_src=src
        if i==len(path)-1: # last node: deliver on the host-facing port
            msg1.actions.append(of.ofp_action_output( port=Host_Info[dst][1] ) )
        else:
            msg1.actions.append(of.ofp_action_output( port=Swich_Connect_Info[ path[i] ][ path[i+1] ] ))
        core.openflow.sendToDPID(path[i],msg1)
        # flow for dst->src
        msg2=of.ofp_flow_mod()
        msg2.match.dl_dst=src
        msg2.match.dl_src=dst
        if i==0: # first node: deliver on the host-facing port
            msg2.actions.append(of.ofp_action_output( port=Host_Info[src][1] ) )
        else:
            msg2.actions.append(of.ofp_action_output( port=Swich_Connect_Info[ path[i] ][ path[i-1] ] ))
        core.openflow.sendToDPID(path[i],msg2)
    # forward the triggering packet from the switch that reported it
    if event !=None:
        i=path.index(event.dpid)
        msg=of.ofp_packet_out(data=event.data)
        if i==len(path)-1:
            msg.actions.append(of.ofp_action_output(port=Host_Info[dst][1]))
        else:
            msg.actions.append(of.ofp_action_output(port=Swich_Connect_Info[ path[i] ][ path[i+1] ] ))
        msg.in_port=event.port
        event.connection.send(msg)
# def _handle_timer(message):
# pass
class Link_Learning(object):
    """POX component: learns switch links and host locations, answers ARP
    and installs shortest-path flows (see Install_Path_Flow)."""
    def __init__(self):
        core.openflow.addListeners(self)
        core.openflow_discovery.addListeners(self)
    def _handle_LinkEvent(self,event):
        """Keep the Swich_Connect_Info adjacency map in sync with discovery."""
        dpid1=event.link[0]
        dpid2=event.link[2]
        if event.added is True:
            if dpid1 not in Swich_Connect_Info:
                Swich_Connect_Info[dpid1]={}
            if dpid2 not in Swich_Connect_Info:
                Swich_Connect_Info[dpid2]={}
            if Swich_Connect_Info[dpid1].get(dpid2) is None:
                # record the outgoing port on each end of the new link
                Swich_Connect_Info[dpid1][dpid2]=event.link[1]
                Swich_Connect_Info[dpid2][dpid1]=event.link[3]
        if event.removed is True:
            if Swich_Connect_Info[dpid1].get(dpid2) is not None:
                del Swich_Connect_Info[dpid1][dpid2]
                del Swich_Connect_Info[dpid2][dpid1]
                # TODO: flows already installed over this link are not
                # rerouted here
        # print Swich_Connect_Info
    def _handle_ConnectionUp(self,event):
        pass
    def _handle_ConnectionDown(self,event):
        # forget a switch that disconnected
        del Swich_Connect_Info[event.dpid]
    def _handle_PacketIn(self,event):
        """Learn the source host from ARP, answer/flood ARP requests, and
        install flows once both endpoints are known."""
        packet = event.parsed
        if packet.src!=Special_MAC: # ignore packets we injected ourselves
            if packet.find("arp"):
                arp_packet=packet.find("arp")
                # print 'switch:',event.dpid,'packet_in:','arp_packet:',arp_packet
                # learn where the source host is attached
                Host_Info[packet.src]=(event.dpid,event.port,arp_packet.protosrc)
                IP_To_MAC[arp_packet.protosrc]=packet.src
                if arp_packet.opcode==arp.REQUEST:
                    if arp_packet.protodst in IP_To_MAC:
                        Arp_Reply(event,arp_packet)
                    else:
                        Arp_Flood(arp_packet)
            if (packet.src in Host_Info) and (packet.dst in Host_Info):
                Install_Path_Flow(packet.src,packet.dst,event)
            # if packet.find('ipv4'):
            # ip_packet=packet.find('ipv4')
            # print 'switch:',event.dpid,'packet_in:','ip:',ip_packet
        # print 'switch:',event.dpid,'packet_in:',packet
        # print 'IP to MAC',IP_To_MAC
        # print 'Host_Info:',Host_Info
        # print
def launch():
    """POX launch entry point: start link discovery, then this component."""
    # silence the very chatty packet-parsing logger
    core.getLogger("packet").setLevel(logging.ERROR)
    core.registerNew(Discovery,explicit_drop=False,install_flow = False)
    core.registerNew(Link_Learning)
    # Timer(30,_handle_timer,recurring=True,args=["Timer1 come!])
|
UTF-8
|
Python
| false | false | 2,014 |
644,245,120,582 |
6269d37d0fe2c056723c6f71df9c850dc81dd66c
|
023f7dc61aaa75ac1b9d4a5a63b3e35e9c0c4f9c
|
/p16/p16.py
|
9f088a39ec54d06ee8be2681ac86f6662bc9f055
|
[] |
no_license
|
JoseTomasTocino/tuenti-challenge-2012
|
https://github.com/JoseTomasTocino/tuenti-challenge-2012
|
23d0519fd8192b0d55ab8cef1dbc84e86b2f48fa
|
20fa081d49db0571e4093b6ecbb13cdd9508ae15
|
refs/heads/master
| 2021-01-22T07:27:35.474194 | 2012-05-06T17:11:48 | 2012-05-06T17:11:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 José Tomás Tocino García <[email protected]>
# Autor: José Tomás Tocino García
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
import sys
threshold = 0.5
learning_rate = 0.1
training_set = [((1, 0, 0), 1), ((1, 0, 1), 1), ((1, 1, 0), 1), ((1, 1, 1), 0)]
def sum_function(values, weights):
    """Dot product of *values* with the matching prefix of *weights*.

    Raises IndexError if *weights* is shorter than *values*, exactly like
    the original index-based formulation.
    """
    total = 0
    for idx, val in enumerate(values):
        total += weights[idx] * val
    return total
def main():
    """Classify unknown call records with a single-layer perceptron (Python 2).

    Reads from stdin: the number of labelled records, the number of
    unlabelled records, the vector length, then one whitespace-separated
    record per line.  Labelled records start with "S" (label 1) followed
    by the integer vector.  Trains a perceptron on the labelled records,
    then prints the summed entries of every unlabelled record whose
    weighted activation is negative.
    """
    numKnownRep = int(sys.stdin.readline())
    numUnknownRep = int(sys.stdin.readline())
    numTotalCalls = int(sys.stdin.readline())
    baseCases = []
    newCases = []
    # For each known case
    for i in range(numKnownRep):
        # Read the case: a leading "S" marks label 1, the rest is the vector.
        V = filter(None, sys.stdin.readline().strip().split())
        S = 1 if V[0] == "S" else 0
        V = map(int, V[1:])
        # Add it to the base of knowledge
        baseCases.append( (S, V) )
    # For each unknown case
    for i in range(numUnknownRep):
        # Read the case
        V = map(int, filter(None, sys.stdin.readline().strip().split()))
        # Add it to the base of knowledge
        newCases.append( V )
    # Perceptron from http://en.wikipedia.org/wiki/Perceptron
    weights = [0] * numTotalCalls
    # Classic perceptron training: full passes over the data until an
    # error-free epoch.  Assumes the labelled data is linearly separable,
    # otherwise this loop never terminates.
    while True:
        error_count = 0
        for desired_output, input_vector in baseCases:
            result = 1 if sum_function(input_vector, weights) > threshold else 0
            error = desired_output - result
            if error != 0:
                error_count += 1
                for index, value in enumerate(input_vector):
                    weights[index] += learning_rate * error * value
        if error_count == 0:
            break
    sss = 0
    for c in newCases:
        # Negative activation => the "0" class; accumulate its call counts.
        if sum(map(lambda x : x[0] * x[1], zip(c,weights))) < 0:
            sss += sum(c)
    print sss
# Script entry point.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
17,102,559,808,625 |
845ac0185c3dbc557fe1df83d4e0382689c5da52
|
ca36f4eff957163697e3146b34f2f320ec8e931b
|
/Matches/views.py
|
d0e274643fb9c1a6567d8684cc4e534a0d9bcd1e
|
[] |
no_license
|
sanke93/Infosmos
|
https://github.com/sanke93/Infosmos
|
bd4fa4364f6f6ba8d62e5be4c193b9ed59372ad1
|
32687ae52654ff289534a7d5f3df42291b4ed375
|
refs/heads/master
| 2020-04-25T11:52:17.097851 | 2014-05-07T20:54:45 | 2014-05-07T20:54:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render
from User_Profile.models import Skill, Desire, UserProfile
from Matches.models import Matches
from django.http import HttpResponse,HttpResponseRedirect
from django.template import RequestContext, Context, loader
from django.shortcuts import render_to_response
from User_Profile.forms import UserForm, UserProfileForm
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.core.context_processors import csrf
from User_Profile.models import Skill, Desire
# Create your views here.
def match(request):
    """Matching view -- appears unfinished.

    NOTE(review): the body never returns an HttpResponse (Django views
    must); ``context`` and ``matched`` are assigned but never used, and the
    bare ``match`` on the last line is a no-op reference to this function
    itself.  Confirm intended behaviour before wiring this endpoint up.
    """
    context = RequestContext(request)
    matched = False
    if request.method == 'POST':
        match
@login_required
def matches(request):
    """Render matches.html with every Matches row whose user1 is the
    currently authenticated user."""
    current_user = None
    if request.user.is_authenticated():
        current_user = request.user
    user_matches = Matches.objects.filter(user1=current_user)
    return render_to_response('matches.html', {'matches': user_matches})
|
UTF-8
|
Python
| false | false | 2,014 |
19,602,230,761,106 |
283bf63d6572895d88a661767e22db56151d3871
|
f72fa67f589b11fda7fc8ad2672da63c83655766
|
/app/views.py
|
d2ad4cf16b84b38e951d4bc97445a1e546ade042
|
[] |
no_license
|
tyrion/cms-bug
|
https://github.com/tyrion/cms-bug
|
ae2d8492ec14be617e9552b532010fa6e3a90443
|
7db705d522d569e17121637c7fc51ae8eccce5d4
|
refs/heads/master
| 2020-08-05T23:28:27.885349 | 2013-11-28T11:59:44 | 2013-11-28T11:59:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.http import HttpResponse
def index(request):
    """Return a plain-text response with the body 'index'."""
    body = 'index'
    return HttpResponse(body)
def bug(request):
    """Return a plain-text response with the body 'bug'."""
    response = HttpResponse('bug')
    return response
|
UTF-8
|
Python
| false | false | 2,013 |
19,396,072,324,188 |
6159059da1bb4278aac7189533053a667646105c
|
6fd2cb39410f2215b6b7a6973a87849e35ca38a3
|
/sub/log.py
|
347c4275087477148d6708ac9cecb00c5d156cea
|
[] |
no_license
|
fruch/django-subcenter
|
https://github.com/fruch/django-subcenter
|
eb342da4d5311f2271f3222a9258ee32a664ccef
|
c7459b22fa9f04239da308e98363964a18a0133a
|
refs/heads/master
| 2020-05-16T04:00:52.988185 | 2010-10-02T19:15:06 | 2010-10-02T19:15:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# stdlib
import threading
import logging
# django
from django.conf import settings
import imdb
#===============================================================================
# Globals
#===============================================================================
# Per-thread storage so each thread runs logger configuration at most once.
_LOCALS = threading.local()
def get_logger():
    """Return the root logger, configured to write to settings.LOG_FILE.

    The configured logger object is cached per-thread.  Bug fixed: because
    ``logging.getLogger()`` returns the one *shared* root logger while the
    cache is thread-local, every thread's first call used to attach yet
    another FileHandler, duplicating each log line once per thread.  The
    handler is now attached only if the root logger has none.
    """
    logger = getattr(_LOCALS, 'logger', None)
    if logger is not None:
        return logger
    logger = logging.getLogger()
    # Attach the file handler only once per process, not once per thread.
    if not logger.handlers:
        hdlr = logging.FileHandler(settings.LOG_FILE)
        formatter = logging.Formatter(
            '[%(asctime)s]%(levelname)-8s - %(funcName)s - %(lineno)d - %(message)s',
            '%Y-%m-%d %a %H:%M:%S')
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)
    logger.setLevel(getattr(settings, 'LOG_LEVEL', logging.NOTSET))
    setattr(_LOCALS, 'logger', logger)
    # Mirror the level onto IMDbPY's private logging hook, as before.
    imdb._logging.setLevel(getattr(settings, 'LOG_LEVEL', logging.NOTSET))
    return logger
|
UTF-8
|
Python
| false | false | 2,010 |
10,934,986,737,288 |
fa683b2a0c45c63ecbf2df7029c6489527564fdc
|
662e8edf696ff1273edfac63eae95a4037bbfa79
|
/exercise4.7.py
|
ad4e38965a08b53ad6458cf4211fdfcd11e65902
|
[] |
no_license
|
asimkaleem/untitled
|
https://github.com/asimkaleem/untitled
|
e68cbc59fad17a797489bb391dc88c59d3e1138e
|
dc37f08e86038a623f74f29a2013123ab56df94b
|
refs/heads/master
| 2021-01-23T07:16:54.623165 | 2014-11-26T13:19:05 | 2014-11-26T13:19:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Exercise 4.7 Rewrite the grade program from the previous chapter using a func-
# tion called computegrade that takes a score as its parameter and returns a grade
# as a string.
# Read the score from the user (Python 2 raw_input).  Any parse failure
# (non-numeric text, EOF) yields a non-numeric sentinel so computegrade()
# falls through to "Bad Score".
try:
    score = float(raw_input("Please enter score between 0.0 and 1.0: "))
except:
    score = "Not A Number"
def computegrade(score_to_grade):
    """Map a numeric score in [0.0, 1.0] to a letter grade.

    Returns "F" for [0.0, 0.6), "D" for [0.6, 0.7), "C" for [0.7, 0.8),
    "B" for [0.8, 0.9), "A" for [0.9, 1.0], and "Bad Score" for anything
    else (out-of-range numbers or non-numeric input).

    Bug fixed: the original compared the *global* ``score`` instead of the
    ``score_to_grade`` parameter, so the argument was silently ignored.
    """
    try:
        if not 0.0 <= score_to_grade <= 1.0:
            return "Bad Score"
        if score_to_grade < 0.6:
            return "F"
        elif score_to_grade < 0.7:
            return "D"
        elif score_to_grade < 0.8:
            return "C"
        elif score_to_grade < 0.9:
            return "B"
        else:
            return "A"
    except TypeError:
        # Non-numeric input, e.g. the "Not A Number" sentinel set above.
        return "Bad Score"
# Grade the score read above and report it (Python 2 print statement).
result = computegrade(score)
print result
|
UTF-8
|
Python
| false | false | 2,014 |
5,901,285,086,040 |
a18bee298df461b31d664221c7942d2b06900cc0
|
ab4699a688c0708f1940e6066616b0475c3b88d0
|
/wktopdf/api.py
|
b6ece45ded19a98ec9b34d460f0861688712b091
|
[
"MIT"
] |
permissive
|
b4stien/wktopdf
|
https://github.com/b4stien/wktopdf
|
1425fc053ccc72c5af38eb9de03709ab4c84f63e
|
5e748adf4eac3f2dc90e838baf093d6c558cd2d3
|
refs/heads/master
| 2020-05-16T00:48:31.829921 | 2014-10-20T21:23:06 | 2014-10-20T21:23:11 | 25,079,326 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import six
from wktopdf.core import UnprocessedWebkitPdf
def from_html(html_content, encoding='utf-8'):
    """Build a PDF job from an HTML unicode string.

    Raises TypeError if *html_content* is not text; the content is
    encoded with *encoding* before being handed to the renderer.
    """
    if isinstance(html_content, six.text_type):
        job = UnprocessedWebkitPdf()
        job.set('web.defaultEncoding', 'utf-8')
        return job.process_html_content(html_content.encode(encoding))
    raise TypeError('`wktopdf.api.from_html()` is expecting unicode data.')
def from_url(url):
    """Build a PDF job that renders the page at *url*."""
    return UnprocessedWebkitPdf().process_url(url)
|
UTF-8
|
Python
| false | false | 2,014 |
17,489,106,837,334 |
60a0d0edd5c8fafd7cb7a73c726f1a730ae3386a
|
20d8e934d2ca1ec361425aae5e84a1e176434ab1
|
/src/splashscreen.py
|
e25d145a91b5257ff9dddd96b4bc1dc7b05504a8
|
[
"CC0-1.0",
"CC-BY-3.0",
"GPL-3.0-only"
] |
non_permissive
|
4nickpick/Stellar
|
https://github.com/4nickpick/Stellar
|
2ca6e3b2dc18d01bd1396940e84855c6592c8fc9
|
737aee4c6d9a1ca296e9ac56f50b90c99bc1984b
|
refs/heads/master
| 2021-05-28T20:19:38.733496 | 2013-07-15T02:41:03 | 2013-07-15T02:41:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2012, 2013 Emilio Coppola
#
# This file is part of Stellar.
#
# Stellar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Stellar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
from PyQt4 import QtCore, QtGui
import os, webbrowser
import cfg
from dialogs import NewProjectDialog
import sys
# On Python 2, rebind ``str`` to ``unicode`` so all GUI strings are text.
if sys.version_info.major == 2:
    str = unicode
class Start(QtGui.QWidget):
    """Splash/start-up window for Stellar.

    Shows a banner plus Welcome / New Project / Open Project / Release
    Notes tabs, and forwards the chosen action to the main window object
    passed in as ``main``.
    """

    def __init__(self, main,parent=None):
        super(Start, self).__init__(parent)
        # Reference back to the main application window.
        self.main = main
        self.initUI()

    def initUI(self):
        """Build the whole splash UI (banner, tabs, buttons) and show it."""
        pic = QtGui.QLabel(self)
        pic.setGeometry(12, 10, 500, 145)
        pic.setPixmap(QtGui.QPixmap(os.path.join("Data", "stellarsplash.png")))
        #TABS ----------------
        self.tab_widget = QtGui.QTabWidget()
        tab1 = QtGui.QWidget()
        tab2 = QtGui.QWidget()
        tab3 = QtGui.QWidget()
        tab4 = QtGui.QWidget()
        p1_vertical = QtGui.QVBoxLayout(tab1)
        p2_vertical = QtGui.QVBoxLayout(tab2)
        p3_vertical = QtGui.QVBoxLayout(tab3)
        p4_vertical = QtGui.QVBoxLayout(tab4)
        self.tab_widget.addTab(tab4, "Welcome")
        self.tab_widget.addTab(tab1, "New Project")
        self.tab_widget.addTab(tab2, "Open Project")
        self.tab_widget.addTab(tab3, "Release Notes")
        vbox = QtGui.QVBoxLayout()
        vbox.addStretch(1)
        vbox.addWidget(self.tab_widget)
        self.setLayout(vbox)
        self.welcomwidget = QtGui.QWidget(self)
        #RECENT FILE for disable/enable open recent button --
        self.recentp = cfg.recentproject
        #-------------
        self.buttonnew = QtGui.QPushButton("New Project")
        self.buttonnew.setIcon(QtGui.QIcon(os.path.join('Data', 'new.png')))
        self.buttonnew.clicked.connect(self.newbutton)
        self.buttonrec = QtGui.QPushButton("Open last project (%s)" % os.path.basename(cfg.recentproject))
        self.buttonrec.setIcon(QtGui.QIcon(os.path.join('Data', 'folder.png')))
        # Disable "open last" when no recent project is recorded on disk.
        if self.recentp == "" or not os.path.isfile(self.recentp):
            self.buttonrec.setDisabled(True)
        self.buttonrec.clicked.connect(self.openlastproject)
        self.buttonwebsite = QtGui.QPushButton("Stellar Website")
        self.buttonwebsite.setIcon(QtGui.QIcon(os.path.join('Data', 'home.png')))
        self.buttonwebsite.clicked.connect(self.openwebsite)
        self.Spacer = QtGui.QLabel(' ')
        self.Spacer1 = QtGui.QLabel(' ')
        self.grid1 = QtGui.QGridLayout()
        self.grid1.setSpacing(15)
        self.grid1.addWidget(self.Spacer, 1, 0)
        self.grid1.addWidget(self.buttonnew, 1, 1)
        self.grid1.addWidget(self.buttonwebsite, 3, 1)
        self.grid1.addWidget(self.buttonrec, 2, 1)
        self.grid1.addWidget(self.Spacer1, 3, 3)
        p4_vertical.addLayout(self.grid1)
        #-----------------
        # "New Project" tab: name + folder inputs with a browse button.
        self.name = QtGui.QLabel('Project Name: ')
        self.nameEdit = QtGui.QLineEdit()
        self.pathname = QtGui.QLabel('Project Folder: ')
        self.pathEdit = QtGui.QLineEdit()
        self.browsebtn = QtGui.QPushButton("...")
        QtCore.QObject.connect(self.browsebtn, QtCore.SIGNAL('clicked()'), self.ChooseFolder)
        #Projects Folder-------------------------
        self.dirname = ''
        self.btn_New = QtGui.QPushButton('Create \nNew File', self)
        self.btn_New.setGeometry(25, 75, 100, 50)
        QtCore.QObject.connect(self.btn_New, QtCore.SIGNAL('clicked()'), self.CreateProject)
        self.grid = QtGui.QGridLayout()
        self.grid.setSpacing(15)
        self.grid.addWidget(self.name, 2, 0)
        self.grid.addWidget(self.nameEdit, 2, 1)
        self.grid.addWidget(self.pathname, 3, 0)
        self.grid.addWidget(self.pathEdit, 3, 1)
        self.grid.addWidget(self.browsebtn, 3, 2)
        self.grid.addWidget(self.btn_New, 4, 1)
        p1_vertical.addLayout(self.grid)
        self.btn_Open = QtGui.QPushButton('Open \nExisting File', self)
        self.btn_Open.setGeometry(150, 75, 100, 50)
        QtCore.QObject.connect(self.btn_Open, QtCore.SIGNAL('clicked()'), self.OpenFile)
        p2_vertical.addWidget(self.btn_Open)
        # "Release Notes" tab: read-only text box filled from file.
        self.te = QtGui.QTextEdit()
        f = open(os.path.join("Data", "releasenotes.txt"), 'r')
        with f:
            data = f.read()
            self.te.setText(data)
            f.close()  # redundant: the with-statement already closes f
        self.te.setReadOnly (True)
        self.te.setMaximumSize(475,120)
        p3_vertical.addWidget(self.te)
        #Window-----------------
        self.setWindowTitle('Stellar - %s' % cfg.__version__)
        self.setWindowIcon(QtGui.QIcon(os.path.join('Data', 'icon.png')))
        self.resize(500,350)
        self.setMinimumSize(500,350)
        self.setMaximumSize(500,350)
        self.center()
        self.show()

    def center(self):
        """Move the window to the center of the available screen area."""
        qr = self.frameGeometry()
        cp = QtGui.QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())

    def ChooseFolder(self):
        """Open a directory picker and copy the choice into the path field."""
        dir = str(QtGui.QFileDialog.getExistingDirectory(self, "Select Directory of project"))
        self.dirname = dir
        self.pathEdit.setText(dir)
        self.pathEdit.setCursorPosition(0)

    def CreateProject(self):
        """Create <folder>/<name> as a new project, or offer to open it if
        it already exists."""
        name = str(self.nameEdit.text())
        path = str(self.pathEdit.text())
        dirname = os.path.join(path, name)
        # NOTE(review): ``self.name`` is the QLabel, which is always truthy;
        # this condition probably meant the local ``name`` -- confirm.
        if self.name != "" or path != "":
            if not os.path.exists(dirname) and not os.path.isfile(os.path.join(dirname, name)):
                os.mkdir(dirname)
                self.main.dirname = dirname
                self.main.fname = name
                self.main.createProject(dirname, name)
                self.main.clearSources()
                self.close()
            else:
                reply = QtGui.QMessageBox.question(self, "Already Exists",
                    "That Project already exists. Do you want to open it?",
                    QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
                if reply == QtGui.QMessageBox.Yes:
                    self.OpenFile(dirname, name)

    def openwebsite(self):
        """Open the Stellar homepage in the default browser."""
        webbrowser.open("http://stellarpygame.blogspot.com")

    def newbutton(self):
        """Switch to the "New Project" tab."""
        self.tab_widget.setCurrentIndex(1)

    def openlastproject(self):
        """Open the most recently used project recorded in cfg, if it
        still exists on disk."""
        if not os.path.exists(os.path.dirname(cfg.recentproject)):
            QtGui.QMessageBox.information(self, "Project doesn't exist",
                "This project doesn't exist or has been removed",
                QtGui.QMessageBox.Ok)
            return
        #RECENT FILE--
        self.recentp = cfg.recentproject
        self.dirname = os.path.dirname(self.recentp)
        path = os.path.join(self.dirname, self.recentp)
        self.main.openProject(path)
        self.close()

    def OpenFile(self, dirname = None, name = None):
        """Open an existing project, either the one given (when called from
        CreateProject) or via the main window's file dialog."""
        # check if we opens existing file from CreateProject function
        if dirname != None and name != None:
            path = os.path.join(dirname, name)
            self.main.openProject(path)
        else:
            self.main.openProject()
        self.close()
|
UTF-8
|
Python
| false | false | 2,013 |
506,806,159,887 |
28b1127a66dd6c66f6366db20512d62b7748eae3
|
1a947463bd03f9b7c4b655820de73a1f7be3cce0
|
/gifts_with_friends.py
|
9b7438bdd61a434ca163be1ada39e119818b4fba
|
[
"MIT"
] |
permissive
|
steven-gomez/secret-santa
|
https://github.com/steven-gomez/secret-santa
|
6ee725d4f53619248445a1f668c29601d5a09247
|
8a1792c3229fa57d2e8ba95647bb1ba956c39d00
|
refs/heads/master
| 2021-01-01T19:24:28.396345 | 2014-11-06T04:30:25 | 2014-11-06T04:30:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""
gifts_with_friends.py: Match up friends randomly for secret shopping.
"""
__author__ = "Steven Gomez"
import smtplib
from random import shuffle
from email.mime.text import MIMEText
import gifts_cfg
def verify_no_fixed_points(list_a, list_b, debug=False):
    """Return True iff list_a[i] != list_b[i] for every index i.

    Both lists must be the same length (asserted).  Prints a diagnostic
    for the first fixed point found; with debug=True both lists are also
    printed.  Idiom cleanup: index loop replaced with enumerate/zip, and
    prints written in the single-argument parenthesized form that behaves
    identically on Python 2 and 3.
    """
    assert len(list_a) == len(list_b)
    for i, (a, b) in enumerate(zip(list_a, list_b)):
        if a == b:
            print('Found fixed point at position ' + str(i))
            if debug:
                print('%s %s' % (list_a, list_b))
            return False
    print('No fixed points')
    if debug:
        print('%s %s' % (list_a, list_b))
    return True
def send_gift_mail(to_email, shop_for, price, date, gmail_usr, gmail_pwd):
    """Send email with match/shopping details.

    Notify a person who his match is, what price to spend, the date. Uses
    Google's SMTP server with STARTTLS.

    Bug fixed: the post-STARTTLS ``server.ehlo`` was a bare attribute
    reference (missing parentheses), so the second EHLO -- required after
    upgrading the connection to TLS -- was never actually sent.
    """
    text_body = "You are " + shop_for + "\'s secret santa. " + \
        "Please get them something nice for around $"+str(price)+". " + \
        "Gifts will be exchanged on "+date+".\n\nHo Ho Ho!"
    msg = MIMEText(text_body)
    reply_email = 'Santa Claus <[email protected]>'
    msg['Subject'] = 'Randomized Secret Santa results'
    msg['From'] = reply_email
    msg['To'] = to_email
    server = smtplib.SMTP("smtp.gmail.com",587)
    server.ehlo()
    server.starttls()
    server.ehlo()  # re-identify after STARTTLS, per smtplib/RFC 3207
    server.login(gmail_usr, gmail_pwd)
    server.sendmail(reply_email, [to_email], msg.as_string())
    server.quit()
def shuffle_and_notify(kids, gmail_usr, gmail_pwd, price, date):
    """Notify friends about gift match-ups.

    Duplicate and shuffle the list of kids. Keep shuffling until no one is
    matched up with herself, i.e., the ith person in the kids list does not
    match the ith person in the shuffled list, for all i. Then email each
    person their match.  (Python 2: ``kids.keys()`` returns a list.)
    """
    print '\n--- Gifts With Friends! ---'
    shuffled = kids.keys()
    shuffle(shuffled)
    tries = 1
    # Retry until the permutation is a derangement (no self-matches).
    while not verify_no_fixed_points(kids.keys(), shuffled):
        shuffle(shuffled)
        tries = tries + 1
    print 'Shuffle succeeded after ' + str(tries) + ' tries'
    # Loop through kids and look up paired person in shuffled list at
    # corresponding index.
    # NOTE(review): relies on kids.keys() and kids.values() enumerating in
    # the same order across calls -- guaranteed while the dict is not
    # modified, but pairing via kids.items() would be more robust.
    for i in range(len(kids.keys())):
        shopper_email = kids.values()[i]
        receiver_name = shuffled[i]
        send_gift_mail(shopper_email, receiver_name, price, date, gmail_usr, gmail_pwd)
        print 'Email sent to ' + shopper_email
    print 'All emails sent. Ho Ho Ho!\n'
def main():
    """Load configuration and run shuffle_and_notify."""
    shuffle_and_notify(
        gifts_cfg.kids,
        gifts_cfg.gmail_usr,
        gifts_cfg.gmail_pwd,
        gifts_cfg.price,
        gifts_cfg.date,
    )
# Script entry point.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
7,911,329,762,437 |
3933c22ef7fca7995b5cb73aab1f8b67f135e1a8
|
13805becca416a402ebc3237df7d345c955c5e67
|
/main.py
|
a7aff98992de3a94c1cb7e7ae525963aa801121b
|
[] |
no_license
|
kols/excel-diff-program
|
https://github.com/kols/excel-diff-program
|
98237e134f38656f1e00259c8c3ea0ea54ae3109
|
89d7369ed1462430b51c047d22048efc6f379545
|
refs/heads/master
| 2015-08-05T14:38:26.424198 | 2011-07-29T09:01:23 | 2011-07-29T09:02:47 | 2,106,860 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import shutil
import os.path
import Tkinter as T
from tkFileDialog import askopenfilename
from tkMessageBox import showerror, showinfo
from difflib import Differ
import xlutils.copy
from xlrd import open_workbook
# Marker written into the fourth column of rows whose contents differ.
WRONG_CHAR = 'W'
class Cardiff(object):
    """Row-by-row comparison of two .xls files via difflib.

    Tracks whether either file was already marked (contains WRONG_CHAR
    cells) through the ``old`` flag.
    """
    def __init__(self, fromfile, tofile):
        """Remember the two file paths to compare.

        fromfile/tofile: paths to the two .xls workbooks.
        """
        self.fromfile = fromfile
        self.tofile = tofile
        # Set to True by _parse_xls() when a WRONG_CHAR marker is seen,
        # i.e. the file was produced by a previous comparison run.
        self.old = False

    def _parse_xls(self, xls_file):
        """Read the first sheet of *xls_file* into a list of row-value
        lists, skipping WRONG_CHAR marker cells (and flagging ``old``)."""
        wb = open_workbook(xls_file)
        values = []
        sheet = wb.sheets()[0]
        for row in range(sheet.nrows):
            row_val = []
            for col in range(sheet.ncols):
                cell_val = sheet.cell(row, col).value
                if cell_val == WRONG_CHAR:
                    if not self.old:
                        self.old = True
                    continue
                row_val.append(cell_val)
            values.append(row_val)
        return values

    def diff(self):
        """Compare both files row by row.

        Returns (per-row difflib outputs, row-count difference); a nonzero
        second element means the files have different numbers of rows.
        """
        fromdata = self._parse_xls(self.fromfile)
        todata = self._parse_xls(self.tofile)
        d = Differ()
        result = []
        for (fd, td) in zip(fromdata, todata):
            # use map(str) to make sure that all the values are strings.
            # join operation will not work with non-strings
            fd_string = ''.join(map(str, fd))
            td_string = ''.join(map(str, td))
            result.append(list(d.compare(fd_string, td_string)))
        return (result, len(fromdata) - len(todata))

    def parse_diff(self, diff_res):
        """Turn diff() output into ([1-based row numbers that differ],
        rows_equal_flag).

        NOTE(review): indentation reconstructed -- the handling of
        single-line diff entries (len(entry) <= 1) should be confirmed
        against the original file.
        """
        diff_row = []
        row = 1
        for entry in diff_res[0]:
            if len(entry) <= 1:
                if entry[0][0] == ' ':
                    row += 1
                continue
            diff_row.append(str(row))
            row += 1
        row_eq = True if diff_res[-1] == 0 else False
        return (diff_row, row_eq)
class Window(object):
    """Tkinter front-end for the Cardiff comparison: two file inputs, a
    confirm button, and message boxes for the results."""
    def __init__(self):
        """Create the root window (UI widgets are built in draw_gui)."""
        self.fromfile = ''
        self.tofile = ''
        self.root = T.Tk()
        self.root.title('手机卡密码比对程序')

    def _openfilename(self, which):
        """Return a callback that opens a file dialog and writes the chosen
        path into text widget *which* (0 or 1)."""
        def func():
            file_path = askopenfilename()
            if file_path:
                self.text_widget[which].delete("1.0", T.END)
                self.text_widget[which].insert("1.0", file_path)
        return func

    def _confirm(self):
        """Validate the two paths, run the comparison, mark differing rows
        with WRONG_CHAR in both workbooks (backing them up first), and
        report results via message boxes."""
        def _check_writable(file):
            # NOTE(review): the parameter ``file`` is unused -- the body
            # reads the enclosing loop variable ``f`` instead.  It happens
            # to be the same value at every call site; confirm and rename.
            try:
                open(f, 'a').close()
            except IOError:
                showerror('错误', '请关闭其他打开文件的程序后再试一次')
                return False
            return True
        self.fromfile = self.text_widget[0].get("1.0", T.END).strip()
        self.tofile = self.text_widget[1].get("1.0", T.END).strip()
        if (not self.fromfile or not self.tofile or not
                os.path.exists(self.fromfile) or not os.path.exists(self.tofile)):
            showerror('错误', '请输入正确的文件路径')
        else:
            cd = Cardiff(self.fromfile, self.tofile)
            diff_res = cd.diff()
            wrong_rows, row_eq = cd.parse_diff(diff_res)
            if not row_eq:
                showerror('错误', '两个文件行数不一致')
            elif wrong_rows:
                # Differences found: back up each file, then write the
                # WRONG_CHAR marker into column 4 of every differing row.
                for f in [self.fromfile, self.tofile]:
                    if not _check_writable(f):
                        return
                    shutil.copy2(f, f + '.bak')
                    wb = xlutils.copy.copy(open_workbook(f))
                    sheet = wb.get_sheet(0)
                    for row in wrong_rows:
                        sheet.row(int(row) - 1).write(3, WRONG_CHAR)
                    sheet.flush_row_data()
                    wb.save(f)
                showerror('完成', '比对完成,错误数:\n' +
                          str(len(wrong_rows)))
            else:
                # No differences: clear stale markers left by earlier runs.
                if cd.old:
                    for f in [self.fromfile, self.tofile]:
                        if not _check_writable(f):
                            return
                        wb = xlutils.copy.copy(open_workbook(f))
                        sheet = wb.get_sheet(0)
                        for row in sheet.rows.values():
                            if row.get_cells_count() == 4:
                                row.set_cell_blank(3)
                        sheet.flush_row_data()
                        wb.save(f)
                showinfo('完成', '比对完成,全部正确')

    def _draw_input_widgets(self, which):
        """Build one labelled path-entry row with its browse button."""
        label_text = '第二个文件' if which else '第一个文件'
        container = T.Frame(self.root)
        container.pack(side=T.TOP, expand=T.YES)
        T.Label(container, text=label_text).pack(side=T.LEFT, expand=T.YES)
        self.text_widget[which] = T.Text(container, height=1, width=50)
        self.text_widget[which].pack(side=T.LEFT, expand=T.YES)
        T.Button(container, text='选择', command=self._openfilename(which)
                 ).pack(side=T.LEFT, expand=T.YES)

    def _make_menu(self, window):
        """Attach a File menu (with an exit entry) to *window*."""
        top_menu = T.Menu(window)
        window.config(menu=top_menu)
        file_menu = T.Menu(top_menu)
        file_menu.add_command(label='退出', command=window.quit, underline=0)
        top_menu.add_cascade(label='文件', menu=file_menu)

    def draw_gui(self):
        """Assemble the window and enter the Tk main loop (blocks)."""
        # Make menu
        self._make_menu(self.root)
        # Choose file widgets
        self.text_widget = {0: '', 1: ''}
        for i in range(2):
            self._draw_input_widgets(i)
        # Confirm button
        T.Button(self.root, text='确定',
                 command=self._confirm).pack(side=T.TOP,
                                             expand=T.YES)
        self.root.mainloop()
# Script entry point: build the GUI and block in the Tk main loop.
if __name__ == '__main__':
    Window().draw_gui()
|
UTF-8
|
Python
| false | false | 2,011 |
970,662,631,757 |
acaaab150277c8902ab6fada36d6f990d9bc3cdd
|
0b842bcb3bf20e1ce628d39bf7e11abd7699baf9
|
/oscar/a/api/yang/modules/tech/common/qwilt_tech_interfaces/tech/interfaces/interface/connectivity_check/ipv6/counters/counters_oper_data_gen.py
|
8b63ecc899b470a6a0d91596cb9711beff8c0a13
|
[] |
no_license
|
afeset/miner2-tools
|
https://github.com/afeset/miner2-tools
|
75cc8cdee06222e0d81e39a34f621399e1ceadee
|
81bcc74fe7c0ca036ec483f634d7be0bab19a6d0
|
refs/heads/master
| 2016-09-05T12:50:58.228698 | 2013-08-27T21:09:56 | 2013-08-27T21:09:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright Qwilt, 2011
#
# The code contained in this file may not be used by any other entities without explicit written permission from Qwilt.
#
# Author: auto-generated
# Must be set here to avoid stupid warnings about stuff in BlinkyOperData
# (suppresses pychecker's class-attribute checks for this generated module).
__pychecker__="no-classattr"
from a.infra.misc.enum_with_value import EnumWithValue
from a.infra.basic.return_codes import ReturnCodes
import socket
class CountersOperData (object):
    def __init__ (self):
        """Initialize all eight counters to zero with their generated
        ``has`` (value present) and ``requested`` flags cleared."""
        self.neighborDiscoveryFailures = 0
        self._myHasNeighborDiscoveryFailures=False
        self._myNeighborDiscoveryFailuresRequested=False
        self.neighborDiscoveryTimeouts = 0
        self._myHasNeighborDiscoveryTimeouts=False
        self._myNeighborDiscoveryTimeoutsRequested=False
        self.neighborDiscoverySuccesses = 0
        self._myHasNeighborDiscoverySuccesses=False
        self._myNeighborDiscoverySuccessesRequested=False
        self.pingRequestsSent = 0
        self._myHasPingRequestsSent=False
        self._myPingRequestsSentRequested=False
        self.pingFailures = 0
        self._myHasPingFailures=False
        self._myPingFailuresRequested=False
        self.pingSuccesses = 0
        self._myHasPingSuccesses=False
        self._myPingSuccessesRequested=False
        self.neighborDiscoveryRequestsSent = 0
        self._myHasNeighborDiscoveryRequestsSent=False
        self._myNeighborDiscoveryRequestsSentRequested=False
        self.pingTimeouts = 0
        self._myHasPingTimeouts=False
        self._myPingTimeoutsRequested=False
    def copyFrom (self, other):
        """Copy every counter value and its has/requested flags from
        *other* unconditionally."""
        self.neighborDiscoveryFailures=other.neighborDiscoveryFailures
        self._myHasNeighborDiscoveryFailures=other._myHasNeighborDiscoveryFailures
        self._myNeighborDiscoveryFailuresRequested=other._myNeighborDiscoveryFailuresRequested
        self.neighborDiscoveryTimeouts=other.neighborDiscoveryTimeouts
        self._myHasNeighborDiscoveryTimeouts=other._myHasNeighborDiscoveryTimeouts
        self._myNeighborDiscoveryTimeoutsRequested=other._myNeighborDiscoveryTimeoutsRequested
        self.neighborDiscoverySuccesses=other.neighborDiscoverySuccesses
        self._myHasNeighborDiscoverySuccesses=other._myHasNeighborDiscoverySuccesses
        self._myNeighborDiscoverySuccessesRequested=other._myNeighborDiscoverySuccessesRequested
        self.pingRequestsSent=other.pingRequestsSent
        self._myHasPingRequestsSent=other._myHasPingRequestsSent
        self._myPingRequestsSentRequested=other._myPingRequestsSentRequested
        self.pingFailures=other.pingFailures
        self._myHasPingFailures=other._myHasPingFailures
        self._myPingFailuresRequested=other._myPingFailuresRequested
        self.pingSuccesses=other.pingSuccesses
        self._myHasPingSuccesses=other._myHasPingSuccesses
        self._myPingSuccessesRequested=other._myPingSuccessesRequested
        self.neighborDiscoveryRequestsSent=other.neighborDiscoveryRequestsSent
        self._myHasNeighborDiscoveryRequestsSent=other._myHasNeighborDiscoveryRequestsSent
        self._myNeighborDiscoveryRequestsSentRequested=other._myNeighborDiscoveryRequestsSentRequested
        self.pingTimeouts=other.pingTimeouts
        self._myHasPingTimeouts=other._myHasPingTimeouts
        self._myPingTimeoutsRequested=other._myPingTimeoutsRequested
    def copyRequestedFrom (self, other):
        """
        Copy from *other* only the leaves & descendants that are marked
        as requested in ***self***.
        """
        if self.isNeighborDiscoveryFailuresRequested():
            self.neighborDiscoveryFailures=other.neighborDiscoveryFailures
            self._myHasNeighborDiscoveryFailures=other._myHasNeighborDiscoveryFailures
            self._myNeighborDiscoveryFailuresRequested=other._myNeighborDiscoveryFailuresRequested
        if self.isNeighborDiscoveryTimeoutsRequested():
            self.neighborDiscoveryTimeouts=other.neighborDiscoveryTimeouts
            self._myHasNeighborDiscoveryTimeouts=other._myHasNeighborDiscoveryTimeouts
            self._myNeighborDiscoveryTimeoutsRequested=other._myNeighborDiscoveryTimeoutsRequested
        if self.isNeighborDiscoverySuccessesRequested():
            self.neighborDiscoverySuccesses=other.neighborDiscoverySuccesses
            self._myHasNeighborDiscoverySuccesses=other._myHasNeighborDiscoverySuccesses
            self._myNeighborDiscoverySuccessesRequested=other._myNeighborDiscoverySuccessesRequested
        if self.isPingRequestsSentRequested():
            self.pingRequestsSent=other.pingRequestsSent
            self._myHasPingRequestsSent=other._myHasPingRequestsSent
            self._myPingRequestsSentRequested=other._myPingRequestsSentRequested
        if self.isPingFailuresRequested():
            self.pingFailures=other.pingFailures
            self._myHasPingFailures=other._myHasPingFailures
            self._myPingFailuresRequested=other._myPingFailuresRequested
        if self.isPingSuccessesRequested():
            self.pingSuccesses=other.pingSuccesses
            self._myHasPingSuccesses=other._myHasPingSuccesses
            self._myPingSuccessesRequested=other._myPingSuccessesRequested
        if self.isNeighborDiscoveryRequestsSentRequested():
            self.neighborDiscoveryRequestsSent=other.neighborDiscoveryRequestsSent
            self._myHasNeighborDiscoveryRequestsSent=other._myHasNeighborDiscoveryRequestsSent
            self._myNeighborDiscoveryRequestsSentRequested=other._myNeighborDiscoveryRequestsSentRequested
        if self.isPingTimeoutsRequested():
            self.pingTimeouts=other.pingTimeouts
            self._myHasPingTimeouts=other._myHasPingTimeouts
            self._myPingTimeoutsRequested=other._myPingTimeoutsRequested
    def copySetFrom (self, other):
        """
        Copy into self only the leaves & descendants whose ``has`` flag
        is set in ***other***.
        """
        if other.hasNeighborDiscoveryFailures():
            self.neighborDiscoveryFailures=other.neighborDiscoveryFailures
            self._myHasNeighborDiscoveryFailures=other._myHasNeighborDiscoveryFailures
            self._myNeighborDiscoveryFailuresRequested=other._myNeighborDiscoveryFailuresRequested
        if other.hasNeighborDiscoveryTimeouts():
            self.neighborDiscoveryTimeouts=other.neighborDiscoveryTimeouts
            self._myHasNeighborDiscoveryTimeouts=other._myHasNeighborDiscoveryTimeouts
            self._myNeighborDiscoveryTimeoutsRequested=other._myNeighborDiscoveryTimeoutsRequested
        if other.hasNeighborDiscoverySuccesses():
            self.neighborDiscoverySuccesses=other.neighborDiscoverySuccesses
            self._myHasNeighborDiscoverySuccesses=other._myHasNeighborDiscoverySuccesses
            self._myNeighborDiscoverySuccessesRequested=other._myNeighborDiscoverySuccessesRequested
        if other.hasPingRequestsSent():
            self.pingRequestsSent=other.pingRequestsSent
            self._myHasPingRequestsSent=other._myHasPingRequestsSent
            self._myPingRequestsSentRequested=other._myPingRequestsSentRequested
        if other.hasPingFailures():
            self.pingFailures=other.pingFailures
            self._myHasPingFailures=other._myHasPingFailures
            self._myPingFailuresRequested=other._myPingFailuresRequested
        if other.hasPingSuccesses():
            self.pingSuccesses=other.pingSuccesses
            self._myHasPingSuccesses=other._myHasPingSuccesses
            self._myPingSuccessesRequested=other._myPingSuccessesRequested
        if other.hasNeighborDiscoveryRequestsSent():
            self.neighborDiscoveryRequestsSent=other.neighborDiscoveryRequestsSent
            self._myHasNeighborDiscoveryRequestsSent=other._myHasNeighborDiscoveryRequestsSent
            self._myNeighborDiscoveryRequestsSentRequested=other._myNeighborDiscoveryRequestsSentRequested
        if other.hasPingTimeouts():
            self.pingTimeouts=other.pingTimeouts
            self._myHasPingTimeouts=other._myHasPingTimeouts
            self._myPingTimeoutsRequested=other._myPingTimeoutsRequested
    def copyDataFrom (self, other):
        """
        Copy all counter values and their ``has`` flags from ***other***,
        leaving this object's ``requested`` flags unchanged.
        """
        self.neighborDiscoveryFailures=other.neighborDiscoveryFailures
        self._myHasNeighborDiscoveryFailures=other._myHasNeighborDiscoveryFailures
        self.neighborDiscoveryTimeouts=other.neighborDiscoveryTimeouts
        self._myHasNeighborDiscoveryTimeouts=other._myHasNeighborDiscoveryTimeouts
        self.neighborDiscoverySuccesses=other.neighborDiscoverySuccesses
        self._myHasNeighborDiscoverySuccesses=other._myHasNeighborDiscoverySuccesses
        self.pingRequestsSent=other.pingRequestsSent
        self._myHasPingRequestsSent=other._myHasPingRequestsSent
        self.pingFailures=other.pingFailures
        self._myHasPingFailures=other._myHasPingFailures
        self.pingSuccesses=other.pingSuccesses
        self._myHasPingSuccesses=other._myHasPingSuccesses
        self.neighborDiscoveryRequestsSent=other.neighborDiscoveryRequestsSent
        self._myHasNeighborDiscoveryRequestsSent=other._myHasNeighborDiscoveryRequestsSent
        self.pingTimeouts=other.pingTimeouts
        self._myHasPingTimeouts=other._myHasPingTimeouts
    def setAllNumericToZero (self):
        """Zero every counter and mark each one's ``has`` flag as set."""
        self.neighborDiscoveryFailures=0
        self.setHasNeighborDiscoveryFailures()
        self.neighborDiscoveryTimeouts=0
        self.setHasNeighborDiscoveryTimeouts()
        self.neighborDiscoverySuccesses=0
        self.setHasNeighborDiscoverySuccesses()
        self.pingRequestsSent=0
        self.setHasPingRequestsSent()
        self.pingFailures=0
        self.setHasPingFailures()
        self.pingSuccesses=0
        self.setHasPingSuccesses()
        self.neighborDiscoveryRequestsSent=0
        self.setHasNeighborDiscoveryRequestsSent()
        self.pingTimeouts=0
        self.setHasPingTimeouts()
        pass
    def subtractAllNumericHas (self, other):
        """
        Subtract *other*'s counters from self's, per counter, but only
        where the ``has`` flag is set on **both** objects.
        """
        if self.hasNeighborDiscoveryFailures():
            if other.hasNeighborDiscoveryFailures():
                self.neighborDiscoveryFailures -= other.neighborDiscoveryFailures
        if self.hasNeighborDiscoveryTimeouts():
            if other.hasNeighborDiscoveryTimeouts():
                self.neighborDiscoveryTimeouts -= other.neighborDiscoveryTimeouts
        if self.hasNeighborDiscoverySuccesses():
            if other.hasNeighborDiscoverySuccesses():
                self.neighborDiscoverySuccesses -= other.neighborDiscoverySuccesses
        if self.hasPingRequestsSent():
            if other.hasPingRequestsSent():
                self.pingRequestsSent -= other.pingRequestsSent
        if self.hasPingFailures():
            if other.hasPingFailures():
                self.pingFailures -= other.pingFailures
        if self.hasPingSuccesses():
            if other.hasPingSuccesses():
                self.pingSuccesses -= other.pingSuccesses
        if self.hasNeighborDiscoveryRequestsSent():
            if other.hasNeighborDiscoveryRequestsSent():
                self.neighborDiscoveryRequestsSent -= other.neighborDiscoveryRequestsSent
        if self.hasPingTimeouts():
            if other.hasPingTimeouts():
                self.pingTimeouts -= other.pingTimeouts
        pass
def addAllNumericHas (self, other):
    """
    Only *numeric* members with *has* flag set to on will be added.
    (Docstring fixed: the original said "subtracted" — a copy/paste
    from subtractAllNumericHas; the code below adds.)
    """
    if self.hasNeighborDiscoveryFailures():
        if other.hasNeighborDiscoveryFailures():
            self.neighborDiscoveryFailures += other.neighborDiscoveryFailures
    if self.hasNeighborDiscoveryTimeouts():
        if other.hasNeighborDiscoveryTimeouts():
            self.neighborDiscoveryTimeouts += other.neighborDiscoveryTimeouts
    if self.hasNeighborDiscoverySuccesses():
        if other.hasNeighborDiscoverySuccesses():
            self.neighborDiscoverySuccesses += other.neighborDiscoverySuccesses
    if self.hasPingRequestsSent():
        if other.hasPingRequestsSent():
            self.pingRequestsSent += other.pingRequestsSent
    if self.hasPingFailures():
        if other.hasPingFailures():
            self.pingFailures += other.pingFailures
    if self.hasPingSuccesses():
        if other.hasPingSuccesses():
            self.pingSuccesses += other.pingSuccesses
    if self.hasNeighborDiscoveryRequestsSent():
        if other.hasNeighborDiscoveryRequestsSent():
            self.neighborDiscoveryRequestsSent += other.neighborDiscoveryRequestsSent
    if self.hasPingTimeouts():
        if other.hasPingTimeouts():
            self.pingTimeouts += other.pingTimeouts
    pass
# has...() methods
# Presence accessors: True once the corresponding leaf has been assigned.
def hasNeighborDiscoveryFailures (self):
    return self._myHasNeighborDiscoveryFailures
def hasNeighborDiscoveryTimeouts (self):
    return self._myHasNeighborDiscoveryTimeouts
def hasNeighborDiscoverySuccesses (self):
    return self._myHasNeighborDiscoverySuccesses
def hasPingRequestsSent (self):
    return self._myHasPingRequestsSent
def hasPingFailures (self):
    return self._myHasPingFailures
def hasPingSuccesses (self):
    return self._myHasPingSuccesses
def hasNeighborDiscoveryRequestsSent (self):
    return self._myHasNeighborDiscoveryRequestsSent
def hasPingTimeouts (self):
    return self._myHasPingTimeouts
# setHas...() methods
# Mark a leaf as present without touching its value.
def setHasNeighborDiscoveryFailures (self):
    self._myHasNeighborDiscoveryFailures=True
def setHasNeighborDiscoveryTimeouts (self):
    self._myHasNeighborDiscoveryTimeouts=True
def setHasNeighborDiscoverySuccesses (self):
    self._myHasNeighborDiscoverySuccesses=True
def setHasPingRequestsSent (self):
    self._myHasPingRequestsSent=True
def setHasPingFailures (self):
    self._myHasPingFailures=True
def setHasPingSuccesses (self):
    self._myHasPingSuccesses=True
def setHasNeighborDiscoveryRequestsSent (self):
    self._myHasNeighborDiscoveryRequestsSent=True
def setHasPingTimeouts (self):
    self._myHasPingTimeouts=True
# isRequested...() methods
# Query whether a leaf was requested (e.g. for a partial read).
def isNeighborDiscoveryFailuresRequested (self):
    return self._myNeighborDiscoveryFailuresRequested
def isNeighborDiscoveryTimeoutsRequested (self):
    return self._myNeighborDiscoveryTimeoutsRequested
def isNeighborDiscoverySuccessesRequested (self):
    return self._myNeighborDiscoverySuccessesRequested
def isPingRequestsSentRequested (self):
    return self._myPingRequestsSentRequested
def isPingFailuresRequested (self):
    return self._myPingFailuresRequested
def isPingSuccessesRequested (self):
    return self._myPingSuccessesRequested
def isNeighborDiscoveryRequestsSentRequested (self):
    return self._myNeighborDiscoveryRequestsSentRequested
def isPingTimeoutsRequested (self):
    return self._myPingTimeoutsRequested
# setRequested...() methods
# Mark a leaf as requested.
def setNeighborDiscoveryFailuresRequested (self):
    self._myNeighborDiscoveryFailuresRequested=True
def setNeighborDiscoveryTimeoutsRequested (self):
    self._myNeighborDiscoveryTimeoutsRequested=True
def setNeighborDiscoverySuccessesRequested (self):
    self._myNeighborDiscoverySuccessesRequested=True
def setPingRequestsSentRequested (self):
    self._myPingRequestsSentRequested=True
def setPingFailuresRequested (self):
    self._myPingFailuresRequested=True
def setPingSuccessesRequested (self):
    self._myPingSuccessesRequested=True
def setNeighborDiscoveryRequestsSentRequested (self):
    self._myNeighborDiscoveryRequestsSentRequested=True
def setPingTimeoutsRequested (self):
    self._myPingTimeoutsRequested=True
def __str__ (self):
    """One-line rendering of the oper-data.

    Every leaf is rendered; a leaf that was requested is prefixed with
    "+<Name>=", and a leaf whose value was never set prints as <UNSET>.
    NOTE(review): indentation was reconstructed from a whitespace-stripped
    dump; nesting mirrors debugStr below — confirm against the generator.
    """
    __pychecker__='maxlines=1000'
    items=[]
    x=""
    if self._myNeighborDiscoveryFailuresRequested:
        x = "+NeighborDiscoveryFailures="
    if self._myHasNeighborDiscoveryFailures:
        leafStr = str(self.neighborDiscoveryFailures)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myNeighborDiscoveryTimeoutsRequested:
        x = "+NeighborDiscoveryTimeouts="
    if self._myHasNeighborDiscoveryTimeouts:
        leafStr = str(self.neighborDiscoveryTimeouts)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myNeighborDiscoverySuccessesRequested:
        x = "+NeighborDiscoverySuccesses="
    if self._myHasNeighborDiscoverySuccesses:
        leafStr = str(self.neighborDiscoverySuccesses)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myPingRequestsSentRequested:
        x = "+PingRequestsSent="
    if self._myHasPingRequestsSent:
        leafStr = str(self.pingRequestsSent)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myPingFailuresRequested:
        x = "+PingFailures="
    if self._myHasPingFailures:
        leafStr = str(self.pingFailures)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myPingSuccessesRequested:
        x = "+PingSuccesses="
    if self._myHasPingSuccesses:
        leafStr = str(self.pingSuccesses)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myNeighborDiscoveryRequestsSentRequested:
        x = "+NeighborDiscoveryRequestsSent="
    if self._myHasNeighborDiscoveryRequestsSent:
        leafStr = str(self.neighborDiscoveryRequestsSent)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    x=""
    if self._myPingTimeoutsRequested:
        x = "+PingTimeouts="
    if self._myHasPingTimeouts:
        leafStr = str(self.pingTimeouts)
    else:
        leafStr = "<UNSET>"
    items.append(x + leafStr)
    return "{CountersOperData: "+",".join(items)+"}"
def debugStr (self, includeRequested=False):
    """Verbose rendering of the oper-data for debugging.

    Unlike __str__, every leaf is always labeled; when *includeRequested*
    is True each leaf is additionally tagged "(requested)" or
    "(not-requested)".  Unset leaves print as <UNSET>.
    """
    __pychecker__='maxlines=1000 maxbranches=100'
    items=[]
    x=""
    x = "+NeighborDiscoveryFailures="
    if self._myHasNeighborDiscoveryFailures:
        leafStr = str(self.neighborDiscoveryFailures)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myNeighborDiscoveryFailuresRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+NeighborDiscoveryTimeouts="
    if self._myHasNeighborDiscoveryTimeouts:
        leafStr = str(self.neighborDiscoveryTimeouts)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myNeighborDiscoveryTimeoutsRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+NeighborDiscoverySuccesses="
    if self._myHasNeighborDiscoverySuccesses:
        leafStr = str(self.neighborDiscoverySuccesses)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myNeighborDiscoverySuccessesRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+PingRequestsSent="
    if self._myHasPingRequestsSent:
        leafStr = str(self.pingRequestsSent)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myPingRequestsSentRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+PingFailures="
    if self._myHasPingFailures:
        leafStr = str(self.pingFailures)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myPingFailuresRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+PingSuccesses="
    if self._myHasPingSuccesses:
        leafStr = str(self.pingSuccesses)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myPingSuccessesRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+NeighborDiscoveryRequestsSent="
    if self._myHasNeighborDiscoveryRequestsSent:
        leafStr = str(self.neighborDiscoveryRequestsSent)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myNeighborDiscoveryRequestsSentRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    x=""
    x = "+PingTimeouts="
    if self._myHasPingTimeouts:
        leafStr = str(self.pingTimeouts)
    else:
        leafStr = "<UNSET>"
    requestedStr = ''
    if includeRequested:
        if self._myPingTimeoutsRequested:
            requestedStr = ('(requested)')
        else:
            requestedStr = ('(not-requested)')
    items.append(x + leafStr + requestedStr)
    return "{CountersOperData: "+",".join(items)+"}"
def setAllRequested(self):
    """Mark every leaf of this oper-data object as requested."""
    # Same eight calls as the original, driven from a tuple so the leaf
    # list is visible at a glance.  Order is preserved.
    for markRequested in (self.setNeighborDiscoveryFailuresRequested,
                          self.setNeighborDiscoveryTimeoutsRequested,
                          self.setNeighborDiscoverySuccessesRequested,
                          self.setPingRequestsSentRequested,
                          self.setPingFailuresRequested,
                          self.setPingSuccessesRequested,
                          self.setNeighborDiscoveryRequestsSentRequested,
                          self.setPingTimeoutsRequested):
        markRequested()
# Value setters: each assigns the leaf and marks it present in one call.
def setNeighborDiscoveryFailures (self, neighborDiscoveryFailures):
    self.neighborDiscoveryFailures = neighborDiscoveryFailures
    self.setHasNeighborDiscoveryFailures()
def setNeighborDiscoveryTimeouts (self, neighborDiscoveryTimeouts):
    self.neighborDiscoveryTimeouts = neighborDiscoveryTimeouts
    self.setHasNeighborDiscoveryTimeouts()
def setNeighborDiscoverySuccesses (self, neighborDiscoverySuccesses):
    self.neighborDiscoverySuccesses = neighborDiscoverySuccesses
    self.setHasNeighborDiscoverySuccesses()
def setPingRequestsSent (self, pingRequestsSent):
    self.pingRequestsSent = pingRequestsSent
    self.setHasPingRequestsSent()
def setPingFailures (self, pingFailures):
    self.pingFailures = pingFailures
    self.setHasPingFailures()
def setPingSuccesses (self, pingSuccesses):
    self.pingSuccesses = pingSuccesses
    self.setHasPingSuccesses()
def setNeighborDiscoveryRequestsSent (self, neighborDiscoveryRequestsSent):
    self.neighborDiscoveryRequestsSent = neighborDiscoveryRequestsSent
    self.setHasNeighborDiscoveryRequestsSent()
def setPingTimeouts (self, pingTimeouts):
    self.pingTimeouts = pingTimeouts
    self.setHasPingTimeouts()
"""
Extracted from the below data:
{
"node": {
"className": "CountersOperData",
"namespace": "counters",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.connectivity_check.ipv6.counters.counters_oper_data_gen import CountersOperData"
},
"ancestors": [
{
"namespace": "tech",
"isCurrent": false
},
{
"namespace": "interfaces",
"isCurrent": false
},
{
"namespace": "interface",
"isCurrent": false
},
{
"namespace": "connectivity_check",
"isCurrent": false
},
{
"namespace": "ipv6",
"isCurrent": false
},
{
"namespace": "counters",
"isCurrent": true
}
],
"descendants": [],
"conditionalDebugName": null,
"leaves": [
{
"typeHandler": "handler: IntHandler",
"memberName": "neighborDiscoveryFailures",
"yangName": "neighbor-discovery-failures",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "neighborDiscoveryTimeouts",
"yangName": "neighbor-discovery-timeouts",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "neighborDiscoverySuccesses",
"yangName": "neighbor-discovery-successes",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "pingRequestsSent",
"yangName": "ping-requests-sent",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "pingFailures",
"yangName": "ping-failures",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "pingSuccesses",
"yangName": "ping-successes",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "neighborDiscoveryRequestsSent",
"yangName": "neighbor-discovery-requests-sent",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
},
{
"typeHandler": "handler: IntHandler",
"memberName": "pingTimeouts",
"yangName": "ping-timeouts",
"object": "",
"leafrefPath": null,
"defaultVal": null,
"hasDefaultRef": false
}
],
"module": {},
"env": {
"namespaces": [
"a",
"api",
"yang",
"modules",
"tech",
"common",
"qwilt_tech_interfaces"
]
},
"createTime": "2013"
}
"""
|
UTF-8
|
Python
| false | false | 2,013 |
10,187,662,466,254 |
b87ab69cd9dbcc691b26db71b0263016b3bd1974
|
963df165b2ac3ef1386b2a34b7afa3f0d13eee45
|
/zipcode/urls.py
|
cce892c8c0cbbd1687f48f01cd1e50895e461a83
|
[] |
no_license
|
yen223/zipcode-malaysia
|
https://github.com/yen223/zipcode-malaysia
|
dbf8ad7cd3bac686f80e8f92dfc74417f678ce3d
|
6541719d6c0c4030fe5dd3802ccceac15baa35fe
|
refs/heads/master
| 2016-09-10T15:01:34.396952 | 2014-03-04T19:40:56 | 2014-03-04T19:40:56 | 17,029,055 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from django.contrib import admin
from zipcode.api.views import StreetFinder, CityFinder
from rest_framework import routers
router = routers.DefaultRouter()
# router.register(r'city2', CityFinder)

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'zipcodesearch.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # url(r'^admin/', include(admin.site.urls)),
    url(r'^api/city/(?P<zipcode>.*?)/$', CityFinder.as_view(), name='city_finder_url'),
    url(r'^api/street/(?P<zipcode>.*?)/$', StreetFinder.as_view(), name='street_finder_url'),
    url(r'^api/docs/', include('rest_framework_swagger.urls')),
    # Bug fix: a regex handed to include() must not be anchored with '$'
    # (it was r'^api/$'); with the anchor, none of the included router
    # sub-URLs could ever match.
    url(r'^api/', include(router.urls)),
    url(r'^$', TemplateView.as_view(template_name="zipcode/index.html")),
)
|
UTF-8
|
Python
| false | false | 2,014 |
16,904,991,299,287 |
b7f05b70fe72345f854213d700d51b9e041838ab
|
f2682798bdda53a034a2c38539cdb178646b3ed0
|
/mysite/urls.py
|
3a71bde1e098a278e63c440ab6b6c8dcdcc338ed
|
[] |
no_license
|
chu888chu888/Python-SAE-mysite
|
https://github.com/chu888chu888/Python-SAE-mysite
|
86ea63cb325c9fa118b386c84445ea716d702a16
|
11af363b310434eb485959b05a235e5e64c7f9ad
|
refs/heads/master
| 2016-08-03T03:38:13.502244 | 2013-01-18T09:01:43 | 2013-01-18T09:01:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.views.generic.simple import direct_to_template as direct
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URL routes for the site.  Note: the exact pattern r'^feed/$' appears
# BEFORE the r'^feed/' include, so the include only ever serves feed
# sub-paths — presumably intentional; verify against feedback.urls.
urlpatterns = patterns('',
    # Examples:
    # Static and media files served by Django itself (dev-only pattern).
    url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_PATH, 'show_indexes':True}),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes':True}),
    url(r'^$', direct, {'template':'index.html'}),
    url(r'^about/$', direct, {'template':'about.html'}),
    url(r'^feed/$', 'mysite.feedback.views.show_feed_page'),
    url(r'^design/$', direct, {'template':'design.html'}),
    url(r'^feed/', include('mysite.feedback.urls')),
    # url(r'^mysite/', include('mysite.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
|
UTF-8
|
Python
| false | false | 2,013 |
13,013,750,918,242 |
c11b3eb9be44f6524853f314b88e41c42b14f196
|
96571602db800a736f147d640ff1a082e3cfd8f8
|
/app/models.py
|
f87463acb9f769498c44ca52355e2aacf248cf58
|
[] |
no_license
|
evanye/scraper_factory
|
https://github.com/evanye/scraper_factory
|
459949f9388973a4e63f9b5b46162c6371b09988
|
e727a9715bdf94bb934272d9073607e6761a349c
|
refs/heads/master
| 2021-01-13T02:08:29.966134 | 2013-07-14T04:26:46 | 2013-07-14T04:26:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from app import db
import simplejson as json
class Scraper(db.Model):
    """A configured scraper: name, target URL, scheduled time, and a
    JSON-encoded parameter blob, with a one-to-many relation to Data rows."""
    id = db.Column(db.Integer, primary_key = True)
    name = db.Column(db.String(32), index = True, unique = True)
    url = db.Column(db.String(512), index = True)
    time = db.Column(db.Time)
    # Parameters are stored serialized as JSON text.
    params = db.Column(db.String(4096))
    dataset = db.relationship('Data', backref = 'dataset', lazy = 'dynamic')

    def set_params(self, param_hash):
        """Serialize and store *param_hash* as JSON."""
        self.params = json.dumps(param_hash)

    def get_params(self):
        """Deserialize and return the stored parameter dict."""
        return json.loads(self.params)

    def __repr__(self):
        # Bug fix: the original format string was missing the closing '>'.
        return "<API {0}, URL {1}, time {2}, params {3}>".format(self.name, self.url, self.time, self.params)
class Data(db.Model):
    """One scrape result: a timestamped JSON payload linked to its Scraper."""
    id = db.Column(db.Integer, primary_key = True)
    timestamp = db.Column(db.DateTime, index = True)
    # Scraped payload stored serialized as JSON text.
    data = db.Column(db.String(100000))
    scraper = db.Column(db.Integer, db.ForeignKey('scraper.id'))

    def set_data(self, data_hash):
        """Serialize and store *data_hash* as JSON."""
        self.data = json.dumps(data_hash)

    def get_data(self):
        """Deserialize and return the stored payload."""
        return json.loads(self.data)

    def __repr__(self):
        # Bug fix: the original format string was missing the closing '>'.
        return "<DataSet: name {0}, time {1}, data {2}>".format(self.scraper, self.timestamp, self.data)
|
UTF-8
|
Python
| false | false | 2,013 |
6,871,947,714,220 |
ddfe06e6570577fd7de907688a3c877f9e80e90a
|
21d290a229c391ad9e98b05c7e1a620d37f5611e
|
/tasks/db.py
|
1d2878147f2b7c8c660fbfe5191adbcd9d410ad2
|
[] |
no_license
|
deverant/tasks
|
https://github.com/deverant/tasks
|
4cf4f46db0fce869b1a6c8ccaad8e087f37bb99e
|
5b6906c9c8d669015458d1e056d913e4cc1e6ece
|
refs/heads/master
| 2020-02-07T10:17:29.165407 | 2013-07-24T21:54:40 | 2013-07-24T21:55:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Provide DB connections
"""
from flask import g
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import QueuePool
from . import app
__all__ = ['Session', 'Base']
# Patch psycopg2 in case we are using postgres and initiate QueuePool
# and since we have patched gevent in we should be safe in using
# threadlocals in the pool
if app.config['DATABASE'].startswith("postgresql"):
    # Make psycopg2 cooperative under gevent before creating connections.
    from psycogreen.gevent import patch_psycopg
    patch_psycopg()
    engine = create_engine(app.config['DATABASE'], poolclass=QueuePool,
                           pool_size=5, max_overflow=10,
                           client_encoding="utf8", echo=app.debug)
    # Reuse the same pooled connection within a greenlet/thread.
    engine.pool._use_threadlocal = True
else:
    # Non-postgres (e.g. sqlite): default engine/pool settings.
    engine = create_engine(app.config['DATABASE'], echo=app.debug)
# Global functions for rest of the project to use
Session = sessionmaker(bind=engine)   # session factory bound to the engine above
Base = declarative_base()             # declarative base for model classes
# Flask integration to give each request it's own connection to use and
# automatically tear it down after the request
@app.before_request
def before_request():
    # Open a fresh SQLAlchemy session and stash it on Flask's request-local g.
    g.db = Session()
@app.teardown_request
def teardown_request(exception):
    # Close the request's session if one was opened; runs even on errors.
    db = getattr(g, 'db', None)
    if db is not None:
        db.close()
|
UTF-8
|
Python
| false | false | 2,013 |
2,319,282,371,039 |
64ac723c5c4f069648d26a8c12773e1cf97821d0
|
1a79df131f98e50df5dc67c1e52d3d30ff2caa07
|
/workouts/views.py
|
41e6cdbd0d81f68eeca7eb3a061b9faec298a973
|
[] |
no_license
|
broderboy/simplewod-mongodb
|
https://github.com/broderboy/simplewod-mongodb
|
19e79600dde574da4a684186c2c8b580975b3f6b
|
bc5b0465e8cab9c739911cd3d1ae6c12e54db711
|
refs/heads/master
| 2020-05-13T21:50:59.937955 | 2012-12-24T23:24:59 | 2012-12-24T23:24:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response as r2r
from django.shortcuts import get_object_or_404
from forms import *
from models import *
import time, datetime
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.template import RequestContext
import settings
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.template import RequestContext, Context
from django.core.mail import send_mail, BadHeaderError
from django.views.decorators.cache import cache_page
from django.contrib.sites.models import Site
from mongo.models import *
from mongo.views import *
from settings import DB
from pymongo.objectid import ObjectId
from django.utils.encoding import smart_unicode
from django.core import serializers
debug = getattr(settings, 'DEBUG', None)
def get_paginator(request, qs, num=20):
if debug:
print '- get paginator'
#post_list = qs
paginator = Paginator(qs, num)
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
try:
results = paginator.page(page)
except (EmptyPage, InvalidPage):
results = paginator.page(paginator.num_pages)
return results
def fix_tags(tags):
    """Normalize a comma-separated tag string.

    Spaces are replaced with '-', empty tags are dropped, and a single
    leading '-' (left over from the space substitution after a comma)
    is stripped from each tag.  Returns the cleaned comma-joined string.

    Fixes: removed the leftover debug print statements (two of which ran
    unconditionally on every call) — behavior of the return value is
    unchanged.
    """
    normalized = []
    for tag in tags.replace(' ', '-').split(','):
        if len(tag) > 0:
            if tag[0] == '-':
                tag = tag[1:]
            normalized.append(tag)
    return ','.join(normalized)
@login_required
def add_wod(request):
    """Create a Workout (and optionally a first Result) from a posted WodForm.

    GET renders an empty form pre-filled with today's date; POST validates,
    saves the Workout with its tags, and — when 'has_result' is checked —
    also saves a Result tagged with both the workout's and the result's tags.
    NOTE(review): indentation reconstructed from a whitespace-stripped dump;
    leftover debug prints kept as-is.
    """
    if debug:
        print '- add wod'
    user = request.user
    if request.method == 'POST':
        print request.POST
        form = WodForm(request.POST)
        if form.is_valid():
            w = Workout()
            w.title = form.cleaned_data['name']
            w.user = user
            w.workout = form.cleaned_data['workout']
            w.save()
            # Tags must be normalized (spaces -> '-') before storage.
            w.set_tags(fix_tags(form.cleaned_data['wod_tags']))
            print w.id
            if form.cleaned_data.get('has_result', False):
                print 'storing result '
                r = Result()
                r.user = user
                # Form date arrives as MM/DD/YYYY; DB wants YYYY-MM-DD.
                tempdate = form.cleaned_data['date']
                tempdate = time.strptime(tempdate,"%m/%d/%Y")
                r.date = time.strftime("%Y-%m-%d",tempdate)
                r.workout = w
                r.result = form.cleaned_data['results']
                r.time = form.cleaned_data['time']
                r.weight = form.cleaned_data['weight']
                r.notes = form.cleaned_data['notes']
                r.save()
                #for tag in form.cleaned_data['tags'].split(','):
                #r.add_tag(tag)
                print form.cleaned_data['tags']
                print "1!!!!!!!"
                # Result carries the union of workout tags and result tags.
                rtags = "%s, %s" % (fix_tags(form.cleaned_data['wod_tags']), fix_tags(form.cleaned_data['tags']))
                print rtags
                r.set_tags(rtags)
                #print form.cleaned_data['wod_tags']
                #print fix_tags(form.cleaned_data['wod_tags'])
                print "2!!!!!!!"
                #r.set_tags(fix_tags(form.cleaned_data['tags']))
                #w.set_tags(form.cleaned_data['tags'].replace(' ', '_'))
            print 'posting'
            posted = True   # picked up by the template via locals()
        else:
            print 'not valid?'
    else:
        form = WodForm(initial={'date': datetime.datetime.now().strftime("%m/%d/%Y"), 'has_result': True})
        #form.date = datetime.datetime.now().strftime("%d/%m/%Y")
    return r2r('add.html', locals(), context_instance=RequestContext(request))

def result_add(request, wodslug):
    """Render the add-result page for the workout *wodslug*."""
    if debug:
        print '- result add'
    return r2r('add_result.html', locals())
#@cache_page(60 * 60 * 2)
def home(request):
    """Front page: paginated list of recent workouts plus the exercise list
    serialized to JSON for the client."""
    if debug:
        print "home"
    user = request.user
    r1 = MongoWorkout.objects.all().order_by('-touched_at')
    #r1 = Workout.private_objects.all().order_by('-created_at')
    #if user.is_authenticated() and user.get_profile().private_wods:
    #    r2 = Workout.objects.filter(user=user).order_by('-created_at')
    #    results = get_paginator(request, r1 | r2)
    #else:
    #    results = get_paginator(request, r1)
    results = get_paginator(request, r1, 20)
    tag_cloud_template = 'tags_home.html'
    title = 'Recent Workouts'
    #exes = Exercise.objects.all().order_by('name')
    exes = serializers.serialize("json", Exercise.objects.all().order_by('name'))
    print exes
    show = True
    home = True   # NOTE(review): local shadows this function's own name
    return r2r('index.html', locals())

#@cache_page(60 * 60 * 2)
def home_user(request, username):
    """Per-user page: that user's recent results, hidden when the profile is
    private and the viewer is not the owner."""
    if debug:
        print '- home user'
    user = get_object_or_404(User, username=username)
    show = False
    # NOTE(review): indentation reconstructed — intent appears to be
    # "show if profile is public, or if the viewer is the owner"; confirm.
    if user.get_profile().private_wods:
        if request.user == user:
            show = True
    else:
        show = True
    results = get_paginator(request, Result.objects.filter(user=user).order_by('-date'), 20)
    tag_cloud_template = 'tags_home_user.html'
    header = "%s's Recent Workouts" % username
    title = username
    return r2r('index.html', locals())
#@cache_page(60 * 60 * 2)
def wod_single(request, wodslug):
    """Single-workout page: looks up the workout by slug, then fetches its
    results directly from MongoDB by the mirrored mongo_ids.
    NOTE(review): heavy leftover debug printing kept as-is; r2 is computed
    but unused (the merge with private results is commented out)."""
    if debug:
        print '- wods single'
    user = request.user
    wod = MongoWorkout.objects.get(slug = wodslug)
    print wod
    print wod.mongo_id
    mwod = wod.get()
    print mwod['_id']
    print 'res'
    results = MongoResult.private_objects.filter(workout = wod).order_by('-updated_at')
    #results = DB.results.find(wod=ObjectId(mwod['_id']))
    print results
    if user.is_authenticated() and user.get_profile().private_wods:
        r2 = MongoResult.objects.filter(workout = wod, user=user).order_by('-updated_at')
        #results = results | r2
    # Translate the ORM rows into raw Mongo ObjectIds and query Mongo directly.
    results = map(ObjectId, results.values_list('mongo_id', flat=True))
    print results
    #for r in results:
    #    r = ObjectId(r)
    #({j:{$in: [2,4,6]}});
    results = DB.results.find({"_id": { "$in": results }})
    print results
    #<pymongo.cursor.Cursor object at 0x102a2fe50>
    rr = results
    #for r in results:
    #    print r['user_id']
    #    rr.append(r)
    #rr = [(r['user_id'],) for r in results.collection]
    print rr
    #prints expected user id
    return r2r('singlewod.html', {'results': rr, 'mwod': mwod, 'wod': wod, 'request': request})

#@cache_page(60 * 60 * 2)
def result_single(request, wodslug, username):
    """One user's results for one workout; only populated when the profile
    is public or the viewer has private access.
    NOTE(review): if the privacy check fails, `results` is never assigned
    and the `print results` below would raise NameError — confirm."""
    if debug:
        print '- result single'
    user = get_object_or_404(User, username=username)
    wod = get_object_or_404(Workout, slug=wodslug)
    #results = Result.private_objects.filter(workout__slug=wodslug, user=user)
    print "2"
    print user.is_authenticated()
    print user.get_profile().private_wods
    if (request.user.is_authenticated() and request.user.get_profile().private_wods) or not user.get_profile().private_wods:
        results = Result.objects.filter(workout = wod, user=user).order_by('-date')
        show = True
    print results
    return r2r('singleresult.html', locals())
#@cache_page(60 * 60 * 2)
def result_tag_user(request, username, tagslug):
    """Paginated results carrying a tag, filtered to one user when the
    viewer is allowed to see them."""
    if debug:
        print '- result tag user'
    user = get_object_or_404(User, username=username)
    results = TaggedItem.objects.get_by_model(Result, tagslug).order_by('-date')
    # NOTE(review): indentation reconstructed — the user filter appears to
    # apply only when the privacy check passes; confirm.
    if (request.user.is_authenticated() and request.user.get_profile().private_wods) or not user.get_profile().private_wods:
        results = results.filter(user=user)
        show = True
    results = get_paginator(request, results, 10)
    if request.user.is_authenticated() and request.user.username == username:
        message = 'You have not logged any results with this tag'
    header = "Tagged Results: %s" % Tag.objects.get(name=tagslug)
    user = request.user
    return r2r('tagresult.html', locals())

#@cache_page(60 * 60 * 2)
def result_tag(request, tagslug):
    """All results carrying a tag (no user filter)."""
    if debug:
        print '- result tag'
    results = TaggedItem.objects.get_by_model(Result, tagslug).order_by('-date')
    header = "Tagged Results: %s" % Tag.objects.get(name=tagslug)
    user = request.user
    return r2r('singleresult.html', locals())

#@cache_page(60 * 60 * 2)
def wod_tag(request, tagslug):
    """Paginated workouts carrying a tag."""
    if debug:
        print '- wod tag'
    results = get_paginator(request, TaggedItem.objects.get_by_model(Workout, tagslug).order_by('-created_at'), 10)
    print Workout.objects.all().order_by('-created_at')
    header = "Tagged Workouts: %s" % Tag.objects.get(name=tagslug)
    user = request.user
    return r2r('tagwod.html', locals())
#@cache_page(60 * 60 * 2)
def result_add_ajax_form(request):
    """AJAX endpoint returning the result form fragment for a workout
    identified by ?wod_id=."""
    if debug:
        print '- ajax add form'
    wod_id = request.GET.get('wod_id')
    if request.is_ajax() and wod_id:
        action = 'add/'
        tags = Workout.objects.get(id=wod_id).get_tags()
        if request.method == 'POST':
            print 'post'
            form = ResultFormAjax(request.POST)
        else:
            form = ResultFormAjax()
        print form
    return r2r('resultform.html', locals(), context_instance=RequestContext(request))

def result_add_form(request, wodslug):
    """Non-AJAX add-result page for workout *wodslug*: on valid POST, saves
    a Result dated from the MM/DD/YYYY form field and tags it with both the
    workout's tags and the submitted tags.
    NOTE(review): indentation reconstructed; debug prints kept as-is."""
    if debug:
        print '- in result add form'
    wod_id = request.GET.get('wod_id')
    print request.POST
    wod = get_object_or_404(Workout, slug=wodslug)
    tags = Workout.objects.get(id=wod.id).get_tags()
    action = ''
    user = request.user
    if request.method == 'POST':
        print 'post'
        form = ResultFormAdd(request.POST)
        if form.is_valid():
            print "VALID"
            r = Result()
            r.user = user
            # Form date arrives as MM/DD/YYYY; DB wants YYYY-MM-DD.
            tempdate = form.cleaned_data['date']
            tempdate = time.strptime(tempdate,"%m/%d/%Y")
            r.date = time.strftime("%Y-%m-%d",tempdate)
            r.workout = wod
            r.result = form.cleaned_data['results']
            r.time = form.cleaned_data['time']
            r.weight = form.cleaned_data['weight']
            r.notes = form.cleaned_data['notes']
            r.save()
            #for tag in form.cleaned_data['tags'].split(','):
            #r.add_tag(tag)
            print form.cleaned_data['tags']
            # Copy the workout's tags onto the result, then add the form's.
            tt = []
            for tag in wod.get_tags():
                tt.append(tag.name)
            r.set_tags(", ".join(tt))
            r.set_tags(fix_tags(form.cleaned_data['tags']))
            submitted = True
        else:
            print 'crap'
    else:
        form = ResultFormAdd()
    return r2r('addresult.html', locals(), context_instance=RequestContext(request))
@login_required
def settings(request):
    """Account settings page: update email and the private-workouts flag.
    NOTE(review): this view shadows the module-level `settings` import
    within this function's scope — consider renaming."""
    user = request.user
    if request.method == 'POST':
        form = SettingsForm(request.POST)
        if form.is_valid():
            user.email = form.cleaned_data['email']
            user.get_profile().private_wods = form.cleaned_data['private_wods']
            user.save()
            user.get_profile().save()
            posted = True   # picked up by the template via locals()
    else:
        form = SettingsForm(initial={'email': user.email, 'private_wods': user.get_profile().private_wods})
    return r2r('settings.html', locals(), context_instance=RequestContext(request))
#@cache_page(60 * 60 * 2)
def contactview(request):
    """Contact form: on valid POST, emails the site owner and redirects to
    the thank-you page; otherwise re-renders the form."""
    if debug:
        print 'contactview'
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            if debug:
                'valid form'   # NOTE(review): no-op string; presumably meant `print 'valid form'`
            subject = "simpleWOD contact: %s" % form.cleaned_data['topic']
            message = "From: %s\n\n%s" % (form.cleaned_data['name'], form.cleaned_data['message'])
            from_email = form.cleaned_data['email']
            try:
                send_mail(subject, message, from_email, ['[email protected]'])
            except BadHeaderError:
                # A header-injection attempt in the form data.
                return HttpResponse('Invalid header found.')
            return HttpResponseRedirect('/contact/thankyou/')
    else:
        form = ContactForm()
        #form = ContactForm(initial={'topic': 'simpleWOD contact form submission' })
    return r2r('contact.html', locals(), context_instance=RequestContext(request))
def thankyou(request):
    """Render the contact-form thank-you page.

    Bug fix: the original called ``render_to_response``, which is never
    imported in this module (it is imported under the alias ``r2r``), so
    this view raised ``NameError`` at runtime.
    """
    return r2r('thankyou.html')
def feeds(request):
    """Render the feeds page with the current user and the site's base URL."""
    user = request.user
    domain = 'http://%s/' % Site.objects.get_current().domain
    return r2r('feeds.html', locals())
|
UTF-8
|
Python
| false | false | 2,012 |
6,682,969,128,082 |
30ba0a011851f114b8092577876796da4763666f
|
b049281d83a8e20374b201ae0c2af1c0b54d4230
|
/ThreeMonth.py
|
015c4fdf8118f31d9a3a18b76a9f57f5ec1f3c0b
|
[] |
no_license
|
EricBruylant/Smogon-Usage-Stats
|
https://github.com/EricBruylant/Smogon-Usage-Stats
|
dd0e65252d7f482a30bde5111201f18409cefb8f
|
52ba2fbf188f398c0a3e2fda39d55911360c2356
|
refs/heads/master
| 2021-05-26T21:12:00.797250 | 2012-09-25T18:39:59 | 2012-09-25T18:39:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# Compute a weighted three-month Pokemon usage table from three monthly
# stats files (sys.argv[1..3]) and print it as a forum [CODE] block.
# Months are weighted 1 : 3 : 20 (each scaled by 6.0/24).
# NOTE(review): presumably the 6.0 relates to 6 Pokemon per team — confirm.
# TODO(review): the three month-processing loops below are copy-pasted;
# they differ only in the weight factor and should be one function.
import string
import sys

# pokemons.txt: one "<num> <name>" per line; strip the number and newline.
file = open("pokemons.txt")
pokelist = file.readlines()
file.close()

lsname = []
for line in range(0,len(pokelist)):
    lsname.append(pokelist[line][str.find(pokelist[line],' ')+1:len(pokelist[line])-1])

usage = [0 for i in range(len(pokelist))] #track usage across all three tiers

#...first month OU
filename = str(sys.argv[1])
file = open(filename)
table=file.readlines()
file.close()

#'real' usage screwed me over--I can't take total count from header
#using percentages is a bad idea because of roundoff
tempUsage = [0 for i in range(len(pokelist))] #really dumb that I have to do this
for i in range(6,len(table)):
    # Fixed-width table: name in cols 10:26, raw count in cols 28:35.
    name = table[i][10:26]
    if (name[0] == '-'):
        # A '-' row marks the end of the usage table.
        break
    while name[len(name)-1] == ' ':
        #remove extraneous spaces
        name = name[0:len(name)-1]
    count = table[i][28:35]
    while count[len(count)-1] == ' ':
        #remove extraneous spaces
        count = count[0:len(count)-1]
    found = False
    for j in range(0,len(lsname)):
        if name == lsname[j]:
            tempUsage[j]=float(count)
            found = True
            break
    if not found:
        print name+" not found!"
        sys.exit()
# Fold month 1 in with weight 1 (6.0/24 after normalization).
for i in range(0,len(tempUsage)):
    usage[i] = usage[i]+6.0*tempUsage[i]/sum(tempUsage)/24

#...second month's OU
filename = str(sys.argv[2])
file = open(filename)
table=file.readlines()
file.close()

tempUsage = [0 for i in range(len(pokelist))] #really dumb that I have to do this
for i in range(6,len(table)):
    name = table[i][10:26]
    if (name[0] == '-'):
        break
    while name[len(name)-1] == ' ':
        #remove extraneous spaces
        name = name[0:len(name)-1]
    count = table[i][28:35]
    while count[len(count)-1] == ' ':
        #remove extraneous spaces
        count = count[0:len(count)-1]
    found = False
    for j in range(0,len(lsname)):
        if name == lsname[j]:
            tempUsage[j]=float(count)
            found = True
            break
    if not found:
        print name+" not found!"
        sys.exit()
# Fold month 2 in with weight 3.
for i in range(0,len(tempUsage)):
    usage[i] = usage[i]+3.0*6.0*tempUsage[i]/sum(tempUsage)/24

#...third month's OU
filename = str(sys.argv[3])
file = open(filename)
table=file.readlines()
file.close()

tempUsage = [0 for i in range(len(pokelist))] #really dumb that I have to do this
for i in range(6,len(table)):
    name = table[i][10:26]
    if (name[0] == '-'):
        break
    while name[len(name)-1] == ' ':
        #remove extraneous spaces
        name = name[0:len(name)-1]
    count = table[i][28:35]
    while count[len(count)-1] == ' ':
        #remove extraneous spaces
        count = count[0:len(count)-1]
    found = False
    for j in range(0,len(lsname)):
        if name == lsname[j]:
            tempUsage[j]=float(count)
            found = True
            break
    if not found:
        print name+" not found!"
        sys.exit()
# Fold month 3 in with weight 20 (the most recent month dominates).
for i in range(0,len(tempUsage)):
    usage[i] = usage[i]+20.0*6.0*tempUsage[i]/sum(tempUsage)/24

#generate three-month table
OU = []
for i in range(0,len(usage)):
    if usage[i] > 0.0:
        OU.append([i,usage[i]])
OU = sorted(OU, key=lambda OU:-OU[1])
print "[HIDE=OU][CODE]"
print "Three-month usage for [insert tier name here]"
print " + ---- + --------------- + ------- + "
print " | Rank | Pokemon         | Percent | "
print " + ---- + --------------- + ------- + "
print ' [B]| %-4d | %-15s | %6.3f%% |' % (1,lsname[OU[0][0]],OU[0][1]*100)
for i in range(1,len(OU)):
    # 0.0340636711 is the usage cutoff: entries above it are bolded.
    # NOTE(review): presumably the standard OU tiering cutoff — confirm.
    if OU[i][1] < 0.0340636711:
        start = i
        break
    print ' | %-4d | %-15s | %6.3f%% |' % (i+1,lsname[OU[i][0]],100.0*OU[i][1])
# Close the bold span on the first below-cutoff entry (re-printed with [/B]).
print '[/B] | %-4d | %-15s | %6.3f%% |' % (i+1,lsname[OU[i][0]],100.0*OU[i][1])
for i in range(start+1,len(OU)):
    print ' | %-4d | %-15s | %6.3f%% |' % (i+1,lsname[OU[i][0]],100.0*OU[i][1])
print " + ---- + --------------- + ------- +[/CODE][/HIDE]"
|
UTF-8
|
Python
| false | false | 2,012 |
6,098,853,578,022 |
129a7489754b2d8abaaac788b833cf59a11f0ecd
|
efa3e9531a21c5454198227d82892c20b2214832
|
/src/plivo/core/freeswitch/eventtypes.py
|
40b2a70b66a43c4f37246d97c9057fdfd38b8a82
|
[
"MPL-1.1",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
chrismatthieu/plivo
|
https://github.com/chrismatthieu/plivo
|
80dabd9affb191321f1a6df23ce2afd68123b936
|
23e75709301d0d3a0ee53917187ef6281598129f
|
refs/heads/master
| 2020-04-08T16:39:44.190012 | 2011-05-21T18:01:12 | 2011-05-21T18:01:12 | 1,781,808 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2011 Plivo Team. See LICENSE for details.
"""
Event Types classes
"""
from urllib import unquote
from urllib import quote
class Event(object):
    '''A single FreeSWITCH event: a dict of headers plus an optional raw body.

    Header values are stored URL-unquoted; both the quoted and unquoted raw
    header text are kept so the event can be re-serialized either way.
    '''
    def __init__(self, buffer=""):
        self._headers = {}
        self._raw_body = ''
        self._raw_headers = ''
        self._u_raw_headers = ''
        if buffer:
            # Sets event headers from buffer.
            for line in buffer.splitlines():
                try:
                    var, val = line.rstrip().split(': ', 1)
                    self.set_header(var, val)
                except ValueError:
                    # Line without a "Name: value" separator -- ignored.
                    pass
    def __getitem__(self, key):
        return self.get_header(key)
    def __setitem__(self, key, value):
        self.set_header(key, value)
    def get_content_length(self):
        '''
        Gets Content-Length header as integer.
        Returns 0 if length not found or not a valid integer.
        '''
        length = self.get_header('Content-Length')
        if length:
            try:
                return int(length)
            except (TypeError, ValueError):
                # Malformed Content-Length -- treat as no body.
                return 0
        return 0
    def get_reply_text(self):
        '''
        Gets Reply-Text header as string.
        Returns None if header not found.
        '''
        return self.get_header('Reply-Text')
    def is_reply_text_success(self):
        '''
        Returns True if Reply-Text header begins with +OK.
        Returns a falsy value (None/False) otherwise.
        '''
        reply = self.get_reply_text()
        return reply and reply[:3] == '+OK'
    def get_content_type(self):
        '''
        Gets Content-Type header as string.
        Returns None if header not found.
        '''
        return self.get_header('Content-Type')
    def get_headers(self):
        '''
        Gets all headers as a python dict (values are unquoted).
        '''
        return self._headers
    def set_headers(self, headers):
        '''
        Sets all headers from dict (replaces the current header dict;
        the raw header strings are NOT rebuilt).
        '''
        self._headers = headers.copy()
    def get_header(self, key, defaultvalue=None):
        '''
        Gets a specific header as string.
        Returns defaultvalue (None by default) if header not found.
        '''
        try:
            return self._headers[key]
        except KeyError:
            return defaultvalue
    def set_header(self, key, value):
        '''
        Sets a specific header.

        The value is stored URL-unquoted in the header dict; the raw
        (quoted) and unquoted serializations are appended to as well.
        '''
        key = key.strip()
        value = value.strip()
        u_value = unquote(value)
        self._raw_headers += "%s: %s\n" % (key, value)
        self._u_raw_headers += "%s: %s\n" % (key, u_value)
        self._headers[key] = u_value
    def get_body(self):
        '''
        Gets raw Event body.
        '''
        return self._raw_body
    def set_body(self, data):
        '''
        Sets raw Event body.
        '''
        self._raw_body = data
    def get_raw_headers(self):
        '''
        Gets raw headers (quoted).
        '''
        return self._raw_headers
    def get_unquoted_raw_headers(self):
        '''
        Gets raw headers (unquoted).
        '''
        return self._u_raw_headers
    def get_raw_event(self):
        '''
        Gets raw Event (quoted headers + body).
        '''
        return self._raw_headers + self._raw_body + '\n'
    def get_unquoted_raw_event(self):
        '''
        Gets raw Event (unquoted headers + body).
        '''
        return self._u_raw_headers + self._raw_body + '\n'
    def __str__(self):
        return '<%s headers=%s, body=%s>' \
               % (self.__class__.__name__,
                  str(self.get_unquoted_raw_headers().replace('\n', '\\n')),
                  str(self.get_body()).replace('\n', '\\n'))
class ApiResponse(Event):
    '''Response to a foreground "api" command.'''

    def __init__(self, buffer=""):
        Event.__init__(self, buffer)

    @classmethod
    def cast(cls, event):
        '''
        Builds an ApiResponse from an existing Event instance.
        '''
        response = ApiResponse(event.get_raw_headers())
        response.set_body(event.get_body())
        return response

    def get_response(self):
        '''
        Returns the api command output, stripped of surrounding whitespace.
        '''
        body = self.get_body()
        return body.strip()

    def is_success(self):
        '''
        True when the api command succeeded (body starts with "+OK").
        '''
        body = self._raw_body
        return body and body.startswith('+OK')
class BgapiResponse(Event):
    '''Response to a background ("bgapi") command.'''

    def __init__(self, buffer=""):
        Event.__init__(self, buffer)

    @classmethod
    def cast(cls, event):
        '''
        Builds a BgapiResponse from an existing Event instance.
        '''
        response = BgapiResponse(event.get_raw_headers())
        response.set_body(event.get_body())
        return response

    def get_response(self):
        '''
        Returns the bgapi command response (the Reply-Text header).
        '''
        return self.get_reply_text()

    def get_job_uuid(self):
        '''
        Returns the Job-UUID assigned to the background job.
        '''
        return self.get_header('Job-UUID')

    def is_success(self):
        '''
        True when the bgapi command was accepted (+OK reply).
        '''
        return self.is_reply_text_success()
class CommandResponse(Event):
    '''Response to a plain protocol command.'''

    def __init__(self, buffer=""):
        Event.__init__(self, buffer)

    @classmethod
    def cast(cls, event):
        '''
        Builds a CommandResponse from an existing Event instance.
        '''
        response = CommandResponse(event.get_raw_headers())
        response.set_body(event.get_body())
        return response

    def get_response(self):
        '''
        Returns the command response (the Reply-Text header).
        '''
        return self.get_reply_text()

    def is_success(self):
        '''
        True when the command succeeded (+OK reply).
        '''
        return self.is_reply_text_success()
|
UTF-8
|
Python
| false | false | 2,011 |
8,461,085,614,400 |
16350baafe6beedad9dd861f7881bf2e199e9d0f
|
8220f77b4e48353df8178d3c91d1875a6c7ba13e
|
/models.py
|
36ab4399cfa5f63795e77475d4380b28aeb0e3a5
|
[] |
no_license
|
vigneshsarma/RboxFilePlug
|
https://github.com/vigneshsarma/RboxFilePlug
|
81574b23cb53e15ed7d17ecbd82004c66a8fa63a
|
78f689bdd31c95146d0e6bd55584ca8b98d6a0d5
|
refs/heads/master
| 2021-01-23T22:52:41.430381 | 2012-06-19T09:44:35 | 2012-06-19T09:44:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models, connection
from django.db.models.query import QuerySet, EmptyQuerySet, insert_query, RawQuerySet
from custom_filefield import RboxFileField
from custom_filefield import S3BotoStorage
from django.contrib.auth.models import User
import uuid
import datetime
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models.fields.related import RelatedField, Field, ManyToManyRel
from django.conf import settings
from south.modelsinspector import add_introspection_rules
class FileManager(models.Manager):
    # Per-instance manager exposing the RboxFile objects attached to one
    # owner object under one file_field_identifier namespace (joined
    # through RboxFileConnector generic relations). Instantiated on every
    # attribute access by FileManagerDescriptor.__get__.
    def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None,
                 join_table=None, source_col_name=None, target_col_name=None, content_type=None,
                 content_type_field_name=None, object_id_field_name=None, file_field_identifier=None, max_count=None):
        super(FileManager, self).__init__()
        self.core_filters = core_filters or {}
        self.model = model
        self.content_type = content_type
        self.symmetrical = symmetrical
        self.instance = instance
        self.join_table = join_table
        # NOTE(review): the join_table argument above is immediately
        # overwritten here -- it is effectively ignored.
        self.join_table = model._meta.db_table
        self.source_col_name = source_col_name
        self.target_col_name = target_col_name
        self.content_type_field_name = content_type_field_name
        self.object_id_field_name = object_id_field_name
        self.pk_val = self.instance._get_pk_val()
        self.file_field_identifier = file_field_identifier
        # max_count: optional cap on attached files; max_count == 1 switches
        # get()/delete() to single-object semantics.
        self.max_count = max_count
    class FileDoesNotExist(Exception):
        # Raised by get() when no matching RboxFile exists (multi-file mode).
        pass
    class FileNameDidNoTMatch(Exception):
        # Raised by create() when an explicit filename kwarg disagrees with
        # filepointer.name.
        pass
    class MaximumNumberofObjectsAlreadyCreated(Exception):
        # Raised when create()/add() would exceed max_count.
        pass
    def get_query_set(self):
        """Returns a new QuerySet object. Subclasses can override this method
        to easily customize the behavior of the Manager.
        """
        # Filter RboxFile through the generic connector rows that point at
        # this owner instance under this field identifier.
        return QuerySet(RboxFile).filter(rboxfileconnector__content_type=self.content_type,
                                         rboxfileconnector__object_id=self.instance.id,
                                         rboxfileconnector__file_field_identifier=self.file_field_identifier)
    def all(self):
        # The 'ondelete' flag is set by GenericFilePlug.value_from_object
        # during deletion serialization; return an empty queryset then.
        if hasattr(self, 'ondelete'):
            if self.ondelete:
                return self.none()
        return self.get_query_set()
    def create(self, **kwargs):
        # Create a new RboxFile and attach it to the owner instance.
        # filename/filesize default to the uploaded file's own values.
        if self.max_count and (self.all().count() >= self.max_count):
            raise FileManager.MaximumNumberofObjectsAlreadyCreated("Maximum number of objects already created")
        filepointer = kwargs['filepointer']
        if kwargs.get('filename', None):
            if kwargs['filename'] != filepointer.name:
                raise FileManager.FileNameDidNoTMatch("the keyword argument filename didnot match with filepointer.name")
        if not 'filename' in kwargs:
            kwargs['filename'] = filepointer.name
        if not 'filesize' in kwargs:
            kwargs['filesize'] = filepointer.size
        rbox_file = self.get_query_set().create(**kwargs)
        rboxfile_connector = RboxFileConnector(rbox_file=rbox_file, content_type=self.content_type,
                                               object_id=self.instance.id, file_field_identifier=self.file_field_identifier)
        rboxfile_connector.save()
        return rbox_file
    def add(self, rbox_file):
        # Attach an existing RboxFile (idempotent via get_or_create).
        if self.max_count and (self.all().count() >= self.max_count):
            raise FileManager.MaximumNumberofObjectsAlreadyCreated("Maximum number of objects already created")
        rboxfile_connector, created = RboxFileConnector.objects.get_or_create(rbox_file=rbox_file, content_type=self.content_type,
                                                                              object_id=self.instance.id, file_field_identifier=self.file_field_identifier)
        return rbox_file
    def remove(self, rbox_file):
        """ Remove doesnot deletes the file only deletes the connector model instance
            rather use delete method for deleting files
        """
        try:
            rboxfile_connector = RboxFileConnector.objects.get(rbox_file=rbox_file, content_type=self.content_type,
                                                               object_id=self.instance.id, file_field_identifier=self.file_field_identifier)
            rboxfile_connector.delete()
        except RboxFileConnector.DoesNotExist:
            # Already detached -- nothing to do.
            pass
        return
    def get(self, **kwargs):
        # Single-file mode: return the one attached file, or None when
        # nothing is attached (kwargs are ignored in this mode).
        if self.max_count == 1:
            try:
                return self.all()[0]
            except IndexError:
                return None
        else:
            try:
                return super(FileManager,self).get(**kwargs)
            except RboxFile.DoesNotExist:
                raise FileManager.FileDoesNotExist
    def delete(self, **kwargs):
        # Only meaningful in single-file mode: delete the attached file(s).
        if self.max_count == 1:
            return self.all().delete()
        else:
            raise AttributeError("'FileManager' object has no attribute 'delete'")
class FileManagerDescriptor(object):
    """
    This class provides the functionality that makes the related-object
    managers available as attributes on a model class, for fields that have
    multiple "remote" values and have a GenericRelation defined in their model
    (rather than having another model pointed *at* them). In the example
    "article.publications", the publications attribute is a
    ReverseGenericRelatedObjectsDescriptor instance.
    """
    def __init__(self, field, file_field_identifier, max_count):
        self.field = field
        self.file_field_identifier = file_field_identifier
        self.max_count = max_count
    def get_filemanager(self):
        # Hook: subclasses may return a different manager class.
        return FileManager
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself -> return the descriptor.
        if instance is None:
            return self
        # This import is done here to avoid circular import importing this module
        from django.contrib.contenttypes.models import ContentType
        # Dynamically create a class that subclasses the related model's
        # default manager. A fresh manager is built on every access, bound
        # to this instance and this field's identifier.
        rel_model = self.field.rel.to
        RelatedManager = self.get_filemanager()
        qn = connection.ops.quote_name
        manager = RelatedManager(
            model = rel_model,
            instance = instance,
            symmetrical = (self.field.rel.symmetrical and instance.__class__ == rel_model),
            join_table = qn(self.field.m2m_db_table()),
            source_col_name = qn(self.field.m2m_column_name()),
            target_col_name = qn(self.field.m2m_reverse_name()),
            content_type = ContentType.objects.db_manager(instance._state.db).get_for_model(instance),
            content_type_field_name = self.field.content_type_field_name,
            object_id_field_name = self.field.object_id_field_name,
            file_field_identifier = self.file_field_identifier,
            max_count = self.max_count
        )
        return manager
    def __set__(self, instance, value):
        # Assignment replaces the attached set with the given files.
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")
        manager = self.__get__(instance)
        # NOTE(review): FileManager does not define clear(); assigning to
        # this descriptor will raise AttributeError -- confirm intended.
        manager.clear()
        for obj in value:
            manager.add(obj)
class CustomFileRelation(generic.GenericRelation):
    # GenericRelation that installs a FileManagerDescriptor on the owning
    # model instead of the default reverse-relation descriptor.
    def get_filemanager_descriptor(self):
        # Hook: subclasses may supply a different descriptor class.
        return FileManagerDescriptor
    def contribute_to_class(self, cls, name):
        super(CustomFileRelation, self).contribute_to_class(cls, name)
        # Save a reference to which model this class is on for future use
        self.model = cls
        # Default the identifier to the attribute name, so each plug field
        # gets its own namespace of connector rows.
        if not self.file_field_identifier:
            self.file_field_identifier = self.name
        RelatedManagerDescriptor = self.get_filemanager_descriptor()
        setattr(cls, self.name, RelatedManagerDescriptor(self, self.file_field_identifier, self.max_count))
def get_unique_key():
    '''Return a random 32-character lowercase hex string (uuid4 based),
    used as the default unique key for RboxFile rows.'''
    random_id = uuid.uuid4()
    return random_id.hex
class RboxFile(models.Model):
    # A stored file: metadata plus the file itself (filepointer).
    # unique_key: random uuid4 hex, stable external identifier.
    unique_key = models.CharField('Unique Key', max_length=100, default=get_unique_key, unique=True, db_index=True)
    filename = models.CharField('File Name', max_length=100)
    filelabel = models.CharField('File Type', max_length=50, blank=True, null=True)
    # filesize: filled from filepointer.size by FileManager.create when omitted.
    filesize = models.PositiveIntegerField('File Size')
    filepointer = RboxFileField('File Pointer', max_length=200, upload_to='filemanager.rboxfile') #, backup_storage=S3BotoStorage())
    # user/date: optional uploader and upload timestamp.
    user = models.ForeignKey(User,null=True)
    date = models.DateTimeField(default=datetime.datetime.now)
class RboxFileConnector(models.Model):
    # Generic join row attaching an RboxFile to any model instance,
    # namespaced by file_field_identifier (one namespace per plug field).
    rbox_file = models.ForeignKey(RboxFile)
    content_type = models.ForeignKey(ContentType)
    file_field_identifier = models.CharField(max_length=100, default="attachments", db_index=True)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = generic.GenericForeignKey('content_type', 'object_id')
class GenericFilePlug(object):
    # Mixin supplying the constructor defaults and serialization tweaks for
    # file-plug fields; combined with CustomFileRelation in RboxFilePlug.
    def __init__(self,related_name=None, file_field_identifier=None, max_count=None, *args, **kwargs):
        # Generate a unique related_name when none is given so several
        # plugs can live on one model without reverse-name clashes.
        if not related_name:
            related_name = uuid.uuid4().hex
        kwargs['related_name'] = related_name
        kwargs['to'] = RboxFileConnector
        super(GenericFilePlug,self).__init__(**kwargs)
        self.file_field_identifier = file_field_identifier
        self.max_count = max_count
    def value_from_object(self, obj):
        import django
        # Workaround applied only on Django 1.2.3: flag the manager so that
        # all() returns an empty queryset during deletion serialization.
        if django.__dict__['VERSION'] == (1, 2, 3, 'final', 0):
            manager_obj = getattr(obj, self.attname)
            manager_obj.ondelete = True
            return manager_obj
        else:
            return super(GenericFilePlug,self).value_from_object(obj)
class GenericSingleFilePlug(object):
    # Mixin forcing max_count=1 so the plug behaves as a single-file slot
    # (FileManager.get()/delete() switch to single-object semantics).
    def __init__(self, *args, **kwargs):
        kwargs['max_count'] = 1
        super(GenericSingleFilePlug,self).__init__(*args, **kwargs)
class RboxFilePlug(GenericFilePlug, CustomFileRelation):
    # Multi-file attachment field: GenericFilePlug init/serialization on top
    # of the CustomFileRelation generic relation.
    pass
class RboxSingleFilePlug(GenericSingleFilePlug, RboxFilePlug):
    # Single-file variant: RboxFilePlug constrained to max_count=1.
    pass
# Teach South's migration introspector about the custom plug fields and
# their extra 'file_field_identifier' keyword argument.
rboxfileplug_introspection_rules = [((RboxFilePlug,),[],{"file_field_identifier": ["file_field_identifier",{}],},)]
add_introspection_rules(rboxfileplug_introspection_rules, ["filemanager.models.RboxFilePlug"])
rboxsinglfileplug_introspection_rules = [((RboxSingleFilePlug,),[],{"file_field_identifier": ["file_field_identifier",{}],},)]
add_introspection_rules(rboxsinglfileplug_introspection_rules, ["filemanager.models.RboxSingleFilePlug"])
|
UTF-8
|
Python
| false | false | 2,012 |
11,175,504,919,262 |
d3fba9e0d7fa84b2603f97ab5ff373da90d91417
|
12d00d6452e19db2f9b7ec1f4bb2574bf2a0f125
|
/core/tests/gui/balance_graph_test.py
|
e397da42eb711b92e988d003a4278d0d2b16de51
|
[] |
no_license
|
prodigeni/moneyguru
|
https://github.com/prodigeni/moneyguru
|
578f9dfd5542b59d9b01ede1988bdd346c619472
|
f8700112c85ca005e52a3c460775d543e219534e
|
refs/heads/master
| 2021-01-16T21:22:26.930380 | 2014-01-25T19:13:23 | 2014-01-25T19:13:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from hscommon.testutil import eq_
from hscommon.currency import CAD
from ..base import TestApp, with_app
from ...model.account import AccountType
#--- Pristine
@with_app(TestApp)
def test_balgraph_yaxis_scaling_works_if_negative(app):
    # The y axis scaling (ymin being "higher" than 0) works when the balance is negative.
    app.add_account()
    app.show_account()
    app.add_entry('01/01/2010', decrease='1000')
    app.drsel.select_next_date_range()
    # The graph bounds should bracket the -1000 balance (-1100 .. -900).
    eq_(app.balgraph.ymax, -900)
    eq_(app.balgraph.ymin, -1100)
class TestTwoLiabilityTransactions:
    # Fixture: a 'Visa' liability with a 120.00 increase on Jan 3 and a
    # 40.00 decrease on Jan 5 2008, with the month date range selected.
    def do_setup(self):
        app = TestApp()
        app.drsel.select_month_range()
        app.add_account('Visa', account_type=AccountType.Liability)
        app.show_account()
        app.add_entry('3/1/2008', increase='120.00')
        app.add_entry('5/1/2008', decrease='40.00')
        return app
    @with_app(do_setup)
    def test_budget(self, app, monkeypatch):
        # when we add a budget, the balance graph will show a regular progression throughout date range
        monkeypatch.patch_today(2008, 1, 27)
        app.add_account('expense', account_type=AccountType.Expense)
        app.add_budget('expense', 'Visa', '100')
        app.show_nwview()
        app.bsheet.selected = app.bsheet.liabilities[0]
        app.show_account()
        expected = [('04/01/2008', '120.00'), ('05/01/2008', '120.00'), ('06/01/2008', '80.00'),
                    ('28/01/2008', '80.00'), ('01/02/2008', '180.00')]
        eq_(app.graph_data(), expected)
        # Moving to the next month: balance starts where the budget left off.
        app.drsel.select_next_date_range()
        eq_(app.graph_data()[0], ('01/02/2008', '180.00'))
    @with_app(do_setup)
    def test_budget_on_last_day_of_the_range(self, app, monkeypatch):
        # don't raise a ZeroDivizionError
        monkeypatch.patch_today(2008, 1, 31)
        app.add_account('expense', account_type=AccountType.Expense)
        app.add_budget('expense', 'Visa', '100')
        app.show_nwview()
        app.drsel.select_next_date_range()
    @with_app(do_setup)
    def test_budget_with_future_txn(self, app, monkeypatch):
        # when there's a future txn, we want the amount of that txn to be "sharply" displayed
        monkeypatch.patch_today(2008, 1, 15)
        app.add_entry('20/1/2008', decrease='10')
        app.add_account('expense', account_type=AccountType.Expense)
        app.add_budget('expense', 'Visa', '100')
        app.show_nwview()
        app.bsheet.selected = app.bsheet.liabilities[0]
        app.show_account()
        # the amount at the 20th is supposed to include budgeting for the 20th, and the 21st data point
        # has to include budget for the 21st
        expected = [('04/01/2008', '120.00'), ('05/01/2008', '120.00'), ('06/01/2008', '80.00'),
                    ('16/01/2008', '80.00'), ('20/01/2008', '105.00'), ('21/01/2008', '101.25'), ('01/02/2008', '170.00')]
        eq_(app.graph_data(), expected)
    @with_app(do_setup)
    def test_graph(self, app):
        # Plain balance graph (no budget): flat between the two entries.
        expected = [('04/01/2008', '120.00'), ('05/01/2008', '120.00'),
                    ('06/01/2008', '80.00'), ('01/02/2008', '80.00')]
        eq_(app.graph_data(), expected)
        eq_(app.balgraph.title, 'Visa')
class TestForeignAccount:
    # Fixture: an empty account denominated in CAD.
    def do_setup(self):
        app = TestApp()
        app.add_account('Visa', currency=CAD)
        app.show_account()
        return app
    @with_app(do_setup)
    def test_graph(self, app):
        # The balance graph reports the account's own currency.
        eq_(app.balgraph.currency, CAD)
#---
def app_budget_and_no_txn(monkeypatch):
    # Fixture: one asset account, one income account, and a 100/month
    # budget flowing from income to asset -- but no actual transactions.
    monkeypatch.patch_today(2008, 1, 1)
    app = TestApp()
    app.drsel.select_month_range()
    app.add_account('asset')
    app.add_account('income', account_type=AccountType.Income)
    app.add_budget('income', 'asset', '100')
    return app
@with_app(app_budget_and_no_txn)
def test_future_date_range(app):
    # There was a bug where when in a future date range, and also in a range with no transaction,
    # no budget data would be drawn.
    app.drsel.select_next_date_range()
    app.show_nwview()
    # Now, we're supposed to see a graph starting at 100 and ending at 200
    # (one budgeted month already elapsed, a second one accruing).
    expected = [('01/02/2008', '100.00'), ('01/03/2008', '200.00')]
    eq_(app.nw_graph_data(), expected)
@with_app(app_budget_and_no_txn)
def test_show_budget_data_even_when_account_is_excluded(app):
    # Ticket #332. When accounts were excluded, budget data wouldn't show in the account's balgraph.
    nwview = app.show_nwview()
    app.select_account('asset')
    nwview.bsheet.toggle_excluded()
    app.show_account('asset')
    # Budgeted growth still shows: 0 at range start, 100 by month's end.
    expected = [('02/01/2008', '0.00'), ('01/02/2008', '100.00')]
    eq_(app.graph_data(), expected)
#---
class TestTwoAccountsOneTransaction:
    # Fixture: two accounts and a single 42.00 transfer into account1.
    def do_setup(self):
        app = TestApp()
        app.add_account('account1')
        app.add_account('account2')
        app.add_txn('12/01/2010', to='account1', amount='42')
        return app
    @with_app(do_setup)
    def test_show_to_account(self, app):
        # The data shown in the balgraph when showing account1 is accurate. Previously, the balgraph
        # would use data from the *selected* account, not the *shown* account.
        app.ttable.show_to_account()
        app.link_aview()
        # No account is selected now
        eq_(app.graph_data()[0], ('13/01/2010', '42.00'))
        eq_(app.balgraph.title, 'account1')
|
UTF-8
|
Python
| false | false | 2,014 |
4,690,104,319,082 |
e3c81aecb69f8da839d3fa82d4f7e403d50cdd14
|
18403e5580fe322cb0a44e68dacf7254e4c0e1e7
|
/blog/helper.py
|
237f826705195fec5e5f9fa6bd9e253f2ecb5467
|
[] |
no_license
|
weasky/example
|
https://github.com/weasky/example
|
7286c51acdcbe953d21a8f3ce5a587b0488f0adc
|
2982632101b747246958474c7d5b845a6a313add
|
refs/heads/master
| 2016-09-10T09:08:48.500061 | 2010-06-25T05:02:47 | 2010-06-25T05:02:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import render_template
def template(templatefile):
    '''Decorator factory: render *templatefile* with the context dict
    returned by the decorated view function.'''
    def decorated(view):
        def render(**kwargs):
            context = view(**kwargs)
            return render_template(templatefile, **context)
        return render
    return decorated
def humanizeTime(timestamp = None):
    """
    Returns a humanized string representing time difference
    between now() and the input timestamp.
    The output rounds down to the largest non-zero unit
    (days, hours, minutes, or seconds):
    4 days 5 hours returns '4 days'
    0 days 4 hours 3 minutes returns '4 hours', etc...
    Returns 'just now' when the difference is under one second,
    or when no timestamp is given.
    """
    import datetime
    if timestamp is None:
        # No timestamp to compare against -- treat as "now".
        return 'just now'
    timeDiff = datetime.datetime.now() - timestamp
    # Largest-first list of (value, singular label, plural label);
    # the first non-zero value wins.
    units = [
        (timeDiff.days, "day", "days"),
        (timeDiff.seconds // 3600, "hour", "hours"),
        (timeDiff.seconds % 3600 // 60, "min", "mins"),
        (timeDiff.seconds % 60, "sec", "secs"),
    ]
    for value, singular, plural in units:
        if value > 0:
            return "%s %s" % (value, singular if value == 1 else plural)
    return 'just now'
|
UTF-8
|
Python
| false | false | 2,010 |
12,558,484,397,346 |
f490c9d0dbb802d0cd26ac7f4e78dc23eac02e26
|
fe643428e98763ace20a18c7b87fcb7237b067cb
|
/2/2.py
|
5ee257ecbb58efb9d0e2203dfb950585d3452992
|
[] |
no_license
|
pwilczynski/PythonChallenge
|
https://github.com/pwilczynski/PythonChallenge
|
475978155ba43436248a27349532e0a5aa247067
|
c0f1487f61f1b75c0c2b7ceefefbef975b7c8cc3
|
refs/heads/master
| 2021-01-02T22:58:52.649972 | 2011-06-20T20:49:31 | 2011-06-20T20:49:31 | 1,886,445 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Python Challenge level 2 (Python 2 syntax): print each alphabetic
# character found in basic.txt -- the "rare characters" hidden among a
# large block of punctuation.
import string
alpha = string.ascii_lowercase
print alpha
f = open('basic.txt', 'r')
g = f.readlines()
# Scan every character of every line, keeping only letters.
for line in g:
    for char in line:
        if char.isalpha():
            print char
|
UTF-8
|
Python
| false | false | 2,011 |
7,000,796,706,881 |
5eba1e8bb3708b267c3b3ae4c788affe495f081a
|
cd3d4da835b4e95c9dee158f21b6197caa130fa2
|
/util/gen-board-map.py
|
db4a49941399cf23da25c97c7096e896e09815ca
|
[
"GPL-3.0-only"
] |
non_permissive
|
pbrook/charliecube
|
https://github.com/pbrook/charliecube
|
839af1f0acb4434e4e8f90a64525df7e37f52808
|
222e6fdded5d12608c6bcc73562caa054c8d6768
|
refs/heads/master
| 2021-01-25T00:16:27.985446 | 2014-07-05T17:40:44 | 2014-07-05T17:40:44 | 19,299,229 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python3
import sys
import optparse
def fn(c):
    '''Map a single pin letter (case-insensitive) to its 0-based index:
    'a'/'A' -> 0, 'b'/'B' -> 1, and so on.'''
    base = ord('a')
    return ord(c.lower()) - base
def file_line(f):
    '''Generator yielding non-blank, non-comment lines of *f*, stripped.

    Blank lines are echoed to stdout as a bare newline (preserving the
    grouping of the generated output); lines starting with '#' are
    skipped entirely. Raises EOFError when the stream is exhausted.
    '''
    while True:
        raw = f.readline()
        if not raw:
            raise EOFError()
        stripped = raw.strip()
        if not stripped:
            sys.stdout.write("\n")
            continue
        if stripped.startswith('#'):
            continue
        yield stripped
# Command line: --rgb switches from single-colour (anode/cathode pairs)
# to RGB input (three lines per LED row).
op = optparse.OptionParser()
op.add_option("--rgb", action="store_true", dest="rgb", default=False)
(options, args) = op.parse_args();
rgb = options.rgb
sys.stdout.write("/* Generated by gen-board-map.py */\n")
lines = file_line(sys.stdin)
l = next(lines)
# First data line: 'A' (common anode) or 'K' (common cathode), followed
# by the pin-letter alphabet used for all subsequent lookups.
common = "AK".index(l[0])
# NOTE(review): this rebinds (shadows) the fn() helper defined above with
# the str.index method of the pin alphabet -- presumably intentional.
fn = l[1:].index
while True:
    try:
        l1 = next(lines)
        if rgb:
            l2 = next(lines)
            l3 = next(lines)
        # Group into anode/cathode pairs
        x = list(zip(*[iter(map(fn, l1.split()))]*2))
        if rgb:
            y = map(fn, l2.split())
            z = map(fn, l3.split())
        else:
            # Single-colour mode: dummy green/blue columns, ignored below.
            y = [None]*len(x)
            z = y
        for ((r, k), g, b) in zip(x, y, z):
            if common == 0:
                # Common anode
                sys.stdout.write("PIXEL(%d,%d)," % (k, r))
                if rgb:
                    sys.stdout.write("PIXEL(%d,%d)," % (k, g))
                    sys.stdout.write("PIXEL(%d,%d),\n" % (k, b))
            else:
                # Common cathode
                sys.stdout.write("PIXEL(%d,%d)," % (r, k))
                if rgb:
                    sys.stdout.write("PIXEL(%d,%d)," % (g, k))
                    sys.stdout.write("PIXEL(%d,%d),\n" % (b, k))
    except EOFError:
        # End of input -- file_line signals exhaustion with EOFError.
        break;
sys.stdout.write("\n")
|
UTF-8
|
Python
| false | false | 2,014 |
1,760,936,620,401 |
1a5a265b8de1c8f92df77bdbc45ca67237100ac2
|
53036622fe9df8343a08cf52180a3b8f78e04f13
|
/plugin.py
|
b8896ffe3e8f5f02cf3b669b3e191399a8e4c9b6
|
[
"Apache-2.0"
] |
permissive
|
JoshuaCooper/xbmc-subsonic
|
https://github.com/JoshuaCooper/xbmc-subsonic
|
a479048434d9bac705fb91358e6dcc8dea68d94e
|
f6cf774382b2814ae301aa502ac77b55a99dd9ab
|
refs/heads/master
| 2021-01-21T02:40:42.687505 | 2013-09-17T20:07:20 | 2013-09-17T20:07:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys, re
import xbmc, xbmcplugin, xbmcgui
from resources.lib import Subsonic
# Plugin invocation arguments: argv[0] is the plugin base URL, argv[1]
# the XBMC directory handle used for addDirectoryItem calls.
base_url=sys.argv[0]
handle=int(sys.argv[1])
def addDir(name,url,mode,iconimage):
    # Dispatch on mode: no mode/url -> list top-level music folders;
    # mode 1 -> log the url; mode 2 -> log "name:url".
    # (iconimage is currently unused.)
    if mode == None or url == None or len(url) < 1:
        fetchFolder()
    elif mode == 1:
        print(url)
    elif mode == 2:
        print(name + ":" + url)
def fetchFolder():
    # List the server's top-level music folders as XBMC directory items.
    # NOTE(review): hard-coded server URL and test credentials.
    subsonic = Subsonic.Subsonic('http://bruelldb:4040', 'test', 'test')
    folders = subsonic.getMusicFolders()
    total=len(folders)
    for folder in folders['musicFolders']['musicFolder']:
        # NOTE(review): placeholder "test" entries -- the real per-folder
        # call is the commented-out line below.
        addToXbmc("test", "test", 1, total)
        #addToXbmc(folder['name'], folder['name'], 1)
def handleFolder(folder):
    # Return the index entries for one music folder (folder must be a
    # mapping with an 'id' key from the Subsonic API).
    subsonic = Subsonic.Subsonic('http://bruelldb:4040', 'test', 'test')
    indexes = subsonic.getIndexes(folder['id'])['indexes']
    indexList = indexes['index']
    return indexList
def addToXbmc(name,link,mode,totalItems=0):
    # Add one folder entry to the XBMC directory listing; returns the
    # boolean result of addDirectoryItem. 'link' is currently unused --
    # the item's url is built from 'name' only.
    xbmc.log('name: ' + str(name) + ", link: " + str(link) + ", mode: " + str(mode), xbmc.LOGNOTICE)
    title=decode(name)
    img=''
    #liz=xbmcgui.ListItem(title, iconImage="DefaultFolder.png")
    liz=xbmcgui.ListItem(title, iconImage=img, thumbnailImage=img)
    #liz.setInfo(type="music", infoLabels={"Title": title})
    url=base_url + "?folder=" + name
    xbmc.log('handle: ' + str(handle) + ", url: " + url, xbmc.LOGNOTICE)
    ok = xbmcplugin.addDirectoryItem(handle=handle, url=url, listitem=liz, isFolder=True, totalItems=totalItems)
    return ok
def _callback(matches):
    # re.sub callback for decode(): convert a numeric character reference
    # to its character (Python 2 unichr); on failure -- e.g. a code point
    # out of range -- return the raw digits unchanged.
    id = matches.group(1)
    try:
        return unichr(int(id))
    except:
        return id
def decode(data):
    # Decode numeric HTML entities ("&#123;" or "&#123" before whitespace)
    # in *data* and strip surrounding whitespace. Integers are coerced to
    # unicode first (Python 2).
    if type(data) is int:
        data = unicode(data)
    return re.sub("&#(\d+)(;|(?=\s))", _callback, data).strip()
addDir(None,None,None,None)
|
UTF-8
|
Python
| false | false | 2,013 |
4,432,406,259,550 |
e4abbd8fc70cafb4e7d415c14b0252371ac4ca8a
|
aedf65a662083d82fd2ef021a883dd842961d445
|
/webapp/linkstop/urls.py
|
f0820aeb4e529eeeabdd8bb13a37a405d1ffa0e2
|
[] |
no_license
|
hercules261188/Locidesktop
|
https://github.com/hercules261188/Locidesktop
|
b95f9f4dd709d33f21b7b9f43d52e3b76c99912b
|
cab3a3bda807780244e4e5ce9c3745b6d04ddbc9
|
refs/heads/master
| 2021-12-02T15:12:47.876242 | 2011-01-10T09:21:27 | 2011-01-10T09:21:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    #url(r'^$', 'linkstop.apps.desktop.views.front'),
    url(r'^$', 'linkstop.apps.desktop.views.desktop_welcome'),
    (r'^api/', include('linkstop.apps.desktop.api.urls')),
    # Uncomment the admin/doc line below and add 'django.contrib.admindocs'
    # to INSTALLED_APPS to enable admin documentation:
    (r'^djadmin/doc/', include('django.contrib.admindocs.urls')),
    (r'^djadmin/', include(admin.site.urls)),
    #(r'^top/$', 'linkstop.apps.desktop.views.desktop_top_urls'),
    (r'^accounts/', include('linkstop.apps.accounts.urls')),
    #(r'^media/js/(?P<path>.*)$', 'django.views.static.serve',
    # {'document_root': settings.STATIC_DOC_ROOT+'/minijs/'}),
    # Static files served by Django itself (development-style setup).
    (r'^media/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.STATIC_DOC_ROOT}),
    (r'^iconpacks/$', 'linkstop.apps.desktop.views.iconpacks'),
    url(r'^pages/(?P<page_name>[\w-]+)/$', 'linkstop.apps.desktop.views.page'),
    # Catch-all: the desktop app handles all remaining paths -- keep last.
    (r'^', include('linkstop.apps.desktop.urls')),
)
# Top-level URL path segments reserved for site routes; user-chosen
# desktop names must not collide with these (checked case-insensitively).
# Note: the original list contained 'pages' twice -- deduplicated here
# (frozenset semantics are unchanged).
RESERVED_PATHS = frozenset([
    'api',
    'djadmin',
    'media',
    'accounts',
    'settings',
    'pages',
    'forum',
    'blog',
    'feed',
    'rss',
    'login',
    'logout',
    'faq',
    'themes',
    'icons',
    'iconpacks',
    'new',
    'edit',
])

def is_reserved_path(path):
    """Return True if the first '/'-segment of *path* is a reserved site
    path (case-insensitive)."""
    first_segment = path.partition('/')[0]
    return first_segment.lower() in RESERVED_PATHS
|
UTF-8
|
Python
| false | false | 2,011 |
18,425,409,724,965 |
47f8e749514d3b5daa939e342c6614fbebc5dc51
|
84b7ca31046ce4791f086cdecb14c4be7131176f
|
/2013-06-07/python/exercise2.py
|
42a3fbea3dd2950a87f3e6368b2ca0afedbb55b5
|
[] |
no_license
|
cvdlab-cg/283934
|
https://github.com/cvdlab-cg/283934
|
a6d233078a52391a46c9a48b772bdd1d091c2638
|
1b884af68023c5cde4d8a7fc0bd335744f49a52b
|
refs/heads/master
| 2021-01-10T13:16:51.140468 | 2013-07-20T19:46:05 | 2013-07-20T19:46:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##/* 283934 - HOMEWORK 3 */
|
UTF-8
|
Python
| false | false | 2,013 |
12,953,621,407,100 |
e38c5359ea7d7ed0fff3d67f794512d4cb4eb10f
|
14b251d2d7cb5a64971829a2068c4f8a18e9d24d
|
/printers/urls.py
|
d18e3dfae4f1f149faaaa22152a64ba5d5720706
|
[] |
no_license
|
AxellH/printerinstaller-server
|
https://github.com/AxellH/printerinstaller-server
|
e04ff9fdf852110590a59b39a34aa5e4a36f7a76
|
d81a8ba56882828e4a3fdb4f5abfcb8deaf56b06
|
refs/heads/master
| 2020-12-24T09:56:48.915516 | 2014-12-22T21:49:25 | 2014-12-22T21:49:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''Printer URLs'''
from django.conf.urls import patterns, url
from printers import views as pviews
from sparkle import views as sviews
# URL routes for the printers app. Idiom fix: removed the redundant
# backslash line-continuations -- expressions inside parentheses continue
# implicitly (PEP 8).
urlpatterns = patterns(
    '',
    url(r'^$', pviews.index, name='index'),
    url(r'^manage/$', pviews.manage, name='manage'),
    url(r'^sparkle/$', sviews.index, name='su-index'),
    url(r'^printer/add/$', pviews.printer_add, {}, name='printer_add'),
    url(r'^printer/edit/(?P<id>\d+)/', pviews.printer_edit, {}, name='printer_edit'),
    url(r'^printer/delete/(?P<id>\d+)/$', pviews.printer_delete, name='printer_delete'),
    url(r'^printer/details/(?P<id>\d+)/$', pviews.printer_details, name='printer_details'),
    url(r'^printerlist/add/$', pviews.printerlist_add, name='printerlist_add'),
    url(r'^printerlist/edit/(?P<id>\d+)/$', pviews.printerlist_edit, name='printerlist_edit'),
    url(r'^subscription_list/add/$', pviews.subscription_list_add, name='subscription_list_add'),
    url(r'^subscription_list/edit/(?P<id>\d+)/$', pviews.subscription_list_edit, name='subscription_list_edit'),
    url(r'^subscription_list/delete/(?P<id>\d+)/$', pviews.subscription_list_delete, name='subscription_list_delete'),
    url(r'^printerlist/delete/(?P<id>\d+)/$', pviews.printerlist_delete, name='printerlist_delete'),
    url(r'^printerlist/details/(?P<id>\d+)/$', pviews.printerlist_details, name='printerlist_details'),
    url(r'^printerlist/public/(?P<id>\d+)/', pviews.printerlist_public, {}, name='printerlist_public'),
    url(r'^options/add/$', pviews.options_add, {}, name='options_add'),
    url(r'^options/edit/(?P<id>\d+)/$', pviews.options_edit, {}, name='options_edit'),
    url(r'^options/delete/(?P<id>\d+)/$', pviews.options_delete, name='options_delete'),
    # Catch-alls: subscription endpoint, then named-list lookup (must stay last).
    url(r'^subscribe/$', pviews.get_subscription_list, name='get_subscription_list'),
    url(r'^(?P<name>[^/]+)/$', pviews.getlist, name='get_list'),
)
|
UTF-8
|
Python
| false | false | 2,014 |
25,769,809,823 |
ac3e6379aa5ddea469f42c27157c5684b16045d0
|
7a8c1d8a22132fabe9601b15f4b0b0d28e85030f
|
/treemode/schema/flat.py
|
cb0d9895f12e5e65c1c0b516260f89794b8093c0
|
[
"GPL-3.0-only"
] |
non_permissive
|
cheery/essence
|
https://github.com/cheery/essence
|
87843f66a464a18f0409f3bdf2cf8bdfc407f3c7
|
6ffa4b3d8a5390076739e8da21ed092cd7ad5e4a
|
refs/heads/master
| 2020-05-02T21:19:58.069785 | 2013-05-11T23:01:31 | 2013-05-11T23:01:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from base import Struct, Meta, Constant
def read_byte(fd):
    """Read a single byte from *fd* and return its ordinal value."""
    return ord(fd.read(1))


def write_byte(fd, value):
    """Write the low 8 bits of *value* to *fd* as one character."""
    fd.write(chr(value & 255))


def read_le16(fd):
    """Read a little-endian unsigned 16-bit integer from *fd*."""
    result = 0
    for index, ch in enumerate(fd.read(2)):
        result |= ord(ch) << (8 * index)
    return result


def write_le16(fd, value):
    """Write *value* to *fd* as a little-endian unsigned 16-bit integer."""
    chars = [chr((value >> shift) & 255) for shift in range(0, 16, 8)]
    fd.write(''.join(chars))


def read_le32(fd):
    """Read a little-endian unsigned 32-bit integer from *fd*."""
    result = 0
    for index, ch in enumerate(fd.read(4)):
        result |= ord(ch) << (8 * index)
    return result


def write_le32(fd, value):
    """Write *value* to *fd* as a little-endian unsigned 32-bit integer."""
    chars = [chr((value >> shift) & 255) for shift in range(0, 32, 8)]
    fd.write(''.join(chars))


def read_raw(fd, length):
    """Read exactly *length* characters from *fd*."""
    return fd.read(length)


def write_raw(fd, value):
    """Write *value* to *fd* verbatim."""
    fd.write(value)
def read_record(fd):
    """Read one tagged record from *fd* (Python 2 byte-string stream).

    Wire layout: 16-bit LE code, 16-bit LE length, then a payload whose
    meaning depends on the code. Returns a (kind, value) pair:
      'decl'  -- code 0x0: a Constant (no field names) or Meta declaration
                 parsed from a ':'-joined UTF-8 string
      'data'  -- code 0x1 (raw buffer) or 0x2 (UTF-8 decoded string)
      'list'  -- code 0x3: value is the element count (payload follows as
                 separate records)
      'block' -- any other code: value is the declaration index (code - 0x4)
    """
    code = read_le16(fd)
    length = read_le16(fd)
    if code == 0x0: # constant/meta decl
        cols = read_raw(fd, length).decode('utf-8').split(':')
        if len(cols) == 1:
            # No field names -> a bare constant.
            return 'decl', Constant(cols[0])
        else:
            return 'decl', Meta(cols[0], list(cols[1:]))
    elif code == 0x1: # buffer
        return 'data', read_raw(fd, length)
    elif code == 0x2: # string
        return 'data', read_raw(fd, length).decode('utf-8')
    elif code == 0x3: # list
        return 'list', length
    else:
        # Codes >= 0x4 index into the declarations seen so far.
        return 'block', code - 0x4
def write_decl(fd, decl):
    """Write a declaration record (code 0x0) for *decl*.

    Python 2 only: unicode() on a Meta/Constant yields its ':'-joined
    textual form, which is UTF-8 encoded as the payload.
    """
    decl = unicode(decl).encode('utf-8')
    write_le16(fd, 0x0)
    write_le16(fd, len(decl))
    write_raw(fd, decl)


def write_data(fd, data):
    """Write a leaf data record: code 0x2 for unicode text, 0x1 for raw bytes."""
    if isinstance(data, unicode):
        data = data.encode('utf-8')
        write_le16(fd, 0x2)
    else:
        write_le16(fd, 0x1)
    write_le16(fd, len(data))
    write_raw(fd, data)
def push_decl(fd, decls, decl):
    """Return the record code (uid + 0x4) for *decl*.

    On first use the declaration record is written to *fd* and *decl* is
    registered in *decls*; decls[None] holds the next free uid.
    """
    if decl not in decls:
        write_decl(fd, decl)
        fresh_uid = decls[None]
        decls[decl] = fresh_uid
        decls[None] = fresh_uid + 1
    return decls[decl] + 0x4
def read_block(fd, decls, decl):
    """Recursively read a struct whose declaration is *decl*.

    *decls* is the list of declarations seen so far; declaration records
    interleaved in the stream are appended to it as they appear. Constants
    carry no payload and are returned as-is.
    """
    if isinstance(decl, Constant):
        return decl
    struct = Struct(decl, [])
    # One record per declared field name, in declaration order.
    for name in decl.names:
        which, data = read_record(fd)
        # Skip past any declarations emitted before the field itself.
        while which == 'decl':
            decls.append(data)
            which, data = read_record(fd)
        if which == 'block':
            item = read_block(fd, decls, decls[data])
        elif which == 'list':
            item = read_list(fd, decls, length=data)
        elif which == 'data':
            item = data
        else:
            raise Exception("bad record")
        struct.data.append(item)
    return struct
def write_block(fd, decls, block):
    """Write *block* (a Struct or Constant) as a block record to *fd*.

    *decls* maps declaration -> uid with decls[None] holding the next free
    uid; push_decl emits the declaration record the first time it is seen.
    """
    if isinstance(block, Constant):
        uid = push_decl(fd, decls, block)
        write_le16(fd, uid)
        write_le16(fd, 0)  # constants have zero fields
    else:
        uid = push_decl(fd, decls, block.meta)
        count = len(block.meta.names)
        write_le16(fd, uid)
        write_le16(fd, count)
        # One record per field, dispatched on the field's runtime type.
        for i in range(count):
            item = block[i]
            if isinstance(item, Struct):
                write_block(fd, decls, item)
            elif isinstance(item, list):
                write_list(fd, decls, item)
            else:
                write_data(fd, item)
def read_list(fd, decls, length):
    """Read *length* block records from *fd* into a Python list.

    Only struct/constant blocks are valid list elements; declaration
    records interleaved in the stream are accumulated into *decls*.
    """
    out = []
    for index in range(length):
        which, data = read_record(fd)
        while which == 'decl':
            decls.append(data)
            which, data = read_record(fd)
        if which == 'block':
            item = read_block(fd, decls, decls[data])
            out.append(item)
        else:
            # Nested lists / bare data are not permitted as list elements.
            raise Exception("bad record")
    return out


def write_list(fd, decls, data):
    """Write a list record (code 0x3): element count, then each block."""
    write_le16(fd, 0x3)
    write_le16(fd, len(data))
    for item in data:
        write_block(fd, decls, item)
# 32-byte file header: mimetype, dot padding, then the format version.
version = "0.0"
mimetype = "struct/flat"
padding = '.'*(32 - len(version) - len(mimetype))
header = mimetype + padding + version


def load(fd):
    """Deserialize a flat-format document from *fd* and return the root.

    Closes *fd* before returning. Raises Exception when the 32-byte header
    does not match, or when the stream does not consist of declarations
    followed by a single root block record.
    """
    if fd.read(32) != header:
        raise Exception("Invalid format")
    decls = []
    # Collect leading declarations until the root block record appears.
    which, data = read_record(fd)
    while which == 'decl':
        decls.append(data)
        which, data = read_record(fd)
    if which != 'block':
        raise Exception("bad file begin")
    root = read_block(fd, decls, decls[data])
    fd.close()
    return root
def save(fd, root):
    """Serialize *root* into *fd* (header first) and close the stream."""
    fd.write(header)
    # The decls dict maps declaration -> uid; the None slot is the next free uid.
    write_block(fd, {None: 0}, root)
    fd.close()


# Idiom fix (PEP 8 E731): the originals were lambdas assigned to names;
# plain defs keep identical behavior and give useful tracebacks.
def load_file(path):
    """Open the file at *path* and deserialize its contents."""
    return load(open(path, 'r'))


def save_file(path, root):
    """Serialize *root* into the file at *path*."""
    return save(open(path, 'w'), root)
if __name__ == "__main__":
    # Smoke test (Python 2 syntax): build a small nested document, round-trip
    # it through the flat format on disk, and print the result.
    Fruit = Meta(u'fruit', [
        u"name"
    ])
    Meal = Meta(u'meal', [
        u"fruits"
    ])
    # A meal may contain fruits and nested meals, exercising list + recursion.
    meal = Meal([
        Fruit(u"banana"),
        Fruit(u"peach"),
        Fruit(u"orange"),
        Meal([
            Fruit(u"lemon"),
            Fruit(u"grape"),
        ]),
        Fruit(u"kiwi"),
    ])
    save_file("fruits", meal)
    meal = load_file("fruits")
    print meal
    print "%r" % unicode(meal.meta)
    print "%r" % unicode(meal.fruits[0].meta)
|
UTF-8
|
Python
| false | false | 2,013 |
4,011,499,463,968 |
47b79f26862cf8e07addb4e9dd42b55c6c0d37ba
|
8960199246844fabb36f846e0ef9aab5a4198c06
|
/python/testData/refactoring/inlinelocal/parenthesisInsertedForSubtraction.after.py
|
823c71531502c96702888ac83fde1deab15a6bc4
|
[] |
no_license
|
bit2pixel/intellij-community
|
https://github.com/bit2pixel/intellij-community
|
c793ed97d5b696250e087036543f6a54b7935ed6
|
675f89908bc9965aaf4dac79810fcac515fe5eb1
|
refs/heads/master
| 2021-01-23T00:53:00.454689 | 2014-10-07T16:38:13 | 2014-10-07T16:39:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# IDE refactoring-test fixture: the expected file state after inlining a
# local variable into subtractions, where parentheses must be inserted to
# preserve evaluation order (res1) but not where unnecessary (res2).
res1 = 1 - (2 - 3)
res2 = 2 - 3 - 1
|
UTF-8
|
Python
| false | false | 2,014 |
18,940,805,778,942 |
54e4eb6b043bc14018d1f97c00ffa3b58a6fec3b
|
3d19e1a316de4d6d96471c64332fff7acfaf1308
|
/Users/J/jgkim/korean_health_news_scraper_for_joins.py
|
b9ae828011663f1057cd454a902debacbd064d04
|
[] |
no_license
|
BerilBBJ/scraperwiki-scraper-vault
|
https://github.com/BerilBBJ/scraperwiki-scraper-vault
|
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
|
65ea6a943cc348a9caf3782b900b36446f7e137d
|
refs/heads/master
| 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
import scraperwiki

# Reuse the generic Hani news scraper, pointing it at the JoongAng Ilbo
# (joinsmsn) health-news listing.
news_scraper = scraperwiki.utils.swimport('korean_health_news_scraper_for_hani')

news_scraper.base_url = 'http://life.joinsmsn.com/news/list/list.asp?sid=5119'
news_scraper.page_param = 'page'
news_scraper.page_num_of_article = 15
news_scraper.page_start = 1
news_scraper.page_step = 1
news_scraper.page_encoding = 'utf-8'
news_scraper.page_sleep = 0

# XPath extraction rules consumed by the shared scraper.
news_scraper.patterns = {
    'article' : {'xpath' : 'id("life_list")/div/ul/li' },
    'title' : {
        'xpath' : './/a[@class="title_cr"]',
        'strip' : False
    },
    'summary' : {
        'xpath' : './/a[@class="read_cr"]',
        'strip' : False
    },
    'date' : {
        'xpath' : './/span[@class="date"]',
        'strip' : False,
        'format' : '%Y-%m-%d'
    },
    'source' : {
        'default' : '중앙일보'
    }
}

# BUG FIX: the original file contained this entire script twice (an
# accidental paste duplication), which re-imported the helper and ran
# main() a second time. The duplicate has been removed.
news_scraper.main()
|
UTF-8
|
Python
| false | false | 2,013 |
11,544,872,113,819 |
16331c918d491d57e145a4f2d716dde73209291f
|
5941a554f42b233f4f8eea13fb4add66c62bb8ff
|
/python/modules/funtoo/boot/extensions/lilo.py
|
86bfea2a98e1d81ba2da12d77939f0ce6435acda
|
[
"GPL-1.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
clickbeetle/boot-update
|
https://github.com/clickbeetle/boot-update
|
195d2122584600e33fc02c21f33a817f3ea529e6
|
d2e9a5a4e9cfb0f984a22a60df5743d819cd6d76
|
refs/heads/master
| 2020-05-29T16:12:56.957174 | 2012-04-06T06:26:23 | 2012-04-06T06:26:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python2
# -*- coding: ascii -*-
import os
from ..resolver import Resolver
def getExtension(config):
    """Factory entry point used by the boot framework to obtain this extension."""
    return LILOExtension(config)
class LILOExtension(Extension):
    """Boot-loader extension that generates /etc/lilo.conf entries.

    NOTE(review): only Resolver is imported in the visible file header, yet
    the class derives from Extension and reads self.r in its methods --
    presumably both are provided by the package/base class; confirm.
    """

    def __init__(self, config):
        Extension.__init__(self, config)
        self.fn = "/etc/lilo.conf"
        self.bootitems = []

    def isAvailable(self):
        """Return [ok, messages]; ok is False when /sbin/lilo is missing."""
        msgs = []
        ok = True
        if not os.path.exists("/sbin/lilo"):
            msgs.append(["fatal", "/sbin/lilo, required for boot/generate = lilo, does not exist"])
            ok = False
        return [ok, msgs]

    def updateBootLoader(self):
        """LILO must be re-run by hand after the config file changes."""
        msgs = [ [ "warn", "This version of coreboot requires that you run /sbin/lilo manually." ] ]
        return [True, msgs]

    def generateBootEntry(self, l, sect, kname, kext):
        """Append one image= stanza for kernel *kname* to line list *l*.

        Returns [ok, messages]; aborts early when root/rootfstype
        resolution fails.
        """
        ok = True
        allmsgs = []
        l.append("")
        self.bootitems.append(kname)
        l.append("image=%s" % kname)
        params = self.config.item(sect, "params").split()
        ok, allmsgs, myroot = self.r.DoRootAuto(params, ok, allmsgs)
        if not ok:
            return [ok, allmsgs]
        ok, allmsgs, myfstype = self.r.DoRootfstypeAuto(params, ok, allmsgs)
        if not ok:
            return [ok, allmsgs]
        # root= is emitted as its own directive, so strip it from append=.
        self.r.ZapParam(params, "root=")
        l += [
            "	read-only",
            "	root=%s" % myroot,
            "	append=\"%s\"" % " ".join(params)
        ]
        initrds = self.config.item(sect, "initrd")
        initrds = self.r.FindInitrds(initrds, kname, kext)
        for initrd in initrds:
            # BUG FIX: the original did '" initrd=" % ...' -- a %-format with
            # no conversion specifier, which raises TypeError at runtime.
            l.append("	initrd=%s" % self.r.RelativePathTo(initrd, "/boot"))
        l.append("")
        return [ok, allmsgs]

    def generateConfigFile(self):
        """Build the full lilo.conf line list.

        Returns [ok, messages, lines, default_entry_name].
        """
        l = []
        c = self.config
        ok = True
        allmsgs = []
        l.append(c.condSubItem("boot/timeout", "timeout=%s"))
        # Pass our boot-entry generator to GenerateSections, which appends one
        # stanza per configured section.
        # BUG FIX: the original said 'self.self.r' (AttributeError).
        ok, msgs, defpos, defname = self.r.GenerateSections(l, self.generateBootEntry)
        allmsgs += msgs
        if not ok:
            return [ok, allmsgs, l, None]
        # BUG FIX: the original omitted the comma after "", silently
        # concatenating the two strings into a single 'default=...' element
        # instead of a blank line followed by the default directive.
        l += [
            "",
            "default=%s" % defname
        ]
        allmsgs.append(["warn", "Please note that LILO support is *ALPHA* quality and is for testing only."])
        return [ok, allmsgs, l, defname]
|
UTF-8
|
Python
| false | false | 2,012 |
7,206,955,138,081 |
17c9bf5c3ba9ac2f0862edeb142ba122e5ec021a
|
8a5f6b0961726ce85d97d3dffa98e9d9121d2ca3
|
/timus/scoreboard/tests.py
|
7493650d7013d200c2fe249db24e0678cf670df8
|
[
"MIT"
] |
permissive
|
admp/timus-scoreboard
|
https://github.com/admp/timus-scoreboard
|
586cd3a7b8a20fdac378b14d9bf2f3d1e5fcdb59
|
04eb2240e9948634cc930a0618ae5a8bdcd69188
|
refs/heads/master
| 2016-08-03T16:36:19.716076 | 2009-10-07T18:49:19 | 2009-10-07T18:49:19 | 323,372 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from zope.testing import doctest, module
def test_suite():
    """Assemble the doctest-file suite for the scoreboard package."""
    flags = doctest.ELLIPSIS
    suite_files = ('utilities.txt', 'configuration.txt', 'crawler.txt')
    suites = [doctest.DocFileSuite(name, optionflags=flags)
              for name in suite_files]
    return unittest.TestSuite(suites)


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
|
UTF-8
|
Python
| false | false | 2,009 |
13,563,506,732,636 |
0e6f149407807cd312e40d18dbe3077877b19381
|
1c6569a52bcb384e50ef9302e6831cc33de75f42
|
/WebPage.py
|
9e63227cb6358ce7cfe13bd5169d7e7667fd269d
|
[] |
no_license
|
pythonrepo/STHUB
|
https://github.com/pythonrepo/STHUB
|
bb8cf866e1891189028fdb650bdd2e1a7f3fbee4
|
4520b6e9575a878311449f47a6c6947753edbe44
|
refs/heads/master
| 2016-09-06T19:47:40.611755 | 2014-09-15T15:27:52 | 2014-09-15T15:27:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# BUG FIX: the original read '_author__' -- a typo'd, meaningless name;
# the module-metadata convention is the dunder '__author__'.
__author__ = 'sin'
import easygui
from GdataEntry import *
class WebPage(object):
    """Minimal GUI login flow backed by easygui dialogs and GdataEntry."""

    def login(self):
        """Prompt for a password and show whether it matches the stored entry."""
        gde = GdataEntry()
        password = easygui.passwordbox("What is your password ?")
        # Idiom fix: checkEntry returns a boolean -- test it directly
        # instead of comparing '== True'.
        if gde.checkEntry(password1=password):
            easygui.msgbox("Welcome to the page")
        else:
            # Fixed typo in the user-facing message ("Incorrent" -> "Incorrect").
            easygui.msgbox("Incorrect Password")
# Module-level side effect: constructing WebPage and calling login() pops
# the easygui password dialog as soon as this script is imported or run.
my_login = WebPage()
my_login.login()
|
UTF-8
|
Python
| false | false | 2,014 |
5,634,997,110,948 |
fcc69a8ad98e3e85b483ea1607c5e35b26a76931
|
05d4a51a820b53e665cb38d214b3cee869cde7ed
|
/app/views/constituencies.py
|
d4946cbf44bd38b97a38537d317af7a71ce3b72c
|
[] |
no_license
|
ahume/politmus-api
|
https://github.com/ahume/politmus-api
|
3923e02b119534db389c9480b3deac20372201b9
|
a269fda9551bfce01c50a224496dbb26609dc8b1
|
refs/heads/master
| 2020-06-08T17:54:54.796922 | 2012-12-07T14:35:31 | 2012-12-07T14:35:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import webapp
from google.appengine.ext import db
from models import Constituency
import utils
class ConstituencyListHandler(webapp.RequestHandler, utils.QueryFilter, utils.JsonAPIResponse):
    """GET /constituencies -- JSON list of every constituency, each with a
    link to its detail endpoint."""

    def get(self):
        constituencies = []
        for const in Constituency.all():
            entry = db.to_dict(const)
            entry['details'] = '/constituencies/%s' % const.slug
            constituencies.append(entry)
        self.returnJSON(200, {'constituencies': constituencies})
class ConstituencyHandler(webapp.RequestHandler, utils.QueryFilter, utils.JsonAPIResponse):
    """GET /constituencies/<slug> -- JSON detail view for one constituency."""

    def get(self, slug):
        response = {}
        try:
            match = Constituency.all().filter('slug =', slug)[0]
        except IndexError:
            # BUG FIX: the original used a bare 'except:' and then fell
            # through, sending a second 200 response after the 404.
            response['error'] = 'Cannot find constituency'
            self.returnJSON(404, response)
            return
        response['constituency'] = db.to_dict(match)
        self.returnJSON(200, response)
|
UTF-8
|
Python
| false | false | 2,012 |
15,590,731,286,793 |
06ce18ab740a4ed2884a1dc75c838fc547300b85
|
47b89595f560120355a9d119eb26ac30f7b5c444
|
/nltk_test.py
|
bdb9810248ef739c8958715bbb919ed767e13e17
|
[] |
no_license
|
fatihsucu/nothing
|
https://github.com/fatihsucu/nothing
|
e5a6d5ed7654ddd315f8c2bf8e30bde3cb5629ad
|
a0c807e0d7cbbe42439dd0a34ab9dce8b40ce1e8
|
refs/heads/master
| 2021-01-23T16:30:50.611273 | 2014-10-24T14:39:11 | 2014-10-24T14:39:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unirest
class Routes(object):
    """Fetch and expose driving directions between two places via the
    Mashape MapIt API."""

    # NOTE(review): declared as a class-level cache, but __init__ assigns to
    # the *instance* (self.data), so the class attribute never fills and each
    # new instance refetches. Preserved as-is to avoid changing behavior.
    data = None

    def __init__(self):
        super(Routes, self).__init__()
        self.end = "yalova"
        self.start = "bursa"
        if not self.data:
            self.data = self.make_request(self.end, self.start)

    def make_request(self, end, start):
        """Fetch directions for *start* -> *end* and return the parsed body.

        BUG FIX: the original ignored its parameters and read self.end /
        self.start directly (identical values at the only call site).
        NOTE(review): the API key is hard-coded; move it to configuration.
        """
        response = unirest.get(
            "https://montanaflynn-mapit.p.mashape.com/directions?ending={}&starting={}".format(end, start),
            headers={"X-Mashape-Key": "qD3iWvWhE6mshvyXTW5QGdvIGd8Kp1VFEUwjsnhuFeUCLoTvHm"}
        )
        return response.body

    def get_directions_data(self):
        """Yield the raw direction dicts from the API response."""
        for d in self.data["directions"]:
            yield d

    def get_directions(self):
        """Yield each direction's text, UTF-8 encoded."""
        for direction in self.data["directions"]:
            yield direction["direction"].encode('utf8')

    def get_distance(self):
        """Return the total distance reported by the API."""
        return self.data["distance"]

    def get_duration(self):
        """Return the total duration reported by the API."""
        return self.data["duration"]

    def alert_maneuver(self, direction):
        """Return the maneuver for *direction*, or "Continue" when it is
        missing or empty."""
        try:
            if not direction["maneuver"]:
                return "Continue"
            else:
                return direction["maneuver"]
        except (KeyError, TypeError):
            # BUG FIX: narrowed the original bare 'except:' to the two
            # expected failures (missing key / non-mapping input).
            return "Continue"
|
UTF-8
|
Python
| false | false | 2,014 |
4,243,427,727,686 |
b010b984624c0aec2c4ad9dfaead479808f494ac
|
4440af71a02a629878f69b94f88d11f2d1032796
|
/CMS-2012-2013/src/reqmgr/fallbackStageoutRecovery.py
|
4f0fbd18df5d0ef84fea18f5d93d5e302fd0312d
|
[] |
no_license
|
dballesteros7/dev-scripts
|
https://github.com/dballesteros7/dev-scripts
|
d0f23489416023d826cc99605fb50b10c549865c
|
5e06fc340e335131aa84314a086b522453a2352d
|
refs/heads/master
| 2021-01-19T22:10:58.606522 | 2013-08-22T00:42:05 | 2013-08-22T00:42:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Aug 31, 2012
@author: dballest
'''
import sys
import os
import json
import shlex
from optparse import OptionParser
from WMCore.Services.PhEDEx.PhEDEx import PhEDEx
from WMCore.Database.CMSCouch import Database
from WMCore.Algorithms.SubprocessAlgos import runCommand
def checkForMissingFiles(options):
#Initialize stuff
phedexAPI = PhEDEx({'cachepath' : options.cachepath})
acdcCouch = Database('wmagent_acdc', options.acdcUrl)
#Let's get the IDs of the ACDC documents for the task/request/group/user
array = [options.group, options.user, options.request, options.task]
result = acdcCouch.loadView('ACDC', 'owner_coll_fileset_docs', {'reduce' : False}, [array])
documentsIDs = [x['id'] for x in result['rows']]
badFiles = {}
#Go through the documents
for docID in documentsIDs:
doc = acdcCouch.document(docID)
#Are we going to change this doc? Better back it up
if options.change:
backupFile = os.open(os.path.join(options.backup, "%s.bkp" % doc["_id"]), 'w')
json.dump(doc, backupFile)
backupFile.close()
#Go through the files
files = doc["files"]
for inputFile in files:
#Use PhEDEx API to get site based on the SE
se = files[inputFile]["locations"][0]
siteLocation = phedexAPI.getBestNodeName(se)
#Now get the PFN
pfnDict = phedexAPI.getPFN(siteLocation, inputFile)
inputPfn = pfnDict[(siteLocation, inputFile)]
#Run lcg-ls commands and see what we get
command = 'lcg-ls -b -D srmv2 --srm-timeout 60 %s' % inputPfn
commandList = shlex.split(command)
try:
(stdout, stderr, exitCode) = runCommand(commandList, False, 70)
except Exception, ex:
exitCode = 99999
stdout = ''
stderr = str(ex)
if exitCode:
#Something went wrong with the command
#Mark the file as bad
if docID not in badFiles:
badFiles[docID] = []
badFiles[docID].append(inputFile)
print 'File %s is thought to be bad' % inputFile
print 'Command was %s' % command
print 'Return code was %i' % exitCode
print 'Stdout was %s' % stdout
print 'Stderr was %s' % stderr
def swapLocations(options):
#Initialize stuff
phedexAPI = PhEDEx({'cachepath' : options.cachepath})
acdcCouch = Database('wmagent_acdc', options.acdcUrl)
#Let's get the IDs of the ACDC documents for the task/request/group/user
array = [options.group, options.user, options.request, options.task]
result = acdcCouch.loadView('ACDC', 'owner_coll_fileset_docs', {'reduce' : False}, [array])
documentsIDs = [x['id'] for x in result['rows']]
#Load the map file saying what we want to change of location
mapFile = open(options.map, 'r')
locationMap = json.load(mapFile)
mapFile.close()
#Go through the documents
for docID in documentsIDs:
doc = acdcCouch.document(docID)
#Are we going to change this doc? Better back it up
if options.change:
backupFile = os.open(os.path.join(options.backup, "%s.bkp" % doc["_id"]), 'w')
json.dump(doc, backupFile)
backupFile.close()
#Go through the files
files = doc["files"]
for inputFile in files:
#Use PhEDEx API to get site based on the SE
#Then map that to the desired target
se = files[inputFile]["locations"][0]
siteLocation = phedexAPI.getBestNodeName(se)
targetLocation = locationMap.get(siteLocation, siteLocation)
if siteLocation == targetLocation:
#Nothing to do with this one, move on
continue
if not options.change:
#No changes, then give the commands to move the files
#Get the PFN for both the current location and the target location
pfnDict = phedexAPI.getPFN(siteLocation, inputFile)
inputPfn = pfnDict[(siteLocation, inputFile)]
pfnDict = phedexAPI.getPFN(targetLocation, inputFile)
targetPfn = pfnDict[(targetLocation, inputFile)]
#Print it to stdout
print "lcg-cp -D srmv2 -b %s %s" % (inputPfn, targetPfn)
else:
#This is changes time, let's move the stuff
targetSE = phedexAPI.getNodeSE(targetLocation)
files[inputFile]["locations"][0] = targetSE
print "Changing location of %s from %s to %s" % (inputFile, se, targetSE)
#If specified, commit the changes
if options.change:
acdcCouch.commitOne(doc)
return 0
def main():
    """Parse command-line options and dispatch to the requested recovery mode."""
    parser = OptionParser()
    parser.add_option("-r", "--request", dest="request")
    parser.add_option("-g", "--group", dest="group")
    parser.add_option("-u", "--user", dest="user")
    parser.add_option("-t", "--task", dest="task")
    parser.add_option("-a", "--acdc", dest="acdcUrl")
    parser.add_option("-m", "--map", dest="map")
    parser.add_option("-o", "--mode", dest="mode", default="move")
    parser.add_option("-b", "--backup-dir", dest="backup", default="/tmp/backup")
    parser.add_option("-c", "--cache-dir", dest="cachepath", default="/tmp/")
    parser.add_option("--commit-changes", action="store_true", dest="change",
                      default=False)
    options, _ = parser.parse_args()
    # Dispatch table instead of an if/elif chain; unknown modes exit cleanly.
    handlers = {'move': swapLocations, 'check': checkForMissingFiles}
    handler = handlers.get(options.mode)
    if handler is None:
        return 0
    return handler(options)


if __name__ == '__main__':
    sys.exit(main())
|
UTF-8
|
Python
| false | false | 2,013 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.