Dataset schema (37 columns, one row per source file; ⌀ marks nullable columns):

__id__: int64 (3.09k - 19,722B)
blob_id: string (length 40)
directory_id: string (length 40)
path: string (length 2 - 256)
content_id: string (length 40)
detected_licenses: list
license_type: string (3 classes)
repo_name: string (length 5 - 109)
repo_url: string (length 24 - 128)
snapshot_id: string (length 40)
revision_id: string (length 40)
branch_name: string (length 4 - 42)
visit_date: timestamp[ns]
revision_date: timestamp[ns]
committer_date: timestamp[ns]
github_id: int64 (6.65k - 581M, ⌀)
star_events_count: int64 (0 - 1.17k)
fork_events_count: int64 (0 - 154)
gha_license_id: string (16 classes)
gha_fork: bool (2 classes)
gha_event_created_at: timestamp[ns]
gha_created_at: timestamp[ns]
gha_updated_at: timestamp[ns]
gha_pushed_at: timestamp[ns]
gha_size: int64 (0 - 5.76M, ⌀)
gha_stargazers_count: int32 (0 - 407, ⌀)
gha_forks_count: int32 (0 - 119, ⌀)
gha_open_issues_count: int32 (0 - 640, ⌀)
gha_language: string (length 1 - 16, ⌀)
gha_archived: bool (2 classes)
gha_disabled: bool (1 class)
content: string (length 9 - 4.53M)
src_encoding: string (18 classes)
language: string (1 class)
is_vendor: bool (2 classes)
is_generated: bool (2 classes)
year: int64 (1.97k - 2.01k)
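The records below follow the schema above. A minimal sketch of loading such a dump for programmatic inspection, assuming the table was exported as Parquet (the file name the_stack_sample.parquet is hypothetical):

import pandas as pd

# load the table; column names follow the schema listed above
df = pd.read_parquet("the_stack_sample.parquet")  # hypothetical file name

# quick look at a few metadata columns
print(df[["path", "repo_name", "license_type", "year"]].head())

# print the source text of the first record
print(df.iloc[0]["content"])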
__id__: 10,703,058,501,905
blob_id: d3f785eb5a59b3d590ed7dddcd3e3f61945b41a2 | directory_id: 480047df66f213f521fd2187b591063b6257159d | content_id: 79cd02ff6219e8c7bc9b256aa6bc55cb360e05ea
path: /bikes/admin.py
detected_licenses: [] | license_type: no_license
repo_name: blsmth/paradise | repo_url: https://github.com/blsmth/paradise
snapshot_id: 0b33d67fd390823dad81cba5a7f85c289fd1be86 | revision_id: 03c0d2afdd00c406b4863b74673a1bf845bafa78 | branch_name: refs/heads/master
visit_date: 2020-05-19T09:02:38.269418 | revision_date: 2012-02-07T02:02:53 | committer_date: 2012-02-07T02:02:53
github_id: 3,373,384 | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
from django.contrib import admin

from bikes.models import Bike, Image


class ImageInline(admin.TabularInline):
    model = Image
    extra = 1
    fieldsets = (
        (' ', {
            'classes': ('collapse',),
            'fields': ('title', 'description', 'file'),
        }),
    )


class BikeAdmin(admin.ModelAdmin):
    inlines = [ImageInline]
    list_display = ['name', 'motto', 'description']
    prepopulated_fields = {'slug': ('name',)}


class ImageAdmin(admin.ModelAdmin):
    list_display = ['title', 'description']


admin.site.register(Bike, BikeAdmin)
admin.site.register(Image, ImageAdmin)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2012

__id__: 2,972,117,399,843
blob_id: 67e50bbb6ad51b39b1c9a58a969208e8ed8d40b3 | directory_id: fd532433e4c4f60746a9200afd855f613d846a45 | content_id: ea82f5edb4347fd9d8b4a56ddb211b54cc2dceb5
path: /whopaid/customers_info.py
detected_licenses: [] | license_type: no_license
repo_name: abhigitz/tracker | repo_url: https://github.com/abhigitz/tracker
snapshot_id: 526668cddf6886b975af13a9971bff7abc0518e1 | revision_id: 2a9bc3496f96f84fedd860b39df9c9f592220f4f | branch_name: refs/heads/master
visit_date: 2020-06-04T15:44:39.688150 | revision_date: 2014-09-23T05:53:43 | committer_date: 2014-09-23T05:53:43
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
#######################################################
## Author: Ashish Anand
## Date: 25-Dec-2012
## Intent: To read bills.xlsx and store company info
## Requirement: Python Interpreter must be installed
## Openpyxl must be installed
#######################################################
from Util.Misc import GetPickledObject
from Util.Config import GetOption, GetAppDir
from Util.ExcelReader import GetRows, GetCellValue
import os
class CustomerInfoCol:
"""
This class is used as Enum.
If and when the format of excel file changes just change the column bindings in this class
"""
CompanyFriendlyNameCol = "A"
CompanyGroupCol = "B"
BillingAddressCol = "C"
TinNumberCol = "D"
PhoneNumberCol = "E"
DeliveryPhoneNumberCol = "F"
SmsDispatchNumberCol = "G"
PaymentReminderSmsNoCol = "H"
CompanyOfficialNameCol = "I"
CourierAddressCol = "J"
DeliveryPinCodeCol = "K"
PreferredCourierCol = "L"
CityCol = "M"
EmailForPayment = "N"
KindAttentionCol = "O"
EmailForFormC = "P"
TrustCol = "Q"
IncludeDaysCol = "R"
CreditLimitCol = "S"
SendAutomaticMails = "T"
MinDaysGapCol = "U"
IncludeBillAmountInEmails = "V"
CompanyCodeCol = "W"
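# Maps one worksheet row onto a SingleCompanyInfo object, using the column
# letters bound in CustomerInfoCol above.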
def CreateSingleCustomerInfo(row):
c = SingleCompanyInfo()
for cell in row:
col = cell.column
val = GetCellValue(cell)
if col == CustomerInfoCol.CompanyFriendlyNameCol:
c.companyFriendlyName = val
elif col == CustomerInfoCol.BillingAddressCol:
c.billingAddress = val
elif col == CustomerInfoCol.TinNumberCol:
c.tinNumber = val
elif col == CustomerInfoCol.PhoneNumberCol:
c.phoneNumber = val
elif col == CustomerInfoCol.DeliveryPinCodeCol:
c.deliveryPinCode = val
elif col == CustomerInfoCol.SmsDispatchNumberCol:
c.smsDispatchNo = val
elif col == CustomerInfoCol.DeliveryPhoneNumberCol:
c.deliveryPhNo = val
elif col == CustomerInfoCol.CompanyOfficialNameCol:
c.companyOfficialName = val
elif col == CustomerInfoCol.CourierAddressCol:
c.courierAddress = val
elif col == CustomerInfoCol.PreferredCourierCol:
c.preferredCourier = val
elif col == CustomerInfoCol.CityCol:
c.city = val
elif col == CustomerInfoCol.EmailForPayment:
c.emailForPayment = val.replace("\n","") if val else val
elif col == CustomerInfoCol.EmailForFormC:
c.emailForFormC = val.replace("\n", "") if val else val
elif col == CustomerInfoCol.KindAttentionCol:
c.kindAttentionPerson = val
elif col == CustomerInfoCol.TrustCol:
c.trust = val
elif col == CustomerInfoCol.IncludeDaysCol:
c.includeDays = val
elif col == CustomerInfoCol.CreditLimitCol:
c.creditLimit = val
elif col == CustomerInfoCol.SendAutomaticMails:
c.includeInAutomaticMails = val
elif col == CustomerInfoCol.CompanyCodeCol:
c.companyCode = val
elif col == CustomerInfoCol.MinDaysGapCol:
c.minDaysGapBetweenAutomaticMails = val
elif col == CustomerInfoCol.IncludeBillAmountInEmails:
c.includeBillAmountinEmails = val
elif col == CustomerInfoCol.CompanyGroupCol:
c.companyGroupName = val
return c
class SingleCompanyInfo():
"""This represents a single row in Cust sheet of Bills.xlsx"""
pass
class _AllCustomersInfo(dict):
"""Base Class which is basically a dictionary. Key is compName and Value is a list of info"""
def __init__(self, custDBwbPath):
super(_AllCustomersInfo, self).__init__(dict())
rows = GetRows(workbookPath=custDBwbPath,
sheetName = GetOption("CONFIG_SECTION", "NameOfCustSheet"),
firstRow= GetOption("CONFIG_SECTION", "CustDataStartsAtRow"),
includeLastRow=False)
for row in rows:
c = CreateSingleCustomerInfo(row)
self[c.companyFriendlyName] = c
def GetListOfCompNamesForThisGrp(self, grpName):
return [compName for compName in self if self[compName].companyGroupName == grpName]
def GetTrustForCustomer(self, compName):
return self[compName].trust
def GetCreditLimitForCustomer(self, compName):
return self[compName].creditLimit
def GetCompanyOfficialName(self, compName):
return self[compName].companyOfficialName
def GetCompanyGroupName(self, compName):
return self[compName].companyGroupName
def GetDeliveryPinCode(self, compName):
return self[compName].deliveryPinCode
def GetSmsDispatchNumber(self, compName):
return self[compName].smsDispatchNo
def GetDeliveryPhoneNumber(self, compName):
return self[compName].deliveryPhNo
def GetCustomerPhoneNumber(self, compName):
return self[compName].phoneNumber
def GetCustomerDeliveryAddress(self, compName):
return self[compName].courierAddress
def GetCustomerPreferredCourier(self, compName):
return self[compName].preferredCourier
def GetCustomerCity(self, compName):
return self[compName].city
def GetPaymentReminderEmailsForCustomer(self, compName):
return self[compName].emailForPayment
def GetFormCEmailsForCustomer(self, compName):
return self[compName].emailForFormC
def GetCustomerKindAttentionPerson(self, compName):
return self[compName].kindAttentionPerson
def GetIncludeDaysOrNot(self, compName):
return self[compName].includeDays
def IncludeBillAmountInEmails(self, compName):
val = self[compName].includeBillAmountinEmails
return val.lower() in ["yes", "y"]
def IncludeCustInAutomaticMails(self, compName):
val = self[compName].includeInAutomaticMails
return val.lower() in ["yes", "y"]
def GetMinDaysGapBetweenMails(self, compName):
return self[compName].minDaysGapBetweenAutomaticMails
def GetFormCEmailAsListForCustomer(self, compName):
toMailStr = self.GetFormCEmailsForCustomer(compName)
if not toMailStr: return None
toMailList = toMailStr.replace(';', ',').replace(' ', '').split(',')
#Remove spaces from eachMail in the list and create a new list
return [x for x in toMailList if x]
def GetPaymentReminderEmailAsListForCustomer(self, compName):
toMailStr = self.GetPaymentReminderEmailsForCustomer(compName)
if not toMailStr: return None
toMailList = toMailStr.replace(';', ',').replace(' ', '').split(',')
#Remove spaces from eachMail in the list and create a new list
return [x for x in toMailList if x]
def GetAllCustomersInfo():
custDBwbPath = os.path.join(GetAppDir(), GetOption("CONFIG_SECTION", "CustDBRelativePath"))
def _CreateAllCustomersInfoObject(custDBwbPath):
return _AllCustomersInfo(custDBwbPath)
return GetPickledObject(custDBwbPath, createrFunction=_CreateAllCustomersInfoObject)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 9,474,697,906,281
blob_id: 5b46f6f089c64fe52427d71ebd3cb3983dfd06f2 | directory_id: a7c9b5442d5e23abff3981beedb98ffe97aaa3e8 | content_id: 3c77ccadb8aefecbfb67ba7d692d6d689665f5e1
path: /pingme.py
detected_licenses: [] | license_type: no_license
repo_name: seshadrs/pingme | repo_url: https://github.com/seshadrs/pingme
snapshot_id: 67e3047878684e8f5fd52cb7de26118e4ea3a518 | revision_id: 25b3d93e0a0b7f37ffff05f0d398a6e2cb15d679 | branch_name: refs/heads/master
visit_date: 2020-06-01T18:37:57.952964 | revision_date: 2014-05-04T23:44:45 | committer_date: 2014-05-04T23:44:45
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
#!/usr/bin/env python
"""pingme.py: A Unix utility that sends an email when a shell command completes executing."""
__author__ = "Seshadri Sridharan"
import ast
import argparse
import datetime
from email.mime.text import MIMEText
import getpass
import os
import pickle
import pyDes
import smtplib
import sys
SUBJECT_PREFIX='PingMe : '
CONFIG_FILE_NAME='.pingme_config'
def sendMail(server, address, uname, pwd, subject,body):
msg = MIMEText(body)
msg['Subject'] = subject
msg['From'] = address
msg['To'] = address
server = smtplib.SMTP(server)
server.starttls()
server.login(uname,pwd)
server.sendmail(address, [address], msg.as_string())
server.quit()
def readPipedOutput():
startTime=datetime.datetime.now()
pipedOutput=""
for line in sys.stdin:
pipedOutput+=line
endTime=datetime.datetime.now()
executionTime = endTime - startTime
return (executionTime,pipedOutput)
def configFilePath():
return os.path.expanduser('~')+'/'+CONFIG_FILE_NAME
def getProfiles():
filePath=configFilePath()
if os.path.isfile(filePath):
try:
return ast.literal_eval(decrypt(pickle.load(open(filePath,'rb'))))
except:
pass
return []
def setProfiles(profiles):
filePath=configFilePath()
pickle.dump(encrypt(str(profiles)),open(filePath,'wb'))
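# NOTE: profiles are obfuscated with a hardcoded DES key ("DESCRYPT") and an
# all-zero IV, so this protects stored credentials from casual inspection only.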
def encrypt(data):
key = pyDes.des("DESCRYPT", pyDes.CBC, "\0\0\0\0\0\0\0\0", pad=None, padmode=pyDes.PAD_PKCS5)
return key.encrypt(data)
def decrypt(encryptedData):
key = pyDes.des("DESCRYPT", pyDes.CBC, "\0\0\0\0\0\0\0\0", pad=None, padmode=pyDes.PAD_PKCS5)
return key.decrypt(encryptedData, padmode=pyDes.PAD_PKCS5)
def configure():
profileConfig={}
profileConfig['profileName']=raw_input('Enter Config Profile Name : ')
profileConfig['smtpServer']=raw_input('Enter SMTP Server Domain (ex:smtp.gmail.com) : ')
profileConfig['smtpPort']=raw_input('Enter SMTP Port (ex:587 for Gmail) : ')
profileConfig['emailAddress']=raw_input('Enter Email Address : ')
profileConfig['userName']=raw_input('Enter User Name : ')
profileConfig['password']=getpass.getpass('Enter Password : ')
pwd=getpass.getpass('Re-enter Password : ')
while (pwd!=profileConfig['password']):
        print 'Passwords do not match!'
profileConfig['password']=getpass.getpass('Enter Password : ')
pwd=getpass.getpass('Re-enter Password : ')
yOrN = raw_input('Set this profile as default? (y/n)? : ')
while yOrN not in ('y','n'):
yOrN=raw_input("Set this profile as default? Enter 'y' OR 'n' ? : ")
setAsDefault = True if yOrN=='y' else False
currentProfiles=getProfiles()
profileExistsAlready=False
for p in currentProfiles:
if p['profileName']==profileConfig['profileName']:
profileExistsAlready=True
break
if profileExistsAlready:
print 'Overwriting profile '+profileConfig['profileName']
currentProfiles.pop(currentProfiles.index(p))
if setAsDefault:
profiles= [profileConfig] + currentProfiles
else:
profiles= currentProfiles+[profileConfig]
setProfiles(profiles)
    print 'Saved profile \''+profileConfig['profileName']+'\'. You can use PingMe now!'
def erase():
setProfiles([])
print 'Erased all profiles'
def getProfile(name=None):
if name==None:
if len(getProfiles())>0:
return getProfiles()[0]
else:
for profile in getProfiles():
if profile['profileName']==name:
return profile
return None
def test(profile):
subject = 'Test Mail'
    body='This is a test mail from PingMe. Your profile config works!'
sendMail(profile['smtpServer']+':'+profile['smtpPort'],profile['emailAddress'],profile['userName'],profile['password'],SUBJECT_PREFIX+subject,body)
def pingme(profile,subject='',body=''):
sendMail(profile['smtpServer']+':'+profile['smtpPort'],profile['emailAddress'],profile['userName'],profile['password'],SUBJECT_PREFIX+subject,body)
if __name__=="__main__":
parser = argparse.ArgumentParser(__file__, description='A utility that sends an email when a shell command completes executing.')
parser.add_argument("--erase", "-e", help="Erase all configuration profiles (encrypted emailids, passwords etc.).", action="store_true")
parser.add_argument("--configure", "-c", help="Configure email SMTP server, emailid, username, password.", action="store_true")
parser.add_argument("--profile", "-p", help="Profile Name", type=str)
parser.add_argument("--subject", "-s", help="Email subject", type=str)
parser.add_argument("--test", "-t", help="Test setup.", action="store_true")
args= parser.parse_args()
if args.configure:
configure()
elif args.erase:
erase()
else:
profile=getProfile(args.profile)
if args.test:
if profile==None:
print "You haven't configured a PingMe profile yet. Please Configure now by entering following details"
configure()
profile=getProfile(args.profile)
test(profile)
else:
executionTime,pipedOutput=readPipedOutput()
if profile==None:
print "You haven't configured a PingMe profile yet. Please Configure now by entering following details"
configure()
profile=getProfile(args.profile)
subject= '' if args.subject==None else args.subject
body='EXECUTION_TIME : '+str(executionTime)+'\n'+'OUTPUT : '+pipedOutput
pingme(profile,subject=subject, body=body)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 3,693,671,916,833
blob_id: f52cb861a449990d060d1896e9cbab7f64929804 | directory_id: 4e0838941d9f843cd6070869b96e19fef5802867 | content_id: 6727b3e11f11b38ee1d2d94f385af00b45d83ea1
path: /prtm.py
detected_licenses: [] | license_type: no_license
repo_name: mikelmcdaniel/rosalind-problems | repo_url: https://github.com/mikelmcdaniel/rosalind-problems
snapshot_id: be9c3869f85abf0170ca84a77226ccfd68f79e8b | revision_id: 055b683dfe662c41cb9552ef46a1e80ee1f58bb9 | branch_name: refs/heads/master
visit_date: 2021-01-20T00:55:56.392889 | revision_date: 2012-12-30T01:15:47 | committer_date: 2012-12-30T01:15:47
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
# monoisotopic mass table of proteins
table = '''
A 71.03711
C 103.00919
D 115.02694
E 129.04259
F 147.06841
G 57.02146
H 137.05891
I 113.08406
K 128.09496
L 113.08406
M 131.04049
N 114.04293
P 97.05276
Q 128.05858
R 156.10111
S 87.03203
T 101.04768
V 99.06841
W 186.07931
Y 163.06333'''
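# parse the whitespace-separated "letter mass" pairs into a {residue: mass} dict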
table = table.split()
table = dict(zip(table[::2], map(float, table[1::2])))
print sum(table[c] for c in raw_input())
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2012

__id__: 9,646,496,551,588
blob_id: cc0c68eeabb2b6885e90d257d8c29e0c5070330e | directory_id: 9978da5b48c69d1a4e92bc1fbbfe3ea932b34346 | content_id: f6a477cff375c662351cb0a566ba1533637f151b
path: /ploomcake/core/browser/related_popup.py
detected_licenses: [] | license_type: no_license
repo_name: abstract-open-solutions/ploomcake.core | repo_url: https://github.com/abstract-open-solutions/ploomcake.core
snapshot_id: bf8b871da4126f545cdb9afc2a9b9a14d8c768d7 | revision_id: e3b40b07a2679950d2794e1860efbc3c8c9b4929 | branch_name: refs/heads/master
visit_date: 2021-01-12T05:24:55.721529 | revision_date: 2013-06-26T14:50:04 | committer_date: 2013-06-26T14:50:04
github_id: 77,924,539 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_fork: true | gha_event_created_at: 2017-01-03T14:33:03 | gha_created_at: 2017-01-03T14:33:02 | gha_updated_at: 2014-11-13T19:28:14 | gha_pushed_at: 2013-06-26T14:50:12
gha_size: 184 | gha_stargazers_count: 0 | gha_forks_count: 0 | gha_open_issues_count: 0 | gha_language: null | gha_archived: null | gha_disabled: null
content:
from plone.app.form._named import named_template_adapter
from archetypes.referencebrowserwidget.browser.view import ReferenceBrowserPopup as ReferenceBrowserPopupOriginal
from zope.component import getMultiAdapter
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from plone.app.layout.icons.interfaces import IContentIcon

default_popup_template = named_template_adapter(
    ViewPageTemplateFile('templates/popup.pt'))

class ReferenceBrowserPopup(ReferenceBrowserPopupOriginal):
    """Popup that resolves item icons through the IContentIcon adapter."""

    def getIcon(self, item):
        """Override the original getIcon method."""
        return getMultiAdapter((self.context, self.request, item), IContentIcon)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2013

__id__: 2,877,628,128,217
blob_id: 92e71dad2e65e9ab5d5374802680247025ae1815 | directory_id: d4a248082c68a996f77338e856ab9ac69ece79f1 | content_id: 1ca7e10a09d20eaa6cc603248bcde4053ea86025
path: /utils/reportmaker.py
detected_licenses: [] | license_type: no_license
repo_name: mcsquaredjr/Reports | repo_url: https://github.com/mcsquaredjr/Reports
snapshot_id: 702e1e41506d4cf534421994db9a5c8c4ee5d64c | revision_id: a0e8bd509a6730e854e649dc7c2e650d887b1521 | branch_name: refs/heads/master
visit_date: 2021-01-23T17:30:31.420673 | revision_date: 2014-01-25T01:47:16 | committer_date: 2014-01-25T01:47:16
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
class Report_Maker(object):
'''
Store submitted data and generate good-looking HTML
'''
def __init__(self):
# Keep all submitted reports in array of dicts
self.data = []
def add_data(self, new_data):
'''Append collected data
'''
self.data.append(new_data)
def get_data(self):
'''Return collected data
'''
return self.data
def data2md(self):
'''Convert submitted data to markdown'''
mmd_txt = ''
for entry in self.data:
keys = sorted(entry.keys())
mmd_txt += '## ' + entry['project'] + '\n'
for key in keys:
if key != 'project':
mmd_txt += '### ' + key + '\n'
mmd_txt += entry[key] + '\n'
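        # Markdown needs a blank line between blocks, so double every newline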
return mmd_txt.replace('\n', '\n\n')
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 9,740,985,856,968
blob_id: 3c5afaad12b3fa188ba216bd02ee7966654b7526 | directory_id: 6eab88da3650a8d8f66bb390ed09114b3dc50443 | content_id: c10fb79d1277a754e52fc1eae0599bb66368562c
path: /Totopos/node/shape.py
detected_licenses: [] | license_type: no_license
repo_name: qunox/Totopos | repo_url: https://github.com/qunox/Totopos
snapshot_id: eee57769c09f2a7033e980e5f5038349e95a5ea6 | revision_id: 543a030a7f34cf6bcfa230b947aebcdb2241166f | branch_name: refs/heads/master
visit_date: 2020-05-18T01:10:17.981918 | revision_date: 2014-10-14T22:20:59 | committer_date: 2014-10-14T22:20:59
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
from __future__ import division
import logging
from numpy import array
class shape():
def __init__(self):
self.logger = logging.getLogger('sublog')
def __call__(self, *args, **kwargs):
if 'shape' in kwargs:
self.shape = kwargs['shape']
if 'width' in kwargs:
self.width = int(kwargs['width'])
if 'height' in kwargs:
self.height = int(kwargs['height'])
if not hasattr(self, 'shape'):
raise Exception('ERROR: No shape was given to node')
        if self.shape == 'rectangular':
self.rectangular()
def rectangular(self):
self.logger.debug('Creating a rectangular shape nodes')
nodePosition = [array([x,y]) for x in range(self.width) for y in range(self.height)]
self.nodes_dict = {}
_i = 0
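        # one record per node: its grid position plus per-node bookkeeping fields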
for position in nodePosition:
self.nodes_dict[_i] = {'position' : position , 'initialvector' : None, 'label': '0' , 'vector':None , 'popularity' : 0,
'picked' : 0 , 'perturb' : 0 }
_i += 1
        self.logger.debug('Finished creating rectangular nodes')
def giveNodesDict(self):
if hasattr(self , 'nodes_dict'):
return self.nodes_dict
else:
raise Exception('Nodes dict has not been created yet')
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 19,000,935,341,752
blob_id: fd0f3c3181928d0a0c306f33c6fc69670d938efc | directory_id: e347cadb4aca19fc1f1a00316cc5c4ce243fc463 | content_id: eaec8d0b9170d6eb9f5fe31fba83fe8ed15b129c
path: /compareSrc/__init__.py
detected_licenses: ["MIT"] | license_type: permissive
repo_name: kivvix/stage-LPC | repo_url: https://github.com/kivvix/stage-LPC
snapshot_id: 1bb63e9e15c9433678eca948af8a8f086143152b | revision_id: b7d885625713ea4287cb4b89db8e2abb05ca97d6 | branch_name: refs/heads/master
visit_date: 2021-05-31T06:22:17.849308 | revision_date: 2014-11-04T12:29:53 | committer_date: 2014-11-04T12:29:53
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## @package compareSrc
# @author J. Massot
# @date 2014-04-10
__all__ = ["cmp","data","config","compareSrc","init","job"]
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 17,514,876,661,243
blob_id: 9a06763d4a922c675471d86d4ed7fe116c93a3c0 | directory_id: 689e8eb089af87aa67655c1317ca72b194081e1b | content_id: 1ab03564c09df83a2b0c710e66779381acab47b8
path: /gub/specs/cygwin/lilypond.py
detected_licenses: ["LicenseRef-scancode-warranty-disclaimer", "GPL-2.0-or-later"] | license_type: non_permissive
repo_name: EddyPronk/gub | repo_url: https://github.com/EddyPronk/gub
snapshot_id: be77458d204ee13a75df637f00de24729c62c4dc | revision_id: fd733359b24b83d3be9a3557fc383143acb5065e | branch_name: refs/heads/master
visit_date: 2021-12-01T15:10:39.867304 | revision_date: 2010-01-07T11:18:26 | committer_date: 2010-01-07T11:18:26
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
from gub import cygwin
from gub import gup
from gub import misc
from gub import target
from gub.specs import lilypond
class LilyPond (lilypond.LilyPond):
subpackage_names = ['doc', '']
dependencies = gup.gub_to_distro_deps (lilypond.LilyPond.dependencies,
cygwin.gub_to_distro_dict)
configure_flags = (lilypond.LilyPond.configure_flags
.replace ('--enable-relocation', '--disable-relocation'))
python_lib = '%(system_prefix)s/bin/libpython*.dll'
LDFLAGS = '-L%(system_prefix)s/lib -L%(system_prefix)s/bin -L%(system_prefix)s/lib/w32api'
make_flags = (lilypond.LilyPond.make_flags
+ ' LDFLAGS="%(LDFLAGS)s %(python_lib)s"')
def __init__ (self, settings, source):
lilypond.LilyPond.__init__ (self, settings, source)
self.dependencies += [misc.with_platform ('lilypond-doc',
self.settings.build_platform)]
def install (self):
##lilypond.LilyPond.install (self)
target.AutoBuild.install (self)
self.install_doc ()
def install_doc (self):
# lilypond.make uses `python gub/versiondb.py --build-for=2.11.32'
# which only looks at source ball build numbers, which are always `1'
# This could be fixed, but for now just build one doc ball per release?
installer_build = '1'
installer_version = self.build_version ()
docball = self.expand ('%(uploads)s/lilypond-%(installer_version)s-%(installer_build)s.documentation.tar.bz2', env=locals ())
self.system ('''
mkdir -p %(install_prefix)s/share/doc/lilypond
cd %(install_prefix)s && LIBRESTRICT_ALLOW=/ tar -C %(install_prefix)s -jxf %(docball)s
''',
locals ())
def category_dict (self):
return {'': 'Publishing'}
Lilypond = LilyPond
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2010

__id__: 7,748,121,038,738
blob_id: e7eae3ee02df7ddc395e722c223a1cd93cc95653 | directory_id: c9cd3714e3ce00f5d0a186939054ef7276317ec9 | content_id: cb7e1cb68ea03b2f51437d7cfe54275d0b41f75a
path: /sga_oauth/client/views.py
detected_licenses: ["MIT"] | license_type: permissive
repo_name: cobalys/SGAoauth | repo_url: https://github.com/cobalys/SGAoauth
snapshot_id: de2a5ce7097b5378f4330ad47302f65e7f613dee | revision_id: 360a953c3e7c2e3df96cf793f78d6bd3139410b1 | branch_name: refs/heads/master
visit_date: 2020-05-03T12:13:57.952094 | revision_date: 2013-02-12T15:45:17 | committer_date: 2013-02-12T15:45:17
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
from django.http import HttpResponseRedirect, HttpResponse
from sga_oauth.client import oauth_settings
from sga_oauth.shared.helpers.generators import generate_nonce
from sga_oauth.shared.helpers.request import fetch_oauth
from sga_oauth.shared.helpers.signature import sign_request
import time
def obtain_request_token(request, namespace):
"""
6.1.1. Consumer Obtains a Request Token
To obtain a Request Token, the Consumer sends an HTTP request to the
Service Provider's Request Token URL. The Service Provider documentation
specifies the HTTP method for this request, and HTTP POST is RECOMMENDED.
The request MUST be signed and contains the following parameters:
Variables:
oauth_consumer_key:
The Consumer Key.
oauth_signature_method:
The signature method the Consumer used to sign the request.
oauth_signature:
The signature as defined in Signing Requests.
oauth_timestamp:
As defined in Nonce and Timestamp.
oauth_nonce:
As defined in Nonce and Timestamp.
oauth_version:
OPTIONAL. If present, value MUST be 1.0 . Service Providers MUST
assume the protocol version to be 1.0 if this parameter is not
present. Service Providers' response to non-1.0 value is left
undefined.
oauth_callback:
An absolute URL to which the Service Provider will redirect the
User back when the Obtaining User Authorization step is completed.
If the Consumer is unable to receive callbacks or a callback URL
has been established via other means, the parameter value MUST be
set to oob (case sensitive), to indicate an out-of-band
configuration.
Additional parameters:
Any additional parameters, as defined by the Service Provider
Args:
request:
Returns:
the result of directs_user_service_provider(oauth_token).
Raises:
TypeError: if n is not a number.
ValueError: if n is negative.
"""
implementation = oauth_settings.IMPLEMENTATIONS[namespace]
url = implementation['OAUTH_URL']
path_request = implementation['OAUTH_REQUEST_TOKEN_PATH']
oauth_port = implementation['OAUTH_PORT']
#Variables
oauth_consumer_key = implementation['OAUTH_CONSUMER_KEY']
oauth_signature_method = 'HMAC-SHA1'
oauth_consumer_secret = implementation['OAUTH_CONSUMER_SECRET']
oauth_timestamp = int(time.time())
oauth_nonce = generate_nonce(length=8)
oauth_callback = implementation['OAUTH_CALLBACK_URL']
parameters = {
'oauth_consumer_key': oauth_consumer_key,
'oauth_signature_method': oauth_signature_method,
'oauth_consumer_secret': oauth_consumer_secret,
'oauth_timestamp': oauth_timestamp,
'oauth_nonce': oauth_nonce,
'oauth_callback': oauth_callback,
}
oauth_signature = sign_request('POST',
parameters,
oauth_consumer_secret)
parameters['oauth_signature'] = oauth_signature
result, status = fetch_oauth(url, oauth_port, path_request, 'POST', parameters)
if status == 200:
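        # parse the form-encoded response body (key=value pairs joined by '&') into a dict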
i = result.split('&')
parameters = {a.split('=')[0].strip(): a.split('=')[1].strip() for a in i}
oauth_token = parameters['oauth_token']
return directs_user_service_provider(oauth_token, namespace)
elif status == 401:
return HttpResponse('Invalid Token', status=401)
else:
return HttpResponse('Unknown error', status=400)
def directs_user_service_provider(oauth_token, namespace):
'''
6.2.1. Consumer Directs the User to the Service Provider
In order for the Consumer to be able to exchange the Request Token for an
Access Token, the Consumer MUST obtain approval from the User by directing
the User to the Service Provider. The Consumer constructs an HTTP GET
request to the Service Provider's User Authorization URL with the following
Once the request URL has been constructed the Consumer redirects the User
to the URL via the User's web browser. If the Consumer is incapable of
automatic HTTP redirection, the Consumer SHALL notify the User how to
manually go to the constructed request URL.
Note: If a Service Provider knows a Consumer to be running on a mobile
device or set-top box, the Service Provider SHOULD ensure that the User
Authorization URL and Request Token are suitable for manual entry.
Args:
oauth_token:
OPTIONAL. The Request Token obtained in the previous step. The
Service Provider MAY declare this parameter as REQUIRED, or accept
requests to the User Authorization URL without it, in which case it
will prompt the User to enter it manually.
Additional parameters:
Any additional parameters, as defined by the Service Provider.
Returns:
HttpResponseRedirect to the authorization url.
Raises:
TypeError: if n is not a number.
ValueError: if n is negative.
parameter:
'''
implementation = oauth_settings.IMPLEMENTATIONS[namespace]
path_authorize = implementation['OAUTH_AUTHORIZATION_REQUEST_TOKEN_PATH']
url_server = implementation['OAUTH_URL']
port_server = implementation['OAUTH_PORT']
url = 'http://%s:%s%s' % (url_server, port_server, path_authorize)
return HttpResponseRedirect(url % oauth_token)
def callback(request, namespace):
'''
6.2.3. Service Provider Directs the User Back to the Consumer
After the User authenticates with the Service Provider and grants
permission for Consumer access, the Consumer MUST be notified that the
Request Token has been authorized and ready to be exchanged for an Access
Token. If the User denies access, the Consumer MAY be notified that the
Request Token has been revoked.
To make sure that the User granting access is the same User returning back
to the Consumer to complete the process, the Service Provider MUST generate
a verification code: an unguessable value passed to the Consumer via the
User and REQUIRED to complete the process.
If the Consumer provided a callback URL (using the oauth_callback parameter
in Section 6.1.1 or by other means), the Service Provider uses it to
    construct an HTTP request, and directs the User's web browser to that URL
with the following parameters added:
oauth_token:
The Request Token the User authorized or denied.
oauth_verifier:
The verification code.
The callback URL MAY include Consumer provided query parameters. The
Service Provider MUST retain them unmodified and append the OAuth
parameters to the existing query.
If the Consumer did not provide a callback URL, the Service Provider
SHOULD display the value of the verification code, and instruct the User
to manually inform the Consumer that authorization is completed. If the
Service Provider knows a Consumer to be running on a mobile device or
set-top box, the Service Provider SHOULD ensure that the verifier value
is suitable for manual entry.
'''
implementation = oauth_settings.IMPLEMENTATIONS[namespace]
#Variables
oauth_verifier = request.GET.get('oauth_verifier')
oauth_token = request.GET.get('oauth_token')
oauth_consumer_key = implementation['OAUTH_CONSUMER_KEY']
oauth_signature_method = 'HMAC-SHA1'
oauth_consumer_secret = implementation['OAUTH_CONSUMER_SECRET']
oauth_nonce = generate_nonce(length=8)
oauth_timestamp = int(time.time())
parameters = {
'oauth_token': oauth_token,
'oauth_consumer_key': oauth_consumer_key,
'oauth_signature_method': oauth_signature_method,
'oauth_timestamp': oauth_timestamp,
'oauth_nonce': oauth_nonce,
'oauth_verifier': oauth_verifier,
}
oauth_signature = sign_request('POST',
parameters,
oauth_consumer_secret)
parameters['oauth_signature'] = oauth_signature
#Delete Session
oauth_port = implementation['OAUTH_PORT']
url = implementation['OAUTH_URL']
path_request = implementation['OAUTH_ACCESS_TOKEN_PATH']
return obtain_access_token(url, oauth_port, path_request, parameters, request, namespace)
def obtain_access_token(url, oauth_port, path_request, parameters, request, namespace):
'''
6.3.1. Consumer Requests an Access Token
The Request Token and Token Secret MUST be exchanged for an Access Token
and Token Secret.
To request an Access Token, the Consumer makes an HTTP request to the
Service Provider's Access Token URL. The Service Provider documentation
specifies the HTTP method for this request, and HTTP POST is RECOMMENDED.
The request MUST be signed per Signing Requests, and contains the
following parameters:
oauth_consumer_key:
The Consumer Key.
oauth_token:
The Request Token obtained previously.
oauth_signature_method:
The signature method the Consumer used to sign the request.
oauth_signature:
The signature as defined in Signing Requests.
oauth_timestamp:
As defined in Nonce and Timestamp.
oauth_nonce:
As defined in Nonce and Timestamp.
oauth_version:
OPTIONAL. If present, value MUST be 1.0 . Service Providers MUST
assume the protocol version to be 1.0 if this parameter is not present.
Service Providers' response to non-1.0 value is left undefined.
oauth_verifier:
The verification code received from the Service Provider in the
Service Provider Directs the User Back to the Consumer step.
No additional Service Provider specific parameters are allowed when
requesting an Access Token to ensure all Token related information is
present prior to seeking User approval.
'''
result, status = fetch_oauth(url, oauth_port, path_request, 'POST', parameters)
if status == 200:
i = result.split('&')
parameters = {a.split('=')[0].strip(): a.split('=')[1].strip() for a in i}
if 'OAUTH_ACCESS_TOKEN' not in request.session:
request.session['OAUTH_ACCESS_TOKEN'] = {}
request.session['OAUTH_ACCESS_TOKEN'][namespace] = parameters['oauth_token']
return HttpResponseRedirect('/') #TODO: Return to initial request
elif status == 401:
return HttpResponse('Invalid Token', status=401)
else:
return HttpResponse(result, status=401)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2013

__id__: 9,672,266,353,178
blob_id: 872b75fa840935fedf58f30f3d23050ffcd6ced3 | directory_id: d7332cb920401fe9718a1e3ca967049f7103717f | content_id: 9c0814614518ea6f436d4c0e3826d0a4e7ce955d
path: /houghlines.py
detected_licenses: [] | license_type: no_license
repo_name: liz-murphy/MapRotation | repo_url: https://github.com/liz-murphy/MapRotation
snapshot_id: 4647c2502b61adc9d12876c6304a27c1dffb4162 | revision_id: 983ba7c9a1eadb1a3414ca102241d17fd64d9f76 | branch_name: refs/heads/master
visit_date: 2020-05-23T14:45:22.039638 | revision_date: 2014-12-20T04:44:31 | committer_date: 2014-12-20T04:44:31
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
#!/usr/bin/python
'''
This example illustrates how to use Hough Transform to find lines
Usage: ./houghlines.py [<image_name>]
image argument defaults to ../data/pic1.png
'''
import cv2
import numpy as np
import sys
import math
import scipy
from scipy.misc import imrotate
from PIL import Image
from scipy import ndimage
try:
fn = sys.argv[1]
except:
fn = "map.png"
print __doc__
src = cv2.imread(fn)
dst = cv2.Canny(src, 50, 200)
cdst = cv2.cvtColor(dst, cv2.COLOR_GRAY2BGR)
print 'Press any key to continue, space to accept 0 degree line'
lines = cv2.HoughLinesP(dst, 1, math.pi/180.0, 50, np.array([]), 50, 10)
a,b,c = lines.shape
for i in range(b):
cv2.line(cdst, (lines[0][i][0], lines[0][i][1]), (lines[0][i][2], lines[0][i][3]), (0, 0, 255), 3, cv2.CV_AA)
cv2.imshow("source", src)
cv2.imshow("detected lines", cdst)
    key = cv2.waitKey(0)
    if key == 1048608:
        print 'Aligning to this line'
        break
angle = math.atan2( (lines[0][i][3]-lines[0][i][1]),(lines[0][i][2]-lines[0][i][0]) )
print "Angle of rotation: " + str(angle)
rotated = ndimage.rotate(src, 180*angle/3.1415, (1,0), True, None, 0, 'constant', 205, True)
cv2.imshow("Rotated image", rotated)
cv2.waitKey(0)
cv2.imwrite('aligned.png', rotated, [cv2.cv.CV_IMWRITE_PNG_COMPRESSION,9])
#cv2.imwrite('aligned.pgm', rotated)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 12,438,225,302,988
blob_id: 2c8e6956efa019abb3db6602d5338e8ed17869ef | directory_id: 7da19bbfa5a0f97435a330fb4f33c7807e6fd4a3 | content_id: c0c1680bdfff9fdc1f2d577b3892453aab215cd2
path: /app/views/accountviews.py
detected_licenses: [] | license_type: no_license
repo_name: Zakkeri/SE_Project2014 | repo_url: https://github.com/Zakkeri/SE_Project2014
snapshot_id: 81756526fde7416954bb2738317eaf001c6890c9 | revision_id: b9db59c5419207bc0546733218d534a21afd5b43 | branch_name: refs/heads/master
visit_date: 2020-04-30T14:20:25.100855 | revision_date: 2014-04-21T15:15:52 | committer_date: 2014-04-21T15:15:52
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
#==============================================================================
# File: accountviews.py
# Auth: Andrew Calvano / Jim Ching
# Desc: Account management interface
#==============================================================================
from flask import render_template, request, session, abort, redirect, url_for
from app.dbmodels import User, OrderInfo, ServiceInfo
from app.util import getsalt, createhash, validate_table
from app.db import db
from app import app
# form tables (validation purposes)
accountreg_ft = ['username', 'password', 'check']
accountlog_ft = ['username', 'password']
accountroe_ft = ['username', 'newrole']
@app.route('/')
def home():
'System management home page.'
    # check whether the user is logged in
if "role" not in session:
return render_template('index.html')
    # user is logged in as Admin or Sales
elif session["role"] in ["Admin", "Sales"]:
return render_template('index.html', order_count = \
OrderInfo.query.filter_by(status="Ready to Process").count(), \
service_count = ServiceInfo.query.filter_by(stats=1).count())
    # user is logged in as Guest
elif session["role"] in ["Guest"]:
return render_template('index.html')
# character sets for validating registration
lower = [chr(i + 97) for i in range(26)] # lower-case alphabet (ascii)
upper = [chr(i + 65) for i in range(26)] # upper-case alphabet (ascii)
digit = [chr(i + 48) for i in range(10)] # digits
speci = [chr(i + 33) for i in range(14)] # special char
chars = set(lower + upper + digit) # username character set
@app.route('/register', methods=['GET', 'POST'])
def register():
'Register the user by adding an entry to the User table.'
# redirect signed in user to home page (already register)
if 'username' in session: return redirect(url_for("home"))
# user has submitted a registration form
if request.method == "POST":
if validate_table(accountreg_ft, request.form):
# extract form entries
username = request.form[accountreg_ft[0]]
password = request.form[accountreg_ft[1]]
verified = request.form[accountreg_ft[2]]
status = 0x0000
# validate registration
if not 5 <= len(username) <= 25: status += 0x0002 # username must be 5 - 25 characters long
if set(username) - chars: status += 0x0004 # username must contain only letters and digits
if not 5 <= len(password) <= 25: status += 0x0008 # password must be 5 - 25 characters long
if len(set(password) & set(digit)) < 1: status += 0x0010 # must contain digit character
if len(set(password) & set(upper)) < 1: status += 0x0020 # must contain capital character
if len(set(password) & set(speci)) < 1: status += 0x0040 # must contain special character
if password != verified: status += 0x0080 # password is not verified
if User.query.filter_by(uname=username).first() != None: status += 0x0100 # username already exist
# create the user if it does not exist
if not status:
salt = getsalt()
passhash = createhash(salt,password)
newuser = User(username, salt, passhash, "Guest", 0)
db.session.add(newuser)
db.session.commit()
return redirect(url_for("login", message="Registration successful, please sign in!"))
# report password does not match
elif status & 0x0080: return redirect(url_for("register", message = "Unable to verified password, please re-enter password."))
# report username already exist
elif status & 0x0100: return redirect(url_for("register", message = "{} has already been taken, please choose another username.".format(username)))
# report validation error
else: return redirect(url_for("register", message = "Invalid username or password, please re-read the registration form rules."))
# present user with initial registration
return render_template('accounttemps/register.html')
@app.route('/login', methods=['GET', 'POST'])
def login():
'Login the user by setting the session object.'
# redirect signed in user to home page (already login)
if 'username' in session: return redirect(url_for("home"))
# user has submitted credentials
if request.method == "POST":
if validate_table(accountlog_ft, request.form):
# extract form entries
username = request.form[accountlog_ft[0]]
password = request.form[accountlog_ft[1]]
status = 0x0000
# check whether the fields are empty
if not 5 <= len(username) <= 25: status += 0x0001 # username must be 5 - 25 characters long
if not 5 <= len(password) <= 25: status += 0x0002 # password must be 5 - 25 characters long
# check whether the user exist
try:
user_exists = User.query.filter_by(uname=username).first()
            except Exception:
user_exists = None
if user_exists:
# check whether the password matches
if createhash(user_exists.salt,password) == user_exists.password:
session['username'] = user_exists.uname
session['role'] = user_exists.role
if user_exists.isadmin:
session['isadmin'] = True
else:
session['isadmin'] = False
status += 0x0010
else:
status += 0x0008
else:
status += 0x0004
if status & 0x0001 or status & 0x0002:
return redirect(url_for("login", message = 'Short username or password; must be at least length 5 or greater.'))
elif status & 0x0004 or status & 0x0008:
return redirect(url_for("login", message = 'Invalid username or password.'))
elif status & 0x0010:
return redirect(url_for("home"))
# present user with initial sign in form
return render_template("accounttemps/login.html")
@app.route('/roles', methods=['GET'])
def roles():
'Allow administrators to change the roles of other users.'
    # check that the user is logged in and is an administrator
if 'isadmin' not in session.keys() or session['isadmin'] == False:
return redirect(url_for("home"))
if request.method == 'GET':
if validate_table(accountroe_ft, request.args):
username = request.args.get(accountroe_ft[0])
newrole = request.args.get(accountroe_ft[1])
message = ''
# check the role
if newrole in ['Admin','Sales', 'Guest']:
# check if user exist
user_exists = User.query.filter_by(uname = username).first()
if user_exists == None:
message = 'You\'ve modified an invalid user\'s role.'
# check if user is modifying his own permission level
elif user_exists.uname == session['username']:
message = 'You cannot modify your own permission level.'
else:
# set off administrator flag
if user_exists.role == "Admin" and newrole != "Admin":
user_exists.isadmin = 0
# set new role
user_exists.role = newrole
# set on administrator flag
if user_exists.role == "Admin":
user_exists.isadmin = 1
# commit the transaction
db.session.commit()
message = '{} role changed to {}.'.format(user_exists.uname, user_exists.role)
else:
message = 'Role action denied; invalid role({}).'.format(newrole)
return render_template('accounttemps/roles.html', User = User, message = message)
# present user with initial table
return render_template('accounttemps/roles.html', User = User)
@app.route('/logout')
def logout():
'Logout the user by clearing the session object.'
if 'username' in session: session.clear()
return redirect(url_for("home"))
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 11,106,785,450,804
blob_id: be3308ed97b2af9d47fe436acc19011c1e31ba1c | directory_id: c2d2b00b29c6f0fcc9fd493870ab0feb76226326 | content_id: 90e17b8d6867ee4be39a62a59002dc065314aaba
path: /src/crashtec/symbolsmngr/test/testsymbolsmngr.py
detected_licenses: ["GPL-3.0-only", "GPL-3.0-or-later", "GPL-1.0-or-later"] | license_type: non_permissive
repo_name: capone212/crashtec | repo_url: https://github.com/capone212/crashtec
snapshot_id: e4c2a7e52ec6c60c9452c736b67f50672fa0c15e | revision_id: 000b7e3020a7a63e2de905a07b821ffd4fd28ec5 | branch_name: refs/heads/master
visit_date: 2020-04-07T05:51:15.687433 | revision_date: 2013-05-13T19:42:15 | committer_date: 2013-05-13T19:42:15
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
'''
Created on 07.04.2013
@author: capone
'''
import unittest
from mock import MagicMock
from mock import patch
import mock
import logging
from crashtec.utils.exceptions import CtGeneralError
from crashtec.symbolsmngr.bindownloader import BinaryDownloader
from crashtec.symbolsmngr.symstore import SymbolsStore
from crashtec.symbolsmngr import symbolsmngr
from crashtec.utils import debug
_sample_task = {'id' : 1}
_sample_products_list = {'prod1' : ['1.0.0.', '1.0.2']}
_sample_urls = ['url1', 'url2']
_binary_folder_list = ['folder1', 'folder2']
class MocSymbolsManagerBuilder(object):
def __init__(self):
self.agent_class = 'agent_class'
self.instance_name = 'instance_name'
def build_products_detector(self):
self.products_detector = MagicMock(
spec_set = ['get_products_list_for_task'])
self.products_detector.get_products_list_for_task = MagicMock(
return_value = _sample_products_list)
return self
def build_binaries_locator(self):
self.binaries_locator = MagicMock(
spec_set = ['get_binaries_url_for_products'])
self.binaries_locator.get_binaries_url_for_products = MagicMock(
return_value = _sample_urls)
return self
def build_downloader(self):
self.downloader = mock.create_autospec(BinaryDownloader, spec_set=True)
self.downloader.download_and_unpack.side_effect = _binary_folder_list
return self
def build_symbols_store(self):
self.symbols_store = mock.create_autospec(SymbolsStore, spec_set = True)
#self.symbols_store.add_binary_path.return_value = 10
#self.symbols_store.add_binary_path = mock.mocksignature(
# SymbolsStore.add_binary_path, skipfirst=True)
return self
def create(self):
impl = symbolsmngr.Implementation(self.products_detector,
self.binaries_locator,
self.downloader,
self.symbols_store)
return symbolsmngr.SymbolsManager(impl, self.agent_class,
self.instance_name,
'mock_group_id')
class MocBuilderProductDetectorThrows(MocSymbolsManagerBuilder):
def build_products_detector(self):
self.products_detector = MagicMock(
spec_set = ['get_products_list_for_task'])
self.products_detector.get_products_list_for_task = MagicMock(
side_effect = CtGeneralError('Mock intended error'))
return self
class MocBuilderSymbolsStoreThrows(MocSymbolsManagerBuilder):
def build_symbols_store(self):
self.symbols_store = MagicMock(spec_set = SymbolsStore)
self.symbols_store.add_binary_path = MagicMock(
side_effect = CtGeneralError('Mock intended error'))
return self
@patch('crashtec.infrastructure.public.agentbase.RegistrationHolder')
class TestSymbolsManager(unittest.TestCase):
# Here we should not have any errors. It is just regular success path.
def test_task_success(self, mock_class):
self._test_task_success(mock_class, MocSymbolsManagerBuilder())
def _test_task_success(self, mock_class, builder):
# Setup mock's
manager = self.build_mock(builder)
manager.task_failed = MagicMock(side_effect =
RuntimeError("Should not be called"))
# Call
manager.process_task(_sample_task)
# Validate call's
impl = manager.impl
impl.products_detector.get_products_list_for_task.\
assert_called_once_with(_sample_task)
impl.binaries_locator.get_binaries_url_for_products.\
assert_called_once_with(_sample_products_list,
_sample_task)
download_calls = [mock.call(url) for url in _sample_urls]
impl.downloader.download_and_unpack.assert_has_calls(download_calls,
any_order=True)
folder_calls = [mock.call(folder, _sample_task)
for folder in _binary_folder_list]
impl.symbols_store.add_binary_path.assert_has_calls(folder_calls,
any_order=True)
manager.task_finished.assert_called_once_with(_sample_task)
# Here task should fail, because manager could not find products
def test_products_detector_throws(self, mock_class):
manager = self.build_mock(MocBuilderProductDetectorThrows())
manager.task_finished = MagicMock(side_effect =
RuntimeError("Should not be called"))
# Call
manager.process_task(_sample_task)
# Validate call's
manager.task_failed.assert_called_once_with(_sample_task)
    # Here adding to the symbols store raises, but since there are many
    # folders to add, failing on one item should not fail the whole task.
def test_symbolsstore_throws(self, mock_class):
manager = self.build_mock(MocBuilderSymbolsStoreThrows())
manager.task_failed = MagicMock(side_effect =
RuntimeError("Should not be called"))
# Call
manager.process_task(_sample_task)
# Validate call's
download_calls = [mock.call(url) for url in _sample_urls]
manager.impl.downloader.download_and_unpack.assert_has_calls(
download_calls, any_order=True)
manager.task_finished.assert_called_once_with(_sample_task)
def build_mock(self, builder):
manager = builder.build_products_detector().\
build_binaries_locator().\
build_downloader().\
build_symbols_store().create()
manager.task_failed = MagicMock()
manager.task_finished = MagicMock()
return manager
def setup_log():
logger = logging.getLogger('symbolsmngr')
debug.init_debug_logger(logger)
setup_log()
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2013

__id__: 5,858,335,427,513
blob_id: 47904328f6e55c58aabf7f5e586d65653ec45dd4 | directory_id: 98c6ea9c884152e8340605a706efefbea6170be5 | content_id: d96b8b3898e47042b0e19ac5c14ba355497180bd
path: /examples/data/Assignment_8/mznsha002/question4.py
detected_licenses: [] | license_type: no_license
repo_name: MrHamdulay/csc3-capstone | repo_url: https://github.com/MrHamdulay/csc3-capstone
snapshot_id: 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | revision_id: 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | branch_name: refs/heads/master
visit_date: 2021-03-12T21:55:57.781339 | revision_date: 2014-09-22T02:22:22 | committer_date: 2014-09-22T02:22:22
github_id: 22,372,174 | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
# 8 May 2014
# Shaun Muzenda
# A program that uses recursive functions to find all palindrome primes between two integers supplied as input (start and end points are included)
import math
import sys
sys.setrecursionlimit (30000)
def palindrome(s,test): # checking if the number is a palindrome
if(len(str(s)) != 1): # checks whether the number has more than one character
d = s%10
test = test+str(d)
return palindrome((s-(s%10))//10,test)
else:
return(test+str(s))
def prime(starting_point,x,p): #checking whether the number is a prime
if(starting_point == 1):
return (p+"Not")
sq = int(math.sqrt(starting_point))
if(x != (sq+1)):
if(starting_point % x != 0):
return prime(starting_point,x+1,p)
else:
return (p+"Not")
def method(starting_point,ending_point):
    if starting_point <= ending_point: # still within the range
        testpalindrome = int(palindrome(starting_point,""))
        if testpalindrome == starting_point:
testPrime = prime(starting_point,2,"")
if testPrime != "Not":
print(starting_point) #prints a list of the palindrome primes
method(starting_point+1,ending_point)
if __name__== "__main__":
starting_point = eval(input("Enter the starting point N:\n"))
ending_point = eval(input("Enter the ending point M:\n"))
print("The palindromic primes are:")
method(starting_point,ending_point)
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2014

__id__: 16,423,954,953,297
blob_id: 4b03b542e7c1d05ef7d62a0000abfd244fd7c03c | directory_id: 39890fd20d1520784833b063a42d1839a92ecaed | content_id: ee977dc64f45e1c4cc7bc73591eb582640f6516e
path: /Task2b.py
detected_licenses: ["MIT", "LicenseRef-scancode-proprietary-license"] | license_type: non_permissive
repo_name: merrickheley/MECH3750-Ass2 | repo_url: https://github.com/merrickheley/MECH3750-Ass2
snapshot_id: 24b2ffb147b6dbd5be70c0fb8c24ab2a74a22e75 | revision_id: bd008d8b859e83407770b3907a64041a1b851b8e | branch_name: refs/heads/master
visit_date: 2016-09-15T14:22:41.627367 | revision_date: 2013-10-27T06:50:18 | committer_date: 2013-10-27T06:50:18
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
import numpy
import Newtons
if __name__ == '__main__':
# Functions to be solved, v[0] is x, v[1] is y
f = [lambda v: -(v[0]**3 + 3*(v[0]**2)*v[1] - 2*v[0]*(v[1]**2) \
- 7*(v[1]**3) + 604894015496000),
lambda v: -(-15*(v[0]**2) - 57*v[0]*v[1] - 67*(v[1]**2) \
+ 26864190700)]
guesses = [[ 1 + 1j, 1 + 1j],
[ 1 - 1j, 1 - 1j],
[-100 - 100j, -100 - 100j],
[-100 + 100j, -100 + 100j],
[100000 + 100000j, 100000 + 100000j],
[100000 - 100000j, 100000 - 100000j]]
for i in xrange(len(guesses)):
# Guess
P = numpy.array(guesses[i])
# Solve
print Newtons.iterativeSolve(f, P, 0.01)[0]
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2013

__id__: 13,426,067,792,748
blob_id: f6ed56de4478a393b43e332f3cbdfe00667061e6 | directory_id: 077532be2ac14cabfd68b4c7f601868997c147c0 | content_id: c1cf1eb80f87b888c2374082781ef82ebc4ba1a6
path: /scrap/getlink.py
detected_licenses: [] | license_type: no_license
repo_name: nautical/vim.org.scrape | repo_url: https://github.com/nautical/vim.org.scrape
snapshot_id: 079dc35afc2a315f550d426a4c0a81c3adb434dd | revision_id: f9bdd051e9f5e260e202dc3b363051a5e8e9f941 | branch_name: refs/heads/master
visit_date: 2016-03-02T09:14:46.649149 | revision_date: 2013-07-13T12:16:06 | committer_date: 2013-07-13T12:16:06
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
import urllib
import lxml.html
from BeautifulSoup import BeautifulSoup as soup
import os
import sys
print "=========================="
pid = str(os.getpid())
print "Running process ID : " + pid
open("./process/running_process", "a").write(sys.argv[1]+":"+sys.argv[2]+":"+pid+"\n")
print "=========================="
data = open("data.xml","r")
lines = data.read()
html = soup(lines)
print "starting script from : " + sys.argv[1] + " : " + sys.argv[2]
a = list(set([tag.attrMap['href'] for tag in html.findAll('a', {'href': True})]))[int(sys.argv[1]):int(sys.argv[2])]
def get_content(url):
try:
sock = urllib.urlopen(url)
htmlsource = sock.read()
sock.close()
return htmlsource
except:
return None
def get_table(url):
try:
content = get_content(url)
doc = lxml.html.document_fromstring(content)
row_elements = doc.xpath('//table')[1][2][2]
return row_elements
except:
return None
def write(con):
text_file = open("./data/"+A.split("?")[-1], "w+")
text_file.write("%s"%con)
print " Success !!"
text_file.close()
counter = int(sys.argv[1])
for A in a:
if(os.path.isfile("./data/"+A.split("?")[-1])):
counter = counter + 1
print sys.argv[1] + " [Skipping =>] " + A
pass
else:
counter = counter + 1
print str(counter) + " => " + A
con = get_table(A)
try:
con = lxml.html.tostring(con)
write(con)
except:
pass
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2013

__id__: 4,999,341,947,400
blob_id: 1b6fdbe0200ff05d836074eef2d6d374fe7b35b6 | directory_id: 7a6dd7d115634a4a69755299fff7d1477b013ccc | content_id: 00330c2ff76e92a923ed6fcad154223845ea15c9
path: /src/config.py
detected_licenses: [] | license_type: no_license
repo_name: andrewjsledge/biffy | repo_url: https://github.com/andrewjsledge/biffy
snapshot_id: ea6bfb0fa50776abb256a2baab48126af1e44052 | revision_id: dc44b12f3a22d07c3e5ae283f5d52552b37f9950 | branch_name: refs/heads/master
visit_date: 2020-06-04T01:04:28.678365 | revision_date: 2013-12-16T15:06:54 | committer_date: 2013-12-16T15:06:54
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
__author__ = 'andrew'
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = True
ADMINS = frozenset(['[email protected]'])
SECRET_KEY = 'SecretKeyForSessionSigning'
DEBUG_TB_PROFILER_ENABLED=True
DEBUG_TB_INTERCEPT_REDIRECTS=False
###
# Permanent storage
###
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(
_basedir, 'application.db'
)
DATABASE_CONNECT_OPTIONS = {}
SQLALCHEMY_ECHO = DEBUG
###
# Caching
###
CACHE_TANK = "simple"
if CACHE_TANK == "memcached":
from werkzeug.contrib.cache import MemcachedCache
cache = MemcachedCache(['127.0.0.1:11211'])
elif CACHE_TANK == "simple" or CACHE_TANK == "" or CACHE_TANK is None:
from werkzeug.contrib.cache import SimpleCache
cache = SimpleCache()
THREADS_PER_PAGE = 8
CSRF_ENABLED=True
CSRF_SESSION_KEY="somethingimpossibletoguess"
RECAPTCHA_USE_SSL = False
RECAPTCHA_PUBLIC_KEY = 'blahblahblahblahblahblahblahblahblah'
RECAPTCHA_PRIVATE_KEY = 'blahblahblahblahblahblahprivate'
RECAPTCHA_OPTIONS = {'theme': 'white'}
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | year: 2013

__id__: 16,140,487,143,975
blob_id: 62fd9141ab8509d712efb9fa4f7c802d8579f002 | directory_id: 666b983548e68e4ed2667f1374cc3cb17e3d8c4c | content_id: f492fad232a34e6e0988948d04052658ca52736a
path: /test.py
detected_licenses: [] | license_type: no_license
repo_name: zklevsha/Discover-flask | repo_url: https://github.com/zklevsha/Discover-flask
snapshot_id: 4f58942751ceadf193c553593428a9113fc71a10 | revision_id: 798def4326fdd5dd55c33e6fdfe46343adb01164 | branch_name: refs/heads/master
visit_date: 2016-09-06T20:14:49.866020 | revision_date: 2014-10-02T12:49:05 | committer_date: 2014-10-02T12:49:05
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_* fields: null
content:
#My unit tests
from app import app
import unittest
class FlaskTestCase(unittest.TestCase):
# Ensure that Flask was set up correctly
def test_index(self):
tester = app.test_client(self)
response = tester.get('/login', content_type='html/text')
self.assertEqual(response.status_code, 200)
# Ensure that login page loads correctly
def test_login_page_loads(self):
tester = app.test_client(self)
response = tester.get('/login', content_type='html/text')
self.assertTrue(b'Please login' in response.data)
# Ensure login behaves correctly given the correct credentials
def test_correct_login(self):
tester = app.test_client(self)
response = tester.post(
'/login',
data=dict(username="admin", password='admin'),
follow_redirects = True
)
self.assertIn(b'You were just logged ', response.data)
# Ensure login behaves correctly given the incorrect credentials
def test_incorrect_login(self):
tester = app.test_client(self)
response = tester.post(
'/login',
data=dict(username="admin1", password='admin'),
follow_redirects = True
)
self.assertIn(b'Invalid credentials. Please try again', response.data)
    #Ensure logout behaves correctly
def test_correct_logout(self):
tester = app.test_client(self)
tester.post(
'/login',
data=dict(username="admin", password='admin'),
follow_redirects = True
)
response = tester.get('/logout', follow_redirects=True)
self.assertIn(b'You were just logged out', response.data)
#Ensure that main page requires login
def test_main_page_requires_login(self):
tester = app.test_client(self)
response = tester.get('/', follow_redirects=True)
self.assertIn(b'You need to login first',response.data)
    #Ensure that the logout route requires login
    def test_logout_page_requires_login(self):
tester = app.test_client(self)
response = tester.get('/logout', follow_redirects=True)
self.assertIn(b'You need to login first',response.data)
# Ensure posts display on the main page
def test_display_posts(self):
tester = app.test_client(self)
response = tester.post(
'/login',
data=dict(username="admin", password='admin'),
follow_redirects = True
)
self.assertIn(b'I`m well', response.data)
if __name__ == '__main__':
unittest.main()
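# Running these tests (a sketch; assumes app.py defines `app` with the routes
# and flash messages asserted above):
#   python test.py
# or with discovery:
#   python -m unittest test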
|
UTF-8
|
Python
| false | false | 2,014 |
9,620,726,767,861 |
4969ac36fb4a9ebbb588944636363bc22db5b6f1
|
c890d645bcbb8ea3e2b49da781222f36083e361e
|
/li/x1client.py
|
a8b7bdc4cd9a524783308c4ba3bc270332ff44b6
|
[] |
no_license
|
dinimicky/drop_server
|
https://github.com/dinimicky/drop_server
|
2d5f8163bbee1e2f334c0fed21b7cfd2cd2d2613
|
971105039bf456b6c14dc49a119db60143b70ead
|
refs/heads/master
| 2021-01-23T07:21:28.793758 | 2014-08-17T03:24:14 | 2014-08-17T03:24:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2013-2-25
@author: ezonghu
Note: if no response is received, the whole process will exit.
'''
from twisted.internet.protocol import ClientFactory
from twisted.internet import task, reactor
from twisted.python import log
from twisted.words.xish import xmlstream
from twisted.words.xish.xpath import XPathQuery
from common.multixmlstream import MultiXmlStream
from common.state import RespMsg
from common import config
from lixml import li_xml_temp
class X1ClientProtocol(MultiXmlStream):
X1PingRespPath = XPathQuery('/payload/ping/pingType/pingResponse')
getX1PingRespSeqNbr = XPathQuery('/payload/ping/seqNbr').queryForString
X1AlarmPath = XPathQuery('/payload/extPDU/LI-ADM-Event/lI-ADM-MessageSequence/alarmNotification')
callLater = reactor.callLater
timeOut = 30
def connectionMade(self):
log.msg('x1 tcp connection is made')
self.alarmCounter = 0
self.factory.x1_queue.get().addCallback(self.cmdReceived)
if config.pingEnable:
self.lcping = task.LoopingCall(self._sendPingRequest)
self.lcping.start(config.ping_delay)
MultiXmlStream.connectionMade(self)
def recordX1Alarm(element):
self.alarmCounter += 1
log.msg("recv X1 total alrams: %d" % self.alarmCounter)
log.msg("recv X1 alarm: %s" % element.toXml())
self.addObserver(X1ClientProtocol.X1AlarmPath, recordX1Alarm)
def cmdReceived(self, reqMsg):
if reqMsg.type == 'cmd':
log.msg("recv cmd: %s" % reqMsg.content)
self.reqMsg = reqMsg
self._sendX1Xml(self.reqMsg.content)
self.factory.x1_queue.get().addCallback(self.cmdReceived)
def connectionLost(self, Why):
log.msg("connnect is lost, reason:%s" % Why)
if hasattr(self, 'lcping') and self.lcping is not None:
lcping, self.lcping = self.lcping, None
lcping.stop()
log.msg('server exited')
reactor.stop()
if self.factory.x1_queue:
self.factory.x1_queue = None
return Why
def _sendPingRequest(self):
self.factory.state.x1Seq += 1
self.send(li_xml_temp.pingX1Req(self.factory.state.x1Seq))
log.msg("x1 ping request is sent out, x1Seq =", self.factory.state.x1Seq)
def recvPingResp(cancelPingId, x1Seq, element):
cancelPingId.cancel()
RecvX1Seq = int(X1ClientProtocol.getX1PingRespSeqNbr(element))
log.msg("recv x1 ping response, x1Seq=%d; send out x1Seq=%d" % (RecvX1Seq, x1Seq))
def ping_cancel():
self.removeObserver(X1ClientProtocol.X1PingRespPath, recvPingResp)
log.msg("x1 ping response is not received ")
self.transport.loseConnection()
pingCallID = self.callLater(config.ping_timeout, ping_cancel)
self.addOnetimeObserver(X1ClientProtocol.X1PingRespPath, recvPingResp, 0, pingCallID, self.factory.state.x1Seq)
def _sendX1Xml(self, xml):
log.msg('x1 send out xml directly')
self.send(xml)
expectResp = XPathQuery("//%s" % (self.reqMsg.expectedRes))
def recvCmdResp(cancelCmdCallID, element):
cancelCmdCallID.cancel()
def send_resp2cmd_queue(x1CliInst):
log.msg('recv X1 response')
x1CliInst.factory.cmd_queue.put(RespMsg(result="OK", content=x1CliInst.recvRootElement))
self.addOnetimeObserver(xmlstream.STREAM_END_EVENT, send_resp2cmd_queue)
def cancelCmdResp():
self.removeObserver(expectResp, recvCmdResp)
log.msg("X1 did't receive response. request:%s." % self.reqMsg.content)
from twisted.words.xish import domish
self.factory.cmd_queue.put(RespMsg(result="Unavailable", content=domish.Element((None, 'Unavailable'))))
self.reqMsg = None
self.transport.loseConnection()
cancelCmdRespCallId = self.callLater(X1ClientProtocol.timeOut, cancelCmdResp)
self.addOnetimeObserver(expectResp,
recvCmdResp, 0, cancelCmdRespCallId)
return cancelCmdResp
class X1ClientFactory(ClientFactory):
protocol = X1ClientProtocol
def __init__(self, cmd_queue, x1_queue, state):
self.cmd_queue = cmd_queue
self.x1_queue = x1_queue
self.state = state
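# Minimal wiring sketch (assumptions: cmd_queue/x1_queue are
# twisted.internet.defer.DeferredQueue instances, `state` carries an x1Seq
# counter, and host/port are illustrative):
#
#   from twisted.internet import reactor
#   from twisted.internet.defer import DeferredQueue
#
#   cmd_q, x1_q = DeferredQueue(), DeferredQueue()
#   reactor.connectTCP('127.0.0.1', 9000, X1ClientFactory(cmd_q, x1_q, state))
#   reactor.run()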
|
UTF-8
|
Python
| false | false | 2,014 |
8,572,754,759,728 |
ea6a88c9d01989d318174196ee0db87e8046a387
|
952f354d6333a7ed11088a5fb2cea0d940b86ec7
|
/pelicanconf.py
|
1a49989bb6626927a84d580573fc976e18a6571c
|
[] |
no_license
|
Flowdalic/xmpp.org
|
https://github.com/Flowdalic/xmpp.org
|
151078c764a4af54ffb579635a7dec24d2c87834
|
4d0a11a2fb4f4dde921a8806fc659c070fc92529
|
refs/heads/master
| 2023-08-03T03:48:21.382935 | 2014-08-07T14:34:33 | 2014-08-07T14:34:33 | 22,764,397 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'[email protected]'
SITENAME = u'XMPP Standards Foundation'
SITEURL = ''
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('XMPP Questions', 'http://stackoverflow.com/questions/tagged/xmpp'),
('@xmpp', 'https://twitter.com/xmpp'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
GOOGLE_ANALYTICS = "UA-48883675-1"
DIRECT_TEMPLATES = ['index']
THEME = 'xmpp.org-theme'
CUSTOM_CSS = 'theme/css/style.css'
BOOTSTRAP_THEME = 'journal'
# Tell Pelican to add 'extra/custom.css' to the output dir
STATIC_PATHS = ['images', 'extra/custom.css', 'CNAME' ]
# Tell Pelican to change the path to 'static/custom.css' in the output dir
EXTRA_PATH_METADATA = {
'extra/custom.css': {'path': 'static/custom.css'}
}
|
UTF-8
|
Python
| false | false | 2,014 |
15,350,213,131,622 |
3aedac9d62d14e7700c5243a5600f8d2b11f27ca
|
6040a6bcec783e89dd23cc948792230a61e49113
|
/check_modules.py
|
99b39eebd70f00d31fb5fb7eeef34bdc152516da
|
[
"MIT"
] |
permissive
|
alastair/pymei
|
https://github.com/alastair/pymei
|
48edc41cf5b2573f543f7c72eff8ba8ad5312770
|
b0998fd37224a1187fbbeecebcc11fe6ea7bbc2a
|
refs/heads/master
| 2018-05-29T16:32:47.284539 | 2012-02-16T09:01:15 | 2012-02-16T09:01:15 | 3,128,594 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ================================================================
# check_modules.py
#
# Checks the current pymei modules against those defined
# in a RelaxNG MEI schema and alerts the user if there
# are any discrepancies. Mostly used for debugging purposes
#
# Author: Andrew Hankinson
# License: MIT
#
# ================================================================
import os
import sys
from optparse import OptionParser
from lxml import etree
import pymei.Components.Modules as mod
if __name__ == "__main__":
usage = "usage: %prog path_to_rng_schema"
desc = """ This test script runs against the defined RelaxNG schema and verifies that
a defined python class exists for each element defined in the schema."""
p = OptionParser(usage=usage, description=desc, version="%prog 0.1a")
# p.add_option("-f", "--folder", action="store", help="Path to folder containing the modules")
(options, args) = p.parse_args()
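# Example invocation (the schema path is illustrative, not shipped here):
#   python check_modules.py schemata/mei-all.rng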
difference = []
problems = False
# get python modules
p = filter(lambda x: x.endswith("_") and not x.endswith("__"), dir(mod))
v = set(map(lambda x: x.rstrip("_"), p))
# get RNG schema. An earlier version of this script loaded all of the modules in from separate files. Now all the
# elements are in a single file.
f = open(args[0], 'r')
t = etree.parse(f)
f.close()
els = set([dict(e.items()).values()[0] for e in t.xpath("/r:grammar//r:element", namespaces={'r':'http://relaxng.org/ns/structure/1.0'})])
num_els = len(els)
print "The number of elements was {0}".format(num_els)
if not els.issubset(v):
problems = True
print "We found these elements that were not common to the schema and the python library: {0}".format(list(els.difference(v)))
if not problems:
print "\nNo problems were found. The Python library and the RelaxNG schema are in sync."
else:
print "\nProblems were found. Please correct them and try again."
# m = [r for r in os.listdir(options.folder) if os.path.splitext(r)[-1] == ".rng" and r != "mei-all.rng"]
# for fl in m:
# print "Processing {0}".format(fl)
# f = open(os.path.join(options.folder, fl), 'r')
# t = etree.parse(f)
# f.close()
#
# # construct a set of all the elements defined in the RNG file.
#
#
# # check if the els in the RNG file are a subset of the objects defined in python.
|
UTF-8
|
Python
| false | false | 2,012 |
3,272,765,095,360 |
678745651b56b0f639a058feec27866c3a457452
|
600451fe6362a52de538a5d34e3f30d4df0e225a
|
/file_renamer.py
|
87a92a5fb04ff436f85e988d030f568c99386fc8
|
[
"MIT"
] |
permissive
|
grimlck/file_renamer
|
https://github.com/grimlck/file_renamer
|
75e4e2065c96af252b160665aab559e01bc32f67
|
e578de5c7320254d443eeb456f9cc5d3f9b58206
|
refs/heads/master
| 2021-01-25T12:23:59.068925 | 2013-06-24T17:46:03 | 2013-06-24T17:46:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
This program renames files that follow the naming scheme
01-Interpret-Titel.ext, replacing special characters
NOTE: only works on operating systems which handle their file names case sensitively
"""
import os
import sys
section_delimiter="-"
word_delimiter=" "
new_word_delimiter="_"
# list of (old, new) character replacements
replacement=[
["'","_"],
[" ","_"],
[".",""],
["ß","ss"],
["[","("],
[",",""],
["]",")"],
["!",""],
["?",""]
]
def replace_chars(string,replacement_dict):
"""
replace the characters in a string according to a list of (old, new) pairs
"""
if string and replacement_dict:
for item in replacement_dict:
string = string.replace(item[0],item[1])
return string
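# Worked example, derived from the replacement list above (spaces become
# underscores first, then the bracket and punctuation rules apply in order):
#   replace_chars("A Song [Live]!", replacement)  ->  "A_Song_(Live)"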
def rename_files(directory):
"""
rename all files in a directory after replacing special characters
"""
if os.path.exists(directory) and os.path.isdir(directory):
os.chdir(directory)
cwd = os.getcwd()
# iterate over files in the given directory
for file in os.listdir(cwd):
if os.path.isfile(file):
# split the file name into extension and the name
extension = os.path.splitext(file)[-1]
file_name = os.path.splitext(file)[0]
# split the file name into sections
splitted_file_name = file_name.split("-")
# add 0 to the beginning of the filename
if len(splitted_file_name[0]) == 1:
splitted_file_name[0] = "0"+splitted_file_name[0]
# join the sections
file_name = "-".join(item.strip() for item in splitted_file_name)
# replace special chars and rebuild the file name
file_name = replace_chars(file_name,replacement)
file_name = file_name+extension
file_name = file_name.lower()
if file_name != file:
if not os.path.exists(file_name):
os.renames(file,file_name)
else:
print file_name+" already exists"
return 0
else:
return 1
def main():
if len(sys.argv) > 1:
directory = os.path.abspath(sys.argv[1])
rename_files(directory)
sys.exit(0)
else:
print "No directory specified."
sys.exit(1)
if __name__ == "__main__":
main()
|
WINDOWS-1252
|
Python
| false | false | 2,013 |
1,254,130,471,886 |
be0ae740faeebeeccf904ff39313b57d69083be0
|
7cb6c4b07454eac8f01efbc653a78f022aab0e18
|
/final/template_learning/output_converter.py
|
ac6137e67f0d03d85d5bb49b39298b71b441fb57
|
[] |
no_license
|
j43ster/480
|
https://github.com/j43ster/480
|
18bd0280a36d3c6f85a5f4d8577dec43bdeb4ba1
|
f9a78f4d984bb779b188ad03a27f6125fd9f3122
|
refs/heads/master
| 2021-01-10T21:57:34.537503 | 2014-12-04T08:19:20 | 2014-12-04T08:19:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python2
import ast
import sys
infile = None
if (len(sys.argv) > 1):
infile = sys.argv[1]
else:
sys.exit("usage: output_converter.py <infile>")
f = open(infile)
for line in f:
line = line.strip()
line = line.partition(": ")[2].strip()
if line:
line_list = ast.literal_eval(line)[0]
(sentence, pos) = zip(*line_list)
print(sentence)
print(pos)
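# Expected input format (an assumption inferred from the parsing above --
# everything after the first ": " must be a Python literal):
#   1: [[('The', 'DT'), ('dog', 'NN'), ('barks', 'VBZ')]]
# which prints ('The', 'dog', 'barks') and ('DT', 'NN', 'VBZ').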
|
UTF-8
|
Python
| false | false | 2,014 |
3,100,966,400,119 |
451d4be41f2e9606b4aa540a2727f4d521f83ff3
|
bb19a583a5b5735251b1f6ef7e64f3e5f54cb07d
|
/ESPN_Parser/Rushing_Play.py
|
fae1de69e86fc05114073f8e62c558e09314b639
|
[] |
no_license
|
townsepw/rcfbscraper
|
https://github.com/townsepw/rcfbscraper
|
fbbf0211b872673a8d87a81e3a947e028512bf8e
|
5484e0444dc7bb8d0120a753a5e9a3f3d1e9cad2
|
refs/heads/master
| 2020-12-03T05:23:40.629797 | 2014-10-14T15:36:34 | 2014-10-14T15:36:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
# Holds the data for a rushing play
class Rushing_Play:
def __init__(self, game_code, play_num, team_code, rusher):
self.Game_Code = game_code
self.Play_Num = play_num
self.Team_Code = team_code
self.Rusher = rusher
self.Attempt = 1
self.Yards = 0
self.Touchdown = 0
self.First_down = 0
self.Fumble = 0
self.Fumble_Lost = 0
self.Sack = 0
self.Safety = 0
# Returns an array of relevant information
def Compile_Play(self):
OutputArray = []
OutputArray.append(str(self.Game_Code))
OutputArray.append(str(self.Play_Num))
OutputArray.append(str(self.Team_Code))
OutputArray.append(str(self.Rusher))
OutputArray.append(str(self.Attempt))
OutputArray.append(str(self.Yards))
OutputArray.append(str(self.Touchdown))
OutputArray.append(str(self.First_down))
OutputArray.append(str(self.Fumble))
OutputArray.append(str(self.Fumble_Lost))
OutputArray.append(str(self.Sack))
OutputArray.append(str(self.Safety))
return OutputArray
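# Usage sketch (all values are illustrative):
#   play = Rushing_Play(289, 12, 57, "SMITH, J.")
#   play.Yards = 8
#   row = play.Compile_Play()  # list of strings, ready for a csv writer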
|
UTF-8
|
Python
| false | false | 2,014 |
16,947,940,984,542 |
58e229bdb777b8d5aaf343e5d44cc66f21535cf1
|
781b36d4dc25aaedd856de63f0f5c31bbd27c249
|
/lab7/problem3_2.py
|
3d31d1920e804802122c08538a4f71f523e8eca4
|
[] |
no_license
|
krl22785/principles
|
https://github.com/krl22785/principles
|
1eecacd252c59abbf63ba39e59ebcc3b3e4b0a81
|
b8330eb737c976580bfa84011facf75de8985ea5
|
refs/heads/master
| 2021-01-01T19:56:21.380206 | 2014-12-08T01:09:57 | 2014-12-08T01:09:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import csv
import sys
import pandas as pd
import scipy as sp
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
def allComplaintsbyAgency(dictionary):
df = pd.DataFrame(dictionary)
colList = ['NYPD', 'DOT', 'DOB', 'TLC', 'DPR']
df_agency = df[colList]
df_agency['zipcode'] = df_agency.index
return df_agency
def allPopulations(dictionary):
df1 = pd.DataFrame(dictionary.items(), columns = ['zipcode','population'])
return df1
def mergeGraph(df_agency, df1):
df3 = pd.merge(df_agency, df1, on='zipcode')
df3[['population']] = df3[['population']].astype(float)
plt.scatter(df3.population, df3.NYPD, color = 'blue', alpha = .5)
plt.scatter(df3.population, df3.DOT, color = 'green', alpha = .5)
plt.scatter(df3.population, df3.DOB, color = 'red', alpha = .5)
plt.scatter(df3.population, df3.TLC, color = 'black', alpha = .5)
plt.scatter(df3.population, df3.DPR, color = 'orange', alpha = .5)
plt.xlabel("Population")
plt.ylabel("Number of Complaints")
plt.xlim(xmax = 120000, xmin = 0)
plt.ylim(ymax = 1500, ymin = 0)
plt.title("Number of Complaints by Population by Agency")
plt.legend(colList, loc = 'upper left')
plt.show()
if __name__ == '__main__':
filename = open(sys.argv[1])
reader = csv.reader(filename, delimiter = ',')
next(reader)
colList = ['NYPD', 'DOT', 'DOB', 'TLC', 'DPR']
agencyComplaints = {}
for line in reader:
agency = line[3]
zipCode = line[8][:5]
if agency in agencyComplaints:
if zipCode in agencyComplaints[agency]:
agencyComplaints[agency][zipCode] += 1
else:
agencyComplaints[agency][zipCode] = 1
else:
dictCnt = {}
dictCnt[zipCode] = 1
agencyComplaints[agency] = dictCnt
df_agency = allComplaintsbyAgency(agencyComplaints)
filename1 = open(sys.argv[2])
reader1 = csv.reader(filename1, delimiter = ',')
population = {}
for line in reader1:
population[line[0]] = line[1]
df1 = allPopulations(population)
mergeGraph(df_agency, df1)
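# Usage sketch (file names are assumptions): the first CSV must carry the
# agency in column 3 and the incident zip in column 8; the second is a plain
# zipcode,population file:
#   python problem3_2.py 311_complaints.csv zip_population.csv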
|
UTF-8
|
Python
| false | false | 2,014 |
455,266,575,350 |
c7c1873c083441abc7beae87e2b2511e60c30b5b
|
630bcc63e9d4a49b05f0dd77b8091a5f50c1bed0
|
/uri/views.py
|
a859e59c41915991772790f1bbc0128688ccf96c
|
[] |
no_license
|
tpetr/schedr
|
https://github.com/tpetr/schedr
|
d0f3970390adc0436c74e5481e40f7060b0fbc94
|
771f88bb2b565bb60e0747155b0819538fcc203d
|
refs/heads/master
| 2021-01-20T04:32:59.435153 | 2014-03-05T16:44:32 | 2014-03-05T16:44:32 | 17,447,470 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.core.urlresolvers import reverse
from schedr.uri.models import Term, Major, Course, Section, Event, User, Location, CourseComment, UserData
from schedr.base.models import School
from schedr.school import views
|
UTF-8
|
Python
| false | false | 2,014 |
506,806,172,616 |
cae15e063d4d9f51dfa7276c4256ce2fb098a3d4
|
13a6f4d27784580f7033d370b0c0e01e04e0dddc
|
/7.other.py
|
f67b0874d3ab4a2e35669afe610a11c5c3fe4475
|
[] |
no_license
|
kazhuravlev/py_tests
|
https://github.com/kazhuravlev/py_tests
|
323a5ce12ffa9271a4eb8aca93ea38baa8c62b36
|
e40899010b319467e1fe4120b05f5f3644ab8c32
|
refs/heads/master
| 2016-08-01T07:38:40.726792 | 2014-04-16T07:55:59 | 2014-04-16T07:55:59 | 14,669,800 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
import os
import sys
import inspect
# How to get a list of all of an object's attributes
print [
name
for name, item
in inspect.getmembers(object)
if inspect.ismethod(item)
]
# How to get a list of all of an object's public attributes
print [attr for attr in dir(object) if not attr.startswith('_')]
# How to get a list of an object's methods
print [attr for attr in dir(object) if callable(getattr(object, attr))]
# В какой "магической" переменной хранится содержимое help?
print object.__doc__
# Given two tuples, build a third as the concatenation of the first two
print tuple() + tuple()
# Given two tuples, build a third from the union of the unique elements of
# the first two
print tuple(set(tuple()) | set(tuple()))
# Why, when a list is modified inside a loop, do we write for x in lst[:] --
# what does [:] mean?
# -- [:] takes a slice of the list, and a list slice is always a copy
# Given two lists of equal length, one of keys and one of values, build a
# dict.
print dict(zip(list(), list()))
# Given two lists of different lengths, one of keys and one of values, build
# a dict. For keys without a value use None as the value; ignore values
# without a key.
a = range(3)
b = range(1)
print dict(zip(a, b) if len(a) < len(b) else map(None, a, b))
# Given a dict, invert it, i.e. swap the key: value pairs to value: key.
d = dict([(1, 2), (3, 4), (5, 6)])
print d, '=>', dict(zip(d.itervalues(), d.iterkeys()))
print d, '=>', dict([(v, k) for k, v in d.iteritems()])
print d, '=>', dict(map(lambda item: (item[1], item[0]), d.iteritems()))
print os.defpath
print sys.path
|
UTF-8
|
Python
| false | false | 2,014 |
4,183,298,148,464 |
3fa384f7295ab0154343cbdbbae8a3759c2ea6e2
|
78968f83cb8f33c59af7e19f8852893a43ac5725
|
/ex13.py
|
90e4f476f350b9bd141a2ebcc9627d93b3b14f57
|
[] |
no_license
|
jamesadenny/LPTHWexercises
|
https://github.com/jamesadenny/LPTHWexercises
|
470e746cbaaed5341c6f22f1d13684648a629637
|
cdf447f54016f1231aa550502c9f2b9e3c8f6244
|
refs/heads/master
| 2016-09-06T01:36:28.356700 | 2014-04-12T22:26:41 | 2014-04-12T22:26:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Here we import the argument variable module, this holds the arguments you pass to the script when
# you run it
from sys import argv
# Here we "unpack" argv. "Take whatever is in argv, unpack it, and assign it to all of these
# variables on the left in order."
script, first, second, third = argv
print "The script is called:", script # This prints the file name, in this case it's ex13.py
# script seems to be only changeable via the file name.
print "Your first variable is:", first
print "Your second variable is:", second
print "Your third variable is:", third
# For this to run correctly in the shell, it needs the correct arguments, as follows:
# python ex13.py first 2nd 3rd
# This is interesting as what gets printed is totally dependent on the shell arguments, and they
# can be pretty much anything!
# Adding some raw_input at the bottom here just to see what happens
grasp = raw_input("On a scale of 1-10, how much do you enjoy using raw input?")
print "%s? Wow, that much?" % grasp # Don't forget to reference the variable!
|
UTF-8
|
Python
| false | false | 2,014 |
5,196,910,446,385 |
eb03c09f5d000bba0feb4a89b9b215c94cd3ce13
|
04f47e6292796f22db28312ff1089f382b3496c0
|
/nam/configmanager.py
|
289467f5634de2410a9e4c68d7201908f864895a
|
[] |
no_license
|
UfSoft/GtkNAM
|
https://github.com/UfSoft/GtkNAM
|
6de5f053955da8d7ad2bb90ad5ffa0bacbca625f
|
061f753c46f57d3aa4ac81f615ef2c686839853c
|
refs/heads/master
| 2021-01-02T09:14:35.007859 | 2010-11-11T01:35:26 | 2010-11-11T01:35:26 | 26,619,023 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# configmanager.py
#
# Copyright (C) 2007 Andrew Resch <[email protected]>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import os
import logging
import nam.common
from nam.config import Config
log = logging.getLogger(__name__)
class _ConfigManager:
def __init__(self):
log.debug("ConfigManager started..")
self.config_files = {}
self.__config_directory = None
@property
def config_directory(self):
if self.__config_directory is None:
self.__config_directory = nam.common.get_default_config_dir()
return self.__config_directory
def __del__(self):
log.debug("ConfigManager stopping..")
del self.config_files
def set_config_dir(self, directory):
"""
Sets the config directory.
:param directory: str, the directory where the config info should be
:returns bool: True if successfully changed directory, False if not
"""
if not directory:
return False
log.info("Setting config directory to: %s", directory)
if not os.path.exists(directory):
# Try to create the config folder if it doesn't exist
try:
os.makedirs(directory)
except Exception, e:
log.error("Unable to make config directory: %s", e)
return False
elif not os.path.isdir(directory):
log.error("Config directory needs to be a directory!")
return False
self.__config_directory = directory
# Reset the config_files so we don't get config from old config folder
# XXX: Probably should have it go through the config_files dict and try
# to reload based on the new config directory
self.save()
self.config_files = {}
return True
def get_config_dir(self):
return os.path.abspath(self.config_directory)
def close(self, config):
"""Closes a config file."""
try:
del self.config_files[config]
except KeyError:
pass
def save(self):
"""Saves all the configs to disk."""
for value in self.config_files.values():
value.save()
# We need to return True to keep the timer active
return True
def get_config(self, config_file, defaults=None):
"""Get a reference to the Config object for this filename"""
log.debug("Getting config '%s'", config_file)
# Create the config object if not already created
if config_file not in self.config_files.keys():
self.config_files[config_file] = Config(config_file, defaults, self.config_directory)
return self.config_files[config_file]
# Singleton functions
_configmanager = _ConfigManager()
def ConfigManager(config, defaults=None):
return _configmanager.get_config(config, defaults)
def set_config_dir(directory):
"""Sets the config directory, else just uses default"""
return _configmanager.set_config_dir(directory)
def get_config_dir(filename=None):
if filename is not None:
return os.path.join(_configmanager.get_config_dir(), filename)
else:
return _configmanager.get_config_dir()
def close(config):
return _configmanager.close(config)
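# Usage sketch (config name and defaults are illustrative; assumes
# nam.config.Config exposes save(), which the save() loop above relies on):
#   set_config_dir('/tmp/nam-config')
#   cfg = ConfigManager('ui.conf', defaults={'show_toolbar': True})
#   cfg.save()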
|
UTF-8
|
Python
| false | false | 2,010 |
13,417,477,840,251 |
6076fba08673747e8a303f2b7ec0d5f78178d9d0
|
fbe79dff60565c027f0dea490212f77d6080419a
|
/chenyx06plus.py
|
a34e03b8b4a31ce71c468881d5d7163702fba85d
|
[] |
no_license
|
edigonzales-archiv/qgis_chenyx06plus
|
https://github.com/edigonzales-archiv/qgis_chenyx06plus
|
34c447d0f52d0858fc7eb11e6057310e9e70e3ac
|
5c1dd79076717211bc326ca770b54468b739797d
|
refs/heads/master
| 2021-05-27T19:37:32.530619 | 2014-07-11T14:21:18 | 2014-07-11T14:21:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: latin1 -*-
"""
/***************************************************************************
Name : Chenyx06plus
Description : ....
Date : 2011-05-30
copyright : (C) 2011 by Stefan Ziegler
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries.
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4 import QtXml
from qgis.core import *
from qgis.gui import *
#import os, time, shutil
#import xlwt as pycel
#from tools.dbTools import DbObj
import tools.utils
class Chenyx06plus:
def __init__( self, iface, version ):
self.iface = iface
self.version = version
# Do some initialisation work.
srs = QgsCoordinateReferenceSystem()
srs.createFromSrsId(21781)
self.canvas = self.iface.mapCanvas()
self.canvas.setMapUnits(QGis.Meters)
self.rect = self.canvas.extent()
mapRender = self.canvas.mapRenderer()
mapRender.setMapUnits(QGis.Meters)
mapRender.setDestinationCrs(srs)
mapRender.setProjectionsEnabled(0)
QgsProject.instance().writeEntry("MapCanvas","/Units",QString("meters"))
QgsProject.instance().writeEntry("SpatialRefSys","/ProjectCRSProj4String",srs.toProj4())
# QgsProject.instance().writeEntry("Digitizing","/TopologicalEditing",1);
# QgsProject.instance().writeEntry("Digitizing","/AvoidPolygonIntersections",1);
self.canvas.refresh()
def initGui(self):
self.menu = QMenu()
self.menu.setTitle( QCoreApplication.translate( "chenyx06+", "CHENyx06+" ) )
self.loadChenyx06 = QAction( QCoreApplication.translate("chenyx06+", "Load CHENyx06 data" ), self.iface.mainWindow() )
self.changeSettings = QAction( QCoreApplication.translate("chenyx06+", "Settings" ), self.iface.mainWindow() )
self.about = QAction( QCoreApplication.translate("chenyx06+", "About" ), self.iface.mainWindow() )
self.transformMenu = QMenu( QCoreApplication.translate( "chenyx06+", "Transform data..." ) )
self.transformRegular = QAction( QCoreApplication.translate("chenyx06+", "with regular CHENyx06" ), self.iface.mainWindow() )
self.transformModified = QAction( QCoreApplication.translate("chenyx06+", "with modified CHENyx06" ), self.iface.mainWindow() )
self.transformMenu.addAction( self.transformModified )
self.transformMenu.addAction( self.transformRegular )
self.compareCoordsMenu = QMenu( QCoreApplication.translate( "chenyx06+", "Compare coordinates..." ) )
self.compareCoordsById = QAction( QCoreApplication.translate("chenyx06+", "by identifier" ), self.iface.mainWindow() )
self.compareCoordsByLocation = QAction( QCoreApplication.translate("chenyx06+", "by location" ), self.iface.mainWindow() )
self.compareCoordsMenu.addAction( self.compareCoordsById )
self.compareCoordsMenu.addAction( self.compareCoordsByLocation )
self.importMenu = QMenu( QCoreApplication.translate( "chenyx06+", "Import" ) )
self.importCopyLv03TspData = QAction( QCoreApplication.translate("chenyx06+", "Copy data into LV03-TSP" ), self.iface.mainWindow() )
self.importCopyLv95TspData = QAction( QCoreApplication.translate("chenyx06+", "Copy data into LV95-TSP" ), self.iface.mainWindow() )
self.importMenu.addAction( self.importCopyLv03TspData )
self.importMenu.addAction( self.importCopyLv95TspData )
self.exportMenu = QMenu( QCoreApplication.translate( "chenyx06+", "Export" ) )
self.exportFineltra = QAction( QCoreApplication.translate("chenyx06+", "Export CHENyx06" ), self.iface.mainWindow() )
self.exportGridPlot = QAction( QCoreApplication.translate("chenyx06+", "Export regular grid plot" ), self.iface.mainWindow() )
self.exportMenu.addAction( self.exportFineltra )
self.exportMenu.addAction( self.exportGridPlot )
self.testsMenu = QMenu( QCoreApplication.translate( "chenyx06+", "Tests" ) )
self.testMissingTsp = QAction( QCoreApplication.translate("chenyx06+", "Missing TSP" ), self.iface.mainWindow() )
self.testTriangleOverlap = QAction( QCoreApplication.translate("chenyx06+", "Triangle overlaps" ), self.iface.mainWindow() )
self.testTriangleHoles = QAction( QCoreApplication.translate("chenyx06+", "Triangle holes" ), self.iface.mainWindow() )
self.testsMenu.addAction( self.testMissingTsp )
self.testsMenu.addAction( self.testTriangleOverlap )
self.testsMenu.addAction( self.testTriangleHoles )
self.baseLayersMenu = QMenu( QCoreApplication.translate( "chenyx06+", "Load baselayer" ) )
baselayers = tools.utils.getBaselayers()
for baselayer in baselayers:
action = QAction( QCoreApplication.translate("chenyx06+", baselayer["title"] ), self.iface.mainWindow() )
self.baseLayersMenu.addAction( action )
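# Note: the default argument (layer=baselayer) in the lambda below binds the
# current loop value; a bare closure would late-bind and every menu entry
# would open the last baselayer.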
QObject.connect( action, SIGNAL( "triggered()" ), lambda layer=baselayer: self.doShowBaseLayer(layer) )
self.menu.addAction( self.loadChenyx06 )
self.menu.addMenu( self.transformMenu )
self.menu.addMenu( self.compareCoordsMenu )
self.menu.addSeparator()
self.menu.addMenu( self.importMenu )
self.menu.addMenu( self.exportMenu )
self.menu.addSeparator()
self.menu.addMenu( self.testsMenu )
self.menu.addSeparator()
self.menu.addMenu( self.baseLayersMenu )
self.menu.addSeparator()
self.menu.addAction( self.changeSettings )
self.menu.addSeparator()
self.menu.addAction( self.about )
menu_bar = self.iface.mainWindow().menuBar()
actions = menu_bar.actions()
lastAction = actions[ len( actions ) - 1 ]
menu_bar.insertMenu( lastAction, self.menu )
QObject.connect( self.loadChenyx06, SIGNAL( "triggered()" ), self.doLoadChenyx06 )
QObject.connect( self.transformRegular, SIGNAL( "triggered()" ), lambda type="regular": self.doTransformDataDialog(type) )
QObject.connect( self.transformModified, SIGNAL( "triggered()" ), lambda type="modified": self.doTransformDataDialog(type) )
QObject.connect( self.compareCoordsById, SIGNAL( "triggered()" ), self.doCompareCoordsByIdDialog )
QObject.connect( self.compareCoordsByLocation, SIGNAL( "triggered()" ), self.doCompareCoordsByLocationDialog )
QObject.connect( self.importCopyLv03TspData, SIGNAL( "triggered()" ), lambda type="lv03": self.doCopyTspDataDialog(type) )
QObject.connect( self.importCopyLv95TspData, SIGNAL( "triggered()" ), lambda type="lv95": self.doCopyTspDataDialog(type) )
QObject.connect( self.exportFineltra, SIGNAL( "triggered()" ), self.doExportFineltra )
QObject.connect( self.exportGridPlot, SIGNAL( "triggered()" ), self.doExportGridPlotDialog )
QObject.connect( self.testMissingTsp, SIGNAL( "triggered()" ), self.doTestMissingTsp )
QObject.connect( self.testTriangleOverlap, SIGNAL( "triggered()" ), self.doTestTriangleOverlap )
QObject.connect( self.testTriangleHoles, SIGNAL( "triggered()" ), self.doTestTriangleHole )
QObject.connect( self.changeSettings, SIGNAL( "triggered()" ), self.doChangeSettings )
QObject.connect( self.about, SIGNAL( "triggered()" ), self.doAbout )
def doLoadChenyx06(self):
from tools.doLoadChenyx06 import LoadChenyx06
d = LoadChenyx06()
d.run(self.iface)
def doShowBaseLayer( self, layer ):
settings = tools.utils.getSettings()
if settings["host"] == "" or settings["database"] == "" or settings["port"] == "" or settings["schema"] == "" or settings["username"] == "" or settings["password"] == "":
QMessageBox.warning( None, "CHENyx06+", "No database parameters set.")
return
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
tools.utils.doShowSimpleLayer( self.iface, layer, False )
except:
QApplication.restoreOverrideCursor()
QApplication.restoreOverrideCursor()
def doApplyModifications(self):
from tools.doApplyModifications import ApplyModifications
d = ApplyModifications()
d.run(self.iface)
def doTransformDataDialog(self, type):
from tools.doTransformDataDialog import TransformDataDialog
d = TransformDataDialog(self.iface.mainWindow(), type)
d.initGui()
d.show()
QObject.connect( d, SIGNAL( "okClickedTransformData(QString, bool, QString)" ), self.doTransformData )
def doTransformData(self, layerName, selectedOnly, type):
from tools.doTransformData import TransformData
d = TransformData()
d.run(self.iface, layerName, selectedOnly, type)
def doCompareCoordsByIdDialog(self):
from tools.doCompareCoordsByIdDialog import CompareCoordsByIdDialog
d = CompareCoordsByIdDialog(self.iface.mainWindow())
d.initGui()
d.show()
QObject.connect( d, SIGNAL( "okClickedCopyCoordsById(QString, QString, QString, QString, bool)" ), self.doCompareCoordsById )
def doCompareCoordsById(self, layerNameA, layerNameB, idA, idB, addLayerToMap):
from tools.doCompareCoordsById import CompareCoordsById
d = CompareCoordsById()
d.run(self.iface, layerNameA, layerNameB, idA, idB, addLayerToMap)
def doCompareCoordsByLocationDialog(self):
from tools.doCompareCoordsByLocationDialog import CompareCoordsByLocationDialog
d = CompareCoordsByLocationDialog(self.iface.mainWindow())
d.initGui()
d.show()
QObject.connect( d, SIGNAL( "okClickedCopyCoordsByLocation(QString, QString, QString, QString, double, bool)" ), self.doCompareCoordsByLocation )
def doCompareCoordsByLocation(self, layerNameA, layerNameB, idA, idB, searchRadius, addLayerToMap):
from tools.doCompareCoordsByLocation import CompareCoordsByLocation
d = CompareCoordsByLocation()
d.run(self.iface, layerNameA, layerNameB, idA, idB, searchRadius, addLayerToMap)
def doCopyTspDataDialog(self, type):
from tools.doCopyTspDataDialog import CopyTspDataDialog
d = CopyTspDataDialog(self.iface.mainWindow(), type)
result = d.initGui()
if result != None:
d.show()
QObject.connect( d, SIGNAL( "okClickedCopyTspData(QString, QString, QString, QString, bool)" ), self.doCopyTspData )
def doCopyTspData(self, layerName, type, numberAttrName, typeAttrName, onlySelected):
from tools.doCopyTspData import CopyTspData
d = CopyTspData()
d.run(self.iface, layerName, numberAttrName, typeAttrName, type, onlySelected)
def doExportGridPlotDialog(self):
from tools.doExportGridPlotDialog import ExportGridPlotDialog
d = ExportGridPlotDialog(self.iface.mainWindow())
result = d.initGui()
if result != None:
d.show()
QObject.connect( d, SIGNAL( "okClickedExportGridPlot(QString, bool, bool, bool, float, bool)" ), self.doExportGridPlot )
def doExportGridPlot(self, layerName, createTriangles, createCornerPoints, createGridPoints, gridPointDistance, addLayersToMap):
print "layername"
print layerName
from tools.doExportGridPlot import ExportGridPlot
d = ExportGridPlot()
d.run(self.iface, layerName, createTriangles, createCornerPoints, createGridPoints, gridPointDistance, addLayersToMap)
def doExportFineltra(self):
from tools.doExportFineltra import ExportFineltra
d = ExportFineltra()
d.run()
def doTestMissingTsp(self):
from tools.doTestMissingTsp import TestMissingTsp
d = TestMissingTsp()
d.run(self.iface)
def doTestTriangleOverlap(self):
from tools.doTestTriangleOverlap import TestTriangleOverlap
d = TestTriangleOverlap()
d.run(self.iface)
def doTestTriangleHole(self):
from tools.doTestTriangleHole import TestTriangleHole
d = TestTriangleHole()
d.run(self.iface)
def doChangeSettings( self ):
from tools.doChangeSettings import ChangeSettingsDialog
d = ChangeSettingsDialog( self.iface.mainWindow() )
d.initGui()
d.show()
def doAbout( self ):
from tools.doAbout import AboutDialog
d = AboutDialog( self.iface.mainWindow(), self.version )
d.show()
def unload(self):
pass
|
UTF-8
|
Python
| false | false | 2,014 |
9,371,618,640,597 |
63a2338a4712f79f248b5d1eee62bff3ca4c0b42
|
bba9a3ca53cb35449c31eefb7e69a3586e6ccc5b
|
/pympa_affarigenerali/apps.py
|
9ca058f1f8a0417165c1ad57cf1e08b8a1842435
|
[
"BSD-3-Clause"
] |
permissive
|
simodalla/pympa-affarigenerali
|
https://github.com/simodalla/pympa-affarigenerali
|
6c11e27c719602213f563b8df4d7d4466d9249c4
|
e089bc0ebdadbabcc9c5e2b1421742d724c3a20b
|
refs/heads/master
| 2016-09-05T15:49:00.489851 | 2014-11-27T17:42:41 | 2014-11-27T17:42:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.apps import AppConfig
class PympaAffariGeneraliConfig(AppConfig):
name = 'pympa_affarigenerali'
verbose_name = 'Affari Generali'
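# Activation sketch (standard Django convention, not shown in this repo):
#   INSTALLED_APPS = [..., 'pympa_affarigenerali.apps.PympaAffariGeneraliConfig']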
|
UTF-8
|
Python
| false | false | 2,014 |
5,153,960,759,731 |
769cdfc991e6358aecc7ba1bff902da8c7589b6f
|
03ff5b38780a5dbdebcbef2e797886798435cc6f
|
/mpasgen.py
|
a331642709526b216d2124a4af3429d7368993b6
|
[] |
no_license
|
mrzay316/minipascal
|
https://github.com/mrzay316/minipascal
|
e1c3d362bb20e71b1e329b6b210e7366f348a45c
|
ec420a852134d77006218f911a4e087a3cd16124
|
refs/heads/master
| 2020-01-22T16:12:54.250348 | 2014-12-09T01:30:17 | 2014-12-09T01:30:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'CHRISTIAN PATINO'
def generate(file,top):
print >>file, "! Creado por mapascal.py"
print >>file, "! Christian Patino, John Diaz, David Martinez, IS744 (2014-2)"
|
UTF-8
|
Python
| false | false | 2,014 |
12,919,261,629,262 |
a38e4a43be71cf509f3b0dda2f947b00edcdd6c5
|
13bb62113532c9559cd957e60d671bdfa6239290
|
/Codes/django/cms/urls.py
|
937367de181708299fd9a3c1622a5357d4051861
|
[] |
no_license
|
kentzo9/django_weblog
|
https://github.com/kentzo9/django_weblog
|
e6ae2180fd1b8e7ceb81c8e2ef2fcc4220b40bb7
|
59e9befcfdd4dcd96cceb2ad0b7932ac7c8a0118
|
refs/heads/master
| 2016-09-05T23:41:55.340949 | 2011-04-25T04:33:49 | 2011-04-25T04:33:49 | 1,223,145 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
from coltrane.feeds import LatestEntriesFeed, CategoryFeed
feeds = {'entries': LatestEntriesFeed }
feed2s = {'categories': CategoryFeed }
urlpatterns = patterns('',
# Example:
# (r'^cms/', include('cms.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
(r'^admin/', include(admin.site.urls)),
(r'^tiny_mce/(?P<path>.*)$', 'django.views.static.serve',
{ 'document_root': '/home/kentzo/django_weblog/javascript/tinymce/jscripts/tiny_mce/' }),
(r'^search/$', 'cms.search.views.search'),
(r'^weblog/categories/', include('coltrane.urls.categories')),
(r'^weblog/links/', include('coltrane.urls.links')),
(r'^weblog/tags/', include('coltrane.urls.tags')),
(r'^weblog/', include('coltrane.urls.entries')),
(r'^comments/',include('django.contrib.comments.urls')),
(r'^feeds/(?P<url>entries)/$','django.contrib.syndication.views.feed',{'feed_dict':feeds}),
(r'^feeds/(?P<url>categories/.*)/$','django.contrib.syndication.views.feed',{'feed_dict':feed2s}),
(r'^site_media/(?P<path>.*)$', 'django.views.static.serve',{'document_root': '/home/kentzo/django_weblog/media/'}),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout'),
(r'^login/$', 'coltrane.views.mylogin'),
(r'^logout/$', 'coltrane.views.mylogout'),
(r'', include('django.contrib.flatpages.urls')),
)
|
UTF-8
|
Python
| false | false | 2,011 |
9,809,705,337,964 |
181aa8b98f57c75b60c304f822b934ccf6ad29a1
|
a34a9693988c00ea64309bfe42d9f9723cdedf6b
|
/xivo_bus/ctl/tests/test_producer.py
|
68d509d7a731212b751084179010867f8d1695bf
|
[
"GPL-3.0-only"
] |
non_permissive
|
lportier/xivo-bus
|
https://github.com/lportier/xivo-bus
|
f7f766acefc5213eb0482ee7270b22f51a315242
|
1e27c48e724400a6ebafdfee32edbfbd58023ff8
|
refs/heads/master
| 2021-05-01T12:39:16.823493 | 2014-12-17T20:11:26 | 2014-12-17T20:11:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2014 Avencall
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
import unittest
from mock import Mock, patch
from xivo_bus.ctl.marshaler import Marshaler
from xivo_bus.ctl.rpc.amqp_transport_client import AMQPTransportClient
from xivo_bus.ctl.producer import BusProducer
from xivo_bus.ctl.config import BusConfig
class TestBusProducer(unittest.TestCase):
def setUp(self):
self.marshaler = Mock(Marshaler)
self.transport = Mock(AMQPTransportClient)
self.config = Mock(BusConfig)
self.bus_producer = BusProducer()
self.bus_producer._marshaler = self.marshaler
self.bus_producer._transport = self.transport
@patch('xivo_bus.ctl.producer.AMQPTransportClient')
def test_connect_no_transport(self, amqp_client_constructor):
client = BusProducer(self.config)
client.connect()
amqp_client_constructor.create_and_connect.assert_called_once_with(config=self.config)
@patch('xivo_bus.ctl.producer.AMQPTransportClient', Mock())
def test_connect_already_connected(self):
client = BusProducer()
client.connect()
self.assertRaises(Exception, client.connect)
@patch('xivo_bus.ctl.producer.AMQPTransportClient')
def test_close_transport_with_no_connection(self, amqp_client):
client = BusProducer()
client.close()
self.assertFalse(amqp_client.create_and_connect.called)
@patch('xivo_bus.ctl.producer.AMQPTransportClient')
def test_connect_and_close_opens_and_closes_transport(self, amqp_client):
transport = Mock()
amqp_client.create_and_connect.return_value = transport
client = BusProducer(self.config)
client.connect()
client.close()
amqp_client.create_and_connect.assert_called_once_with(config=self.config)
transport.close.assert_called_once_with()
def test_declare_exchange(self):
name = 'xivo-ami'
exchange_type = 'topic'
durable = True
self.bus_producer.declare_exchange(name, exchange_type, durable)
self.transport.exchange_declare.assert_called_once_with(name, exchange_type, durable)
def test_publish_event(self):
event = Mock()
event.name = 'foobar'
exchange = 'xivo-ami'
routing_key = event.name
request = Mock()
self.marshaler.marshal_command.return_value = request
self.bus_producer.publish_event(exchange, routing_key, event)
self.marshaler.marshal_command.assert_called_once_with(event)
self.transport.send.assert_called_once_with(exchange, routing_key, request)
|
UTF-8
|
Python
| false | false | 2,014 |
18,245,021,103,435 |
3b3150c77cf6281c7acee4ed0e5eaa3633056b78
|
ad983c3b95fabe3397bc9f0d1b210e59d3b94d2f
|
/sandbox/print-high-density-kmers.py
|
e3aa2366452ae3c3a1f0721864c4b7d17b186e46
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
jiarong/khmer
|
https://github.com/jiarong/khmer
|
29ebf58faf244bd32f2b93f4576c19a325949a2a
|
9f3b68f3a281b68544effa0f815cbfa781228983
|
refs/heads/master
| 2021-01-18T09:55:53.603236 | 2014-06-09T21:07:26 | 2014-06-09T21:07:26 | 7,435,670 | 1 | 0 | null | true | 2013-05-01T07:09:07 | 2013-01-04T05:24:26 | 2013-05-01T07:09:07 | 2013-05-01T07:09:06 | 528 | null | 1 | 0 |
C
| null | null |
#! /usr/bin/env python
#
# This file is part of khmer, http://github.com/ged-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2013. It is licensed under
# the three-clause BSD license; see doc/LICENSE.txt. Contact: [email protected]
#
import sys
import screed.fasta
import os
import khmer
K = 32
HASHTABLE_SIZE = int(2e9)
N_HT = 4
RADIUS = 4
###
MAX_DENSITY = 2000
THRESHOLD = 10
infile = sys.argv[1]
outfile = sys.argv[2]
if len(sys.argv) > 3:
RADIUS = int(sys.argv[3])
print 'saving to:', outfile
print 'making hashtable'
ht = khmer.new_hashbits(K, HASHTABLE_SIZE, N_HT)
print 'eating', infile
ht.consume_fasta(infile)
seen = set()
outfp = open(outfile, 'w')
for n, record in enumerate(screed.open(infile)):
if n % 10000 == 0:
print '... saving', n
if n > 100000:
break
seq = record['sequence']
for pos in range(0, len(seq) - K + 1):
kmer = seq[pos:pos + K]
if kmer in seen:
continue
density = ht.count_kmers_within_radius(kmer, RADIUS, MAX_DENSITY)
if density >= THRESHOLD:
seen.add(kmer)
print >>outfp, kmer, density
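# Usage sketch (arguments as read from sys.argv above; file names illustrative):
#   python print-high-density-kmers.py reads.fa dense-kmers.txt [radius]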
|
UTF-8
|
Python
| false | false | 2,014 |
8,306,466,792,001 |
b73ea214b1a05cc71ef1f97a154b16bc725143cc
|
d585017afff959e00c9fcd38b49bfc43d8748341
|
/pyunsrc/LinePainter.py
|
05c4f77f26da725c98d44fac34e10d83fe086109
|
[] |
no_license
|
Peaker/pyun
|
https://github.com/Peaker/pyun
|
aba6e1aaba5a4d9c4bb8d6f7af98a62931d824bc
|
f77c83383b56c627ce6f0783b6298a1da80af08e
|
refs/heads/master
| 2021-01-20T21:29:03.421854 | 2009-11-17T23:10:08 | 2009-11-17T23:10:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class LinePainter(object):
def __init__(self):
self._lines = []
self._cur_line = []
def end_line(self):
self._lines.append(self._cur_line)
self._cur_line = []
def put(self, element):
self._cur_line.append(element)
def draw(self, surface, pos):
y = pos[1]
for line in self._lines:
x = pos[0]
line_height = max([0] + [line_surface.get_height() for line_surface in line])
for element in line:
surface.blit(element, (x, y + (line_height - element.get_height()) / 2))
x += element.get_width()
y += line_height
def size(self):
width = height = 0
for line in self._lines:
height += max([0] + [surface.get_height() for surface in line])
width = max(width, sum([surface.get_width() for surface in line]))
return width, height
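# Usage sketch (assumes pygame-style surfaces exposing get_width/get_height
# and a blit() target; font/icon/screen are hypothetical):
#   painter = LinePainter()
#   painter.put(font.render("hello ", True, (255, 255, 255)))
#   painter.put(icon)
#   painter.end_line()
#   painter.draw(screen, (10, 10))  # painter.size() gives the bounding box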
|
UTF-8
|
Python
| false | false | 2,009 |
18,090,402,253,340 |
fdf041a47e87c08b76c6316c36ea809e435cd787
|
bd1fb92da9d07642a766db6683db1e7b94f785e0
|
/DatafileConversion.py
|
7110c31bd6a21150daf385f74adfc43ca06910c6
|
[] |
no_license
|
yhfy2006/scientometrics-project
|
https://github.com/yhfy2006/scientometrics-project
|
552917590336cdb9a65901d18b5fa3ff7ebc26a0
|
375b312241d7b59fde57de81dedda2b504cf83b3
|
refs/heads/master
| 2020-12-25T10:41:27.202227 | 2012-03-12T16:45:53 | 2012-03-12T16:45:53 | 3,309,305 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# Filename: DatafileConversion.py
import sys
import csv
# the index order is : ID- CI- SO- TI- BI- AU- AF- CT- CO- RF
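# Example input lines (an assumption about the tagged source format):
#   ID 000123
#   AU Smith, J.
#   TI Some article title
#   and an untagged line continues the previous field
# A two-letter first token opens a field; other lines are appended to it.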
def initDict():
global ArticleDict
ArticleDict = { 'ID' : '',
'CI': '',
'SO': '',
'TI': '',
'BI': '',
'AU': '',
'AF': '',
'CT': '',
'CO': '',
'RF': '',
'CA':''}
def print_dict(Adict):
"""
print dictionary
"""
for key,value in Adict.items():
print(key + ':' + value)
def readline(line):
global writer
global ArticleDict
global CurrentIndex
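# Note (inferred from the flattened structure): the strip/detect pair below
# runs twice so that commas are removed both when the *previous* field was AU
# (continuation lines) and when the *newly detected* field is AU (the header
# line itself).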
if(CurrentIndex=='AU'):
line = line.replace(",","")
if len(line.partition(" ")[0]) == 2:
CurrentIndex = line.partition(" ")[0]
if(CurrentIndex=='AU'):
line = line.replace(",","")
#print"CurrentIndex:"+CurrentIndex
if len(line.partition(" ")[0]) == 2:
CurrentIndex = line.partition(" ")[0]
#print"CurrentIndex:"+CurrentIndex
if CurrentIndex == "ID":
# print_dict(ArticleDict)
if ArticleDict['ID']!='':
writer.writerow([ArticleDict['ID'],ArticleDict['CI'],ArticleDict['SO'],ArticleDict['TI'],ArticleDict['BI'],ArticleDict['AU'],ArticleDict['AF'],ArticleDict['CT'],ArticleDict['CO'],ArticleDict['RF'],ArticleDict['CA']])
initDict()
# ArticleDict['ID']= line.partition(" ")[2]
if len(ArticleDict[CurrentIndex])<1:
ArticleDict[CurrentIndex]= line[3:]
else:
ArticleDict[CurrentIndex]=ArticleDict[CurrentIndex]+','+line[3:]
else:
#print 'CurrentIndex: '+CurrentIndex,
# print "ArticleDict["+CurrentIndex+"]:"+ArticleDict[CurrentIndex]
if line.isspace():
pass
else:
ArticleDict[CurrentIndex]=ArticleDict[CurrentIndex]+','+line
# print "ArticleDict["+CurrentIndex+"]after :"+ArticleDict[CurrentIndex]
# if len(line.partition(" ")[0]) == 2:
# CurrentIndex = len(line.partition(" ")[0]
#
# if CurrentIndex == 'ID':
# Article.clear()
# Article['ID']= line.partition(" ")[2]
# print Article['ID'],
def readfile(filename):
'''Read the tagged input file and write its records to the CSV output.'''
inputfile=file(filename)
outputfilename = filename.partition(".")[0] + ".csv"
global writer
print 'Start reading the file: %s, please wait for a second...'%filename
writer.writerow(["ID","CI","SO","TI","BI","AU","AF","CT","CO","RF","CA"])
# the index order is : ID- CI- SO- TI- BI- AU- AF- CT- CO- RF
while True:
line = inputfile.readline()
if len(line)==0:
break
else:
readline(line)
inputfile.close()
print 'Finished! Cheers! The output file is: ------> ' + outputfilename,
# Script starts
argvlength = len(sys.argv)
if argvlength < 2:
print 'Usage: DatafileConversion.py <input.txt>'
sys.exit()
ArticleDict = { 'ID' : '',
'CI': '',
'SO': '',
'TI': '',
'BI': '',
'AU': '',
'AF': '',
'CT': '',
'CO': '',
'RF': '',
'CA':''}
CurrentIndex = ''
if sys.argv[1].endswith('.txt'):
print 'input is ok, the arg is %s'%sys.argv[1]
inputfilename = sys.argv[1]
writer = csv.writer(open(inputfilename.partition(".")[0]+".csv","w"))
readfile(inputfilename)
else:
print 'input is not correct',
sys.exit()
|
UTF-8
|
Python
| false | false | 2,012 |
42,949,690,797 |
a8bad272e28af45c836fab599a4aa680e675c846
|
2ac99d43abba44e3a7696a68c115a378efc02dd0
|
/chopper.py
|
d182ece0447def65a92299d9d160c0b35dd63bce
|
[] |
no_license
|
bryce-evans/ClassMapper
|
https://github.com/bryce-evans/ClassMapper
|
1ab137e6ec70427d4ed0cc82e1c3a504a9da0eb0
|
a3214beec998da32826139a12080a213603bfd35
|
refs/heads/master
| 2021-05-26T14:32:46.343887 | 2013-07-08T05:28:56 | 2013-07-08T05:28:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# chopper.py
import sys
import os
chunk_size = 500
infile_fullname = sys.argv[1]
inFile = open(infile_fullname, 'r')
fileName, fileExtension = os.path.splitext(infile_fullname)
if not os.path.exists(fileName):
os.makedirs(fileName)
outFile = open(fileName+"/"+fileName+"_chop_0"+fileExtension, 'w')
i = 1
chop = 1
for line in inFile:
outFile.write(line)
if(i % (chunk_size) == 0):
outFile.close()
outFile = open(fileName+"/"+fileName+"_chop_"+str(chop)+fileExtension, 'w')
chop += 1
i = 0
i += 1
# close the handles left open after the loop
inFile.close()
outFile.close()
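# Usage sketch (file name illustrative): python chopper.py big_list.txt
# writes big_list/big_list_chop_0.txt, big_list_chop_1.txt, ... in
# chunk_size-line pieces.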
|
UTF-8
|
Python
| false | false | 2,013 |
481,036,385,271 |
3e6667db5a779f0ae041177db11047997fe3ea2d
|
94258ddfc4e33be71bcc55e935255a302079f5cc
|
/test_file.py
|
6d7bc5fcb28a8e603767e8b981cb552837db29bf
|
[] |
no_license
|
citrix-openstack/subunit-filter
|
https://github.com/citrix-openstack/subunit-filter
|
de4af8ddb1a6ab18020929b4c7927cf45f397fff
|
b28115e9095b87a56c65ca67ffa7f3276f76d4d7
|
refs/heads/master
| 2016-09-11T03:07:49.027774 | 2014-02-20T17:44:57 | 2014-02-20T17:44:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
class TestClass(unittest.TestCase):
def test_function(self):
pass
def test_function_2(self):
pass
|
UTF-8
|
Python
| false | false | 2,014 |
16,655,883,201,140 |
843fc5c0fd7ec5e2f7d53c1ce0dae0d3f5fd6b55
|
afa7adedd170ad1366546cea18473a924606870f
|
/Comext/get_reference_dicts.py
|
2e9093361d4fd2e9a11e5a789cee4fb27dcf7363
|
[] |
no_license
|
TomAugspurger/data-wrangling
|
https://github.com/TomAugspurger/data-wrangling
|
bf442c1d67879c15de0f6e6b0e9bee2bf305aded
|
c987277e1eee12c17aa2febdc63a21dc587d9792
|
refs/heads/master
| 2021-01-25T03:48:31.104594 | 2013-05-21T14:13:12 | 2013-05-21T14:13:12 | 6,371,404 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import division
import os
import cPickle
import itertools as it
import pandas as pd
"""
Script to pickle the reference dicts rather than having
to call them on each iteration. Some memory issues, peaked
at around 6 GB and swaped out a few GB.
"""
os.chdir('/Volumes/HDD/Users/tom/DataStorage/Comext/yearly')
gmm_store = pd.HDFStore('gmm_store.h5')
with open('declarants_no_002_dict.pkl', 'r') as f:
declarants = cPickle.load(f)
yearly = pd.HDFStore('yearly.h5')
def gr(country, years=['y2007', 'y2008', 'y2009', 'y2010', 'y2011']):
"""
Finds potential countries to use as k in calculating errors.
Must provide a positive quantity in every year.
Read inside out to maintain sanity. Would be so easy recursively...
Get index for first year possible (i.e. second year in sample).
Filter by calling .ix on that index; drop the NaNs. Take that index.
...
End with index of (product, partner) that works as references.
Parameters:
-----------
yearly : HDF5Store
country : String
years: list of strings
Returns:
--------
A dict mapping each product to a partner that can serve as a reference.
"""
idx = yearly['quantity_' + country][years[4]].ix[1].ix[
yearly['quantity_' + country][years[3]].ix[1].ix[
yearly['quantity_' + country][years[2]].ix[1].ix[
yearly['quantity_' + country][years[1]].ix[1].ix[
yearly['quantity_' + country][years[0]].ix[1].dropna().index
].dropna().index
].dropna().index
].dropna().index
].dropna().index
holder = '0'
references = []
for tuple in idx:
if tuple[0] == holder:
pass
else:
references.append(tuple)
holder = tuple[0]
return {prod: partner for prod, partner in references}
m = it.imap(gr, sorted(declarants))
iz = it.izip(sorted(declarants), m)
d = {k: v for (k, v) in iz}
out = open('references_dict.pkl', 'w')
cPickle.dump(d, out, 2)
|
UTF-8
|
Python
| false | false | 2,013 |
9,268,539,468,059 |
b4cfc0f7394044187cde0f9871225814095db142
|
e0d68cd405abf20c2cda5f0f591a7138be7ad653
|
/src/users.py
|
04ba0fa24b6064e2a0e0a9c1e998652e261aaca9
|
[] |
no_license
|
psaravind/dwrangle
|
https://github.com/psaravind/dwrangle
|
19c9aed40e698a2f85cf697ffdb66fe7708bfc87
|
1298c4268e9ab5a79021b1cfc1fa6382af7b9d43
|
refs/heads/master
| 2020-03-30T17:34:53.151403 | 2014-04-21T20:57:46 | 2014-04-21T20:57:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import xml.etree.ElementTree as ET
import pprint
import re
import sys
"""
This module explores the data a bit more and finds out how many unique users
have contributed to map this particular area!
The function process_map returns a set of unique user IDs ("uid")
"""
def get_user(element):
return
def process_map(filename):
users = set()
for _, element in ET.iterparse(filename):
if 'uid' in element.attrib:
users.add(element.attrib['uid'])
return users
def test():
if len(sys.argv) != 2:
print("Usage: " + sys.argv[0] + " <file name to be processed>")
exit(1)
users = process_map(sys.argv[1])
pprint.pprint(users)
print("Unique user count:", len(users))
if __name__ == "__main__":
test()
|
UTF-8
|
Python
| false | false | 2,014 |
12,077,448,064,078 |
f4c10f25a3d0d96f25fea91a43a8f46948b292b2
|
ff695b9ac5c5f089c445577b2b07892ce13518d7
|
/parse/t.py
|
9902528432f9a0cf0348ed2551d2b503a28882e2
|
[] |
no_license
|
tshrinivasan/kural-library
|
https://github.com/tshrinivasan/kural-library
|
0ea6c359d670ec3fb6c9365b6893e59e501e174b
|
e53ae719288272231e6744d0616c8e1769ac89c4
|
refs/heads/master
| 2021-01-20T02:20:32.833200 | 2014-05-14T12:23:47 | 2014-05-14T12:23:47 | 19,389,197 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
from os.path import basename
for root, dirs, files in os.walk("../kurals"):
for filename in files:
if filename.endswith(".txt"):
# print filename
base = os.path.basename(filename)
print os.path.splitext(base)[0]
|
UTF-8
|
Python
| false | false | 2,014 |
6,751,688,589,962 |
5f943df986215edcde88abf347d5fc97c61bb1c7
|
034314fb706660c8163b4c28302da1da319cb137
|
/apps/gbook/models.py
|
ef98813bca50dfdf86fe1a040af86d817e3ce46f
|
[] |
no_license
|
k-olga/guestbook
|
https://github.com/k-olga/guestbook
|
ade6a10579476272d49ee5e5bcdb88d351b438e1
|
11444caac3d2069bda28c9eda037597383476003
|
refs/heads/master
| 2016-08-03T06:01:41.555457 | 2014-08-09T09:24:28 | 2014-08-09T09:26:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from datetime import datetime
class GuestBookPost(models.Model):
name = models.CharField(max_length=45)
text = models.CharField(max_length=500)
creation_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return u'%s' % self.name
|
UTF-8
|
Python
| false | false | 2,014 |
9,010,841,435,089 |
6f79228a99d7d8cca22f2404b1ad9521c1fb902d
|
1da881dd0e2adb694f8bab4c37f413795cc3377e
|
/designate/central/service.py
|
e3d3e681c50c318d100ec419f4e5335227b2c2e0
|
[
"Apache-2.0"
] |
permissive
|
redhat-cip/debian-designate
|
https://github.com/redhat-cip/debian-designate
|
ef792a7dd6fe6f6e26c0333a59e4bf80e69521f9
|
675d1d09a03bd788c51aa2476fed67a41f91770a
|
refs/heads/master
| 2020-12-30T18:14:47.452035 | 2013-12-09T20:59:25 | 2013-12-09T21:39:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2012 Managed I.T.
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import contextlib
from oslo.config import cfg
from designate.central import effectivetld
from designate.openstack.common import log as logging
from designate.openstack.common.rpc import service as rpc_service
from designate import backend
from designate import exceptions
from designate import notifier
from designate import policy
from designate import quota
from designate import utils
from designate.storage import api as storage_api
LOG = logging.getLogger(__name__)
@contextlib.contextmanager
def wrap_backend_call():
"""
Wraps backend calls, ensuring any exception raised is a Backend exception.
"""
try:
yield
except exceptions.Backend as exc:
raise
except Exception as exc:
raise exceptions.Backend('Unknown backend failure: %r' % exc)
class Service(rpc_service.Service):
RPC_API_VERSION = '2.1'
def __init__(self, *args, **kwargs):
backend_driver = cfg.CONF['service:central'].backend_driver
self.backend = backend.get_backend(backend_driver, self)
kwargs.update(
host=cfg.CONF.host,
topic=cfg.CONF.central_topic,
)
self.notifier = notifier.get_notifier('central')
policy.init_policy()
super(Service, self).__init__(*args, **kwargs)
# Get a storage connection
self.storage_api = storage_api.StorageAPI()
# Get a quota manager instance
self.quota = quota.get_quota()
self.effective_tld = effectivetld.EffectiveTld()
def start(self):
self.backend.start()
super(Service, self).start()
def wait(self):
super(Service, self).wait()
self.conn.consumer_thread.wait()
def stop(self):
super(Service, self).stop()
self.backend.stop()
def _is_valid_domain_name(self, context, domain_name):
# Validate domain name length
if len(domain_name) > cfg.CONF['service:central'].max_domain_name_len:
raise exceptions.InvalidDomainName('Name too long')
# Break the domain name up into its component labels
domain_labels = domain_name.strip('.').split('.')
# We need more than 1 label.
if len(domain_labels) <= 1:
raise exceptions.InvalidDomainName('More than one label is '
'required')
# Check the TLD for validity
# We cannot use the effective TLD list as the publicsuffix.org list is
# missing some top level entries. At the time of coding, the following
# entries were missing
# arpa, au, bv, gb, gn, kp, lb, lr, sj, tp, tz, xn--80ao21a, xn--l1acc
# xn--mgbx4cd0ab
if self.effective_tld.accepted_tld_list:
domain_tld = domain_labels[-1].lower()
if domain_tld not in self.effective_tld.accepted_tld_list:
raise exceptions.InvalidTLD('Unknown or invalid TLD')
# Check if the domain_name is the same as an effective TLD.
if self.effective_tld.is_effective_tld(domain_name):
raise exceptions.DomainIsSameAsAnEffectiveTLD(
'Domain name cannot be the same as an effective TLD')
# Check domain name blacklist
if self._is_blacklisted_domain_name(context, domain_name):
            # Some users are allowed to bypass the blacklist. Is this one?
if not policy.check('use_blacklisted_domain', context, exc=None):
raise exceptions.InvalidDomainName('Blacklisted domain name')
return True
def _is_valid_record_name(self, context, domain, record_name, record_type):
if not record_name.endswith('.'):
raise ValueError('Please supply a FQDN')
# Validate record name length
if len(record_name) > cfg.CONF['service:central'].max_record_name_len:
raise exceptions.InvalidRecordName('Name too long')
# Record must be contained in the parent zone
if not record_name.endswith(domain['name']):
raise exceptions.InvalidRecordLocation('Record is not contained '
                                                   'within its parent '
'domain')
# CNAME's must not be created at the zone apex.
if record_type == 'CNAME' and record_name == domain['name']:
raise exceptions.InvalidRecordLocation('CNAME records may not be '
'created at the zone apex')
def _is_valid_record_placement(self, context, domain, record_name,
record_type, record_id=None):
# CNAME's must not share a name with other records
criterion = {
'name': record_name,
'domain_id': domain['id']
}
if record_type != 'CNAME':
criterion['type'] = 'CNAME'
records = self.storage_api.find_records(context, criterion=criterion)
if ((len(records) == 1 and records[0]['id'] != record_id)
or len(records) > 1):
raise exceptions.InvalidRecordLocation('CNAME records may not '
'share a name with any '
'other records')
# Duplicate PTR's with the same name are not allowed
if record_type == 'PTR':
criterion = {
'name': record_name,
'type': 'PTR',
'domain_id': domain['id']}
records = self.storage_api.find_records(context,
criterion=criterion)
if ((len(records) == 1 and records[0]['id'] != record_id)
or len(records) > 1):
raise exceptions.DuplicateRecord()
return True
def _is_blacklisted_domain_name(self, context, domain_name):
"""
        Checks the domain name blacklist; returns the matching pattern,
        or False if the name is not blacklisted.
"""
blacklists = cfg.CONF['service:central'].domain_name_blacklist
for blacklist in blacklists:
if bool(re.search(blacklist, domain_name)):
return blacklist
return False
def _is_subdomain(self, context, domain_name):
        # Break the name up into its component labels
labels = domain_name.split(".")
i = 1
        # Starting with label #2, search for matching domains in the database
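        # Worked example: for 'www.example.org.' the loop tries
        # 'example.org.', then 'org.', and so on, returning the first
        # domain found in storage (or False below when none match).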
while (i < len(labels)):
name = '.'.join(labels[i:])
try:
domain = self.storage_api.find_domain(context, {'name': name})
except exceptions.DomainNotFound:
i += 1
else:
return domain
return False
def _is_subrecord(self, context, domain, record_name, criterion):
# Break the names up into their component labels
domain_labels = domain['name'].split(".")
record_labels = record_name.split(".")
i = 1
j = len(record_labels) - len(domain_labels)
criterion['domain_id'] = domain['id']
        # Starting with label #2, search for matching records in the database
while (i <= j):
criterion['name'] = '.'.join(record_labels[i:])
records = self.storage_api.find_records(context, criterion)
if len(records) == 0:
i += 1
else:
return records
return False
def _increment_domain_serial(self, context, domain_id):
domain = self.storage_api.get_domain(context, domain_id)
# Increment the serial number
values = {'serial': utils.increment_serial(domain['serial'])}
with self.storage_api.update_domain(
context, domain_id, values) as domain:
with wrap_backend_call():
self.backend.update_domain(context, domain)
return domain
# Quota Enforcement Methods
def _enforce_domain_quota(self, context, tenant_id):
criterion = {'tenant_id': tenant_id}
count = self.storage_api.count_domains(context, criterion)
self.quota.limit_check(context, tenant_id, domains=count)
def _enforce_record_quota(self, context, domain):
# Ensure the records per domain quota is OK
criterion = {'domain_id': domain['id']}
count = self.storage_api.count_records(context, criterion)
self.quota.limit_check(context, domain['tenant_id'],
domain_records=count)
# Misc Methods
def get_absolute_limits(self, context):
# NOTE(Kiall): Currently, we only have quota based limits..
return self.quota.get_quotas(context, context.tenant_id)
# Quota Methods
def get_quotas(self, context, tenant_id):
target = {'tenant_id': tenant_id}
policy.check('get_quotas', context, target)
return self.quota.get_quotas(context, tenant_id)
def get_quota(self, context, tenant_id, resource):
target = {'tenant_id': tenant_id, 'resource': resource}
policy.check('get_quota', context, target)
return self.quota.get_quota(context, tenant_id, resource)
def set_quota(self, context, tenant_id, resource, hard_limit):
target = {
'tenant_id': tenant_id,
'resource': resource,
'hard_limit': hard_limit,
}
policy.check('set_quota', context, target)
return self.quota.set_quota(context, tenant_id, resource, hard_limit)
def reset_quotas(self, context, tenant_id):
target = {'tenant_id': tenant_id}
policy.check('reset_quotas', context, target)
self.quota.reset_quotas(context, tenant_id)
# Server Methods
def create_server(self, context, values):
policy.check('create_server', context)
with self.storage_api.create_server(context, values) as server:
# Update backend with the new server..
with wrap_backend_call():
self.backend.create_server(context, server)
self.notifier.info(context, 'dns.server.create', server)
return server
def find_servers(self, context, criterion=None):
policy.check('find_servers', context)
return self.storage_api.find_servers(context, criterion)
def get_server(self, context, server_id):
policy.check('get_server', context, {'server_id': server_id})
return self.storage_api.get_server(context, server_id)
def update_server(self, context, server_id, values):
policy.check('update_server', context, {'server_id': server_id})
with self.storage_api.update_server(
context, server_id, values) as server:
# Update backend with the new details..
with wrap_backend_call():
self.backend.update_server(context, server)
self.notifier.info(context, 'dns.server.update', server)
return server
def delete_server(self, context, server_id):
policy.check('delete_server', context, {'server_id': server_id})
# don't delete last of servers
servers = self.storage_api.find_servers(context)
if len(servers) == 1 and server_id == servers[0]['id']:
raise exceptions.LastServerDeleteNotAllowed(
"Not allowed to delete last of servers")
with self.storage_api.delete_server(context, server_id) as server:
            # Update backend, removing the server..
with wrap_backend_call():
self.backend.delete_server(context, server)
self.notifier.info(context, 'dns.server.delete', server)
# TSIG Key Methods
def create_tsigkey(self, context, values):
policy.check('create_tsigkey', context)
with self.storage_api.create_tsigkey(context, values) as tsigkey:
with wrap_backend_call():
self.backend.create_tsigkey(context, tsigkey)
self.notifier.info(context, 'dns.tsigkey.create', tsigkey)
return tsigkey
def find_tsigkeys(self, context, criterion=None):
policy.check('find_tsigkeys', context)
return self.storage_api.find_tsigkeys(context, criterion)
def get_tsigkey(self, context, tsigkey_id):
policy.check('get_tsigkey', context, {'tsigkey_id': tsigkey_id})
return self.storage_api.get_tsigkey(context, tsigkey_id)
def update_tsigkey(self, context, tsigkey_id, values):
policy.check('update_tsigkey', context, {'tsigkey_id': tsigkey_id})
with self.storage_api.update_tsigkey(
context, tsigkey_id, values) as tsigkey:
with wrap_backend_call():
self.backend.update_tsigkey(context, tsigkey)
self.notifier.info(context, 'dns.tsigkey.update', tsigkey)
return tsigkey
def delete_tsigkey(self, context, tsigkey_id):
policy.check('delete_tsigkey', context, {'tsigkey_id': tsigkey_id})
with self.storage_api.delete_tsigkey(context, tsigkey_id) as tsigkey:
with wrap_backend_call():
self.backend.delete_tsigkey(context, tsigkey)
self.notifier.info(context, 'dns.tsigkey.delete', tsigkey)
# Tenant Methods
def find_tenants(self, context):
policy.check('find_tenants', context)
return self.storage_api.find_tenants(context)
def get_tenant(self, context, tenant_id):
target = {
'tenant_id': tenant_id
}
policy.check('get_tenant', context, target)
return self.storage_api.get_tenant(context, tenant_id)
def count_tenants(self, context):
policy.check('count_tenants', context)
return self.storage_api.count_tenants(context)
# Domain Methods
def create_domain(self, context, values):
# TODO(kiall): Refactor this method into *MUCH* smaller chunks.
values['tenant_id'] = context.tenant_id
target = {
'tenant_id': values['tenant_id'],
'domain_name': values['name']
}
policy.check('create_domain', context, target)
# Ensure the tenant has enough quota to continue
self._enforce_domain_quota(context, values['tenant_id'])
# Ensure the domain name is valid
self._is_valid_domain_name(context, values['name'])
# Handle sub-domains appropriately
parent_domain = self._is_subdomain(context, values['name'])
if parent_domain:
if parent_domain['tenant_id'] == values['tenant_id']:
# Record the Parent Domain ID
values['parent_domain_id'] = parent_domain['id']
else:
raise exceptions.Forbidden('Unable to create subdomain in '
'another tenants domain')
# TODO(kiall): Handle super-domains properly
# NOTE(kiall): Fetch the servers before creating the domain, this way
# we can prevent domain creation if no servers are
# configured.
servers = self.storage_api.find_servers(context)
if len(servers) == 0:
LOG.critical('No servers configured. Please create at least one '
'server')
raise exceptions.NoServersConfigured()
# Set the serial number
values['serial'] = utils.increment_serial()
with self.storage_api.create_domain(context, values) as domain:
with wrap_backend_call():
self.backend.create_domain(context, domain)
self.notifier.info(context, 'dns.domain.create', domain)
return domain
def get_domain(self, context, domain_id):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('get_domain', context, target)
return domain
def get_domain_servers(self, context, domain_id, criterion=None):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('get_domain_servers', context, target)
# TODO(kiall): Once we allow domains to be allocated on 1 of N server
# pools, return the filtered list here.
return self.storage_api.find_servers(context, criterion)
def find_domains(self, context, criterion=None):
target = {'tenant_id': context.tenant_id}
policy.check('find_domains', context, target)
if criterion is None:
criterion = {}
if not context.is_admin:
criterion['tenant_id'] = context.tenant_id
return self.storage_api.find_domains(context, criterion)
def find_domain(self, context, criterion):
target = {'tenant_id': context.tenant_id}
policy.check('find_domain', context, target)
if not context.is_admin:
criterion['tenant_id'] = context.tenant_id
return self.storage_api.find_domain(context, criterion)
def update_domain(self, context, domain_id, values, increment_serial=True):
# TODO(kiall): Refactor this method into *MUCH* smaller chunks.
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('update_domain', context, target)
if 'tenant_id' in values:
# NOTE(kiall): Ensure the user is allowed to delete a domain from
# the original tenant.
policy.check('delete_domain', context, target)
# NOTE(kiall): Ensure the user is allowed to create a domain in
# the new tenant.
target = {'domain_id': domain_id, 'tenant_id': values['tenant_id']}
policy.check('create_domain', context, target)
if 'name' in values and values['name'] != domain['name']:
raise exceptions.BadRequest('Renaming a domain is not allowed')
if increment_serial:
# Increment the serial number
values['serial'] = utils.increment_serial(domain['serial'])
with self.storage_api.update_domain(
context, domain_id, values) as domain:
with wrap_backend_call():
self.backend.update_domain(context, domain)
self.notifier.info(context, 'dns.domain.update', domain)
return domain
def delete_domain(self, context, domain_id):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('delete_domain', context, target)
# Prevent deletion of a zone which has child zones
criterion = {'parent_domain_id': domain_id}
if self.storage_api.count_domains(context, criterion) > 0:
raise exceptions.DomainHasSubdomain('Please delete any subdomains '
'before deleting this domain')
with self.storage_api.delete_domain(context, domain_id) as domain:
with wrap_backend_call():
self.backend.delete_domain(context, domain)
self.notifier.info(context, 'dns.domain.delete', domain)
return domain
def count_domains(self, context, criterion=None):
if criterion is None:
criterion = {}
target = {
'tenant_id': criterion.get('tenant_id', None)
}
policy.check('count_domains', context, target)
return self.storage_api.count_domains(context, criterion)
def touch_domain(self, context, domain_id):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('touch_domain', context, target)
domain = self._increment_domain_serial(context, domain_id)
self.notifier.info(context, 'dns.domain.touch', domain)
return domain
# Record Methods
def create_record(self, context, domain_id, values, increment_serial=True):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'record_name': values['name'],
'tenant_id': domain['tenant_id']
}
policy.check('create_record', context, target)
# Ensure the tenant has enough quota to continue
self._enforce_record_quota(context, domain)
# Ensure the record name and placement is valid
self._is_valid_record_name(context, domain, values['name'],
values['type'])
self._is_valid_record_placement(context, domain, values['name'],
values['type'])
with self.storage_api.create_record(
context, domain_id, values) as record:
with wrap_backend_call():
self.backend.create_record(context, domain, record)
if increment_serial:
self._increment_domain_serial(context, domain_id)
# Send Record creation notification
self.notifier.info(context, 'dns.record.create', record)
return record
def get_record(self, context, domain_id, record_id):
domain = self.storage_api.get_domain(context, domain_id)
record = self.storage_api.get_record(context, record_id)
# Ensure the domain_id matches the record's domain_id
if domain['id'] != record['domain_id']:
raise exceptions.RecordNotFound()
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'record_id': record['id'],
'tenant_id': domain['tenant_id']
}
policy.check('get_record', context, target)
return record
def find_records(self, context, domain_id, criterion=None):
if criterion is None:
criterion = {}
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('find_records', context, target)
criterion['domain_id'] = domain_id
return self.storage_api.find_records(context, criterion)
def find_record(self, context, domain_id, criterion=None):
if criterion is None:
criterion = {}
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('find_record', context, target)
criterion['domain_id'] = domain_id
return self.storage_api.find_record(context, criterion)
def update_record(self, context, domain_id, record_id, values,
increment_serial=True):
domain = self.storage_api.get_domain(context, domain_id)
record = self.storage_api.get_record(context, record_id)
# Ensure the domain_id matches the record's domain_id
if domain['id'] != record['domain_id']:
raise exceptions.RecordNotFound()
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'record_id': record['id'],
'tenant_id': domain['tenant_id']
}
policy.check('update_record', context, target)
# Ensure the record name is valid
record_name = values['name'] if 'name' in values else record['name']
record_type = values['type'] if 'type' in values else record['type']
self._is_valid_record_name(context, domain, record_name, record_type)
self._is_valid_record_placement(context, domain, record_name,
record_type, record_id)
# Update the record
with self.storage_api.update_record(
context, record_id, values) as record:
with wrap_backend_call():
self.backend.update_record(context, domain, record)
if increment_serial:
self._increment_domain_serial(context, domain_id)
# Send Record update notification
self.notifier.info(context, 'dns.record.update', record)
return record
def delete_record(self, context, domain_id, record_id,
increment_serial=True):
domain = self.storage_api.get_domain(context, domain_id)
record = self.storage_api.get_record(context, record_id)
# Ensure the domain_id matches the record's domain_id
if domain['id'] != record['domain_id']:
raise exceptions.RecordNotFound()
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'record_id': record['id'],
'tenant_id': domain['tenant_id']
}
policy.check('delete_record', context, target)
with self.storage_api.delete_record(context, record_id) as record:
with wrap_backend_call():
self.backend.delete_record(context, domain, record)
if increment_serial:
self._increment_domain_serial(context, domain_id)
# Send Record deletion notification
self.notifier.info(context, 'dns.record.delete', record)
return record
def count_records(self, context, criterion=None):
if criterion is None:
criterion = {}
target = {
'tenant_id': criterion.get('tenant_id', None)
}
policy.check('count_records', context, target)
return self.storage_api.count_records(context, criterion)
# Diagnostics Methods
def sync_domains(self, context):
policy.check('diagnostics_sync_domains', context)
domains = self.storage_api.find_domains(context)
results = {}
for domain in domains:
servers = self.storage_api.find_servers(context)
criterion = {'domain_id': domain['id']}
records = self.storage_api.find_records(
context, criterion=criterion)
with wrap_backend_call():
results[domain['id']] = self.backend.sync_domain(context,
domain,
records,
servers)
return results
def sync_domain(self, context, domain_id):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'tenant_id': domain['tenant_id']
}
policy.check('diagnostics_sync_domain', context, target)
records = self.storage_api.find_records(
context, criterion={'domain_id': domain_id})
with wrap_backend_call():
return self.backend.sync_domain(context, domain, records)
def sync_record(self, context, domain_id, record_id):
domain = self.storage_api.get_domain(context, domain_id)
target = {
'domain_id': domain_id,
'domain_name': domain['name'],
'record_id': record_id,
'tenant_id': domain['tenant_id']
}
policy.check('diagnostics_sync_record', context, target)
record = self.storage_api.get_record(context, record_id)
with wrap_backend_call():
return self.backend.sync_record(context, domain, record)
def ping(self, context):
policy.check('diagnostics_ping', context)
try:
backend_status = self.backend.ping(context)
except Exception as e:
backend_status = {'status': False, 'message': str(e)}
try:
storage_status = self.storage_api.ping(context)
except Exception as e:
storage_status = {'status': False, 'message': str(e)}
if backend_status and storage_status:
status = True
else:
status = False
return {
'host': cfg.CONF.host,
'status': status,
'backend': backend_status,
'storage': storage_status
}
|
UTF-8
|
Python
| false | false | 2,013 |
19,215,683,721,479 |
f21ad5d6206b27dd20a49bcd31c8ce5d989453c0
|
67ed3578484d0b2b5b8000e338cdd1c693a5cf4e
|
/src/ui/mainwindow.py
|
0e6b26e30bc00054ec5959aa47a456594aa30080
|
[] |
no_license
|
wintergalt/todo
|
https://github.com/wintergalt/todo
|
a0dad9d2bf3e55f621f14f02449b9ad15c800598
|
18529b0b1a7ebbfa54f4b1c99ba876866b9a5a75
|
refs/heads/master
| 2021-01-10T19:57:50.178904 | 2012-05-13T18:33:04 | 2012-05-13T18:33:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created: Fri Nov 4 12:22:53 2011
# by: pyside-uic 0.2.13 running on PySide 1.0.7
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(605, 425)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/accessories-text-editor.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.horizontalLayout = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName("horizontalLayout")
self.treeView = QtGui.QTreeView(self.centralwidget)
self.treeView.setAutoFillBackground(True)
self.treeView.setAlternatingRowColors(False)
self.treeView.setSortingEnabled(True)
self.treeView.setObjectName("treeView")
self.treeView.header().setVisible(True)
self.horizontalLayout.addWidget(self.treeView)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 605, 23))
self.menubar.setObjectName("menubar")
self.menu_File = QtGui.QMenu(self.menubar)
self.menu_File.setObjectName("menu_File")
self.menu_Help = QtGui.QMenu(self.menubar)
self.menu_Help.setObjectName("menu_Help")
self.menu_Tasks = QtGui.QMenu(self.menubar)
self.menu_Tasks.setObjectName("menu_Tasks")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.toolBar = QtGui.QToolBar(MainWindow)
self.toolBar.setObjectName("toolBar")
MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.action_Todo_Help = QtGui.QAction(MainWindow)
self.action_Todo_Help.setObjectName("action_Todo_Help")
self.actionA_bout_Todo = QtGui.QAction(MainWindow)
self.actionA_bout_Todo.setObjectName("actionA_bout_Todo")
self.action_Open = QtGui.QAction(MainWindow)
self.action_Open.setObjectName("action_Open")
self.action_Exit = QtGui.QAction(MainWindow)
self.action_Exit.setObjectName("action_Exit")
self.actionDelete_Task = QtGui.QAction(MainWindow)
self.actionDelete_Task.setEnabled(False)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/list-remove.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionDelete_Task.setIcon(icon1)
self.actionDelete_Task.setObjectName("actionDelete_Task")
self.actionNew_Task = QtGui.QAction(MainWindow)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/list-add.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionNew_Task.setIcon(icon2)
self.actionNew_Task.setObjectName("actionNew_Task")
self.actionEdit_Task = QtGui.QAction(MainWindow)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/stock_search-and-replace.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionEdit_Task.setIcon(icon3)
self.actionEdit_Task.setObjectName("actionEdit_Task")
self.menu_File.addAction(self.action_Open)
self.menu_File.addSeparator()
self.menu_File.addAction(self.action_Exit)
self.menu_Help.addAction(self.action_Todo_Help)
self.menu_Help.addSeparator()
self.menu_Help.addAction(self.actionA_bout_Todo)
self.menu_Tasks.addAction(self.actionNew_Task)
self.menu_Tasks.addAction(self.actionEdit_Task)
self.menu_Tasks.addSeparator()
self.menu_Tasks.addAction(self.actionDelete_Task)
self.menubar.addAction(self.menu_File.menuAction())
self.menubar.addAction(self.menu_Tasks.menuAction())
self.menubar.addAction(self.menu_Help.menuAction())
self.toolBar.addAction(self.actionNew_Task)
self.toolBar.addAction(self.actionEdit_Task)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionDelete_Task)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "Tasks!", None, QtGui.QApplication.UnicodeUTF8))
self.menu_File.setTitle(QtGui.QApplication.translate("MainWindow", "&File", None, QtGui.QApplication.UnicodeUTF8))
self.menu_Help.setTitle(QtGui.QApplication.translate("MainWindow", "&Help", None, QtGui.QApplication.UnicodeUTF8))
self.menu_Tasks.setTitle(QtGui.QApplication.translate("MainWindow", "&Tasks", None, QtGui.QApplication.UnicodeUTF8))
self.toolBar.setWindowTitle(QtGui.QApplication.translate("MainWindow", "toolBar", None, QtGui.QApplication.UnicodeUTF8))
self.action_Todo_Help.setText(QtGui.QApplication.translate("MainWindow", "&Todo Help...", None, QtGui.QApplication.UnicodeUTF8))
self.actionA_bout_Todo.setText(QtGui.QApplication.translate("MainWindow", "A&bout Todo...", None, QtGui.QApplication.UnicodeUTF8))
self.action_Open.setText(QtGui.QApplication.translate("MainWindow", "&Open...", None, QtGui.QApplication.UnicodeUTF8))
self.action_Exit.setText(QtGui.QApplication.translate("MainWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8))
self.actionDelete_Task.setText(QtGui.QApplication.translate("MainWindow", "Delete Task", None, QtGui.QApplication.UnicodeUTF8))
self.actionDelete_Task.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+D", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Task.setText(QtGui.QApplication.translate("MainWindow", "New Task", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Task.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+N", None, QtGui.QApplication.UnicodeUTF8))
self.actionEdit_Task.setText(QtGui.QApplication.translate("MainWindow", "Edit Task", None, QtGui.QApplication.UnicodeUTF8))
self.actionEdit_Task.setShortcut(QtGui.QApplication.translate("MainWindow", "Return", None, QtGui.QApplication.UnicodeUTF8))
import icons_rc
|
UTF-8
|
Python
| false | false | 2,012 |
13,846,974,607,170 |
a90ba1cfaee890a192deaf5a41a01565ed755071
|
054ce39bc512efd76c2cc583e1ee12623450458f
|
/test/test_rom.py
|
a98734c1e529d35678de1f5eb06f3d14438853b7
|
[
"LGPL-2.1-only",
"LGPL-3.0-only",
"LGPL-2.0-or-later"
] |
non_permissive
|
ioddly/rom
|
https://github.com/ioddly/rom
|
69ecdbb057e334cea3f7e7b16593ed732df123d6
|
1ba8ec1354ecf73cc176c2aa7919171678a0536a
|
refs/heads/master
| 2018-12-10T13:31:27.844141 | 2013-11-04T03:10:16 | 2013-11-04T03:10:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from datetime import datetime, date, time as dtime
from decimal import Decimal as _Decimal
import time
import unittest
import redis
from rom import util
util.CONNECTION = redis.Redis(db=15)
connect = util._connect
from rom import *
from rom import _enable_lua_writes
from rom.exceptions import *
def global_setup():
c = connect(None)
keys = c.keys('RomTest*')
if keys:
c.delete(*keys)
from rom.columns import MODELS
Model = MODELS['Model']
for k,v in MODELS.items():
if v is not Model:
del MODELS[k]
def get_state():
c = connect(None)
data = []
for k in c.keys('*'):
t = c.type(k)
if t == 'string':
data.append((k, c.get(k)))
elif t == 'list':
data.append((k, c.lrange(k, 0, -1)))
elif t == 'set':
data.append((k, c.smembers(k)))
elif t == 'hash':
data.append((k, c.hgetall(k)))
else:
data.append((k, c.zrange(k, 0, -1, withscores=True)))
data.sort()
return data
_now = datetime.utcnow()
_now_time = time.time()
def _default_time():
return _now_time
class TestORM(unittest.TestCase):
def setUp(self):
session.rollback()
def test_basic_model(self):
class RomTestBasicModel(Model):
val = Integer()
oval = Integer(default=7)
created_at = Float(default=_default_time)
req = String(required=True)
self.assertRaises(ColumnError, RomTestBasicModel)
self.assertRaises(InvalidColumnValue, lambda: RomTestBasicModel(oval='t'))
self.assertRaises(MissingColumn, lambda: RomTestBasicModel(created_at=7))
# try object saving/loading
x = RomTestBasicModel(val=1, req="hello")
x.save()
id = x.id
x = x.to_dict()
y = RomTestBasicModel.get(id)
yd = y.to_dict()
## cax = x.pop('created_at'); cay = yd.pop('created_at')
self.assertEqual(x, yd)
## self.assertTrue(abs(cax - cay) < .005, cax-cay)
# try object copying
zd = y.copy().to_dict()
## caz = zd.pop('created_at')
self.assertNotEqual(yd, zd)
zd.pop('id')
yd.pop('id')
self.assertEqual(yd, zd)
## self.assertTrue(abs(cay-caz) < .005, cay-caz)
def test_unique_index(self):
def foo2():
class RomTestBadIndexModel2(Model):
bad = Integer(unique=True)
self.assertRaises(ColumnError, foo2)
class RomTestIndexModel(Model):
key = String(required=True, unique=True)
self.assertRaises(MissingColumn, RomTestIndexModel)
item = RomTestIndexModel(key="hello")
item.save()
m = RomTestIndexModel.get_by(key="hello")
self.assertTrue(m)
self.assertEquals(m.id, item.id)
self.assertTrue(m is item)
def test_foreign_key(self):
def foo():
class RomTestBFkey1(Model):
bad = ManyToOne("RomTestBad")
RomTestBFkey1()
self.assertRaises(ORMError, foo)
def foo2():
class RomTestBFkey2(Model):
bad = OneToMany("RomTestBad")
RomTestBFkey2()
self.assertRaises(ORMError, foo2)
class RomTestFkey1(Model):
fkey2 = ManyToOne("RomTestFkey2")
class RomTestFkey2(Model):
fkey1 = OneToMany("RomTestFkey1")
x = RomTestFkey2()
y = RomTestFkey1(fkey2=x) # implicitly saves x
y.save()
xid = x.id
yid = y.id
x = y = None
y = RomTestFkey1.get(yid)
self.assertEquals(y.fkey2.id, xid)
fk1 = y.fkey2.fkey1
self.assertEquals(len(fk1), 1)
self.assertEquals(fk1[0].id, y.id)
def test_unique(self):
class RomTestUnique(Model):
attr = String(unique=True)
a = RomTestUnique(attr='hello')
b = RomTestUnique(attr='hello2')
a.save()
b.save()
b.attr = 'hello'
self.assertRaises(UniqueKeyViolation, b.save)
c = RomTestUnique(attr='hello')
self.assertRaises(UniqueKeyViolation, c.save)
def test_saving(self):
class RomTestNormal(Model):
attr = String()
self.assertTrue(RomTestNormal().save())
self.assertTrue(RomTestNormal(attr='hello').save())
x = RomTestNormal()
self.assertTrue(x.save())
self.assertFalse(x.save())
session.commit()
self.assertTrue(x is RomTestNormal.get(x.id))
def test_index(self):
class RomTestIndexedModel(Model):
attr = String(index=True)
attr2 = String(index=True)
attr3 = Integer(index=True)
attr4 = Float(index=True)
attr5 = Decimal(index=True)
x = RomTestIndexedModel(
attr='hello world',
attr2='how are you doing?',
attr3=7,
attr4=4.5,
attr5=_Decimal('2.643'),
)
x.save()
RomTestIndexedModel(
attr='world',
attr3=100,
attr4=-1000,
attr5=_Decimal('2.643'),
).save()
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello').count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr2='how').filter(attr2='are').count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello').filter(attr2='how').filter(attr2='are').count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', noattr='bad').filter(attr2='how').filter(attr2='are').count(), 0)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', attr3=(None, None)).count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', attr3=(None, 10)).count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', attr3=(None, 10)).execute()[0].id, 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', attr3=(5, None)).count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', attr3=(5, 10), attr4=(4,5), attr5=(2.5, 2.7)).count(), 1)
first = RomTestIndexedModel.query.filter(attr='hello', attr3=(5, 10), attr4=(4,5), attr5=(2.5, 2.7)).first()
self.assertTrue(first)
self.assertTrue(first is x)
self.assertEquals(RomTestIndexedModel.query.filter(attr='hello', attr3=(10, 20), attr4=(4,5), attr5=(2.5, 2.7)).count(), 0)
self.assertEquals(RomTestIndexedModel.query.filter(attr3=100).count(), 1)
self.assertEquals(RomTestIndexedModel.query.filter(attr='world', attr5=_Decimal('2.643')).count(), 2)
results = RomTestIndexedModel.query.filter(attr='world').order_by('attr4').execute()
self.assertEquals([x.id for x in results], [2,1])
for i in xrange(50):
RomTestIndexedModel(attr3=i)
session.commit()
session.rollback()
self.assertEquals(len(RomTestIndexedModel.get_by(attr3=(10, 25))), 16)
self.assertEquals(len(RomTestIndexedModel.get_by(attr3=(10, 25), _limit=(0,5))), 5)
def test_alternate_models(self):
ctr = [0]
class RomTestAlternate(object):
def __init__(self, id=None):
if id is None:
id = ctr[0]
ctr[0] += 1
self.id = id
@classmethod
def get(self, id):
return RomTestAlternate(id)
class RomTestFModel(Model):
attr = ForeignModel(RomTestAlternate)
a = RomTestAlternate()
ai = a.id
i = RomTestFModel(attr=a).id
session.commit() # two lines of magic to destroy session history
session.rollback() #
del a
f = RomTestFModel.get(i)
self.assertEquals(f.attr.id, ai)
def test_model_connection(self):
class RomTestFoo(Model):
pass
class RomTestBar(Model):
_conn = redis.Redis(db=14)
RomTestBar._conn.delete('RomTestBar:id:')
RomTestFoo().save()
RomTestBar().save()
self.assertEquals(RomTestBar._conn.get('RomTestBar:id:'), '1')
self.assertEquals(util.CONNECTION.get('RomTestBar:id:'), None)
RomTestBar.get(1).delete()
RomTestBar._conn.delete('RomTestBar:id:')
k = RomTestBar._conn.keys('RomTest*')
if k:
RomTestBar._conn.delete(*k)
def test_entity_caching(self):
class RomTestGoo(Model):
pass
f = RomTestGoo()
i = f.id
p = id(f)
session.commit()
for j in xrange(10):
RomTestGoo()
g = RomTestGoo.get(i)
self.assertTrue(f is g)
def test_index_preservation(self):
""" Edits to unrelated columns should not remove the index of other
columns. Issue: https://github.com/josiahcarlson/rom/issues/2. """
class RomTestM(Model):
u = String(unique=True)
i = Integer(index=True)
unrelated = String()
RomTestM(u='foo', i=11).save()
m = RomTestM.get_by(u='foo')
m.unrelated = 'foobar'
self.assertEqual(len(RomTestM.get_by(i=11)), 1)
m.save()
self.assertEqual(len(RomTestM.get_by(i=11)), 1)
self.assertEqual(len(RomTestM.get_by(i=(10, 12))), 1)
def test_json_multisave(self):
class RomTestJsonTest(Model):
col = Json()
d = {'hello': 'world'}
x = RomTestJsonTest(col=d)
x.save()
del x
for i in xrange(5):
x = RomTestJsonTest.get(1)
self.assertEquals(x.col, d)
x.save(full=True)
session.rollback()
def test_boolean(self):
class RomTestBooleanTest(Model):
col = Boolean(index=True)
RomTestBooleanTest(col=True).save()
RomTestBooleanTest(col=1).save()
RomTestBooleanTest(col=False).save()
RomTestBooleanTest(col='').save()
RomTestBooleanTest(col=None).save() # None is considered "not data", so is ignored
y = RomTestBooleanTest()
yid = y.id
y.save()
del y
self.assertEquals(len(RomTestBooleanTest.get_by(col=True)), 2)
self.assertEquals(len(RomTestBooleanTest.get_by(col=False)), 2)
session.rollback()
x = RomTestBooleanTest.get(1)
x.col = False
x.save()
self.assertEquals(len(RomTestBooleanTest.get_by(col=True)), 1)
self.assertEquals(len(RomTestBooleanTest.get_by(col=False)), 3)
self.assertEquals(len(RomTestBooleanTest.get_by(col=True)), 1)
self.assertEquals(len(RomTestBooleanTest.get_by(col=False)), 3)
y = RomTestBooleanTest.get(yid)
self.assertEquals(y.col, None)
def test_datetimes(self):
class RomTestDateTimesTest(Model):
col1 = DateTime(index=True)
col2 = Date(index=True)
col3 = Time(index=True)
dtt = RomTestDateTimesTest(col1=_now, col2=_now.date(), col3=_now.time())
dtt.save()
session.commit()
del dtt
self.assertEquals(len(RomTestDateTimesTest.get_by(col1=_now)), 1)
self.assertEquals(len(RomTestDateTimesTest.get_by(col2=_now.date())), 1)
self.assertEquals(len(RomTestDateTimesTest.get_by(col3=_now.time())), 1)
def test_deletion(self):
class RomTestDeletionTest(Model):
col1 = String(index=True)
x = RomTestDeletionTest(col1="this is a test string that should be indexed")
session.commit()
self.assertEquals(len(RomTestDeletionTest.get_by(col1='this')), 1)
x.delete()
self.assertEquals(len(RomTestDeletionTest.get_by(col1='this')), 0)
session.commit()
self.assertEquals(len(RomTestDeletionTest.get_by(col1='this')), 0)
def test_empty_query(self):
class RomTestEmptyQueryTest(Model):
col1 = String()
RomTestEmptyQueryTest().save()
self.assertRaises(QueryError, RomTestEmptyQueryTest.query.all)
self.assertRaises(QueryError, RomTestEmptyQueryTest.query.count)
self.assertRaises(QueryError, RomTestEmptyQueryTest.query.limit(0, 10).count)
def test_refresh(self):
class RomTestRefresh(Model):
col = String()
d = RomTestRefresh(col='hello')
d.save()
d.col = 'world'
self.assertRaises(InvalidOperation, d.refresh)
d.refresh(True)
self.assertEquals(d.col, 'hello')
d.col = 'world'
session.refresh(d, force=True)
self.assertEquals(d.col, 'hello')
d.col = 'world'
session.refresh_all(force=True)
self.assertEquals(d.col, 'hello')
self.assertRaises(InvalidOperation, RomTestRefresh(col='boo').refresh)
def test_datetime(self):
class RomTestDT(Model):
created_at = DateTime(default=datetime.utcnow)
event_datetime = DateTime(index=True)
x = RomTestDT()
x.event_datetime = datetime.utcnow()
x.save()
RomTestDT(event_datetime=datetime.utcnow()).save()
session.rollback() # clearing the local cache
self.assertEquals(RomTestDT.get_by(event_datetime=(datetime(2000, 1, 1), datetime(2000, 1, 1))), [])
self.assertEquals(len(RomTestDT.get_by(event_datetime=(datetime(2000, 1, 1), datetime.utcnow()))), 2)
if __name__ == '__main__':
global_setup()
print "Testing standard writing"
try:
unittest.main()
except:
data = get_state()
global_setup()
_enable_lua_writes()
print "Testing Lua writing"
try:
unittest.main()
except:
lua_data = get_state()
global_setup()
## if data != lua_data:
## print "WARNING: Regular/Lua data writing does not match!"
## import pprint
## pprint.pprint(data)
## pprint.pprint(lua_data)
|
UTF-8
|
Python
| false | false | 2,013 |
7,052,336,330,397 |
f1aa3b7289d13e6eaf3a6ea82bf3b5f6f916f14e
|
0ebf63916dc8c410107bbfc3ea1b5d0c36b314e3
|
/Native/Test/Python/mr-strings.py
|
b13e1a87cdac24f9a045dfcd705aa10f5e1c7431
|
[
"AGPL-3.0-or-later",
"AGPL-3.0-only"
] |
non_permissive
|
corneliuspreidel/FabricEngine_Backup
|
https://github.com/corneliuspreidel/FabricEngine_Backup
|
c46cc04b4db970f6bbd2d8126599612b658e05b7
|
9b310cc1b64450bca207ab455097dff3251f13d4
|
refs/heads/master
| 2020-12-29T02:55:04.121371 | 2012-09-21T13:21:04 | 2012-09-21T13:21:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# Copyright 2010-2012 Fabric Engine Inc. All rights reserved.
#
import fabric
client = fabric.createClient()
ag = client.MR.createArrayGenerator(
client.MR.createConstValue("Size", 100),
client.KLC.createArrayGeneratorOperator(
"gen.kl",
"operator gen(io String output, Size index) { output = String(index+1); }",
"gen"
)
)
print(
len( client.MR.createReduce(ag,
client.KLC.createReduceOperator(
"reduce.kl",
"operator reduce(String input, io String output) { if (output.length > 0) output += '-'; output += input; }",
"reduce"
)
).produce() )
)
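# Sanity check on the printed value (added note, not in the original): the
# reduce joins the strings "1".."100" with single '-' separators, so the
# length is 192 digits (9 + 90*2 + 3) plus 99 dashes = 291, regardless of
# the order in which partial results are combined.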
client.close()
|
UTF-8
|
Python
| false | false | 2,012 |
16,209,206,598,853 |
2f436b80d726949728f97c3363db6a4dfcacbbbb
|
2bd9decb0aedeea569e3c10cbf9d6671f8c52d84
|
/web.py
|
9ea419a49fc0d5d9757b915a7a9ad6f68a3646dc
|
[] |
no_license
|
niklasl/partquest
|
https://github.com/niklasl/partquest
|
09eafb025fa78f3556dea9ea3bc06a286decabd8
|
20772fe77503d1cb6debd47608c1b75e054560eb
|
refs/heads/master
| 2016-09-05T19:35:16.392850 | 2011-10-25T16:27:13 | 2011-10-25T16:27:13 | 2,622,551 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import Flask, render_template, send_file
app = Flask(__name__, static_folder='media')
@app.route("/")
def index():
return send_file("media/index.html", cache_timeout=0)
@app.route("/db.json")
def db():
return send_file("data/db.json", cache_timeout=0)
@app.route("/world/<path:path>")
def world(path):
return send_file("data/world/%s.html" % path, cache_timeout=0)
if __name__ == "__main__":
import os, sys
port = int(os.environ.get("PORT", 5000))
if '-d' in sys.argv:
app.debug = True
host = '127.0.0.1' if app.debug else '0.0.0.0'
app.run(host=host, port=port)
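# Typical invocations, inferred from the flag/env handling above:
#   PORT=8000 python web.py    # binds 0.0.0.0:8000
#   python web.py -d           # debug mode on 127.0.0.1:5000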
|
UTF-8
|
Python
| false | false | 2,011 |
13,245,679,182,119 |
756a25323fb20cb17149d86cf511f4553fb3fc55
|
3e5150447a2c90c26354500f1df9660ef35c990b
|
/filesystem/exists()
|
8d6d0809e46907f8d938489e3aee47f4df8bb09f
|
[] |
no_license
|
kilirobbs/python-fiddle
|
https://github.com/kilirobbs/python-fiddle
|
8d6417ebff9d6530e713b6724f8416da86c24c65
|
9c2f320bd2391433288cd4971c2993f1dd5ff464
|
refs/heads/master
| 2016-09-11T03:56:39.808358 | 2013-03-19T19:26:19 | 2013-03-19T19:26:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os.path
# 'filename' was undefined in the original snippet; define a placeholder
# path so the line actually runs (the path is illustrative).
filename = "example.txt"
print os.path.exists(filename)
|
UTF-8
|
Python
| false | false | 2,013 |
18,141,941,885,419 |
a1d0d98c4973a3435e2a084fab71f2963c9daea9
|
c154591fdc1e6edafbd0da28c2bdc00168b5b327
|
/xpybot/exceptions.py
|
3fb9028fa4b7bade9d6032e67d8041738e56b18a
|
[] |
no_license
|
borgstrom/xpybot
|
https://github.com/borgstrom/xpybot
|
5278b15983741888eb8d35feb7f418b2a2ca491d
|
f797362873c61178e1512c6a82585b5a95ae3cd5
|
refs/heads/master
| 2020-04-15T04:29:03.136265 | 2012-04-05T17:42:09 | 2012-04-05T17:42:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class BotException(Exception):
"""
Base Exception that all Bot Exceptions originate from
"""
pass
class BotConnectionException(BotException):
"""
Error occurred during connection
"""
pass
class BotAuthenticationException(BotException):
"""
Error during authentication
"""
pass
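# Because everything above derives from BotException, callers can catch the
# whole family at once; a hypothetical sketch ('bot' and 'log' are
# illustrative names, not part of this module):
#
#   try:
#       bot.connect()
#   except BotConnectionException:
#       bot.reconnect()
#   except BotException as exc:
#       log.error(exc)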
|
UTF-8
|
Python
| false | false | 2,012 |
10,359,461,132,505 |
8d7d4b07374abed1217cada7e69cd075b5b0f14a
|
4f1b6a8001280c76f6fa40064251ccf684f2b28b
|
/apps/client/validation.py
|
cd9b720402d8ef95a7148d9033c88db09fdfc335
|
[] |
no_license
|
asorici/envived
|
https://github.com/asorici/envived
|
f51c40f49b8dbee13ebde7709437e4efa12cd335
|
2b818240d6cef7d64f9c7a4ea4ecb52695c82878
|
refs/heads/master
| 2016-09-05T23:37:40.798340 | 2014-07-04T13:35:25 | 2014-07-04T13:35:25 | 11,955,649 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from tastypie.validation import Validation
class AnnotationValidation(Validation):
def is_valid(self, bundle, request=None):
from client.api import EnvironmentResource, AreaResource
## check that we have a user
if not bundle.request.user or bundle.request.user.is_anonymous():
return {'__all__': 'No user found in request.'}
if not bundle.data:
return {'__all__': 'No data submitted.'}
errors = {}
if bundle.request.method.upper() == "POST":
env_obj = None
area_obj = None
if 'environment' in bundle.data:
try:
env_obj = EnvironmentResource().get_via_uri(bundle.data['environment'])
except:
env_obj = None
if 'area' in bundle.data:
try:
area_obj = AreaResource().get_via_uri(bundle.data['area'])
except:
area_obj = None
if env_obj is None and area_obj is None:
errors['environment'] = ['No or wrong environment uri']
errors['area'] = ['No or wrong area uri']
            if env_obj is not None and area_obj is not None and area_obj.env != env_obj:
errors['environment'] = ["Environment resource mismatches parent environment of area resource."]
            if 'data' not in bundle.data or not bundle.data['data']:
errors['data'] = ["No or empty data field."]
## some additional validation of the data field might also be possible if no errors up to now
if not errors:
ann_cls = bundle.obj.__class__
data = bundle.data['data']
category = bundle.obj.__class__.CATEGORY
data_errors = ann_cls.validate_data(category, data)
if data_errors:
errors['data'] = data_errors
import sys
print >> sys.stderr, data_errors
return errors
|
UTF-8
|
Python
| false | false | 2,014 |
15,358,803,052,803 |
250ea620b751db20ba77c9a5ffb5dd5a069fc31d
|
69228155ff0066bab7862b881c575d35ac3025d5
|
/nodejs/tests/subprocess-helloworld.py
|
7dbe1b4b1ca43267b3795b6e449a4ca100640b1d
|
[
"BSD-3-Clause"
] |
permissive
|
idobatter/PythonJS
|
https://github.com/idobatter/PythonJS
|
e3e0337a20a041400e40c8ff94b1e6b664d7d6ec
|
0161dd5aba6caeaf5b06e74cc8524efd04a36143
|
refs/heads/master
| 2021-01-18T12:10:44.254308 | 2014-05-15T01:26:08 | 2014-05-15T01:26:08 | 19,898,894 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from nodejs.subprocess import *
print 'testing subprocess.call'
subprocess.call( 'ls', ['-lh'] )
print 'test complete.'
|
UTF-8
|
Python
| false | false | 2,014 |
2,568,390,443,360 |
0e3b2aafe75199de6e80e6c0b4bf93938ec9340c
|
83158ce7ab6bfa7dba53dffc6a07748c3ad69732
|
/Quick_Union_Weighted.py
|
2cfb4ee9d7c80cb859a338b4848a818d6780f6d5
|
[] |
no_license
|
iamsidshetty/Algorithms
|
https://github.com/iamsidshetty/Algorithms
|
a3938b0986c8c3677888f5676761e49fe5bb25e9
|
007d39f91ccdd1b0dd23683f4e31258a48ea7c19
|
refs/heads/master
| 2020-12-30T10:50:06.748625 | 2014-07-02T02:54:05 | 2014-07-02T02:54:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'sid'
# Quick Union Weighted Algorithm Implementation
# 1. Initialize (N)
# 2. Find (log N) Note: log base 2
# 3. Union (log N) Note: log base 2
# 4. Complexity: N + M log N
# Pros: Trees that get too Tall are flattened
#Function to initialize the objects
def union_initialize(n):
id_list = []
for i in range(n):
id_list.append(i)
return id_list
#Chase the parent
def root(i, id_list):
while i != id_list[i]:
i = id_list[i]
return i
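# Worked example: with id_list == [0, 0, 1], root(2, id_list) follows
# 2 -> 1 -> 0 and returns 0 (index 0 is its own parent, so the chase stops).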
#Find function to check if the objects are connected or not
def find(p, q, id_list):
if root(p, id_list) == root(q, id_list):
return True
else:
return False
#Assigns root as parent and keeps track of the size
def union(id_list, p, q, t_size):
i = root(p, id_list)
j = root(q, id_list)
if i == j: return
if t_size[i] < t_size[j]:
id_list[i] = j
t_size[j] += t_size[i]
else:
id_list[j] = i
t_size[i] += t_size[j]
def initialize_tree(n):
tree_size = [1] * n
return tree_size
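# A small worked session for the functions above (values are what the code
# produces; this block is explanatory and not part of the original script):
#   ids = union_initialize(5)   # [0, 1, 2, 3, 4]
#   sz = initialize_tree(5)     # [1, 1, 1, 1, 1]
#   union(ids, 0, 1, sz)        # equal sizes -> ids[1] = 0, sz[0] == 2
#   union(ids, 2, 1, sz)        # sz[2] < sz[0] -> ids[2] = 0, sz[0] == 3
#   find(0, 2, ids)             # True: both roots are 0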
def main():
id_list = []
n = input('Enter the number of elements: ')
    id_list = union_initialize(n)
#to keep track of the tree size
    t_size = initialize_tree(n)
#User Input
for i in range(1000):
print "\n\n"
print "1. To check if the objects are connected"
print "2. Connect the objects"
print "3. Print the lists"
print "4. Done"
print "\nEnter your option: "
option = input()
if option == 1:
p = input("Enter the First Object(No.): ")
q = input("Enter the Second Object(No.): ")
Result = find(p, q, id_list)
#Spit out the result of the find connection
if Result is False:
print "\n**********Not Connected**********"
else:
print "\n**********Connected**********"
if option == 2:
#Get the Elements for which the connection is to be found
p = input("Enter the First Object(No.): ")
q = input("Enter the Second Object(No.): ")
union(id_list, p, q, t_size)
print "\n**********Success**********"
if option == 3:
print "id_list:"
print id_list
print "t_size:"
print t_size
if option == 4:
exit(1)
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,014 |
2,027,224,590,034 |
a872fedb479234c64376edd679712c80e82c4668
|
9cd9a82a5c7da01e2f5346689bb5a36f6257335b
|
/sbsmanip/io.py
|
e80305e8d7f3382eb46fd71aac6fcfbf28378f78
|
[
"MIT"
] |
permissive
|
StellarwhimsRepo/sbsmanip
|
https://github.com/StellarwhimsRepo/sbsmanip
|
b11d954c5bb2034338c1ca9a92442f785cb59ad2
|
6ad979a22de1238723703eb86254423877e77c32
|
refs/heads/master
| 2021-05-28T07:24:19.382721 | 2014-06-10T04:37:55 | 2014-06-10T05:18:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import xml.etree.ElementTree as ET
import sector
import cubeblocks
class XMLFile(object):
def __init__(self, filename):
self._tree = ET.parse(filename)
self.root = self._tree.getroot()
def write(self, filename):
self._tree.write(filename)
class SBSFile(XMLFile):
def __init__(self, filename):
super(SBSFile, self).__init__(filename)
self.sector = sector.Sector(self.root)
class CubeBlocksSBC(XMLFile):
def __init__(self, filename):
super(CubeBlocksSBC, self).__init__(filename)
self.definitions = [cubeblocks.Definition(d) for d in self.root.find(
'CubeBlocks').findall('Definition')]
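# Hypothetical usage (file names are illustrative, not taken from the repo):
#   world = SBSFile('SANDBOX_0_0_0_.sbs')     # parse a sector save
#   world.write('SANDBOX_0_0_0_.sbs.bak')
#   defs = CubeBlocksSBC('CubeBlocks.sbc').definitions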
|
UTF-8
|
Python
| false | false | 2,014 |
18,433,999,656,376 |
8f656e2aef42c9e071c376e3a54229454a973332
|
271bbec33d42fe6023aa2dc3252862d200c3e461
|
/bit/core/namespace.py
|
818ffb62e306cccdb5bb9d150bf28eba7d621bdd
|
[
"BSD-2-Clause"
] |
permissive
|
slurps-mad-rips/bit
|
https://github.com/slurps-mad-rips/bit
|
1dd09c4e11fbc000e83c1a92b363889a24520274
|
4ccd6dff343f52f14b1be893bed96dceada5dc0c
|
refs/heads/master
| 2021-01-20T09:09:30.922933 | 2012-03-19T01:35:11 | 2012-03-19T01:35:11 | 3,434,759 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# A 'wrapper' task object that allows toolchains to spawn consistently named
# tasks.
# spawn() below calls MetaTask.get, which the original file never imported;
# MetaTask is assumed here to live alongside Task in bit.core.task.
from bit.core.task import Task, MetaTask
class Namespace(Task):
def __init__(self, name, parent):
super().__init__(name, parent)
self.prefix = self.__class__.__name__.lower()
# The prefix doesn't matter in terms of capitalization because
# the MetaTask.get method will use the .lower() call to create it
def spawn(self, name):
return MetaTask.get('{}{}'.format(self.prefix, name))(name, self)
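# Illustrative behaviour (class name is hypothetical): for a subclass named
# CXX, self.prefix is 'cxx', so spawn('compile') asks MetaTask.get for the
# task class registered as 'cxxcompile'.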
|
UTF-8
|
Python
| false | false | 2,012 |
309,237,668,397 |
ae02d2fff27536f352fc174faa74ae9c1ac9d038
|
90d7b30545cfae4548e232d0ba03fc1d2622ca51
|
/tests/test_setup_utils.py
|
986ec6415d07dccc58fb14928830b23b76333485
|
[] |
no_license
|
gagoel/pkginstaller
|
https://github.com/gagoel/pkginstaller
|
f41783a482350732ceeab3dcfbce1b8450c5b624
|
33b81adb3c7b0785896f7cb5fd4837c62375ef68
|
refs/heads/master
| 2021-01-02T08:47:22.707044 | 2014-06-15T09:05:33 | 2014-06-15T09:05:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
import sys
import os
import unittest
import logging.config
import shutil
import paramiko
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/../pkginstaller'))
from pkginstaller.internal.setup_utils import *
from tests import VERBOSE
class TestSetupUtils(unittest.TestCase):
def setUp(self):
# testcase temp directory, it will delete after tests execution.
curr_file_dir = os.path.abspath(os.path.dirname(__file__))
self.temp_dir = os.path.join(curr_file_dir, 'temp_setup_test_utils')
if os.path.exists(self.temp_dir):
raise Exception(
'Make sure you do not have {} directory, this directory '
'will be used by tests as temporary location and it will be '
'deleted after operation'.format(self.temp_dir)
)
else:
os.makedirs(self.temp_dir)
# parsing configuration file
self.test_config_file = os.path.join(
curr_file_dir, 'tests_config.json'
)
self.test_config = None
with open(self.test_config_file) as conf_file:
try:
self.test_config = json.load(conf_file)
except Exception as e:
print(
'configuration file {} parsing failed'.format(
self.test_config_file)
)
raise e
# setting logger.
logging.config.dictConfig(self.test_config['LOGGING'])
# Remote host data
self.remote_host = os.environ['REMOTE_HOST_IP']
self.remote_ssh_port = int(os.environ['REMOTE_HOST_SSH_PORT'])
self.remote_ssh_user = os.environ['REMOTE_HOST_SSH_USER']
self.remote_ssh_pass = os.environ['REMOTE_HOST_SSH_PASS']
self.temp_remote_dir = os.environ['REMOTE_HOST_TEMP_DIR']
if is_path_exists(self.temp_remote_dir, self.remote_host,
self.remote_ssh_port, self.remote_ssh_user, self.remote_ssh_pass
):
raise Exception(
'Make sure you do not have {} directory, this directory '
'will be used by tests as temporary location and it will be '
'deleted after operation'.format(self.temp_remote_dir)
)
else:
mkdirs(
self.temp_remote_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
def tearDown(self):
shutil.rmtree(self.temp_dir)
remove_dir(self.temp_remote_dir, self.remote_host,
self.remote_ssh_port, self.remote_ssh_user, self.remote_ssh_pass)
logging.shutdown()
def test_is_path_exists_localhost(self):
temp_dir = os.path.join(self.temp_dir, 'test-is-path-exists')
path_status = is_path_exists(temp_dir)
self.assertEqual(path_status, False)
mkdirs(temp_dir)
path_status = is_path_exists(temp_dir)
self.assertEqual(path_status, True)
def test_is_path_exists_remotehost(self):
temp_dir = os.path.join(self.temp_remote_dir, 'test-is-path-exists')
path_status = is_path_exists(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(path_status, False)
mkdirs(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
path_status = is_path_exists(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(path_status, True)
def test_create_file_localhost(self):
temp_dir = os.path.join(self.temp_dir, 'test-create-file-dir')
mkdirs(temp_dir)
temp_file = os.path.join(temp_dir, 'testfile')
temp_file_data = 'This is test file data'
create_file_status = create_file(temp_file, temp_file_data)
self.assertEqual(create_file_status, True)
file_obj = open(temp_file, 'r')
file_data = file_obj.read()
file_obj.close()
self.assertEqual(file_data, temp_file_data)
def test_create_file_remotehost(self):
temp_dir = os.path.join(self.temp_remote_dir, 'test-create-file-dir')
mkdirs(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
temp_file = os.path.join(temp_dir, 'testfile')
temp_file_data = 'This is test file data'
create_file_status = create_file(
temp_file,
temp_file_data,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(create_file_status, True)
t = paramiko.Transport((self.remote_host, self.remote_ssh_port))
t.connect(username=self.remote_ssh_user, password=self.remote_ssh_pass)
sftp = paramiko.SFTPClient.from_transport(t)
sftpfile_obj = sftp.open(temp_file, 'r')
file_data = sftpfile_obj.read()
sftpfile_obj.close()
sftp.close()
self.assertEqual(file_data.decode('utf-8'), temp_file_data)
def test_remove_file_localhost(self):
temp_dir = os.path.join(self.temp_dir, 'test-remove-file-dir')
mkdirs(temp_dir)
temp_file = os.path.join(temp_dir, 'testfile')
temp_file_data = 'This is test file data'
create_file_status = create_file(temp_file, temp_file_data)
self.assertEqual(create_file_status, True)
path_status = is_path_exists(temp_file)
self.assertEqual(path_status, True)
remove_file_status = remove_file(temp_file)
self.assertEqual(remove_file_status, True)
path_status = is_path_exists(temp_file)
self.assertEqual(path_status, False)
def test_remove_file_remotehost(self):
temp_dir = os.path.join(self.temp_remote_dir, 'test-remove-file-dir')
mkdirs(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
temp_file = os.path.join(temp_dir, 'testfile')
temp_file_data = 'This is test file data'
create_file_status = create_file(
temp_file,
temp_file_data,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(create_file_status, True)
path_status = is_path_exists(
temp_file,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(path_status, True)
remove_file_status = remove_file(
temp_file,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(remove_file_status, True)
path_status = is_path_exists(
temp_file,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(path_status, False)
def test_remove_dir_localhost(self):
temp_dir = os.path.join(self.temp_dir, 'test-remove-dir')
mkdirs(temp_dir)
temp_file = os.path.join(temp_dir, 'testfile')
create_file_status = create_file(
temp_file, 'This is test file')
self.assertEqual(create_file_status, True)
remove_dir_status = remove_dir(temp_dir)
self.assertEqual(remove_dir_status, True)
def test_remove_dir_remotehost(self):
temp_dir = os.path.join(self.temp_remote_dir, 'test-remove-dir')
mkdirs(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
temp_file = os.path.join(temp_dir, 'testfile')
create_file_status = create_file(
temp_file,
'This is test file',
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(create_file_status, True)
remove_dir_status = remove_dir(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(remove_dir_status, True)
def test_is_file_downloaded_localhost(self):
"""
tar, tar.gz, tar.bz2 or tar.xz download test.
"""
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(self.temp_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
# Checking download status before download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(downloaded_status, False)
# Downloading file.
download_status = download_file(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(download_status, True)
self.assertEqual(os.path.exists(file_path), True)
# Checking download status after download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(downloaded_status, True)
"""
git cloning test
"""
test_file_name = "MySQL-for-Python-3.git"
test_download_urls = "https://github.com/gagoel"
test_src_repo = os.path.join(self.temp_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
# Checking download status before download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(downloaded_status, False)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(download_status, True)
self.assertEqual(os.path.exists(file_path), True)
# Checking download status after download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(downloaded_status, True)
def test_is_file_downloaded_remotehost(self):
"""
tar, tar.gz, tar.bz2 or tar.xz download test.
"""
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(self.temp_remote_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
# Checking download status before download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls,
test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(downloaded_status, False)
# Downloading file.
download_status = download_file(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(download_status, True)
check_path_status = is_path_exists(
file_path, remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(check_path_status, True)
# Checking download status after download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(downloaded_status, True)
"""
git cloning test
"""
test_file_name = "MySQL-for-Python-3.git"
test_download_urls = "https://github.com/gagoel"
test_src_repo = os.path.join(self.temp_remote_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
# Checking download status before download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(downloaded_status, False)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(download_status, True)
check_path_status = is_path_exists(
file_path, remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual(check_path_status, True)
# Checking download status after download
downloaded_status = is_file_downloaded(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(downloaded_status, True)
def test_download_file_localhost(self):
# tar, tar.gz, tar.bz2 or tar.xz download test.
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(self.temp_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(download_status, True)
self.assertEqual(os.path.exists(file_path), True)
# git cloning test
test_file_name = "MySQL-for-Python-3.git"
test_download_urls = "https://github.com/gagoel"
test_src_repo = os.path.join(self.temp_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
self.assertEqual(download_status, True)
self.assertEqual(os.path.exists(file_path), True)
def test_download_file_remotehost(self):
# tar, tar.gz, tar.bz2 or tar.xz download test.
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(self.temp_remote_dir,
'test-download-file/src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(download_status, True)
path_exists = is_path_exists(
file_path,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(path_exists, True)
# git cloning test
test_file_name = "MySQL-for-Python-3.git"
test_download_urls = "https://github.com/gagoel"
test_src_repo = os.path.join(self.temp_remote_dir,
'test-download-file/src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(download_status, True)
path_exists = is_path_exists(
file_path,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(path_exists, True)
def test_extract_file_localhost(self):
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(self.temp_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo, verbose=VERBOSE
)
test_extract_path = os.path.join(self.temp_dir, 'src')
extracted_file = extract_file(
test_file_name, test_src_repo, test_extract_path, verbose=VERBOSE
)
self.assertEqual(os.path.exists(test_extract_path), True)
def test_extract_file_remotehost(self):
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(self.temp_remote_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
test_extract_path = os.path.join(self.temp_remote_dir, 'src')
extracted_file_path = extract_file(
test_file_name, test_src_repo, test_extract_path,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
extracted_readme = os.path.join(extracted_file_path, 'README')
path_exists = is_path_exists(
extracted_file_path,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
self.assertEqual(path_exists, True)
def test_replace_env_vars(self):
test_environments = {
"TEST_ENV1" : "TEST_ENV1_VALUE",
"TEST_ENV2" : "TEST_ENV2_VALUE"
}
for a, b in test_environments.items():
os.environ[a] = b
# string type
test_string = "$TEST_ENV1/testpath"
test_string_after_resolve = os.path.join(
test_environments['TEST_ENV1'], "testpath"
)
self.assertEqual(
test_string_after_resolve, replace_env_vars(
test_string, verbose=VERBOSE)
)
# list type
test_string = ["$TEST_ENV1/testpath1", "$TEST_ENV2/testpath2"]
resolve_test_string = replace_env_vars(test_string, verbose=VERBOSE)
self.assertEqual(resolve_test_string[0], "TEST_ENV1_VALUE/testpath1")
self.assertEqual(resolve_test_string[1], "TEST_ENV2_VALUE/testpath2")
# dict type
test_string = {
"key1" : ["$TEST_ENV1/testpath1", "$TEST_ENV2/testpath2"]
}
resolve_test_string = replace_env_vars(test_string, verbose=VERBOSE)
self.assertEqual(
resolve_test_string['key1'][0], "TEST_ENV1_VALUE/testpath1"
)
self.assertEqual(
resolve_test_string['key1'][1], "TEST_ENV2_VALUE/testpath2"
)
def test_run_command_localhost(self):
cmd = ['/bin/sh', '-c', 'pwd']
stdout, stderr = run_command(cmd, verbose=VERBOSE)
self.assertEqual(stderr, "")
def test_run_command_remotehost(self):
temp_dir = os.path.join(self.temp_remote_dir, 'test-run-command')
mkdirs(
temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
# Downloading file and extracting it by running command on remotehost
test_file_name = "mod_wsgi-3.4.tar.gz"
test_download_urls = "https://modwsgi.googlecode.com/files"
test_src_repo = os.path.join(temp_dir, 'src_repo')
file_path = os.path.join(test_src_repo, test_file_name)
download_status = download_file(
test_file_name, test_download_urls, test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=VERBOSE
)
# There is no assert here; this just checks that we get output while
# running the command with verbose > 1
cmd = ['tar', 'xvf', file_path]
stdout, stderr = run_command(
cmd, cmd_exec_dir=test_src_repo,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass,
verbose=2
)
cmd = ['pwd']
stdout, stderr = run_command(
cmd, cmd_exec_dir=temp_dir,
remote_host=self.remote_host,
remote_ssh_port=self.remote_ssh_port,
remote_ssh_user=self.remote_ssh_user,
remote_ssh_pass=self.remote_ssh_pass
)
self.assertEqual('', stderr)
self.assertEqual(temp_dir, stdout.strip())
if __name__ == "__main__":
unittest.main(verbosity=2)
|
UTF-8
|
Python
| false | false | 2,014 |
14,826,227,129,425 |
4414189e06afbf3579b5c05a1611bd0fe887dd5c
|
57f95adacb45f6242feeada6141efb8a97e7c56d
|
/trail3er/user/views.py
|
451858dce24d58bbdbdc738ca3521700bbbd3d38
|
[] |
no_license
|
Jerkovic/trail3r
|
https://github.com/Jerkovic/trail3r
|
5e8f904db783c68e582ab8aafb88d9837f192908
|
6554c91f8721acd30fb4c13abcc016c39103efa2
|
refs/heads/master
| 2020-05-29T17:31:40.364871 | 2013-05-28T13:52:59 | 2013-05-28T13:52:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from flask import Blueprint, Response
#from flask import current_app as APP
#from flask.ext.login import login_required, current_user
#from .models import User
user = Blueprint('user', __name__, url_prefix='/user')
@user.route('/')
def index():
return Response('user index')
|
UTF-8
|
Python
| false | false | 2,013 |
10,565,619,572,548 |
79d0bad6dfa291cc3f329ccb570de98e551a6bd5
|
c3d0bd85049b470f31ea7affd61cc462e0a18910
|
/fabfile.py
|
1eb994dfc85bb01fe9a27c73c0cf64bcc80ce637
|
[] |
no_license
|
adamw523/docker_server
|
https://github.com/adamw523/docker_server
|
316b4e42ab3f1e11e9e617c0a1ff3c4e0750ae28
|
6fa881f3fb3bccb9951f05a35e55281f768d6798
|
refs/heads/master
| 2016-09-09T18:36:33.705827 | 2014-12-27T05:05:08 | 2014-12-27T05:05:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import ConfigParser
import fabtools
from fabric.contrib.files import cd, env, exists, local, sudo, uncomment
from fabric.operations import put, run
from fabric.utils import abort
from fabtools import require
env.project_name = 'docker_server'
#---------------------------
# Environments
#---------------------------
def dodo():
"""
Select DigitalOcean environment
"""
# get config file
env.config = ConfigParser.ConfigParser()
env.config.read(['private/dodo.cfg'])
# set values from config
env.hosts = [env.config.get('dodo', 'host')]
env.user = env.config.get('dodo', 'user')
def root():
env.user = 'root'
#---------------------------
# Environment config
#---------------------------
def provision_server():
"""
Provision server to host docker images
"""
_add_user()
_install_packages()
_create_swapfile()
def add_public_key(pub_key):
fabtools.user.add_ssh_public_keys('adam', [pub_key])
def _add_user():
if not fabtools.user.exists('adam'):
fabtools.user.create('adam', shell='/bin/bash')
fabtools.require.users.sudoer('adam')
#uncomment('/etc/sudoers', 'includedir')
require.users.user('adam', extra_groups=['docker'])
def _install_packages():
require.deb.packages( ['python-setuptools'] )
sudo('easy_install pip')
sudo('pip install boto fig')
def _create_swapfile():
if not exists('/swapfile'):
sudo('swapoff -a')
sudo('fallocate -l 1024M /swapfile')
sudo('chmod 600 /swapfile')
sudo('mkswap /swapfile')
sudo('swapon /swapfile')
#---------------------------
#
#---------------------------
def _copy_files():
put('nginx', 'builds/server/')
put('fig.yml', 'builds/server/')
docker_ip = run("/sbin/ifconfig docker0 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}'")
run('sed -i "s/docker_ip_address/%s/g" builds/server/nginx/nginx.conf' % docker_ip)
def nginx_build():
"""
Build the nginx server remotely
"""
run('mkdir -p builds/server')
put('fig.yml', 'builds/server/')
_copy_files()
with cd('builds/server'):
run('fig build')
def nginx_up():
"""
Run the nginx server remotely
"""
with cd('~adam/builds/server'):
sudo('fig up -d')
def nginx_kill():
with cd('~adam/builds/server'):
sudo('fig kill')
def nginx_rm():
with cd('~adam/builds/server'):
sudo('fig rm --force')
|
UTF-8
|
Python
| true | false | 2,014 |
15,367,393,023,475 |
62c64be4dfffeb1f86da5474a70c7ec347c8c819
|
724c6c8990d3cb6f99eafacac8e8966fb580271f
|
/Motifs/all_motifs.py
|
f169e3a6015393282de159a412e0b63536243f7b
|
[] |
no_license
|
bluelocust/SynU3-project
|
https://github.com/bluelocust/SynU3-project
|
efba1eeed8aa96e55166c2193d75b9cd96e4bae4
|
81f22f9fd846332afdd972410c2e8d6cb55f4a88
|
refs/heads/master
| 2021-01-19T07:40:39.801000 | 2011-09-04T04:03:26 | 2011-09-04T04:03:26 | 2,296,126 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Sam Sun
# Promoter Library
# -----------------------------------------------
# DONE:
# *Enumerates all possible motifs
# *Implements a basic gillespie algorithm and checked result with graphs
# *Gillespie function reflects diff. sizes (PEG)
# *Densitometry on DNA gels to get distribution of fragment sizes [MATLAB]
# *Exports sequences as nts and motifs in FASTA format
# -----------------------------------------------
# TO DO:
# 1) Separate file into modules...this is getting unwieldy
# 2) Create classes and methods instead of random functions
# 3.1) Convert arbitrary DNA units into molecules, etc.
# 3.2) Compare experimental vs. computational size distribution
# 4) Train HMMs on each motif separately, and apply to fragments
# 5) Make it water-tight
# 5.1) TDD
# 5.2) Error-handling
# -----------------------------------------------
from random import choice, random
import bisect
import matplotlib.pyplot as plt
import sys
import math
from Bio.Alphabet import IUPAC, Alphabet
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from my_hist import hist_and_fit
# globals
__IUPAC = {}
__rxns = {}
def main(script, motifs = 'LTR.txt', code = 'IUPAC.txt'):
global __num_motifs
fp = open(motifs)
# build IUPAC dictionary
build_IUPAC(code)
# build motif dictionary
d = dict()
for line in fp:
motif, seq = process_line(line)
d[motif] = seq
# enumerate all possibilities for each motif
all = {}
for key in d:
seq = d[key]
# ID for specific, fully-determined sequence
# entry 1: sequence position
# entry 2: # of nucleotide choices
seq_ID = []
# find ambiguous nucleotides
for pos in range(len(seq)):
if seq[pos] in 'AGCT':
pass
else:
seq_ID.append((pos, len(__IUPAC[seq[pos]])))
all[key] = generate(seq, seq_ID, [], len(seq_ID))
# build rxn code for Gillespie
rxn_lookup()
# Gillespie algorithm
time = 0
parts = [{0:50}, {1:400}, {}] # 50 HP-mers, 400 polymers, 0 HP-mer-HP (size: count)
# WHEN YOU GET TO THE END, I ONLY CARE ABOUT DICT3
##full_parts = []
full_time = [] # why is this tracked?
for n in range(2000):
##full_parts.append(tuple(parts))
full_time.append(time)
dt, parts = gillespie(parts)
# end condition; COMBINE; oh, I'm just passing up right now
if dt == -1:
break
time += dt
# aggregate HP-mer-HP fragments for motif-finding & visualization
export_seq(parts[2], all)
# aggregate DNA fragments for gel display
DNA_gel = merge_dict(parts)
plt.plot(DNA_gel.keys(), DNA_gel.values(), 'bo-')
plt.show()
#print '# of motifs:', len(d.keys())
#print '# of sequences:', len(full)
#print 'sequences unique?:', test_uniqueness(full)
#print 'mean, S.D.:'
# Rather than using an IUPAC dictionary, BioPython built-in's could be
# used, but let's only opt for BioPython when creating FASTA files since
# FASTA is a bioinformatics standard
def build_IUPAC(code):
'''creates IUPAC code for ambiguous nucleotides using a .txt'''
global __IUPAC
fp = open(code)
for line in fp:
nuc, nuc_options = process_line(line)
__IUPAC[nuc] = list(nuc_options)
def process_line(line):
'''processes one line, isolating the name from the sequence'''
new_line = line.split()
name, seq = new_line[0], ''.join(new_line[1:])
return name, seq
def generate(seq, seq_ID, ACGT_sequences, depth):
'''processes an ambiguous sequence into a list of all
possible sequences'''
# extra case, if motif has no ambiguous nucleotides, e.g. Hairpin
if depth == 0:
return [seq]
unknown_pos, num_choices = extract(seq_ID, depth-1)
# base case
# 1) fill in final unknown nucleotide in the sequence
# 2) add 2-4 determined seq's to ACGT_sequences, and return
# ACGT_sequences
if depth == 1:
for m in range(num_choices):
full_seq = fill_in(seq, unknown_pos, m)
ACGT_sequences.append(''.join(full_seq))
return ACGT_sequences
# recursive case
else:
for n in range(num_choices):
new_seq = fill_in(seq, unknown_pos, n)
generate(new_seq, seq_ID, ACGT_sequences, depth-1)
return ACGT_sequences
def extract(list_of_tuples, which_tuple):
'''extracts a given tuple's entries from a list of tuples'''
pair = list_of_tuples[which_tuple]
a, b = pair[0], pair[1]
return a, b
def fill_in(seq, unknown_pos, which_nuc):
'''takes an ambiguous sequence and fills in one unknown
nucleotide with ACGT'''
# copy sequence
new_seq = list(seq)
# fill in an unknown nucleotide
nuc_choices = __IUPAC[new_seq[unknown_pos]]
nuc = nuc_choices[which_nuc]
new_seq[unknown_pos] = nuc
return new_seq
def test_uniqueness(all_seq):
'''tests if all elements of a list are unique'''
if len(set(all_seq)) == len(all_seq):
return True
else:
return False
def gillespie(parts):
'''implements Gillespie algorithm, based on # of monomers, and #
of hairpins'''
# rate constant
k = .01
# parts
HPmer = parts[0]
polymer = parts[1]
HPmerHP = parts[2]
# rxns
X = sum(HPmer.values())
Y = sum(polymer.values())
a = [k*X*(X-1), k*X*Y, k*Y*(Y-1)]
a_0 = sum(a)
# end condition
if a_0 == 0:
return -1, parts
# find time tau
tau = -(1/a_0) * math.log(random())
# choose a rxn
this_rxn = random()
ctr = 0
for n in range(len(a)):
if ctr <= this_rxn < ctr + a[n]/a_0:
rxn = n
break
else:
ctr += a[n]/a_0
# change time, # of molecules
dM = list(__rxns[rxn])
return tau, rxn_update(parts, dM)
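# Worked example of the time step above (editor's note): with total
# propensity a_0 = 2.0 and random() returning 0.5,
# tau = -(1/2.0) * log(0.5) = 0.5 * 0.6931... ~= 0.347 time units.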
def rxn_lookup():
'''creates reaction, or molecule-updating code for gillespie
algorithm'''
global __rxns
# HPmer, polymer, HPmerHP
__rxns[0] = [-2, 0, 1] # require two HPmers
__rxns[1] = [-99, -1, 0]
__rxns[2] = [0, -100, 0] # require two polymers
def rxn_update(parts, dM):
'''updates randomly chosen molecules based on rxn type'''
# track reactants
lenR = []
# delete reactant(s)
for m in range(len(dM)):
while dM[m] < 0:
# randomly select a molecule size
all_len = parts[m].keys()
temp = all_len[weighted_rnd(parts[m].values())]
# destroy one particle
parts[m][temp] -= 1
# housekeeping
if parts[m][temp] == 0:
del parts[m][temp]
lenR.append(temp)
if dM[m] == -99:
dM[m] += 99
dM[m] += 1
# create product
for n in range(len(dM)):
if dM[n] == 1:
lenP = sum(lenR)
if lenP in parts[n]:
parts[n][lenP] += 1
else:
parts[n][lenP] = 1
return parts
def weighted_rnd(weights):
'''randomly selects an element from a list, with the chances of
each length to be selected defined by weights'''
rnd = random() * sum(weights)
for i, w in enumerate(weights):
rnd -= w
if rnd < 0:
return i
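# Quick sanity check for weighted_rnd (editor's sketch, left commented out so
# module behaviour is unchanged): with weights [1, 2, 7], index 2 should be
# drawn roughly 70% of the time.
# counts = [0, 0, 0]
# for _ in xrange(10000):
#     counts[weighted_rnd([1, 2, 7])] += 1
# print counts  # expect roughly [1000, 2000, 7000]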
def merge_dict(parts):
'''merges a list of dictionaries into a single dictionary
--> + 1 to all keys in HPmer
--> + 2 to all keys in HPmerHP'''
#LISTS are more efficient at this point
full = {}
full.update(parts[0])
for m in range(1, len(parts)):
for k in parts[m].keys():
if k in full:
full[k] += parts[m][k]
else:
full[k] = parts[m][k]
print full
return full
def export_seq(SynU3s, ACGT_motifs):
'''processes a dictionary of SynU3 sizes (# of motifs : # of seq's)
into two lists, seq_as_motifs and seq_as_nt, and exports two
FASTA-formatted files'''
motifs = ACGT_motifs.keys();
# destroy key-entry '0' since HP-HP is not an LTR
if 0 in SynU3s:
del SynU3s[0]
else:
pass
# create a list whose entries correspond to U3 size for all SynU3s
l = [[key]*SynU3s[key] for key in SynU3s.keys()]
SynU3_sizes = [item for sublist in l for item in sublist]
seq_as_motifs = []
seq_as_nts = []
# --- randomly choose motif, randomly choose sequence ---
# 1) use a generator to step through the list, returning # of mers
# 2) for each U3, choose a motif/seq and store in separate lists
for seq in xrange(len(SynU3_sizes)):
motif_string = [choice(motifs) for m in xrange(SynU3_sizes[seq])]
seq_as_motifs.append(''.join([x + ' ' for x in motif_string]))
seq_as_nts.append(''.join([choice(ACGT_motifs[x])
for x in motif_string]))
# seq_NT: feed into MEME or MochiView
# seq_M: compare MEME output to seq_M, using same file format
seq_M = seq_generator(seq_as_motifs, Alphabet())
seq_NT = seq_generator(seq_as_nts, IUPAC.unambiguous_dna)
output1 = open('sim_motifs.fasta', 'w')
output2 = open('sim_seqs.fasta', 'w')
# export as FASTA
SeqIO.write(seq_M, output1, 'fasta')
SeqIO.write(seq_NT, output2, 'fasta')
output1.close()
output2.close()
def seq_generator(data, datatype):
'''process a list of sequences into SeqRecord'''
for index in xrange(len(data)):
yield SeqRecord(Seq(data[index], datatype),
id = 'SynU3---#' + str(index),
description = '')
# *test with MEME and MochiView
# *optimize the p-value, or enforce non-overlapping motifs, and compare
# against seq_M
# *give two options (non-overlapping) vs. using p-values
if __name__ == '__main__':
main(*sys.argv)
|
UTF-8
|
Python
| false | false | 2,011 |
8,048,768,755,666 |
45bae1a9834432c609ee75db4b1d2ecf4b8e8f4e
|
1af1115cc0701c394845bf8ca0809cf7644f4642
|
/rat/util/math.py
|
74e81fae6616b0a3010640053728c092335cc6c1
|
[] |
no_license
|
mastbaum/pyrat
|
https://github.com/mastbaum/pyrat
|
4d5055b5f43511abe012a97c2bde34be6aa8f926
|
97c1d41fb7afdda898c87da47f76cc2831584512
|
refs/heads/master
| 2016-09-08T01:35:03.694652 | 2012-05-16T05:05:51 | 2012-05-16T05:05:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''Extra math functions.'''
def radical_inverse(n, base):
'''from RAT::RadicalInverse.'''
val = 0.0
inv_base = 1.0/base
inv_bi = inv_base
while n > 0:
d_i = (n % base)
val += d_i * inv_bi
n /= base
inv_bi *= inv_base
return val
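# Worked values (editor's note): radical_inverse mirrors the base-`base`
# digits of n about the radix point (the van der Corput sequence), e.g.
# radical_inverse(1, 2) == 0.5
# radical_inverse(2, 2) == 0.25
# radical_inverse(3, 2) == 0.75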
|
UTF-8
|
Python
| false | false | 2,012 |
15,350,213,158,321 |
d72bb02b1f45a9300c2b42c0dde9ae3356a938d4
|
f5668fbfe3addfebe846b84e22df5a41a2b16550
|
/ECproject/1DEC/3Continue/continue.py
|
a511033204733063d9730d863c61448cedd583df
|
[] |
no_license
|
OvenO/datasphere
|
https://github.com/OvenO/datasphere
|
6b55e96e23feb90eed33cfc1b28346982131ff5c
|
92001d52474ba97d290f7cef519554ef832e9587
|
refs/heads/master
| 2021-03-12T23:54:40.361914 | 2014-09-12T18:59:18 | 2014-09-12T18:59:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import argparse
import time as thetime
# this function needs the number of small blocks passed into it because
# it is going to check whether the number of files we get back from the
# qsub is the same as the number we submitted ... which is num_small_blocks
def are_we_done(num_small_blocks):
all_files = os.listdir(".")
count = 0
for a,b in enumerate(all_files):
if ("3_cont_wh" in b):
count+=1
if (count == num_small_blocks):
return True
else:
return False
#*******************************************************************
#*******************************************************************
def info_to_arg_str(dir):
arg_str = " "
info_file = open("/users/o/m/omyers/Data/EC/2DBlock/Old/"+dir+"/info.txt","r")
lns = info_file.readlines()
for i,j in enumerate(lns):
arg_str += j[:-1] + " "
return arg_str
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--dir', action = 'store', dest = "dir",type = str,required = True)
# start num and end num allow us to just work on certain files if most of them look pretty good.
# or you can do all of them at once. you have some choice.
# snum --> start number
# enum --> end number
parser.add_argument('--snum', action = 'store', dest = "snum",type = int,default = 0)
parser.add_argument('--enum', action = 'store', dest = "enum",type = int,default = 0)
# new way... now we are set up so it will just loop over the whole file in sections. The number
# of sections is sub_times and this number will come as an argument
parser.add_argument('--stimes', action = 'store', dest = "stimes",type = int,default=1)
parser.add_argument('--aonce', action = 'store', dest = "aonce",type = int,default=0)
# incase sections start at non zero number like 200poindat.txt
parser.add_argument('--start', action = 'store', dest = 'start',type = int,default=0)
inargs = parser.parse_args()
dir = inargs.dir
snum = inargs.snum
enum = inargs.enum
start = inargs.start
# number of times to submit
sub_times = inargs.stimes
# how many are done at a submission
aonce = inargs.aonce
# keep program flexable so we can use --snum --enum still if we want
if (enum == 0):
enum = aonce
arg_str = info_to_arg_str(dir)
print(arg_str)
for alpha in range(sub_times):
print(snum+alpha*aonce,enum+alpha*aonce)
for i in range(snum+alpha*aonce+start,enum+alpha*aonce+start):
# Tell again.py what file to work on (build a per-file argument string so
# the --file flags don't accumulate across iterations)
file_args = arg_str + " --file " + str(i) + "poindat.txt"
# write the script file so the arguments are in
# the python line of the script file. then this line
os.system("cp again.script again_temp.script")
edit_file = open("again_temp.script","a")
edit_file.write("python again.py " + file_args)
edit_file.close()
os.system("qsub again_temp.script")
os.remove("again_temp.script")
# now we are going to wait for all of the little blocks to be done and then aggregate the
# files into data.txt and poin.txt
done = False
cnt = 0
while (not done):
cnt += 1
thetime.sleep(10)
# are_we_done function checks to see if all the return files are back from qsub. if they
# are all there... We are done :)
done = are_we_done(enum-snum)
# kill it if we have been going for too long (means something is probably wrong)
if cnt>100000:
done = True
# for the are_we_done function to work again the next time through the loop we need to
# remove the return files
os.system("rm 3_cont_wh*")
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,014 |
14,096,082,679,242 |
dccb758b8b6f1f31353d035febad928d7f4b1f44
|
0e1f3e5f77c8bf91322d2536daf96ace1031cd04
|
/nodevers/use.py
|
d5e29891763789a282b7c7781cfb1bc6084a7ac9
|
[
"BSD-3-Clause"
] |
permissive
|
keremc/nodevers
|
https://github.com/keremc/nodevers
|
9b088fca420a61c6f41dad6bc244f63d69fb2cb7
|
8fbf21261bd0dff8956b1d903fda1016a146a76e
|
refs/heads/master
| 2016-07-28T16:13:10.207013 | 2013-09-10T11:36:39 | 2013-09-10T11:36:39 | 12,468,792 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
This is the module for the use command.
"""
__helpstr__ = """Usage: nodevers use <version> [options]
Summary:
Makes the specified Node version the default
Options:
-h/--help Print this text
"""
import os
import sys
import getopt
import nodevers.shared as shared
def parse(args):
"""
Parse the arguments and call the correct functions
based on them.
"""
if len(args) == 0 or args[0] in ("-h", "--help"):
shared.help_func(__helpstr__)
else:
try:
optlist, arglist = getopt.getopt(args[1:], "")
except getopt.error:
err = sys.exc_info()[1]
sys.stderr.write("Error: %s\n" % str(err))
sys.exit(-1)
try:
shared.link_to(args[0])
except shared.NoSuchVersionError:
err = sys.exc_info()[1]
sys.stderr.write("Error: %s\n" % str(err))
|
UTF-8
|
Python
| false | false | 2,013 |
3,951,369,958,852 |
8b6cd7efdb2453d82d5c0c33fc6f405bb37824e9
|
e4927412151d69c4937eecc5556e4a33ce4f1d60
|
/pyexplain/website/utils.py
|
1addaf6a1c5ab19f6fcb43371e20e2c2996c2070
|
[
"MIT"
] |
permissive
|
LucasMagnum/pyexplain
|
https://github.com/LucasMagnum/pyexplain
|
6d6a16724e02c9ecf9b6b5d8036efe3a4ecf61f8
|
df7f9ec3b392a88caf84be65157d1a642d8f3853
|
refs/heads/master
| 2021-01-01T18:33:49.181803 | 2013-10-01T12:46:29 | 2013-10-01T12:46:29 | 12,512,267 | 7 | 1 | null | false | 2013-09-25T18:06:45 | 2013-08-31T22:03:00 | 2013-09-25T18:06:45 | 2013-09-25T18:06:45 | 1,027 | null | 3 | 2 |
Python
| null | null |
# coding: utf-8
import json
class QuerySetEncoder(json.JSONEncoder):
"""
QuerySetEncoder should return JSON
containing the queryset and its properties
"""
def default(self, obj):
if hasattr(obj, 'queryset_dump'):
return obj.queryset_dump() if callable(obj.queryset_dump) else obj.queryset_dump
return json.JSONEncoder.default(self, obj)
queryset_dump = lambda queryset: json.dumps(queryset, cls=QuerySetEncoder)
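# Example usage (editor's sketch; `Item` is a hypothetical object exposing the
# `queryset_dump` hook the encoder looks for):
# class Item(object):
#     def queryset_dump(self):
#         return {'id': 1, 'name': 'example'}
# queryset_dump([Item()])  # -> '[{"id": 1, "name": "example"}]'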
|
UTF-8
|
Python
| false | false | 2,013 |
4,664,334,524,103 |
289023e80bf226c6afc16fca2a11e84b9d1de30e
|
97f746d503eb961905374ab61dd5202b14e70141
|
/0.0/ObjectStore.py
|
a39a5fdcaf09bbb8d7a3ee7e54c9d9c788d31915
|
[] |
no_license
|
johannesHen/pyDOOMS
|
https://github.com/johannesHen/pyDOOMS
|
4ac450c7e686ff026cbc5e083849dca404a0fe5a
|
9ea24189b96df904d10074ffd8f76c240405d91b
|
refs/heads/master
| 2021-01-01T15:35:12.715030 | 2013-12-16T11:21:53 | 2013-12-16T11:21:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class ObjectStore(object):
"""
Class containing a dictionary of all shared objects and methods to manipulate it.
Objects are indexed by their IDs.
"""
objects = dict()
def addObject(self, object):
"""
Adds the object to the object store dictionary
"""
self.objects[object.ID] = object
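# Example usage (editor's sketch; assumes shared objects expose an `ID`
# attribute, as addObject requires):
# store = ObjectStore()
# store.addObject(shared_obj)
# same_obj = store.objects[shared_obj.ID]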
|
UTF-8
|
Python
| false | false | 2,013 |
12,996,571,084,155 |
91a7c7cdfbae572cd9ffc250765496ec3f8c2962
|
09fec03161cc38fa1003ed608d9aaac7ef2573fc
|
/combine_data.py
|
e0f3e47880a5a7db14e859064d0eaf63917f181b
|
[
"GPL-3.0-only"
] |
non_permissive
|
cgibb/recommend324
|
https://github.com/cgibb/recommend324
|
24af8d97e0edbdb3a2730c8586cafa609db6f379
|
02181ea8f83f6c2be5ec3c4bc21d27b3d9df6ad9
|
refs/heads/master
| 2021-01-02T22:19:02.818162 | 2014-05-13T14:35:12 | 2014-05-13T14:35:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
with open("usersha1-artmbid-artname-plays.tsv", "r") as playcounts:
pcline = playcounts.readline().split() # buffer
with open("usersha1-profile.tsv", "r") as userinfo:
with open("profile-playcounts", "w+") as output:
for line in userinfo:
line = line.split()
cache = []
if line[1] not in ['m', 'f']:
line.insert(1, 'x')
cache.append(line[1]) # gender
if not line[2].isdigit():
line.insert(2, '21') # Roll with a default
cache.append(line[2]) # age
country = '"' + line[3]
i = 4
while (i < len(line) and line[i] not in months):
    country += line[i]
    i += 1
cache.append(country + '"')
cache.append('"' + "".join(line[i:]) + '"') # Date
cache = " ".join(cache) + " "
print line
print pcline
while (pcline[0] == line[0]): # uid matches
print pcline
meshed = [pcline[1]]
meshed.append("_".join(pcline[2:len(pcline)-1]))
meshed.append(pcline[len(pcline)-1])
output.write(cache + " ".join(meshed) + '\n')
pcline = playcounts.readline().split()
|
UTF-8
|
Python
| false | false | 2,014 |
11,321,533,806,756 |
7f825829bf55d4a36c0cdff6c0e44c3e189fe66f
|
7c32c5806c89ec84ee6703a72507fabb553336d5
|
/instagram/extract-all-data.py
|
57aa628de74b0c13e791c141a4316e592116ca6d
|
[] |
no_license
|
berkeley-food-recommendations/data-analysis
|
https://github.com/berkeley-food-recommendations/data-analysis
|
3974656cfd1fffd6645432179e1f0f567c29bf6d
|
b9c0fb116a394fa607c3fa0c689dbb8f82cf9b1b
|
refs/heads/master
| 2021-01-15T18:09:04.580088 | 2012-12-05T01:25:41 | 2012-12-05T01:25:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import json as json
import re
import gzip
output = open("insta_data.tsv", "w")
#unicode fix for chars outside ASCII range
reload(sys)
sys.setdefaultencoding("utf-8")
#run this as 'python extract-all-data.py data/*txt.gz'
for filename in sys.argv[1:]:
print "running...", filename
with gzip.open(filename, 'rb') as f:
insta_data = f.read()
# annoying hackiness b/c data actually isn't valid json
entry_indices = [m.start()+1 for m in re.finditer('}{', insta_data)]
# insert an entry for the first JSON
entry_indices.insert(0, 0)
# and a sentinel at the end of the data so the final JSON is not skipped
entry_indices.append(len(insta_data))
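# e.g. for '{"a":1}{"b":2}' the single '}{' match starts at offset 6, so
# entry_indices becomes [0, 7, 14] and the slices below are '{"a":1}' and '{"b":2}'.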
for i in range(len(entry_indices) - 1):
# iterate through actual JSONs of instagram data
insta_text = insta_data[entry_indices[i]:entry_indices[i+1]]
insta_json = json.loads(insta_text)
#output.write(insta_text + "\n")
data = insta_json.get('data')[0]
loc = data.get('location')
latitude = loc.get('latitude')
longitude = loc.get('longitude')
time = data.get('created_time')
image = data.get('images').get('standard_resolution').get('url')
caption = "None"
if data.get('caption'):
caption = data.get('caption').get("text")
photo_id = data.get('id')
user = data.get('user').get('username')
line = user + "\t" + image + "\t" + photo_id + "\t" + str(time) + "\t" + caption + "\t" + str(latitude) + "\t" + str(longitude) + "\n"
output.write(line)
|
UTF-8
|
Python
| false | false | 2,012 |
9,981,504,009,997 |
551b12a02ca86be8abe4321c32a6600420f98731
|
98b2a0ad3a5d64f5932d2f46bf48b3783f3d49af
|
/pycraft/models/server.py
|
dda907299e0ef8ab858e52e6718ee3a00cfad018
|
[] |
no_license
|
iit-acm/PyCraft
|
https://github.com/iit-acm/PyCraft
|
123840e9f2ef31948159ca8f12fbead533325200
|
fe305a6dd987585b84da59cfa793db7949263e98
|
refs/heads/master
| 2016-09-08T01:12:33.877608 | 2011-11-11T06:07:39 | 2011-11-11T06:07:39 | 2,406,037 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from persistent import Persistent
from persistent.list import PersistentList  # PersistentList lives in persistent.list
from threading import RLock
from subprocess import Popen, PIPE
from time import time, sleep  # used by acquire_lock()
"""
"""
class Server(Persistent):
def __init__(self, name, path):
self.__name__ = name
self.__acl__ = None
self.last_save = None
self.last_bkup = None
self.path = None
self.cmd_args = None
self.userlist = PersistentList()
#connection to the minecraft shell
#self._v_shell = None
#lock to allow our multithreaded app
#to block on multiple requests
#self.lock = RLock()
self.make_server(path)
#VOLATILE ATTRIBUTES
def lock(self):
if hasattr(self, '_v_lock'):
return self._v_lock
lock = RLock()  # RLock() takes no arguments
self._v_lock=lock
return lock
def proc(self):
if hasattr(self, '_v_proc'):
return self._v_proc
def connected_userlist(self):
if hasattr(self, '_v_conn_userlist'):
return self._v_conn_userlist
#END VOLATILE ATTRIBUTES
#LOCK METHODS
def acquire_lock(self, t=None):
    if not t:
        return self.lock().acquire()
    # poll with a non-blocking acquire until the timeout t runs out
    end = time() + t
    while not self.lock().acquire(False):
        if time() >= end:
            return False
        sleep(.3)
    return True
def release_lock(self):
self.lock().release()
#END LOCK METHODS
def make_server(self, path):
if self.acquire_lock():
try:
self.path = path
finally:
self.release_lock()
def start(self):
if self.acquire_lock():
try:
if not self.proc() and not self.world_locked():
self._v_proc = Popen(self.cmd_args, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=self.path)
#wait for server to start
#start a timer tick function that consumes PIPEs (very bad if not done)
else:
pass #return error
finally:
self.release_lock()
def stop(self):
    if self.acquire_lock():
        try:
            proc = self.proc()
            if proc:
                # communicate() would close the pipes after one call, so
                # console commands are written to stdin instead
                proc.stdin.write('say SERVER SHUTTING DOWN\n')
                self.save()
                #check for save complete
                proc.stdin.write('stop\n')
        finally:
            self.release_lock()
def force_stop(self):
    #force release lock/stop server
    proc = self.proc()
    if proc:
        proc.kill()
def save(self):
    if self.acquire_lock():
        try:
            proc = self.proc()
            if proc:
                proc.stdin.write('save-on\n')
                proc.stdin.write('save-all\n')
                #wait for save to complete
                proc.stdin.write('save-off\n')
        finally:
            self.release_lock()
def make_map(self):
if self.acquire_lock():
try:
pass
finally:
self.release_lock()
def backup(self):
if self.lock().acquire():
try:
pass
finally:
self.lock().release()
def tick(self):
if self.lock().acquire():
try:
#consume process pipes, check for errors
#use pipe data to fill userlist and log actions
pass
finally:
self.lock().release()
|
UTF-8
|
Python
| false | false | 2,011 |
8,778,913,175,645 |
b3039dbfda2ed275819d42ced12fb5afc95fa2e7
|
30f65b932815cd8cde7196a5890f469ff7c319ec
|
/ebetl/lib/etl/articolidat.py
|
7c9f96b10c881a7bd7248d084a54735ec7e83d3d
|
[
"Artistic-2.0"
] |
permissive
|
nomed/ebetl
|
https://github.com/nomed/ebetl
|
88a15f246a48e1063ddf0987750cc76ee98dfbce
|
1a7fa5d49b63445ca21206f1d46808af2452784c
|
refs/heads/master
| 2023-01-24T03:16:42.705991 | 2014-09-18T09:31:54 | 2014-09-18T09:31:54 | 12,807,236 | 0 | 0 |
Artistic-2.0
| false | 2023-01-08T08:26:56 | 2013-09-13T10:40:57 | 2014-09-04T15:53:11 | 2023-01-08T08:26:49 | 1,136 | 0 | 0 | 8 |
Python
| false | false |
# -*- coding: utf-8 -*-
import sys, os, traceback, optparse
import time
import re
import csv
#from pexpect import run, spawn
"""
GruppiPosST
POS group codes separated by pipes, e.g. ;GruppiPosST=|1|3|;
FidelityST
Fidelity group codes separated by pipes, e.g. ;FidelityST=|0|1|3|;
|0| stands for customers without a fidelity card
CodicePromo
Grouping promotion code, alphanumeric, 20 chars, e.g. ;CodicePromo=P478;
QtaPezziMin
Minimum number of pieces to purchase, e.g. ;QtaPezziMin=3;
ScontoVal (2 decimals)
discount of 1.50, e.g. ;ScontoVal=150;
ScontoPer (2 decimals)
QtaPaghi
used for MxN deals, e.g. 3x2: ;QtaPaghi=2;
Punti
Extra points granted by the promo, e.g. ;Punti=1;
Promotions are attached to already existing promotions based on CodicePromo.
If there is no grouping promo code, check whether a promotion header that is
not currently running already exists with the same dates and TipoPromo.
Fields that are not filled in fall back to the defaults configured on the
"promotion type". If other parameters need to be handled, just let me know.
Example record layout:
T01SCOPER 2006073120060813Offerta Sconto 15% ScontoPer=15;CodicePromo=2006073115;
RP9000019180015 01
T01TGLPRZ 2006073120060813Offerta Taglio Prezzo
RP9000019400014 01000229
RP9000019410013 01000229
RP9000019450019 01000099
"""
from babel.numbers import format_currency
def _ensure_unicode(s):
if isinstance(s, unicode):
return s
else:
return unicode(s,'utf-8','replace')
record = [
( 0, 13, 'ean'),
( 13, 33, 'codice_interno'),
( 33, 34, 'fisso01'),
( 34, 74, 'descrizione'),
( 74, 75, 'fisso02'),
( 75, 79, 'codice_reparto'),
( 79, 81, 'codice_iva'),
( 81, 85, 'valore_iva'), #x100
( 85, 87, 'udm'),
( 87, 94, 'qty_contenuto'), #x1000
( 94, 96, 'udm_visualizzazione'),
( 96, 103, 'qty_conf'), #x1000
(103, 110, 'qty_collo'), #x1000 (imballo)
(110, 111, 'fisso03'),
(111, 117, 'prezzo'), #x100
(117, 123, 'prezzo_2'),
(123, 129, 'prezzo_3'),
(129, 135, 'prezzo_4'),
(135, 143, 'data_variazione'), #AAAAMMGG
(143, 153, 'codice_cat1'),
(153, 163, 'codice_cat2'),
(163, 164, 'flag'),
(164, 171, 'uso_futuro'),
(171, 172, 'fisso04'),
(172, 180, 'costo'), #x1000 (netto iva)
(180, 198, 'uso_futuro2'),
(198, 203, 'codice_plu'),
(203, 208, 'codice_bil'),
]
record_dict = {}
for r in record:
record_dict[r[2]]=r[1]-r[0]
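# Editor's sketch: the fixed-width `record` spec above can be applied to one
# line of Articoli.dat to recover a field dict (assumes the line is padded to
# its full record width):
def parse_articolo(line):
    """Slice one fixed-width Articoli.dat line into a field dict."""
    return dict((name, line[start:end]) for start, end, name in record)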
# ;GruppiPosST=|3|
# ;FidelityST
# ;QtaPezziMin
# ;ScontoVal
# ;ScontoPer
# ;QtaPaghi
# ;Punti
record_offerte_t = [
(0,1,'testata'),
(1,2, 'set_molteplicita'),
(2,23, 'codicepromozione'),
(23,31, 'data_inizio'),
(31,39, 'data_fine'),
(39,89, 'descrizione'),
]
record_offerte_r = [
(0,1,'testata'),
(1,2, 'tipo_prodotto'), # E-P-R-T-A
(2,22, 'codice'),#
(23,24, 'set_molteplicita'),
(24,30, 'valore'),# price-cut value x100
(30,38, 'descrizione'),# x10000
]
"""
class Ind(object):
ean_file_name = 'LEG_EAN.csv'
plu_file_name = 'ART_ANAG.csv'
def __init__(self, path_input=None, *args, **kw):
self.path_input = os.path.join(path_input, 'anag', 'ind')
def _get_plu_file_path(self):
return os.path.join(self.path_input, plu_file_name)
plu_file_path = property(_get_plu_file_path)
def _get_ean_file_path(self):
return os.path.join(self.path_input, ean_file_name)
ean_file_path = property(_get_ean_file_path)
def get_div100(self, s):
return round(float(s)/100,2)
def get_int(self,s):
return round(int(s),2)
def _get_ean(self):
ean = property(_get_ean, _set_ean)
def export(self, *args, **kw):
pass
"""
class ToDat(object):
ean_file_name = 'LEG_EAN.csv'
plu_file_name = 'TEan.csv'
prefix = '_todat'
def __init__(self, path_input=None, *args, **kw):
self.path_input = os.path.join(path_input, 'input', self.prefix)
def _get_plu_file_path(self):
return os.path.join(self.path_input, self.plu_file_name)
plu_file_path = property(_get_plu_file_path)
def _out_plu_file_path(self):
return os.path.join(self.path_input, 'Articoli.dat')
out_file_path = property(_out_plu_file_path)
def _get_ean_file_path(self):
return os.path.join(self.path_input, self.ean_file_name)
ean_file_path = property(_get_ean_file_path)
def get_div100(self, s):
return round(float(s)/100,2)
def get_int(self,s):
return round(int(s),2)
def read(self):
spamreader = csv.reader(open(self.plu_file_path), delimiter=';', quotechar='"')
ret = [r for r in spamreader]
return ret
def convert_row(self, row):
pass
def write(self):
pass
class Gamba(ToDat):
ean_file_name = 'LEG_EAN.csv'
plu_file_name = 'TEan.csv'
prefix = 'gamba'
codice_interno = 1
ean = 4
descrizione = 3
prezzo = 15
codice_iva = 6
codice_reparto = 5
def convert_item(self, key, val):
if key == 'prezzo':
val = int(float(val)*100)
return str(val)
def convert_row(self, row):
ret = {}
for key, rec in record_dict.iteritems():
ch = ' '*rec
if key in ['fisso01', 'fisso02']:
ch = '-'
elif key in ['fisso03','fisso04']:
ch = '*'
if hasattr(self, key):
ch = self.convert_item(key, row[getattr(self, key)-1])
if len(ch) < rec:
ch = ' '*(rec-len(ch))+ch
ret[key]=ch[0:rec]
return ret
def write(self):
lines = self.read()
f = open(self.out_file_path,'ab')
for line in lines[1:]:
row = self.convert_row(line)
art = ''
for i in record:
#print art
art = art + row.get(i[2])
#print i , [row.get(i[2])]
print >> f, art
f.close()
def main ():
global options, args
# TODO: Do something more interesting here...
print 'Hello world!'
if __name__ == '__main__':
try:
start_time = time.time()
parser = optparse.OptionParser(formatter=optparse.TitledHelpFormatter(), usage=globals()['__doc__'], version='$Id$')
parser.add_option ('-v', '--verbose', action='store_true', default=False, help='verbose output')
parser.add_option ('-g', '--gamba', action='store_true', default=False, help='gamba')
(options, args) = parser.parse_args()
print args
if len(args) < 1:
parser.error ('missing argument')
source = args[0]
if options.verbose: print time.asctime()
main()
if options.gamba:
obj = Gamba(source)
print obj.write()
if options.verbose: print time.asctime()
if options.verbose: print 'TOTAL TIME IN MINUTES:',
if options.verbose: print (time.time() - start_time) / 60.0
sys.exit(0)
except KeyboardInterrupt, e: # Ctrl-C
raise e
except SystemExit, e: # sys.exit()
raise e
except Exception, e:
print 'ERROR, UNEXPECTED EXCEPTION'
print str(e)
traceback.print_exc()
os._exit(1)
"""
import csv
tracciato= csv.reader(open('fatmicro.csv'), delimiter=';', quoting=csv.QUOTE_ALL)
for i in tracciato:
print "(%s,%s,'%s'),"%(int(i[3])-1,int(i[4]),i[0] )
"""
|
UTF-8
|
Python
| false | false | 2,014 |
7,421,703,507,939 |
fe304b20debc9cbcb79dec1a86bfab11e7580e6c
|
a9131e1de4ebe106b64c65414be52a7d5bb7a118
|
/api/tests/test_book.py
|
820503ba2adcce7839d7e3e3f1cab5983b5e9c20
|
[] |
no_license
|
gdut-library/mecury
|
https://github.com/gdut-library/mecury
|
5d4e727a8bf4f99f17034d29dd17126b03846833
|
6047a09626f3ae4a2d2eff3ca9d8c09fff097763
|
refs/heads/master
| 2016-09-06T16:25:57.306051 | 2013-10-17T13:35:36 | 2013-10-17T13:35:36 | 12,855,961 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#coding: utf-8
import unittest
from api.book import Book, LibraryNotFoundError
class BookTestCase(unittest.TestCase):
def setUp(self):
self.book = Book()
def testGet(self):
book_info = self.book.get(574811)
self.assertEqual(book_info['name'], u'Computer systems[monograph]'
u'=深入理解计算机系统 :a programmer\'s'
u' perspective :Second edition')
self.assertEqual(book_info['isbn'], '9787111326311')
self.assertEqual(book_info['ctrlno'], '574811')
self.assertEqual(len(book_info['locations']), 2)
self.assertRaises(LibraryNotFoundError, self.book.get, 'not_a_book')
def testSearch(self):
books_info = self.book.search(u'计算机', verbose=True, limit=13)
self.assertEqual(len(books_info), 13)
self.assertRaises(LibraryNotFoundError, self.book.search,
'gdut library wrapper')
def testSearchUnicode(self):
self.assertRaises(LibraryNotFoundError, self.book.search,
u'ノ・ゾ・キ・ア・ナ')
|
UTF-8
|
Python
| false | false | 2,013 |
6,811,818,168,996 |
6c6498d47a39e3a3dd78a69ccf09fdba001f31e8
|
7e1079b46b08bbe60a66e105c73bb9ab10397743
|
/src/ibm/teal/connector/tlmlxtraphandler.py
|
e4af5e2a86943340c3fbf80e7b0243d604ee7adb
|
[] |
no_license
|
ppjsand/pyteal
|
https://github.com/ppjsand/pyteal
|
f810697e59ecb393e3d7c3b9eb69b5150f7f7f70
|
eba6c1489b503fdcf040a126942643b355867bcd
|
refs/heads/master
| 2020-05-17T22:44:18.135207 | 2012-08-01T14:38:56 | 2012-08-05T02:02:56 | 4,961,237 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
import time
import os
import socket
import re
import commands
from ibm.teal import teal
from ibm.teal import registry
from ibm.teal import event
from ibm.teal.database import db_interface
from ibm.teal.monitor import teal_semaphore
now_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
MLX_TEAL_COLS = (event.EVENT_ATTR_EVENT_ID,
event.EVENT_ATTR_TIME_OCCURRED,
event.EVENT_ATTR_SRC_COMP,
event.EVENT_ATTR_SRC_LOC_TYPE,
event.EVENT_ATTR_SRC_LOC,
event.EVENT_ATTR_RPT_COMP,
event.EVENT_ATTR_RPT_LOC_TYPE,
event.EVENT_ATTR_RPT_LOC,
event.EVENT_ATTR_RAW_DATA,
event.EVENT_ATTR_RAW_DATA_FMT
)
MLX_EVENTS_IDS = {"asicChipDown":"MX000001",
"asicOverTempReset":"MX000002",
"asicOverTemp":"MX000003",
"lowPower":"MX000004",
"bxAsicChipDown":"MX010001",
"bxAsicOverTempReset":"MX010002",
"bxAsicOverTemp":"MX010003",
"ibSMup":"MX020001",
"ibSMdown":"MX020002",
"ibSMrestart":"MX020003",
"internalBusError":"MX030001",
"procCrash":"MX030002",
"cpuUtilHigh":"MX030003",
"procUnexpectedExit":"MX030004",
"unexpectedShutdown":"MX030005",
"diskSpaceLow":"MX030006",
"systemHealthStatus":"MX030007",
"lowPowerRecover":"MX030008",
"insufficientFans":"MX030009",
"insufficientFansRecover":"MX030010",
"mlxIBCAHealthStatusChange":"MX040001",
"mlxIBCAInsertion":"MX040002",
"mlxIBCARemoval":"MX040003",
"mlxIBSwitchInsertion":"MX040004",
"mlxIBSwitchRemoval":"MX040005",
"mlxIBRouterInsertion":"MX040006",
"mlxIBRouterRemoval":"MX040007",
"mlxIBPortStateChange":"MX040008",
"mlxIBPortPhysicalStateChange":"MX040009",
"mlxIBPortInsertion":"MX040010",
"mlxIBPortRemoval":"MX040011"
}
mlx_eventid = lambda x: MLX_EVENTS_IDS.get(x)
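# e.g. mlx_eventid('ibSMdown') -> 'MX020002'; unrecognized trap names return
# None and are mapped to the catch-all event id "MX03FFFF" in translate_trap().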
# XCAT Table definitions
XCAT_NODE = 'nodelist'
XCAT_NODE_KEY = 'groups'
XCAT_NODE_VALUE = "'%ufm%'"
XCAT_PWD = 'passwd'
XCAT_PWD_KEY = 'key'
def translate_trap(ip,trap_oid):
src_loc_type = 'D'
src_comp = 'IB'
rpt_comp = 'TEAL'
rpt_loc_type = 'A'
raw_data = ''
raw_data_fmt = 0
src_loc = ''
teal_event = None
rpt_loc = os.path.basename(sys.argv[0]) + '##' + str(os.getpid())
try:
    switch_name = socket.gethostbyaddr(ip)[0]
except socket.herror:
    # reverse DNS lookup failed; fall back to asking the switch via SNMP
    switch_name = None
if not switch_name:
command = '/usr/bin/snmpget -v 2c -c public ' + ip + ' MELLANOX-MIB::nodeName.0'
registry.get_logger().debug('command to retrieve switch name: {0}'.format(command))
(status, output) = commands.getstatusoutput(command)
if status == 0 :
if re.match('.*STRING',output):
src_loc = output.rsplit('"',2)[1]
else:
src_loc = ip
else:
registry.get_logger().error('Retrieve switch name failed, use ip address instead.')
src_loc = ip
else:
src_loc = switch_name
command = '/usr/bin/snmptranslate -Td MELLANOX-MIB::' + trap_oid
registry.get_logger().debug('command to retrieve trap name: {0}'.format(command))
(status, output) = commands.getstatusoutput(command)
if status == 0 :
raw_data = output.split('"')[1].replace('\n ','')
else:
registry.get_logger().error('Retrieve trap description failed, use trap name instead.')
raw_data = trap_oid
time_occurred = now_time
event_id = mlx_eventid(trap_oid.split('.')[0])
if not event_id:
msg = "Unrecognized event!"
registry.get_logger().warn(msg)
event_id = "MX03FFFF"
teal_event = (event_id, time_occurred, src_comp, src_loc_type, src_loc, rpt_comp, rpt_loc_type, rpt_loc, raw_data, raw_data_fmt)
msg = "Common data:: event_id: %s, time_occurred: %s, src_comp: %s, src_loc_type: %s, src_loc: %s, rpt_comp: %s, rpt_loc_type: %s, rpt_loc: %s, raw_data: %s, raw_data_fmt: %d" % teal_event
registry.get_logger().debug(msg)
return teal_event
def log_traps(event):
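    '''Insert the translated event into the TEAL event log and post the semaphore so TEAL picks it up.'''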
event_logged = False
db = registry.get_service(registry.SERVICE_DB_INTERFACE)
cnxn = db.get_connection()
teal_cursor = cnxn.cursor()
try:
db.insert(teal_cursor, MLX_TEAL_COLS, db_interface.TABLE_EVENT_LOG, event)
cnxn.commit()
msg = "Logged an event: event_id: %s, time_occurred: %s, src_comp: %s, src_loc_type: %s, src_loc: %s, rpt_comp: %s, rpt_loc_type: %s, rpt_loc: %s, raw_data: %s, raw_data_fmt: %d" % teal_event
registry.get_logger().info(msg)
# Notify TEAL that events have been inserted
notifier = teal_semaphore.Semaphore()
if notifier:
notifier.post()
else:
registry.get_logger().warn('TEAL notifier not configured.')
except:
# Don't attempt to commit anything since we had an error processing the events
registry.get_logger().exception("Error processing new events")
cnxn.rollback()
cnxn.close()
##########MAIN##########
if __name__ == '__main__':
log_file = '$TEAL_LOG_DIR/tlmlxtraphandler.log'
try:
# Set up the TEAL environment to get at the data required for logging
t = teal.Teal(None,
data_only=True,
msgLevel='warn',
logFile=log_file,
daemon_mode=False)
switch_ip = ''
sys_up_time = ''
trap_oid = ''
trap_vars = {}
for line in sys.stdin.readlines():
# Filter out this redundant line firstly
if re.match('.*snmpTrapEnterprise',line):
continue
if re.match('.*ip=',line):
switch_ip = line.split('[')[1].split(']')[0]
continue
if re.match('.*snmpTrapOID',line):
trap_oid = line.split('::')[2].split('.')[0].strip('\n')
continue
if re.match('.*mlx',line):
vars = line.split('=')
value = vars[1].strip('\n')
key = vars[0].split('::')[1].split('.')[0]
trap_vars[key] = value
continue
msg = "trap received: switch_ip = {0}, trap_oid = {1},vars = {2}".format(switch_ip,trap_oid,trap_vars)
registry.get_logger().debug(msg)
teal_event = translate_trap(switch_ip,trap_oid)
log_traps(teal_event)
except:
registry.get_logger().exception("Teal Mellanox switch trap handler failed")
sys.exit(1)
|
UTF-8
|
Python
| false | false | 2,012 |
5,360,119,187,755 |
7311133d230e63e4cdcd1602929fab8d1c659bd3
|
87725926135928ed0a7c9fef351f9282d1ac7cbe
|
/app.py
|
eb130faf48eb9e7539248c6b5783b8d760efd7a4
|
[
"MIT"
] |
permissive
|
mr-z-ro/salvavida
|
https://github.com/mr-z-ro/salvavida
|
de9a97a7c918f1e65badb586c44d7413a2336030
|
84c222db72501d90710d30e61f91c18cb5a75973
|
refs/heads/master
| 2021-05-28T06:10:35.986310 | 2013-11-16T02:15:53 | 2013-11-16T02:15:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
from datetime import datetime
from flask import Flask, jsonify, Markup, render_template, request
from pygeocoder import Geocoder
from salvavida.database import db_session
from salvavida.svmodels import Feed
app = Flask(__name__)
@app.route("/")
def index():
try:
feeds = [i.serialize for i in Feed.query.filter(
Feed.state=='open').all()]
finally:
db_session.remove()
return render_template('map.html', feeds=Markup(feeds))
@app.route("/sos", methods=["POST"])
def sos():
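    '''Open a new case at the reported coordinates unless an identical open case already exists.'''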
data = json.loads(request.data)
name = data.get('name').upper()
lat = data.get('lat')
lng = data.get('lng')
description = data.get('description')
feed = Feed.query.filter(Feed.name==name, Feed.lat==lat, Feed.lng==lng,
Feed.state=='open').first()
result = None
if not feed:
try:
address = Geocoder.reverse_geocode(float(lat), float(lng))[0]
new_feed = Feed(name=name, lat=lat, lng=lng, address=address,
description=description)
db_session.add(new_feed)
db_session.commit()
result = {
'id': new_feed.id,
'lat': new_feed.lat,
'lng': new_feed.lng,
'createdAt':
new_feed.created_at.strftime("%Y-%m-%dT%H:%M:%SZ"),
'name': new_feed.name,
'description': new_feed.description,
'state': new_feed.state
}
except:
result = { 'error_msg': 'DB Error' }
finally:
db_session.remove()
else:
result = {
'error_msg': 'Entry exists.'
}
return jsonify(result)
@app.route("/rescue", methods=["POST"])
def rescue():
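    '''Close the case with the given id and stamp its last-modified time.'''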
data = json.loads(request.data)
result = None
try:
id = int(data.get('id'))
feed = Feed.query.filter(Feed.id==id).first()
if feed:
feed.state = 'closed'
feed.last_modified = datetime.now()
db_session.merge(feed)
db_session.commit()
result = {
'id': feed.id,
'lat': feed.lat,
'lng': feed.lng,
'lastModified':
feed.last_modified.strftime("%Y-%m-%dT%H:%M:%SZ"),
'name': feed.name,
'description': feed.description,
'state': feed.state
}
else:
result = { 'error_msg': 'Entry does not exist.' }
except ValueError:
result = { 'error_msg': 'Invalid ID format %s' % (data.get('id')) }
except:
result = { 'error_msg': 'Internal error.' }
finally:
db_session.remove()
return jsonify(result)
@app.route("/cases", methods=["GET"])
def cases():
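    '''List cases created or modified since the given timestamp, optionally filtered by state.'''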
ts = datetime.strptime(request.args.get('since'), '%Y-%m-%dT%H:%M:%SZ')
state = request.args.get('state')
try:
if state == 'any':
feeds = Feed.query.filter(Feed.created_at>=ts).all()
else:
feeds = Feed.query.filter(Feed.state==state,
Feed.last_modified>=ts).all()
finally:
db_session.remove()
result = []
if feeds:
for feed in feeds:
result.append({
'id': feed.id,
'lat': feed.lat,
'lng': feed.lng,
'createdAt': feed.created_at.strftime("%Y-%m-%dT%H:%M:%SZ"),
'lastModified':
feed.last_modified.strftime("%Y-%m-%dT%H:%M:%SZ"),
'name': feed.name,
'description': feed.description,
'state': feed.state
})
response = {'results': result}
return jsonify(response)
if __name__ == '__main__':
app.debug = True
app.run()
|
UTF-8
|
Python
| false | false | 2,013 |
9,689,446,266,931 |
ccc62f85636bc913283848a733af75839661c53a
|
02a5bb95ededc67e491e5da4ba018d4219b2d8f2
|
/PythonServer/accounts/admin.py
|
4c1e7f5032ff5d867b84da0a3d866b684c426259
|
[] |
no_license
|
dovanduy/MeepPythonServer
|
https://github.com/dovanduy/MeepPythonServer
|
c43141ca9c0c7fa8eeade0a8cbfab7b262697879
|
ef268b28a0a0aec22850249b34bb1e2c4dfef04c
|
refs/heads/master
| 2020-05-05T06:10:25.522164 | 2014-11-25T05:28:32 | 2014-11-25T05:28:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from models import *
admin.site.register(Account)
admin.site.register(AccountLink)
admin.site.register(AccountSettings)
admin.site.register(AccountSetting)
admin.site.register(AccountDeviceID)
admin.site.register(FacebookProfile)
admin.site.register(VenmoProfile)
admin.site.register(Group)
admin.site.register(UserLocation)
|
UTF-8
|
Python
| false | false | 2,014 |
9,706,626,097,699 |
2a0ed9b011bfcf7d5bb74ec3c158ca2bcb5fd4d2
|
ad239372a355fb7c14b54babb0c5906495586594
|
/is tel/angle.py
|
9e2b9d7dcdb1d9f32a4f1f7944b5ffaacccadaef
|
[] |
no_license
|
masslow/testapp
|
https://github.com/masslow/testapp
|
4277c4c0acd81ec328f02e87677eb151f6f1f861
|
3e0b084203292db9250527964a4c4ae5002de521
|
refs/heads/master
| 2021-01-22T13:51:49.131974 | 2014-08-22T15:41:59 | 2014-08-22T15:41:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Modules required by the program:
import math
import os
# Calculates the direction and distance to the target:
class Calculate(object):
def location(self, l, t):
return (l - t)
def angle(self, lx, ly, tx, ty):
if (lx - tx) > 0:
return ((math.acos( (ly-ty) / (((lx-tx)**2)+((ly-ty)**2))**0.5 )) * (3200 / math.pi))
elif (lx - tx) < 0:
return (3200 - ((math.acos( (ly - ty) / (((lx - tx)**2)+((ly - ty)**2))**0.5 )) * (3200 / math.pi)) + 3200)
else:
return ((math.acos( (ly-ty) / (((lx-tx)**2)+((ly-ty)**2))**0.5 )) * (3200 / math.pi))
def magnitude(self, difx, dify):
return 10*float(((difx**2)+(dify**2))**0.5)
# This class sets the observers' position and their target:
class SeekersNest(object):
def __init__(self, x, y, vector, magnitude):
self.x = x
self.y = y
self.vector = vector
self.magnitude = magnitude
def tarx(self, x, y, vector, magnitude):
if vector == 6400 or vector == 3200:
return self.x
else:
return (magnitude * math.sin(vector * (math.pi / 3200)))
def tary(self, x, y, vector, magnitude):
if vector == 1600 or vector == 4800:
return self.y
else:
return (magnitude * math.cos(vector * (math.pi / 3200)))
def changevector(self):
        self.vector = float(raw_input("Direction: "))
def changemagnitude(self):
        self.magnitude = float(raw_input("Distance: "))
# Program:
os.system('clear')
calc = Calculate()
print calc.angle(1,2,1,1)
|
UTF-8
|
Python
| false | false | 2,014 |
4,071,629,035,743 |
09bdc11c9513780617f3e43070e2ad8d59245da1
|
b3033661ad342809019e54a33cc56afddac613f7
|
/vrc6brat-conv/convert.py
|
99b1a7a7d59a55851d2d9bc7b5d1c07006dabd9e
|
[] |
no_license
|
mRB0/nintendont
|
https://github.com/mRB0/nintendont
|
ab075ee5962bb02ab696bd82aa99487148107b74
|
009e466a114930a19f8e1dfa183a55949e2bf88e
|
refs/heads/master
| 2020-04-27T09:20:22.376573 | 2012-07-12T00:24:32 | 2012-07-12T00:24:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
import pyIT
import logging
import collections
import math
import itertools
logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(levelname)-7s %(message)s")
class ColState(object):
attrs = 'hz vol sample enable vibenabled vibspeed vibpos vibdepth trig'.split()
def __init__(self, *vals):
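        # Bind positional values to the named attributes listed in ColState.attrs.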
for attr, val in zip(ColState.attrs, vals):
setattr(self, attr, val)
def _replace(self, **kwargs):
for (attr, val) in kwargs.items():
if attr not in ColState.attrs:
raise ValueError("tried to set nonexistent attr %s", attr)
setattr(self, attr, val)
outfile_fmt='fancysong.%(extension)s'
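# Converts an IT module into C source (PROGMEM song data, samples and header) for AVR playback.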
def convert(inpath):
itfile = pyIT.ITfile()
itfile.open(inpath)
sample_map = {}
outraw = []
states = [ColState(1000, 0, 0, False, False, 0, 0, 0, False) for i in xrange(4)]
speed = itfile.IS
lastout = None
for (ptn_idx, order_num) in zip(itfile.Orders, itertools.count()):
if ptn_idx == 255:
break
if ptn_idx == 254:
continue
ptn = itfile.Patterns[ptn_idx]
for (row, row_num) in zip(ptn.Rows, itertools.count()):
logging.info(' | '.join([str(note) for note in row[:4]]))
for col in xrange(4):
note = row[col]
state = states[col]
if note.Note >= 253:
state.trig = True
state.enable = False
state.vibenabled = False
if note.Note is not None and note.Note <= 119:
if col < 3:
state.hz = pow(2, float(note.Note - 57) / 12) * 220
else:
c5speed = itfile.Samples[note.Instrument - 1].C5Speed
state.hz = pow(2, float(note.Note - 60) / 12) * c5speed
state.vol = 15 if col < 3 else 6
state.enable = True
state.trig = True
if note.Volume is not None and note.Volume <= 64:
if col < 3:
state.vol = min(note.Volume / 4, 15)
else:
state.vol = min(int(note.Volume / 9.14), 6)
if note.Instrument is not None:
if (col >= 3 or
(col < 3 and note.Instrument >= 8 and note.Instrument <= 15)):
state.sample = note.Instrument
# process commands
if note.Effect == 1 and note.EffectArg:
speed = note.EffectArg
if note.Effect == 8:
vibdepth = (0xff & note.EffectArg) & 7
vibspeed = (0xff & note.EffectArg) >> 4
if vibdepth:
state._replace(vibdepth=vibdepth)
if vibspeed:
state._replace(vibspeed=vibspeed)
if not state.vibenabled:
state._replace(vibenabled=True)
state._replace(vibpos=0)
else:
state._replace(vibenabled=False)
# output states for each tick
for tick in xrange(speed):
thisout = []
for col in xrange(4):
state = states[col]
hz = float(state.hz)
if state.vibenabled:
hz = int(hz / pow(2, state.vibdepth * math.sin(state.vibpos * (2 * math.pi) / 256) / (12 * 6)))
state._replace(vibpos=state.vibpos + state.vibspeed * 3)
if col < 2:
per = int(2048000.0 / ((hz + 1) * 16))
smp = ((state.sample - 8) << 4)
elif col < 3:
per = int(2048000.0 / ((hz + 1) * 14))
smp = 0
if col < 3:
thisout.append(smp | state.vol)
thisout.append(per & 0xff)
thisout.append((per >> 8) | ((1 if state.enable else 0) << 7))
elif col == 3:
trig = False
if state.trig:
if state.enable:
trig = True
if state.sample in sample_map:
smp_num = sample_map[state.sample]
else:
smp_num = len(sample_map.values())
sample_map[state.sample] = smp_num
thisout.append(smp_num)
else:
thisout.append(0xfe)
state.trig = False
else:
thisout.append(0xff)
# calculate optimal interrupt period and sample stride
min_intr_period = 0x600 # ticks
cpu_speed = 16000000 # Hz
for stride in itertools.count(1): # play every (stride)-th sample from data
intr_period = int(float(cpu_speed) / (hz / stride))
if intr_period >= min_intr_period:
break
if trig:
logging.info("%d:%d sample=%d period=%d stride=%d vol=%d", order_num, row_num, smp_num, intr_period, stride, state.vol)
thisout.append(intr_period)
thisout.append(stride)
thisout.append(state.vol)
sizes = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1]
if not lastout:
# first frame, output everything
writes = []
for (i, size) in zip(xrange(len(thisout)), sizes):
for b in xrange(size):
byte = 0xff & (thisout[i] >> ((size - b - 1) * 8))
writes.append(byte)
outraw.append([0xff, 0xf8] + writes)
else:
diff = 0
writes = []
for i in xrange(len(thisout)):
if thisout[i] != lastout[i]:
diff |= 1 << (15 - i)
writes.append((diff >> 8) & 0xff)
writes.append(diff & 0xff)
for (i, size) in zip(xrange(len(thisout)), sizes):
if diff & (1 << (15 - i)):
for b in xrange(size):
byte = 0xff & (thisout[i] >> ((size - b - 1) * 8))
writes.append(byte)
outraw.append(writes)
lastout = thisout
cfile = open(outfile_fmt %{'extension': 'cpp'}, 'w')
cfile.write('#include "fancysong.h"\n')
cfile.write('#include <stdint.h>\n')
cfile.write('#include <avr/pgmspace.h>\n')
cfile.write('\n')
fancysong_data = ',\n'.join(['\t' + ', '.join(['0x%02X' %(0xff & i) for i in e]) for e in outraw])
cfile.write('prog_uint8_t fancysong[] PROGMEM = {\n%s\n};\n\n' %(fancysong_data,))
cfile.write('prog_uint32_t fancysong_len = %d;\n\n' %(sum(map(len, outraw)),))
if sample_map:
i = 0
sample_info = {}
for (it_smp, c_smp) in sorted(sample_map.iteritems(), key=lambda x: x[1]):
itSample = itfile.Samples[it_smp-1]
data = itSample.SampleData
altered_data = ''.join([chr((0xff & (128 + ord(b))) >> 2) for b in data])
while '!!!' in altered_data: # work around arduino bootloader bug
altered_data = altered_data.replace('!!!', '!"!')
cdata = 'static prog_uint8_t sample_%d[] PROGMEM = { %s };\n' %(c_smp, ', '.join(['0x%02X' %(0xff & ord(s)) for s in altered_data]),)
cfile.write(cdata)
sample_info[c_smp] = '{ %s, %d, %d, %d, %d }' %('sample_%d' %(c_smp,),
len(altered_data),
1 if itSample.IsLooped else 0,
itSample.LoopBegin,
itSample.LoopEnd)
i += 1
sample_info_list = ',\n'.join([' ' + sample_info[i]
for i
in sorted(sample_info.keys())])
cfile.write('struct sample_info samples[] = {\n%s\n};\n' %sample_info_list)
cfile.close()
hfile = open(outfile_fmt %{'extension': 'h'}, 'w')
hfile.write('''\
#ifndef FANCYSONG_H_
#define FANCYSONG_H_
#include <stdint.h>
#include <avr/pgmspace.h>
struct sample_info {
prog_uint8_t *p_smp;
uint16_t len;
uint8_t loop_en;
uint16_t loop_start;
uint16_t loop_end;
};
extern prog_uint8_t fancysong[] PROGMEM;
extern prog_uint32_t fancysong_len;
extern struct sample_info samples[];
#endif
''')
hfile.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
print "need arg"
else:
convert(sys.argv[1])
|
UTF-8
|
Python
| false | false | 2,012 |
9,706,626,125,603 |
ae4c1c2c59c929136c7ee1fc8bcc631c44e5b7a5
|
0a688dca709cb80a4e096a7920a1748614a1e047
|
/src/Gdv/SConscript
|
6923582d1ac22a64810a9fa473a20b70f1d1448d
|
[
"MIT"
] |
permissive
|
mdk/diva
|
https://github.com/mdk/diva
|
63b6906ccbefcf4cee93816036f31a3eb2a40bb2
|
f58841d8b539d925a95d78d33427b61a2665ff33
|
refs/heads/master
| 2021-01-01T20:16:23.848457 | 2009-05-15T14:27:14 | 2009-06-30T07:26:21 | 126,696 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os;
Import ('CEnv');
Import ('monoEnv');
Import ('shellEnv');
# C ############################################################################
localEnvC = CEnv.Clone ();
localEnvC.Append (CPPPATH='.',
CCFLAGS = Split ('-DG_LOG_DOMAIN=\\\"Gdv\\\"'),
LIBS = Split ('gstinterfaces-0.10'));
sourcesC = Split ('GdvUtil.c \
GdvMain.c \
GdvFrameDimensions.c \
GdvColor.c \
GdvGlue.c \
GdvTime.c \
GdvTimeSpan.c \
GdvSource.c \
GdvErrors.c \
GdvInspector.c \
GdvDecodeBin.c \
GdvVideoFormat.c \
GdvClip.c \
GdvTimeableInterface.c \
GdvMediaItem.c \
GdvAudioItem.c \
GdvAVItem.c \
GdvItemGenerator.c \
GdvFrameRunner.c \
GdvToken.c \
GdvJob.c \
GdvClipGate.c \
GdvSheep.c \
GdvSheepSource.c \
GdvImageSheep.c \
GdvBlackSheep.c \
GdvSilentSheep.c \
GdvFrameRunnerJobData.c \
GdvJobProcessor.c \
GdvJobQueue.c \
GdvFrameableInterface.c \
GdvThumbnailableInterface.c \
GdvAudioFormat.c \
GdvPixbufSink.c \
GdvExporter.c \
GdvPipeline.c \
GdvTwinCompSink.c \
GdvTwinViewBin.c \
GdvSingleViewBin.c \
GdvPhotoItem.c \
GdvBufferUtil.c \
GdvCorrector.c \
GdvCompSink.c \
GdvAudioCompSink.c \
GdvVideoCompSink.c \
GdvVideoBoy.c \
GdvAudioBoy.c \
GdvClipStore.c \
GdvTrack.c \
GdvSourcePad.c \
GdvVideoBuffer.c \
GdvAudioBuffer.c \
GdvFraction.c \
GdvProjectFormat.c \
GdvFileBasedInterface.c \
GdvIdleTower.c');
gdv = localEnvC.SharedLibrary ('libgdv', sourcesC);
# MONO #########################################################################
assemblyInfo = shellEnv.Subst ('AssemblyInfo.cs', '#common/AssemblyInfo.cs.in')
sourcesMono = Split ('AssemblyInfo.cs \
Gdv.FrameDimensions.cs \
Gdv.Application.cs \
Gdv.Time.cs \
Gdv.TimeSpan.cs \
Gdv.Source.cs \
Gdv.DecodeBin.cs \
Gdv.Exceptions.cs \
Gdv.VideoFormat.cs \
Gdv.AudioFormat.cs \
Gdv.Inspector.cs \
Gdv.MediaItem.cs \
Gdv.AVItem.cs \
Gdv.AudioItem.cs \
Gdv.PhotoItem.cs \
Gdv.ITimeable.cs \
Gdv.IThumbnailable.cs \
Gdv.ItemGenerator.cs \
Gdv.Color.cs \
Gdv.Clip.cs \
Gdv.Sheep.cs \
Gdv.Fraction.cs \
Gdv.ProjectFormat.cs \
Gdv.Pipeline.cs \
Gdv.Glue.cs \
Gdv.Track.cs \
Gdv.ImageSheep.cs \
Gdv.IFileBased.cs \
Gdv.Enums.cs \
Gdv.Args.cs \
Gdv.Exporter.cs \
Gdv.SheepSource.cs \
Gdv.ClipStore.cs \
Gdv.Handlers.cs');
localEnvMono = monoEnv.Clone (PKG = Split ('gtk-sharp-2.0'));
gdvdll = localEnvMono.Dll ('gdv.dll', sourcesMono);
localEnvMono.Depends (gdvdll, gdv);
localEnvMono.Depends (gdvdll, assemblyInfo);
# INSTALL ######################################################################
Alias ("install", localEnvC.Install (os.path.join (localEnvC ['DESTDIR'],
localEnvC ['LIBDIR'],
'diva'), gdv));
Alias ("install", localEnvMono.Install (os.path.join (localEnvMono ['DESTDIR'],
localEnvMono ['LIBDIR'],
'diva'), gdvdll));
|
UTF-8
|
Python
| false | false | 2,009 |
17,678,085,413,914 |
322e3a880ea81396a54b3529d23c134fa9ae8f41
|
69c138fe30c7e41a1a640281b78aed75eb545ffd
|
/myshop/myshop/app.py
|
de1a181a588223a6d10222148b293d9d51ed2cbd
|
[] |
no_license
|
orenm/myshop
|
https://github.com/orenm/myshop
|
3b8e7249ca16a49f59b235807ee47aec6a0cc17e
|
f8bd5121bdce3d4f5dae02185a1270a5aab78412
|
refs/heads/master
| 2020-04-14T21:37:58.464449 | 2013-08-12T00:09:52 | 2013-08-12T00:09:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from myshop.apps.promotions.app import myPromotionsApp
# override the original oscar shop with my own.
# override the promotions_app in the new class.
from oscar.app import Shop
class BaseApplication( Shop ):
promotions_app = myPromotionsApp
application = BaseApplication()
|
UTF-8
|
Python
| false | false | 2,013 |
6,038,724,022,699 |
a2ce70d4aa643d6ec86bd74a5a82c698e5f8b7bc
|
d9a3e85d06767dcc307b56dc67d174c14b514903
|
/mainpage/wlansi/donations/models.py
|
b9160275929ced4ec1f4ee9b4a2c52564c4ecf81
|
[] |
no_license
|
matevzmihalic/mainpage
|
https://github.com/matevzmihalic/mainpage
|
00f28b030c0ba8c0ae03ce4dfb1f98cbb4017e76
|
c0646a32cb3333e3af27b22c3df9681e29424815
|
refs/heads/master
| 2016-11-04T09:11:12.036948 | 2012-06-16T12:20:55 | 2012-06-16T12:20:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import decimal
from django.core import validators
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Donation(models.Model):
date = models.DateField(help_text=_("Date of donation on the account."))
amount = models.DecimalField(max_digits=12, decimal_places=2, validators=[validators.MinValueValidator(decimal.Decimal('0.01'))], help_text=_("In EUR, without any processing costs, final amount on the account."))
donor = models.CharField(max_length=255, blank=True, help_text=_("Leave blank if anonymous."))
message = models.TextField(blank=True)
internal_comment = models.TextField(blank=True, help_text=_("Internal comment, like donation source, circumstances, etc."))
class Meta:
verbose_name = _("donation")
verbose_name_plural = _("donations")
ordering = ('-date',)
app_label = 'wlansi'
def is_anonymous(self):
return not bool(self.donor)
is_anonymous.boolean = True
def __unicode__(self):
return unicode(_(u"%(amount)s on %(date)s" % {'amount': self.amount, 'date': self.date}))
|
UTF-8
|
Python
| false | false | 2,012 |
18,107,582,136,401 |
631a7bfa09155999c73c8932e5ec55fdf7044a2f
|
b930830154391bb8aad245ef2cbd03d58e7907b4
|
/web/com/send_queries.py
|
bb378bf62c3762d4e74927ab3731df82db71a0e3
|
[] |
no_license
|
gmacleod/lifetracker
|
https://github.com/gmacleod/lifetracker
|
8c5d513879f66bca2515a9bb5db65f506f8ef535
|
e4952b691910b52faba1f98bff7f077d3cec5de9
|
refs/heads/master
| 2016-09-06T17:18:40.583620 | 2011-12-09T22:19:36 | 2011-12-09T22:48:45 | 1,708,073 | 3 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.api import mail
from datetime import datetime
from model import User, Query, DataPoint, ActionLog
from lthandler import LTHandler
from sms import send_sms
from utils.lt_time import is_daytime
import logging
def send_by_email(query):
# get the user
user = query.user
subject = query.name
to = user.email
sender = 'Lifetracker <[email protected]>'
# construct the message
body = query.text
params = {
'query_id': str(query.key()),
'query_text': query.text,
}
body = open('ui/html/email_form.html').read() % params
# send the message
message = mail.EmailMessage(
sender = sender,
subject = subject,
to = to,
html = body)
message.send()
ActionLog.log('SentEmail')
def send_query_by_sms(query):
text = query.text + ' Please reply "' + query.name + ': value"'
send_sms(query.user.phone, text)
ActionLog.log('SentSMS')
def send_query(query):
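    '''Send the query via the user's preferred medium and record when it was sent.'''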
if query.user.query_medium == 'sms':
send_query_by_sms(query)
else:
send_by_email(query)
logging.info("Sent Query: " + query.name + " for user " + query.user.email)
query.lastSentAt = datetime.now() # refresh the query
query.put() # commit it
ActionLog.log('SentQuery')
# it will be a problem if this takes a long time
class SendQueriesHandler(LTHandler):
def get(self):
start = datetime.now().strftime('%s')
users = User.all().fetch(1000)
for user in users:
queries = Query.get_by_user(user)
for query in queries:
if query.is_stale() and query.is_time_to_send():
send_query(query)
break # only send one query per user every interval
end = datetime.now().strftime('%s')
logging.info('SendQueries started at ' + start)
logging.info('SendQueries finished at ' + end)
|
UTF-8
|
Python
| false | false | 2,011 |
11,201,274,750,085 |
21e59351dcb3985fb0fd604e30f96c2edc75c493
|
58323a2526cd8f7007b5c6f56bd0a8d47b312179
|
/src/py/stocktools/application.py
|
fbbe5cbc38820f4bf53a5867bd463c1e33b619ae
|
[] |
no_license
|
jagguli/stocktools
|
https://github.com/jagguli/stocktools
|
aae7cfbcd85340710aed2ba1b57a5f6e456f0ad1
|
cfb9be7e94907bb5e898639bff1c399cb415457e
|
refs/heads/master
| 2020-06-06T15:36:21.979235 | 2013-04-28T09:27:19 | 2013-04-28T09:27:19 | 9,727,976 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from debug import debug
import os
import sys
import time
import logging
import tornado.httpserver
import tornado.web
from tornado.options import options, define
from stormbase.options import configure
from stormbase.base_handler import get_static_handlers
from stormbase.session import SessionManager
from stormbase.database.couchdb import CouchDBAdapter
from stormbase.cache import MemcachedAdapter
from tornadotools.mongrel2.handler import Mongrel2Handler
from zmq.eventloop.minitornado.ioloop import IOLoop
#from zmq.eventloop.minitornado.ioloop import install as install_ioloop
from tornadotools.route import Route
from corduroy import Database
from handlers.home import HomeHandler
from handlers.portfolio import PortfolioHandler
import zerorpc
from threading import Thread
class StockApp(tornado.web.Application):
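    '''Tornado application wired to CouchDB, a memcached-backed session manager and the zerorpc stock service.'''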
def __init__(self, couchdb):
self.db = couchdb
handlers = Route.routes()
handlers.extend(get_static_handlers())
logging.info( "Added %s %s", len(handlers), " handlers.")
logging.debug( "Hndlers: \n%s ", "\n".join([str(h) for h in handlers]))
settings = dict(
#static_path=os.path.join(os.path.dirname(__file__), '../static'),
template_path=os.path.join(os.path.dirname(__file__), '../../html'),
xsrf_cookies=True,
cookie_secret=options.cookie_secret,
session_secret=options.session_secret,
memcached_addresses=options.memcached_addresses,
session_timeout=1600,
login_url="/authenticate",
debug=options.debug,
gzip=True,
ui_modules={},
)
self.cache = MemcachedAdapter(options.memcached_addresses, binary=True)
self.stockdb = zerorpc.Client("tcp://127.0.0.1:5000",timeout=50000)
self.cache.flush_all()
self.session_manager = SessionManager(
options.session_secret,
self.cache, options.session_timeout)
super(StockApp, self).__init__(handlers, 'stockapp',
**settings)
def extra_options():
define("mongrel", default=False)
define("admin_emails", type=list)
define("site_title")
define("site_description")
def main():
try:
extra_options()
configure()
if options.mongrel:
# install the zmq version of the IOLoop
ioloop = IOLoop.instance()
ioloop.install()
else:
ioloop = tornado.ioloop.IOLoop.instance()
db = Database('blog', (options.couchdb_user, options.couchdb_password))
db = CouchDBAdapter(db)
if not db:
logging.error("Failed to connect to DB:%s", options.couchdb_database)
sys.exit(1)
if options.mongrel:
handler = Mongrel2Handler(StockApp(db), "tcp://127.0.0.1:8000",
"tcp://127.0.0.1:8001",
no_keep_alive=True)
handler.start()
else:
http_server = tornado.httpserver.HTTPServer(StockApp(db), xheaders=True)
http_server.listen(options.port)
logging.info(
"Starting " + options.site_name + " @ port:" + str(options.port))
sys.excepthook = debug
ioloop.start()
except KeyboardInterrupt:
logging.info("Exiting cleanly on KeyboardInterrupt")
except Exception as e:
logging.exception(e)
|
UTF-8
|
Python
| false | false | 2,013 |
944,892,809,528 |
a09442f4b0310a77554262ea864382098dc8463c
|
747f759311d404af31c0f80029e88098193f6269
|
/addons/portal_project/__terp__.py
|
9690df1087baab0df14976cf2480f54c9fc0642a
|
[] |
no_license
|
sgeerish/sirr_production
|
https://github.com/sgeerish/sirr_production
|
9b0d0f7804a928c0c582ddb4ccb7fcc084469a18
|
1081f3a5ff8864a31b2dcd89406fac076a908e78
|
refs/heads/master
| 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
/home/openerp/production/extra-addons/portal_project/__terp__.py
|
UTF-8
|
Python
| false | false | 2,013 |
1,176,821,079,808 |
2a01a48b669ab9b153d67ae1f62c68310126bdd1
|
c733652465b2f2a41805493f862dc7a6b907f04d
|
/python/tmmseg.py
|
1cd8a2374c8a4942e783aa56e33fee6b0b2244b8
|
[
"GPL-2.0-only"
] |
non_permissive
|
ywdong/mmseg
|
https://github.com/ywdong/mmseg
|
1c69a8d44afcb11c050873bc4bcb208656bd3d76
|
af1dd4f61de25dae49055b18d2a8ad77b8e788b7
|
refs/heads/master
| 2021-01-16T18:04:59.738122 | 2012-02-27T16:53:26 | 2012-02-27T16:53:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import cmmseg
#cmmseg.init('F:\\deps\\mmseg\\src\\win32')
seg = cmmseg.MMSeg('/usr/local/data/mmseg')
rs = seg.segment((u'中国人民银行中文分词,爱因斯坦').encode('utf-8'))
for token in rs:
print token['word']
print token['begin']
print token['end']
print token['token_type']
|
UTF-8
|
Python
| false | false | 2,012 |
10,634,339,035,649 |
1fc4b5ade2f55325d54186e4ab343e53642af2cf
|
6e69b8ff63697e75ec114e4fb8395c2d41439bb9
|
/bid_solver.py
|
b5c49f200980c4d34b194ed422cc216658b3e46e
|
[] |
no_license
|
kennonlee/cacahuates
|
https://github.com/kennonlee/cacahuates
|
adb58e6aecd5da209557e4bf9ce1c5833095479f
|
ca99990859de54d059c1b49b39601e6a636cb575
|
refs/heads/master
| 2021-01-19T11:22:20.424356 | 2013-10-03T04:04:21 | 2013-10-03T04:04:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import socket
import logging
import atom.service
import gdata.service
import gdata.spreadsheet
import gdata.spreadsheet.service
import gdata.spreadsheet.text_db
from munkres import Munkres
# bad things happen in prettify_dupes() if there's a post with more than 9 slots!
DUPE_POSTS = {
'Frankfurt': 2,
'Montevideo': 2,
'Moscow': 2,
'DC': 7
}
# the actual index ranges of dupe posts; used for forcing assignments
DUPE_POST_RANGES = {
'Frankfurt': [3, 5],
'Montevideo': [8, 10],
'Moscow': [10, 12],
'DC': [13, 20]
}
POSTS = {'Abu Dhabi': 0,
'Canberra': 1,
'Dakar': 2,
'Frankfurt1': 3,
'Frankfurt2': 4,
'Frankfurt RCSO': 5,
'London': 6,
'Mexico City': 7,
'Montevideo1': 8,
'Montevideo2': 9,
'Moscow1': 10,
'Moscow2': 11,
'New Delhi': 12,
'DC1': 13,
'DC2': 14,
'DC3': 15,
'DC4': 16,
'DC5': 17,
'DC6': 18,
'DC7': 19
}
RPOSTS = dict((v,k) for k, v in POSTS.iteritems())
class BidSolver():
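    '''Solves post bidding as a minimum-cost assignment problem using the Munkres algorithm.'''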
def add_dupe_posts(self, rankings):
'''
For each list of rankings, adds in multiple items for the posts that
have more than one slot (Frankfurt, Montevideo, Moscow, DC).
'''
new_rankings = {}
for name, ranking in rankings.iteritems():
# sort of inefficient to create a new list, but oh well
new_ranking = []
for post in ranking:
if post in DUPE_POSTS:
dupes = DUPE_POSTS[post]
for i in range(1, dupes + 1):
new_ranking.append('{0}{1}'.format(post, i))
else:
new_ranking.append(post)
new_rankings[name] = new_ranking
return new_rankings
def prettify_dupes(self, post, rank):
'''
        Bad things would happen here if there's a post with more than 9 slots!
        There's a weak attempt to fudge the rankings but it doesn't really work.
        For instance, if we have 3 people that bid Moscow (2 slots), the person
        that doesn't get Moscow will get his second bid, but it will show as #3.
        The only real way to fix this is to revert the bids to their unduped
        state when determining the assignment rank. But that's harder.
'''
if post[:-1] in DUPE_POSTS:
base_rank = rank - int(post[-1]) + 1
return [post[:-1], base_rank]
return [post, rank]
def get_assignments(self, rankings, forced={'Kennon':'London', 'DaveG':'Canberra'}):
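        '''Compute an optimal assignment from the rankings, honoring any forced person-to-post picks.'''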
#print '###1', rankings
rankings = self.add_dupe_posts(rankings)
#print '###2', rankings
errors = self.validate_rankings(rankings)
if len(errors) != 0:
print errors
raise Exception(errors)
#print '###3', rankings
errors = self.validate_forced(forced)
if len(errors) != 0:
print errors
raise Exception(errors)
#print '###4', rankings
names = []
for name, ranking in rankings.items():
names.append(name)
# converts post names to their alphabetical index
converted = [POSTS[post] for post in ranking]
rankings[name] = converted
#print rankings
# name_map maps matrix row to the person whose rankings the row represents
count = 0
name_map = {}
for name in names:
name_map[count] = name
count += 1
print 'name_map:', name_map
matrix = [self.flip_ranks(ranking) for ranking in rankings.itervalues()]
# print 'unforced matrix:', matrix
# assign very low cost values to force the algorithm to make certain
# assignments
reverse_name_map = dict((v,k) for k, v in name_map.iteritems())
for forced_person, forced_post in forced.items():
# the forced person hasnt actually saved a bid list; make a fake one
if forced_person not in reverse_name_map:
name_map[count] = forced_person
reverse_name_map[forced_person] = count
matrix.append([0] * len(POSTS))
count += 1
person_index = reverse_name_map[forced_person]
# the forced post is a dupe one; assign a low cost to each dupe
if forced_post in DUPE_POST_RANGES:
dupe_post_range = DUPE_POST_RANGES[forced_post]
for i in range(dupe_post_range[0], dupe_post_range[1]):
matrix[person_index][i] = -100
else:
post_index = POSTS[forced_post]
matrix[person_index][post_index] = -100
print 'matrix:', matrix
m = Munkres()
indexes = m.compute(matrix)
print indexes
total = 0
assignments = []
for row, column in indexes:
value = matrix[row][column]
total += value
print '{0} assigned to {1} (cost {2})'.format(name_map[row], RPOSTS[column], value)
prettified = self.prettify_dupes(RPOSTS[column], value + 1)
assignments.append([name_map[row], prettified[0], prettified[1]])
#print 'total cost=%d' % total
return assignments
def validate_rankings(self, rankings):
'''
Throws an error if the given rankings are invalid-- that is, if:
- the list is too short
- the list is too long
        - the chosen cities don't match the actual list of possible assignments
'''
errors = []
for name, ranking in rankings.iteritems():
if len(ranking) != len(POSTS):
errors.append("{0} lists {1} posts, but requires {2}".format(name, len(ranking), len(POSTS)))
errors.extend(self.validate_ranking_contents(name, ranking))
return errors
def validate_ranking_contents(self, name, ranking):
errors = []
checklist = dict((k, 1) for k in POSTS)
for post in ranking:
try:
checklist.pop(post)
except KeyError:
errors.append('{0} has duplicate post {1} in list'.format(name, post))
if len(checklist) != 0:
for k in checklist.iterkeys():
errors.append('{0} is missing post {1}'.format(name, k))
return errors
def validate_forced(self, forced):
'''TODO'''
errors = []
return errors
def flip_ranks(self, ranking):
'''
Converts an indexed ordered list into a weighted list ordered by post
indices-- the first element is the weight for Abu Dhabi, second is for
Athens, then Canberra, etc.
'''
ret = []
for i in range(0, len(POSTS)):
weight = 0
for post in ranking:
if i == post:
#print 'found {0} at weight {1}'.format(i, weight)
ret.append(weight)
break
else:
weight += 1
return ret
if __name__ == "__main__":
g = BidSolver()
#r = [11,12,0,1,7,3,4,5,6,2,8,9,10,13,14]
#print r
#print g.flip_ranks(r)
print g.get_assignments(g.get_entries())
|
UTF-8
|
Python
| false | false | 2,013 |
7,060,926,241,631 |
d5e2968b75a54e5aedd211cbbdc483ce1e0e47e2
|
ac303c1d5832a0c74a843bf57c8443c12d292d99
|
/index.py
|
5b25d0f85b7df24dd528ee9beab953e929c0c7d1
|
[] |
no_license
|
DowntownChen/NaiveWeiboSearcher
|
https://github.com/DowntownChen/NaiveWeiboSearcher
|
1767739a0a24a34beee6669257d63f9f0aa55761
|
0eb479ca77a0df68065befb76a7db1c6f0655f3c
|
refs/heads/master
| 2021-01-02T22:51:17.839616 | 2013-12-24T16:06:18 | 2013-12-24T16:06:18 | 15,000,664 | 0 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/env/python
# -*- coding: utf-8 -*-
import os
import csv
import pymongo
import jieba
import re
import math
cutPattern = u'@[-_0-9a-zA-Z\u4e00-\u9fa5]+[:\s]|@[-_0-9a-zA-Z\u4e00-\u9fa5]+$|\[.+\]|http://sinaurl\.cn/[0-9a-zA-Z]{5}|http://t\.cn/[0-9a-zA-Z]{7}'
datafilter = re.compile(cutPattern)
stopPattern = u'^`|~|!|@|#|\$|%|\^|&|\*|\(|\)|_|\+|-|=|\{|\}|\[|\]|\||\\|:|;|<|,|>|\.|\?|/|\s|\'|"|[A-Za-z]|[0-9]+$'
stopfilter = re.compile(stopPattern)
topicPattern = u'#[^#]+#'
topicFilter = re.compile(topicPattern)
with open('./stopList.csv','r') as stopListFile:
stopList = set()
reader = csv.reader(stopListFile)
for row in reader:
stopList.add(unicode(row[0],'utf-8'))
database = pymongo.database.Database(pymongo.MongoClient('localhost',27017), u'InvertedIndexDatabase')
indexDB = database.indexDB
articleDB = database.articleDB
topicIndexDB = database.topicIndexDB
topicDB = database.topicDB
'''
build invindex from input csv file at 'path'
input: path to csvfile
output: no output
'''
def index(path):
docID = 0
invindex = {}
topicSet = set()
with open(path,'r') as csvfile:
reader = csv.reader(csvfile,delimiter='\t')
for row in reader:
docDict = {}
segtext = datafilter.split(unicode(row[2], 'utf-8'))
segTopic = topicFilter.findall(unicode(row[2], 'utf-8'))
seglist = []
for item in segtext:
seglist.extend(jieba.cut_for_search(item))
for item in seglist:
if item not in stopList and not re.match(stopPattern,item):
tf = docDict.setdefault(item, 0)
docDict[item] = tf + 1
for item in docDict.iteritems():
table = invindex.setdefault(item[0],{})
table.setdefault(docID,item[1])
for item in segTopic:
topicSet.add(item)
postArticle = {
u'DocID': docID,
u'User': unicode(row[0], 'utf-8'),
u'Time': unicode(row[1], 'utf-8'),#time.strptime(row[1], '%Y-%m-%d %H:%M'),
u'Article': unicode(row[2], 'utf-8')
}
articleDB.save(postArticle)
docID = docID + 1
N = docID
for index in invindex.iteritems():
df = len(index[1])
idf = math.log10(float(N)/float(df))
table = []
for item in index[1].iteritems():#calc tf*idf/docID (docID represents time in reverse order)
rank = round((math.log10(float(item[1]+1))*idf)/(math.log10(float(docID+1))), 3)
table.append((item[0], rank))
table.sort(cmp=lambda x,y: cmp(x[1],y[1]))
postIndex = {
u'Word': index[0],
u'DF': df,
u'IndexTable': table
}
indexDB.save(postIndex)
topicID = 0
topicindex = {}
for topic in topicSet:
seglist = jieba.cut_for_search(topic)
postTopic = {
u'TopicID': topicID,
u'Topic': topic
}
for item in seglist:
if item not in stopList and not re.match(stopPattern,item):
table = topicindex.setdefault(item, [])
table.append(topicID)
topicDB.save(postTopic)
topicID = topicID + 1
N = topicID
for index in topicindex.iteritems():#topic's tf===1
postIndex = {
u'Word': index[0],
u'IDF': round(math.log10(float(N)/float(len(index[1]))),3),
u'IndexTable': index[1]
}
topicIndexDB.save(postIndex)
try:
index('./library/weibodata.csv')
except Exception,e:
print(e)
raise
else:
print("index build successfully")
|
UTF-8
|
Python
| false | false | 2,013 |
16,578,573,781,339 |
09ec5aef2adb629967107d28decf4d004f54d4bc
|
8e2f87a07b8f0f4ffa2db6222e791d19e2910c6a
|
/pycvf/trunk/pycvf/lib/ontology/pywn/pywnserver.py
|
ee5c463bbdfc591d10bf0d409b77832b2be51146
|
[
"LGPL-3.0-only",
"GPL-3.0-only",
"LGPL-2.0-or-later",
"GPL-1.0-or-later"
] |
non_permissive
|
IronManMark20/pycvf
|
https://github.com/IronManMark20/pycvf
|
67c0177f1aede177659806be174b2336166b5268
|
48164c280e04cebdf7211035a44aacce807dea25
|
refs/heads/master
| 2016-08-12T21:00:50.176859 | 2010-02-11T06:37:17 | 2010-02-11T06:37:17 | 44,293,729 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
from socket import *
from stdwn import impl
from StringIO import *
import thread
import cPickle
stayalive = 1
port = 3334
def acceptConnections(arg):
s = socket(AF_INET, SOCK_STREAM)
s.bind(('', port))
s.listen(1)
print "pwynserver listening on port", port
print "<return> to stop"
while stayalive:
conn = s.accept()
print 'connection opened('+conn[1][0]+':'+`conn[1][1]`+')'
thread.start_new_thread(handleRequests, (conn,))
s.shutdown(2)
kstr = impl.getSynsetKeyFromString
def processCommand(cmd):
if cmd[0] == 'ss':
func = impl.grabSynset
elif cmd[0] == 'key':
func = impl.grabKeys
else:
return None
return func(cmd[1])
def handleRequests(arg):
sock = arg[0]
sockfile = sock.makefile('r+')
host = arg[1][0]
port = `arg[1][1]`
while 1:
try:
cmd = cPickle.load(sockfile)
print host+":"+port, cmd[0], cmd[1]
except error:
break
except EOFError:
break
except cPickle.UnpicklingError:
break
if not cmd:
break
cPickle.dump(processCommand(cmd), sockfile)
sockfile.close()
sock.shutdown(2)
print 'connection closed('+host+':'+port+')'
def open():
thread.start_new_thread(acceptConnections, (None,))
def close():
stayalive = 0
if __name__ == '__main__':
#default pywnserver port
import sys
port = int(sys.argv[1])
open()
sys.stdin.readline()
close()
|
UTF-8
|
Python
| false | false | 2,010 |
13,580,686,598,765 |
31e897fc3e58215f22b1f18ff42f0c01553a2816
|
2c553f65a9f89235b0d1edb7866a4cb4eb6a34e3
|
/easyfind/transactions/views.py
|
53aea4ffa3e2120f99b7f2c84826460508b76f92
|
[
"MIT"
] |
permissive
|
veyselsahin/easyfind
|
https://github.com/veyselsahin/easyfind
|
f48332bbb26f1ec2e8ac520fa7118b3cbdeb0f1b
|
b380f22ed66dd26ff3cb31510cfddb8e88f2a602
|
refs/heads/master
| 2021-01-17T20:54:42.742383 | 2014-04-27T08:01:56 | 2014-04-27T08:01:56 | 19,492,825 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
from django.conf import settings
from django.http import HttpResponse
from django.views.decorators.http import require_http_methods
from easyfind.decorators import authenticate_request
import paypalrestsdk
from paypalrestsdk import Payment
from django.core.serializers.json import DjangoJSONEncoder
@require_http_methods(['GET'])
@authenticate_request
def paypal_buy(request):
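    '''Create a PayPal payment and return the approval URL the client should redirect to.'''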
paypalrestsdk.configure({
"mode": settings.PAYPAL_MODE,
"client_id": settings.PAYPAL_CLIENT_ID,
"client_secret": settings.PAYPAL_CLIENT_SECRET})
payment = Payment({
"intent": "sale",
# ###Payer
# A resource representing a Payer that funds a payment
# Payment Method as 'paypal'
"payer": {
"payment_method": "paypal"
},
# ###Redirect URLs
"redirect_urls": {
"return_url": settings.PAYPAL_RETURN_URL,
"cancel_url": settings.PAYPAL_CANCEL_URL,
},
# ###Transaction
# A transaction defines the contract of a
# payment - what is the payment for and who
# is fulfilling it.
"transactions": [{
# ### ItemList
"item_list": {
"items": [{
"name": "item",
"sku": "item",
"price": "0.10",
"currency": "USD",
"quantity": 1}]},
# ###Amount
# Let's you specify a payment amount.
"amount": {
"total": "0.10",
"currency": "USD"},
"description": "This is the payment transaction description......"}]})
# Create Payment and return status
if payment.create():
        response = {'data': {'url': payment.links[1].href}}
else:
response = {'error': 'Something went wrong.'}
return HttpResponse(json.dumps(response, sort_keys=True, indent=4, cls=DjangoJSONEncoder), content_type="application/json")
@require_http_methods(['GET'])
@authenticate_request
def paypal_cancel(request):
# Respond
    response = {'data': {'url': None}}  # placeholder: the original left this value blank
return HttpResponse(json.dumps(response, sort_keys=True, indent=4, cls=DjangoJSONEncoder), content_type="application/json")
@require_http_methods(['GET'])
@authenticate_request
def paypal_return(request):
# Respond
    response = {'data': {'url': None}}  # placeholder: the original left this value blank
return HttpResponse(json.dumps(response, sort_keys=True, indent=4, cls=DjangoJSONEncoder), content_type="application/json")
|
UTF-8
|
Python
| false | false | 2,014 |
3,186,865,757,892 |
4d4eeae26de9df4674d50351eb6eddbb87370610
|
841605ce0f64b69ce081183f2e0d73436b6f8ab6
|
/questions/question6.py
|
66ec019e3377ee3c375eb9a65e20c684c19b6790
|
[] |
no_license
|
dr-aryone/python-assessment
|
https://github.com/dr-aryone/python-assessment
|
68823f4a1244d4326db6b18fb6b85896c2d16a21
|
de1679d3f19d851fb2092f394d3b806bfe0d9e22
|
refs/heads/master
| 2020-05-27T01:43:07.308175 | 2014-08-21T19:26:27 | 2014-08-21T19:26:27 | 188,441,288 | 1 | 1 | null | true | 2019-05-24T15:00:22 | 2019-05-24T15:00:22 | 2019-05-24T15:00:19 | 2014-08-21T19:26:35 | 117 | 0 | 0 | 0 | null | false | false |
"""
Question 6
# Method decorators
Write a decorator for the class-based view that you created in
Question 5 that ensures the view can only be seen by users with
permissions to edit the object being shown. Assume that there are
no third-party permissions modules in use, only the built-in auth
from Django.
"""
import json

from django.contrib.auth.decorators import login_required, permission_required
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from django.views.generic import View


class MyView(View):
def get(self, request, *args, **kwargs):
return HttpResponse(json.dumps({"status": "OK"}), mimetype='application/json')
@method_decorator(login_required)
    @method_decorator(permission_required('myapp.change_object'))
def dispatch(self, *args, **kwargs):
return super(MyView, self).dispatch(*args, **kwargs)
|
UTF-8
|
Python
| false | false | 2,014 |
5,523,327,969,914 |
38daf40d53a767e114d9ae1c4c9e24d8d28f05c5
|
7fec8e3a0cb82e2579b87e2ee89541484d95fb58
|
/server/netgate.py
|
33ba944e8cb5e5457177df801e15b638442f3f5b
|
[] |
no_license
|
kaleidosgu/pingpongmoai
|
https://github.com/kaleidosgu/pingpongmoai
|
16cb4b599438f72c72ddaf8616538317fa7920b4
|
6992a7ea07487c5d6f0553c6075155d08986a626
|
refs/heads/master
| 2021-01-21T11:14:44.849536 | 2013-04-07T11:38:42 | 2013-04-07T11:38:42 | 32,836,207 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import socket
import threading
import sys
import json
class CtlCmdEnum:
CONNECT = 1
SEND = 2
CLOSE = 3
def get_line(s):
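    '''Split one CRLF-terminated line off the front of s; returns (line, rest) or (None, s).'''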
sepidx = s.find("\r\n")
if sepidx == -1:
return (None, s)
line = s[0 : sepidx]
s = s[sepidx + 2 : ]
return (line, s)
def send_line_to_gate(lineObj):
line = json.dumps(lineObj) + "\r\n"
gate_socket.send(line)
def tunnel_client(idx, client_socket):
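    '''Forward CRLF-delimited lines from one client socket to the gate, tagged with idx.'''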
line = {"idx" : idx, "cmd" : CtlCmdEnum.CONNECT}
send_line_to_gate(line)
buf = ''
while True:
rcvd = None
try:
rcvd = client_socket.recv(65536)
except:
pass
if not rcvd:
client_socket.close()
line = {"idx" : idx, "cmd" : CtlCmdEnum.CLOSE}
send_line_to_gate(line)
break
buf += rcvd
line, buf = get_line(buf)
if line != None:
lineObj = {"idx" : idx, "cmd" : CtlCmdEnum.SEND, "line" : line}
send_line_to_gate(lineObj)
def tunnel_gate():
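    '''Dispatch lines received from the gate back to the matching client socket.'''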
buf = ''
while True:
rcvd = gate_socket.recv(65536)
if not rcvd:
sys.exit(0)
buf += rcvd
line, buf = get_line(buf)
if line != None:
lineObj = json.loads(line)
if lineObj["cmd"] == CtlCmdEnum.SEND:
client_socket = client_sockets[lineObj["idx"]]
try:
if client_socket.send(lineObj["line"] + "\r\n") < 1:
client_socket.close()
except:
client_socket.close()
elif lineObj["cmd"] == CtlCmdEnum.CLOSE:
client_socket = client_sockets[lineObj["idx"]]
del client_sockets[lineObj["idx"]]
client_socket.close()
else:
assert(False)
gate_socket = socket.socket()
gate_socket.connect(("127.0.0.1", 54322))
server_socket = socket.socket()
server_socket.bind(("0.0.0.0", 54321))
server_socket.listen(50)
idx = 0
client_sockets = {}
threading.Thread(target = tunnel_gate).start()
while True:
client_socket, peer_info = server_socket.accept()
idx += 1
client_sockets[idx] = client_socket
threading.Thread(target = tunnel_client, args = (idx, client_socket)).start()
|
UTF-8
|
Python
| false | false | 2,013 |
10,703,058,529,210 |
073384d4588aa935e1e48595092ec7af09e0dd40
|
a1bbb100843620c85a45881d9605992e4fd82e19
|
/vncsimulator/pulp-examples/MinimizarEnergia.py
|
f9cf9c77c2bcd8cc7aa6e4f7d93997d8c996b88a
|
[] |
no_license
|
glaucogoncalves/VNC-Simulator
|
https://github.com/glaucogoncalves/VNC-Simulator
|
86ce0b8d289ac94ccdf9b8498962c64bbebc2584
|
54c889fcf655d892039cd79dcca1c51706a5b59d
|
refs/heads/master
| 2021-01-18T12:04:42.656471 | 2014-08-07T11:00:19 | 2014-08-07T11:00:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
The Minimize Energy Problem for the PuLP Modeller
Authors: Felipe Fernandes, Dr Glauco Goncalves 2014
"""
from random import randint
from pulp import *
V=5 #number of virtual machines
M=2 #number of servers
profit=[randint(20,100) for i in range(0,V)]
print("Profit:%s"%profit)
demand=[randint(10,50) for i in range(0,V)]
print("Demands:%s"%demand)
capacity=[randint(40,50) for i in range(0,M)]
print("Capacities:%s"%capacity)
energycost=[randint(60,100) for i in range(0,M)]
print("Energy Cost:%s"%energycost)
#demand=[10,30,10,10,20]
#capacity=[80,30]
vms = [ "vm%s"%i for i in range(1,V+1) ]
servers = [ "s%s"%i for i in range(1,M+1) ]
p = dict(zip(vms,profit))
d = dict(zip(vms,demand))
C = dict(zip(servers,capacity))
w = dict(zip(servers,energycost))
# Creates the 'prob' variable to contain the problem data
prob = LpProblem("Minimize Energy",LpMaximize)
# Creates a list of tuples containing all the possible
Possible = [(vm,server) for vm in vms for server in servers]
# A dictionary called 'Vars' is created to contain the referenced variables
Xvars = LpVariable.dicts("x",(vms,servers),0,1,LpInteger)
Yvars = LpVariable.dicts("y",servers,0,1,LpInteger)
##### The objective function is added to 'prob' first
lst1 =[p[vm]*Xvars[vm][server] for (vm,server) in Possible]
lst2 =[w[server]*(1-Yvars[server]) for server in servers]
prob += lpSum(lst1+lst2), "Total Profit"
for server in servers:
prob += lpSum([d[vm]*Xvars[vm][server] for vm in vms])<=Yvars[server]*C[server], "Sum_of_VM_demands_on_Server_%s"%server
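# Each VM may be assigned to at most one server: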
for vm in vms:
prob += lpSum([Xvars[vm][server] for server in servers])<=1, "Allocation_Constraint_of_VM_%s"%vm
#prob += lpSum([Yvars[server] for server in servers])<=M, "Sanity check"
# The problem data is written to an .lp file
prob.writeLP("MinimizarEnergia.lp")
# The problem is solved using PuLP's choice of Solver
prob.solve()
# The status of the solution is printed to the screen
print "Status:", LpStatus[prob.status]
# Each of the variables is printed with it's resolved optimum value
for v in prob.variables():
print v.name, "=", v.varValue
# The optimised objective function value is printed to the screen
print "Total Profit = ", value(prob.objective)
|
UTF-8
|
Python
| false | false | 2,014 |
2,319,282,354,127 |
7ad8cabf238c03b2be82d3280464c1d416591d87
|
f4e256d50a95441c32831f5d0b52b7ec35286d1a
|
/blueprints/__init__.py
|
bc03a3a859367d26909b7c9de0c36e2177f1dc7b
|
[] |
no_license
|
y-usuzumi/interfacade
|
https://github.com/y-usuzumi/interfacade
|
fa61686b69243047ffd9886618ad09db4007ce96
|
a277022dbfeef555f9836e7cff5532b9dec679d3
|
refs/heads/master
| 2021-05-27T10:12:15.482971 | 2014-06-28T14:00:19 | 2014-06-28T14:00:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'kj'
from .interface import interface
from .home import home
|
UTF-8
|
Python
| false | false | 2,014 |
7,017,976,568,799 |
314b7475168bfd5c9b33ca3a885e395904483676
|
6cfc67b3ca87a86420d947a7d242966e7fc2d414
|
/Python/sudoku-solver/solve.py
|
af95d0b5a022ebed6da9255ef4d81e46118b9f4b
|
[] |
no_license
|
sirmc/Sirmc-s-public-folder
|
https://github.com/sirmc/Sirmc-s-public-folder
|
331ac66976b52b9128e745ed0addeef63d7860e4
|
53f52a4e7c3b8b2d9e01341e40118de996434f52
|
refs/heads/master
| 2021-01-19T07:24:15.560883 | 2014-04-26T05:33:09 | 2014-04-26T05:33:09 | 3,112,325 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re, time
product = lambda a,b: [x+y for x in a for y in b]
digits = '123456789'
columns = digits
rows = 'ABCDEFGHI'
# This script uses strings instead of dicts because python's built-in
# string manipulation (e.g. string.replace) is faster than the corresponding list function (list.remove)
squares = product(rows, columns)
# Set of Related units
units = ([product(rows, column) for column in columns]
+ [product(row, columns) for row in rows]
+ [product(row, column) for row in ['ABC', 'DEF', 'GHI'] for column in ('123', '456', '789')])
# List of units of square s
unitd = dict((s, [u for u in units if s in u]) for s in squares)
# List of related squares of s
related = dict((square, set(sum(unitd[square], []))-set([square])) for square in squares)
def grid_to_dict(grid):
# Creates a dict of the sudoku puzzle, and calls set_square() on known squares
chars = [c for c in grid if c in digits or c == '0']
assert len(chars) == 81
initial_values = dict(zip(squares, chars))
grid_dict = dict((square, digits) for square in squares)
    # For every value in grid, set corresponding square to that number, if error return error (false)
for square, digit in initial_values.items():
if digit in digits and not set_square(grid_dict, square, digit):
return False
return grid_dict
def set_square(grid_dict, square, value):
#Works by eliminating all but value
not_value = grid_dict[square].replace(value, '')
# If all del_from_square are true return result
if all(del_from_square(grid_dict, square, val2) for val2 in not_value):
return grid_dict
else:
# Cannot set square to value
return False
def del_from_square(grid_dict, square, value):
    # Deletes value from square, checks related square for same value
# if that is the only value left in the related square, set that square to value.
if value not in grid_dict[square]:
return grid_dict # value already deleted
grid_dict[square] = grid_dict[square].replace(value, '')
if len(grid_dict[square]) == 0:
return False
elif len(grid_dict[square]) == 1:
value = grid_dict[square]
# Only 1 possible value, remove value from related units
if not all(del_from_square(grid_dict, square2, value) for square2 in related[square]):
# Cannot remove value from another square, this value is wrong
return False
for unit in unitd[square]:
value_in = [s for s in unit if value in grid_dict[s]]
if len(value_in) == 0:
return False # value cannot be added in this unit -> error
elif len(value_in) == 1:
# One possibility to assign value
if not set_square(grid_dict, value_in[0], value):
return False
return grid_dict
def depth_first_search(grid_dict):
# Using a depth-first search to find solution
if grid_dict is False:
return False
if all(len(grid_dict[s]) == 1 for s in squares):
return grid_dict # Already solved
# Chose square with fewest possible solutions
n,square = min((len(grid_dict[square]), square) for square in squares if len(grid_dict[square]) > 1)
search = [depth_first_search(set_square(grid_dict.copy(), square, value)) for value in grid_dict[square]]
    # Return first value that is not false (error)
for item in search:
if item:
return item
return False
def solve(grid):
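    '''Solve one 81-character grid string and return the 81 solved squares in order.'''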
result = depth_first_search(grid_to_dict(grid))
return [result[square] for square in squares]
def from_file(filename):
    #Parse a file into a list of grid strings, split on the 'Grid NN' headers.
sfile = file(filename).read()
sfile = sfile.strip()
sfile = re.split('Grid [0-9][0-9]', sfile)
sfile.pop(0)
return sfile
def check(solution):
# Checks each unit for errors, by sorting its values and comparing it to string '123456789'
# which is the string of numbers that we want in each unit
solution_dict = dict(zip(squares, solution))
for unit in units:
unit_numbers = []
for square in unit:
unit_numbers.append(solution_dict[square])
assert ''.join(sorted(unit_numbers)) == digits, "Sudoku not solved"
problem_numbers = []
counter = time.clock()
for grid in from_file('sudoku.txt'):
solution = solve(grid)
check(solution) # Error checking
for i in range(9):
row= ''.join(solution[9*i:9*i+9])
print row
#print solution
print "\n"
problem_numbers.append(100*int(solution[0])+10*int(solution[1])+int(solution[2]))
total_time = time.clock()-counter
print "Total time {0}".format(total_time)
print problem_numbers
print 'Sum of all numbers for problem:'
print sum(problem_numbers)
|
UTF-8
|
Python
| false | false | 2,014 |
16,776,142,259,846 |
ce59812a15259bbf5bd399969e5607869c109d03
|
a395d39b6c08b249c64d91e09ce33e61a2deb42a
|
/Assignment3/navibot.py
|
d3453cfd7316abdd7e62ac6ff43a1f2593676117
|
[] |
no_license
|
NJWong/FIT3140-App
|
https://github.com/NJWong/FIT3140-App
|
69ffa1c9e7da841be9901dcf8585c3a29ba9df15
|
6dc001892a75c07fde749cebd00924c112f8c642
|
refs/heads/master
| 2021-01-23T06:54:28.889846 | 2014-12-21T13:01:29 | 2014-12-21T13:01:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from interpreter import *
from programblock import *
from cleartile import *
from walltile import *
from goaltile import *
from navimaze import *
from naviprogram import *
from naviblocks import *
from navicontrols import *
from kivy.app import App
from kivy.uix.floatlayout import FloatLayout
from kivy.properties import ObjectProperty, ListProperty
from kivy.clock import Clock
from functools import partial
class NaviBot(FloatLayout):
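    '''Root widget tying together the maze, the block palette, the program area and the controls.'''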
naviblocks = ObjectProperty(None)
naviprogram = ObjectProperty(None)
navicontrols = ObjectProperty(None)
navimaze = ObjectProperty(None)
interpreter = Interpreter()
execution_tree = ListProperty(None)
def run(self, statement, dt):
exec(statement)
def run_program(self, program):
#print('AUX: run program')
self.execution_tree = self.interpreter.create_execution_tree(self.naviprogram.program)
for i in range(len(self.execution_tree)):
Clock.schedule_once(partial(self.run, self.execution_tree[i]), i)
class NaviBotApp(App):
def build(self):
navibot = NaviBot()
navibot.navimaze.initialize()
return navibot
if __name__ == '__main__':
NaviBotApp().run()
|
UTF-8
|
Python
| false | false | 2,014 |
4,432,406,265,785 |
9ec18f828d45723a472cdec6081ea15eb28402ee
|
77ccdbd8b3ade1a3e5db3afc96593d2aa05d8dc4
|
/internal/internal/urls.py
|
1fa0cb645310b3fefe1e064c56424a3b1f1f7005
|
[] |
no_license
|
asilvaC/kalite-internal
|
https://github.com/asilvaC/kalite-internal
|
b1856267e5b099866a02303ad8403af63505f0e8
|
6765e67e366f3fc3b5ad4cd5f98f3d16ce93397f
|
refs/heads/master
| 2021-01-15T17:21:15.768650 | 2013-09-03T02:28:00 | 2013-09-03T02:28:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
import feedbacks.urls
import profiles.urls
import projects.urls
import views
admin.autodiscover()
urlpatterns = patterns('',
url('^$', profiles.urls.profile_index, name='home'),
url(r'^login/$', 'django.contrib.auth.views.login', {'template_name' : 'internal/login.html'}),
url(r'^logout/$', 'profiles.views.logout', name='logout'),
url(r'^admin/', include(admin.site.urls)),
url(r'^profiles/', include(profiles.urls)),
url(r'^projects/', include(projects.urls)),
url(r'^feedbacks/', include(feedbacks.urls)),
url(r'^contact/', views.contact)
)
|
UTF-8
|
Python
| false | false | 2,013 |
3,917,010,193,855 |
81b63ddf1acde1d00b56ff106dbf703331ea2473
|
6b529ddc3cc1f3b068d566ff71a76af9d9c792e9
|
/python_imple.py
|
879fc3a46b54fb6a802836c027fe77a55cb5f2b9
|
[] |
no_license
|
binga/CA-Exacerbator
|
https://github.com/binga/CA-Exacerbator
|
56e4b8e955b4d381066754ab0302b3633eabd32b
|
a8a6546a55150b7ee99e9bb9ac9046ac7df1d41e
|
refs/heads/master
| 2020-02-26T15:17:59.141790 | 2014-11-28T14:32:47 | 2014-11-28T14:32:47 | 27,121,445 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 25 23:21:41 2014
@author: Phani
"""
import pandas as pd
import numpy as np
from sklearn.preprocessing import Imputer
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import cross_val_score, StratifiedKFold
import matplotlib.pyplot as plt
data = pd.read_csv("D:/CA/Exacerbation/CAX_ExacerbationModeling_TRAIN_data.csv")
test = pd.read_csv("D:/CA/Exacerbation/CAX_ExacerbationModeling_Public_TEST_data.csv")
## Imputation
data_total = data.append(test)
imp = Imputer(missing_values='NaN', strategy='median', axis=0, copy = False)
data_total_imputed = imp.fit_transform(data_total)
## Train and Evaluation Split
train = pd.DataFrame(data_total_imputed[1:4099,:])
evaluation = pd.DataFrame(data_total_imputed[4099:,:])
train.columns = data.columns
evaluation.columns = data.columns
## Sampling
#train_0 = train[train['Exacebator'] == 0]
#train_1 = train[train['Exacebator'] == 1]
#samp = np.random.choice(train_0.index.values, 343)
#train_0_samp = train_0.ix[samp]
#train = train_1.append(train_0_samp)
# Shuffling
#train = train.iloc[np.random.permutation(np.arange(len(train)))]
# Ignoring sid variable
colsToDrop = ['Exacebator','sid']
y_train = train['Exacebator']
X_train = train.drop(colsToDrop, axis = 1)
y_eval = evaluation['Exacebator']
X_eval = evaluation.drop(colsToDrop, axis = 1)
## Stratified K fold for cross validation
skfold_train = StratifiedKFold(y_train, n_folds = 10)
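# Stratified folds preserve the class ratio in every fold, which matters
# here because the positive (exacerbator) class is rare.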
## RF classifier
rf = RandomForestClassifier(n_jobs = -1, verbose = True)
param_grid = { 'n_estimators' : [500,1000,1500,2000,2500,3000],
'criterion' : ['gini', 'entropy'],
'max_features' : [35,55,75,100],
'max_depth' : [4,5,6]
}
gs_cv = GridSearchCV(rf, param_grid, scoring = 'f1', n_jobs = -1, verbose = 2).fit(X_train, y_train)
gs_cv.best_params_
scores_rf = cross_val_score(rf, X_train, y_train, scoring = "f1", cv = 10)
print("RF CV Accuracy: %0.3f (+/- %0.3f)" % (scores_rf.mean(), scores_rf.std() * 2))
rf = RandomForestClassifier(max_features= 55, n_estimators= 1500, criterion= 'gini', max_depth= 6,
n_jobs = -1, verbose = True, oob_score = True)
rf.fit(X_train, y_train)
importances = rf.feature_importances_
impDF = pd.DataFrame({"features" : X_train.columns, "importance" : importances})
impDF.head()
impDF = impDF.sort(['importance'], ascending = False)
impDF['Rank'] = range(1,1331)
impDF[impDF['features'] == "V34"]
impDF.describe()
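# Feature selection: keep only the columns whose random-forest importance
# clears a small threshold, then refit on this reduced subspace.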
subspace = impDF[impDF['importance'] > 0.0005]['features']
rf = RandomForestClassifier(max_features= 55, n_estimators= 1500, criterion= 'entropy', max_depth= 6, n_jobs = -1, verbose = True, oob_score = True)
scores_rf = cross_val_score(rf, X_train[subspace], y_train, scoring = "f1", cv = skfold_train)
print("RF CV Accuracy: %0.3f (+/- %0.3f)" % (scores_rf.mean(), scores_rf.std() * 2))
rf.fit(X_train[subspace], y_train)
param_grid = { 'n_estimators' : [500,1000,1500,2000,2500,3000],
'criterion' : ['gini', 'entropy'],
'max_features' : [15,20,25,30],
'max_depth' : [4,5,6]
}
gs_cv = GridSearchCV(rf, param_grid, scoring = 'f1', n_jobs = -1, verbose = 2).fit(X_train[subspace], y_train)
gs_cv.best_params_
gs_cv
rf = RandomForestClassifier(max_features= 30, n_estimators= 1500, criterion= 'gini', max_depth= 6, n_jobs = -1, verbose = True, oob_score = True)
scores_rf = cross_val_score(rf, X_train[subspace], y_train, scoring = "f1", cv = skfold_train)
print("RF CV Accuracy: %0.3f (+/- %0.3f)" % (scores_rf.mean(), scores_rf.std() * 2))
rf.fit(X_train[subspace], y_train)
X_eval_preds_rf = rf.predict_proba(X_eval[subspace])  # the model was last fit on the subspace features, so predict on the same columns
X_eval_preds_rf_np = np.asarray(X_eval_preds_rf)
X_eval_preds_rf_1 = X_eval_preds_rf_np[:,1]
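# Column 1 of predict_proba is P(class == 1); these probabilities become
# the submission scores below.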
## GBM classifier
param_grid = { 'learning_rate' : [0.01, 0.02, 0.05, 0.1],
'max_depth' : [4,6],
'min_samples_leaf' : [3,5,9,17],
               'max_features' : [35]   # GridSearchCV needs a list of candidate values, even for a single value
}
gbm = GradientBoostingClassifier(n_estimators = 2500, max_features = 45, subsample = 0.85, verbose = True)
#gs_cv = GridSearchCV(gbm, param_grid, scoring = 'f1', n_jobs = -1).fit(X_train, y_train)
#gs_cv.best_params_
scores_gbm = cross_val_score(gbm, X_train, y_train, scoring="f1", cv=skfold_train, n_jobs=-1, verbose=True)
print("GBM CV Accuracy: %0.3f (+/- %0.3f)" % (scores_gbm.mean(), scores_gbm.std() * 2))
gbm.fit(X_train, y_train)
indices_gbm = np.argsort(gbm.feature_importances_)
X_train.columns[indices_gbm]
X_eval_preds_gbm = gbm.predict_proba(X_eval)
X_eval_preds_gbm_np = np.asarray(X_eval_preds_gbm)
X_eval_preds_gbm_1 = X_eval_preds_gbm_np[:,1]
# Extra Trees Classifer
ext = ExtraTreesClassifier(n_jobs = -1, n_estimators = 3500, verbose = True, bootstrap = True, oob_score = True)
scores_ext = cross_val_score(ext, X_train, y_train, scoring = "f1", cv = skfold_train)
print("Extra Trees CV Accuracy: %0.3f (+/- %0.3f)" % (scores_ext.mean(), scores_ext.std() * 2))
ext.fit(X_train,y_train)
X_eval_preds_ext = ext.predict_proba(X_eval)
X_eval_preds_ext_np = np.asarray(X_eval_preds_ext)
X_eval_preds_ext_1 = X_eval_preds_ext_np[:,1]
evaluation['rfoutcome'] = X_eval_preds_rf_1
evaluation['gbmoutcome'] = X_eval_preds_gbm_1
evaluation['extoutcome'] = X_eval_preds_ext_1
evaluation['outcome'] = X_eval_preds_rf_1
evaluation.sid = evaluation.sid.astype(int)
evaluation.sid.dtypes
submission = pd.DataFrame({'sid':evaluation.sid, 'Exacebator':evaluation.outcome})
submission = submission.sort_index(axis=1, ascending = False)
submission.head()
## Change submission name
submission.to_csv("D:/CA/Exacerbation/Python/results/RF_1500.csv", index = False)
#train.to_csv("D:/CA/Exacerbation/ExacerbatorProject/CA-Exacerbator/data/train_new.csv", index = False)
#evaluation.to_csv("D:/CA/Exacerbation/ExacerbatorProject/CA-Exacerbator/data/evaluation_new.csv", index = False)
|
UTF-8
|
Python
| false | false | 2,014 |
16,192,026,736,040 |
f6e439a80476db860436b75c0950a5f848901ce5
|
73bc9cdb7ea210c6d34c74f0449f8d1a2a25d478
|
/tests/coord_trans_proto/coord_trans_proto.py
|
5ac26c9cf5066482f0ad2b0b23ad106b64c6d042
|
[] |
no_license
|
nikobockerman/AtoBe
|
https://github.com/nikobockerman/AtoBe
|
ba127f9b06f0e1ee3c81051736188f8f36796d29
|
84ba06ea18ec63198b7d5abe1b9e42e49cff1a8f
|
refs/heads/master
| 2016-09-05T11:48:50.104758 | 2011-07-10T16:24:23 | 2011-07-10T16:24:23 | 2,026,126 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Adapted from http://positio.rista.net/en/pys60gps/src/KKJWGS84.py
import math
# Constants
# Longitude0 and Center meridian of KKJ bands
KKJ_ZONE_INFO = { 0: (18.0, 500000.0), \
1: (21.0, 1500000.0), \
2: (24.0, 2500000.0), \
3: (27.0, 3500000.0), \
4: (30.0, 4500000.0), \
5: (33.0, 5500000.0), \
}
###########################################################################
# Function: KKJ_Zone_I
###########################################################################
def KKJ_Zone_I(KKJI):
ZoneNumber = math.floor((KKJI/1000000.0))
if ZoneNumber < 0 or ZoneNumber > 5:
ZoneNumber = -1
return ZoneNumber
###########################################################################
# Function: KKJ_Zone_Lo
###########################################################################
def KKJ_Zone_Lo(KKJlo):
    # Determine the zone number from a KKJ longitude: choose the zone
    # whose central meridian is nearest (in absolute difference) to the
    # given longitude.
ZoneNumber = 5
while ZoneNumber >= 0:
if math.fabs(KKJlo - KKJ_ZONE_INFO[ZoneNumber][0]) <= 1.5:
break
ZoneNumber = ZoneNumber - 1
return ZoneNumber
###########################################################################
# Function: KKJlalo_to_KKJxy
###########################################################################
def KKJlalo_to_KKJxy(INP, ZoneNumber):
Lo = math.radians(INP['Lo']) - math.radians(KKJ_ZONE_INFO[ZoneNumber][0])
a = 6378388.0 # Hayford ellipsoid
f = 1/297.0
b = (1.0 - f) * a
bb = b * b
c = (a / b) * a
ee = (a * a - bb) / bb
n = (a - b)/(a + b)
nn = n * n
cosLa = math.cos(math.radians(INP['La']))
NN = ee * cosLa * cosLa
LaF = math.atan(math.tan(math.radians(INP['La'])) / math.cos(Lo * math.sqrt(1 + NN)))
cosLaF = math.cos(LaF)
t = (math.tan(Lo) * cosLaF) / math.sqrt(1 + ee * cosLaF * cosLaF)
A = a / ( 1 + n )
A1 = A * (1 + nn / 4 + nn * nn / 64)
A2 = A * 1.5 * n * (1 - nn / 8)
A3 = A * 0.9375 * nn * (1 - nn / 4)
A4 = A * 35/48.0 * nn * n
OUT = {}
OUT['P'] = A1 * LaF - \
A2 * math.sin(2 * LaF) + \
A3 * math.sin(4 * LaF) - \
A4 * math.sin(6 * LaF)
OUT['I'] = c * math.log(t + math.sqrt(1+t*t)) + \
500000.0 + ZoneNumber * 1000000.0
return OUT
###########################################################################
# Function: KKJxy_to_KKJlalo
###########################################################################
def KKJxy_to_KKJlalo(KKJ):
    # Iteratively bisect the target area until the computed KKJ
    # coordinates match the given ones. The area is defined on the
    # Hayford ellipsoid.
LALO = {}
ZoneNumber = KKJ_Zone_I(KKJ['I'])
MinLa = math.radians(59.0)
MaxLa = math.radians(70.5)
MinLo = math.radians(18.5)
MaxLo = math.radians(32.0)
i = 1
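    # Each pass keeps 55% of the lat/lon interval, so 35 passes shrink it
    # by a factor of about 0.55**35 (~1e-9), far finer than the accuracy
    # of the underlying datum shift.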
while (i < 35):
DeltaLa = MaxLa - MinLa
DeltaLo = MaxLo - MinLo
LALO['La'] = math.degrees(MinLa + 0.5 * DeltaLa)
LALO['Lo'] = math.degrees(MinLo + 0.5 * DeltaLo)
KKJt = KKJlalo_to_KKJxy(LALO, ZoneNumber)
if (KKJt['P'] < KKJ['P']):
MinLa = MinLa + 0.45 * DeltaLa
else:
MaxLa = MinLa + 0.55 * DeltaLa
if (KKJt['I'] < KKJ['I']):
MinLo = MinLo + 0.45 * DeltaLo
else:
MaxLo = MinLo + 0.55 * DeltaLo
i = i + 1
return LALO
###########################################################################
# Function: KKJlalo_to_WGS84lalo
###########################################################################
def KKJlalo_to_WGS84lalo(KKJ):
La = KKJ['La']
Lo = KKJ['Lo']
dLa = math.radians( 0.124867E+01 + \
-0.269982E+00 * La + \
0.191330E+00 * Lo + \
0.356119E-02 * La * La + \
-0.122312E-02 * La * Lo + \
-0.335514E-03 * Lo * Lo ) / 3600.0
dLo = math.radians(-0.286111E+02 + \
0.114183E+01 * La + \
-0.581428E+00 * Lo + \
-0.152421E-01 * La * La + \
0.118177E-01 * La * Lo + \
0.826646E-03 * Lo * Lo ) / 3600.0
WGS = {}
WGS['La'] = math.degrees(math.radians(KKJ['La']) + dLa)
WGS['Lo'] = math.degrees(math.radians(KKJ['Lo']) + dLo)
return WGS
###########################################################################
# Function: WGS84lalo_to_KKJlalo
###########################################################################
def WGS84lalo_to_KKJlalo(WGS):
La = WGS['La']
Lo = WGS['Lo']
  dLa = math.radians(-0.124766E+01 + \
                      0.269941E+00 * La + \
                     -0.191342E+00 * Lo + \
                     -0.356086E-02 * La * La + \
                      0.122353E-02 * La * Lo + \
                      0.335456E-03 * Lo * Lo ) / 3600.0
dLo = math.radians( 0.286008E+02 + \
-0.114139E+01 * La + \
0.581329E+00 * Lo + \
0.152376E-01 * La * La + \
-0.118166E-01 * La * Lo + \
-0.826201E-03 * Lo * Lo ) / 3600.0
KKJ = {}
KKJ['La'] = math.degrees(math.radians(WGS['La']) + dLa)
KKJ['Lo'] = math.degrees(math.radians(WGS['Lo']) + dLo)
return KKJ
###########################################################################
# Function: KKJxy_to_WGS84lalo
###########################################################################
# Input: dictionary with ['P'] is KKJ Northing
#                        ['I'] is KKJ Easting
# Output: dictionary with ['La'] is latitude in degrees (WGS84)
# ['Lo'] is longitude in degrees (WGS84)
###########################################################################
def KKJxy_to_WGS84lalo(KKJin):
KKJz = KKJxy_to_KKJlalo(KKJin)
WGS = KKJlalo_to_WGS84lalo(KKJz)
return WGS
###########################################################################
# Function: WGS84lalo_to_KKJxy
###########################################################################
# Input: dictionary with ['La'] is latitude in degrees (WGS84)
# ['Lo'] is longitude in degrees (WGS84)
# Output: dictionary with ['P'] is KKJ Northing
#                         ['I'] is KKJ Easting
###########################################################################
def WGS84lalo_to_KKJxy(WGSin):
KKJlalo = WGS84lalo_to_KKJlalo(WGSin)
ZoneNumber = KKJ_Zone_Lo(KKJlalo['Lo'])
KKJxy = KKJlalo_to_KKJxy(KKJlalo, ZoneNumber)
return KKJxy
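# Illustrative round trip, using the first test pair below (values are
# approximate):
#   WGS84lalo_to_KKJxy({'La': 60.2528, 'Lo': 25.02051})
#   # -> {'P': ~6682815, 'I': ~2556686}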
###########
# Test code
###########
class testCoordinate:
def __init__(self, x, y, lon, lat):
self.x = x
self.y = y
self.lon = lon
self.lat = lat
testData = []
# Test data extracted from example on page
# http://developer.reittiopas.fi/pages/fi/http-get-interface.php
testData.append(testCoordinate(2556686, 6682815, 25.02051, 60.2528))
testData.append(testCoordinate(2546340, 6675352, 24.832, 60.18713))
testData.append(testCoordinate(2557985, 6685213, 25.04465, 60.27414))
testData.append(testCoordinate(2556532, 6682578, 25.01767, 60.2507))
testData.append(testCoordinate(2524959, 6686629, 24.44804, 60.2902))
testData.append(testCoordinate(2559094, 6693721, 25.06718, 60.35033))
testData.append(testCoordinate(2556861, 6683030, 25.02373, 60.25471))
testData.append(testCoordinate(2556888, 6682971, 25.0242, 60.25417))
testData.append(testCoordinate(2560257, 6698983, 25.08981, 60.39737))
testData.append(testCoordinate(2562518, 6686969, 25.12709, 60.28923))
testData.append(testCoordinate(2536615, 6673635, 24.65643, 60.1727))
testData.append(testCoordinate(2559118, 6693833, 25.06764, 60.35133))
testData.append(testCoordinate(2559182, 6693629, 25.06874, 60.34949))
testData.append(testCoordinate(2556947, 6682640, 25.02518, 60.25119))
testData.append(testCoordinate(2556822, 6682723, 25.02294, 60.25196))
testData.append(testCoordinate(2559089, 6693605, 25.06705, 60.34929))
testData.append(testCoordinate(2546445, 6675512, 24.83393, 60.18855))
testData.append(testCoordinate(2556964, 6682609, 25.02547, 60.25091))
testData.append(testCoordinate(2556740, 6682861, 25.0215, 60.25321))
testData.append(testCoordinate(2559002, 6694007, 25.06559, 60.35291))
def testKKJxytoWGS84lalo(x, y):
test = { 'P': y, 'I': x }
result = KKJxy_to_WGS84lalo(test)
return [result['Lo'], result['La']]
testsPass = True
# Test transforming from KKJxy to WGS84latlon
for t in testData:
[lon, lat] = testKKJxytoWGS84lalo(t.x, t.y)
if math.fabs(t.lon - lon) < 0.001 and math.fabs(t.lat - lat) < 0.001:
pass
else:
print "Got: (",lon,lat,"), expected: (",t.lon,t.lat,")"
testsPass = False
if testsPass:
print "All tests in testKKJxytoWGS84lalo passed"
def testWGS84lalotoKKJxy(lon, lat):
test = { 'La': lat, 'Lo': lon }
result = WGS84lalo_to_KKJxy(test)
return [result['I'], result['P']]
testsPass = True
# Test transforming from WGS84latlon to KKJxy
for t in testData:
[x, y] = testWGS84lalotoKKJxy(t.lon, t.lat)
if abs(t.x - x) < 2 and abs(t.y - y) < 2:
pass
else:
print "Got: (",x,y,"), expected: (",t.x,t.y,")"
testsPass = False
if testsPass:
print "All tests in testWGS84lalotoKKJxy passed"
|
UTF-8
|
Python
| false | false | 2,011 |
2,164,663,527,201 |
f0c511fbd32b9a2f4667f537168da0033d7c1244
|
99071b4e631e0288c197aaff3533acd117cdd551
|
/getCategories.py
|
f5407e18dd6f536212c143abb0d98e8aa03d3fa7
|
[] |
no_license
|
bhenne/Android-Market-Crawler
|
https://github.com/bhenne/Android-Market-Crawler
|
9d8294ea1abe8e8aab34b5b562efbf556731f17b
|
563db3fa52800003e0d13884087bf2f6b1dc0e01
|
refs/heads/master
| 2020-12-25T09:38:37.988981 | 2013-07-03T15:54:23 | 2013-07-03T15:54:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import urllib2
import re
from BeautifulSoup import BeautifulSoup
__author__ = "Benjamin Henne"
URL = "https://play.google.com/store/apps/category/GAME?feature=category-nav"
s = set()
request = urllib2.Request(URL)
request.add_header("User-Agent", "PermissionCrawler")
handle = urllib2.build_opener()
content = handle.open(request).read()
soup = BeautifulSoup(content)
tags = soup('a')
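# Each category link looks like .../category/<NAME>[?...]; pull out the
# NAME segment and de-duplicate via the set.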
for tag in tags:
href = tag.get("href")
if href is not None and re.search('/category/', href):
x = href.partition('/category/')[2].partition('/')[0]
if "?" in x:
x = x.partition("?")[0]
s.add(x)
# these categories should be at the end of the list, so add them manually
s.remove("GAME")
s.remove("APPLICATION")
print " ".join(s), "GAME", "APPLICATION"
|
UTF-8
|
Python
| false | false | 2,013 |
18,253,611,016,253 |
50d07edd9f0edcd2aed100d14c8ddd423b467065
|
9ba9251319b251e6c7cef9a30b00d08a3b6dae56
|
/GUI/loadDataDlg.py
|
2d82ccb69f9091a0d90dd6d5cf4d5321a9f43b81
|
[
"GPL-3.0-only"
] |
non_permissive
|
mlangill/STAMP
|
https://github.com/mlangill/STAMP
|
f2d4cba6b599f6dce8455209e8930d937b31513c
|
14a9126999d4da48b56ae093814191ace670e1d3
|
refs/heads/master
| 2021-01-16T22:49:27.460270 | 2013-05-24T03:51:12 | 2013-05-24T03:51:12 | 12,270,098 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#=======================================================================
# Author: Donovan Parks
#
# Dialog box used to load profile data and group metadata.
#
# Copyright 2011 Donovan Parks
#
# This file is part of STAMP.
#
# STAMP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# STAMP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with STAMP. If not, see <http://www.gnu.org/licenses/>.
#=======================================================================
from PyQt4 import QtGui, QtCore
from loadDataDlgUI import Ui_loadDataDlg
class LoadDataDlg(QtGui.QDialog):
def __init__(self, preferences, parent=None, info=None):
QtGui.QWidget.__init__(self, parent)
# initialize GUI
self.ui = Ui_loadDataDlg()
self.ui.setupUi(self)
self.centerWindow()
self.preferences = preferences
# connect signals to slots
self.connect(self.ui.tbProfileFile, QtCore.SIGNAL("clicked()"), self.openProfileFile)
self.connect(self.ui.tbMetadataFile, QtCore.SIGNAL("clicked()"), self.openMetadataFile)
def openProfileFile(self):
profileFile = QtGui.QFileDialog.getOpenFileName(self, 'Open profile', self.preferences['Last directory'], 'STAMP profile file (*.spf *.tsv *.txt);;All files (*.*)')
if profileFile != '':
self.preferences['Last directory'] = profileFile[0:profileFile.lastIndexOf('/')]
self.ui.txtProfileFile.setText(profileFile)
def openMetadataFile(self):
metadataFile = QtGui.QFileDialog.getOpenFileName(self, 'Open group metadata', self.preferences['Last directory'], 'STAMP group metadata file (*.met *.tsv *.txt);;All files (*.*)')
if metadataFile != '':
self.preferences['Last directory'] = metadataFile[0:metadataFile.lastIndexOf('/')]
self.ui.txtMetadataFile.setText(metadataFile)
def centerWindow(self):
screen = QtGui.QDesktopWidget().screenGeometry()
size = self.geometry()
self.move((screen.width()-size.width())/2, (screen.height()-size.height())/2)
def getProfileFile(self):
return self.ui.txtProfileFile.text()
def getMetadataFile(self):
return self.ui.txtMetadataFile.text()
if __name__ == "__main__":
pass
|
UTF-8
|
Python
| false | false | 2,013 |
9,509,057,635,048 |
b85f1adf581bf285a547556548534041804dd243
|
8021447c71c312729ce9b2d239dfdc9ac56ef8ff
|
/cgi-bin/Extract.py
|
7684819b56802e3f80610842feee11bd5abc9174
|
[] |
no_license
|
davechallis/techtales
|
https://github.com/davechallis/techtales
|
89b5864605370d8d4bd1131fa97604505c4aecb0
|
b1d48e0dfb80bb6f22589de2d904f2b69ba9ff77
|
refs/heads/master
| 2021-01-10T18:40:21.467148 | 2010-03-01T12:07:44 | 2010-03-01T12:07:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import os.path
import sys
import re
import pickle
class Extract(object):
def __init__(self, site, datapath='data', cachepath='cache', extractorpath='extractors'):
self.datapath = datapath
self.cachepath = cachepath
self.extractorpath = extractorpath
self.site = site
self.results = {}
self.html = ''
def run(self):
cachefile = self.cachepath+'/'+self.site
# If we have a cache, unpickle and return.
if os.path.exists(cachefile):
            f = open(cachefile, 'rb')  # binary mode: pickle data is not plain text
self.results = pickle.load(f)
f.close()
return self.results
# Otherwise do the processing.
for file in os.listdir(self.datapath):
if file.startswith(self.site):
f = open(self.datapath+'/'+file, 'r')
self.html = f.read()
f.close()
date = file.split('.').pop()
self.results[date] = self.run_extractors()
# Dump out the cache file
        cache = open(cachefile, 'wb')  # binary mode to match the read side
pickle.dump(self.results, cache)
cache.close()
return self.results
def run_extractors(self):
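        # Import every module in the extractors package, instantiate the
        # class named after the module, and merge whatever key/value pairs
        # it extracts from the cached HTML.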
skip = ('Extractor.py', '__init__.py')
data = {}
for file in os.listdir(self.extractorpath):
if file.endswith('.pyc'):
continue
if file in skip:
continue
(classname, ext) = os.path.splitext(file)
mod = __import__('extractors.'+classname)
line = 'mod.%s.%s()' % (classname,classname)
obj = eval(line)
results = obj.run(self.html)
data.update(results)
return data
|
UTF-8
|
Python
| false | false | 2,010 |
14,310,831,065,547 |
6245fc303223fb029c848f79fb8ccddc074f3199
|
41f0a2840c90063c147ee5da34716e2a75f03e26
|
/septic_tank/scripts/medley.test.server.py
|
f543e2acbb5f82ce81c19dee5bc3f1445f1df8b3
|
[
"MIT"
] |
permissive
|
jbruce12000/septic-tank
|
https://github.com/jbruce12000/septic-tank
|
151f3ff7b1dc07b9022fcbd6d60e2535e63835b3
|
0adf9c980ad71714ec4d68862b4a736abc480c07
|
refs/heads/master
| 2021-01-23T16:36:35.923792 | 2014-03-12T21:16:16 | 2014-03-12T21:16:16 | 3,910,302 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import logging
import socket
from pipeline import Pipeline
from inputs import FileInput, ZeroMQInput, StdInput
from parsers import RegexParser
from filters import ZuluDateFilter, RemoveFieldsFilter, GrepFilter, LCFilter, UniqFilter, AddFieldsFilter
from outputs import *
from dirwatcher import DirWatcher
logging.basicConfig(filename='./debug.log', level=logging.INFO, format='%(asctime)s:%(levelname)s:%(message)s')
if __name__ == "__main__":
i = FileInput('/home/jbruce/septic_tank/septic_tank/logs/archives/all.medleypid.log')
p = RegexParser(use = ['medleypidlog'])
rff = RemoveFieldsFilter(fields = ['msg'])
zdf = ZuluDateFilter(fields=['date'],iszulu=True)
uniq = UniqFilter()
sqlite = SQLiteOutput(commitrate=10000)
pipeline = Pipeline(pipes = [i,p,rff,zdf,uniq,sqlite])
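    # Iterating the pipeline pulls each record through every stage; the
    # loop body is empty because the SQLite output stage does the work.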
for data in pipeline:
pass
|
UTF-8
|
Python
| false | false | 2,014 |
16,664,473,135,666 |
4e5d919605fcf36b709447e5992663a0a0cbe6e0
|
4d3f79abd81128155b47041b58bc61155141031c
|
/setup.py
|
65d4055b5ffabdb690d45e25957e8fc0788ce4ca
|
[] |
no_license
|
enolfc/os-cloud-ur
|
https://github.com/enolfc/os-cloud-ur
|
192288704bb1ca49052a7adaad30f07022535bd6
|
68b8de95454b083cc86e23d441a88c0ba10f81bd
|
refs/heads/master
| 2016-03-06T02:59:36.255348 | 2014-01-21T12:33:47 | 2014-01-21T12:33:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2014, Spanish National Research Council
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from setuptools import setup
setup(
name='ceilometer2ur',
version='0.1',
description='Generate CloudAccounting records from Ceilometer.',
long_description=("This package generates CloudAccounting UsageRecords"
" from the data contained in Ceilometer."),
classifiers=[
'Programming Language :: Python',
        'Development Status :: 4 - Beta',
        'Topic :: System :: Systems Administration',
],
keywords='',
author='Spanish National Research Council',
author_email='[email protected]',
url='https://github.com/enolfc/os-cloud-ur',
license='Apache License, Version 2.0',
include_package_data=True,
packages=['ceilometer2ur'],
zip_safe=False,
#install_requires=[
# 'setuptools',
# 'python-dateutil',
# 'python-ceilometerclient',
# 'dirq',
#],
entry_points = {
'console_scripts': [
'ceilometer2ur = ceilometer2ur.shell:main',
]
},
)
|
UTF-8
|
Python
| false | false | 2,014 |
16,355,235,475,989 |
bfb3e85ee9f80f578ab457bc5a0ec065033529d7
|
6450234cc5339e9d05102b25b25ba38e2bd9e4cb
|
/MonoJetAnalysis/plotsWolfgang/SampleFilters.py
|
36894ebf6b5924e6221c947a6baa1276914a8a5d
|
[] |
no_license
|
wa01/Workspace
|
https://github.com/wa01/Workspace
|
57b87481005c441ab91a8180ddf6ea00b520aca7
|
47759c6a20473f7a694ca9e3fd4e0e8343c8018c
|
refs/heads/master
| 2021-01-15T10:36:55.429420 | 2014-09-20T17:44:54 | 2014-09-20T17:44:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class LeptonFilter:
def __init__(self,leptonPdg):
self.leptonPdg = abs(leptonPdg)
def accept(self,eh):
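        # Accept the event if any generator-level particle matches the
        # requested lepton PDG id (sign-insensitive).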
ngp = int(eh.get("ngp"))
pdgs = eh.get("gpPdg")
for pdg in pdgs[:ngp]:
if abs(pdg)==self.leptonPdg:
return True
return False
class InvertedSampleFilter:
def __init__(self,other):
self.filter = other
def accept(self,eh):
return not self.filter.accept(eh)
|
UTF-8
|
Python
| false | false | 2,014 |