__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3,685,081,940,809 |
8b3a7d0ca459479bd2773408fa6de67fd62c494f
|
9c3d49964255b35ec0975708862960e4e66c9f27
|
/doc/__init__.py
|
47f9ddf03e98ffa81a83add542c8026b51e1e7b6
|
[] |
no_license
|
SwooshyCueb/mini-buildd
|
https://github.com/SwooshyCueb/mini-buildd
|
42cf4f7dfc8c9ecbc21e846acd948d73673bf48e
|
86ac05a66e605169c8ace43ab6e86aea44c995c1
|
refs/heads/master
| 2016-09-07T18:54:46.634615 | 2014-10-10T16:45:47 | 2014-10-10T16:45:47 | 26,328,363 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
#
# This file only exists for realizing the use of 'apydoc.py'
# and is supposed to be removed some time later -- most likely
# as soon as sphinx >= 1.1 is available in Squeeze.
|
UTF-8
|
Python
| false | false | 2,014 |
1,099,511,659,816 |
e9b5f2602cb540e85c4c6d90e4fe570584ef8930
|
bdf5c2be642e6ff4e975c2b8419086dd0c9c3da0
|
/Bible/src/ch01/cal03.py
|
1b97ed50b680d4f15aa8a59a3ce70122c6dd4387
|
[] |
no_license
|
miki1029/Python_Basic
|
https://github.com/miki1029/Python_Basic
|
d6b71b039cbbb3175405c921a5e7bc345aab6bf4
|
87bdf0fb3782ca9e977bea85d8f5de49c3463f9e
|
refs/heads/master
| 2016-09-15T18:05:46.902721 | 2014-02-07T18:24:58 | 2014-02-07T18:24:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
# file : cal03.py
# Print a text calendar for June 2012, with weeks starting on Sunday.
import calendar
calendar.setfirstweekday(6) # use Sunday (6) as the first day of the week
calendar.prmonth(2012, 6)
|
UTF-8
|
Python
| false | false | 2,014 |
12,721,693,155,931 |
1d4226c8345a0e0ddb917fe9a1bf52388bee03e2
|
bfc874767de27c84f3b61b7b5d0b6a4ee1fefb7f
|
/analysis/AbstractAnalyzer.py
|
acb550552b35dc72f8a0867e5b80677565f4370a
|
[
"GPL-3.0-only"
] |
non_permissive
|
pombreda/raft
|
https://github.com/pombreda/raft
|
294774b70d07fb4b7d57fac3ddb92e2681fb6a7f
|
c81c5778a8113e3c7095334ed91dc68352e5da5d
|
refs/heads/master
| 2021-01-01T19:07:04.417738 | 2014-08-12T21:17:50 | 2014-08-12T21:17:50 | 32,209,251 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# Author: Justin Engler
#
# Copyright (c) 2011 RAFT Team
#
# This file is part of RAFT.
#
# RAFT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RAFT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RAFT. If not, see <http://www.gnu.org/licenses/>.
#
import json
from .resultsclasses import AnalysisResults
from utility import TreeWidgetTools
from PyQt4.QtGui import *
from PyQt4.QtCore import Qt
class AbstractAnalyzer(object):
    """
    AbstractAnalyzer
    Defines the interface for a RAFT analyzer.
    Analyzers search through request or response data,
    locate items of interest, and report back the details.
    Analyzers shall either inherit from AbstractAnalyzer
    or implement all of the member functions with compatible
    signatures and return values.
    Each analyzer will be instantiated before analysis begins, and each
    resulting analysis object will be reused on each page.
    It is the responsibility of the analyzer writer to save or clear
    object fields as desired for each page.
    """
    ##################### Standard Functions
    ##################### Override when defining your own analyzers
    def __init__(self):
        # Human-readable description and name; subclasses are expected
        # to replace both of these.
        self.desc="Did you define a desc in your subclass? "\
        "An abstract class to define what an analyzer must support."\
        " Automatically called during analysis or configuration start."
        self.friendlyname="Abstract Analyzer"
        # Active configuration dict; lazily defaulted by
        # getCurrentConfiguration() when still None.
        self.currentconfiguration=None
    def preanalysis(self):
        """Any special pre-test setup goes here. This will be called after configuration, before any pages are loaded
        """
        pass
    def analyzeTransaction(self,transaction,results):
        """
        Performs an analysis on target. This function must be defined by a subclass.
        Transaction is information about the request and response (the target of your analysis)
        """
        raise Exception("analyze() must be defined in a subclass for analysis")
    def postanalysis(self,results):
        """Any post-test steps go here. Called after all pages were analyzed."""
        pass
    def getDefaultConfiguration(self):
        """Returns the default configuration dictionary for this analyzer.
        If your module is configurable at all, you must override this."""
        return {}
    def defaultEnabled(self):
        """Returns if the analyzer should be Enabled by default"""
        return True
    ##################### Special Functions
    ##################### You shouldn't need to override these unless you're doing something very special
    def setConfiguration(self,newconfiguration):
        """Accepts any configuration data from the system.
        Either a settings dictionary or a JSON string representation
        of the same is OK.

        Returns the configuration dict now in effect; raises ValueError
        for any other input type (or an empty string).
        """
        # BUG FIX: the original tested "type(...)==str or type(...)==str"
        # (a botched 2to3 of "str or unicode"); isinstance covers the intent.
        if isinstance(newconfiguration, str) and len(newconfiguration)>0:
            # JSON string: decode it into a dict.
            self.currentconfiguration = json.loads(newconfiguration)
        elif isinstance(newconfiguration, dict):
            # Already a dict: use as-is.
            self.currentconfiguration = newconfiguration
        else:
            raise ValueError("I don't know what that configuration is.")
        return self.currentconfiguration
    def encodeConfiguration(self,newconfiguration):
        """Takes a given configuration and encodes it for storage.
        Does not alter existing runtime config of this analyzer.
        """
        return json.dumps(newconfiguration)
    def initResultsData(self):
        """Sets up results data. Called after preanalysis, but before actual analysis.
        Analyzer writers shouldn't need to override this function except for special cases, like
        when subclassing a custom AnalysisResults object.
        """
        self.analysisresults=AnalysisResults.AnalysisResults()
        # BUG FIX: str.translate('<>') is a no-op in Python 3 (the Python 2
        # form was translate(None, '<>')); delete '<' and '>' explicitly.
        self.analysisresults.setAnalyzerInfo(self.desc,self.friendlyname,str(self.__class__).translate(str.maketrans('', '', '<>')))
    def getResults(self):
        """returns a reference to the Analyzer's results collection.
        Analyzer writers shouldn't need to override this function"""
        return self.analysisresults
    def getCurrentConfiguration(self):
        """Returns the current config dict. Should not need to override this"""
        if not hasattr(self,'currentconfiguration') or self.currentconfiguration is None:
            self.currentconfiguration=self.getDefaultConfiguration()
        return self.currentconfiguration
    def generateConfigurationGui(self,analyzerConfig):
        """Populate the given Qt tree widget with the current configuration."""
        currentconfig=self.getCurrentConfiguration()
        TreeWidgetTools.populate_tree_widget(analyzerConfig,currentconfig)
    def generalInfoToHTML(self):
        """Returns an HTML 'header' string describing the analyzer"""
        # Strip the angle brackets from "<class '...'>" for display.
        outstring="""<h1>%s</h1>
        (%s)
        <p>%s</p>
        """%(self.friendlyname,str(self.__class__).translate(str.maketrans('', '', '<>')),self.desc)
        return outstring
|
UTF-8
|
Python
| false | false | 2,014 |
8,967,891,727,943 |
32ff0d8df10f6d6762f635a1f4c9daa060008384
|
50aa1effb4b6dc1a44a72f36ba53f25c9720b3f7
|
/research/max_example_ga/GAProblems/TextTester.py
|
93dcdaba8ca38d0456d8ea82b36f14a29033a16a
|
[] |
no_license
|
ahk/robobach
|
https://github.com/ahk/robobach
|
61c3fa11774ba11df47a50fb45e241fca180da6e
|
a5ef875926f7e29ca841ec449f3f34974d73b153
|
refs/heads/master
| 2016-09-06T13:47:40.379729 | 2010-03-31T11:05:28 | 2010-03-31T11:05:28 | 451,885 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
TextTester.py
Created by Max Martin on 2008-01-30.
Copyright (c) 2008 __MyCompanyName__. All rights reserved.
"""
import sys
import os
def main():
    """Write a 10x30 grid of column indices to the 'TestResults' file."""
    # Each of the 10 rows holds the values 0..29.
    grid = [[col for col in range(30)] for _ in range(10)]
    # Append every value, comma-terminated, to the results file.
    with open('TestResults', 'a') as resultfile:
        for row in grid:
            for value in row:
                resultfile.write(str(value) + ",")

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,010 |
541,165,902,683 |
d59216e5babd6167f869004dba63ab815fc0eb5b
|
13abcc2276b99233307c090adeea5fb190cfd21d
|
/ex18.py
|
f8a17f2ca6c0cf4b8d60289d5e189606d1523364
|
[] |
no_license
|
wocoburguesa/Python_the_Hard_Way
|
https://github.com/wocoburguesa/Python_the_Hard_Way
|
4efc56546dbae1271f11046459028bf9458f575b
|
8a40814899c36767fd6814691b91531dc9c72ffc
|
refs/heads/master
| 2020-06-09T07:30:22.506444 | 2012-01-19T01:27:09 | 2012-01-19T01:27:09 | 3,193,072 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -- coding: utf-8 --
# Exercise 18 (functions with different argument styles). Python 2 syntax.
def print_two(*args):
    # Unpack the two variadic arguments and print them both ways.
    arg1, arg2 = args
    print "Imprimiendo con arg[idx]-> args[0]: %r, arg[1]: %r" % (args[0], args[1])
    print "Imprimiendo las variables asignadas-> arg1: %r, arg2: %r" % (arg1, arg2)
# same thing, without *args
def print_two_again(arg1, arg2):
    print "arg1: %r, arg2: %r" % (arg1, arg2)
# with a single argument
def print_one(arg1):
    print "arg1: %r" % arg1
# with no arguments
def print_none():
    print "I got nothin'."
print_two("Marco", "Flores")
print_two_again("Antonio", "Nuñez")
print_one("Oz")
print_none()
|
UTF-8
|
Python
| false | false | 2,012 |
3,504,693,358,143 |
c169e5fcca8e08e668bee0420008f2c05e8a7a3b
|
044a2b70876a8193f780e44cb20ef1ffca976996
|
/spaciblo/sim/glge.py
|
f8a19eac12cd51dc2e8cece5466527e0afe2a48e
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
brownman/spaciblo
|
https://github.com/brownman/spaciblo
|
9fd9b1a97d8ab7e65c74b99187dc1da56293b5e2
|
e944f7eb3ffc4a4971ff7e765f0344d0358b9ec3
|
refs/heads/master
| 2021-01-18T11:19:21.441727 | 2010-10-25T01:35:43 | 2010-10-25T01:35:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
from django.core.urlresolvers import reverse
"""
A set of objects which define the visual aspects of a 3D scene: position, orientation, motion, geometry, material, lighting...
"""
# Flag for material colour
M_COLOR = 1
# Flag for material normal
M_NOR = 2
# Flag for material alpha
M_ALPHA = 4
# Flag for material specular color
M_SPECCOLOR = 8
# Flag for material specular value
M_SPECULAR = 16
# Flag for material shininess
M_SHINE = 32
# Flag for material reflectivity
M_REFLECT = 64
# Flag for material emission
M_EMIT = 128
# Flag for material alpha
# NOTE(review): M_ALPHA is bound twice (4 above, 256 here); the second
# assignment wins at import time -- confirm which value is intended.
M_ALPHA = 256
# Flag for masking with textures red value
M_MSKR = 512
# Flag for masking with textures green value
M_MSKG = 1024
# Flag for masking with textures blue value
M_MSKB = 2048
# Flag for masking with textures alpha value
M_MSKA = 4096
# Flag for mapping of the height in parallax mapping
M_HEIGHT = 8192
# Enumeration for first UV layer
UV1 = 0
# Enumeration for second UV layer
UV2 = 1
# Enumeration for normal texture coords
MAP_NORM = 3
# Enumeration for object texture coords
MAP_OBJ = 4
# Enumeration for reflection coords
MAP_REF = 5
# Enumeration for environment coords
MAP_ENV = 6
# Enumeration for view coords
MAP_VIEW = 7
# Enumeration for mix blending mode
BL_MIX = 0
# Enumeration for multiply blending mode
BL_MUL = 1
# Enumeration for no fog
FOG_NONE=1;
# Enumeration for linear fall off fog
FOG_LINEAR=2;
# Enumeration for exponential fall off fog
FOG_QUADRATIC=3;
# Enumeration for node group type
G_NODE=1;
# Enumeration for root group type
G_ROOT=2;
# Enum for XYZ rotation order
ROT_XYZ=1;
# Enum for XZY rotation order
ROT_XZY=2;
# Enum for YXZ rotation order
ROT_YXZ=3;
# Enum for YZX rotation order
ROT_YZX=4;
# Enum for ZXY rotation order
ROT_ZXY=5;
# Enum for ZYX rotation order
ROT_ZYX=6;
# Enumeration for euler rotations mode
P_EULER=1;
# Enumeration for quaternions mode
P_QUAT=2;
# Enumeration for matrix rotation mode
P_MATRIX=3;
def copy_attributes(target, data, ignore=None):
    """Copy every key in *data* onto *target* as an attribute.

    data -- a dict-like of attribute names to values; falsy means no-op.
    ignore -- optional list of key names to skip.
    """
    if not data:
        return
    ignore = ignore or []
    for key in data:
        # Idiom fix: "key not in ignore" instead of "not key in ignore".
        if key not in ignore:
            setattr(target, key, data[key])
def populate_children(target, data):
    """Rebuild target.children from the serialized records in data['children']."""
    for record in data['children']:
        # A record carrying its own 'children' key deserializes as a Group;
        # anything else is a leaf Object.
        node_cls = Group if 'children' in record else Object
        target.children.append(node_cls().populate(record))
def populate_class_array(target, data, cls, key_name):
    """Fill the list at target.<key_name> with cls instances built from data[key_name]."""
    if not data:
        return
    bucket = getattr(target, key_name)
    bucket.extend(cls().populate(record) for record in data[key_name])
class SceneBase(object):
    """The base class which all scene elements extend."""
    def __init__(self, uid=None):
        # Subclasses may have set uid already; only assign if missing.
        if not hasattr(self, 'uid'):
            self.uid = uid or SceneBase.createUUID()
        self.name = None #used mostly for debugging
    def flatten(self):
        """Return a flat list containing this node and all descendants.

        BUG FIX: the original tested "'children' in self" (TypeError on
        objects without __contains__, and inverted logic anyway) and
        appended nested lists instead of extending.
        """
        if not hasattr(self, 'children'):
            return [self]
        results = [self]
        for child in self.children:
            results.extend(child.flatten())
        return results
    def populate(self, data):
        """
        This should return self after populating it with the data (which is probably parsed from a JSON version of a Scene).
        If the data is None then populate should return None.
        """
        raise NotImplementedError()
    @classmethod
    def createUUID(cls):
        """ Returns a GLGE style UUID like 'E9C997CB-BAB2-4348-8048B8F938DCCC78EA0' """
        data = ["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F"]
        data2 = ["8","9","A","B"]
        uuid = [data[random.randint(0, len(data) - 1)] for i in range(38)]
        uuid[19] = data2[random.randint(0, len(data2) - 1)]
        # Insert dashes and the version marker at fixed positions.
        uuid = uuid[0:8] + ['-'] + uuid[9:13] + ['-4'] + uuid[15:18] + ['-'] + uuid[19:]
        return ''.join(uuid)
    @classmethod
    def node_name(cls):
        """Return the class name used as the serialized node type tag."""
        return cls.__name__
class Placeable(SceneBase):
    """Mixin holding position, rotation, and scale state for scene nodes."""
    def __init__(self):
        SceneBase.__init__(self)
        # Location, rotation, and all delta (velocity) components start at 0.
        for prefix in ('loc', 'dLoc', 'rot', 'dRot', 'dScale'):
            for axis in 'XYZ':
                setattr(self, prefix + axis, 0)
        # Identity quaternion orientation.
        self.quatW = 1
        self.quatX = self.quatY = self.quatZ = 0
        # Unit scale on every axis.
        self.scaleX = self.scaleY = self.scaleZ = 1
        # Identity transform matrix.
        self.matrix = [[1,0,0,0],[0,1,0,0],[0,0,1,0],[0,0,0,1]]
        self.rotOrder = ROT_XYZ
        self.mode = P_QUAT
        #self.lookAt = None
    @property
    def quat(self):
        """Orientation as [x, y, z, w]."""
        return [self.quatX, self.quatY, self.quatZ, self.quatW]
    @property
    def loc(self):
        """Location as [x, y, z]."""
        return [self.locX, self.locY, self.locZ]
    def set_rot(self, vals):
        """Switch to euler mode and set rotation from the first three values."""
        self.mode = P_EULER
        self.rotX, self.rotY, self.rotZ = vals[0], vals[1], vals[2]
    def set_quat(self, vals):
        """Switch to quaternion mode and set orientation from [x, y, z, w]."""
        self.mode = P_QUAT
        self.quatX, self.quatY, self.quatZ = vals[0], vals[1], vals[2]
        self.quatW = vals[3]
    def set_loc(self, vals):
        """Set location from the first three values."""
        self.locX, self.locY, self.locZ = vals[0], vals[1], vals[2]
    def set_scale(self, vals):
        """Set per-axis scale from the first three values."""
        self.scaleX, self.scaleY, self.scaleZ = vals[0], vals[1], vals[2]
class BezTriple(SceneBase):
    """A bezier control triple: two handles around a knot."""
    def __init__(self):
        SceneBase.__init__(self)
        self.x1 = 0
        self.y1 = 0
        self.x = 0 # the knot; why these aren't named x2 and y2 is unclear
        self.y = 0
        self.x3 = 0
        self.y3 = 0
    def __unicode__(self):
        points = (self.x1, self.y1, self.x, self.y, self.x3, self.y3)
        return "%s,%s,%s,%s,%s,%s" % points
    def populate(self, data):
        """Copy serialized values onto self; None data yields None."""
        if not data:
            return None
        copy_attributes(self, data)
        return self
class StepPoint(SceneBase):
    """A 2D point on a stepped animation curve."""
    def __init__(self):
        SceneBase.__init__(self)
        self.x = 0
        self.y = 0
    def __unicode__(self):
        return "%s,%s" % (self.x, self.y)
    def populate(self, data):
        """Copy serialized values onto self; None data yields None."""
        if not data:
            return None
        copy_attributes(self, data)
        return self
class AnimationCurve(SceneBase):
    """An animation curve bound to a channel, with key frames and solutions."""
    def __init__(self):
        SceneBase.__init__(self)
        self.channel = None
        self.keyFrames = []
        self.solutions = []
    def populate(self, data):
        """Rebuild from serialized data; the channel is deserialized as an ActionChannel."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['channel'])
        self.channel = ActionChannel().populate(data['channel'])
        return self
class AnimationVector(SceneBase):
    """A collection of animation curves spanning a fixed frame count."""
    def __init__(self):
        SceneBase.__init__(self)
        self.curves = []
        self.frames = 250
    def populate(self, data):
        """Rebuild from serialized data; curves are deserialized as AnimationCurves."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['curves'])
        populate_class_array(self, data, AnimationCurve, 'curves')
        return self
class Animatable(SceneBase):
    """Mixin holding animation playback state for a scene node."""
    def __init__(self):
        SceneBase.__init__(self)
        # Unset playback markers.
        self.animationStart = None
        self.animation = None
        self.lastFrame = None
        self.pausedTime = None
        # Blend and playback defaults.
        self.blendStart = 0
        self.blendTime = 0
        self.frameRate = 25
        self.loop = True
        self.paused = False
class Action(SceneBase):
    """A named animation action made up of ActionChannels."""
    def __init__(self):
        SceneBase.__init__(self)
        self.channels = []
    def populate(self, data):
        """Rebuild from serialized data; returns self, or None for falsy data.

        FIX: exclude 'channels' from the raw attribute copy so the list
        built below holds ActionChannel instances rather than starting
        from the raw serialized dicts (matches how Material, Group, and
        AnimationVector handle their class-array keys).
        """
        if not data:
            return None
        copy_attributes(self, data, ['channels'])
        populate_class_array(self, data, ActionChannel, 'channels')
        return self
class ActionChannel(SceneBase):
    """Binds a single animation curve to a target channel."""
    def __init__(self):
        SceneBase.__init__(self)
        self.animation = None
    def populate(self, data):
        """Rebuild the nested animation curve; None data yields None."""
        if not data:
            return None
        self.animation = AnimationCurve().populate(data['animation'])
        return self
class GroupTemplate(SceneBase):
    """The information used to fetch geometry data (e.g. the mesh) from an HTTPd."""
    def __init__(self, template_id=None, name=None):
        SceneBase.__init__(self)
        self.template_id = template_id
        self.url = self.generate_url()
        self.name = name
    def generate_url(self):
        """Resolve this template's REST endpoint via Django's URL reverser."""
        return reverse('template-api', kwargs={'id': self.template_id})
    def populate(self, data):
        """Copy serialized values onto self; None data yields None."""
        if not data:
            return None
        copy_attributes(self, data)
        return self
class Group(Animatable, Placeable):
    """A scene node that contains child nodes."""
    def __init__(self):
        Animatable.__init__(self)
        Placeable.__init__(self)
        self.group_template = None
        self.username = None
        self.children = []
        self.group_type = G_NODE
    def get_user(self, username):
        """Depth-first search for the group owned by *username*; None if absent."""
        if self.username == username:
            return self
        for child in self.children:
            # Only Group nodes can own a user or contain further groups.
            if isinstance(child, Group):
                found = child.get_user(username)
                if found:
                    return found
        return None
    def remove_node(self, uid):
        """Detach and return the node with *uid*, searching recursively; None if absent."""
        for child in self.children:
            if child.uid == uid:
                self.children.remove(child)
                return child
            if hasattr(child, 'remove_node'):
                found = child.remove_node(uid)
                if found:
                    return found
        return None
    def get_node(self, uid):
        """Return the node with *uid* (self included), or None."""
        if self.uid == uid:
            return self
        for child in self.children:
            found = child.get_node(uid)
            if found:
                return found
        return None
    def populate(self, data):
        """Rebuild this group, its children, animation, and template from data."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['children', 'animation', 'group_template'])
        populate_children(self, data)
        self.animation = AnimationCurve().populate(data['animation'])
        self.group_template = GroupTemplate().populate(data['group_template'])
        return self
class Text(Placeable, Animatable):
    """A renderable, placeable text node."""
    def __init__(self):
        Animatable.__init__(self)
        Placeable.__init__(self)
        self.zTrans = True
        self.aspect = 1.0
        self.color = [1,1,1]
        self.text = ""
        self.font = "Times"
        self.size = 100
        # NOTE(review): TEXT_TEXTPICK is not defined anywhere in this
        # module; this line raises NameError unless the constant is
        # provided elsewhere -- confirm.
        self.pickType = TEXT_TEXTPICK
    def populate(self, data):
        """Rebuild from serialized data; the animation is deserialized as a curve."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['animation'])
        self.animation = AnimationCurve().populate(data['animation'])
        return self
class Mesh(SceneBase):
    """Raw geometry buffers: positions, normals, faces, UVs, and joints."""
    def __init__(self):
        SceneBase.__init__(self)
        self.positions = []
        self.normals = []
        self.faces = []
        self.UV = []
        self.joints = []
        self.invBind = None
    def populate(self, data):
        """Copy serialized buffers straight onto self; None data yields None."""
        if not data:
            return None
        copy_attributes(self, data)
        return self
class Light(Placeable, Animatable):
    """A placeable light source with attenuation, spot, and shadow settings."""
    def __init__(self):
        Animatable.__init__(self)
        Placeable.__init__(self)
        # Attenuation curve coefficients.
        self.constantAttenuation = 1
        self.linearAttenuation = 0.002
        self.quadraticAttenuation = 0.0008
        # Spotlight parameters.
        self.spotCosCutOff = 0.95
        self.spotPMatrix = None
        self.spotExponent = 10
        # Color and lighting contributions.
        self.color = [1,1,1]
        self.diffuse = True
        self.specular = True
        # Shadow sampling.
        self.samples = 0
        self.softness = 0.01
        # NOTE(review): L_POINT is not defined anywhere in this module;
        # this line raises NameError unless the constant is provided
        # elsewhere -- confirm.
        self.type = L_POINT
        self.texture = None
        self.shadowBias = 2.0
        self.castShadows = False
    def populate(self, data):
        """Rebuild from serialized data, deserializing texture and animation."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['animation', 'texture'])
        self.texture = Texture().populate(data['texture'])
        self.animation = AnimationCurve().populate(data['animation'])
        return self
class MultiMaterial(SceneBase):
    """Pairs one Mesh with one Material for multi-material objects."""
    def __init__(self):
        SceneBase.__init__(self)
        self.mesh = None
        self.material = None
    def populate(self, data):
        """Rebuild the nested mesh and material; None data yields None."""
        if not data:
            return None
        self.mesh = Mesh().populate(data['mesh'])
        self.material = Material().populate(data['material'])
        return self
class Object(Placeable, Animatable):
    """A drawable leaf node: mesh plus material(s) plus transform."""
    def __init__(self):
        Placeable.__init__(self)
        Animatable.__init__(self)
        self.mesh = None
        # Identity transform until populated.
        self.transformMatrix = [[1,0,0,0],[0,1,0,0],[0,0,1,0],[0,0,0,1]]
        self.material = None
        self.multimaterials = []
        self.zTrans = False
    def populate(self, data):
        """Rebuild mesh, material, and multi-material list from serialized data."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['mesh', 'material', 'multimaterials'])
        self.mesh = Mesh().populate(data['mesh'])
        self.material = Material().populate(data['material'])
        populate_class_array(self, data, MultiMaterial, 'multimaterials')
        return self
class Texture(SceneBase):
    """A texture identified by an opaque storage key."""
    def __init__(self):
        SceneBase.__init__(self)
        self.key = None
    def __unicode__(self):
        return self.key
    def populate(self, data):
        """Copy serialized values onto self; None data yields None."""
        if not data:
            return None
        copy_attributes(self, data)
        return self
class TextureCube(SceneBase):
    """A six-faced cube map texture."""
    def __init__(self):
        SceneBase.__init__(self)
        # One slot per cube face.
        self.posX = 0
        self.negX = 0
        self.posY = 0
        self.negY = 0
        self.posZ = 0
        self.negZ = 0
        self.texture = None
        self.loadState = 0
    def populate(self, data):
        """Rebuild from serialized data, deserializing the nested texture."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['texture'])
        self.texture = Texture().populate(data['texture'])
        return self
class MaterialLayer(Animatable):
    """One texture layer of a material: mapping target plus UV transform."""
    def __init__(self):
        Animatable.__init__(self)
        self.texture = None
        self.blendMode = None
        self.mapto = M_COLOR
        self.mapinput = UV1
        # Per-axis UV transform: scale starts at 1, every offset,
        # rotation, and delta component starts at 0.
        for axis in 'XYZ':
            setattr(self, 'scale' + axis, 1)
            setattr(self, 'offset' + axis, 0)
            setattr(self, 'rot' + axis, 0)
            setattr(self, 'dScale' + axis, 0)
            setattr(self, 'dOffset' + axis, 0)
            setattr(self, 'dRot' + axis, 0)
        self.alpha = 1
        self.height = 0.05
        self.matrix = None
    def populate(self, data):
        """Rebuild from serialized data, deserializing animation and texture."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['blendMode', 'texture', 'animation'])
        self.animation = AnimationCurve().populate(data['animation'])
        self.texture = Texture().populate(data['texture'])
        return self
class Material(Animatable):
    """Surface appearance: layered textures, lights, and reflectance values."""
    def __init__(self):
        Animatable.__init__(self)
        self.layers = []
        self.textures = []
        self.lights = []
        self.color = [1,1,1,1]
        self.specColor = [1,1,1]
        self.reflect = 0.8
        self.shine = 10
        self.specular = 1
        self.emit = 0
        self.alpha = 1
    def populate(self, data):
        """Rebuild layers, textures, lights, and animation from serialized data."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['layers', 'textures', 'lights', 'animation'])
        populate_class_array(self, data, Texture, 'textures')
        populate_class_array(self, data, MaterialLayer, 'layers')
        populate_class_array(self, data, Light, 'lights')
        self.animation = AnimationCurve().populate(data['animation'])
        return self
class ObjectInstance(Placeable, Animatable):
    """A placeable, animatable reference to a shared Object."""
    def __init__(self):
        Animatable.__init__(self)
        Placeable.__init__(self)
        self.object = None
    def populate(self, data):
        """Rebuild the referenced object and animation from serialized data.

        BUG FIX: the original indexed data['object', 'animation'] -- a
        tuple key that can never exist -- so populate always raised
        KeyError; the object payload lives under the 'object' key alone.
        """
        if not data:
            return None
        self.object = Object().populate(data['object'])
        self.animation = AnimationCurve().populate(data['animation'])
        return self
class Scene(Group):
    """The root group of a scene graph, plus global rendering state."""
    def __init__(self):
        Group.__init__(self)
        self.backgroundColor = [1,1,1]
        self.fogColor = [0.5,0.5,0.5]
        self.ambientColor = [0,0,0]
        self.fogNear = 10
        self.fogFar = 80
        self.fogType = FOG_NONE
    def populate(self, data):
        """Rebuild the scene and its children from serialized data."""
        if not data:
            return None
        copy_attributes(self, data, ignore=['children'])
        populate_children(self, data)
        return self
# Module-level list of all scene-graph element classes defined above.
SCENE_GRAPH_CLASSES = [BezTriple, StepPoint, AnimationCurve, AnimationVector, Animatable, Action, ActionChannel, Group, Text, Mesh, Light, MultiMaterial, Object, Texture, TextureCube, MaterialLayer, Material, ObjectInstance, Scene]
# Copyright 2010 Trevor F. Smith (http://trevor.smith.name/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
UTF-8
|
Python
| false | false | 2,010 |
16,166,256,929,274 |
9b7f2d3752290a86810572b01906b76914e3977a
|
59dd23b47f3e2afee122813524e4dd8563fa2b18
|
/Interface.py
|
9dc0f72771ce9fabdd3cf6258711e69f67be8a23
|
[
"MIT"
] |
permissive
|
edbrown23/Chives
|
https://github.com/edbrown23/Chives
|
f094d176f3f6af1dfdbe0dbb746afb8e78c5f561
|
88e916c2de877510e3abff9d609865db79ac56c1
|
refs/heads/master
| 2021-01-21T01:46:32.803206 | 2013-02-09T21:05:26 | 2013-02-09T21:05:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import time
from Tkinter import *
from win32api import GetSystemMetrics
SCREEN_WIDTH = GetSystemMetrics(0)
SCREEN_HEIGHT = GetSystemMetrics(1)
class App:
    """A small Tk window, docked near the bottom-right of the screen,
    that takes one line of user input."""
    def __init__(self, message='How can I help you, sir?'):
        self.message = message
        self.createWindow()
    def createWindow(self):
        """Build the root window with a focused entry box and a speech label."""
        self.root = Tk()
        self.setHeightAndPosition()
        self.root.title("Chives")
        self.entry = Entry(self.root, width=30)
        self.entry.grid(row=0)
        self.entry.bind("<Return>", self.handle_return)
        self.entry.focus_set()
        self.speech = StringVar()
        self.speechLabel = Label(self.root, textvariable=self.speech)
        self.speechLabel.grid(row=1)
        self.speech.set(self.message)
    def setHeightAndPosition(self):
        """Place a 200x50 window offset from the bottom-right screen corner."""
        window_width, window_height = 200, 50
        left = SCREEN_WIDTH - 220
        top = SCREEN_HEIGHT - 130
        size = str(window_width) + "x" + str(window_height)
        position = "+" + str(left) + "+" + str(top)
        self.root.geometry(size + position)
    def start(self):
        """Grab focus and enter the Tk event loop."""
        self.root.focus_set()
        self.root.mainloop()
    def handle_return(self, event):
        """Capture the entered text, acknowledge, and close the window."""
        self.userInput = self.entry.get()
        self.ackAndClose()
    def ackAndClose(self):
        """Show the acknowledgement, repaint, and destroy the window."""
        self.speech.set("Yes, sir.")
        self.root.update()
        self.root.destroy()
    def informOfBadCommand(self):
        """Show the unknown-command message and repaint."""
        self.speech.set("I don't know that, sir.")
        self.root.update()
|
UTF-8
|
Python
| false | false | 2,013 |
9,594,956,947,918 |
9fae6d0381e1949b8c126100e177540523f6d187
|
fa0dc031622f0e4e48f8e27131374dcd686a705e
|
/Database Stuff/fix_eventlist.py
|
a13c0cc99cace1438d944b9d40877851561db979
|
[] |
no_license
|
DroidX86/BoxLinux
|
https://github.com/DroidX86/BoxLinux
|
c61ddbbd05c4ad17391a0c14d0a04122144224c4
|
a1b0c9881f36398de6b79645edd85f59ed922b0c
|
refs/heads/master
| 2016-02-24T12:13:07.234671 | 2014-10-13T14:57:51 | 2014-10-13T14:57:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#implement the algo to find rename/move events from create+delete events
#TODO: Find a way to handle combo events (rename/move + modification)
#define just the function here
|
UTF-8
|
Python
| false | false | 2,014 |
5,368,709,165,233 |
c40e1d3c794232f6d2f7311067eba0b851c46067
|
35a915d7e6fe477b0da081334e62b90e87e70cd0
|
/emacs.py
|
c300513891cf4a0068d74d2475c1a13bdfc3ae6f
|
[] |
no_license
|
cnsuhao/bbot
|
https://github.com/cnsuhao/bbot
|
ee825149480fbd2003b1329a137c98936be5e639
|
b1a09ea8b79c633102646487535cc4ace8b1486e
|
refs/heads/master
| 2021-05-27T12:07:19.161494 | 2013-01-02T03:02:32 | 2013-01-02T03:02:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from procedures import BuildProcedure
from buildbot.steps.source import Git
from buildbot.steps.shell import Test, SetProperty
from buildbot.steps.slave import SetPropertiesFromEnv
from buildbot.process.properties import WithProperties
def Emacs():
    """Return a WithProperties renderable expanding to the EMACS build
    property, defaulting to plain 'emacs' when the property is unset."""
    return WithProperties(
        '%(EMACS)s'
        , EMACS=lambda build: build.getProperties().getProperty('EMACS','emacs')
        )
def EmacsTest(*args, **kw):
    """Build a buildbot Test step that runs emacs with --no-splash --debug-init.

    Positional args become extra command-line arguments; each keyword pair
    (k, v) is appended as '--k v'.
    NOTE(review): Python 2 style -- relies on the builtin reduce(). A
    'timeout' keyword is used for the step timeout AND forwarded as a
    '--timeout' emacs argument; confirm that double use is intended.
    """
    return Test(
        command=[Emacs(), '--no-splash', '--debug-init'] + (
            list(args)
            + reduce(lambda r, kv: r+['--'+kv[0],kv[1]], kw.items(), [])),
        env = { 'HOME': WithProperties('%(FakeHome)s') },
        timeout = kw.get('timeout', 40),
        logfiles = dict(testlog=dict(filename='test.log'))
    )
class GitHubElisp(BuildProcedure):
    """Build procedure that checks out a GitHub elisp repo and runs its tests.

    Clones git://github.com/<repo>.git, reads EMACS from the slave
    environment, creates a throwaway HOME directory via emacs itself, then
    runs one EmacsTest step per name in *testnames* (default 'test/test'),
    loading '<name>.el'.
    """
    def __init__(self, repo, *testnames):
        BuildProcedure.__init__(self, 'elisp')
        self.addSteps(
            Git(repourl='git://github.com/%s.git' % repo),
            # Pull the EMACS binary path from the build slave's environment.
            SetPropertiesFromEnv(variables=['EMACS']),
            # Ask emacs for a temp dir to use as a fake $HOME during tests.
            SetProperty(
                command=[Emacs(), '--batch', '--eval',
                         '(princ (make-temp-file "home" t ".bbot"))'],
                extract_fn=lambda rc, stdout, stderr: dict(FakeHome=stdout)
                ))
        for t in testnames or ['test/test']:
            self.addStep(EmacsTest(load= t+'.el'))
|
UTF-8
|
Python
| false | false | 2,013 |
412,316,885,207 |
de60977045db410f22d88b70c1ff280655b901ff
|
f264bad73709bd6832d9b924777f3f598d846481
|
/loader.py
|
c6e3696404ee725c69e6ed99956053e8429d907d
|
[] |
no_license
|
joshualikethis/monary-talk
|
https://github.com/joshualikethis/monary-talk
|
bb440ad70b2a8fd96716714f555be6e4ee818afd
|
87cb3d78350c8cd6022e6c14101562ab16d2f6c8
|
refs/heads/master
| 2020-12-26T00:26:53.850421 | 2014-12-15T16:54:49 | 2014-12-15T16:54:49 | 29,943,283 | 0 | 1 | null | true | 2015-01-28T00:55:06 | 2015-01-28T00:55:06 | 2015-01-28T00:55:06 | 2014-12-15T16:55:04 | 7,078 | 0 | 0 | 0 |
Python
| null | null |
import sys
from datetime import datetime
import zipfile
from decimal import Decimal
import pymongo
# Timestamp layout used by the taxi trip CSV dumps.
time_format = "%Y-%m-%d %H:%M:%S"
# Lat/lng bounding box around the Times Square area.
# NOTE(review): Decimal(float) carries the float's binary imprecision;
# Decimal("40.7592") would be exact -- confirm before relying on edges.
times_square = {"NLat": Decimal(40.7592), "SLat": Decimal(40.7520),
                "WLng": Decimal(-73.9908), "ELng": Decimal(-73.9836)}
def parse_trip_data(filename):
    """Load one zipped NYC taxi trip CSV into the local 'taxi' MongoDB.

    Reads the first member of the zip, classifies each trip by whether its
    pickup and/or dropoff lies inside the Times Square bounding box, and
    bulk-inserts the documents into the 'both', 'pickup', and 'drop'
    collections in batches of 4000.
    """
    # Pending documents per destination collection, flushed in batches.
    documents = {"both": [], "pickup": [], "drop": []}
    with pymongo.MongoClient() as client:
        db = client.taxi
        with zipfile.ZipFile(filename) as z:
            # NOTE(review): assumes the CSV is the zip's first member and
            # that iteration yields text lines -- under Python 3, z.open()
            # yields bytes, so split(',') with a str would fail; confirm
            # the intended interpreter.
            with z.open(z.infolist()[0]) as f:
                first = True
                for line in f:
                    # Skip the CSV header row.
                    if first:
                        first = False
                        continue
                    (medallion, hack_license, vendor_id, rate_code,
                     store_and_fwd_flag, pickup_time, drop_time,
                     passenger_count, trip_time_in_secs, trip_distance,
                     pickup_lng, pickup_lat, drop_lng, drop_lat) = \
                        line.strip().split(',')
                    try:
                        # check that lat/long are roughly in NYC area
                        if ((Decimal(pickup_lng) >= -72 or
                             Decimal(pickup_lng) <= -75) or
                            (Decimal(drop_lng) >= -72 or
                             Decimal(drop_lng) <= -75) or
                            (Decimal(pickup_lat) >= 42 or
                             Decimal(pickup_lat) <= 40) or
                            (Decimal(drop_lat) >= 42 or
                             Decimal(drop_lat) <= 40)):
                            raise Exception("invalid geo coordinates")
                        # check that point is in times square area
                        pickup_in_ts = ((times_square["WLng"] <=
                                         Decimal(pickup_lng) <=
                                         times_square["ELng"]) and
                                        (times_square["NLat"] >=
                                         Decimal(pickup_lat) >=
                                         times_square["SLat"]))
                        drop_in_ts = ((
                            times_square["WLng"] <=
                            Decimal(drop_lng) <=
                            times_square["ELng"]) and
                            (times_square["NLat"] >=
                             Decimal(drop_lat) >=
                             times_square["SLat"]))
                    except: # malformed or empty location data
                        continue
                    # Route the document by which endpoints hit Times Square.
                    if pickup_in_ts and drop_in_ts:
                        collection = "both"
                    elif pickup_in_ts and not drop_in_ts:
                        collection = "pickup"
                    elif not pickup_in_ts and drop_in_ts:
                        collection = "drop"
                    else:
                        continue
                    doc = {
                        "medallion": medallion,
                        "license": hack_license,
                        "vendor": vendor_id,
                        "rate_code": int(rate_code),
                        "pickup_time": datetime.strptime(pickup_time,
                                                         time_format),
                        "drop_time": datetime.strptime(drop_time, time_format),
                        "passengers": int(passenger_count),
                        "trip_time": int(trip_time_in_secs),
                        "distance": float(trip_distance)}
                    # GeoJSON points are optional: omit them silently when
                    # the coordinates do not parse as floats.
                    try:
                        doc["pickup_loc"] = {
                            "type": "Point",
                            "coordinates": [float(pickup_lng),
                                            float(pickup_lat)]}
                    except:
                        pass
                    try:
                        doc["drop_loc"] = {
                            "type": "Point",
                            "coordinates": [float(drop_lng),
                                            float(drop_lat)]}
                    except:
                        pass
                    documents[collection].append(doc)
                    # Flush any batch that has reached 4000 documents.
                    if len(documents["both"]) >= 4000:
                        db.both.insert(documents["both"])
                        documents["both"] = []
                    if len(documents["pickup"]) >= 4000:
                        db.pickup.insert(documents["pickup"])
                        documents["pickup"] = []
                    if len(documents["drop"]) >= 4000:
                        db.drop.insert(documents["drop"])
                        documents["drop"] = []
        # Flush whatever is left once the file is exhausted.
        if len(documents["both"]) != 0:
            db.both.insert(documents["both"])
        if len(documents["pickup"]) != 0:
            db.pickup.insert(documents["pickup"])
        if len(documents["drop"]) != 0:
            db.drop.insert(documents["drop"])
if __name__ == '__main__':
    # Require at least one argument, and insist every one is a .zip file,
    # before doing any work.
    if len(sys.argv) < 2:
        sys.exit("Error: need to pass at least one zip file to load")
    for candidate in sys.argv[1:]:
        if not candidate.endswith(".zip"):
            sys.exit("Error: files passed into loader need to be .zip")
    data_files = sys.argv[1:]
    print('Inserting new documents...')
    for fname in data_files:
        print('\tParsing %s' % fname)
        parse_trip_data(fname)
    print('Done.')
|
UTF-8
|
Python
| false | false | 2,014 |
11,647,951,341,644 |
217469d8d26baf6940b2ee45beaf3484cc6ca033
|
53cbb8ff5f87caa5930093f7106a8be7eaf5891b
|
/spacewalker/walkers/__init__.py
|
b2169e78541c0a412916eb20028bfaff7e64ecd9
|
[] |
no_license
|
kstrauser/spacewalker
|
https://github.com/kstrauser/spacewalker
|
cfdf2bac208c81598f2663350712e41e638f4b8e
|
f91c87bebaefce4a5184bca4e45bfb9324a2ddb1
|
refs/heads/master
| 2021-01-19T18:54:27.532641 | 2014-03-23T07:12:34 | 2014-03-23T07:12:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from . import pyrandom
# Registry mapping a walker's public name to the module implementing it;
# consumers look implementations up by key instead of importing directly.
walkers = { # pylint: disable=C0103
    'pyrandom': pyrandom,
}
|
UTF-8
|
Python
| false | false | 2,014 |
2,757,369,029,214 |
0ceef5629998a996cf466bb881f95ad0fadb2212
|
b3fbe783cf0f5753be8cdd646bd8b28581a1ccf1
|
/sites/site_teenplanet.py
|
f5d6ee0a20c126e2eda2de671fd6a508112db1fb
|
[
"GPL-2.0-only"
] |
non_permissive
|
larsbegas/rip-master
|
https://github.com/larsbegas/rip-master
|
ab1c613434b006680d4b1e82ff4c67740c1025d4
|
95d8f5c587620ef6e722e9b94345f13bfb7f1ec6
|
refs/heads/master
| 2016-09-05T16:49:41.005258 | 2014-04-20T08:57:38 | 2014-04-20T08:57:38 | 15,700,616 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
from basesite import basesite
from threading import Thread
import time, os
class teenplanet(basesite):
    """Ripper for teenplanet.org photo sets.

    FIX: the original placed each method's description as a bare string
    *before* its ``def``; those are no-op expressions, and the first one
    silently became a wrong class docstring.  They are now real docstrings.
    Logic is unchanged.
    """

    def sanitize_url(self, url):
        """Parse/strip URL to the canonical teenplanet.org/user/folder/set form."""
        if not 'teenplanet.org/' in url:
            raise Exception('')
        url = url.replace('http://', '')
        splits = url.split('/')
        # Drop trailing pagination / index components so only the
        # user/folder/set path remains.
        if splits[-1].startswith('page'): splits.pop(-1)
        if 'index.html' in splits: splits.remove('index.html')
        if len(splits) != 4:
            raise Exception('expected teenplanet.org/user/folder/set format not found')
        return 'http://%s' % '/'.join(splits)

    def get_dir(self, url):
        """Discover the working directory name based on the (sanitized) URL."""
        splits = url.replace('http://', '').split('/')
        return 'teenplanet_%s' % '_'.join(splits[-2:])

    def download(self):
        """Walk every thumbnail page of the set and queue each image download."""
        self.init_dir()
        r = self.web.get(self.url)
        index = 0
        total = 0
        page = 1
        while True:
            chunk = self.web.between(r, "<div id='thumbnails'>", '<div id="description">')[0]
            links = self.web.between(chunk, '<a href="', '"')
            total += len(links)
            for link in links:
                img = 'http://photos.teenplanet.org%s' % link.replace(' ', '%20')
                index += 1
                self.download_image(img, index, total=total)
            page += 1
            # Follow "pageN" pagination links as long as the current page
            # advertises the next one.
            if '/page%d">' % page in r:
                r = self.web.get('%s/page%d' % (self.url, page))
            else:
                break
        self.wait_for_threads()

    def download_image(self, url, index, total):
        """Launch a thread to download one image page."""
        # Crude throttle: busy-wait until a thread slot frees up.
        # NOTE(review): thread_count is updated from multiple threads without
        # a lock -- racy, but preserved from the original design.
        while self.thread_count >= self.max_threads:
            time.sleep(0.1)
        self.thread_count += 1
        args = (url, index, total)
        t = Thread(target=self.download_image_thread, args=args)
        t.start()

    def download_image_thread(self, url, index, total):
        """Download the full-size image referenced by a teenplanet image page."""
        r = self.web.get(url)
        if not '<img id="thepic" src="' in r:
            self.thread_count -= 1
            return
        img = 'http://photos.teenplanet.org%s' % self.web.between(r, '<img id="thepic" src="', '"')[0]
        img = img.replace(' ', '%20')
        filename = img[img.rfind('/')+1:]
        saveas = '%s%s%03d_%s' % (self.working_dir, os.sep, index, filename)
        self.save_image(img, saveas, index, total)
        self.thread_count -= 1
|
UTF-8
|
Python
| false | false | 2,014 |
16,020,228,057,744 |
95d0e5e12107d717bd1f7787c1654c3ade6f2267
|
14e5903548ce2641a04273d83b1169b1b620245d
|
/ccnews/ccsitemap.py
|
08078772a1ac5b4eb8b709e2d4b3b93cf7b192e6
|
[
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"CC-BY-NC-3.0",
"CC-BY-3.0",
"CC-BY-4.0"
] |
non_permissive
|
designcc/django-ccnews
|
https://github.com/designcc/django-ccnews
|
8056e3fcd24ec94721e7776ebd444d1d70ef12cc
|
d79e208ee76615dfcf2b6e77fecdf24e7971845b
|
refs/heads/master
| 2020-05-19T07:23:06.669668 | 2012-09-03T13:20:48 | 2012-09-03T13:20:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import ccsitemaps
from ccnews.models import Article
class ArticleSiteMap(ccsitemaps.SiteMap):
    """Sitemap entries for the visible news articles."""

    model = Article

    @staticmethod
    def last_mod():
        """Return the newest modification time among visible articles, or None."""
        recent = Article.objects.visible().order_by('-modified')
        try:
            return recent[0].modified
        except IndexError:
            # No visible articles yet.
            return None

    @staticmethod
    def get_objects():
        """Return the queryset of articles to include in the sitemap."""
        return Article.objects.visible()

ccsitemaps.register(ArticleSiteMap)
|
UTF-8
|
Python
| false | false | 2,012 |
18,648,748,002,053 |
6a9e3978324d7f5dd032938243c53bfb7605b3f7
|
43705f52cbdfd750c2fae1693fb87a495164460b
|
/recover.py
|
902d525558493de9272f5abae75b81e3b1f35882
|
[] |
no_license
|
Z3t4Byt3/lostdir_recover
|
https://github.com/Z3t4Byt3/lostdir_recover
|
79e8b02432580e3cf9c95e3b9ab650fe46a87dd5
|
dd270eb60bc45d4fe138f36b185285ac67d61197
|
refs/heads/master
| 2021-12-14T10:31:35.451427 | 2014-10-26T18:57:59 | 2014-10-26T18:57:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#import sys
import re
import os
import magic
import glob
import argparse
from mutagen.easyid3 import EasyID3
from mutagen.easymp4 import EasyMP4
import mutagen
import shutil
import pprint
# Pretty-printer used for the final MIME-type summary.
pp = pprint.PrettyPrinter()
# CLI: -s/--source is the LOST.DIR to scan, -d/--destination is the root
# directory recovered files are copied into.
argp = argparse.ArgumentParser(description="Sort files from Android's LOST.DIR")
argp.add_argument('-s', '--source', required=True, type=str)
argp.add_argument('-d', '--destination', required=True, type=str)
args = argp.parse_args()
# Module-level destination root consumed by all handlers below.
recover_dir = args.destination
#### Handlers
def handle_mp3(filename):
mp3_dir = os.path.join(recover_dir, 'mp3')
#mp3 = MP3(filename)
try:
id3 = EasyID3(filename)
except mutagen.id3.ID3NoHeaderError:
return None
artist = (id3.get('artist', ['noartist']))[0]
album = (id3.get('album', ['noalbum']))[0]
title = (id3.get('title', ['notitle']))[0]
try:
tracknumber = id3['tracknumber'][0]
except KeyError:
tracknumber = 'XX'
tracknumber = re.sub(r'/.*', '', tracknumber)
try:
tracknumber = '%02d' % int(tracknumber)
except ValueError:
pass
track_file = ''.join([tracknumber, '-', title, '.mp3'])
artist_dir = os.path.join(mp3_dir, artist)
final_path = os.path.join(artist_dir, album, track_file)
#os.renames(filename, final_path)
if not os.path.isdir(os.path.dirname(final_path)):
os.makedirs(os.path.dirname(final_path))
shutil.copy(filename, final_path)
print final_path
def handle_mp4(filename):
mp4_dir = os.path.join(recover_dir, 'mp3')
mp4 = EasyMP4(filename)
artist = (mp4.get('artist', ['noartist']))[0]
album = (mp4.get('album', ['noalbum']))[0]
title = (mp4.get('title', ['notitle']))[0]
try:
tracknumber = mp4['tracknumber'][0]
except KeyError:
tracknumber = 'XX'
tracknumber = re.sub(r'/.*', '', tracknumber)
try:
tracknumber = '%02d' % int(tracknumber)
except ValueError:
pass
track_file = ''.join([tracknumber, '-', title, '.mp4'])
artist_dir = os.path.join(mp4_dir, artist)
final_path = os.path.join(artist_dir, album, track_file)
#os.renames(filename, final_path)
if not os.path.isdir(os.path.dirname(final_path)):
os.makedirs(os.path.dirname(final_path))
shutil.copy(filename, final_path)
print final_path
def handle_mv(filename, dir):
_fn = '%s.%s' % (os.path.basename(filename), dir)
final_path = os.path.join(recover_dir, dir, _fn)
#os.renames(filename, final_path)
if not os.path.isdir(os.path.dirname(final_path)):
os.makedirs(os.path.dirname(final_path))
shutil.copy(filename, final_path)
print "Renamed", filename, final_path
# Running tally of every MIME type encountered, for the summary printout.
seen_types = {}
# Dispatch table: MIME type -> handler.  NOTE(review): octet-stream is
# optimistically treated as mp3; handle_mp3 returns early when EasyID3
# raises for non-mp3 data, so mis-detected files are simply skipped.
known_types = {'audio/mpeg': handle_mp3,
               'image/jpeg': lambda fn: handle_mv(fn, 'jpg'),
               'application/ogg': lambda fn: handle_mv(fn, 'ogg'),
               'audio/mp4': handle_mp4,
               'audio/x-wav': lambda fn: handle_mv(fn, 'wav'),
               'image/png': lambda fn: handle_mv(fn, 'png'),
               'video/mp4': lambda fn: handle_mv(fn, 'mp4'),
               'text/plain': lambda fn: handle_mv(fn, 'txt'),
               'application/octet-stream': handle_mp3,
               }
c = 0
# Identify each file by content (libmagic), count it, and dispatch to the
# matching handler; unknown types are only counted.
with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as m:
    for file in glob.glob(os.path.join(args.source, '*')):
        c += 1
        type = m.id_filename(file)
        seen_types[type] = seen_types.get(type, 0) + 1
        if type in known_types:
            known_types[type](file)
print "Saw", c, "files"
pp.pprint(seen_types)
|
UTF-8
|
Python
| false | false | 2,014 |
2,688,649,565,796 |
aa2b0a09d9a78fe6601ee42668bda25ec38286d0
|
dd70bfdc971457d3b2c3ce6ca4b1e1f9093cc7b0
|
/python_hw/ex7-4.py
|
4af050a1b01e4370e0a261fdfa39fb15e4b63a73
|
[] |
no_license
|
milanshah/Python_assignements
|
https://github.com/milanshah/Python_assignements
|
db59535b44ba346c339866db8cb54c70a82b29be
|
5491636d54d905cd5a626e9252933853e3b69b14
|
refs/heads/master
| 2020-12-24T16:14:57.217056 | 2014-04-20T01:07:29 | 2014-04-20T01:07:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def inireader(inifile, dd):
    """Parse a simple INI file into dd as {section: {key: value}}.

    BUG FIXES vs. the original:
      * open(inifile), not open('inifile') (it opened the literal string);
      * d[...] = ... had a mismatched bracket (SyntaxError);
      * lines are stripped, so '[section]' detection actually matches
        (the trailing newline previously defeated the l[-1] == ']' test);
      * a fresh dict is started for every section instead of all sections
        sharing (and overwriting) the same one.

    Keys/values keep whatever inner whitespace the file has; the value is
    everything after the first '='.  Mutates dd in place; returns None.
    """
    sechdr = ''
    d = {}
    with open(inifile) as f:
        for raw in f:
            l = raw.strip()
            if not l:
                continue
            if l[0] == '[' and l[-1] == ']':
                # Flush the previous section before starting a new one.
                if sechdr:
                    dd[sechdr] = d
                sechdr = l[1:-1]
                d = {}
            elif '=' in l:
                key, _, value = l.partition('=')
                d[key] = value
    # Flush the final section (the original only flushed on seeing a new header).
    if sechdr:
        dd[sechdr] = d
"""
"""
|
UTF-8
|
Python
| false | false | 2,014 |
18,279,380,842,131 |
951fe20164217dfde9968e2e5148a5523ec05411
|
48cafc37ad181fa04ef3261f34a5380f6287cea4
|
/GetLesson.py
|
88db49cc6ac285a20cc0cd56efd1575ee2593a59
|
[] |
no_license
|
StancuFlorin/LEC-Academy-Crawler
|
https://github.com/StancuFlorin/LEC-Academy-Crawler
|
e3c4236cf240e44570c5e370966f4149465936b9
|
707a1b72fbd4ecde7e2c4f775899bc811d1291f3
|
refs/heads/master
| 2016-09-05T19:56:24.258078 | 2014-09-04T10:06:10 | 2014-09-04T10:06:10 | 21,793,908 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import urllib2
from BeautifulSoup import BeautifulSoup
import cookielib
link = str(sys.argv[1])
class MyHTTPErrorProcessor(urllib2.HTTPErrorProcessor):
    """Error processor that returns 302 responses as-is instead of following them."""

    def http_response(self, request, response):
        code = response.code
        # only add this line to stop 302 redirection.
        if code == 302:
            return response
        if 200 <= code < 300:
            return response
        # Delegate every other non-2xx status to the normal error machinery.
        return self.parent.error('http', request, response, code,
                                 response.msg, response.info())

    https_response = http_response
# Cookie-aware opener wired with the custom processor so 302 responses are
# returned unchanged rather than followed.
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj), MyHTTPErrorProcessor)
page = opener.open(link)
html = page.read()
soup = BeautifulSoup(html)
# Scrape lesson metadata; each field falls back to the string "None" when
# the corresponding element is missing from the page.
title = "None"
if soup.find('h1') is not None:
    title = soup.find('h1').getText()
chapper = "None"
if soup.find('h2') is not None:
    chapper = soup.find('h2').getText()
vimeo = "None"
if soup.find('iframe') is not None:
    vimeo = soup.find('iframe').get('src')
# Output order (video URL, title, chapter) -- presumably consumed by a
# caller parsing stdout; verify before changing.
print vimeo
print title
print chapper
|
UTF-8
|
Python
| false | false | 2,014 |
19,301,583,062,027 |
84e7a397b0182665e987be479e65abb4b946b130
|
ced3600c76d59c9f0d3d28f779480c56ce842b43
|
/SparkJobManager/jar_manager.py
|
9de30b8f6d26bc4406d1250aa0644a8795c4dd36
|
[] |
no_license
|
wangqiang8511/spark_job_manager
|
https://github.com/wangqiang8511/spark_job_manager
|
2102bf994761ec6b940bff110f8c19f4aa805c64
|
b656cebc5f453f6ed37f542f79129bb1c033b2d0
|
refs/heads/master
| 2021-01-15T21:14:47.686575 | 2014-05-12T06:53:50 | 2014-05-12T06:53:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
import requests
import settings
def get_jar_url(host):
    """Return the jar-management endpoint for the given job-server base URI."""
    return '%sjars' % host
def list_jars(host=settings.JOB_SERVER_URI):
    """Fetch the job server's jar listing and return it parsed from JSON.

    NOTE: the default host is captured from settings at import time.
    """
    r = requests.get(get_jar_url(host))
    print r.text
    return json.loads(r.text)
def post_jar(name, jar, host=settings.JOB_SERVER_URI):
    """Upload the jar file at path `jar` to the job server under `name`.

    BUG FIX: the original used the Python-2-only file() builtin and never
    closed the handle; the file is now read through a context manager.
    """
    with open(jar, 'rb') as jar_file:
        payload = jar_file.read()
    r = requests.post(get_jar_url(host) + "/" + name, data=payload)
    print(r.text)
|
UTF-8
|
Python
| false | false | 2,014 |
3,667,902,097,740 |
085b5ddada30a393fc9cf4d7c5357df4ee104474
|
b77207f7390b2d41114cb8ff05ea2b0239bf675b
|
/lib/test.py
|
3198254037eb64fec40da6be5d5eeb71c2c73f69
|
[] |
no_license
|
Tenteki/Dominoes
|
https://github.com/Tenteki/Dominoes
|
eadab7b17901ba9c212b011a4345a955f2c5ee71
|
8ed20b99686b15cca918f55c528eba0547e9ec21
|
refs/heads/master
| 2020-06-02T05:14:58.286663 | 2014-06-15T22:51:28 | 2014-06-15T22:51:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import gui
# Instantiate the application -- gui.App() presumably starts the Tk main
# loop itself (TODO confirm; the commented-out code below built the UI
# manually with Tkinter).
a = gui.App()
# from Tkinter import *
# import hand
# import domino
# def removeDomino(value):
# canvas.delete(ALL)
# del b.dominoes[0]
# b.displayHand(30, 10, canvas, orientation = "H")
# print b
# def flipDomino(value):
# print str(b ) + "Before"
# canvas.delete(ALL)
# b.dominoes[0].flip()
# b.displayHand(30, 10, canvas, orientation = "H")
# print str(b) + "After"
# root = Tk()
# canvas = Canvas(root, width = 400, height = 200)
# canvas.pack()
# a = [domino.Domino(), domino.Domino(), domino.Domino(), domino.Domino()]
# print a
# # for i in xrange(1, len(a) + 1):
# # a[i-1].displayValue(canvas, x = i* 60, y = 10, orientation = "H")
# #a.displayValue(canvas, 30, 10, orientation = "H")
# b = hand.Hand(a)
# b.displayHand(30, 10, canvas, orientation = "H", faceup = False)
# Button (root, text = "Button", command = lambda:flipDomino(0)).place(x = 80, y = 50)
# root.mainloop()
|
UTF-8
|
Python
| false | false | 2,014 |
12,429,635,368,623 |
348668694ac58c248c2a11df8b45ab8a17bef97a
|
8bde9ce553484296e22806b949369f22ed9bdb6e
|
/ircyall/web2redis.py
|
b5c738e6df4e8b5b9dc18f8266c2a82e1b482649
|
[
"Apache-2.0"
] |
permissive
|
wikimedia/operations-software-ircyall
|
https://github.com/wikimedia/operations-software-ircyall
|
d4bfbb3a4a24c2881c1df0fee295ef1f870d49e1
|
9c9bb0cf84154c5d5d791e2be3a690d10a43cc29
|
refs/heads/master
| 2023-06-30T10:59:01.007350 | 2014-12-16T22:48:19 | 2014-12-16T23:28:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
from flask import Flask, request
from redis import StrictRedis
app = Flask(__name__)
redis = StrictRedis()
@app.route('/v1/send', methods=['POST'])
def send():
    """Validate an incoming notification request and queue it on redis."""
    form = request.form
    # Validate, validate, validate!
    if 'message' not in form:
        return 'No message found', 400
    if 'channels' not in form:
        return 'No channels found', 400
    if 'token' not in form:
        return 'No token found', 400

    message = form['message']
    if message.startswith('/'):
        return 'Message starts with /, not going to process', 400

    # Keep only the first line: discard everything after a newline and, in
    # turn, after a carriage return.
    for terminator in ('\n', '\r'):
        if terminator in message:
            message = message.split(terminator)[0]

    data = {
        'message': message,
        'channels': form.getlist('channels'),
    }
    redis.rpush('ircnotifier', json.dumps(data))
    return repr(data)

if __name__ == '__main__':
    app.run(debug=True)
|
UTF-8
|
Python
| false | false | 2,014 |
2,911,987,841,371 |
af815d86e41aabedd4c11678ea9b3e96aeac79d5
|
2a7a8933aed7ffc541551977ecbec0df4286058a
|
/targets/TEST/OAI/openair.py
|
94d42611a4ecb1ebb64f18d108925f7f74927846
|
[
"GPL-1.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
mspublic/openair4G-mirror
|
https://github.com/mspublic/openair4G-mirror
|
14d17fdbb385c3aab7f535bac6cd888ffad41621
|
9ff74883f8a76ee47993d3f817e67df2bc8160ba
|
refs/heads/master
| 2020-04-09T16:24:55.515513 | 2014-12-20T21:49:50 | 2014-12-20T21:49:50 | 28,283,356 | 22 | 11 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#******************************************************************************
# Eurecom OpenAirInterface
# Copyright(c) 1999 - 2013 Eurecom
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
# The full GNU General Public License is included in this distribution in
# the file called "COPYING".
# Contact Information
# Openair Admin: [email protected]
# Openair Tech : [email protected]
# Forums : http://forums.eurecom.fsr/openairinterface
# Address : Eurecom, Compus SophiaTech 450, route des chappes, 06451 Biot, France
#*****************************************************************************
# \file openair.py
# \brief class that define the oaisim class and its attributes
# \author Navid Nikaein
# \date 2013
# \version 0.1
# @ingroup _test
import array
import os
import shutil
import subprocess
import time
from subprocess import call

import pexpect
import pxssh

from core import *
SHELL = '/bin/bash'
class openair(core):
def __init__(self, hostname, address):
self.error = '% '
self.hostname = hostname
self.address = address
self.localhost = None
core.__init__(self)
@property
def localhost(self):
if self.localhost :
return self.localhost
elif self.hostname in ['localhost', '127.0.0.7', '::1'] :
self.localhost = self.hostname
return self.localhost
@localhost.setter
def localhost(self,localhost):
self.localhost = localhost
def shcmd(self,cmd,sudo=False):
if sudo:
cmd = "sudo %s" % command
proc = subprocess.Popen(command, shell=True,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
stdout, stderr = proc.communicate()
return (stdout, stderr)
def connect(self, username, password, prompt='PEXPECT_OAI'):
self.prompt1 = prompt
self.prompt2 = prompt
try:
if not username:
username = root
if not password:
password = username
self.oai = pxssh.pxssh()
self.oai.login(self.address,username,password)
self.oai.sendline('PS1='+self.prompt1)
self.oai.PROMPT='PEXPECT_OAI'
# need to look for twice the string of the prompt
self.oai.prompt()
self.oai.prompt()
self.oai.sendline('uptime')
self.oai.prompt()
print self.oai.before
except Error, val :
print "Error: can't connect to"+username+"@"+self.address
def connect2(self, username, password, prompt='$'):
self.prompt1 = prompt
self.prompt2 = prompt
while 1:
try:
if not username:
username = root
if not password:
password = username
self.oai = pexpect.spawn('ssh -o "UserKnownHostsFile=/dev/null" -o "StrictHostKeyChecking=no" -o "ConnectionAttempts=1" ' \
+ username + '@' + self.address)
index = self.oai.expect([re.escape(self.prompt1), re.escape(self.prompt2), pexpect.TIMEOUT], timeout=40)
if index == 0 :
return 'Ok'
else :
index = self.oai.expect(['password:', pexpect.TIMEOUT], timeout=40)
if index == 0 :
self.oai.sendline(password)
index = self.oai.expect([re.escape(self.prompt1), re.escape(self.prompt2), pexpect.TIMEOUT], timeout=10)
if index != 0:
print 'ERROR! could not login with SSH.'
print 'Expected ' + self.prompt1 + ', received >>>>' + self.oai.before + '<<<<'
sys.exit(1)
return 'Ok'
except Exception, val:
time.sleep(5)
print "Error:", val
def disconnect(self):
print 'disconnecting the ssh connection to ' + self.address + '\n'
self.oai.send('exit')
# self.cancel()
def kill(self, user, pw):
try:
if user == 'root' :
os.system('pkill oaisim')
os.system('pkill cc1')
time.sleep(1)
os.system('pkill oaisim')
else :
os.system('echo '+pw+' | sudo -S pkill oaisim')
os.system('echo '+pw+' | sudo -S pkill cc1')
time.sleep(1)
os.system('echo '+pw+' | sudo -S pkill oaisim')
except Error, val:
print "Error:", val
def rm_driver(self,oai,user, pw):
try:
if user == 'root' :
#oai.send_nowait('rmmod nasmesh;')
os.system('rmmod nasmesh;')
else :
oai.send_nowait('echo '+pw+ ' | sudo -S rmmod nasmesh;')
#os.system('echo '+pw+ ' | sudo -S rmmod nasmesh;')
except Error, val:
print "Error removing oai network driver module:", val
def driver(self,oai,user,pw):
pwd = oai.send_recv('pwd')
oai.send('cd $OPENAIR_TARGETS;')
oai.send('cd SIMU/USER;')
try:
if user == 'root' :
oai.send_nowait('insmod ./nasmesh.ko;')
else :
oai.send('echo '+pw+ ' | sudo -S insmod ./nasmesh.ko;')
except Error, val:
print "Error inserting oai network driver module:", val
def cleandir (self, logdir,debug) :
for filename in os.listdir(logdir):
filepath = os.path.join(logdir, filename)
if debug == 2 :
print 'logdir is ' + logdir
print 'filepath is ' + filepath
try:
shutil.rmtree(filepath)
except OSError:
os.remove(filepath)
#print 'Could not remove the filepath'+ filepath + ' with error ' + OSError
def create_dir(self,dirname,debug) :
if not os.path.exists(dirname) :
try:
os.makedirs(dirname,0755)
except OSError:
# There was an error on creation, so make sure we know about it
raise
|
UTF-8
|
Python
| false | false | 2,014 |
19,610,820,702,767 |
b7bd1c75d1d11a0286c54f6aadd82a98c08299da
|
53a2a37a53f0272963918a2950eae14c91cf17ac
|
/test_code/approximatematching.py
|
a6537e92246175dca48b3edbba3e6ec17676eeae
|
[] |
no_license
|
c1twcny/Bioinformatics
|
https://github.com/c1twcny/Bioinformatics
|
03c485075f8fb55394517164b26e948a36929f67
|
556479ab9e29995e1d0eb1616645d17512b43ace
|
refs/heads/master
| 2021-01-24T06:10:32.760498 | 2014-11-25T18:15:24 | 2014-11-25T18:19:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#
# Solve Minimum Skew problem: Find a position in a genome minimizing the skew
# Datafile: test10.data
#
import sys
import string
import fileinput
import re
import difflib
#import matplotlib.pyplot as plt
inputdata = []
pos = []      # start indices of approximate matches
posID = []    # per-window list of positions where pattern agrees with the text
# Input format: line 1 = pattern, line 2 = DNA string, line 3 = allowed
# mismatch count (Hamming distance bound).
for line in fileinput.input():
    inputdata.append(line.rstrip()); # strip out newline character
pattern = inputdata[0]
dnastr = inputdata[1]
mismatch= int(inputdata[2])
#print pattern, '\n', dnastr, '\n', mismatch, '\n'
# Slide the pattern over the DNA string; record every start index whose
# window differs from the pattern in at most `mismatch` positions.
for idx in range(0, len(dnastr)-len(pattern)+1):
#    obj = difflib.SequenceMatcher(None, pattern, dnastr[idx:idx+len(pattern)])
#    if obj.ratio() >= 0.625:
#        pos.append(idx)
    posID = [idy for idy in range(0, len(pattern)) if pattern[idy] == dnastr[idx+idy]]
#    print idx, len(posID)
    if len(posID) >= len(pattern) - mismatch:
        pos.append(idx)
print pos
|
UTF-8
|
Python
| false | false | 2,014 |
17,248,588,661,684 |
11cd9cf991599cd3ffccebe5414e03c36272b0d3
|
6cbecbca784eb9e9ef45660ff7312fd7f31906af
|
/main.py
|
98e9982a2912d8dc3199ed7eee2aff649149f846
|
[] |
no_license
|
wreszelewski/emas_fork
|
https://github.com/wreszelewski/emas_fork
|
e2a512c2b0e5839f73ae143661037a24fd1f5a57
|
3c8c4455aa7d8e0c306b6b5f94022bdb8724f99d
|
refs/heads/master
| 2016-08-08T07:29:45.635228 | 2014-11-26T09:21:53 | 2014-11-26T09:21:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from emas.configuration import Configuration
from emas.evolution import Evolution
from emas.problems.zdt import ZDT
from emas.problems.zdt.zdt1_definitions import ZDT1Definitions
from emas.problems.zdt.zdt2_definitions import ZDT2Definitions
from emas.problems.zdt.zdt3_definitions import ZDT3Definitions
from logger import Logger
from metrics.problems.zdt import ZDT3Metrics, ZDT1Metrics, ZDT2Metrics
from plotter.pareto_plotter import ParetoPlotter
from plotter.plotter_configuration import PlotterConfiguration
def print_progress(islands, etilist, iteration):
    """Iteration callback: log per-island population size and energy."""
    Logger.log('Iteration: {}'.format(iteration))
    for island in islands:
        Logger.debug('\tisland population: {},\tenergy: {}'.format(len(island), island.energy))
    # NOTE(review): this logs `island.energy` -- the loop variable left over
    # from the last island -- rather than an etilist energy; looks like a
    # copy/paste slip, confirm intent before relying on this log line.
    Logger.debug('\tetilist island population: {},\tenergy: {}'.format(len(etilist), island.energy))
def plot_island(island_number):
    """Build an iteration callback that plots the given island's population."""
    def plot(population, etilist, iteration):
        plotter.plot_population(population[island_number], iteration, subdirectory=str(island_number))
    return plot
# iteration -> (HV, HVR) samples gathered by collect_metrics_island callbacks.
collected_metrics = {}
def collect_metrics_island(island_number):
    """Build an iteration callback recording HV/HVR metrics for one island."""
    def collect_metrics(population, etilist, iteration):
        pareto_front = population[island_number]
        metrics = ZDT3Metrics()
        hv = metrics.HV(pareto_front)
        hvr = metrics.HVR(pareto_front)
        collected_metrics[iteration] = hv, hvr
    return collect_metrics
def plot_etilist(population, etilist, iteration):
    """Iteration callback: plot the eti-list population."""
    plotter.plot_population(etilist, iteration, subdirectory="etilist")
# Problem setup: the ZDT3 multi-objective benchmark.
zdt_definitions = ZDT3Definitions()
problem = ZDT(zdt_definitions)
plotter_configuration = PlotterConfiguration(plot_every_x_generations=50)
plotter = ParetoPlotter(plotter_configuration, zdt_definitions)
configuration = Configuration()
evolution = Evolution(configuration, problem)
# Register callbacks: plot islands 0-2 and the eti list, collect HVR
# metrics for island 1, and log progress on every iteration.
evolution.register_iteration_callback(plot_island(0))
evolution.register_iteration_callback(plot_island(1))
evolution.register_iteration_callback(plot_island(2))
evolution.register_iteration_callback(plot_etilist)
evolution.register_iteration_callback(collect_metrics_island(1))
evolution.register_iteration_callback(print_progress)
evolution.initialize()
evolution.evolve()
# NOTE(review): the tuple-unpacking lambda below is Python-2-only syntax.
plotter.plot_x_y(collected_metrics.keys(), map(lambda (hv, hvr): hvr, collected_metrics.values()), 'generation', 'HVR', 'HVR metric for ZDT3', 'hvr')
|
UTF-8
|
Python
| false | false | 2,014 |
858,993,463,290 |
4113ed7d54b8293f3165a9597d60c2a269836b40
|
81d0640ac8f0b918d336bef0aff8183014bfb3dc
|
/tests/mach8_test/unit/instructions/test_transfer.py
|
91fd7de5186b61c675a24bcc7752bc5c0c368e36
|
[] |
no_license
|
blackchip-org/mach8-py
|
https://github.com/blackchip-org/mach8-py
|
6d2712a69f770b0b03dcc0830ab7f8627e017386
|
93ce284af569cb01695e67898936836ccfbd5ff2
|
refs/heads/master
| 2021-01-01T18:22:51.355380 | 2014-01-22T20:06:00 | 2014-01-22T20:06:00 | 7,997,427 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#------------------------------------------------------------------------------
# Mach-8: The Virtual Machinery Playpen
#
# blackchip.org, Inspired by the Vintage Computer Club.
# All rites reversed (K) 2011, Reprint what you like.
#
# $Id: test_transfer.py 96 2011-12-12 22:29:35Z mcgann $
#------------------------------------------------------------------------------
from mach8.assembly import *
from mach8_test import suite
from mach8_test.harness import execution
class TestTransfer(execution.TestHarness):
    """Tests for the register transfer instructions (tax, txa, tay, tya).

    The twelve original test methods were near-identical copies; they now
    delegate to one parameterized checker.  Each test still calls
    suite.banner with its own method object and asserts exactly the same
    conditions as before.
    """

    def _check_transfer(self, banner_method, load_op, transfer_op, value, register):
        """Assemble `load_op value; transfer_op`, run it, and assert that
        `register` received `value` with N/Z flags matching the value."""
        suite.banner(banner_method)
        a = self.a
        _; a(load_op, value)
        _; a(transfer_op)
        self.run_test()
        self.assertEquals(value, getattr(self.cpu, register))
        if value == 0x00:
            # Zero result: Z set, N clear.
            self.assertTrue(not self.cpu.n and self.cpu.z)
        elif value & 0x80:
            # High bit set (negative): N set, Z clear.
            self.assertTrue(self.cpu.n and not self.cpu.z)
        else:
            # Positive non-zero: both flags clear.
            self.assertTrue(not self.cpu.n and not self.cpu.z)

    def test_tax(self):
        self._check_transfer(self.test_tax, lda_imm, tax, 0x44, 'x')

    def test_tax_zero(self):
        self._check_transfer(self.test_tax_zero, lda_imm, tax, 0x00, 'x')

    def test_tax_signed(self):
        self._check_transfer(self.test_tax_signed, lda_imm, tax, 0xaa, 'x')

    def test_txa(self):
        self._check_transfer(self.test_txa, ldx_imm, txa, 0x44, 'a')

    def test_txa_zero(self):
        self._check_transfer(self.test_txa_zero, ldx_imm, txa, 0x00, 'a')

    def test_txa_signed(self):
        self._check_transfer(self.test_txa_signed, ldx_imm, txa, 0xaa, 'a')

    def test_tay(self):
        self._check_transfer(self.test_tay, lda_imm, tay, 0x44, 'y')

    def test_tay_zero(self):
        self._check_transfer(self.test_tay_zero, lda_imm, tay, 0x00, 'y')

    def test_tay_signed(self):
        self._check_transfer(self.test_tay_signed, lda_imm, tay, 0xaa, 'y')

    def test_tya(self):
        self._check_transfer(self.test_tya, ldy_imm, tya, 0x44, 'a')

    def test_tya_zero(self):
        self._check_transfer(self.test_tya_zero, ldy_imm, tya, 0x00, 'a')

    def test_tya_signed(self):
        self._check_transfer(self.test_tya_signed, ldy_imm, tya, 0xaa, 'a')
|
UTF-8
|
Python
| false | false | 2,014 |
6,631,429,541,240 |
fc72546922fec1cccd4f876124d151884df92460
|
c80b3cc6a8a144e9858f993c10a0e11e633cb348
|
/plugins/indexing/indexing/api/domain_indexing.py
|
c4cc41df1b8c85599398d4e0a4d0969f1e71743b
|
[] |
no_license
|
cristidomsa/Ally-Py
|
https://github.com/cristidomsa/Ally-Py
|
e08d80b67ea5b39b5504f4ac048108f23445f850
|
e0b3466b34d31548996d57be4a9dac134d904380
|
refs/heads/master
| 2021-01-18T08:41:13.140590 | 2013-11-06T09:51:56 | 2013-11-06T09:51:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Apr 19, 2012
@package: indexing
@copyright: 2011 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
Provides the decorator to be used by the models in the indexing domain.
'''
from ally.api.config import model
from functools import partial
# --------------------------------------------------------------------
# URL domain prefix under which all indexing models are exposed.
DOMAIN = 'Indexing/'
# Drop-in replacement for @model that fixes domain=DOMAIN for this package.
modelIndexing = partial(model, domain=DOMAIN)
|
UTF-8
|
Python
| false | false | 2,013 |
8,718,783,637,593 |
f71a2bd1ff5f02beb2f3408da09357dec64e36af
|
a821020950b797b67808492a097ea143d015246e
|
/pysqlcli.py
|
615aeb38dbd33639271aab905ac1b41b36a4f273
|
[
"BSD-2-Clause-Views"
] |
permissive
|
rakeshsingh/pysqlcli
|
https://github.com/rakeshsingh/pysqlcli
|
43af21efcb1cb45f0276ee6c898d6e5680d84a87
|
de3649cc8acb10afd91717adaec6acb233808036
|
refs/heads/master
| 2020-12-25T05:47:12.255176 | 2013-07-07T17:00:14 | 2013-07-07T17:00:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
'''
A substitute of Ora** SQL client that doesn't s*x
Author: Alejandro E. Brito Monedero
'''
import readline
import sys
import cx_Oracle
import os
import atexit
import csv
class Database(object):
    '''Class to handle the database methods'''
    def __init__(self, dsn):
        '''Constructor, it opens the connection with the database'''
        self._dsn = dsn
        self._user = self._get_dbuser()
        self._connection = cx_Oracle.Connection(self._dsn)
        self._cursor = self._connection.cursor()
    def _get_dbuser(self):
        '''Extracts the user from the dsn string'''
        # DSN is assumed to look like user/password@host -- everything before
        # the first '/' is the schema user (TODO confirm against callers).
        user = self._dsn.split('/')[0]
        return user
    def close(self):
        '''Closes the cursor and the connection'''
        self._cursor.close()
        self._connection.close()
    def run_describe(self, table):
        '''Run the describe query given a table returning a result set'''
        # The ugly query to emulate DESCRIBE of sql*plus Thx CERN
        # Looks up all_tab_columns for the connected user's table; table and
        # owner names are upper-cased to match Oracle's dictionary.
        describe = ("SELECT atc.column_name, "
            "CASE atc.nullable WHEN 'Y' THEN 'NULL' ELSE 'NOT NULL' "
            "END \"Null?\", atc.data_type || CASE atc.data_type WHEN 'DATE' "
            "THEN '' ELSE '(' || CASE atc.data_type WHEN 'NUMBER' THEN "
            "TO_CHAR(atc.data_precision) || CASE atc.data_scale WHEN 0 "
            "THEN '' ELSE ',' || TO_CHAR(atc.data_scale) END "
            "ELSE TO_CHAR(atc.data_length) END END || CASE atc.data_type "
            "WHEN 'DATE' THEN '' ELSE ')' END data_type "
            "FROM all_tab_columns atc "
            "WHERE atc.table_name = '%s' AND atc.owner = '%s' "
            "ORDER BY atc.column_id")
        rset = self.execute_query(describe % (table.upper(),
            self._user.upper()))
        return rset
    def run_list_tables(self):
        '''Run the a query that shows all the tables and returns a result set'''
        list_all = "SELECT table_name FROM user_tables"
        rset = self.execute_query(list_all)
        return rset
    def execute_query(self, query):
        '''Executes the given query and returns the result set, None if error'''
        try:
            rset = self._cursor.execute(query)
            return rset
        except cx_Oracle.DatabaseError, exc:
            # Print the Oracle error details to stderr and fall through.
            error, = exc.args
            print >> sys.stderr, "Oracle-Error-Code: %s" % error.code
            print >> sys.stderr, "Oracle-Error-Message: %s" % error.message
        # reached after an exception. This is too C, better raise and Exception
        return None
class Printer(object):
    '''Class for printing the result sets'''
    def __init__(self):
        '''Constructor, sets the csv output option to false'''
        self.csv_mode = False
        self._file = None
    def deactivate_csv(self):
        '''Deactivate csv mode and close the file opened'''
        if self.csv_mode:
            self._file.close()
            self.csv_mode = False
            print >> sys.stderr, 'CSV output deactivated'
    def activate_csv(self, filename):
        '''Activate the csv mode'''
        # No-op when csv mode is already active (keeps the current file open).
        if not self.csv_mode:
            try:
                self._file = open(filename, 'wb')
                self.csv_mode = True
                print >> sys.stderr, 'CSV output activated'
            except IOError, exc:
                print >> sys.stderr, 'There was a problem activating csv mode'
                print >> sys.stderr, exc
    def print_result_set(self, rset):
        '''Prints the result set'''
        max_lengths = list()   # widest value seen per column (for padding)
        headers = list()       # column names from the cursor description
        rows = list()          # materialized rows (rset is consumed here)
        null = 'NULL'
        # Get the max length of each field and initialize the headers and
        # rows list
        for fields in rset.description:
            headers.append(fields[0])
            max_lengths.append(len(fields[0]))
        for row in rset:
            rows.append(row)
            for idx, elem in enumerate(row):
                if elem is None:
                    # None renders as the literal 'NULL', so measure that.
                    if len(null) > max_lengths[idx]:
                        max_lengths[idx] = len(null)
                else:
                    elem = str(elem)
                    if len(elem) > max_lengths[idx]:
                        max_lengths[idx] = len(elem)
        # call the apropiate function
        if self.csv_mode:
            self._print_to_csv(headers, rows)
        else:
            self._print_to_stdout(max_lengths, headers, rows)
    def _print_to_csv(self, headers, rows):
        '''Prints the result set to a csv file'''
        writer = csv.writer(self._file)
        writer.writerow(headers)
        writer.writerows(rows)
        # Blank line separates consecutive result sets in the same file.
        writer.writerow('')
    def _print_to_stdout(self, max_lengths, headers, rows):
        '''Prints the result set to stdout'''
        nfields = len(max_lengths)
        # build and print header
        header = self._build_string(' ', ' | ', '',
            *[field.ljust(max_lengths[idx]) for idx, field in enumerate(headers)])
        print header
        # build and print separator
        sep = self._build_string('', '+', '',
            *['-' * (max_lengths[idx] + 2) for idx in xrange(nfields)])
        print sep
        # build and print fields
        for elem in rows:
            row = self._build_string(' ', ' | ', '',
                *[self._normalize(field).ljust(max_lengths[idx]) for idx, field in
                enumerate(elem)])
            print row
        # num of rows affected
        print '(%d rows)' % len(rows)
    def _normalize(self, data):
        '''Normalize data for printing'''
        # None becomes the literal string 'NULL'; everything else is str()'d.
        null = 'NULL'
        if data is None:
            return str(null)
        else:
            return str(data)
    def _build_string(self, start_token, sep_token, end_token, *strings):
        '''Returns a string starting with start_token followed by string
        separator and finalized by end_token'''
        n_strings = len(strings)
        val = start_token
        for idx, elem in enumerate(strings):
            val += elem
            # is this the last element?
            if idx + 1 == n_strings:
                val += end_token
            else:
                val += sep_token
        return val
class Processor(object):
    '''Class for processing the lines

    Dispatches input lines either to internal backslash commands
    (\\h, \\d, \\c, \\q) or to the database as raw SQL.
    '''
    def __init__(self, database):
        '''Constructor'''
        self._database = database
        self._printer = Printer()
        # construct the commands list
        # The structure is:
        # a list where each element is a tuple containing
        # a string for comparing, the help string to print
        # and the function to execute.
        # All the functions receive as args a line.
        # The final ('', ..., None) entry is a sentinel: it never matches a
        # real command and marks "fell through without a match" in
        # process_line below.
        self._commands = [
            ('\\h', '\\h: Prints this help', self._do_help),
            ('\\d', ('\\d: Lists all tables\n'
                     '\\d <table>: Describes a table'),
             self._do_describe),
            ('\\c', ('\\c: desactivates csv output\n'
                     '\\c <filename>: Activates csv output'), self._do_csv),
            ('\\q', '\\q: Exits the program', self._do_quit),
            ('', '<SQL command>: Executes SQL', None),
        ]
    def get_commands(self):
        '''Returns a dict with internal commands
        The structure is a dict, where each element maps the command to the
        kind of argument it accepts (or None for no argument)'''
        comms = dict()
        for elem in self._commands:
            if elem[0]:
                if elem[0] == '\\d':
                    comms['\\d'] = 'table'
                elif elem[0] == '\\c':
                    comms['\\c'] = 'filename'
                else:
                    comms[elem[0]] = None
        return comms
    def _print_help(self):
        '''Prints commands help'''
        for line in self._commands:
            print >> sys.stderr, line[1]
    def _do_help(self, line):
        '''Prints the help (takes no arguments)'''
        command = line.split()
        if len(command) != 1:
            print >> sys.stderr, ('Invalid number of arguments,'
                                  'use \\h for help')
            return
        self._print_help()
    def _do_quit(self, line):
        '''Function to exit the line processor'''
        # A cheap trick to exit: io_loop catches EOFError and shuts down
        raise EOFError
    def _do_describe(self, line):
        '''Execute the describe operation

        With no argument lists all tables; with one argument describes
        the named table.
        '''
        command = line.split()
        if len(command) > 2:
            print >> sys.stderr, ('Invalid number of arguments,'
                                  'use \\h for help')
            return
        if len(command) == 1:
            return self._database.run_list_tables()
        elif len(command) == 2:
            return self._database.run_describe(command[1])
    def _do_csv(self, line):
        '''Execute the csv operation

        With no argument deactivates CSV output; with one argument
        activates it writing to the given filename.
        '''
        command = line.split()
        if len(command) > 2:
            print >> sys.stderr, ('Invalid number of arguments,'
                                  'use \\h for help')
            return
        if len(command) == 1:
            return self._printer.deactivate_csv()
        elif len(command) == 2:
            return self._printer.activate_csv(command[1])
    def process_line(self, line_readed):
        '''Process the line accordingly: backslash command or SQL'''
        line = line_readed.strip()
        if not line:
            # Empty line
            return
        if line.startswith('\\'):
            # its a command
            command = line.split()
            for comm in self._commands:
                if comm[0] == command[0]:
                    # Command match, run associated function
                    rset = comm[2](line)
                    break
                if comm[0] == '':
                    # Reached the sentinel entry: no command matched
                    print >> sys.stderr, 'Unknown command, use \\h for help'
                    rset = None
                    break
        else:
            # SQL
            rset = self._database.execute_query(line)
        # Commands like \c/\q return None; only print when there is data
        if rset:
            self._printer.print_result_set(rset)
    def close(self):
        '''Close csv file'''
        # close csv file
        if self._printer.csv_mode:
            self._printer.deactivate_csv()
class DBcompleter(object):
    '''Class for the autocompletion

    Implements the readline completer protocol: complete(text, state) is
    called repeatedly with increasing state until it returns None.
    Completes internal backslash commands, SQL keywords, table names and
    table.field references by querying the database.
    '''
    def __init__(self, database, commands_dict):
        '''Constructor'''
        self._database = database
        # Mapping of command -> kind of argument (from Processor.get_commands)
        self._commands_dict = commands_dict
        self._commands = [key for key in self._commands_dict]
        # SQL keywords that can start a statement
        self._sql_opers_head = ('SELECT',)
        # Keywords after which a table name is expected
        self._sql_pretable = ('FROM', 'JOIN')
        # Keywords after which a table.field reference is expected
        self._sql_prefield = ('WHERE',)
    def _get_tables(self, text, state):
        '''Give the posible table alternatives'''
        if state == 0:
            # cache tables on the first call of a completion cycle
            rset = self._database.run_list_tables()
            self._tables = [row[0] for row in rset]
        return [table + ' ' for table in self._tables if
                table.startswith(text.upper())]
    def _get_fields(self, tokens, text, state):
        ''' Give the posible table.field alternatives'''
        if state == 0:
            # cache tables and fields
            # first get all the tables written, so we can run the describe
            # then with the table and its fields, build a dictionary where
            # the keys are the tables and the data is a list of tables fields
            self._table_fields = dict()
            pre_table_readed = False
            for elem in tokens:
                if pre_table_readed:
                    # this must be a table, run describe and store fields
                    if elem.upper() in self._table_fields:
                        # It has been processed before, skip it
                        continue
                    rset = self._database.run_describe(elem.upper())
                    self._table_fields[elem.upper()] = [row[0] for row in rset]
                    pre_table_readed = False
                    continue
                if elem.upper() in self._sql_pretable:
                    pre_table_readed = True
        # Need to check if the text has the table already concatenated with a
        # dot or not
        parts = text.split('.')
        if len(parts) < 2:
            # We can't give alternatives to the field yet, so we only give the
            # table names
            return [table + '.' for table in self._table_fields if
                    table.startswith(parts[0].upper())]
        if len(parts) == 2:
            # We have a table and a part of a field or a field, try to
            # give alternatives
            if parts[0].upper() not in self._table_fields:
                # This table has not been metioned before
                return list()
            return [parts[0].upper() + '.' + field + ' ' for field in
                    self._table_fields[parts[0].upper()] if
                    field.startswith(parts[1].upper())]
        return list()
    def _complete_command_args(self, command, text, state):
        '''Given a command returns the valid command args'''
        option = self._commands_dict.get(command, None)
        if not option:
            # Nothing to complete
            return list()
        if option == 'table':
            return self._get_tables(text, state)
        # It has an option, but I don't know how to autocomplete it
        # (e.g. \c takes a filename)
        return list()
    def _complete_command(self, text, state):
        '''Give the command autocompletion alternatives'''
        buff = readline.get_line_buffer().lstrip()
        tokens = buff.split()
        if len(tokens) == 1:
            if tokens[0] == '\\':
                # Handle this special case that confuses readline
                # Return the commands without the blackslash
                return [elem[1:] + ' ' for elem in self._commands]
            if tokens[0][1:] == text:
                # Check if we can continue autocompleting
                # Remember that readline swallows the \
                return [elem[1:] + ' ' for elem in self._commands if
                        elem.startswith(tokens[0])]
            return self._complete_command_args(tokens[0], text, state)
        if (len(tokens) == 2 and not text) or len(tokens) > 2:
            # Don't complete options with more than 2 args
            # or that have two tokens and the text is empty
            return list()
        return self._complete_command_args(tokens[0], text, state)
    def _sql_complete(self, text, state):
        '''Give the sql autocompletion alternatives'''
        # Autocomplete first part, after from table, after join table after
        # where field
        buff = readline.get_line_buffer().lstrip()
        tokens = buff.split()
        if len(tokens) == 1:
            # Complete first part of the command
            if not text:
                # check if we are still autocompleting the first token
                # if we are not text is empty then there are no options
                return list()
            return [elem + ' ' for elem in self._sql_opers_head if
                    elem.startswith(text.upper())]
        # there are more than one token, check if we can autocomplete the
        # tables or table fields
        # Get the previous token for checking if we need to autocomplete
        # (if text is non-empty the last token is the word being completed)
        if text:
            previous = tokens[-2].upper()
        else:
            previous = tokens[-1].upper()
        if previous in self._sql_pretable:
            # Autocomplete the table names
            return self._get_tables(text, state)
        if previous in self._sql_prefield:
            # Autocomplete the field name
            return self._get_fields(tokens, text, state)
        return list()
    def complete(self, text, state):
        '''The autocompletion function that readline calls

        Returns the state-th alternative, or None when exhausted
        (the trailing None appended below terminates the cycle).
        '''
        buff = readline.get_line_buffer().lstrip()
        if not buff:
            # empty, give all the options
            options = [comm + ' ' for comm in self._commands]
            options.extend([oper + ' ' for oper in self._sql_opers_head])
        else:
            if buff.startswith('\\'):
                # Command
                options = self._complete_command(text, state)
            else:
                # SQL
                options = self._sql_complete(text, state)
        options.append(None)
        return options[state]
def print_usage():
    '''Prints command usage to stderr. =( I can't use argparse'''
    # I miss argparse
    print >> sys.stderr, ('Usage: %s <oracle connection string (DSN)>' %
                          sys.argv[0])
def io_loop(processor):
    '''Prompt reading loop

    Reads lines until EOF (Ctrl-D), Ctrl-C, or \\q (which raises
    EOFError from Processor._do_quit), then closes the processor.
    '''
    prompt = 'pysqlcli> '
    while True:
        try:
            line = raw_input(prompt)
            processor.process_line(line)
        except(EOFError, KeyboardInterrupt):
            # Old schoold exception handling, dated python =(
            # cosmetic ending
            processor.close()
            print
            break
def _main():
    '''Main function: connect, wire up readline, run the prompt loop'''
    if len(sys.argv) != 2:
        print_usage()
        sys.exit(1)
    dsn = sys.argv[1]
    database = Database(dsn)
    # Enables tab completion and the history magic
    readline.parse_and_bind("tab: complete")
    # load the history file if it exists
    histfile = os.path.join(os.path.expanduser("~"), ".pysqlcli_history")
    try:
        readline.read_history_file(histfile)
    except IOError:
        pass
    # register the function to save the history at exit. THX python examples
    atexit.register(readline.write_history_file, histfile)
    processor = Processor(database)
    db_completer = DBcompleter(database, processor.get_commands())
    readline.set_completer(db_completer.complete)
    io_loop(processor)
    database.close()
if __name__ == '__main__':
    _main()
|
UTF-8
|
Python
| false | false | 2,013 |
1,297,080,168,841 |
a495fa9295e695f53bb4624d7bf047e741d7f741
|
bb28e183ab5d687f5caecafa04c04593579ef607
|
/little/views.py
|
31a4ea83ef58b3cfb4c0f67e72c77be8541d595d
|
[
"MIT"
] |
permissive
|
leotop/little
|
https://github.com/leotop/little
|
fe144688f6db582f58570d8b60546baa0e1e0392
|
96be7796c576af7e4432a933648a76ac29ce9bf3
|
refs/heads/master
| 2020-12-14T09:01:20.779207 | 2014-08-14T22:50:24 | 2014-08-14T22:50:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
from django.contrib.sites.models import get_current_site
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponse
from django.shortcuts import redirect
from .models import APIKey, Short, Visit
def _record_visit(request, short):
    """Persist a Visit row for this request against the given short link."""
    meta = request.META
    # Prefer the direct peer address; fall back to proxy-supplied headers.
    client_ip = (meta.get('REMOTE_ADDR')
                 or meta.get('HTTP_X_REAL_IP')
                 or meta.get('HTTP_X_FORWARDED_FOR'))
    return Visit.objects.create(
        short=short,
        remote_addr=client_ip,
        user_agent=meta.get('HTTP_USER_AGENT'),
        referrer=meta.get('HTTP_REFERER'),
    )
def short_detail(request, short_key):
    """Resolve a short key, record the visit, and redirect.

    Redirects to the short's destination URL when set, otherwise to its
    image URL. Raises Http404 for an unknown key.
    """
    try:
        short = Short.objects.get_for_key(short_key)
    except Short.DoesNotExist as e:
        # str(e) instead of e.message: BaseException.message is deprecated
        # since Python 2.6 and removed in Python 3.
        raise Http404(str(e))
    _record_visit(request, short)
    if short.destination:
        return redirect(short.destination)
    return redirect(short.image.url)
def short_create(request):
    """Create (or reuse) a Short for ``?url=`` authenticated by ``?key=``.

    Returns the full short URL as plain text. Raises Http404 when the
    API key is unknown (previously an uncaught DoesNotExist caused a 500).
    """
    url = request.GET.get('url')
    api_key = request.GET.get('key')
    try:
        user = APIKey.objects.get(key=api_key).user
    except APIKey.DoesNotExist as e:
        # Same handling style as short_detail: reject explicitly instead of
        # letting the exception bubble up as a server error.
        raise Http404(str(e))
    # get_or_create deduplicates shorts per (destination, creator) pair.
    short, __ = Short.objects.get_or_create(
        destination=url,
        created_by=user,
    )
    domain = get_current_site(request).domain
    short_path = reverse('short_detail', kwargs={'short_key': short.key})
    short_url = '{scheme}://{domain}{short_path}'.format(
        scheme=settings.SHORT_SCHEME,
        domain=domain,
        short_path=short_path)
    return HttpResponse(short_url, content_type='text/plain')
|
UTF-8
|
Python
| false | false | 2,014 |
7,636,451,854,668 |
685ba40640c1fed3c3e55afa21990c431bd57d4d
|
cc5ba1f93947152d2ec1f3afb4b38a46f5b8da25
|
/setup.py
|
a94e91ce1350e8908a6a83aeacdacca15b136640
|
[
"BSD-2-Clause"
] |
permissive
|
scopatz/pyjsoncpp
|
https://github.com/scopatz/pyjsoncpp
|
96526b33b41f420b27f98186aeb6ded5210b01d2
|
384eeb7b7f6d37e4f4a2e12c7d2ded5dd26a6a4d
|
refs/heads/master
| 2021-06-06T03:43:07.608873 | 2012-10-16T16:53:44 | 2012-10-16T16:53:44 | 5,564,576 | 3 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os
import sys
import glob
import json
from copy import deepcopy
from distutils.core import setup, run_setup
from distutils import sysconfig
from distutils.ccompiler import CCompiler
from distutils.extension import Extension
from distutils.util import get_platform
from distutils.file_util import copy_file, move_file
from distutils.dir_util import mkpath, remove_tree
from distutils.sysconfig import get_python_version, get_config_vars, get_python_lib
from Cython.Distutils import build_ext
from Cython.Compiler.Version import version as CYTHON_VERSION
# Package version metadata.
INFO = {
    'version': '0.1',
}
SITE_PACKAGES = get_python_lib()
# Per-platform base directory name for the local pyne install
# ('.local' everywhere except OS X, which uses 'Library').
_local_subsititues = {'darwin': 'Library'}
HOME = os.environ['HOME'] if os.name != 'nt' else os.environ['UserProfile']
PYNE_DIR = os.path.join(HOME,
                        _local_subsititues.get(sys.platform, '.local'),
                        'pyne')
# HDF5 location: taken from the environment, optionally overridden by a
# --hdf5=<path> command line flag (consumed below so distutils never sees it).
HDF5_DIR = os.environ.get('HDF5_DIR', '')
args = sys.argv[:]
for arg in args:
    if arg.find('--hdf5=') == 0:
        HDF5_DIR = os.path.expanduser(arg.split('=')[1])
        sys.argv.remove(arg)
# Thanks to http://patorjk.com/software/taag/
# and http://www.chris.com/ascii/index.php?art=creatures/dragons
# for ASCII art inspiriation
###########################################
### Set compiler options for extensions ###
###########################################
pyt_dir = os.path.join('jsoncpp')
cpp_dir = os.path.join('cpp')
dat_dir = os.path.join('data')
# HDF5 stuff
#posix_hdf5_libs = ["z", "m", "hdf5", "hdf5_hl",]
posix_hdf5_libs = ["hdf5", "hdf5_hl",]
#nt_hdf5_libs = ["szip", "zlib1", "hdf5dll", "hdf5_hldll",]
nt_hdf5_libs = ["hdf5dll", "hdf5_hldll",]
nt_hdf5_extra_compile_args = ["/EHsc"]
nt_hdf5_macros = [("_WIN32_MSVC", None), ("_HDF5USEDLL_", None),]
###############################
### Platform specific setup ###
###############################
def darwin_linker_paths():
    """Prepend the pyne lib directory to every linker search-path variable."""
    pyne_lib = os.path.join(PYNE_DIR, 'lib')
    env_vars = ['LD_LIBRARY_PATH', 'DYLD_FALLBACK_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'LIBRARY_PATH']
    for name in env_vars:
        current = os.getenv(name, '')
        entries = [pyne_lib]
        # Keep any pre-existing value after the pyne path.
        if current:
            entries.append(current)
        os.environ[name] = ":".join(entries)
def darwin_build_ext_decorator(f):
    """Wrap build_ext.build_extension so that extensions named lib* are
    additionally copied into the pyne lib directory after building."""
    def new_build_ext(self, ext):
        result = f(self, ext)
        basename = ext.name.split('.')[-1]
        if basename.startswith('lib'):
            target = os.path.join(PYNE_DIR, 'lib')
            if not os.path.exists(target):
                mkpath(target)
            copy_file(self.get_ext_fullpath(ext.name), target)
        return result
    return new_build_ext
def darwin_setup():
    '''OS X specific setup: fix linker env vars and monkey-patch
    build_ext so built lib* extensions are copied to the pyne lib dir.'''
    darwin_linker_paths()
    build_ext.build_extension = darwin_build_ext_decorator(build_ext.build_extension)
def win32_finalize_opts_decorator(f):
    '''Wrap build_ext.finalize_options so that the "__COMPILER__" and
    "__USE_HDF5__" placeholder tokens (inserted by cpp_ext for win32) are
    substituted with compiler-specific flags once the compiler is known.'''
    def replace(lst, val, rep):
        # Replace the first occurrence of val in lst with the items of rep,
        # in place, preserving position. No-op when val is absent.
        if val not in lst:
            return lst
        ind = lst.index(val)
        del lst[ind]
        for r in rep[::-1]:
            lst.insert(ind, r)
        return lst
    def posix_like_ext(ext):
        # mingw32/cygwin: drop MSVC-only placeholders, use posix hdf5 libs.
        replace(ext.extra_compile_args, "__COMPILER__", [])
        replace(ext.define_macros, "__COMPILER__", [])
        replace(ext.libraries, "__USE_HDF5__", nt_hdf5_libs)
        replace(ext.extra_compile_args, "__USE_HDF5__", [])
        replace(ext.define_macros, "__USE_HDF5__", [])
    def nt_like_ext(ext):
        # MSVC: exception-handling flag, macro, and /DEFAULTLIB hdf5 linkage.
        replace(ext.extra_compile_args, "__COMPILER__", ["/EHsc"])
        replace(ext.define_macros, "__COMPILER__", [("_WIN32_MSVC", None)])
        replace(ext.libraries, "__USE_HDF5__",
                ["/DEFAULTLIB:" + lib + ".lib" for lib in nt_hdf5_libs])
        replace(ext.extra_compile_args, "__USE_HDF5__", nt_hdf5_extra_compile_args)
        replace(ext.define_macros, "__USE_HDF5__", nt_hdf5_macros)
    # Dispatch table keyed by the distutils compiler name.
    update_ext = {'mingw32': posix_like_ext,
                  'cygwin': posix_like_ext,
                  'msvc': nt_like_ext,
                  }
    def new_finalize_opts(self):
        rtn = f(self)
        comp = self.compiler
        for ext in self.extensions:
            update_ext[comp](ext)
        if sysconfig.get_config_var('CC') is None:
            if comp in ['mingw32', 'cygwin']:
                # Hack to get compiler to be recognized
                sysconfig._config_vars['CC'] = 'gcc'
        return rtn
    return new_finalize_opts
def win32_build_ext_decorator(f):
    """Wrap build_extension so extensions named lib* are emitted with a
    .dll suffix instead of the default .pyd on Windows."""
    def new_build_ext(self, ext):
        is_shared_lib = ext.name.split('.')[-1].startswith('lib')
        config_vars = get_config_vars()
        if is_shared_lib:
            # Temporarily switch the shared-object suffix for this build.
            config_vars['SO'] = '.dll'
        result = f(self, ext)
        if is_shared_lib:
            # Restore the Python-extension suffix afterwards.
            config_vars['SO'] = '.pyd'
        return result
    return new_build_ext
def win32_get_exp_sym_decorator(f):
    """Wrap get_export_symbols so that lib* extensions do not export a
    Python module init function (they are plain DLLs, not modules)."""
    def new_get_exp_sym(self, ext):
        symbols = f(self, ext)
        basename = ext.name.split('.')[-1]
        init_symbol = 'init' + basename
        if basename.startswith('lib') and init_symbol in symbols:
            symbols.remove(init_symbol)
        return symbols
    return new_get_exp_sym
def win32_exec_decorator(f):
    '''Wrap CCompiler.execute so that .def files written for non-lib
    extensions have their LIBRARY directive stripped before writing.'''
    def new_exec(self, func, args, msg=None, level=1):
        # (filename, contents) pairs for .def files that are NOT lib* get
        # any "LIBRARY ..." lines filtered out.
        if 2 == len(args) and args[0].endswith('.def') and not args[0].startswith('lib'):
            filename, contents = args
            contents = [c for c in contents if not c.startswith('LIBRARY ')]
            args = (filename, contents)
            # NOTE(review): looks like leftover debug output -- consider removing
            print "Args = ", args
        rtn = f(self, func, args, msg, level)
        return rtn
    return new_exec
def win32_setup():
    '''Windows specific setup: monkey-patch distutils build_ext and
    CCompiler with the win32 decorators defined above.'''
    build_ext.finalize_options = win32_finalize_opts_decorator(build_ext.finalize_options)
    build_ext.build_extension = win32_build_ext_decorator(build_ext.build_extension)
    build_ext.get_export_symbols = win32_get_exp_sym_decorator(build_ext.get_export_symbols)
    #build_ext.execute = win32_exec_decorator(build_ext.execute)
    CCompiler.execute = win32_exec_decorator(CCompiler.execute)
# Per-platform setup hooks; platforms not listed need no special handling.
platform_setup = {'darwin': darwin_setup, 'win32': win32_setup}
##########################
### Exetension Creator ###
##########################
def cpp_ext(name, sources, libs=None, use_hdf5=False):
    """Helper function for setting up extension dictionary.

    Returns a dict of keyword arguments for distutils Extension, with
    include/library paths and platform-specific compile/link flags filled
    in. On win32 the "__COMPILER__"/"__USE_HDF5__" placeholder tokens are
    inserted and later substituted by win32_finalize_opts_decorator.

    Parameters
    ----------
    name : str
        Module name
    sources : list of str
        Files to compile
    libs : list of str
        Additional files to link against
    use_hdf5 : bool
        Link against hdf5?
    """
    ext = {'name': name}
    # .cpp files live in cpp_dir, .pyx files in pyt_dir; anything else
    # is taken as-is.
    ext['sources'] = [os.path.join(cpp_dir, s) for s in sources if s.endswith('cpp')] + \
                     [os.path.join(pyt_dir, s) for s in sources if s.endswith('pyx')] + \
                     [s for s in sources if not any([s.endswith(suf) for suf in ['cpp', 'pyx']])]
    ext["libraries"] = []
    ext['include_dirs'] = [pyt_dir, cpp_dir]
    if 0 < len(HDF5_DIR):
        ext['include_dirs'].append(os.path.join(HDF5_DIR, 'include'))
    ext['define_macros'] = [('JSON_IS_AMALGAMATION', None)]
    ext['language'] = "c++"
    # may need to be more general
    ext['library_dirs'] = ['build/lib/jsoncpp/lib',
                           'build/lib.{0}-{1}/jsoncpp/lib'.format(get_platform(), get_python_version()),
                           ]
    if os.name == 'nt':
        ext['library_dirs'] += [SITE_PACKAGES, os.path.join(HDF5_DIR, 'dll'),]
    ext['library_dirs'].append(os.path.join(HDF5_DIR, 'lib'))
    # perfectly general, thanks to dynamic runtime linking of $ORIGIN
    #ext['runtime_library_dirs'] = ['${ORIGIN}/lib', '${ORIGIN}']
    ext['runtime_library_dirs'] = ['${ORIGIN}/lib', '${ORIGIN}', '${ORIGIN}/.',
                                   '${ORIGIN}/../lib', '${ORIGIN}/..',]
    if sys.platform == 'linux2':
        #ext["extra_compile_args"] = ["-Wno-strict-prototypes"]
        ext["undef_macros"] = ["NDEBUG"]
        if use_hdf5:
            ext["libraries"] += posix_hdf5_libs
        if libs is not None:
            ext["libraries"] += libs
    elif sys.platform == 'darwin':
        ext["undef_macros"] = ["NDEBUG"]
        if use_hdf5:
            ext["libraries"] += posix_hdf5_libs
        if libs is not None:
            ext["libraries"] += libs
        config_vars = get_config_vars()
        #config_vars['SO'] = '.dylib'
        # Build as a dynamic library rather than a bundle on OS X.
        config_vars['LDSHARED'] = config_vars['LDSHARED'].replace('-bundle', '-Wl,-x')
        ext['library_dirs'] = []
        ext['runtime_library_dirs'] = []
        ext["extra_compile_args"] = ["-dynamiclib",
                                     "-undefined", "dynamic_lookup",
                                     '-shared',
                                     ]
        ext["extra_link_args"] = ["-dynamiclib",
                                  "-undefined", "dynamic_lookup",
                                  '-shared',
                                  "-install_name" , os.path.join(PYNE_DIR, 'lib', name.split('.')[-1] + config_vars['SO']),
                                  ]
    elif sys.platform == 'win32':
        # Placeholders; substituted later by win32_finalize_opts_decorator
        # once the actual compiler (msvc vs mingw32/cygwin) is known.
        ext["extra_compile_args"] = ["__COMPILER__"]
        ext["define_macros"] += ["__COMPILER__"]
        if use_hdf5:
            ext["libraries"].append("__USE_HDF5__")
            ext["extra_compile_args"].append("__USE_HDF5__")
            ext["define_macros"].append("__USE_HDF5__")
        if libs is not None:
            ext["libraries"] += libs
    elif sys.platform == 'cygwin':
        pass
    return ext
#
# For extensions
#
exts = []
# Python extension modules
# JsonCpp Wrapper
exts.append(cpp_ext("jsoncpp", ['jsoncpp.cpp', 'jsoncpp.pyx']))
##########################
### Setup Package Data ###
##########################
packages = ['jsoncpp',]
pack_dir = {'jsoncpp': 'jsoncpp',}
# Ship the copied headers and pxd files (placed by jsoncpp_setup below).
pack_data = {'jsoncpp': ['includes/*.h', 'includes/jsoncpp/*.pxd', '*.json'],
             }
ext_modules=[Extension(**ext) for ext in exts]
# Compiler directives
compiler_directives = {'embedsignature': False}
for e in ext_modules:
    e.pyrex_directives = compiler_directives
# Utility scripts
scripts=[]
def make_cython_version():
    """Write jsoncpp/includes/cython_version.pxi exposing the Cython
    version as compile-time DEF constants."""
    template = ("# Cython compile-time version information\n"
                "DEF CYTHON_VERSION_MAJOR = {major}\n"
                "DEF CYTHON_VERSION_MINOR = {minor}\n"
                "DEF CYTHON_VERSION_BUILD = {build}")
    # Drop any "-<tag>" suffix, then split into numeric components.
    parts = CYTHON_VERSION.split('-')[0].split('.')
    # Pad with zeros so there are always major/minor/build entries.
    while len(parts) < 3:
        parts = parts + [0]
    numbers = dict((key, int(part)) for key, part in
                   zip(['major', 'minor', 'build'], parts))
    with open('jsoncpp/includes/cython_version.pxi', 'w') as f:
        f.write(template.format(**numbers))
def cleanup(mdpath="<None>"):
    """Remove the copied include tree and, when present, the metadata file.

    The default mdpath is a sentinel that never exists on disk, so the
    metadata removal is skipped unless a real path is passed.
    """
    includes = 'jsoncpp/includes'
    if os.path.exists(includes):
        remove_tree(includes)
    if os.path.exists(mdpath):
        os.remove(mdpath)
def final_message(setup_success=True):
    '''Print a failure notice unless setup completed successfully.'''
    if setup_success:
        return
    msg = "Compilation failed!"
    print msg
###################
### Call setup! ###
###################
def jsoncpp_setup():
    '''Drive the whole build: copy headers/pxd files into the package,
    write the cython version pxi, apply platform tweaks, run setup(),
    and always clean up the copied includes afterwards.'''
    # clean includes dir and recopy files over
    if os.path.exists('jsoncpp/includes'):
        remove_tree('jsoncpp/includes')
    mkpath('jsoncpp/includes')
    for header in glob.glob('cpp/*.h'):
        copy_file(header, 'jsoncpp/includes')
    mkpath('jsoncpp/includes/jsoncpp')
    for header in glob.glob('jsoncpp/*.pxd'):
        copy_file(header, 'jsoncpp/includes/jsoncpp')
    make_cython_version()
    # Platform specific setup (no-op on platforms without a hook)
    platform_setup.get(sys.platform, lambda: None)()
    setup_kwargs = {
        "name": "pyjsoncpp",
        "version": INFO['version'],
        "description": 'Python bindings for JsonCpp',
        "author": 'Anthony Scopatz',
        "author_email": '[email protected]',
        "url": 'http://scopatz.github.com/',
#        "packages": packages,
#        "package_dir": pack_dir,
#        "package_data": pack_data,
        "cmdclass": {'build_ext': build_ext},
        "ext_modules": ext_modules,
        "scripts": scripts,
        }
    # call setup; cleanup() runs whether or not it succeeds
    setup_success= False
    try:
        rtn = setup(**setup_kwargs)
        setup_success= True
    finally:
        cleanup()
    final_message(setup_success)
if __name__ == "__main__":
    jsoncpp_setup()
|
UTF-8
|
Python
| false | false | 2,012 |
11,235,634,458,784 |
461291c16ebd1388e96ab5cab96c9fe2fe2b9433
|
3017e7f0e8cd99469c7c98ec8a4b9b75d39c0c2f
|
/pythonkitabi/ingilizce/ch17/kalam/workspace/__init__.py
|
f75901995fe63a07fae3a493d9e09ca0bb084ae2
|
[] |
no_license
|
Rmys/projects
|
https://github.com/Rmys/projects
|
de6cb9d5d3f027d98c812647369d1e487d902c4b
|
60ce197bc1fb7ad3fa31f12559b74ee450b69df1
|
refs/heads/master
| 2020-03-19T15:36:11.603931 | 2011-09-16T00:15:34 | 2011-09-16T00:15:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Submodules re-exported by `from workspace import *`.
__all__=['workspace','splitspace','stackspace','tabmanager','listspace']
|
UTF-8
|
Python
| false | false | 2,011 |
2,800,318,704,492 |
684256b92711a0ce675920194256d4c59ee36e65
|
89f860d967a7570c16ff2631975d257d5703bd00
|
/alek/milestone.py
|
fde2255fef5b0ed1d46e7e765eabb66065557ad2
|
[] |
no_license
|
alekdimi/cablegate
|
https://github.com/alekdimi/cablegate
|
5e9d0790aa8d043c276385bc7bf154bc11f91fb3
|
50555eb7e932c774f1fc8d83064c900c4a305650
|
refs/heads/master
| 2020-05-15T07:02:10.337441 | 2014-12-09T22:33:08 | 2014-12-09T22:33:08 | 27,280,041 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import math
import snap
import numpy as np
import cPickle as pickle
from scipy.spatial.distance import cosine
from scipy.stats import entropy
from nltk.stem import WordNetLemmatizer
from nltk.corpus import wordnet
def cosine_dict(d1, d2):
    """Cosine similarity of two sparse vectors stored as {key: weight} dicts.

    Returns 0 when either dict is empty. Raises ZeroDivisionError when both
    are non-empty but all weights are zero (unchanged from the original).
    """
    if d1 and d2:
        sumxx = sum(y * y for y in d1.values())
        sumyy = sum(y * y for y in d2.values())
        # Dot product via O(1) dict lookups -- the original compared every
        # pair of items (O(n*m)); this is a single O(n) pass.
        sumxy = sum(y1 * d2[x1] for x1, y1 in d1.items() if x1 in d2)
        return sumxy/math.sqrt(sumxx*sumyy)
    else:
        #print "either or both empty"
        return 0
def normalize(x):
    """Scale the sequence so its entries sum to 1.

    An all-zero (or empty) input is returned unchanged to avoid dividing
    by zero.
    """
    total = sum(x)
    if total == 0:
        return x
    total = float(total)
    return [value / total for value in x]
def hist_intersect(u, v):
    """Histogram intersection: sum of elementwise minima.

    Returns 0 immediately when either histogram sums to zero.
    """
    if sum(u) == 0 or sum(v) == 0:
        return 0
    # Indexing by len(u) (not zip) preserves the original IndexError
    # behavior when v is shorter than u.
    return sum(min(u[i], v[i]) for i in range(len(u)))
def hellinger(u, v):
    """Similarity derived from the Hellinger distance: 1 - sqrt(1 - BC),
    where BC is the Bhattacharyya coefficient sum(sqrt(u_i * v_i)).

    Returns 0 immediately when either histogram sums to zero.
    """
    if sum(u) == 0 or sum(v) == 0:
        return 0
    bhatta = sum(math.sqrt(u[i] * v[i]) for i in range(len(u)))
    return 1 - math.sqrt(1 - bhatta)
def unused(vex):
    '''Compute pairwise similarity matrices between the vectors in vex and
    dump each to a text file.

    NOTE(review): as the name suggests this appears to be dead code.
    chi_sq_adj and kl_div_adj's naming aside, chi_sq_adj is allocated but
    never filled or saved.
    '''
    cosine_adj = np.zeros((len(vex), len(vex)))
    kl_div_adj = np.zeros((len(vex), len(vex)))
    chi_sq_adj = np.zeros((len(vex), len(vex)))
    hist_i_adj = np.zeros((len(vex), len(vex)))
    bhatta_adj = np.zeros((len(vex), len(vex)))
    # Each matrix is symmetric, so only the upper triangle is computed.
    for i in xrange(len(vex)):
        for j in xrange(i + 1, len(vex)):
            cosine_adj[i, j] = cosine_adj[j, i] = 1 - cosine(vex[i], vex[j])
    np.savetxt('cosine.txt', cosine_adj, delimiter=' ')
    print "COSINE DONE"
    for i in xrange(len(vex)):
        for j in xrange(i + 1, len(vex)):
            kl_div_adj[i, j] = kl_div_adj[j, i] = 1 - entropy(vex[i], vex[j])
    np.savetxt('kl_div.txt', kl_div_adj, delimiter=' ')
    print "KL DONE"
    for i in xrange(len(vex)):
        for j in xrange(i + 1, len(vex)):
            hist_i_adj[i, j] = hist_i_adj[j, i] = hist_intersect(normalize((vex[i])), normalize(vex[j]))
    np.savetxt('hist_i.txt', hist_i_adj, delimiter=' ')
    print "HIST INTERSECT DONE"
    for i in xrange(len(vex)):
        for j in xrange(i + 1, len(vex)):
            bhatta_adj[i, j] = bhatta_adj[j, i] = hellinger(normalize((vex[i])), normalize(vex[j]))
    np.savetxt('bhatta.txt', bhatta_adj, delimiter=' ')
    print "ALL DONE"
# READ FILE FROM TXT
cosine_matrix = np.loadtxt('cosine.txt')
# NOTE(review): cosine_t1 is copied but never used, and the thresholding is
# applied to cosine_matrix itself, so the cutoffs accumulate across
# iterations -- presumably the copy was meant to be thresholded instead.
for threshold in [0, 0.2, 0.4, 0.6, 0.8]:
    cosine_t1 = cosine_matrix.copy()
    cosine_matrix[ cosine_matrix < threshold ] = 0
def get_vectors():
    '''Build sparse per-cable word-count vectors from word_freq_dict.p and
    pickle them to cable_vectors.p.

    Each vector is a dict mapping word-index (into the global vocabulary
    built below) to that word's frequency in the cable.
    '''
    word_freqs = pickle.load(open("word_freq_dict.p", "rb"))
    key_list, cable_vectors = [], []
    # Collect the global vocabulary: union of all words across all cables.
    for k, v in word_freqs.items():
        key_list += v.keys()
    word_list = list(set(key_list))
    '''
    # Lemmatization
    lemm_word = []
    lem = WordNetLemmatizer()
    for word in word_list:
        lemm_word.append(lem.lemmatize(word))
    word_list = lemm_word
    '''
    #for k, v in word_freqs.items():
    #    vect = [0] * len(word_list)
    #    for i, word in enumerate(word_list):
    #        if word in v:
    #            vect[i] = v[word]
    #    cable_vectors.append(vect)
    #    #print sum([1 for x in vect if x > 0])
    print "gonna pop keys and create vectors"
    wl_len = len(word_list)
    enums = [pair for pair in enumerate(word_list)]
    all_keys = word_freqs.keys()
    print "start"
    #for j in xrange(len(all_keys)):
    #    print j
    #    freq = word_freqs.pop(all_keys[j])
    #    vect = [0] * wl_len
    #    for i, word in enums:
    #        if word in freq:
    #            vect[i] = freq[word]
    #    cable_vectors.append(vect)
    set_word_list = set(word_list)
    counter = 0
    for ids in word_freqs.keys():
        counter += 1
        print counter
        # NOTE(review): hard cap at 100 cables -- looks like a debug limit
        if counter == 100: break
        # pop() frees each entry as it is processed to keep memory down
        freq = word_freqs.pop(ids)
        vect = {}
        freq_keys = freq.keys()
        for idx, word in enums:
            if word in freq_keys:
                vect[idx] = freq[word]
        #for word in freq_keys:
        #    if word in set_word_list:
        #        vect[word_list.index(word)] = freq[word]
        cable_vectors.append(vect)
    pickle.dump(cable_vectors, open("cable_vectors.p", "wb"))
    #return cable_vectors
#return cable_vectors
# MAIN!
# Load the precomputed similarity graph and plot its degree distribution.
G2 = snap.LoadEdgeList(snap.PUNGraph, "NLP_graph.txt", 0, 1)
print G2.GetNodes()
print G2.GetEdges()
snap.PlotInDegDistr(G2, "edges10k", "Undirected graph - Degree Distribution")
# NOTE(review): bare undefined name -- raises NameError here, apparently an
# intentional hack to abort the script before the graph-building code below.
asdas
#codeNLP.frequencies()
#get_vectors()
#cable_vex = pickle.load(open("cable_vectors.p", "rb"))
#print cable_vex
#print "DONE WITH GETTING"
word_freqs = pickle.load(open("word_freq_dict.p", "rb"))
#cosine_adj = np.zeros((len(word_freqs), len(word_freqs)))
#cosine_adj = np.zeros((40000, 40000))
items = word_freqs.items()[:10000]
item_len = len(items)
#        G.AddNode(idi)
#print "start"
# Build an undirected graph: one node per cable, an edge whenever the
# cosine similarity of two cables' word-frequency dicts exceeds threshold.
G = snap.TUNGraph.New()
for idx1 in xrange(0, item_len):
    G.AddNode(items[idx1][0])
threshold = 0.1
for idx1 in xrange(0, item_len):
    print idx1
    for idx2 in xrange(idx1 + 1, item_len):
        if cosine_dict(items[idx1][1], items[idx2][1]) > threshold:
            G.AddEdge(items[idx1][0], items[idx2][0])
#print "end"
#print G.GetNodes()
#print G.GetEdges()
#snap.PlotInDegDistr(G, "example", "Undirected graph - in-degree Distribution")
#snap.SaveEdgeList(G, 'NLP_graph.txt')
G2 = snap.LoadEdgeList(snap.PUNGraph, "NLP_graph.txt", 0, 1)
G2.GetNodes()
G2.GetEdges()
|
UTF-8
|
Python
| false | false | 2,014 |
9,328,668,989,222 |
3a7c63f0421b4b9490f6299a5c469def89d0adb8
|
a7a34545d216515a4f378e677c230b970cf11931
|
/opendataapi/umnopendata/contracts.py
|
79b86495df0cfc5f44733695aa04d328443ee8d4
|
[] |
no_license
|
mkessy/umnopendata
|
https://github.com/mkessy/umnopendata
|
76e5914dbe310ba69f0e56bf1aa99e096aa96f6d
|
554f8d122d144f2e7603d913317a62aa3cbe7945
|
refs/heads/master
| 2021-01-23T07:10:08.219388 | 2013-10-13T20:08:19 | 2013-10-13T20:08:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from scrapy.contracts import Contract
from scrapy.exceptions import ContractFail
#Contracts for umnclasses spider
class FormContract(Contract):
    """
    Contract to check the correct form fields are filled for the
    class schedule search.

    NOTE(review): both hooks are stubs -- no checks are implemented yet.
    """
    name = 'correct_form'
    def pre_process(self, response):
        # Not implemented: would validate the request/response before parsing.
        pass
    def post_process(self, output):
        # Not implemented: would validate the scraped output.
        pass
class ClassContract(Contract):
    """
    Contract that asserts the first scraped item matches a known
    fixture (AHS 1101, fall 2013).
    """
    name = 'class_contract'
    def pre_process(self, response):
        pass
    def post_process(self, output):
        ## can add tests/assertions here
        # Only the first scraped item is checked against the expected values.
        test_class = output[0]
        # {'classes': [{'class_number': [u'22575'],
        #     'class_type': u'LEC',
        #     'credits': u'1',
        #     'days': [u'M'],
        #     'end_time': u'04:25 P.M.',
        #     'instructors': [u'Simpson,Scott W', u'Rosand,Jennifer A'],
        #     'location': [u'MoosT\n        \n         \xa02-530\n        \n        ''],
        #     'mode': [u'instruction mode:\xa0Classroom/Onsite'],
        #     'section_number': u'001',
        #     'start_time': u'03:35 P.M.'}],
        # 'name': [u'S'],
        # 'number': [u'H'],
        # 'subject': [u'Academic Health Center Shared', u'AHS'],
        # 'term': u'fall_2013'}
        #
        # begin tests
        print test_class
        assert(test_class['name']==u'Orientation to Health Careers')
        assert(test_class['subject']==[u'Academic Health Center Shared', 'AHS'])
        assert(test_class['number']==u'1101')
        assert(test_class['term']==u'fall_2013')
        assert(test_class['classes'][0]['class_type']==u'LEC')
        assert(test_class['classes'][0]['credits']==u'1')
        assert(test_class['classes'][0]['days']==[u'M',])
        assert(test_class['classes'][0]['start_time']==u'03:35 P.M.')
        assert(test_class['classes'][0]['end_time']==u'04:25 P.M.')
        assert(test_class['classes'][0]['instructors']==[u'Simpson,Scott W', u'Rosand,Jennifer A'])
        assert(test_class['classes'][0]['section_number']==u'001')
        assert(test_class['classes'][0]['mode']==u'Classroom/Onsite')
        assert(test_class['classes'][0]['location']==u'MoosT 2-530')
|
UTF-8
|
Python
| false | false | 2,013 |
3,994,319,599,556 |
c959ac3ca031dfd48c29a3a1f917a72b92692288
|
50689f98f2af73064e555dd7ffb38aaf0c252c9a
|
/futiontable/website/cdnperfcalc.py
|
4838b971e8662fc50668cd63510d16a04dc461c4
|
[] |
no_license
|
arvindmahla/ruralnet
|
https://github.com/arvindmahla/ruralnet
|
daad10810014867df6de6471857071d2c84c00f6
|
46f06c06afbca178267f67031f5e4f81822d76c0
|
refs/heads/master
| 2016-09-06T19:05:58.519536 | 2014-09-08T13:47:35 | 2014-09-08T13:47:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import commands
import pickle
import pprint
import sys
import socket
import subprocess as sub
import dns.resolver
import numpy
import json
import time
from collections import Counter,defaultdict
from fuzzywuzzy import fuzz
import iso8601
import datetime
import operator
import scipy.stats
# (hostname-substring, provider-name) pairs used by finder() to classify a
# resolved hostname. Matching is plain substring containment; the first
# matching entry in list order wins.
CDN_PROVIDER = [
    [".akamai.net", "Akamai"],
    [".akafms.net","Akamai"],
    [".akamaiedge.net", "Akamai"],
    [".llnwd.net", "Limelight"],
    ["edgecastcdn.net", "EdgeCast"],
    ["hwcdn.net", "Highwinds"],
    [".panthercdn.com", "Panther"],
    [".simplecdn.net", "Simple CDN"],
    [".instacontent.net", "Mirror Image"],
    [".footprint.net", "Level3"],
    [".ay1.b.yahoo.com", "Yahoo"],
    [".yimg.", "Yahoo"],
    ["googlesyndication.", "Google"],
    [".gstatic.com", "Google"],
    [".googleusercontent.com", "Google"],
    [".internapcdn.net", "Internap"],
    [".cloudfront.net", "Amazon Cloudfront"],
    [".netdna-cdn.com", "MaxCDN"],
    [".netdna-ssl.com", "MaxCDN"],
    [".netdna.com", "MaxCDN"],
    [".cotcdn.net", "Cotendo"],
    [".cachefly.net", "Cachefly"],
    ["bo.lt", "BO.LT"],
    [".cloudflare.com", "Cloudflare"],
    [".afxcdn.net", "afxcdn.net"],
    [".lxdns.com", "lxdns.com"],
    [".att-dsa.net", "AT&T"],
    [".vo.msecnd.net", "Windows Azure"],
    [".voxcdn.net", "Voxel"],
    [".bluehatnetwork.com", "Blue Hat Network"],
    [".swiftcdn1.com", "SwiftCDN"],
    [".rncdn1.com", "Reflected Networks"],
    [".cdngc.net", "CDNetworks"],
    [".fastly.net", "Fastly"],
    [".gslb.taobao.com", "Taobao"],
    [".gslb.tbcache.com", "Alimama"],
    [".doubleclick.net", "Google Ad"],
    [".googlesyndication.com", "Google Ad"],
    ]
# Template for a bargraph.pl "cluster" chart. The <ISPLIST>, <TITLE>,
# <SORTED> and <ISPLIST_TAB> placeholders are substituted by stat() /
# statbynumobj() before per-landmark data rows are appended and the result
# is piped through bargraph.pl.
graphcontent4pltbyisp="""
=cluster;<ISPLIST>
=table<SORTED>
title=<TITLE>
yformat=%gsec
rotateby=-45
legendx=right
max=150
legendy=center
xlabelshift=0,-5
=nogridy
ylabel=<TITLE>
xlabel=Landmarks
# stretch it out in x direction
xscale=1.4
yscale=1.2
=table
# <ISPLIST_TAB>
"""
def finder(host):
    """Classify *host* against the CDN_PROVIDER substring table.

    Args:
        host: hostname (or any string containing one) to classify.

    Returns:
        The provider name of the first CDN_PROVIDER entry whose pattern
        occurs in *host*, or None when no pattern matches.
    """
    # The original kept an unused `result = None` local; dropped here.
    for pattern, provider in CDN_PROVIDER:
        if pattern in host:
            return provider
    return None
def findcdnfromhost(host, dnsip = "8.8.8.8"):
    """Resolve *host* via DNS and classify its canonical (CNAME-chased)
    name with finder().

    Args:
        host: hostname to resolve.
        dnsip: unused; kept only for call-site compatibility.

    Returns:
        CDN provider name, or None when resolution fails or no pattern
        matches the canonical name.
    """
    resolver = dns.resolver.Resolver()
    try:
        # canonical_name follows the CNAME chain, which is where CDN
        # hostnames usually show up.
        canonical = resolver.query(host).canonical_name.to_text()
    except Exception:
        # Any resolution failure is treated as "no CDN detected".
        return None
    return finder(canonical)
# ISPs whose capture folders get processed; 'reliancehsdpa' is deliberately
# excluded (was commented out in the original list).
isplist=['mtnl3g1','airtel2g','airtel3g','reliancecdma']

# Landmark site list, read once at import time. Use a context manager so the
# handle is closed even if readlines() raises.
with open('landmarks.txt', 'r') as f:
    landmarkList = f.readlines()
def main():
    """Walk every ISP capture folder, extract per-connection RTTs (tcptrace)
    and per-request URLs (tshark) from the pcap dumps plus the pickled
    navigation timings, and dump the combined structure to 'navresdata'.

    Resulting pickle shape: {isp: (folderdict, urldict)} where
    folderdict maps landmark -> list of navtiming records and urldict maps
    landmark -> list of {url: avg RTT} dicts.

    NOTE(review): Python 2 only (print statements, `commands` module); shells
    out to `ls`, `tcptrace` and `tshark`, which must be on PATH.
    """
    ispdict={}
    for isp in isplist:
        folderdict={}
        urldict={}
        folderlist=commands.getoutput('ls -1 '+isp+'/').split('\n')
        for folder in folderlist:
            path=isp+'/'+folder+'/'
            filelist=commands.getoutput('ls -1d '+isp+'/'+folder+'/* | grep navtimings_').split('\n')
            dumplist=commands.getoutput('ls -1d '+isp+'/'+folder+'/* | grep cap').split('\n')
            for dumpfile in dumplist:
                print dumpfile
                urld={}
                # Capture files are named <landmark>_... under isp/folder/.
                landmarkname=dumpfile.split('/')[2].split('_')[0]
                # Average RTT per TCP stream, indexed by tcptrace's stream order.
                rttlist=commands.getoutput('tcptrace -nlr '+dumpfile+' | grep \"RTT avg:\" | tr -s \' \' | cut -d\' \' -f4 2>/dev/null').split('\n')
                rttlist=[x for x in rttlist if ' ' not in x]
                # (request URL, tcp stream index) pairs for each HTTP request.
                urllist=commands.getoutput('tshark -n -r '+dumpfile+' -T fields -e http.request.full_uri -e tcp.stream 2>/dev/null').split('\n')
                for url in urllist:
                    data=url.split('\t')
                    if len(data[0]) > 1 and len(data[1])!=0:
                        # Join each request URL with its stream's average RTT.
                        urld[data[0]]=float(rttlist[int(data[1])])
                urldict.setdefault(landmarkname,[]).append(urld)
            for navfile in filelist:
                # navtimings files are named ..._<landmark>_...
                landmarkname=navfile.split('/')[2].split('_')[1]
                data=pickle.load(open(navfile,'rb'))
                folderdict.setdefault(landmarkname,[]).append(data)
            # filelist=commands.getoutput('ls -1d '+isp+'/'+folder+'/* | grep cap').split('\n')
            # commands.getoutput('mkdir -p '+isp+'/'+folder+'/'+'dhttp')
            # for dumpfile in filelist:
            # landmarkname=dumpfile.split('/')[2].split('_')[0]
            # # print dumpfile.split('/')[2]
            # f=open(isp+'/'+folder+'/'+'dhttp/'+landmarkname+'_http','w')
            # p=sub.Popen(['tcptrace','--output_dir=``trace\'\'','-n','-xhttp',dumpfile],stdout=f)
            # p.wait()
            # f.close()
        ispdict[isp]=(folderdict,urldict)
    # pprint.pprint(ispdict)
    pickle.dump(ispdict,open('navresdata','wb'))
def hashresd(resd):
    """Index resource-timing entries by URL.

    Args:
        resd: sequence of dicts, each carrying a 'url' key (browser
            resource-timing entries).

    Returns:
        dict mapping url -> index of that entry in *resd*. If a URL occurs
        more than once the last occurrence wins, matching the original
        loop's overwrite semantics.
    """
    return {entry['url']: i for i, entry in enumerate(resd)}
class InvalidRecord(Exception):
    """Raised by process() when a page record yields no usable CDN data."""
    def __init__(self, value):
        # Initialize the base class too, so .args, pickling and generic
        # exception handling behave normally (the original skipped this).
        Exception.__init__(self, value)
        self.value = value
    def __str__(self):
        return self.value
def process(item,origin):
    """Summarize one page-load record into CDN-vs-origin statistics.

    Args:
        item: ((navtiming, resourceTimings, har), {url: rtt}) -- the nested
            tuple produced by main() plus the per-URL RTT map.
        origin: landmark domain, used to label requests served by the
            origin (or its 'www.' variant) rather than a CDN.

    Returns:
        (plt, totObj, pSize, primary_cdn, n_primary, size_primary,
         n_nonprimary, size_nonprimary, origintime, cdnimp) where cdnimp is
        the median-RTT improvement (%) of the primary CDN over the origin.

    Raises:
        InvalidRecord: when no HTTP-200 entry yields any CDN data.

    NOTE(review): the local `time` below shadows the module-level
    `import time`; harmless here but worth renaming eventually.
    """
    urldict=item[1]
    # pprint.pprint(len(urldict.keys()))
    # pprint.pprint(urldict)
    # sys.exit(0)
    hardata=item[0][2]
    resD=item[0][1]
    plt=item[0][0]['duration']
    totObj=len(hardata['log']['entries'])
    pSize=0.0
    hashrsd=hashresd(resD)
    cdndict={}
    for entry in hardata['log']['entries']:
        if entry['response']['status']==200:
            # Body sizes accumulated in KiB.
            pSize+=float(entry['response']['bodySize'])/1024
            url=entry['request']['url'].replace(',','-')
            hostname=url.split('/')[2]
            cdn=findcdnfromhost(hostname)
            # print cdn,hostname
            if cdn==None:
                # Unclassified hosts are grouped under their own hostname.
                cdn=hostname
            try:
                time=resD[hashrsd[entry['request']['url']]]['duration']
            except KeyError:
                # Fall back to the HAR-reported time when the resource
                # timing entry is missing.
                time=entry['time']
            try:
                rtt=urldict[str(entry['request']['url'])]
            except KeyError:
                # No exact RTT match: take the RTT of the fuzziest-matching
                # URL (highest fuzz.ratio). Equal ratios collapse via dict
                # key collision -- presumably acceptable; confirm.
                frdict={}
                for urls in urldict.keys():
                    r=fuzz.ratio(urls,str(entry['request']['url']))
                    frdict[r]=urls
                rtt=urldict[frdict[max(frdict.keys())]]
            try:
                cdndict.setdefault(cdn,[]).append((time,rtt,float(entry['response']['bodySize'])/1024))
            except ZeroDivisionError:
                pass
    if not cdndict:
        raise InvalidRecord('InvalidRecord')
    # Pick the provider serving the most objects as the "primary" CDN.
    m=0
    for key in cdndict:
        if m < len(cdndict[key]):
            m=len(cdndict[key])
            k=key
    # print cdndict.keys()
    clubcdndict={}
    if cdndict:
        for key in cdndict:
            if key==k:
                clubcdndict['primary']=cdndict[key]
            elif key==origin or key=='www.'+origin:
                clubcdndict['origin']=cdndict[key]
            else:
                # NOTE(review): each non-primary provider overwrites the
                # previous one here, so only the last survives -- confirm
                # whether 'nonprimary' was meant to aggregate all of them.
                tlist=[]
                for p in cdndict[key]:
                    tlist.append(p)
                clubcdndict['nonprimary']=tlist
    # pprint.pprint(clubcdndict)
    if 'primary' not in clubcdndict.keys():
        clubcdndict['primary']=[]
    if 'nonprimary' not in clubcdndict.keys():
        clubcdndict['nonprimary']=[]
    nprimecdn=len(clubcdndict['primary'])
    n_primecdn=len(clubcdndict['nonprimary'])
    sprimecdn=sum([z for (x,y,z) in clubcdndict['primary']])
    s_primecdn=sum([z for (x,y,z) in clubcdndict['nonprimary']])
    try:
        origintime=numpy.median([y for (x,y,z) in clubcdndict['origin']])
        cdntime=numpy.median([y for (x,y,z) in clubcdndict['primary']])
        cdnimp=(origintime-cdntime)*100/origintime
    except KeyError:
        # No origin-served objects: report the primary CDN's median RTT and
        # zero improvement.
        origintime=numpy.median([y for (x,y,z) in clubcdndict['primary']])
        cdnimp=0
    # O=[]
    # C=[]
    #for (a,b) in clubcdndict['origin']:
    # if b!=0:
    # O.append(a/b)
    # for (a,b) in clubcdndict['primary']:
    # if b!=0:
    # C.append(a/b)
    # avgTimePerByteOrigin=numpy.mean(O)
    # avgTimePerByteCDN=numpy.mean(C)
    #origintime=0#avgTimePerByteOrigin*100
    #CDNTime=avgTimePerByteCDN*100
    #cdnimp=0#((origintime-CDNTime)*100)/origintime
    return plt,totObj,pSize,k,nprimecdn,sprimecdn,n_primecdn,s_primecdn,origintime,cdnimp
# Landmark domain -> URL of the resource whose fetch completion is taken as
# the Above-the-Fold boundary for that page (used by AFT()). Landmarks
# absent from this map fall back to full Page Load Time.
aftboundry={
    'airindia.com':'http://airindia.com/Images/f-button.gif',
    'espncricinfo.com':'http://i.imgci.com/espncricinfo/ciSprites58.gif',
    # 'flipkart.com':'http://s3-ap-southeast-1.amazonaws.com/wk-static-files/webengage/feedbacktab/~537e09f.png',
    'flipkart.com':'http://b.scorecardresearch.com/beacon.js',
    'incometaxreview.com':'http://incometaxindia.gov.in/ITPrototype/images/rmenu11.jpg',
    'incredibleindia.org':'http://incredibleindia.org/images/home/black-slider/calendar.jpg',
    'jharkhand.gov.in':'http://jharkhand.gov.in/documents/12205/0/acts.jpg?t=1387523290512',
    'makemytrip.com':'http://d6tizftlrpuof.cloudfront.net/live/resources/buttons/usabilla_black_rightSideImprove.png',
    'morth.nic.in':'http://morth.nic.in/images/bullet1.gif',
    'mponline.gov.in':'http://www.mponline.gov.in/Quick%20Links/PortalImages/MenuImages/CitizenS.png',
    'nrega.nic.in':'http://nrega.nic.in/netnrega/images/middle_s.gif',
    'passportindia.gov.in':'http://passportindia.gov.in/AppOnlineProject/images/bt_grey.gif',
    'timesofindia.indiatimes.com':'https://ssl.gstatic.com/images/icons/gplus-16.png',
    'uk.gov.in':'http://uk.gov.in/files/icons/morearrow.jpg',
    'wikipedia.org':'http://upload.wikimedia.org/wikipedia/meta/0/08/Wikipedia-logo-v2_1x.png',
    'yatra.com':'http://css.yatra.com/content/fresco/default/images/FareFinder-graph.png',
    'irctc.co.in':'http://irctc.co.in/beta_images/plus_icon_home.gif',
    'youtube.com':'http://i1.ytimg.com/i/Ah9DbAZny_eoGFsYlH2JZw/1.jpg'
    }
def PLT(item, landmark):
    """Page Load Time in seconds for one record.

    Computed from the HAR (item[2]) as: time from page start to the start
    of the request with the latest startedDateTime, plus that request's
    duration. *landmark* is unused; kept for call-site compatibility.
    """
    har = item[2]
    page_start = iso8601.parse_date(har['log']['pages'][0]['startedDateTime'])
    # (start timestamp, duration-in-ms) for every entry in the HAR.
    timings = [(iso8601.parse_date(entry['startedDateTime']), entry['time'])
               for entry in har['log']['entries']]
    # Stable sort, last element: the latest-starting request (last one in
    # original order on ties), same as the original dtlist[-1:][0].
    last_start, last_duration = sorted(timings, key=lambda pair: pair[0])[-1]
    return float((last_start - page_start).total_seconds()) + float(last_duration) / 1000
def AFT(item, landmark, isp):
    """Above-the-Fold Time in seconds for one record.

    Uses the aftboundry map: the AFT is the completion time of that
    landmark's boundary resource relative to page start. Falls back to the
    full PLT when the landmark has no boundary URL or the boundary resource
    never appears in the HAR. *isp* is unused; kept for compatibility.
    """
    total = PLT(item, landmark)
    if landmark not in aftboundry:
        return total
    boundary_url = aftboundry[landmark]
    har = item[2]
    aft = None
    page_start = iso8601.parse_date(har['log']['pages'][0]['startedDateTime'])
    for entry in har['log']['entries']:
        if entry['request']['url'] != boundary_url:
            continue
        # print entry['request']['url']
        # Last matching entry wins, as in the original (no break).
        elapsed = iso8601.parse_date(entry['startedDateTime']) - page_start
        aft = float(elapsed.total_seconds()) + float(entry['time']) / 1000
    # sys.exit(0)
    if aft is None:
        # return hardata['log']['pages'][0]['pageTimings']['onLoad']
        return total
    return aft
def detectBottleneck(item, landmark):
    """Return the HAR timing phase that dominates total request time.

    Sums the 'blocked', 'wait', 'ssl', 'dns' and 'connect' timings across
    all entries of the HAR (item[2]); entries without a 'timings' dict are
    skipped entirely (their 'time' is not counted either). *landmark* is
    unused; kept for call-site compatibility.

    Returns:
        The phase name with the largest share of total time. On ties the
        LAST phase in (blocked, wait, ssl, dns, connect) order wins,
        matching the original's dict-overwrite behavior. Returns the
        sentinel '\\t' when total time is zero (e.g. no timed entries).

    Fixes vs. original: the float-keyed {fraction: name} dict silently
    collapsed phases with equal fractions; the locals `dns` and `time` no
    longer shadow the identically-named imported modules.
    """
    hardata = item[2]
    phases = ('blocked', 'wait', 'ssl', 'dns', 'connect')
    totals = {phase: 0 for phase in phases}
    total_time = 0
    for entry in hardata['log']['entries']:
        if 'timings' in entry:
            for phase in phases:
                totals[phase] += entry['timings'][phase]
            total_time += entry['time']
    if total_time == 0:
        # Original returned a tab sentinel when nothing was timed.
        return '\t'
    best = None
    best_frac = None
    for phase in phases:
        frac = float(totals[phase]) / total_time
        # '>=' keeps the last tied phase, replicating dict-overwrite ties.
        if best is None or frac >= best_frac:
            best, best_frac = phase, frac
    return best
def maxBottle(l):
    """Return the most common element of *l* (the modal bottleneck label).

    Raises IndexError on an empty sequence, as the original did.
    Improvement: ask Counter for only the top-1 entry instead of ranking
    every distinct element; tie-breaking is unchanged (both paths use
    heapq.nlargest under the hood when n is given).
    """
    return Counter(l).most_common(1)[0][0]
# Landmark domain -> path actually fetched for it (some landmarks land on a
# deep entry page rather than the bare domain). redirectpenalty() uses this
# to reconstruct 'http://<value>/' and 'http://www.<value>/' request URLs.
urldict={
    'facebook.com':'facebook.com',
    'timesofindia.indiatimes.com':'timesofindia.indiatimes.com',
    'google.co.in':'google.co.in',
    'youtube.com':'youtube.com',
    'irctc.co.in':'irctc.co.in',
    'wikipedia.org':'wikipedia.org',
    'espncricinfo.com':'espncricinfo.com',
    'flipkart.com':'flipkart.com',
    'mponline.gov.in':'mponline.gov.in/portal/index.aspx?langid=en-US',
    'nrega.nic.in':'nrega.nic.in/netnrega/home.aspx',
    'jharkhand.gov.in':'jharkhand.gov.in',
    'uk.gov.in':'uk.gov.in/home/index1',
    'morth.nic.in':'morth.nic.in',
    'airindia.com':'airindia.com',
    'yatra.com':'yatra.com',
    'makemytrip.com':'makemytrip.com',
    'incredibleindia.org':'incredibleindia.org',
    'passportindia.gov.in':'passportindia.gov.in/AppOnlineProject/welcomeLink',
    'incometaxindia.gov.in':'incometaxindia.gov.in/home.asp'
    }
# Landmark groupings used by stat()/statbynumobj() to select which sites go
# into each chart ('gov', 'non-gov', or the combined 'cdn' set).
website={
    'gov':[
        'irctc.co.in',
        'mponline.gov.in',
        'nrega.nic.in',
        'jharkhand.gov.in',
        'uk.gov.in',
        'morth.nic.in',
        'airindia.com',
        'passportindia.gov.in',
        'incometaxindia.gov.in'
        ],
    'non-gov':[
        'facebook.com',
        'timesofindia.indiatimes.com',
        'google.co.in',
        'youtube.com',
        'wikipedia.org',
        'espncricinfo.com',
        'flipkart.com',
        'makemytrip.com',
        'yatra.com',
        'incredibleindia.org'
        ],
    'cdn':[
        'facebook.com',
        'timesofindia.indiatimes.com',
        'espncricinfo.com',
        'flipkart.com',
        'google.co.in',
        'youtube.com',
        'makemytrip.com',
        'yatra.com',
        'incredibleindia.org',
        'irctc.co.in',
        'mponline.gov.in',
        'nrega.nic.in',
        'jharkhand.gov.in',
        'uk.gov.in',
        'morth.nic.in',
        'airindia.com',
        'passportindia.gov.in',
        'incometaxindia.gov.in'
        ]
    }
def redirectpenalty(item, landmark):
    """Seconds spent on the bare-domain -> www redirect for *landmark*.

    Scans the HAR (item[2]) for a 301 on 'http://<urldict[landmark]>/' and
    a 200 on 'http://www.<urldict[landmark]>/' (last match wins for each)
    and returns the wall-clock gap between their start times.

    Returns 0 when the 301 was seen but no www 200 followed; returns None
    (implicitly, as in the original) when no 301 was seen at all.
    """
    har = item[2]
    redirect_at = None
    landed_at = None
    for entry in har['log']['entries']:
        bare_hit = entry['request']['url'] == 'http://' + urldict[landmark] + '/'
        if bare_hit and entry['response']['status'] == 301:
            redirect_at = entry['startedDateTime']
    # print entry['request']['url']
    # print firstreq
    if redirect_at is not None:
        for entry in har['log']['entries']:
            www_hit = entry['request']['url'] == 'http://www.' + urldict[landmark] + '/'
            if www_hit and entry['response']['status'] == 200:
                landed_at = entry['startedDateTime']
        # print entry['request']['url']
        # print secreq
        if landed_at is not None:
            return (iso8601.parse_date(landed_at) - iso8601.parse_date(redirect_at)).total_seconds()
        return 0
def hardata():
    """Compute per-(landmark, isp) PLT/AFT/redirect-penalty series from the
    pickled 'navresdata' and write them as newline-separated files under
    PLT/PLT/, PLT/AFT/ and PLT/redPen/.

    NOTE(review): Python 2 only; the 'PLT/...' output directories must
    already exist. The final MBottleneck value is computed but only used by
    the commented-out report line.
    """
    resdata=pickle.load(open('navresdata','rb'))
    # pprint.pprint(resdata['reliancecdma'][0]['incredibleindia.org'][15])
    # sys.exit(0)
    urllist=[]
    for isp in resdata:
        for landmark in resdata[isp][0]:#:['facebook.com']:#
            l={}
            for i in range(0,len(resdata[isp][0][landmark])):
                # print resdata[isp][0][landmark][i]
                item=resdata[isp][0][landmark][i]
                pLoadTime=PLT(item,landmark)
                AboveFoldTime=AFT(item,landmark,isp)
                Bottleneck=detectBottleneck(item,landmark)
                RedirectPenalty=redirectpenalty(item,landmark)#RedirectPenalty=
                l.setdefault('PLT',[]).append(pLoadTime)
                l.setdefault('AFT',[]).append(AboveFoldTime)
                l.setdefault('Btlneck',[]).append(Bottleneck)
                l.setdefault('RedirectPenalty',[]).append(RedirectPenalty)
            # pprint.pprint(l['RedirectPenalty'])
            f=open('PLT/PLT/'+landmark+'_'+isp,'w')
            for item in l['PLT']:
                f.write(str(item)+'\n')
            f.close()
            f=open('PLT/AFT/'+landmark+'_'+isp,'w')
            for item in l['AFT']:
                f.write(str(item)+'\n')
            f.close()
            f=open('PLT/redPen/'+landmark+'_'+isp,'w')
            for item in l['RedirectPenalty']:
                f.write(str(item)+'\n')
            f.close()
            MBottleneck=maxBottle(l['Btlneck'])
            # print isp+'\t'+landmark+'\t'+str(numpy.mean(l['PLT']))+'\t'+str(numpy.std(l['PLT']))#+'\t'+isp+'\t'+landmark+'\t'+str(numpy.mean(l['AFT']))+'\t'+str(numpy.std(l['AFT']))+'\t'+isp+'\t'+landmark+'\t'+str(numpy.mean(l['RedirectPenalty']))+'\t'+str(numpy.std(l['RedirectPenalty']))+'\t'+MBottleneck
def resData():
    """Print every resource-timing entry from 'navresdata' as CSV lines
    'isp,landmark,url,duration' to stdout.

    NOTE(review): iterates resdata[isp][landmark] directly, whereas the
    other consumers index resdata[isp][0] -- presumably this predates the
    (folderdict, urldict) tuple layout written by main(); confirm before
    reuse. Python 2 only (print statement).
    """
    resdata=pickle.load(open('navresdata','rb'))
    # pprint.pprint(resdata)
    # sys.exit(0)
    urllist=[]
    for isp in resdata:
        for landmark in resdata[isp]:
            for item in resdata[isp][landmark]:
                resDataa=item[1]
                # print isp,landmark
                for entry in resDataa:
                    # Commas inside URLs are replaced so the CSV stays parseable.
                    url=entry['url'].replace(',','-')
                    print isp+','+landmark+','+url+','+str(entry['duration'])
    # pprint.pprint(set(urllist))
# Folder name -> human-readable ISP label used in charts and correlations.
isplabel={
    'airtel3g':'Airtel UMTS',
    'airtel2g':'Airtel EDGE',
    'mtnl3g1':'MTNL UMTS',
    'reliancecdma':'Reliance 1xEVDO'
    }
def stat(typ,maxY,param,title,sort=False,time=True):
    """Render a bargraph.pl chart of *param* (e.g. 'PLT', 'AFT', 'NumObjs')
    per landmark, clustered by ISP, for the landmark group *typ*.

    Args:
        typ: key into the module-level `website` dict ('gov'/'non-gov'/'cdn').
        maxY: y-axis maximum, or -1 for auto.
        param: which per-record series to average/std per (landmark, isp).
        title: chart title / y-label.
        sort: emit bargraph's =sortdata_ascend directive.
        time: keep the 'sec' unit suffix in the y-axis format.

    Writes 'charts/<param>_<typ>' and renders it to a .png via bargraph.pl.

    NOTE(review): `avgObj` is referenced below but never defined in this
    function or at module level -- calling stat() raises NameError in the
    data-row loops. The avgObj computation in statbynumobj() was presumably
    meant to exist here too; all stat() invocations under __main__ are
    commented out, which is why this never fires. Confirm before enabling.
    """
    resdata=pickle.load(open('navresdata','rb'))
    urllist=[]
    landdict=defaultdict(dict)
    isplist=[]
    for isp in resdata:
        for landmark in resdata[isp][0]:
            l={}
            for i in range(0,len(resdata[isp][0][landmark])):
                item=resdata[isp][0][landmark][i]
                pLoadTime=PLT(item,landmark)
                AboveFoldTime=AFT(item,landmark,isp)
                Bottleneck=detectBottleneck(item,landmark)
                RedirectPenalty=redirectpenalty(item,landmark)
                l.setdefault('PLT',[]).append(pLoadTime)
                l.setdefault('AFT',[]).append(AboveFoldTime)
                l.setdefault('Btlneck',[]).append(Bottleneck)
                l.setdefault('RedirectPenalty',[]).append(RedirectPenalty)
                l.setdefault('NumObjs',[]).append(len(item[2]['log']['entries']))
                l.setdefault('UX',[]).append(AboveFoldTime/pLoadTime)
            landdict[landmark][isplabel[isp]]=l
            isplist.append(isplabel[isp])
    mygraph=graphcontent4pltbyisp
    if sort==True:
        mygraph=mygraph.replace('<SORTED>','\n=sortdata_ascend\n')
    if time==False:
        mygraph=mygraph.replace('sec','')
    if maxY==-1:
        mygraph=mygraph.replace('\nmax=150\n','\n')
    else:
        mygraph=mygraph.replace('\nmax=150\n','\nmax='+str(maxY)+'\n')
    mygraph=mygraph.replace('<TITLE>',title)
    mygraph=mygraph.replace('<ISPLIST>',';'.join(isplist))
    mygraph=mygraph.replace('<ISPLIST_TAB>','\t'.join(isplist))
    mygraph+='\n'
    for landmark in website[typ]:
        mygraph+=(landmark+avgObj[landmark]+'\t')
        for isp in landdict[landmark]:
            mygraph+=(str(numpy.mean(landdict[landmark][isp][param]))+'\t')
        mygraph+='\n'
    mygraph+='=yerrorbars\n'
    for landmark in website[typ]:
        mygraph+=(landmark+avgObj[landmark]+'\t')
        for isp in landdict[landmark]:
            mygraph+=(str(numpy.std(landdict[landmark][isp][param]))+'\t')
        mygraph+='\n'
    fname='charts/'+param+'_'+typ
    pprint.pprint(fname)
    f=open(fname,'w')
    f.write(mygraph)
    f.close()
    commands.getoutput('bargraph.pl -gnuplot -png -non-transparent '+fname+' > '+fname+'.png')
def statbynumobj(typ,maxY,param,title,sort=False,time=True):
    """Like stat(), but orders landmarks by their average object count and
    annotates each x-label with that count.

    NOTE(review): unlike stat(), `isplist` is never appended to here, so
    the chart's <ISPLIST> cluster header is replaced with an empty string --
    presumably an oversight; confirm against the intended chart output.
    Python 2 only (print statements, `commands`, dict.iteritems).
    """
    resdata=pickle.load(open('navresdata','rb'))
    urllist=[]
    landdict=defaultdict(dict)
    isplist=[]
    for isp in resdata:
        for landmark in resdata[isp][0]:
            l={}
            for i in range(0,len(resdata[isp][0][landmark])):
                item=resdata[isp][0][landmark][i]
                pLoadTime=PLT(item,landmark)
                AboveFoldTime=AFT(item,landmark,isp)
                Bottleneck=detectBottleneck(item,landmark)
                RedirectPenalty=redirectpenalty(item,landmark)
                l.setdefault('PLT',[]).append(pLoadTime)
                l.setdefault('AFT',[]).append(AboveFoldTime)
                l.setdefault('Btlneck',[]).append(Bottleneck)
                l.setdefault('RedirectPenalty',[]).append(RedirectPenalty)
                l.setdefault('NumObjs',[]).append(len(item[2]['log']['entries']))
                l.setdefault('UX',[]).append(AboveFoldTime/pLoadTime)
            landdict[landmark][isplabel[isp]]=l
    mygraph=graphcontent4pltbyisp
    if sort==True:
        mygraph=mygraph.replace('<SORTED>','\n=sortdata_ascend\n')
    if time==False:
        mygraph=mygraph.replace('sec','')
    if maxY==-1:
        mygraph=mygraph.replace('\nmax=150\n','\n')
    else:
        mygraph=mygraph.replace('\nmax=150\n','\nmax='+str(maxY)+'\n')
    pprint.pprint(isplist)
    mygraph=mygraph.replace('<TITLE>',title)
    mygraph=mygraph.replace('<ISPLIST>',';'.join(isplist))
    mygraph=mygraph.replace('<ISPLIST_TAB>','\t'.join(isplist))
    mygraph+='\n'
    # Average object count per landmark (across ISPs), used both to sort
    # the rows and to annotate the x-axis labels.
    avgObj={}
    for landmark in website[typ]:
        objs=[]
        for isp in landdict[landmark]:
            print isp
            objs.append(numpy.mean(landdict[landmark][isp]['NumObjs']))
        avgObj[landmark]=round(numpy.mean(objs),2)
    sorted_Obj = sorted(avgObj.iteritems(), key=operator.itemgetter(1))
    pprint.pprint(avgObj)
    # sys.exit(0)
    for (landmark,avg) in sorted_Obj:
        mygraph+=(landmark+'('+str(avg)+')\t')
        for isp in landdict[landmark]:
            mygraph+=(str(numpy.mean(landdict[landmark][isp][param]))+'\t')
        mygraph+='\n'
    mygraph+='=yerrorbars\n'
    for (landmark,avg) in sorted_Obj:
        mygraph+=(landmark+'('+str(avg)+')\t')
        for isp in landdict[landmark]:
            mygraph+=(str(numpy.std(landdict[landmark][isp][param]))+'\t')
        mygraph+='\n'
    fname='charts/'+param+'_'+typ
    pprint.pprint(fname)
    f=open(fname,'w')
    f.write(mygraph)
    f.close()
    commands.getoutput('bargraph.pl -gnuplot -png -non-transparent '+fname+' > '+fname+'.png')
def correlation():
    """Print Spearman rank correlations of the mean AFT/PLT ('UX') ratio
    between every pair of ISPs, across landmarks whose ratio is < 1 on the
    first ISP of the pair.

    NOTE(review): the X and Y series are filtered by the same
    `ldict[x][land] < 1` condition but built in two separate passes over
    the same keys, so they stay aligned; Python 2 only (print statement).
    """
    resdata=pickle.load(open('navresdata','rb'))
    urllist=[]
    landdict=defaultdict(dict)
    isplist=[]
    for isp in resdata:
        for landmark in resdata[isp][0]:
            l={}
            for i in range(0,len(resdata[isp][0][landmark])):
                item=resdata[isp][0][landmark][i]
                pLoadTime=PLT(item,landmark)
                AboveFoldTime=AFT(item,landmark,isp)
                Bottleneck=detectBottleneck(item,landmark)
                RedirectPenalty=redirectpenalty(item,landmark)
                l.setdefault('PLT',[]).append(pLoadTime)
                l.setdefault('AFT',[]).append(AboveFoldTime)
                l.setdefault('Btlneck',[]).append(Bottleneck)
                l.setdefault('RedirectPenalty',[]).append(RedirectPenalty)
                l.setdefault('NumObjs',[]).append(len(item[2]['log']['entries']))
                l.setdefault('UX',[]).append(AboveFoldTime/pLoadTime)
            landdict[landmark][isplabel[isp]]=l
    # ldict: isp -> {landmark: mean UX}; idict: landmark -> {isp: mean UX}.
    ldict=defaultdict(dict)
    idict=defaultdict(dict)
    for land in landdict:
        for isp in landdict[land]:
            ldict[isp][land]=numpy.mean(landdict[land][isp]['UX'])
            idict[land][isp]=numpy.mean(landdict[land][isp]['UX'])
    ldict=dict(ldict)
    idict=dict(idict)
    # pprint.pprint(dict(ldict))
    for (x,y) in [('Airtel UMTS','Reliance 1xEVDO'),('Airtel UMTS','Airtel EDGE'),('Airtel UMTS','MTNL UMTS'),\
                  ('Reliance 1xEVDO','Airtel EDGE'),('Reliance 1xEVDO','MTNL UMTS'),('Airtel EDGE','MTNL UMTS')]:
        X=[]
        Y=[]
        # print '-------------------------------------------------------'
        # print 'Landmark'+'\t'+x+'\t'+y
        # print '-------------------------------------------------------'
        for land in ldict[x]:
            if ldict[x][land] < 1:
                # print land+'\t'+str(ldict[x][land])+'\t'+str(ldict[y][land])
                X.append(ldict[x][land])
                # print ldict[x][land]
        # print '-------------------------------------------------------'
        for land in ldict[x]:
            if ldict[x][land] < 1:
                # print land+'\t'+str(ldict[x][land])+'\t'+str(ldict[y][land])
                Y.append(ldict[y][land])
                # print ldict[y][land]
        # print '-------------------------------------------------------'
        print x+'\t'+y+'\t'+str(scipy.stats.spearmanr(X,Y)[0])
        # print '-------------------------------------------------------'
    # print idict.keys()
    # for (x,y) in [('airindia.com','incredibleindia.org'),('incredibleindia.org','flipkart.com')\
    # ,('irctc.co.in','makemytrip.com'),('wikipedia.org','passportindia.gov.in'),('incometaxindia.gov.in', 'timesofindia.indiatimes.com')\
    # ,('irctc.co.in', 'yatra.com'),('uk.gov.in', 'incometaxindia.gov.in'),('jharkhand.gov.in', 'wikipedia.org')]:
    # X=[]
    # Y=[]
    # for isp in idict[x]:
    # # print land
    # X.append(idict[x][isp])
    # # print "dsdsds"
    # for isp in idict[y]:
    # # print land
    # Y.append(idict[y][isp])
    # # pprint.pprint(X)
    # # pprint.pprint(Y)
    # print x+'\t'+y
    # print scipy.stats.spearmanr(X,Y)
    # for
    # pprint.pprint(scipy.stats.spearmanr(landdict[land]['Airtel UMTS']['UX'],landdict[land]['Airtel EDGE']['UX']))
# Script entry point. Only correlation() is currently active; the chart and
# data-extraction steps below are kept (commented) as a menu of past runs.
if __name__ == '__main__':
    correlation()
    # main()
    # stat('gov',150,'AFT','Above The Fold Time',True)
    # stat('non-gov',150,'AFT','Above The Fold Time',True)
    # stat('gov',150,'PLT','Page Load Time',True)
    # stat('non-gov',150,'PLT','Page Load Time',True)
    # stat('cdn',150,'PLT','Page Load Time')
    # stat('gov',-1,'NumObjs','Number of Objects fetched',True,False)
    # stat('non-gov',-1,'NumObjs','Number of Objects fetched',True,False)
    # stat('cdn',-1,'NumObjs','Number of Objects fetched',False,False)
    # stat('gov',-1,'UX','PLT / AFT ratio',True,False)
    # stat('non-gov',-1,'UX','PLT / AFT ratio',True,False)
    # stat('cdn',-1,'UX','PLT / AFT ratio',False,False)
    # statbynumobj('cdn',300,'PLT','Page Load Time',True)
    # resData()
|
UTF-8
|
Python
| false | false | 2,014 |
1,864,015,820,768 |
5f138b3cd0196fa2fc97f6ff23d25e0500dbaa5e
|
adee3fb8e6d23772dc22ef08740efba10c372d24
|
/src/python/twitter/pants/tasks/scala_compile.py
|
d3e5fd4c07603e2dacd18c979cbe65838ff5e306
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
non_permissive
|
UrbanCompass/commons
|
https://github.com/UrbanCompass/commons
|
cad3195c839b70a05d18070f48cd86f063deea2d
|
da52a5b693123668aead91efaf3a65be289e1625
|
refs/heads/master
| 2021-01-18T10:41:07.771318 | 2014-07-17T18:54:39 | 2014-07-17T20:37:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================================================
__author__ = 'Benjy Weinberger'  # module author metadata
import os
from twitter.pants import has_sources, is_scalac_plugin
from twitter.pants.goal.workunit import WorkUnit
from twitter.pants.targets.scala_library import ScalaLibrary
from twitter.pants.tasks import Task, TaskError
from twitter.pants.tasks.jvm_dependency_cache import JvmDependencyCache
from twitter.pants.tasks.nailgun_task import NailgunTask
from twitter.pants.reporting.reporting_utils import items_to_report_element
from twitter.pants.tasks.scala.zinc_artifact import ZincArtifactFactory, AnalysisFileSpec
from twitter.pants.tasks.scala.zinc_utils import ZincUtils
def _is_scala(target):
  """True if *target* owns any .scala source files."""
  return has_sources(target, '.scala')
class ScalaCompile(NailgunTask):
@classmethod
def setup_parser(cls, option_group, args, mkflag):
NailgunTask.setup_parser(option_group, args, mkflag)
option_group.add_option(mkflag('warnings'), mkflag('warnings', negate=True),
dest='scala_compile_warnings', default=True,
action='callback', callback=mkflag.set_bool,
help='[%default] Compile scala code with all configured warnings '
'enabled.')
option_group.add_option(mkflag('plugins'), dest='plugins', default=None,
action='append', help='Use these scalac plugins. Default is set in pants.ini.')
option_group.add_option(mkflag('partition-size-hint'), dest='scala_compile_partition_size_hint',
action='store', type='int', default=-1,
help='Roughly how many source files to attempt to compile together. Set to a large number ' \
'to compile all sources together. Set this to 0 to compile target-by-target. ' \
'Default is set in pants.ini.')
JvmDependencyCache.setup_parser(option_group, args, mkflag)
def __init__(self, context):
NailgunTask.__init__(self, context, workdir=context.config.get('scala-compile', 'nailgun_dir'))
# Set up the zinc utils.
color = not context.options.no_color
self._zinc_utils = ZincUtils(context=context, nailgun_task=self, color=color)
# The rough number of source files to build in each compiler pass.
self._partition_size_hint = (context.options.scala_compile_partition_size_hint
if context.options.scala_compile_partition_size_hint != -1
else context.config.getint('scala-compile', 'partition_size_hint',
default=1000))
# Set up dep checking if needed.
if context.options.scala_check_missing_deps:
JvmDependencyCache.init_product_requirements(self)
self._opts = context.config.getlist('scala-compile', 'args')
if context.options.scala_compile_warnings:
self._opts.extend(context.config.getlist('scala-compile', 'warning_args'))
else:
self._opts.extend(context.config.getlist('scala-compile', 'no_warning_args'))
# Various output directories.
workdir = context.config.get('scala-compile', 'workdir')
self._resources_dir = os.path.join(workdir, 'resources')
self._artifact_factory = ZincArtifactFactory(workdir, self.context, self._zinc_utils)
# The ivy confs for which we're building.
self._confs = context.config.getlist('scala-compile', 'confs')
# The artifact cache to read from/write to.
artifact_cache_spec = context.config.getlist('scala-compile', 'artifact_caches', default=[])
self.setup_artifact_cache(artifact_cache_spec)
# If we are compiling scala libraries with circular deps on java libraries we need to make sure
# those cycle deps are present.
self._inject_java_cycles()
def _inject_java_cycles(self):
for scala_target in self.context.targets(lambda t: isinstance(t, ScalaLibrary)):
for java_target in scala_target.java_sources:
self.context.add_target(java_target)
def product_type(self):
return 'classes'
def can_dry_run(self):
return True
def execute(self, targets):
scala_targets = filter(_is_scala, targets)
if not scala_targets:
return
# Get the exclusives group for the targets to compile.
# Group guarantees that they'll be a single exclusives key for them.
egroups = self.context.products.get_data('exclusives_groups')
exclusives_key = egroups.get_group_key_for_target(targets[0])
exclusives_classpath = egroups.get_classpath_for_group(exclusives_key)
with self.context.state('upstream_analysis_map', {}) as upstream_analysis_map:
with self.invalidated(scala_targets, invalidate_dependents=True,
partition_size_hint=self._partition_size_hint) as invalidation_check:
# Process partitions one by one.
for vts in invalidation_check.all_vts_partitioned:
# Refresh the classpath, to pick up any changes from update_compatible_classpaths.
exclusives_classpath = egroups.get_classpath_for_group(exclusives_key)
# Get the classpath generated by upstream JVM tasks (including previous calls to execute()).
# Add the global classpaths here, directly, instead of doing the
# add-to-compatible thing.
self._add_globally_required_classpath_entries(exclusives_classpath)
if not self.dry_run:
merged_artifact = self._process_target_partition(vts, exclusives_classpath,
upstream_analysis_map)
vts.update()
# Note that we add the merged classes_dir to the upstream.
# This is because zinc doesn't handle many upstream dirs well.
if os.path.exists(merged_artifact.classes_dir):
for conf in self._confs: ### CLASSPATH UPDATE
# Update the exclusives group classpaths.
egroups.update_compatible_classpaths(exclusives_key, [(conf, merged_artifact.classes_dir)])
if os.path.exists(merged_artifact.analysis_file):
upstream_analysis_map[merged_artifact.classes_dir] = \
AnalysisFileSpec(merged_artifact.analysis_file, merged_artifact.classes_dir)
if invalidation_check.invalid_vts:
# Check for missing dependencies.
all_analysis_files = set()
for target in scala_targets:
analysis_file_spec = self._artifact_factory.analysis_file_for_targets([target])
if os.path.exists(analysis_file_spec.analysis_file):
all_analysis_files.add(analysis_file_spec)
deps_cache = JvmDependencyCache(self.context, scala_targets, all_analysis_files)
deps_cache.check_undeclared_dependencies()
def _add_globally_required_classpath_entries(self, cp):
# Add classpath entries necessary both for our compiler calls and for downstream JVM tasks.
for conf in self._confs:
cp.insert(0, (conf, self._resources_dir))
for jar in self._zinc_utils.plugin_jars():
cp.insert(0, (conf, jar))
def _localize_portable_analysis_files(self, vts):
# Localize the analysis files we read from the artifact cache.
for vt in vts:
analysis_file = self._artifact_factory.analysis_file_for_targets(vt.targets)
if self._zinc_utils.localize_analysis_file(
ZincArtifactFactory.portable(analysis_file.analysis_file), analysis_file.analysis_file):
self.context.log.warn('Zinc failed to localize analysis file: %s. Incremental rebuild' \
'of that target may not be possible.' % analysis_file)
def check_artifact_cache(self, vts):
# Special handling for scala artifacts.
cached_vts, uncached_vts = Task.check_artifact_cache(self, vts)
if cached_vts:
# Localize the portable analysis files.
with self.context.new_workunit('localize', labels=[WorkUnit.MULTITOOL]):
self._localize_portable_analysis_files(cached_vts)
# Split any merged artifacts.
for vt in cached_vts:
if len(vt.targets) > 1:
artifacts = [self._artifact_factory.artifact_for_target(t) for t in vt.targets]
merged_artifact = self._artifact_factory.merged_artifact(artifacts)
merged_artifact.split()
for v in vt.versioned_targets:
v.update()
return cached_vts, uncached_vts
def _process_target_partition(self, vts, cp, upstream_analysis_map):
"""Must run on all target partitions, not just invalid ones.
May be invoked concurrently on independent target sets.
Postcondition: The individual targets in vts are up-to-date, as if each were
compiled individually.
"""
artifacts = [self._artifact_factory.artifact_for_target(target) for target in vts.targets]
merged_artifact = self._artifact_factory.merged_artifact(artifacts)
if not merged_artifact.sources:
self.context.log.warn('Skipping scala compile for targets with no sources:\n %s' %
merged_artifact.targets)
else:
# Get anything we have from previous builds (or we pulled from the artifact cache).
# We must do this even if we're not going to compile, because the merged output dir
# will go on the classpath of downstream tasks. We can't put the per-target dirs
# on the classpath because Zinc doesn't handle large numbers of upstream deps well.
current_state = merged_artifact.merge(force=not vts.valid)
# Note: vts.valid tells us if the merged artifact is valid. If not, we recreate it
# above. [not vt.valid for vt in vts.versioned_targets] tells us if anything needs
# to be recompiled. The distinction is important: all the underlying targets may be
# valid because they were built in some other pants run with different partitions,
# but this partition may still be invalid and need merging.
# Invoke the compiler if needed.
if any([not vt.valid for vt in vts.versioned_targets]):
# Do some reporting.
self.context.log.info(
'Operating on a partition containing ',
items_to_report_element(vts.cache_key.sources, 'source'),
' in ',
items_to_report_element([t.address.reference() for t in vts.targets], 'target'), '.')
old_state = current_state
classpath = [entry for conf, entry in cp if conf in self._confs]
with self.context.new_workunit('compile'):
# Zinc may delete classfiles, then later exit on a compilation error. Then if the
# change triggering the error is reverted, we won't rebuild to restore the missing
# classfiles. So we force-invalidate here, to be on the safe side.
vts.force_invalidate()
if self._zinc_utils.compile(classpath, merged_artifact.sources,
merged_artifact.classes_dir,
merged_artifact.analysis_file, upstream_analysis_map):
raise TaskError('Compile failed.')
write_to_artifact_cache = self._artifact_cache and \
self.context.options.write_to_artifact_cache
current_state = merged_artifact.split(old_state, portable=write_to_artifact_cache)
if write_to_artifact_cache:
# Write the entire merged artifact, and each individual split artifact,
# to the artifact cache, if needed.
vts_artifact_pairs = zip(vts.versioned_targets, artifacts) + [(vts, merged_artifact)]
self._update_artifact_cache(vts_artifact_pairs)
# Register the products, if needed. TODO: Make sure this is safe to call concurrently.
# In practice the GIL will make it fine, but relying on that is insanitary.
if self.context.products.isrequired('classes'):
self._add_products_to_genmap(merged_artifact, current_state)
return merged_artifact
    def _add_products_to_genmap(self, artifact, state):
        """Must be called on all targets, whether they needed compilation or not.

        Registers the compiled classes for every source of every target in
        the 'classes' product mapping, plus scalac-plugin metadata files.
        """
        genmap = self.context.products.get('classes')
        for target, sources in artifact.sources_by_target.items():
            for source in sources:
                classes = state.classes_by_src.get(source, [])
                relsrc = os.path.relpath(source, target.target_base)
                # Register under both the relative source path and the target,
                # so downstream tasks can look products up either way.
                genmap.add(relsrc, artifact.classes_dir, classes)
                genmap.add(target, artifact.classes_dir, classes)
            # TODO(John Sirois): Map target.resources in the same way
            # Create and Map scala plugin info files to the owning targets.
            if is_scalac_plugin(target) and target.classname:
                basedir, plugin_info_file = self._zinc_utils.write_plugin_info(self._resources_dir, target)
                genmap.add(target, basedir, [plugin_info_file])
    def _update_artifact_cache(self, vts_artifact_pairs):
        """Write (VersionedTargetSet, artifact) pairs to the artifact cache."""
        # Relativize the analysis.
        # TODO: Relativize before splitting? This will require changes to Zinc, which currently
        # eliminates paths it doesn't recognize (including our placeholders) when splitting.
        vts_artifactfiles_pairs = []
        with self.context.new_workunit(name='cacheprep'):
            with self.context.new_workunit(name='relativize', labels=[WorkUnit.MULTITOOL]):
                for vts, artifact in vts_artifact_pairs:
                    # A truthy return from relativize_analysis_file signals failure.
                    # NOTE(review): a missing analysis file is silently treated as
                    # success here — presumably intentional; confirm with callers.
                    if os.path.exists(artifact.analysis_file) and \
                        self._zinc_utils.relativize_analysis_file(artifact.analysis_file,
                                                                  artifact.portable_analysis_file):
                        raise TaskError('Zinc failed to relativize analysis file: %s' % artifact.analysis_file)
                    artifact_files = [artifact.classes_dir, artifact.portable_analysis_file]
                    vts_artifactfiles_pairs.append((vts, artifact_files))
        self.update_artifact_cache(vts_artifactfiles_pairs)
|
UTF-8
|
Python
| false | false | 2,014 |
15,899,968,964,848 |
992b1e8c5fab416169034e0d937df71f89d2dfd1
|
b26a1db6e7b5b39efb2170e8509df04b62f8f16d
|
/Stuff/Misc/test_decorators.py
|
4f831cc2475bf984ab6b33a13d4bcb1ea0916345
|
[
"LGPL-2.1-or-later",
"LGPL-2.0-or-later",
"GPL-1.0-or-later"
] |
non_permissive
|
moneytech/pyvm
|
https://github.com/moneytech/pyvm
|
e342b9160dbf64fb04149226db52bf0a8ed200c7
|
bb1779e86296fa33cc52517f7ba907974a3c138b
|
refs/heads/master
| 2020-12-27T05:43:42.960801 | 2014-08-08T19:17:44 | 2014-08-08T19:17:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# taken from Python-2.4 Lib/tests/test_decorators.py
#
def funcattrs(**kwds):
    """Decorator factory: stamp every given keyword arg onto the function."""
    def decorate(func):
        for attr_name, attr_value in kwds.items():
            func.__dict__[attr_name] = attr_value
        return func
    return decorate
class MiscDecorators (object):
    """Holder for assorted decorator helpers reached via dotted lookup."""

    @staticmethod
    def author(name):
        """Decorator factory: record *name* as the function's 'author'."""
        def decorate(func):
            func.author = name
            return func
        return decorate
# -----------------------------------------------
class DbcheckError (Exception):
    """Raised when a dbcheck assertion expression evaluates false."""

    def __init__(self, exprstr, func, args, kwds):
        # A real version of this would set attributes here
        message = "dbcheck %r failed (func=%s args=%s kwds=%s)" % (
            exprstr, func, args, kwds)
        Exception.__init__(self, message)
def dbcheck(exprstr, globals=None, locals=None):
    """Decorator to implement debugging assertions.

    *exprstr* is evaluated before each call of the wrapped function and a
    DbcheckError is raised when it is false.  With no *globals*, the
    expression is evaluated in the wrapper's own frame, where the call's
    ``args`` and ``kwds`` are visible; otherwise the supplied
    *globals*/*locals* namespaces are used.
    """
    def decorate(func):
        # func.__name__ is equivalent to the Python-2-only func.func_name
        # and also works on Python 3.
        expr = compile(exprstr, "dbcheck-%s" % func.__name__, "eval")
        def check(*args, **kwds):
            if not globals:
                if not eval (expr):
                    raise DbcheckError(exprstr, func, args, kwds)
            elif not eval(expr, globals, locals):
                raise DbcheckError(exprstr, func, args, kwds)
            return func(*args, **kwds)
        return check
    return decorate
# -----------------------------------------------
def countcalls(counts):
    """Decorator to count calls to a function.

    Totals are kept in the *counts* dict, keyed by the function's name;
    the counter is reset to 0 at decoration time.
    """
    def decorate(func):
        # __name__ is portable across Python 2 and 3 (func_name is 2.x-only
        # and, on 2.x, just an alias of __name__).
        func_name = func.__name__
        counts[func_name] = 0
        def call(*args, **kwds):
            counts[func_name] += 1
            return func(*args, **kwds)
        call.__name__ = func_name
        return call
    return decorate
# -----------------------------------------------
def memoize(func):
    """Decorator caching results by positional-argument tuple.

    Unhashable arguments (TypeError on dict lookup) bypass the cache and
    call through directly.
    """
    saved = {}
    def call(*args):
        try:
            return saved[args]
        except KeyError:
            res = func(*args)
            saved[args] = res
            return res
        except TypeError:
            # Unhashable argument
            return func(*args)
    # __name__ is portable across Python 2 and 3; on 2.x assigning it also
    # updates the legacy .func_name alias, so behavior is unchanged there.
    call.__name__ = func.__name__
    return call
# -----------------------------------------------
def verify(x, y, msg=None):
    # Poor man's assertEqual: when the values differ, print the optional
    # message and abort the run via FAIL().  (Python 2 print statement.)
    if x != y:
        if msg:
            print msg
        FAIL ()
def FAIL ():
    # Report failure and terminate the whole test run immediately.
    print "FAIL"
    raise SystemExit
def verifyRaises (E, f, *args):
    """Check that f(*args) raises exception class E; FAIL() if it doesn't.

    Any exception other than E propagates to the caller unchanged.
    """
    try:
        f (*args)
    except E:
        pass
    else:
        FAIL ()
def test_single():
    # A single decorator applied to a method inside a class body.
    class C(object):
        @staticmethod
        def foo(): return 42
    # A staticmethod is callable both on the class and on an instance.
    verify(C.foo(), 42)
    verify(C().foo(), 42)
def test_dotted():
    # Decorator reached through a dotted attribute lookup on an instance.
    decorators = MiscDecorators()
    @decorators.author('Cleese')
    def foo(): return 42
    verify(foo(), 42)
    verify(foo.author, 'Cleese')
def test_argforms():
    # A few tests of argument passing, as we use restricted form
    # of expressions for decorators.

    def noteargs(*args, **kwds):
        def decorate(func):
            setattr(func, 'dbval', (args, kwds))
            return func
        return decorate

    args = ( 'Now', 'is', 'the', 'time' )
    kwds = dict(one=1, two=2)
    # Star/double-star unpacking inside a decorator argument list.
    @noteargs(*args, **kwds)
    def f1(): return 42
    verify(f1(), 42)
    verify(f1.dbval, (args, kwds))

    # Mixed positional and keyword decorator arguments.
    @noteargs('terry', 'gilliam', eric='idle', john='cleese')
    def f2(): return 84
    verify(f2(), 84)
    verify(f2.dbval, (('terry', 'gilliam'),
                      dict(eric='idle', john='cleese')))

    # Trailing comma in the decorator argument list is legal.
    @noteargs(1, 2,)
    def f3(): pass
    verify(f3.dbval, ((1, 2), {}))
def test_dbcheck():
    # dbcheck evaluates its expression in the wrapper's frame, where the
    # call's 'args' tuple is visible.
    @dbcheck('args[1] is not None')
    def f(a, b):
        return a + b
    verify(f(1, 2), 3)
    verifyRaises(DbcheckError, f, 1, None)
def test_memoize():
    # Stack memoize over countcalls so the counter reveals cache hits.
    counts = {}

    @memoize
    @countcalls(counts)
    def double(x):
        return x * 2

    verify(double.func_name, 'double')
    verify(counts, dict(double=0))

    # Only the first call with a given argument bumps the call count:
    #
    verify(double(2), 4)
    verify(counts['double'], 1)
    verify(double(2), 4)
    verify(counts['double'], 1)
    verify(double(3), 6)
    verify(counts['double'], 2)

    # Unhashable arguments do not get memoized:
    #
    verify(double([10]), [10, 10])
    verify(counts['double'], 3)
    verify(double([10]), [10, 10])
    verify(counts['double'], 4)
def test_double():
    # Two stacked decorators on one method; attributes from both must stick.
    class C(object):
        @funcattrs(abc=1, xyz="haha")
        @funcattrs(booh=42)
        def foo(self): return 42

    verify(C().foo(), 42)
    verify(C.foo.abc, 1)
    verify(C.foo.xyz, "haha")
    verify(C.foo.booh, 42)
def test_order():
    # Test that decorators are applied in the proper order to the function
    # they are decorating.
    def callnum(num):
        """Decorator factory that returns a decorator that replaces the
        passed-in function with one that returns the value of 'num'"""
        def deco(func):
            return lambda: num
        return deco
    # The decorator nearest the def runs first, so the outermost (2) wins.
    @callnum(2)
    @callnum(1)
    def foo(): return 42
    verify(foo(), 2,
           "Application order of decorators is incorrect")
def test_eval_order():
    # Evaluating a decorated function involves four steps for each
    # decorator-maker (the function that returns a decorator):
    #
    #    1: Evaluate the decorator-maker name
    #    2: Evaluate the decorator-maker arguments (if any)
    #    3: Call the decorator-maker to make a decorator
    #    4: Call the decorator
    #
    # When there are multiple decorators, these steps should be
    # performed in the above order for each decorator, but we should
    # iterate through the decorators in the reverse of the order they
    # appear in the source.

    actions = []

    def make_decorator(tag):
        actions.append('makedec' + tag)
        def decorate(func):
            actions.append('calldec' + tag)
            return func
        return decorate

    class NameLookupTracer (object):
        # Records every attribute lookup on itself, so the evaluation
        # order of the decorator expressions becomes observable.
        def __init__(self, index):
            self.index = index

        def __getattr__(self, fname):
            if fname == 'make_decorator':
                opname, res = ('evalname', make_decorator)
            elif fname == 'arg':
                opname, res = ('evalargs', str(self.index))
            else:
                assert False, "Unknown attrname %s" % fname
            actions.append('%s%d' % (opname, self.index))
            return res

    c1, c2, c3 = map(NameLookupTracer, [ 1, 2, 3 ])

    expected_actions = [ 'evalname1', 'evalargs1', 'makedec1',
                         'evalname2', 'evalargs2', 'makedec2',
                         'evalname3', 'evalargs3', 'makedec3',
                         'calldec3', 'calldec2', 'calldec1' ]

    actions = []
    @c1.make_decorator(c1.arg)
    @c2.make_decorator(c2.arg)
    @c3.make_decorator(c3.arg)
    def foo(): return 42
    verify(foo(), 42)

    verify(actions, expected_actions)

    # Test the equivalence claim in chapter 7 of the reference manual.
    #
    actions = []
    def bar(): return 42
    bar = c1.make_decorator(c1.arg)(c2.make_decorator(c2.arg)(c3.make_decorator(c3.arg)(bar)))
    verify(bar(), 42)
    verify(actions, expected_actions)
def R(x):
    # Run one test function, announcing its name first and "OK" on success.
    # (Python 2: trailing comma suppresses the newline; func_name is 2.x-only.)
    print x.func_name,
    x ()
    print "OK"
def test_main():
    # Drive every individual test through the R() runner; any failure
    # aborts the run via FAIL()/SystemExit.
    R (test_single)
    R (test_dotted)
    R (test_argforms)
    R (test_dbcheck)
    print "This will fail for 2.3:",
    R (test_memoize)
    R (test_double)
    R (test_order)
    R (test_eval_order)

if __name__=="__main__":
    test_main()
|
UTF-8
|
Python
| false | false | 2,014 |
12,524,124,640,267 |
b54657e2ab02d53f2cb033aac46c68b013246e37
|
d17c43e3d7f90eea4925a17db5efc0bbfbb0be26
|
/feedhq/models.py
|
499b409cc1a8f521df3b747b3054296535c0e412
|
[] |
no_license
|
troytop/feedhq
|
https://github.com/troytop/feedhq
|
0bcb6e98ccec974fbc0696c975fb68ef44e7d66d
|
5ad370d92d4c4e838a87f9fcd4f7ac7b58a11d84
|
refs/heads/master
| 2021-01-18T14:55:05.022340 | 2013-05-05T19:09:05 | 2013-05-05T19:09:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.utils.functional import curry
def contribute_to_model(contrib, destination):
    """
    Update ``contrib`` model based on ``destination``.

    Every new field will be created. Existing fields will have some properties
    updated.

    Methods and properties of ``contrib`` will populate ``destination``.

    Usage example:

        >>> from django.contrib.auth.models import User
        >>> from django.db import models
        >>>
        >>> class MyUser(models.Model):
        ...     class Meta:
        ...         abstract = True
        ...         db_table = 'user' # new auth_user table name
        ...
        ...     # New field
        ...     phone = models.CharField('phone number', blank=True, max_length=20)
        ...
        ...     # Email could be null
        ...     email = models.EmailField(blank=True, null=True)
        ...
        ...     # New (stupid) method
        ...     def get_phone(self):
        ...         return self.phone
        ...
        >>> contribute_to_model(MyUser, User)
    """
    # Contrib should be abstract
    if not contrib._meta.abstract:
        raise ValueError('Your contrib model should be abstract.')

    # get_FOO_display methods we (re)bind below must survive the blanket
    # attribute copy at the end, so collect their names here.
    protected_get_display_method = []

    # Update or create new fields
    for field in contrib._meta.fields:
        try:
            destination._meta.get_field_by_name(field.name)
        except models.FieldDoesNotExist:
            # New field: attach it to the destination model.
            field.contribute_to_class(destination, field.name)
            if field.choices:
                # Bind get_FOO_display for the freshly added choices field.
                setattr(destination, 'get_%s_display' % field.name,
                        curry(destination._get_FIELD_display, field=field))
                protected_get_display_method.append(
                    'get_%s_display' % field.name
                )
        else:
            # Existing field: only sync a few mutable properties.
            current_field = destination._meta.get_field_by_name(field.name)[0]
            current_field.null = field.null
            current_field.blank = field.blank
            current_field.max_length = field.max_length
            current_field._unique = field.unique

    # Change some meta information
    if hasattr(contrib.Meta, 'db_table'):
        destination._meta.db_table = contrib._meta.db_table

    # Add (or replace) properties and methods
    protected_items = (dir(models.Model) + ['Meta', '_meta'] +
                       protected_get_display_method)
    for k, v in contrib.__dict__.items():
        if k not in protected_items:
            setattr(destination, k, v)
|
UTF-8
|
Python
| false | false | 2,013 |
16,527,034,192,651 |
da17dca9d02cf412a9e0ef1e9f33e95f9e43f491
|
5b20598d7e5ee3922099b83d8432ecebb81a9dd9
|
/ui_files/ui_ui_converter.py
|
ff316f5b7a8f3b0fc5a20bf69493eb1dde52821f
|
[] |
no_license
|
ver007/reStInPeace
|
https://github.com/ver007/reStInPeace
|
03d8c213d6caed7acde90aa099261ffbedc6e200
|
125599f1c3fd06e7301da7ccad1a23ca148dd87f
|
refs/heads/master
| 2020-06-17T06:47:53.212733 | 2013-03-02T02:41:27 | 2013-03-02T02:41:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/kib/Public/reStInPeace/reStInPeace0.9/ui_files/ui_converter.ui'
#
# Created: Wed Oct 24 20:38:07 2007
# by: PyQt4 UI code generator 4.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Converter(object):
    """Generated UI class for the Converter dialog (PyQt4 uic output).

    WARNING: regenerated from ui_converter.ui — manual edits will be lost.
    """

    def setupUi(self, Converter):
        # Build the widget tree and layouts on the Converter dialog.
        Converter.setObjectName("Converter")
        Converter.resize(QtCore.QSize(QtCore.QRect(0,0,235,156).size()).expandedTo(Converter.minimumSizeHint()))

        self.vboxlayout = QtGui.QVBoxLayout(Converter)
        self.vboxlayout.setObjectName("vboxlayout")

        self.vboxlayout1 = QtGui.QVBoxLayout()
        self.vboxlayout1.setObjectName("vboxlayout1")

        # Grid of label/combo rows plus the two action buttons.
        self.gridlayout = QtGui.QGridLayout()
        self.gridlayout.setObjectName("gridlayout")

        self.label_2 = QtGui.QLabel(Converter)
        self.label_2.setObjectName("label_2")
        self.gridlayout.addWidget(self.label_2,0,0,1,1)

        self.combo_out = QtGui.QComboBox(Converter)
        self.combo_out.setMinimumSize(QtCore.QSize(0,0))
        self.combo_out.setModelColumn(0)
        self.combo_out.setObjectName("combo_out")
        self.gridlayout.addWidget(self.combo_out,0,1,1,1)

        self.label_4 = QtGui.QLabel(Converter)
        self.label_4.setObjectName("label_4")
        self.gridlayout.addWidget(self.label_4,1,0,1,1)

        self.combo_style = QtGui.QComboBox(Converter)
        self.combo_style.setMinimumSize(QtCore.QSize(0,0))
        self.combo_style.setObjectName("combo_style")
        self.gridlayout.addWidget(self.combo_style,1,1,1,1)

        self.label_5 = QtGui.QLabel(Converter)
        self.label_5.setObjectName("label_5")
        self.gridlayout.addWidget(self.label_5,2,0,1,1)

        self.combo_options = QtGui.QComboBox(Converter)
        self.combo_options.setMinimumSize(QtCore.QSize(0,0))
        self.combo_options.setObjectName("combo_options")
        self.gridlayout.addWidget(self.combo_options,2,1,1,1)

        self.but_process = QtGui.QPushButton(Converter)
        self.but_process.setObjectName("but_process")
        self.gridlayout.addWidget(self.but_process,3,0,1,1)

        self.changedir = QtGui.QPushButton(Converter)
        self.changedir.setObjectName("changedir")
        self.gridlayout.addWidget(self.changedir,3,1,1,1)
        self.vboxlayout1.addLayout(self.gridlayout)

        self.buttonBox = QtGui.QDialogButtonBox(Converter)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.NoButton|QtGui.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.vboxlayout1.addWidget(self.buttonBox)
        self.vboxlayout.addLayout(self.vboxlayout1)

        self.retranslateUi(Converter)
        # Default output format: first entry ("(X)HTML").
        self.combo_out.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(Converter)

    def retranslateUi(self, Converter):
        # Install all user-visible (translatable) strings on the widgets.
        Converter.setWindowTitle(QtGui.QApplication.translate("Converter", "Convertisseur", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Converter", "Output format :", None, QtGui.QApplication.UnicodeUTF8))
        self.combo_out.addItem(QtGui.QApplication.translate("Converter", "(X)HTML", None, QtGui.QApplication.UnicodeUTF8))
        self.combo_out.addItem(QtGui.QApplication.translate("Converter", "LaTeX", None, QtGui.QApplication.UnicodeUTF8))
        self.combo_out.addItem(QtGui.QApplication.translate("Converter", "OpenOffice", None, QtGui.QApplication.UnicodeUTF8))
        self.combo_out.addItem(QtGui.QApplication.translate("Converter", "Lout", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("Converter", "StyleSheet :", None, QtGui.QApplication.UnicodeUTF8))
        self.combo_style.addItem(QtGui.QApplication.translate("Converter", "Default", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("Converter", "Options :", None, QtGui.QApplication.UnicodeUTF8))
        self.combo_options.addItem(QtGui.QApplication.translate("Converter", "Default", None, QtGui.QApplication.UnicodeUTF8))
        self.but_process.setText(QtGui.QApplication.translate("Converter", "Process", None, QtGui.QApplication.UnicodeUTF8))
        self.changedir.setText(QtGui.QApplication.translate("Converter", "Change saves directory", None, QtGui.QApplication.UnicodeUTF8))
|
UTF-8
|
Python
| false | false | 2,013 |
2,645,699,854,743 |
3d65f3b4ea2fdd9a3f94ed5d2c0cde81323de239
|
c9fd40e6e57ec42c46e0e32ddd4f4d9e4a564ab0
|
/django/src/routemap/sites/settings/slopemap.py
|
3e7623bc8f802b9a186d66254fbac2af4686939b
|
[
"GPL-1.0-or-later",
"BSD-2-Clause-Views",
"MIT",
"BSD-3-Clause",
"GPL-3.0-only"
] |
non_permissive
|
briff/waymarked-trails-site
|
https://github.com/briff/waymarked-trails-site
|
80102bf4a13dfe50fc20fdf214afd10849ccb2db
|
d78b339c3043df07c85db54c3feea147925317f8
|
refs/heads/master
| 2021-01-22T01:54:36.619414 | 2014-10-07T21:08:05 | 2014-10-07T21:08:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This file is part of the Waymarked Trails Map Project
# Copyright (C) 2011-2012 Sarah Hoffmann
# 2012-2013 Michael Spreng
#
# This is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# common settings for all route maps
from siteconfig import *
# Django settings for slopemap project.
# No-op translation marker: lets the strings below be tagged for gettext
# extraction without pulling in Django's translation machinery here.
_ = lambda s : s

# Project settings
ROUTEMAP_PAGEINFO = {
    # Translators: This is the category of routes for the active map view, will be preceded by site name, such as "Waymarked Trails: ".
    "maptopic" : _("Winter Sport Slopes"),
    "mapdescription" : _("Waymarked Trails shows winter sport slopes, with maps and information from OpenStreetMap."),
    "cssfile" : "slope_theme.css",
    "bgimage" : "banner_slopemap.jpg",
    "iconimg" : "map_slope.ico"
}

# Dotted paths of the Django models backing this map flavour.
ROUTEMAP_ROUTE_TABLE = 'routemap.sites.models.SlopeRelations'
ROUTEMAP_WAY_TABLE = 'routemap.sites.models.Slopes'
ROUTEMAP_JOINED_WAY_TABLE = 'routemap.sites.models.JoinedSlopes'
# Database schema and symbol/tile locations specific to the slope map.
ROUTEMAP_SCHEMA = 'slopemap'
ROUTEMAP_COMPILED_SYMBOL_PATH = 'slopemapsyms'
ROUTEMAP_TILE_URL = ROUTEMAP_TILE_BASEURL + '/slopemap'
ROUTEMAP_ROUTEINFO_URLS = 'routemap.apps.slopeinfo.urls'

# Help pages: per-language YAML source ('%s' is the locale) plus the
# nested page structure rendered in the help menu.
ROUTEMAP_HELPPAGES = {
    'source' : PROJECTDIR + 'django/locale/%s/helppages.yaml',
    "structure" : (("about", "slopemap", "osm"),
                   ("rendering", "sloperoutes", "slopeclassification", "slopemaplabels",
                    ("elevationprofiles", "general"),
                   ),
                   ("technical", "general", "translation"),
                   ("legal", "copyright", "usage", "disclaimer"),
                   ("acknowledgements", "text"),
                   ("contact", "text")
                  )
}
|
UTF-8
|
Python
| false | false | 2,014 |
5,514,738,010,483 |
a2061af28b8747e7f26ce86ef91b78e4ce391ddb
|
e4a0b87d3fe6fe091d9edb8b314127b1113d0d7e
|
/hello.py
|
7761b737c9ba066d276e82d677dc647095023aef
|
[] |
no_license
|
Iam42/Daily-Report
|
https://github.com/Iam42/Daily-Report
|
90f99ef30dde287b289f1e83e283bbffb7df030e
|
30a956c34c226eb1f4f84f5979c5813f9f1ae8ab
|
refs/heads/master
| 2020-05-30T11:06:04.550716 | 2014-12-19T03:08:57 | 2014-12-19T03:08:57 | 28,211,430 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*-coding:utf-8-*-
from flask import Flask, render_template, request, url_for
from model.member import Member
import sys
# Force the process-wide default encoding to UTF-8 so the hard-coded
# non-ASCII member names below round-trip (Python 2 idiom: reload(sys)
# restores sys.setdefaultencoding, which site.py normally deletes).
default_encoding = 'utf-8'
if sys.getdefaultencoding() != default_encoding:
    reload(sys)
    sys.setdefaultencoding(default_encoding)

app = Flask(__name__)
@app.route('/')
def test():
    # Landing page: serve the static report-entry form.
    return render_template('index.html')
@app.route('/fancha', methods=['POST'])
def fancha():
    # Persist one member's daily report; missing form fields fall back to
    # the "ooo" placeholder.
    yesterday = request.form.get('fancha_yesterday', "ooo")
    today = request.form.get('fancha_today', "ooo")
    member = Member('泛槎', yesterday, today)
    save(member)
    return 'ok'
@app.route('/jiufeng', methods=['POST'])
def jiufeng():
    # Same as /fancha but for the second member's report fields.
    yesterday = request.form.get('jiufeng_yesterday', "ooo")
    today = request.form.get('jiufeng_today', "ooo")
    member = Member('九风', yesterday, today)
    save(member)
    return 'ok'
@app.route('/build', methods=['POST'])
def build():
    """Return the accumulated report database ('dbase' file) verbatim.

    Uses a context manager so the file handle is closed even when read()
    raises (the original leaked the handle on error).
    """
    with open('dbase', 'r') as f:
        return f.read()
def save(member):
    """Append one member's rendered report to the 'dbase' file.

    Each record is member.build() followed by a newline and an HTML
    line break, matching what /build serves back raw.
    """
    # 'a' creates the file on first use; 'with' guarantees the handle is
    # closed even if write() raises (the original leaked it on error).
    with open('dbase', 'a') as file:
        file.write(member.build() + '\n' + '<br/>' + '\n')
if __name__ == '__main__':
    # Development server only: debug mode enables the reloader/debugger.
    app.debug = True
    app.run()
|
UTF-8
|
Python
| false | false | 2,014 |
1,357,209,711,398 |
10ef5951822579a241f4954a85b2dec851c8b0b4
|
07f72a64d08b20e74e7d4b1dcf635fedb6424d6b
|
/sd_gunicorn_client.py
|
50dcfe84f49e41808f18ba446de96661fdecea4d
|
[
"MIT"
] |
permissive
|
ignotas/sd-django
|
https://github.com/ignotas/sd-django
|
91915ad7ccb6ba4c7c4873d5c309f6ddb1f5d881
|
b0048e44637d0b615f5639ebb975d325f66b1b7a
|
refs/heads/master
| 2017-11-11T17:08:53.784266 | 2014-03-17T23:50:14 | 2014-03-17T23:50:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
from datetime import datetime, timedelta
from urllib2 import Request, urlopen
import subprocess
# Get the docker0 interface IP address
# (third whitespace-separated token of "ip route show" output — the default
# gateway as seen from inside the container).
result = subprocess.check_output("ip route show", shell=True)
docker_ip = result.split()[2]

# Get the most recent activity from the gunicorn access log
f = open('/var/log/gunicorn/access.log')
log_entries = f.readlines()
last_entry = f.readlines and log_entries[-1]  # noqa: see note below
|
UTF-8
|
Python
| false | false | 2,014 |
14,791,867,398,491 |
6ebbcc2a0c3f4634afe10c57798298a5c62f387b
|
676a8dedef11a7686b134a0b29a2f68fe012c1bf
|
/setup.py
|
e4e2fded9f046f17e73db0ad427ef64b20a0d4b3
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
non_permissive
|
ezeep/django-telekom-bmp
|
https://github.com/ezeep/django-telekom-bmp
|
89e9d76c3135d92ea215fb5cd60295c8bf9687c6
|
054ea7ca268f23df9859bcd209bd97cdfd5c1ada
|
refs/heads/master
| 2016-09-15T18:53:45.419057 | 2014-05-28T10:25:32 | 2014-05-28T10:25:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
    """setuptools 'test' command that delegates to pytest."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        # No extra pytest args; mark the (dummy) suite as present so
        # setuptools runs us.
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here so pytest need not be installed until test time.
        import pytest
        errno = pytest.main(self.test_args)
        sys.exit(errno)
def read_that_file(path):
    """Return the entire contents of *path* as one string."""
    with open(path) as handle:
        contents = handle.read()
    return contents
# README plus license text becomes the long description shown on PyPI.
long_description = '\n'.join((read_that_file('README.md'),
                              read_that_file('LICENSE.txt')))

version = '0.0.1'

setup(name='django-telekom-bmp',
      version=version,
      description='',
      author='Jose A. Martin',
      author_email='[email protected]',
      url='https://github.com/ezeep/django-telekom-bmp',
      license='Apache Software License',
      packages=['telekom_bmp'],
      include_package_data=True,
      zip_safe=True,
      install_requires=[
          'python-social-auth',
          'django-extensions',
          'flufl.enum',
      ],
      tests_require=[
          'pytest',
          'pytest-cov',
          'coveralls',
      ],
      # Route "python setup.py test" through the pytest command above.
      cmdclass={'test': PyTest},
      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: Developers',
          'Intended Audience :: System Administrators',
          'License :: OSI Approved :: Apache Software License',
          'Operating System :: OS Independent',
          'Programming Language :: Python :: 2.7',
      ]
      )
|
UTF-8
|
Python
| false | false | 2,014 |
16,913,581,229,090 |
e30e7cc43ac83de338054cd3a4789d309b00ae82
|
b7bc0907b5ff7f3a02e4d2e58e1ee8a2dae54f07
|
/scapes3/model/file_metadata_test.py
|
896498f5fdbdd0266e4c8b2cf0619f0628a1ceda
|
[] |
no_license
|
michaelcupino/scapes
|
https://github.com/michaelcupino/scapes
|
19af3c2d67ec8010a9757e6bfaca54d6191ac190
|
0c461b28bc34d3fbdf272dcac1987a2df62151b9
|
refs/heads/master
| 2021-01-02T09:43:50.103148 | 2014-07-07T07:14:16 | 2014-07-07T07:14:16 | 34,291,128 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import unittest
from google.appengine.ext import testbed
from model.file_metadata import FileMetadata
class FileMetadetaTest(unittest.TestCase):
    """Placeholder tests for model.file_metadata.FileMetadata on the GAE testbed."""
    # NOTE(review): class name looks like a typo for "FileMetadataTest";
    # left unchanged since unittest discovers it via TestCase subclassing.

    def setUp(self):
        # Fresh in-memory datastore and memcache stubs for every test.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()

    def tearDown(self):
        self.testbed.deactivate()

    # The bodies below are trivially-true placeholders kept until each
    # FileMetadata feature is confirmed in use (see TODOs).

    def testGetFirstKeyForUser(self):
        # TODO(michaelcupino): Find out if scapes is using getFirstKeyForUser
        self.assertEqual(1, 1 + 0)

    def testGetLastKeyForUser(self):
        # TODO(michaelcupino): Find out if scapes is using getLastKeyForUser
        self.assertEqual(2, 1 + 1)

    def testGetKeyName(self):
        # TODO(michaelcupino): Find out if scapes is using getKeyName
        self.assertEqual(3, 1 + 2)

    def testOwner(self):
        # TODO(michaelcupino): Find out if scapes is using owner property
        self.assertEqual(4, 1 + 3)

    def testFilename(self):
        # TODO(michaelcupino): Find out if scapes is using filename property
        self.assertEqual(5, 1 + 4)

    def testUploadedOn(self):
        # TODO(michaelcupino): Find out if scapes is using uploadedOn property
        self.assertEqual(5, 1 + 4)

    def testSource(self):
        # TODO(michaelcupino): Find out if scapes is using source property
        self.assertEqual(5, 1 + 4)

    def testBlobkey(self):
        # TODO(michaelcupino): Find out if scapes is using blobkey property
        self.assertEqual(5, 1 + 4)

    def testWordcountLink(self):
        # TODO(michaelcupino): Find out if scapes is using wordcount_link property
        self.assertEqual(5, 1 + 4)

    def testIndexLink(self):
        # TODO(michaelcupino): Find out if scapes is using index_link property
        self.assertEqual(5, 1 + 4)

    def testPhrasesLink(self):
        # TODO(michaelcupino): Find out if scapes is using phrases_link property
        self.assertEqual(5, 1 + 4)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
5,918,464,957,528 |
f861f493ebc84d3dac98222dcdb7b95fb5ddb7b2
|
90561bc6c51710faa8e2fbb73c0d56fcb8fc17d0
|
/fudgemsg/registry.py
|
f7c287f33c60835e7f20b51b1891a52ce4565f8b
|
[
"Apache-2.0"
] |
permissive
|
joequant/Fudge-Python
|
https://github.com/joequant/Fudge-Python
|
14de6c4dc3b13a2619afae441b34fcde5321bdc8
|
71b4c3ac777168b0d0a8778a255e8cc04ba4bc7a
|
refs/heads/master
| 2020-12-25T19:03:49.787469 | 2014-02-05T14:15:48 | 2014-02-05T14:15:48 | 898,168 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""A Registry, storing Fudge FieldTypes."""
from fudgemsg import codecs
from fudgemsg import types
from fudgemsg import utils
class UnknownTypeError(Exception):
    """An Unknown Type has been used

    TODO(jamesc)-We should be able to handle Unknown types
    in the registry."""
class FieldType(object):
    """The descriptor for a Fudge Field type.

    TODO(jamesc)- proper objects rather than this dispatch style?
    """

    def __init__(self, type_id, class_, is_variable_sized, fixed_size,
                 encoder=None, decoder=None, calc_size=None):
        """Create a new Field type.

        Arguments:
            type_id : the Fudge Type Identifier
            class : The python class this maps to
            is_variable_sized:
            fixed_size : the fixed size of the type
            encoder : Convert from object to bytes
                def encoder(obj) -> bytes
            decoder : Convert from bytes to object
                def decoder(bytes) -> object, bytes_read
            calc_size : if is_variable_sized is True, calculate the size needed
                to hold this object
                def size(object) -> num_bytes
        """
        self.type_id = type_id
        # class_ may be a class object or an already-dotted name string;
        # only the name is stored.
        if class_:
            if isinstance(class_, str):
                self.classname = class_
            else:
                self.classname = class_.__name__
        else:
            self.classname = None
        self.is_variable_sized = is_variable_sized
        self.fixed_size = fixed_size
        self.encoder = encoder
        self.decoder = decoder
        self.calc_size = calc_size

        # Variable-sized types must know how to compute their encoded size.
        if self.is_variable_sized:
            assert self.calc_size

    def name(self):
        """Return the human friendly name of a Fudge Type.

        Return:
          The name of the type, if known, otherwise 'unknown(type_id)'
        """
        try:
            return types.FUDGE_TYPE_NAMES[self.type_id]
        except KeyError:
            return 'unknown(%s)'% self.type_id

    def __repr__(self):
        return "FieldType[id=%r, classname=%r]"% (self.type_id, self.classname)
def fullname(class_):
    """Return the full class name of a class.

    Classes from the (Python 2) __builtin__ module are returned bare,
    without a module prefix.
    """
    module = class_.__module__
    if module == '__builtin__':
        return class_.__name__
    return "%s.%s" % (module, class_.__name__)
class Registry(object):
"""A Fudge Type registry.
"""
    def __init__(self):
        # Lookup tables: by Fudge wire type id, and by mapped Python class name.
        self.types_by_id = {}
        self.types_by_class = {}

        # Register the standard Fudge wire types.  Registration order and
        # arguments are part of the wire contract — do not reorder casually.
        self._add(FieldType(types.INDICATOR_TYPE_ID, 'fudgemsg.types.Indicator', \
                False, 0, \
                codecs.enc_indicator, codecs.dec_indicator, lambda x : 0))
        self._add(FieldType(types.BOOLEAN_TYPE_ID, bool, False, 1, \
                codecs.enc_bool, codecs.dec_bool))
        self._add(FieldType(types.BYTE_TYPE_ID, int, False, 1, \
                codecs.enc_byte, codecs.dec_byte))
        self._add(FieldType(types.SHORT_TYPE_ID, int, False, 2, \
                codecs.enc_short, codecs.dec_short))
        self._add(FieldType(types.INT_TYPE_ID, 'int', False, 4, \
                codecs.enc_int, codecs.dec_int))
        self._add(FieldType(types.LONG_TYPE_ID, long, False, 8, \
                codecs.enc_long, codecs.dec_long))

        self._add(FieldType(types.BYTEARRAY_TYPE_ID, str, True, 0, \
                codecs.enc_str, codecs.dec_str, types.size_str))
        # Array types: element codec mapped over the sequence; size is
        # element-width * length.
        self._add(FieldType(types.SHORTARRAY_TYPE_ID, None, True, 0, \
                lambda x : codecs.enc_array(codecs.enc_short, x), \
                lambda x : codecs.dec_array(codecs.dec_short, 2, x), \
                lambda x : 2 * len(x)))
        self._add(FieldType(types.INTARRAY_TYPE_ID, None, True, 0, \
                lambda x : codecs.enc_array(codecs.enc_int, x), \
                lambda x : codecs.dec_array(codecs.dec_int, 4, x), \
                lambda x : 4 * len(x)))
        self._add(FieldType(types.LONGARRAY_TYPE_ID, None, True, 0, \
                lambda x : codecs.enc_array(codecs.enc_long, x), \
                lambda x : codecs.dec_array(codecs.dec_long, 8, x), \
                lambda x : 8 * len(x)))
        self._add(FieldType(types.FLOAT_TYPE_ID, float, False, 4, \
                codecs.enc_float, codecs.dec_float))
        self._add(FieldType(types.DOUBLE_TYPE_ID, None, False, 8, \
                codecs.enc_double, codecs.dec_double))
        self._add(FieldType(types.FLOATARRAY_TYPE_ID, None, True, 0, \
                lambda x : codecs.enc_array(codecs.enc_float, x), \
                lambda x : codecs.dec_array(codecs.dec_float, 4, x), \
                lambda x : 4 * len(x)))
        self._add(FieldType(types.DOUBLEARRAY_TYPE_ID, None, True, 0, \
                lambda x : codecs.enc_array(codecs.enc_double, x), \
                lambda x : codecs.dec_array(codecs.dec_double, 8, x), \
                lambda x : 8 * len(x)))
        self._add(FieldType(types.STRING_TYPE_ID, unicode, True, 0, \
                codecs.enc_unicode, codecs.dec_unicode, types.size_unicode))

        # For FUDGEMSG, we shortcut the call to enc, dec
        self._add(FieldType(types.FUDGEMSG_TYPE_ID, \
                'fudgemsg.message.Message', True, 0, \
                None, None, \
                calc_size = lambda x, taxonomy : x.size(taxonomy=taxonomy)))

        # Fixed-width byte array variants (size encoded in the type id).
        self._add(FieldType(types.BYTEARRAY4_TYPE_ID, str, False, 4, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY8_TYPE_ID, str, False, 8, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY16_TYPE_ID, str, False, 16, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY20_TYPE_ID, str, False, 20, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY32_TYPE_ID, str, False, 32, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY64_TYPE_ID, str, False, 64, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY128_TYPE_ID, str, False, 128, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY256_TYPE_ID, str, False, 256, \
                codecs.enc_str, codecs.dec_str))
        self._add(FieldType(types.BYTEARRAY512_TYPE_ID, str, False, 512, \
                codecs.enc_str, codecs.dec_str))

        # Narrowing dispatch: maps a wide type id to the function that picks
        # the smallest type able to hold a particular value (see narrow()).
        self._narrower_fns = {
            types.BYTE_TYPE_ID: self._narrow_int,
            types.SHORT_TYPE_ID: self._narrow_int,
            types.INT_TYPE_ID: self._narrow_int,
            types.LONG_TYPE_ID: self._narrow_int,

            types.BYTEARRAY_TYPE_ID: self._narrow_str,
            types.BYTEARRAY4_TYPE_ID: self._narrow_str,
            types.BYTEARRAY8_TYPE_ID: self._narrow_str,
            types.BYTEARRAY16_TYPE_ID: self._narrow_str,
            types.BYTEARRAY20_TYPE_ID: self._narrow_str,
            types.BYTEARRAY32_TYPE_ID: self._narrow_str,
            types.BYTEARRAY64_TYPE_ID: self._narrow_str,
            types.BYTEARRAY128_TYPE_ID: self._narrow_str,
            types.BYTEARRAY256_TYPE_ID: self._narrow_str,
            types.BYTEARRAY512_TYPE_ID: self._narrow_str,
        }
def __getitem__(self, key):
    """Allow ``registry[type_id]`` as shorthand for the ID lookup table."""
    return self.types_by_id[key]
def _add(self, field_type):
    # Index by Fudge type ID; additionally index by Python classname when
    # the type declares one, enabling value -> FieldType lookups.
    self.types_by_id[field_type.type_id] = field_type
    classname = field_type.classname
    if classname:
        self.types_by_class[classname] = field_type
def type_by_id(self, type_id):
    """Given a type_id return the Fudge FieldType which it represents.

    Arguments:
    type_id: the Fudge Type ID

    Return:
    The FieldType object for the Type Id

    Raise:
    UnknownTypeError: if we can't find a suitable class in the registry"""
    if type_id in self.types_by_id:
        return self.types_by_id[type_id]
    raise UnknownTypeError("Did not recognize ID : %s"%type_id)
def type_by_class(self, value, classname=None):
    """Given a value and an optional class return the Fudge FieldType
    which can hold it.

    Arguments:
    value: the object to find a class for
    classname: the name of the class we wish to map to; when omitted (or
        falsy) it is derived from value's own class (default: None)

    Return:
    A FieldType which can hold the object

    Raise:
    UnknownTypeError: if we can't find a suitable class in the registry"""
    key = classname or fullname(value.__class__)
    if key in self.types_by_class:
        return self.types_by_class[key]
    raise UnknownTypeError("No type mapping for class : %s"%key)
def narrow(self, type_, value):
    """Narrow a type if the value can fit into a smaller type.

    Types without a registered narrowing function are returned unchanged."""
    narrower = self._narrower_fns.get(type_.type_id)
    if narrower is None:
        return type_
    return narrower(value)
def _narrow_int(self, value):
    # Pick the smallest integral FieldType whose range contains value,
    # falling through to LONG when nothing smaller fits.
    if utils.MIN_BYTE <= value <= utils.MAX_BYTE:
        return self[types.BYTE_TYPE_ID]
    if utils.MIN_SHORT <= value <= utils.MAX_SHORT:
        return self[types.SHORT_TYPE_ID]
    if utils.MIN_INT <= value <= utils.MAX_INT:
        return self[types.INT_TYPE_ID]
    return self[types.LONG_TYPE_ID]
def _narrow_str(self, value):
    # Map exact byte lengths onto the fixed-width byte-array types;
    # anything else uses the variable-length BYTEARRAY type.
    fixed_ids = {
        4: types.BYTEARRAY4_TYPE_ID,
        8: types.BYTEARRAY8_TYPE_ID,
        16: types.BYTEARRAY16_TYPE_ID,
        20: types.BYTEARRAY20_TYPE_ID,
        32: types.BYTEARRAY32_TYPE_ID,
        64: types.BYTEARRAY64_TYPE_ID,
        128: types.BYTEARRAY128_TYPE_ID,
        256: types.BYTEARRAY256_TYPE_ID,
        512: types.BYTEARRAY512_TYPE_ID,
    }
    type_id = fixed_ids.get(len(value), types.BYTEARRAY_TYPE_ID)
    return self[type_id]
# Shared module-level Registry pre-populated with the standard Fudge type
# mappings (presumably used as the default registry -- confirm with callers).
DEFAULT_REGISTRY = Registry()
|
UTF-8
|
Python
| false | false | 2,014 |
8,813,272,900,437 |
7e49175b84fef4a4744044160b6de81506466601
|
b713679c4fdec6791ca98dd76083e0e3dbb6abc8
|
/drawToScreen.py
|
697858db603165859acd593e5649c34e0aee2da7
|
[] |
no_license
|
anasazi/Swarm-AI---Zombies
|
https://github.com/anasazi/Swarm-AI---Zombies
|
4a6d542102e30ec4ae0222370f6a3960d70dc389
|
5582a2e4b1359808761a41fdb5d6e2c2c284a1d6
|
refs/heads/master
| 2020-05-16T21:59:57.122368 | 2011-02-20T18:59:27 | 2011-02-20T18:59:27 | 1,309,922 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame, sys, time
from pygame.locals import *
from random import randint, choice, random, sample
from vector import *
#from movingAgent import *
from agent import *
from math import sin, cos, pi
import threading
class Box:
    """Single-slot mutable container.

    Each worker thread deposits its computed result here so the main loop
    can read it back after join().
    """

    def __init__(self, thing):
        # the currently held value
        self.thing = thing

    def alter(self, thing):
        """Overwrite the held value."""
        self.thing = thing

    def peek(self):
        """Return the held value without modifying it."""
        return self.thing
class UpdateThread(threading.Thread):
    """Worker thread computing one agent's total steering force.

    The result is deposited into the supplied box so the main loop can
    collect it after join(); all forces are therefore based on the same
    snapshot of the simulation state.
    """

    def __init__(self, agent, everyone, box):
        threading.Thread.__init__(self)
        self.agent = agent        # the agent whose force we compute
        self.everyone = everyone  # the full agent population it reacts to
        self.box = box            # result container (see Box)

    def run(self):
        force = self.agent.getTotalForce(self.everyone)
        self.box.alter(force)
# set up pygame
pygame.init()

# Rendering mode: when RENDERTOFILE is truthy, every frame is written to
# disk as a PNG instead of being shown in the window.
RENDERTOFILE = 1
# Windows-style directory prefix for the rendered frames -- NOTE(review):
# the 'Outbreak' directory presumably must already exist; confirm.
FILENAME = 'Outbreak\\'

# set up the window
WINDOWWIDTH = 800
WINDOWHEIGHT = 800
windowSurface = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT), 0, 32)
pygame.display.set_caption('Animation')

# set up direction variables
PIover2 = pi/2

# set up the colors (RGB tuples)
BLACK = (0, 0, 0)
RED = (170, 100, 100)      # armed, healthy humans
GREEN = (15, 155, 15)      # zombies
GRAY = (100, 100, 100)     # unarmed humans
BLUE = (0, 128, 255)       # gun caches

# Simulation state: agents holds the entities; agentDots holds the
# matching draw records ({'x','y','rad','color'}), index-aligned with agents.
agents = []
agentDots = []
# total number of simulation frames to run
tMAX = 3000
# wall-clock start time, used for the timing report at exit
t1 = time.time()

# (hand-placed sample agents, kept commented out for reference)
##x = HumanAgent(5, Vector(170,150), Vector(1,1), 0.5, 20, 180, 1, False, 35.0, 0, 5, 10)
##agents += [x]
##agentDots += [{'x':180,'y':150,'rad':3,'color':GRAY}]
##
##x = ZombieAgent(5, Vector(220,190), Vector(1,0), 1, 20, 180, 1, 25, 5, 10)
##agents += [x]
##agentDots += [{'x':190,'y':150,'rad':3,'color':GREEN}]
# Spawn 250 humans at random positions with random headings.
for i in range(250):
    massi = 5
    positioni = Vector(randint(10, 790), randint(10, 790))
    speedi = 2
    max_forcei = 1    # NOTE(review): assigned but never passed to HumanAgent
    max_speedi = 0.9
    orientationi = Vector(random()-.5,random()-.5)
    sightAngle = 80      # vision cone angle -- units/semantics per agent.py
    sightRadius = 120    # vision distance in pixels -- presumably; confirm
    # Trailing constructor args (False, 35.0, 0, 5, 10) are
    # HumanAgent-specific -- see agent.py for their meaning.
    tempAgent = HumanAgent(massi, positioni, orientationi, speedi, sightRadius, sightAngle, max_speedi, False, 35.0, 0, 5, 10)
    agents.append(tempAgent)
    # matching draw record; unarmed humans are drawn gray
    tempBall = {'x':positioni.x,'y':positioni.y,'rad':3, 'color':GRAY}
    agentDots.append(tempBall)
# Spawn 25 zombies: slower base speed than humans but a higher max speed.
for i in range(25):
    massi = 5
    positioni = Vector(randint(10, 790), randint(10, 790))
    speedi = .5
    max_forcei = 1    # NOTE(review): assigned but never passed to ZombieAgent
    max_speedi = 1.6
    orientationi = Vector(random()-.5,random()-.5)
    sightAngle = 90      # vision cone angle -- units/semantics per agent.py
    sightRadius = 120    # vision distance in pixels -- presumably; confirm
    # Trailing constructor args (25, 12, 10) are ZombieAgent-specific --
    # see agent.py for their meaning.
    tempAgent = ZombieAgent(massi, positioni, orientationi, speedi, sightRadius, sightAngle, max_speedi, 25, 12, 10)
    agents.append(tempAgent)
    # matching draw record; zombies are drawn green
    tempBall = {'x':positioni.x,'y':positioni.y,'rad':3, 'color':GREEN}
    agentDots.append(tempBall)
# initialize the building types
def draw_wall(nums_list, x, y):
    """Instantiate WallAgents for one building template offset to (x, y).

    nums_list is a one-element list (as returned by random.sample) whose
    single item is a list of (x1, y1, x2, y2) segment tuples in
    template-local coordinates. Each segment becomes a WallAgent with a
    unit normal, appended to the module-level ``agents`` list.
    """
    segments = nums_list[0]
    for x1, y1, x2, y2 in segments:
        left = Vector(x1 + x, y1 + y)
        right = Vector(x2 + x, y2 + y)
        normal = (right - left).normal()
        normal = normal / normal.magnitude()
        agents.append(WallAgent(left, right, normal))
# (hand-placed sample walls, kept commented out for reference)
##agents += [WallAgent(Vector(160,140), Vector(160,180), Vector(1,0))]
##agents += [WallAgent(Vector(230,140), Vector(230,200), Vector(-1,0))]
##agents += [WallAgent(Vector(160,140), Vector(230,140), Vector(0,1))]
##agents += [WallAgent(Vector(160,200), Vector(230,200), Vector(0,-1))]

# Building wall templates: each wN lists wall segments as (x1, y1, x2, y2)
# tuples in template-local coordinates; templates fit within a 100x100 cell.
w1 = [(50, 50, 50,100), (50, 100, 100, 100), (50, 50, 100, 50), (100, 50, 100, 80)]
w2 = [(50, 50, 50,100), (50, 100, 80, 100), (50, 50, 100, 50), (100, 50, 100, 100)]
w3 = [(100, 0, 100, 100)]
w4 = [(0, 100, 100, 100)]
w5 = [(0, 0, 0, 100)]
w6 = [(0, 0, 100, 0)]
w7 = [(25, 25, 25, 40), (25, 40, 40, 40), (25, 25, 40, 25), (85, 65, 65, 85)]
w8 = [(10, 10, 75, 10), (10, 10, 10, 45), (10, 45, 45, 45), (45, 45, 45, 65), (75, 10, 75, 65), (65, 65, 75, 65)]
w9 = [(3, 3, 15, 3), (35, 3, 85, 3), (3, 3, 3, 25), (3, 25, 55, 25), (55, 25, 55, 85), (55, 85, 60, 95), (85, 3, 85, 95),(85, 95, 75, 95)]
w10 = [(25, 10, 25, 45), (25, 45, 5, 45), (5, 45, 5, 75), (5, 75, 35, 75), (65, 75, 85, 75), (85, 75, 85, 45), (85, 45, 45, 45), (45, 45, 45, 10)]
building_list = [w1, w2, w3, w4, w5, w6, w7, w8, w9, w10]

# Drop one randomly chosen template into each 100x100 grid cell.
# sample() returns a one-element list, which draw_wall unwraps.
for x in range(25, 725, 100):
    for y in range(25, 725, 100):
        draw_wall(sample(building_list, 1), x, y)
# Enclose the arena with four boundary walls.
def _add_boundary_wall(left, right):
    """Append a WallAgent spanning left->right with a unit normal."""
    normal = (right - left).normal()
    normal = normal / normal.magnitude()
    agents.append(WallAgent(left, right, normal))

_add_boundary_wall(Vector(0, 0), Vector(800, 0))      # top edge
# NOTE(review): the right edge runs to (799,799) rather than (800,800),
# leaving it slightly slanted/short -- looks like a typo, but kept as-is
# because the wall normal math may depend on it; confirm before changing.
_add_boundary_wall(Vector(800, 0), Vector(799, 799))  # right edge
_add_boundary_wall(Vector(0, 800), Vector(800, 800))  # bottom edge
_add_boundary_wall(Vector(0, 0), Vector(0, 800))      # left edge

# Scatter a few gun caches at random positions, each holding a random
# (fractional) number of guns in [0, 3).
for c in range(4):
    pos = Vector(random()*750+25, random()*750+25)
    guns = random()*3
    agents.append(GunCacheAgent(pos, guns))
# run the game loop
t = 0
while t < tMAX:
    # allow the window to be closed cleanly
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()

    # clear the frame
    windowSurface.fill(BLACK)

    # Compute every agent's steering force in parallel; each thread writes
    # into its own Box, so all forces are based on this frame's state.
    # NOTE: the comprehension variables must not be named 't' -- reusing the
    # frame counter's name here invited a shadowing bug.
    threads = [UpdateThread(ag, agents, Box(0.0)) for ag in agents]
    boxes = [th.box for th in threads]
    for th in threads:
        th.start()
    for th in threads:
        th.join()

    # Apply the computed forces and draw static/overlay elements.
    for i in range(len(agents)):
        agents[i].update(boxes[i].peek())
        if agents[i].isHuman() and agents[i].incubating:
            # fade an infected human's dot toward zombie green as health drops
            b = agentDots[i]
            b['color'] = (15, round((100-agents[i].health)/100 * 155), 15)
        if agents[i].isWall():
            pygame.draw.line(windowSurface, (220, 220, 220), agents[i].left_point.vec2tuple(), agents[i].right_point.vec2tuple(), 2)
        if agents[i].isGunCache():
            # cache square grows with the number of guns it holds
            pygame.draw.rect(windowSurface, BLUE, (agents[i].left_point.x, agents[i].left_point.y, 5+agents[i].guns, 5+agents[i].guns ), 0)
        if agents[i].isHuman() and agents[i].has_gun and not agents[i].incubating:
            # armed, healthy humans are drawn red
            a = agentDots[i]
            a['color'] = RED
        if agents[i].isHuman() and agents[i].has_gun and agents[i].firing:
            # muzzle flash: a line from shooter to target, cleared afterwards
            pygame.draw.line(windowSurface, (240, 240, 240), agents[i].position.vec2tuple(), agents[i].firing_target.vec2tuple())
            agents[i].firing = 0

    # Wrap positions, handle deaths/conversions, and draw the mobile agents.
    agentLimit = len(agentDots)
    i = 0
    while i < agentLimit:
        ag = agents[i]
        # wrap positions around the screen edges (toroidal arena)
        if ag.position.x < 0:
            ag.position.x = WINDOWWIDTH
        elif ag.position.x > WINDOWWIDTH:
            ag.position.x = 0
        if ag.position.y < 0:
            ag.position.y = WINDOWHEIGHT
        elif ag.position.y > WINDOWHEIGHT:
            ag.position.y = 0
        b = agentDots[i]
        b['x'] = ag.position.x
        b['y'] = ag.position.y
        if ag.isHuman() and ag.health <= 0:
            # a dead human rises as a zombie at the same spot
            print('t:' + str(t) + ' ', agents[i], 'has died and risen as a zombie!')
            pos = ag.position
            agents = agents[:i] + [ZombieAgent(ag.mass, pos, ag.orientation, 0.00001, 120, 160, 2, 25, 15, 10)] + agents[i+1:]
            # BUG FIX: the replacement dot previously used 'positioni', a
            # stale loop variable left over from setup, which placed the new
            # zombie's dot at the wrong coordinates for one frame.
            agentDots = agentDots[:i] + [{'x': pos.x, 'y': pos.y, 'rad': 3, 'color': GREEN}] + agentDots[i+1:]
        elif ag.isZombie() and ag.health <= 0:
            # a slain zombie is removed entirely; back up so the agent that
            # shifted into slot i is still processed this frame
            print('t:' + str(t) + ' ', 'The survivors slew ', agents[i])
            agents = agents[:i] + agents[i+1:]
            agentDots = agentDots[:i] + agentDots[i+1:]
            i -= 1
            agentLimit -= 1
        else:
            # draw a heading tick and the body dot
            pygame.draw.line(windowSurface, GRAY, (b['x'], b['y']), (b['x']+ag.orientation.x*6, b['y']+ag.orientation.y*6))
            pygame.draw.circle(windowSurface, b['color'], (round(b['x']), round(b['y'])), b['rad'])
        i += 1

    # present the frame: dump to disk, or display with a short delay
    if(RENDERTOFILE):
        pygame.image.save(windowSurface, FILENAME + str(t)+'.png')
    else:
        pygame.display.update()
        time.sleep(0.02)
    t += 1

pygame.quit()
t2 = time.time()
print('It took ' + str((t2-t1)) + ' per frame: ' + str((t2-t1)/tMAX))
sys.exit()
|
UTF-8
|
Python
| false | false | 2,011 |
14,611,478,755,530 |
e67ca28caab18e5cbacd6eb49ec90c171d4531f1
|
b51d9f725c349c71d5ef6678d615d346b105272b
|
/src/dsite/urls.py
|
8e6db79d7263f2a1f0fbbfe66ec7a6ba05f0d704
|
[] |
no_license
|
lpe234/dannysite.com
|
https://github.com/lpe234/dannysite.com
|
9557a5ed19da223057d6617153388526398618b1
|
2edae21fcfaec8625422007cc9ef4e1471a4ea54
|
refs/heads/master
| 2021-01-17T05:54:39.802076 | 2014-04-07T04:36:33 | 2014-04-07T04:36:33 | 22,376,149 | 1 | 0 | null | true | 2019-01-08T09:41:41 | 2014-07-29T10:24:22 | 2014-07-08T11:11:10 | 2019-01-08T09:41:31 | 3,655 | 0 | 0 | 1 | null | false | null |
# -*-coding:utf-8 -*-
'''
Created on 2013-10-30
@author: Danny<[email protected]>
DannyWork Project
'''
|
UTF-8
|
Python
| false | false | 2,014 |
14,087,492,779,967 |
fd90a9ac27d42a65a4476a42b8d77f2f4fdb73a6
|
75d3389e2298ab905658f434e43231c20689a94a
|
/twitter/decode_address.py
|
d827b48abd3d42049337e79cdc712b696d8a9128
|
[] |
no_license
|
matheusldaraujo/cse491-webz
|
https://github.com/matheusldaraujo/cse491-webz
|
955f26776c6f77982231e262b5d498004cb4542d
|
16aa5f9b9356adb9792fbd678a2e2b2a2f3eaad9
|
refs/heads/master
| 2020-12-11T05:25:25.404741 | 2013-04-18T17:58:05 | 2013-04-18T17:58:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import urllib, urllib2, StringIO, json
def decodeAddressToCoordinates(address):
    """Geocode a street address via the Google Maps geocoding HTTP API.

    Returns {'lat': ..., 'lng': ...} taken from the first result on
    success, or None when the response status is missing or not 'OK'.
    NOTE(review): Python 2 code (urllib2, print statement); network errors
    from urllib2.urlopen are not handled and will propagate.
    """
    urlParams = dict(address=address, sensor='false')
    url = 'http://maps.google.com/maps/api/geocode/json?' + \
        urllib.urlencode(urlParams)
    response = urllib2.urlopen(url)
    responseBody = response.read()
    # debug aid: echo the raw JSON payload
    print 'GOT response:', responseBody
    body = StringIO.StringIO(responseBody)
    result = json.load(body)
    if 'status' not in result or result['status'] != 'OK':
        return None
    else:
        # take the first (best-ranked) geocoding result
        return {
            'lat': result['results'][0]['geometry']['location']['lat'],
            'lng': result['results'][0]['geometry']['location']['lng']
        }
if __name__ == '__main__':
    # quick manual smoke test against a known Washington, DC intersection
    address = 'Constitution Ave NW & 10th St NW, Washington, DC'
    print decodeAddressToCoordinates(address)
|
UTF-8
|
Python
| false | false | 2,013 |
11,562,051,979,674 |
723c30c4d58598037b3c98b1a7ef1c876febee80
|
1a9c0b727a1306aa3ca9cee2182832a9cde96fff
|
/platform/src/pulp/server/managers/consumer/agent.py
|
97db146306b23f6246337e543f8adf5efbfd35fb
|
[
"GPL-2.0-only"
] |
non_permissive
|
ryanschneider/pulp
|
https://github.com/ryanschneider/pulp
|
ca38b8bde7c1372284eb8fe41f5c46e3bef5deed
|
6433d0fa2caf18e9e53bc233f991a2027cca8341
|
refs/heads/master
| 2021-01-23T20:13:12.596514 | 2012-10-30T21:12:06 | 2012-10-30T21:12:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2012 Red Hat, Inc.
#
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
"""
Contains agent management classes
"""
from logging import getLogger
import sys
from pulp.server.managers import factory as managers
from pulp.plugins.loader import api as plugin_api
from pulp.plugins.loader import exceptions as plugin_exceptions
from pulp.plugins.profiler import Profiler, InvalidUnitsRequested
from pulp.plugins.conduits.profiler import ProfilerConduit
from pulp.plugins.model import Consumer as ProfiledConsumer
from pulp.server.exceptions import PulpExecutionException, PulpDataException
from pulp.server.agent import PulpAgent
_LOG = getLogger(__name__)
class AgentManager(object):
"""
The agent manager.
"""
def unregistered(self, id):
"""
Notification that a consumer (agent) has
been unregistered. This ensure that all registration
artifacts have been cleaned up.
@param id: The consumer ID.
@type id: str
"""
manager = managers.consumer_manager()
consumer = manager.get_consumer(id)
agent = PulpAgent(consumer)
agent.consumer.unregistered()
def bind(self, id, repo_id):
"""
Apply a bind to the agent.
@param repo_id: A repository ID.
@type repo_id: str
"""
manager = managers.consumer_manager()
consumer = manager.get_consumer(id)
agent = PulpAgent(consumer)
agent.consumer.bind(repo_id)
def unbind(self, id, repo_id):
"""
Apply a unbind to the agent.
@param repo_id: A repository ID.
@type repo_id: str
"""
manager = managers.consumer_manager()
consumer = manager.get_consumer(id)
agent = PulpAgent(consumer)
agent.consumer.unbind(repo_id)
def install_content(self, id, units, options):
"""
Install content units on a consumer.
@param id: The consumer ID.
@type id: str
@param units: A list of content units to be installed.
@type units: list of:
{ type_id:<str>, unit_key:<dict> }
@param options: Install options; based on unit type.
@type options: dict
"""
manager = managers.consumer_manager()
consumer = manager.get_consumer(id)
conduit = ProfilerConduit()
collated = Units(units)
for typeid, units in collated.items():
pc = self.__profiled_consumer(id)
profiler, cfg = self.__profiler(typeid)
units = self.__invoke_plugin(profiler.install_units, pc, units, options, cfg, conduit)
collated[typeid] = units
units = collated.join()
agent = PulpAgent(consumer)
agent.content.install(units, options)
def update_content(self, id, units, options):
"""
Update content units on a consumer.
@param id: The consumer ID.
@type id: str
@param units: A list of content units to be updated.
@type units: list of:
{ type_id:<str>, unit_key:<dict> }
@param options: Update options; based on unit type.
@type options: dict
"""
manager = managers.consumer_manager()
consumer = manager.get_consumer(id)
conduit = ProfilerConduit()
collated = Units(units)
for typeid, units in collated.items():
pc = self.__profiled_consumer(id)
profiler, cfg = self.__profiler(typeid)
units = self.__invoke_plugin(profiler.update_units, pc, units, options, cfg, conduit)
collated[typeid] = units
units = collated.join()
agent = PulpAgent(consumer)
agent.content.update(units, options)
def uninstall_content(self, id, units, options):
"""
Uninstall content units on a consumer.
@param id: The consumer ID.
@type id: str
@param units: A list of content units to be uninstalled.
@type units: list of:
{ type_id:<str>, type_id:<dict> }
@param options: Uninstall options; based on unit type.
@type options: dict
"""
manager = managers.consumer_manager()
consumer = manager.get_consumer(id)
conduit = ProfilerConduit()
collated = Units(units)
for typeid, units in collated.items():
pc = self.__profiled_consumer(id)
profiler, cfg = self.__profiler(typeid)
units = self.__invoke_plugin(profiler.uninstall_units, pc, units, options, cfg, conduit)
collated[typeid] = units
units = collated.join()
agent = PulpAgent(consumer)
agent.content.uninstall(units, options)
def send_profile(self, id):
"""
Send the content profile(s).
@param id: The consumer ID.
@type id: str
"""
_LOG.info(id)
def __invoke_plugin(self, call, *args, **kwargs):
try:
return call(*args, **kwargs)
except InvalidUnitsRequested, e:
raise PulpDataException(e.units, e.message)
except Exception:
raise PulpExecutionException(), None, sys.exc_info()[2]
def __profiler(self, typeid):
"""
Find the profiler.
Returns the Profiler base class when not matched.
@param typeid: The content type ID.
@type typeid: str
@return: (profiler, cfg)
@rtype: tuple
"""
try:
plugin, cfg = plugin_api.get_profiler_by_type(typeid)
except plugin_exceptions.PluginNotFound:
plugin = Profiler()
cfg = {}
return plugin, cfg
def __profiled_consumer(self, id):
"""
Get a profiler consumer model object.
@param id: A consumer ID.
@type id: str
@return: A populated profiler consumer model object.
@rtype: L{ProfiledConsumer}
"""
profiles = {}
manager = managers.consumer_profile_manager()
for p in manager.get_profiles(id):
typeid = p['content_type']
profile = p['profile']
profiles[typeid] = profile
return ProfiledConsumer(id, profiles)
class Units(dict):
    """
    Content units collated by their content type ID.
    Maps type_id -> list of unit dicts, preserving input order per type.
    """

    def __init__(self, units):
        """
        Unit is: {type_id:<str>, unit_key:<dict>}
        @param units: A list of content units.
        @type units: list
        """
        for unit in units:
            self.setdefault(unit['type_id'], []).append(unit)

    def join(self):
        """
        Flat (uncollated) list of units.
        @return: A list of units.
        @rtype: list
        """
        flat = []
        for per_type in self.values():
            flat.extend(per_type)
        return flat
|
UTF-8
|
Python
| false | false | 2,012 |
5,214,090,313,008 |
409ac4e5d84533dc16b59a40812e6882c7660e5d
|
abf35b3d2568c5627fec4fde69c8f3d07cdaf7ee
|
/mysocietyorg/moin/lib/python2.4/site-packages/MoinMoin/i18n/pl.py
|
6ec12785515f54bc55e5741e8fa5d42882573fd6
|
[] |
no_license
|
MyfanwyNixon/orgsites
|
https://github.com/MyfanwyNixon/orgsites
|
2d370a1b314ef4322ac5856c27f37001bf29e70d
|
a2c30c3b742c65fb2c5bfbab1267d643823882a5
|
refs/heads/master
| 2021-01-18T11:17:43.591087 | 2013-07-04T15:43:34 | 2013-07-04T15:43:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Text translations for Polski (pl).
# Automatically generated - DO NOT EDIT, edit pl.po instead!
meta = {
'language': 'Polski',
'elanguage': 'Polish',
'maintainer': 'S.P. <[email protected]>',
'encoding': 'utf-8',
'direction': 'ltr',
'wikimarkup': True,
}
text = {
'''The backupped content of this page is deprecated and will not be included in search results!''':
'''Kopia zapasowa zawartości tej strony jest nieważna i nie będzie włączona do rezultatów przeszukiwania''',
'''Revision %(rev)d as of %(date)s''':
'''Wersja %(rev)d z %(date)s''',
'''Redirected from page "%(page)s"''':
'''Przekazano ze strony "%(page)s"''',
'''This page redirects to page "%(page)s"''':
'''Ta strona przekazuje na stronę "%(page)s"''',
'''~-If you submit this form, the submitted values will be displayed.
To use this form on other pages, insert a
[[BR]][[BR]]\'\'\'{{{ [[Form("%(pagename)s")]]}}}\'\'\'[[BR]][[BR]]
macro call.-~
''':
'''~-Jeżeli zaakceptujesz dane z formularza, wpisane wartości zostaną wyświetlone.
Aby użyć tego formularza na innych stronach, wprowadź
[[BR]][[BR]]\'\'\'{{{ [[Form("%(pagename)s")]]}}}\'\'\'[[BR]][[BR]]
makropolecenie.-~
''',
'''You are not allowed to view this page.''':
'''Nie możesz oglądać tej strony.''',
'''(cached %s)''':
'''(w pamięci podręcznej %s)''',
'''You are not allowed to edit this page.''':
'''Nie możesz zmieniać zawartości tej strony.''',
'''Page is immutable!''':
'''Strona nie została zmieniona!''',
'''Cannot edit old revisions!''':
'''Nie można zmieniać starych wersji!''',
'''The lock you held timed out, be prepared for editing conflicts!''':
'''Blokada tej strony wygasła, bądź przygotowany na ewentualne konflikty!''',
'''Edit "%(pagename)s"''':
'''Modyfikuj "%(pagename)s"''',
'''Preview of "%(pagename)s"''':
'''Podgląd "%(pagename)s"''',
'''Your edit lock on %(lock_page)s has expired!''':
'''Blokada strony %(lock_page)s wygasła!''',
'''Your edit lock on %(lock_page)s will expire in # minutes.''':
'''Blokada strony %(lock_page)s wygaśnie w ciągu # minut.''',
'''Your edit lock on %(lock_page)s will expire in # seconds.''':
'''Blokada strony %(lock_page)s wygaśnie w ciągu # sekund.''',
'''Someone else deleted this page while you were editing!''':
'''Ktoś inny skasował tę stronę w czasie, kiedy ją modyfikowałeś!''',
'''Someone else changed this page while you were editing!''':
'''Ktoś inny zmienił zawartość tej strony w czasie, kiedy ją modyfikowałeś!''',
'''Someone else saved this page while you were editing!
Please review the page and save then. Do not save this page as it is!
Have a look at the diff of %(difflink)s to see what has been changed.''':
'''Ktoś inny zapisał tę stronę kiedy ją edytowano!
Przejrzyj tę stronę i wtedy zapisz. Nie zapisuj jej takiej jaka jest!
Obejrzyj %(difflink)s aby stwierdzić, co zostało zmienione.''',
'''[Content of new page loaded from %s]''':
'''[Zawartość nowej strony zaczerpnięta z %s]''',
'''[Template %s not found]''':
'''[Szablon %s nie znaleziony]''',
'''[You may not read %s]''':
'''[Nie można czytać %s]''',
'''Skip to preview''':
'''Pomiń aby zobaczyć podgląd''',
'''[current page size \'\'\'%(size)d\'\'\' bytes]''':
'''[Rozmiar bieżącej strony \'\'\'%(size)d\'\'\' bajtów]''',
'''Describe %s here.''':
'''Opisz %s tutaj.''',
'''Optional comment about this change''':
'''Opcjonalny komentarz dotyczący niniejszej zmiany''',
'''<No addition>''':
'''<Bez dodania>''',
'''Make this page belong to category %(category)s''':
'''Przypisz tę stronę do kategorii %(category)s''',
'''Check Spelling''':
'''Sprawdź pisownię''',
'''Save Changes''':
'''Zachowaj zmiany''',
'''Cancel''':
'''Anuluj''',
'''By hitting \'\'\'%(save_button_text)s\'\'\' you put your changes under the %(license_link)s.
If you don\'t want that, hit \'\'\'%(cancel_button_text)s\'\'\' to cancel your changes.''':
'''Klikając w \'\'\'%(save_button_text)s\'\'\' umieszczasz swoje zmiany pod %(license_link)s.
Jeżeli tego nie chcesz, kliknij \'\'\'%(cancel_button_text)s\'\'\' aby zrezygnować z wprowadzenia zmian.''',
'''Preview''':
'''Podgląd''',
'''Trivial change''':
'''Drobna zmiana''',
'''Remove trailing whitespace from each line''':
'''Usuń puste znaki z przodu każdej linii''',
''' Emphasis:: [[Verbatim(\'\')]]\'\'italics\'\'[[Verbatim(\'\')]]; [[Verbatim(\'\'\')]]\'\'\'bold\'\'\'[[Verbatim(\'\'\')]]; [[Verbatim(\'\'\'\'\')]]\'\'\'\'\'bold italics\'\'\'\'\'[[Verbatim(\'\'\'\'\')]]; [[Verbatim(\'\')]]\'\'mixed \'\'[[Verbatim(\'\'\')]]\'\'\'\'\'bold\'\'\'[[Verbatim(\'\'\')]] and italics\'\'[[Verbatim(\'\')]]; [[Verbatim(----)]] horizontal rule.
Headings:: [[Verbatim(=)]] Title 1 [[Verbatim(=)]]; [[Verbatim(==)]] Title 2 [[Verbatim(==)]]; [[Verbatim(===)]] Title 3 [[Verbatim(===)]]; [[Verbatim(====)]] Title 4 [[Verbatim(====)]]; [[Verbatim(=====)]] Title 5 [[Verbatim(=====)]].
Lists:: space and one of: * bullets; 1., a., A., i., I. numbered items; 1.#n start numbering at n; space alone indents.
Links:: [[Verbatim(JoinCapitalizedWords)]]; [[Verbatim(["brackets and double quotes"])]]; url; [url]; [url label].
Tables:: || cell text |||| cell text spanning 2 columns ||; no trailing white space allowed after tables or titles.''':
''' Wyróżnienie [[Verbatim(\'\')]]\'\'kursywa\'\'[[Verbatim(\'\')]]; [[Verbatim(\'\'\')]]\'\'\'pogrubienie\'\'\'[[Verbatim(\'\'\')]]; [[Verbatim(\'\'\'\'\')]]\'\'\'\'\'pogrubiona kursywas\'\'\'\'\'[[Verbatim(\'\'\'\'\')]]; [[Verbatim(\'\')]]\'\'mix \'\'[[Verbatim(\'\'\')]]\'\'\'\'\'pogrubienie\'\'\'[[Verbatim(\'\'\')]] i kursywa\'\'[[Verbatim(\'\')]]; [[Verbatim(----)]] pozioma linia.
Nagłówki:: [[Verbatim(=)]] Tytuł 1 [[Verbatim(=)]]; [[Verbatim(==)]] Tytuł 2 [[Verbatim(==)]]; [[Verbatim(===)]] Tytuł 3 [[Verbatim(===)]]; [[Verbatim(====)]] Tytuł 4 [[Verbatim(====)]]; [[Verbatim(=====)]] Tytuł 5 [[Verbatim(=====)]].
Listy:: spacja plus jedno z: * znak wypunktowania; 1., a., A., i., I. 1.#n rozpoczyna numerowanie od n; spacja pojedyncze wcięcia.
Odnośniki: [[Verbatim(JoinCapitalizedWords)]]; [[Verbatim(["nawiasy i znaki cudzysłowia"])]]; url; [url]; [url label].
Tabele:: || tekst pola |||| tekst na dwa pola ||; niedopuszczalna jest spacja po tabelach lub tytułach.''',
'''Edit was cancelled.''':
'''Zmiany zostały anulowane''',
'''Dear Wiki user,
You have subscribed to a wiki page or wiki category on "%(sitename)s" for change notification.
The following page has been changed by %(editor)s:
%(pagelink)s
''':
'''Drogi czytelniku wiki,
Zasubskrybowałeś stronę wiki lub kategorię wiki na "%(sitename)s" w celu otrzymywania informacji o zmianach.
Następująca strona została zaktualizowana przez %(editor)s:
%(pagelink)s
''',
'''The comment on the change is:
%(comment)s
''':
'''Komentarz do zmiany:
%(comment)s
''',
'''New page:
''':
'''Nowa strona:
''',
'''No differences found!
''':
'''Nie znaleziono różnic!
''',
'''Trivial ''':
'''Drobna ''',
'''Status of sending notification mails:''':
'''Stan wysyłki powiadomienia pocztą elektroniczną:''',
'''[%(lang)s] %(recipients)s: %(status)s''':
'''[%(lang)s] %(recipients)s: %(status)s''',
'''## backup of page "%(pagename)s" submitted %(date)s''':
'''## kopia zapasowa strony "%(pagename)s" wprowadzona %(date)s''',
'''You are not allowed to edit this page!''':
'''Nie możesz zmieniać zawartości tej strony!''',
'''You cannot save empty pages.''':
'''Nie możesz zapisywać pustcyh stron.''',
'''Sorry, someone else saved the page while you edited it.
Please do the following: Use the back button of your browser, and cut&paste
your changes from there. Then go forward to here, and click EditText again.
Now re-add your changes to the current page contents.
\'\'Do not just replace
the content editbox with your version of the page, because that would
delete the changes of the other person, which is excessively rude!\'\'
''':
'''Przykro mi, ale ktoś inny zachował tę stronę w czasie, kiedy ją edytowano.
Musisz teraz cofnąć się za pomocą przeglądarki, i skopiować do schowka
Twoje zmiany. Potem ponownie rozpocznij edycję strony i wklej zawartość
schowka odpowiednio ją modyfikując wobec zmian uczynionych przez innych.
\'\'Tylko przypadkiem nie zapisuj na siłę swoich zmian, gdyż w ten sposób
możesz zniszczyć pracę innych, co może grozić wzrostem poziomu agresji!\'\'
''',
'''A backup of your changes is [%(backup_url)s here].''':
'''Kopia zapasowa Twoich zmian jest [%(backup_url)s tutaj].''',
'''You did not change the page content, not saved!''':
'''Nie zmieniono zawartości strony, nic nie zapisano!''',
'''You can\'t change ACLs on this page since you have no admin rights on it!''':
'''Nie możesz zmieniać ACL tej strony, ponieważ nie masz praw administratora!''',
'''Thank you for your changes. Your attention to detail is appreciated.''':
'''Dziękujemy za Twe zmiany. Twoja troska poświęcona detalom jest nieoceniona.''',
'''The lock of %(owner)s timed out %(mins_ago)d minute(s) ago, and you were granted the lock for this page.''':
'''Blokada założona przez %(owner)s wygasła %(mins_ago)d minut temu, i już możesz śmiało edytować tę stronę.''',
'''Other users will be \'\'blocked\'\' from editing this page until %(bumptime)s.''':
'''Inni czytelnicy nie mogą zmieniać tej strony do %(bumptime)s.''',
'''Other users will be \'\'warned\'\' until %(bumptime)s that you are editing this page.''':
'''Inni czytelnicy będą ostrzeżeni że edytujesz tę stronę do %(bumptime)s .''',
'''Use the Preview button to extend the locking period.''':
'''Użyj przycisku podglądu aby przedłużyć czas blokady.''',
'''This page is currently \'\'locked\'\' for editing by %(owner)s until %(timestamp)s, i.e. for %(mins_valid)d minute(s).''':
'''Ta strona jest obecnie \'\'zablokowana\'\' w celu edycji przez %(owner)sdo %(timestamp)s, to jest przez %(mins_valid)d minut.''',
'''This page was opened for editing or last previewed at %(timestamp)s by %(owner)s.[[BR]]
\'\'\'You should \'\'refrain from editing\'\' this page for at least another %(mins_valid)d minute(s),
to avoid editing conflicts.\'\'\'[[BR]]
To leave the editor, press the Cancel button.''':
'''Ta strona została otwarta do edycji albo ostatni podglądana %(timestamp)s przez %(owner)s.[[BR]]
\'\'\'Powinno się \'\'powstrzymać od edycji\'\' tej strony przez minimum następne %(mins_valid)d minut,
aby zapobiec konfliktom w edycji.\'\'\'[[BR]]
Aby opuścić edytor, kliknij w przycisk "Anuluj".''',
'''<unknown>''':
'''<nieznany>''',
'''Diffs''':
'''Różnice''',
'''Info''':
'''Info''',
'''Edit''':
'''Edycja''',
'''UnSubscribe''':
'''Zaprzestań subskrybcji''',
'''Subscribe''':
'''Zasubskrybuj''',
'''Raw''':
'''Surowy''',
'''XML''':
'''XML''',
'''Print''':
'''Drukuj''',
'''View''':
'''Zobacz''',
'''Up''':
'''Góra''',
'''Invalid user name {{{\'%s\'}}}.
Name may contain any Unicode alpha numeric character, with optional one
space between words. Group page name is not allowed.''':
'''Nieprawidłowy identyfikator czytelnika {{{\'%s\'}}}.
Identyfikator może zawierać jakikolwiek znak unikodu, z opcjonalnie
jedną spacją pomiędzy słowami. Grupowa nazwa strony jest niedozwolona.''',
'''You are not allowed to do %s on this page.''':
'''Nie możesz wykonać %s na tej stronie.''',
'''Login''':
'''Login''',
''' %s and try again.''':
''' %s i spróbuj ponownie.''',
'''Can\'t work out query''':
'''Nie potrafię zadać pytania''',
'''%(hits)d results out of %(pages)d pages.''':
'''%(hits)d rezultatów z liczby %(pages)d stron.''',
'''%.2f seconds''':
''' %.2f sekund.''',
'''match''':
'''porównanie''',
'''matches''':
'''porówniania''',
'''Open editor on double click''':
'''Otwórz edytor podwójnym kliknięciem''',
'''Remember last page visited''':
'''Zapamiętaj ostatnią odwiedzoną stronę''',
'''Show fancy links''':
'''Pokaż ozdobne odnośniki''',
'''Show question mark for non-existing pagelinks''':
'''Pokaż znaki zapytania dla nieistniejących jeszcze stron''',
'''Show page trail''':
'''Pokaż ślad strony''',
'''Show icon toolbar''':
'''Pokaż pasek narzędziowy''',
'''Show top/bottom links in headings''':
'''Pokaż górne/dolne odnośniki w nagłówkach''',
'''Show fancy diffs''':
'''Pokaż ozdobne różnice''',
'''Add spaces to displayed wiki names''':
'''Dodaj spacje do wyświetlanych nazw wiki''',
'''Remember login information''':
'''Zapamiętaj informacje o czytelniku''',
'''Subscribe to trivial changes''':
'''Zasubksrybuj drobne zmiany''',
'''Disable this account forever''':
'''Wyłącz to konto na zawsze''',
'''Cookie deleted. You are now logged out.''':
'''Ciasteczko skasowane. Wylogowano.''',
'''This wiki is not enabled for mail processing.
Contact the owner of the wiki, who can enable email.''':
'''W tym wiki nie uruchomiono przetwarzania poczty elektronicznej.
Skontaktuj się z właścicielem, który może tę opcję włączyć.''',
'''Please provide a valid email address!''':
'''Podaj prawidłowy adres poczty elektronicznej!''',
'''Found no account matching the given email address \'%(email)s\'!''':
'''Brak odpowiednich kont do podanego adresu \'%(email)s\'!''',
'''Unknown user name: {{{"%s"}}}. Please enter user name and password.''':
'''Nieznana nazwa konta: {{{"%s"}}}. Podaj identyfikator i hasło.''',
'''Missing password. Please enter user name and password.''':
'''Zapomniano o haśle. Podaj identyfikator i hasło.''',
'''Sorry, wrong password.''':
'''Przykro mi, hasło nieprawidłowe.''',
'''Bad relogin URL.''':
'''Niewłaściwy URL logowania.''',
'''Unknown user.''':
'''Nieznany identyfikator czytelnika.''',
'''Empty user name. Please enter a user name.''':
'''Puste pole identyfikatora. Podaj swój identyfikator''',
'''This user name already belongs to somebody else.''':
'''Niniejsza nazwa należy do kogoś innego.''',
'''Passwords don\'t match!''':
'''Hasła się nie zgadzają!''',
'''Please specify a password!''':
'''Podaj hasło!''',
'''Please provide your email address. If you loose your login information, you can get it by email.''':
'''Lepiej podaj swój adres poczty elektronicznej. Obecni jeżeli zapomnisz identyfikatora albo hasła, nie dostaniesz podpowiedzi pocztą.''',
'''This email already belongs to somebody else.''':
'''Ten adres poczty elektronicznej należy do kogoś innego.''',
'''The theme \'%(theme_name)s\' could not be loaded!''':
'''wystrój \'%(theme_name)s\' nie może zostać załadowany!''',
'''User preferences saved!''':
'''Ustawienia użytkownika zapisane!''',
'''Default''':
'''Domyślny''',
'''<Browser setting>''':
'''<Ustawienia przeglądarki>''',
'''Save''':
'''Zachowaj''',
'''Logout''':
'''Wyjdź''',
'''Create Profile''':
'''Utwórz profil''',
'''Mail me my account data''':
'''Prześlij do mnie moje dane''',
'''Name''':
'''Nazwa''',
'''(Use FirstnameLastname)''':
'''(Użyj postaci ImięNazwisko''',
'''Password''':
'''Hasło''',
'''Password repeat''':
'''Powtórz hasło''',
'''(Only when changing passwords)''':
'''(Tylko podczas zmiany hasła)''',
'''Email''':
'''Email''',
'''Preferred theme''':
'''Ulubiony wystrój''',
'''User CSS URL''':
'''Podaj odnośnik do pliku CSS''',
'''(Leave it empty for disabling user CSS)''':
'''(pozostaw pole puste, aby wyłączyć CSS użytkownika)''',
'''Editor size''':
'''Rozmiar edytora''',
'''Time zone''':
'''Strefa czasowa''',
'''Your time is''':
'''U Ciebie jest teraz''',
'''Server time is''':
'''Na serwerze jest teraz''',
'''Date format''':
'''Format daty''',
'''Preferred language''':
'''Ulubiony język''',
'''General options''':
'''Głowne opcje''',
'''Quick links''':
'''Szybkie skróty''',
'''This list does not work, unless you have entered a valid email address!''':
'''Ta lista nie działa, dopóki nie podasz prawidłowego adresu poczty elektronicznej!''',
'''Subscribed wiki pages (one regex per line)''':
'''Zasubskrybowane strony wiki (jedno wyrażenie regularne w linii)''',
'''No older revisions available!''':
'''Nie ma starszych wersji!''',
'''Diff for "%s"''':
'''Różnice "%s"''',
'''Differences between revisions %d and %d''':
'''Różnice pomiędzy wersjami %d i %d''',
'''(spanning %d versions)''':
'''(Spinanie %d wersji)''',
'''No differences found!''':
'''Nie znaleziono różnic!''',
'''The page was saved %(count)d times, though!''':
'''Strona została zachowana %(count)d razy, wszelako!''',
'''Ignore changes in the amount of whitespace''':
'''Ignoruj zmiany w ilości spacji''',
'''General Information''':
'''Podstawowe informacje''',
'''Page size: %d''':
'''Rozmiar strony: %d''',
'''SHA digest of this page\'s content is:''':
'''Odcisk SHA niniejszej strony:''',
'''The following users subscribed to this page:''':
'''Następujący czytelnicy zasubskrybowali niniejszą stronę:''',
'''This page links to the following pages:''':
'''Niniejsza strona zawiera odnośniki do następujących stron:''',
'''Date''':
'''Data''',
'''Size''':
'''Rozmiar''',
'''Diff''':
'''Róznice''',
'''Editor''':
'''Edytor''',
'''Comment''':
'''Komentarz''',
'''Action''':
'''Akcja''',
'''view''':
'''Zobacz''',
'''raw''':
'''surowy''',
'''print''':
'''drukuj''',
'''revert''':
'''przywróć''',
'''Revert to revision %(rev)d.''':
'''Przywróć wersję %(rev)d.''',
'''edit''':
'''edytuj''',
'''get''':
'''pobierz''',
'''del''':
'''usuń''',
'''N/A''':
'''N/D''',
'''Revision History''':
'''Histora zmian''',
'''No log entries found.''':
'''Nie znaleziono wpisów do dziennika.''',
'''Info for "%s"''':
'''Informacja o "%s"''',
'''Show "%(title)s"''':
'''Pokaż "%(title)s"''',
'''General Page Infos''':
'''Główne informacje o stronie''',
'''Show chart "%(title)s"''':
'''Pokaż wykres "%(title)s"''',
'''Page hits and edits''':
'''Ilość odsłon i edycji''',
'''You are not allowed to revert this page!''':
'''Nie możesz przywracać tej strony!''',
'''You are not allowed to subscribe to a page you can\'t read.''':
'''Nie możesz subskrybować strony, której nie możesz przeczytać.''',
'''This wiki is not enabled for mail processing. Contact the owner of the wiki, who can either enable email, or remove the "Subscribe" icon.''':
'''W tym wiki nie uruchomiono przetwarzania poczty elektronicznej. Skontaktuj się z właścicielem, aby je uruchomił, albo usuń ikonę "Subskrybuj".''',
'''You didn\'t create a user profile yet. Select UserPreferences in the upper right corner to create a profile.''':
'''Nie utworzyłeś dotychczas profilu czytelnika. Zaznacz PreferencjeCzytelnika w prawym górnym rogu aby go utworzyć.''',
'''You didn\'t enter an email address in your profile. Select your name (UserPreferences) in the upper right corner and enter a valid email address.''':
'''Nie podano adresu poczty elektronicznej w Twoim profilu. Zaznacz (PreferencjeCzytelnika) w prawym górnym rogu i podaj prawidłowy adres.''',
'''Your subscribtion to this page has been removed.''':
'''Subskrybcja tej strony przez Ciebie została usunięta.''',
'''Can\'t remove regular expression subscription!''':
'''Nie można usunąć subskrybcji wyrażenia skończonego!''',
'''To unsubscribe, go to your profile and delete this page from the subscription list.''':
'''Aby zrezygnować z subskrybcji, wejdź do swojego profilu i usuń stronę z listy subskrybcji.''',
'''You have been subscribed to this page.''':
'''Rezygnacja z subskrybcji została przyjęta.''',
'''Required attribute "%(attrname)s" missing''':
'''Niezbędny brakujący atrybut "%(attrname)s"''',
'''Submitted form data:''':
'''Dane przekazane z formularza:''',
'''Search Titles''':
'''Przeszukaj tytuły''',
'''Display context of search results''':
'''Pokaż kontekst rezultatu poszukiwań''',
'''Case-sensitive searching''':
'''Przeszukiwanie z uwzględnieniem wielkości liter''',
'''Search Text''':
'''Szukaj frazy''',
'''Go To Page''':
'''Idź do strony''',
'''Include system pages''':
'''Uwzględnij strony systemowe''',
'''Exclude system pages''':
'''Pomiń strony systemowe''',
'''Plain title index''':
'''Indeks tytułów''',
'''XML title index''':
'''Indeks tytułów XML''',
'''Python Version''':
'''Wersja języka Python''',
'''MoinMoin Version''':
'''Wersja MoinMoin''',
'''Release %s [Revision %s]''':
'''Wydanie %s [Korekta %s]''',
'''4Suite Version''':
'''Wersja 4Suite''',
'''Number of pages''':
'''Ilość stron''',
'''Number of system pages''':
'''Ilość stron systemowych''',
'''Accumulated page sizes''':
'''Zakumulowana wielkość stron''',
'''Entries in edit log''':
'''Pozycji w dzienniku zmian''',
'''%(logcount)s (%(logsize)s bytes)''':
'''%(logcount)s (%(logsize)s bajtów)''',
'''NONE''':
'''BRAK''',
'''Global extension macros''':
'''Globalne rozszerzenia makropoleceń''',
'''Local extension macros''':
'''Lokalne rozszerzenia makropoleceń''',
'''Global extension actions''':
'''Globalne rozsrzerzenia operacji''',
'''Local extension actions''':
'''Lokalne rozszerzenia operacji''',
'''Installed parsers''':
'''Zainstalowane analizatory składni''',
'''Installed processors (DEPRECATED -- use Parsers instead)''':
'''Zainstalowane procesory (NIEAKTUALNE -- użyj analizatorów składni)''',
'''Please use a more selective search term instead of {{{"%s"}}}''':
'''Spróbuj bardziej selektywnego sposobu poszukiwań zamiast {{{"%s"}}}''',
'''ERROR in regex \'%s\'''':
'''Błąd w wyrażeniu \'%s\'''',
'''Bad timestamp \'%s\'''':
'''Nieprawidłowy datownik binarny \'%s\'''',
'''Expected "=" to follow "%(token)s"''':
'''Spodziewane "=" aby podążyć za "%(token)s"''',
'''Expected a value for key "%(token)s"''':
'''Spodziwana wartość dla klucza "%(token)s"''',
'''Wiki Markup''':
'''Język oznaczeń Wiki''',
'''Print View''':
'''Podgląd wydruku''',
'''[%d attachments]''':
'''[%d załączników]''',
'''There are <a href="%(link)s">%(count)s attachment(s)</a> stored for this page.''':
'''Istnieje <a href="%(link)s">%(count)s załącznik(ów)</a> do tej strony.''',
'''Filename of attachment not specified!''':
'''Nie została podana nazwa pliku załącznika!''',
'''Attachment \'%(filename)s\' does not exist!''':
'''Załącznik \'%(filename)s\' nie istnieje!''',
'''To refer to attachments on a page, use \'\'\'{{{attachment:filename}}}\'\'\',
as shown below in the list of files.
Do \'\'\'NOT\'\'\' use the URL of the {{{[get]}}} link,
since this is subject to change and can break easily.''':
'''Aby odnieść się do załącznika na stronie, użyj \'\'\'{{{attachment:filename}}}\'\'\',
jak pokazano poniżej na liście plików.
\'\'\'NIGDY\'\'\' nie używaj URLa {{{[get]}}}, ponieważ odnośnik może się zmienić
i spowodować utratę informacji, którą chcesz przekazać.''',
'''No attachments stored for %(pagename)s''':
'''Strona %(pagename)s nie ma żadnyc załączników''',
'''Edit drawing''':
'''Edytuj rysunek''',
'''Attached Files''':
'''Załączone pliki''',
'''You are not allowed to attach a file to this page.''':
'''Nie możesz załączać plików do niniejszej strony.''',
'''New Attachment''':
'''Nowy załącznik''',
'''An upload will never overwrite an existing file. If there is a name
conflict, you have to rename the file that you want to upload.
Otherwise, if "Rename to" is left blank, the original filename will be used.''':
'''Wgranie pliku do wiki nigdy nie nadpisze istniejącego zbioru. Jeżeli w wiki
istnieje już plik o identycznej nazwie, musisz zmienić nazwę zbioru, który chcesz wgrać.
W przeciwnym wypadku, kiedy pole "Zmień nazwę" jest puste, zostanie użyta nazwa oryginalna.''',
'''File to upload''':
'''Plik do przekazania''',
'''Save as''':
'''Zachowaj jako''',
'''Upload''':
'''Przekaż''',
'''File attachments are not allowed in this wiki!''':
'''Załączniki w postaci plików nie są dozwolne w tym wiki!''',
'''You are not allowed to save a drawing on this page.''':
'''Nie możesz zachowywać rysunków na niniejszej stronie.''',
'''No file content. Delete non ASCII characters from the file name and try again.''':
'''W pliku nie ma nic. Skasuj wszystkie dziwne znaczki i spróbuj ponownie.''',
'''You are not allowed to delete attachments on this page.''':
'''Nie możesz skasować załączników z tej strony.''',
'''You are not allowed to get attachments from this page.''':
'''Nie możesz pobierać załączników z tej strony.''',
'''You are not allowed to view attachments of this page.''':
'''Nie możesz obejrzeć załączników do tej strony.''',
'''Unsupported upload action: %s''':
'''Nieprzewidziany sposób poboru pliku: %s''',
'''Attachments for "%(pagename)s"''':
'''Załączniki do "%(pagename)s"''',
'''Attachment \'%(target)s\' (remote name \'%(filename)s\') already exists.''':
'''Załącznik \'%(target)s\' (nazwa zdalna \'%(filename)s\') już istnieje.''',
'''Attachment \'%(target)s\' (remote name \'%(filename)s\') with %(bytes)d bytes saved.''':
'''Załącznik \'%(target)s\' (nazwa zdalna \'%(filename)s\') o wielkości %(bytes)d bajtów zapisany.''',
'''Attachment \'%(filename)s\' deleted.''':
'''Załącznik \'%(filename)s\' usunięty.''',
'''Attachment \'%(filename)s\'''':
'''Załącznik \'%(filename)s\'''',
'''Unknown file type, cannot display this attachment inline.''':
'''Nieznany rodzaj pliku, nie potrafię go tutaj wyświetlić.''',
'''attachment:%(filename)s of %(pagename)s''':
'''załącznik:%(filename)s z %(pagename)s''',
'''You are not allowed to delete this page.''':
'''Nie możesz kasować niniejszej strony.''',
'''This page is already deleted or was never created!''':
'''Niniejsza strona została już skasowana albo nigdy jej nie było!''',
'''Please use the interactive user interface to delete pages!''':
'''Użyj interkatywnego interfejsu aby usuwać strony!''',
'''Page "%s" was successfully deleted!''':
'''Strona "%s" została pomyślnie skasowana!''',
'''Really delete this page?''':
'''Czy na pewno usunąć niniejszą stronę?''',
'''Delete''':
'''Skasować?''',
'''Optional reason for the deletion''':
'''Opcjonalna przyczyna usunięcia''',
'''No pages like "%s"!''':
'''Brak stron w rodzaju "%s"!''',
'''Exactly one page like "%s" found, redirecting to page.''':
'''Dokładnie jedna strona typu "%s" została znaleziona, przekazuję Cię na tę stronę.''',
'''Pages like "%s"''':
'''Stron w rodzaju "%s"''',
'''%(matchcount)d %(matches)s for "%(title)s"''':
'''%(matchcount)d %(matches)s dla "%(title)s"''',
'''Local Site Map for "%s"''':
'''Mapa "%s"''',
'''You are not allowed to rename pages in this wiki!''':
'''Nie możesz zmieniać nazw stron na tym wiki!''',
'''Please use the interactive user interface to rename pages!''':
'''Użyj interkatywnego interfejsu aby zmieniać nazwy stron!''',
'''Could not rename page because of file system error: %s.''':
'''Nie mogę zmienić nazwy strony ze względu na błąd systemu plików: %s.''',
'''Rename Page''':
'''Zmień nazwę strony''',
'''New name''':
'''Nowa nazwa''',
'''Optional reason for the renaming''':
'''Opcjonalna przyczyna zmiany nazwy''',
'''\'\'\'A page with the name {{{\'%s\'}}} already exists.\'\'\'
Try a different name.''':
'''\'\'\'Strona o nazwie {{{\'%s\'}}} już istnieje.\'\'\'
Spróbuj innej nazwy.''',
'''(including %(localwords)d %(pagelink)s)''':
'''(zawarto %(localwords)d %(pagelink)s)''',
'''The following %(badwords)d words could not be found in the dictionary of %(totalwords)d words%(localwords)s and are highlighted below:''':
'''Następujące %(badwords)d wyrazy nie zostały odnalezione w słowniku z %(totalwords)d words%(localwords)s i zostały podświetlone poniżej:''',
'''Add checked words to dictionary''':
'''Dodaj sprawdzone słowa do słownika''',
'''No spelling errors found!''':
'''Nie znaleziono błędów!''',
'''You can\'t check spelling on a page you can\'t read.''':
'''Nie możesz sprawdzać stron, których nie możesz czytać.''',
'''Title Search: "%s"''':
'''Przeszukiwanie tytułów: "%s"''',
'''Full Text Search: "%s"''':
'''Przeszukiwanie pełnotekstowe: "%s"''',
'''Full Link List for "%s"''':
'''Pełna lista odnośników dla "%s"''',
'''Invalid include arguments "%s"!''':
'''Nieprawidłowe argumenty "%s"!''',
'''Nothing found for "%s"!''':
'''Nic nie znaleziono dla "%s"!''',
'''Unsupported navigation scheme \'%(scheme)s\'!''':
'''Nieobsługiwany schemat nawigacyjny \'%(scheme)s\'!''',
'''No parent page found!''':
'''Nie znaleziono strony nadrzędnej!''',
'''Wiki''':
'''Wiki''',
'''Slideshow''':
'''Pokaz slajdów''',
'''Start''':
'''Start''',
'''Slide %(pos)d of %(size)d''':
'''Slajd %(pos)d z %(size)d''',
'''No orphaned pages in this wiki.''':
'''Nie znaleziono stron - bękartów w tym wiki.''',
'''No quotes on %(pagename)s.''':
'''Brak przytoczeń na %(pagename)s.''',
'''Upload of attachment \'%(filename)s\'.''':
'''Przekazanie załącznika \'%(filename)s\'.''',
'''Drawing \'%(filename)s\' saved.''':
'''Rysunek \'%(filename)s\' zachowany.''',
'''%(hours)dh %(mins)dm ago''':
'''%(hours)dh %(mins)dm temu''',
'''(no bookmark set)''':
'''(brak ustalonych zakładek)''',
'''(currently set to %s)''':
'''(obecnie ustawione na %s)''',
'''Delete Bookmark''':
'''Usuń zakładkę''',
'''Set bookmark''':
'''Ustaw zakładkę''',
'''set bookmark''':
'''ustaw zakładkę''',
'''[Bookmark reached]''':
'''[Zakładka osiągnięta]''',
'''Markup''':
'''Znacznik''',
'''Display''':
'''Pokaż''',
'''Filename''':
'''Nazwa pliku''',
'''You need to provide a chart type!''':
'''Podaj rodzaj wykresu!''',
'''Bad chart type "%s"!''':
'''Nieprawidłowy rodzaj wykresu "%s"!''',
'''Download XML export of this wiki''':
'''Zgraj wersję XML tego wiki''',
'''No wanted pages in this wiki.''':
'''Brak poszukiwanych stron w tym wiki.''',
'''Create new drawing "%(filename)s"''':
'''Utwórz nowy rysunek "%(filename)s"''',
'''Upload new attachment "%(filename)s"''':
'''Wgraj nowy załącznik "%(filename)s"''',
'''Edit drawing %(filename)s''':
'''Edytuj rysunek %(filename)s''',
'''Expected "%(wanted)s" after "%(key)s", got "%(token)s"''':
'''Spodziewane "%(wanted)s" po "%(key)s", otrzymano "%(token)s"''',
'''Expected an integer "%(key)s" before "%(token)s"''':
'''Spodziewana wartość całkowita "%(key)s" przed "%(token)s"''',
'''Expected an integer "%(arg)s" after "%(key)s"''':
'''Spodziewana wartość całkowita "%(arg)s" po "%(key)s"''',
'''Expected a color value "%(arg)s" after "%(key)s"''':
'''Spodziewana wartość koloru "%(arg)s" po "%(key)s"''',
'''XSLT option disabled!''':
'''Opcja XSLT wyłączona!''',
'''XSLT processing is not available!''':
'''Przetwarzanie XSLT niedostępne!''',
'''%(errortype)s processing error''':
'''błąd przetwarzania %(errortype)s''',
'''RefreshCache''':
'''Odśwież pamięć podręczną''',
'''for this page (cached %(date)s)''':
'''dla tej strony (w pamięci podręcznej od %(date)s)''',
'''Charts are not available!''':
'''Wykresy nie są dostępne!''',
'''%(chart_title)s for %(filterpage)s''':
'''%(chart_title)s dla %(filterpage)s''',
'''green=view
red=edit''':
'''zielony=podgląd
czerwony=edycja''',
'''date''':
'''data''',
'''# of hits''':
'''# odwiedzin''',
'''page size upper bound [bytes]''':
'''rozmiar strony [bajtów]''',
'''# of pages of this size''':
'''# stron tego rozmiaru''',
'''Others''':
'''Inni''',
'''Unsubscribe''':
'''Rezygnuj z subskrybcji''',
'''Click to do a full-text search for this title''':
'''kliknij aby poszukać tytułu metodą pełnotekstową''',
'''Clear message''':
'''Usuń wiadomość''',
'''last edited %(time)s by %(editor)s''':
'''ostatnio modyfikowane %(time)s przez %(editor)s''',
'''last modified %(time)s''':
'''ostatnio modyfikowane %(time)s''',
'''Search:''':
'''Szukaj:''',
'''Text''':
'''Tekst''',
'''Titles''':
'''Tytuły''',
'''More Actions:''':
'''Więcej operacji:''',
'''Show Raw Text''':
'''Pokaż tekst źródłowy''',
'''Show Print View''':
'''Pokaż podgląd wydruku''',
'''Attach File''':
'''Załącz plik''',
'''Delete Page''':
'''Usuń stronę''',
'''Show Like Pages''':
'''Pokaż podobne strony''',
'''Show Local Site Map''':
'''Pokaż mapę wiki''',
'''Do''':
'''Wykonaj''',
'''Show Parent''':
'''Pokaż nadrzędne''',
'''Immutable Page''':
'''Strona nie zmieniona''',
'''Show Changes''':
'''Pokaż zmiany''',
'''Get Info''':
'''Pobierz info''',
'''Show %s days.''':
'''Pokaż %s dni.''',
'''EditText''':
'''Edytuj tekst''',
'''Immutable page''':
'''Strona nie zmieniona''',
'''Or try one of these actions:''':
'''Lub spróbuj wykonać: ''',
'''Page''':
'''Strona''',
'''Trail''':
'''Ślad''',
'''User''':
'''Czytelnik''',
'''Sorry, can not save page because "%(content)s" is not allowed in this wiki.''':
'''Przepraszam, strona nie została zachowana ze względu na to, że "%(content)s" nie są dozwolone na tej stronie''',
'''Line''':
'''Linia''',
'''Deletions are marked like this.''':
'''Usunięcia zostały oznaczone w ten sposób.''',
'''Additions are marked like this.''':
'''Dodatki zostały oznaczone w ten sposób.''',
'''Connection to mailserver \'%(server)s\' failed: %(reason)s''':
'''Awaria połączenia z serwerem poczty elektronicznej \'%(server)s\' z powodu: %(reason)s''',
'''Mail sent OK''':
'''Poczta została wysłana''',
'''FrontPage''':
'''StronaGłowna''',
'''RecentChanges''':
'''OstatnieZmiany''',
'''TitleIndex''':
'''IndeksTytułów''',
'''WordIndex''':
'''IndeksSłów''',
'''FindPage''':
'''ZnajdźStronę''',
'''SiteNavigation''':
'''NawigacjaWitryny''',
'''HelpContents''':
'''KontekstPomocy''',
'''HelpOnFormatting''':
'''PomocFormatowania''',
'''UserPreferences''':
'''PreferencjeCzytelnika''',
'''WikiLicense''':
'''LicencjaWiki''',
'''MissingPage''':
'''ZagubionaStrona''',
'''Mon''':
'''pon''',
'''Tue''':
'''wto''',
'''Wed''':
'''śro''',
'''Thu''':
'''czw''',
'''Fri''':
'''pią''',
'''Sat''':
'''sob''',
'''Sun''':
'''nie''',
'''AttachFile''':
'''ZałączPlik''',
'''DeletePage''':
'''UsuńStronę''',
'''LikePages''':
'''PodobneStrony''',
'''LocalSiteMap''':
'''MapaWitryny''',
'''RenamePage''':
'''ZmieńNazwę''',
'''SpellCheck''':
'''SprawdźPisownię''',
}
|
UTF-8
|
Python
| false | false | 2,013 |
7,868,380,098,464 |
a5739a82f51d39128880039ca66d716d85c54620
|
6a32989dbeb4fb631f0930e42dc915a9e566a15d
|
/happening/admin.py
|
b8c514bf51b10d043f24075f7a699e1c1cb06f71
|
[
"AGPL-3.0-or-later",
"AGPL-3.0-only"
] |
non_permissive
|
UFRB/happening
|
https://github.com/UFRB/happening
|
b98ea933ccfc4ad27ca6526006ef077596ba9409
|
36bc72a8c90706bbc5512beac220d1cad71d271e
|
refs/heads/master
| 2021-01-01T18:29:11.675178 | 2014-07-07T14:25:21 | 2014-07-07T14:25:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.contrib.admin import ModelAdmin
from django.contrib import admin
from .models import Event, Discipline, City
class EventAdmin(ModelAdmin):
    """Admin options for Event: slug auto-filled from the name, with a
    date drill-down on the start time and filterable change list."""
    prepopulated_fields = {'slug': ('name',)}
    date_hierarchy = 'starts'
    list_filter = ('starts', 'city', 'venue', 'discipline')
    list_display = ('name', 'venue', 'starts')
class DisciplineAdmin(ModelAdmin):
    """Admin options for Discipline: slug auto-filled from the name."""
    prepopulated_fields = {'slug': ('name',)}
class CityAdmin(ModelAdmin):
    """Admin options for City: slug auto-filled from the name; the change
    list shows both the city name and its state."""
    list_display = ('name', 'state')
    prepopulated_fields = {'slug': ('name',)}
# Expose each model in the Django admin using its options class above
# (registration order preserved: Event, Discipline, City).
for _model, _options in ((Event, EventAdmin),
                         (Discipline, DisciplineAdmin),
                         (City, CityAdmin)):
    admin.site.register(_model, _options)
|
UTF-8
|
Python
| false | false | 2,014 |
15,839,839,422,514 |
6316e83b00f0b592bf2d4ebbcc5449e51fa896bb
|
72168cd6fb952626c2e63893f915565ed9c63bbc
|
/todolist/models.py
|
b332b5dc7fdcba31638f5d5e1a346e50b3181d9a
|
[] |
no_license
|
rip-the-jacker/todolist
|
https://github.com/rip-the-jacker/todolist
|
3a2118c1090a3821a9272e6ce7b4091a31507c4e
|
596f0a6f453bae550221cca851075d2fec8ccec2
|
refs/heads/master
| 2016-09-05T18:27:47.514334 | 2013-05-01T08:18:35 | 2013-05-01T08:18:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
class TodoItem(models.Model):
    """A single to-do entry with a free-text description and a status."""

    description = models.CharField(max_length=50)
    status = models.CharField(max_length=50)

    def __unicode__(self):
        # Python 2-style display name; Django uses this for object labels.
        return self.description

    def get_values(self):
        """Return this item's fields as a plain dict.

        Keys are 'description', 'status' and 'id' — the same shape the
        original key-by-key construction produced.
        """
        return {
            'description': self.description,
            'status': self.status,
            'id': self.id,
        }

    def set_values(self, values):
        """Update description and status from *values* and save.

        Raises KeyError if either 'description' or 'status' is missing
        (same behaviour as before).
        """
        self.description = values['description']
        self.status = values['status']
        self.save()
|
UTF-8
|
Python
| false | false | 2,013 |
16,819,091,955,452 |
933dfecfa2d00bc0d0c9f17a3808f5a8a19031ef
|
acb6416538d3252206ed0227be6e67cdb63be101
|
/gui/main_frame.py
|
24b64431f8820301bac1d5cdd71c2121280c9180
|
[] |
no_license
|
valzevul/summer_project
|
https://github.com/valzevul/summer_project
|
6f76a82da1d3e2c143ee6902b24619efc47ef434
|
f38acf391f4603d2a0e83ec87b0743f0bf0f9cd2
|
refs/heads/master
| 2021-01-10T22:05:07.089714 | 2014-07-07T11:40:00 | 2014-07-07T11:40:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import wx
import wx.html2
# Sample feed data: (title, body) pairs rendered into the dialog below.
# NOTE(review): the second and third entries are both "Новость 2" — likely
# the third was meant to be "Новость 3"; confirm intent before changing.
news = [("Новость 1", "Текст новости"), ("Новость 2", "Текст новости"), ("Новость 2", "Текст новости")]
# HTML fragment produced for each (title, body) pair.
html_template = "<h3>%s</h3><p>%s</p>"
# Accumulates the rendered fragments for the whole page.
html_string = ""
class MyBrowser(wx.Dialog):
    """A minimal 400x500 dialog hosting a single WebView that fills it."""

    def __init__(self, *args, **kwds):
        wx.Dialog.__init__(self, *args, **kwds)
        # One vertical sizer whose only (expanding) child is the browser.
        layout = wx.BoxSizer(wx.VERTICAL)
        self.browser = wx.html2.WebView.New(self)
        layout.Add(self.browser, 1, wx.EXPAND, 10)
        self.SetSizer(layout)
        self.SetSize((400, 500))
if __name__ == '__main__':
    app = wx.App()
    dialog = MyBrowser(None, -1)
    dialog.SetTitle('Читалка новостей')
    # Render every (title, body) pair through the shared template in one
    # pass instead of repeated string concatenation.
    html_string = "".join(html_template % entry for entry in news)
    dialog.browser.SetPage(html_string, "")
    dialog.Show()
    app.MainLoop()
|
UTF-8
|
Python
| false | false | 2,014 |
17,652,315,589,742 |
5e108f7d24dae60eb386662f997d6d2c1ff9ab9f
|
361f09b51e8b3ef4fbe9ca7270074afad0b98290
|
/prototype/src/c3s/dev/accountancy/__init__.py
|
f4323068eb9de892903c3173a1e0cd21c22e75d8
|
[] |
no_license
|
C3S/c3s.dev
|
https://github.com/C3S/c3s.dev
|
b9c042860e9c064e9f6b4ff61dbede2ed8d9dcc5
|
82bb0bdd8b9d2d126baaad1c54b5a88d197d0bb5
|
refs/heads/master
| 2020-05-19T07:18:10.139540 | 2013-12-30T01:22:41 | 2013-12-30T01:22:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import Fees, \
payout, \
membership, \
invoicing, \
accountants
|
UTF-8
|
Python
| false | false | 2,013 |
8,632,884,274,546 |
efd2571330ee0713fa70f0c159e35ab8978bcf27
|
f1d67cc47db6c0e778276cc595b4a858474beb69
|
/scalrr/lib/python2.7/site-packages/pyfits/tests/test_uint.py
|
025e1322f79ebf60fde728994582c21313adc9d9
|
[] |
no_license
|
esoroush/scalrr
|
https://github.com/esoroush/scalrr
|
7c8c11b5e16d8c0180ca3b55757c97f8c80c04a3
|
4c8209e77721aac2489e6f695e1b4dca8adae1ca
|
refs/heads/master
| 2021-01-19T08:07:47.056766 | 2013-08-20T01:41:44 | 2013-08-20T01:41:44 | 11,591,796 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import division, with_statement
import platform
import numpy as np
import pyfits
from pyfits.tests import PyfitsTestCase
from nose.tools import assert_equal, assert_true
class TestUintFunctions(PyfitsTestCase):
    """Round-trip tests for pseudo-unsigned integer support.

    FITS has no native unsigned integer types; unsigned data is stored as
    signed integers offset by BZERO = 2**(bits - 1).  These tests scale
    signed arrays that way, then check that ``pyfits.open(..., uint=True)``
    presents the data as the matching numpy unsigned dtype — for plain
    primary HDUs at 16/32/64 bits and (last test) a compressed image HDU.
    """

    def test_uint16(self):
        # Write int16 data with BZERO=2**15 (the uint16 storage convention).
        hdu = pyfits.PrimaryHDU(np.array([-3,-2,-1,0,1,2,3]))
        hdu.scale('int16', '', bzero=2**15)
        hdu.writeto(self.temp('tempfile.fits'))
        hdul = pyfits.open(self.temp('tempfile.fits'), uint=True)
        assert_equal(hdul[0].data.dtype, np.uint16)
        # Former negative values wrap around to the top of the uint16 range.
        assert_equal(np.all(hdul[0].data ==
                            np.array([(2**16)-3, (2**16)-2, (2**16)-1, 0, 1, 2, 3],
                                     dtype=np.uint16)), True)
        # Re-writing the uint view and reading it back must be lossless.
        hdul.writeto(self.temp('tempfile1.fits'))
        # NOTE(review): this open() uses 'uint16=True' while every other test
        # uses 'uint=True' — presumably deliberate coverage of the legacy
        # per-width keyword; confirm against the pyfits.open() API docs.
        hdul1 = pyfits.open(self.temp('tempfile1.fits'), uint16=True)
        assert_equal(np.all(hdul[0].data == hdul1[0].data), True)
        # .section access must honour the same unsigned conversion.
        assert_equal(hdul[0].section[:1].dtype.name, 'uint16')
        assert_true((hdul[0].section[:1] == hdul[0].data[:1]).all())
        hdul.close()
        hdul1.close()

    def test_uint32(self):
        # Same round trip at 32 bits: BZERO=2**31 maps int32 -> uint32.
        hdu = pyfits.PrimaryHDU(np.array([-3, -2, -1, 0, 1, 2, 3]))
        hdu.scale('int32', '', bzero=2**31)
        hdu.writeto(self.temp('tempfile.fits'))
        hdul = pyfits.open(self.temp('tempfile.fits'), uint=True)
        assert_equal(hdul[0].data.dtype, np.uint32)
        assert_equal(np.all(hdul[0].data ==
                            np.array([(2**32)-3, (2**32)-2, (2**32)-1, 0, 1, 2, 3],
                                     dtype=np.uint32)), True)
        hdul.writeto(self.temp('tempfile1.fits'))
        hdul1 = pyfits.open(self.temp('tempfile1.fits'), uint=True)
        assert_equal(np.all(hdul[0].data == hdul1[0].data), True)
        assert_equal(hdul[0].section[:1].dtype.name, 'uint32')
        assert_true((hdul[0].section[:1] == hdul[0].data[:1]).all())
        hdul.close()
        hdul1.close()

    def test_uint64(self):
        # uint64 support requires a 64-bit platform, so skip otherwise.
        if platform.architecture()[0] == '64bit':
            hdu = pyfits.PrimaryHDU(np.array([-3,-2,-1,0,1,2,3]))
            hdu.scale('int64', '', bzero=2**63)
            hdu.writeto(self.temp('tempfile.fits'))
            hdul = pyfits.open(self.temp('tempfile.fits'), uint=True)
            assert_equal(hdul[0].data.dtype, np.uint64)
            assert_equal(np.all(hdul[0].data ==
                                np.array([(2**64)-3,(2**64)-2,(2**64)-1,0,1,2,3],
                                         dtype=np.uint64)), True)
            hdul.writeto(self.temp('tempfile1.fits'))
            hdul1 = pyfits.open(self.temp('tempfile1.fits'), uint=True)
            assert_equal(np.all(hdul[0].data == hdul1[0].data), True)
            assert_equal(hdul[0].section[:1].dtype.name, 'uint64')
            assert_true((hdul[0].section[:1] == hdul[0].data[:1]).all())
            hdul.close()
            hdul1.close()

    def test_uint_compressed(self):
        # The same uint32 round trip through a tile-compressed image HDU
        # (data lives in extension 1, not the primary HDU).
        hdu = pyfits.CompImageHDU(np.array([-3, -2, -1, 0, 1, 2, 3]))
        hdu.scale('int32', '', bzero=2**31)
        hdu.writeto(self.temp('temp.fits'))
        with pyfits.open(self.temp('temp.fits'), uint=True) as hdul:
            assert_equal(hdul[1].data.dtype, np.uint32)
            assert_true(
                (hdul[1].data ==
                 np.array([(2**32)-3, (2**32)-2, (2**32)-1, 0, 1, 2, 3],
                          dtype=np.uint32)).all())
            hdul.writeto(self.temp('temp2.fits'))
            with pyfits.open(self.temp('temp2.fits'), uint=True) as hdul2:
                assert_true((hdul[1].data == hdul2[1].data).all())
                # TODO: Enable these lines if CompImageHDUs ever grow .section
                # support
                #assert_equal(hdul[1].section[:1].dtype.name, 'uint32')
                #assert_true((hdul[1].section[:1] == hdul[1].data[:1]).all())
|
UTF-8
|
Python
| false | false | 2,013 |
1,340,029,824,734 |
c8b31e9e6d0327c9894d6f802054303525514fda
|
4d8d5b322023942b17085d4058bceb2ca79b3911
|
/pyspambayes/tokenizer/__init__.py
|
c1d3715403afbccb3d15e9c25c84392bb0a931f4
|
[] |
no_license
|
gryphius/pyspambayes
|
https://github.com/gryphius/pyspambayes
|
6751ef2eac158f374dcfe8221e14537f2981312c
|
fd6a218942113359ee1335b333a3402e656ccc54
|
refs/heads/master
| 2016-09-05T15:36:57.693557 | 2014-09-11T15:27:26 | 2014-09-11T15:27:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'gryphius'
|
UTF-8
|
Python
| false | false | 2,014 |
6,004,364,304,191 |
64adfec2f385481c90e4ad78f3b2ea5787e8d4b5
|
77c2a134f99b92003fde35223039faab223db969
|
/apps/blog/urls.py
|
07694dbe178762aae74297371b978c60c20903d9
|
[] |
no_license
|
jamiecurle/ultrarenderdemo
|
https://github.com/jamiecurle/ultrarenderdemo
|
cde9d549215c5089681882146cdd83b1798e499e
|
cef6b9119da77285ca9a68d81abb4823c6c7afaf
|
refs/heads/master
| 2020-05-20T08:37:23.046000 | 2010-10-23T19:55:13 | 2010-10-23T19:55:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
from models import BlogPost
# Shared keyword arguments for the generic views below: both operate on
# the full BlogPost queryset.
object_dict={
    'queryset' : BlogPost.objects.all(),
}

# Function-string generic views via patterns() — this is the pre-1.5
# django.views.generic.list_detail API (removed in later Django versions).
# Index of posts at /, post detail at /<id>/ (reversible as "blog_view").
urlpatterns = patterns('django.views.generic.list_detail',
    (r'^$', 'object_list', object_dict),
    url(r'^(?P<object_id>\d+)/$', 'object_detail', object_dict, name="blog_view"),
)
|
UTF-8
|
Python
| false | false | 2,010 |
5,583,457,487,435 |
b55c8116561c0fe43667ced20df66a5d7010ce09
|
c209d5eb3a3c19d2fc0990e21a611bcd453b6685
|
/saralyze.py
|
3f6666e6a2626e57174c4f876241d68b5188b057
|
[] |
no_license
|
ddremund/saralyze
|
https://github.com/ddremund/saralyze
|
efdbc5bc2bf5acfe9ebabd1d1173b92d58122397
|
885dc29d93b83f63fb9786687ba288e1c9bd3e5d
|
refs/heads/master
| 2016-09-06T06:22:25.650278 | 2013-11-18T23:19:20 | 2013-11-18T23:19:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python -tt
# Copyright 2013 Derek Remund ([email protected])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
def process_lines(lines):
    """Parse one ``sar``-style report into per-section CSV fragments.

    The first line is a header of the form ``Linux <kernel> (<host>) <date>``.
    The remainder is scanned for the CPU, memory and load-average section
    headers, identified by their third column (``CPU``, ``kbmemfree`` and
    ``runq-sz`` respectively); each section's data rows are then converted
    to CSV by process_data().

    Note: pops the header off *lines*, mutating the caller's list (this
    matches the original behaviour).

    Returns a ``(cpu_csv, mem_csv, load_csv, hostname)`` tuple.
    """
    firstline = lines.pop(0)
    system = firstline.split()[2][1:-1]  # strip the parentheses around the host
    date = firstline.split()[3]
    cpuindex = memindex = loadindex = 0
    for index, data in enumerate(lines):
        tokens = data.split()
        if len(tokens) > 2:
            if tokens[2] == 'CPU':
                cpuindex = index
            if tokens[2] == 'kbmemfree':
                memindex = index
            if tokens[2] == 'runq-sz':
                loadindex = index
    # Each section runs from just past its header to its "Average:" row;
    # process_data() stops at that row, so passing the rest of the file is
    # safe.  (Leftover debug prints removed here.)
    cpudata = process_data(date, lines[cpuindex + 1:])
    memdata = process_data(date, lines[memindex + 1:])
    loaddata = process_data(date, lines[loadindex + 1:])
    return (cpudata, memdata, loaddata, system)


def process_data(date, lines):
    """Convert one section's data rows to CSV lines prefixed with *date*.

    Rows are consumed until the ``Average:`` summary row.  The first two
    columns of each row (the timestamp and its AM/PM marker) are joined
    with a space; the remaining columns become comma-separated fields.
    """
    data = ''
    for line in lines:
        tokens = line.split()
        if len(line) < 2:
            # Skip blank / one-character separator lines.
            # NOTE(review): this guards on the raw line length, not on
            # whether it split into tokens — kept as-is to preserve
            # behaviour for single-character lines.
            continue
        if tokens[0] == 'Average:':
            break
        data = data + date + ',' + tokens[0] + ' ' + tokens[1] + ',' + ','.join(tokens[2:]) + '\n'
    return data
def main():
    """Convert every sar text dump in this script's directory into
    per-system CSV files (<system>-cpu.csv, -mem.csv, -load.csv)."""
    # CSV header rows; parsed section data is appended below.
    cpudata = 'Date,Time,CPU,%user,%nice,%system,%iowait,%steal,%idle\n'
    memdata = 'Date,Time,kbmemfree,kbmemused,%memused,kbbuffers,kbcached,kbswapfree,kbswpused,%swpused,kbswpcad\n'
    loaddata = 'Date,Time,runq-sz,plist-sz,ldavg-1,ldavg-5,ldavg-15\n'
    directory = os.path.dirname(os.path.realpath(__file__))
    for item in os.listdir(directory):
        full_path = os.path.join(directory, item)
        # Skip directories and this script itself.
        # NOTE(review): os.listdir() yields basenames while __file__ may be
        # a path, so `item != __file__` might not exclude the script -- confirm.
        if os.path.isfile(full_path) and item != __file__:
            with open(full_path) as f:
                lines = f.read().splitlines()
            results = process_lines(lines)
            # NOTE(review): the accumulators are never reset between files,
            # so each system's CSVs also contain data from files processed
            # earlier in the loop -- confirm this is intended.
            cpudata = cpudata + results[0]
            memdata = memdata + results[1]
            loaddata = loaddata + results[2]
            with open(os.path.join(directory, results[3] + '-cpu.csv'), 'w') as f:
                f.write(cpudata)
            with open(os.path.join(directory, results[3] + '-mem.csv'), 'w') as f:
                f.write(memdata)
            with open(os.path.join(directory, results[3] + '-load.csv'), 'w') as f:
                f.write(loaddata)

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
10,075,993,277,881 |
f22626605a3211c93f28a5d1485da8cfbf984446
|
c0696e33d87dc812670dcfb5f0accb520f33d90f
|
/app/tests/utils/__init__.py
|
eb85a6a4506ac8a73847165200368ce4e640d6ac
|
[] |
no_license
|
gregorynicholas/grgrynch
|
https://github.com/gregorynicholas/grgrynch
|
74d6b0795f742abcad3c94953488c4aec2e59c63
|
3422afbe112d85734352c8824f85e3803e344334
|
refs/heads/master
| 2021-03-27T12:50:17.264650 | 2014-06-03T07:02:37 | 2014-06-03T07:02:37 | 2,913,638 | 4 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
app.tests.utils
~~~~~~~~~~~~~~~
tests for the app.utils.* package.
:copyright: (c) 2014 by gregorynicholas.
:license: MIT, see LICENSE for more details.
"""
from __future__ import unicode_literals
|
UTF-8
|
Python
| false | false | 2,014 |
3,341,484,600,886 |
7d9b945cee3df0978ef5f14d56cf6faa781303be
|
7f506cc468901706a128f25e80574eec060b702c
|
/dynatree/widgets.py
|
1f6b9eb165e945b93db09e68e3326e7bf0c32353
|
[
"BSD-2-Clause"
] |
permissive
|
MikeAmy/django-dynatree
|
https://github.com/MikeAmy/django-dynatree
|
e376c865f0eda8fb3de29509d25b6069bb3d2ad0
|
2ee38bed7315f176159e5c8691e3bcac42d51fe0
|
refs/heads/master
| 2021-01-18T12:18:02.210993 | 2012-10-17T13:08:42 | 2012-10-17T13:08:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from itertools import chain
from django import forms
from django.conf import settings
from django.forms.widgets import SelectMultiple
from django.utils.encoding import force_unicode
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from django.utils import simplejson as json
def get_doc(node, values):
    """Build the dynatree JSON dict for a single tree node.

    A node may provide its own ``get_doc(values)``; otherwise a dict with
    title/key/url is built and flagged selected + expanded when the node's
    pk (as a string) appears in *values*.
    """
    if hasattr(node, "get_doc"):
        return node.get_doc(values)
    title = node.name if hasattr(node, "name") else unicode(node)
    doc = {"title": title, "key": node.pk, "url": node.url}
    if str(node.pk) in values:
        doc["select"] = True
        doc["expand"] = True
    return doc
def get_tree(nodes, values):
    """Convert a flat, tree-ordered node sequence into nested dynatree dicts.

    *nodes* must be ordered so every parent precedes its children (e.g. an
    MPTT pre-order queryset); *values* is the set of selected keys as
    strings. Parents of selected children are marked expanded.

    (Cleanup: the original initialised `parent`, `parent_level` and `stack`
    locals that were never read; removed.)
    """
    results = []

    def find_parent(child, results):
        # Walk the sibling list backwards (most recently appended first),
        # following URL prefixes down until the node whose key matches
        # child.parent_id is found.
        for node in reversed(results):
            if child.url.startswith(node["url"]):
                if child.parent_id != node["key"]:
                    return find_parent(child, node["children"])
                else:
                    return node

    def add_doc(node):
        if node.level == 0:
            results.append(get_doc(node, values))
        elif node.level >= 1:
            parent = find_parent(node, results)
            children = parent.get("children", [])
            child = get_doc(node, values)
            if child.get('select', False):
                parent['expand'] = True
            children.append(child)
            parent["children"] = children

    for node in nodes:
        add_doc(node)
    return results
class DynatreeWidget(SelectMultiple):
    """SelectMultiple widget rendered as a jQuery Dynatree tree.

    A hidden input carries the selected keys; a plain checkbox list is also
    rendered as a fallback and kept in sync by the tree's onSelect handler.
    NOTE(review): render() reads attrs['id'] unconditionally, so it fails
    when no 'id' attribute is supplied -- confirm callers always pass one.
    """
    def __init__(self, attrs=None, choices=(), queryset=None, select_mode=2):
        super(DynatreeWidget, self).__init__(attrs, choices)
        self.queryset = queryset        # nodes shown in the tree
        self.select_mode = select_mode  # dynatree selectMode (2 = multi-select)

    def render(self, name, value, attrs=None, choices=()):
        """Return the widget HTML: hidden input + tree div + fallback
        checkbox list + the dynatree bootstrap <script>."""
        if value is None:
            value = []
        has_id = attrs and 'id' in attrs
        id = attrs['id']  # NOTE(review): shadows the builtin `id`
        final_attrs = self.build_attrs(attrs, name=name)
        output = []
        out = output.append
        # Hidden input holding the selected keys (updated by the JS below).
        out('<input type="hidden" name="%(name)s" id="%(id)s" value="%(value)s"/>' % dict(name=name, id=id, value=value))
        if has_id:
            out(u'<div id="%s_div"></div>' % id)
        else:
            out(u'<div></div>')
        if has_id:
            out(u'<ul class="dynatree_checkboxes" id="%s_checkboxes">' % id)
        else:
            out(u'<ul class="dynatree_checkboxes">')
        str_values = set([force_unicode(v) for v in value])
        # Fallback checkbox list mirroring the tree's selection state.
        for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
            if has_id:
                final_attrs = dict(final_attrs, id='%s_%s' % (id, option_value))
                label_for = u' for="%s"' % final_attrs['id']
            else:
                label_for = ''
            cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
            option_value = force_unicode(option_value)
            rendered_cb = cb.render(name, option_value)
            option_label = conditional_escape(force_unicode(option_label))
            output.append(
                u'<li><label%s>%s %s</label></li>' % (label_for, rendered_cb, option_label)
            )
        output.append(u'</ul>')
        output.append(u'<script type="text/javascript">')
        if has_id:
            # Serialised tree data for dynatree's `children` option.
            output.append(u'var dynatree_data_%s = %s;' % (
                id,
                json.dumps(get_tree(self.queryset, str_values))
            ))
            # Bootstrap script: builds the tree and keeps the hidden input
            # and the checkbox list in sync with tree selection.
            output.append(
                """
                $(function() {
                    $("#%(id)s_div").dynatree({
                        checkbox: true,
                        selectMode: %(select_mode)d,
                        children: dynatree_data_%(id)s,
                        debugLevel: %(debug)d,
                        onSelect: function(select, node) {
                            $('#%(id)s_checkboxes').find('input[type=checkbox]').removeAttr('checked');
                            var selNodes = node.tree.getSelectedNodes();
                            $('#%(id)s').val("")
                            var selKeys = $.map(selNodes, function(node){
                                $('#%(id)s_' + (node.data.key)).attr('checked', 'checked');
                                return node.data.key;
                            });
                            $('#%(id)s').val("["+selKeys.join(", ")+"]")
                        },
                        onClick: function(node, event) {
                            if( node.getEventTargetType(event) == "title" )
                                node.toggleSelect();
                        },
                        onKeydown: function(node, event) {
                            if( event.which == 32 ) {
                                node.toggleSelect();
                                return false;
                            }
                        }
                    });
                });
                """ % {
                    'id': id,
                    'debug': settings.DEBUG and 1 or 0,
                    'select_mode': self.select_mode,
                }
            );
        output.append(u'</script>')
        return mark_safe(u'\n'.join(output))

    class Media:
        css = {
            'all': ('dynatree/skin/ui.dynatree.css',)
        }
        js = ('dynatree/jquery.dynatree.min.js',)
|
UTF-8
|
Python
| false | false | 2,012 |
6,347,961,664,133 |
4eed6210be87814845680860a6f75dd7b43dfa94
|
153ecce57c94724d2fb16712c216fb15adef0bc4
|
/zc.vault/tags/0.10/src/zc/vault/traversal.py
|
5a53efa896f2de2678bcfac7017d9659e0f253b5
|
[
"ZPL-2.1"
] |
permissive
|
pombredanne/zope
|
https://github.com/pombredanne/zope
|
10572830ba01cbfbad08b4e31451acc9c0653b39
|
c53f5dc4321d5a392ede428ed8d4ecf090aab8d2
|
refs/heads/master
| 2018-03-12T10:53:50.618672 | 2012-11-20T21:47:22 | 2012-11-20T21:47:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import zope.component
import zope.interface
import zope.interface.common.mapping
import zope.location
import zope.proxy
import zope.copypastemove.interfaces
import zope.app.container.interfaces
import zope.app.container.constraints
import zc.copy
import zc.freeze.interfaces
import zc.shortcut
import zc.shortcut.proxy
import zc.shortcut.interfaces
# interfaces
class IInventoryItemAware(zope.interface.Interface):
    """Objects that know the inventory node they were created for."""
    _z_inventory_node = zope.interface.Attribute(
        """a zc.vault.interfaces.IContained (an IInventoryItem or an
        IInventoryContents.""")
class IInventoryItemAwareFactory(zope.interface.Interface):
    """Factory producing IInventoryItemAware wrappers for inventory items."""
    def __call__(item, parent, name):
        """returns an object that provides IInventoryItemAware"""
class IProxy(
    IInventoryItemAware, zc.shortcut.interfaces.ITraversalProxy):
    """Traversal proxies carrying _z_inventory_node in addition to the
    __traversed_parent__ and __traversed_name__ of ITraversalProxy."""
class IData(zope.interface.Interface):
    """Marker interface: the object should be adapted to
    IInventoryItemAwareFactory, and the resulting factory called with the
    object's item, its parent, and its name within the parent."""
# the proxy
class Proxy(zc.shortcut.proxy.ProxyBase):
    # Traversal proxy that additionally remembers the inventory item the
    # proxied object came from (exposed as _z_inventory_node).
    zc.shortcut.proxy.implements(IProxy)

    __slots__ = '_z_inventory_node',

    def __new__(self, ob, parent, name, item):
        # ProxyBase.__new__ only takes (ob, parent, name); `item` is
        # stored in __init__.
        return zc.shortcut.proxy.ProxyBase.__new__(self, ob, parent, name)

    def __init__(self, ob, parent, name, item):
        zc.shortcut.proxy.ProxyBase.__init__(self, ob, parent, name)
        self._z_inventory_node = item
# the containers
class ReadContainer(zope.location.Location):
    """Read-only mapping view over an inventory node's children.

    Children come back wrapped: nodes without a stored object are wrapped
    via the registered IInventoryItemAwareFactory utility, IData objects
    are adapted to their own factory, and everything else is returned in a
    traversal Proxy that remembers its inventory item.
    """
    zope.interface.classProvides(IInventoryItemAwareFactory)
    zope.interface.implements(
        IInventoryItemAware, zope.interface.common.mapping.IEnumerableMapping)

    def __init__(self, item, parent=None, name=None):
        self.__parent__ = parent
        self.__name__ = name
        self._z_inventory_node = item

    def __len__(self):
        return len(self._z_inventory_node)

    def __iter__(self):
        return iter(self._z_inventory_node)

    def __contains__(self, key):
        return key in self._z_inventory_node

    def __getitem__(self, key):
        # Calling the node with a key returns the child item
        # (presumably raising KeyError for missing keys -- TODO confirm).
        item = self._z_inventory_node(key)
        if item.object is None:
            factory = zope.component.getUtility(IInventoryItemAwareFactory)
            return factory(item, self, key)
        elif IData.providedBy(item.object):
            factory = IInventoryItemAwareFactory(item.object)
            return factory(item, self, key)
        else:
            return Proxy(item.object, self, key, item)

    def keys(self):
        return self._z_inventory_node.keys()

    def values(self):
        return [self[key] for key in self]

    def items(self):
        return [(key, self[key]) for key in self]

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def __getstate__(self):
        # These views are transient wrappers; refuse pickling.
        raise RuntimeError('This should not be persisted.')
class Container(ReadContainer):
    """Writable variant of ReadContainer: delegates mutation straight to
    the underlying inventory node."""
    zope.interface.implements(zope.interface.common.mapping.IMapping)

    def __setitem__(self, key, value):
        self._z_inventory_node[key] = value

    def __delitem__(self, key):
        del self._z_inventory_node[key]

    def updateOrder(self, order):
        self._z_inventory_node.updateOrder(order)
# the movers and shakers
# Unfortunately we have to duplicate the standard checkObject so we can
# weed out the IContainer check, which is not pertinent here.
def checkObject(container, name, object):
    """Check containment constraints for an object and container.

    Duplicates zope.app.container.constraints.checkObject minus its
    IContainer check (see the module comment above); raises via the
    precondition / __parent__ validator when a constraint fails.
    """
    # check __setitem__ precondition
    containerProvided = zope.interface.providedBy(container)
    __setitem__ = containerProvided.get('__setitem__')
    if __setitem__ is not None:
        precondition = __setitem__.queryTaggedValue('precondition')
        if precondition is not None:
            precondition(container, name, object)
    # check the constraint on __parent__
    __parent__ = zope.interface.providedBy(object).get('__parent__')
    if __parent__ is not None:
        try:
            validate = __parent__.validate
        except AttributeError:
            pass
        else:
            validate(container)
def isInventoryObject(obj):
    """Return True when the fully unproxied *obj* is exactly the object
    stored on its inventory node."""
    unwrapped = zope.proxy.removeAllProxies(obj)
    return obj._z_inventory_node.object is unwrapped
class ObjectMover(object):
    """can only move objects within and among manifests; moving elsewhere
    has reparenting connotations that are inappropriate, since inventory
    membership and parentage are unrelated."""
    zope.interface.implements(zope.copypastemove.interfaces.IObjectMover)
    zope.component.adapts(IInventoryItemAware)

    def __init__(self, context):
        self.context = context
        self.__parent__ = context

    def moveTo(self, target, new_name=None):
        """Move the adapted object into *target*; returns the chosen name."""
        if not IInventoryItemAware.providedBy(target):
            raise ValueError('target must be IInventoryItemAware')
        node = self.context._z_inventory_node
        if new_name is None:
            new_name = node.name
        if node == target._z_inventory_node and new_name == node.name:
            return # noop
        # Neither the source nor the destination manifest may be frozen.
        manifest = node.inventory.manifest
        if manifest._z_frozen:
            raise zc.freeze.interfaces.FrozenError(manifest)
        if target._z_inventory_node.inventory.manifest._z_frozen:
            raise zc.freeze.interfaces.FrozenError(
                target._z_inventory_node.inventory.manifest)
        checkObject(target, new_name, self.context)
        # Let the target's name chooser normalise/uniquify the name.
        chooser = zope.app.container.interfaces.INameChooser(target)
        new_name = chooser.chooseName(new_name, node.object)
        node.moveTo(target._z_inventory_node, new_name)
        return new_name

    def moveable(self):
        # Movable as long as the owning manifest is not frozen.
        manifest = self.context._z_inventory_node.inventory.manifest
        return not manifest._z_frozen

    def moveableTo(self, target, new_name=None):
        """True iff moveTo(target, new_name) would be allowed."""
        node = self.context._z_inventory_node
        manifest = node.inventory.manifest
        if (not manifest._z_frozen and
            IInventoryItemAware.providedBy(target) and
            not target._z_inventory_node.inventory.manifest._z_frozen):
            if new_name is None:
                new_name = node.name
            try:
                checkObject(target, new_name, self.context)
            except zope.interface.Invalid:
                pass
            else:
                return True
        return False
class ObjectCopier(object):
    """Generally, make new copies of objects.

    If target is from a non-versioned manifest, use copyTo and then copy
    all of the non-None data objects in the tree. Otherwise, if the object
    is a proxied leaf node, do a normal copy; otherwise refuse (can't copy
    a vault-specific object out of a vault).
    """
    zope.interface.implements(zope.copypastemove.interfaces.IObjectCopier)
    zope.component.adapts(IInventoryItemAware)

    def __init__(self, context):
        self.context = context
        self.__parent__ = context

    def copyTo(self, target, new_name=None):
        """Copy the adapted object into *target*; returns the chosen name."""
        if IInventoryItemAware.providedBy(target):
            if target._z_inventory_node.inventory.manifest._z_frozen:
                raise zc.freeze.interfaces.FrozenError(
                    target._z_inventory_node.inventory.manifest)
        else:
            # Copying out of the vault: only actual stored objects may
            # leave; delegate to the object's regular copier.
            if not isInventoryObject(self.context):
                raise ValueError # TODO better error
            return zope.copypastemove.interfaces.IObjectCopier(
                zc.shortcut.proxy.removeProxy(self.context)).copyTo(
                target, new_name)
        node = self.context._z_inventory_node
        manifest = node.inventory.manifest
        if new_name is None:
            new_name = node.name
        checkObject(target, new_name, self.context)
        chooser = zope.app.container.interfaces.INameChooser(target)
        new_name = chooser.chooseName(new_name, node.object)
        node.copyTo(target._z_inventory_node, new_name)
        new_node = zope.proxy.removeAllProxies(
            target._z_inventory_node(new_name))
        # Depth-first walk over the freshly copied subtree, replacing each
        # node's stored object with a zc.copy copy so nothing is shared
        # with the source. The bootstrap entry is a lambda plus a single ''
        # key so the first `node(key)` call yields new_node itself.
        # (py2 iterator protocol: i.next())
        stack = [(lambda x: new_node, iter(('',)))]
        while stack:
            node, i = stack[-1]
            try:
                key = i.next()
            except StopIteration:
                stack.pop()
            else:
                next = node(key)
                original = next.object
                next.object = zc.copy.copy(original)
                stack.append((next, iter(next)))
        return new_name

    def copyable(self):
        return True

    def copyableTo(self, target, new_name=None):
        """True iff copyTo(target, new_name) would be allowed."""
        if not self.copyable():
            return False
        if IInventoryItemAware.providedBy(target):
            if target._z_inventory_node.inventory.manifest._z_frozen:
                return False
            check = checkObject
        else:
            if not isInventoryObject(self.context):
                return False
            check = zope.app.container.constraints.checkObject
        node = self.context._z_inventory_node
        manifest = node.inventory.manifest
        if new_name is None:
            new_name = node.name
        try:
            check(target, new_name, self.context)
        except zope.interface.Invalid:
            return False
        else:
            return True
class ObjectLinker(object):
    """Create links to inventory objects.

    Linking into another inventory target is implemented as a node copy
    (node.copyTo); linking outside the vault falls back to the object's own
    IObjectLinker and is only permitted for actual stored objects.
    """
    zope.component.adapts(IInventoryItemAware)
    zope.interface.implements(zc.shortcut.interfaces.IObjectLinker)

    def __init__(self, context):
        self.context = context
        self.__parent__ = context

    def linkTo(self, target, new_name=None):
        """Link the adapted object into *target*; returns the chosen name."""
        if IInventoryItemAware.providedBy(target):
            if target._z_inventory_node.inventory.manifest._z_frozen:
                raise zc.freeze.interfaces.FrozenError(
                    target._z_inventory_node.inventory.manifest)
        else:
            # Linking out of the vault: delegate to the object's own linker.
            if not isInventoryObject(self.context):
                raise ValueError # TODO better error
            return zc.shortcut.interfaces.IObjectLinker(
                zc.shortcut.proxy.removeProxy(self.context)).linkTo(
                target, new_name)
        node = self.context._z_inventory_node
        manifest = node.inventory.manifest
        if new_name is None:
            new_name = node.name
        checkObject(target, new_name, self.context)
        chooser = zope.app.container.interfaces.INameChooser(target)
        new_name = chooser.chooseName(new_name, node.object)
        node.copyTo(target._z_inventory_node, new_name)
        return new_name

    def linkable(self):
        return True

    def linkableTo(self, target, new_name=None):
        """True iff linkTo(target, new_name) would be allowed."""
        if IInventoryItemAware.providedBy(target):
            if target._z_inventory_node.inventory.manifest._z_frozen:
                return False
            obj = self.context
            check = checkObject
        else:
            if not isInventoryObject(self.context):
                return False
            # Outside the vault the link would be a Shortcut; validate that.
            obj = self._createShortcut(
                zc.shortcut.proxy.removeProxy(self.context))
            check = zope.app.container.constraints.checkObject
        node = self.context._z_inventory_node
        manifest = node.inventory.manifest
        if new_name is None:
            new_name = node.name
        try:
            check(target, new_name, obj)
        except zope.interface.Invalid:
            return False
        else:
            return True

    def _createShortcut(self, target):
        # Hook for subclasses to customise the shortcut class.
        return zc.shortcut.Shortcut(target)
|
UTF-8
|
Python
| false | false | 2,012 |
7,902,739,845,494 |
55cda3ecf197db6ebbd5aa52db5665a250aab0e3
|
13b0a6e7c74f19b6e7de1181c50aac8e67494266
|
/calccofs.py
|
2487046a4a3faedde0f859ab64089d98071c3de3
|
[] |
no_license
|
khadiwala/Rubik-s-Cube-Solver
|
https://github.com/khadiwala/Rubik-s-Cube-Solver
|
1248269d5b0c9508d2d490c1660f2de64efe58d7
|
edfab4275add99320e1584cad46ce0daf7958c06
|
refs/heads/master
| 2016-09-05T18:58:28.170289 | 2011-04-28T04:02:18 | 2011-04-28T04:02:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy
def calc_cof():
    """Solve for the 8 coefficients of the projective (homography) transform
    mapping the corners of a square face onto a distorted quadrilateral.

    Returns m = [a, b, c, d, e, f, g, h] such that a source point (x, y)
    maps to:
        u = (a*x + b*y + c) / (g*x + h*y + 1)
        v = (d*x + e*y + f) / (g*x + h*y + 1)
    """
    facesize = 30
    # original 4 points (source square corners)
    nw = (0, 0)
    ne = (facesize, 0)
    sw = (0, facesize)
    se = (facesize, facesize)
    # maps to (destination quadrilateral corners)
    mnw = (facesize / 3, facesize / 3)
    mne = (facesize / 3, (2 * facesize) / 3)
    msw = (0, facesize)
    mse = (facesize, facesize)
    # Standard DLT system: for each correspondence (x, y) -> (u, v)
    #   [x, y, 1, 0, 0, 0, -x*u, -y*u] . m = u
    #   [0, 0, 0, x, y, 1, -x*v, -y*v] . m = v
    # Bug fix: the original row for `ne` used nw's y coordinate, and the
    # four v-rows reused the x coordinate / u targets instead of -x*v, -y*v.
    G = numpy.array([
        [nw[0], nw[1], 1, 0, 0, 0, -nw[0] * mnw[0], -nw[1] * mnw[0]],
        [ne[0], ne[1], 1, 0, 0, 0, -ne[0] * mne[0], -ne[1] * mne[0]],
        [sw[0], sw[1], 1, 0, 0, 0, -sw[0] * msw[0], -sw[1] * msw[0]],
        [se[0], se[1], 1, 0, 0, 0, -se[0] * mse[0], -se[1] * mse[0]],
        [0, 0, 0, nw[0], nw[1], 1, -nw[0] * mnw[1], -nw[1] * mnw[1]],
        [0, 0, 0, ne[0], ne[1], 1, -ne[0] * mne[1], -ne[1] * mne[1]],
        [0, 0, 0, sw[0], sw[1], 1, -sw[0] * msw[1], -sw[1] * msw[1]],
        [0, 0, 0, se[0], se[1], 1, -se[0] * mse[1], -se[1] * mse[1]],
    ])
    # Right-hand side: the four u targets followed by the four v targets.
    d = numpy.transpose([mnw[0], mne[0], msw[0], mse[0],
                         mnw[1], mne[1], msw[1], mse[1]])
    m = numpy.linalg.solve(G, d)
    return m
|
UTF-8
|
Python
| false | false | 2,011 |
17,643,725,662,435 |
c78fc873ff5431dc4af8474ebf56a5b8d4b7261d
|
8bebdfd21a527a011035cb6b77699bb3abdc2bee
|
/app-math-tr/eigseg/build.py
|
4d9729c0e0c175d04c3796867deebe5b65bc2e46
|
[
"GPL-2.0-only"
] |
non_permissive
|
mupavan/classnotes
|
https://github.com/mupavan/classnotes
|
e46bba6ca5d32664547293541fd24f09316cc50c
|
0bfe3956a1ae01e32f81d8ed498a8eb2f33dfd1d
|
refs/heads/master
| 2021-05-29T13:21:33.256643 | 2012-10-17T13:16:30 | 2012-10-17T13:16:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os, sys
# Build helper: with no argument (or 'tex') compile the LaTeX notes and open
# the PDF; with 'zip' bundle the sources into a publicly shared archive.
if len(sys.argv) == 1 or sys.argv[1] == 'tex':
    os.system("pdflatex eigseg.tex")
    os.system("evince eigseg.pdf")
    exit()

if sys.argv[1] == 'zip':
    os.system("zip ~/Dropbox/Public/skfiles/eigseg.zip eigseg.pdf eigseg.tex eigseg.py *.jpg *.png build.py")
|
UTF-8
|
Python
| false | false | 2,012 |
1,838,246,031,568 |
f273d8bea0c5911f9b6b4269231eab32eb019ee7
|
a41ac32d09d47b080832234be8ae0cbdf753eff1
|
/Samples/BackendNodes/Main.py
|
afe9a06d786a40f8a9a5ecf889bbfdb982a3fcc4
|
[
"LGPL-3.0-only"
] |
non_permissive
|
Qlien/MoveMe
|
https://github.com/Qlien/MoveMe
|
ef2b530913d5ac494c36c41aa184f3c00ce86165
|
11283068b90b0f99e31037f07bb9bedeba4d2fe7
|
refs/heads/master
| 2021-05-10T17:49:10.806714 | 2013-12-12T19:40:14 | 2013-12-12T19:40:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Created by Dmytro Konobrytskyi, 2013 (github.com/Akson/MoveMe)
import wx
import logging
from MoveMe.Canvas.Canvas import Canvas
from MoveMe.Canvas.Factories.DefaultNodesFactory import DefaultNodesFactory
from MoveMe.Canvas.Objects.MessageProcessingNodes.BackendNode import SourceBackendNode,\
BackendNode, DestinationBackendNode
class CanvasWindow(wx.Frame):
    """Top-level frame hosting a MoveMe canvas pre-populated with the three
    backend-node examples (source -> processor -> destination)."""
    def __init__(self, *args, **kw):
        wx.Frame.__init__(self, size=[1280, 720], *args, **kw)
        s = wx.BoxSizer(wx.VERTICAL)
        # Node classes the factory may instantiate from JSON descriptions.
        supportedClasses = {
            "SourceBackendNode":SourceBackendNode,
            "BackendNode":BackendNode,
            "DestinationBackendNode":DestinationBackendNode
        }
        canvas = Canvas(parent=self, nodesFactory=DefaultNodesFactory(supportedClasses))
        canvas.applicationId = "BackendNode"
        # Seed the canvas with one node of each kind (JSON description, [x, y]).
        canvas.CreateNodeFromDescriptionAtPosition('{"NodeClass": "SourceBackendNode", "APPLICATION_ID": "BackendNode", "NodeParameters":{"backendPath": "MoveMe.Canvas.Objects.MessageProcessingNodes.PassThroughBackendExample"}}', [20,20])
        canvas.CreateNodeFromDescriptionAtPosition('{"NodeClass": "BackendNode", "APPLICATION_ID": "BackendNode"}', [240,20])
        canvas.CreateNodeFromDescriptionAtPosition('{"NodeClass": "DestinationBackendNode", "APPLICATION_ID": "BackendNode"}', [460,20])
        s.Add(canvas, 1, wx.EXPAND)
        self.SetSizer(s)
        self.SetTitle("MoveMe MessageProcessingGraph example")
if __name__ == '__main__':
    # Debug-level logging, then run the demo window in a minimal wx app.
    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
    app = wx.PySimpleApp()
    CanvasWindow(None).Show()
    app.MainLoop()
|
UTF-8
|
Python
| false | false | 2,013 |
10,479,720,218,566 |
ef1e40ffad3a4dc6d7c0b26ae3c3d0a8e3e6a833
|
21282f9f5dfd839b91b328698422f1b8672ddbec
|
/TEST/projetImagerie.py
|
f09d1421a1a667346fd9b16a3c08f14e83dbdc06
|
[
"GPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"GPL-2.0-only"
] |
non_permissive
|
Radrangi/pattern-recognition
|
https://github.com/Radrangi/pattern-recognition
|
b84da809956eb5342bf9840bf4e3e6cab734440b
|
69cfea249ffde79dd4654d2e818d78f3f9715c59
|
refs/heads/master
| 2020-03-15T04:42:37.809991 | 2014-05-08T21:42:08 | 2014-05-08T21:42:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Author : GORMEZ David
Imagery Project: Pattern recognition
"""
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.pyplot import cm
from skimage.color import rgb2hsv,rgb2lab,hsv2rgb
from mpl_toolkits.mplot3d import Axes3D
from scipy.cluster.vq import kmeans,kmeans2,vq
def loadImages(formatChange):
    """Read the sample image from disk.

    When *formatChange* is truthy the float image is converted to uint8
    via changeFormat(). NOTE(review): the caller below passes the string
    'false', which is truthy -- confirm a real boolean is intended.
    """
    img = plt.imread("./images/AB05.png")
    if formatChange:
        return changeFormat(img)
    return img
def changeFormat(img):
    """Convert a float image in [0, 1] to an 8-bit image in [0, 255]."""
    scaled = img * 255
    return scaled.astype(np.uint8)
def convertHSV(img):
    """RGB(A) -> HSV; an alpha channel, if present, is dropped first."""
    channels = img.shape[2]
    if channels == 4:
        return rgb2hsv(img[:, :, 0:3])
    if channels == 3:
        return rgb2hsv(img)
    print ("Image format not supported")
def convertHSVtoRGB(img):
    """HSV -> RGB (inverse of convertHSV)."""
    rgb = hsv2rgb(img)
    return rgb
def scatter3D(centroids):
    """Scatter the centroids in 3D (figure 3), colouring each point by its
    own value scaled to [0, 1]."""
    figure = plt.figure(3)
    axes = Axes3D(figure)
    axes.scatter(centroids[:, 0], centroids[:, 1], centroids[:, 2],
                 c=centroids / 255., s=100)
def convertLAB(img):
    """RGB(A) -> CIE L*a*b*; an alpha channel, if present, is dropped."""
    channels = img.shape[2]
    if channels == 4:
        return rgb2lab(img[:, :, 0:3])
    if channels == 3:
        return rgb2lab(img)
    print ("Image format not supported")
def showOnScreen(img):
    """Display *img* in a new pyplot figure with nearest-neighbour sampling."""
    # Bug fix: plt.Figure() constructs a bare Figure object that is never
    # registered with the pyplot state machine, so nothing would be shown;
    # plt.figure() -- as used elsewhere in this script -- is intended.
    plt.figure()
    plt.imshow(img, interpolation='nearest')
def clustering(img,clusters):
    """k-means colour quantisation via scipy.cluster.vq.kmeans: returns img
    with every pixel replaced by its nearest cluster centroid."""
    #Reshaping image in list of pixels to allow kmean Algorithm
    #From 1792x1792x3 to 1792^2x3
    pixels = np.reshape(img,(img.shape[0]*img.shape[1],3))
    print ("pixels in Clustering : ",pixels.dtype,pixels.shape,type(pixels))
    #performing the clustering
    centroids,_ = kmeans(pixels,clusters,iter=3)
    print ("Centroids : ",centroids.dtype,centroids.shape,type(centroids))
    print centroids
    # quantization
    #Assigns a code from a code book to each observation
    #code : A length N array holding the code book index for each observation.
    #dist : The distortion (distance) between the observation and its nearest code.
    code,_ = vq(pixels,centroids)
    print ("Code : ",code.dtype,code.shape,type(code))
    print code
    # reshaping the result of the quantization
    reshaped = np.reshape(code,(img.shape[0],img.shape[1]))
    print ("reshaped : ",reshaped.dtype,reshaped.shape,type(reshaped))
    clustered = centroids[reshaped]
    print ("clustered : ",clustered.dtype,clustered.shape,type(clustered))
    #scatter3D(centroids)
    return clustered
def clustering2(img,clusters):
#Reshaping image in list of pixels to allow kmean Algorithm
#From 1792x1792x3 to 1792^2x3
pixels = np.reshape(img,(img.shape[0]*img.shape[1],3))
centroids,_ = kmeans2(pixels,3,iter=3,minit= 'random')
print ("Centroids : ",centroids.dtype,centroids.shape,type(centroids))
print centroids
# quantization
#Assigns a code from a code book to each observation
#code : A length N array holding the code book index for each observation.
#dist : The distortion (distance) between the observation and its nearest code.
code,_ = vq(pixels,centroids)
print ("Code : ",code.dtype,code.shape,type(code))
print code
# reshaping the result of the quantization
reshaped = np.reshape(code,(img.shape[0],img.shape[1]))
print ("reshaped : ",reshaped.dtype,reshaped.shape,type(reshaped))
clustered = centroids[reshaped]
print ("clustered : ",clustered.dtype,clustered.shape,type(clustered))
#scatter3D(centroids)
return clustered
# --- script driver ---------------------------------------------------------
# NOTE(review): loadImages('false') passes a *string*, and any non-empty
# string is truthy, so changeFormat() IS applied here despite the apparent
# intent; pass the boolean False to skip the uint8 conversion.
img = loadImages('false')
print ("Original Image",img.dtype, type(img),img.shape)
print ("pixel test Original = ", img[img.shape[0]/2,img.shape[1]/2,:])
#img = changeFormat(img)
imgHSV = convertHSV(img)
print ("imgHSV : ", imgHSV.dtype, type(imgHSV),imgHSV.shape)
print ("pixel test HSV = ", imgHSV[imgHSV.shape[0]/2,imgHSV.shape[1]/2,:])
clusters = 6
# Run both clustering variants on the HSV image, convert back to RGB.
imgClus = convertHSVtoRGB(clustering(imgHSV,clusters))
imgClus2 = convertHSVtoRGB(clustering2(imgHSV,clusters))
"""
kmeanHSV1 = kmeansAlgo(imgHSV)
kmean2 = kmeansAlgo2(img)
kmeanHSV2 = kmeansAlgo2(imgHSV)
"""
#imgLAB = convertLAB(img)
# Window 1: original vs. kmeans() clustering, side by side.
window1 = plt.figure(1)
window1.add_subplot(1,2,1)
plt.title('Original')
plt.imshow(img)
window1.add_subplot(1,2,2)
plt.imshow(imgClus)
plt.title("After Clustering1")
# Window 2: kmeans2() clustering.
window2= plt.figure(2)
plt.imshow(imgClus2)
plt.title("After Clustering2")
plt.show()
|
UTF-8
|
Python
| false | false | 2,014 |
13,262,859,039,842 |
db161fddbef401757c16b15bcbe4c170188e5285
|
df40547a5bc530c31f052b8b6efe5758df3d3ae6
|
/price_collection/crawl.py
|
8e3d54394080eb901458c72a89b1a54be44326cf
|
[] |
no_license
|
toanant/ISBN-Search
|
https://github.com/toanant/ISBN-Search
|
41fcb08f10d382eafd55dcb527380376a3aabeb7
|
ac4d105d6265030f5642ea720ffe047b60d623dd
|
refs/heads/master
| 2021-01-22T05:27:52.355217 | 2013-12-16T19:59:01 | 2013-12-16T19:59:01 | 7,990,666 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
This Crawl script assign task from rabbitMQ Queue to
celery task to collect price of book from different
websites.
'''
from tasks import *
def crawl():
    """Queue a celery price-collection task for every review document that
    has no 'Bookadda' entry yet."""
    # Fetch only _id; skip documents that already have a Bookadda field.
    cursor = Review.find({'Bookadda':{'$exists':0}}, {'_id':1})
    size = cursor.count()
    i = 0
    while(i < size):
        rev = cursor[i]
        u = str(rev['_id'])
        # .delay() hands the work to a celery worker via the message broker.
        get_review.delay(u)
        print u
        i += 1

if __name__ == '__main__':
    crawl()
|
UTF-8
|
Python
| false | false | 2,013 |
12,953,621,373,341 |
28c81ff2a33282afe85ec804ae18a6152900ab52
|
4f639ce77efd00e366ff786784cf38193da18858
|
/bots/common/postCreator.py
|
f15c418996d095ae342e706671027a1e042e0fab
|
[] |
no_license
|
indigos33k3r/ScraperBot
|
https://github.com/indigos33k3r/ScraperBot
|
f425b91b08a51b0829d6e9e6af37f125bd854a4e
|
579eb45d7514036b6ff0e2fa79f6f4d1b9fcbc30
|
refs/heads/master
| 2020-04-21T16:34:14.196940 | 2012-06-19T20:10:57 | 2012-06-19T20:10:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import urllib2
import xmlrpclib
import time
import datetime
import dataHandler
import random
from random import randint
import calendar
from wordpress_xmlrpc.base import Client
from wordpress_xmlrpc.methods.posts import GetRecentPosts, NewPost
from wordpress_xmlrpc.wordpress import WordPressPost
class PostCreator():
def __init__( self ):
#=======================================================================
# self.wp_site = "http://localhost/wordpress/xmlrpc.php"
# self.url = "http://localhost/wordpress/xmlrpc.php"
#=======================================================================
self.wp_site = "http://www.hottestporn4u.com/xmlrpc.php"
self.url = "http://www.hottestporn4u.com/xmlrpc.php"
self.login = "pornmaster"
self.password = "pornmasterpiece"
self.wp = Client( self.wp_site, self.login, self.password )
self._server = xmlrpclib.ServerProxy( self.url )
#self._server = xmlrpclib.Server(self.url)
self.blogId = 0
self.dataHandler = dataHandler.DataHandler()
self.categoriesList = self.dataHandler.read_categories()
self.getCategoryList = self.getCategoriesFromBlog()
def get_url_content( self, url ):
try:
content = urllib2.urlopen( url )
return content.read()
except:
print 'error! NOOOOOO!!!'
def enter_WP_user( self ):
user = raw_input ( "WP user >> " )
return user
def enter_WP_password( self ):
password = raw_input ( "WP password >> " )
return password
def uploadFileToWp( self, thumbnail ):
print "Client connected ..."
# set to the path to your file
file_url = thumbnail
extension = file_url.split( "." )
leng = extension.__len__()
extension = extension[leng - 1]
if ( extension == 'jpg' ):
xfileType = 'image/jpeg'
elif( extension == 'png' ):
xfileType = 'image/png'
elif( extension == 'bmp' ):
xfileType = 'image/bmp'
file = self.get_url_content( file_url )
file = xmlrpclib.Binary( file )
server = xmlrpclib.Server( self.wp_site )
filename = str( time.strftime( '%H:%M:%S' ) )
mediarray = {'name':filename + '.' + extension,
'type':xfileType,
'bits':file,
'overwrite':'false'}
xarr = ['1', self.login, self.password, mediarray]
result = server.wp.uploadFile( xarr )
print result
def createPost( self, title, thumbnail, iframe, videoduration, snippets_Duration, categories, url ):
print "WP creating post ..."
average = str( round( self.prepare_rating_for_post(), 2 ) )
number_of_votes = str( self.prepare_number_of_votes() )
dateFormat = self.prepare_post_date()
post = WordPressPost()
post.title = title
post.description = '<div class="hreview-aggregate"><div class="item vcard"><div itemscope itemtype="http://schema.org/VideoObject"><h2 class="fn"><meta itemprop="embedURL" content="' + url + '" />' + iframe + '<p><span itemprop="name">' + title + '</span></h2><meta itemprop="duration" content="' + snippets_Duration + '" /><h3>(' + videoduration + ')</h3><meta itemprop="thumbnailUrl" content="' + thumbnail + '" /><p><span itemprop="description">This video is called ' + title + '</span></div></div><span class="rating"><span class="average">' + average + '</span> out of <span class="best"> 10 </span>based on <span class="votes">' + number_of_votes + ' </span>votes</span><p><img src="' + thumbnail + '" alt="' + title + '"><br></div>'
post.categories = str( self.dataHandler.prepare_categories_for_post( categories, self.categoriesList ) )
post.tags = str( self.dataHandler.prepare_tags_for_post( title ) )
post.date = str( dateFormat )
print "post.title " + post.title
print "post.description " + post.description
print "post.categories " + post.categories
print "post.tags " + post.tags
print "post.date_created " + post.date
#prepare categories
cats = post.categories
cats2 = cats.replace( "'", "" )
cats3 = cats2.replace( "[", "" )
cats4 = cats3.replace( "]", "" )
cats5 = cats4.split( "," )
catAux = []
for cat1 in cats5:
catAux.append( cat1.strip() )
post.categories = catAux
postId = self.wp.call( NewPost( post, True ) )
print "WP " + postId + " uploaded [OK]"
return 0
def prepare_rating_for_post( self ):
var = random.uniform( 7.5, 10 )
return var
def prepare_number_of_votes( self ):
var = random.randrange( 0, 10100, 2 )
return var
def prepare_post_date( self ):
now = datetime.datetime.now()
lastDay = self.get_last_day_of_the_month( now )
if now.day == lastDay.day:
day = randint( 1, lastDay.day )
month = now.month + 1
elif ( calendar.isleap( now.year ) and now.day == 29 ):
day = randint( 1, lastDay.day )
month = now.month + 1
elif ( not( calendar.isleap( now.year ) ) and now.day == 28 ):
day = randint( 1, lastDay.day )
month = now.month + 1
else:
dayRange = now.day + 1
day = randint( now.day, dayRange )
month = now.month
minute = randint( now.minute, 59 )
hour = randint( 0, 23 )
if month < 10:
month = "0" + str( month )
else:
month = str( month )
if day < 10:
day = "0" + str( day )
else:
day = str( day )
if hour < 10:
hour = "0" + str( hour )
else:
hour = str( hour )
if minute < 10:
minute = "0" + str( minute )
else:
minute = str( minute )
if now.second < 10:
second = "0" + str( now.second )
else:
second = str( now.second )
date = str( now.year ) + "" + month + "" + day + "T" + hour + ":" + minute + ":" + second
return str( date )
def get_posts( self, number_of_posts ):
posts = self.wp.call( GetRecentPosts( 10 ) )
return posts
def get_last_day_of_the_month( self, date ):
if date.month == 12:
return date.replace( day = 31 )
return date.replace( month = date.month + 1, day = 1 ) - datetime.timedelta( days = 1 )
def getCategoryList( self ):
    """Get blog's categories list.

    Lazily fetches categories over XML-RPC (mt.getCategoryList) on first
    call and caches them in self.categories; later calls return the cache.
    On an XML-RPC fault it prints an error and implicitly returns None.
    """
    try:
        if not self.categories:
            self.categories = []
            categories = self._server.mt.getCategoryList( self.blogId,
                    self.login, self.password )
            # Normalise each raw category struct before caching it.
            for cat in categories:
                self.categories.append( self._filterCategory( cat ) )
        return self.categories
    except xmlrpclib.Fault, fault:
        # NOTE(review): swallows the fault and returns None — callers that
        # iterate the result will then fail; consider re-raising.
        print "Error: Wordpress exception!"
def getCategoriesFromBlog( self ):
    """Fetch the raw category structs from the Wordpress blog.

    Uses the metaWeblog.getCategories XML-RPC call with the stored
    credentials and returns the result unmodified.
    """
    return self._server.metaWeblog.getCategories( self.blogId, self.login, self.password )
def getCategoryIdFromName( self, name ):
    """Return the id of the category with the given name, or None.

    Bug fix: the original iterated over the bound method object
    ``self.getCategoryList`` instead of calling it, which raised a
    TypeError at runtime.
    """
    for c in self.getCategoryList():
        if c['categoryName'] == name:
            return c['categoryId']
    return None
|
UTF-8
|
Python
| false | false | 2,012 |
10,376,641,022,049 |
a63fa252c1d8700da9a35b17cd97066f63aa06e2
|
53a8bb5b1133c16b3ec48569751f0e802adc882a
|
/external/pymongo.py
|
41f1c33dd443ac3f6c76c0cdac080198be6f9f29
|
[] |
no_license
|
asache3/python-practice
|
https://github.com/asache3/python-practice
|
6600a7fc28ebda40d32ffbad0715801969339e66
|
b236bb3ae791d98f4ad1d25e491e2831c0b5f14a
|
refs/heads/master
| 2016-09-11T06:33:52.007057 | 2013-08-11T04:34:34 | 2013-08-11T04:34:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: UTF-8
# NOTE(review): practice/scratch file of pymongo usage snippets (Python 2).
# Several snippets reference names that are never defined (`users`,
# `new_email`, `facebook`, `pymongo`) and will raise NameError if this file
# is executed top to bottom.
import sys
from datetime import datetime
from pymongo import Connection
from pymongo.errors import ConnectionFailure
# Create a Connection object
try:
    c = Connection(host="localhost", port=27017)
    print "Connected successfully"
except ConnectionFailure, e:
    sys.stderr.write("Could not connect to MongoDB: %s" % e)
    sys.exit(1)
# Get a database handle
dbh = c["mydb"]
user_doc = {
    "username" : "janedoe",
    "firstname" : "Jane",
    "surname" : "Doe",
    "dateofbirth" : datetime(1974, 4, 12),
    "email" : "[email protected]",
    "score" : 0
}
dbh.users.insert(user_doc, safe=True)
print "Successfully inserted document %s" % user_doc
# Write to at least two servers of the replica set
dbh.users.insert(user_doc, w=2)
# Find a single document matching the condition
dbh.users.find_one({"username":"janedoe"})
# Find all documents matching the condition
# NOTE(review): "firsename" is presumably a typo for "firstname", and the
# loop below iterates `users`, which is never assigned the query result.
dbh.users.find({"firsename":"jane"})
for user in users:
    print user.get("email")
# Project specific fields from the matching documents
dbh.users.find({"firstname":"jane"}, {"email":1})
# Count the matching documents
userscount = dbh.users.find().count()
print "There are %d documents in users collection" % userscount
# Sort the query results
# NOTE(review): `pymongo` itself is never imported (only Connection), and
# `sort[...]` on the second line looks like a typo for the sort=[...] kwarg.
dbh.users.find({"firstname":"jane"}).sort(("dateofbirth", pymongo.DESCENDING))
users = dbh.users.find({"firstname":"jane"}, sort[("dateofbirth", pymongo.DESCENDING)])
# Take a limited number of results, with paging via skip()
dbh.users.find().sort(("score", pymongo.DESCENDING)).limit(10)
dbh.users.find().sort(("surname", pymongo.ASCENDING)).limit(20).skip(20)
# Use snapshot mode
for user in dbh.users.find(snapshot=True):
    print user.get("username"), user.get("score", 0)
# Update a document
dbh.users.update({"firstname":"jane"}, {"$set":{"email":"[email protected]", "score":1}}, safe=True)
# Update every document that matches the condition
dbh.users.update({"score":0}, {"$set":{"flagged":True}}, multi=True, safe=True)
# Remove matching documents
dbh.users.remove({"score":1}, safe=True)
# Remove all documents in the collection
dbh.users.remove(None, safe=True)
# Query embedded subdocuments
user_doc = {
    "username":"foouser",
    "twitter":{
        "username":"footwitter",
        "password":"secret",
        "email":"[email protected]"
    },
    "facebook":{
        "username":"foofacebook",
        "password":"secret",
        "email":"[email protected]"
    },
    "irc":{
        "username":"fooirc",
        "password":"secret",
    }
}
# NOTE(review): the key below is unquoted, so `facebook.username` is an
# attribute lookup on an undefined name; should be "facebook.username".
dbh.users.find_one({facebook.username:"foofacebook"})
# Pull values out of an embedded document list
dbh.users.update({"username":"foouser"}, {"$pull":{"emails":{"email":"[email protected]"}}}, safe=True)
dbh.users.update({"username":"foouser"}, {"$pull":{"emails":{"primary":{"$ne":True}}}}, safe=True)
# Push a value onto a document list (`new_email` is never defined here)
dbh.users.update({"username":"foouser"}, {"$push":{"email":new_email}}, safe=True)
# positional operator
dbh.users.update({"emails.email":"[email protected]"}, {"$set":{"emails.$.primary":True}}, safe=True)
dbh.users.update({"emails.email":"[email protected]"}, {"$set":{"emails.$.primary":False}}, safe=True)
# Create an index
dbh.users.create_index("username")
dbh.users.create_index([("first_name", pymongo.ASCENDING), ("last_name", pymongo.ASCENDING)], name="name_idx")
# Create an index in the background
dbh.users.create_index("username", background=True)
# Create a unique index
dbh.users.create_index("username", unique=True)
# Drop duplicate documents while creating a unique index
dbh.users.create_index("username", unique=True, drop_dups=True)
dbh.users.create_index("username", unique=True, dropDups=True)
# Drop an index
dbh.users.drop_index("username_idx")
dbh.users.drop_index([("first_name", pymongo.ASCENDING), ("last_name", pymongo.ASCENDING)])
|
UTF-8
|
Python
| false | false | 2,013 |
42,949,703,921 |
7ea558b4313619fec1915daa13a765ac8f183ddb
|
95836505683660cd30cbf67f8b32ebd1851769ce
|
/server-loadbalancing/plot_wmslist_inALIAS.py
|
a6ed87dc5c422d4d42d7b753d890a1e83cb9e766
|
[] |
no_license
|
danido/WMSMonitor
|
https://github.com/danido/WMSMonitor
|
5728aac3f8604d0df8e5f1fa549fd5552ac07c22
|
01fb0ebe6a5533bc23a7450b5069dd50bb7a497c
|
refs/heads/master
| 2021-01-19T06:04:47.097586 | 2012-11-16T11:01:26 | 2012-11-16T11:01:26 | 6,044,916 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python
# Main program to call sensor functions
import os, commands, sys, fpformat
sys.path.append('/opt/WMSMonitor/collector/bin/')
import readconf_func
confvar=readconf_func.readconf()
import MySQLdb
def plot_wmslist_inALIAS():
    '''plot_wmslist_inALIAS() -> utility to plot on file the list
       of wms in aliases defined for your site

       Reads the enabled aliases and WMS hosts from the WMSMonitor MySQL
       database and writes a human-readable pool report to
       /var/www/html/wmsmon/main/wmspoolinfo.txt.  Exits the process on
       any DB error.
    '''
    fileout=open('/var/www/html/wmsmon/main/wmspoolinfo.txt','w')
    fileout.write('GENERAL INFO ABOUT CNAF WMS/LB INSTANCES POOL ON: ' + commands.getoutput('date'))
    print "Starting db connection"
    # NOTE(review): MySQLdb.connection (lowercase) is the low-level class;
    # the conventional factory is MySQLdb.connect() — confirm intended.
    try:
        db = MySQLdb.connection(host=confvar.get('WMSMON_DB_HOST'),user=confvar.get('WMSMON_DB_USER'),passwd=confvar.get('WMSMON_DB_PWD'),db=confvar.get('WMSMON_DB_NAME'))
    except Exception,e:
        stri2= "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        print stri2
        print "ERROR: Please check mysql daemon is running and connection parameters are correct!"
        sys.exit(1)
    #+++++++++++++++++++++++++++++
    # Load every enabled alias together with its metadata columns.
    try:
        querystr="select alias_name, numout, subtest_enable, idalias from admin_loadbalancing where enable_flag=1;"
        #INITIALIZATION
        aliases=[]
        numout=[]
        subtest_enable=[]
        idalias=[]
        db.query(querystr)
        r = db.store_result()
        row = r.fetch_row(10000)
        host_vo_dict = {}
        if len(row) > 0:
            for line in row:
                #host_vo_dict[hostname]=[idhost,vo,service]
                aliases.append(line[0])
                numout.append(line[1])
                subtest_enable.append(line[2])
                idalias.append(line[3])
    except Exception,e:
        str2= "ERROR READING ALIAS LIST FROM WMSMonitor DB: " + str(e)
        print str2
        sys.exit(1)
    # Print the active WMS hosts (side effect only: rows go to stdout).
    try:
        querystr="select hostname,vo,service from admin_host_labels inner join hosts on hosts.idhost=admin_host_labels.idhost where admin_host_labels.active='1' and service='WMS';"
        db.query(querystr)
        r = db.store_result()
        row = r.fetch_row(10000)
        if len(row) > 0:
            for line in row:
                print line
    except Exception,e:
        str2= "ERROR HOSTS LIST FROM WMSMonitor DB: " + str(e)
        sys.exit(1)
    # For each alias: list its configured (non-spare) WMS hosts and the
    # hosts the DNS alias currently resolves to.
    for ik in range(len(aliases)):
        fileout.write('\n\nALIAS: ' + aliases[ik] + '\n')
        try:
            wmslist=[]
            querystr="select hostname from hosts join admin_wms_alias_list on hosts.idhost=admin_wms_alias_list.idwms where idalias=" + idalias[ik] + " and spare_label='0';"
            db.query(querystr)
            r = db.store_result()
            row = r.fetch_row(10000)
            if len(row) > 0:
                for line in row:
                    #host_vo_dict[hostname]=[idhost,vo,service]
                    wmslist.append(line[0])
        except Exception,e:
            print "ERROR READING WMS LIST FROM WMSMonitor DB: " + str(e)
            print "ERROR: Please check query and DB status"
            sys.exit(1)
        fileout.write('ASSOCIATED WMS LIST: ' + str(wmslist))
        fileout.write('\nWMS CURRENTLY IN THE ALIAS:\n')
        # Resolve the alias via the `host` command and reverse-resolve
        # each address back to a hostname.
        fileout.write(commands.getoutput("for a in `host " + aliases[ik] + ".grid.cnaf.infn.it |awk '{print $4}'` ; do host $a|awk '{print $5}'; done"))
        fileout.write('\n\n')
# Run the report immediately when the module is executed/imported.
plot_wmslist_inALIAS()
|
UTF-8
|
Python
| false | false | 2,012 |
10,591,389,402,669 |
326c588f522fdcc9907e36c4c693b7c3e11f4081
|
cea9dcf76133e1df196210fe375107dcad697841
|
/bot.py
|
642406c724aacdcbac030f2299c65fb2a648b059
|
[
"GPL-3.0-only"
] |
non_permissive
|
jlzirani/yolo-irc-bot
|
https://github.com/jlzirani/yolo-irc-bot
|
c2bf3ebe4e09941b15d7f6bc65f39d3e9398b27f
|
403eeb72b6a0308b4a1f9ef73829bf2fcc73764a
|
refs/heads/master
| 2020-04-14T09:42:01.839506 | 2013-11-26T21:27:37 | 2013-11-26T21:27:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
This file is part of yolo-irc-bot.
Yolo-irc-bot is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Yolo-irc-bot is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Copyright 2013 Jean-Luc Z.
"""
from ircbot.masterMod import MasterMod, botTypeList
from ircbot.rssMod import RssMod
from ircbot.bot import ircBot
import ConfigParser
if __name__ == '__main__':
    # `global` at module level is a no-op; kept for byte-compatibility.
    global botTypeList
    # Register the RSS module type before the master bot starts.
    botTypeList['rss'] = RssMod
    # Connection parameters come from config.ini in the working directory.
    Config = ConfigParser.ConfigParser()
    Config.read("config.ini")
    server = (Config.get('general', 'server'), int(Config.get('general', 'port')))
    masterBot = ircBot([server], Config.get('general', 'masterBot'), Config.get('general', 'chan'))
    masterBot.addModule(MasterMod)
    masterBot.start()
|
UTF-8
|
Python
| false | false | 2,013 |
2,748,779,115,327 |
1082024922dfd39e28962b41528bf70ecd5961af
|
2b20987cafa4d2ba3a81cb5fbd531abaad9c5137
|
/qual1/task2.py
|
e2f288df046738e15c9e8ed58c8d8345818fe658
|
[] |
no_license
|
dmedvinsky/rcc
|
https://github.com/dmedvinsky/rcc
|
f1e871248b1e1750b48cd39bebd7aaf84806f192
|
94a0c48a4f53b1a4ef832057a2cad3f3f6b93223
|
refs/heads/master
| 2016-09-09T21:13:59.889638 | 2011-05-15T15:41:40 | 2011-05-15T15:41:40 | 1,747,103 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
from math import ceil
def solve(data):
    """Return the 1-based index of the cheapest tariff for the given calls.

    ``data`` is a list of input lines: the first line (a count) is discarded,
    the last line holds the call durations, and each remaining line is a
    tariff ``c t p`` (monthly fee in hundreds, free threshold, per-unit
    price).  A call of length T costs ``p * ceil(T / t)`` once ``T >= t``.

    Portability fix: the original used the Python-2-only tuple-parameter
    syntax ``def p((c, t, p))``; this version runs on Python 2 and 3 with
    identical results.
    """
    data.pop(0)  # discard the header line (tariff count)
    calls = [int(x) for x in data.pop().split()]
    tarifs = [[int(x) for x in line.split()] for line in data]

    def _price(tarif):
        c, t, p = tarif
        # Base fee plus the per-call charges for calls at or above threshold t.
        return c * 100 + sum(p * ceil(T / float(t)) for T in calls if T >= t)

    results = [_price(tarif) for tarif in tarifs]
    return results.index(min(results)) + 1
def main():
    """Read the puzzle input from stdin and write the answer to stdout."""
    data = sys.stdin.readlines()
    result = solve(data)
    # Output has no trailing newline, matching the judge's expected format.
    sys.stdout.write(str(result))
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,011 |
9,320,079,080,623 |
bd5fd3d49c8406ba394b696ff4786c7ba9af939a
|
5b8cc893af5073ab02d2eaf5a7b561464acf97a7
|
/maraschino/modules.py
|
53d471bdb9cf302b1089fbae23806f9832a0d902
|
[] |
no_license
|
Mar2zz/maraschino
|
https://github.com/Mar2zz/maraschino
|
74761fc23e8920e2a96afab16d3006ae4b456564
|
2c606bb180ef1c8c209ea5d2a7646da9375d9f1d
|
refs/heads/master
| 2021-01-21T01:16:07.543494 | 2012-01-08T07:07:26 | 2012-01-08T07:07:26 | 3,044,197 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import Flask, jsonify, render_template, request
from maraschino.database import db_session
import copy, json
from Maraschino import app
from settings import *
from maraschino.tools import *
from maraschino.database import *
from maraschino.models import Module
# name, label, description, and static are not user-editable and are taken from here
# poll and delay are user-editable and saved in the database - the values here are the defaults
# settings are also taken from the database - the values here are defaults
# if static = True then poll and delay are ignored
# Catalogue of every dashboard module the user can add.  'name', 'label',
# 'description' and 'static' are fixed here; 'poll', 'delay' and 'settings'
# hold defaults that the database overrides per user.
AVAILABLE_MODULES = [
    {
        'name': 'applications',
        'label': 'Applications',
        'description': 'Allows you to link to whatever applications you want (SabNZBd, SickBeard, etc.)',
        'static': True,
        'poll': 0,
        'delay': 0,
    },
    {
        'name': 'diskspace',
        'label': 'Disk space',
        'description': 'Shows you available disk space on your various drives.',
        'static': False,
        'poll': 350,
        'delay': 0,
    },
    {
        'name': 'library',
        'label': 'Media Library',
        'description': 'Allows you to browse your media library and select items to play in XBMC.',
        'static': True,
        'poll': 0,
        'delay': 0,
    },
    {
        'name': 'recently_added',
        'label': 'Recently added episodes',
        'description': 'Shows you TV Episodes recently added to your library.',
        'static': False,
        'poll': 350,
        'delay': 0,
        'settings': [
            {
                'key': 'num_recent_episodes',
                'value': 3,
                'description': 'Number of episodes to display',
            },
        ]
    },
    {
        'name': 'recently_added_movies',
        'label': 'Recently added movies',
        'description': 'Shows you Movies recently added to your library.',
        'static': False,
        'poll': 350,
        'delay': 0,
        'settings': [
            {
                'key': 'num_recent_movies',
                'value': 3,
                'description': 'Number of movies to display',
            },
        ]
    },
    {
        'name': 'recommendations',
        'label': 'Recommendations',
        'description': 'Movies that may suit you.',
        'static': False,
        'poll': 0,
        'delay': 10,
        'settings': [
            {
                'key': 'trakt_api_key',
                'value': '',
                'description': 'Trakt API Key',
            },
            {
                'key': 'trakt_username',
                'value': '',
                'description': 'Trakt Username',
            },
            {
                'key': 'trakt_password',
                'value': '',
                'description': 'Trakt Password',
            },
        ]
    },
    {
        'name': 'sabnzbd',
        'label': 'SABnzbd+',
        'description': 'Shows you information about your SABnzbd+ downloads.',
        'static': False,
        'poll': 10,
        'delay': 0,
        'settings': [
            {
                'key': 'sabnzbd_host',
                'value': '',
                'description': 'Hostname',
            },
            {
                'key': 'sabnzbd_port',
                'value': '',
                'description': 'Port',
            },
            {
                'key': 'sabnzbd_api',
                'value': '',
                'description': 'API Key',
            },
        ]
    },
    {
        'name': 'synopsis',
        'label': 'Synopsis',
        'description': 'Shows you a plot synopsis of what you are currently watching.',
        'static': True,
        'poll': 0,
        'delay': 0,
    },
    {
        'name': 'trakt',
        'label': 'trakt.tv',
        'description': 'Shows you what people are saying about what you are watching and allows you to add your own comments.',
        'static': True,
        'poll': 0,
        'delay': 0,
        'settings': [
            {
                'key': 'trakt_api_key',
                'value': '',
                'description': 'Trakt API Key',
            },
            {
                'key': 'trakt_username',
                'value': '',
                'description': 'Trakt Username',
            },
            {
                'key': 'trakt_password',
                'value': '',
                'description': 'Trakt Password',
            },
        ]
    },
    {
        'name': 'sickbeard',
        'label': 'Sickbeard Manager',
        'description': 'Manage Sickbeard from within Maraschino',
        'static': True,
        'poll': 0,
        'delay': 0,
        'settings': [
            {
                'key': 'sickbeard_api',
                'value': '',
                'description': 'Sickbeard API Key',
            },
            {
                'key': 'sickbeard_user',
                'value': '',
                'description': 'Sickbeard Username',
            },
            {
                'key': 'sickbeard_password',
                'value': '',
                'description': 'Sickbeard Password',
            },
            {
                'key': 'sickbeard_ip',
                'value': '',
                'description': 'Sickbeard Hostname',
            },
            {
                'key': 'sickbeard_port',
                'value': '',
                'description': 'Sickbeard Port',
            },
            {
                'key': 'sickbeard_compact',
                'value': '0',
                'description': 'Compact view',
                'type': 'bool',
            },
        ]
    },
]
# Default XBMC server connection settings; actual values are stored in the
# settings table and override these defaults when present.
SERVER_SETTINGS = [
    {
        'key': 'server_hostname',
        'value': 'localhost',
        'description': 'XBMC Hostname',
    },
    {
        'key': 'server_port',
        'value': '8080',
        'description': 'XBMC Port ',
    },
    {
        'key': 'server_username',
        'value': '',
        'description': 'XBMC Username',
    },
    {
        'key': 'server_password',
        'value': '',
        'description': 'XBMC Password',
    },
    {
        'key': 'fanart_backgrounds',
        'value': '1',
        'description': 'Show fanart backgrounds when watching media',
        'type': 'bool',
    },
    {
        'key': 'random_backgrounds',
        'value': '0',
        'description': 'Use a random background when not watching media',
        'type': 'bool',
    },
    {
        'key': 'server_macaddress',
        'value': '',
        'description': 'XBMC Mac Address',
    },
]
@app.route('/xhr/add_module_dialog')
@requires_auth
def add_module_dialog():
    """Render the 'add module' dialog, listing modules not yet on the page."""
    placed_names = set(m.name for m in Module.query.all())
    # Offer only the module templates that are not already placed.
    selectable = [tmpl for tmpl in AVAILABLE_MODULES
                  if tmpl['name'] not in placed_names]
    return render_template('add_module_dialog.html',
        available_modules = selectable,
    )
@app.route('/xhr/add_module', methods=['POST'])
@requires_auth
def add_module():
    """Create a module record from the POSTed form and render it.

    Expects form fields module_id, column and position.  Returns the
    rendered placeholder for static modules without settings, the settings
    dialog otherwise, or a JSON error status for invalid input.
    """
    try:
        module_id = request.form['module_id']
        column = request.form['column']
        position = request.form['position']
        # make sure that it's a valid module
        module_info = get_module_info(module_id)
        if not module_info:
            raise Exception
    except:
        return jsonify({ 'status': 'error' })
    module = Module(
        module_info['name'],
        column,
        position,
        module_info['poll'],
        module_info['delay'],
    )
    db_session.add(module)
    # if module template has extra settings then create them in the database
    # with default values if they don't already exist
    if 'settings' in module_info:
        for s in module_info['settings']:
            setting = get_setting(s['key'])
            if not setting:
                setting = Setting(s['key'], s['value'])
                db_session.add(setting)
    db_session.commit()
    module_info['template'] = '%s.html' % (module_info['name'])
    # if the module is static and doesn't have any extra settings, return
    # the rendered module
    if module_info['static'] and not 'settings' in module_info:
        return render_template('placeholder_template.html',
            module = module_info
        )
    # otherwise return the rendered module settings dialog
    else:
        return module_settings_dialog(module_info['name'])
@app.route('/xhr/rearrange_modules', methods=['POST'])
@requires_auth
def rearrange_modules():
    """Persist the column/position of each module from a JSON payload.

    Best-effort: a module entry that fails to update is silently skipped
    so a partial rearrange still commits.
    """
    try:
        modules = json.JSONDecoder().decode(request.form['modules'])
    except:
        return jsonify({ 'status': 'error' })
    for module in modules:
        try:
            m = Module.query.filter(Module.name == module['name']).first()
            m.column = module['column']
            m.position = module['position']
            db_session.add(m)
        except:
            # Deliberate best-effort: ignore unknown/invalid entries.
            pass
    db_session.commit()
    return jsonify({ 'status': 'success' })
@app.route('/xhr/remove_module/<name>', methods=['POST'])
@requires_auth
def remove_module(name):
    """Delete the named module record from the database."""
    doomed = Module.query.filter(Module.name == name).first()
    db_session.delete(doomed)
    db_session.commit()
    return jsonify({ 'status': 'success' })
@app.route('/xhr/module_settings_dialog/<name>')
@requires_auth
def module_settings_dialog(name):
    """Render the settings dialog for a placed module.

    Merges the module template (AVAILABLE_MODULES) with the per-user values
    stored in the database (poll, delay and each extra setting).

    Bug fix: the original used copy.copy(), a shallow copy, so assigning
    s['value'] below mutated the shared default dicts inside
    AVAILABLE_MODULES; deepcopy keeps the defaults pristine.
    """
    module_info = get_module_info(name)
    module_db = get_module(name)
    if module_info and module_db:
        # deep-copy the module template so DB values never leak into it
        module = copy.deepcopy(module_info)
        # look up poll and delay from the database
        module['poll'] = module_db.poll
        module['delay'] = module_db.delay
        # iterate through possible settings and get values from database
        if 'settings' in module:
            for s in module['settings']:
                setting = get_setting(s['key'])
                if setting:
                    s['value'] = setting.value
        return render_template('module_settings_dialog.html',
            module = module,
        )
    return jsonify({ 'status': 'error' })
@app.route('/xhr/module_settings_cancel/<name>')
@requires_auth
def module_settings_cancel(name):
    """Abort settings editing and re-render the module placeholder."""
    info = get_module_info(name)
    if not info:
        return jsonify({ 'status': 'error' })
    info['template'] = '%s.html' % (info['name'])
    return render_template('placeholder_template.html',
        module = info,
    )
@app.route('/xhr/module_settings_save/<name>', methods=['POST'])
@requires_auth
def module_settings_save(name):
    """Persist a module's settings from a JSON list of {name, value} pairs.

    poll/delay live on the module row itself; everything else goes into the
    generic settings table.  Returns the re-rendered module (or, for the
    pseudo-module 'server_settings', the updated settings dialog).
    """
    try:
        settings = json.JSONDecoder().decode(request.form['settings'])
    except:
        return jsonify({ 'status': 'error' })
    for s in settings:
        # poll and delay are stored in the modules tables
        if s['name'] == 'poll' or s['name'] == 'delay':
            module = get_module(name)
            if s['name'] == 'poll':
                module.poll = int(s['value'])
            if s['name'] == 'delay':
                module.delay = int(s['value'])
            db_session.add(module)
        # other settings are stored in the settings table
        else:
            setting = get_setting(s['name'])
            if not setting:
                setting = Setting(s['name'])
            setting.value = s['value']
            db_session.add(setting)
    db_session.commit()
    # you can't cancel server settings - instead, return an updated template
    # with 'Settings saved' text on the button
    if name == 'server_settings':
        return server_settings_dialog(updated=True)
    # for everything else, return the rendered module
    return module_settings_cancel(name)
@app.route('/xhr/server_settings_dialog')
@requires_auth
def server_settings_dialog(updated=False):
    """Render the XBMC server settings dialog with current stored values.

    Bug fix: the original used copy.copy(), a shallow copy of the list, so
    assigning s['value'] mutated the shared default dicts in
    SERVER_SETTINGS; deepcopy keeps the defaults pristine.
    """
    settings = copy.deepcopy(SERVER_SETTINGS)
    for s in settings:
        setting = get_setting(s['key'])
        if setting:
            s['value'] = setting.value
    return render_template('server_settings_dialog.html',
        server_settings = settings,
        updated = updated,
    )
# helper method which returns a module record from the database
def get_module(name):
    """Return the Module DB record with the given name, or None.

    Returns None both when no record matches and when the query itself
    fails (e.g. before the table exists).
    """
    try:
        return Module.query.filter(Module.name == name).first()
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return None
# helper method which returns a module template
def get_module_info(name):
    """Return the AVAILABLE_MODULES template dict for *name*, or None."""
    matches = (tmpl for tmpl in AVAILABLE_MODULES if tmpl['name'] == name)
    return next(matches, None)
|
UTF-8
|
Python
| false | false | 2,012 |
10,995,116,296,089 |
cfa75aef9aa2656c8fccaa3f3812475defa9a552
|
21dc6c75dbd0544a117ed4d78303725eeae22d1e
|
/blueprints/donations/ipn.py
|
0721a1e7f75b827c5fb970b0e4cadcd6c29511e5
|
[
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft"
] |
non_permissive
|
JunctionAt/JunctionWWW
|
https://github.com/JunctionAt/JunctionWWW
|
14258474c5510a3c9e6ab5bb144030b8440f8a7f
|
261c40ad82dce21078a32ccac54c0310dc97b95e
|
refs/heads/master
| 2020-04-18T23:18:09.552836 | 2014-09-15T21:01:13 | 2014-09-15T21:01:13 | 24,073,712 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import request, current_app
import requests
from itsdangerous import BadData, BadPayload, BadSignature
from . import blueprint
from models.donation_model import DonationTransaction, DonationTransactionStatus, TransactionLog
from . import username_signer
is_debug = current_app.config['PAYPAL_IPN_DEBUG_MODE']
@blueprint.route('/donate/ipn_callback', methods=['POST'])
# @csrf.exempt
def ipn_listener():
    """Receive a PayPal IPN POST, post it back to PayPal for validation,
    and hand verified notifications to process_transaction().

    Raises InvalidResponseError when PayPal does not answer VERIFIED.
    """
    #arg = ''
    values = request.form.to_dict()
    #for x, y in values.iteritems():
    #    if len(arg) is not 0:
    #        arg += "&"
    #    arg += "%s=%s"% (x, y,)
    #arg += ""
    # PayPal's IPN protocol: echo the payload back with this command.
    values['cmd'] = "_notify-validate"
    # Use the sandbox endpoint in debug mode.
    if not is_debug:
        validate_url = "https://www.paypal.com/cgi-bin/webscr"
    else:
        validate_url = "https://www.sandbox.paypal.com/cgi-bin/webscr"
    #print values
    #print 'Validating IPN using %s' % validate_url
    r = requests.post(validate_url, data=values, headers={
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.paypal.com",
        "Connection": "Close"
    })
    #print r.text
    if 'VERIFIED' in r.text:
        #print "PayPal transaction was verified successfully."
        if is_debug:
            print values
        else:
            process_transaction(values)
        #payer_email = request.form.get('payer_email')
        #print "Pulled {email} from transaction".format(email=payer_email)
    else:
        pass
        # NOTE(review): the `pass` above is dead code — the raise still
        # executes, so the print/return below never run for invalid IPNs.
        raise InvalidResponseError()
        #print 'Paypal IPN string {arg} did not validate'.format(arg=arg)
    print r.status_code
    return r.text
def process_transaction(data):
    """Record a verified PayPal IPN payload in the donation models.

    Logs the raw payload, then creates (or reuses) the DonationTransaction
    for this txn id and appends a status event reflecting this IPN.
    """
    # Get the username: it travels in the signed 'custom' field; any
    # missing/tampered value degrades to an anonymous donation.
    if not data.get("custom", None) or data.get("custom", None) == "None":
        username = None
    else:
        try:
            username = username_signer.loads(data["custom"])
        except (BadPayload, BadData, BadSignature):
            username = None
    # Keep the raw payload for auditing regardless of outcome.
    TransactionLog(data=data, username=username).save()
    # Refunds/reversals reference the original via parent_txn_id.
    txn_id = data.get("parent_txn_id") or data.get("txn_id")
    # Check if the transaction already exists in db.
    transaction = DonationTransaction.objects(transaction_id=txn_id).first()
    if transaction:
        pass
    else:
        transaction = DonationTransaction()
        transaction.username = username
        transaction.email = data["payer_email"]
        transaction.gross = float(data.get("mc_gross", 0))
        transaction.fee = float(data.get("mc_fee", 0))
        transaction.amount = transaction.gross - transaction.fee
        transaction.payment_type = data.get("payment_type", "")
        transaction.transaction_id = txn_id
    # Every IPN (new or repeated txn) appends a status event.
    transaction_status = DonationTransactionStatus()
    transaction_status.status = data["payment_status"]
    transaction_status.reason = data.get("pending_reason", None) or data.get("reason_code", None)
    transaction_status.valid = validate_transaction(data)
    transaction_status.gross = float(data.get("mc_gross", 0))
    transaction_status.fee = float(data.get("mc_fee", 0))
    transaction_status.complete_data = data
    transaction.payment_status_events.append(transaction_status)
    transaction.valid = validate_transaction(data)
    transaction.save()
    #cache.delete_memoized('donation_stats_data')
def validate_transaction(data):
    """Return True when the IPN payment_status counts as a valid payment."""
    accepted_statuses = ("Canceled_Reversal", "Completed", "Pending", "Processed")
    return data["payment_status"] in accepted_statuses
class InvalidResponseError(Exception):
    """Raised when PayPal does not answer VERIFIED to an IPN validation."""
    pass
|
UTF-8
|
Python
| false | false | 2,014 |
19,301,583,054,445 |
096649d54e6f5fc0e9af1b64c8f5e50549a7730c
|
a53fafdeecff389d3bf6da46620b6f299caf3556
|
/balls/balls.py
|
eaef7925fd380589b714823d7071342d16ab035e
|
[] |
no_license
|
cathoderay/lumino-projects
|
https://github.com/cathoderay/lumino-projects
|
a3e4f80b228e860cb861ded7ab84279bed8a86b0
|
0f3b36d5b07838b5f503764a265ffba82c1af801
|
refs/heads/master
| 2021-01-01T19:35:30.068976 | 2013-07-31T02:53:30 | 2013-07-31T02:53:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame
from lumino import Lumino
# Fullscreen window size for the visualisation.
resolution = (1024, 768)
pygame.init()
screen = pygame.display.set_mode(resolution)
pygame.display.toggle_fullscreen()
# Lumino device attached over USB serial — presumably a two-channel
# sensor board; see the lumino module for the protocol.
l = Lumino('/dev/ttyUSB0')
running = True
while running:
    screen.fill((0, 0, 0))
    # Each reading drives the radius of one circle (scaled down by 3).
    ra, rb = l.get()
    pygame.draw.circle(screen, (255, 0, 0), (300, 300), ra/3, 0)
    pygame.draw.circle(screen, (0, 255, 0), (700, 300), rb/3, 0)
    pygame.display.update()
    # ESC quits the loop.
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN and \
           event.key == pygame.K_ESCAPE:
            running = False
pygame.quit()
|
UTF-8
|
Python
| false | false | 2,013 |
1,202,590,861,789 |
30cda33f87de4280cf77fe66cbe436f971fa1ec3
|
236fcc69680105959d6adee1a227573824a0cc5a
|
/fts.py
|
09465329bbf729351ecf03e946f8ca69a04a818c
|
[] |
no_license
|
abarnert/py-fts
|
https://github.com/abarnert/py-fts
|
1c6270a551e176b8e4472c8c0023007b3adbe9e9
|
ada9c0a86436a6e71fac71e758a56ad47505ab21
|
refs/heads/master
| 2016-09-06T13:39:03.932310 | 2012-11-16T12:06:53 | 2012-11-16T12:06:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from ctypes import *
class Timespec(Structure):
    # NOTE(review): despite the name, these are the fields of C `struct tm`
    # (broken-down calendar time), not `struct timespec` (tv_sec/tv_nsec).
    # The st_*timespec members of Stat below therefore look mis-mapped —
    # confirm against <sys/stat.h> before trusting any time values.
    _fields_ = [('tm_sec', c_int),
                ('tm_min', c_int),
                ('tm_hour', c_int),
                ('tm_mday', c_int),
                ('tm_mon', c_int),
                ('tm_year', c_int),
                ('tm_wday', c_int),
                ('tm_yday', c_int),
                ('tm_isdst', c_int),
                ('tm_zone', c_char_p),
                ('tm_gmtoff', c_long)]
class Stat(Structure):
    # ctypes mirror of `struct stat` — apparently the macOS/Darwin 64-bit
    # inode layout (st_ino is 64-bit and birthtime is present); field
    # widths should be verified against <sys/stat.h> on the target system.
    _fields_ = [('st_dev', c_uint32),
                ('st_mode', c_uint16),
                ('st_nlink', c_uint16),
                ('st_ino', c_uint64),
                ('st_uid', c_uint32),
                ('st_gid', c_uint32),
                ('st_rdev', c_uint32),
                ('st_atimespec', Timespec),
                ('st_mtimespec', Timespec),
                ('st_ctimespec', Timespec),
                ('st_birthtimespec', Timespec),
                ('st_size', c_uint64),
                ('st_blocks', c_uint64),
                ('st_blksize', c_uint32),
                ('st_flags', c_uint32),
                ('st_gen', c_uint32),
                ('st_lspare', c_uint32),
                ('st_qspare0', c_uint64),
                ('st_qspare1', c_uint64)]
class FTSENT(Structure):
    # ctypes mirror of the fts(3) FTSENT record returned by fts_read().
    # fts_name is declared as a 1-char array; the real struct is presumably
    # variable-length with the name stored inline past the struct end.
    # The @NN comments record the original author's byte-offset notes.
    _fields_ = [('fts_cycle', c_void_p),
                ('fts_parent', c_void_p),
                ('fts_link', c_void_p),
                ('fts_number', c_long),
                ('fts_pointer', c_void_p),
                ('fts_accpath', c_char_p),
                ('fts_path', c_char_p),
                ('fts_errno', c_int),
                ('fts_symfd', c_int),
                ('fts_pathlen', c_ushort),
                ('fts_namelen', c_ushort), # @66
                ('fts_ino', c_uint64), # @72
                ('fts_dev', c_uint32), # @80
                ('fts_nlink', c_uint16), #@84
                ('fts_level', c_short), #@86
                ('fts_info', c_ushort), #@88
                ('fts_flags', c_ushort), #@90
                ('fts_instr', c_ushort), #@92
                ('fts_statp', POINTER(Stat)), #@96
                ('fts_name', c_char*1)]
# Load the C library (macOS .dylib path) and declare the fts(3) prototypes
# so ctypes marshals arguments and return values correctly.
libc = CDLL('/usr/lib/libc.dylib', use_errno=True)
libc.fts_open.argtypes=[POINTER(c_char_p), c_int, c_void_p]
libc.fts_open.restype=c_void_p
libc.fts_close.argtypes=[c_void_p]
libc.fts_close.restype=c_int
libc.fts_read.argtypes=[c_void_p]
libc.fts_read.restype=POINTER(FTSENT)
# Option flags for fts_open(), transcribed from <fts.h>.
FTS_COMFOLLOW = 0x001 # /* follow command line symlinks */
FTS_LOGICAL = 0x002 # /* logical walk */
FTS_NOCHDIR = 0x004 # /* don't change directories */
FTS_NOSTAT = 0x008 # /* don't get stat info */
FTS_PHYSICAL = 0x010 # /* physical walk */
FTS_SEEDOT = 0x020 # /* return dot and dot-dot */
FTS_XDEV = 0x040 # /* don't cross devices */
FTS_WHITEOUT = 0x080 # /* return whiteout information */
FTS_COMFOLLOWDIR=0x400# /* (non-std) follow command line symlinks for directories only */
FTS_OPTIONMASK= 0x4ff # /* valid user option mask */
FTS_NAMEONLY = 0x100 # /* (private) child names only */
FTS_STOP = 0x200 #
class fts(object):
    """Iterator wrapper around libc's fts(3) hierarchy-traversal API.

    Yields FTSENT records for each entry under *path*; close() releases
    the underlying handle (also called from __del__).
    """

    def __init__(self, path, flags=FTS_PHYSICAL):
        self.path = path
        # fts_open() expects a NULL-terminated argv-style array of paths.
        argv = ARRAY(c_char_p, 2)(path.encode('utf8'), None)
        self.fts = libc.fts_open(argv, flags, None)

    def close(self):
        # Idempotent: release the handle at most once.
        if self.fts:
            libc.fts_close(self.fts)
            self.fts = None

    def __del__(self):
        self.close()

    def __iter__(self):
        return self

    def __next__(self):
        entry = libc.fts_read(self.fts)
        if not entry:
            # NULL pointer from fts_read() means the traversal is done.
            raise StopIteration
        return entry.contents

    def next(self):
        # Python 2 iteration protocol alias.
        return self.__next__()
def test():
    """Walk the current directory and print every path seen by fts."""
    walker = fts('.')
    for entry in walker:
        print(entry.fts_path)
    walker.close()
|
UTF-8
|
Python
| false | false | 2,012 |
17,832,704,241,784 |
27c9dc700195bee06cc6b7f5192193a3381749db
|
d10f70595d37d0c15137b50890a4e1d6117d53cd
|
/src/constructor/tests.py
|
ac322fac7554eb23fba42d39ea3f80ff3ccaa338
|
[] |
no_license
|
aganzha/proto-constructor
|
https://github.com/aganzha/proto-constructor
|
a17803ab08a7401ef42123163304cfba67efadf6
|
e93ee11b4a18a2f2b92c0243d2622ac33958bfad
|
refs/heads/master
| 2015-08-09T01:13:42.817266 | 2013-10-30T23:16:31 | 2013-10-30T23:16:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding:utf-8 -*-
from django.utils import unittest
import models
from django.contrib.auth.models import User
import simplejson
from django.test.client import Client
# from django.core.files import File
# from django.conf import settings
# import os, json
class AppTestCase(unittest.TestCase):
username = 'john'
password = 'johnpassword'
def setUp(self):
    """Create the fixture Django user and an App owned by it."""
    self.django_user = User.objects.create_user(self.username,
                                                '[email protected]',
                                                self.password)
    self.app = models.App(title='test', user=self.django_user)
    self.app.save()
def tearDown(self):
    """Delete the fixture objects (app first, then its owner)."""
    self.app.delete()
    self.django_user.delete()
def makeClient(self):
    """Return a test client already logged in as the fixture user."""
    client = Client()
    client.login(username=self.username, password=self.password)
    return client
def testStages(self):
    """Exercise the stage REST API: POST, validation, GET, PUT, DELETE
    and stage copying (POST with a 'src' reference)."""
    c = self.makeClient()
    # POST------------------------------------
    di = {'app':self.app.pk, 'title':'TestStage1', 'img':'data:image/gif;base64,'}
    response = c.post('/constructor/api/stage/',
                      content_type='application/json',
                      data=simplejson.dumps(di))
    self.assertTrue(response.status_code,201)
    json = simplejson.loads(response.content)
    self.assertTrue(di.get('title') in json.get('img'))
    stages = self.app.stage_set.all()
    # remember length
    le = len(stages)
    self.assertEqual(stages[0].title, di['title'])
    self.assertTrue(response.has_header('Location'))
    # Parse the created stage id from the Location header.
    _id = None
    for h,v in response.items():
        if h == 'Location':
            _id = int(v.split('/').pop())
    self.assertEqual(_id, stages[0].pk)
    # The first stage becomes the app's start stage.
    self.assertEqual(stages[0],models.App.objects.all()[0].startStage)
    # POST with bad image -----------------------
    fail = c.post('/constructor/api/stage/',
                  content_type='application/json',
                  data=simplejson.dumps({'app':1,'title':'title','img':'wrong image'}))
    self.assertEqual(fail.status_code,400)
    self.assertEqual(len(stages),le)
    # GET ------------------------------
    response = c.get('/constructor/api/stage/',{'app':self.app.pk})
    stages = simplejson.loads(response.content)
    self.assertEqual(stages[0].get('id'),_id)
    # PUT ------------------------------
    di['title'] = di['title'].replace('1','2')
    put = c.put('/constructor/api/stage/'+str(_id)+'/',
                content_type='application/json',
                data=simplejson.dumps(di))
    self.assertEqual(put.status_code,200)
    stages = self.app.stage_set.all()
    self.assertEqual(stages[0].title,'TestStage2')
    # DELETE ---------------------------
    delete = c.delete('/constructor/api/stage/'+str(_id)+'/')
    self.assertEqual(delete.status_code, 200)
    stages = self.app.stage_set.all()
    self.assertEqual(len(stages),0)
    self.assertEqual(self.app.startStage,None)
    # TEST COPY (POST)------------------
    test_image_content = 'testimagecontent'
    di = {'app':self.app.pk, 'title':'TestStage3', 'img':'data:image/gif;base64,'+
          test_image_content.encode('base64')}
    response = c.post('/constructor/api/stage/',
                      content_type='application/json',
                      data=simplejson.dumps(di))
    self.assertTrue(response.status_code,201)
    for h,v in response.items():
        if h == 'Location':
            _id = int(v.split('/').pop())
    # A POST with 'src' copies the referenced stage's image.
    di1 = {'app':self.app.pk, 'title':'TestStage4', 'src':_id}
    response1 = c.post('/constructor/api/stage/',
                       content_type='application/json',
                       data=simplejson.dumps(di1))
    self.assertTrue(response1.status_code,201)
    json = simplejson.loads(response1.content)
    self.assertTrue(di.get('title') in json.get('img'))
    stages = [s for s in self.app.stage_set.all()]
    copy = stages.pop()
    original = stages.pop()
    copy.img.open()
    src=copy.img.read()
    self.assertEqual(src, test_image_content)
    copy.img.close()
    self.assertTrue(original.hasCopies)
    self.assertTrue(copy.hasCopies)
    # DELETE copy---------------------------
    delete = c.delete('/constructor/api/stage/'+str(original.pk)+'/')
    self.assertEqual(delete.status_code, 200)
    # Verify here that the copy's image file was not deleted as well
    copy.img.open()
    src=copy.img.read()
    self.assertEqual(src, test_image_content)
    copy.img.close()
def testMaps(self):
    """Full CRUD round-trip for the /constructor/api/map/ endpoint."""
    stage = models.Stage(app_id=self.app.pk, title='stage1', img='hohoi')
    stage.save()
    c = self.makeClient()
    di = {'stage':stage.pk, 'coord':'coord', 'action':1, 'target':stage.pk}
    # POST ------------------------------
    response = c.post('/constructor/api/map/',
                      content_type='application/json',
                      data=simplejson.dumps(di))
    # NOTE(review): assertTrue's second argument is a failure *message*, so
    # this only checks the status code is truthy; assertEqual(...) was
    # probably intended.
    self.assertTrue(response.status_code,201)
    maps = stage.map_set.all()
    self.assertEqual(maps[0].action, 1)
    self.assertTrue(response.has_header('Location'))
    # Extract the id of the created map from the Location header.
    _id = None
    for h,v in response.items():
        if h == 'Location':
            _id = int(v.split('/').pop())
    self.assertEqual(_id, maps[0].pk)
    # GET ------------------------------
    response = c.get('/constructor/api/map/',{'stage':stage.pk})
    maps = simplejson.loads(response.content)
    self.assertEqual(maps[0].get('id'),_id)
    # PUT ------------------------------
    di['action'] = 2
    put = c.put('/constructor/api/map/'+str(_id)+'/',
                content_type='application/json',
                data=simplejson.dumps(di))
    self.assertEqual(put.status_code,200)
    maps = stage.map_set.all()
    self.assertEqual(maps[0].action,2)
    # return
    # DELETE ---------------------------
    delete = c.delete('/constructor/api/map/'+str(_id)+'/')
    self.assertEqual(delete.status_code, 200)
    maps = stage.map_set.all()
    self.assertEqual(len(maps),0)
def testApp(self):
    """A PUT on the app resource should switch startStage between stages."""
    first = models.Stage(app_id=self.app.pk, title='stage1', img='data:image/gif;base64,')
    first.save()
    second = models.Stage(app_id=self.app.pk, title='stage2', img='data:image/gif;base64,')
    second.save()
    client = self.makeClient()
    client.login(username='john', password='johnpassword')
    url = '/constructor/api/app/' + str(self.app.pk) + '/'
    # Point startStage at stage2 first, then back at stage1.
    for target in (second, first):
        response = client.put(url,
                              content_type='application/json',
                              data=simplejson.dumps({'startStage': target.id}))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(models.App.objects.all()[0].startStage, target)
def testSecurity(self):
    """Every API endpoint must reject unauthenticated clients with 401."""
    anon = Client()  # deliberately not logged in
    response = anon.put('/constructor/api/app/' + str(self.app.pk) + '/',
                        content_type='application/json',
                        data=simplejson.dumps({}))
    self.assertEqual(response.status_code, 401)
    response = anon.post('/constructor/api/stage/',
                         content_type='application/json',
                         data=simplejson.dumps({'app': self.app.pk}))
    self.assertEqual(response.status_code, 401)
    response = anon.get('/constructor/api/stage/', {'app': self.app.pk})
    self.assertEqual(response.status_code, 401)
    stage = models.Stage(app_id=self.app.pk, title='stage1', img='hohoi')
    stage.save()
    response = anon.put('/constructor/api/stage/' + str(stage.pk))
    self.assertEqual(response.status_code, 401)
    response = anon.post('/constructor/api/map/',
                         content_type='application/json',
                         data=simplejson.dumps({'stage': stage.pk}))
    self.assertEqual(response.status_code, 401)
    response = anon.get('/constructor/api/map/', {'stage': stage.pk})
    self.assertEqual(response.status_code, 401)
    map_obj = models.Map(stage=stage, coord='map1', target=stage, action=1)
    map_obj.save()
    response = anon.put('/constructor/api/map/' + str(map_obj.pk),
                        content_type='application/json',
                        data=simplejson.dumps({}))
    self.assertEqual(response.status_code, 401)
|
UTF-8
|
Python
| false | false | 2,013 |
4,724,464,034,633 |
8f860242c20c7e97e607a0579009a99d4bac1bb8
|
2f1224559dc2317e5a741512a94e07bde878aa64
|
/server/src/wscript
|
56ad8db3e7d214bc012255c7e7613c9cb0a52ffd
|
[
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-or-later",
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
diegotoral/ufrn-2do
|
https://github.com/diegotoral/ufrn-2do
|
5b21f148266d93289179499b3bf615832ed32f63
|
9601fb36bdea2dd84c99afe5a30d4c5db3bc04c9
|
refs/heads/master
| 2016-09-07T04:41:30.849113 | 2012-10-18T23:08:27 | 2012-10-18T23:08:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
def build(bld):
    """waf build rule: compile the Vala sources into the 2do-server binary."""
    task_options = {
        'features': 'c cprogram',
        'source': '2do-main.vala 2do-server.vala',
        'target': '2do-server',
        'install_binding': False,
        'uselib': 'LIBSOUP GEE1',
        'packages': 'libsoup-2.4 sqlite3 gee-1.0',
    }
    bld(**task_options)
|
UTF-8
|
Python
| false | false | 2,012 |
9,955,734,243,681 |
f3f106b8d1cbbc87b066619284129a84b1725759
|
dfba6c60baf82ac569264f3d2eee858a342f72fd
|
/mercurial_reviewboard/tests/test_getreviewboard.py
|
e55faa6f403fe2fbb5848115cd9ff0fc2b03cb1b
|
[
"MIT"
] |
permissive
|
luxbet/mercurial-reviewboard
|
https://github.com/luxbet/mercurial-reviewboard
|
bf8fbe0e3e6240a997bbfbc90bd9c40d399a7d5f
|
56d3e683b45be80af69d191a85911a2ef26bd498
|
refs/heads/master
| 2016-09-06T09:55:17.193864 | 2013-12-03T21:57:49 | 2013-12-03T21:57:49 | 3,703,304 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from mock import patch
from mercurial_reviewboard import getreviewboard
from mercurial_reviewboard.tests import get_initial_opts, mock_ui
@patch('mercurial_reviewboard.make_rbclient')
def test_get_credentials_from_config(mock_reviewboard):
    """getreviewboard should forward the configured username/password
    ('foo'/'bar', supplied by the mock_ui fixture) to make_rbclient."""
    # username and password configs are included
    # in the mock
    ui = mock_ui()
    opts = get_initial_opts()
    getreviewboard(ui, opts)
    # proxy defaults to None and apiver to '' when not configured.
    mock_reviewboard.assert_called_with('http://example.com',
                                        'foo', 'bar', proxy=None, apiver='')
|
UTF-8
|
Python
| false | false | 2,013 |
4,492,535,813,077 |
95941bf4f8a5e55a6d86c9319134308f450283d8
|
63aa949c1e03977b460bea626ae07cf8744e65e5
|
/fwiktr_web.py
|
aca9604a43085613edf6ae07be39576afc8f40e4
|
[] |
no_license
|
qdot/fwiktr
|
https://github.com/qdot/fwiktr
|
a34e6e6db7b9b364286d730a0046b46ac3df071e
|
9714123f576c76a0df700b699220a2a999914f16
|
refs/heads/master
| 2016-09-15T14:27:35.509916 | 2009-08-10T03:29:20 | 2009-08-10T03:29:20 | 220,052 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import time
import os
import sys
import twitter
import urllib
import math
import random
import uuid
import ConfigParser
import pycurl
import cgi
import re
from xml.parsers.xmlproc import xmlval
from flickrapi import FlickrAPI
from nltk import tag, corpora, tokenize
# Top-level XML template for one complete fwiktr record; %(...)s slots are
# filled from the dict assembled in Fwiktr.CreateArt.
fwiktr_xml = '''<?xml version="1.0"?>
<!DOCTYPE fwiktr SYSTEM "fwiktr.dtd">
<fwiktr>
<language>
<language_method>%(language_method)s</language_method>
<language_description>%(language_description)s</language_description>
<language_output>%(language_output)s</language_output>
<language_result>%(language_result)s</language_result>
</language>
<art>
%(art_tags)s
</art>
<post>
<post_author_name>%(post_author_name)s</post_author_name>
<post_location>%(post_location)s</post_location>
<post_text>%(post_text)s</post_text>
<post_date>%(post_date)s</post_date>
%(post_info)s
</post>
<picture>
<picture_title>%(picture_title)s</picture_title>
%(picture_info)s
</picture>
<transforms>
%(transforms)s
</transforms>
</fwiktr>
'''
# Per-step record appended by FwiktrTransformManager._BuildTransformXML.
transform_info_xml = '''
<transform>
<transform_name>%(transform_name)s</transform_name>
<transform_description>%(transform_description)s</transform_description>
<transform_step>%(transform_step)s</transform_step>
<transform_output>%(transform_output)s</transform_output>
</transform>
'''
# Twitter-specific block for the %(post_info)s slot.
twitter_info_xml = '''
<twitter>
<twitter_post_id>%(twitter_post_id)s</twitter_post_id>
<twitter_author_id>%(twitter_author_id)s</twitter_author_id>
</twitter>
'''
# Flickr-specific block for the %(picture_info)s slot.
flickr_info_xml = '''
<flickr>
<flickr_farm>%(flickr_farm)s</flickr_farm>
<flickr_server>%(flickr_server)s</flickr_server>
<flickr_photo_id>%(flickr_photo_id)s</flickr_photo_id>
<flickr_owner_id>%(flickr_owner_id)s</flickr_owner_id>
<flickr_secret>%(flickr_secret)s</flickr_secret>
</flickr>
'''
# Module-level curl handle, reused for every upload in Fwiktr._PostDataToWeb.
gCurl = pycurl.Curl()
def OutputTagList(tag_list):
    """Render tag_list as a <tags><tag>...</tag>...</tags> XML fragment.

    Anything that is not a list yields the empty string.
    """
    if not isinstance(tag_list, list):
        return ""
    pieces = ["<tags>"]
    for entry in tag_list:
        pieces.append("<tag>%s</tag>" % entry)
    pieces.append("</tags>")
    return "".join(pieces)
class Callable:
    # Pre-Python-2.4 idiom that lets a function attached to a class be
    # invoked without an instance (used for
    # FwiktrTransformManager.ClearTransformInfo below); the @staticmethod
    # decorator supersedes this pattern.
    def __init__(self, anycallable):
        self.__call__ = anycallable
#
# Base Classes
#
class FwiktrServiceManager:
    """Base class for services configured through ~/.fwiktrrc.

    Subclasses must implement _SetupService(), which is invoked from
    __init__ after the config cache is prepared.
    """
    def __init__(self):
        self._config = None
        self._SetupService()

    def _GetOption(self, option):
        """Return the [Fwiktr] config value for *option*, or None if absent.

        Fix: the original bare ``except:`` also swallowed
        KeyboardInterrupt/SystemExit; narrowed to the errors ConfigParser
        raises for a missing section/option.
        """
        try:
            return self._GetConfig().get('Fwiktr', option)
        except ConfigParser.Error:
            return None

    def _GetConfig(self):
        # Lazily parse the config file on first use and cache the parser.
        if not self._config:
            self._config = ConfigParser.ConfigParser()
            self._config.read(os.path.expanduser('~/.fwiktrrc'))
        return self._config
class FwiktrTransformManager:
    """Base class for tag/picture transforms.

    The transform log (step counter and concatenated <transform> XML) is
    kept in *class-level* attributes, so it is shared by every transform
    instance and must be reset once per run via ClearTransformInfo().
    Subclasses implement _Run() and usually set _name/_description/_output.
    """
    step = 0            # next step index, shared across all instances
    transform_xml = ""  # accumulated <transform> records, shared as well
    def __init__(self):
        self._before = None
        self._after = None
        self._output = None
        self._description = "Generic Transform Description. PLEASE CHANGE."
        self._name = "Generic Transform Name. PLEASE CHANGE."
    # Deliberately takes no self: wrapped in Callable below so it acts as a
    # pre-2.4 "static method" resetting the shared class state.
    def ClearTransformInfo():
        FwiktrTransformManager.step = 0
        FwiktrTransformManager.transform_xml = ""
    ClearTransformInfo = Callable(ClearTransformInfo)
    def AddTransformInfo(self):
        # Append this transform's XML record and advance the shared counter.
        FwiktrTransformManager.transform_xml += self._BuildTransformXML()
        FwiktrTransformManager.step = FwiktrTransformManager.step + 1
    def RunTransform(self, transform_data):
        """Run the subclass _Run() hook, record the step, return its result."""
        self._output = ""
        # self._before = []
        # self._after = []
        # if isinstance(transform_data, list):
        #     self._before = transform_data
        val = self._Run(transform_data)
        # if isinstance(val, list):
        #     self._after = val
        self.AddTransformInfo()
        return val
    def _Run(self, transform_data):
        # Abstract hook; subclasses must override.
        raise Exception, "ONLY TO BE CALLED FROM CHILD CLASSES"
    def GetTransformXML(self):
        # Returns the *shared* accumulated log, not just this instance's.
        return FwiktrTransformManager.transform_xml
    def _BuildTransformXML(self):
        # transform_info = {"transform_before":OutputTagList(self._before), "transform_after":OutputTagList(self._after), "transform_output":self._output, "transform_step":FwiktrTransformManager.step, "transform_name":self._name, "transform_description":self._description}
        transform_info = {"transform_output":self._output, "transform_step":FwiktrTransformManager.step, "transform_name":self._name, "transform_description":self._description}
        return transform_info_xml % transform_info
#
# Language Services
#
class FwiktrLanguageService:
    """Base class for language-identification services.

    Subclasses fill _output (raw tool output) and _result (the detected
    language); GetLanguageData() packages all fields for the XML template.
    """
    def __init__(self):
        self._name = ""
        self._description = ""
        # Fix: previously left unset, so calling GetLanguageData() before a
        # subclass ran an identification raised AttributeError.
        self._output = ""
        self._result = ""

    def GetLanguageData(self):
        """Return the language_* substitution dict for fwiktr_xml."""
        return {'language_method': self._name,
                'language_description': self._description,
                'language_output': self._output,
                'language_result': self._result}
class FwiktrPerlLinguaIdentify(FwiktrLanguageService):
    """Detects a text's language by shelling out to a Perl helper built on
    Lingua::Identify."""
    def __init__(self):
        FwiktrLanguageService.__init__(self)
        self._name = "Lingua::Identify"
        self._description = "Identifies language of text by using langof() function of Lingua::Identify perl module (http://search.cpan.org/~cog/Lingua-Identify-0.19/)"
    def Identify(self, text):
        """Run the helper on *text*; store its raw output in _output and
        the last output line (the detected language) in _result.

        NOTE(review): *text* is interpolated unescaped into a shell
        command -- quotes/backticks in a post could inject shell code;
        prefer subprocess with an argument list.
        """
        self._output = ""
        cmd = "echo \"%s\" | perl lingua_test.pl > ./lingua_output.txt" % text
        os.system(cmd)
        lang_file = open('lingua_output.txt', 'r')
        self._output = lang_file.read()
        lang_file.seek(0)
        # The detected language is the final line of the helper's output.
        self._result = (lang_file.readlines()).pop().strip()
#
# Post Services
#
class FwiktrPostRetriever(FwiktrServiceManager):
    """Base class for services that supply posts (e.g. public tweets).

    Subclasses implement _GetNewPosts/_GetPostSpecificXML and the
    GetPostDate/GetPostText accessors.
    """
    def __init__(self):
        FwiktrServiceManager.__init__(self)
        self._msg_list = []
        self._current_msg = None
        self.name = ""

    def NextPost(self):
        """Advance to the next cached post, refetching when the cache runs dry."""
        # Fix: was `len(self._msg_list) is 0` -- identity comparison against
        # an int literal only works by CPython's small-int caching; use a
        # plain truth test. The sleep throttles refetches against the API.
        if not self._msg_list:
            time.sleep(30)
            self._GetNewPosts()
        self._current_msg = self._msg_list.pop()

    def GetPostDate(self):
        raise Exception("ONLY TO BE CALLED FROM CHILD CLASSES")

    def GetPostText(self):
        raise Exception("ONLY TO BE CALLED FROM CHILD CLASSES")

    def GetPostData(self):
        """Return the post_* substitution dict for fwiktr_xml."""
        return {'post_author_name':self._current_msg.user.screen_name,'post_location':self._current_msg.user.location,'post_info':self._GetPostSpecificXML()}

    def _GetPostSpecificXML(self):
        raise Exception("ONLY TO BE CALLED FROM CHILD CLASSES")

    def _GetNewPosts(self):
        raise Exception("ONLY TO BE CALLED FROM CHILD CLASSES")
class FwiktrTwitterRetriever(FwiktrPostRetriever):
    """Post source backed by Twitter's public timeline."""
    def __init__(self):
        FwiktrPostRetriever.__init__(self)
        self.name = "Twitter"
    def _SetupService(self):
        # Unauthenticated API client; invoked from FwiktrServiceManager.__init__.
        self._tapi = twitter.Api()
    def _GetNewPosts(self):
        self._msg_list = self._tapi.GetPublicTimeline()
    def GetPostDate(self):
        return self._current_msg.created_at
    def GetPostText(self):
        return self._current_msg.text
    def _GetPostSpecificXML(self):
        # Twitter ids for the %(post_info)s slot of fwiktr_xml.
        return twitter_info_xml % {'twitter_post_id':self._current_msg.id,'twitter_author_id':self._current_msg.user.id}
#
# Picture Services
#
class FwiktrFlickrFuckItSelectionTransform(FwiktrTransformManager):
    """Records the fallback any-tag (universal OR) picture selection.

    NOTE(review): the base-class __init__ is never called here, so
    _before/_after are undefined on instances -- harmless with the current
    _BuildTransformXML, which reads only _output/_name/_description.
    """
    def __init__(self):
        self._description = "Uses the 'ANY' (Universal OR) search to seed the picture search, then selects a random picture."
        self._name = "Flickr 'Fuck It' Selector"
        self._output = ""
    def _Run(self, transform_data):
        # transform_data: dict with 'total' (result count) and 'picked' (index).
        self._output = "Total Pictures Found: %(total)s\nPicture Index Used: %(picked)s" % transform_data
        return
class FwiktrFlickrFullANDSelectionTransform(FwiktrTransformManager):
    """Records the primary all-tags (universal AND) picture selection.

    NOTE(review): like its sibling, this skips the base-class __init__;
    only _output/_name/_description are ever read by _BuildTransformXML.
    """
    def __init__(self):
        self._description = "Uses the 'ALL' (Universal AND) search to seed the picture search, then selects a random picture if there are results."
        self._name = "Flickr 'Full AND' Selector"
        self._output = ""
    def _Run(self, transform_data):
        # transform_data: dict with 'total' (result count) and 'picked' (index).
        self._output = "Total Pictures Found: %(total)s\nPicture Index Used: %(picked)s" % transform_data
        return
class FwiktrFlickrTagCullTransform(FwiktrTransformManager):
    """Trims a tag list down to the 20-tag maximum accepted by Flickr."""
    def __init__(self):
        # Fix: base state (step/XML bookkeeping fields) was never initialized.
        FwiktrTransformManager.__init__(self)
        self._description = "Culls tag list down to 20 tags (maximum allowed by flickr API)"
        self._name = "Flickr Tag Cull Transformer"

    def _Run(self, transform_data):
        """Return at most the first 20 tags of *transform_data*.

        Fixes: the original referenced the undefined name ``tag_list``
        (NameError at runtime) and sliced ``[0:19]``, which keeps only 19
        tags despite the documented limit of 20.
        """
        return transform_data[:20]
class FwiktrFlickrRetriever(FwiktrServiceManager):
    """Fetches a picture from Flickr matching a tag list.

    Tries an all-tags (AND) search first, falls back to an any-tag (OR)
    search, and picks a random photo from the first result page.
    """
    # Instantiated transforms kept for reference; GetNewPicture builds its own.
    transformList = [FwiktrFlickrFuckItSelectionTransform(), FwiktrFlickrFullANDSelectionTransform()]
    def __init__(self):
        self._pic_info = []
        FwiktrServiceManager.__init__(self)
        self.name = "Flickr"
    def _SetupService(self):
        # Invoked from FwiktrServiceManager.__init__; keys come from ~/.fwiktrrc.
        self._fapi = FlickrAPI(self._GetOption('flickr_api_key'), self._GetOption('flickr_api_secret'))
    def GetPictureXML(self):
        # Returns the raw, unsubstituted template.
        return flickr_info_xml
    def GetPictureData(self):
        """Return the picture_* substitution dict for fwiktr_xml."""
        return {'picture_title':cgi.escape(self._pic_info['title']), 'picture_info':self._GetPictureSpecificData()}
    def _GetPictureSpecificData(self):
        return flickr_info_xml % {'flickr_server':self._pic_info['server'], 'flickr_farm':self._pic_info['farm'], 'flickr_photo_id':self._pic_info['id'], 'flickr_secret':self._pic_info['secret'], 'flickr_owner_id':self._pic_info['owner']}
    def GetNewPicture(self, tag_list):
        """Search Flickr for *tag_list* and cache a random hit in _pic_info.

        Returns True when a picture was selected, False otherwise.
        NOTE(review): the blanket try/except returning False also hides
        real failures (bad API key, network errors); consider narrowing.
        """
        try:
            # Flickr allows at most 20 tags per search.
            if len(tag_list) > 20:
                culler = FwiktrFlickrTagCullTransform()
                tag_list = culler.RunTransform(tag_list)
            tag_string = ','.join(tag_list)
            if(tag_string == ""): return False
            # First attempt: all tags must match (AND).
            pic = FwiktrFlickrFullANDSelectionTransform()
            rsp = self._fapi.photos_search(api_key=self._GetOption('flickr_api_key'),tags=tag_string,tag_mode='all')
            self._fapi.testFailure(rsp)
            print rsp.photos[0]['total']
            if(int(rsp.photos[0]['total']) == 0):
                # Fallback: any tag may match (OR).
                pic = FwiktrFlickrFuckItSelectionTransform()
                rsp = self._fapi.photos_search(api_key=self._GetOption('flickr_api_key'),tags=tag_string,tag_mode='any')
                print rsp.photos[0]['total']
                self._fapi.testFailure(rsp)
                if(int(rsp.photos[0]['total']) == 0):
                    return False
            # Pick a random photo within the first result page.
            rand_index = random.randint(0, min(int(rsp.photos[0]['perpage']), int(rsp.photos[0]['total'])))
            self._pic_info = rsp.photos[0].photo[rand_index]
            pic.RunTransform({'total':rsp.photos[0]['total'],'picked':rand_index})
            return True
        except:
            return False
#
# Tag Services
#
class FwiktrTokenize(FwiktrTransformManager):
    """Whitespace tokenizer based on NLTK; returns the token list unfiltered.

    NOTE(review): this class exposes GetTagList instead of overriding
    _Run, so calling RunTransform on it raises -- only GetTagList works.
    """
    def __init__(self):
        # Fix: base state was never initialized.
        FwiktrTransformManager.__init__(self)
        self._description = "Tokenizer from Python NLP Toolkit. Removes all punctuation and whitespace, gives back tokenized word list with no filtering."
        self._name = "NLPK Tokenizer"

    def GetTagList(self, text):
        """Split *text* on whitespace and return the token list."""
        self._before = text
        # Fix: was a list comprehension used purely for its append side
        # effect; build the list directly instead.
        tags = list(tokenize.whitespace(text))
        self._output = ""
        self._after = tags
        return tags
class FwiktrTreeTagger(FwiktrTransformManager):
    """Runs text through the TreeTagger POS tagger and filters tokens.

    Subclasses define _ComparisonFunction to decide which tagger output
    rows survive.
    """
    def __init__(self):
        FwiktrTransformManager.__init__(self)
    def _Run(self, text):
        """Tag *text* and return the first column (the word) of every
        surviving row.

        NOTE(review): *text* is interpolated unescaped into a shell
        command; malicious post text could inject shell code.
        """
        self._output = ""
        cmd = "echo \"%s\" | treetagger/cmd/tree-tagger-english > ./twitter_message_output.txt" % text
        os.system(cmd)
        pos_file = open('twitter_message_output.txt', 'r')
        tokens = []
        self.parse_string = ""
        # TreeTagger emits one whitespace-separated row per token.
        for line in pos_file:
            current_line = []
            self._output += line
            for value in tokenize.whitespace(line):
                current_line.append(value)
            tokens.append(current_line)
        # "<unknown>" would break the XML record; neutralize it.
        self._output = self._output.replace("<unknown>", "[unknown]")
        filtered_tags = filter(self._ComparisonFunction, tokens)
        final_tags = []
        [final_tags.append(i[0]) for i in filtered_tags]
        return final_tags
    def _ComparisonFunction(self, list):
        # Abstract row predicate; subclasses must override.
        raise Exception, "COMPARISON MUST BE DEFINED IN CHILD CLASS"
class FwiktrTreeTaggerPOSPicker(FwiktrTreeTagger):
    """TreeTagger filter keeping only tokens whose POS tag is in the
    subclass-supplied _poslist."""
    def __init__(self):
        FwiktrTreeTagger.__init__(self)
        self._poslist = []

    def _ComparisonFunction(self, token):
        """Return True when the token's POS tag (token[1]) is in _poslist.

        Fixes: parameter renamed from ``list`` (shadowed the builtin) and
        the if/return-True/return-False collapsed into a boolean
        expression.
        """
        return token[1] in self._poslist
class FwiktrTreeTaggerNounsOnly(FwiktrTreeTaggerPOSPicker):
    """Keeps only noun tokens (NP/NN/NNS/NPS) from TreeTagger output."""
    def __init__(self):
        FwiktrTreeTaggerPOSPicker.__init__(self)
        # TreeTagger's noun tags: proper, common, and their plurals.
        self._poslist = ["NP", "NN", "NNS", "NPS"]
        self._name = "TreeTagger ENGLISH - Nouns Only"
        self._description = "Return only words having Parts of Speech type NN, NP, NNS, or NPS, as identified by TreeTagger (http://www.ims.uni-stuttgart.de/projekte/corplex/TreeTagger/)"
#
# Seasoning Services
#
#
# Weather Seasoner
#
#
# Google Recommendation Seasoner
#
#
# Main Functionality
#
def CombineDictionaries(dict1, dict2):
    """Return a new dict holding dict1's entries updated with dict2's
    (dict2 wins on key collisions); neither input is modified."""
    merged = dict(dict1)
    merged.update(dict2)
    return merged
class Fwiktr:
    """Main pipeline: pull a post, detect its language, extract tags, find
    a matching Flickr picture, assemble the XML record and upload it."""
    def __init__(self):
        # Registered services; CreateArt currently uses fixed indices.
        self._post_rets = [FwiktrTwitterRetriever()]
        self._pic_rets = [FwiktrFlickrRetriever()]
        self._tag_rets = [FwiktrTokenize(), FwiktrTreeTaggerNounsOnly()]
        self._lang_rets = [FwiktrPerlLinguaIdentify()]
    def CreateArt(self):
        """Run one full post -> tags -> picture -> XML -> upload cycle.

        Bails out silently on non-ASCII posts, language-detection failure,
        no matching picture, or invalid XML.
        """
        FwiktrTransformManager.ClearTransformInfo()
        xml_info = dict()
        #Pull post from source
        post_ret = self._post_rets[0]
        post_ret.NextPost()
        try:
            # str() raises UnicodeEncodeError on non-ASCII text.
            post_text = str(post_ret.GetPostText())
        except:
            print "Non-ASCII Message, skipping"
            return
        print post_text
        #Identify source's language
        lang_ret = self._lang_rets[0]
        try:
            lang_ret.Identify(post_text)
        except:
            print "Cannot identify language"
            return
        #Pull tags from source (index 1 = nouns-only TreeTagger)
        tag_ret = self._tag_rets[1]
        tag_list = tag_ret.RunTransform(post_text)
        print tag_list
        #Season tag list
        #Retrieve picture using tags
        pic_ret = self._pic_rets[0]
        if pic_ret.GetNewPicture(tag_list) is False: return
        #Build XML blob
        # NOTE(review): the language_* placeholders set here are overwritten
        # by lang_ret.GetLanguageData() via CombineDictionaries below.
        xml_dict = {'language_method':"No Detection - English", 'language_result':"English", 'language_output':"", 'language_description':"No processing done, assumes english", 'art_tags':OutputTagList(tag_list), 'post_date':post_ret.GetPostDate(), 'post_text':cgi.escape(post_text), 'transforms':tag_ret.GetTransformXML()}
        xml_dict = CombineDictionaries(xml_dict, post_ret.GetPostData())
        xml_dict = CombineDictionaries(xml_dict, pic_ret.GetPictureData())
        xml_dict = CombineDictionaries(xml_dict, lang_ret.GetLanguageData())
        xml_info["transform_info_xml"] = tag_ret.GetTransformXML()
        fwiktr_info = fwiktr_xml % xml_dict
        print fwiktr_info
        #check the validity of our XML before we ship it off
        try:
            parser=xmlval.XMLValidator()
            parser.feed(fwiktr_info)
        except:
            # raise
            return
        #Post data to web
        self._PostDataToWeb(fwiktr_info)
    def _PostDataToWeb(self, info):
        """Upload the XML record via the module-level curl handle; failures
        are silently ignored (best effort)."""
        try:
            gCurl.setopt(pycurl.URL, 'http://www.30helensagree.com/fwiktr/fwiktr_post.php')
            gCurl.setopt(pycurl.POST, 1)
            gCurl.setopt(pycurl.POSTFIELDS, urllib.urlencode([("fwiktr_post", info)]))
            gCurl.perform()
        except:
            return
def main():
    """Run the art-generation loop until interrupted with Ctrl-C."""
    pipeline = Fwiktr()
    while True:
        try:
            pipeline.CreateArt()
        except KeyboardInterrupt:
            return


if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,009 |
14,671,608,309,697 |
453241fa70559d81055447e1a6a01ce035adc7dc
|
d7f740af90c19c6b1547cc19436eec459da001b3
|
/FluiDevUI.py
|
fb40b1abaa810c219af97828931260f7a11e7a7b
|
[] |
no_license
|
edhedges/FluiDev
|
https://github.com/edhedges/FluiDev
|
2dd6b0f0ef7194110940b7e042fb091b08738b8b
|
c646aa612d3f8fc16e33195449b3dfd09b36a25d
|
refs/heads/master
| 2021-01-01T19:42:17.020102 | 2012-07-08T07:25:56 | 2012-07-08T07:25:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from PySide import QtCore, QtGui
class FluiDevUIDesign(object):
def setupUi(self, FluiDevMainWindow):
    """Build the FluiDev main-window widget tree.

    pyside-uic-style generated code: creates the window, palette, central
    widgets, menus and actions, then wires translations and slots. Avoid
    hand-editing individual widget setup lines; regenerate instead.
    """
    # --- window geometry: fixed 844x550, non-resizable ---
    FluiDevMainWindow.setObjectName("FluiDevMainWindow")
    FluiDevMainWindow.resize(844, 550)
    sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(FluiDevMainWindow.sizePolicy().hasHeightForWidth())
    FluiDevMainWindow.setSizePolicy(sizePolicy)
    FluiDevMainWindow.setMinimumSize(QtCore.QSize(844, 550))
    FluiDevMainWindow.setMaximumSize(QtCore.QSize(844, 550))
    FluiDevMainWindow.setBaseSize(QtCore.QSize(844, 550))
    # --- white background palette for all widget states ---
    palette = QtGui.QPalette()
    brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
    brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
    brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
    brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
    brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
    brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
    FluiDevMainWindow.setPalette(palette)
    font = QtGui.QFont()
    font.setPointSize(13)
    FluiDevMainWindow.setFont(font)
    FluiDevMainWindow.setTabShape(QtGui.QTabWidget.Rounded)
    # --- central widget and its children ---
    self.centralwidget = QtGui.QWidget(FluiDevMainWindow)
    self.centralwidget.setObjectName("centralwidget")
    self.createNewWebsiteButton = QtGui.QPushButton(self.centralwidget)
    self.createNewWebsiteButton.setGeometry(QtCore.QRect(480, 30, 301, 61))
    font = QtGui.QFont()
    font.setPointSize(16)
    self.createNewWebsiteButton.setFont(font)
    self.createNewWebsiteButton.setObjectName("createNewWebsiteButton")
    # Table of existing websites: name / last-edited / created columns.
    self.existingWebsitesDisplay = QtGui.QTableWidget(self.centralwidget)
    self.existingWebsitesDisplay.setGeometry(QtCore.QRect(480, 160, 302, 311))
    self.existingWebsitesDisplay.setObjectName("existingWebsitesDisplay")
    self.existingWebsitesDisplay.setColumnCount(3)
    self.existingWebsitesDisplay.setRowCount(0)
    item = QtGui.QTableWidgetItem()
    self.existingWebsitesDisplay.setHorizontalHeaderItem(0, item)
    item = QtGui.QTableWidgetItem()
    self.existingWebsitesDisplay.setHorizontalHeaderItem(1, item)
    item = QtGui.QTableWidgetItem()
    self.existingWebsitesDisplay.setHorizontalHeaderItem(2, item)
    self.existingWebsitesLabel = QtGui.QLabel(self.centralwidget)
    self.existingWebsitesLabel.setGeometry(QtCore.QRect(550, 130, 161, 21))
    font = QtGui.QFont()
    font.setPointSize(16)
    font.setWeight(75)
    font.setBold(True)
    self.existingWebsitesLabel.setFont(font)
    self.existingWebsitesLabel.setAlignment(QtCore.Qt.AlignCenter)
    self.existingWebsitesLabel.setObjectName("existingWebsitesLabel")
    # Feature tree: 3 top-level categories with 3-4 children each; the
    # item texts are assigned later in retranslateUi.
    self.featureList = QtGui.QTreeWidget(self.centralwidget)
    self.featureList.setGeometry(QtCore.QRect(60, 160, 321, 311))
    self.featureList.setObjectName("featureList")
    item_0 = QtGui.QTreeWidgetItem(self.featureList)
    font = QtGui.QFont()
    font.setWeight(75)
    font.setBold(True)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_0 = QtGui.QTreeWidgetItem(self.featureList)
    font = QtGui.QFont()
    font.setWeight(75)
    font.setBold(True)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_0 = QtGui.QTreeWidgetItem(self.featureList)
    font = QtGui.QFont()
    font.setWeight(75)
    font.setBold(True)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    item_1 = QtGui.QTreeWidgetItem(item_0)
    self.titleLabel = QtGui.QLabel(self.centralwidget)
    self.titleLabel.setGeometry(QtCore.QRect(60, 20, 321, 41))
    font = QtGui.QFont()
    font.setPointSize(26)
    font.setWeight(75)
    font.setBold(True)
    self.titleLabel.setFont(font)
    self.titleLabel.setAlignment(QtCore.Qt.AlignBottom | QtCore.Qt.AlignHCenter)
    self.titleLabel.setObjectName("titleLabel")
    self.authorLabel = QtGui.QLabel(self.centralwidget)
    self.authorLabel.setGeometry(QtCore.QRect(60, 70, 321, 41))
    font = QtGui.QFont()
    font.setPointSize(13)
    font.setWeight(50)
    font.setBold(False)
    self.authorLabel.setFont(font)
    self.authorLabel.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignTop)
    self.authorLabel.setOpenExternalLinks(True)
    self.authorLabel.setObjectName("authorLabel")
    self.featuresLabel = QtGui.QLabel(self.centralwidget)
    self.featuresLabel.setGeometry(QtCore.QRect(130, 130, 171, 21))
    font = QtGui.QFont()
    font.setPointSize(16)
    font.setWeight(75)
    font.setBold(True)
    self.featuresLabel.setFont(font)
    self.featuresLabel.setAlignment(QtCore.Qt.AlignCenter)
    self.featuresLabel.setObjectName("featuresLabel")
    FluiDevMainWindow.setCentralWidget(self.centralwidget)
    # --- status bar and menu bar ---
    self.statusbar = QtGui.QStatusBar(FluiDevMainWindow)
    self.statusbar.setObjectName("statusbar")
    FluiDevMainWindow.setStatusBar(self.statusbar)
    self.menubar = QtGui.QMenuBar()
    self.menubar.setGeometry(QtCore.QRect(0, 0, 844, 22))
    self.menubar.setObjectName("menubar")
    self.menuFile = QtGui.QMenu(self.menubar)
    self.menuFile.setObjectName("menuFile")
    self.menuHelp = QtGui.QMenu(self.menubar)
    self.menuHelp.setObjectName("menuHelp")
    self.menuSimple_Web = QtGui.QMenu(self.menubar)
    self.menuSimple_Web.setObjectName("menuSimple_Web")
    FluiDevMainWindow.setMenuBar(self.menubar)
    # --- actions (many are generated leftovers that are never added to a
    # menu below; candidates for cleanup in Designer) ---
    self.actionAbout_Simple_Web = QtGui.QAction(FluiDevMainWindow)
    self.actionAbout_Simple_Web.setObjectName("actionAbout_Simple_Web")
    self.actionPreferences = QtGui.QAction(FluiDevMainWindow)
    self.actionPreferences.setObjectName("actionPreferences")
    self.actionQuite = QtGui.QAction(FluiDevMainWindow)
    self.actionQuite.setObjectName("actionQuite")
    self.actionNew_Website = QtGui.QAction(FluiDevMainWindow)
    self.actionNew_Website.setObjectName("actionNew_Website")
    self.actionOpen_Existing_Website = QtGui.QAction(FluiDevMainWindow)
    self.actionOpen_Existing_Website.setObjectName("actionOpen_Existing_Website")
    self.actionSave_Website = QtGui.QAction(FluiDevMainWindow)
    self.actionSave_Website.setObjectName("actionSave_Website")
    self.actionDocumentation = QtGui.QAction(FluiDevMainWindow)
    self.actionDocumentation.setObjectName("actionDocumentation")
    self.actionSource_Code = QtGui.QAction(FluiDevMainWindow)
    self.actionSource_Code.setObjectName("actionSource_Code")
    self.actionSupport = QtGui.QAction(FluiDevMainWindow)
    self.actionSupport.setObjectName("actionSupport")
    self.actionDonate = QtGui.QAction(FluiDevMainWindow)
    self.actionDonate.setObjectName("actionDonate")
    self.actionTutorial = QtGui.QAction(FluiDevMainWindow)
    self.actionTutorial.setObjectName("actionTutorial")
    self.actionAbout = QtGui.QAction(FluiDevMainWindow)
    self.actionAbout.setObjectName("actionAbout")
    self.actionPreferences_2 = QtGui.QAction(FluiDevMainWindow)
    self.actionPreferences_2.setObjectName("actionPreferences_2")
    self.actionQuit = QtGui.QAction(FluiDevMainWindow)
    self.actionQuit.setObjectName("actionQuit")
    self.actionAbout_2 = QtGui.QAction(FluiDevMainWindow)
    self.actionAbout_2.setObjectName("actionAbout_2")
    self.actionPreferencew = QtGui.QAction(FluiDevMainWindow)
    self.actionPreferencew.setObjectName("actionPreferencew")
    self.actionQuit_2 = QtGui.QAction(FluiDevMainWindow)
    self.actionQuit_2.setObjectName("actionQuit_2")
    self.actionAdf = QtGui.QAction(FluiDevMainWindow)
    self.actionAdf.setObjectName("actionAdf")
    self.actionAbout_3 = QtGui.QAction(FluiDevMainWindow)
    self.actionAbout_3.setObjectName("actionAbout_3")
    self.actionPreferences_3 = QtGui.QAction(FluiDevMainWindow)
    self.actionPreferences_3.setObjectName("actionPreferences_3")
    self.actionQuit_3 = QtGui.QAction(FluiDevMainWindow)
    self.actionQuit_3.setObjectName("actionQuit_3")
    self.actionInfo = QtGui.QAction(FluiDevMainWindow)
    self.actionInfo.setObjectName("actionInfo")
    self.actionSettings = QtGui.QAction(FluiDevMainWindow)
    self.actionSettings.setObjectName("actionSettings")
    self.actionQuit_4 = QtGui.QAction(FluiDevMainWindow)
    self.actionQuit_4.setObjectName("actionQuit_4")
    # --- attach actions to menus, menus to the bar ---
    self.menuFile.addAction(self.actionNew_Website)
    self.menuFile.addAction(self.actionOpen_Existing_Website)
    self.menuHelp.addAction(self.actionSupport)
    self.menuHelp.addAction(self.actionTutorial)
    self.menuHelp.addAction(self.actionDocumentation)
    self.menuHelp.addSeparator()
    self.menuHelp.addAction(self.actionSource_Code)
    self.menuHelp.addAction(self.actionDonate)
    self.menuSimple_Web.addAction(self.actionInfo)
    self.menubar.addAction(self.menuSimple_Web.menuAction())
    self.menubar.addAction(self.menuFile.menuAction())
    self.menubar.addAction(self.menuHelp.menuAction())
    # Assign user-visible strings and auto-connect slots by object name.
    self.retranslateUi(FluiDevMainWindow)
    QtCore.QMetaObject.connectSlotsByName(FluiDevMainWindow)
    def retranslateUi(self, FluiDevMainWindow):
        """Install every user-visible string, routed through Qt's translate().

        NOTE(review): this method looks pyuic-generated (Qt Designer output);
        prefer regenerating from the .ui file over hand-editing — confirm.
        """
        FluiDevMainWindow.setWindowTitle(QtGui.QApplication.translate("FluiDevMainWindow", "FluiDev", None, QtGui.QApplication.UnicodeUTF8))
        self.createNewWebsiteButton.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Create a New Website", None, QtGui.QApplication.UnicodeUTF8))
        self.existingWebsitesDisplay.horizontalHeaderItem(0).setText(QtGui.QApplication.translate("FluiDevMainWindow", "Website Name", None, QtGui.QApplication.UnicodeUTF8))
        self.existingWebsitesDisplay.horizontalHeaderItem(1).setText(QtGui.QApplication.translate("FluiDevMainWindow", "Date Last Edited", None, QtGui.QApplication.UnicodeUTF8))
        self.existingWebsitesDisplay.horizontalHeaderItem(2).setText(QtGui.QApplication.translate("FluiDevMainWindow", "Date Created", None, QtGui.QApplication.UnicodeUTF8))
        self.existingWebsitesLabel.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Existings Websites", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.headerItem().setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Feature List", None, QtGui.QApplication.UnicodeUTF8))
        # Sorting is disabled while items are (re)labelled, then restored.
        __sortingEnabled = self.featureList.isSortingEnabled()
        self.featureList.setSortingEnabled(False)
        self.featureList.topLevelItem(0).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Website Management", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(0).child(0).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Create, edit, and delete static pages", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(0).child(1).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Create, edit, and delete a static blog", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(0).child(2).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Customize website settings", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(1).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Markup Editor", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(1).child(0).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Markdown", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(1).child(1).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "HTML5", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(1).child(2).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "reStructuredText", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(1).child(3).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Zen Coding", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(2).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Theme Editor", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(2).child(0).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Editable barebones theme", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(2).child(1).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Editable Simple Web theme", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(2).child(2).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Create and edit custom themes", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.topLevelItem(2).child(3).setText(0, QtGui.QApplication.translate("FluiDevMainWindow", "Write custom CSS", None, QtGui.QApplication.UnicodeUTF8))
        self.featureList.setSortingEnabled(__sortingEnabled)
        self.titleLabel.setText(QtGui.QApplication.translate("FluiDevMainWindow", "FluiDev", None, QtGui.QApplication.UnicodeUTF8))
        self.authorLabel.setText(QtGui.QApplication.translate("FluiDevMainWindow", "by <a href=\"http://edhedges.com\">Eddie Hedges</a>", None, QtGui.QApplication.UnicodeUTF8))
        self.featuresLabel.setText(QtGui.QApplication.translate("FluiDevMainWindow", "FluiDev Features", None, QtGui.QApplication.UnicodeUTF8))
        self.menuFile.setTitle(QtGui.QApplication.translate("FluiDevMainWindow", "File", None, QtGui.QApplication.UnicodeUTF8))
        self.menuHelp.setTitle(QtGui.QApplication.translate("FluiDevMainWindow", "Help", None, QtGui.QApplication.UnicodeUTF8))
        self.menuSimple_Web.setTitle(QtGui.QApplication.translate("FluiDevMainWindow", "Simple Web", None, QtGui.QApplication.UnicodeUTF8))
        self.actionAbout_Simple_Web.setText(QtGui.QApplication.translate("FluiDevMainWindow", "About Simple Web", None, QtGui.QApplication.UnicodeUTF8))
        self.actionPreferences.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Preferences", None, QtGui.QApplication.UnicodeUTF8))
        self.actionQuite.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
        self.actionNew_Website.setText(QtGui.QApplication.translate("FluiDevMainWindow", "New Website", None, QtGui.QApplication.UnicodeUTF8))
        self.actionOpen_Existing_Website.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Open Existing Website", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSave_Website.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Save Website", None, QtGui.QApplication.UnicodeUTF8))
        self.actionDocumentation.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Documentation", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSource_Code.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Source Code", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSupport.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Support", None, QtGui.QApplication.UnicodeUTF8))
        self.actionDonate.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Donate", None, QtGui.QApplication.UnicodeUTF8))
        self.actionTutorial.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Tutorial", None, QtGui.QApplication.UnicodeUTF8))
        self.actionAbout.setText(QtGui.QApplication.translate("FluiDevMainWindow", "About", None, QtGui.QApplication.UnicodeUTF8))
        self.actionPreferences_2.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Preferences", None, QtGui.QApplication.UnicodeUTF8))
        self.actionQuit.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
        self.actionAbout_2.setText(QtGui.QApplication.translate("FluiDevMainWindow", "About", None, QtGui.QApplication.UnicodeUTF8))
        self.actionPreferencew.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Preferences", None, QtGui.QApplication.UnicodeUTF8))
        self.actionQuit_2.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
        self.actionAdf.setText(QtGui.QApplication.translate("FluiDevMainWindow", "adf", None, QtGui.QApplication.UnicodeUTF8))
        self.actionAbout_3.setText(QtGui.QApplication.translate("FluiDevMainWindow", "About", None, QtGui.QApplication.UnicodeUTF8))
        self.actionPreferences_3.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Preferences", None, QtGui.QApplication.UnicodeUTF8))
        self.actionQuit_3.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
        self.actionInfo.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Info", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSettings.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Settings", None, QtGui.QApplication.UnicodeUTF8))
        self.actionQuit_4.setText(QtGui.QApplication.translate("FluiDevMainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
|
UTF-8
|
Python
| false | false | 2,012 |
5,093,831,242,051 |
d2505b6b7288f46c06964ff510d9764e71292e89
|
f8fa32cd84eec044868d906578f888ab15fe0e85
|
/game/projectile/Blast.py
|
046f906b546e2138195d582f16b251098a594e0e
|
[] |
no_license
|
code-11/tanCS
|
https://github.com/code-11/tanCS
|
9d6f97e4db1cd2ade283e1d6fe2e3e3751518a51
|
b68e50cc453c31c0aab5508c2c84a85eaf7f2e8c
|
refs/heads/master
| 2023-03-15T23:31:22.195650 | 2012-05-13T02:04:59 | 2012-05-13T02:04:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
sys.path.append("..")
from Projectile import *
import TaskList
from panda3d.core import Quat
class Blast(Projectile):
    '''A Blast: the weakest dynamic projectile, fired by a Blaster weapon.'''

    def __init__(self, weapon, speed):
        # Base combat/physics parameters: Blast carries the minimum damage.
        damage = 1
        mass = .1

        # Convert the weapon's heading/pitch (degrees) into a unit direction.
        hpr = weapon.getHpr()
        heading = hpr[0] * math.pi / 180
        pitch = hpr[1] * math.pi / 180
        direction = Vec3(math.cos(heading) * math.cos(pitch),
                         math.sin(heading) * math.cos(pitch),
                         math.sin(pitch))

        shape = BulletSphereShape(.5)
        self.name = weapon.getTank()._nodePath.node().getName() + ' blast'

        # Spawn one unit above the weapon's absolute position.
        pos = weapon.getAbsPos()
        pos = Point3(pos[0], pos[1], pos[2] + 1)

        # Scale the direction by the muzzle speed and coerce back to Vec3.
        vel = direction * speed
        vel = Vec3(vel[0], vel[1], vel[2])

        # Load the visual model before handing physics off to Projectile.
        np = loader.loadModel('media/bullet.x')
        np.setScale(Vec3(1.5, 1.5, 1.5))

        Projectile.__init__(self, weapon, pos, self.name, shape, vel, mass, damage)
        self._collisionCounter = 0
        np.reparentTo(self._nodePath)
|
UTF-8
|
Python
| false | false | 2,012 |
9,440,338,147,776 |
628eca16227dde2a02f438c27235a7ee84248c63
|
2c983d66d9d4c28486b7de036ac70c3afc57cee1
|
/app5/fkapp/views/frontend.py
|
ee8da00f8746e29203efa0d9f3296a61b1727426
|
[] |
no_license
|
ajaxj/ajaxjs
|
https://github.com/ajaxj/ajaxjs
|
ba562034af06d42b0aa98d426660bf703ddf3b35
|
2d9b3146d88d43049503aec0720fcf1794d7126d
|
refs/heads/master
| 2015-07-31T17:04:28.797142 | 2014-09-05T04:03:31 | 2014-09-05T04:03:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import datetime
from flask import Module,render_template
from fkapp.extensions import cache
__author__ = 'Administrator'
frontend = Module(__name__)
@frontend.route("/")
@cache.cached(timeout=1000)
def index():
    """Render the home page; the rendered response is cached for 1000 s.

    Bug fix: the route decorator must be outermost (applied last). In the
    original, ``@cache.cached`` sat above ``@frontend.route``, so Flask
    registered the *uncached* function and the cache wrapper was never
    invoked for requests.
    """
    test = ['a', 'b', 'c1']
    return render_template('index.html', test=test)
@frontend.route('/test')
def test():
    """Render the static test page."""
    template_name = 'test.html'
    return render_template(template_name)


@frontend.route('/matchs')
def matchs():
    """Placeholder endpoint for match listings."""
    body = "matchs"
    return body


@frontend.route('/notes')
def notes():
    """Placeholder endpoint for notes.

    NOTE(review): the body is the literal "test" — looks like a copy/paste
    leftover, but it is preserved as-is; confirm the intended response.
    """
    body = "test"
    return body
|
UTF-8
|
Python
| false | false | 2,014 |
15,272,903,748,976 |
883986ce4143150e2b3e05b969bbc44df2fa6cf3
|
66999d474f49212db70cc1a3de3705dfe3566e84
|
/cyder/cydns/nameserver/forms.py
|
1cbd9541ae0f7a3b14898423c719b7079a2c9852
|
[] |
no_license
|
ngokevin/chili
|
https://github.com/ngokevin/chili
|
9689156d288cd76a15fae6a738c99846d110fbc8
|
36c354ac567471d5e36dccf9eea5096c6b02d4b9
|
refs/heads/master
| 2021-01-18T06:57:50.779101 | 2013-05-17T22:58:18 | 2013-05-17T22:58:18 | 5,939,998 | 2 | 0 | null | true | 2013-05-01T19:52:31 | 2012-09-24T19:54:20 | 2013-05-01T19:52:30 | 2013-05-01T19:52:29 | 896 | null | 4 | 3 |
Python
| null | null |
from django import forms
from cyder.cydns.address_record.models import AddressRecord
from cyder.cydns.forms import DNSForm
from cyder.cydns.nameserver.models import Nameserver
from cyder.cydhcp.interface.static_intr.models import StaticInterface
class NameserverForm(DNSForm):
    """ModelForm for Nameserver records with a combined 'glue' chooser."""

    class Meta:
        model = Nameserver
        # addr_glue/intr_glue are replaced by the single synthesized 'glue'
        # choice field built in __init__, so hide them from the form.
        exclude = ('addr_glue', 'intr_glue')
        widgets = {'views': forms.CheckboxSelectMultiple}

    def __init__(self, *args, **kwargs):
        """Build the 'glue' choice field from matching address records and
        static interfaces sharing the glue record's label and domain."""
        super(NameserverForm, self).__init__(*args, **kwargs)
        if not self.instance:
            return
        if not self.instance.glue:
            # If it doesn't have glue, it doesn't need it.
            return
        # Candidate glue records: same label/domain as the current glue.
        addr_glue = AddressRecord.objects.filter(
            label=self.instance.glue.label,
            domain=self.instance.glue.domain)
        intr_glue = StaticInterface.objects.filter(
            label=self.instance.glue.label,
            domain=self.instance.glue.domain)
        glue_choices = []
        for glue in addr_glue:
            glue_choices.append(("addr_{0}".format(glue.pk), str(glue)))
        for glue in intr_glue:
            glue_choices.append(("intr_{0}".format(glue.pk), str(glue)))
        # NOTE(review): if glue is neither an AddressRecord nor a
        # StaticInterface, `initial` below is unbound (NameError). The model
        # presumably guarantees one of the two — confirm.
        if isinstance(self.instance.glue, AddressRecord):
            initial = "addr_{0}".format(self.instance.glue.pk)
        elif isinstance(self.instance.glue, StaticInterface):
            initial = "intr_{0}".format(self.instance.glue.pk)
        self.fields['glue'] = forms.ChoiceField(choices=glue_choices,
                                                initial=initial)
class NSDelegated(forms.Form):
    """Plain form for delegating a nameserver: server name and its IP."""
    # Free-text fields; field order here is the form's display order.
    server = forms.CharField()
    server_ip_address = forms.CharField()
|
UTF-8
|
Python
| false | false | 2,013 |
9,191,230,033,212 |
c289fb0110f99ee069d9568515743bd59440a727
|
15e3cf69a33418895b5ffd16bc5edf0f7c039de5
|
/app/__init__.py
|
ddf5456c2edd448d582c4f3db364ef85da1a40b3
|
[] |
no_license
|
d16r/dustinshahidehpour.com
|
https://github.com/d16r/dustinshahidehpour.com
|
d03bc4af158d8e8526658b71c63aa22d200835dd
|
e33032bdab415cc5d1c04d771c22669f212aeb1f
|
refs/heads/master
| 2021-05-29T15:16:10.406351 | 2014-02-21T19:31:12 | 2014-02-21T19:31:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import Flask
from flask_flatpages import FlatPages
from flask_frozen import Freezer
app = Flask(__name__)

# Load configuration BEFORE initializing extensions so FlatPages and Freezer
# see the final config values (FLATPAGES_*, FREEZER_*). The original loaded
# config after creating the extensions.
app.config.from_object('config')

flatpages = FlatPages(app)
freezer = Freezer(app)

# Imported last to avoid a circular import: views needs `app` to exist.
from app import views
|
UTF-8
|
Python
| false | false | 2,014 |
4,509,715,680,087 |
6c4238ca8f038d1327bbc4c8cd86ee13531f572d
|
afcdca427b09fb60bb35f709b27dbaee1519b9c6
|
/elastic_modules_pressure_change/gassman_module.py
|
7b20c63a4dfaf9e31c557add21591a2bef05261a
|
[] |
no_license
|
crimap/cccm
|
https://github.com/crimap/cccm
|
f7a157af9d8838c484e66daefa998488ea29ea8b
|
87f4f61ddcc0afc96c582a579ea83c7766597bb7
|
refs/heads/master
| 2021-01-21T01:10:45.119638 | 2013-02-22T05:12:21 | 2013-02-22T05:12:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import numpy as np
import math
from pylab import *
import numpy as np
import matplotlib.pyplot as plt
'''
Elastic models based on North Sea geology,
the modeled rock has two components:
quartz and clay
'''
def Gassman(Kdry,Ks,phi,Kwater,Rhos,Kco2,rhowater,rhoco2):
'''
K sat calculation by using Gassman
This modules works for combining Bounds
with fluid effect
'''
nvol = phi.size
CO2 =np.arange(0.0,1.0,0.1)
nCO2=CO2.size
Ksat = np.zeros((nvol,nCO2))
rhosat = np.zeros((nvol,nCO2))
phi[0]=0.00000000000001
for i in range(nvol):
tmp3=0.0
for j in range (nCO2):
Kfluid= (1.0-CO2[j])/Kwater + CO2[j]/Kco2
Kfluid=1.0/Kfluid
rhofluid=(1.0-CO2[j])*rhowater + CO2[j]*rhoco2
tmp3=(phi[i]/Kfluid+(1.0-phi[i])/Ks-Kdry[i]/(Ks*Ks))
tmp1=((1.0-Kdry[i]/Ks)*(1.0-Kdry[i]/Ks))
Ksat[i][j]= Kdry[i] +tmp1/tmp3
rhosat[i][j]= rhofluid*phi[i] + (1.0-phi[i])*Rhos
return (Ksat,rhosat)
def Kdry(Ksat,Ks,phit,so,Kwater,Koil):
Ndepth = Ksat.size
Kdry = np.zeros(Ndepth)
'''
K dry calculation by using Gassman eq in the wells
'''
for i in range (Ndepth):
Kfluid= (1.0-so[i])/Kwater + so[i]/Koil
Kfluid=1.0/Kfluid
tmp1 = Ksat[i]*(phit[i]*Ks[i]/Kfluid + 1.0 - phit[i] ) - Ks[i]
tmp2 = phit[i]*Ks[i]/Kfluid + Ksat[i]/Ks[i] - 1.0 - phit[i]
Kdry[i] = tmp1/tmp2
return (Kdry)
|
UTF-8
|
Python
| false | false | 2,013 |
5,153,960,757,371 |
1675d4ca457d57c532e5c90e0d58fd313fefe88f
|
174e9e23c428325f43036a592cbcb885fc23dc3e
|
/ChenAndWu/wll/views.py
|
86d064f42c4629f9c5b60c14e5152204d5b470e1
|
[] |
no_license
|
sshic/ChenAndWu
|
https://github.com/sshic/ChenAndWu
|
fb32e460d9f3a7fb019f37225fbec9ad176b65aa
|
a498aa0be940518876f92b098dc24e4f9330ee55
|
refs/heads/master
| 2015-08-02T00:20:49.823701 | 2013-10-22T05:05:02 | 2013-10-22T05:05:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render_to_response
def wu_learnings(request):
    """Render the index of Wu's learning pages."""
    context = {}
    return render_to_response('wu_learnings.html', context)


def emacs(request):
    """Render the Emacs notes page."""
    context = {}
    return render_to_response('emacs.html', context)


def github(request):
    """Render the GitHub notes page."""
    context = {}
    return render_to_response('github.html', context)


def vim(request):
    """Render the Vim notes page."""
    context = {}
    return render_to_response('vim.html', context)


def python(request):
    """Render the Python notes page."""
    context = {}
    return render_to_response('python.html', context)


def cplusplus(request):
    """Render the C++ notes page."""
    context = {}
    return render_to_response('cplusplus.html', context)


def linux(request):
    """Render the Linux notes page."""
    context = {}
    return render_to_response('linux.html', context)


def algorithm(request):
    """Render the algorithms notes page."""
    context = {}
    return render_to_response('algorithm.html', context)


def rongchiyuanli(request):
    """Render the rongchiyuanli notes page."""
    context = {}
    return render_to_response('rongchiyuanli.html', context)
|
UTF-8
|
Python
| false | false | 2,013 |
4,801,773,438,003 |
a4458dad544c553540cc7fdfd1ecef1715642849
|
e99f2f6c17e13e4e74a7afd01321eb2587bc66c2
|
/objects/__init__.py
|
50bab9dedacc94127c6d4c73b9f068d7981a335a
|
[] |
no_license
|
yibter/ectropy
|
https://github.com/yibter/ectropy
|
1ae67657b909a447f047fa952fe942ace4ed91f1
|
338cc925bc8f595af97e0c3710b6d6ebb006019b
|
HEAD
| 2016-09-06T20:01:47.595211 | 2010-12-09T21:06:54 | 2010-12-09T21:06:54 | 856,769 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Asset:
    """An asset identified by id, with a display name and a start time."""

    def __init__(self, id, name, start):
        self.id = id
        self.name = name
        self.start = start

    def __contains__(self, assets):
        # Inverted protocol: `collection in asset` asks whether any asset in
        # *collection* shares this asset's id.
        return any(other.id == self.id for other in assets)
class Skill:
    """A labor skill: headcount available and working hours per day."""

    def __init__(self, id, name, available, hoursPerDay):
        self.id = id
        self.name = name
        self.available = available
        self.hoursPerDay = hoursPerDay
        # Total daily capacity in hours: headcount times hours-per-day.
        self.availableHours = available * hoursPerDay

    def copy(self):
        """Return an independent deep copy of this skill."""
        import copy as _copy
        return _copy.deepcopy(self)
class Manpower:
    """An allocation of a number of hours drawn from a given skill."""

    def __init__(self, id, skill, hours):
        self.id, self.skill, self.hours = id, skill, hours
|
UTF-8
|
Python
| false | false | 2,010 |
1,649,267,459,361 |
f9778cd9919ab77634866ff05793b894e9922e0c
|
83c589c344a50bf54bcd4ed9dab4b424c0c2acc5
|
/raspberry_pi/reptest/pullup.py
|
1fa82a19974eb05f7327702dd12cb415809ab7e0
|
[] |
no_license
|
RobotTelecomStrasbourg/rts
|
https://github.com/RobotTelecomStrasbourg/rts
|
311cd8887dc00d72ddc9e8efa980100e7589ae9d
|
fb06fb0fc5b59c06bee1fdf4fa598b7b3e29d6c5
|
refs/heads/master
| 2020-06-05T05:16:58.927462 | 2014-12-05T21:02:10 | 2014-12-05T21:02:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
sys.path.append("../lib");
import Getch
import libport
def pull():
    """Interactive GPIO test loop (Python 2).

    Reads single keypresses: 'o' drives the output pin (port 5) high,
    'f' drives it low, 'q' exits. After each key the input pin (port 3)
    is read and printed scaled by 3.3 — presumably a fraction of the
    3.3 V rail; confirm against libport.Port.read().
    """
    getch=Getch._Getch();
    pin=libport.Port(3, libport.Port.IN);
    pout=libport.Port(5, libport.Port.OUT);
    k='0';
    while (not k=='q'):
        k=getch();
        if (k=='o'):
            pout.write(1);
        elif (k=='f'):
            pout.write(0);
        # Report the measured voltage (message is runtime output, kept
        # in French; it means "the voltage is {0} V").
        v=pin.read();
        print "la tension est de {0} v :".format((v*3.3));
# Idiom fix: only run the interactive loop when executed as a script,
# not when this module is imported.
if __name__ == "__main__":
    pull()
|
UTF-8
|
Python
| false | false | 2,014 |
2,001,454,795,670 |
5c741ce89e2e40ced89ae27d795f394a0556d77c
|
96474361c3350296fabfeb238f58725ebea28f07
|
/robot/urls.py
|
c01f765a9b4dc3dc2d8b1bd4df8ea7ca051e74b9
|
[] |
no_license
|
orginfo/orginfo
|
https://github.com/orginfo/orginfo
|
f32b122b0500cc2c4a225b474c95dd4baf5bff17
|
ef9cf6f8af6f50ee4c761bfd365e9799988b55c7
|
refs/heads/master
| 2021-01-15T17:20:34.467473 | 2014-12-02T12:00:09 | 2014-12-02T12:47:57 | 25,464,769 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, url
from robot import views
# URLconf for the robot app (legacy Django patterns() style).
urlpatterns = patterns('',
    # Validation endpoint handled by robot.views.validate.
    url(r'^robot/validate/$', views.validate, name='validate'),
)
|
UTF-8
|
Python
| false | false | 2,014 |
10,213,432,247,561 |
6ecdc314e23b6d5b253e28b01c51469b091c4165
|
f1b35d4ad039fe33f69f19d2b4d4c27a06775f98
|
/Python/card.py
|
e0e7ab3eea808684e49b8ccc1a29d9059a6921bc
|
[
"GPL-3.0-only"
] |
non_permissive
|
cyncyncyn/set
|
https://github.com/cyncyncyn/set
|
5982c350b617ff8edbee43ed6f8df5ff10db079b
|
ac862cfa97e1eeaba9f619ec13e1e130bbd68944
|
refs/heads/master
| 2020-05-20T01:51:49.575401 | 2014-08-14T01:44:56 | 2014-08-14T01:44:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
####! /usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Giménez, Christian
#
# Author: Giménez, Christian
#
# card.py
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Card
I'm just a card :-)
'''
class Card(object):
    """A single Set card: a form, a color, a number and a fill."""

    # Legal attribute values (see Set (game) at Wikipedia, English).
    forms = {'oval', 'diamond', 'squiggle'}
    nums = {1, 2, 3}
    colors = {'green', 'red', 'purple'}
    fills = {'solid', 'striped', 'open'}

    def __init__(self, form, color, num, fill):
        self.__fill = fill
        self.__form = form
        self.__color = color
        self.__num = num

    def getNum(self):
        """Return this card's number (1, 2 or 3)."""
        return self.__num

    def getColor(self):
        """Return this card's color."""
        return self.__color

    def getForm(self):
        """Return this card's form (shape)."""
        return self.__form

    def getFill(self):
        """Return this card's fill pattern."""
        return self.__fill

    def evalSet(self, card2, card3):
        '''
        Same as evaluoSet.
        I return True when card2, card3 and me make a set.
        '''
        # Bug fix: the original returned the undefined names `true`/`false`,
        # which raised NameError at runtime. Return the boolean directly.
        # NOTE(review): evalProps/allDiferent are not defined in this module;
        # presumably they live elsewhere in the project — confirm.
        return bool(evalProps(card2, card3) or allDiferent(card2, card3))

    def __cmp__(self, other):
        # Python 2 comparison hook: 0 when all four attributes match,
        # 1 otherwise. (Ignored by Python 3, kept for compatibility.)
        if (other.getNum() == self.__num) and (other.getFill() == self.__fill) and (other.getForm() == self.__form) and (other.getColor() == self.__color):
            return 0
        else:
            return 1

    def __str__(self):
        # Bug fix: the original referenced the bare names `form`/`color`/`num`
        # (NameError) and concatenated an int; use the attributes and str().
        return self.__form + " " + self.__color + " " + str(self.__num)
|
UTF-8
|
Python
| false | false | 2,014 |
10,943,576,715,300 |
d0a20a5850b927099809fa5c212ee796f12bd997
|
22279c26630ef6c0c917d327ed351ac9c79d7568
|
/server_ftpd.py
|
cce97f693c431717f50a0a1d867401e50ad57729
|
[] |
no_license
|
aj9ms/OneDir-Public
|
https://github.com/aj9ms/OneDir-Public
|
1d696df1a776759489005323f5a32feada73a874
|
232f4e039794aaf83b0181ce48cc3ec834fa5ca9
|
refs/heads/master
| 2016-08-12T07:16:41.447266 | 2014-04-28T20:45:37 | 2014-04-28T20:45:37 | 36,890,562 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Ben Edgar; Alice Ju; Ann Duong
# Team 17
import os
import socket
import smtplib
from shutil import rmtree
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer, ThreadedFTPServer, MultiprocessFTPServer
#Handles all commands from the client side
class Handler(FTPHandler):
    def ftp_STAT(self, line):
        """Custom command channel: the client tunnels admin commands through
        FTP STAT as 'command:arg1:arg2[:arg3]'.

        NOTE(review): 'pass.dat' stores 'user:password[:recovery]' lines in
        plain text, and the 214/215/216 reply codes are repurposed here —
        confirm both are intentional. The bare 'except:' clauses below
        swallow all errors, including KeyboardInterrupt.
        """
        a = line.split(":")
        # Keep only the last path component of the command token.
        a[0] = a[0][a[0].rfind('/')+1:]
        for i in range(len(a)):
            a[i] = a[i].strip()
        #Creates a user on the server
        if a[0].startswith('createuser'):
            with open('pass.dat', 'r') as f:
                for line in f:
                    line2 = line.split(':')
                    if a[1] == line2[0]:
                        self.respond('214 user already exists')
                        return
            with open('pass.dat', 'a') as f:
                f.write(a[1] + ":" + a[2] + ':' + a[3] + '\n')
            try:
                os.mkdir(a[1])
            except:
                pass
            self.authorizer.add_user(a[1], a[2], os.path.join(os.getcwd(), a[1]), perm='elradfmwM')
        #Change a local user's password on the server
        elif a[0].startswith('changepassword'):
            temp = ""
            b = False
            # Tolerate a missing old-password field.
            if len(a) == 3:
                a.append(":")
            with open('pass.dat', 'r') as f:
                for line in f:
                    line2 = line.split(':')
                    if a[1] == line2[0] and a[3] == line2[1]:
                        b = True
                    temp = temp + line
            if not b:
                self.respond('215 user does not exist')
                return
            # Rewrite the whole password file, replacing this user's line.
            with open('pass.dat', 'w') as f:
                temp2 = temp.split('\n')
                for line in temp2:
                    v = line.split(':')
                    if len(v) == 2:
                        if line.split(':')[0] == a[1]:
                            f.write(a[1] + ':' + a[2] + '\n')
                        else:
                            f.write(line + '\n')
                    else:
                        if line.split(':')[0] == a[1]:
                            f.write(a[1] + ':' + a[2] + ':' + v[2] + '\n')
                        else:
                            f.write(line + '\n')
            self.authorizer.remove_user(a[1])
            self.authorizer.add_user(a[1], a[2], os.path.join(os.getcwd(), a[1]), perm='elradfmwM')
        #Remove a user from the server and the DummyAuthorizer table
        elif a[0].startswith('removeuser'):
            # syntax is removeuser:username:True/False
            temp = ""
            with open('pass.dat', 'r') as f:
                for line in f:
                    temp = temp + line
            with open('pass.dat', 'w') as f:
                temp2 = temp.split('\n')
                for line in temp2:
                    if line.split(':')[0] == a[1]:
                        pass
                    else:
                        f.write(line + '\n')
            self.authorizer.remove_user(a[1])
            # Third field 'True' also deletes the user's files on disk.
            if a[2] == 'True':
                rmtree(a[1])
        #Admin functionality: prints out the information about all users
        #including number of directories and files and total file size
        elif a[0].startswith('userinfo'):
            with open('root/userinfo.txt', 'w') as f:
                totSize = 0
                for user in self.authorizer.user_table.keys():
                    if user == 'root':
                        continue
                    size = 0
                    numFiles = 0
                    # Starts at -1 so the OneDir root itself is not counted.
                    numDirs = -1
                    info = os.walk(os.path.join(user, 'OneDir'))
                    for tup in info:
                        numDirs = numDirs + 1
                        for fil in tup[2]:
                            numFiles = numFiles + 1
                            size = size + os.path.getsize(os.path.join(tup[0], fil))
                    f.write(user + '\n')
                    if numDirs == -1: numDirs = 0
                    f.write('Number of Directories: ' + str(numDirs) + '\n')
                    f.write('Number of Files: ' + str(numFiles) + '\n')
                    f.write('Total File Size (bytes): ' + str(size) + '\n')
                    totSize = totSize + size
                f.write('Total Storage Used (bytes): ' + str(totSize) + '\n')
        #Admin functionality: prints out all users saved on the server
        elif a[0].startswith('users'):
            with open('root/users.txt', 'w') as f:
                f.write('Users: \n')
                for user in self.authorizer.user_table.keys():
                    if user == 'root':
                        continue
                    f.write(user + '\n')
        #Allows changepassword when local user forgets password
        elif a[0].startswith('forgot'):
            b = False
            temp = ""
            with open('pass.dat', 'r') as f:
                for line in f:
                    if a[1] == line.split(':')[0].strip():
                        a2 = line.split(':')
                        # Only accept if the stored recovery field matches.
                        if len(a2) > 2:
                            if a2[2].strip() == a[3]:
                                b = True
                    else:
                        temp = temp + line
            if not b:
                self.respond('216 something went wrong')
                return
            else:
                with open('pass.dat', 'w') as f:
                    f.write(a[1] + ':' + a[2] + ':' + a[3] + '\n')
                    temp2 = temp.split('\n')
                    for line in temp2:
                        f.write(line + '\n')
                self.authorizer.remove_user(a[1])
                self.authorizer.add_user(a[1], a[2], os.path.join(os.getcwd(), a[1]), perm='elradfmwM')
        #Admin functionality: shows a "traffic report log" for all users based on watchdog events
        elif a[0].startswith('seelogs'):
            with open('root/userlogs.txt', 'w') as f:
                for user in self.authorizer.user_table.keys():
                    if os.path.isdir(os.path.join(user, 'OneDir')):
                        with open(os.path.join(os.path.join(user, 'OneDir'), '.user.log'), 'r') as f2:
                            f.write(user + ':\n')
                            for line in f2:
                                f.write(line)
        self.respond('213 Done')
        return
#Runs the server
def main():
    """Configure the authorizer from pass.dat and run the FTP server.

    NOTE(review): the root password below is a hard-coded hex digest and
    pass.dat stores credentials in plain text — confirm this is acceptable.
    """
    # Instantiate a dummy authorizer for managing 'virtual' users
    authorizer = DummyAuthorizer()
    # Creates a pass.dat file if it does not exist
    # Creates a root/admin user's folder and gives it full r/w permissions
    try:
        for line in open('pass.dat'):
            info = line.split(':')
            if len(info) < 2:
                continue
            try:
                os.mkdir(os.path.join(os.getcwd(), info[0]))
            except:
                pass
            authorizer.add_user(info[0], info[1].strip(), os.path.join(os.getcwd(), info[0]), perm='elradfmwM')
        try:
            os.mkdir(os.path.join(os.getcwd(), 'root'))
        except:
            pass
    except:
        # pass.dat missing (or unreadable): create an empty one.
        f = open('pass.dat', 'w')
        f.close()
    authorizer.add_user('root', 'd63dc919e201d7bc4c825630d2cf25fdc93d4b2f0d46706d29038d01', os.path.join(os.getcwd()), perm='elradfmwM')
    # Instantiate FTP handler class
    handler = Handler
    handler.authorizer = authorizer
    # Define a customized banner (string returned when client connects)
    handler.banner = "OneDir Ready"
    # Instantiate FTP server class and listen on 0.0.0.0:2121
    # UDP "connect" trick: no packet is sent; it only makes the OS pick the
    # outbound interface so we can read this machine's LAN IP.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.connect(("gmail.com",80))
    ip = str(s.getsockname()[0])
    s.close()
    address = (ip, 2121)
    server = FTPServer(address, handler)
    # set a limit for connections
    server.max_cons = 256
    server.max_cons_per_ip = 5
    # start ftp server
    server.serve_forever()

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
5,042,291,616,618 |
1c419e91a7b5ad4d6760bd09ccb5c64fa73a85bf
|
dba00af749c69e79c6a33e569cf9650804ddee0e
|
/scripts/foodCrawler.py
|
b9691e6cdf4fa49bf1b8bceea1e9e6e8d8d12f01
|
[] |
no_license
|
cfoch/paleopot
|
https://github.com/cfoch/paleopot
|
334c781144c04344035a82167fe28745396dbb8b
|
11a7b36f019e3d5a3c8a04985a6f1b09bf43295f
|
refs/heads/master
| 2021-01-18T16:35:36.186368 | 2014-10-05T02:46:24 | 2014-10-05T02:46:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from urllib import urlopen
from bs4 import BeautifulSoup
MIX = -1
LEAVES = 0
LEGUMES = 1
FRUITS = 2
class FoodCrawler():
    """Scrapes food listings from Wikipedia-style HTML pages."""

    def __init__(self):
        # Base URL used to absolutize relative links found in pages.
        self.main_url = 'http://en.wikipedia.org'

    def listFruits(self, url):
        """Parse the fruits page (not implemented yet)."""
        soup = self.makeSoup(url)
        tags = soup.findAll('div')
        # TODO: Complete me!

    def listLeafyVeggies(self, url):
        """Extract food dicts from a leafy-vegetables table.

        Every third <td> (starting at index 1) holds a name; the cell
        before it holds the link anchor.
        """
        soup = self.makeSoup(url)
        cells = soup.findAll('td')
        veggies = []
        for idx in range(1, len(cells), 3):
            name = cells[idx].string
            url_tag = str(cells[idx - 1])
            link = ''
            if url_tag is not None:
                link_soup = BeautifulSoup(url_tag)
                anchors = link_soup.findAll('a', href=True)
                link = self.main_url + anchors[0]['href']
            if name is not None:
                veggies.append(self.makeFood(name, url=link, category=MIX))
        return veggies

    def makeFood(self, name, url='', description='', category=''):
        """Build a plain dict describing one food item (name lower-cased)."""
        return {
            "url": url,
            "name": name.lower(),
            "description": description,
            "category": category
        }

    def makeSoup(self, url):
        """Fetch *url* and return its parsed BeautifulSoup tree."""
        page = urlopen(url)
        contents = page.read()
        return BeautifulSoup(contents)
# Ad-hoc smoke test (Python 2): parse a local HTML snapshot and dump it.
crawler = FoodCrawler()
leafyVeggiesURL = "htmls/leafyVegetables.html"
frutisURL = "htmls/fruits.html"  # NOTE(review): unused here (and name is typo'd)
print crawler.listLeafyVeggies(leafyVeggiesURL)
|
UTF-8
|
Python
| false | false | 2,014 |
2,628,519,994,415 |
6895f12b0d4ee2cb7f44ab526359bca5cd5c47d5
|
c29892f808bd0f2bb19efea2a73f4ac42e9ee9f0
|
/src/tkacz/io/mongo.py
|
8351eab9d01ae314885e408e6afcfd470f08bb34
|
[
"AGPL-3.0-or-later",
"AGPL-3.0-only"
] |
non_permissive
|
thblt/tkacz-deprecated-first-attempt
|
https://github.com/thblt/tkacz-deprecated-first-attempt
|
6e55199c9675672b5997e448ea8ca8c2105404a1
|
c4a793074504aca74485baa75dd0d0ad41aae060
|
refs/heads/master
| 2015-08-03T18:55:16.642455 | 2013-11-01T19:22:13 | 2013-11-01T19:22:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class MongoDB( object ):
    """Abstraction layer over a remote MongoDB server or a local mongod."""

    def __init__( self, server=None, dbpath=None, dbname="tkacz", lazy=True ):
        '''
        Initializes a new MongoDB abstraction layer.

        NOTE(review): stub — the body is only this docstring, so no
        connection or mongod-spawning logic is implemented yet.

        :param server: the URI to the MongoDB server. If this parameter is provided, dbpath
        and dbname will be ignored.
        :type server: URI string.
        :param dbpath: The path the databases files. If this parameter is provided, a mongod
        instance will be started.
        :type dbpath: String
        :param dbname: The name of the database to create.
        :type dbname: String
        :param lazy: Whether to enable the lazy loader. Default is true.
        :type lazy: bool
        '''
|
UTF-8
|
Python
| false | false | 2,013 |
1,425,929,182,792 |
f76673719c1bb74c4ef1fc18993ed4d2f5493a0e
|
543b030085e8339da72050b19f5dcb66bc7fb35f
|
/challenges/sum_of_integers_from_file/sum_of_integers_from_file.py
|
46785c9ea13cc18872a5b04bb52e387c08a2568a
|
[] |
no_license
|
eikonomega/codeeval
|
https://github.com/eikonomega/codeeval
|
233f873f6670468ff9072788f48427848033f335
|
520589b593552c9022d867a6cf06c4e3a7f3192f
|
refs/heads/master
| 2020-04-29T08:21:39.036620 | 2013-06-04T17:25:40 | 2013-06-04T17:25:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Solution Accepted
Mon 29 May 2013
"""
import sys
# Sum the integer on each line of input.txt and print the total.
# Fixes: the file handle is now closed even if int() raises (with-block),
# and the redundant rstrip is dropped — int() tolerates surrounding
# whitespace, including the trailing newline. print(x) with a single
# argument behaves identically on Python 2 and 3.
cumulative_value_of_numbers = 0
with open('input.txt', 'r') as test_cases:
    for test in test_cases:
        # Add value of current line to cumulative_value_of_numbers
        cumulative_value_of_numbers += int(test)
print(cumulative_value_of_numbers)
|
UTF-8
|
Python
| false | false | 2,013 |
5,299,989,679,682 |
d390ce18745e318244c7508267c628e086027998
|
44c661f9456658acdda00d1742197ec331cf7802
|
/pylib-mazer/scripts/render_all_the_3x3s.py
|
ce6dfe28760f5f1f542998c6f0be3a39899185a1
|
[] |
no_license
|
robinhouston/maze-experiments
|
https://github.com/robinhouston/maze-experiments
|
aa777ae0f5ef50702bb788f504ff10ea09137fc1
|
1006be353c5185b79d84d0ee7b177948a8449f3f
|
refs/heads/master
| 2016-09-06T01:52:45.096937 | 2013-07-18T08:42:50 | 2013-07-18T08:42:50 | 1,601,199 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import cairo, mazer
from mazer import (N, E, S, W, LEFT, AHEAD, RIGHT)
PAGE_SIZE = 6 * 72  # 6 inches in PostScript points (72 pt/inch)
FILL_COLOUR = (.86, .2, .2)  # RGB triple; NOTE(review): unused in the visible code
PAGE_MARGIN = 36.0  # half-inch page margin, in points
BLOCK_MARGIN = 1.5  # gap around each maze tile, in points
BLOCK_SIZE = (PAGE_SIZE - 2 * PAGE_MARGIN) / 16  # tile size for the 16x16 grid
def maze(*arms):
m = mazer.Maze(3,3)
for direction, arm in zip([N,E,S,W], arms):
if arm:
m.move(1, 1, direction)
m.carve(AHEAD)
arm_d = dict(zip([LEFT, RIGHT], arm))
for d in m.branch([LEFT, RIGHT]):
for i in range(arm_d[d]):
m.carve(AHEAD) or m.carve(d)
return m
def render_mazes(renderer):
def draw_maze(y, x, arms):
m = maze(*arms)
renderer.render(m,
left = PAGE_MARGIN + x * BLOCK_SIZE + BLOCK_MARGIN,
top = PAGE_MARGIN + y * BLOCK_SIZE + BLOCK_MARGIN,
width = BLOCK_SIZE - 2 * BLOCK_MARGIN,
height = BLOCK_SIZE - 2 * BLOCK_MARGIN,
)
for i in range(8):
draw_maze(15, 4+i, ((i,7-i), (), (), ()))
draw_maze(4+i, 0, ((), (7-i,i), (), ()))
draw_maze(0, 4+i, ((), (), (7-i,i), ()))
draw_maze(4+i, 15, ((), (), (), (i,7-i)))
for i in range(4):
for j in (0,1):
draw_maze(4*(j+1) + i, 3, ((i, 3-j), (), (j, 3-i), ()))
draw_maze(3, 11 - 4*j - i, ((), (i, 3-j), (), (j, 3-i)))
for j in (2,3):
draw_maze(4*(j-1) + i, 12, ((i, 3-j), (), (j, 3-i), ()))
draw_maze(12, 19 - 4*j - i, ((), (i, 3-j), (), (j, 3-i)))
def corner_pos(i, j):
if j < 4:
return 4+i, 8+j
else:
return 6+i, 6+j
for i in range(2):
for j in range(6):
row, col = corner_pos(i, j)
draw_maze( row, col, ((j,i), (1-i,5-j), (), ()) )
draw_maze( 15-row, col, ((), (5-j,1-i), (i,j), ()) )
draw_maze( row, 15-col, ((i,j), (), (), (5-j,1-i)) )
draw_maze( 15-row, 15-col, ((), (), (j,i), (1-i,5-j)) )
for i in range(2):
for j in range(2):
for k in range(4):
draw_maze(1+i, 4+4*j+k, ((i,j),(1-j,k),(),(3-k,1-i)))
draw_maze(4+4*j+k, 14-i, ((3-k,1-i),(i,j),(1-j,k),()))
draw_maze(14-i, 4+4*j+k, ((),(k,1-j),(j,i),(1-i,3-k)))
draw_maze(4 + 4*j + k, 1+i, ((1-i,3-k),(),(k,1-j),(j,i)))
for i in range(4):
for j in range(4):
draw_maze(6+i, 6+j, (
((i&2)/2, i%2),
(1 - i%2, j%2),
(1 - j%2, (j&2)/2),
(1 - (j&2)/2, 1 - (i&2)/2)
))
def main(output_filename):
surface = cairo.PDFSurface(output_filename, PAGE_SIZE, PAGE_SIZE)
c = cairo.Context(surface)
renderer = mazer.render.CairoRenderer(c)
render_mazes(renderer)
c.show_page()
surface.finish()
if __name__ == "__main__":
import sys
main(sys.argv[1])
|
UTF-8
|
Python
| false | false | 2,013 |
6,674,379,194,184 |
8bf8f6d959503f218c1f5df4b21eae52f3aec515
|
d5982c74eec896e2409ffa4c92674601ab5ca34e
|
/labs/dice_lab/dice/dice.py
|
5e033ab5d3ae6b5f0b1566e78562c3eb172c0ddc
|
[
"CC-BY-SA-3.0"
] |
non_permissive
|
jmcvetta/curriculum-python-essentials
|
https://github.com/jmcvetta/curriculum-python-essentials
|
15c678ae332bf223d2518d80ec962ece07bbbf91
|
fc73731b5c9f61097295f1d4b19176584a0ba842
|
refs/heads/master
| 2016-09-06T02:32:01.524192 | 2013-05-17T21:11:51 | 2013-05-17T21:11:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This is Free Software, released under the terms of the X11 License.
# See http://directory.fsf.org/wiki/License:X11 for details.
from die import Die
class Die6(Die):
'''A six-sided die'''
pass
class Die20(Die):
'''A twenty-sided die'''
pass
|
UTF-8
|
Python
| false | false | 2,013 |
11,098,195,505,176 |
25277253f383b32c239aa251795f2acfd9956a2c
|
fcfa99d730345f697bb80359a2a20a9337e59eb1
|
/efficient_metrics/Node.py
|
0dc1c66292b8b4704818ba143a4ff994d3e0b97a
|
[
"GPL-3.0-only",
"GPL-3.0-or-later",
"GPL-1.0-or-later"
] |
non_permissive
|
mlocs/lineage-trees-clustering
|
https://github.com/mlocs/lineage-trees-clustering
|
86d726ef83e571c6ea992f6898af6e0bac610490
|
9e1a0892778e20b33cfa98639f96cf4cd4f704c5
|
refs/heads/master
| 2021-01-23T15:03:07.136485 | 2014-12-19T15:40:17 | 2014-12-19T15:40:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This file is part of the Lineage Tree Clustering project.
# Copyright (C) 2014
# Author: Valeriy Khakhutskyy
#
# Lineage Tree Clustering is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lineage Tree Clustering is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Lineage Tree Clustering project files. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import numpy.random as rnd
from time import time
from types import *
def compare(a, b):
return -cmp(a.hashNum, b.hashNum)
class Node:
label = None
children = None
hashNum = None
statistics = None
MAX_RECURSION = 100
def copy(self):
newNode = Node(self.label)
newNode.hashNum = self.hashNum
for child in self.getChildren():
newNode.addChild(child.copy())
return newNode
def __getEmtpyStatistics(self):
statistics = {'leaves':{}, 'childs':{}}
return statistics
def __init__(self, label):
self.label = str(label)
self.children = []
self.statistics = self.__getEmtpyStatistics()
def addChild(self, node):
self.children.append(node)
return node
def getChildren(self):
#if self.children == None: return []
# does it make any difference???
#return sorted(self.children,compare)
return self.children
def getLabel(self):
return self.label
def getVertices(self):
vertices = [self]
for c in self.children:
vertices += c.getVertices()
return sorted(vertices, compare)
def getSize(self):
size = 1
for c in self.children:
size += c.getSize()
return size
def loadChildren(self, string):
lastChild = None
i=0
# for j in xrange(len(string)):
while(len(string)>0):
if string[i] == "(":
string = lastChild.loadChildren(string[1:])
elif string[i] == ")":
return string
else:
lastChild = self.addChild(Node(string[i]))
string = string[1:]
return string
## new version
def generateRandom(self, alphabet, deg, pvals, rec_counter=0):
if rec_counter > self.MAX_RECURSION:
return self
if rnd.rand() > pvals[int(self.label)-1, 0]:
results = rnd.multinomial(deg, pvals[int(self.label)-1, 1: ]/sum(pvals[int(self.label)-1, 1: ]))
childLabelIdx = np.nonzero(results)[0]
for idx in childLabelIdx:
node = Node(alphabet[int(idx)])
self.addChild(node.generateRandom(alphabet, deg, pvals, rec_counter+1))
return self
def toString(self):
string = str(self.label)
if len(self.children) > 0:
string += "("
for c in self.children:
string += c.toString()
string += ")"
return string
def visualize(self, node, graph):
import pydot
for c in self.children:
childNode = pydot.Node(str(time())+ str(rnd.random()), label = str(c.getLabel()))
graph.add_node(childNode)
graph.add_edge(pydot.Edge(node, childNode))
c.visualize(childNode, graph)
def initHashNum(self, i=0):
self.hashNum = i+1
if not self.children == None:
for c in self.children:
i += 1
i = c.initHashNum(i)
return i
def getStatistics(self):
self.statistics = self.__getEmtpyStatistics()
statistics = self.__getEmtpyStatistics()
if len(self.children) == 0:
if not (self.getLabel() in self.statistics['leaves'].keys()):
self.statistics['leaves'][self.getLabel()] = 0
self.statistics['leaves'][self.getLabel()] += 1
for child in self.children:
stat = child.getStatistics()
statistics = self.sumDicts(statistics, stat)
transition = self.getLabel()+child.getLabel()
if not (transition in self.statistics['childs'].keys()):
self.statistics['childs'][transition] = 0
self.statistics['childs'][transition] += 1
self.statistics = self.sumDicts(self.statistics, statistics)
return self.statistics
def sumDicts(self, dict1, dict2):
for key in dict1.keys():
if type(dict1[key]) == DictType:
dict1[key] = self.sumDicts(dict1[key], dict2[key])
else:
if dict2.has_key(key):
dict1[key] += dict2[key]
for key in dict2.keys():
if not dict1.has_key(key):
dict1[key] = dict2[key]
return dict1
def getHashNum(self):
return self.hashNum
|
UTF-8
|
Python
| false | false | 2,014 |
14,164,802,163,329 |
55581f8029db68a8e3bef768343e2eb5e9cc09c0
|
7f62b2459faeba72e3e4def81b7b04a06800a124
|
/apps/forums/tests/test_notifications.py
|
bc6c5922c55898b72bf5b78a1f69a62b62406d14
|
[] |
no_license
|
MechanisM/kitsune
|
https://github.com/MechanisM/kitsune
|
3038e3a85217552ce159cd89256d643ee07db85c
|
a56e857b35fb8398c1ac977c27451b6d5752904e
|
refs/heads/master
| 2021-01-17T06:11:22.780008 | 2011-01-11T00:42:33 | 2011-01-12T02:04:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
import mock
from nose.tools import eq_
from forums.tasks import build_reply_notification, build_thread_notification
import notifications.tasks
from . import ForumTestCase
from forums.models import Post, Thread, Forum
from sumo.tests import post
# Some of these contain a locale prefix on included links, while others don't.
# This depends on whether the tests use them inside or outside the scope of a
# request. See the long explanation in questions.tests.test_notifications.
EMAIL_CONTENT = (
u"""
Reply to thread: Sticky Thread
User admin has replied to a thread you're watching. Here
is their reply:
========
yet another post
========
To view this post on the site, click the following link, or
paste it into your browser's location bar:
https://testserver/forums/test-forum/2#post-4
""",
u"""
Reply to thread: Sticky Thread
User jsocol has replied to a thread you're watching. Here
is their reply:
========
a post
========
To view this post on the site, click the following link, or
paste it into your browser's location bar:
https://testserver/en-US/forums/test-forum/2#post-%s
""",
u"""
New thread: Sticky Thread
User jsocol has posted a new thread in a forum you're watching.
Here is the thread:
========
This is a sticky thread
========
To view this post on the site, click the following link, or
paste it into your browser's location bar:
https://testserver/forums/test-forum/2
""",
u"""
New thread: Awesome Thread
User jsocol has posted a new thread in a forum you're watching.
Here is the thread:
========
With awesome content!
========
To view this post on the site, click the following link, or
paste it into your browser's location bar:
https://testserver/en-US/forums/test-forum/%s
""",)
class NotificationTestCase(ForumTestCase):
"""Test that notifications get sent."""
def setUp(self):
super(NotificationTestCase, self).setUp()
self.thread_ct = ContentType.objects.get_for_model(Thread).pk
self.forum_ct = ContentType.objects.get_for_model(Forum).pk
@mock.patch_object(notifications.tasks.send_notification, 'delay')
@mock.patch_object(Site.objects, 'get_current')
def test_reply_notification(self, get_current, delay):
get_current.return_value.domain = 'testserver'
p = Post.objects.get(pk=4)
build_reply_notification(p)
# delay() is called twice. Verify the args.
eq_(((self.thread_ct, p.thread.id,
u'Reply to: Sticky Thread', EMAIL_CONTENT[0],
(u'user1@nowhere',), 'reply'), {}), delay.call_args_list[0])
eq_(((self.forum_ct, p.thread.forum.id,
u'Reply to: Sticky Thread', EMAIL_CONTENT[0],
(u'user1@nowhere',), 'post'), {}), delay.call_args_list[1])
@mock.patch_object(notifications.tasks.send_notification, 'delay')
@mock.patch_object(Site.objects, 'get_current')
def test_notification_on_reply(self, get_current, delay):
get_current.return_value.domain = 'testserver'
self.client.login(username='jsocol', password='testpass')
t = Thread.objects.get(pk=2)
f = t.forum
post(self.client, 'forums.reply', {'content': 'a post'},
args=[f.slug, t.id])
t = Thread.objects.get(pk=2)
p = t.last_post
# delay() is called twice. Verify the args.
eq_(((self.thread_ct, t.pk,
u'Reply to: Sticky Thread', EMAIL_CONTENT[1] % p.pk,
(u'user118533@nowhere',), 'reply'), {}), delay.call_args_list[0])
eq_(((self.forum_ct, t.forum.id,
u'Reply to: Sticky Thread', EMAIL_CONTENT[1] % p.pk,
(u'user118533@nowhere',), 'post'), {}), delay.call_args_list[1])
@mock.patch_object(notifications.tasks.send_notification, 'delay')
@mock.patch_object(Site.objects, 'get_current')
def test_post_notification(self, get_current, delay):
get_current.return_value.domain = 'testserver'
post = Post.objects.get(pk=3)
build_thread_notification(post)
delay.assert_called_with(
self.forum_ct, post.thread.forum.id,
u'New thread in Test forum forum: Sticky Thread',
EMAIL_CONTENT[2], (u'user118533@nowhere',), 'post')
@mock.patch_object(notifications.tasks.send_notification, 'delay')
@mock.patch_object(Site.objects, 'get_current')
def test_notification_on_thread_post(self, get_current, delay):
get_current.return_value.domain = 'testserver'
f = Forum.objects.filter()[0]
self.client.login(username='jsocol', password='testpass')
post(self.client, 'forums.new_thread',
{'title': 'Awesome Thread', 'content': 'With awesome content!'},
args=[f.slug])
f = Forum.objects.get(pk=f.pk)
t = f.last_post.thread
delay.assert_called_with(
self.forum_ct, f.id,
u'New thread in Test forum forum: Awesome Thread',
EMAIL_CONTENT[3] % t.pk, (u'user118533@nowhere',), 'post')
|
UTF-8
|
Python
| false | false | 2,011 |
19,439,021,985,165 |
2e22ef44cba13f5e1fe82c226daee37966498d0e
|
6c80074cfd6814cdae62ce79d7b889b5509e3e22
|
/Exercise1/guess.py
|
5638b9b0090713e19740dbdaa5555e3e32ee0464
|
[] |
no_license
|
evac/Hackbright
|
https://github.com/evac/Hackbright
|
1d9a9eb437f3f2d858ddc6cef0f6b54f7a65fd5f
|
927de947521dba1dfce41fa879136f77fe0778e6
|
refs/heads/master
| 2019-06-16T08:34:41.887357 | 2013-07-08T22:25:12 | 2013-07-08T22:25:12 | 10,775,813 | 1 | 0 | null | false | 2013-06-27T02:59:17 | 2013-06-18T23:35:38 | 2013-06-27T02:59:17 | 2013-06-27T02:55:56 | 1,680 | null | 0 | 0 |
Python
| null | null |
import random
name = raw_input("Hello, What's your name? ")
num = random.randint(1,100)
counter = 0
#print num
print ("%s, I'm thinking of a number between and 100. Try to guess my number."
%name)
while True:
counter += 1
guess = int(raw_input("Your guess?"))
if num < guess:
print "Your guess is too high, try again."
elif num > guess:
print "Your guess is too low, try again."
elif num == guess:
print ("Well done. %s, You found my number in %d tries"
%(name, counter))
break
|
UTF-8
|
Python
| false | false | 2,013 |
15,410,342,660,914 |
f98badc09d18ef5af03e1ab8640b9cb347da2872
|
119f6e4ba47fba4aac0389886d7b53495bc339e6
|
/ibi/boltzmann_invert.py
|
4e6a6bc1dffb0c4bfba916ef9e9e0bc229986ee3
|
[] |
no_license
|
jayoswald/ibi-coarse-graining
|
https://github.com/jayoswald/ibi-coarse-graining
|
eafa6ab8d6afc85321ac22f379fd9bebb4253e22
|
26214caf3b5cca1af645fa98b99d0c3244812b51
|
refs/heads/master
| 2021-01-22T09:10:29.848851 | 2012-06-05T21:47:10 | 2012-06-05T21:47:10 | 37,222,293 | 12 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from numpy import *
from scipy import optimize
import distribution
from interpolator import Interpolator
from matplotlib import pyplot as py
# Boltzmann constant in kcal/mol/K
kB = 0.0019872041
# Derivative of a 9/6 Lennard Jones potential.
# as in http://lammps.sandia.gov/doc/pair_lj96.html
# v[0]: Sigma, v[1]: Epsilon
def lj96_force(v,r):
f = v[1]*(36.0*v[0]**9/r**10 - 24.0*v[0]**6/r**7)
for i in range(2,len(v),3):
f += v[i+2]*exp(-v[i+1]*v[i+1]*(r-v[i])**2)
return f
"""
Computes potential energy and force vs. distance from the radial density function.
"""
class PairTable:
# Initializes the pair table from all-atom data.
def __init__(self, md_temp, md_rdf):
# Sets the temperature of the all-atom simulation.
self.temperature = md_temp
# Sets the smallest distance in the pair table.
# If LAMMPS simulations crash with pair cutoff error, this needs to be smaller.
self.min_distance = 0.00001
self.npts = 1000
# Pair force correct per unit pressure.
self.pfactor = 1.0/5000.0
self.last_p_error = 0.0
# Computes the average distribution functions for the all-atom case.
self.all_atom_rdf = distribution.average_rdf(distribution.md_rdf_files())
# Initializes the pair tables as empty lists.
self.distance = []
self.force = []
self.energy = []
# Computes the initial potential with the all-atom data.
self.allatomcompute(self.all_atom_rdf)
# Computes the pair table and appends it to the current step.
def allatomcompute(self,rdf):
cut_beg = 13.0
cut_end = 15.0
# Removes values with a density of zero, they will get added back later.
rdf = rdf[nonzero(rdf[:,1])[0], :]
# Finds the first index where the density is greater than 0.25*rho.
i0 = nonzero(rdf[:,1] > 0.25)[0][0]
# Get distance (r) and energy (e).
r = rdf[:,0]
e = -kB*self.temperature*log(rdf[:,1])
dr_final = (cut_end-self.min_distance)/self.npts
# Figure out what range we need
# Compute derivative from splines.
rr = arange(cut_end, r[i0], -dr_final)[::-1]
interp = Interpolator(r,e)
ff = [-interp.derivative(ri) for ri in rr]
ff, v = smooth_force(rr, array(ff), len(self.force))
# Add more values to short distance to make sure that
# LAMMPS run won't fail when pair distance < table min.
rpad = arange(self.min_distance, rr[0]-0.5*dr_final, dr_final)
fpad = lj96_force(v, rpad) + ff[0] - lj96_force(v, rr[0])
rr = concatenate((rpad, rr))
ff = concatenate((fpad, ff))
# Now we cut off forces smoothly at any point past max_attraction.
ff[rr>cut_beg] *= 0.5*(1.0+cos(pi*(rr[rr>cut_beg]-cut_beg)/(cut_end-cut_beg)))
ff[rr>cut_end] = 0.0
# Compute energy by integrating forces.
# Integrating backwards reduces noise.
ee = -simpson_integrate(rr[::-1], ff[::-1])[::-1]
ee -= ee[-1]
self.distance.append(rr)
self.force.append(ff)
self.energy.append(ee)
def compute(self, rdf):
cut_beg = 13.0
cut_end = 15.0
# Removes values with a density of zero, they will get added back later.
rdf = rdf[nonzero(rdf[:,1])[0], :]
# Finds the first index where the density is greater than 0.25*rho.
i0 = nonzero(rdf[:,1] > 0.25)[0][0]
# Get distance (r) and energy (e).
r = rdf[:,0]
e = -kB*self.temperature*log(rdf[:,1])
dr_final = (cut_end-self.min_distance)/self.npts
# Figure out what range we need
# Compute derivative from splines.
rr = arange(cut_end, r[i0], -dr_final)[::-1]
interp = Interpolator(r,e)
ff = [-interp.derivative(ri) for ri in rr]
v0 = [5.0, 0.01]
lj96_err = lambda v: ff - lj96_force(v,rr)
v = optimize.leastsq(lj96_err, v0)[0]
# Add more values to short distance to make sure that
# LAMMPS run won't fail when pair distance < table min.
rpad = arange(self.min_distance, rr[0]-0.5*dr_final, dr_final)
fpad = lj96_force(v, rpad) + ff[0] - lj96_force(v, rr[0])
rr = concatenate((rpad, rr))
ff = concatenate((fpad, ff))
# Now we cut off forces smoothly at any point past max_attraction.
ff[rr>cut_beg] *= 0.5*(1.0+cos(pi*(rr[rr>cut_beg]-cut_beg)/(cut_end-cut_beg)))
ff[rr>cut_end] = 0.0
# Compute energy by integrating forces.
# Integrating backwards reduces noise.
ee = -simpson_integrate(rr[::-1], ff[::-1])[::-1]
ee -= ee[-1]
self.distance.append(rr)
self.force.append(ff)
self.energy.append(ee)
# Writes the pair table data for iteration, it.
def write_lammps(self, path, key, it):
r = self.distance[it]
f = self.force[it]
e = self.energy[it]
fid = open(path, 'w')
fid.write(key+'\n')
fid.write('N %d R %f %f\n\n' %(len(r), min(r), max(r)))
for i in range(len(r)):
fid.write('%d %f %f %f\n' %(i, r[i], e[i], f[i]))
# Plots the forces at an iteration.
def plot_force(self, name, it=-1):
r = self.distance[it]
f = self.force[it]
# Only use first iteration to compute force
# so all plots share the same range.
fmin = 3.0*min(self.force[0])
fmax = -4.0*fmin
py.clf()
py.hold(1)
py.plot(r, f, 'b')
py.axis((min(r), max(r), fmin, fmax))
py.xlabel('Pair distance (A)')
py.ylabel('Force (kcal/mol/Angstrom)')
py.savefig(name)
# Plots the forces at an iteration.
def plot_energy(self, it=-1):
r = self.distance[it]
e = self.energy[it]
py.figure()
py.plot(r, e, linewidth=2, color='b')
py.axis((min(r), max(r), min(e)-0.2, min(e) + 1.0))
py.xlabel('Pair distance (A)')
py.ylabel('Energy (kcal/mol)')
# Computes the corrections to the pair table.
def correction(self, it, pair):
# Compute force table based on current iteration.
rdf = distribution.iteration_rdf_files(it, pair)
# Appends new force, energy, distance, table.
self.compute(distribution.average_rdf(rdf))
# Computes the correction to the force.
df = self.force[-1] - self.force[0]
self.force[-1] = self.force[-2] - df
# Need to reintegrate energy
rr = self.distance[-1]
ff = self.force[-1]
ff = smooth_force(rr, ff, len(self.force))[0]
self.energy[-1] = -simpson_integrate(rr[::-1], ff[::-1])[::-1]
self.energy[-1] -= self.energy[-1][-1]
# Makes a pressure correction to the pair potential.
def pressure_correction(self, p_error):
r = self.distance[-1]
cut_end = r[-1]
incr = 1.0
# Dynamically adjust ratio so that each iteration reduces error by 95%.
if self.last_p_error != 0.0:
ratio = (self.last_p_error - p_error)/self.last_p_error
if ratio > 0.0:
incr /= ratio + 0.05
self.pfactor *= incr
self.last_p_error = p_error
A = -p_error * self.pfactor
print 'Applying force correction of:', A
print 'Correction factor is :', self.pfactor, '(%.2fx)'%incr
dV = A*kB*self.temperature*(1.0-r/cut_end)
dF = A*kB*self.temperature*(1.0/cut_end)
self.energy[-1] += dV
self.force[-1] += dF
# Cumulative integration of f using Simpson's rule.
def simpson_integrate(x,f):
F = zeros((len(f)))
F[0] = 0.0
F[1] = 0.5*(f[0]+f[1]) * (x[1]-x[0])
for i in range(2,len(f)):
# Integral is from i-2 to here + F[i-2]
F[i] = F[i-2] + (f[i-2]+4.0*f[i-1]+f[i])*(x[i]-x[i-2])/6.0
return F
# Smooths out the computed force.
def smooth_force(r, f, it):
mask = f < -min(f)*8.0
rm,fm = r[mask],f[mask]
error = lambda p: fm - lj96_force(p, rm)
p0 = [5.0,0.01]
# for d in [5.0,6.0,8.0,10]:
# p0 += [d, 2.0, 0.0]
fit = optimize.leastsq(error, p0, maxfev=4000, full_output=1)
while len(p0) > 2 and fit[2]['nfev'] == 4000:
p0 = p0[0:-3]
fit = optimize.leastsq(error, p0, maxfev=4000, full_output=1)
p = fit[0]
print 'Gaussian peaks at :', p[2::3]
print 'Gaussian widths are:', array(p[3::3])**-2
print 'Gaussian heights are:', array(p[4::3])
py.clf()
py.plot(rm, fm, '.')
py.plot(rm, lj96_force(p,rm))
fm -= lj96_force(p,rm)
w,K = 1,2
for k in range(K):
for i in range(w,len(fm)-w):
fm[i] = mean(fm[i-w:i+w+1])
f[mask] = fm + lj96_force(p,rm)
f[mask==0] = lj96_force(p, r[mask==0]) + fm[0]
py.plot(rm,f[mask])
py.legend(['original', 'fitted', 'smoothed'], loc='upper right')
py.savefig('smooth-%d.png' %it)
return f,p
|
UTF-8
|
Python
| false | false | 2,012 |
19,653,770,361,695 |
34a15881542dc68ab2b0dbe554cce452880a234d
|
b6122a828a87d156b29822397a8711741c60ebe7
|
/Magee/extract_NM_parse.py
|
279739f1cd53e7741b922c70ac58a5c52190a9ba
|
[] |
no_license
|
nturaga/Projects
|
https://github.com/nturaga/Projects
|
51bffa8a137d39b8585e119d1c7b5751e63dcb71
|
981c75e3be0a97e04431bbd9c0ffd26082a1db67
|
refs/heads/master
| 2020-04-10T21:33:28.130735 | 2013-04-19T17:10:36 | 2013-04-19T17:10:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Author: Nitesh Turaga
# Systems Programmer II, Lab - Adrian lee, Magee Womens Research Institute
# Description:
from xml.dom.minidom import parseString
#all these imports are standard on most modern python implementations
#open the xml file for reading:
file = open('test.xml','r')
#convert to string:
data = file.read()
#close file because we dont need it anymore:
file.close()
#parse the xml you got from the file
dom = parseString(data)
#retrieve the first xml tag (<tag>data</tag>) that the parser finds with name tagName:
NM_list =""
print "count_from:",dom.getElementsByTagName('Seq-interval_from')[0]
print "count_to:",dom.getElementsByTagName('Seq-interval_from')[-1]
for i in xrange(225):
xmlTag = dom.getElementsByTagName('Gene-commentary_accession')[i].toxml()
xmlTag2 = dom.getElementsByTagName('Seq-interval_from')[0].toxml()
xmlTag3 = dom.getElementsByTagName('Seq-interval_to')[0].toxml()
#strip off the tag (<tag>data</tag> ---> data):
xmlData=xmlTag.replace('<Gene-commentary_accession>','').replace('</Gene-commentary_accession>','')
xmlData2 = xmlTag2.replace('<Seq-interval_from>','').replace('</Seq-interval_from>','')
xmlData3 = xmlTag3.replace('<Seq-interval_to>','').replace('</Seq-interval_to>','')
NM_list+=xmlData
NM_list+="\t"
NM_list+=xmlData2
NM_list+="\t"
NM_list+=xmlData3
NM_list+= "\n"
#print out the xml tag and data in this format: <tag>data</tag>
#just print the data
print NM_list
|
UTF-8
|
Python
| false | false | 2,013 |
12,850,542,178,923 |
1f0ac74335caf7d8d9a1be1aa7f47c6ede6623fe
|
156b982610cae122b1234c996cdfb27c910f999c
|
/examples/Kane1985/Chapter4/Ex7.12.py
|
73287b9a571f10b6e6a9493a42c4c96dd5fc0302
|
[] |
no_license
|
chrisdembia/pydy
|
https://github.com/chrisdembia/pydy
|
5656ea58f9156309afdb01bf81c91e1ec7bd896b
|
009d7ae57218dc985a041244e15154dfc32cf872
|
refs/heads/master
| 2021-01-14T12:15:45.278397 | 2014-04-24T15:54:36 | 2014-04-24T15:54:36 | 19,183,542 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 7.12 from Kane 1985."""
from __future__ import division
from sympy import solve, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import cross, dot
def subdict(d, keylist):
return dict((k, d[k]) for k in keylist)
# vectors A, B have equal magnitude 10 N
alpha = 10 # [N]
beta = symbols('beta', real=True)
b1, b2, b3 = symbols('b1 b2 b3', real=True)
p1, p2, p3 = symbols('p1 p2 p3', real=True)
N = ReferenceFrame('N')
pO = Point('O')
pS = pO.locatenew('S', 10*N.x + 5*N.z)
pR = pO.locatenew('R', 10*N.x + 12*N.y)
pQ = pO.locatenew('Q', 12*N.y + 10*N.z)
pP = pO.locatenew('P', 4*N.x + 7*N.z)
A = alpha * pQ.pos_from(pP).normalize()
B = alpha * pS.pos_from(pR).normalize()
R = A + B
M = cross(pP.pos_from(pO), A) + cross(pR.pos_from(pO), B)
Ms = dot(R, M) * R / dot(R, R)
ps = cross(R, M) / dot(R, R)
A_prime = beta * pP.pos_from(pO).normalize()
B_prime = b1*N.x + b2*N.y + b3*N.z
pB_prime = pO.locatenew("B'", p1*N.x + p2*N.y + p3*N.z)
M_prime = cross(pB_prime.pos_from(pO), B_prime)
eqs = [dot(R - A_prime - B_prime, n) for n in N]
eqs += [dot(M - M_prime, n) for n in N]
# choose pB_prime to be perpendicular to B_prime
# then pB_prime.magnitude() gives the distance d from O
# to the line of action of B'
eqs.append(dot(pB_prime.pos_from(pO), B_prime))
soln = solve(eqs, [beta, b1, b2, b3, p1, p2, p3], dict=True)[0]
print("A' = {0}".format(A_prime.subs(subdict(soln, [beta]))))
print("|A'| = {0} N".format(soln[beta].n()))
print("B' = {0}".format(B_prime.subs(subdict(soln, [b1, b2, b3]))))
print("|B'| = {0} N".format(B_prime.subs(subdict(soln,
[b1, b2, b3])).magnitude().n()))
print("pB' = {0}".format(pB_prime.pos_from(pO).subs(subdict(soln,
[p1, p2, p3]))))
print("|pB'| = {0} m".format(pB_prime.pos_from(pO).subs(
subdict(soln, [p1, p2, p3])).magnitude().n()))
|
UTF-8
|
Python
| false | false | 2,014 |
12,893,491,824,760 |
53e87f634ae56c8ef883c44d6d313836a3562e2e
|
b472aa5d82b6a788332fceaeb1947bafdff53327
|
/hubo_ros/src/hubo_ros/msg/__init__.py
|
45958dc579e3daabb8786e145f8156811f685515
|
[] |
no_license
|
hubo/hubo-ach-ros
|
https://github.com/hubo/hubo-ach-ros
|
45d5d3ba03440d044bc9e61662899718c3b94d15
|
bbebe4333da042461ee5851dfcb81144655f9740
|
refs/heads/master
| 2021-01-18T07:57:09.721584 | 2013-04-09T18:30:33 | 2013-04-09T18:30:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from ._HuboFT import *
from ._HuboHand import *
from ._HuboJointCommand import *
from ._HuboIMU import *
from ._HuboHandCommand import *
from ._HuboCommand import *
from ._HuboState import *
from ._HuboJointState import *
|
UTF-8
|
Python
| false | false | 2,013 |
18,064,632,479,449 |
9bb82cb089b92049268bf43398629af4f4f51550
|
f7dc320cc2990be92c1738426e2a5dd8ca0d774d
|
/tests/unit/offer/offer_tests.py
|
a14442c50d74d1462d0237af8d5bc059d500c915
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
gump/django-oscar
|
https://github.com/gump/django-oscar
|
8a209afe3936b7da725c72dc2663eb16f90ecacf
|
975a516a318bdc8e0209d31f1a29b7a21b91dfda
|
refs/heads/master
| 2017-05-03T06:50:05.196135 | 2012-09-07T11:36:55 | 2012-09-07T11:36:55 | 1,184,996 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import datetime
from django.conf import settings
from django.test import TestCase
from oscar.apps.offer import models
from oscar.test.helpers import create_product
class WholeSiteRangeWithGlobalBlacklistTest(TestCase):
def setUp(self):
self.range = models.Range.objects.create(name="All products", includes_all_products=True)
def tearDown(self):
settings.OSCAR_OFFER_BLACKLIST_PRODUCT = None
def test_blacklisting_prevents_products_being_in_range(self):
settings.OSCAR_OFFER_BLACKLIST_PRODUCT = lambda p: True
prod = create_product()
self.assertFalse(self.range.contains_product(prod))
def test_blacklisting_can_use_product_class(self):
settings.OSCAR_OFFER_BLACKLIST_PRODUCT = lambda p: p.product_class.name == 'giftcard'
prod = create_product(product_class="giftcard")
self.assertFalse(self.range.contains_product(prod))
def test_blacklisting_doesnt_exlude_everything(self):
settings.OSCAR_OFFER_BLACKLIST_PRODUCT = lambda p: p.product_class.name == 'giftcard'
prod = create_product(product_class="book")
self.assertTrue(self.range.contains_product(prod))
class WholeSiteRangeTest(TestCase):
def setUp(self):
self.range = models.Range.objects.create(name="All products", includes_all_products=True)
self.prod = create_product()
def test_all_products_range(self):
self.assertTrue(self.range.contains_product(self.prod))
def test_all_products_range_with_exception(self):
self.range.excluded_products.add(self.prod)
self.assertFalse(self.range.contains_product(self.prod))
def test_whitelisting(self):
self.range.included_products.add(self.prod)
self.assertTrue(self.range.contains_product(self.prod))
def test_blacklisting(self):
self.range.excluded_products.add(self.prod)
self.assertFalse(self.range.contains_product(self.prod))
class PartialRangeTest(TestCase):
def setUp(self):
self.range = models.Range.objects.create(name="All products", includes_all_products=False)
self.prod = create_product()
def test_empty_list(self):
self.assertFalse(self.range.contains_product(self.prod))
def test_included_classes(self):
self.range.classes.add(self.prod.product_class)
self.assertTrue(self.range.contains_product(self.prod))
def test_included_class_with_exception(self):
self.range.classes.add(self.prod.product_class)
self.range.excluded_products.add(self.prod)
self.assertFalse(self.range.contains_product(self.prod))
class ConditionalOfferTest(TestCase):
def test_is_active(self):
start = datetime.date(2011, 01, 01)
test = datetime.date(2011, 01, 10)
end = datetime.date(2011, 02, 01)
offer = models.ConditionalOffer(start_date=start, end_date=end)
self.assertTrue(offer.is_active(test))
def test_is_inactive(self):
start = datetime.date(2011, 01, 01)
test = datetime.date(2011, 03, 10)
end = datetime.date(2011, 02, 01)
offer = models.ConditionalOffer(start_date=start, end_date=end)
self.assertFalse(offer.is_active(test))
|
UTF-8
|
Python
| false | false | 2,012 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.