__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
10,230,612,119,671 |
296333b39df2102f096c5161f8131a1881194e35
|
e0ea042882bd8d633970747d2276e711bd753093
|
/src/makeproject
|
dbad5149f904aed5b713f31c92e95f2fc1afb805
|
[] |
no_license
|
MrOvkill/Overkill-Project-Creator
|
https://github.com/MrOvkill/Overkill-Project-Creator
|
14bd77f4452b7b24558e4b5f16df8f8dee9b8675
|
c10afc8a5fa3f37a01dfa707a411a55570e3ae5c
|
refs/heads/master
| 2021-05-27T15:39:58.292991 | 2014-03-31T20:44:50 | 2014-03-31T20:44:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os, sys, time
"""
Overkill Project Creator 0.0.3
Copyright (c) 2014 Samuel "MrOverkill" Meyers
"""
def fmt(string, name, cls):
    """Fill in the {DATE}, {PROJECT}, {CLASS} and {CLASSLOWER} placeholders."""
    substitutions = (
        ("{DATE}", time.strftime("%x")),
        ("{PROJECT}", name),
        ("{CLASS}", cls),
        ("{CLASSLOWER}", cls.lower()),
    )
    result = string
    for placeholder, value in substitutions:
        result = result.replace(placeholder, value)
    return result
def projfmt(string, name, ptype, targets):
    """Fill in the project-file placeholders.

    {NAME}/{TYPE}/{TARGETS} come from the arguments; {OPCVERSION},
    {VERSION} and {SOURCES} are fixed to the tool's current values.
    """
    replacements = (
        ("{NAME}", name),
        ("{TYPE}", ptype),
        ("{OPCVERSION}", "0.0.3"),
        ("{VERSION}", "0.0.1"),
        ("{SOURCES}", "src"),
        ("{TARGETS}", targets),
    )
    for placeholder, value in replacements:
        string = string.replace(placeholder, value)
    return string
def makecpp(name):
    """Create a C++ project skeleton named *name*, then build and run it.

    Layout: src/ (sources), out/ (objects), bin/ (binaries), targets/
    (build-target descriptions), plus a Makefile and a <name>.opf project
    file.  main.cxx is instantiated from the system-wide template.
    NOTE(review): leaves the process chdir'd into the project directory and
    assumes /usr/local/opc/templates/Main-C++.template exists — confirm.
    """
    os.makedirs(name)
    os.chdir(name)
    os.makedirs("src")
    os.makedirs("out")
    os.makedirs("bin")
    os.makedirs("targets")
    os.chdir("targets")
    # default build target description (###-separated key/value lines)
    f = open("default.target", "w")
    f.write("""Name###Default
Sources###main.cxx
Complier Flags###none
Linker Flags###none
Target Binary###main.out""")
    f.close()
    os.chdir("..")
    os.chdir("src")
    # instantiate the C++ main template; the class name argument is empty
    f = open("main.cxx", "w")
    e = open("/usr/local/opc/templates/Main-C++.template", "r")
    f.write(fmt(e.read(), name, ""))
    f.close()
    os.chdir("..")
    # minimal Makefile: compile, then link (recipe lines must be tab-indented)
    f = open("Makefile", "w")
    f.write("""default:
	g++ -c src/main.cxx -o out/main.o
	g++ out/main.o -o bin/main.out""")
    f.close()
    # project metadata file consumed by the OPC tooling
    f = open(name + ".opf", "w")
    f.write(projfmt("""Name###{NAME}
Type###{TYPE}
OPC Version###{OPCVERSION}
Project Version###{VERSION}
Source Directory###{SOURCES}
Targets###{TARGETS}""", name, "CPP", "default"))
    f.close()
    print "Project created!"
    print "Building..."
    os.system("make")
    print "Succeeded!"
    print "Running..."
    os.system("./bin/main.out")
    print "Done!"
print "Overkill Project Creator 0.0.3"
print "Copyright (c) 2014 Samuel \"MrOverkill\" Meyers"
print ""
usage = """Usage: {0} <language> <projectname>
language may be:
- CPP
projectname may be:
- Any name your filesystem deems "legal"
example: {0} CPP MyAwesomeProject""".format(sys.argv[0])
argc = len(sys.argv)
if argc < 3:
print usage
elif os.path.exists(sys.argv[2] + "/" + sys.argv[2] + ".opf"):
print "Error: Project \"" + sys.argv[2] + "\" Already exists!"
else:
if sys.argv[1] == "CPP":
makecpp(sys.argv[2])
else:
print usage
|
UTF-8
|
Python
| false | false | 2,014 |
2,061,584,313,525 |
e5c83ff30f4924862a29ceed82b86bc57802fb58
|
542e32a42b8f1c02c2793af7ce5642903583ccc7
|
/handlers/testmodule.py
|
9e52db683878436afebd2f0aaf3e216a7895142d
|
[] |
no_license
|
arthasdk/adaptest
|
https://github.com/arthasdk/adaptest
|
2080cb096b18bba7a360c972df52626574021408
|
10c5de898889f55b0c6276fcbc5bb906054eefb8
|
refs/heads/master
| 2021-01-22T12:53:09.804721 | 2014-05-17T04:30:36 | 2014-05-17T04:30:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import webapp2
import jinja2
import os
import time
import logging
import math
import globals
from random import randint
#import everthing :P cuz soon , we are gonna need more than a few of the dbhelper functions
from models.dbhelper import *
from google.appengine.api import users
from computation import calculateP
class InvalidTimeLeftError(Exception):
    """Raised when a question's remaining time is outside the 0-30s window."""

    def __init__(self, timeLeft):
        # keep the offending value so callers/logs can report it
        self.timeLeft = timeLeft

    def __str__(self):
        return "%s" % (self.timeLeft,)
# Shared Jinja2 environment; templates are loaded relative to views/.
# NOTE(review): os.path.dirname("views/") evaluates to "views" — the dirname
# call looks redundant; confirm the intended template root.
jinjaEnv=jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname("views/")))
################################# PG 85 ALGO START##################################################
def evalFirstQuestion(u,timeLeft,c):
    """Estimate an initial ability (theta) from the answer to question one.

    u        -- score constant (globals.incorrectAnswer/passAnswer/correctAnswer)
    timeLeft -- seconds remaining on the question timer, expected in [0, 30]
    c        -- guessing parameter (currently unused by this naive estimator)

    Returns the scaled starting theta; raises InvalidTimeLeftError when
    timeLeft is outside [0, 30].
    """
    #worst naive algo i could come up with in a jiffy to test the rest with summation
    #ajust this tempTheta with the b value of the question and try and give one with b=~4-6
    if timeLeft<0 or timeLeft>30:
        raise InvalidTimeLeftError(timeLeft)
    tempTheta=2.5
    if(u==globals.incorrectAnswer):
        #time is not taken into consideration if answer is wrong
        tempTheta=tempTheta/2
    else:
        #time is considered a lot of question is passed, guessing is considered if timeTaken < 10 sec
        #else only solve time is taken :)
        if (u==globals.passAnswer):
            #pass: best case 1.15x, worst case 1.05x
            # BUG FIX: float divisors — under Python 2 integer division the
            # time term (timeLeft/300) was always 0, so the multiplier was
            # stuck at 1.05 regardless of speed.
            tempTheta=tempTheta*( timeLeft/300.0+1.05)
        else:
            #correct
            if timeLeft>=18:
                #best case 1.40, worst case 1.20
                tempTheta=tempTheta*(-timeLeft/60.0+1.7)
            else:
                #best case 1.40, worst case 1.20
                tempTheta=tempTheta*(timeLeft/90.0+1.2)
    #logging.info('tempTheta=%s'%tempTheta)
    return tempTheta
def evalNextQuestion(u,user,previousTheta):
    """Estimate the user's theta from all answered questions (Pg 85 method).

    Iterates getNewTheta until successive estimates differ by <= 0.2.
    NOTE(review): getNewTheta is always fed previousTheta (not the refined
    estimate), so the loop settles after at most two passes — confirm intent.
    """
    #part of the formula is taken from Pg 85 and part developed by me and used some part of st. line eqn
    params = fetchAllQuestionsParamsTestModule(user)
    estimate = previousTheta
    converged = False
    while not converged:
        candidate = getNewTheta(params, previousTheta)
        converged = math.fabs(candidate - estimate) <= 0.2
        if not converged:
            estimate = candidate
    return estimate
def getNewTheta(params,theta_S):
    """One Newton-style theta refinement over the flattened item parameters.

    params is consumed in groups of four values per answered question;
    the fourth entry of each group is compared against the model
    probability P from calculateP.  Returns theta_S plus the update step.
    """
    numerator = 0
    denominator = 0.00000001  # guard against an empty params list
    for idx in range(0, int(len(params)/4)):
        base = idx * 4
        weight = params[base]
        P = float(calculateP(theta_S, weight, params[base+1], params[base+2]))
        # sign flipped from the textbook formula on purpose (see history)
        numerator = numerator + weight * (params[base+3] - P)
        denominator = denominator + (weight * weight) * P * (1 - P)
    return theta_S + (numerator / denominator)
def getNextQuestion(self, timeAnswerWasPostedToServer, givenAnswerID, currentUser):
    """Pg-85 algorithm entry point: grade the answer, update theta, serve next.

    self  -- the webapp2 request handler (used for response/redirect)
    timeAnswerWasPostedToServer -- epoch seconds the answer arrived
    givenAnswerID -- submitted option id; '' means the question was passed
    """
    #get the currentUser global instances
    currentUserGlobals=fetchGlobal(currentUser)
    TotalQuestions=int(currentUserGlobals.TotalQuestions)
    TotalQuestions=TotalQuestions-1
    questionTimerEnd=int(float(currentUserGlobals.questionTimerEnd))
    # seconds the user had left on the clock when the answer arrived
    timeRemaining=-int(float(timeAnswerWasPostedToServer))+questionTimerEnd
    #u is the score given to the user, 1 if the answer is correct , 0 if its incorrect and 0.33 if passed :)
    u=globals.incorrectAnswer
    if givenAnswerID == '':
        u=globals.passAnswer
    else:
        CorrectAnswer=isCorrectAnswer(int(givenAnswerID))
        if CorrectAnswer:
            u=globals.correctAnswer
    # persist the answer for this question
    update_or_Insert_QuestionTestModule(currentUserGlobals.questionNumberToGive,givenAnswerID,currentUser,u)
    # first question (counter still at its initial 10) uses the naive seed
    if int(currentUserGlobals.TotalQuestions) == 10:
        logging.info('\ntempTheta for question: %s'%currentUserGlobals.TotalQuestions)
        nextTheta=evalFirstQuestion(u,timeRemaining,0.25)
    else:
        logging.info('\nStd. Calculation for question: %s'%currentUserGlobals.TotalQuestions)
        nextTheta=evalNextQuestion(u,currentUser,float(currentUserGlobals.theta))
    logging.info('\nnextTheta=%s\n'%nextTheta)
    # theta out of range: end the test as inconclusive
    if nextTheta <0 or nextTheta>10:
        vals={'message':'Your test has ended!<br>Result :<h1>Inconclusive</h1><br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'}
        templateMessage=jinjaEnv.get_template('message.html')
        self.response.out.write(templateMessage.render(vals))
        return
    time.sleep(1)
    # pick a harder or easier question depending on theta movement
    if float(currentUserGlobals.theta) < nextTheta:
        q=fetchMoreDifficultQuestion(nextTheta,currentUser)
    else:
        q=fetchLessDifficultQuestion(nextTheta,currentUser)
    logging.info('\nq=%s\n'%q)
    if q == False:
        # no suitable question left in the database
        vals={'message':'Sorry, Database is out of Questions!<br>Kindly press Take Test Button to redo the test!!!<br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'}
        templateMessage=jinjaEnv.get_template('message.html')
        self.response.out.write(templateMessage.render(vals))
    else:
        # 32.5s timer for the next question, then send the user back to /test
        questionTimerEnd=round(time.time()+32.5)
        update_or_Insert(currentUser, str(TotalQuestions), str(q), str(questionTimerEnd),nextTheta)
        time.sleep(1.5)
        self.redirect("/test")
    return
################################# PG 85 ALGO END##################################################
################################# ALGO 2 START##################################################
### SRC : http://luna.cas.usf.edu/~mbrannic/files/pmet/irt.htm #################################
################################################################################################
def checkTheta(self,nextTheta):
    """Render the 'Inconclusive' end-of-test page when theta leaves (0, 10.5].

    NOTE(review): the return only exits this helper — callers continue
    executing after the page is rendered; confirm that is intended.
    """
    if nextTheta <=0 or nextTheta>10.5:
        vals={'message':'Your test has ended!<br>Result :<h1>Inconclusive</h1><br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'}
        templateMessage=jinjaEnv.get_template('message.html')
        self.response.out.write(templateMessage.render(vals))
        return
def DisplayResult(self,user):
    """Render the final-score page for *user* via the message template."""
    # renamed local from 'str' — the original shadowed the builtin
    score=ReturnScores(user)
    vals={'message':'You Have finished giving the test.<br>Score :<h1>%s</h1><br>Press Take Test Button to redo the test!!!<br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'%(score)}
    template=jinjaEnv.get_template('message.html')
    self.response.out.write(template.render(vals))
    return
def fetchNextQuestionParams(self,user, pastAnswer, currentAnswer, currentTheta, TotalQuestions):
    """ALGO 2: binary-search style theta update and next-question dispatch.

    Two consecutive correct answers raise theta by 0.5 (upper bound);
    an incorrect answer lowers it by 0.5 (lower bound); any other
    combination means the bounds converged, so the result is shown.
    """
    q=False
    nextTheta=0
    if pastAnswer=='correct' and currentAnswer=='correct':
        # two rights in a row: raise theta and record the new upper bound
        nextTheta=currentTheta+(0.5)
        logging.info('\nnextTheta=%s\n'%nextTheta)
        checkTheta(self, nextTheta)
        updateOrInsertScores(user,upperBound=nextTheta)
        time.sleep(0.5)
        q=fetchMoreDifficultQuestion(nextTheta,user)
    elif currentAnswer=='incorrect':
        # wrong answer: lower theta and record the new lower bound
        nextTheta=currentTheta-(0.5)
        logging.info('\nnextTheta=%s\n'%nextTheta)
        checkTheta(self, nextTheta)
        updateOrInsertScores(user,lowerBound=nextTheta)
        time.sleep(0.5)
        q=fetchLessDifficultQuestion(nextTheta,user)
    else:
        # correct after a non-correct answer: show the final score
        DisplayResult(self,user)
    time.sleep(0.75)
    if q == False:
        vals={'message':'Sorry, Database is out of Questions!<br>Kindly press Take Test Button to redo the test!!!<br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'}
        templateMessage=jinjaEnv.get_template('message.html')
        self.response.out.write(templateMessage.render(vals))
    else:
        # arm the 32.5s timer for the next question and bounce back to /test
        questionTimerEnd=round(time.time()+32.5)
        update_or_Insert(user, str(TotalQuestions), str(q), str(questionTimerEnd),nextTheta,currentAnswer)
        time.sleep(1.5)
        self.redirect("/test")
    return
def getNextQuestion2(self, givenAnswerID, currentUser):
    """ALGO 2 entry point: grade the submitted answer and advance the test."""
    #get the currentUser global instances
    currentUserGlobals=fetchGlobal(currentUser)
    TotalQuestions=int(currentUserGlobals.TotalQuestions)
    TotalQuestions=TotalQuestions-1
    pastAnswer=currentUserGlobals.pastAnswer
    currentAnswer='incorrect'
    if givenAnswerID == '':
        #no scope of passing here and(or) also time
        currentAnswer='incorrect'
    else:
        CorrectAnswer=isCorrectAnswer(int(givenAnswerID))
        if CorrectAnswer:
            currentAnswer='correct'
    # persist the answer (the score field is unused by this algorithm: 0)
    update_or_Insert_QuestionTestModule(currentUserGlobals.questionNumberToGive,givenAnswerID,currentUser,0)
    fetchNextQuestionParams(self,currentUser, pastAnswer, currentAnswer, currentUserGlobals.theta, TotalQuestions)
    return
################################# ALGO 2 END####################################################
### SRC : http://luna.cas.usf.edu/~mbrannic/files/pmet/irt.htm #################################
################################################################################################
################################# ALGO 3 START##################################################
### SRC : Pg 85 + Mixed (AP+GP Series) #########################################################
################################################################################################
def DisplayResultPg85(self,theta):
    """Render the final-score page showing *theta* as the score (ALGO 3)."""
    vals={'message':'You Have finished giving the test.<br>Score :<h1>%s</h1><br>Press Take Test Button to redo the test!!!<br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'%(theta)}
    template=jinjaEnv.get_template('message.html')
    self.response.out.write(template.render(vals))
    return
def getThetaResult(user):
    """Compute the final theta for *user* by iterating getNewTheta from 1.0
    until successive estimates differ by no more than 0.2."""
    params = fetchAllQuestionsParamsTestModule(user)
    logging.info('\nParams @getThetaResult :%s'%params)
    estimate = 1.0
    while True:
        refined = getNewTheta(params, estimate)
        if math.fabs(refined - estimate) <= 0.2:
            break
        estimate = refined
    logging.info('\nend of @getThetaResult')
    return estimate
def fetchNextQuestionParams2(self,user, pastAnswer, currentAnswer, currentTheta, TotalQuestions):
    """ALGO 3 (Pg 85 + mixed AP/GP): update theta and dispatch next question.

    correct   -> multiply theta by ~1.1665 (GP step)
    incorrect -> subtract 0.625 (AP step)
    otherwise (passed) -> subtract 0.4
    """
    q=False
    nextTheta=0
    if(TotalQuestions==0):
        # out of questions: show the converged theta as the result
        # NOTE(review): there is no return here, so the code below still
        # runs after the result page is rendered — confirm that is intended.
        temp=getThetaResult(user)
        DisplayResultPg85(self,temp)
        update_or_Insert(user, str(TotalQuestions), str(0), str(0),temp,currentAnswer)
    logging.info('\nQuestion No.=%s\n'%TotalQuestions)
    if currentAnswer=='correct':
        nextTheta=currentTheta*(1.1665290394)
        logging.info('\nnextTheta (c)=%s\n'%nextTheta)
        checkTheta(self, nextTheta)
        time.sleep(0.5)
        q=fetchMoreDifficultQuestion(nextTheta,user)
    elif currentAnswer=='incorrect':
        nextTheta=currentTheta-(.625)
        logging.info('\nnextTheta (i)=%s\n'%nextTheta)
        checkTheta(self, nextTheta)
        time.sleep(0.5)
        q=fetchLessDifficultQuestion(nextTheta,user)
    else: #passed ofc
        nextTheta=currentTheta-(.4)
        logging.info('\nnextTheta (p)=%s\n'%nextTheta)
        checkTheta(self, nextTheta)
        time.sleep(0.5)
        q=fetchLessDifficultQuestion(nextTheta,user)
    time.sleep(0.75)
    if q == False:
        vals={'message':'Sorry, Database is out of Questions!<br>Kindly press Take Test Button to redo the test!!!<br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'}
        templateMessage=jinjaEnv.get_template('message.html')
        self.response.out.write(templateMessage.render(vals))
    else:
        # arm the 32.5s timer for the next question and redirect back
        questionTimerEnd=round(time.time()+32.5)
        update_or_Insert(user, str(TotalQuestions), str(q), str(questionTimerEnd),nextTheta,currentAnswer)
        time.sleep(1.5)
        self.redirect("/test")
    return
def getNextQuestion3(self, givenAnswerID, currentUser):
    """ALGO 3 entry point: classify the answer (correct/incorrect/passed)
    and hand off to fetchNextQuestionParams2."""
    #get the currentUser global instances
    currentUserGlobals=fetchGlobal(currentUser)
    TotalQuestions=int(currentUserGlobals.TotalQuestions)
    TotalQuestions=TotalQuestions-1
    pastAnswer=currentUserGlobals.pastAnswer
    currentAnswer='incorrect'
    if givenAnswerID == '':
        #no scope of passing here and(or) also time
        currentAnswer='passed'
    else:
        CorrectAnswer=isCorrectAnswer(int(givenAnswerID))
        if CorrectAnswer:
            currentAnswer='correct'
    # persist the answer (the score field is unused by this algorithm: 0)
    update_or_Insert_QuestionTestModule(currentUserGlobals.questionNumberToGive,givenAnswerID,currentUser,0)
    fetchNextQuestionParams2(self,currentUser, pastAnswer, currentAnswer, currentUserGlobals.theta, TotalQuestions)
    return
################################# ALGO 3 END ###################################################
### SRC : Pg 85 + Mixed (AP+GP Series) #########################################################
################################################################################################
class TestModule(webapp2.RequestHandler):
    """Handler for /test: POST grades an answer, GET renders a question."""

    def post(self):
        """Grade the submitted answer and advance the adaptive test."""
        timeAnswerWasPosted=time.time()
        user=users.get_current_user()
        if not user:
            # NOTE(review): no return after redirect — execution continues
            # with user=None; confirm downstream code tolerates that.
            self.redirect(users.create_login_url(self.request.uri))
        # earlier algorithms kept for reference:
        #getNextQuestion(self, timeAnswerWasPosted, self.request.get('option'), user)
        #getNextQuestion2(self, self.request.get('option'), user)
        getNextQuestion3(self, self.request.get('option'), user)

    def get(self):
        """Render the current question, or the final score when none remain."""
        user=users.get_current_user()
        if not user:
            self.redirect(users.create_login_url(self.request.uri))
        currUser=fetchGlobal(user)
        questionNumberToGive=currUser.questionNumberToGive
        TotalQuestions=int(currUser.TotalQuestions)
        questionTimerEnd=currUser.questionTimerEnd
        if TotalQuestions>0:
            # displayed question number counts up while TotalQuestions counts down
            questionNumber=globals.NumberOfQuestions-TotalQuestions
            question=fetchQuestion(int((questionNumberToGive)))
            answers=fetchAnswersOf(question)
            qNo=str(questionNumber+1)
            vals={'title':qNo,'endTime':questionTimerEnd,'question':'%s. %s'%(qNo,question.question),'answers':answers,'questionID':questionNumberToGive,'current_user':user}
            template=jinjaEnv.get_template('testQuestion.html')
            self.response.out.write(template.render(vals))
        else:
            # test finished: compute the final theta and show it as the score
            temp=getThetaResult(user)
            vals={'message':'You Have finished giving the test.<br>Score :<h1>%s</h1><br>Press Take Test Button to redo the test!!!<br><br><form id="myForm" action="/" method="GET"><input type="submit" value="Goto Home"></form>'%(temp)}
            template=jinjaEnv.get_template('message.html')
            self.response.out.write(template.render(vals))
|
UTF-8
|
Python
| false | false | 2,014 |
12,489,764,944,737 |
2b739c84f4accd359b62803fb9c7c3bc05c4cd5c
|
6c401a3ec1d752d9e8abc3574599550631bb777c
|
/config/production.py
|
27b440f350f674f358c91519c65006d63f05422c
|
[] |
no_license
|
yaymukund/tada-api
|
https://github.com/yaymukund/tada-api
|
7ea91796135f3d60ea24564fdd8f29fa78f57333
|
00e861b2f1c6aa233983ea5279220b9497f72b53
|
refs/heads/master
| 2016-09-10T18:43:04.578800 | 2014-05-27T01:12:24 | 2014-05-27T01:12:24 | 20,129,913 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os

# Production settings: debugging disabled; the database connection string is
# taken from the environment (raises KeyError at import time if unset).
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
print('Production mode configuration loaded.')
|
UTF-8
|
Python
| false | false | 2,014 |
4,853,313,089,392 |
f14408b47683d7d4b2638848bfb7f2aae59596a5
|
75eca8ac8c2af2eced38dad9a49b1c9013c306e4
|
/parser.py
|
d9d44f792a79d838b225dd92fea02357313bd604
|
[] |
no_license
|
lisayao/HTTPS-E-Mixed-Content-Blocker-Work
|
https://github.com/lisayao/HTTPS-E-Mixed-Content-Blocker-Work
|
c010f91cf4ad514eefbfaaf446843cd1402fc443
|
154d3f5c57673e7e0ebac74c88a1da54de0dfff7
|
refs/heads/master
| 2020-12-24T18:03:28.784429 | 2013-09-04T01:28:31 | 2013-09-04T01:28:31 | 11,490,900 | 1 | 1 | null | false | 2013-07-20T20:31:02 | 2013-07-18T00:38:54 | 2013-07-20T20:31:02 | 2013-07-20T20:31:02 | 54 | null | 1 | 0 |
Python
| null | null |
#!/usr/bin/env python
# This script pulls all target urls from HTTPS-Everywhere Rulesets.
# It does not print any domain names containing *, any containing "www."
# (which is taken care of by the mochitest), and all rulesets that
# are already deactivated by designation as "default_off" or
# mixed content
import xml.parsers.expat
import glob
import os
import sys
class HTTPSERuleParser:
    """Walk an https-everywhere checkout and print its ruleset target hosts.

    Prints "<index>,<domain>" for every unique target domain, skipping
    wildcard domains, anything containing "www", and rulesets disabled via
    default_off or the mixedcontent platform flag.
    """

    def __init__(self, httpse_dir):
        #checks for valid path name
        rules_dir = httpse_dir+'/src/chrome/content/rules'
        if not os.path.exists(os.path.dirname(rules_dir)):
            print "Please submit valid path: {0} [https-everywhere git repository directory]".format(sys.argv[0])
            return
        #changes directory to user input
        os.chdir(rules_dir)
        self.num = 0
        self.domains = []
        #prints the name of the file and the url for each target tag
        # parse every ruleset; start_element fills domains_for_this_rule and
        # may flip disabled_by_default for switched-off rulesets
        for filename in glob.glob("*.xml"):
            text = open(filename, "r").read()
            self.disabled_by_default = False
            self.domains_for_this_rule = []
            p = xml.parsers.expat.ParserCreate()
            p.StartElementHandler = self.start_element
            p.Parse(text, 1)
            #if the rule isn't disabled, add domains to self.domains
            if not self.disabled_by_default and len(self.domains_for_this_rule) > 0:
                self.domains += self.domains_for_this_rule
        # dedupe and sort the domains
        deduped_domains = []
        for domain in self.domains:
            if domain not in deduped_domains:
                deduped_domains.append(domain)
        deduped_domains.sort()
        i = 0
        for domain in deduped_domains:
            # putting this in a try block, because encoding errors throw exceptions
            try:
                print "{0},{1}".format(i, domain)
                i += 1
            except:
                pass

    def start_element(self, name, attrs):
        """Expat start-tag handler: flag disabled rulesets, collect targets."""
        # exec 'checkable = 1' in globals()
        if name == "ruleset":
            for n in range(0, len(attrs)):
                key = attrs.keys()[n]
                value = attrs.values()[n]
                if key == "default_off":
                    self.disabled_by_default = True
                elif (key == "platform") and ('mixedcontent' in value):
                    self.disabled_by_default = True
        if name == "target":
            # print 'Start element:', name, attrs
            for n in range(0, len(attrs)):
                domain = attrs.values()[n]
                # skip wildcards and anything containing "www"
                if '*' not in domain and 'www' not in domain:
                    self.domains_for_this_rule.append(domain)
if __name__ == '__main__':
    #checks for correct number of user arguments
    if len(sys.argv) != 2 :
        print "Usage: {0} [https-everywhere git repository directory]".format(sys.argv[0])
        sys.exit()
    # the single argument is the https-everywhere checkout root
    httpse_dir = sys.argv[1]
    HTTPSERuleParser(httpse_dir);
|
UTF-8
|
Python
| false | false | 2,013 |
12,232,066,877,743 |
897ff96dc92381b14ff9c17e6e58e1c031d2457a
|
3c482372210ef4b7dcfdc284b8f824114a9d1057
|
/Text/Key/piglatin.py
|
07af62233e8d7d7e13b72b6c4506dc3985ef62dd
|
[
"MIT"
] |
permissive
|
CicadaMikoto/Projects
|
https://github.com/CicadaMikoto/Projects
|
b1c6ef74100e2368d9176c3493e543608caf6685
|
ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a
|
refs/heads/master
| 2021-01-21T08:24:18.913041 | 2013-12-20T20:07:27 | 2013-12-20T20:07:27 | 15,300,117 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Pig Latin - Pig Latin is a game of alterations played
on the English language game. To create the Pig Latin
form of an English word the initial consonant sound is
transposed to the end of the word and an ay is affixed
(Ex.: "banana" would yield anana-bay). Read Wikipedia
for more information on rules.
"""
# Prompt for a word; lower() normalises case before the vowel test.
word = raw_input('What\'s your word? ').lower()
vowels = 'aeiou'
pig = 'ay'
first = word[0]
# Vowel-initial words just take the "ay" suffix; consonant-initial words move
# their first letter to the end first.
# NOTE(review): only a single leading consonant is moved — clusters such as
# "ch"/"str" are not handled, and empty input raises IndexError; confirm
# that is acceptable for this exercise.
if first in vowels:
    new = word + pig
else:
    new = word[1:] + first + pig
print new
|
UTF-8
|
Python
| false | false | 2,013 |
11,390,253,298,624 |
8095add79e11b91b1ff970014d0119d2bb58dd08
|
3362545e9060f83562ee9f96cfccee42e3180f76
|
/libs/util.py
|
01b5ac6f1fa6f5ae532c74a289038ba7d9cd57e0
|
[
"GPL-2.0-only"
] |
non_permissive
|
LalatenduMohanty/distaf
|
https://github.com/LalatenduMohanty/distaf
|
4a08a0bc586965c212df4a46d163d3dbeedb7e7e
|
ec79c5958b40506fb931bf0c6b0fbe58777d02db
|
refs/heads/master
| 2021-01-17T14:15:05.355891 | 2014-12-01T10:56:00 | 2014-12-01T10:56:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
from client_rpyc import *
testcases = []
tc = big_bang()
def testcase(name):
    """Decorator factory: register the wrapped test under *name*.

    The generated wrapper runs the test body, asserts a truthy result via
    the bound TestCase instance, and is appended to the module-level
    testcases list as a (name, wrapper) pair.
    """
    global testcases

    def decorator(func):
        def wrapper(self):
            outcome = func()
            self.assertTrue(outcome, "Testcase %s failed" % name)
            return outcome

        testcases.append((name, wrapper))
        return wrapper

    return decorator
def finii():
    """Tear down the shared test controller created by big_bang() at import."""
    tc.fini()
def create_volume(volname, dist, rep=1, stripe=1, trans='tcp', servers=[], snap=False):
    """
    Create the gluster volume specified configuration
    volname and distribute count are mandatory argument

    rep/stripe multiply the brick count; bricks are laid out round-robin
    across *servers* (defaults to all controller nodes).  With snap=True,
    brick paths are numbered per-server based on bricks already listed by
    `gluster volume info`.  Returns the tuple from tc.run for the
    `gluster volume create` command.
    NOTE(review): mutable default servers=[] is shared across calls — it is
    only read here, but keep that in mind before mutating it.
    """
    global tc
    if servers == []:
        servers = tc.nodes[:]
    number_of_bricks = dist * rep * stripe
    replica = stripec = ''
    brick_root = '/bricks'
    n = 0
    tempn = 0
    bricks_list = ''
    # existing bricks, used to continue per-server numbering for snapshots
    rc = tc.run(servers[0], "gluster volume info | egrep \"^Brick[0-9]+\"")
    for i in range(0, number_of_bricks):
        if not snap:
            bricks_list = "%s %s:%s/%s_brick%d" % \
                (bricks_list, servers[n], brick_root, volname, i)
        else:
            sn = len(re.findall(servers[n], rc[1])) + tempn
            bricks_list = "%s %s:%s/brick%d/%s_brick%d" % \
                (bricks_list, servers[n], brick_root, sn, volname, i)
        # round-robin over the server list; tempn counts full passes
        if n < len(servers[:]) - 1:
            n = n + 1
        else:
            n = 0
            tempn = tempn + 1
    if rep != 1:
        replica = "replica %d" % rep
    if stripe != 1:
        stripec = "stripe %d" % stripe
    ttype = "transport %s" % trans
    ret = tc.run(servers[0], "gluster volume create %s %s %s %s %s" % \
        (volname, replica, stripec, ttype, bricks_list))
    return ret
def mount_volume(volname, mtype='glusterfs', mpoint='/mnt/glusterfs', mserver='', mclient='', options=''):
    """
    Mount the gluster volume with specified options
    Takes the volume name as mandatory argument

    Returns a tuple of (returncode, stdout, stderr)
    Returns (0, '', '') if already mounted

    mserver/mclient default to the controller's first node/client.
    NFS mounts are forced to protocol version 3.
    """
    global tc
    if mserver == '':
        mserver = tc.nodes[0]
    if mclient == '':
        mclient = tc.clients[0]
    if options != '':
        options = "-o %s" % options
    if mtype == 'nfs' and options != '':
        options = "%s,vers=3" % options
    elif mtype == 'nfs' and options == '':
        options = '-o vers=3'
    # skip the mount if an identical one is already in place on the client
    ret, _, _ = tc.run(mclient, "mount | grep %s | grep %s | grep \"%s\"" \
        % (volname, mpoint, mserver))
    if ret == 0:
        tc.logger.debug("Volume %s is already mounted at %s" \
            % (volname, mpoint))
        return (0, '', '')
    mcmd = "mount -t %s %s %s:%s %s" % (mtype, options, mserver,volname, mpoint)
    # make sure the mountpoint exists before mounting
    tc.run(mclient, "test -d %s || mkdir -p %s" % (mpoint, mpoint))
    return tc.run(mclient, mcmd)
def get_config_data(param=None):
    """
    Gets all the config data from the environmental variables

    Returns the value of requested parameter.
    If nothing is requested the whole dict is sent.
    If the requested parameter does not exist, False is returned.

    Side effect: stores the resulting dict on tc.config_data.
    NOTE(review): values taken from os.environ are strings, so numeric
    defaults change type when overridden — confirm callers handle that.
    """
    config_dict = {
        'VOLNAME' : 'testvol',
        'DIST_COUNT' : 2,
        'REP_COUNT' : 2,
        'STRIPE' : 1,
        'TRANS_TYPE' : 'tcp',
        'MOUNT_TYPE' : 'glusterfs',
        'MOUNTPOINT' : '/mnt/glusterfs',
        'GEO_USER' : 'root',
        'FILE_TYPE' : 'text',
        'DIR_STRUCT' : 'multi',
        'NUM_FILES_MULTI' : 5,
        'NUM_FILES_SING' : 1000,
        'NUM_THREADS' : 5,
        'DIRS_BREADTH' : 5,
        'DIRS_DEPTH' : 5,
        'SIZE_MIN' : '5k',
        'SIZE_MAX' : '10k' }
    # overlay any values supplied through the environment
    # (iterating the dict directly avoids building the keys list each call)
    for conf in config_dict:
        if conf in os.environ:
            config_dict[conf] = os.environ[conf]
    tc.config_data = config_dict
    # idiom fixes: 'is None' instead of '== None'; dict.get replaces the
    # 'in config_dict.keys()' membership test + lookup
    if param is None:
        return config_dict
    return config_dict.get(param, False)
# Populate tc.config_data eagerly at import time with environment overrides.
get_config_data()
|
UTF-8
|
Python
| false | false | 2,014 |
2,044,404,439,293 |
25cdc9824406db033784f8797f8814044e5dad12
|
7ccf1ea5e8e9aa58121edca640e6543c09e66a82
|
/howler/tools/periodic.py
|
3707068ca87a83e3b75596b85eaac3bc913774d1
|
[] |
no_license
|
jh3rnand3z/howler
|
https://github.com/jh3rnand3z/howler
|
e20f4d0ea83c56ce119d643915c2b3cef3c8cdb1
|
fb63dec97ba77c5a466ceb9b595b291c131cbae6
|
refs/heads/master
| 2020-12-27T09:38:18.216782 | 2014-11-03T20:42:09 | 2014-11-03T20:42:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
'''
Howler system periodic tools.
'''
# This file is part of howler.
# Distributed under the terms of the last AGPL License.
# The full license is in the file LICENCE, distributed as part of this software.
__author__ = 'Jean Chassoul'
import motor
from contextlib import contextmanager
from tornado import gen
from bson import objectid
@gen.engine
def get_usernames(db, callback):
'''
Get all the usernames
'''
# limit the size of the find query.
usernames = []
try:
query = db.accounts.find({},{'account':1, '_id':0})
for a in (yield motor.Op(query.to_list)):
usernames.append(a)
except Exception, e:
callback(None, e)
callback(usernames, None)
@gen.engine
def get_unassigned_cdr(db, callback):
'''
Periodic task that returns the unassigned CDR.
'''
try:
result = []
query = db.calls.find({'assigned':{'$exists':False}}).limit(1000)
for c in (yield motor.Op(query.to_list)):
calls.append(c)
except Exception, e:
callback(None, e)
callback(calls, None)
@gen.engine
def process_assigned_false(db, callback):
    '''
    Periodic task to process each false assigned

    Streams calls with assigned=False (limit 1000), maps each call's
    channel back to an account via the "/<account>-" substring, and
    invokes callback(result, None) when iteration completes or
    callback(None, error) on failure.
    '''
    result = []
    def _got_call(message, error):
        '''
        got call

        Per-document Motor callback; message is None when iteration ends.
        _accounts is bound in the enclosing scope before the query starts.
        '''
        if message:
            channel = (message['channel'] if 'channel' in message else False)
            if channel:
                account = [a for a in _accounts
                           if ''.join(('/', a['account'], '-')) in channel]
                account = (account[0] if account else False)
                if account:
                    struct = {
                        'account':account['account'],
                        'resource':'calls',
                        'id':message['_id']
                    }
                    result.append(struct)
        elif error:
            callback(None, error)
            return
        else:
            # iteration complete: hand back everything collected
            callback(result, None)
            return
    try:
        # Get account list
        _accounts = yield motor.Op(get_usernames, db)
        db.calls.find({
            'assigned':False
        }).limit(1000).each(_got_call)
    except Exception, e:
        callback(None, e)
@gen.engine
def process_asterisk_cdr(db, callback):
    '''
    Periodic task to process new asterisk cdr entries.

    Streams calls lacking an 'assigned' field (limit 1000), maps each
    call's channel to an account via the "/<account>-" substring, and
    invokes callback(result, None) on completion or callback(None, error)
    on failure.
    '''
    result = []
    def _got_call(message, error):
        '''
        got call

        Per-document Motor callback; message is None when iteration ends.
        _accounts is bound in the enclosing scope before the query starts.
        '''
        if error:
            callback(None, error)
            return
        elif message:
            channel = (True if 'channel' in message else False)
            # get channel the value
            channel = (message['channel'] if channel else channel)
            if channel:
                account = [a for a in _accounts
                           if ''.join(('/', a['account'], '-')) in channel]
                account = (account[0] if account else False)
                if account:
                    struct = {
                        'account':account['account'],
                        'resource':'calls',
                        'id':message['_id']
                    }
                    result.append(struct)
        else:
            #print('hmmm')
            # Iteration complete
            callback(result, None)
            return
    try:
        # Get account list
        _accounts = yield motor.Op(get_usernames, db)
        db.calls.find({
            'assigned':{'$exists':False}
        }).limit(1000).each(_got_call)
    except Exception, e:
        callback(None, e)
@gen.engine
def assign_call(db, account, callid, callback):
    '''
    Update a call assigning it to an account.

    Sets assigned=True and accountcode=<account> on the call document and
    invokes callback(result, None), or callback(None, error) on failure.
    '''
    try:
        result = yield motor.Op(
            db.calls.update,
            {'_id':objectid.ObjectId(callid)},
            {'$set': {'assigned': True,
                      'accountcode':account}}
        )
    except Exception, e:
        callback(None, e)
        return
    callback(result, None)
|
UTF-8
|
Python
| false | false | 2,014 |
16,028,817,964,080 |
c8246f9267df8b8cd9e939f3d800b670ecd45b6d
|
1e0742f151782af508f8b2dbb228f41806c95d91
|
/demo.py
|
2a660fbfcc3ed02c85103aabaea3fc6e5e1e491d
|
[] |
no_license
|
gbishop/guitarheel
|
https://github.com/gbishop/guitarheel
|
722c5f03330ee71a0e4ae93cb008b2fd25057413
|
2c3ccc189f13a8c79638e474c7f3bf58bbe3ec66
|
refs/heads/master
| 2016-09-06T15:42:02.259787 | 2011-04-28T13:25:32 | 2011-04-28T13:25:32 | 32,183,870 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame
from sounds.SoundBoxes import GuitarSoundBox

# Demo: map ukulele note samples onto a Guitar Hero controller — fret
# buttons select the note, the strum bar plays the highest held fret.
pygame.mixer.init(44100, -16, 2, 512)
pygame.init()
uk = 'notes/ukulele/'
a = [uk+'C.ogg', uk+'D.ogg', uk+'E.ogg', uk+'F.ogg', uk+'G.ogg', uk+'A.ogg']
gsb = GuitarSoundBox(a, 1)
guitar = pygame.joystick.Joystick(0)
guitar.init()
# joystick button number -> GuitarSoundBox note constant
guitar_map = {
    -1: GuitarSoundBox.NO_NOTE,
    5: GuitarSoundBox.GREEN_NOTE,
    1: GuitarSoundBox.RED_NOTE,
    0: GuitarSoundBox. YELLOW_NOTE,
    2: GuitarSoundBox.BLUE_NOTE,
    3: GuitarSoundBox.ORANGE_NOTE,
}
# which frets are currently held; NO_NOTE starts "held" as the fallback
guitar_fret_state = {
    GuitarSoundBox.NO_NOTE: 1,
    GuitarSoundBox.GREEN_NOTE: 0,
    GuitarSoundBox.RED_NOTE: 0,
    GuitarSoundBox.YELLOW_NOTE: 0,
    GuitarSoundBox.BLUE_NOTE: 0,
    GuitarSoundBox.ORANGE_NOTE: 0,
}
evt = pygame.event.Event(0)
while(evt.type != pygame.QUIT):
    evt = pygame.event.wait()
    if(evt.type == pygame.JOYBUTTONDOWN):
        if(evt.button == 9):
            # button 9 quits the demo
            evt = pygame.event.Event(pygame.QUIT)
        elif(evt.button in guitar_map):
            guitar_fret_state[guitar_map[evt.button]] = 1
        elif(evt.button == 14 or evt.button == 12):
            #play sound
            # strum: scan frets from highest to lowest, play the first held
            # NOTE(review): assumes the note constants are the ints 0..5 —
            # confirm against GuitarSoundBox.
            for i in [5,4,3,2,1,0]:
                if(guitar_fret_state[i] == 1):
                    gsb.play(i)
                    break
    elif(evt.type == pygame.JOYBUTTONUP):
        if(evt.button in guitar_map):
            guitar_fret_state[guitar_map[evt.button]] = 0
|
UTF-8
|
Python
| false | false | 2,011 |
17,532,056,531,258 |
1c206e29ae4e6618c2c25906614e6df1a785c3a4
|
f06cbc27f9efa62b33c436781ea61db1def4cb88
|
/buildbotstatus/forms.py
|
bd8f12c860cca2533845cb8d85682f8ee27481e7
|
[
"BSD-3-Clause"
] |
permissive
|
andythigpen/buildbotstatus
|
https://github.com/andythigpen/buildbotstatus
|
9ce4812e078526ba8aa83b03ba0ff2619e34c627
|
44abaa6d02c18e1b6dcd2d90aaa5341b77043433
|
refs/heads/master
| 2021-01-22T02:59:01.435982 | 2014-08-06T01:56:36 | 2014-08-06T01:56:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django import forms
from djblets.extensions.forms import SettingsForm
class BuildbotstatusSettingsForm(SettingsForm):
    """Extension settings form exposing the Buildbot base URL option."""
    # URL of the Buildbot instance this extension reports status for.
    buildbot_url = forms.CharField(help_text="Buildbot URL")
|
UTF-8
|
Python
| false | false | 2,014 |
17,815,524,346,110 |
13f3b285d830edd7aee66b6abc2c9179529d25ed
|
7f2091efd2efd78fd1d27eea31ef5204b7a651b7
|
/common.py
|
e51ac20f93537305191f003b0164f23c92df8629
|
[] |
no_license
|
pavelpower/Pell
|
https://github.com/pavelpower/Pell
|
8539119f8f4fabe7d2e38a3bda63abae6cec4891
|
59eee13fe473eb0434eaecfe31fb9d30e8726b00
|
refs/heads/master
| 2021-01-15T17:54:46.195420 | 2013-08-14T15:50:56 | 2013-08-14T15:50:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def checkPellSolution(x, y, n):
    """Return True iff (x, y) satisfies Pell's equation x^2 - n*y^2 = 1."""
    return x * x - n * y * y == 1
def getContinuedFraction(val):
    """Yield the continued-fraction coefficients a0, a1, a2, ... of val.

    Standard algorithm: a0 = floor(val); then repeatedly invert the
    fractional remainder and take its integer part.

    NOTE: for a rational val the remainder eventually reaches 0 and the
    next iteration raises ZeroDivisionError; intended for irrationals.
    """
    a = [int(val)]
    x = [val - a[0]]
    yield a[0]
    while True:
        a.append(int(1 / x[-1]))
        # BUG FIX: the original appended to an undefined name 'xx',
        # raising NameError on the second yield.
        x.append(1 / x[-1] - a[-1])
        yield a[-1]
|
UTF-8
|
Python
| false | false | 2,013 |
3,865,470,583,888 |
86156c080dbd9b4145960fc43bce146a7612783f
|
9f6e78a0d806497ba81b03cf182c7c8726615136
|
/code/python/class.py
|
06f33c07b6c7eefe3161cc55ed1a2eaab40412d1
|
[] |
no_license
|
spacecow/doc
|
https://github.com/spacecow/doc
|
80d9df33c782306e27e28653ea060165fe922b3b
|
9a9e5e53b73207651ccee282cf2189afd73f4485
|
refs/heads/master
| 2021-01-17T17:07:38.782129 | 2012-10-12T09:41:28 | 2012-10-12T09:41:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from collections import namedtuple
Link = namedtuple('Link',['id','url'])
print Link(1,'google.com') #--> Link(id=1, url='google.com')
|
UTF-8
|
Python
| false | false | 2,012 |
1,769,526,549,841 |
30897c0cd8dbc0230ce141f5687a56ab941d44a2
|
4280df60affa37819c67983e7404c358ecf5acf8
|
/jobs/forms/job.py
|
8c090c8a89fcab8e2e65e334c6af752f2ab69c40
|
[] |
no_license
|
zlzw/jobs.python.org
|
https://github.com/zlzw/jobs.python.org
|
3370f2e298e81d32ab1ceddcc51b770aa74d916d
|
690a1462bdaf244cde86d1b958812749a6f12c24
|
refs/heads/master
| 2021-01-12T15:03:55.989584 | 2013-04-09T16:44:30 | 2013-04-09T16:44:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# description length = 500
|
UTF-8
|
Python
| false | false | 2,013 |
3,925,600,122,615 |
53830421a3b41841db9de443e1b5292c115b8b1a
|
7e6db8d2da390d633ba6c34a132be011b9d7506b
|
/link_lexemes.py
|
dbe76f4606b346b80b2ebefd57dbd5e24276bb2b
|
[] |
no_license
|
ishalyminov/associative_text_model
|
https://github.com/ishalyminov/associative_text_model
|
88767e74033f65a52e82ed046f6507ed3afc05ee
|
4b9a6e3d4de3097a52442c170ebcbacf9e6af5ae
|
refs/heads/master
| 2021-01-18T20:20:40.194988 | 2012-11-29T06:14:40 | 2012-11-29T06:14:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import collections
import operator
import itertools
import nltk.corpus
'''
Full link set of lexemes is the lexemes which:
1) are not stopwords,
2) have the frequency of > 1 (multiple appearances within the same sentence are not counted).
'''
NO_POSITION = -1
def tuples_list_uniq(in_tuples_list, in_target_field = 0):
    """Drop tuples whose in_target_field value was already seen, keeping order."""
    kept = []
    observed = set()
    for item in in_tuples_list:
        key = item[in_target_field]
        if key in observed:
            continue
        observed.add(key)
        kept.append(item)
    return kept
def stop(in_sentences, in_language):
    # Stopword filter: for each sentence of (word, position) tuples, drop
    # tuples whose word (field 0) is in the NLTK stopword list for
    # in_language.  Returns a new list of filtered sentences.
    result = []
    stopwords = nltk.corpus.stopwords.words(in_language)
    for sentence in in_sentences:
        result.append([word_tuple for word_tuple in sentence if word_tuple[0] not in stopwords])
    return result
def extract_full_link_set(in_sentences,
                          in_language,
                          keep_positions = False,
                          keep_counts = False,
                          stopping = True):
    """Return the "full link set": lexemes that (optionally) survive
    stopword removal and occur more than once in the text, with repeats
    inside a single sentence counted only once.

    Result entries are [word] lists ordered by first appearance in the
    text; each entry is extended with the first position when
    keep_positions is set and with the count when keep_counts is set.

    NOTE(review): dict.iteritems() below makes this Python 2-only code.
    """
    # lexemes of full link set must be sorted by appearance in the text
    lexeme_positions = {}
    lexeme_counts = collections.defaultdict(lambda: 0)
    position = itertools.count()
    positioned_sentences = []
    for sentence in in_sentences:
        # generating the 'position-word' tuples out of a single sentence's words
        positioned_sentences.append([(word, pos) for word, pos in zip(sentence, position)])
    # Deduplicate within each sentence so repeats there count once.
    final_sentences = [tuples_list_uniq(sentence) for sentence in positioned_sentences[:]]
    if stopping:
        final_sentences = stop(final_sentences, in_language)
    for sentence in final_sentences:
        for word in sentence:
            # remembering the first appearance for each lexeme
            if word[0] not in lexeme_positions:
                lexeme_positions[word[0]] = word[1]
            lexeme_counts[word[0]] += 1
    # making a list of tuples (word, position) sorted by position
    sorted_positions = sorted(lexeme_positions.iteritems(), key = operator.itemgetter(1))
    result = [[word] for (word, position) in sorted_positions if lexeme_counts[word] > 1]
    if keep_positions:
        for lexeme_tuple in result:
            lexeme_tuple.append(lexeme_positions[lexeme_tuple[0]])
    if keep_counts:
        for lexeme_tuple in result:
            lexeme_tuple.append(lexeme_counts[lexeme_tuple[0]])
    return result
def test():
    # Smoke test for extract_full_link_set.
    # NOTE(review): extract_full_link_set returns a list of [word] lists,
    # so the flat-list expectation below looks inconsistent with the
    # implementation -- confirm which side is intended.
    text = [['a', 'man', 'going', 'down', 'a', 'street'],
            ['a', 'street', 'falling', 'down', 'the', 'cat']]
    assert(extract_full_link_set(text, 'english', stopping=False) == ['a', 'down', 'street'])
    print 'Test OK'
if __name__ =='__main__':
test()
|
UTF-8
|
Python
| false | false | 2,012 |
7,962,869,391,897 |
ae53d3597c987f28edfab6dc94f7fe7aa777491a
|
10902490dd44e3ddc516307d9f423a38d90cda99
|
/tests/urls.py
|
86c33f926526436539af51ea220165aecf6cf158
|
[] |
no_license
|
chris-morgan/django-nap
|
https://github.com/chris-morgan/django-nap
|
528333ef3ef77bf51eec93e173a32e902ea06fe1
|
234e3797955f7aee8a4bd3b3c1ab8dc9991c2ea3
|
refs/heads/master
| 2020-12-27T15:27:16.689464 | 2014-10-02T23:35:26 | 2014-10-02T23:35:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include
from nap import api
api.autodiscover()
urlpatterns = patterns('',
(r'^api/', include(api.patterns(True))),
)
|
UTF-8
|
Python
| false | false | 2,014 |
11,605,001,661,932 |
cfbcc61b1941a614f5e73133cc78aa5d8dde12df
|
93bc0ca618fe57467c3cff6abc07b710446ed483
|
/SubstringLocator.py
|
06d05f585edf2fac9342d1fd113c1acbe91f56b1
|
[] |
no_license
|
codifiman/The-Gibberish-Generator
|
https://github.com/codifiman/The-Gibberish-Generator
|
be592553b16376b62461b7698987e55b165680c5
|
21afb7e4aaf5dfbcbfc402ea06cbf12f8099b622
|
refs/heads/master
| 2021-01-16T18:43:13.734342 | 2013-04-10T05:05:54 | 2013-04-10T05:05:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sqlite3
class DBConn:
    """Wrapper around the dictionary SQLite database.

    Tests whether consonant substrings occur in any English dictionary
    word, and caches positive/negative results in two tables.

    NOTE: conn and c are class attributes, so every DBConn instance
    shares a single connection and cursor.
    """
    conn = sqlite3.connect('./dictionary_database.db')
    c = conn.cursor()
    def containsSubstring(self, s):
        # LIKE '%s%': does any word in the dictionary contain substring s?
        # Records the outcome in the appropriate cache table.
        substring = ('%'+s+'%',)
        self.c.execute("SELECT * FROM words WHERE word LIKE ?", substring)
        words = self.c.fetchall()
        wordcount = len(words)
        if wordcount > 0:
            self.insertMatch(s, int(wordcount))
            return True
        else:
            self.insertNonMatch(substring)
            return False
    def insertMatch(self, s, numMatches):
        # Cache a substring that occurs in English, with its match count.
        # NOTE(review): s is wrapped into a 1-tuple and then bound as a
        # single SQL parameter -- sqlite3 does not accept tuples as bound
        # values, so this likely raises; confirm intended behavior.
        s = (s,)
        try:
            self.c.execute("INSERT INTO consonant_substrings_in_english (substr, occurences) VALUES (?, ?)", (s, numMatches))
            self.conn.commit()
        except sqlite3.IntegrityError:
            # Already recorded.
            pass
    def insertNonMatch(self, s):
        # Cache a substring with no English matches.  NOTE(review): the
        # caller passes the ('%s%',) pattern tuple, which is wrapped in a
        # second tuple here -- same binding concern as insertMatch.
        s = (s,)
        try:
            self.c.execute("INSERT INTO consonant_substrings_not_in_english (substr) VALUES (?)", s)
            self.conn.commit()
        except sqlite3.IntegrityError:
            pass
class GenerateSubstring:
    """Produces every 2-, 3- and 4-character combination of consonants.

    'y' is included in the consonant set here.
    """
    consonants = ['b', 'c', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'm', 'n', 'p', 'q', 'r', 's', 't', 'v', 'w', 'x', 'y', 'z']
    def substring(self, i):
        # Placeholder with no effect; kept for interface compatibility
        # (returns None).
        x = 1
    def tuples(self):
        """Return all two-consonant strings (21**2 = 441 items)."""
        substringlist = []
        for consonant1 in self.consonants:
            for consonant2 in self.consonants:
                # BUG FIX: the original had a bare, conditionless "if ()"
                # here -- a syntax error.  Every pair is appended, matching
                # the pattern of threeples()/fourples().
                substringlist.append(consonant1 + consonant2)
        return substringlist
    def threeples(self):
        """Return all three-consonant strings (21**3 items)."""
        substringlist = []
        for consonant1 in self.consonants:
            for consonant2 in self.consonants:
                for consonant3 in self.consonants:
                    substringlist.append(consonant1 + consonant2 + consonant3)
        return substringlist
    def fourples(self):
        """Return all four-consonant strings (21**4 items)."""
        substringlist = []
        for consonant1 in self.consonants:
            for consonant2 in self.consonants:
                for consonant3 in self.consonants:
                    for consonant4 in self.consonants:
                        substringlist.append(consonant1 + consonant2 + consonant3 + consonant4)
        return substringlist
def deleteDuplicates(comp, nomatches):
    """Return the comparator list with entries containing any known
    non-English substring removed.

    comp      -- candidate substrings (e.g. threeples)
    nomatches -- shorter substrings already proven absent from English

    BUG FIX: the original called comp.remove() while iterating comp,
    which skips the element immediately following each removal.  A
    filtered copy is built instead; diagnostics are still printed.
    """
    print(comp)
    print(nomatches)
    kept = []
    for plet in comp:
        # Drop plet if it contains any substring known to be non-English.
        if any(word in plet for word in nomatches):
            print("removed %s from the comparators list." % plet)
        else:
            kept.append(plet)
    return kept
def findNoMatches(comp, db):
    # Query the dictionary database for each comparator substring and
    # collect the ones that occur in no English word.  db is a DBConn;
    # containsSubstring() also caches each result in the database.
    contains = False
    nomatches = []
    for comparator in comp:
        contains = db.containsSubstring(comparator)
        if (contains == False):
            nomatches.append(comparator)
            print "%s is not in the English language." % comparator
    return nomatches
def main():
    # Pipeline: find 2-consonant strings absent from English, prune the
    # 3-consonant candidates containing them, test those, then repeat for
    # the 4-consonant candidates.
    db = DBConn()
    subStr = GenerateSubstring()
    notuples = []
    nothreeples = []
    nofourples = []
    comp = subStr.tuples()
    # check for groups of two consonants that do not appear in english
    notuples = findNoMatches(comp, db)
    print notuples
    # remove threeples that contain the tuples listed with no english words associated with them
    comp = deleteDuplicates(subStr.threeples(), notuples)
    print comp
    # check for groups of three consonants that do not appear in english
    nothreeples = findNoMatches(comp, db)
    # remove the threeples and twoples from the fourples
    comp = deleteDuplicates(subStr.fourples(), nothreeples)
    nofourples = findNoMatches(comp, db)
    # print "English does not contain these consonant strings: %s" % (notuples + nothreeples + nofourples)
|
UTF-8
|
Python
| false | false | 2,013 |
18,966,575,597,697 |
9bb444f74fe694b6856745837ff3cba4f4333ab6
|
ee8bdc10af43f2d9b75e40c231dfe785b4048928
|
/apps/website_issues/utils.py
|
1b798cc0fbf20c6a7e475767948d4c7c5bfb2f5d
|
[
"MPL-1.1",
"GPL-2.0-or-later",
"LGPL-2.1-or-later"
] |
non_permissive
|
fox2mike/input.mozilla.org
|
https://github.com/fox2mike/input.mozilla.org
|
a92e0c4af7b1c4cf2b1950da167b860a08638561
|
bbef34bf28ca6141283ef286515274c5e83d6121
|
refs/heads/master
| 2021-01-18T11:41:44.591034 | 2011-10-20T17:06:54 | 2011-10-20T17:06:54 | 2,634,949 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import urlparse as urlparse_
class ParseResult(urlparse_.ParseResult):
    """ParseResult subclass whose geturl() special-cases the 'about'
    scheme, joining scheme and netloc with a bare colon (about:config)."""
    def geturl(self):
        """Reassemble parsed URL."""
        if self.scheme == 'about':
            return '%s:%s' % (self.scheme, self.netloc)
        else:
            return super(ParseResult, self).geturl()
def urlparse(url):
    """about: and chrome:// URL aware URL parser."""
    # about:config -> scheme 'about', netloc 'config'; remaining fields None.
    if url.startswith('about:'):
        parsed = url.split(':', 1)
        return ParseResult(
            scheme=parsed[0], netloc=parsed[1], path=None, params=None,
            query=None, fragment=None)
    # chrome://host/segment -> scheme 'chrome', netloc 'host'.
    # NOTE(review): only the first path segment after the host is kept;
    # deeper segments are dropped -- confirm this is intended.
    elif url.startswith('chrome://'):
        parsed = url.split('://', 1)[1].split('/')
        path = parsed[1] if len(parsed) > 1 else ''
        return ParseResult(
            scheme='chrome', netloc=parsed[0], path=path,
            params=None, query=None, fragment=None)
    else:
        # Anything else: defer to the stdlib parser.
        return urlparse_.urlparse(url)
def normalize_url(url):
    """Strips paths (and any "www." host) from a given URL.

    Keeps the protocol and non-www hosts/subdomains.  Credentials
    (user:pass@) are removed; about:/chrome: URLs pass through intact.

    >>> normalize_url('http://google.com/test')
    'http://google.com'
    >>> normalize_url('http://www.google.com:8080/')
    'http://google.com:8080'
    >>> normalize_url('about:config')
    'about:config'
    >>> normalize_url('https://mail.example.co.uk/.www/://?argl=#pff')
    'https://mail.example.co.uk'
    >>> normalize_url('https://user:[email protected]/')
    'https://mail.example.co.uk'
    """
    parse_result = urlparse(url)
    # about:/chrome: URLs have no path to strip; return them unchanged.
    if parse_result.scheme in ('about', 'chrome'):
        return parse_result.geturl()
    netloc = parse_result.netloc
    # Drop any credentials ahead of the host.
    if '@' in netloc:
        netloc = netloc.partition('@')[2]
    # Drop a leading "www." but keep other subdomains.
    if netloc.startswith("www."):
        netloc = netloc[4:]
    return ''.join((parse_result.scheme, '://', netloc))
|
UTF-8
|
Python
| false | false | 2,011 |
10,144,712,800,664 |
939890002b8aec93456187febb0e24c3a72ce659
|
e5e369969fff86e3df7e4b42c8ad2e8a144a75a4
|
/python/10.py
|
187c9879f9653a13b1cf91bd2573e1bfb67fd8b1
|
[] |
no_license
|
devspacenine/project-euler
|
https://github.com/devspacenine/project-euler
|
b2aad3814326833b607fc6ad5df3134649ce9723
|
82d80f2e12df17d19914583ace848ff4dca98d80
|
refs/heads/master
| 2016-09-06T06:17:26.394795 | 2014-10-22T14:12:18 | 2014-10-22T14:12:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
"""
import time
start_time = time.time()
answer = None
# Code here
# Sieve of Eratosthenes: mark all composites below the limit in one pass.
# Replaces the original per-candidate trial division, which is orders of
# magnitude slower for a two-million limit, with identical result.
LIMIT = 2000000
sieve = bytearray([1]) * LIMIT          # sieve[i] == 1  <=>  i still prime
sieve[0] = sieve[1] = 0                 # 0 and 1 are not prime
for p in range(2, int(LIMIT ** 0.5) + 1):
    if sieve[p]:
        # Strike every multiple of p, starting at p*p (smaller multiples
        # were already struck by smaller primes).
        sieve[p * p::p] = bytearray(len(range(p * p, LIMIT, p)))
answer = sum(i for i, flag in enumerate(sieve) if flag)
# End timer
end_time = time.time()
print "Answer: \t%d" % answer
print "Exec Time:\t%g microseconds\t%g seconds" % ((end_time - start_time) * 1000000, (end_time - start_time))
|
UTF-8
|
Python
| false | false | 2,014 |
678,604,864,008 |
91dbe3ad2beac8759bea9305df8ccb2837c3fb04
|
5dc4f640a2697b36a208903dae721f6d650ce701
|
/GIS_Python/Calculations/bmi.py
|
de5ffe359066a75f9c95326b005117ebce257e09
|
[] |
no_license
|
osfp-Pakistan/python
|
https://github.com/osfp-Pakistan/python
|
b5c60114d6951380ee114348a7ecb56877222ff0
|
ebde35121493817b3de6db37e03ac5f979959986
|
refs/heads/master
| 2021-01-20T07:57:12.443557 | 2014-04-25T20:03:10 | 2014-04-25T20:03:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Jon Nordling
# GEOG656 Python Programing
# December 9, 2012
# bmi.py
# This is the main function that will call function and
# print the bim of the user imputs
def main():
    # Entry point: prompt for weight/height, compute BMI, print category.
    bmi = calc_bmi()
    result = bmi_means(bmi)
    print 'You are: ',result
# This function will determine the users Weight and hight
# and calculate there bmi
def calc_bmi():
    # Prompt for weight and height and compute BMI with the imperial
    # formula (weight_lbs * 703) / height_in^2.
    # NOTE(review): Python 2 input() evaluates the entry; with integer
    # entries the division truncates under Python 2 -- confirm intended.
    w = input('Enter your Weight: ')
    h = input('Enter Height: ')
    bmi = (w*703)/(h*h)
    return bmi
# This function passed the bim and then
# Determines the output
def bmi_means(bmi):
    """Map a BMI value onto its weight-category label."""
    # Check from the top of the scale down; same bands as before:
    # [30, inf) obesity, [25, 30) over, [18.5, 25) healthy, else under.
    if bmi >= 30:
        return 'obesity'
    if bmi >= 25:
        return 'Over Weight'
    if bmi >= 18.5:
        return 'Healthy Weight'
    return 'Under Weight'
# This function insures that the main function will exicute first
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,014 |
7,533,372,658,267 |
6ce97d9966b8320d869a0262f6dc9cd6d3f6d806
|
1e3ce4306f53f6d443abd2dfed25639d6e33fca2
|
/modeladorges/admin.py
|
b84eb0e60603572663e1e27ebc55941e15e294b7
|
[] |
no_license
|
ehebel/modeladorsemantiko
|
https://github.com/ehebel/modeladorsemantiko
|
737b95fa4f48f9db13522ec1ce284cd46099bc5d
|
08525308c5e1235d0e6ac7a0fcea1e2798014ff0
|
refs/heads/master
| 2021-01-20T12:01:08.161937 | 2014-01-27T17:28:07 | 2014-01-27T17:28:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import autocomplete_light
autocomplete_light.autodiscover()
from django.contrib import admin
admin.autodiscover()
from django.forms import TextInput, Textarea
from modeladorges.models import *
import csv
from django.http import HttpResponse
from django.core.exceptions import PermissionDenied
def export_as_csv(modeladmin, request, queryset):
    """
    Generic csv export admin action.

    Writes the selected queryset as a semicolon-delimited CSV attachment:
    one header row of model field names, then one row per object.
    Staff-only; raises PermissionDenied otherwise.  (Python 2 code --
    values are encoded via unicode().encode('utf-8').)
    """
    if not request.user.is_staff:
        raise PermissionDenied
    opts = modeladmin.model._meta
    response = HttpResponse(mimetype='text/csv')
    response['Content-Disposition'] = 'attachment; filename=%s.csv' % unicode(opts).replace('.', '_')
    writer = csv.writer(response, delimiter=';')
    field_names = [field.name for field in opts.fields]
    # Write a first row with header information
    writer.writerow(field_names)
    # Write data rows
    for obj in queryset:
        values = []
        for field in field_names:
            value = (getattr(obj, field))
            # Callables (methods/properties) are invoked; errors become a
            # marker string rather than aborting the whole export.
            if callable(value):
                try:
                    value = value() or ''
                except:
                    value = 'Error retrieving value'
            if value is None:
                value = ''
            values.append(unicode(value).encode('utf-8'))
        writer.writerow(values)
    #writer.writerow([getattr(obj, field) for field in field_names])
    return response
class GesAdmin(admin.ModelAdmin):
search_fields = ['glosa',]
list_display = ['id','glosa','get_cie']
form = autocomplete_light.modelform_factory(ges_patologia)
actions = [export_as_csv]
admin.site.register(ges_patologia, GesAdmin)
class DescInLine(admin.TabularInline):
model = descripcione
formfield_overrides = {
models.CharField: {'widget': TextInput(attrs={'size':'100'})},
#models.TextField: {'widget': Textarea(attrs={'rows':4, 'cols':40})},
}
class DescAdmin(admin.ModelAdmin):
form = autocomplete_light.modelform_factory(descripcione)
ordering = ['termino']
admin.site.register(descripcione, DescAdmin)
class ConceptAdmin(admin.ModelAdmin):
form = autocomplete_light.modelform_factory(concepto)
inlines = DescInLine,
admin.site.register(concepto,ConceptAdmin)
class cieDeisAdmin(admin.ModelAdmin):
form = autocomplete_light.modelform_factory(ciediez)
admin.site.register(ciediez,cieDeisAdmin)
class casProcedAdmin(admin.ModelAdmin):
list_display = ['idintervencionclinica','integlosa','grpdescripcion']
list_filter = ['grpdescripcion','sgrdescripcion']
admin.site.register(casprocedimiento,casProcedAdmin)
admin.site.register(casdiagnostico)
__author__ = 'ehebel'
|
UTF-8
|
Python
| false | false | 2,014 |
17,927,193,511,342 |
2e78d2c25e995d374afba76ddf678c724f4b4f1f
|
7faf29f370d20dd7075ad7e5121002293b413080
|
/wasa2il/scripts/wasa2il-people-not-in-sugarcrm.py
|
14503fac330fe18f6c849eda60047148bdde91b5
|
[
"AGPL-3.0-or-later"
] |
non_permissive
|
jnaudon/wasa2il
|
https://github.com/jnaudon/wasa2il
|
8b1a2dbd78cd33dc04e1965a2c4d6bc519eb8869
|
33a81fced0e410048c9e70959c3f8f4f55ba4be3
|
refs/heads/master
| 2021-01-18T01:57:32.939478 | 2014-02-25T11:40:05 | 2014-02-25T11:40:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from django.contrib.auth.models import User
from django.contrib.auth.hashers import check_password
# Security audit: report accounts whose password is simply the username
# reversed, by checking the guess against Django's stored password hash.
users = User.objects.all()
for user in users:
    guess = user.username[::-1]
    if check_password(guess, user.password):
        print "%s (%s, %s)" % (user.get_name(), user.username, user.email)
|
UTF-8
|
Python
| false | false | 2,014 |
3,367,254,401,288 |
2e7822791c9f02f4d5cf8174c7b9c89fc4f58fc2
|
57577b782af68509c8dd81651f19eacfc572a5c1
|
/NEUQuizMaker.wsgi
|
8d92b1d944c6406aa80e6d596a6fa8974d755325
|
[
"Apache-2.0"
] |
permissive
|
sprite-neu/NEUQuziMaker
|
https://github.com/sprite-neu/NEUQuziMaker
|
720176a1ddb6f67b9ab62d82065ea837b56a49ab
|
7dc8ef081d63866939083d31f6eb10e56e35a28d
|
refs/heads/master
| 2020-12-26T01:49:45.533727 | 2014-10-16T12:33:49 | 2014-10-16T12:33:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
sys.path.insert(0, "/var/www/NEUQuizMaker")
from NEUQuizMaker import app as application
|
UTF-8
|
Python
| false | false | 2,014 |
3,796,751,113,399 |
04ac5f560b96cd27ae04aa62c89f0b7fe5097020
|
c62f32b9044c53e5dbd865d45ede669df410b0e6
|
/ELECTRONIC STATION/04_Find_Sequence.py
|
183c76bcd4123a257a0a12716e3a90242645272a
|
[] |
no_license
|
spoty/Checkio
|
https://github.com/spoty/Checkio
|
667daa340e4d6e77f538fa3afe8273da24370f9e
|
2d4e2b90d7e82b9b498d4733e7d27a0b55a0e485
|
refs/heads/master
| 2020-05-17T11:11:29.374783 | 2014-11-13T20:19:21 | 2014-11-13T20:19:21 | 14,042,177 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def checkio(l):
    """Return True if grid l holds 4+ equal values in a row (any direction).

    Rows, columns (via the py2 map(None, *l) transpose) and both diagonal
    families are scanned.  NOTE(review): Python 2 only.
    """
    # g(t, n): length-n windows of sequence t whose first element occurs
    # at least 4 times in the window (with n == 4 that means all equal).
    g = lambda t, n: [t[i:n+i]
                      for i,_ in enumerate(t)
                      if len(t[i:n+i])>=n
                      and t[i:n+i].count(t[i:n+i][0])>=4]
    # and t[i:n+i].count(t[i:n+i][0])>=4
    # and t[i:n+i].count(t[i:n+i][0])==len(t[i:n+i])
    # m(f): apply g to every line of length >= 4; keep non-empty hits.
    m = lambda f: [g(x,4) for x in f if g(x,4) != [] and len(x)>=4]
    h,w = len(l),len(l[0])
    # Collect every NW-SE and NE-SW diagonal as its own list.
    nw_se = [[l[h-1-q][p-q]
              for q in range(min(p, h-1), max(0, p-w+1)-1, -1)]
             for p in range(h+w-1)]
    ne_sw = [[l[p-q][q]
              for q in range(min(p, h-1), max(0, p-w+1)-1, -1)]
             for p in range(h+w-1)]
    # print m(l)+m(map(None, *l))
    # print "-"*50
    # print m([x for x in nw_se+ne_sw if len(x)>=4 and x != []])
    # and and x.count(x[0])>=4
    return True if m(l+map(None, *l)+nw_se+ne_sw) else False
print checkio([
[1, 2, 1, 1],
[1, 1, 4, 1],
[1, 3, 1, 6],
[1, 7, 2, 5]
])
print checkio([
[7, 1, 4, 1],
[1, 2, 5, 2],
[3, 4, 1, 3],
[1, 1, 8, 1]
])
print checkio([
[2, 1, 1, 6, 1],
[1, 3, 2, 1, 1],
[4, 1, 1, 3, 1],
[5, 5, 5, 5, 5],
[1, 1, 3, 1, 1]
])
print checkio([
[7, 1, 1, 8, 1, 1],
[1, 1, 7, 3, 1, 5],
[2, 3, 1, 2, 5, 1],
[1, 1, 1, 5, 1, 4],
[4, 6, 5, 1, 3, 1],
[1, 1, 9, 1, 2, 1]
])
|
UTF-8
|
Python
| false | false | 2,014 |
11,381,663,373,577 |
c20a950271b27a110dabf424106c6a2d7535a183
|
6bd9b966284041d9aeed5c1ee3ce478c4cead705
|
/proxy_grammar/favorites.py
|
a59ee16892a26160dd715b065cd67766c57ea9d1
|
[
"LGPL-2.0-or-later",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
dopey/aenea-fork
|
https://github.com/dopey/aenea-fork
|
6de573b2ac20ee232155823026331e7b36c72b4c
|
609f48b790ae3afdd3901d7e057b0e902a08dcb1
|
refs/heads/master
| 2021-01-13T02:07:10.258024 | 2014-01-28T22:29:20 | 2014-01-28T22:29:20 | 15,010,574 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from dragonfly import (Grammar, AppContext, CompoundRule, Choice, Dictation, List, Optional, Literal, Context, Repetition)
import natlink, os
from comsat import ComSat
from raul import SelfChoice
grammar_context = AppContext(executable="notepad")
grammar = Grammar("favorites", context=grammar_context)
# {"spoken form":"written form"}
from personal import FAVORITES
class Favorites(CompoundRule):
    """Voice rule "fave <key>": types the FAVORITES text mapped to <key>."""
    spec = "fave <key>"
    # Spoken alternatives come from the FAVORITES mapping's keys.
    extras = [SelfChoice("key", FAVORITES)]
    def _process_recognition(self, node, extras):
        # Resolve the spoken key to its written form and send it via ComSat.
        value = FAVORITES[str(extras["key"])]
        with ComSat() as connection:
            connection.getRPCProxy().callText(value)
grammar.add_rule(Favorites())
grammar.load()
def unload():
    # Module teardown hook: unload the grammar from the recognition
    # engine and drop the module-level reference.
    global grammar
    if grammar: grammar.unload()
    grammar = None
|
UTF-8
|
Python
| false | false | 2,014 |
14,362,370,668,009 |
d53219e4ee9da33eda8e5952a2ac47871e9fd765
|
061a3bae35f0422cc1f121d7d4c4ca59a3900251
|
/Deterministic/Simple Allosteric/6.Van Zon_ Simple Allosteric.py
|
771de96511ef549e2ab419d5029dba99102a6c95
|
[] |
no_license
|
chaitan3/Cyanobacteria
|
https://github.com/chaitan3/Cyanobacteria
|
c29d7c4bf8780530593a3f154a6d56922488d0df
|
16ed32d071b4760ad05a940120b3320d76b5f374
|
refs/heads/master
| 2016-09-03T06:21:56.700214 | 2012-12-09T09:33:49 | 2012-12-09T09:33:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from matplotlib.pylab import plot, show
from scipy.integrate import odeint
from numpy import arange, zeros, size
from time import sleep
kpf = 13.6
kdps = 0.908
f6 = kdps
b0 = kdps
kAf = 3.45e7
t = arange(0,96,0.1)
#structure of y: concentrations C, AC, C', A, molar
y0 = [0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012]
def rate(y,t0):
    # ODE right-hand side for the allosteric clock model (used by odeint).
    # State layout per the y0 comment above: y[0:7] = C phospho-states,
    # y[7:13] = AC complexes, y[13:20] = C', y[20] = A -- presumed
    # mapping; confirm against the Van Zon model description.
    C = y[0:7]
    AC = y[7:13]
    Cp = y[13:20]
    A = y[20]
    yp = zeros(size(y))
    for i in range(0,6):
        # Unbinding rate grows tenfold with each phosphorylation level.
        kAb = 10**(i+1)
        if i > 0:
            yp[i] = kpf*AC[i-1] + kAb*AC[i] - kAf*A*C[i]
            yp[13+i] = kdps*(Cp[i+1]-Cp[i])
        yp[i+7] = -kpf*AC[i] - kAb*AC[i] + kAf*A*C[i]
        yp[20] += -kAf*A*C[i] + (kAb+kpf)*AC[i]
    # Boundary states handled explicitly.
    yp[0] = 10*AC[0] - kAf*A*C[0] + b0*Cp[0]
    yp[6] = kpf*AC[5]-f6*C[6]
    yp[13] = kdps*Cp[1] - b0*Cp[0]
    yp[19] = -kdps*Cp[6] + f6*C[6]
    #input pathway
    # Impulse of free kinase A near t = 30 (within the 0.1 step width).
    if abs(t0 - 30) < 0.1 :
        yp[20] += 0.1
    return yp
y = odeint(rate, y0, t)
y = y.transpose()
C = y[0:7]
AC = y[7:13]
Cp = y[13:20]
A = y[20]
plot(t,C[6])
show()
|
UTF-8
|
Python
| false | false | 2,012 |
13,692,355,751,023 |
b5d836e47562cead8a6626196bed4b1eb2cc9abe
|
2825a312676ea6fb623c9714a86e9a79fb8ffb1a
|
/playaevents/api/urls.py
|
f0da289b05fe03723cfba69b80866a70a02b2b5b
|
[
"Apache-2.0"
] |
permissive
|
bkroeze/playaevents
|
https://github.com/bkroeze/playaevents
|
ecadb33e5240bb7d4d0c20979a14fa8b369d2ec9
|
f998feccc71f153fe845784f17ab107ce7c256ea
|
refs/heads/master
| 2021-01-01T16:13:47.005318 | 2012-07-03T00:12:11 | 2012-07-03T00:12:11 | 1,829,789 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from piston.resource import Resource
from playaevents.api import handlers
from signedauth.authentication import IPUserAuthentication
from playaevents.api.views import apidocs
import logging
log = logging.getLogger(__name__)
auth = IPUserAuthentication()
year_handler = Resource(handlers.YearHandler, authentication=auth)
camp_handler = Resource(handlers.ThemeCampHandler, authentication=auth)
art_handler = Resource(handlers.ArtInstallationHandler, authentication=auth)
event_handler = Resource(handlers.PlayaEventHandler, authentication=auth)
user_handler = Resource(handlers.UserHandler, authentication=auth)
cstreet_handler = Resource(handlers.CircularStreetHandler, authentication=auth)
tstreet_handler = Resource(handlers.TimeStreetHandler, authentication=auth)
urlpatterns = patterns(
'',
url(r'^docs/', apidocs, name="apidocs"),
url(r'^user/', user_handler),
url(r'^year/', year_handler),
url(r'^(?P<year_year>\d{4})/camp/(?P<camp_id>\d+)/$', camp_handler),
url(r'^(?P<year_year>\d{4})/camp/', camp_handler),
url(r'^(?P<year_year>\d{4})/art/(?P<art_id>\d+)/$', art_handler),
url(r'^(?P<year_year>\d{4})/art/', art_handler),
url(r'^(?P<year_year>\d{4})/event/(?P<playa_event_id>\d+)/$', event_handler),
url(r'^(?P<year_year>\d{4})/event/', event_handler),
url(r'^(?P<year_year>\d{4})/cstreet/', cstreet_handler),
url(r'^(?P<year_year>\d{4})/tstreet/', tstreet_handler),
)
if settings.DEBUG:
from signedauth.explore.handlers import EchoHandler
echo = Resource(handler=EchoHandler, authentication=auth)
urlpatterns += patterns(
'',
url(r'^explore/$', 'signedauth.explore.views.explore', name="exploreform"),
url(r'^ipecho\.(?P<emitter_format>[-\w]+)/$', echo, name="echohandler")
)
log.debug('added explore form')
else:
log.debug('production urls')
|
UTF-8
|
Python
| false | false | 2,012 |
7,327,214,254,478 |
ffb078c18906eaf9cde7a18a630e235280f45a71
|
56b4daf13b60a82162c252c21d6626b55314d35a
|
/portality/view/api.py
|
ca787d1ebeda0ca7d1a81a2d40f038e51baebcfe
|
[
"MIT"
] |
permissive
|
CottageLabs/contentmine
|
https://github.com/CottageLabs/contentmine
|
0dde49404a5206b8b7ed5c77d356b919cbe185f2
|
cea8c890872ee7b2b78de17db855c9f5a9c35f28
|
refs/heads/master
| 2020-12-24T12:02:06.807734 | 2014-07-02T16:01:46 | 2014-07-02T16:01:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
The contentmine API.
'''
import json, urllib2
from flask import Blueprint, request, abort, make_response, redirect
from flask.ext.login import current_user
from portality.view.query import query as query
import portality.models as models
from portality.core import app
import portality.util as util
from datetime import datetime
blueprint = Blueprint('api', __name__)
# TODO: add auth control
# return the API instructions --------------------------------------------------
@blueprint.route('/<path:path>', methods=['GET','POST'])
@blueprint.route('/', methods=['GET','POST'])
@util.jsonp
def api():
resp = make_response( json.dumps({
"README": {
"description": "Welcome to the ContentMine API. The endpoints listed here are available for their described functions. Append the name of each endpoint to the /api/ URL to gain access to each one.",
"version": "0.1"
},
"crawler": {
"description": "Lists all available crawlers, with further instructions for how to use them. Crawlers are used to crawl across some location or dataset, say a website of a given publisher or a listing of articles provided by an organisation. Each crawler has a specific target and operating process for that target. The purpose of a crawler is to extract metadata from these sources, and that metadata can then be uploaded to the catalogue API.",
"note": "It is not mandatory nor even expected that all crawlers will run directly at ContentMine. However this API will list the crawlers we know about and will make them available for execution on sources that can be accessed publicly. Individual users are instead encouraged to download and run crawlers (or to create their own) themselves across their own permitted access connections to the various useful resources they wish to scrape, then they can crawl those contents for facts that can then be uploaded directly to the fact API."
},
"scraper": {
"description": "Lists all available scrapers, with further instructions for how to use them. Scrapers act on the metadata output of crawlers, and are used to extract content from the sources described by said metadata. For example a particular scraper may know how to extract the full-text article content from a given website or perhaps to retrieve a particular file format. The content retrieved by a scraper can be uploaded to the content API - BUT ONLY IF it is permissible to do so; otherwise it should only be stored and processed locally.",
"note": "It is not mandatory nor even expected that all scrapers will run directly at ContentMine. However this API will list the scrapers we know about and will make them available for execution on crawled sources that are publicly accessible. Individual users are encouraged to download and run scrapers (or to create their own) themselves across the contents that they have access to and that they wish to mine, then they can upload extracted facts directly to the fact API."
},
"visitor": {
"description": "Lists all available visitors, with further instructions for how to use them. Visitors are used to extract certain types of fact from contents. For example once a crawler has identified the metadata of an article from a web page, and a scraper has retrieved the full-text content object, various visitors may be appropriate to run on the content to extract facts. Extracted facts can then be uploaded to the fact API.",
"note": "It is not mandatory nor even expected that all visitors will run directly at ContentMine. However this API will list the visitors we know about and will make them available for execution on contents that are publicly accessible. Individual users are encouraged to download and run visitors (or to create their own) themselves across the contents that they have scraped and that they wish to mine, then they can upload extracted facts directly to the fact API."
},
"catalogue": {
"description": "Provides access to all the metadata of all the items crawled or scraped by ContentMine. New catalogue records can be uploaded too, either as the output of ContentMine scraping or of any other process deemed appropriate. The catalogue provides powerful search features too.",
"note": "Any useful article metadata is welcome to the catalogue API, whether it was retrieved as a result of ContentMine crawling either by ContentMine or by users locally. We will endeavour to store and make available all such metadata for use as a growing and eventually comprehensive catalogue of academic materials."
},
"content": {
"description": "Lists all the content items currently stored for processing by ContentMine. Content objects such as article PDFs can also be uploaded to the content API, BUT ONLY IF it is permissible to do so, and only if absolutely necessary.",
"note": "The aim of ContentMine is to extract facts rather than to archive content, so this feature is only available to assist in that service and is not guaranteed to be a reliable long term storage service. The content API is therefore just a useful place to temporarily make some content available after crawling and scraping for visitors to run on."
},
"fact": {
"description": "THE MAIN EVENT! Here is access to the facts extracted and stored by ContentMine. Also, new facts can be uploaded for storage. Any process that extracts a fact can send such fact (or batch of facts) to this API and it will then become available via the ContentMine stream. The fact API also provides powerful search features too. Long term storage of facts may not be provided - it is hoped to be, but to be decided later in the project.",
"note": "There will also be access to daily lists of extracted facts, and perhaps larger dumps such as weeklies."
},
"activity": {
"description": "Intended to be a useful API for checking what processes are going on in the ContentMine. Probably not useful in general, but will be useful for development debugging and perhaps for technically proficient API users to check on progress of processes."
}
}) )
resp.mimetype = "application/json"
return resp
# provide access to the listing of available crawlers --------------------------
@blueprint.route('/crawler/<path:path>', methods=['GET','POST'])
@blueprint.route('/crawler', methods=['GET','POST'])
@util.jsonp
def crawler(path=None):
    """Describe the available crawlers as a JSON document.

    Fix: the '/crawler/<path:path>' rule passes a ``path`` keyword argument
    to this view, but the original signature accepted no arguments, so any
    request to /api/crawler/<anything> raised a TypeError (HTTP 500).
    ``path`` defaults to None so the bare '/crawler' rule keeps working.
    """
    # TODO: each crawler should be made available in the crawler folder
    # each one should be able to report what it does
    # see the bibserver codebase for something similar
    # each crawler should then be accessible via /api/crawler/NAME
    # and should have specified inputs and outputs
    # should also make an effort to conventionalise the IOs required
    resp = make_response( json.dumps({
        "description": "Will eventually list all the crawlers and explain what they do and how to call them."
    }) )
    resp.mimetype = "application/json"
    return resp
# provide access to the list of available scrapers -----------------------------
@blueprint.route('/scraper/<path:path>', methods=['GET','POST'])
@blueprint.route('/scraper', methods=['GET','POST'])
@util.jsonp
def scraper(path=None):
    """Describe the available scrapers as a JSON document.

    Fix: the '/scraper/<path:path>' rule passes a ``path`` keyword argument;
    the original zero-argument signature made any /api/scraper/<anything>
    request fail with a TypeError. ``path`` defaults to None so the bare
    '/scraper' rule keeps working.
    """
    # TODO: each scraper should be made available in the scraper folder
    # each one should be able to report what it does
    # see the bibserver codebase for something similar
    # each scraper should then be accessible via /api/scraper/NAME
    # and should have specified inputs and outputs
    # should also make an effort to conventionalise the IOs required
    resp = make_response( json.dumps({
        "description": "Will eventually list all the scrapers and explain what they do and how to call them."
    }) )
    resp.mimetype = "application/json"
    return resp
# provide access to the list of available visitors -----------------------------
@blueprint.route('/visitor/<path:path>', methods=['GET','POST'])
@blueprint.route('/visitor', methods=['GET','POST'])
@util.jsonp
def visitor(path=None):
    """Describe the available visitors as a JSON document.

    Fix: the '/visitor/<path:path>' rule passes a ``path`` keyword argument;
    the original zero-argument signature made any /api/visitor/<anything>
    request fail with a TypeError. ``path`` defaults to None so the bare
    '/visitor' rule keeps working.
    """
    # TODO: each visitor should be made available in the visitor folder
    # each one should be able to report what it does
    # see the bibserver codebase for something similar
    # each visitor should then be accessible via /api/visitor/NAME
    # and should have specified inputs and outputs
    # should also make an effort to conventionalise the IOs required
    resp = make_response( json.dumps({
        "description": "Will eventually list all the visitors and explain what they do and how to call them."
    }) )
    resp.mimetype = "application/json"
    return resp
# provide access to catalogue of article metadata ------------------------------
@blueprint.route('/catalogue', methods=['GET','POST'])
@util.jsonp
def catalogue():
    """Catalogue API root.

    GET returns a self-describing JSON documentation payload; POST accepts a
    JSON (or form-encoded) metadata record, stores it as a new Catalogue
    object and redirects to the saved record's URL.
    """
    if request.method == 'GET':
        resp = make_response( json.dumps({
            "README": {
                "description": "The ContentMine catalogue API. The endpoints listed here are available for their described functions. Append the name of each endpoint to the /api/catalogue/ URL to gain access to each one.",
                "GET": "Returns this documentation page",
                "POST": "POST a JSON payload following the bibJSON metadata convention (www.bibjson.org), and it will be saved in the ContentMine. This action redirects to the saved object, so the location/URL/ID of the object can be known."
            },
            "<identifier>": {
                "GET": "GET /api/catalogue/SOME_IDENTIFIER will return the identified catalogue entry in (bib)JSON format",
                "PUT": "PUT to an existing identified catalogue entry at /api/catalogue/SOME_IDENTIFIER will completely overwrite with the provided properly content-typed JSON payload",
                "POST": "POST to an existing identified catalogue entry at /api/catalogue/SOME_IDENTIFIER will update the entry with the provided key-value pairs. POST should provide a properly content-typed JSON payload."
            },
            "query": {
                "description": "A query endpoint which gives full access to the power of elasticsearch querying on all of the article metadata stored in the ContentMine catalogue.",
                "note": "Some examples of how to write queries will be provided, but for now just see the elasticsearch documentation at www.elasticsearch.org"
            }
        }) )
        resp.mimetype = "application/json"
        return resp
    elif request.method == 'POST':
        # Build a new record from whichever payload style the client used:
        # a JSON body takes precedence, otherwise form/query values are used.
        f = models.Catalogue()
        if request.json:
            for k in request.json.keys():
                f.data[k] = request.json[k]
        else:
            for k, v in request.values.items():
                f.data[k] = v
        f.save()
        # Redirect so the caller learns the new record's ID/URL.
        return redirect('/api/catalogue/' + f.id)
@blueprint.route('/catalogue/<ident>', methods=['GET','PUT','POST'])
@util.jsonp
def cataloguedirect(ident):
    """Retrieve (GET), overwrite (PUT) or patch (POST) one catalogue entry.

    :param ident: identifier of an existing Catalogue record.
    Returns the record as JSON on GET; on PUT/POST saves the change and
    redirects back to the record's URL. Responds 404 if the record cannot
    be retrieved.
    """
    # TODO: consider allowing PUT/POST of new objects to provided IDs in
    # addition to users being able to send them to /catalogue and having an ID
    # created for them. Do we want people to be able to specify their own IDs?
    try:
        f = models.Catalogue.pull(ident)
    except:
        abort(404)
    if f is None:
        # Fix: pull() may return None rather than raise for an unknown ID —
        # without this guard a missing record surfaced as a 500
        # (AttributeError on f.json) instead of a 404.
        abort(404)
    if request.method == 'GET':
        resp = make_response( f.json )
        resp.mimetype = "application/json"
        return resp
    elif request.method in ['PUT','POST']:
        # Gather the incoming payload: JSON body wins, else form/query values.
        inp = {}
        if request.json:
            inp = request.json
        else:
            for k, v in request.values.items():
                inp[k] = v
        # TODO: strip any control keys that get passed in,
        # if they should generally be ignored
        if request.method == 'PUT':
            # Full overwrite of the stored data.
            f.data = inp
        else:
            # Partial update: merge provided key-value pairs.
            for k in inp.keys():
                f.data[k] = inp[k]
        f.save()
        return redirect('/api/catalogue/' + ident)
@blueprint.route('/catalogue/query', methods=['GET','POST'])
@util.jsonp
def cataloguequery():
    """Run an elasticsearch query against the Catalogue index.

    The query source is resolved in order of precedence: POSTed JSON body,
    then the last raw form key, then the ?q= shorthand, then a ?source=
    URL-encoded query, and finally a match-all default.
    """
    if request.method == "POST":
        qs = request.json if request.json else dict(request.form).keys()[-1]
    elif 'q' in request.values:
        qs = {'query': {'query_string': {'query': request.values['q']}}}
    elif 'source' in request.values:
        qs = json.loads(urllib2.unquote(request.values['source']))
    else:
        qs = {'query': {'match_all': {}}}
    return query(path='Catalogue', qry=qs)
# provide access to retrieved content objects that can and have been stored ----
@blueprint.route('/content/<path:path>', methods=['GET','POST'])
@blueprint.route('/content', methods=['GET','POST'])
@util.jsonp
def content(path=None):
    """Placeholder content API.

    Fixes: (1) the '/content/<path:path>' rule passes a ``path`` kwarg the
    original zero-argument signature rejected (TypeError/500); (2) the POST
    branch fell through returning None, which Flask treats as an error —
    it now responds 501 Not Implemented until uploads exist.
    """
    if request.method == 'GET':
        # TODO: this should become a listing of stored content
        # perhaps with a paging / search facility
        resp = make_response( json.dumps({
            "description": "Will eventually list all the content stored in ContentMine for processing."
        }) )
        resp.mimetype = "application/json"
        return resp
    elif request.method == 'POST':
        # TODO: this should save POSTed content to wherever we are saving stuff
        # probably the saving of stuff should be handled by an archive class
        abort(501)
# provide access to facts ------------------------------------------------------
@blueprint.route('/fact', methods=['GET','POST'])
@util.jsonp
def fact():
    """Fact API root.

    GET returns a self-describing JSON documentation payload; POST accepts a
    JSON (or form-encoded) fact, stores it as a new Fact object and
    redirects to the saved fact's URL.
    """
    if request.method == 'GET':
        resp = make_response( json.dumps({
            "README": {
                "description": "The ContentMine fact API. The endpoints listed here are available for their described functions. Append the name of each endpoint to the /api/fact/ URL to gain access to each one.",
                "GET": "Returns this documentation page",
                "POST": "POST a JSON payload following the fact metadata convention (err, which does not exist yet), and it will be saved in the ContentMine"
            },
            "<identifier>": {
                "GET": "GET /api/fact/SOME_IDENTIFIER will return the identified fact in JSON format",
                "PUT": "PUT to an existing identified fact at /api/fact/SOME_IDENTIFIER will completely overwrite the fact with the provided properly content-typed JSON payload",
                "POST": "POST to an existing fact at /api/fact/SOME_IDENTIFIER will update the fact with the provided key-value pairs. POST should provide a properly content-typed JSON payload."
            },
            "query": {
                "description": "A query endpoint which gives full access to the power of elasticsearch querying on all of the facts stored in ContentMine.",
                "note": "Some examples of how to write queries will be provided, but for now just see the elasticsearch documentation at www.elasticsearch.org"
            },
            "daily": {
                "description": "Provides a listing of all facts discovered so far for the current day."
            }
        }) )
        resp.mimetype = "application/json"
        return resp
    elif request.method == 'POST':
        # Build a new fact from whichever payload style the client used:
        # a JSON body takes precedence, otherwise form/query values are used.
        f = models.Fact()
        if request.json:
            for k in request.json.keys():
                f.data[k] = request.json[k]
        else:
            for k, v in request.values.items():
                f.data[k] = v
        f.save()
        # Redirect so the caller learns the new fact's ID/URL.
        return redirect('/api/fact/' + f.id)
@blueprint.route('/fact/<ident>', methods=['GET','PUT','POST'])
@util.jsonp
def factdirect(ident):
    """Retrieve (GET), overwrite (PUT) or patch (POST) one fact.

    Fixes: (1) the handler body supports PUT (mirroring cataloguedirect)
    but 'PUT' was missing from the route's methods list, so every PUT got
    a 405 before reaching this code; (2) guard against pull() returning
    None so an unknown ID yields 404 rather than a 500.
    """
    # TODO: consider allowing PUT/POST of new objects to provided IDs in
    # addition to users being able to send them to /catalogue and having an ID
    # created for them. Do we want people to be able to specify their own IDs?
    try:
        f = models.Fact.pull(ident)
    except:
        abort(404)
    if f is None:
        abort(404)
    if request.method == 'GET':
        resp = make_response( f.json )
        resp.mimetype = "application/json"
        return resp
    elif request.method in ['PUT','POST']:
        # Gather the incoming payload: JSON body wins, else form/query values.
        inp = {}
        if request.json:
            inp = request.json
        else:
            for k, v in request.values.items():
                inp[k] = v
        # TODO: strip any control keys that get passed in,
        # if they should generally be ignored
        if request.method == 'PUT':
            # Full overwrite of the stored data.
            f.data = inp
        else:
            # Partial update: merge provided key-value pairs.
            for k in inp.keys():
                f.data[k] = inp[k]
        f.save()
        return redirect('/api/fact/' + ident)
@blueprint.route('/fact/query', methods=['GET','POST'])
@util.jsonp
def factquery():
    """Run an elasticsearch query against the Fact index.

    Resolution order for the query source: POSTed JSON body, then the last
    raw form key, then the ?q= shorthand, then a ?source= URL-encoded
    query, and finally a match-all default.
    """
    if request.method == "POST":
        if request.json:
            query_source = request.json
        else:
            query_source = dict(request.form).keys()[-1]
    elif 'q' in request.values:
        query_source = {'query': {'query_string': {'query': request.values['q']}}}
    elif 'source' in request.values:
        query_source = json.loads(urllib2.unquote(request.values['source']))
    else:
        query_source = {'query': {'match_all': {}}}
    return query(path='Fact', qry=query_source)
@blueprint.route('/fact/daily', methods=['GET','POST'])
@util.jsonp
def factdaily():
    """Return all facts created today (server-local date) as a JSON list,
    newest first."""
    # TODO: should this accept user-provided queries too? So people can search
    # on the daily list? If so, just check the incoming query and build one
    # with a MUST that includes the following date-based restriction.
    # Match today's date against the created_date field and sort descending.
    qry = {
        'query': {
            'query_string': {
                'query': datetime.now().strftime("%Y-%m-%d"),
                'default_field':'created_date'
            }
        },
        'sort': [{"created_date.exact":{"order":"desc"}}]
    }
    r = query(path='Fact',qry=qry,raw=True)
    # TODO: decide if any control keys should be removed before displaying facts
    # Unwrap the elasticsearch response down to the stored documents.
    res = [i['_source'] for i in r.get('hits',{}).get('hits',[])]
    resp = make_response( json.dumps(res) )
    resp.mimetype = "application/json"
    return resp
# list running activities ------------------------------------------------------
@blueprint.route('/activity/<path:path>', methods=['GET','POST'])
@blueprint.route('/activity', methods=['GET','POST'])
@util.jsonp
def activity(path=None):
    """Placeholder for the activity-monitoring API (currently returns {}).

    Fix: the '/activity/<path:path>' rule passes a ``path`` kwarg the
    original zero-argument signature rejected (TypeError/500). ``path``
    defaults to None so the bare '/activity' rule keeps working.
    """
    # TODO: useful stuff here
    resp = make_response( json.dumps({
    }) )
    resp.mimetype = "application/json"
    return resp
|
UTF-8
|
Python
| false | false | 2,014 |
2,903,397,912,643 |
2d7331e0dc879efa2d1c6066e383faf27aba688d
|
3387a978e3d469c1d695b2b5696781a908a0b299
|
/adopt4k_server/facade/views.py
|
e46f890fa7a16ef22daa1e7c4cf9b37d4e5ceaf4
|
[
"MIT"
] |
permissive
|
4kmapping/4kadopt_server
|
https://github.com/4kmapping/4kadopt_server
|
27c397917fab345971309b32f27725597ebc8954
|
1679b4a2a157894707c20de9cd54225f903fe3b2
|
refs/heads/master
| 2021-01-01T19:39:42.084426 | 2014-08-30T14:16:20 | 2014-08-30T14:16:22 | 21,634,008 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render
# FOR TESTING
from django.db.models.signals import post_save
from django.dispatch import receiver
from api.models import Adoption
from django.http import HttpResponse, HttpResponseNotAllowed
from django.http import HttpResponseBadRequest
# Create your views here.
def index(request):
    """Render the landing page (currently unused; a TemplateView serves it)."""
    #TODO: Currently Not Used. TemplateView is used instead.
    context = {}
    return render(request, 'facade/index.html', context)
'''
@receiver(post_save, sender=Adoption)
def print_signal(sender, **kawrgs):
print("Adoption was saved.")
'''
def cleanup_adoptions(request):
    """Show a confirmation page (GET) or, for superusers, bulk-delete
    adoptions (POST).

    POST must carry command=CLEARUP-ADOPTIONS; any other command gets a
    400 response. Non-superuser POSTs and other methods also get a 400.
    NOTE(review): a superuser POST with no 'command' value falls through
    and returns None — confirm whether that path is reachable in practice.
    """
    if request.method == 'GET':
        return render(request, 'facade/cleanup.html', {})
    elif request.method == 'POST' and request.user.is_superuser:
        if request.POST.get('command', None):
            cmd = request.POST.get('command')
            if cmd == 'CLEARUP-ADOPTIONS':
                # Only adoptions with target year >= 2015 (string compare
                # on the field) are removed.
                Adoption.objects.filter(targetyear__gte='2015').delete()
                return HttpResponse('Deleted all adoptions.')
            else:
                print 'wrong command.'
                mssg = 'You typed a wrong command.'
                return HttpResponseBadRequest(mssg)
    else:
        mssg = "The HTTP method is not supported or you don't have a privilege."
        return HttpResponseBadRequest(mssg)
def download(request):
    """Render the download page."""
    template_name = 'facade/download.html'
    return render(request, template_name, {})
|
UTF-8
|
Python
| false | false | 2,014 |
12,489,764,912,685 |
38608e0cf1232787ec30a99707dd42b30ca64bf4
|
fa36e63e5bb4a38aee4f500c2f01e7e5b7bba6e8
|
/api/urls.py
|
712a0014bcde4121d521c7d30bed3596d8fe2464
|
[] |
no_license
|
dlemmond/nutriquery
|
https://github.com/dlemmond/nutriquery
|
1175773ae8e8bdf60b3fd3a548eaf741dee88f77
|
52605fb6f17175a84d3da145823c497b545ef0a0
|
refs/heads/main
| 2016-05-22T08:27:03.404834 | 2011-11-13T23:58:25 | 2011-11-13T23:58:25 | 2,456,279 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *

# URL routes for the api app (old-style Django configuration with a
# 'api.views' prefix for the view names).
urlpatterns = patterns('api.views',
    (r'^food/(?P<food_id>\d+)$', 'food'),        # detail for a single food item
    (r'^nutrients/list$', 'nutrients_list'),     # listing of all nutrients
)
|
UTF-8
|
Python
| false | false | 2,011 |
8,332,236,598,901 |
7d6e19cf2045444ec0afbb588ae8204ab3ac07b7
|
08245dda04bdeb09a74e0c05de45286d4f956819
|
/lib/backtest.py
|
63d6365aa5ce7665a20fe05d92d4e333894f4873
|
[] |
no_license
|
tianhm/python-backtest
|
https://github.com/tianhm/python-backtest
|
7ddc9842bdb1c3a942ee1c40e088d76e8ec8e8fc
|
615c618fabe857c148480fef16bcd2608b59c4e4
|
refs/heads/master
| 2020-12-03T00:01:07.390490 | 2012-05-29T12:00:31 | 2012-05-29T12:00:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from collections import namedtuple, Counter
from matplotlib.pyplot import plot, subplot2grid, ylim, yticks, savefig, clf, \
fill_between
# A single executed order: 'buy'/'sell' plus the Tick it was executed on.
Trade = namedtuple('Trade', ['order', 'tick'])


class BackTest(object):
    """ Callable object running back tests for a strategy over a stock

    >>> backtest = BackTest()

    If goog is a Stock and bollinger a strategy (cf. strategy/__init__.py)

    >>> backtest(goog, bollinger) #doctest: +SKIP
    BackTest(trades=[99], position=short, gross=1253.63, net=662.1)

    Current position is short, gross PNL is 1253.63, net PNL taking into account
    the closing of the position and the trading costs if applicable is 662.1.

    Trading costs are 0 by default. To change that set the cost attribute of
    the backtest object to a function taking a trade as an argument and returning
    the cost.

    >>> backtest.cost = lambda trade: 0.5 * trade / 100
    >>> backtest #doctest: +SKIP
    BackTest(trades=[99], position=short, gross=1253.63, net=435.78005)
    """

    # Position transition tables: selling closes a long / opens a short,
    # buying the reverse.  NOTE(review): these two tables are not referenced
    # anywhere in this class — presumably documentation or external use.
    sell = {'long': None, None: 'short'}
    buy = {None: 'long', 'short': None}

    def __init__(self):
        # Per-trade cost function (takes a price, returns a cost); free by default.
        self.cost = lambda trade: 0
        self.stock = None
        self.strategy = None
        self.trades = []

    def __call__(self, stock, strategy):
        # Replay the strategy over every tick, recording a trade only when
        # the signal would actually change the current position.
        self.stock = stock
        self.strategy = strategy
        self.trades = []
        for t in stock:
            if strategy(t) == 'buy' and self.position != 'long':
                self.trades.append(Trade('buy', t))
            elif strategy(t) == 'sell' and self.position != 'short':
                self.trades.append(Trade('sell', t))
        return self

    def __repr__(self):
        return 'BackTest(trades=[{1}], position={0.position}, gross={0.gross}, \
net={0.net})'.format(self, len(self.trades))

    @property
    def trade_cost(self):
        """ trade cost for the backtest period """
        return self._trade_cost(len(self.stock) - 1)

    def _trade_cost(self, tick_index):
        """ trade cost from start to tick_index """
        # Cost is charged on the absolute close price of each executed trade.
        return sum(self.cost(abs(trade.tick.close)) for trade in self.trades
                   if trade.tick.index <= tick_index)

    @property
    def gross(self):
        """ gross pnl for the backtest period """
        return self._gross(len(self.stock) - 1)

    def _gross(self, tick_index):
        """ gross pnl from start to tick_index """
        # Sells add cash, buys subtract it; open positions are NOT valued here.
        sign = lambda trade: 1 if trade.order == 'sell' else -1
        return sum(sign(trade) * trade.tick.close for trade in self.trades
                   if trade.tick.index <= tick_index)

    @property
    def net(self):
        """ net pnl for the backtest period """
        return self._net(len(self.stock) - 1)

    def _net(self, tick_index):
        """ net pnl from start to tick_index """
        # Net = gross + mark-to-market close of any open position - costs.
        result = 0
        if self._position(tick_index) == 'long':
            result += self.stock[tick_index].close
        elif self._position(tick_index) == 'short':
            result -= self.stock[tick_index].close
        result += self._gross(tick_index)
        result -= self._trade_cost(tick_index)
        return result

    @property
    def position(self):
        """ position at the end of the backtest period """
        return self._position(len(self.stock) - 1)

    def _position(self, tick_index, numeric_flag=False):
        """ position at tick_index 1/0/-1 if numeric_flag """
        # Net buys minus sells up to tick_index: +1 long, 0 flat, -1 short.
        position_ = {1: 'long', 0: None, -1: 'short'}
        counter = Counter(trade.order for trade in self.trades
                          if trade.tick.index <= tick_index)
        numeric = counter['buy'] - counter['sell']
        if numeric_flag:
            return numeric
        return position_[counter['buy'] - counter['sell']]

    def plot(self):
        # Two stacked panels sharing the x axis: net PNL over time on top,
        # the long/flat/short position band below; saved as a PNG per
        # stock/strategy pair, then the figure is cleared.
        date = [tick.date for tick in self.stock]
        net = [self._net(tick.index) for tick in self.stock]
        position = [self._position(tick.index, True) for tick in self.stock]
        plot_net = subplot2grid((3, 1), (0, 0), rowspan=2)
        plot(date, net)
        plot_position = subplot2grid((3, 1), (2, 0), sharex=plot_net)
        ylim(-1.5, 1.5)
        yticks((-1, 0, 1), ('short', '...', 'long'))
        fill_between(date, position)
        savefig('png/{0}_{1}.png'.format(self.stock.symbol,
                                         self.strategy.__class__.__name__))
        clf()
|
UTF-8
|
Python
| false | false | 2,012 |
3,161,095,930,312 |
6321d70a94cbd90e3e56ae8271a46155b2544567
|
dde7893d03f140491983d4d545a2626a7074a7ef
|
/trunk/libs/python/jld/jld/registry/exception.py
|
0d9efa2c69c4729a726930d1d44a2fa674edc245
|
[] |
no_license
|
jldupont/jldupont.com
|
https://github.com/jldupont/jldupont.com
|
847a206a7cb878bd68a48bc198ce8d0e8400d0b1
|
92244651d393f857652452cd190c329298ab6f64
|
refs/heads/master
| 2021-01-02T23:06:27.822347 | 2010-09-25T22:50:17 | 2010-09-25T22:50:17 | 710,175 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" Registry Exception class
"""
__author__ = "Jean-Lou Dupont"
__version__ = "$Id: exception.py 708 2008-12-04 13:40:35Z JeanLou.Dupont $"
class RegistryException(Exception):
    """Exception type raised by Registry operations.

    Carries an arbitrary payload in ``value`` and renders it via str().
    """

    def __init__(self, value):
        # Deliberately does not call Exception.__init__; only self.value
        # carries the payload, matching the original contract.
        self.value = value

    def __str__(self):
        return "%s" % (self.value,)
|
UTF-8
|
Python
| false | false | 2,010 |
309,237,680,071 |
93aeb2eb8690e741ff15645a2581eca503ed11d6
|
2d067e6b3ca7e1794eff170bf2eab95d06cf4741
|
/kcech-manager-app-reader/main.py
|
c3a9d975eeb5ca0b4d83ade783603c25296701f3
|
[] |
no_license
|
DanGe42/scratch
|
https://github.com/DanGe42/scratch
|
a575372a2a387ae4ca57595b2c346ac65239d324
|
ff3eb1360541835b2b070d8c81c72bc707487df5
|
refs/heads/master
| 2021-01-19T07:46:57.303656 | 2013-06-10T07:19:12 | 2013-06-10T07:19:12 | 9,994,720 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from csv_reader import Parser
from html_out import generate_html
from sys import argv
import os
# Parse the CSV given on the command line.
csv_file = argv[1]
parser = Parser(csv_file)

# Column index of the application title; it names the output file and is
# highlighted by the HTML generator.
TITLE = 3

# Emit one HTML page per application row into ./applications/.
app_dir = "./applications/"
if not os.path.exists(app_dir):
    os.makedirs(app_dir)
for entry in parser.data:
    out_file = open(app_dir + entry[TITLE] + ".html", 'w')
    out_file.write(generate_html(parser.header, entry, TITLE))
    out_file.close()
|
UTF-8
|
Python
| false | false | 2,013 |
16,904,991,319,322 |
8e96c47f4edd6ab155102d38d413412dcd58cc2e
|
22388b602a20784df7df6a5995c71e3ab01a9dc5
|
/python/GeneralClasses/extractMatlabMatrix.py
|
1eacacd9e79a63f732968ac2907bb954897d0c7b
|
[] |
no_license
|
sebastianffx/Zentity
|
https://github.com/sebastianffx/Zentity
|
07f16a2019c53268cf19a4b66c33e3862e669ad5
|
07edb5d88123b2102d94648672fc0898c0557476
|
refs/heads/master
| 2021-01-18T10:48:16.178699 | 2011-07-25T18:26:29 | 2011-07-25T18:26:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import scipy.io as sio
from numpy import *
from os.path import join
class MatlabExtract:
    """Loads named matrices from MATLAB .mat files stored under a base path."""

    def __init__(self, path):
        # Directory containing the .mat files.
        self.path = path

    def _load(self, filename, key):
        # Read one variable out of a .mat file and return it as an ndarray.
        return array(sio.loadmat(join(self.path, filename))[key])

    def getFTMatrix(self, filename='Ft.mat'):
        return self._load(filename, 'Ft')

    def getWTMatrix(self, filename='Wt.mat'):
        return self._load(filename, 'Wt')

    def getHMatrix(self, filename='H.mat'):
        return self._load(filename, 'H_norm')

    def getFVMatrix(self, filename='Fv.mat'):
        return self._load(filename, 'Fv')

    def getWVMatrix(self, filename='Wv.mat'):
        return self._load(filename, 'Wv')

    def getBsMatrix(self, filename='Bs.mat'):
        return self._load(filename, 'Bs')

    # Array where the ids of images are sorted as in Xv and Xt.
    def getTtMatrix(self, filename='Tt.mat'):
        return self._load(filename, 'Tt')

    def getSortedLTMatrix(self, filename='SortedLT.mat'):
        return self._load(filename, 'indexSortedLT')

    def getImgIds(self, filename='imgIds.mat'):
        return self._load(filename, 'imgIdsTraining')

    def getImgDictionary(self, filename='imgIds.mat'):
        # Map each image id (file name without extension) to its row index.
        ids = self.getImgIds(filename)
        return {str(ids[i][0][0].split(".")[0]): i for i in range(len(ids))}
|
UTF-8
|
Python
| false | false | 2,011 |
5,325,759,453,389 |
bdab2dd69e528a129edbfc0b2be905fc95a86d57
|
61b66926171279d27b21b6ee75d2709f54214c9f
|
/new_user.py
|
87356ecc91dd247650f500b9680722017f831367
|
[] |
no_license
|
ehuenuman/TareaI-INFO175
|
https://github.com/ehuenuman/TareaI-INFO175
|
7fa3c0e681168173525e4739009ca5e5608e2bc6
|
b0b5761a9246741bce446a4e75d3f558637e33ce
|
refs/heads/master
| 2021-01-22T17:22:11.839359 | 2014-06-09T03:23:42 | 2014-06-09T03:23:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'new_user.ui'
#
# Created: Sun Jun 8 21:56:42 2014
# by: pyside-uic 0.2.13 running on PySide 1.1.1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
# NOTE: pyside-uic generated class — edits other than comments will be lost
# on regeneration from new_user.ui.
class Ui_NewUser(object):
    def setupUi(self, Dialog):
        """Build the widgets of the new-user dialog.
        @param Dialog: the QDialog to populate"""
        Dialog.setObjectName("NewUser")
        Dialog.resize(320, 250)
        # Name label + line edit.
        self.nombreLabel = QtGui.QLabel(Dialog)
        self.nombreLabel.setGeometry(QtCore.QRect(50, 40, 61, 31))
        self.nombreLabel.setObjectName("nombreLabel")
        self.nombreLineEdit = QtGui.QLineEdit(Dialog)
        self.nombreLineEdit.setGeometry(QtCore.QRect(140, 40, 113, 31))
        self.nombreLineEdit.setObjectName("nombreLineEdit")
        # Username label + line edit.
        self.usuarioLineEdit = QtGui.QLineEdit(Dialog)
        self.usuarioLineEdit.setGeometry(QtCore.QRect(140, 90, 113, 31))
        self.usuarioLineEdit.setObjectName("usuarioLineEdit")
        self.usuarioLabel = QtGui.QLabel(Dialog)
        self.usuarioLabel.setGeometry(QtCore.QRect(50, 90, 61, 31))
        self.usuarioLabel.setObjectName("usuarioLabel")
        # Password label + masked line edit.
        self.passLabel = QtGui.QLabel(Dialog)
        self.passLabel.setGeometry(QtCore.QRect(50, 140, 81, 31))
        self.passLabel.setObjectName("passLabel")
        self.passLineEdit = QtGui.QLineEdit(Dialog)
        self.passLineEdit.setGeometry(QtCore.QRect(140, 140, 113, 31))
        self.passLineEdit.setInputMethodHints(QtCore.Qt.ImhHiddenText|QtCore.Qt.ImhNoAutoUppercase|QtCore.Qt.ImhNoPredictiveText)
        self.passLineEdit.setEchoMode(QtGui.QLineEdit.Password)
        self.passLineEdit.setReadOnly(False)
        self.passLineEdit.setObjectName("passLineEdit")
        # Instruction label across the top.
        self.instLabel = QtGui.QLabel(Dialog)
        self.instLabel.setGeometry(QtCore.QRect(20, 0, 281, 51))
        self.instLabel.setObjectName("instLabel")
        # Create button (bold) and cancel button.
        self.crearButton = QtGui.QPushButton(Dialog)
        self.crearButton.setGeometry(QtCore.QRect(70, 190, 95, 31))
        font = QtGui.QFont()
        font.setPointSize(12)
        font.setWeight(75)
        font.setBold(True)
        self.crearButton.setFont(font)
        self.crearButton.setObjectName("crearButton")
        self.cancelarButton = QtGui.QPushButton(Dialog)
        self.cancelarButton.setGeometry(QtCore.QRect(180, 190, 95, 31))
        self.cancelarButton.setObjectName("cancelarButton")
        self.retranslateUi(Dialog)
        # Cancel simply closes the dialog.
        QtCore.QObject.connect(self.cancelarButton, QtCore.SIGNAL("clicked()"), Dialog.close)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def closeDialog(self, Dialog):
        """Close the new-user dialog.
        @param Dialog: the QDialog to close"""
        Dialog.close()

    def retranslateUi(self, Dialog):
        """Assign (translatable) display texts to the dialog's widgets.
        @param Dialog: the QDialog whose widgets get their texts"""
        Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Nuevo Usuario", None, QtGui.QApplication.UnicodeUTF8))
        self.nombreLabel.setText(QtGui.QApplication.translate("Dialog", "Nombre:", None, QtGui.QApplication.UnicodeUTF8))
        self.usuarioLabel.setText(QtGui.QApplication.translate("Dialog", "Usuario:", None, QtGui.QApplication.UnicodeUTF8))
        self.passLabel.setText(QtGui.QApplication.translate("Dialog", "Contraseña:", None, QtGui.QApplication.UnicodeUTF8))
        self.instLabel.setText(QtGui.QApplication.translate("Dialog", "Ingrese sus datos", None, QtGui.QApplication.UnicodeUTF8))
        self.crearButton.setText(QtGui.QApplication.translate("Dialog", "&Crear", None, QtGui.QApplication.UnicodeUTF8))
        self.cancelarButton.setText(QtGui.QApplication.translate("Dialog", "&Cancelar", None, QtGui.QApplication.UnicodeUTF8))
|
UTF-8
|
Python
| false | false | 2,014 |
10,703,058,517,307 |
1740c4e27e76253b075277d8a943932d93a62a31
|
8cecbbad4062a87b8a9ef68339f2228bcd8d053f
|
/2013-11-27/6.1/1130310226_段艺_6.1/1130310226_段艺_6.1/mysite/addr_book/models.py
|
6d21ae5152eb91de842ddf8087dd3847b16b6507
|
[] |
no_license
|
XianYX/Python-
|
https://github.com/XianYX/Python-
|
0a5fe645fe20b5d480d8f13d9d5c7e3e79d23016
|
ae79ce935b84de59caaa82acf535e0c89f130e79
|
refs/heads/master
| 2020-12-30T23:23:21.526941 | 2014-10-17T17:38:48 | 2014-10-17T17:38:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
class People(models.Model):
    """Address-book entry for one student."""
    # NOTE(review): all contact fields are plain CharFields; birthday looks
    # like a YYYYMMDD string (max_length 8) — confirm the expected format.
    student_num = models.CharField(max_length = 15)
    name = models.CharField(max_length = 30)
    sex = models.BooleanField(default = True)   # True presumably = male; verify convention
    phone = models.CharField(max_length = 15)
    email = models.EmailField()
    QQ = models.CharField(max_length = 11)
    address = models.CharField(max_length = 50)
    birthday = models.CharField(max_length = 8)
|
UTF-8
|
Python
| false | false | 2,014 |
13,537,736,938,298 |
17e273d7c67aae969e4acf688e5c087799927c35
|
ffd3691d4eaf21fa83c58c03ff5894c6de14168f
|
/skrill/tests/models_tests.py
|
24fc150cdf76d8b397baa2233ff2e4e75d1cc8e4
|
[] |
no_license
|
Eduedsky/django-skrill
|
https://github.com/Eduedsky/django-skrill
|
b2fdb32d14efc8ff37e68e53fd46ebcf424b3fc6
|
adb6215a1c6fda1856d41baea1da8b1c4e97ab60
|
refs/heads/master
| 2021-05-28T16:38:28.577478 | 2014-11-27T09:44:21 | 2014-11-27T09:44:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.test import TestCase
from skrill.models import StatusReport
from skrill.tests.factories import StatusReportFactory, generate_md5_signature
class StatusReportTestCase(TestCase):
    """Unit tests for StatusReport MD5-signature validation (Python 2)."""

    def test_valid_md5_signature(self):
        # A correctly generated signature must validate without raising.
        status_report = StatusReportFactory()
        status_report.md5sig = generate_md5_signature(status_report)
        status_report.validate_md5sig()

    def test_invalid_md5_signature(self):
        # A bogus signature must raise InvalidMD5Signature; str(e) also
        # exercises the exception's __str__. Fails loudly if nothing raises.
        status_report = StatusReportFactory()
        status_report.md5sig = "foobar"
        try:
            status_report.validate_md5sig()
        except StatusReport.InvalidMD5Signature, e:
            str(e)
        else:
            raise Exception("test_invalid_md5_signature failed")
|
UTF-8
|
Python
| false | false | 2,014 |
18,915,035,991,442 |
cc7438a093e83d77079da35ab6165a142b5ac3dc
|
6909a320ca46b6ad89deed4a9aee86e1b21fc7d3
|
/microblog/deletePermisos.py
|
7062f60d2f3b9b496d7526a7e0b342eb6e8f1ef3
|
[] |
no_license
|
CristianCandia/MicroblogRemRep
|
https://github.com/CristianCandia/MicroblogRemRep
|
097328b6483981d52f5370d42dad892f48d7c176
|
7aa5ec01da50859f1358d3733b626f1075f073a9
|
refs/heads/master
| 2021-01-01T17:00:35.064194 | 2014-04-20T05:36:48 | 2014-04-20T05:36:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 17/05/2013
@author: cristian
'''
'''
Created on 17/05/2013
@author: cristian
'''
#!usr/lib/python
from app import db
from app.modelo import Permiso, Rol, User2
# Best-effort bulk delete: stage every Permiso row for deletion, committing
# once at the end; on any failure roll the session back and print the error.
# (Python 2 script run directly against the app's SQLAlchemy session.)
try:
    permisos = Permiso.query.all()
    if permisos is not None:
        for p in permisos:
            db.session.delete(p)
            print "Se elimino" + p.nombre
        db.session.commit()
except Exception, error:
    db.session.rollback()
    print str(error)
|
UTF-8
|
Python
| false | false | 2,014 |
9,088,150,801,745 |
0d4b5c894fa5d12797f5a685a8d187e8122a2aab
|
dd5485047d2bcd522ccb8787b6d29d5e722ed63a
|
/eulerLorenzAtractor.py
|
7758e0f1f999738b725ce6ab4c415bededd813b9
|
[
"MIT"
] |
permissive
|
HAYASAKA-Ryosuke/LorenzAtractor
|
https://github.com/HAYASAKA-Ryosuke/LorenzAtractor
|
7c744f2e2b984d8f36e2ed014fc671891d7c16d3
|
6cefbf10b88fc3b60ad8c9c3c97089d603ae2acb
|
refs/heads/master
| 2016-09-06T19:48:22.830169 | 2013-12-26T07:04:25 | 2013-12-26T07:04:25 | 12,295,888 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#coding:utf-8
import matplotlib.pyplot as plt
import numpy
from mpl_toolkits.mplot3d import Axes3D
class LorenzAtractor(object):
    """Explicit-Euler integration of the Lorenz system with a 3D plot."""

    def __init__(self, p, r, b, t, dt, x, y, z):
        # Lorenz parameters (p=sigma, r=rho, b=beta), the time grid and step
        # size, and the initial state (x, y, z).
        self._p = p
        self._r = r
        self._b = b
        self._t = t
        self._dt = dt
        self._x = x
        self._y = y
        self._z = z

    def calc(self):
        """Step the system once per time-grid point, then show the orbit."""
        xs, ys, zs = [], [], []
        for _ in range(len(self._t)):
            # Lorenz derivatives evaluated at the current state, scaled by dt.
            step_x = (-self._p * self._x + self._p * self._y) * self._dt
            step_y = (-self._x * self._z + self._r * self._x - self._y) * self._dt
            step_z = (self._x * self._y - self._b * self._z) * self._dt
            self._x += step_x
            self._y += step_y
            self._z += step_z
            xs.append(self._x)
            ys.append(self._y)
            zs.append(self._z)
        self.show(xs, ys, zs)

    def show(self, x, y, z):
        """Scatter-plot the trajectory in a 3D axes and display it."""
        figure = plt.figure()
        axes = figure.add_subplot(111, projection='3d')
        axes.plot(x, y, z, 'o')
        plt.show()
# Classic chaotic parameter set (sigma=10, rho=28, beta=8/3) with a small
# initial displacement from the origin; 10,000 Euler steps of size 0.01.
p=10.0
r=28.0
b=8/3.0
x=0.1
y=0.1
z=0.1
dt=0.01
t=numpy.arange(0,100,dt)
LorenzAtractor(p,r,b,t,dt,x,y,z).calc()
|
UTF-8
|
Python
| false | false | 2,013 |
17,214,228,946,493 |
c92bcd47e310b0baf13bb93fcbd984863d9dbe71
|
70a692bdd9c23320650c2e2ab24acf9ef36c7f56
|
/src/SConscript
|
f4315eb9a25700a69daefcfee80df63de94efc0b
|
[] |
no_license
|
aside8/app
|
https://github.com/aside8/app
|
69cdf80c15e3e31b85fca52f2289dfcd507d9b5a
|
c3aa5d1f98de5095d530d4d825a0d3c8a0845b4d
|
refs/heads/master
| 2015-08-13T00:12:04.827951 | 2014-08-14T11:24:19 | 2014-08-14T11:24:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
Import("env")

# Clone so per-directory tweaks do not leak back into the parent environment.
env = env.Clone()

# Sub-directories to build with this environment.
dirs = [
    "socket",
    "util"
]
for d in dirs:
    env.IbDirs(d)
|
UTF-8
|
Python
| false | false | 2,014 |
9,285,719,336,234 |
18ec744053986304293b82120ef07b341b77ac21
|
3effddd612dcbfc857826048acf5db4a60f658b2
|
/pruebas/primo_test.py
|
f0587eab411dd7c0be4ad5a15f5e302427c131b5
|
[] |
no_license
|
migreyes30/test-project30
|
https://github.com/migreyes30/test-project30
|
9d56e3a0b83bdafd2ba6fd29660f16a6a0fc55eb
|
1666afbaaf7dd1fb65b5667de844018774fa7af0
|
refs/heads/master
| 2021-01-10T06:36:12.345907 | 2008-11-02T19:06:32 | 2008-11-02T19:06:32 | 36,206,882 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# File: primos_test.py
import sys
from unittest import TestCase, makeSuite, TextTestRunner
from carbine import parse, CarbineException
from StringIO import StringIO
from carbine import CarbineException
from carbine_interpreter import *
class CarbinePrimosPLYTetsCase(TestCase):
    """Tests parsing and interpreting a Carbine-language program that finds
    the largest element of a list (Python 2 / StringIO-based)."""

    # Carbine source under test, shared by both test methods.
    global source
    source= '''
@@elemento mas grande
var lst
func mas_grande(lst)
var accum,i
accum is lst[0]
for i in lst
if accum < i then
accum is i
end
end
return accum
end
var lista is [1,2,5,4,1]
write summon mas_grande(lista)
'''

    def setUp(self):
        # Capture stdout so the interpreter's `write` output can be asserted.
        self.output = StringIO()
        sys.stdout = self.output

    def tearDown(self):
        self.output.close()
        sys.stdout = sys.__stdout__

    def testPrimos(self):
        "fun elemento mas grande Test"
        # The parser must produce the expected nested AST for the program.
        self.assertEquals(parse(source),
            [['var', 'lst'],
             [['func', 'mas_grande', ['lst'],
               [['var', 'accum', 'i'],
                ['is', 'accum', ['lst', 0, 'lst']],
                ['for', 'i', 'lst',
                 [['if', ['<', 'accum', 'i'],
                   [['is', 'accum', 'i']]]]],
                ['return', 'accum']]],
              ['var', ['is', 'lista', [1, 2, 5, 4, 1]]],
              ['write', ['summon', 'mas_grande', ['lista']]]]]
        )

    def testFunGrande(self):
        'prueba de fun mas grande'
        # End-to-end: parse, build the symbol table, execute, and check that
        # the program printed the maximum element (5).
        parseo = parse(source)
        table = create_table(parseo[0])
        execute_statements(table,parseo[1])
        self.assertEquals(self.output.getvalue(), '5\n')
def run_test():
TextTestRunner(verbosity=2).run(makeSuite(CarbinePrimosPLYTetsCase))
if __name__ == '__main__':
run_test()
|
UTF-8
|
Python
| false | false | 2,008 |
5,325,759,453,781 |
f645eebcc420b25bbf0194857754bfe2d4f98a1b
|
5cf8b26233dd6e26a1019bfa7ab10381d246570d
|
/setup.py
|
c024838591ccfda431f73353e3957630ac3d41e9
|
[
"BSD-3-Clause"
] |
permissive
|
ixc/glamkit-eventtools
|
https://github.com/ixc/glamkit-eventtools
|
bd4468291bfca288dffb65dd3847b9a313ee985a
|
f94726c145f52bb7771b1c5352a39903d5fa33f3
|
refs/heads/master
| 2020-04-05T23:15:59.642052 | 2013-03-09T04:45:51 | 2013-03-09T04:45:51 | 757,956 | 15 | 3 | null | true | 2012-09-25T08:10:06 | 2010-07-05T15:24:33 | 2012-09-25T08:10:05 | 2012-09-25T08:10:05 | 168 | null | 8 | 1 |
Python
| null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='glamkit-eventtools',
version='1.0.0a1',
description='An event management app for Django.',
author='Greg Turner',
author_email='[email protected]',
url='http://github.com/glamkit/glamkit-eventtools',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=['setuptools', 'vobject==0.8.1c', 'python-dateutil==1.5', 'django-mptt>=0.5'],
license='BSD',
test_suite = "eventtools.tests",
)
# also requires libraries in REQUIREMENTS.txt
# pip install -r REQUIREMENTS.txt
|
UTF-8
|
Python
| false | false | 2,013 |
13,846,974,592,632 |
08be0ad1fb23dd96cd602c29d55715cea50898a5
|
a025d770eb959378271336fcf3a791890ae067b0
|
/ymg-spider/src/com/youmogan/test.py
|
1a2a352308e423af46e866f2e27f5d2fd338f547
|
[] |
no_license
|
leoinfomail/mygithub
|
https://github.com/leoinfomail/mygithub
|
e3ee662e8c0ef7e606b9630ce6c3a6a326981d14
|
f0285def4361b0bc3083a5c950b59b39000fb2b0
|
refs/heads/master
| 2021-01-10T20:38:20.734992 | 2009-05-05T14:24:56 | 2009-05-05T14:24:56 | 192,986 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python
#coding=utf8
__author__ = "jun.yanj"
__date__ = "$2008-11-20 23:21:47$"
# out_dir = YMG_OUT_SOURCE + os.path.sep + str(today) + os.path.sep + 'haha365'
# create_dir(out_dir)
#resource = urllib.urlopen('http://www.haha365.com/fqxh/205714.htm')
#for i in range(1000):
# data = resource.readline()
# if data.strip().lower() == '<TD valigN=botTom height="57" ClaSS="L26"> <FONT cOlOR=#000099><B>'.lower():
# print resource.readline()
#url1 = 'http://www.haha365.com/fqxh/205714.htm' #404
#url2 = 'http://www.lizhong-hardware.com/' #302
#url3 = 'http://www.haha365.com/xd_joke/20080721163706.htm' # 200 gb18030 no ct
#url4 = 'http://www.javaeye.com/' #200 utf-8
#url5 = 'http://www.youmowang.com/' #200 gbk
#
#baseUrl = 'http://www.youmowang.com/show.php?tid='
#urlList = [baseUrl + str(elem) for elem in range(1)]
# # digest data
# pattern_title = r'target="_blank"><h1>(.*?)</h1>'
# pattern_body = r'<div class="pb_box_lrad2_show">(.*?)<div class="clear"></div>'
if __name__ == "__main__":
print "Hel你好lo World";
|
UTF-8
|
Python
| false | false | 2,009 |
15,917,148,840,833 |
63147fc767f7eaeb30d3438e2efb6a5d342f53cf
|
347523b5ea88c36f6a7d7916426f219aafc4bbf8
|
/src/SMESH_SWIG/smeshBuilder.py
|
fe63e726b055c95b706df6619fa9836ee80927ae
|
[
"LGPL-2.1-only"
] |
non_permissive
|
FedoraScientific/salome-smesh
|
https://github.com/FedoraScientific/salome-smesh
|
397d95dc565b50004190755b56333c1dab86e9e1
|
9933995f6cd20e2169cbcf751f8647f9598c58f4
|
refs/heads/master
| 2020-06-04T08:05:59.662739 | 2014-11-20T13:06:53 | 2014-11-20T13:06:53 | 26,962,696 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (C) 2007-2014 CEA/DEN, EDF R&D, OPEN CASCADE
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# File : smeshBuilder.py
# Author : Francis KLOSS, OCC
# Module : SMESH
## @package smeshBuilder
# Python API for SALOME %Mesh module
## @defgroup l1_auxiliary Auxiliary methods and structures
## @defgroup l1_creating Creating meshes
## @{
## @defgroup l2_impexp Importing and exporting meshes
## @defgroup l2_construct Constructing meshes
## @defgroup l2_algorithms Defining Algorithms
## @{
## @defgroup l3_algos_basic Basic meshing algorithms
## @defgroup l3_algos_proj Projection Algorithms
## @defgroup l3_algos_radialp Radial Prism
## @defgroup l3_algos_segmarv Segments around Vertex
## @defgroup l3_algos_3dextr 3D extrusion meshing algorithm
## @}
## @defgroup l2_hypotheses Defining hypotheses
## @{
## @defgroup l3_hypos_1dhyps 1D Meshing Hypotheses
## @defgroup l3_hypos_2dhyps 2D Meshing Hypotheses
## @defgroup l3_hypos_maxvol Max Element Volume hypothesis
## @defgroup l3_hypos_quad Quadrangle Parameters hypothesis
## @defgroup l3_hypos_additi Additional Hypotheses
## @}
## @defgroup l2_submeshes Constructing submeshes
## @defgroup l2_compounds Building Compounds
## @defgroup l2_editing Editing Meshes
## @}
## @defgroup l1_meshinfo Mesh Information
## @defgroup l1_controls Quality controls and Filtering
## @defgroup l1_grouping Grouping elements
## @{
## @defgroup l2_grps_create Creating groups
## @defgroup l2_grps_edit Editing groups
## @defgroup l2_grps_operon Using operations on groups
## @defgroup l2_grps_delete Deleting Groups
## @}
## @defgroup l1_modifying Modifying meshes
## @{
## @defgroup l2_modif_add Adding nodes and elements
## @defgroup l2_modif_del Removing nodes and elements
## @defgroup l2_modif_edit Modifying nodes and elements
## @defgroup l2_modif_renumber Renumbering nodes and elements
## @defgroup l2_modif_trsf Transforming meshes (Translation, Rotation, Symmetry, Sewing, Merging)
## @defgroup l2_modif_movenode Moving nodes
## @defgroup l2_modif_throughp Mesh through point
## @defgroup l2_modif_invdiag Diagonal inversion of elements
## @defgroup l2_modif_unitetri Uniting triangles
## @defgroup l2_modif_changori Changing orientation of elements
## @defgroup l2_modif_cutquadr Cutting elements
## @defgroup l2_modif_smooth Smoothing
## @defgroup l2_modif_extrurev Extrusion and Revolution
## @defgroup l2_modif_patterns Pattern mapping
## @defgroup l2_modif_tofromqu Convert to/from Quadratic Mesh
## @}
## @defgroup l1_measurements Measurements
import salome
from salome.geom import geomBuilder
import SMESH # This is necessary for back compatibility
from SMESH import *
from salome.smesh.smesh_algorithm import Mesh_Algorithm
import SALOME
import SALOMEDS
import os
class MeshMeta(type):
def __instancecheck__(cls, inst):
"""Implement isinstance(inst, cls)."""
return any(cls.__subclasscheck__(c)
for c in {type(inst), inst.__class__})
def __subclasscheck__(cls, sub):
"""Implement issubclass(sub, cls)."""
return type.__subclasscheck__(cls, sub) or (cls.__name__ == sub.__name__ and cls.__module__ == sub.__module__)
## @addtogroup l1_auxiliary
## @{
## Converts an angle from degrees to radians
def DegreesToRadians(AngleInDegrees):
from math import pi
return AngleInDegrees * pi / 180.0
import salome_notebook
notebook = salome_notebook.notebook
# Salome notebook variable separator
var_separator = ":"
## Return list of variable values from salome notebook.
# The last argument, if is callable, is used to modify values got from notebook
def ParseParameters(*args):
Result = []
Parameters = ""
hasVariables = False
varModifFun=None
if args and callable( args[-1] ):
args, varModifFun = args[:-1], args[-1]
for parameter in args:
Parameters += str(parameter) + var_separator
if isinstance(parameter,str):
# check if there is an inexistent variable name
if not notebook.isVariable(parameter):
raise ValueError, "Variable with name '" + parameter + "' doesn't exist!!!"
parameter = notebook.get(parameter)
hasVariables = True
if varModifFun:
parameter = varModifFun(parameter)
pass
pass
Result.append(parameter)
pass
Parameters = Parameters[:-1]
Result.append( Parameters )
Result.append( hasVariables )
return Result
# Parse parameters converting variables to radians
def ParseAngles(*args):
return ParseParameters( *( args + (DegreesToRadians, )))
# Substitute PointStruct.__init__() to create SMESH.PointStruct using notebook variables.
# Parameters are stored in PointStruct.parameters attribute
def __initPointStruct(point,*args):
point.x, point.y, point.z, point.parameters,hasVars = ParseParameters(*args)
pass
SMESH.PointStruct.__init__ = __initPointStruct
# Substitute AxisStruct.__init__() to create SMESH.AxisStruct using notebook variables.
# Parameters are stored in AxisStruct.parameters attribute
def __initAxisStruct(ax,*args):
ax.x, ax.y, ax.z, ax.vx, ax.vy, ax.vz, ax.parameters,hasVars = ParseParameters(*args)
pass
SMESH.AxisStruct.__init__ = __initAxisStruct
smeshPrecisionConfusion = 1.e-07
def IsEqual(val1, val2, tol=smeshPrecisionConfusion):
if abs(val1 - val2) < tol:
return True
return False
NO_NAME = "NoName"
## Gets object name
def GetName(obj):
if obj:
# object not null
if isinstance(obj, SALOMEDS._objref_SObject):
# study object
return obj.GetName()
try:
ior = salome.orb.object_to_string(obj)
except:
ior = None
if ior:
# CORBA object
studies = salome.myStudyManager.GetOpenStudies()
for sname in studies:
s = salome.myStudyManager.GetStudyByName(sname)
if not s: continue
sobj = s.FindObjectIOR(ior)
if not sobj: continue
return sobj.GetName()
if hasattr(obj, "GetName"):
# unknown CORBA object, having GetName() method
return obj.GetName()
else:
# unknown CORBA object, no GetName() method
return NO_NAME
pass
if hasattr(obj, "GetName"):
# unknown non-CORBA object, having GetName() method
return obj.GetName()
pass
raise RuntimeError, "Null or invalid object"
## Prints error message if a hypothesis was not assigned.
def TreatHypoStatus(status, hypName, geomName, isAlgo, mesh):
if isAlgo:
hypType = "algorithm"
else:
hypType = "hypothesis"
pass
reason = ""
if hasattr( status, "__getitem__" ):
status,reason = status[0],status[1]
if status == HYP_UNKNOWN_FATAL :
reason = "for unknown reason"
elif status == HYP_INCOMPATIBLE :
reason = "this hypothesis mismatches the algorithm"
elif status == HYP_NOTCONFORM :
reason = "a non-conform mesh would be built"
elif status == HYP_ALREADY_EXIST :
if isAlgo: return # it does not influence anything
reason = hypType + " of the same dimension is already assigned to this shape"
elif status == HYP_BAD_DIM :
reason = hypType + " mismatches the shape"
elif status == HYP_CONCURENT :
reason = "there are concurrent hypotheses on sub-shapes"
elif status == HYP_BAD_SUBSHAPE :
reason = "the shape is neither the main one, nor its sub-shape, nor a valid group"
elif status == HYP_BAD_GEOMETRY:
reason = "geometry mismatches the expectation of the algorithm"
elif status == HYP_HIDDEN_ALGO:
reason = "it is hidden by an algorithm of an upper dimension, which generates elements of all dimensions"
elif status == HYP_HIDING_ALGO:
reason = "it hides algorithms of lower dimensions by generating elements of all dimensions"
elif status == HYP_NEED_SHAPE:
reason = "algorithm can't work without shape"
elif status == HYP_INCOMPAT_HYPS:
pass
else:
return
where = geomName
if where:
where = '"%s"' % geomName
if mesh:
meshName = GetName( mesh )
if meshName and meshName != NO_NAME:
where = '"%s" in "%s"' % ( geomName, meshName )
if status < HYP_UNKNOWN_FATAL and where:
print '"%s" was assigned to %s but %s' %( hypName, where, reason )
elif where:
print '"%s" was not assigned to %s : %s' %( hypName, where, reason )
else:
print '"%s" was not assigned : %s' %( hypName, reason )
pass
## Private method. Add geom (sub-shape of the main shape) into the study if not yet there
def AssureGeomPublished(mesh, geom, name=''):
if not isinstance( geom, geomBuilder.GEOM._objref_GEOM_Object ):
return
if not geom.GetStudyEntry() and \
mesh.smeshpyD.GetCurrentStudy():
## set the study
studyID = mesh.smeshpyD.GetCurrentStudy()._get_StudyId()
if studyID != mesh.geompyD.myStudyId:
mesh.geompyD.init_geom( mesh.smeshpyD.GetCurrentStudy())
## get a name
if not name and geom.GetShapeType() != geomBuilder.GEOM.COMPOUND:
# for all groups SubShapeName() returns "Compound_-1"
name = mesh.geompyD.SubShapeName(geom, mesh.geom)
if not name:
name = "%s_%s"%(geom.GetShapeType(), id(geom)%10000)
## publish
mesh.geompyD.addToStudyInFather( mesh.geom, geom, name )
return
## Return the first vertex of a geometrical edge by ignoring orientation
def FirstVertexOnCurve(mesh, edge):
vv = mesh.geompyD.SubShapeAll( edge, geomBuilder.geomBuilder.ShapeType["VERTEX"])
if not vv:
raise TypeError, "Given object has no vertices"
if len( vv ) == 1: return vv[0]
v0 = mesh.geompyD.MakeVertexOnCurve(edge,0.)
xyz = mesh.geompyD.PointCoordinates( v0 ) # coords of the first vertex
xyz1 = mesh.geompyD.PointCoordinates( vv[0] )
xyz2 = mesh.geompyD.PointCoordinates( vv[1] )
dist1, dist2 = 0,0
for i in range(3):
dist1 += abs( xyz[i] - xyz1[i] )
dist2 += abs( xyz[i] - xyz2[i] )
if dist1 < dist2:
return vv[0]
else:
return vv[1]
# end of l1_auxiliary
## @}
# Warning: smeshInst is a singleton
smeshInst = None
engine = None
doLcc = False
created = False
## This class allows to create, load or manipulate meshes
# It has a set of methods to create load or copy meshes, to combine several meshes.
# It also has methods to get infos on meshes.
class smeshBuilder(object, SMESH._objref_SMESH_Gen):
# MirrorType enumeration
POINT = SMESH_MeshEditor.POINT
AXIS = SMESH_MeshEditor.AXIS
PLANE = SMESH_MeshEditor.PLANE
# Smooth_Method enumeration
LAPLACIAN_SMOOTH = SMESH_MeshEditor.LAPLACIAN_SMOOTH
CENTROIDAL_SMOOTH = SMESH_MeshEditor.CENTROIDAL_SMOOTH
PrecisionConfusion = smeshPrecisionConfusion
# TopAbs_State enumeration
[TopAbs_IN, TopAbs_OUT, TopAbs_ON, TopAbs_UNKNOWN] = range(4)
# Methods of splitting a hexahedron into tetrahedra
Hex_5Tet, Hex_6Tet, Hex_24Tet, Hex_2Prisms, Hex_4Prisms = 1, 2, 3, 1, 2
def __new__(cls):
global engine
global smeshInst
global doLcc
#print "==== __new__", engine, smeshInst, doLcc
if smeshInst is None:
# smesh engine is either retrieved from engine, or created
smeshInst = engine
# Following test avoids a recursive loop
if doLcc:
if smeshInst is not None:
# smesh engine not created: existing engine found
doLcc = False
if doLcc:
doLcc = False
# FindOrLoadComponent called:
# 1. CORBA resolution of server
# 2. the __new__ method is called again
#print "==== smeshInst = lcc.FindOrLoadComponent ", engine, smeshInst, doLcc
smeshInst = salome.lcc.FindOrLoadComponent( "FactoryServer", "SMESH" )
else:
# FindOrLoadComponent not called
if smeshInst is None:
# smeshBuilder instance is created from lcc.FindOrLoadComponent
#print "==== smeshInst = super(smeshBuilder,cls).__new__(cls) ", engine, smeshInst, doLcc
smeshInst = super(smeshBuilder,cls).__new__(cls)
else:
# smesh engine not created: existing engine found
#print "==== existing ", engine, smeshInst, doLcc
pass
#print "====1 ", smeshInst
return smeshInst
#print "====2 ", smeshInst
return smeshInst
def __init__(self):
global created
#print "--------------- smeshbuilder __init__ ---", created
if not created:
created = True
SMESH._objref_SMESH_Gen.__init__(self)
## Dump component to the Python script
# This method overrides IDL function to allow default values for the parameters.
def DumpPython(self, theStudy, theIsPublished=True, theIsMultiFile=True):
return SMESH._objref_SMESH_Gen.DumpPython(self, theStudy, theIsPublished, theIsMultiFile)
## Set mode of DumpPython(), \a historical or \a snapshot.
# In the \a historical mode, the Python Dump script includes all commands
# performed by SMESH engine. In the \a snapshot mode, commands
# relating to objects removed from the Study are excluded from the script
# as well as commands not influencing the current state of meshes
def SetDumpPythonHistorical(self, isHistorical):
if isHistorical: val = "true"
else: val = "false"
SMESH._objref_SMESH_Gen.SetOption(self, "historical_python_dump", val)
## Sets the current study and Geometry component
# @ingroup l1_auxiliary
def init_smesh(self,theStudy,geompyD = None):
#print "init_smesh"
self.SetCurrentStudy(theStudy,geompyD)
if theStudy:
global notebook
notebook.myStudy = theStudy
## Creates a mesh. This can be either an empty mesh, possibly having an underlying geometry,
# or a mesh wrapping a CORBA mesh given as a parameter.
# @param obj either (1) a CORBA mesh (SMESH._objref_SMESH_Mesh) got e.g. by calling
# salome.myStudy.FindObjectID("0:1:2:3").GetObject() or
# (2) a Geometrical object for meshing or
# (3) none.
# @param name the name for the new mesh.
# @return an instance of Mesh class.
# @ingroup l2_construct
def Mesh(self, obj=0, name=0):
if isinstance(obj,str):
obj,name = name,obj
return Mesh(self,self.geompyD,obj,name)
## Returns a long value from enumeration
# @ingroup l1_controls
def EnumToLong(self,theItem):
return theItem._v
## Returns a string representation of the color.
# To be used with filters.
# @param c color value (SALOMEDS.Color)
# @ingroup l1_controls
def ColorToString(self,c):
val = ""
if isinstance(c, SALOMEDS.Color):
val = "%s;%s;%s" % (c.R, c.G, c.B)
elif isinstance(c, str):
val = c
else:
raise ValueError, "Color value should be of string or SALOMEDS.Color type"
return val
## Gets PointStruct from vertex
# @param theVertex a GEOM object(vertex)
# @return SMESH.PointStruct
# @ingroup l1_auxiliary
def GetPointStruct(self,theVertex):
[x, y, z] = self.geompyD.PointCoordinates(theVertex)
return PointStruct(x,y,z)
## Gets DirStruct from vector
# @param theVector a GEOM object(vector)
# @return SMESH.DirStruct
# @ingroup l1_auxiliary
def GetDirStruct(self,theVector):
vertices = self.geompyD.SubShapeAll( theVector, geomBuilder.geomBuilder.ShapeType["VERTEX"] )
if(len(vertices) != 2):
print "Error: vector object is incorrect."
return None
p1 = self.geompyD.PointCoordinates(vertices[0])
p2 = self.geompyD.PointCoordinates(vertices[1])
pnt = PointStruct(p2[0]-p1[0], p2[1]-p1[1], p2[2]-p1[2])
dirst = DirStruct(pnt)
return dirst
## Makes DirStruct from a triplet
# @param x,y,z vector components
# @return SMESH.DirStruct
# @ingroup l1_auxiliary
def MakeDirStruct(self,x,y,z):
pnt = PointStruct(x,y,z)
return DirStruct(pnt)
## Get AxisStruct from object
# @param theObj a GEOM object (line or plane)
# @return SMESH.AxisStruct
# @ingroup l1_auxiliary
def GetAxisStruct(self,theObj):
import GEOM
edges = self.geompyD.SubShapeAll( theObj, geomBuilder.geomBuilder.ShapeType["EDGE"] )
axis = None
if len(edges) > 1:
vertex1, vertex2 = self.geompyD.SubShapeAll( edges[0], geomBuilder.geomBuilder.ShapeType["VERTEX"] )
vertex3, vertex4 = self.geompyD.SubShapeAll( edges[1], geomBuilder.geomBuilder.ShapeType["VERTEX"] )
vertex1 = self.geompyD.PointCoordinates(vertex1)
vertex2 = self.geompyD.PointCoordinates(vertex2)
vertex3 = self.geompyD.PointCoordinates(vertex3)
vertex4 = self.geompyD.PointCoordinates(vertex4)
v1 = [vertex2[0]-vertex1[0], vertex2[1]-vertex1[1], vertex2[2]-vertex1[2]]
v2 = [vertex4[0]-vertex3[0], vertex4[1]-vertex3[1], vertex4[2]-vertex3[2]]
normal = [ v1[1]*v2[2]-v2[1]*v1[2], v1[2]*v2[0]-v2[2]*v1[0], v1[0]*v2[1]-v2[0]*v1[1] ]
axis = AxisStruct(vertex1[0], vertex1[1], vertex1[2], normal[0], normal[1], normal[2])
axis._mirrorType = SMESH.SMESH_MeshEditor.PLANE
elif len(edges) == 1:
vertex1, vertex2 = self.geompyD.SubShapeAll( edges[0], geomBuilder.geomBuilder.ShapeType["VERTEX"] )
p1 = self.geompyD.PointCoordinates( vertex1 )
p2 = self.geompyD.PointCoordinates( vertex2 )
axis = AxisStruct(p1[0], p1[1], p1[2], p2[0]-p1[0], p2[1]-p1[1], p2[2]-p1[2])
axis._mirrorType = SMESH.SMESH_MeshEditor.AXIS
elif theObj.GetShapeType() == GEOM.VERTEX:
x,y,z = self.geompyD.PointCoordinates( theObj )
axis = AxisStruct( x,y,z, 1,0,0,)
axis._mirrorType = SMESH.SMESH_MeshEditor.POINT
return axis
# From SMESH_Gen interface:
# ------------------------
## Sets the given name to the object
# @param obj the object to rename
# @param name a new object name
# @ingroup l1_auxiliary
def SetName(self, obj, name):
if isinstance( obj, Mesh ):
obj = obj.GetMesh()
elif isinstance( obj, Mesh_Algorithm ):
obj = obj.GetAlgorithm()
ior = salome.orb.object_to_string(obj)
SMESH._objref_SMESH_Gen.SetName(self, ior, name)
## Sets the current mode
# @ingroup l1_auxiliary
def SetEmbeddedMode( self,theMode ):
#self.SetEmbeddedMode(theMode)
SMESH._objref_SMESH_Gen.SetEmbeddedMode(self,theMode)
## Gets the current mode
# @ingroup l1_auxiliary
def IsEmbeddedMode(self):
#return self.IsEmbeddedMode()
return SMESH._objref_SMESH_Gen.IsEmbeddedMode(self)
## Sets the current study
# @ingroup l1_auxiliary
def SetCurrentStudy( self, theStudy, geompyD = None ):
#self.SetCurrentStudy(theStudy)
if not geompyD:
from salome.geom import geomBuilder
geompyD = geomBuilder.geom
pass
self.geompyD=geompyD
self.SetGeomEngine(geompyD)
SMESH._objref_SMESH_Gen.SetCurrentStudy(self,theStudy)
global notebook
if theStudy:
notebook = salome_notebook.NoteBook( theStudy )
else:
notebook = salome_notebook.NoteBook( salome_notebook.PseudoStudyForNoteBook() )
## Gets the current study
# @ingroup l1_auxiliary
def GetCurrentStudy(self):
#return self.GetCurrentStudy()
return SMESH._objref_SMESH_Gen.GetCurrentStudy(self)
## Creates a Mesh object importing data from the given UNV file
# @return an instance of Mesh class
# @ingroup l2_impexp
def CreateMeshesFromUNV( self,theFileName ):
aSmeshMesh = SMESH._objref_SMESH_Gen.CreateMeshesFromUNV(self,theFileName)
aMesh = Mesh(self, self.geompyD, aSmeshMesh)
return aMesh
## Creates a Mesh object(s) importing data from the given MED file
# @return a tuple ( list of Mesh class instances, SMESH.DriverMED_ReadStatus )
# @ingroup l2_impexp
def CreateMeshesFromMED( self,theFileName ):
aSmeshMeshes, aStatus = SMESH._objref_SMESH_Gen.CreateMeshesFromMED(self,theFileName)
aMeshes = [ Mesh(self, self.geompyD, m) for m in aSmeshMeshes ]
return aMeshes, aStatus
## Creates a Mesh object(s) importing data from the given SAUV file
# @return a tuple ( list of Mesh class instances, SMESH.DriverMED_ReadStatus )
# @ingroup l2_impexp
def CreateMeshesFromSAUV( self,theFileName ):
aSmeshMeshes, aStatus = SMESH._objref_SMESH_Gen.CreateMeshesFromSAUV(self,theFileName)
aMeshes = [ Mesh(self, self.geompyD, m) for m in aSmeshMeshes ]
return aMeshes, aStatus
## Creates a Mesh object importing data from the given STL file
# @return an instance of Mesh class
# @ingroup l2_impexp
def CreateMeshesFromSTL( self, theFileName ):
aSmeshMesh = SMESH._objref_SMESH_Gen.CreateMeshesFromSTL(self,theFileName)
aMesh = Mesh(self, self.geompyD, aSmeshMesh)
return aMesh
## Creates Mesh objects importing data from the given CGNS file
# @return a tuple ( list of Mesh class instances, SMESH.DriverMED_ReadStatus )
# @ingroup l2_impexp
def CreateMeshesFromCGNS( self, theFileName ):
aSmeshMeshes, aStatus = SMESH._objref_SMESH_Gen.CreateMeshesFromCGNS(self,theFileName)
aMeshes = [ Mesh(self, self.geompyD, m) for m in aSmeshMeshes ]
return aMeshes, aStatus
## Creates a Mesh object importing data from the given GMF file.
# GMF files must have .mesh extension for the ASCII format and .meshb for
# the binary format.
# @return [ an instance of Mesh class, SMESH.ComputeError ]
# @ingroup l2_impexp
def CreateMeshesFromGMF( self, theFileName ):
aSmeshMesh, error = SMESH._objref_SMESH_Gen.CreateMeshesFromGMF(self,
theFileName,
True)
if error.comment: print "*** CreateMeshesFromGMF() errors:\n", error.comment
return Mesh(self, self.geompyD, aSmeshMesh), error
## Concatenate the given meshes into one mesh.
# @return an instance of Mesh class
# @param meshes the meshes to combine into one mesh
# @param uniteIdenticalGroups if true, groups with same names are united, else they are renamed
# @param mergeNodesAndElements if true, equal nodes and elements aremerged
# @param mergeTolerance tolerance for merging nodes
# @param allGroups forces creation of groups of all elements
# @param name name of a new mesh
def Concatenate( self, meshes, uniteIdenticalGroups,
mergeNodesAndElements = False, mergeTolerance = 1e-5, allGroups = False,
name = ""):
if not meshes: return None
for i,m in enumerate(meshes):
if isinstance(m, Mesh):
meshes[i] = m.GetMesh()
mergeTolerance,Parameters,hasVars = ParseParameters(mergeTolerance)
meshes[0].SetParameters(Parameters)
if allGroups:
aSmeshMesh = SMESH._objref_SMESH_Gen.ConcatenateWithGroups(
self,meshes,uniteIdenticalGroups,mergeNodesAndElements,mergeTolerance)
else:
aSmeshMesh = SMESH._objref_SMESH_Gen.Concatenate(
self,meshes,uniteIdenticalGroups,mergeNodesAndElements,mergeTolerance)
aMesh = Mesh(self, self.geompyD, aSmeshMesh, name=name)
return aMesh
## Create a mesh by copying a part of another mesh.
# @param meshPart a part of mesh to copy, either a Mesh, a sub-mesh or a group;
# to copy nodes or elements not contained in any mesh object,
# pass result of Mesh.GetIDSource( list_of_ids, type ) as meshPart
# @param meshName a name of the new mesh
# @param toCopyGroups to create in the new mesh groups the copied elements belongs to
# @param toKeepIDs to preserve order of the copied elements or not
# @return an instance of Mesh class
def CopyMesh( self, meshPart, meshName, toCopyGroups=False, toKeepIDs=False):
if (isinstance( meshPart, Mesh )):
meshPart = meshPart.GetMesh()
mesh = SMESH._objref_SMESH_Gen.CopyMesh( self,meshPart,meshName,toCopyGroups,toKeepIDs )
return Mesh(self, self.geompyD, mesh)
## From SMESH_Gen interface
# @return the list of integer values
# @ingroup l1_auxiliary
def GetSubShapesId( self, theMainObject, theListOfSubObjects ):
return SMESH._objref_SMESH_Gen.GetSubShapesId(self,theMainObject, theListOfSubObjects)
## From SMESH_Gen interface. Creates a pattern
# @return an instance of SMESH_Pattern
#
# <a href="../tui_modifying_meshes_page.html#tui_pattern_mapping">Example of Patterns usage</a>
# @ingroup l2_modif_patterns
def GetPattern(self):
return SMESH._objref_SMESH_Gen.GetPattern(self)
## Sets number of segments per diagonal of boundary box of geometry by which
# default segment length of appropriate 1D hypotheses is defined.
# Default value is 10
# @ingroup l1_auxiliary
def SetBoundaryBoxSegmentation(self, nbSegments):
SMESH._objref_SMESH_Gen.SetBoundaryBoxSegmentation(self,nbSegments)
# Filtering. Auxiliary functions:
# ------------------------------
## Creates an empty criterion
# @return SMESH.Filter.Criterion
# @ingroup l1_controls
def GetEmptyCriterion(self):
Type = self.EnumToLong(FT_Undefined)
Compare = self.EnumToLong(FT_Undefined)
Threshold = 0
ThresholdStr = ""
ThresholdID = ""
UnaryOp = self.EnumToLong(FT_Undefined)
BinaryOp = self.EnumToLong(FT_Undefined)
Tolerance = 1e-07
TypeOfElement = ALL
Precision = -1 ##@1e-07
return Filter.Criterion(Type, Compare, Threshold, ThresholdStr, ThresholdID,
UnaryOp, BinaryOp, Tolerance, TypeOfElement, Precision)
## Creates a criterion by the given parameters
# \n Criterion structures allow to define complex filters by combining them with logical operations (AND / OR) (see example below)
# @param elementType the type of elements(NODE, EDGE, FACE, VOLUME)
# @param CritType the type of criterion (FT_Taper, FT_Area, FT_RangeOfIds, FT_LyingOnGeom etc.)
# @param Compare belongs to {FT_LessThan, FT_MoreThan, FT_EqualTo}
# @param Threshold the threshold value (range of ids as string, shape, numeric)
# @param UnaryOp FT_LogicalNOT or FT_Undefined
# @param BinaryOp a binary logical operation FT_LogicalAND, FT_LogicalOR or
# FT_Undefined (must be for the last criterion of all criteria)
# @param Tolerance the tolerance used by FT_BelongToGeom, FT_BelongToSurface,
# FT_LyingOnGeom, FT_CoplanarFaces criteria
# @return SMESH.Filter.Criterion
#
# <a href="../tui_filters_page.html#combining_filters">Example of Criteria usage</a>
# @ingroup l1_controls
def GetCriterion(self,elementType,
CritType,
Compare = FT_EqualTo,
Threshold="",
UnaryOp=FT_Undefined,
BinaryOp=FT_Undefined,
Tolerance=1e-07):
if not CritType in SMESH.FunctorType._items:
raise TypeError, "CritType should be of SMESH.FunctorType"
aCriterion = self.GetEmptyCriterion()
aCriterion.TypeOfElement = elementType
aCriterion.Type = self.EnumToLong(CritType)
aCriterion.Tolerance = Tolerance
aThreshold = Threshold
if Compare in [FT_LessThan, FT_MoreThan, FT_EqualTo]:
aCriterion.Compare = self.EnumToLong(Compare)
elif Compare == "=" or Compare == "==":
aCriterion.Compare = self.EnumToLong(FT_EqualTo)
elif Compare == "<":
aCriterion.Compare = self.EnumToLong(FT_LessThan)
elif Compare == ">":
aCriterion.Compare = self.EnumToLong(FT_MoreThan)
elif Compare != FT_Undefined:
aCriterion.Compare = self.EnumToLong(FT_EqualTo)
aThreshold = Compare
if CritType in [FT_BelongToGeom, FT_BelongToPlane, FT_BelongToGenSurface,
FT_BelongToCylinder, FT_LyingOnGeom]:
# Checks that Threshold is GEOM object
if isinstance(aThreshold, geomBuilder.GEOM._objref_GEOM_Object):
aCriterion.ThresholdStr = GetName(aThreshold)
aCriterion.ThresholdID = aThreshold.GetStudyEntry()
if not aCriterion.ThresholdID:
name = aCriterion.ThresholdStr
if not name:
name = "%s_%s"%(aThreshold.GetShapeType(), id(aThreshold)%10000)
aCriterion.ThresholdID = self.geompyD.addToStudy( aThreshold, name )
#raise RuntimeError, "Threshold shape must be published"
else:
print "Error: The Threshold should be a shape."
return None
if isinstance(UnaryOp,float):
aCriterion.Tolerance = UnaryOp
UnaryOp = FT_Undefined
pass
elif CritType == FT_RangeOfIds:
# Checks that Threshold is string
if isinstance(aThreshold, str):
aCriterion.ThresholdStr = aThreshold
else:
print "Error: The Threshold should be a string."
return None
elif CritType == FT_CoplanarFaces:
# Checks the Threshold
if isinstance(aThreshold, int):
aCriterion.ThresholdID = str(aThreshold)
elif isinstance(aThreshold, str):
ID = int(aThreshold)
if ID < 1:
raise ValueError, "Invalid ID of mesh face: '%s'"%aThreshold
aCriterion.ThresholdID = aThreshold
else:
raise ValueError,\
"The Threshold should be an ID of mesh face and not '%s'"%aThreshold
elif CritType == FT_ConnectedElements:
# Checks the Threshold
if isinstance(aThreshold, geomBuilder.GEOM._objref_GEOM_Object): # shape
aCriterion.ThresholdID = aThreshold.GetStudyEntry()
if not aCriterion.ThresholdID:
name = aThreshold.GetName()
if not name:
name = "%s_%s"%(aThreshold.GetShapeType(), id(aThreshold)%10000)
aCriterion.ThresholdID = self.geompyD.addToStudy( aThreshold, name )
elif isinstance(aThreshold, int): # node id
aCriterion.Threshold = aThreshold
elif isinstance(aThreshold, list): # 3 point coordinates
if len( aThreshold ) < 3:
raise ValueError, "too few point coordinates, must be 3"
aCriterion.ThresholdStr = " ".join( [str(c) for c in aThreshold[:3]] )
elif isinstance(aThreshold, str):
if aThreshold.isdigit():
aCriterion.Threshold = aThreshold # node id
else:
aCriterion.ThresholdStr = aThreshold # hope that it's point coordinates
else:
raise ValueError,\
"The Threshold should either a VERTEX, or a node ID, "\
"or a list of point coordinates and not '%s'"%aThreshold
elif CritType == FT_ElemGeomType:
# Checks the Threshold
try:
aCriterion.Threshold = self.EnumToLong(aThreshold)
assert( aThreshold in SMESH.GeometryType._items )
except:
if isinstance(aThreshold, int):
aCriterion.Threshold = aThreshold
else:
print "Error: The Threshold should be an integer or SMESH.GeometryType."
return None
pass
pass
elif CritType == FT_EntityType:
# Checks the Threshold
try:
aCriterion.Threshold = self.EnumToLong(aThreshold)
assert( aThreshold in SMESH.EntityType._items )
except:
if isinstance(aThreshold, int):
aCriterion.Threshold = aThreshold
else:
print "Error: The Threshold should be an integer or SMESH.EntityType."
return None
pass
pass
elif CritType == FT_GroupColor:
# Checks the Threshold
try:
aCriterion.ThresholdStr = self.ColorToString(aThreshold)
except:
print "Error: The threshold value should be of SALOMEDS.Color type"
return None
pass
elif CritType in [FT_FreeBorders, FT_FreeEdges, FT_FreeNodes, FT_FreeFaces,
FT_LinearOrQuadratic, FT_BadOrientedVolume,
FT_BareBorderFace, FT_BareBorderVolume,
FT_OverConstrainedFace, FT_OverConstrainedVolume,
FT_EqualNodes,FT_EqualEdges,FT_EqualFaces,FT_EqualVolumes ]:
# At this point the Threshold is unnecessary
if aThreshold == FT_LogicalNOT:
aCriterion.UnaryOp = self.EnumToLong(FT_LogicalNOT)
elif aThreshold in [FT_LogicalAND, FT_LogicalOR]:
aCriterion.BinaryOp = aThreshold
else:
# Check Threshold
try:
aThreshold = float(aThreshold)
aCriterion.Threshold = aThreshold
except:
print "Error: The Threshold should be a number."
return None
if Threshold == FT_LogicalNOT or UnaryOp == FT_LogicalNOT:
aCriterion.UnaryOp = self.EnumToLong(FT_LogicalNOT)
if Threshold in [FT_LogicalAND, FT_LogicalOR]:
aCriterion.BinaryOp = self.EnumToLong(Threshold)
if UnaryOp in [FT_LogicalAND, FT_LogicalOR]:
aCriterion.BinaryOp = self.EnumToLong(UnaryOp)
if BinaryOp in [FT_LogicalAND, FT_LogicalOR]:
aCriterion.BinaryOp = self.EnumToLong(BinaryOp)
return aCriterion
## Creates a filter with the given parameters
# @param elementType the type of elements in the group
# @param CritType the type of criterion ( FT_Taper, FT_Area, FT_RangeOfIds, FT_LyingOnGeom etc. )
# @param Compare belongs to {FT_LessThan, FT_MoreThan, FT_EqualTo}
# @param Threshold the threshold value (range of id ids as string, shape, numeric)
# @param UnaryOp FT_LogicalNOT or FT_Undefined
# @param Tolerance the tolerance used by FT_BelongToGeom, FT_BelongToSurface,
# FT_LyingOnGeom, FT_CoplanarFaces and FT_EqualNodes criteria
# @param mesh the mesh to initialize the filter with
# @return SMESH_Filter
#
# <a href="../tui_filters_page.html#tui_filters">Example of Filters usage</a>
# @ingroup l1_controls
def GetFilter(self,elementType,
CritType=FT_Undefined,
Compare=FT_EqualTo,
Threshold="",
UnaryOp=FT_Undefined,
Tolerance=1e-07,
mesh=None):
aCriterion = self.GetCriterion(elementType, CritType, Compare, Threshold, UnaryOp, FT_Undefined,Tolerance)
aFilterMgr = self.CreateFilterManager()
aFilter = aFilterMgr.CreateFilter()
aCriteria = []
aCriteria.append(aCriterion)
aFilter.SetCriteria(aCriteria)
if mesh:
if isinstance( mesh, Mesh ): aFilter.SetMesh( mesh.GetMesh() )
else : aFilter.SetMesh( mesh )
aFilterMgr.UnRegister()
return aFilter
## Creates a filter from criteria
# @param criteria a list of criteria
# @param binOp binary operator used when binary operator of criteria is undefined
# @return SMESH_Filter
#
# <a href="../tui_filters_page.html#tui_filters">Example of Filters usage</a>
# @ingroup l1_controls
def GetFilterFromCriteria(self,criteria, binOp=SMESH.FT_LogicalAND):
for i in range( len( criteria ) - 1 ):
if criteria[i].BinaryOp == self.EnumToLong( SMESH.FT_Undefined ):
criteria[i].BinaryOp = self.EnumToLong( binOp )
aFilterMgr = self.CreateFilterManager()
aFilter = aFilterMgr.CreateFilter()
aFilter.SetCriteria(criteria)
aFilterMgr.UnRegister()
return aFilter
## Creates a numerical functor by its type
# @param theCriterion FT_...; functor type
# @return SMESH_NumericalFunctor
# @ingroup l1_controls
def GetFunctor(self,theCriterion):
if isinstance( theCriterion, SMESH._objref_NumericalFunctor ):
return theCriterion
aFilterMgr = self.CreateFilterManager()
functor = None
if theCriterion == FT_AspectRatio:
functor = aFilterMgr.CreateAspectRatio()
elif theCriterion == FT_AspectRatio3D:
functor = aFilterMgr.CreateAspectRatio3D()
elif theCriterion == FT_Warping:
functor = aFilterMgr.CreateWarping()
elif theCriterion == FT_MinimumAngle:
functor = aFilterMgr.CreateMinimumAngle()
elif theCriterion == FT_Taper:
functor = aFilterMgr.CreateTaper()
elif theCriterion == FT_Skew:
functor = aFilterMgr.CreateSkew()
elif theCriterion == FT_Area:
functor = aFilterMgr.CreateArea()
elif theCriterion == FT_Volume3D:
functor = aFilterMgr.CreateVolume3D()
elif theCriterion == FT_MaxElementLength2D:
functor = aFilterMgr.CreateMaxElementLength2D()
elif theCriterion == FT_MaxElementLength3D:
functor = aFilterMgr.CreateMaxElementLength3D()
elif theCriterion == FT_MultiConnection:
functor = aFilterMgr.CreateMultiConnection()
elif theCriterion == FT_MultiConnection2D:
functor = aFilterMgr.CreateMultiConnection2D()
elif theCriterion == FT_Length:
functor = aFilterMgr.CreateLength()
elif theCriterion == FT_Length2D:
functor = aFilterMgr.CreateLength2D()
else:
print "Error: given parameter is not numerical functor type."
aFilterMgr.UnRegister()
return functor
    ## Creates hypothesis
    #  @param theHType mesh hypothesis type (string)
    #  @param theLibName mesh plug-in library name
    #  @return created hypothesis instance
    def CreateHypothesis(self, theHType, theLibName="libStdMeshersEngine.so"):
        # delegate actual creation to the CORBA SMESH_Gen servant
        hyp = SMESH._objref_SMESH_Gen.CreateHypothesis(self, theHType, theLibName )
        if isinstance( hyp, SMESH._objref_SMESH_Algo ):
            # algorithms are returned as-is; only plain hypotheses get their
            # methods wrapped below
            return hyp
        # wrap hypothesis methods
        #print "HYPOTHESIS", theHType
        for meth_name in dir( hyp.__class__ ):
            # wrap every non-getter method that the concrete hypothesis class
            # adds on top of the generic SMESH_Hypothesis interface
            # (presumably so hypMethodWrapper can intercept the calls, e.g.
            # for python dump -- NOTE(review): confirm in hypMethodWrapper)
            if not meth_name.startswith("Get") and \
                   not meth_name in dir ( SMESH._objref_SMESH_Hypothesis ):
                method = getattr ( hyp.__class__, meth_name )
                if callable(method):
                    setattr( hyp, meth_name, hypMethodWrapper( hyp, method ))
        return hyp
## Gets the mesh statistic
# @return dictionary "element type" - "count of elements"
# @ingroup l1_meshinfo
def GetMeshInfo(self, obj):
if isinstance( obj, Mesh ):
obj = obj.GetMesh()
d = {}
if hasattr(obj, "GetMeshInfo"):
values = obj.GetMeshInfo()
for i in range(SMESH.Entity_Last._v):
if i < len(values): d[SMESH.EntityType._item(i)]=values[i]
pass
return d
## Get minimum distance between two objects
#
# If @a src2 is None, and @a id2 = 0, distance from @a src1 / @a id1 to the origin is computed.
# If @a src2 is None, and @a id2 != 0, it is assumed that both @a id1 and @a id2 belong to @a src1.
#
# @param src1 first source object
# @param src2 second source object
# @param id1 node/element id from the first source
# @param id2 node/element id from the second (or first) source
# @param isElem1 @c True if @a id1 is element id, @c False if it is node id
# @param isElem2 @c True if @a id2 is element id, @c False if it is node id
# @return minimum distance value
# @sa GetMinDistance()
# @ingroup l1_measurements
def MinDistance(self, src1, src2=None, id1=0, id2=0, isElem1=False, isElem2=False):
result = self.GetMinDistance(src1, src2, id1, id2, isElem1, isElem2)
if result is None:
result = 0.0
else:
result = result.value
return result
    ## Get measure structure specifying minimum distance data between two objects
    #
    #  If @a src2 is None, and @a id2 = 0, distance from @a src1 / @a id1 to the origin is computed.
    #  If @a src2 is None, and @a id2 != 0, it is assumed that both @a id1 and @a id2 belong to @a src1.
    #
    #  @param src1 first source object
    #  @param src2 second source object
    #  @param id1 node/element id from the first source
    #  @param id2 node/element id from the second (or first) source
    #  @param isElem1 @c True if @a id1 is element id, @c False if it is node id
    #  @param isElem2 @c True if @a id2 is element id, @c False if it is node id
    #  @return Measure structure or None if input data is invalid
    #  @sa MinDistance()
    #  @ingroup l1_measurements
    def GetMinDistance(self, src1, src2=None, id1=0, id2=0, isElem1=False, isElem2=False):
        # unwrap smeshBuilder.Mesh wrappers down to CORBA SMESH_Mesh objects
        if isinstance(src1, Mesh): src1 = src1.mesh
        if isinstance(src2, Mesh): src2 = src2.mesh
        # both ids belong to src1 when no second source is given
        if src2 is None and id2 != 0: src2 = src1
        # src1 must be narrowable to an SMESH_IDSource, else input is invalid
        if not hasattr(src1, "_narrow"): return None
        src1 = src1._narrow(SMESH.SMESH_IDSource)
        if not src1: return None
        # collects temporary ID sources (and the measurements servant) so they
        # get UnRegister()'ed when this helper goes out of scope
        unRegister = genObjUnRegister()
        if id1 != 0:
            # replace the whole source by a one-element ID source holding id1
            m = src1.GetMesh()
            e = m.GetMeshEditor()
            if isElem1:
                src1 = e.MakeIDSource([id1], SMESH.FACE)
            else:
                src1 = e.MakeIDSource([id1], SMESH.NODE)
            unRegister.set( src1 )
            pass
        if hasattr(src2, "_narrow"):
            src2 = src2._narrow(SMESH.SMESH_IDSource)
            if src2 and id2 != 0:
                # same single-id narrowing for the second source
                m = src2.GetMesh()
                e = m.GetMeshEditor()
                if isElem2:
                    src2 = e.MakeIDSource([id2], SMESH.FACE)
                else:
                    src2 = e.MakeIDSource([id2], SMESH.NODE)
                unRegister.set( src2 )
                pass
            pass
        aMeasurements = self.CreateMeasurements()
        unRegister.set( aMeasurements )
        result = aMeasurements.MinDistance(src1, src2)
        return result
## Get bounding box of the specified object(s)
# @param objects single source object or list of source objects
# @return tuple of six values (minX, minY, minZ, maxX, maxY, maxZ)
# @sa GetBoundingBox()
# @ingroup l1_measurements
def BoundingBox(self, objects):
result = self.GetBoundingBox(objects)
if result is None:
result = (0.0,)*6
else:
result = (result.minX, result.minY, result.minZ, result.maxX, result.maxY, result.maxZ)
return result
## Get measure structure specifying bounding box data of the specified object(s)
# @param objects single source object or list of source objects
# @return Measure structure
# @sa BoundingBox()
# @ingroup l1_measurements
def GetBoundingBox(self, objects):
if isinstance(objects, tuple):
objects = list(objects)
if not isinstance(objects, list):
objects = [objects]
srclist = []
for o in objects:
if isinstance(o, Mesh):
srclist.append(o.mesh)
elif hasattr(o, "_narrow"):
src = o._narrow(SMESH.SMESH_IDSource)
if src: srclist.append(src)
pass
pass
aMeasurements = self.CreateMeasurements()
result = aMeasurements.BoundingBox(srclist)
aMeasurements.UnRegister()
return result
## Get sum of lengths of all 1D elements in the mesh object.
# @param obj mesh, submesh or group
# @return sum of lengths of all 1D elements
# @ingroup l1_measurements
def GetLength(self, obj):
if isinstance(obj, Mesh): obj = obj.mesh
if isinstance(obj, Mesh_Algorithm): obj = obj.GetSubMesh()
aMeasurements = self.CreateMeasurements()
value = aMeasurements.Length(obj)
aMeasurements.UnRegister()
return value
## Get sum of areas of all 2D elements in the mesh object.
# @param obj mesh, submesh or group
# @return sum of areas of all 2D elements
# @ingroup l1_measurements
def GetArea(self, obj):
if isinstance(obj, Mesh): obj = obj.mesh
if isinstance(obj, Mesh_Algorithm): obj = obj.GetSubMesh()
aMeasurements = self.CreateMeasurements()
value = aMeasurements.Area(obj)
aMeasurements.UnRegister()
return value
## Get sum of volumes of all 3D elements in the mesh object.
# @param obj mesh, submesh or group
# @return sum of volumes of all 3D elements
# @ingroup l1_measurements
def GetVolume(self, obj):
if isinstance(obj, Mesh): obj = obj.mesh
if isinstance(obj, Mesh_Algorithm): obj = obj.GetSubMesh()
aMeasurements = self.CreateMeasurements()
value = aMeasurements.Volume(obj)
aMeasurements.UnRegister()
return value
pass # end of class smeshBuilder
import omniORB
#Registering the new proxy for SMESH_Gen
omniORB.registerObjref(SMESH._objref_SMESH_Gen._NP_RepositoryId, smeshBuilder)
## Create a new smeshBuilder instance. The smeshBuilder class provides the Python
#  interface to create or load meshes.
#
#  Typical use is:
#  \code
#    import salome
#    salome.salome_init()
#    from salome.smesh import smeshBuilder
#    smesh = smeshBuilder.New(theStudy)
#  \endcode
#  @param  study     SALOME study, generally obtained by salome.myStudy.
#  @param  instance  CORBA proxy of SMESH Engine. If None, the default Engine is used.
#  @return smeshBuilder instance
def New( study, instance=None):
    """
    Create a new smeshBuilder instance. The smeshBuilder class provides the Python
    interface to create or load meshes.

    Typical use is:
        import salome
        salome.salome_init()
        from salome.smesh import smeshBuilder
        smesh = smeshBuilder.New(theStudy)

    Parameters:
        study     SALOME study, generally obtained by salome.myStudy.
        instance  CORBA proxy of SMESH Engine. If None, the default Engine is used.
    Returns:
        smeshBuilder instance
    """
    global engine, smeshInst, doLcc
    engine = instance
    if engine is None:
        # no engine supplied: ask LifeCycleCORBA to find/load the default one
        doLcc = True
    smeshInst = smeshBuilder()
    assert isinstance(smeshInst,smeshBuilder), "Smesh engine class is %s but should be smeshBuilder.smeshBuilder. Import salome.smesh.smeshBuilder before creating the instance."%smeshInst.__class__
    smeshInst.init_smesh(study)
    return smeshInst
# Public class: Mesh
# ==================
## This class allows defining and managing a mesh.
# It has a set of methods to build a mesh on the given geometry, including the definition of sub-meshes.
# It also has methods to define groups of mesh elements, to modify a mesh (by addition of
# new nodes and elements and by changing the existing entities), to get information
# about a mesh and to export a mesh into different formats.
class Mesh:
__metaclass__ = MeshMeta
geom = 0
mesh = 0
editor = 0
    ## Constructor
    #
    #  Creates a mesh on the shape \a obj (or an empty mesh if \a obj is equal to 0) and
    #  sets the GUI name of this mesh to \a name.
    #  @param smeshpyD an instance of smeshBuilder class
    #  @param geompyD an instance of geomBuilder class
    #  @param obj Shape to be meshed or SMESH_Mesh object
    #  @param name Study name of the mesh
    #  @ingroup l2_construct
    def __init__(self, smeshpyD, geompyD, obj=0, name=0):
        self.smeshpyD=smeshpyD
        self.geompyD=geompyD
        # normalize "no object" to 0 (the historical sentinel of this API)
        if obj is None:
            obj = 0
        objHasName = False
        if obj != 0:
            if isinstance(obj, geomBuilder.GEOM._objref_GEOM_Object):
                # a GEOM shape: a new mesh is created on it
                self.geom = obj
                objHasName = True
                # publish geom of mesh (issue 0021122)
                if not self.geom.GetStudyEntry() and smeshpyD.GetCurrentStudy():
                    objHasName = False
                    studyID = smeshpyD.GetCurrentStudy()._get_StudyId()
                    # re-init geomBuilder if it points to another study
                    if studyID != geompyD.myStudyId:
                        geompyD.init_geom( smeshpyD.GetCurrentStudy())
                        pass
                    if name:
                        geo_name = name + " shape"
                    else:
                        # fabricate a short unique-ish name from type and object id
                        geo_name = "%s_%s to mesh"%(self.geom.GetShapeType(), id(self.geom)%100)
                    geompyD.addToStudy( self.geom, geo_name )
                self.SetMesh( self.smeshpyD.CreateMesh(self.geom) )

            elif isinstance(obj, SMESH._objref_SMESH_Mesh):
                # an existing CORBA mesh: just wrap it
                self.SetMesh(obj)
        else:
            # no object at all: start from an empty mesh
            self.SetMesh( self.smeshpyD.CreateEmptyMesh() )
        if name:
            self.smeshpyD.SetName(self.mesh, name)
        elif objHasName:
            self.smeshpyD.SetName(self.mesh, GetName(obj)) # + " mesh"

        if not self.geom:
            self.geom = self.mesh.GetShapeToMesh()

        self.editor   = self.mesh.GetMeshEditor()
        self.functors = [None] * SMESH.FT_Undefined._v

        # set self to algoCreator's
        # (rebind every algoCreator attribute to a copy tied to this mesh)
        for attrName in dir(self):
            attr = getattr( self, attrName )
            if isinstance( attr, algoCreator ):
                #print "algoCreator ", attrName
                setattr( self, attrName, attr.copy( self ))
                pass
            pass
        pass
    ## Destructor. Clean-up resources
    def __del__(self):
        # Intentionally does (almost) nothing: UnRegister() is kept commented
        # out, presumably to avoid destroying the mesh servant while the study
        # still references it -- NOTE(review): confirm before re-enabling.
        if self.mesh:
            #self.mesh.UnRegister()
            pass
        pass
    ## Initializes the Mesh object from an instance of SMESH_Mesh interface
    #  @param theMesh a SMESH_Mesh object
    #  @ingroup l2_construct
    def SetMesh(self, theMesh):
        # do not call Register() as this prevents mesh servant deletion at closing study
        #if self.mesh: self.mesh.UnRegister()
        self.mesh = theMesh
        if self.mesh:
            #self.mesh.Register()
            # keep the associated shape in sync with the newly set mesh
            self.geom = self.mesh.GetShapeToMesh()
            pass
## Returns the mesh, that is an instance of SMESH_Mesh interface
# @return a SMESH_Mesh object
# @ingroup l2_construct
def GetMesh(self):
return self.mesh
## Gets the name of the mesh
# @return the name of the mesh as a string
# @ingroup l2_construct
def GetName(self):
name = GetName(self.GetMesh())
return name
## Sets a name to the mesh
# @param name a new name of the mesh
# @ingroup l2_construct
def SetName(self, name):
self.smeshpyD.SetName(self.GetMesh(), name)
## Gets the subMesh object associated to a \a theSubObject geometrical object.
# The subMesh object gives access to the IDs of nodes and elements.
# @param geom a geometrical object (shape)
# @param name a name for the submesh
# @return an object of type SMESH_SubMesh, representing a part of mesh, which lies on the given shape
# @ingroup l2_submeshes
def GetSubMesh(self, geom, name):
AssureGeomPublished( self, geom, name )
submesh = self.mesh.GetSubMesh( geom, name )
return submesh
## Returns the shape associated to the mesh
# @return a GEOM_Object
# @ingroup l2_construct
def GetShape(self):
return self.geom
## Associates the given shape to the mesh (entails the recreation of the mesh)
# @param geom the shape to be meshed (GEOM_Object)
# @ingroup l2_construct
def SetShape(self, geom):
self.mesh = self.smeshpyD.CreateMesh(geom)
## Loads mesh from the study after opening the study
def Load(self):
self.mesh.Load()
## Returns true if the hypotheses are defined well
# @param theSubObject a sub-shape of a mesh shape
# @return True or False
# @ingroup l2_construct
def IsReadyToCompute(self, theSubObject):
return self.smeshpyD.IsReadyToCompute(self.mesh, theSubObject)
## Returns errors of hypotheses definition.
# The list of errors is empty if everything is OK.
# @param theSubObject a sub-shape of a mesh shape
# @return a list of errors
# @ingroup l2_construct
def GetAlgoState(self, theSubObject):
return self.smeshpyD.GetAlgoState(self.mesh, theSubObject)
## Returns a geometrical object on which the given element was built.
# The returned geometrical object, if not nil, is either found in the
# study or published by this method with the given name
# @param theElementID the id of the mesh element
# @param theGeomName the user-defined name of the geometrical object
# @return GEOM::GEOM_Object instance
# @ingroup l2_construct
def GetGeometryByMeshElement(self, theElementID, theGeomName):
return self.smeshpyD.GetGeometryByMeshElement( self.mesh, theElementID, theGeomName )
## Returns the mesh dimension depending on the dimension of the underlying shape
# or, if the mesh is not based on any shape, basing on deimension of elements
# @return mesh dimension as an integer value [0,3]
# @ingroup l1_auxiliary
def MeshDimension(self):
if self.mesh.HasShapeToMesh():
shells = self.geompyD.SubShapeAllIDs( self.geom, self.geompyD.ShapeType["SOLID"] )
if len( shells ) > 0 :
return 3
elif self.geompyD.NumberOfFaces( self.geom ) > 0 :
return 2
elif self.geompyD.NumberOfEdges( self.geom ) > 0 :
return 1
else:
return 0;
else:
if self.NbVolumes() > 0: return 3
if self.NbFaces() > 0: return 2
if self.NbEdges() > 0: return 1
return 0
## Evaluates size of prospective mesh on a shape
# @return a list where i-th element is a number of elements of i-th SMESH.EntityType
# To know predicted number of e.g. edges, inquire it this way
# Evaluate()[ EnumToLong( Entity_Edge )]
def Evaluate(self, geom=0):
if geom == 0 or not isinstance(geom, geomBuilder.GEOM._objref_GEOM_Object):
if self.geom == 0:
geom = self.mesh.GetShapeToMesh()
else:
geom = self.geom
return self.smeshpyD.Evaluate(self.mesh, geom)
    ## Computes the mesh and returns the status of the computation
    #  @param geom geomtrical shape on which mesh data should be computed
    #  @param discardModifs if True and the mesh has been edited since
    #         a last total re-compute and that may prevent successful partial re-compute,
    #         then the mesh is cleaned before Compute()
    #  @return True or False
    #  @ingroup l2_construct
    def Compute(self, geom=0, discardModifs=False):
        # fall back to the shape of this mesh when no valid GEOM object is given
        if geom == 0 or not isinstance(geom, geomBuilder.GEOM._objref_GEOM_Object):
            if self.geom == 0:
                geom = self.mesh.GetShapeToMesh()
            else:
                geom = self.geom
        ok = False
        try:
            if discardModifs and self.mesh.HasModificationsToDiscard(): # issue 0020693
                self.mesh.Clear()
            ok = self.smeshpyD.Compute(self.mesh, geom)
        except SALOME.SALOME_Exception, ex:
            print "Mesh computation failed, exception caught:"
            print "    ", ex.details.text
        except:
            import traceback
            print "Mesh computation failed, exception caught:"
            traceback.print_exc()
        # error reporting is done even on success to surface warnings
        if True:#not ok:
            allReasons = ""

            # Treat compute errors
            computeErrors = self.smeshpyD.GetComputeErrors( self.mesh, geom )
            for err in computeErrors:
                # try to name the sub-shape the error occurred on
                shapeText = ""
                if self.mesh.HasShapeToMesh():
                    try:
                        # look the failing sub-shape up in all open studies
                        # by matching its index under the main shape
                        mainIOR  = salome.orb.object_to_string(geom)
                        for sname in salome.myStudyManager.GetOpenStudies():
                            s = salome.myStudyManager.GetStudyByName(sname)
                            if not s: continue
                            mainSO = s.FindObjectIOR(mainIOR)
                            if not mainSO: continue
                            if err.subShapeID == 1:
                                # id 1 is the main shape itself
                                shapeText = ' on "%s"' % mainSO.GetName()
                            subIt = s.NewChildIterator(mainSO)
                            while subIt.More():
                                subSO = subIt.Value()
                                subIt.Next()
                                obj = subSO.GetObject()
                                if not obj: continue
                                go = obj._narrow( geomBuilder.GEOM._objref_GEOM_Object )
                                if not go: continue
                                ids = go.GetSubShapeIndices()
                                if len(ids) == 1 and ids[0] == err.subShapeID:
                                    shapeText = ' on "%s"' % subSO.GetName()
                                    break
                        if not shapeText:
                            # unpublished sub-shape: describe it by type and index
                            shape = self.geompyD.GetSubShape( geom, [err.subShapeID])
                            if shape:
                                shapeText = " on %s #%s" % (shape.GetShapeType(), err.subShapeID)
                            else:
                                shapeText = " on subshape #%s" % (err.subShapeID)
                    except:
                        # best-effort only; never let reporting break Compute()
                        shapeText = " on subshape #%s" % (err.subShapeID)
                errText = ""
                # indexed by SMESH::ComputeErrorName (positive error codes)
                stdErrors = ["OK",                   #COMPERR_OK
                             "Invalid input mesh",   #COMPERR_BAD_INPUT_MESH
                             "std::exception",       #COMPERR_STD_EXCEPTION
                             "OCC exception",        #COMPERR_OCC_EXCEPTION
                             "..",                   #COMPERR_SLM_EXCEPTION
                             "Unknown exception",    #COMPERR_EXCEPTION
                             "Memory allocation problem", #COMPERR_MEMORY_PB
                             "Algorithm failed",     #COMPERR_ALGO_FAILED
                             "Unexpected geometry",  #COMPERR_BAD_SHAPE
                             "Warning",              #COMPERR_WARNING
                             "Computation cancelled",#COMPERR_CANCELED
                             "No mesh on sub-shape"] #COMPERR_NO_MESH_ON_SHAPE
                if err.code > 0:
                    if err.code < len(stdErrors): errText = stdErrors[err.code]
                else:
                    # negative codes are algorithm-specific
                    errText = "code %s" % -err.code
                if errText: errText += ". "
                errText += err.comment
                if allReasons != "":allReasons += "\n"
                if ok:
                    allReasons += '- "%s"%s - %s' %(err.algoName, shapeText, errText)
                else:
                    allReasons += '- "%s" failed%s. Error: %s' %(err.algoName, shapeText, errText)
                pass

            # Treat hyp errors
            errors = self.smeshpyD.GetAlgoState( self.mesh, geom )
            for err in errors:
                if err.isGlobalAlgo:
                    glob = "global"
                else:
                    glob = "local"
                    pass
                dim = err.algoDim
                name = err.algoName
                if len(name) == 0:
                    reason = '%s %sD algorithm is missing' % (glob, dim)
                elif err.state == HYP_MISSING:
                    reason = ('%s %sD algorithm "%s" misses %sD hypothesis'
                              % (glob, dim, name, dim))
                elif err.state == HYP_NOTCONFORM:
                    reason = 'Global "Not Conform mesh allowed" hypothesis is missing'
                elif err.state == HYP_BAD_PARAMETER:
                    reason = ('Hypothesis of %s %sD algorithm "%s" has a bad parameter value'
                              % ( glob, dim, name ))
                elif err.state == HYP_BAD_GEOMETRY:
                    reason = ('%s %sD algorithm "%s" is assigned to mismatching'
                              'geometry' % ( glob, dim, name ))
                elif err.state == HYP_HIDDEN_ALGO:
                    reason = ('%s %sD algorithm "%s" is ignored due to presence of a %s '
                              'algorithm of upper dimension generating %sD mesh'
                              % ( glob, dim, name, glob, dim ))
                else:
                    reason = ("For unknown reason. "
                              "Developer, revise Mesh.Compute() implementation in smeshBuilder.py!")
                    pass
                if allReasons != "":allReasons += "\n"
                allReasons += "- " + reason
                pass
            if not ok or allReasons != "":
                msg = '"' + GetName(self.mesh) + '"'
                if ok: msg += " has been computed with warnings"
                else:  msg += " has not been computed"
                if allReasons != "": msg += ":"
                else:                msg += "."
                print msg
                print allReasons
            pass
        # update the mesh icon in the object browser when a GUI is available
        if salome.sg.hasDesktop() and self.mesh.GetStudyId() >= 0:
            smeshgui = salome.ImportComponentGUI("SMESH")
            smeshgui.Init(self.mesh.GetStudyId())
            smeshgui.SetMeshIcon( salome.ObjectToID( self.mesh ), ok, (self.NbNodes()==0) )
            salome.sg.updateObjBrowser(1)
            pass
        return ok
## Return submesh objects list in meshing order
# @return list of list of submesh objects
# @ingroup l2_construct
def GetMeshOrder(self):
return self.mesh.GetMeshOrder()
## Return submesh objects list in meshing order
# @return list of list of submesh objects
# @ingroup l2_construct
def SetMeshOrder(self, submeshes):
return self.mesh.SetMeshOrder(submeshes)
## Removes all nodes and elements
# @ingroup l2_construct
def Clear(self):
self.mesh.Clear()
if ( salome.sg.hasDesktop() and
salome.myStudyManager.GetStudyByID( self.mesh.GetStudyId() )):
smeshgui = salome.ImportComponentGUI("SMESH")
smeshgui.Init(self.mesh.GetStudyId())
smeshgui.SetMeshIcon( salome.ObjectToID( self.mesh ), False, True )
salome.sg.updateObjBrowser(1)
## Removes all nodes and elements of indicated shape
# @ingroup l2_construct
def ClearSubMesh(self, geomId):
self.mesh.ClearSubMesh(geomId)
if salome.sg.hasDesktop():
smeshgui = salome.ImportComponentGUI("SMESH")
smeshgui.Init(self.mesh.GetStudyId())
smeshgui.SetMeshIcon( salome.ObjectToID( self.mesh ), False, True )
salome.sg.updateObjBrowser(1)
## Computes a tetrahedral mesh using AutomaticLength + MEFISTO + Tetrahedron
# @param fineness [0.0,1.0] defines mesh fineness
# @return True or False
# @ingroup l3_algos_basic
def AutomaticTetrahedralization(self, fineness=0):
dim = self.MeshDimension()
# assign hypotheses
self.RemoveGlobalHypotheses()
self.Segment().AutomaticLength(fineness)
if dim > 1 :
self.Triangle().LengthFromEdges()
pass
if dim > 2 :
self.Tetrahedron()
pass
return self.Compute()
## Computes an hexahedral mesh using AutomaticLength + Quadrangle + Hexahedron
# @param fineness [0.0, 1.0] defines mesh fineness
# @return True or False
# @ingroup l3_algos_basic
def AutomaticHexahedralization(self, fineness=0):
dim = self.MeshDimension()
# assign the hypotheses
self.RemoveGlobalHypotheses()
self.Segment().AutomaticLength(fineness)
if dim > 1 :
self.Quadrangle()
pass
if dim > 2 :
self.Hexahedron()
pass
return self.Compute()
    ## Assigns a hypothesis
    #  @param hyp a hypothesis to assign
    #  @param geom a subhape of mesh geometry
    #  @return SMESH.Hypothesis_Status
    #  @ingroup l2_hypotheses
    def AddHypothesis(self, hyp, geom=0):
        # a Mesh_Algorithm wrapper is unwrapped to its CORBA algorithm
        if isinstance( hyp, Mesh_Algorithm ):
            hyp = hyp.GetAlgorithm()
            pass
        # default target geometry: the mesh's own shape
        if not geom:
            geom = self.geom
            if not geom:
                geom = self.mesh.GetShapeToMesh()
            pass
        isApplicable = True
        if self.mesh.HasShapeToMesh():
            hyp_type     = hyp.GetName()
            lib_name     = hyp.GetLibName()
            # check all sub-shapes only when assigning to a sub-geometry
            checkAll    = ( not geom.IsSame( self.mesh.GetShapeToMesh() ))
            if checkAll and geom:
                # GetType() == 37 presumably identifies a GEOM group --
                # NOTE(review): confirm against the GEOM object type codes
                checkAll = geom.GetType() == 37
            isApplicable = self.smeshpyD.IsApplicable(hyp_type, lib_name, geom, checkAll)
        if isApplicable:
            AssureGeomPublished( self, geom, "shape for %s" % hyp.GetName())
            status = self.mesh.AddHypothesis(geom, hyp)
        else:
            # mimic the (status, message) shape returned by AddHypothesis
            status = HYP_BAD_GEOMETRY,""
        hyp_name = GetName( hyp )
        geom_name = ""
        if geom:
            geom_name = geom.GetName()
        isAlgo = hyp._narrow( SMESH_Algo )
        # report the assignment result to the user (prints a warning on failure)
        TreatHypoStatus( status, hyp_name, geom_name, isAlgo, self )
        return status
## Return True if an algorithm of hypothesis is assigned to a given shape
# @param hyp a hypothesis to check
# @param geom a subhape of mesh geometry
# @return True of False
# @ingroup l2_hypotheses
def IsUsedHypothesis(self, hyp, geom):
if not hyp: # or not geom
return False
if isinstance( hyp, Mesh_Algorithm ):
hyp = hyp.GetAlgorithm()
pass
hyps = self.GetHypothesisList(geom)
for h in hyps:
if h.GetId() == hyp.GetId():
return True
return False
## Unassigns a hypothesis
# @param hyp a hypothesis to unassign
# @param geom a sub-shape of mesh geometry
# @return SMESH.Hypothesis_Status
# @ingroup l2_hypotheses
def RemoveHypothesis(self, hyp, geom=0):
if not hyp:
return None
if isinstance( hyp, Mesh_Algorithm ):
hyp = hyp.GetAlgorithm()
pass
shape = geom
if not shape:
shape = self.geom
pass
if self.IsUsedHypothesis( hyp, shape ):
return self.mesh.RemoveHypothesis( shape, hyp )
hypName = GetName( hyp )
geoName = GetName( shape )
print "WARNING: RemoveHypothesis() failed as '%s' is not assigned to '%s' shape" % ( hypName, geoName )
return None
## Gets the list of hypotheses added on a geometry
# @param geom a sub-shape of mesh geometry
# @return the sequence of SMESH_Hypothesis
# @ingroup l2_hypotheses
def GetHypothesisList(self, geom):
return self.mesh.GetHypothesisList( geom )
## Removes all global hypotheses
# @ingroup l2_hypotheses
def RemoveGlobalHypotheses(self):
current_hyps = self.mesh.GetHypothesisList( self.geom )
for hyp in current_hyps:
self.mesh.RemoveHypothesis( self.geom, hyp )
pass
pass
## Exports the mesh in a file in MED format and chooses the \a version of MED format
## allowing to overwrite the file if it exists or add the exported data to its contents
# @param f is the file name
# @param auto_groups boolean parameter for creating/not creating
# the groups Group_On_All_Nodes, Group_On_All_Faces, ... ;
# the typical use is auto_groups=false.
# @param version MED format version(MED_V2_1 or MED_V2_2)
# @param overwrite boolean parameter for overwriting/not overwriting the file
# @param meshPart a part of mesh (group, sub-mesh) to export instead of the mesh
# @param autoDimension: if @c True (default), a space dimension of a MED mesh can be either
# - 1D if all mesh nodes lie on OX coordinate axis, or
# - 2D if all mesh nodes lie on XOY coordinate plane, or
# - 3D in the rest cases.
# If @a autoDimension is @c False, the space dimension is always 3.
# @param fields : list of GEOM fields defined on the shape to mesh.
# @param geomAssocFields : each character of this string means a need to export a
# corresponding field; correspondence between fields and characters is following:
# - 'v' stands for _vertices_ field;
# - 'e' stands for _edges_ field;
# - 'f' stands for _faces_ field;
# - 's' stands for _solids_ field.
# @ingroup l2_impexp
def ExportMED(self, f, auto_groups=0, version=MED_V2_2,
overwrite=1, meshPart=None, autoDimension=True, fields=[], geomAssocFields=''):
if meshPart or fields or geomAssocFields:
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
self.mesh.ExportPartToMED( meshPart, f, auto_groups, version, overwrite, autoDimension,
fields, geomAssocFields)
else:
self.mesh.ExportToMEDX(f, auto_groups, version, overwrite, autoDimension)
## Exports the mesh in a file in SAUV format
# @param f is the file name
# @param auto_groups boolean parameter for creating/not creating
# the groups Group_On_All_Nodes, Group_On_All_Faces, ... ;
# the typical use is auto_groups=false.
# @ingroup l2_impexp
def ExportSAUV(self, f, auto_groups=0):
self.mesh.ExportSAUV(f, auto_groups)
## Exports the mesh in a file in DAT format
# @param f the file name
# @param meshPart a part of mesh (group, sub-mesh) to export instead of the mesh
# @ingroup l2_impexp
def ExportDAT(self, f, meshPart=None):
if meshPart:
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
self.mesh.ExportPartToDAT( meshPart, f )
else:
self.mesh.ExportDAT(f)
## Exports the mesh in a file in UNV format
# @param f the file name
# @param meshPart a part of mesh (group, sub-mesh) to export instead of the mesh
# @ingroup l2_impexp
def ExportUNV(self, f, meshPart=None):
if meshPart:
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
self.mesh.ExportPartToUNV( meshPart, f )
else:
self.mesh.ExportUNV(f)
## Export the mesh in a file in STL format
# @param f the file name
# @param ascii defines the file encoding
# @param meshPart a part of mesh (group, sub-mesh) to export instead of the mesh
# @ingroup l2_impexp
def ExportSTL(self, f, ascii=1, meshPart=None):
if meshPart:
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
self.mesh.ExportPartToSTL( meshPart, f, ascii )
else:
self.mesh.ExportSTL(f, ascii)
## Exports the mesh in a file in CGNS format
# @param f is the file name
# @param overwrite boolean parameter for overwriting/not overwriting the file
# @param meshPart a part of mesh (group, sub-mesh) to export instead of the mesh
# @ingroup l2_impexp
def ExportCGNS(self, f, overwrite=1, meshPart=None):
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
if isinstance( meshPart, Mesh ):
meshPart = meshPart.mesh
elif not meshPart:
meshPart = self.mesh
self.mesh.ExportCGNS(meshPart, f, overwrite)
## Exports the mesh in a file in GMF format.
# GMF files must have .mesh extension for the ASCII format and .meshb for
# the bynary format. Other extensions are not allowed.
# @param f is the file name
# @param meshPart a part of mesh (group, sub-mesh) to export instead of the mesh
# @ingroup l2_impexp
def ExportGMF(self, f, meshPart=None):
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
if isinstance( meshPart, Mesh ):
meshPart = meshPart.mesh
elif not meshPart:
meshPart = self.mesh
self.mesh.ExportGMF(meshPart, f, True)
## Deprecated, used only for compatibility! Please, use ExportToMEDX() method instead.
# Exports the mesh in a file in MED format and chooses the \a version of MED format
## allowing to overwrite the file if it exists or add the exported data to its contents
# @param f the file name
# @param version values are SMESH.MED_V2_1, SMESH.MED_V2_2
# @param opt boolean parameter for creating/not creating
# the groups Group_On_All_Nodes, Group_On_All_Faces, ...
# @param overwrite boolean parameter for overwriting/not overwriting the file
# @param autoDimension: if @c True (default), a space dimension of a MED mesh can be either
# - 1D if all mesh nodes lie on OX coordinate axis, or
# - 2D if all mesh nodes lie on XOY coordinate plane, or
# - 3D in the rest cases.
#
# If @a autoDimension is @c False, the space dimension is always 3.
# @ingroup l2_impexp
def ExportToMED(self, f, version, opt=0, overwrite=1, autoDimension=True):
self.mesh.ExportToMEDX(f, opt, version, overwrite, autoDimension)
# Operations with groups:
# ----------------------
## Creates an empty mesh group
# @param elementType the type of elements in the group
# @param name the name of the mesh group
# @return SMESH_Group
# @ingroup l2_grps_create
def CreateEmptyGroup(self, elementType, name):
return self.mesh.CreateGroup(elementType, name)
## Creates a mesh group based on the geometric object \a grp
# and gives a \a name, \n if this parameter is not defined
# the name is the same as the geometric group name \n
# Note: Works like GroupOnGeom().
# @param grp a geometric group, a vertex, an edge, a face or a solid
# @param name the name of the mesh group
# @return SMESH_GroupOnGeom
# @ingroup l2_grps_create
def Group(self, grp, name=""):
return self.GroupOnGeom(grp, name)
## Creates a mesh group based on the geometrical object \a grp
# and gives a \a name, \n if this parameter is not defined
# the name is the same as the geometrical group name
# @param grp a geometrical group, a vertex, an edge, a face or a solid
# @param name the name of the mesh group
# @param typ the type of elements in the group. If not set, it is
# automatically detected by the type of the geometry
# @return SMESH_GroupOnGeom
# @ingroup l2_grps_create
def GroupOnGeom(self, grp, name="", typ=None):
    """Create a mesh group tied to the geometrical object *grp*.

    If *name* is empty the geometry's own name is used; if *typ* is not
    given, the element type is deduced from the shape type.
    """
    # make sure the geometry is published in the study before using it
    AssureGeomPublished( self, grp, name )
    if name == "":
        name = grp.GetName()
    if not typ:
        typ = self._groupTypeFromShape( grp )
    return self.mesh.CreateGroupFromGEOM(typ, name, grp)
## Private method to get a type of group on geometry
def _groupTypeFromShape( self, shape ):
    """Deduce the mesh element type (NODE/EDGE/FACE/VOLUME) matching the
    type of the geometrical *shape*.

    For a COMPOUND the type is deduced recursively from its first
    sub-shape. Raises ValueError for an empty compound or an
    unsupported shape type.
    """
    tgeo = str(shape.GetShapeType())
    if tgeo == "VERTEX":
        typ = NODE
    elif tgeo == "EDGE":
        typ = EDGE
    elif tgeo == "FACE" or tgeo == "SHELL":
        typ = FACE
    elif tgeo == "SOLID" or tgeo == "COMPSOLID":
        typ = VOLUME
    elif tgeo == "COMPOUND":
        # explode the compound and deduce the type from the first sub-shape
        sub = self.geompyD.SubShapeAll( shape, self.geompyD.ShapeType["SHAPE"])
        if not sub:
            raise ValueError,"_groupTypeFromShape(): empty geometric group or compound '%s'" % GetName(shape)
        return self._groupTypeFromShape( sub[0] )
    else:
        raise ValueError, \
              "_groupTypeFromShape(): invalid geometry '%s'" % GetName(shape)
    return typ
## Creates a mesh group with given \a name based on the \a filter which
## is a special type of group dynamically updating its contents during
## mesh modification
# @param typ the type of elements in the group
# @param name the name of the mesh group
# @param filter the filter defining group contents
# @return SMESH_GroupOnFilter
# @ingroup l2_grps_create
def GroupOnFilter(self, typ, name, filter):
    """Create a group whose contents track *filter* as the mesh changes."""
    return self.mesh.CreateGroupFromFilter(typ, name, filter)
## Creates a mesh group by the given ids of elements
# @param groupName the name of the mesh group
# @param elementType the type of elements in the group
# @param elemIDs the list of ids
# @return SMESH_Group
# @ingroup l2_grps_create
def MakeGroupByIds(self, groupName, elementType, elemIDs):
    """Create a group named *groupName* of type *elementType* and fill it
    with *elemIDs*, which is either a plain list of element IDs or an
    SMESH_IDSource-like object exposing GetIDs().
    """
    new_group = self.mesh.CreateGroup(elementType, groupName)
    if hasattr( elemIDs, "GetIDs" ):
        # an ID-source object: attach it to this mesh first when possible
        if hasattr( elemIDs, "SetMesh" ):
            elemIDs.SetMesh( self.GetMesh() )
        new_group.AddFrom( elemIDs )
    else:
        # a plain list of element IDs
        new_group.Add(elemIDs)
    return new_group
## Creates a mesh group by the given conditions
# @param groupName the name of the mesh group
# @param elementType the type of elements in the group
# @param CritType the type of criterion( FT_Taper, FT_Area, FT_RangeOfIds, FT_LyingOnGeom etc. )
# @param Compare belongs to {FT_LessThan, FT_MoreThan, FT_EqualTo}
# @param Threshold the threshold value (range of ids as string, shape, numeric)
# @param UnaryOp FT_LogicalNOT or FT_Undefined
# @param Tolerance the tolerance used by FT_BelongToGeom, FT_BelongToSurface,
# FT_LyingOnGeom, FT_CoplanarFaces criteria
# @return SMESH_GroupOnFilter
# @ingroup l2_grps_create
def MakeGroup(self,
              groupName,
              elementType,
              CritType=FT_Undefined,
              Compare=FT_EqualTo,
              Threshold="",
              UnaryOp=FT_Undefined,
              Tolerance=1e-07):
    """Create a group of *elementType* elements matching one criterion.

    Builds a single Criterion from the arguments and delegates to
    MakeGroupByCriterion().
    """
    aCriterion = self.smeshpyD.GetCriterion(elementType, CritType, Compare, Threshold, UnaryOp, FT_Undefined,Tolerance)
    group = self.MakeGroupByCriterion(groupName, aCriterion)
    return group
## Creates a mesh group by the given criterion
# @param groupName the name of the mesh group
# @param Criterion the instance of Criterion class
# @return SMESH_GroupOnFilter
# @ingroup l2_grps_create
def MakeGroupByCriterion(self, groupName, Criterion):
    """Create a group satisfying the single *Criterion*; wraps the
    list-based MakeGroupByCriteria().
    """
    return self.MakeGroupByCriteria(groupName, [Criterion])
## Creates a mesh group by the given criteria (list of criteria)
# @param groupName the name of the mesh group
# @param theCriteria the list of criteria
# @param binOp binary operator used when binary operator of criteria is undefined
# @return SMESH_GroupOnFilter
# @ingroup l2_grps_create
def MakeGroupByCriteria(self, groupName, theCriteria, binOp=SMESH.FT_LogicalAND):
    """Create a group from a list of criteria combined with *binOp*
    (used where a criterion's own binary operator is undefined).
    """
    aFilter = self.smeshpyD.GetFilterFromCriteria( theCriteria, binOp )
    group = self.MakeGroupByFilter(groupName, aFilter)
    return group
## Creates a mesh group by the given filter
# @param groupName the name of the mesh group
# @param theFilter the instance of Filter class
# @return SMESH_GroupOnFilter
# @ingroup l2_grps_create
def MakeGroupByFilter(self, groupName, theFilter):
    """Create a group named *groupName* driven by *theFilter*.

    The group is created as a group-on-filter, so its contents are kept
    up to date as the mesh is modified.
    """
    # (obsolete standalone-group implementation removed; GroupOnFilter
    # creates the self-updating variant directly)
    return self.GroupOnFilter( theFilter.GetElementType(), groupName, theFilter )
## Removes a group
# @ingroup l2_grps_delete
def RemoveGroup(self, group):
    """Remove *group* from the mesh; the elements themselves are kept."""
    self.mesh.RemoveGroup(group)
## Removes a group with its contents
# @ingroup l2_grps_delete
def RemoveGroupWithContents(self, group):
    """Remove *group* together with all elements it contains."""
    self.mesh.RemoveGroupWithContents(group)
## Gets the list of groups existing in the mesh in the order of creation (starting from the oldest one)
# @return a sequence of SMESH_GroupBase
# @ingroup l2_grps_create
def GetGroups(self):
    """Return all groups of the mesh in creation order (oldest first)."""
    return self.mesh.GetGroups()
## Gets the number of groups existing in the mesh
# @return the quantity of groups as an integer value
# @ingroup l2_grps_create
def NbGroups(self):
    """Return the number of groups existing in the mesh."""
    return self.mesh.NbGroups()
## Gets the list of names of groups existing in the mesh
# @return list of strings
# @ingroup l2_grps_create
def GetGroupNames(self):
    """Return the names of all groups, in creation order."""
    return [ grp.GetName() for grp in self.GetGroups() ]
## Produces a union of two groups
# A new group is created. All mesh elements that are
# present in the initial groups are added to the new one
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def UnionGroups(self, group1, group2, name):
    """Return a new group holding the union of *group1* and *group2*."""
    return self.mesh.UnionGroups(group1, group2, name)
## Produces a union list of groups
# New group is created. All mesh elements that are present in
# initial groups are added to the new one
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def UnionListOfGroups(self, groups, name):
    """Return a new group holding the union of all *groups*."""
    return self.mesh.UnionListOfGroups(groups, name)
## Produces an intersection of two groups
# A new group is created. All mesh elements that are common
# for the two initial groups are added to the new one.
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def IntersectGroups(self, group1, group2, name):
    """Return a new group holding elements common to *group1* and *group2*."""
    return self.mesh.IntersectGroups(group1, group2, name)
## Produces an intersection of groups
# New group is created. All mesh elements that are present in all
# initial groups simultaneously are added to the new one
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def IntersectListOfGroups(self, groups, name):
    """Return a new group holding elements present in every one of *groups*."""
    return self.mesh.IntersectListOfGroups(groups, name)
## Produces a cut of two groups
# A new group is created. All mesh elements that are present in
# the main group but are not present in the tool group are added to the new one
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def CutGroups(self, main_group, tool_group, name):
    """Return a new group of elements of *main_group* absent from *tool_group*."""
    return self.mesh.CutGroups(main_group, tool_group, name)
## Produces a cut of groups
# A new group is created. All mesh elements that are present in main groups
# but do not present in tool groups are added to the new one
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def CutListOfGroups(self, main_groups, tool_groups, name):
    """Return a new group of elements of *main_groups* absent from *tool_groups*."""
    return self.mesh.CutListOfGroups(main_groups, tool_groups, name)
## Produces a group of elements of specified type using list of existing groups
# A new group is created. System
# 1) extracts all nodes on which groups elements are built
# 2) combines all elements of specified dimension laying on these nodes
# @return an instance of SMESH_Group
# @ingroup l2_grps_operon
def CreateDimGroup(self, groups, elem_type, name):
    """Return a new group of *elem_type* elements built on the nodes of *groups*."""
    return self.mesh.CreateDimGroup(groups, elem_type, name)
## Convert group on geom into standalone group
# @ingroup l2_grps_delete
def ConvertToStandalone(self, group):
    """Convert a group-on-geometry into an ordinary standalone group."""
    return self.mesh.ConvertToStandalone(group)
# Get some info about mesh:
# ------------------------
## Returns the log of nodes and elements added or removed
# since the previous clear of the log.
# @param clearAfterGet log is emptied after Get (safe if concurrents access)
# @return list of log_block structures:
# commandType
# number
# coords
# indexes
# @ingroup l1_auxiliary
def GetLog(self, clearAfterGet):
return self.mesh.GetLog(clearAfterGet)
## Clears the log of nodes and elements added or removed since the previous
# clear. Must be used immediately after GetLog if clearAfterGet is false.
# @ingroup l1_auxiliary
def ClearLog(self):
self.mesh.ClearLog()
## Toggles auto color mode on the object.
# @param theAutoColor the flag which toggles auto color mode.
# @ingroup l1_auxiliary
def SetAutoColor(self, theAutoColor):
self.mesh.SetAutoColor(theAutoColor)
## Gets flag of object auto color mode.
# @return True or False
# @ingroup l1_auxiliary
def GetAutoColor(self):
return self.mesh.GetAutoColor()
## Gets the internal ID
# @return integer value, which is the internal Id of the mesh
# @ingroup l1_auxiliary
def GetId(self):
return self.mesh.GetId()
## Get the study Id
# @return integer value, which is the study Id of the mesh
# @ingroup l1_auxiliary
def GetStudyId(self):
return self.mesh.GetStudyId()
## Checks the group names for duplications.
# Consider the maximum group name length stored in MED file.
# @return True or False
# @ingroup l1_auxiliary
def HasDuplicatedGroupNamesMED(self):
return self.mesh.HasDuplicatedGroupNamesMED()
## Obtains the mesh editor tool
# @return an instance of SMESH_MeshEditor
# @ingroup l1_modifying
def GetMeshEditor(self):
return self.editor
## Wrap a list of IDs of elements or nodes into SMESH_IDSource which
# can be passed as argument to a method accepting mesh, group or sub-mesh
# @return an instance of SMESH_IDSource
# @ingroup l1_auxiliary
def GetIDSource(self, ids, elemType):
return self.editor.MakeIDSource(ids, elemType)
# Get information about mesh contents:
# ------------------------------------
## Gets the mesh statistics
# @return dictionary type element - count of elements
# @ingroup l1_meshinfo
def GetMeshInfo(self, obj = None):
    """Return per-element-type statistics for *obj* (this mesh by default)."""
    target = obj if obj else self.mesh
    return self.smeshpyD.GetMeshInfo(target)
## Returns the number of nodes in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbNodes(self):
return self.mesh.NbNodes()
## Returns the number of elements in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbElements(self):
return self.mesh.NbElements()
## Returns the number of 0d elements in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def Nb0DElements(self):
return self.mesh.Nb0DElements()
## Returns the number of ball discrete elements in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbBalls(self):
return self.mesh.NbBalls()
## Returns the number of edges in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbEdges(self):
return self.mesh.NbEdges()
## Returns the number of edges with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbEdgesOfOrder(self, elementOrder):
return self.mesh.NbEdgesOfOrder(elementOrder)
## Returns the number of faces in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbFaces(self):
return self.mesh.NbFaces()
## Returns the number of faces with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbFacesOfOrder(self, elementOrder):
return self.mesh.NbFacesOfOrder(elementOrder)
## Returns the number of triangles in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbTriangles(self):
return self.mesh.NbTriangles()
## Returns the number of triangles with the given order in the mesh
# @param elementOrder is the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbTrianglesOfOrder(self, elementOrder):
return self.mesh.NbTrianglesOfOrder(elementOrder)
## Returns the number of biquadratic triangles in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbBiQuadTriangles(self):
return self.mesh.NbBiQuadTriangles()
## Returns the number of quadrangles in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbQuadrangles(self):
return self.mesh.NbQuadrangles()
## Returns the number of quadrangles with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbQuadranglesOfOrder(self, elementOrder):
return self.mesh.NbQuadranglesOfOrder(elementOrder)
## Returns the number of biquadratic quadrangles in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbBiQuadQuadrangles(self):
return self.mesh.NbBiQuadQuadrangles()
## Returns the number of polygons in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbPolygons(self):
return self.mesh.NbPolygons()
## Returns the number of volumes in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbVolumes(self):
return self.mesh.NbVolumes()
## Returns the number of volumes with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbVolumesOfOrder(self, elementOrder):
return self.mesh.NbVolumesOfOrder(elementOrder)
## Returns the number of tetrahedrons in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbTetras(self):
return self.mesh.NbTetras()
## Returns the number of tetrahedrons with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbTetrasOfOrder(self, elementOrder):
return self.mesh.NbTetrasOfOrder(elementOrder)
## Returns the number of hexahedrons in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbHexas(self):
return self.mesh.NbHexas()
## Returns the number of hexahedrons with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbHexasOfOrder(self, elementOrder):
return self.mesh.NbHexasOfOrder(elementOrder)
## Returns the number of triquadratic hexahedrons in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbTriQuadraticHexas(self):
return self.mesh.NbTriQuadraticHexas()
## Returns the number of pyramids in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbPyramids(self):
return self.mesh.NbPyramids()
## Returns the number of pyramids with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbPyramidsOfOrder(self, elementOrder):
return self.mesh.NbPyramidsOfOrder(elementOrder)
## Returns the number of prisms in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbPrisms(self):
return self.mesh.NbPrisms()
## Returns the number of prisms with the given order in the mesh
# @param elementOrder the order of elements:
# ORDER_ANY, ORDER_LINEAR or ORDER_QUADRATIC
# @return an integer value
# @ingroup l1_meshinfo
def NbPrismsOfOrder(self, elementOrder):
return self.mesh.NbPrismsOfOrder(elementOrder)
## Returns the number of hexagonal prisms in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbHexagonalPrisms(self):
return self.mesh.NbHexagonalPrisms()
## Returns the number of polyhedrons in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbPolyhedrons(self):
return self.mesh.NbPolyhedrons()
## Returns the number of submeshes in the mesh
# @return an integer value
# @ingroup l1_meshinfo
def NbSubMesh(self):
return self.mesh.NbSubMesh()
## Returns the list of mesh elements IDs
# @return the list of integer values
# @ingroup l1_meshinfo
def GetElementsId(self):
return self.mesh.GetElementsId()
## Returns the list of IDs of mesh elements with the given type
# @param elementType the required type of elements (SMESH.NODE, SMESH.EDGE, SMESH.FACE or SMESH.VOLUME)
# @return list of integer values
# @ingroup l1_meshinfo
def GetElementsByType(self, elementType):
return self.mesh.GetElementsByType(elementType)
## Returns the list of mesh nodes IDs
# @return the list of integer values
# @ingroup l1_meshinfo
def GetNodesId(self):
return self.mesh.GetNodesId()
# Get the information about mesh elements:
# ------------------------------------
## Returns the type of mesh element
# @return the value from SMESH::ElementType enumeration
# @ingroup l1_meshinfo
def GetElementType(self, id, iselem):
return self.mesh.GetElementType(id, iselem)
## Returns the geometric type of mesh element
# @return the value from SMESH::EntityType enumeration
# @ingroup l1_meshinfo
def GetElementGeomType(self, id):
return self.mesh.GetElementGeomType(id)
## Returns the shape type of mesh element
# @return the value from SMESH::GeometryType enumeration
# @ingroup l1_meshinfo
def GetElementShape(self, id):
return self.mesh.GetElementShape(id)
## Returns the list of submesh elements IDs
# @param Shape a geom object(sub-shape) IOR
# Shape must be the sub-shape of a ShapeToMesh()
# @return the list of integer values
# @ingroup l1_meshinfo
def GetSubMeshElementsId(self, Shape):
    """Return IDs of the sub-mesh elements lying on *Shape*.

    *Shape* is either a GEOM object (its first sub-shape index is used)
    or an already-resolved numeric shape ID.
    """
    if ( isinstance( Shape, geomBuilder.GEOM._objref_GEOM_Object)):
        ShapeID = Shape.GetSubShapeIndices()[0]
    else:
        ShapeID = Shape
    return self.mesh.GetSubMeshElementsId(ShapeID)
## Returns the list of submesh nodes IDs
# @param Shape a geom object(sub-shape) IOR
# Shape must be the sub-shape of a ShapeToMesh()
# @param all If true, gives all nodes of submesh elements, otherwise gives only submesh nodes
# @return the list of integer values
# @ingroup l1_meshinfo
def GetSubMeshNodesId(self, Shape, all):
    """Return node IDs of the sub-mesh on *Shape*.

    If *all* is true, all nodes of the sub-mesh elements are returned,
    otherwise only the nodes owned by the sub-mesh itself.
    """
    if ( isinstance( Shape, geomBuilder.GEOM._objref_GEOM_Object)):
        # NOTE(review): resolves the shape ID via geompyD.GetSubShapeID(),
        # unlike GetSubMeshElementsId() which uses GetSubShapeIndices()[0]
        # -- confirm both yield the same ID for the same sub-shape.
        ShapeID = self.geompyD.GetSubShapeID( self.geom, Shape )
    else:
        ShapeID = Shape
    return self.mesh.GetSubMeshNodesId(ShapeID, all)
## Returns type of elements on given shape
# @param Shape a geom object(sub-shape) IOR
# Shape must be a sub-shape of a ShapeToMesh()
# @return element type
# @ingroup l1_meshinfo
def GetSubMeshElementType(self, Shape):
    """Return the element type of the sub-mesh lying on *Shape*."""
    if ( isinstance( Shape, geomBuilder.GEOM._objref_GEOM_Object)):
        ShapeID = Shape.GetSubShapeIndices()[0]
    else:
        ShapeID = Shape
    return self.mesh.GetSubMeshElementType(ShapeID)
## Gets the mesh description
# @return string value
# @ingroup l1_meshinfo
def Dump(self):
return self.mesh.Dump()
# Get the information about nodes and elements of a mesh by its IDs:
# -----------------------------------------------------------
## Gets XYZ coordinates of a node
# \n If there is no nodes for the given ID - returns an empty list
# @return a list of double precision values
# @ingroup l1_meshinfo
def GetNodeXYZ(self, id):
return self.mesh.GetNodeXYZ(id)
## Returns list of IDs of inverse elements for the given node
# \n If there is no node for the given ID - returns an empty list
# @return a list of integer values
# @ingroup l1_meshinfo
def GetNodeInverseElements(self, id):
return self.mesh.GetNodeInverseElements(id)
## @brief Returns the position of a node on the shape
# @return SMESH::NodePosition
# @ingroup l1_meshinfo
def GetNodePosition(self,NodeID):
return self.mesh.GetNodePosition(NodeID)
## @brief Returns the position of an element on the shape
# @return SMESH::ElementPosition
# @ingroup l1_meshinfo
def GetElementPosition(self,ElemID):
return self.mesh.GetElementPosition(ElemID)
## If the given element is a node, returns the ID of shape
# \n If there is no node for the given ID - returns -1
# @return an integer value
# @ingroup l1_meshinfo
def GetShapeID(self, id):
return self.mesh.GetShapeID(id)
## Returns the ID of the result shape after
# FindShape() from SMESH_MeshEditor for the given element
# \n If there is no element for the given ID - returns -1
# @return an integer value
# @ingroup l1_meshinfo
def GetShapeIDForElem(self,id):
return self.mesh.GetShapeIDForElem(id)
## Returns the number of nodes for the given element
# \n If there is no element for the given ID - returns -1
# @return an integer value
# @ingroup l1_meshinfo
def GetElemNbNodes(self, id):
return self.mesh.GetElemNbNodes(id)
## Returns the node ID at the given (zero based) index for the given element
# \n If there is no element for the given ID - returns -1
# \n If there is no node for the given index - returns -2
# @return an integer value
# @ingroup l1_meshinfo
def GetElemNode(self, id, index):
return self.mesh.GetElemNode(id, index)
## Returns the IDs of nodes of the given element
# @return a list of integer values
# @ingroup l1_meshinfo
def GetElemNodes(self, id):
return self.mesh.GetElemNodes(id)
## Returns true if the given node is the medium node in the given quadratic element
# @ingroup l1_meshinfo
def IsMediumNode(self, elementID, nodeID):
return self.mesh.IsMediumNode(elementID, nodeID)
## Returns true if the given node is the medium node in one of quadratic elements
# @ingroup l1_meshinfo
def IsMediumNodeOfAnyElem(self, nodeID, elementType):
return self.mesh.IsMediumNodeOfAnyElem(nodeID, elementType)
## Returns the number of edges for the given element
# @ingroup l1_meshinfo
def ElemNbEdges(self, id):
return self.mesh.ElemNbEdges(id)
## Returns the number of faces for the given element
# @ingroup l1_meshinfo
def ElemNbFaces(self, id):
return self.mesh.ElemNbFaces(id)
## Returns nodes of given face (counted from zero) for given volumic element.
# @ingroup l1_meshinfo
def GetElemFaceNodes(self,elemId, faceIndex):
return self.mesh.GetElemFaceNodes(elemId, faceIndex)
## Returns three components of normal of given mesh face
# (or an empty array in KO case)
# @ingroup l1_meshinfo
def GetFaceNormal(self, faceId, normalized=False):
return self.mesh.GetFaceNormal(faceId,normalized)
## Returns an element based on all given nodes.
# @ingroup l1_meshinfo
def FindElementByNodes(self,nodes):
return self.mesh.FindElementByNodes(nodes)
## Returns true if the given element is a polygon
# @ingroup l1_meshinfo
def IsPoly(self, id):
return self.mesh.IsPoly(id)
## Returns true if the given element is quadratic
# @ingroup l1_meshinfo
def IsQuadratic(self, id):
return self.mesh.IsQuadratic(id)
## Returns diameter of a ball discrete element or zero in case of an invalid \a id
# @ingroup l1_meshinfo
def GetBallDiameter(self, id):
return self.mesh.GetBallDiameter(id)
## Returns XYZ coordinates of the barycenter of the given element
# \n If there is no element for the given ID - returns an empty list
# @return a list of three double values
# @ingroup l1_meshinfo
def BaryCenter(self, id):
return self.mesh.BaryCenter(id)
## Passes mesh elements through the given filter and return IDs of fitting elements
# @param theFilter SMESH_Filter
# @return a list of ids
# @ingroup l1_controls
def GetIdsFromFilter(self, theFilter):
    """Apply *theFilter* to this mesh and return the IDs of matching elements."""
    theFilter.SetMesh( self.mesh )
    matching_ids = theFilter.GetIDs()
    return matching_ids
## Verifies whether a 2D mesh element has free edges (edges connected to one face only)\n
# Returns a list of special structures (borders).
# @return a list of SMESH.FreeEdges.Border structure: edge id and ids of two its nodes.
# @ingroup l1_controls
def GetFreeBorders(self):
    """Return SMESH.FreeEdges.Border structures describing the free edges
    of the mesh (edges connected to exactly one face).
    """
    filter_mgr = self.smeshpyD.CreateFilterManager()
    free_edges = filter_mgr.CreateFreeEdges()
    free_edges.SetMesh(self.mesh)
    borders = free_edges.GetBorders()
    # release the server-side filter manager before returning
    filter_mgr.UnRegister()
    return borders
# Get mesh measurements information:
# ------------------------------------
## Get minimum distance between two nodes, elements or distance to the origin
# @param id1 first node/element id
# @param id2 second node/element id (if 0, distance from @a id1 to the origin is computed)
# @param isElem1 @c True if @a id1 is element id, @c False if it is node id
# @param isElem2 @c True if @a id2 is element id, @c False if it is node id
# @return minimum distance value
# @sa GetMinDistance()
def MinDistance(self, id1, id2=0, isElem1=False, isElem2=False):
    """Return the minimum-distance value between two nodes/elements
    (or between *id1* and the origin when *id2* is 0); convenience
    wrapper extracting .value from GetMinDistance().
    """
    return self.GetMinDistance(id1, id2, isElem1, isElem2).value
## Get measure structure specifying minimum distance data between two objects
# @param id1 first node/element id
# @param id2 second node/element id (if 0, distance from @a id1 to the origin is computed)
# @param isElem1 @c True if @a id1 is element id, @c False if it is node id
# @param isElem2 @c True if @a id2 is element id, @c False if it is node id
# @return Measure structure
# @sa MinDistance()
def GetMinDistance(self, id1, id2=0, isElem1=False, isElem2=False):
    """Return a Measure structure holding the minimum distance between
    two nodes/elements, or between *id1* and the origin when *id2* is 0.
    """
    # wrap the raw IDs into server-side ID-source objects
    # NOTE(review): element IDs are wrapped as SMESH.FACE sources --
    # confirm this also covers edge/volume elements.
    if isElem1:
        id1 = self.editor.MakeIDSource([id1], SMESH.FACE)
    else:
        id1 = self.editor.MakeIDSource([id1], SMESH.NODE)
    if id2 != 0:
        if isElem2:
            id2 = self.editor.MakeIDSource([id2], SMESH.FACE)
        else:
            id2 = self.editor.MakeIDSource([id2], SMESH.NODE)
        pass
    else:
        # distance to the origin is requested
        id2 = None
    aMeasurements = self.smeshpyD.CreateMeasurements()
    aMeasure = aMeasurements.MinDistance(id1, id2)
    # temporary guard object: unregisters the CORBA objects as soon as
    # it is garbage-collected, i.e. right after this statement
    genObjUnRegister([aMeasurements,id1, id2])
    return aMeasure
## Get bounding box of the specified object(s)
# @param objects single source object or list of source objects or list of nodes/elements IDs
# @param isElem if @a objects is a list of IDs, @c True value in this parameters specifies that @a objects are elements,
# @c False specifies that @a objects are nodes
# @return tuple of six values (minX, minY, minZ, maxX, maxY, maxZ)
# @sa GetBoundingBox()
def BoundingBox(self, objects=None, isElem=False):
    """Return (minX, minY, minZ, maxX, maxY, maxZ) of the given object(s),
    or six zeros when no bounding box could be measured.
    """
    box = self.GetBoundingBox(objects, isElem)
    if box is None:
        return (0.0,) * 6
    return (box.minX, box.minY, box.minZ, box.maxX, box.maxY, box.maxZ)
## Get measure structure specifying bounding box data of the specified object(s)
# @param IDs single source object or list of source objects or list of nodes/elements IDs
# @param isElem if @a IDs is a list of IDs, @c True value in this parameters specifies that @a objects are elements,
# @c False specifies that @a objects are nodes
# @return Measure structure
# @sa BoundingBox()
def GetBoundingBox(self, IDs=None, isElem=False):
    """Return a Measure structure with the bounding box of the given
    object(s); with no argument the whole mesh is measured.
    """
    # normalise the input into a list of candidate sources
    if IDs is None:
        IDs = [self.mesh]
    elif isinstance(IDs, tuple):
        IDs = list(IDs)
    if not isinstance(IDs, list):
        IDs = [IDs]
    if len(IDs) > 0 and isinstance(IDs[0], int):
        # a bare list of numeric IDs: treat it as a single source
        IDs = [IDs]
    srclist = []
    # guard keeping temporary ID sources registered until return
    unRegister = genObjUnRegister()
    for o in IDs:
        if isinstance(o, Mesh):
            srclist.append(o.mesh)
        elif hasattr(o, "_narrow"):
            # a CORBA object: narrow it to an ID source when possible
            src = o._narrow(SMESH.SMESH_IDSource)
            if src: srclist.append(src)
            pass
        elif isinstance(o, list):
            # a list of IDs: wrap it as an element or node source
            if isElem:
                srclist.append(self.editor.MakeIDSource(o, SMESH.FACE))
            else:
                srclist.append(self.editor.MakeIDSource(o, SMESH.NODE))
            unRegister.set( srclist[-1] )
            pass
        pass
    aMeasurements = self.smeshpyD.CreateMeasurements()
    unRegister.set( aMeasurements )
    aMeasure = aMeasurements.BoundingBox(srclist)
    return aMeasure
# Mesh edition (SMESH_MeshEditor functionality):
# ---------------------------------------------
## Removes the elements from the mesh by ids
# @param IDsOfElements is a list of ids of elements to remove
# @return True or False
# @ingroup l2_modif_del
def RemoveElements(self, IDsOfElements):
return self.editor.RemoveElements(IDsOfElements)
## Removes nodes from mesh by ids
# @param IDsOfNodes is a list of ids of nodes to remove
# @return True or False
# @ingroup l2_modif_del
def RemoveNodes(self, IDsOfNodes):
return self.editor.RemoveNodes(IDsOfNodes)
## Removes all orphan (free) nodes from mesh
# @return number of the removed nodes
# @ingroup l2_modif_del
def RemoveOrphanNodes(self):
return self.editor.RemoveOrphanNodes()
## Add a node to the mesh by coordinates
# @return Id of the new node
# @ingroup l2_modif_add
def AddNode(self, x, y, z):
    """Add a node at coordinates (x, y, z) and return its ID.

    Coordinates may be notebook-variable strings; they are parsed first.
    """
    x,y,z,Parameters,hasVars = ParseParameters(x,y,z)
    # record the variable string only when notebook variables were used
    if hasVars: self.mesh.SetParameters(Parameters)
    return self.editor.AddNode( x, y, z)
## Creates a 0D element on a node with given number.
# @param IDOfNode the ID of node for creation of the element.
# @return the Id of the new 0D element
# @ingroup l2_modif_add
def Add0DElement(self, IDOfNode):
return self.editor.Add0DElement(IDOfNode)
## Create 0D elements on all nodes of the given elements except those
# nodes on which a 0D element already exists.
# @param theObject an object on whose nodes 0D elements will be created.
# It can be mesh, sub-mesh, group, list of element IDs or a holder
# of nodes IDs created by calling mesh.GetIDSource( nodes, SMESH.NODE )
# @param theGroupName optional name of a group to add 0D elements created
# and/or found on nodes of \a theObject.
# @return an object (a new group or a temporary SMESH_IDSource) holding
# IDs of new and/or found 0D elements. IDs of 0D elements
# can be retrieved from the returned object by calling GetIDs()
# @ingroup l2_modif_add
def Add0DElementsToAllNodes(self, theObject, theGroupName=""):
    """Create 0D elements on every node of *theObject* that does not
    already carry one; optionally gather them in a group *theGroupName*.
    Returns an object from which the IDs can be obtained via GetIDs().
    """
    # guard keeping temporary ID sources alive until return
    unRegister = genObjUnRegister()
    if isinstance( theObject, Mesh ):
        theObject = theObject.GetMesh()
    if isinstance( theObject, list ):
        # a list of element IDs: wrap it into a temporary ID source
        theObject = self.GetIDSource( theObject, SMESH.ALL )
        unRegister.set( theObject )
    return self.editor.Create0DElementsOnAllNodes( theObject, theGroupName )
## Creates a ball element on a node with given ID.
# @param IDOfNode the ID of node for creation of the element.
# @param diameter the ball diameter.
# @return the Id of the new ball element
# @ingroup l2_modif_add
def AddBall(self, IDOfNode, diameter):
return self.editor.AddBall( IDOfNode, diameter )
## Creates a linear or quadratic edge (this is determined
# by the number of given nodes).
# @param IDsOfNodes the list of node IDs for creation of the element.
# The order of nodes in this list should correspond to the description
# of MED. \n This description is located by the following link:
# http://www.code-aster.org/outils/med/html/modele_de_donnees.html#3.
# @return the Id of the new edge
# @ingroup l2_modif_add
def AddEdge(self, IDsOfNodes):
return self.editor.AddEdge(IDsOfNodes)
## Creates a linear or quadratic face (this is determined
# by the number of given nodes).
# @param IDsOfNodes the list of node IDs for creation of the element.
# The order of nodes in this list should correspond to the description
# of MED. \n This description is located by the following link:
# http://www.code-aster.org/outils/med/html/modele_de_donnees.html#3.
# @return the Id of the new face
# @ingroup l2_modif_add
def AddFace(self, IDsOfNodes):
return self.editor.AddFace(IDsOfNodes)
## Adds a polygonal face to the mesh by the list of node IDs
# @param IdsOfNodes the list of node IDs for creation of the element.
# @return the Id of the new face
# @ingroup l2_modif_add
def AddPolygonalFace(self, IdsOfNodes):
return self.editor.AddPolygonalFace(IdsOfNodes)
## Creates both simple and quadratic volume (this is determined
# by the number of given nodes).
# @param IDsOfNodes the list of node IDs for creation of the element.
# The order of nodes in this list should correspond to the description
# of MED. \n This description is located by the following link:
# http://www.code-aster.org/outils/med/html/modele_de_donnees.html#3.
# @return the Id of the new volumic element
# @ingroup l2_modif_add
def AddVolume(self, IDsOfNodes):
return self.editor.AddVolume(IDsOfNodes)
## Creates a volume of many faces, giving nodes for each face.
# @param IdsOfNodes the list of node IDs for volume creation face by face.
# @param Quantities the list of integer values, Quantities[i]
# gives the quantity of nodes in face number i.
# @return the Id of the new volumic element
# @ingroup l2_modif_add
def AddPolyhedralVolume (self, IdsOfNodes, Quantities):
return self.editor.AddPolyhedralVolume(IdsOfNodes, Quantities)
## Creates a volume of many faces, giving the IDs of the existing faces.
# @param IdsOfFaces the list of face IDs for volume creation.
#
# Note: The created volume will refer only to the nodes
# of the given faces, not to the faces themselves.
# @return the Id of the new volumic element
# @ingroup l2_modif_add
def AddPolyhedralVolumeByFaces (self, IdsOfFaces):
return self.editor.AddPolyhedralVolumeByFaces(IdsOfFaces)
## @brief Binds a node to a vertex
# @param NodeID a node ID
# @param Vertex a vertex or vertex ID
# @return True if succeed else raises an exception
# @ingroup l2_modif_add
def SetNodeOnVertex(self, NodeID, Vertex):
if ( isinstance( Vertex, geomBuilder.GEOM._objref_GEOM_Object)):
VertexID = Vertex.GetSubShapeIndices()[0]
else:
VertexID = Vertex
try:
self.editor.SetNodeOnVertex(NodeID, VertexID)
except SALOME.SALOME_Exception, inst:
raise ValueError, inst.details.text
return True
## @brief Stores the node position on an edge
# @param NodeID a node ID
# @param Edge an edge or edge ID
# @param paramOnEdge a parameter on the edge where the node is located
# @return True if succeed else raises an exception
# @ingroup l2_modif_add
def SetNodeOnEdge(self, NodeID, Edge, paramOnEdge):
if ( isinstance( Edge, geomBuilder.GEOM._objref_GEOM_Object)):
EdgeID = Edge.GetSubShapeIndices()[0]
else:
EdgeID = Edge
try:
self.editor.SetNodeOnEdge(NodeID, EdgeID, paramOnEdge)
except SALOME.SALOME_Exception, inst:
raise ValueError, inst.details.text
return True
## @brief Stores node position on a face
# @param NodeID a node ID
# @param Face a face or face ID
# @param u U parameter on the face where the node is located
# @param v V parameter on the face where the node is located
# @return True if succeed else raises an exception
# @ingroup l2_modif_add
def SetNodeOnFace(self, NodeID, Face, u, v):
if ( isinstance( Face, geomBuilder.GEOM._objref_GEOM_Object)):
FaceID = Face.GetSubShapeIndices()[0]
else:
FaceID = Face
try:
self.editor.SetNodeOnFace(NodeID, FaceID, u, v)
except SALOME.SALOME_Exception, inst:
raise ValueError, inst.details.text
return True
## @brief Binds a node to a solid
# @param NodeID a node ID
# @param Solid a solid or solid ID
# @return True if succeed else raises an exception
# @ingroup l2_modif_add
def SetNodeInVolume(self, NodeID, Solid):
if ( isinstance( Solid, geomBuilder.GEOM._objref_GEOM_Object)):
SolidID = Solid.GetSubShapeIndices()[0]
else:
SolidID = Solid
try:
self.editor.SetNodeInVolume(NodeID, SolidID)
except SALOME.SALOME_Exception, inst:
raise ValueError, inst.details.text
return True
## @brief Bind an element to a shape
# @param ElementID an element ID
# @param Shape a shape or shape ID
# @return True if succeed else raises an exception
# @ingroup l2_modif_add
def SetMeshElementOnShape(self, ElementID, Shape):
if ( isinstance( Shape, geomBuilder.GEOM._objref_GEOM_Object)):
ShapeID = Shape.GetSubShapeIndices()[0]
else:
ShapeID = Shape
try:
self.editor.SetMeshElementOnShape(ElementID, ShapeID)
except SALOME.SALOME_Exception, inst:
raise ValueError, inst.details.text
return True
## Moves the node with the given id
# @param NodeID the id of the node
# @param x a new X coordinate
# @param y a new Y coordinate
# @param z a new Z coordinate
# @return True if succeed else False
# @ingroup l2_modif_movenode
    def MoveNode(self, NodeID, x, y, z):
        """Move the node with the given ID to point (x, y, z).

        Coordinates may be names of notebook variables; when variables
        are used, their names are stored in the mesh parameters.
        Returns True on success, False otherwise.
        """
        x,y,z,Parameters,hasVars = ParseParameters(x,y,z)
        if hasVars: self.mesh.SetParameters(Parameters)
        return self.editor.MoveNode(NodeID, x, y, z)
## Finds the node closest to a point and moves it to a point location
# @param x the X coordinate of a point
# @param y the Y coordinate of a point
# @param z the Z coordinate of a point
# @param NodeID if specified (>0), the node with this ID is moved,
# otherwise, the node closest to point (@a x,@a y,@a z) is moved
# @return the ID of a node
# @ingroup l2_modif_throughp
    def MoveClosestNodeToPoint(self, x, y, z, NodeID):
        """Move a node to point (x, y, z).

        NodeID: if > 0, the node with this ID is moved; otherwise the
        node closest to (x, y, z) is moved. Coordinates may be notebook
        variable names. Returns the ID of the moved node.
        """
        x,y,z,Parameters,hasVars = ParseParameters(x,y,z)
        if hasVars: self.mesh.SetParameters(Parameters)
        return self.editor.MoveClosestNodeToPoint(x, y, z, NodeID)
## Finds the node closest to a point
# @param x the X coordinate of a point
# @param y the Y coordinate of a point
# @param z the Z coordinate of a point
# @return the ID of a node
# @ingroup l2_modif_throughp
    def FindNodeClosestTo(self, x, y, z):
        """Return the ID of the mesh node closest to point (x, y, z)."""
        # Earlier implementation via the edit previewer, kept for reference:
        #preview = self.mesh.GetMeshEditPreviewer()
        #return preview.MoveClosestNodeToPoint(x, y, z, -1)
        return self.editor.FindNodeClosestTo(x, y, z)
## Finds the elements where a point lays IN or ON
# @param x the X coordinate of a point
# @param y the Y coordinate of a point
# @param z the Z coordinate of a point
# @param elementType type of elements to find (SMESH.ALL type
# means elements of any type excluding nodes, discrete and 0D elements)
# @param meshPart a part of mesh (group, sub-mesh) to search within
# @return list of IDs of found elements
# @ingroup l2_modif_throughp
def FindElementsByPoint(self, x, y, z, elementType = SMESH.ALL, meshPart=None):
if meshPart:
return self.editor.FindAmongElementsByPoint( meshPart, x, y, z, elementType );
else:
return self.editor.FindElementsByPoint(x, y, z, elementType)
# Return point state in a closed 2D mesh in terms of TopAbs_State enumeration:
# 0-IN, 1-OUT, 2-ON, 3-UNKNOWN
# TopAbs_UNKNOWN state means that either mesh is wrong or the analysis fails.
    def GetPointState(self, x, y, z):
        """Return the state of point (x, y, z) w.r.t. a closed 2D mesh,
        as a TopAbs_State value: 0-IN, 1-OUT, 2-ON, 3-UNKNOWN.

        TopAbs_UNKNOWN means either the mesh is wrong or the analysis failed.
        """
        return self.editor.GetPointState(x, y, z)
## Finds the node closest to a point and moves it to a point location
# @param x the X coordinate of a point
# @param y the Y coordinate of a point
# @param z the Z coordinate of a point
# @return the ID of a moved node
# @ingroup l2_modif_throughp
    def MeshToPassThroughAPoint(self, x, y, z):
        """Move the node closest to (x, y, z) onto that point.

        Returns the ID of the moved node. (NodeID -1 asks the server to
        pick the closest node itself.)
        """
        return self.editor.MoveClosestNodeToPoint(x, y, z, -1)
## Replaces two neighbour triangles sharing Node1-Node2 link
# with the triangles built on the same 4 nodes but having other common link.
# @param NodeID1 the ID of the first node
# @param NodeID2 the ID of the second node
# @return false if proper faces were not found
# @ingroup l2_modif_invdiag
    def InverseDiag(self, NodeID1, NodeID2):
        """Replace the two triangles sharing link NodeID1-NodeID2 with
        triangles built on the same 4 nodes but sharing the other diagonal.

        Returns False if suitable faces were not found.
        """
        return self.editor.InverseDiag(NodeID1, NodeID2)
## Replaces two neighbour triangles sharing Node1-Node2 link
# with a quadrangle built on the same 4 nodes.
# @param NodeID1 the ID of the first node
# @param NodeID2 the ID of the second node
# @return false if proper faces were not found
# @ingroup l2_modif_unitetri
    def DeleteDiag(self, NodeID1, NodeID2):
        """Replace the two triangles sharing link NodeID1-NodeID2 with a
        single quadrangle built on the same 4 nodes.

        Returns False if suitable faces were not found.
        """
        return self.editor.DeleteDiag(NodeID1, NodeID2)
## Reorients elements by ids
# @param IDsOfElements if undefined reorients all mesh elements
# @return True if succeed else False
# @ingroup l2_modif_changori
def Reorient(self, IDsOfElements=None):
if IDsOfElements == None:
IDsOfElements = self.GetElementsId()
return self.editor.Reorient(IDsOfElements)
## Reorients all elements of the object
# @param theObject mesh, submesh or group
# @return True if succeed else False
# @ingroup l2_modif_changori
def ReorientObject(self, theObject):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
return self.editor.ReorientObject(theObject)
## Reorient faces contained in \a the2DObject.
# @param the2DObject is a mesh, sub-mesh, group or list of IDs of 2D elements
# @param theDirection is a desired direction of normal of \a theFace.
# It can be either a GEOM vector or a list of coordinates [x,y,z].
# @param theFaceOrPoint defines a face of \a the2DObject whose normal will be
# compared with theDirection. It can be either ID of face or a point
# by which the face will be found. The point can be given as either
# a GEOM vertex or a list of point coordinates.
# @return number of reoriented faces
# @ingroup l2_modif_changori
    def Reorient2D(self, the2DObject, theDirection, theFaceOrPoint ):
        """Reorient faces of the2DObject so that a reference face has its
        normal compared against theDirection.

        the2DObject: mesh, sub-mesh, group or list of 2D element IDs.
        theDirection: a GEOM vector or a coordinate list [x,y,z].
        theFaceOrPoint: either the ID of a face of the2DObject, or a
        point (GEOM vertex, coordinate list or PointStruct) by which the
        reference face is found.
        Returns the number of reoriented faces.
        """
        unRegister = genObjUnRegister()
        # check the2DObject: normalize it to an SMESH_IDSource
        if isinstance( the2DObject, Mesh ):
            the2DObject = the2DObject.GetMesh()
        if isinstance( the2DObject, list ):
            the2DObject = self.GetIDSource( the2DObject, SMESH.FACE )
            # temporary ID source: unRegister destroys it when this call ends
            unRegister.set( the2DObject )
        # check theDirection: normalize it to a DirStruct
        if isinstance( theDirection, geomBuilder.GEOM._objref_GEOM_Object):
            theDirection = self.smeshpyD.GetDirStruct( theDirection )
        if isinstance( theDirection, list ):
            theDirection = self.smeshpyD.MakeDirStruct( *theDirection )
        # prepare theFace and thePoint: exactly one of them is meaningful;
        # theFace == -1 tells the server to locate the face by thePoint
        theFace = theFaceOrPoint
        thePoint = PointStruct(0,0,0)
        if isinstance( theFaceOrPoint, geomBuilder.GEOM._objref_GEOM_Object):
            thePoint = self.smeshpyD.GetPointStruct( theFaceOrPoint )
            theFace = -1
        if isinstance( theFaceOrPoint, list ):
            thePoint = PointStruct( *theFaceOrPoint )
            theFace = -1
        if isinstance( theFaceOrPoint, PointStruct ):
            thePoint = theFaceOrPoint
            theFace = -1
        return self.editor.Reorient2D( the2DObject, theDirection, theFace, thePoint )
## Reorient faces according to adjacent volumes.
# @param the2DObject is a mesh, sub-mesh, group or list of
# either IDs of faces or face groups.
# @param the3DObject is a mesh, sub-mesh, group or list of IDs of volumes.
# @param theOutsideNormal to orient faces to have their normals
# pointing either \a outside or \a inside the adjacent volumes.
# @return number of reoriented faces.
# @ingroup l2_modif_changori
    def Reorient2DBy3D(self, the2DObject, the3DObject, theOutsideNormal=True ):
        """Reorient faces according to adjacent volumes.

        the2DObject: mesh, sub-mesh, group, or list of face IDs / face groups.
        the3DObject: mesh, sub-mesh, group or list of volume IDs.
        theOutsideNormal: if True, face normals are made to point outside
        the adjacent volumes, else inside.
        Returns the number of reoriented faces.
        """
        unRegister = genObjUnRegister()
        # check the2DObject: the server expects a *list* of ID sources,
        # so single objects and raw ID lists are wrapped accordingly
        if not isinstance( the2DObject, list ):
            the2DObject = [ the2DObject ]
        elif the2DObject and isinstance( the2DObject[0], int ):
            # a flat list of face IDs -> one temporary ID source
            the2DObject = self.GetIDSource( the2DObject, SMESH.FACE )
            unRegister.set( the2DObject )
            the2DObject = [ the2DObject ]
        for i,obj2D in enumerate( the2DObject ):
            if isinstance( obj2D, Mesh ):
                the2DObject[i] = obj2D.GetMesh()
            if isinstance( obj2D, list ):
                # nested list of IDs -> temporary ID source as well
                the2DObject[i] = self.GetIDSource( obj2D, SMESH.FACE )
                unRegister.set( the2DObject[i] )
        # check the3DObject: normalize to a single ID source
        if isinstance( the3DObject, Mesh ):
            the3DObject = the3DObject.GetMesh()
        if isinstance( the3DObject, list ):
            the3DObject = self.GetIDSource( the3DObject, SMESH.VOLUME )
            unRegister.set( the3DObject )
        return self.editor.Reorient2DBy3D( the2DObject, the3DObject, theOutsideNormal )
## Fuses the neighbouring triangles into quadrangles.
# @param IDsOfElements The triangles to be fused,
# @param theCriterion is a numerical functor, in terms of enum SMESH.FunctorType, used to
# choose a neighbour to fuse with.
# @param MaxAngle is the maximum angle between element normals at which the fusion
# is still performed; theMaxAngle is mesured in radians.
# Also it could be a name of variable which defines angle in degrees.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_unitetri
def TriToQuad(self, IDsOfElements, theCriterion, MaxAngle):
MaxAngle,Parameters,hasVars = ParseAngles(MaxAngle)
self.mesh.SetParameters(Parameters)
if not IDsOfElements:
IDsOfElements = self.GetElementsId()
Functor = self.smeshpyD.GetFunctor(theCriterion)
return self.editor.TriToQuad(IDsOfElements, Functor, MaxAngle)
## Fuses the neighbouring triangles of the object into quadrangles
# @param theObject is mesh, submesh or group
# @param theCriterion is a numerical functor, in terms of enum SMESH.FunctorType, used to
# choose a neighbour to fuse with.
# @param MaxAngle a max angle between element normals at which the fusion
# is still performed; theMaxAngle is mesured in radians.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_unitetri
def TriToQuadObject (self, theObject, theCriterion, MaxAngle):
MaxAngle,Parameters,hasVars = ParseAngles(MaxAngle)
self.mesh.SetParameters(Parameters)
if isinstance( theObject, Mesh ):
theObject = theObject.GetMesh()
Functor = self.smeshpyD.GetFunctor(theCriterion)
return self.editor.TriToQuadObject(theObject, Functor, MaxAngle)
## Splits quadrangles into triangles.
# @param IDsOfElements the faces to be splitted.
# @param theCriterion is a numerical functor, in terms of enum SMESH.FunctorType, used to
# choose a diagonal for splitting. If @a theCriterion is None, which is a default
# value, then quadrangles will be split by the smallest diagonal.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_cutquadr
def QuadToTri (self, IDsOfElements, theCriterion = None):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if theCriterion is None:
theCriterion = FT_MaxElementLength2D
Functor = self.smeshpyD.GetFunctor(theCriterion)
return self.editor.QuadToTri(IDsOfElements, Functor)
## Splits quadrangles into triangles.
# @param theObject the object from which the list of elements is taken,
# this is mesh, submesh or group
# @param theCriterion is a numerical functor, in terms of enum SMESH.FunctorType, used to
# choose a diagonal for splitting. If @a theCriterion is None, which is a default
# value, then quadrangles will be split by the smallest diagonal.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_cutquadr
def QuadToTriObject (self, theObject, theCriterion = None):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if theCriterion is None:
theCriterion = FT_MaxElementLength2D
Functor = self.smeshpyD.GetFunctor(theCriterion)
return self.editor.QuadToTriObject(theObject, Functor)
## Splits each of given quadrangles into 4 triangles. A node is added at the center of
# a quadrangle.
# @param theElements the faces to be splitted. This can be either mesh, sub-mesh,
# group or a list of face IDs. By default all quadrangles are split
# @ingroup l2_modif_cutquadr
    def QuadTo4Tri (self, theElements=[]):
        """Split each given quadrangle into 4 triangles, adding a node at
        the quadrangle center.

        theElements: mesh, sub-mesh, group or list of face IDs.
        NOTE: an empty value (the default) means *all* quadrangles of
        this mesh are split -- the 'not theElements' branch below.
        """
        unRegister = genObjUnRegister()
        if isinstance( theElements, Mesh ):
            theElements = theElements.mesh
        elif not theElements:
            # empty list / None -> operate on the whole mesh
            theElements = self.mesh
        elif isinstance( theElements, list ):
            theElements = self.GetIDSource( theElements, SMESH.FACE )
            # temporary ID source, released when this call returns
            unRegister.set( theElements )
        return self.editor.QuadTo4Tri( theElements )
## Splits quadrangles into triangles.
# @param IDsOfElements the faces to be splitted
# @param Diag13 is used to choose a diagonal for splitting.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_cutquadr
def SplitQuad (self, IDsOfElements, Diag13):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
return self.editor.SplitQuad(IDsOfElements, Diag13)
## Splits quadrangles into triangles.
# @param theObject the object from which the list of elements is taken,
# this is mesh, submesh or group
# @param Diag13 is used to choose a diagonal for splitting.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_cutquadr
def SplitQuadObject (self, theObject, Diag13):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
return self.editor.SplitQuadObject(theObject, Diag13)
## Finds a better splitting of the given quadrangle.
# @param IDOfQuad the ID of the quadrangle to be splitted.
# @param theCriterion is a numerical functor, in terms of enum SMESH.FunctorType, used to
# choose a diagonal for splitting.
# @return 1 if 1-3 diagonal is better, 2 if 2-4
# diagonal is better, 0 if error occurs.
# @ingroup l2_modif_cutquadr
    def BestSplit (self, IDOfQuad, theCriterion):
        """Find the better splitting diagonal of a quadrangle.

        IDOfQuad: ID of the quadrangle.
        theCriterion: numerical functor (SMESH.FunctorType) used to rank
        the two candidate diagonals.
        Returns 1 if diagonal 1-3 is better, 2 if 2-4 is better, 0 on error.
        """
        return self.editor.BestSplit(IDOfQuad, self.smeshpyD.GetFunctor(theCriterion))
## Splits volumic elements into tetrahedrons
# @param elems either a list of elements or a mesh or a group or a submesh or a filter
# @param method flags passing splitting method:
# smesh.Hex_5Tet, smesh.Hex_6Tet, smesh.Hex_24Tet.
# smesh.Hex_5Tet - to split the hexahedron into 5 tetrahedrons, etc.
# @ingroup l2_modif_cutquadr
    def SplitVolumesIntoTetra(self, elems, method=smeshBuilder.Hex_5Tet ):
        """Split volumic elements into tetrahedrons.

        elems: list of element IDs, or a mesh / group / sub-mesh / filter.
        method: splitting method flag (smesh.Hex_5Tet, smesh.Hex_6Tet,
        smesh.Hex_24Tet); e.g. Hex_5Tet splits a hexahedron into 5 tetras.
        """
        # unRegister must stay alive until the server call below completes,
        # so the temporary ID source is not destroyed prematurely
        unRegister = genObjUnRegister()
        if isinstance( elems, Mesh ):
            elems = elems.GetMesh()
        if ( isinstance( elems, list )):
            elems = self.editor.MakeIDSource(elems, SMESH.VOLUME)
            unRegister.set( elems )
        self.editor.SplitVolumesIntoTetra(elems, method)
## Splits hexahedra into prisms
# @param elems either a list of elements or a mesh or a group or a submesh or a filter
# @param startHexPoint a point used to find a hexahedron for which @a facetNormal
# gives a normal vector defining facets to split into triangles.
# @a startHexPoint can be either a triple of coordinates or a vertex.
# @param facetNormal a normal to a facet to split into triangles of a
# hexahedron found by @a startHexPoint.
# @a facetNormal can be either a triple of coordinates or an edge.
# @param method flags passing splitting method: smesh.Hex_2Prisms, smesh.Hex_4Prisms.
# smesh.Hex_2Prisms - to split the hexahedron into 2 prisms, etc.
# @param allDomains if @c False, only hexahedra adjacent to one closest
# to @a startHexPoint are split, else @a startHexPoint
# is used to find the facet to split in all domains present in @a elems.
# @ingroup l2_modif_cutquadr
    def SplitHexahedraIntoPrisms(self, elems, startHexPoint, facetNormal,
                                 method=smeshBuilder.Hex_2Prisms, allDomains=False ):
        """Split hexahedra into prisms.

        elems: list of element IDs, or a mesh / group / sub-mesh / filter.
        startHexPoint: point (coordinate triple or GEOM vertex) used to
        find the hexahedron whose facets are analysed.
        facetNormal: normal (coordinate triple or GEOM edge) of the facet
        to split into triangles of the hexahedron found by startHexPoint.
        method: smesh.Hex_2Prisms or smesh.Hex_4Prisms.
        allDomains: if False, only hexahedra adjacent to the one closest
        to startHexPoint are split; otherwise the facet is searched in
        all domains present in elems.
        """
        # IDSource: normalize elems to an SMESH_IDSource
        unRegister = genObjUnRegister()
        if isinstance( elems, Mesh ):
            elems = elems.GetMesh()
        if ( isinstance( elems, list )):
            elems = self.editor.MakeIDSource(elems, SMESH.VOLUME)
            # temporary ID source, released when this call returns
            unRegister.set( elems )
            pass
        # axis: normalize the point and the normal to SMESH structs
        if isinstance( startHexPoint, geomBuilder.GEOM._objref_GEOM_Object):
            startHexPoint = self.smeshpyD.GetPointStruct( startHexPoint )
        elif isinstance( startHexPoint, list ):
            startHexPoint = SMESH.PointStruct( startHexPoint[0],
                                               startHexPoint[1],
                                               startHexPoint[2])
        if isinstance( facetNormal, geomBuilder.GEOM._objref_GEOM_Object):
            facetNormal = self.smeshpyD.GetDirStruct( facetNormal )
        elif isinstance( facetNormal, list ):
            facetNormal = self.smeshpyD.MakeDirStruct( facetNormal[0],
                                                       facetNormal[1],
                                                       facetNormal[2])
        # record notebook-variable names carried by both structs
        self.mesh.SetParameters( startHexPoint.parameters + facetNormal.PS.parameters )
        self.editor.SplitHexahedraIntoPrisms(elems, startHexPoint, facetNormal, method, allDomains)
## Splits quadrangle faces near triangular facets of volumes
#
# @ingroup l1_auxiliary
    def SplitQuadsNearTriangularFacets(self):
        """Split quadrangle faces lying against triangular facets of volumes.

        For each quadrangle face, the volumes incident to its first node
        are inspected; if a tetra/pyramid/penta volume shares exactly 3 of
        the quadrangle's corners, the quadrangle is split along the
        diagonal that matches that triangular facet.
        """
        faces_array = self.GetElementsByType(SMESH.FACE)
        for face_id in faces_array:
            if self.GetElemNbNodes(face_id) == 4: # quadrangle
                quad_nodes = self.mesh.GetElemNodes(face_id)
                # '[k -1]' converts the 1-based corner numbering used in
                # the comments below to 0-based list indices
                node1_elems = self.GetNodeInverseElements(quad_nodes[1 -1])
                isVolumeFound = False
                for node1_elem in node1_elems:
                    if not isVolumeFound:
                        if self.GetElementType(node1_elem, True) == SMESH.VOLUME:
                            nb_nodes = self.GetElemNbNodes(node1_elem)
                            if 3 < nb_nodes and nb_nodes < 7: # tetra or penta, or prism
                                volume_elem = node1_elem
                                volume_nodes = self.mesh.GetElemNodes(volume_elem)
                                # determine which 3 corners of the quad the
                                # volume shares, and pick the matching diagonal
                                if volume_nodes.count(quad_nodes[2 -1]) > 0: # 1,2
                                    if volume_nodes.count(quad_nodes[4 -1]) > 0: # 1,2,4
                                        isVolumeFound = True
                                        if volume_nodes.count(quad_nodes[3 -1]) == 0: # 1,2,4 & !3
                                            self.SplitQuad([face_id], False) # diagonal 2-4
                                    elif volume_nodes.count(quad_nodes[3 -1]) > 0: # 1,2,3 & !4
                                        isVolumeFound = True
                                        self.SplitQuad([face_id], True) # diagonal 1-3
                                elif volume_nodes.count(quad_nodes[4 -1]) > 0: # 1,4 & !2
                                    if volume_nodes.count(quad_nodes[3 -1]) > 0: # 1,4,3 & !2
                                        isVolumeFound = True
                                        self.SplitQuad([face_id], True) # diagonal 1-3
## @brief Splits hexahedrons into tetrahedrons.
#
# This operation uses pattern mapping functionality for splitting.
# @param theObject the object from which the list of hexahedrons is taken; this is mesh, submesh or group.
# @param theNode000,theNode001 within the range [0,7]; gives the orientation of the
# pattern relatively each hexahedron: the (0,0,0) key-point of the pattern
# will be mapped into <VAR>theNode000</VAR>-th node of each volume, the (0,0,1)
# key-point will be mapped into <VAR>theNode001</VAR>-th node of each volume.
# The (0,0,0) key-point of the used pattern corresponds to a non-split corner.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l1_auxiliary
    def SplitHexaToTetras (self, theObject, theNode000, theNode001):
        """Split hexahedrons of theObject into 6 tetrahedrons each, using
        pattern mapping.

        theObject: mesh, sub-mesh or group supplying the hexahedrons.
        theNode000, theNode001: indices in [0,7] orienting the pattern in
        each hexahedron -- the (0,0,0) and (0,0,1) key-points of the
        pattern are mapped onto these nodes; (0,0,0) is a non-split corner.
        Returns True on success, False otherwise.
        """
        # Pattern:     5.---------.6
        #              /|#*      /|
        #             / | #*    / |
        #            /  |  # * /  |
        #           /   |   # /*  |
        # (0,0,1) 4.---------.7 * |
        #          |#*  |1   | # *|
        #          | # *.----|---#.2
        #          |  #/ *   |   /
        #          |  /#  *  |  /
        #          | /   # * | /
        #          |/      #*|/
        # (0,0,0) 0.---------.3
        pattern_tetra = "!!! Nb of points: \n 8 \n\
        !!! Points: \n\
        0 0 0 !- 0 \n\
        0 1 0 !- 1 \n\
        1 1 0 !- 2 \n\
        1 0 0 !- 3 \n\
        0 0 1 !- 4 \n\
        0 1 1 !- 5 \n\
        1 1 1 !- 6 \n\
        1 0 1 !- 7 \n\
        !!! Indices of points of 6 tetras: \n\
        0 3 4 1 \n\
        7 4 3 1 \n\
        4 7 5 1 \n\
        6 2 5 7 \n\
        1 5 2 7 \n\
        2 3 1 7 \n"
        pattern = self.smeshpyD.GetPattern()
        isDone = pattern.LoadFromFile(pattern_tetra)
        if not isDone:
            print 'Pattern.LoadFromFile :', pattern.GetErrorCode()
            return isDone
        pattern.ApplyToHexahedrons(self.mesh, theObject.GetIDs(), theNode000, theNode001)
        isDone = pattern.MakeMesh(self.mesh, False, False)
        if not isDone: print 'Pattern.MakeMesh :', pattern.GetErrorCode()
        # split quadrangle faces near triangular facets of volumes
        self.SplitQuadsNearTriangularFacets()
        return isDone
## @brief Split hexahedrons into prisms.
#
# Uses the pattern mapping functionality for splitting.
# @param theObject the object (mesh, submesh or group) from where the list of hexahedrons is taken;
# @param theNode000,theNode001 (within the range [0,7]) gives the orientation of the
# pattern relatively each hexahedron: keypoint (0,0,0) of the pattern
# will be mapped into the <VAR>theNode000</VAR>-th node of each volume, keypoint (0,0,1)
# will be mapped into the <VAR>theNode001</VAR>-th node of each volume.
# Edge (0,0,0)-(0,0,1) of used pattern connects two not split corners.
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l1_auxiliary
    def SplitHexaToPrisms (self, theObject, theNode000, theNode001):
        """Split hexahedrons of theObject into 2 prisms each, using
        pattern mapping.

        theObject: mesh, sub-mesh or group supplying the hexahedrons.
        theNode000, theNode001: indices in [0,7] orienting the pattern in
        each hexahedron -- the (0,0,0) and (0,0,1) key-points of the
        pattern are mapped onto these nodes; edge (0,0,0)-(0,0,1) joins
        two non-split corners.
        Returns True on success, False otherwise.
        """
        # Pattern:     5.---------.6
        #              /|#       /|
        #             / | #     / |
        #            /  |  #   /  |
        #           /   |   # /   |
        # (0,0,1) 4.---------.7   |
        #          |    |    |    |
        #          |   1.----|----.2
        #          |   / *   |   /
        #          |  /   *  |  /
        #          | /     * | /
        #          |/       *|/
        # (0,0,0) 0.---------.3
        pattern_prism = "!!! Nb of points: \n 8 \n\
        !!! Points: \n\
        0 0 0 !- 0 \n\
        0 1 0 !- 1 \n\
        1 1 0 !- 2 \n\
        1 0 0 !- 3 \n\
        0 0 1 !- 4 \n\
        0 1 1 !- 5 \n\
        1 1 1 !- 6 \n\
        1 0 1 !- 7 \n\
        !!! Indices of points of 2 prisms: \n\
        0 1 3 4 5 7 \n\
        2 3 1 6 7 5 \n"
        pattern = self.smeshpyD.GetPattern()
        isDone = pattern.LoadFromFile(pattern_prism)
        if not isDone:
            print 'Pattern.LoadFromFile :', pattern.GetErrorCode()
            return isDone
        pattern.ApplyToHexahedrons(self.mesh, theObject.GetIDs(), theNode000, theNode001)
        isDone = pattern.MakeMesh(self.mesh, False, False)
        if not isDone: print 'Pattern.MakeMesh :', pattern.GetErrorCode()
        # split quadrangle faces near triangular facets of volumes
        self.SplitQuadsNearTriangularFacets()
        return isDone
## Smoothes elements
# @param IDsOfElements the list if ids of elements to smooth
# @param IDsOfFixedNodes the list of ids of fixed nodes.
# Note that nodes built on edges and boundary nodes are always fixed.
# @param MaxNbOfIterations the maximum number of iterations
# @param MaxAspectRatio varies in range [1.0, inf]
# @param Method is either Laplacian (smesh.LAPLACIAN_SMOOTH)
# or Centroidal (smesh.CENTROIDAL_SMOOTH)
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_smooth
    def Smooth(self, IDsOfElements, IDsOfFixedNodes,
               MaxNbOfIterations, MaxAspectRatio, Method):
        """Smooth the given elements.

        IDsOfElements: elements to smooth; an empty list means all elements.
        IDsOfFixedNodes: node IDs to keep fixed (edge/boundary nodes are
        always fixed anyway).
        MaxNbOfIterations: iteration limit (may be a notebook variable).
        MaxAspectRatio: target aspect ratio in [1.0, inf).
        Method: smesh.LAPLACIAN_SMOOTH or smesh.CENTROIDAL_SMOOTH.
        Returns True on success, False otherwise.
        """
        if IDsOfElements == []:
            IDsOfElements = self.GetElementsId()
        MaxNbOfIterations,MaxAspectRatio,Parameters,hasVars = ParseParameters(MaxNbOfIterations,MaxAspectRatio)
        self.mesh.SetParameters(Parameters)
        return self.editor.Smooth(IDsOfElements, IDsOfFixedNodes,
                                  MaxNbOfIterations, MaxAspectRatio, Method)
## Smoothes elements which belong to the given object
# @param theObject the object to smooth
# @param IDsOfFixedNodes the list of ids of fixed nodes.
# Note that nodes built on edges and boundary nodes are always fixed.
# @param MaxNbOfIterations the maximum number of iterations
# @param MaxAspectRatio varies in range [1.0, inf]
# @param Method is either Laplacian (smesh.LAPLACIAN_SMOOTH)
# or Centroidal (smesh.CENTROIDAL_SMOOTH)
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_smooth
def SmoothObject(self, theObject, IDsOfFixedNodes,
MaxNbOfIterations, MaxAspectRatio, Method):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
return self.editor.SmoothObject(theObject, IDsOfFixedNodes,
MaxNbOfIterations, MaxAspectRatio, Method)
## Parametrically smoothes the given elements
# @param IDsOfElements the list if ids of elements to smooth
# @param IDsOfFixedNodes the list of ids of fixed nodes.
# Note that nodes built on edges and boundary nodes are always fixed.
# @param MaxNbOfIterations the maximum number of iterations
# @param MaxAspectRatio varies in range [1.0, inf]
# @param Method is either Laplacian (smesh.LAPLACIAN_SMOOTH)
# or Centroidal (smesh.CENTROIDAL_SMOOTH)
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_smooth
    def SmoothParametric(self, IDsOfElements, IDsOfFixedNodes,
                         MaxNbOfIterations, MaxAspectRatio, Method):
        """Parametrically smooth the given elements.

        IDsOfElements: elements to smooth; an empty list means all elements.
        IDsOfFixedNodes: node IDs to keep fixed (edge/boundary nodes are
        always fixed anyway).
        MaxNbOfIterations: iteration limit (may be a notebook variable).
        MaxAspectRatio: target aspect ratio in [1.0, inf).
        Method: smesh.LAPLACIAN_SMOOTH or smesh.CENTROIDAL_SMOOTH.
        Returns True on success, False otherwise.
        """
        if IDsOfElements == []:
            IDsOfElements = self.GetElementsId()
        MaxNbOfIterations,MaxAspectRatio,Parameters,hasVars = ParseParameters(MaxNbOfIterations,MaxAspectRatio)
        self.mesh.SetParameters(Parameters)
        return self.editor.SmoothParametric(IDsOfElements, IDsOfFixedNodes,
                                            MaxNbOfIterations, MaxAspectRatio, Method)
## Parametrically smoothes the elements which belong to the given object
# @param theObject the object to smooth
# @param IDsOfFixedNodes the list of ids of fixed nodes.
# Note that nodes built on edges and boundary nodes are always fixed.
# @param MaxNbOfIterations the maximum number of iterations
# @param MaxAspectRatio varies in range [1.0, inf]
# @param Method is either Laplacian (smesh.LAPLACIAN_SMOOTH)
# or Centroidal (smesh.CENTROIDAL_SMOOTH)
# @return TRUE in case of success, FALSE otherwise.
# @ingroup l2_modif_smooth
def SmoothParametricObject(self, theObject, IDsOfFixedNodes,
MaxNbOfIterations, MaxAspectRatio, Method):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
return self.editor.SmoothParametricObject(theObject, IDsOfFixedNodes,
MaxNbOfIterations, MaxAspectRatio, Method)
## Converts the mesh to quadratic or bi-quadratic, deletes old elements, replacing
# them with quadratic with the same id.
# @param theForce3d new node creation method:
# 0 - the medium node lies at the geometrical entity from which the mesh element is built
# 1 - the medium node lies at the middle of the line segments connecting start and end node of a mesh element
# @param theSubMesh a group or a sub-mesh to convert; WARNING: in this case the mesh can become not conformal
# @param theToBiQuad If True, converts the mesh to bi-quadratic
# @ingroup l2_modif_tofromqu
    def ConvertToQuadratic(self, theForce3d, theSubMesh=None, theToBiQuad=False):
        """Convert the mesh to quadratic (or bi-quadratic), replacing old
        elements with quadratic ones keeping the same IDs.

        theForce3d: medium-node placement -- 0: on the geometry the
        element is built on; 1: at the middle of the segment joining the
        element's end nodes.
        theSubMesh: group or sub-mesh to convert; WARNING: converting only
        a part can make the mesh non-conformal.
        theToBiQuad: if True, convert to bi-quadratic instead.
        Prints the server error comment, if any.
        """
        if isinstance( theSubMesh, Mesh ):
            theSubMesh = theSubMesh.mesh
        if theToBiQuad:
            self.editor.ConvertToBiQuadratic(theForce3d,theSubMesh)
        else:
            if theSubMesh:
                self.editor.ConvertToQuadraticObject(theForce3d,theSubMesh)
            else:
                self.editor.ConvertToQuadratic(theForce3d)
        error = self.editor.GetLastError()
        if error and error.comment:
            print error.comment
## Converts the mesh from quadratic to ordinary,
# deletes old quadratic elements, \n replacing
# them with ordinary mesh elements with the same id.
# @param theSubMesh a group or a sub-mesh to convert; WARNING: in this case the mesh can become not conformal
# @ingroup l2_modif_tofromqu
    def ConvertFromQuadratic(self, theSubMesh=None):
        """Convert the mesh from quadratic to ordinary, replacing old
        quadratic elements with ordinary ones keeping the same IDs.

        theSubMesh: group or sub-mesh to convert; WARNING: converting only
        a part can make the mesh non-conformal.
        NOTE(review): only the whole-mesh branch returns the server result;
        the sub-mesh branch returns None -- confirm whether intentional.
        """
        if theSubMesh:
            self.editor.ConvertFromQuadraticObject(theSubMesh)
        else:
            return self.editor.ConvertFromQuadratic()
## Creates 2D mesh as skin on boundary faces of a 3D mesh
# @return TRUE if operation has been completed successfully, FALSE otherwise
# @ingroup l2_modif_edit
def Make2DMeshFrom3D(self):
return self.editor. Make2DMeshFrom3D()
## Creates missing boundary elements
# @param elements - elements whose boundary is to be checked:
# mesh, group, sub-mesh or list of elements
# if elements is mesh, it must be the mesh whose MakeBoundaryMesh() is called
# @param dimension - defines type of boundary elements to create:
# SMESH.BND_2DFROM3D, SMESH.BND_1DFROM3D, SMESH.BND_1DFROM2D
# SMESH.BND_1DFROM3D creates mesh edges on all borders of free facets of 3D cells
# @param groupName - a name of group to store created boundary elements in,
# "" means not to create the group
# @param meshName - a name of new mesh to store created boundary elements in,
# "" means not to create the new mesh
# @param toCopyElements - if true, the checked elements will be copied into
# the new mesh else only boundary elements will be copied into the new mesh
# @param toCopyExistingBondary - if true, not only new but also pre-existing
# boundary elements will be copied into the new mesh
# @return tuple (mesh, group) where boundary elements were added to
# @ingroup l2_modif_edit
    def MakeBoundaryMesh(self, elements, dimension=SMESH.BND_2DFROM3D, groupName="", meshName="",
                         toCopyElements=False, toCopyExistingBondary=False):
        """Create missing boundary elements.

        elements: elements whose boundary is checked -- mesh, group,
        sub-mesh or list of element IDs; if a mesh, it must be this mesh.
        dimension: type of boundary elements to create (SMESH.BND_2DFROM3D,
        SMESH.BND_1DFROM3D or SMESH.BND_1DFROM2D).
        groupName: name of a group to store created elements in ("" = none).
        meshName: name of a new mesh to store them in ("" = none).
        toCopyElements: if True, copy the checked elements into the new mesh.
        toCopyExistingBondary: if True, also copy pre-existing boundary
        elements into the new mesh.
        Returns the tuple (mesh, group) that received the boundary elements.
        """
        unRegister = genObjUnRegister()
        if isinstance( elements, Mesh ):
            elements = elements.GetMesh()
        if ( isinstance( elements, list )):
            elemType = SMESH.ALL
            # deduce the element type from the first ID, if any
            if elements: elemType = self.GetElementType( elements[0], iselem=True)
            elements = self.editor.MakeIDSource(elements, elemType)
            # temporary ID source, released when this call returns
            unRegister.set( elements )
        mesh, group = self.editor.MakeBoundaryMesh(elements,dimension,groupName,meshName,
                                                   toCopyElements,toCopyExistingBondary)
        # wrap the returned CORBA mesh into a smeshBuilder Mesh object
        if mesh: mesh = self.smeshpyD.Mesh(mesh)
        return mesh, group
##
# @brief Creates missing boundary elements around either the whole mesh or
# groups of elements
# @param dimension - defines type of boundary elements to create
# @param groupName - a name of group to store all boundary elements in,
# "" means not to create the group
# @param meshName - a name of a new mesh, which is a copy of the initial
# mesh + created boundary elements; "" means not to create the new mesh
# @param toCopyAll - if true, the whole initial mesh will be copied into
# the new mesh else only boundary elements will be copied into the new mesh
# @param groups - groups of elements to make boundary around
# @retval tuple( long, mesh, groups )
# long - number of added boundary elements
# mesh - the mesh where elements were added to
# group - the group of boundary elements or None
#
def MakeBoundaryElements(self, dimension=SMESH.BND_2DFROM3D, groupName="", meshName="",
toCopyAll=False, groups=[]):
nb, mesh, group = self.editor.MakeBoundaryElements(dimension,groupName,meshName,
toCopyAll,groups)
if mesh: mesh = self.smeshpyD.Mesh(mesh)
return nb, mesh, group
## Renumber mesh nodes (Obsolete, does nothing)
# @ingroup l2_modif_renumber
    def RenumberNodes(self):
        """Renumber mesh nodes (obsolete: the server call does nothing)."""
        self.editor.RenumberNodes()
## Renumber mesh elements (Obsole, does nothing)
# @ingroup l2_modif_renumber
    def RenumberElements(self):
        """Renumber mesh elements (obsolete: the server call does nothing)."""
        self.editor.RenumberElements()
## Generates new elements by rotation of the elements around the axis
# @param IDsOfElements the list of ids of elements to sweep
# @param Axis the axis of rotation, AxisStruct or line(geom object)
# @param AngleInRadians the angle of Rotation (in radians) or a name of variable which defines angle in degrees
# @param NbOfSteps the number of steps
# @param Tolerance tolerance
# @param MakeGroups forces the generation of new groups from existing ones
# @param TotalAngle gives meaning of AngleInRadians: if True then it is an angular size
# of all steps, else - size of each step
# @return the list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
    def RotationSweep(self, IDsOfElements, Axis, AngleInRadians, NbOfSteps, Tolerance,
                      MakeGroups=False, TotalAngle=False):
        """Generate new elements by rotating the given elements around an axis.

        IDsOfElements: elements to sweep; an empty list means all elements.
        Axis: axis of rotation -- AxisStruct or a GEOM line.
        AngleInRadians: rotation angle in radians, or a variable name
        holding the angle in degrees.
        NbOfSteps: number of rotation steps.
        Tolerance: tolerance for node merging.
        MakeGroups: if True, create new groups from existing ones.
        TotalAngle: if True, AngleInRadians is the angular size of all
        steps together; otherwise it is the size of each step.
        Returns the list of created groups if MakeGroups, else [].
        """
        if IDsOfElements == []:
            IDsOfElements = self.GetElementsId()
        if ( isinstance( Axis, geomBuilder.GEOM._objref_GEOM_Object)):
            Axis = self.smeshpyD.GetAxisStruct(Axis)
        AngleInRadians,AngleParameters,hasVars = ParseAngles(AngleInRadians)
        NbOfSteps,Tolerance,Parameters,hasVars = ParseParameters(NbOfSteps,Tolerance)
        # concatenate variable names of all arguments in a fixed order
        Parameters = Axis.parameters + var_separator + AngleParameters + var_separator + Parameters
        self.mesh.SetParameters(Parameters)
        # with TotalAngle the given angle covers the whole sweep
        if TotalAngle and NbOfSteps:
            AngleInRadians /= NbOfSteps
        if MakeGroups:
            return self.editor.RotationSweepMakeGroups(IDsOfElements, Axis,
                                                       AngleInRadians, NbOfSteps, Tolerance)
        self.editor.RotationSweep(IDsOfElements, Axis, AngleInRadians, NbOfSteps, Tolerance)
        return []
## Generates new elements by rotation of the elements of object around the axis
# @param theObject object which elements should be sweeped.
# It can be a mesh, a sub mesh or a group.
# @param Axis the axis of rotation, AxisStruct or line(geom object)
# @param AngleInRadians the angle of Rotation
# @param NbOfSteps number of steps
# @param Tolerance tolerance
# @param MakeGroups forces the generation of new groups from existing ones
# @param TotalAngle gives meaning of AngleInRadians: if True then it is an angular size
# of all steps, else - size of each step
# @return the list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def RotationSweepObject(self, theObject, Axis, AngleInRadians, NbOfSteps, Tolerance,
MakeGroups=False, TotalAngle=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,AngleParameters,hasVars = ParseAngles(AngleInRadians)
NbOfSteps,Tolerance,Parameters,hasVars = ParseParameters(NbOfSteps,Tolerance)
Parameters = Axis.parameters + var_separator + AngleParameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if TotalAngle and NbOfSteps:
AngleInRadians /= NbOfSteps
if MakeGroups:
return self.editor.RotationSweepObjectMakeGroups(theObject, Axis, AngleInRadians,
NbOfSteps, Tolerance)
self.editor.RotationSweepObject(theObject, Axis, AngleInRadians, NbOfSteps, Tolerance)
return []
## Generates new elements by rotation of the elements of object around the axis
# @param theObject object which elements should be sweeped.
# It can be a mesh, a sub mesh or a group.
# @param Axis the axis of rotation, AxisStruct or line(geom object)
# @param AngleInRadians the angle of Rotation
# @param NbOfSteps number of steps
# @param Tolerance tolerance
# @param MakeGroups forces the generation of new groups from existing ones
# @param TotalAngle gives meaning of AngleInRadians: if True then it is an angular size
# of all steps, else - size of each step
# @return the list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def RotationSweepObject1D(self, theObject, Axis, AngleInRadians, NbOfSteps, Tolerance,
MakeGroups=False, TotalAngle=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,AngleParameters,hasVars = ParseAngles(AngleInRadians)
NbOfSteps,Tolerance,Parameters,hasVars = ParseParameters(NbOfSteps,Tolerance)
Parameters = Axis.parameters + var_separator + AngleParameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if TotalAngle and NbOfSteps:
AngleInRadians /= NbOfSteps
if MakeGroups:
return self.editor.RotationSweepObject1DMakeGroups(theObject, Axis, AngleInRadians,
NbOfSteps, Tolerance)
self.editor.RotationSweepObject1D(theObject, Axis, AngleInRadians, NbOfSteps, Tolerance)
return []
## Generates new elements by rotation of the elements of object around the axis
# @param theObject object which elements should be sweeped.
# It can be a mesh, a sub mesh or a group.
# @param Axis the axis of rotation, AxisStruct or line(geom object)
# @param AngleInRadians the angle of Rotation
# @param NbOfSteps number of steps
# @param Tolerance tolerance
# @param MakeGroups forces the generation of new groups from existing ones
# @param TotalAngle gives meaning of AngleInRadians: if True then it is an angular size
# of all steps, else - size of each step
# @return the list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def RotationSweepObject2D(self, theObject, Axis, AngleInRadians, NbOfSteps, Tolerance,
MakeGroups=False, TotalAngle=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,AngleParameters,hasVars = ParseAngles(AngleInRadians)
NbOfSteps,Tolerance,Parameters,hasVars = ParseParameters(NbOfSteps,Tolerance)
Parameters = Axis.parameters + var_separator + AngleParameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if TotalAngle and NbOfSteps:
AngleInRadians /= NbOfSteps
if MakeGroups:
return self.editor.RotationSweepObject2DMakeGroups(theObject, Axis, AngleInRadians,
NbOfSteps, Tolerance)
self.editor.RotationSweepObject2D(theObject, Axis, AngleInRadians, NbOfSteps, Tolerance)
return []
## Generates new elements by extrusion of the elements with given ids
# @param IDsOfElements the list of elements ids for extrusion
# @param StepVector vector or DirStruct or 3 vector components, defining
# the direction and value of extrusion for one step (the total extrusion
# length will be NbOfSteps * ||StepVector||)
# @param NbOfSteps the number of steps
# @param MakeGroups forces the generation of new groups from existing ones
# @param IsNodes is True if elements with given ids are nodes
# @return the list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def ExtrusionSweep(self, IDsOfElements, StepVector, NbOfSteps, MakeGroups=False, IsNodes = False):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if isinstance( StepVector, geomBuilder.GEOM._objref_GEOM_Object):
StepVector = self.smeshpyD.GetDirStruct(StepVector)
if isinstance( StepVector, list ):
StepVector = self.smeshpyD.MakeDirStruct(*StepVector)
NbOfSteps,Parameters,hasVars = ParseParameters(NbOfSteps)
Parameters = StepVector.PS.parameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if MakeGroups:
if(IsNodes):
return self.editor.ExtrusionSweepMakeGroups0D(IDsOfElements, StepVector, NbOfSteps)
else:
return self.editor.ExtrusionSweepMakeGroups(IDsOfElements, StepVector, NbOfSteps)
if(IsNodes):
self.editor.ExtrusionSweep0D(IDsOfElements, StepVector, NbOfSteps)
else:
self.editor.ExtrusionSweep(IDsOfElements, StepVector, NbOfSteps)
return []
## Generates new elements by extrusion of the elements with given ids
# @param IDsOfElements is ids of elements
# @param StepVector vector or DirStruct or 3 vector components, defining
# the direction and value of extrusion for one step (the total extrusion
# length will be NbOfSteps * ||StepVector||)
# @param NbOfSteps the number of steps
# @param ExtrFlags sets flags for extrusion
# @param SewTolerance uses for comparing locations of nodes if flag
# EXTRUSION_FLAG_SEW is set
# @param MakeGroups forces the generation of new groups from existing ones
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def AdvancedExtrusion(self, IDsOfElements, StepVector, NbOfSteps,
ExtrFlags, SewTolerance, MakeGroups=False):
if ( isinstance( StepVector, geomBuilder.GEOM._objref_GEOM_Object)):
StepVector = self.smeshpyD.GetDirStruct(StepVector)
if isinstance( StepVector, list ):
StepVector = self.smeshpyD.MakeDirStruct(*StepVector)
if MakeGroups:
return self.editor.AdvancedExtrusionMakeGroups(IDsOfElements, StepVector, NbOfSteps,
ExtrFlags, SewTolerance)
self.editor.AdvancedExtrusion(IDsOfElements, StepVector, NbOfSteps,
ExtrFlags, SewTolerance)
return []
## Generates new elements by extrusion of the elements which belong to the object
# @param theObject the object which elements should be processed.
# It can be a mesh, a sub mesh or a group.
# @param StepVector vector or DirStruct or 3 vector components, defining
# the direction and value of extrusion for one step (the total extrusion
# length will be NbOfSteps * ||StepVector||)
# @param NbOfSteps the number of steps
# @param MakeGroups forces the generation of new groups from existing ones
# @param IsNodes is True if elements which belong to the object are nodes
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def ExtrusionSweepObject(self, theObject, StepVector, NbOfSteps, MakeGroups=False, IsNodes=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( StepVector, geomBuilder.GEOM._objref_GEOM_Object)):
StepVector = self.smeshpyD.GetDirStruct(StepVector)
if isinstance( StepVector, list ):
StepVector = self.smeshpyD.MakeDirStruct(*StepVector)
NbOfSteps,Parameters,hasVars = ParseParameters(NbOfSteps)
Parameters = StepVector.PS.parameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if MakeGroups:
if(IsNodes):
return self.editor.ExtrusionSweepObject0DMakeGroups(theObject, StepVector, NbOfSteps)
else:
return self.editor.ExtrusionSweepObjectMakeGroups(theObject, StepVector, NbOfSteps)
if(IsNodes):
self.editor.ExtrusionSweepObject0D(theObject, StepVector, NbOfSteps)
else:
self.editor.ExtrusionSweepObject(theObject, StepVector, NbOfSteps)
return []
## Generates new elements by extrusion of the elements which belong to the object
# @param theObject object which elements should be processed.
# It can be a mesh, a sub mesh or a group.
# @param StepVector vector or DirStruct or 3 vector components, defining
# the direction and value of extrusion for one step (the total extrusion
# length will be NbOfSteps * ||StepVector||)
# @param NbOfSteps the number of steps
# @param MakeGroups to generate new groups from existing ones
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def ExtrusionSweepObject1D(self, theObject, StepVector, NbOfSteps, MakeGroups=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( StepVector, geomBuilder.GEOM._objref_GEOM_Object)):
StepVector = self.smeshpyD.GetDirStruct(StepVector)
if isinstance( StepVector, list ):
StepVector = self.smeshpyD.MakeDirStruct(*StepVector)
NbOfSteps,Parameters,hasVars = ParseParameters(NbOfSteps)
Parameters = StepVector.PS.parameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if MakeGroups:
return self.editor.ExtrusionSweepObject1DMakeGroups(theObject, StepVector, NbOfSteps)
self.editor.ExtrusionSweepObject1D(theObject, StepVector, NbOfSteps)
return []
## Generates new elements by extrusion of the elements which belong to the object
# @param theObject object which elements should be processed.
# It can be a mesh, a sub mesh or a group.
# @param StepVector vector or DirStruct or 3 vector components, defining
# the direction and value of extrusion for one step (the total extrusion
# length will be NbOfSteps * ||StepVector||)
# @param NbOfSteps the number of steps
# @param MakeGroups forces the generation of new groups from existing ones
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_extrurev
def ExtrusionSweepObject2D(self, theObject, StepVector, NbOfSteps, MakeGroups=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( StepVector, geomBuilder.GEOM._objref_GEOM_Object)):
StepVector = self.smeshpyD.GetDirStruct(StepVector)
if isinstance( StepVector, list ):
StepVector = self.smeshpyD.MakeDirStruct(*StepVector)
NbOfSteps,Parameters,hasVars = ParseParameters(NbOfSteps)
Parameters = StepVector.PS.parameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if MakeGroups:
return self.editor.ExtrusionSweepObject2DMakeGroups(theObject, StepVector, NbOfSteps)
self.editor.ExtrusionSweepObject2D(theObject, StepVector, NbOfSteps)
return []
## Generates new elements by extrusion of the given elements
# The path of extrusion must be a meshed edge.
# @param Base mesh or group, or submesh, or list of ids of elements for extrusion
# @param Path - 1D mesh or 1D sub-mesh, along which proceeds the extrusion
# @param NodeStart the start node from Path. Defines the direction of extrusion
# @param HasAngles allows the shape to be rotated around the path
# to get the resulting mesh in a helical fashion
# @param Angles list of angles in radians
# @param LinearVariation forces the computation of rotation angles as linear
# variation of the given Angles along path steps
# @param HasRefPoint allows using the reference point
# @param RefPoint the point around which the elements are rotated (the mass
# center of the elements by default).
# The User can specify any point as the Reference Point.
# RefPoint can be either GEOM Vertex, [x,y,z] or SMESH.PointStruct
# @param MakeGroups forces the generation of new groups from existing ones
# @param ElemType type of elements for extrusion (if param Base is a mesh)
# @return list of created groups (SMESH_GroupBase) and SMESH::Extrusion_Error if MakeGroups=True,
# only SMESH::Extrusion_Error otherwise
# @ingroup l2_modif_extrurev
def ExtrusionAlongPathX(self, Base, Path, NodeStart,
HasAngles, Angles, LinearVariation,
HasRefPoint, RefPoint, MakeGroups, ElemType):
if isinstance( RefPoint, geomBuilder.GEOM._objref_GEOM_Object):
RefPoint = self.smeshpyD.GetPointStruct(RefPoint)
pass
elif isinstance( RefPoint, list ):
RefPoint = PointStruct(*RefPoint)
pass
Angles,AnglesParameters,hasVars = ParseAngles(Angles)
Parameters = AnglesParameters + var_separator + RefPoint.parameters
self.mesh.SetParameters(Parameters)
if (isinstance(Path, Mesh)): Path = Path.GetMesh()
if isinstance(Base, list):
IDsOfElements = []
if Base == []: IDsOfElements = self.GetElementsId()
else: IDsOfElements = Base
return self.editor.ExtrusionAlongPathX(IDsOfElements, Path, NodeStart,
HasAngles, Angles, LinearVariation,
HasRefPoint, RefPoint, MakeGroups, ElemType)
else:
if isinstance(Base, Mesh): Base = Base.GetMesh()
if isinstance(Base, SMESH._objref_SMESH_Mesh) or isinstance(Base, SMESH._objref_SMESH_Group) or isinstance(Base, SMESH._objref_SMESH_subMesh):
return self.editor.ExtrusionAlongPathObjX(Base, Path, NodeStart,
HasAngles, Angles, LinearVariation,
HasRefPoint, RefPoint, MakeGroups, ElemType)
else:
raise RuntimeError, "Invalid Base for ExtrusionAlongPathX"
## Generates new elements by extrusion of the given elements
# The path of extrusion must be a meshed edge.
# @param IDsOfElements ids of elements
# @param PathMesh mesh containing a 1D sub-mesh on the edge, along which proceeds the extrusion
# @param PathShape shape(edge) defines the sub-mesh for the path
# @param NodeStart the first or the last node on the edge. Defines the direction of extrusion
# @param HasAngles allows the shape to be rotated around the path
# to get the resulting mesh in a helical fashion
# @param Angles list of angles in radians
# @param HasRefPoint allows using the reference point
# @param RefPoint the point around which the shape is rotated (the mass center of the shape by default).
# The User can specify any point as the Reference Point.
# @param MakeGroups forces the generation of new groups from existing ones
# @param LinearVariation forces the computation of rotation angles as linear
# variation of the given Angles along path steps
# @return list of created groups (SMESH_GroupBase) and SMESH::Extrusion_Error if MakeGroups=True,
# only SMESH::Extrusion_Error otherwise
# @ingroup l2_modif_extrurev
def ExtrusionAlongPath(self, IDsOfElements, PathMesh, PathShape, NodeStart,
HasAngles, Angles, HasRefPoint, RefPoint,
MakeGroups=False, LinearVariation=False):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( RefPoint, geomBuilder.GEOM._objref_GEOM_Object)):
RefPoint = self.smeshpyD.GetPointStruct(RefPoint)
pass
if ( isinstance( PathMesh, Mesh )):
PathMesh = PathMesh.GetMesh()
Angles,AnglesParameters,hasVars = ParseAngles(Angles)
Parameters = AnglesParameters + var_separator + RefPoint.parameters
self.mesh.SetParameters(Parameters)
if HasAngles and Angles and LinearVariation:
Angles = self.editor.LinearAnglesVariation( PathMesh, PathShape, Angles )
pass
if MakeGroups:
return self.editor.ExtrusionAlongPathMakeGroups(IDsOfElements, PathMesh,
PathShape, NodeStart, HasAngles,
Angles, HasRefPoint, RefPoint)
return self.editor.ExtrusionAlongPath(IDsOfElements, PathMesh, PathShape,
NodeStart, HasAngles, Angles, HasRefPoint, RefPoint)
## Generates new elements by extrusion of the elements which belong to the object
# The path of extrusion must be a meshed edge.
# @param theObject the object which elements should be processed.
# It can be a mesh, a sub mesh or a group.
# @param PathMesh mesh containing a 1D sub-mesh on the edge, along which the extrusion proceeds
# @param PathShape shape(edge) defines the sub-mesh for the path
# @param NodeStart the first or the last node on the edge. Defines the direction of extrusion
# @param HasAngles allows the shape to be rotated around the path
# to get the resulting mesh in a helical fashion
# @param Angles list of angles
# @param HasRefPoint allows using the reference point
# @param RefPoint the point around which the shape is rotated (the mass center of the shape by default).
# The User can specify any point as the Reference Point.
# @param MakeGroups forces the generation of new groups from existing ones
# @param LinearVariation forces the computation of rotation angles as linear
# variation of the given Angles along path steps
# @return list of created groups (SMESH_GroupBase) and SMESH::Extrusion_Error if MakeGroups=True,
# only SMESH::Extrusion_Error otherwise
# @ingroup l2_modif_extrurev
def ExtrusionAlongPathObject(self, theObject, PathMesh, PathShape, NodeStart,
HasAngles, Angles, HasRefPoint, RefPoint,
MakeGroups=False, LinearVariation=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( RefPoint, geomBuilder.GEOM._objref_GEOM_Object)):
RefPoint = self.smeshpyD.GetPointStruct(RefPoint)
if ( isinstance( PathMesh, Mesh )):
PathMesh = PathMesh.GetMesh()
Angles,AnglesParameters,hasVars = ParseAngles(Angles)
Parameters = AnglesParameters + var_separator + RefPoint.parameters
self.mesh.SetParameters(Parameters)
if HasAngles and Angles and LinearVariation:
Angles = self.editor.LinearAnglesVariation( PathMesh, PathShape, Angles )
pass
if MakeGroups:
return self.editor.ExtrusionAlongPathObjectMakeGroups(theObject, PathMesh,
PathShape, NodeStart, HasAngles,
Angles, HasRefPoint, RefPoint)
return self.editor.ExtrusionAlongPathObject(theObject, PathMesh, PathShape,
NodeStart, HasAngles, Angles, HasRefPoint,
RefPoint)
## Generates new elements by extrusion of the elements which belong to the object
# The path of extrusion must be a meshed edge.
# @param theObject the object which elements should be processed.
# It can be a mesh, a sub mesh or a group.
# @param PathMesh mesh containing a 1D sub-mesh on the edge, along which the extrusion proceeds
# @param PathShape shape(edge) defines the sub-mesh for the path
# @param NodeStart the first or the last node on the edge. Defines the direction of extrusion
# @param HasAngles allows the shape to be rotated around the path
# to get the resulting mesh in a helical fashion
# @param Angles list of angles
# @param HasRefPoint allows using the reference point
# @param RefPoint the point around which the shape is rotated (the mass center of the shape by default).
# The User can specify any point as the Reference Point.
# @param MakeGroups forces the generation of new groups from existing ones
# @param LinearVariation forces the computation of rotation angles as linear
# variation of the given Angles along path steps
# @return list of created groups (SMESH_GroupBase) and SMESH::Extrusion_Error if MakeGroups=True,
# only SMESH::Extrusion_Error otherwise
# @ingroup l2_modif_extrurev
def ExtrusionAlongPathObject1D(self, theObject, PathMesh, PathShape, NodeStart,
HasAngles, Angles, HasRefPoint, RefPoint,
MakeGroups=False, LinearVariation=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( RefPoint, geomBuilder.GEOM._objref_GEOM_Object)):
RefPoint = self.smeshpyD.GetPointStruct(RefPoint)
if ( isinstance( PathMesh, Mesh )):
PathMesh = PathMesh.GetMesh()
Angles,AnglesParameters,hasVars = ParseAngles(Angles)
Parameters = AnglesParameters + var_separator + RefPoint.parameters
self.mesh.SetParameters(Parameters)
if HasAngles and Angles and LinearVariation:
Angles = self.editor.LinearAnglesVariation( PathMesh, PathShape, Angles )
pass
if MakeGroups:
return self.editor.ExtrusionAlongPathObject1DMakeGroups(theObject, PathMesh,
PathShape, NodeStart, HasAngles,
Angles, HasRefPoint, RefPoint)
return self.editor.ExtrusionAlongPathObject1D(theObject, PathMesh, PathShape,
NodeStart, HasAngles, Angles, HasRefPoint,
RefPoint)
## Generates new elements by extrusion of the elements which belong to the object
# The path of extrusion must be a meshed edge.
# @param theObject the object which elements should be processed.
# It can be a mesh, a sub mesh or a group.
# @param PathMesh mesh containing a 1D sub-mesh on the edge, along which the extrusion proceeds
# @param PathShape shape(edge) defines the sub-mesh for the path
# @param NodeStart the first or the last node on the edge. Defines the direction of extrusion
# @param HasAngles allows the shape to be rotated around the path
# to get the resulting mesh in a helical fashion
# @param Angles list of angles
# @param HasRefPoint allows using the reference point
# @param RefPoint the point around which the shape is rotated (the mass center of the shape by default).
# The User can specify any point as the Reference Point.
# @param MakeGroups forces the generation of new groups from existing ones
# @param LinearVariation forces the computation of rotation angles as linear
# variation of the given Angles along path steps
# @return list of created groups (SMESH_GroupBase) and SMESH::Extrusion_Error if MakeGroups=True,
# only SMESH::Extrusion_Error otherwise
# @ingroup l2_modif_extrurev
def ExtrusionAlongPathObject2D(self, theObject, PathMesh, PathShape, NodeStart,
HasAngles, Angles, HasRefPoint, RefPoint,
MakeGroups=False, LinearVariation=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( RefPoint, geomBuilder.GEOM._objref_GEOM_Object)):
RefPoint = self.smeshpyD.GetPointStruct(RefPoint)
if ( isinstance( PathMesh, Mesh )):
PathMesh = PathMesh.GetMesh()
Angles,AnglesParameters,hasVars = ParseAngles(Angles)
Parameters = AnglesParameters + var_separator + RefPoint.parameters
self.mesh.SetParameters(Parameters)
if HasAngles and Angles and LinearVariation:
Angles = self.editor.LinearAnglesVariation( PathMesh, PathShape, Angles )
pass
if MakeGroups:
return self.editor.ExtrusionAlongPathObject2DMakeGroups(theObject, PathMesh,
PathShape, NodeStart, HasAngles,
Angles, HasRefPoint, RefPoint)
return self.editor.ExtrusionAlongPathObject2D(theObject, PathMesh, PathShape,
NodeStart, HasAngles, Angles, HasRefPoint,
RefPoint)
## Creates a symmetrical copy of mesh elements
# @param IDsOfElements list of elements ids
# @param Mirror is AxisStruct or geom object(point, line, plane)
# @param theMirrorType is POINT, AXIS or PLANE
# If the Mirror is a geom object this parameter is unnecessary
# @param Copy allows to copy element (Copy is 1) or to replace with its mirroring (Copy is 0)
# @param MakeGroups forces the generation of new groups from existing ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_trsf
def Mirror(self, IDsOfElements, Mirror, theMirrorType=None, Copy=0, MakeGroups=False):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( Mirror, geomBuilder.GEOM._objref_GEOM_Object)):
Mirror = self.smeshpyD.GetAxisStruct(Mirror)
theMirrorType = Mirror._mirrorType
else:
self.mesh.SetParameters(Mirror.parameters)
if Copy and MakeGroups:
return self.editor.MirrorMakeGroups(IDsOfElements, Mirror, theMirrorType)
self.editor.Mirror(IDsOfElements, Mirror, theMirrorType, Copy)
return []
## Creates a new mesh by a symmetrical copy of mesh elements
# @param IDsOfElements the list of elements ids
# @param Mirror is AxisStruct or geom object (point, line, plane)
# @param theMirrorType is POINT, AXIS or PLANE
# If the Mirror is a geom object this parameter is unnecessary
# @param MakeGroups to generate new groups from existing ones
# @param NewMeshName a name of the new mesh to create
# @return instance of Mesh class
# @ingroup l2_modif_trsf
def MirrorMakeMesh(self, IDsOfElements, Mirror, theMirrorType=0, MakeGroups=0, NewMeshName=""):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( Mirror, geomBuilder.GEOM._objref_GEOM_Object)):
Mirror = self.smeshpyD.GetAxisStruct(Mirror)
theMirrorType = Mirror._mirrorType
else:
self.mesh.SetParameters(Mirror.parameters)
mesh = self.editor.MirrorMakeMesh(IDsOfElements, Mirror, theMirrorType,
MakeGroups, NewMeshName)
return Mesh(self.smeshpyD,self.geompyD,mesh)
## Creates a symmetrical copy of the object
# @param theObject mesh, submesh or group
# @param Mirror AxisStruct or geom object (point, line, plane)
# @param theMirrorType is POINT, AXIS or PLANE
# If the Mirror is a geom object this parameter is unnecessary
# @param Copy allows copying the element (Copy is 1) or replacing it with its mirror (Copy is 0)
# @param MakeGroups forces the generation of new groups from existing ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_trsf
def MirrorObject (self, theObject, Mirror, theMirrorType=None, Copy=0, MakeGroups=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( Mirror, geomBuilder.GEOM._objref_GEOM_Object)):
Mirror = self.smeshpyD.GetAxisStruct(Mirror)
theMirrorType = Mirror._mirrorType
else:
self.mesh.SetParameters(Mirror.parameters)
if Copy and MakeGroups:
return self.editor.MirrorObjectMakeGroups(theObject, Mirror, theMirrorType)
self.editor.MirrorObject(theObject, Mirror, theMirrorType, Copy)
return []
## Creates a new mesh by a symmetrical copy of the object
# @param theObject mesh, submesh or group
# @param Mirror AxisStruct or geom object (point, line, plane)
# @param theMirrorType POINT, AXIS or PLANE
# If the Mirror is a geom object this parameter is unnecessary
# @param MakeGroups forces the generation of new groups from existing ones
# @param NewMeshName the name of the new mesh to create
# @return instance of Mesh class
# @ingroup l2_modif_trsf
def MirrorObjectMakeMesh (self, theObject, Mirror, theMirrorType=0,MakeGroups=0,NewMeshName=""):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( Mirror, geomBuilder.GEOM._objref_GEOM_Object)):
Mirror = self.smeshpyD.GetAxisStruct(Mirror)
theMirrorType = Mirror._mirrorType
else:
self.mesh.SetParameters(Mirror.parameters)
mesh = self.editor.MirrorObjectMakeMesh(theObject, Mirror, theMirrorType,
MakeGroups, NewMeshName)
return Mesh( self.smeshpyD,self.geompyD,mesh )
## Translates the elements
# @param IDsOfElements list of elements ids
# @param Vector the direction of translation (DirStruct or vector or 3 vector components)
# @param Copy allows copying the translated elements
# @param MakeGroups forces the generation of new groups from existing ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_trsf
def Translate(self, IDsOfElements, Vector, Copy, MakeGroups=False):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( Vector, geomBuilder.GEOM._objref_GEOM_Object)):
Vector = self.smeshpyD.GetDirStruct(Vector)
if isinstance( Vector, list ):
Vector = self.smeshpyD.MakeDirStruct(*Vector)
self.mesh.SetParameters(Vector.PS.parameters)
if Copy and MakeGroups:
return self.editor.TranslateMakeGroups(IDsOfElements, Vector)
self.editor.Translate(IDsOfElements, Vector, Copy)
return []
## Creates a new mesh of translated elements
# @param IDsOfElements list of elements ids
# @param Vector the direction of translation (DirStruct or vector or 3 vector components)
# @param MakeGroups forces the generation of new groups from existing ones
# @param NewMeshName the name of the newly created mesh
# @return instance of Mesh class
# @ingroup l2_modif_trsf
def TranslateMakeMesh(self, IDsOfElements, Vector, MakeGroups=False, NewMeshName=""):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( Vector, geomBuilder.GEOM._objref_GEOM_Object)):
Vector = self.smeshpyD.GetDirStruct(Vector)
if isinstance( Vector, list ):
Vector = self.smeshpyD.MakeDirStruct(*Vector)
self.mesh.SetParameters(Vector.PS.parameters)
mesh = self.editor.TranslateMakeMesh(IDsOfElements, Vector, MakeGroups, NewMeshName)
return Mesh ( self.smeshpyD, self.geompyD, mesh )
## Translates the object
# @param theObject the object to translate (mesh, submesh, or group)
# @param Vector direction of translation (DirStruct or geom vector or 3 vector components)
# @param Copy allows copying the translated elements
# @param MakeGroups forces the generation of new groups from existing ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_trsf
def TranslateObject(self, theObject, Vector, Copy, MakeGroups=False):
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( Vector, geomBuilder.GEOM._objref_GEOM_Object)):
Vector = self.smeshpyD.GetDirStruct(Vector)
if isinstance( Vector, list ):
Vector = self.smeshpyD.MakeDirStruct(*Vector)
self.mesh.SetParameters(Vector.PS.parameters)
if Copy and MakeGroups:
return self.editor.TranslateObjectMakeGroups(theObject, Vector)
self.editor.TranslateObject(theObject, Vector, Copy)
return []
## Creates a new mesh from the translated object
# @param theObject the object to translate (mesh, submesh, or group)
# @param Vector the direction of translation (DirStruct or geom vector or 3 vector components)
# @param MakeGroups forces the generation of new groups from existing ones
# @param NewMeshName the name of the newly created mesh
# @return instance of Mesh class
# @ingroup l2_modif_trsf
def TranslateObjectMakeMesh(self, theObject, Vector, MakeGroups=False, NewMeshName=""):
if isinstance( theObject, Mesh ):
theObject = theObject.GetMesh()
if isinstance( Vector, geomBuilder.GEOM._objref_GEOM_Object ):
Vector = self.smeshpyD.GetDirStruct(Vector)
if isinstance( Vector, list ):
Vector = self.smeshpyD.MakeDirStruct(*Vector)
self.mesh.SetParameters(Vector.PS.parameters)
mesh = self.editor.TranslateObjectMakeMesh(theObject, Vector, MakeGroups, NewMeshName)
return Mesh( self.smeshpyD, self.geompyD, mesh )
## Scales the object
# @param theObject - the object to translate (mesh, submesh, or group)
# @param thePoint - base point for scale
# @param theScaleFact - list of 1-3 scale factors for axises
# @param Copy - allows copying the translated elements
# @param MakeGroups - forces the generation of new groups from existing
# ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True,
# empty list otherwise
def Scale(self, theObject, thePoint, theScaleFact, Copy, MakeGroups=False):
unRegister = genObjUnRegister()
if ( isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if ( isinstance( theObject, list )):
theObject = self.GetIDSource(theObject, SMESH.ALL)
unRegister.set( theObject )
if ( isinstance( theScaleFact, float )):
theScaleFact = [theScaleFact]
if ( isinstance( theScaleFact, int )):
theScaleFact = [ float(theScaleFact)]
self.mesh.SetParameters(thePoint.parameters)
if Copy and MakeGroups:
return self.editor.ScaleMakeGroups(theObject, thePoint, theScaleFact)
self.editor.Scale(theObject, thePoint, theScaleFact, Copy)
return []
## Creates a new mesh from the translated object
# @param theObject - the object to translate (mesh, submesh, or group)
# @param thePoint - base point for scale
# @param theScaleFact - list of 1-3 scale factors for axises
# @param MakeGroups - forces the generation of new groups from existing ones
# @param NewMeshName - the name of the newly created mesh
# @return instance of Mesh class
def ScaleMakeMesh(self, theObject, thePoint, theScaleFact, MakeGroups=False, NewMeshName=""):
unRegister = genObjUnRegister()
if (isinstance(theObject, Mesh)):
theObject = theObject.GetMesh()
if ( isinstance( theObject, list )):
theObject = self.GetIDSource(theObject,SMESH.ALL)
unRegister.set( theObject )
if ( isinstance( theScaleFact, float )):
theScaleFact = [theScaleFact]
if ( isinstance( theScaleFact, int )):
theScaleFact = [ float(theScaleFact)]
self.mesh.SetParameters(thePoint.parameters)
mesh = self.editor.ScaleMakeMesh(theObject, thePoint, theScaleFact,
MakeGroups, NewMeshName)
return Mesh( self.smeshpyD, self.geompyD, mesh )
## Rotates the elements
# @param IDsOfElements list of elements ids
# @param Axis the axis of rotation (AxisStruct or geom line)
# @param AngleInRadians the angle of rotation (in radians) or a name of variable which defines angle in degrees
# @param Copy allows copying the rotated elements
# @param MakeGroups forces the generation of new groups from existing ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_trsf
def Rotate (self, IDsOfElements, Axis, AngleInRadians, Copy, MakeGroups=False):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,Parameters,hasVars = ParseAngles(AngleInRadians)
Parameters = Axis.parameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
if Copy and MakeGroups:
return self.editor.RotateMakeGroups(IDsOfElements, Axis, AngleInRadians)
self.editor.Rotate(IDsOfElements, Axis, AngleInRadians, Copy)
return []
## Creates a new mesh of rotated elements
# @param IDsOfElements list of element ids
# @param Axis the axis of rotation (AxisStruct or geom line)
# @param AngleInRadians the angle of rotation (in radians) or a name of variable which defines angle in degrees
# @param MakeGroups forces the generation of new groups from existing ones
# @param NewMeshName the name of the newly created mesh
# @return instance of Mesh class
# @ingroup l2_modif_trsf
def RotateMakeMesh (self, IDsOfElements, Axis, AngleInRadians, MakeGroups=0, NewMeshName=""):
if IDsOfElements == []:
IDsOfElements = self.GetElementsId()
if ( isinstance( Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,Parameters,hasVars = ParseAngles(AngleInRadians)
Parameters = Axis.parameters + var_separator + Parameters
self.mesh.SetParameters(Parameters)
mesh = self.editor.RotateMakeMesh(IDsOfElements, Axis, AngleInRadians,
MakeGroups, NewMeshName)
return Mesh( self.smeshpyD, self.geompyD, mesh )
## Rotates the object
# @param theObject the object to rotate( mesh, submesh, or group)
# @param Axis the axis of rotation (AxisStruct or geom line)
# @param AngleInRadians the angle of rotation (in radians) or a name of variable which defines angle in degrees
# @param Copy allows copying the rotated elements
# @param MakeGroups forces the generation of new groups from existing ones (if Copy)
# @return list of created groups (SMESH_GroupBase) if MakeGroups=True, empty list otherwise
# @ingroup l2_modif_trsf
def RotateObject (self, theObject, Axis, AngleInRadians, Copy, MakeGroups=False):
if (isinstance(theObject, Mesh)):
theObject = theObject.GetMesh()
if (isinstance(Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,Parameters,hasVars = ParseAngles(AngleInRadians)
Parameters = Axis.parameters + ":" + Parameters
self.mesh.SetParameters(Parameters)
if Copy and MakeGroups:
return self.editor.RotateObjectMakeGroups(theObject, Axis, AngleInRadians)
self.editor.RotateObject(theObject, Axis, AngleInRadians, Copy)
return []
## Creates a new mesh from the rotated object
# @param theObject the object to rotate (mesh, submesh, or group)
# @param Axis the axis of rotation (AxisStruct or geom line)
# @param AngleInRadians the angle of rotation (in radians) or a name of variable which defines angle in degrees
# @param MakeGroups forces the generation of new groups from existing ones
# @param NewMeshName the name of the newly created mesh
# @return instance of Mesh class
# @ingroup l2_modif_trsf
def RotateObjectMakeMesh(self, theObject, Axis, AngleInRadians, MakeGroups=0,NewMeshName=""):
if (isinstance( theObject, Mesh )):
theObject = theObject.GetMesh()
if (isinstance(Axis, geomBuilder.GEOM._objref_GEOM_Object)):
Axis = self.smeshpyD.GetAxisStruct(Axis)
AngleInRadians,Parameters,hasVars = ParseAngles(AngleInRadians)
Parameters = Axis.parameters + ":" + Parameters
mesh = self.editor.RotateObjectMakeMesh(theObject, Axis, AngleInRadians,
MakeGroups, NewMeshName)
self.mesh.SetParameters(Parameters)
return Mesh( self.smeshpyD, self.geompyD, mesh )
## Finds groups of adjacent nodes within Tolerance.
# @param Tolerance the value of tolerance
# @return the list of pairs of nodes IDs (e.g. [[1,12],[25,4]])
# @ingroup l2_modif_trsf
def FindCoincidentNodes (self, Tolerance):
return self.editor.FindCoincidentNodes(Tolerance)
## Finds groups of ajacent nodes within Tolerance.
# @param Tolerance the value of tolerance
# @param SubMeshOrGroup SubMesh or Group
# @param exceptNodes list of either SubMeshes, Groups or node IDs to exclude from search
# @return the list of pairs of nodes IDs (e.g. [[1,12],[25,4]])
# @ingroup l2_modif_trsf
def FindCoincidentNodesOnPart (self, SubMeshOrGroup, Tolerance, exceptNodes=[]):
unRegister = genObjUnRegister()
if (isinstance( SubMeshOrGroup, Mesh )):
SubMeshOrGroup = SubMeshOrGroup.GetMesh()
if not isinstance( exceptNodes, list):
exceptNodes = [ exceptNodes ]
if exceptNodes and isinstance( exceptNodes[0], int):
exceptNodes = [ self.GetIDSource( exceptNodes, SMESH.NODE)]
unRegister.set( exceptNodes )
return self.editor.FindCoincidentNodesOnPartBut(SubMeshOrGroup, Tolerance,exceptNodes)
## Merges nodes
# @param GroupsOfNodes a list of pairs of nodes IDs for merging (e.g. [[1,12],[25,4]])
# @ingroup l2_modif_trsf
def MergeNodes (self, GroupsOfNodes):
self.editor.MergeNodes(GroupsOfNodes)
## Finds the elements built on the same nodes.
# @param MeshOrSubMeshOrGroup Mesh or SubMesh, or Group of elements for searching
# @return the list of pairs of equal elements IDs (e.g. [[1,12],[25,4]])
# @ingroup l2_modif_trsf
def FindEqualElements (self, MeshOrSubMeshOrGroup):
if ( isinstance( MeshOrSubMeshOrGroup, Mesh )):
MeshOrSubMeshOrGroup = MeshOrSubMeshOrGroup.GetMesh()
return self.editor.FindEqualElements(MeshOrSubMeshOrGroup)
## Merges elements in each given group.
# @param GroupsOfElementsID a list of pairs of elements IDs for merging (e.g. [[1,12],[25,4]])
# @ingroup l2_modif_trsf
def MergeElements(self, GroupsOfElementsID):
self.editor.MergeElements(GroupsOfElementsID)
## Leaves one element and removes all other elements built on the same nodes.
# @ingroup l2_modif_trsf
def MergeEqualElements(self):
self.editor.MergeEqualElements()
    ## Sews free borders
    # Each border is identified by three node IDs: the first node, a second
    # node fixing the direction along the border, and the last node.
    # @return SMESH::Sew_Error
    # @ingroup l2_modif_trsf
    def SewFreeBorders (self, FirstNodeID1, SecondNodeID1, LastNodeID1,
                        FirstNodeID2, SecondNodeID2, LastNodeID2,
                        CreatePolygons, CreatePolyedrs):
        # pure pass-through to the CORBA mesh editor
        return self.editor.SewFreeBorders(FirstNodeID1, SecondNodeID1, LastNodeID1,
                                          FirstNodeID2, SecondNodeID2, LastNodeID2,
                                          CreatePolygons, CreatePolyedrs)
    ## Sews conform free borders
    # Like SewFreeBorders(), but the second border is only given by its first
    # two nodes (the borders are assumed conform).
    # @return SMESH::Sew_Error
    # @ingroup l2_modif_trsf
    def SewConformFreeBorders (self, FirstNodeID1, SecondNodeID1, LastNodeID1,
                               FirstNodeID2, SecondNodeID2):
        # pure pass-through to the CORBA mesh editor
        return self.editor.SewConformFreeBorders(FirstNodeID1, SecondNodeID1, LastNodeID1,
                                                 FirstNodeID2, SecondNodeID2)
    ## Sews border to side
    # @return SMESH::Sew_Error
    # @ingroup l2_modif_trsf
    def SewBorderToSide (self, FirstNodeIDOnFreeBorder, SecondNodeIDOnFreeBorder, LastNodeIDOnFreeBorder,
                         FirstNodeIDOnSide, LastNodeIDOnSide, CreatePolygons, CreatePolyedrs):
        # pure pass-through to the CORBA mesh editor
        return self.editor.SewBorderToSide(FirstNodeIDOnFreeBorder, SecondNodeIDOnFreeBorder, LastNodeIDOnFreeBorder,
                                           FirstNodeIDOnSide, LastNodeIDOnSide, CreatePolygons, CreatePolyedrs)
    ## Sews two sides of a mesh. The nodes belonging to Side1 are
    # merged with the nodes of elements of Side2.
    # The number of elements in theSide1 and in theSide2 must be
    # equal and they should have similar nodal connectivity.
    # The nodes to merge should belong to side borders and
    # the first node should be linked to the second.
    # @return SMESH::Sew_Error
    # @ingroup l2_modif_trsf
    def SewSideElements (self, IDsOfSide1Elements, IDsOfSide2Elements,
                         NodeID1OfSide1ToMerge, NodeID1OfSide2ToMerge,
                         NodeID2OfSide1ToMerge, NodeID2OfSide2ToMerge):
        # pure pass-through to the CORBA mesh editor
        return self.editor.SewSideElements(IDsOfSide1Elements, IDsOfSide2Elements,
                                           NodeID1OfSide1ToMerge, NodeID1OfSide2ToMerge,
                                           NodeID2OfSide1ToMerge, NodeID2OfSide2ToMerge)
    ## Sets new nodes for the given element.
    # @param ide the element id
    # @param newIDs nodes ids
    # @return If the number of nodes does not correspond to the type of element - returns false
    # @ingroup l2_modif_edit
    def ChangeElemNodes(self, ide, newIDs):
        # pure pass-through to the CORBA mesh editor
        return self.editor.ChangeElemNodes(ide, newIDs)
    ## If during the last operation of MeshEditor some nodes were
    # created, this method returns the list of their IDs, \n
    # if new nodes were not created - returns empty list
    # @return the list of integer values (can be empty)
    # @ingroup l1_auxiliary
    def GetLastCreatedNodes(self):
        return self.editor.GetLastCreatedNodes()
    ## If during the last operation of MeshEditor some elements were
    # created this method returns the list of their IDs, \n
    # if new elements were not created - returns empty list
    # @return the list of integer values (can be empty)
    # @ingroup l1_auxiliary
    def GetLastCreatedElems(self):
        return self.editor.GetLastCreatedElems()
    ## Clears sequences of nodes and elements created by mesh edition oparations
    # (i.e. resets what GetLastCreatedNodes()/GetLastCreatedElems() report)
    # @ingroup l1_auxiliary
    def ClearLastCreated(self):
        self.editor.ClearLastCreated()
## Creates Duplicates given elements, i.e. creates new elements based on the
# same nodes as the given ones.
# @param theElements - container of elements to duplicate. It can be a Mesh,
# sub-mesh, group, filter or a list of element IDs.
# @param theGroupName - a name of group to contain the generated elements.
# If a group with such a name already exists, the new elements
# are added to the existng group, else a new group is created.
# If \a theGroupName is empty, new elements are not added
# in any group.
# @return a group where the new elements are added. None if theGroupName == "".
# @ingroup l2_modif_edit
def DoubleElements(self, theElements, theGroupName=""):
unRegister = genObjUnRegister()
if isinstance( theElements, Mesh ):
theElements = theElements.mesh
elif isinstance( theElements, list ):
theElements = self.GetIDSource( theElements, SMESH.ALL )
unRegister.set( theElements )
return self.editor.DoubleElements(theElements, theGroupName)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # @param theNodes identifiers of nodes to be doubled
    # @param theModifiedElems identifiers of elements to be updated by the new (doubled)
    # nodes. If list of element identifiers is empty then nodes are doubled but
    # they not assigned to elements
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    # @ingroup l2_modif_edit
    def DoubleNodes(self, theNodes, theModifiedElems):
        # pure pass-through to the CORBA mesh editor
        return self.editor.DoubleNodes(theNodes, theModifiedElems)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # This method provided for convenience works as DoubleNodes() described above.
    # @param theNodeId identifiers of node to be doubled
    # @param theModifiedElems identifiers of elements to be updated
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    # @ingroup l2_modif_edit
    def DoubleNode(self, theNodeId, theModifiedElems):
        # single-node variant of DoubleNodes()
        return self.editor.DoubleNode(theNodeId, theModifiedElems)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # This method provided for convenience works as DoubleNodes() described above.
    # @param theNodes group of nodes to be doubled
    # @param theModifiedElems group of elements to be updated.
    # @param theMakeGroup forces the generation of a group containing new nodes.
    # @return TRUE or a created group if operation has been completed successfully,
    # FALSE or None otherwise
    # @ingroup l2_modif_edit
    def DoubleNodeGroup(self, theNodes, theModifiedElems, theMakeGroup=False):
        # the "New" editor call additionally returns the group of created nodes
        if theMakeGroup:
            return self.editor.DoubleNodeGroupNew(theNodes, theModifiedElems)
        return self.editor.DoubleNodeGroup(theNodes, theModifiedElems)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # This method provided for convenience works as DoubleNodes() described above.
    # @param theNodes list of groups of nodes to be doubled
    # @param theModifiedElems list of groups of elements to be updated.
    # @param theMakeGroup forces the generation of a group containing new nodes.
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    # @ingroup l2_modif_edit
    def DoubleNodeGroups(self, theNodes, theModifiedElems, theMakeGroup=False):
        # the "New" editor call additionally returns the group of created nodes
        if theMakeGroup:
            return self.editor.DoubleNodeGroupsNew(theNodes, theModifiedElems)
        return self.editor.DoubleNodeGroups(theNodes, theModifiedElems)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # @param theElems - the list of elements (edges or faces) to be replicated
    # The nodes for duplication could be found from these elements
    # @param theNodesNot - list of nodes to NOT replicate
    # @param theAffectedElems - the list of elements (cells and edges) to which the
    # replicated nodes should be associated to.
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    # @ingroup l2_modif_edit
    def DoubleNodeElem(self, theElems, theNodesNot, theAffectedElems):
        # pure pass-through to the CORBA mesh editor
        return self.editor.DoubleNodeElem(theElems, theNodesNot, theAffectedElems)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # @param theElems - the list of elements (edges or faces) to be replicated
    # The nodes for duplication could be found from these elements
    # @param theNodesNot - list of nodes to NOT replicate
    # @param theShape - shape to detect affected elements (element which geometric center
    # located on or inside shape).
    # The replicated nodes should be associated to affected elements.
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    # @ingroup l2_modif_edit
    def DoubleNodeElemInRegion(self, theElems, theNodesNot, theShape):
        # pure pass-through to the CORBA mesh editor
        return self.editor.DoubleNodeElemInRegion(theElems, theNodesNot, theShape)
## Creates a hole in a mesh by doubling the nodes of some particular elements
# This method provided for convenience works as DoubleNodes() described above.
# @param theElems - group of of elements (edges or faces) to be replicated
# @param theNodesNot - group of nodes not to replicated
# @param theAffectedElems - group of elements to which the replicated nodes
# should be associated to.
# @param theMakeGroup forces the generation of a group containing new elements.
# @param theMakeNodeGroup forces the generation of a group containing new nodes.
# @return TRUE or created groups (one or two) if operation has been completed successfully,
# FALSE or None otherwise
# @ingroup l2_modif_edit
def DoubleNodeElemGroup(self, theElems, theNodesNot, theAffectedElems,
theMakeGroup=False, theMakeNodeGroup=False):
if theMakeGroup or theMakeNodeGroup:
twoGroups = self.editor.DoubleNodeElemGroup2New(theElems, theNodesNot,
theAffectedElems,
theMakeGroup, theMakeNodeGroup)
if theMakeGroup and theMakeNodeGroup:
return twoGroups
else:
return twoGroups[ int(theMakeNodeGroup) ]
return self.editor.DoubleNodeElemGroup(theElems, theNodesNot, theAffectedElems)
## Creates a hole in a mesh by doubling the nodes of some particular elements
# This method provided for convenience works as DoubleNodes() described above.
# @param theElems - group of of elements (edges or faces) to be replicated
# @param theNodesNot - group of nodes not to replicated
# @param theShape - shape to detect affected elements (element which geometric center
# located on or inside shape).
# The replicated nodes should be associated to affected elements.
# @ingroup l2_modif_edit
def DoubleNodeElemGroupInRegion(self, theElems, theNodesNot, theShape):
return self.editor.DoubleNodeElemGroupInRegion(theElems, theNodesNot, theShape)
## Creates a hole in a mesh by doubling the nodes of some particular elements
# This method provided for convenience works as DoubleNodes() described above.
# @param theElems - list of groups of elements (edges or faces) to be replicated
# @param theNodesNot - list of groups of nodes not to replicated
# @param theAffectedElems - group of elements to which the replicated nodes
# should be associated to.
# @param theMakeGroup forces the generation of a group containing new elements.
# @param theMakeNodeGroup forces the generation of a group containing new nodes.
# @return TRUE or created groups (one or two) if operation has been completed successfully,
# FALSE or None otherwise
# @ingroup l2_modif_edit
def DoubleNodeElemGroups(self, theElems, theNodesNot, theAffectedElems,
theMakeGroup=False, theMakeNodeGroup=False):
if theMakeGroup or theMakeNodeGroup:
twoGroups = self.editor.DoubleNodeElemGroups2New(theElems, theNodesNot,
theAffectedElems,
theMakeGroup, theMakeNodeGroup)
if theMakeGroup and theMakeNodeGroup:
return twoGroups
else:
return twoGroups[ int(theMakeNodeGroup) ]
return self.editor.DoubleNodeElemGroups(theElems, theNodesNot, theAffectedElems)
    ## Creates a hole in a mesh by doubling the nodes of some particular elements
    # This method provided for convenience works as DoubleNodes() described above.
    # @param theElems - list of groups of elements (edges or faces) to be replicated
    # @param theNodesNot - list of groups of nodes not to replicated
    # @param theShape - shape to detect affected elements (element which geometric center
    # located on or inside shape).
    # The replicated nodes should be associated to affected elements.
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    # @ingroup l2_modif_edit
    def DoubleNodeElemGroupsInRegion(self, theElems, theNodesNot, theShape):
        # pure pass-through to the CORBA mesh editor
        return self.editor.DoubleNodeElemGroupsInRegion(theElems, theNodesNot, theShape)
    ## Identify the elements that will be affected by node duplication (actual duplication is not performed.
    # This method is the first step of DoubleNodeElemGroupsInRegion.
    # @param theElems - list of groups of elements (edges or faces) to be replicated
    # @param theNodesNot - list of groups of nodes not to replicated
    # @param theShape - shape to detect affected elements (element which geometric center
    # located on or inside shape).
    # The replicated nodes should be associated to affected elements.
    # @return groups of affected elements
    # @ingroup l2_modif_edit
    def AffectedElemGroupsInRegion(self, theElems, theNodesNot, theShape):
        # query-only counterpart of DoubleNodeElemGroupsInRegion()
        return self.editor.AffectedElemGroupsInRegion(theElems, theNodesNot, theShape)
    ## Double nodes on shared faces between groups of volumes and create flat elements on demand.
    # The list of groups must describe a partition of the mesh volumes.
    # The nodes of the internal faces at the boundaries of the groups are doubled.
    # In option, the internal faces are replaced by flat elements.
    # Triangles are transformed in prisms, and quadrangles in hexahedrons.
    # @param theDomains - list of groups of volumes
    # @param createJointElems - if TRUE, create the elements
    # @param onAllBoundaries - if TRUE, the nodes and elements are also created on
    # the boundary between \a theDomains and the rest mesh
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    def DoubleNodesOnGroupBoundaries(self, theDomains, createJointElems, onAllBoundaries=False ):
        # pure pass-through to the CORBA mesh editor
        return self.editor.DoubleNodesOnGroupBoundaries( theDomains, createJointElems, onAllBoundaries )
    ## Double nodes on some external faces and create flat elements.
    # Flat elements are mainly used by some types of mechanic calculations.
    #
    # Each group of the list must be constituted of faces.
    # Triangles are transformed in prisms, and quadrangles in hexahedrons.
    # @param theGroupsOfFaces - list of groups of faces
    # @return TRUE if operation has been completed successfully, FALSE otherwise
    def CreateFlatElementsOnFacesGroups(self, theGroupsOfFaces ):
        return self.editor.CreateFlatElementsOnFacesGroups( theGroupsOfFaces )
    ## identify all the elements around a geom shape, get the faces delimiting the hole
    # @return presumably the created group of faces — TODO confirm against the editor API
    def CreateHoleSkin(self, radius, theShape, groupName, theNodesCoords):
        return self.editor.CreateHoleSkin( radius, theShape, groupName, theNodesCoords )
def _getFunctor(self, funcType ):
fn = self.functors[ funcType._v ]
if not fn:
fn = self.smeshpyD.GetFunctor(funcType)
fn.SetMesh(self.mesh)
self.functors[ funcType._v ] = fn
return fn
def _valueFromFunctor(self, funcType, elemId):
fn = self._getFunctor( funcType )
if fn.GetElementType() == self.GetElementType(elemId, True):
val = fn.GetValue(elemId)
else:
val = 0
return val
## Get length of 1D element or sum of lengths of all 1D mesh elements
# @param elemId mesh element ID (if not defined - sum of length of all 1D elements will be calculated)
# @return element's length value if \a elemId is specified or sum of all 1D mesh elements' lengths otherwise
# @ingroup l1_measurements
def GetLength(self, elemId=None):
length = 0
if elemId == None:
length = self.smeshpyD.GetLength(self)
else:
length = self._valueFromFunctor(SMESH.FT_Length, elemId)
return length
## Get area of 2D element or sum of areas of all 2D mesh elements
# @param elemId mesh element ID (if not defined - sum of areas of all 2D elements will be calculated)
# @return element's area value if \a elemId is specified or sum of all 2D mesh elements' areas otherwise
# @ingroup l1_measurements
def GetArea(self, elemId=None):
area = 0
if elemId == None:
area = self.smeshpyD.GetArea(self)
else:
area = self._valueFromFunctor(SMESH.FT_Area, elemId)
return area
## Get volume of 3D element or sum of volumes of all 3D mesh elements
# @param elemId mesh element ID (if not defined - sum of volumes of all 3D elements will be calculated)
# @return element's volume value if \a elemId is specified or sum of all 3D mesh elements' volumes otherwise
# @ingroup l1_measurements
def GetVolume(self, elemId=None):
volume = 0
if elemId == None:
volume = self.smeshpyD.GetVolume(self)
else:
volume = self._valueFromFunctor(SMESH.FT_Volume3D, elemId)
return volume
## Get maximum element length.
# @param elemId mesh element ID
# @return element's maximum length value
# @ingroup l1_measurements
def GetMaxElementLength(self, elemId):
if self.GetElementType(elemId, True) == SMESH.VOLUME:
ftype = SMESH.FT_MaxElementLength3D
else:
ftype = SMESH.FT_MaxElementLength2D
return self._valueFromFunctor(ftype, elemId)
## Get aspect ratio of 2D or 3D element.
# @param elemId mesh element ID
# @return element's aspect ratio value
# @ingroup l1_measurements
def GetAspectRatio(self, elemId):
if self.GetElementType(elemId, True) == SMESH.VOLUME:
ftype = SMESH.FT_AspectRatio3D
else:
ftype = SMESH.FT_AspectRatio
return self._valueFromFunctor(ftype, elemId)
    ## Get warping angle of 2D element.
    # @param elemId mesh element ID
    # @return element's warping angle value
    # @ingroup l1_measurements
    def GetWarping(self, elemId):
        # delegates to the FT_Warping functor; returns 0 for non-2D elements
        return self._valueFromFunctor(SMESH.FT_Warping, elemId)
    ## Get minimum angle of 2D element.
    # @param elemId mesh element ID
    # @return element's minimum angle value
    # @ingroup l1_measurements
    def GetMinimumAngle(self, elemId):
        return self._valueFromFunctor(SMESH.FT_MinimumAngle, elemId)
    ## Get taper of 2D element.
    # @param elemId mesh element ID
    # @return element's taper value
    # @ingroup l1_measurements
    def GetTaper(self, elemId):
        return self._valueFromFunctor(SMESH.FT_Taper, elemId)
    ## Get skew of 2D element.
    # @param elemId mesh element ID
    # @return element's skew value
    # @ingroup l1_measurements
    def GetSkew(self, elemId):
        return self._valueFromFunctor(SMESH.FT_Skew, elemId)
## Return minimal and maximal value of a given functor.
# @param funType a functor type, an item of SMESH.FunctorType enum
# (one of SMESH.FunctorType._items)
# @param meshPart a part of mesh (group, sub-mesh) to treat
# @return tuple (min,max)
# @ingroup l1_measurements
def GetMinMax(self, funType, meshPart=None):
unRegister = genObjUnRegister()
if isinstance( meshPart, list ):
meshPart = self.GetIDSource( meshPart, SMESH.ALL )
unRegister.set( meshPart )
if isinstance( meshPart, Mesh ):
meshPart = meshPart.mesh
fun = self._getFunctor( funType )
if fun:
if meshPart:
hist = fun.GetLocalHistogram( 1, False, meshPart )
else:
hist = fun.GetHistogram( 1, False )
if hist:
return hist[0].min, hist[0].max
return None
pass # end of Mesh class
## Helper class for wrapping of SMESH.SMESH_Pattern CORBA class
# Intercepts calls to substitute notebook variables and to shift the
# user-visible 1-based key-point/node indices before forwarding to CORBA.
class Pattern(SMESH._objref_SMESH_Pattern):
    def ApplyToMeshFaces(self, theMesh, theFacesIDs, theNodeIndexOnKeyPoint1, theReverse):
        # decrFun converts the 1-based index to the 0-based one expected by
        # the CORBA call; ParseParameters applies it after variable substitution
        decrFun = lambda i: i-1
        theNodeIndexOnKeyPoint1,Parameters,hasVars = ParseParameters(theNodeIndexOnKeyPoint1, decrFun)
        theMesh.SetParameters(Parameters)
        return SMESH._objref_SMESH_Pattern.ApplyToMeshFaces( self, theMesh, theFacesIDs, theNodeIndexOnKeyPoint1, theReverse )
    def ApplyToHexahedrons(self, theMesh, theVolumesIDs, theNode000Index, theNode001Index):
        # same 1-based -> 0-based conversion for both node indices
        decrFun = lambda i: i-1
        theNode000Index,theNode001Index,Parameters,hasVars = ParseParameters(theNode000Index,theNode001Index, decrFun)
        theMesh.SetParameters(Parameters)
        return SMESH._objref_SMESH_Pattern.ApplyToHexahedrons( self, theMesh, theVolumesIDs, theNode000Index, theNode001Index )
# Registering the new proxy for Pattern: replaces the default omniORB stub so
# that SMESH_Pattern references returned by CORBA are narrowed to the Pattern
# wrapper class defined above
omniORB.registerObjref(SMESH._objref_SMESH_Pattern._NP_RepositoryId, Pattern)
## Private class used to bind methods creating algorithms to the class Mesh.
# Instances of this class are set as Mesh attributes (one per mesh method name)
# by the plugin-loading loop at the bottom of this module; calling the
# attribute instantiates the matching algorithm class.
class algoCreator:
    def __init__(self):
        # mesh the created algorithms will be attached to (set by copy())
        self.mesh = None
        # algoType string used when the caller does not specify one
        self.defaultAlgoType = ""
        # maps algoType string -> algorithm (old-style) class
        self.algoTypeToClass = {}
    # Stores a python class of algorithm
    def add(self, algoClass):
        # 'classobj' check means old-style (Python 2) classes only
        if type( algoClass ).__name__ == 'classobj' and \
           hasattr( algoClass, "algoType"):
            self.algoTypeToClass[ algoClass.algoType ] = algoClass
            if not self.defaultAlgoType and \
               hasattr( algoClass, "isDefault") and algoClass.isDefault:
                self.defaultAlgoType = algoClass.algoType
            #print "Add",algoClass.algoType, "dflt",self.defaultAlgoType
    # creates a copy of self and assign mesh to the copy
    def copy(self, mesh):
        other = algoCreator()
        other.defaultAlgoType = self.defaultAlgoType
        # NOTE: the algoTypeToClass dict is shared (not copied) between copies
        other.algoTypeToClass  = self.algoTypeToClass
        other.mesh = mesh
        return other
    # creates an instance of algorithm
    def __call__(self,algo="",geom=0,*args):
        algoType = self.defaultAlgoType
        # positional args may carry the geometry and/or the algo type in any order
        for arg in args + (algo,geom):
            if isinstance( arg, geomBuilder.GEOM._objref_GEOM_Object ):
                geom = arg
            if isinstance( arg, str ) and arg:
                algoType = arg
        # fall back to an arbitrary registered type when no default exists
        if not algoType and self.algoTypeToClass:
            algoType = self.algoTypeToClass.keys()[0]
        if self.algoTypeToClass.has_key( algoType ):
            #print "Create algo",algoType
            return self.algoTypeToClass[ algoType ]( self.mesh, geom )
        raise RuntimeError, "No class found for algo type %s" % algoType
        # NOTE(review): the following return is unreachable (follows an
        # unconditional raise) and could be removed
        return None
# Private class used to substitute and store variable parameters of hypotheses.
# Wraps a hypothesis method so that string arguments holding notebook variable
# names are replaced with their numeric values before the CORBA call, while the
# variable names themselves are recorded via SetVarParameter().
class hypMethodWrapper:
    def __init__(self, hyp, method):
        self.hyp    = hyp
        self.method = method
        #print "REBIND:", method.__name__
        return
    # call a method of hypothesis with calling SetVarParameter() before
    def __call__(self,*args):
        if not args:
            return self.method( self.hyp, *args ) # hypothesis method with no args
        #print "MethWrapper.__call__",self.method.__name__, args
        try:
            parsed = ParseParameters(*args)     # replace variables with their values
            self.hyp.SetVarParameter( parsed[-2], self.method.__name__ )
            result = self.method( self.hyp, *parsed[:-2] ) # call hypothesis method
        except omniORB.CORBA.BAD_PARAM: # raised by hypothesis method call
            # maybe there is a replaced string arg which is not variable
            result = self.method( self.hyp, *args )
        except ValueError, detail: # raised by ParseParameters()
            # retry with the raw arguments; only re-raise if the CORBA call
            # itself rejects them (then the name really was a bad variable)
            try:
                result = self.method( self.hyp, *args )
            except omniORB.CORBA.BAD_PARAM:
                raise ValueError, detail # wrong variable name
        return result
    pass
# A helper class that calls UnRegister() on the SALOME.GenericObj'es stored in
# it when the helper itself is garbage-collected — a scope guard for temporary
# CORBA ID sources.
class genObjUnRegister:
    def __init__(self, genObj=None):
        self.genObjList = []
        self.set( genObj )
    def set(self, genObj):
        "Store one SALOME.GenericObj, or a list of them"
        if isinstance( genObj, list ):
            self.genObjList += genObj
        else:
            self.genObjList = self.genObjList + [ genObj ]
    def __del__(self):
        for obj in self.genObjList:
            # skip None placeholders and anything without an UnRegister method
            if obj and hasattr( obj, "UnRegister" ):
                obj.UnRegister()
# Load every mesher plugin named in the SMESH_MeshersList environment variable
# and graft its algorithm-creating methods onto the Mesh class: each plugin
# algorithm class that declares a "meshMethod" attribute is registered with an
# algoCreator instance stored under that method name on Mesh.
for pluginName in os.environ[ "SMESH_MeshersList" ].split( ":" ):
    #
    #print "pluginName: ", pluginName
    pluginBuilderName = pluginName + "Builder"
    try:
        # import the plugin's builder module contents into this namespace
        exec( "from salome.%s.%s import *" % (pluginName, pluginBuilderName))
    except Exception, e:
        # a missing/broken plugin is skipped, optionally with a diagnostic
        from salome_utils import verbose
        if verbose(): print "Exception while loading %s: %s" % ( pluginBuilderName, e )
        continue
    exec( "from salome.%s import %s" % (pluginName, pluginBuilderName))
    plugin = eval( pluginBuilderName )
    #print " plugin:" , str(plugin)
    # add methods creating algorithms to Mesh
    for k in dir( plugin ):
        if k[0] == '_': continue
        algo = getattr( plugin, k )
        #print " algo:", str(algo)
        # only old-style classes declaring a meshMethod are algorithm classes
        if type( algo ).__name__ == 'classobj' and hasattr( algo, "meshMethod" ):
            #print " meshMethod:" , str(algo.meshMethod)
            if not hasattr( Mesh, algo.meshMethod ):
                setattr( Mesh, algo.meshMethod, algoCreator() )
                pass
            getattr( Mesh, algo.meshMethod ).add( algo )
            pass
        pass
    pass
del pluginName
|
UTF-8
|
Python
| false | false | 2,014 |
10,952,166,609,979 |
04ae3b241cd4cd2e4699c66af87c20089996062c
|
9cca40bd2d65d2372d1953549aef6cd82211ad8a
|
/interface.py
|
e6ce5eff9d716fa6d2ed34c3b7f3ee192497430e
|
[] |
no_license
|
GAVLab/CAN-GUI
|
https://github.com/GAVLab/CAN-GUI
|
1db828bb19d129b80ba37e1c4a4b18dd0384df28
|
ccd4dd8a7da942df11d0c943f77f67dd11c3a017
|
refs/heads/master
| 2016-09-05T21:29:39.954010 | 2014-12-05T08:14:24 | 2014-12-05T08:14:24 | 27,530,781 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
from time import sleep
from pprint import pprint
from pymoos.MOOSCommClient import MOOSCommClient
from copy import deepcopy
import cfg
from cfg import VehicleData
from cfg import wheel_rpm_to_speed
class MOOSInterface(MOOSCommClient):
    """MOOS client that listens for CAN vehicle variables and keeps the most
    recent values in a VehicleData-shaped dict for the GUI to poll."""
    def __init__(self):
        # latest decoded vehicle values, keyed per cfg.VehicleData
        self.data = deepcopy(VehicleData)
        MOOSCommClient.__init__(self)
        self.SetOnConnectCallBack(self._on_connect)
        self.SetOnDisconnectCallBack(self._on_disconnect)
        self.SetOnMailCallBack(self._on_mail)
        self.Run(cfg.moos_ip, cfg.moos_port, cfg.moos_name, cfg.moos_freq)
        # poll for up to ~3 seconds (30 x 0.1s) for the connection to come up,
        # then give up and terminate the process
        for x in range(30):
            if not self.IsConnected():
                sleep(0.1)
                continue
            print('CAN GUI Connected')
            return
        print('CAN GUI Could not connect to MOOSDB')
        sys.exit()
    def _on_connect(self):
        # subscribe to every MOOS variable listed in cfg.moos_vars
        for key in cfg.moos_vars:
            self.Register(cfg.moos_vars[key])
    def _on_disconnect(self):
        # losing the MOOSDB connection is fatal for the GUI
        print('CAN GUI Disconnected')
        sys.exit()
    def _on_mail(self):
        """Decode incoming MOOS messages into self.data."""
        for msg in self.FetchRecentMail():
            # live messages arrive as strings; replayed logs carry doubles
            if cfg.is_live:
                val = float(msg.GetString())
            else:
                val = msg.GetDouble()
            # figure out the key as defined in cfg.moos_vars
            # NOTE: dict.iteritems() is Python 2 only
            key = [k for k,v in cfg.moos_vars.iteritems() if v == msg.GetKey()]
            if len(key):
                key = key[0]
            else:
                # message does not correspond to a configured variable
                continue
            # print 'key: ' + key + ' val: ' + str(msg.GetDouble())
            if 'wheel_rpm' in key:
                # convert RPM to speed; key suffix (e.g. front_left) selects the wheel
                speed = wheel_rpm_to_speed(val)
                data_key = 'wheel_speed_'+'_'.join(key.split('_')[2:])
                self.data[data_key] = speed
            elif key == 'steer_position':
                self.data['steer_angle'] = val
            # print '\nMOOSInterface'
            # pprint(self.data)
    def get_latest(self):
        # deep copy so the GUI thread cannot see partial updates in-place
        return deepcopy(self.data)
    def exit(self):
        self.Close()
        sys.exit()
|
UTF-8
|
Python
| false | false | 2,014 |
11,235,634,463,823 |
45e1705616dc5c85cde36a67902fa76513555f18
|
73d4e29d4f1760396b4053a5afdf3766368ab16e
|
/designate/backend/base.py
|
ca661b053d4142914ed4d2ec70fb26584d364bc1
|
[
"Apache-2.0"
] |
permissive
|
NeCTAR-RC/designate
|
https://github.com/NeCTAR-RC/designate
|
57a5f95d22edfb16cee6fb85b0e2d980bab8f377
|
331ee1958271990ae383203e7f7970f8f41ca003
|
refs/heads/master
| 2021-12-12T10:32:01.298798 | 2014-07-14T20:40:39 | 2014-07-14T20:40:39 | 16,677,944 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from designate.openstack.common import log as logging
from designate.openstack.common.gettextutils import _LW
from designate import exceptions
from designate.context import DesignateContext
from designate.plugin import DriverPlugin
LOG = logging.getLogger(__name__)
class Backend(DriverPlugin):
    """Base class for backend implementations.

    Subclasses implement the abstract methods to push domain / recordset /
    record / server changes into a concrete DNS server.  The TSIG methods
    and the sync_* helpers have default implementations that subclasses
    may override.
    """

    __plugin_type__ = 'backend'
    __plugin_ns__ = 'designate.backend'

    def __init__(self, central_service):
        super(Backend, self).__init__()
        self.central_service = central_service
        # Admin context with all_tenants set so sync operations can see
        # every tenant's data.
        self.admin_context = DesignateContext.get_admin_context()
        self.admin_context.all_tenants = True

    def start(self):
        """Lifecycle hook invoked on service start; no-op by default."""
        pass

    def stop(self):
        """Lifecycle hook invoked on service stop; no-op by default."""
        pass

    def create_tsigkey(self, context, tsigkey):
        """Create a TSIG Key"""
        raise exceptions.NotImplemented(
            'TSIG is not supported by this backend')

    def update_tsigkey(self, context, tsigkey):
        """Update a TSIG Key"""
        raise exceptions.NotImplemented(
            'TSIG is not supported by this backend')

    def delete_tsigkey(self, context, tsigkey):
        """Delete a TSIG Key"""
        raise exceptions.NotImplemented(
            'TSIG is not supported by this backend')

    @abc.abstractmethod
    def create_domain(self, context, domain):
        """Create a DNS domain"""

    @abc.abstractmethod
    def update_domain(self, context, domain):
        """Update a DNS domain"""

    @abc.abstractmethod
    def delete_domain(self, context, domain):
        """Delete a DNS domain"""

    # Consistency fix: create_recordset was the only recordset method
    # without @abc.abstractmethod, which silently turned it into a no-op
    # for subclasses that forgot to implement it.
    @abc.abstractmethod
    def create_recordset(self, context, domain, recordset):
        """Create a DNS recordset"""

    @abc.abstractmethod
    def update_recordset(self, context, domain, recordset):
        """Update a DNS recordset"""

    @abc.abstractmethod
    def delete_recordset(self, context, domain, recordset):
        """Delete a DNS recordset"""

    @abc.abstractmethod
    def create_record(self, context, domain, recordset, record):
        """Create a DNS record"""

    @abc.abstractmethod
    def update_record(self, context, domain, recordset, record):
        """Update a DNS record"""

    @abc.abstractmethod
    def delete_record(self, context, domain, recordset, record):
        """Delete a DNS record"""

    @abc.abstractmethod
    def create_server(self, context, server):
        """Create a DNS server"""

    @abc.abstractmethod
    def update_server(self, context, server):
        """Update a DNS server"""

    @abc.abstractmethod
    def delete_server(self, context, server):
        """Delete a DNS server"""

    def sync_domain(self, context, domain, rdata):
        """
        Re-Sync a DNS domain

        This is the default, naive, domain synchronization implementation:
        delete the domain from the backend (ignoring "not found"), then
        re-create it and all of its recordsets/records from scratch.
        """
        # First up, delete the domain from the backend.
        try:
            self.delete_domain(context, domain)
        except exceptions.DomainNotFound as e:
            # NOTE(Kiall): This means a domain was missing from the backend.
            #              Good thing we're doing a sync!
            LOG.warn(_LW("Failed to delete domain '%(domain)s' during sync. "
                         "Message: %(message)s") %
                     {'domain': domain['id'], 'message': str(e)})

        # Next, re-create the domain in the backend.
        self.create_domain(context, domain)

        # Finally, re-create the records for the domain.
        for recordset, records in rdata:
            # Re-create the record in the backend.
            self.create_recordset(context, domain, recordset)

            for record in records:
                self.create_record(context, domain, recordset, record)

    def sync_record(self, context, domain, recordset, record):
        """
        Re-Sync a DNS record.

        This is the default, naive, record synchronization implementation:
        delete the record (ignoring "not found"), then re-create it.
        """
        # First up, delete the record from the backend.
        try:
            self.delete_record(context, domain, recordset, record)
        except exceptions.RecordNotFound as e:
            # NOTE(Kiall): This means a record was missing from the backend.
            #              Good thing we're doing a sync!
            LOG.warn(_LW("Failed to delete record '%(record)s' "
                         "in domain '%(domain)s' during sync. "
                         "Message: %(message)s") %
                     {'record': record['id'], 'domain': domain['id'],
                      'message': str(e)})

        # Finally, re-create the record in the backend.
        self.create_record(context, domain, recordset, record)

    def ping(self, context):
        """Ping the Backend service"""

        return {
            'status': None
        }
|
UTF-8
|
Python
| false | false | 2,014 |
2,293,512,578,956 |
8ac2b279429c96429f62c007c8f2f3741026376b
|
518b9c6591a81487f73d4d936dd94fb9de5ca258
|
/Chapter7/71_rectangle.py
|
081ebe85bcdec570cbb875517f606def1bd77a34
|
[] |
no_license
|
charles-ma/Python_code
|
https://github.com/charles-ma/Python_code
|
5cac86dfc9895e6e24b399f698a09529926fff80
|
1cbdeff2564d3082accbd3808ca86b569fe68c58
|
refs/heads/master
| 2021-01-22T02:39:27.199073 | 2013-02-07T13:57:51 | 2013-02-07T13:57:51 | 6,115,962 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Rectangle(object):
    '''Encapsulates some calculations about the rectangles.'''

    def __init__(self, height, width):
        self.height = height
        self.width = width

    def get_area(self):
        """Return the area (height * width)."""
        return self.height * self.width

    def get_peri(self):
        """Return the perimeter."""
        return 2 * (self.height + self.width)

    def is_square(self):
        """Return True when height equals width."""
        # The comparison already yields a bool; no if/else needed.
        return self.height == self.width

    def get_width(self):
        return self.width

    def get_height(self):
        return self.height
# Demo: exercise the class (Python 2 print statements).
rectangle = Rectangle(3,4)
print rectangle.get_area(), rectangle.get_peri(), rectangle.is_square(), rectangle.get_width(), rectangle.get_height()
# Calling the unbound method through the class is equivalent to the
# bound call above.
print Rectangle.get_area(rectangle)
|
UTF-8
|
Python
| false | false | 2,013 |
14,044,543,059,028 |
a2e8c04e50f0d97fe4b6ce8e01a3c200feca0852
|
b0366c55c4564a978c859f5fdd493a576158c487
|
/problem_5.py
|
5e9db7a39fd142426a1e0cf0ec43d2a4d3a007a3
|
[] |
no_license
|
ekump/projecteuler.net_python
|
https://github.com/ekump/projecteuler.net_python
|
1153fd0549694861d714dd4c002beaea960e6a1a
|
5794c9803be683f7f73bdcacc63fcf6d66fdfcb2
|
refs/heads/master
| 2016-08-07T13:24:59.662312 | 2012-10-27T04:49:41 | 2012-10-27T04:49:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
print "Calculating smallest number divisible by 1..20"
#init variables
n = 40
maxDiv = 20
divisors = range(maxDiv+1)
#removing - and 1 from list
divisors.remove(0)
divisors.remove(1)
found = False
#brute force approach
while(found == False):
for x in divisors:
if( n % x != 0):
n = n + 1
break
elif( x == maxDiv):
found = True
print "solution is", n
|
UTF-8
|
Python
| false | false | 2,012 |
10,239,202,063,259 |
951a1086c90a5009df1b4e95db12475d530ae410
|
aae5e122d689e3131325060fd5bd068a946c058a
|
/players.py
|
780cf731e4e5a297ac188f5ce187328d554d8c61
|
[] |
no_license
|
stredger/draft-tools
|
https://github.com/stredger/draft-tools
|
02807b42b463cef62f2d49491db6186a91d585c5
|
7413cfdf1fff87938ef7b5b46675066b234d7a3e
|
refs/heads/master
| 2016-09-09T20:53:15.906145 | 2013-09-26T22:52:00 | 2013-09-26T22:52:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Raised when the stats-table header (<thead>) cannot be located.
class PlayerSchemaException(Exception): pass
# Maps HTML table column headers to the attribute names set on parsed
# Skater instances.
STATS_TO_TRACK = { 'Player':'name',
                   'Age':'age',
                   'Tm':'team',
                   'Pos':'pos',
                   'GP':'gp',
                   'G':'g',
                   'A':'a',
                   'PTS':'pts',
                   'GC':'glcreat',
                   '+/-':'plsmin',
                   'PIM':'pim',
                   'EV':'evgl',
                   'PP':'ppgl',
                   'SH':'shgl',
                   'GW':'gwgl',
                   'S':'sh',
                   'S%':'shpercent',
                   'TOI':'toi',
                   'ATOI':'atoi'
                   }
def get_value(text, sp1, sp2='', default=0):
    """Extract the substring after the last *sp1* (and before *sp2*, if given).

    Returns *default* when the extracted value is empty.  The first
    parameter was renamed from ``str`` to stop shadowing the builtin;
    every call site in this module passes it positionally.
    """
    val = text.split(sp1)[-1]
    if sp2:
        val = val.split(sp2)[0]
    if val:
        return val
    return default
def to_mins(timestr):
    """Convert a "MM:SS" string to minutes as a float."""
    minutes, seconds = timestr.split(':')
    return float(minutes) + float(seconds) / 60
class Skater():
    """A skater row parsed from an HTML stats table.

    *scheme* maps attribute names to column indexes (see
    parse_stat_schema); parsed values are set as attributes.  ``valid``
    is True only when every tracked column parsed cleanly.
    """

    def __init__(self, htmltxt, scheme):
        fields = htmltxt.split('</td>')
        try:
            keys = scheme.keys()
            # special cases: columns whose cell markup or value type
            # differs from the plain-integer default below
            case = 'name'
            self.__dict__[case] = get_value(fields[scheme[case]], 'csk="', '">')
            keys.remove(case)
            case = 'team'
            self.__dict__[case] = get_value(fields[scheme[case]], 'html">', '</a')
            keys.remove(case)
            case = 'pos'
            self.__dict__[case] = get_value(fields[scheme[case]], '">')
            keys.remove(case)
            case = 'shpercent'
            self.__dict__[case] = float( get_value(fields[scheme[case]], '">') )
            keys.remove(case)
            case = 'atoi'
            self.__dict__[case] = to_mins( get_value(fields[scheme[case]], '">') )
            keys.remove(case)
            # remaining cases should all be integers
            for k in keys:
                self.__dict__[k] = int( get_value(fields[scheme[k]], '">') )
            # derived stat: points per game
            self.ptspg = self.pts / float( self.gp )
            self.valid = True
        except Exception, e:
            # Any parse failure (header rows, malformed cells) marks the
            # instance invalid rather than raising.
            # print e
            # print fields
            self.valid = False

    def __str__(self):
        return str(self.__dict__)

    def printattr(self, attr):
        # Returns "name, value" for the requested attribute (despite the
        # method name, it does not print).
        return '%s, %s' % (self.name, self.__dict__[attr])
class Goalie(Skater):
def __init__(self, htmltxt):
fields = htmltxt.splt('<td')
try:
self.name = get_value(fields[2], 'csk="', '">')
self.age = get_value(fields[3], '">', '</td')
self.team = get_value(fields[4], '">', '</a')
self.pos = 'G'
self.gp = int( get_value(fields[6], '">', '</td') )
self.w = int( get_value(fields[7], '">', '</td') )
self.l = int( get_value(fields[8], '">', '</td') )
self.otl = int( get_value(fields[9], '">', '</td') )
self.ga = int( get_value(fields[10], '">', '</td') )
self.sa = int( get_value(fields[11], '">', '</td') )
self.sv = int( get_value(fields[12], '">', '</td') )
self.svp = float( get_value(fields[13], '">', '</td', 0) )
self.gaa = float( get_value(fields[13], '">', '</td', 0) )
self.so = int( get_value(fields[12], '">', '</td') )
self.min = int( get_value(fields[12], '">', '</td') )
except Exception, e:
# print e
self.valid = False
def parse_stat_schema(htmllist):
    """Locate the <thead> section and map stat names to column indexes.

    Returns {attribute_name: column_index} for every tracked stat found
    in the header.  Raises PlayerSchemaException when no
    <thead>...</thead> span exists.
    """
    startstr = '<thead>'
    endstr = '</thead>'
    total = len(htmllist)
    schemestr = None
    pos = 0
    while pos < total:
        if startstr in htmllist[pos]:
            begin = pos
            while endstr not in htmllist[pos] and pos < total:
                pos += 1
            schemestr = ''.join(htmllist[begin:pos])
            break
        pos += 1
    if not schemestr:
        raise PlayerSchemaException('Unable to get player schema')
    schemelist = [s.split('">')[-1] for s in schemestr.split('</th>')]
    scheme = {}
    # Walk backwards: the stat names are not unique, but the copies we
    # want sit at the front of the list, so earlier (lower-index)
    # occurrences overwrite later duplicates.  Index 0 is never scanned.
    idx = len(schemelist) - 1
    while idx > 0:
        attr = STATS_TO_TRACK.get(schemelist[idx])
        if attr:
            scheme[attr] = idx
        idx -= 1
    return scheme
def create_skater_list(htmllist, scheme):
    """Build Skater objects from each <tr>...</tr> row in *htmllist*.

    Only rows that parse cleanly (``.valid``) are kept.
    """
    rowstart = '<tr'
    rowend = '</tr>'
    total = len(htmllist)
    players = []
    pos = 0
    while pos < total:
        if rowstart in htmllist[pos]:
            begin = pos
            # NOTE(review): unlike parse_stat_schema, this inner scan
            # has no bounds check; a truncated final row would raise
            # IndexError.
            while rowend not in htmllist[pos]:
                pos += 1
            candidate = Skater(''.join(htmllist[begin:pos]), scheme)
            if candidate.valid:
                players.append(candidate)
        pos += 1
    return players
if __name__ == "__main__":
    # Smoke test: parse a locally saved stats page and print the first
    # valid skater.  Requires 'alls13.txt' alongside this script.
    text = open('alls13.txt').read().split()
    s = parse_stat_schema(text)
    pl = create_skater_list(text, s)
    print pl[0]
|
UTF-8
|
Python
| false | false | 2,013 |
4,277,787,439,683 |
ad9712f9b20424b2c06db7d65c13629fbc66ba0a
|
79bc38bf9be87ed2eec7c764d91b61da2061b2c0
|
/gmbcg/python_version/colorMd.py
|
436415c58701601f43c3372e023f48c561d8ca5b
|
[] |
no_license
|
jgbrainstorm/pyjh
|
https://github.com/jgbrainstorm/pyjh
|
a70a8bc2acf90c5e0f3d9867f37677859c6cddad
|
7f4e840623d15df7c9ab6056451847e2a61bafbc
|
refs/heads/master
| 2016-09-05T17:44:38.198646 | 2013-01-15T18:17:31 | 2013-01-15T18:17:31 | 33,557,344 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
This script calcualte the color model based on the DES mock.
"""
import pyfits as pf
import numpy as np
import pylab as pl
import binplot as bp
# Load the four DES mock truth catalogs.
data0 = pf.getdata('/home/jghao/research/data/des_mock/v3.04/truthCat/DES_Mock_v3.04_Baseline_truth_00.fit')
data1 = pf.getdata('/home/jghao/research/data/des_mock/v3.04/truthCat/DES_Mock_v3.04_Baseline_truth_01.fit')
data2 = pf.getdata('/home/jghao/research/data/des_mock/v3.04/truthCat/DES_Mock_v3.04_Baseline_truth_02.fit')
data3 = pf.getdata('/home/jghao/research/data/des_mock/v3.04/truthCat/DES_Mock_v3.04_Baseline_truth_03.fit')
# Keep only central galaxies (BCG candidates).
bg0 = data0[data0.field('central') == 1]
bg1 = data1[data1.field('central') == 1]
bg2 = data2[data2.field('central') == 1]
bg3 = data3[data3.field('central') == 1]
# Colors from adjacent omag bands; columns appear to be g,r,i,z,y in
# order 0..4 -- TODO confirm against the catalog documentation.
gmr = np.concatenate((bg0.field('omag')[:,0]-bg0.field('omag')[:,1],bg1.field('omag')[:,0]-bg1.field('omag')[:,1],bg2.field('omag')[:,0]-bg2.field('omag')[:,1],bg3.field('omag')[:,0]-bg3.field('omag')[:,1]))
rmi = np.concatenate((bg0.field('omag')[:,1]-bg0.field('omag')[:,2],bg1.field('omag')[:,1]-bg1.field('omag')[:,2],bg2.field('omag')[:,1]-bg2.field('omag')[:,2],bg3.field('omag')[:,1]-bg3.field('omag')[:,2]))
imz = np.concatenate((bg0.field('omag')[:,2]-bg0.field('omag')[:,3],bg1.field('omag')[:,2]-bg1.field('omag')[:,3],bg2.field('omag')[:,2]-bg2.field('omag')[:,3],bg3.field('omag')[:,2]-bg3.field('omag')[:,3]))
zmy = np.concatenate((bg0.field('omag')[:,3]-bg0.field('omag')[:,4],bg1.field('omag')[:,3]-bg1.field('omag')[:,4],bg2.field('omag')[:,3]-bg2.field('omag')[:,4],bg3.field('omag')[:,3]-bg3.field('omag')[:,4]))
z = np.concatenate((bg0.field('z'), bg1.field('z'),bg2.field('z'),bg3.field('z')))
# Redshift slices: each color is fit only where it straddles the 4000A
# break (elementwise * on boolean arrays acts as logical AND).
idxgr = z < 0.4
idxri = (z >=0.4) * (z <0.75)
idxiz = (z >= 0.75) * (z< 1.15)
idxzy = z >= 1.15
# One panel per color: scatter + binned means + linear fit.
pl.figure(figsize=(14,10))
pl.subplot(2,2,1)
resgmr = np.polyfit(z[idxgr],gmr[idxgr],1)
pl.plot(z[idxgr],gmr[idxgr],'b.',alpha=0.5)
bp.bin_scatter(z[idxgr],gmr[idxgr],binsize = 0.05,fmt = 'ro',scatter=True)
pl.plot(z[idxgr],np.polyval(resgmr,z[idxgr]),'r,')
pl.xlabel('z')
pl.ylabel('g - r')
pl.title('slope, intercept:'+str(resgmr))
pl.subplot(2,2,2)
resrmi = np.polyfit(z[idxri],rmi[idxri],1)
pl.plot(z[idxri],rmi[idxri],'b.',alpha=0.5)
bp.bin_scatter(z[idxri],rmi[idxri],binsize = 0.05,fmt = 'ro',scatter=True)
pl.plot(z[idxri],np.polyval(resrmi,z[idxri]),'r,')
pl.xlabel('z')
pl.ylabel('r - i')
pl.title('slope, intercept:'+str(resrmi))
pl.subplot(2,2,3)
resimz = np.polyfit(z[idxiz],imz[idxiz],1)
pl.plot(z[idxiz],imz[idxiz],'b.',alpha=0.5)
bp.bin_scatter(z[idxiz],imz[idxiz],binsize = 0.05,fmt = 'ro',scatter=True)
pl.plot(z[idxiz],np.polyval(resimz,z[idxiz]),'r,')
pl.xlabel('z')
pl.ylabel('i - z')
pl.title('slope, intercept:'+str(resimz))
pl.subplot(2,2,4)
reszmy = np.polyfit(z[idxzy],zmy[idxzy],1)
pl.plot(z[idxzy],zmy[idxzy],'b.',alpha=0.5)
bp.bin_scatter(z[idxzy],zmy[idxzy],binsize = 0.05,fmt = 'ro',scatter=True)
pl.plot(z[idxzy],np.polyval(reszmy,z[idxzy]),'r,')
pl.xlabel('z')
pl.ylabel('z - y')
pl.title('slope, intercept:'+str(reszmy))
pl.savefig('/home/jghao/research/data/des_mock/v3.04/truthCat/des_mockV3.04_color.png')
#-----the inversion -------
# y = a x + b -> x = 1/a * y - b/a
def gmrz(gmr):
    """Invert the fitted g-r color model: redshift from g-r color."""
    slope = 1./2.681
    intercept = 0.665/2.681
    return slope*gmr - intercept
def rmiz(rmi):
    """Invert the fitted r-i color model: redshift from r-i color."""
    slope = 1./2.063
    intercept = (-0.266/2.063)
    return slope*rmi - intercept
def imzz(imz):
    """Invert the fitted i-z color model: redshift from i-z color."""
    slope = 1./1.667
    intercept = (-0.708/1.667)
    return slope*imz - intercept
def zmyz(zmy):
    """Invert the fitted z-y color model: redshift from z-y color."""
    slope = 1./0.192
    intercept = (0.223/0.192)
    return slope * zmy - intercept
# verify ---
# Overplot the hard-coded inverse relations (red) on the mock data
# (blue); agreement indicates the slopes/intercepts above match the
# polyfit results from the model section.
pl.plot(gmr[idxgr],z[idxgr],'b,')
pl.plot(gmr[idxgr],gmrz(gmr[idxgr]),'r,')
pl.plot(rmi[idxri],z[idxri],'b,')
pl.plot(rmi[idxri],rmiz(rmi[idxri]),'r,')
pl.plot(imz[idxiz],z[idxiz],'b,')
pl.plot(imz[idxiz],imzz(imz[idxiz]),'r,')
pl.plot(zmy[idxzy],z[idxzy],'b,')
pl.plot(zmy[idxzy],zmyz(zmy[idxzy]),'r,')
|
UTF-8
|
Python
| false | false | 2,013 |
8,847,632,654,051 |
4e9fc1a73b6bb28f899ed0bafc656a63760afb35
|
880a5e7c669cb8a9fc082644f081ba34059b7cf2
|
/saveTodb.py
|
cbd72418ed1333446f0c83b74a53263a1a69c92f
|
[] |
no_license
|
d7688326/PythonDataAnalysis
|
https://github.com/d7688326/PythonDataAnalysis
|
cd4f2c62d00ecfc97f1730bf691132441aa244e2
|
db026e952cf9d948fe61f367ca502a881b2fcc7a
|
refs/heads/master
| 2020-06-06T07:57:16.210421 | 2014-07-08T07:47:58 | 2014-07-08T07:47:58 | 21,603,640 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Script for reading downloaded website file(.txt) and store data into Mongodb
'''
import os,sys,re,string
import pymongo
# connect to mongodb
connection=pymongo.Connection('localhost',27017)
db=connection.courses
savecourse=db.sixteen_courses
nameGlobal=''
f=open('rating_pages1600.txt')
# read line by line
for line in f:
thislink=line.strip()
# regex for find the data we want from the file
regName = re.compile(r'<h3>\s*<span\s*>(.*)</span\s*>')
regCategory = re.compile(r'<a\shref="/category/\d+/(.*)\.htm"\s>')
regProvider = re.compile(r'<a\s*href="/providers/\d+/.*"\s*>(\w+)</a>')
regFree = re.compile(r'<span\s*class="freecourse"\s*>(.*?)</span>')
regVote = re.compile(r'<span\sclass="votes"\s*id=.*>(\d+)\svotes</span>')
regVValue = re.compile(r'<div\s*class="ratingstar"\s*id=.*data-rating="(.*)"\s*style=.*>')
regSimilar = re.compile(r'<div\s*class="limittitle"\s*>\s*<a.*>(.*)</a></div>')
regBook = re.compile(r'<span\s*class="title">(.*)</span\s*>')
regiTunes = re.compile(r'<h3>iTunes\s*Podcasts</h1>.*<span\s*class="title">(.*)</span>')
try:
# store name into database
nameOfCourse = re.search(regName, line)
if nameOfCourse is not None:
name = str(nameOfCourse.group(1))
name=name.decode('utf-8','ignore')
name=name.encode('utf-8')
nameToAdd={"courseName":name}
savecourse.insert(nameToAdd)
nameGlobal=name
categoryOfCourse = re.search(regCategory, line)
if categoryOfCourse is not None:
cate = str(categoryOfCourse.group(1))
cate = string.replace(cate,'+',' ')
savecourse.update(
{"courseName":nameGlobal},
{"$addToSet": { "catogary": cate } }
)
provider = re.search(regProvider, line)
if provider is not None :
prov = str(provider.group(1))
savecourse.update(
{"courseName":nameGlobal},
{"$set": { "provider": prov } }
)
free=re.search(regFree,line)
if free is not None:
fre=str(free.group(1))
savecourse.update(
{"courseName":nameGlobal},
{"$set": { "free": fre } }
)
votes = re.search(regVote,line)
if votes is not None:
vote=int(votes.group(1))
savecourse.update(
{"courseName":nameGlobal},
{"$set": { "voteCount": vote } }
)
# w.write("vote num:"+vote+"\n")
voteValue=re.search(regVValue,line)
if voteValue is not None:
value=float(voteValue.group(1))
savecourse.update(
{"courseName":nameGlobal},
{"$set": { "voteValue": value } }
)
similar=re.search(regSimilar,line)
if similar is not None:
simi=str(similar.group(1))
simi=simi.decode('utf-8','ignore')
simi=simi.encode('utf-8')
savecourse.update(
{"courseName":nameGlobal},
{"$addToSet": {"SimiCourse":simi} }
)
books=re.search(regBook,line)
if books is not None:
book=str(books.group(1))
savecourse.update(
{"courseName":nameGlobal},
{"$addToSet": {"relatedBooks":book} }
)
w.write("Related books are:"+book+"\n")
except AttributeError as e:
print sys.exc_info()
print 'All done!'
|
UTF-8
|
Python
| false | false | 2,014 |
16,862,041,645,831 |
99025b2a51b55077a1b984496b50dd5560f3b9a7
|
5e83403f4c2dc1d16ba1fea1ccfc1a32cbd42d9a
|
/isourceview.py
|
d109db1f475710df55104f666a2153968d74a316
|
[] |
no_license
|
hokaccha/i-sourceview
|
https://github.com/hokaccha/i-sourceview
|
15842707a9d3791d5b8d706acd027e584c797a3c
|
32387880a0d607ee5655265f1cb6219258ff2bd3
|
refs/heads/master
| 2016-09-08T01:36:24.885804 | 2010-09-26T12:24:57 | 2010-09-26T12:24:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import urllib2
import time
from urlparse import urlparse
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter
from google.appengine.api import memcache
REQUEST_INTERVAL = 3
def request(url):
    """Fetch *url* with per-domain rate limiting.

    At most one request per REQUEST_INTERVAL seconds is allowed for a
    given domain (last-seen times live in memcache); the limit is
    bypassed on the development server.  Returns the response body
    decoded to unicode via encode().  Raises RequestError for invalid
    URLs or when the rate limit is hit.
    """
    domain = urlparse(url).netloc
    if not domain:
        raise RequestError('"%s" is invalid URI' % url)
    now = int(time.time())
    last_seen = memcache.get(domain)
    # Reject only when we saw this domain recently and are not on the
    # dev server; every other path records the new timestamp.
    if last_seen and not is_dev_server() and now - last_seen <= REQUEST_INTERVAL:
        raise RequestError('Too many request same domain.')
    memcache.set(domain, now)
    response = urllib2.urlopen(url)
    body = response.read()
    response.close()
    return encode(body)
def convert_html(html):
    """Syntax-highlight *html* and wrap each output line in a <span>."""
    formatter = HtmlFormatter(nowrap = True)
    highlighted = highlight(html, get_lexer_by_name('html'), formatter)
    return spanize(highlighted)
def get_line_number(html):
    """Return span-wrapped line numbers 1..N for the lines of *html*."""
    count = len(html.split('\n'))
    numbers = '\n'.join(str(n) for n in range(1, count + 1))
    return spanize(numbers)
def spanize(text):
    """Wrap every line of *text* in a <span> element."""
    wrapped = ['<span>' + line + '</span>' for line in text.split('\n')]
    return '\n'.join(wrapped)
def encode(data):
    """Decode *data* by trying common Japanese-web charsets in order.

    Returns the first successful decoding; raises EncodeError when
    every charset fails.
    """
    for charset in ['utf-8', 'shift_jis', 'euc-jp', 'iso2022-jp']:
        try:
            return data.decode(charset)
        except UnicodeError:
            # Narrowed from a bare ``except`` so unrelated errors (e.g.
            # AttributeError on a non-string argument) are not swallowed.
            pass
    raise EncodeError('Encoding failed.')
def is_dev_server():
    """Return True when running on the App Engine development server.

    Uses ``.get`` with a default so the check no longer crashes with
    TypeError when SERVER_SOFTWARE is unset (e.g. under unit tests).
    """
    return os.environ.get('SERVER_SOFTWARE', '')[:11] == 'Development'
class RequestError(Exception):
    """Raised for invalid URLs and per-domain rate-limit violations."""

    def __init__(self, value):
        # Kept on .value (not Exception.args) for backward compatibility.
        self.value = value

    def __str__(self):
        return str(self.value)
class EncodeError(Exception):
    """Raised when a response body cannot be decoded by any known charset."""

    def __init__(self, value):
        # Kept on .value (not Exception.args) for backward compatibility.
        self.value = value

    def __str__(self):
        return str(self.value)
|
UTF-8
|
Python
| false | false | 2,010 |
9,380,208,623,541 |
c57f4fa2c3d99c8a5fc9383a7f502d67aaa869bc
|
6530e0e09af09c31cf7c1d17370a6ae8b0e57a15
|
/3-datastructures/stack.py
|
14cdc0085940415254e5482fd88fcc2d98fe109a
|
[] |
no_license
|
jenphillips/pythonds
|
https://github.com/jenphillips/pythonds
|
475b358fa92e75e425690ec5c0d175139ba2179c
|
2323985acd3f638fdf8247e5561aca1e0abcc591
|
refs/heads/master
| 2022-10-18T00:51:52.264016 | 2014-10-24T15:31:02 | 2014-10-24T15:31:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Stack():
    """A LIFO stack backed by a Python list (top of stack = end of list).

    Supports push, pop, peek, isEmpty and size.
    """

    def __init__(self):
        self.items = []

    def isEmpty(self):
        """True when the stack holds no items."""
        return not self.items

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item."""
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it."""
        return self.items[len(self.items) - 1]

    def size(self):
        """Return the number of items on the stack."""
        return len(self.items)
def revstring(mystr):
    ''' Use a stack to reverse a string. '''
    s = Stack()
    for c in mystr:
        s.push(c)
    reversed_str = ''
    # Idiomatic emptiness test (was ``s.isEmpty() is False`` -- an
    # identity comparison against a bool literal).
    while not s.isEmpty():
        reversed_str += s.pop()
    return reversed_str
def parenChecker(parenStr):
    ''' Check whether a string of parentheses is balanced. '''
    s = Stack()
    for p in parenStr:
        if p == '(':
            # Push to stack to wait for a closing symbol
            s.push(p)
        elif p == ')':
            # Explicit emptiness test replaces the former bare
            # ``except:`` around pop(), which would also have hidden
            # unrelated bugs.
            if s.isEmpty():
                return 'Not balanced! Too many close parens.'
            s.pop()
    if s.isEmpty():
        return 'Parentheses are balanced'
    else:
        return 'Not balanced! Too many open parens.'
def symbolChecker(symbolString):
    """Return True when all (), [], {} pairs in *symbolString* balance."""
    s = Stack()
    balanced = True
    index = 0
    while index < len(symbolString) and balanced:
        symbol = symbolString[index]
        if symbol in "([{":
            s.push(symbol)
        else:
            if s.isEmpty():
                # Closing symbol with nothing open.
                balanced = False
            else:
                top = s.pop()
                if not matches(top, symbol):
                    balanced = False
        index = index + 1
    # Simplified from an if/else that returned the literal booleans:
    # balanced so far AND nothing left unclosed.
    return balanced and s.isEmpty()
def matches(open, close):
    """True when *open* and *close* are the same bracket type.

    (Parameter names kept for interface compatibility even though they
    shadow builtins; all call sites pass positionally.)
    """
    # Removed a leftover debug print that fired on every comparison.
    openers = "([{"
    closers = ")]}"
    return openers.index(open) == closers.index(close)
def divideBy2(decNumber):
    ''' Convert a positive base 10 integer to a binary number using
    divide by 2 algorithm. '''
    remainderStack = Stack()
    while decNumber > 0:
        rem = decNumber % 2
        remainderStack.push(rem)
        decNumber = decNumber // 2
    binaryStr = ''
    while not remainderStack.isEmpty():
        # Bug fix: remainders are ints and must be stringified before
        # concatenation (previously raised TypeError on first pop).
        binaryStr += str(remainderStack.pop())
    return binaryStr
def baseConverter(decNumber, base):
    ''' Convert a positive base 10 integer to a string in *base* (2..16)
    using the divide-by-base algorithm. '''
    digits = "0123456789ABCDEF"
    remainderStack = Stack()
    while decNumber > 0:
        rem = decNumber % base
        remainderStack.push(rem)
        decNumber = decNumber // base
    newStr = ''
    while not remainderStack.isEmpty():
        # Bug fix: accumulated into (and returned) the undefined name
        # ``binaryStr``; now uses the local initialized above.
        newStr += digits[remainderStack.pop()]
    return newStr
def infixToPostfix(infixexpr):
    """Convert a whitespace-tokenized infix expression to postfix.

    Shunting-yard algorithm: operands pass straight through, operators
    wait on a stack until an operator of lower precedence (or a
    parenthesis boundary) flushes them.  Operands must be single
    uppercase letters or single digits.
    NOTE(review): the ``>=`` pop condition makes every operator
    left-associative, including ``^`` -- confirm that is intended.
    """
    prec = {}
    prec["^"] = 4
    prec["*"] = 3
    prec["/"] = 3
    prec["+"] = 2
    prec["-"] = 2
    prec["("] = 1
    opStack = Stack()
    postfixList = []
    tokenList = infixexpr.split()
    for token in tokenList:
        if token in "ABCDEFGHIJKLMNOPQRSTUVWXYZ" or token in "0123456789":
            # Operand: emit immediately.
            postfixList.append(token)
        elif token == '(':
            opStack.push(token)
        elif token == ')':
            # Flush operators back to the matching '('.
            topToken = opStack.pop()
            while topToken != '(':
                postfixList.append(topToken)
                topToken = opStack.pop()
        else:
            # Operator: pop anything of equal or higher precedence first.
            while (not opStack.isEmpty()) and \
               (prec[opStack.peek()] >= prec[token]):
                postfixList.append(opStack.pop())
            opStack.push(token)
    # Drain any remaining operators.
    while not opStack.isEmpty():
        postfixList.append(opStack.pop())
    return " ".join(postfixList)
# print(infixToPostfix("A * B + C * D"))
# print(infixToPostfix("( A + B ) * C - ( D - E ) * ( F + G )"))
# print(infixToPostfix("5 * 3 ^ (4 - 2)"))
|
UTF-8
|
Python
| false | false | 2,014 |
18,116,172,078,208 |
af244f84cae6a1dcef68a6eb2e77a4c522108139
|
2c515c9063f42d095110f125d19d0edca5a53e1b
|
/MarleenDetector/src/marleendetector/controller.py
|
a1cd2aa5f62c5ff37612c96be0cb8ecf186186ac
|
[] |
no_license
|
engmwm/marleendetector
|
https://github.com/engmwm/marleendetector
|
5b5fcac05ff293d5422279313d17cab176eebb8a
|
44589e704fa861b9cea1a5b16e3cf890bd5d2aad
|
refs/heads/master
| 2016-08-11T07:30:00.417489 | 2009-06-30T14:34:32 | 2009-06-30T14:34:32 | 50,950,657 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Controller
# main-method:
# 1. Download a sequence of pictures from the web
# 2. Detect faces in these images
# 3. Normalize the face-images
import logging
from marleendetector.fetcher import *
from marleendetector.detectmanager import *
from marleendetector.gallerymanager import *
from marleendetector.normalizer import *
from marleendetector.faces.faceorigins import *
class Controller:
    # Pipeline driver: fetch images -> detect faces -> save face crops
    # (with provenance recorded in FaceOriginsDB) -> normalize crops.

    def __init__(self, downloadImages=False):
        """
        Initializes the Controller. The Controller downloads images, detects faces,
        saves the faces to file and normalizes the images
        @param downloadImages: if downloadImages is True the images will be (re)downloaded from the web,
        if False the images are presumed to be already saved on disk in the library dir
        """
        self.downloadImages = downloadImages
        self.gallerymanager = GalleryManager()
        self.facedataDB = FaceOriginsDB()
        print "Done creating"

    # fetch images
    def __fetchImages(self, address, prefix, start, end, fetch=True):
        """
        Returns a list of image locations using a %-formatted url and range defined by start to end+1
        returns [(index, org_image_name, local_image_id, local_path), ...]
        @param address: address is a %-formatted string, it will be used to generate a list of URLs ranging from start to end+1
        e.g.: http://myholiday.mysite.com/pictures/img_%04d.jpg
        @type address: %-formatted String with only one %-item which should be a decimal
        @param prefix: a prefix to distinguish between different image sources
        @type prefix: string
        @param start: the first number of the range
        @type start: integer
        @param end: the last number of the range
        @type end: integer
        @param fetch: if true, (re)-download all the images in the url-range
        """
        #address = "http://zellamsee.boereburg.nl/ZellamSee2008stapcamerafotos/stamcamera_0001.jpg"
        #address = "http://zellamsee.boereburg.nl/ZellamSee2008stapcamerafotos/stamcamera_%04d.jpg"
        fetcher = ImageFetcher(address, prefix, start, end, output_dir=self.gallerymanager.getLibrary(prefix))
        if fetch:
            image_list = fetcher.fetchImages() # redownload the images
        else:
            image_list = fetcher.getOutputImages() # generate the filename list
        return image_list

    def __extractFaces(self, image_location, prefix, id):
        """
        Detect faces in the image and save them separately
        image_location is the location of the image file on disk
        prefix is the name of this image collection
        id is the unique name of this image
        """
        # NOTE(review): parameter ``id`` shadows the builtin.
        try:
            man = FaceDetectorManager(image_location, prefix, id)
            man.startDetection()
            #man.getFaces()
            calc_super_face=False # return all the face boxes (faces will be detected multiple times with different classifiers)
            man.getFaces(super_faces=calc_super_face)
            faces_data = man.saveFacesToFile(super_faces=calc_super_face)
            # faces_data = [face_data, face_data, ...]
            # face_data = (org_image_id, face_image_id, face_rectangle)
            #man.showResult()
            return faces_data
        except Exception, inst:
            # Best-effort: log and skip this image (implicitly returns
            # None; the caller checks for that).
            print type(inst)     # the exception instance
            print inst.args      # arguments stored in .args
            print inst           # __str__ allows args to printed directly
            print "Exception while detecting images! Skipping image..."

    def main(self, fetchData):
        """
        1. Download a sequence of pictures from the web and returns a list with image file_locations
        2. Detect faces in these images
        3. Normalize the face-images
        """
        address, start, end, prefix = fetchData
        print "Fetching images..."
        downloadImages = self.downloadImages # only use this when the images are already downloaded
        image_list = self.__fetchImages(address, prefix, start, end, fetch=downloadImages) # saves the images in GALLERY_LIBRARY
        # image_list = [(image_url, filename, filepath), ... ]
        # NOTE(review): the comment above describes 3-tuples, yet both
        # loops below unpack 4 values (with different names) -- verify
        # against ImageFetcher's actual return shape.
        print "Done fetching images..."
        for index, org_image_name, local_image_id, local_path in image_list:
            self.facedataDB.addImageOrigin(org_image_name, local_image_id, local_path)
        all_face_data = []
        print "Extracting faces..." # saves the faces in GALLERY_CROPPED
        for index, image_url, filename, filepath in image_list:
            id = "%04d" % (index, )
            print filepath
            faces_data = self.__extractFaces(filepath, prefix, prefix + "_" + id)
            # faces_data = [face_data, face_data, ...]
            # face_data = (org_image_id, face_image_id, face_rectangle)
            if faces_data is not None:
                all_face_data.extend(faces_data)
        print "Done extracting faces..."
        print all_face_data
        for face_data in all_face_data:
            (org_image_id, face_image_id, face_rectangle) = face_data
            (ul, dr) = face_rectangle
            self.facedataDB.addFaceData(face_data)
        #return
        print "Normalizing faces..." # save normalized faces in GALLERY_NORM
        normalizer = FaceNormalizer(prefix)
        normalizer.normalizeFaces()
        print "Done normalizing faces..."
if __name__ == "__main__":
    # run main program: download photo set "BARL" and run the full
    # fetch -> detect -> normalize pipeline over it.
    controller = Controller()
    controller.downloadImages = True # download the images
    # %-formatted url, only one format variable is allowed
    address = "http://www.boereburg.nl/BZBALLEREMMENLOS/bzb23012009_%03d.jpg"
    #address = "http://zellamsee.boereburg.nl/ZellamSee2008stapcamerafotos/stamcamera_%04d.jpg"
    start = 0 # images-url range should start with this number
    end = 100 # last number of the images-url range
    prefix = "BARL" # unique prefix for this photo-set
    fetchData = (address, start, end, prefix)
    controller.main(fetchData)
|
UTF-8
|
Python
| false | false | 2,009 |
8,813,272,914,090 |
88bc4bbf40fd7a323169fc9315d6dd00e5403765
|
3be8da1d39bef1e09e4c8e7a6b736d7fc74a3c0f
|
/webserver/opentrain/backup.py
|
796ae35ecac7864ef5da4a3a48860ef169ca1171
|
[
"BSD-3-Clause"
] |
permissive
|
amitzini/OpenTrain
|
https://github.com/amitzini/OpenTrain
|
bbe5b2fc1b1b118931f7aac94667083c1b5cf4da
|
25ff81df668a9eba1c4369f9a789e34c60b44096
|
refs/heads/master
| 2020-04-01T22:36:01.131143 | 2014-10-27T22:07:40 | 2014-10-27T22:07:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
# One-shot maintenance script: dump all reports to a gzipped backup file.
import os
# The Django settings module must be configured before importing any
# Django-backed application code.
os.environ['DJANGO_SETTINGS_MODULE']='opentrain.settings'
import reports.logic
reports.logic.backup_reports('/tmp/backup.gz')
|
UTF-8
|
Python
| false | false | 2,014 |
10,557,029,617,655 |
ca41ecdba665971372568c85627ff68b7c709dfb
|
9eb82b8d4f4f74f929aa191a139215a91a4244f3
|
/cgi-bin/apertoind.py
|
f47b59a6f6b0d00c3b7c6952063b748a5a17eafc
|
[] |
no_license
|
ashmikuz/Techweb12
|
https://github.com/ashmikuz/Techweb12
|
c3cd33b9115ca9d3cb83f42f619dc0675a230607
|
1b029c041add35686568f2a9e47433e6ab62d7e9
|
refs/heads/master
| 2020-12-24T14:44:53.618365 | 2012-06-29T12:33:43 | 2012-06-29T12:33:43 | 3,639,464 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import cgi
import error
import codecs
import trasforma
import os
import urllib2
import operator
from trasforma import metadata
from aggrutils import getaggrurl
# Module-level accumulators shared across helpers (this is a one-shot
# CGI script, so module state lives for a single request).
finallist=[]
loclist=[]
# Boolean combinators for the date-open checks; operator.and_/or_ are
# the bitwise forms but act as logical AND/OR on bool operands.
ops = {
        "and": operator.and_,
        "or": operator.or_,
        "not": operator.not_
}
def matchesoperator(datelist, operatore, location):
    """Combine per-date open checks for *location* with a boolean operator.

    operatore: "and" (open on all dates), "or" (open on any),
    "not" (open on none), "xor" (open on exactly one date).
    Returns the HTTP-style status code 406 for an unknown operator.
    """
    if operatore not in ("and", "or", "not", "xor"):
        return 406
    flags = [location.aperto(date) for date in datelist]
    if operatore == "and":
        return all(flags)
    if operatore == "or":
        return any(flags)
    if operatore == "not":
        return not any(flags)
    # Bug fix: "xor" previously could only ever return False -- the
    # success case (exactly one open date) fell through to a result
    # that was initialized False and never updated.
    return sum(1 for f in flags if f) == 1
def getopened(dates, operator, loclist):
    """Append to (and return) the module-level *finallist* every location
    in *loclist* matching *operator* over the '/'-separated *dates*.

    Note: *operator* here is the combinator name string and shadows the
    imported ``operator`` module inside this function (call sites pass
    it positionally).
    """
    datelist = dates.split("/")
    # (removed an unused ``tobeadded`` flag left over from an earlier
    # implementation)
    for location in loclist:
        # NOTE(review): a 406 from matchesoperator is truthy, so an
        # unknown operator would append every location instead of
        # propagating the error -- main()'s isinstance check on the
        # returned list never fires; verify intended behavior.
        if matchesoperator(datelist, operator, location):
            finallist.append(location)
    return finallist
def main():
    """CGI entry point.

    Reads query parameters: aggr (aggregation name), operator
    (and/or/xor/not) and dates ('/'-separated list).  Fetches the
    aggregation resource via content negotiation, filters its locations
    by opening dates, and renders the result in the client's preferred
    format.  Errors are reported as HTTP status pages via the error
    module.
    """
    fs = cgi.FieldStorage()
    aggr=fs.getvalue("aggr")
    operator=fs.getvalue("operator")
    dates=fs.getvalue("dates")
    # All three query parameters are mandatory.
    if ((not aggr) or (not operator) or (not dates)):
        error.errhttp("406")
        return
    urlaggr=getaggrurl(aggr)
    # getaggrurl signals an unknown aggregation with the string "404".
    if(urlaggr=="404"):
        error.errhttp("404")
        return
    aggr=aggr.lower()
    operator=operator.lower()
    dates=dates.lower()
    req=urllib2.Request(url=urlaggr)
    # Content negotiation: any of the four supported serializations.
    req.add_header('Accept', 'application/xml, text/turtle, text/csv, application/json')
    response = urllib2.urlopen(req)
    restype= response.info().gettype()
    resource=response.read()
    response.close()
    # Parse the fetched resource into loclist with the parser matching its
    # media type; each parser also returns the document metadata.
    if(restype=="application/xml"):
        meta=trasforma.locationfromxml(resource,loclist)
    elif(restype=="text/turtle"):
        meta=trasforma.locationfromturtle(resource,loclist)
    elif(restype=="text/csv"):
        meta=trasforma.locationfromcsv(resource,loclist)
    elif(restype=="application/json"):
        meta=trasforma.locationfromjson(resource,loclist)
    else:
        error.errhttp("406")
        return
    finallist=getopened(dates, operator,loclist)
    # getopened may return an int error code instead of a list.
    if(isinstance(finallist, ( int, long ))):
        error.errcode(str(finallist))
        return
    # Render honoring the client's Accept header.
    trasforma.formatresult(os.environ["HTTP_ACCEPT"], finallist, meta)
# CGI scripts execute on import: run immediately.
main()
|
UTF-8
|
Python
| false | false | 2,012 |
5,961,414,646,973 |
d72a9599eb41ebbad952946a870345b12bfa029b
|
e253986495a6e8ec9644edcee2154823df8628bb
|
/docHelper.py
|
a3c0cc7c2eff72e5ee5635f9709d9395da915ee2
|
[] |
no_license
|
aasa11/autoperftest
|
https://github.com/aasa11/autoperftest
|
8623a1bc5899a7a484008663406b6d480b9af0ef
|
03149b1fb65d241987afe41802af4af893115277
|
refs/heads/master
| 2016-09-10T18:50:50.361457 | 2013-10-17T07:21:39 | 2013-10-17T07:21:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# !/usr/bin/
# coding=gbk
'''
Created on 2013Äê10ÔÂ10ÈÕ
@summary:
@author: huxiufeng
'''
from win32com.client import Dispatch
import os
class docHelper:
    '''
    Convenience wrapper around Microsoft Word automated through COM
    (win32com Dispatch).  Python 2 / Windows only.
    '''
    def __init__(self, visible=False ):
        # Launch (or attach to) a Word application instance.
        self.wdApp = Dispatch('Word.Application')
        self.wdApp.Visible = visible
    def open(self, filename=None):
        """Open `filename`, creating it if missing; with no filename start
        an untitled blank document.  Returns False only when an existing
        file fails to open."""
        if filename:
            self.filename=filename
            if os.path.exists(self.filename):
                try:
                    self.wdApp.Documents.Open(self.filename)
                except Exception , e:
                    print "open err£º "
                    self.printerr(e)
                    return False
            else:
                # File does not exist yet: create and save an empty one.
                self.wdApp.Documents.Add()
                self.save(self.filename,True)
        else:
            self.wdApp.Documents.Add()
            self.filename='Untitle'
        return True
    def visible(self, visible=True):
        # Show or hide the Word window.
        self.wdApp.Visible = visible
    def find(self, text, MatchWildcards=False):
        '''
        Find `text` in the active document and return the selected match
        (empty selection text when not found).
        '''
        find = self.wdApp.Selection.Find
        find.ClearFormatting()
        find.Execute(text, False, False, MatchWildcards, False, False, True, 0)
        return self.wdApp.Selection.Text
    def replaceAll(self, oldStr, newStr):
        '''
        Replace every occurrence of oldStr with newStr in the document.
        '''
        find = self.wdApp.Selection.Find
        find.ClearFormatting()
        find.Replacement.ClearFormatting()
        # Last args: wrap=1, replace-with=newStr, wdReplaceAll=2.
        find.Execute(oldStr, False, False, False, False, False, True, 1, True, newStr, 2)
    def updateToc(self):
        # Refresh every table of contents in the active document.
        for tocitem in self.wdApp.ActiveDocument.TablesOfContents:
            tocitem.Update()
    def save(self, filename = None, delete_existing=True):
        '''
        Save the active document; with a filename, Save-As (optionally
        deleting an existing file first).
        '''
        if filename is None:
            self.wdApp.ActiveDocument.Save()
        else:
            if delete_existing and os.path.exists(filename):
                os.remove(filename)
            self.wdApp.ActiveDocument.SaveAs(FileName=filename)
    def printerr(self, e):
        # Pretty-print a COM exception's argument tuple (nested sequences
        # get an extra level of indentation).
        for v in e:
            #print type (v)
            if type(v) == type([]) or type(v) == type((1,2)):
                for vv in v:
                    print "\t\t", vv
            else:
                print "\t", v
    def close(self):
        '''
        Close all open documents and drop the Word application reference.
        '''
        try:
            #self.wdApp.ActiveDocument.Close()
            self.wdApp.Documents.Close()
        except Exception, e:
            print "close err£º "
            self.printerr(e)
        del self.wdApp
    def quit(self):
        '''
        Quit Word entirely.
        '''
        return self.wdApp.Quit()
    def AddTable(self, lsttable):
        """Insert lsttable (list of rows) as a Word table at the current
        selection; column count is the widest row."""
        row = len(lsttable)
        col = 0
        for lst in lsttable:
            if len(lst) > col:
                col = len(lst)
        #print row, col
        tb = self.wdApp.ActiveDocument.Tables.Add(self.wdApp.Selection.Range, row, col)
        tb.Rows[0].Cells[0].Range.Text = "Table0"
        '''
        for l in range(row-2):
            self.wdApp.ActiveDocument.Tables[tnum].Rows.Add()
        for l in range(col-2):
            self.wdApp.ActiveDocument.Tables[tnum].Columns.Add()
        '''
        #self.wdApp.ActiveDocument.Tables.Add(self.wdApp.Selection.Range, row+1, col+1)
        i = 0
        for lst in lsttable:
            j = 0
            for l in lst :
                tb.Rows[i].Cells[j].Range.Text = str(l)
                j = j+1
            i = i+1
        tb.AutoFormat(ApplyBorders=True)
    def goTop(self):
        '''return to the head of file'''
        #Selection.HomeKey unit:=wdStory
        #self.wdApp.Selection.HomeKey(wdStory)
        # GoTo(What=11) == wdGoToPercent; then select the document.
        self.wdApp.ActiveDocument.GoTo(What=11)
        self.wdApp.ActiveDocument.Select()
#        self.wdApp.ActiveDocument.Range(
#                Start = 0,
#                End = 0
#            ).Select()
    def goEnd(self):
        # Move the selection to the very end of the document content.
        self.wdApp.ActiveDocument.Range(
                Start = self.wdApp.ActiveDocument.Content.End-1,
                End = self.wdApp.ActiveDocument.Content.End-1
            ).Select()
    def AddPic(self,oldstr,filename, height = 150, width = 250):
        """Replace marker text `oldstr` with the picture at `filename`,
        scaled to the given height/width (points)."""
        self.goTop()
        strs = self.find(oldstr)
        if strs == oldstr and os.path.isfile(filename):
            shp = self.wdApp.Selection.InlineShapes.AddPicture(filename)
            shp.Height = height
            shp.Width =width
    def do_paste(self,tag = None):
        """Paste clipboard contents at the current selection, or at marker
        `tag` when given.  Returns False on COM failure."""
        try:
            if tag is None :
                self.wdApp.Selection.Paste()
            else:
                self.goTop()
                if self.find(tag) == tag:
                    self.wdApp.Selection.Paste()
                else:
                    print "do not find tag: ", tag, " in the doc"
            return True
        except Exception, e:
            print "paste err: "
            self.printerr(e)
            return False
#----------------------It is a split line--------------------------------------
def main():
    """Manual smoke test: open res/doc_demo.doc in a visible Word
    instance, paste the clipboard into it, and save."""
    docpath = r'res/doc_demo.doc'
    abspath = os.path.abspath(docpath)
    doc = docHelper(True)
    if not doc.open(abspath):
        print "open ", abspath, " error"
        return
    # Sample table data kept for experimenting with AddTable below.
    datalst = [["col1", "col2", "col3"], [1,2,3], [3,4,5],[5,6,7],[9,8,7],[6,5,4]]
#    doc.do_paste()
#    doc.goEnd()
#    doc.AddTable(datalst)
#    doc.goTop()
#    doc.do_paste()
#    doc.goEnd()
    doc.do_paste()
    doc.save()
    #doc.close()
#----------------------It is a split line--------------------------------------
if __name__ == "__main__":
    main()
    print "It's ok"
|
WINDOWS-1252
|
Python
| false | false | 2,013 |
15,393,162,813,909 |
be882e84bdd0faf03f5d6cb8cb7017a2fa4ec6f0
|
93c08c736e3352295270aae1962149f4c32c9c40
|
/bin/health_check.py
|
3db2018cd887b05e285ea1e05f63c5b02c36efa1
|
[] |
no_license
|
KarthikMasi/pyxnat_notes
|
https://github.com/KarthikMasi/pyxnat_notes
|
6cc7a55fcc2900db742409ebc40281a5128ee7a5
|
a6499ea94a5a898601a2fef801b4cc2ddf56ed9d
|
refs/heads/master
| 2021-05-11T08:27:33.507002 | 2012-06-22T16:02:25 | 2012-06-22T16:02:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__license__ = 'BSD 3-Clause'
import os
import subprocess as sp
import time
from glob import glob
from xnat.mail import mail
# Simple way to connect to XNAT
from xnat.util import xnat
def dump():
    """Dump the database into the rolling weekly backup file.

    One file per weekday name lives under /data/dumps/; today's file is
    overwritten on each run.  Returns (dump_path, exit_code).
    """
    weekday = time.strftime('%A').lower()
    dump_file = os.path.join('/', 'data', 'dumps', weekday + '.pg_dump')
    exit_code = sp.Popen(['pg_dump', '-f', dump_file]).wait()
    return dump_file, exit_code
def dh():
    """Report disk usage via `df -h`; returns (output, exit_code)."""
    return run_cmd('df -h')
def run_cmd(cmd):
    """Run a shell command string.

    Returns a pair (out, ec): combined stdout/stderr, and an exit code
    flattened to 0 on success or 1 on any non-zero exit.
    """
    try:
        captured = sp.check_output(cmd.split(), stderr=sp.STDOUT)
    except sp.CalledProcessError as err:
        return err.output, 1
    return captured, 0
def tail(fp, n=10):
    """Return (last `n` lines of file `fp`, exit code) via tail(1)."""
    return run_cmd('tail -n %d %s' % (n, fp))
if __name__ == '__main__':
    """ Check health of XNAT system """
    # Accumulate the whole report in `body`, then mail it once at the end.
    body = ''
    body += "Running XNAT health checks...\n"
    body += "Began at %s\n\n" % time.strftime('%H:%M:%S')
    """ DB Dump """
    body += "Dumping database....\n"
    df, error = dump()
    if not error:
        body += "Successful database dump to %s\n\n" % df
    else:
        body += "Error dumping to %s\n\n" % df
    """ End DB dump """
    """ Disk usage """
    body += "Disk usage...\n"
    out, error = dh()
    body += out
    body += "End disk usage \n\n"
    """ DCM Relay Error files """
    # Include the tail of every DICOM relay error log.
    body += "Begin Error files...\n"
    for err in glob(os.path.expanduser('~/dcmrelay/*.err')):
        text_lines = []
        text_lines.append("Begin %s..." % err)
        out, _ = tail(err, n=20)
        text_lines.append(out)
        text_lines.append("End %s.\n\n" % err)
        body += '\n'.join(text_lines)
    body += "End Error files.\n"
    """ Xnat statistics """
    # High-level counts pulled from the XNAT REST API.
    body += "XNAT system statistics...\n"
    x = xnat()
    body += "# users: %d\n" % len(x.manage.users())
    all_projs = x.select.projects().get()
    body += "# projects: %d\n" % len(all_projs)
    # There's probably a better way to do this, but for now...
    # (one REST round-trip per project to count sessions)
    all_sessions = []
    for p in all_projs:
        proj = x.select.project(p)
        all_sessions.extend(proj.experiments().get())
    body += "# sessions: %d\n" % len(all_sessions)
    # We could go deeper into database if we wanted...
    body += "Finish at %s" % time.strftime('%H:%M:%S')
    to = ['[email protected]', '[email protected]']
    sub = "XNAT Health Check %s" % time.strftime('%a %d %b %Y %H:%M')
    mail(to=to, subject=sub, body=body)
|
UTF-8
|
Python
| false | false | 2,012 |
4,526,895,535,605 |
5c27284ee2fed595b5d3b73bff00a1c4fbc89ee6
|
fa049ac1a9ef9cfe746a7e75127bf3e918833c8f
|
/volta/channels/views.py
|
9bb2a8af76d08a9efc923fb40c94a3589da6d53b
|
[
"GPL-3.0-only"
] |
non_permissive
|
bodacea/crisisleadtools
|
https://github.com/bodacea/crisisleadtools
|
6c7a307d986a2f41908fa33954af9b094b3e4887
|
ee48be1783a797d087232e11fca563c4926cd6c0
|
refs/heads/master
| 2021-01-24T17:48:54.071525 | 2014-08-15T21:14:09 | 2014-08-15T21:14:09 | 23,002,714 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#FIXIT: To build:
#Upload Skypegroup user list
#Upload Ning profile list
#Upload Googlegroup profile list
#Upload Skypegroup chat: cut and paste
#Upload Skypegroup chat: downloaded from Skype
#Download Skypegroup chat from Skype
#Send Skype contact request to everyone on a list
#Create Skypechat
#Invite everyone on a list into a Skypechat
from django import forms
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.utils.encoding import smart_unicode
from django.contrib.auth.decorators import login_required
import csv
from skypechatmanager import *
from skypechatconnector import *
from models import Channel, Googlemember, Ningmember, Skypemember
from groups.models import Group
class SkypechatCreateForm(forms.Form):
    """Form for creating a Skype group chat and inviting members.

    Members come from the comma-separated skypeidlist field; the file
    upload alternative is accepted but not yet used by the view.
    """
    chatname = forms.CharField(max_length=40)
    skypeidlist = forms.CharField()
    skypeidsfile = forms.FileField(required=False)
    invitemessage = forms.CharField()
class SkypechatFileForm(forms.Form):
    """Form for summarising a Skype chat: either upload a transcript file
    OR name a live Skype channel."""
    skypechatfile = forms.FileField(required=False) #OR
    skypechannel = forms.CharField(required=False)
class NewChannelForm(forms.Form):
    """Form for registering a channel; optionally upload a members CSV
    and/or create the remote channel (Skype only for now)."""
    channelname = forms.CharField()
    channeltype = forms.ChoiceField(choices=Channel.CHANNEL_TYPES, initial='S')
    membersfile = forms.FileField(required=False) #
    remoteexists = forms.BooleanField(required=False) #OR
    createremote = forms.BooleanField(required=False) #Skype only for now
    remotename = forms.CharField(required=False) #Name in remote channel
class FriendForm(forms.Form):
    """Form for sending Skype contact requests to a comma-separated list
    of ids (file upload accepted but currently ignored by the view)."""
    idlist = forms.CharField()
    message = forms.CharField()
    idsfile = forms.FileField(required=False) #
def write_tmp_file(uploadedfile):
    """Copy an uploaded file's bytes into the local 'media/' directory.

    uploadedfile: Django UploadedFile-like object (has .name and .file).
    Returns the relative path of the written copy as a str.  The target
    directory must already exist.
    """
    new_file_name = 'media/' + uploadedfile.name
    # BUGFIX: context manager guarantees the handle is closed even when the
    # write raises (the original leaked the descriptor on failure).
    with open(new_file_name, 'wb+') as destination:
        destination.write(uploadedfile.file.read())
    return str(new_file_name)
def get_list_from_uploaded_file(uploadedfile, column=0, header=False,
                                lowercase=False):
    """Extract one column from an uploaded CSV file.

    uploadedfile: Django UploadedFile-like object; its .file attribute is
        fed to csv.reader.
    column: 0-based index of the column to extract.  BUGFIX: the default
        used to be the string "0", which broke the length comparison and
        row indexing below.
    header: when True, skip the first row.
    lowercase: when True, lower-case every extracted string value.

    Rows too short to contain `column` are skipped.  Returns a list of
    cell values.
    """
    csvin = csv.reader(uploadedfile.file)
    if header:
        next(csvin)  # discard the header row (portable spelling of .next())
    outlist = []
    for row in csvin:
        if len(row) <= column:
            continue  # row has no such column
        item = row[column]
        if lowercase and isinstance(item, str):
            item = item.lower()
        outlist.append(item)
    return outlist
#Add members to a channel
def add_members_to_channel(memberslist, channel):
    """Create a membership row for every id in memberslist on `channel`.

    Dispatches on channel.network: 'G' Google group, 'N' Ning, 'S' Skype.
    Full names are unknown at this point, hence the "unknown" stub.
    Returns the channel's complete member queryset afterwards.
    NOTE(review): any other network code falls through and returns None.
    """
    if channel.network == "G":
        for memberid in memberslist:
            dbaseid = Googlemember.objects.create(
                channel = channel,
                userid_in_channel = memberid,
                stated_fullname = "unknown"
            )
        channel_members = Googlemember.objects.filter(channel = channel)
    elif channel.network == "N":
        for memberid in memberslist:
            dbaseid = Ningmember.objects.create(
                channel = channel,
                userid_in_channel = memberid,
                stated_fullname = "unknown"
            )
        channel_members = Ningmember.objects.filter(channel = channel)
    elif channel.network == "S":
        for memberid in memberslist:
            dbaseid = Skypemember.objects.create(
                channel = channel,
                userid_in_channel = memberid,
                stated_fullname = "unknown"
            )
        channel_members = Skypemember.objects.filter(channel = channel)
    return channel_members
@login_required
def index(request):
    # Landing page for the channels app.
    return render_to_response('channels/index.html',
                              {},
                              context_instance=RequestContext(request))
#Doesn't need login
def sandbox(request):
    # Experimentation page; deliberately available without login.
    return render_to_response('channels/sandbox.html',
                              {},
                              context_instance=RequestContext(request))
@login_required
def viewchannel(request):
    # NOTE(review): renders the template with no context data, unlike the
    # addchannel view which passes 'data' — confirm the template tolerates it.
    return render_to_response('channels/viewchannel.html',
                              {},
                              context_instance=RequestContext(request))
@login_required
def addchannel(request):
    """GET: show the new-channel form.  POST: create the channel (and
    optionally the remote Skype chat), then render the channel view."""
    if request.method == 'POST':
        form = NewChannelForm(request.POST, request.FILES)
        if form.is_valid():
            channelname = form.cleaned_data['channelname']
            channeltype = form.cleaned_data['channeltype']
            remoteexists = form.cleaned_data['remoteexists']
            remotename = form.cleaned_data['remotename']
            createremote = form.cleaned_data['createremote']
            # Optional CSV of member ids: column 0, header skipped.
            if request.FILES.has_key('membersfile'):
                membersfile = request.FILES['membersfile']
                memberslist = get_list_from_uploaded_file(
                    membersfile, 0, True, False)
            else:
                memberslist = []
            #Add channel to the database, in group named "None".
            try:
                none_group = Group.objects.get(name="None")
            except Group.DoesNotExist:
                # First use: lazily create the catch-all group.
                none_group = Group.objects.create(name="None", parent=None,
                                                  owner=request.user)
            channel = Channel.objects.create(
                owner = request.user,
                group = none_group,
                name = channelname,
                network = channeltype,
                remote_name = remotename)
            #Add members to the channel
            channel_members = add_members_to_channel(memberslist, channel)
            #Create remote channel if possible and requested
            #Fixit: add code for googlegroup and ning groups
            if createremote == True:
                print("creating remote channel")
                if channeltype == "S":
                    s = SkypechatConnector()
                    groupid, newbuddies = s.create_chatroom(remotename, memberslist,
                                                            "Welcome to "+remotename)
                    remoteexists = True
            data = {}
            data['channel'] = channel
            data['members'] = channel_members
            return render_to_response('channels/viewchannel.html',
                                      {'data': data},
                                      context_instance=RequestContext(request))
    else:
        form = NewChannelForm()
    # Invalid POST falls through here too, redisplaying the bound form.
    return render_to_response('channels/addchannel.html', {'form': form},
                              context_instance=RequestContext(request))
@login_required
def action(request):
    # Placeholder view: not implemented yet.
    return render_to_response('channels/nothereyet.html',
                              {},
                              context_instance=RequestContext(request))
def nothereyet(request):
    # Plain-text stub for unimplemented functions.
    return HttpResponse("This function hasn't been included yet. Please check back later!")
def friend_all(request):
    """GET: show the friend-request form.  POST: send Skype contact
    requests to every id in the comma-separated list and render a
    summary of new vs. already-existing buddies."""
    if request.method == 'POST':
        form = FriendForm(request.POST, request.FILES)
        if form.is_valid():
            formidlist = form.cleaned_data['idlist']
            idlist = formidlist.split(",")
            message = form.cleaned_data['message']
            # Uploaded id file is accepted but not processed yet.
            if request.FILES.has_key('idsfile'):
                idsfile = request.FILES['idsfile']
                print("Not using uploaded file " + idsfile.name + "at the moment")
            s = SkypechatConnector()
            newbuddies, oldbuddies = s.make_buddies(idlist, message)
            results = {}
            results['message'] = message
            results['oldbuddies'] = oldbuddies
            results['newbuddies'] = newbuddies
            return render_to_response('channels/friendsummary.html',
                                      {'results': results},
                                      context_instance=RequestContext(request))
    else:
        form = FriendForm()
    return render_to_response('channels/friendrequests.html', {'form': form},
                              context_instance=RequestContext(request))
def create_skypechat(request):
    """GET: show the chat-creation form.  POST: create a Skype chatroom
    with the given name and members and render a summary page."""
    if request.method == 'POST':
        form = SkypechatCreateForm(request.POST, request.FILES)
        if form.is_valid():
            chatname = form.cleaned_data['chatname']
            formskypeidlist = form.cleaned_data['skypeidlist']
            skypeidlist = formskypeidlist.split(",")
            invitemessage = form.cleaned_data['invitemessage']
            # Uploaded id file is accepted but not processed yet.
            if request.FILES.has_key('skypeidsfile'):
                skypeidsfile = request.FILES['skypeidsfile']
                print("Not using uploaded file " + skypeidsfile.name + "at the moment")
            s = SkypechatConnector()
            groupid, newbuddies = s.create_chatroom(chatname, skypeidlist, invitemessage)
            results = {}
            results['skypechatname'] = chatname
            results['members'] = skypeidlist
            results['newbuddies'] = newbuddies
            return render_to_response('channels/skyperoomsummary.html',
                                      {'results': results},
                                      context_instance=RequestContext(request))
    else:
        form = SkypechatCreateForm()
    return render_to_response('channels/skypechatcreate.html', {'form': form},
                              context_instance=RequestContext(request))
#Summarise a cut-and-pasted set of messages from a Skypechat
def summarise_skypechat(uploadedfile="", channelid=""):
    """Summarise contribution statistics for a Skype chat.

    With no channelid, parse a cut-and-pasted transcript from
    `uploadedfile`; otherwise connect to the live channel.  Returns
    (per-user stats sorted by message count descending, add-graph,
    text histogram) — the latter two are empty dicts in the live case.
    """
    if channelid == "":
        skypemessages = read_skypemessages_cutpaste_stream(uploadedfile)
        skypeusers = {}
##        userstats = count_countributions(skypemessages)
        userstats, addgraph, texthist = analyse_skypechat(skypemessages, skypeusers)
        #Sort array by contributions, with largest numbers first
        revcnt = sorted(userstats.iteritems(), reverse=True, key=lambda x:x[1]['num_messages'])
        print(texthist)
    else:
        s = SkypechatConnector()
        print("Connecting to "+channelid)
        i = s.get_chatroom(channelid)
        userstats = s.get_messages(i)
        addgraph = {} #FIXIT: get these from the chatroom data
        texthist = {} #FIXIT: get these from the chatroom data
        revcnt = sorted(userstats.iteritems(), reverse=True, key=lambda x:x[1]['num_messages'])
    return revcnt, addgraph, texthist
def upload_skypechat(request):
    """GET: show the upload form.  POST: summarise either an uploaded
    transcript or a named live Skype channel and render the summary."""
    if request.method == 'POST':
        form = SkypechatFileForm(request.POST, request.FILES)
        if form.is_valid():
            results = {}
            if request.FILES == {}:
                #No file uploaded - assume have been given a skypechat name
                channel = form.cleaned_data['skypechannel']
                contribs, addgraph, texthist = summarise_skypechat("", channel)
                print(channel)
                # NOTE(review): results['filename'] is only set in the
                # upload branch; confirm the template tolerates its absence.
            else:
                uploadedfile = request.FILES['skypechatfile']
                ##print("Uploaded file " + uploadedfile.name)
                contribs, addgraph, texthist = summarise_skypechat(uploadedfile)
                results['filename'] = uploadedfile.name
            results['contribs'] = contribs
            results['addgraph'] = addgraph
            results['texthist'] = texthist
            return render_to_response('channels/skypechatsummary.html',
                                      {'results': results},
                                      context_instance=RequestContext(request))
    else:
        form = SkypechatFileForm()
    return render_to_response('channels/skypechatupload.html', {'form': form},
                              context_instance=RequestContext(request))
def summarise_skypeninggoogle(request):
    """Compare member lists uploaded from Skype, Ning and Google.

    NOTE(review): this looks broken as written — `request.Files` should
    almost certainly be `request.FILES`, write_tmp_file() takes a single
    file argument (not three), and compare_skypeninggoogle is not
    defined/imported in this module.  Confirm before relying on it.
    """
    temp_filename = write_tmp_file(request.Files['skypeuserfile'],
                                   request.Files['ningexcelfile'],
                                   request.Files['googleuserfile'])
    members = compare_skypeninggoogle(temp_filename)
    return members
def upload_skypeninggoogle(request):
    """GET: show the three-file upload form.  POST: summarise the
    combined Skype/Ning/Google membership.

    NOTE(review): this view references names that don't exist here —
    SkypeNingGoogleFileForm is not defined/imported and `uploadedskype`
    is never assigned, so the POST branch would raise NameError.
    """
    if request.method == 'POST':
        form = SkypeNingGoogleFileForm(request.POST, request.FILES)
        if form.is_valid():
            print("Uploaded skype file " + uploadedskype.name)
            members = summarise_skypeninggoogle(request)
            results = {}
            results['request'] = request
            results['members'] = members
            return render_to_response('channels/skypeninggooglesummary.html',
                                      {'results': results},
                                      context_instance=RequestContext(request))
    else:
        # NOTE(review): falls back to SkypechatFileForm, which doesn't
        # match the three-file upload this view expects.
        form = SkypechatFileForm()
    return render_to_response('channels/skypeninggoogleup.html', {'form': form},
                              context_instance=RequestContext(request))
|
UTF-8
|
Python
| false | false | 2,014 |
11,699,490,951,717 |
913e86885b7cd2f2ecad40c24eb8edaaa24ef16b
|
39244e4bbf001718f4e103218b5f1f2323c24de3
|
/fixtemplate.py
|
8385bbd6ee936666e798803da6e4293d7a8445e8
|
[
"GPL-2.0-only"
] |
non_permissive
|
traverseda/pylize-traverseda
|
https://github.com/traverseda/pylize-traverseda
|
e407cacdcfcdbf007d2683c7e8cf86dac56ffb49
|
bb813effe75dedc2f06140c2ed4469f95448d44b
|
refs/heads/master
| 2016-08-04T19:50:28.455335 | 2014-01-08T01:09:46 | 2014-01-08T01:09:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys, re
def main(args):
try:
file = args.pop(0)
except IndexError:
print "Usage: fixtemplate.py FILE"
sys.exit(0)
fp = open(file)
text = fp.read()
fp.close()
rx = re.compile('{(.*?)}', re.DOTALL)
text = rx.sub(lambda x: "@(%s)" % x.group(1), text)
fp = open(file, 'w')
fp.write(text)
fp.close()
# Script entry point: pass along everything after the program name.
if __name__ == '__main__':
    main(sys.argv[1:])
|
UTF-8
|
Python
| false | false | 2,014 |
1,589,137,915,430 |
a89a20ea43be67956ffe85f5d7f5a800b4138340
|
a967f6ed3bd23097d1968e2d3e1ef254df1f64ec
|
/src/gui/adddialog.py
|
64d4319d6d1c5cf8006908fdada3d6a7713bb3a1
|
[
"GPL-3.0-only"
] |
non_permissive
|
omaciel/billreminder
|
https://github.com/omaciel/billreminder
|
81a99eda4333ffb8c784bcb0d776c4e63273d636
|
891d1a10af0e66d09750c3fe6b2c4777b9dc6644
|
refs/heads/master
| 2020-05-18T07:01:08.483384 | 2011-06-17T04:07:38 | 2011-06-17T04:07:38 | 1,178,441 | 4 | 5 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
__all__ = ['AddDialog']
import os
import pygtk
pygtk.require('2.0')
import gtk
import datetime
import locale
import gobject
from lib import utils
from lib import common
from lib import scheduler
from db.entities import Bill, Category
from lib.actions import Actions
from lib.utils import create_pixbuf
from lib import i18n
from gui.widgets.datebutton import DateButton
from gui.widgets.datepicker import DatePicker
from gui.categoriesdialog import CategoriesDialog
#from lib.config import Configuration
from lib.Settings import Settings as Configuration
class AddDialog(object):
"""
Class used to generate dialog to allow user to enter/edit records.
"""
def __init__(self, title=None, parent=None, record=None, selectedDate=None):
self.ui = gtk.Builder()
self.ui.add_from_file(os.path.join(common.DEFAULT_CFG_PATH, "add_bill.ui"))
self.window = self.ui.get_object("add_bill_dialog")
self.window.set_icon_from_file(common.APP_ICON)
if parent:
self.window.set_transient_for(parent)
# If we have a selected date, then set calendar to use it
if not selectedDate:
selectedDate = datetime.date.today()
self.selectedDate = selectedDate
self.gconf_client = Configuration()
# Private copy of any record passed
self.currentrecord = record
self.alarm = [None, None]
# TODO: This needs to be run BEFORE connecting the widgets
self._set_currency()
# Set up the UI
self._initialize_dialog_widgets()
self._populate_widgets()
self.category_index_before = 0
self.ui.connect_signals(self)
def _set_currency(self):
self.decimal_sep = locale.localeconv()['mon_decimal_point']
self.thousands_sep = locale.localeconv()['mon_thousands_sep']
self.allowed_digts = [self.decimal_sep , self.thousands_sep]
self.allowed_digts += [str(i) for i in range(10)]
def _initialize_dialog_widgets(self):
self.frequency = self.ui.get_object("frequency")
self.dueDate = DatePicker()
self.ui.get_object("due_date_box").add(self.dueDate)
self.dueDate.connect('date_changed', self._on_datepicker_date_changed)
self.endDate = DatePicker()
self.ui.get_object("end_date_box").add(self.endDate)
self.endDate.connect('date_changed', self._on_datepicker_date_changed)
self.payee = self.ui.get_object("payee")
self.payeecompletion = gtk.EntryCompletion()
self.payee.child.set_completion(self.payeecompletion)
self.amount = self.ui.get_object("amount")
self.category = self.ui.get_object("category")
px = gtk.CellRendererPixbuf()
txt = gtk.CellRendererText()
self.category.pack_start(px, False)
self.category.pack_start(txt, False)
self.category.add_attribute(px, "pixbuf", 0)
self.category.add_attribute(txt, "text", 1)
self.category.set_row_separator_func(self._determine_separator)
self.categorybutton = self.ui.get_object("edit_categories")
self.notes = self.ui.get_object("notes")
self.txtbuffer = self.notes.get_buffer()
self.alarmbutton = DateButton(self.window)
self.alarmbutton.set_tooltip_text(_("Select Date and Time"))
self.ui.get_object("alarm_button_box").add(self.alarmbutton)
self.ui.get_object("alarm_label").set_mnemonic_widget(self.alarmbutton)
self.window.show_all()
def _populate_widgets(self):
""" Populate dialog widgets so they can be used. """
self._populate_frequency()
self._populate_payee() # Populate combobox with payee from db
self._populate_category() # Populate combobox with category from db
# If a record was passed, we're in edit mode
if self.currentrecord:
self._populate_widgets_with_record()
#in edit mode we must disable repetition
self.frequency.set_sensitive(False)
self.endDate.set_sensitive(False)
else:
self.dueDate.set_date(self.selectedDate)
self.endDate.set_date(self.selectedDate)
# Use alarm values from preferences
showalarm = self.gconf_client.get('show_alarm')
atime = self.gconf_client.get('show_alarm_at_time')
adays = self.gconf_client.get('show_alarm_before_days')
if showalarm:
alarmDate = scheduler.get_alarm_timestamp(adays, atime, self.selectedDate)
self.alarmbutton.set_date(alarmDate)
def _determine_separator(self, model, iter, data=None):
return model.get_value(iter, 1) == "---"
def _populate_widgets_with_record(self):
# Format the amount field
if self.currentrecord.amount:
self.amount.set_text(utils.float_to_currency(self.currentrecord.amount))
else:
self.amount.set_text("")
# Format the dueDate field
dt = self.currentrecord.dueDate
self.dueDate.set_date(dt)
utils.select_combo_text(self.payee, self.currentrecord.payee)
if self.currentrecord.category:
actions = Actions()
cat_name = self.currentrecord.category.name
records = actions.get_categories(name=cat_name)
if records:
categoryname = records[0].name
utils.select_combo_text(self.category, categoryname, 1)
else:
self.category.set_active(0)
if self.currentrecord.notes:
self.txtbuffer.set_text(self.currentrecord.notes)
#self.chkPaid.set_active(self.currentrecord.Paid)
if self.currentrecord.alarmDate:
self.alarmbutton.set_date(self.currentrecord.alarmDate)
def _populate_payee(self):
""" Populates combobox with existing payees """
# Connects to the database
actions = Actions()
# List of payees from database
payees = []
records = actions.get_bills()
for rec in records:
if rec.payee not in payees:
payees.append(rec.payee)
store = gtk.ListStore(gobject.TYPE_STRING)
for payee in payees:
store.append([payee])
self.payee.set_model(store)
self.payeecompletion.set_model(store)
self.payee.set_text_column(0)
self.payeecompletion.set_text_column(0)
self.payeeEntry = self.payee.child
self.selectedText = ''
def _get_payee(self):
""" Extracts information typed into comboboxentry """
if self.payee.get_active_iter() is not None:
model = self.payee.get_model()
iteration = self.payee.get_active_iter()
if iteration:
return model.get_value(iteration, 0)
else:
return self.payeeEntry.get_text()
def _populate_frequency(self):
""" Populates combobox with allowable frequency. """
store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_INT)
self.frequency.set_model(store)
cell = gtk.CellRendererText()
self.frequency.pack_start(cell, True)
self.frequency.add_attribute(cell, 'text', 0)
for i, frequency in enumerate([scheduler.SC_ONCE,
scheduler.SC_MONTHLY,
scheduler.SC_WEEKLY]):
store.append([frequency, i])
# Set SC_ONCE as default
self.frequency.set_active(0)
self.on_frequency_changed(self.frequency)
def _populate_category(self, categoryname=None):
""" Populates combobox with existing categories """
# Connects to the database
actions = Actions()
# List of categories from database
categories = []
records = actions.get_categories()
ret = 0
empty_color = create_pixbuf()
for rec in records:
#if [rec['categoryname'], rec['id']] not in categories:
#TODO: Better put color creation in a function
color = rec.color and rec.color or '#000'
categories.append([create_pixbuf(color=color), rec.name, int(rec.id)])
if categoryname and categoryname == rec.name:
ret = len(categories) + 1
store = gtk.ListStore(gtk.gdk.Pixbuf, str, int)
self.category.set_model(store)
store.append([empty_color, _("None"), 0])
store.append([None, "---", -1])
for category in categories:
store.append(category)
store.append([None, "---", -1])
store.append([empty_color, _("New Category"), -2])
self.category.set_active(ret)
return ret
def _get_category(self):
""" Extracts information typed into comboboxentry """
actions = Actions()
if self.category.get_active_iter() is not None:
model = self.category.get_model()
iteration = self.category.get_active_iter()
if iteration:
name = model.get_value(iteration, 1)
else:
name = None
if not name or name == _("None"):
return None
records = actions.get_categories(name=name)
if records:
return records[0]
else:
return None
def get_record(self):
frequency = self.frequency.get_active_text()
# Extracts the date off the calendar widget
# Create datetime object
selectedDate = self.dueDate.get_date()
# End date
if frequency != scheduler.SC_ONCE:
endDate = self.endDate.get_date()
# Notify user that the endDate is set in the past
if endDate < selectedDate:
endDate = selectedDate
message = utils.Message()
text = _("The end date is set to a date prior to the start date. Setting it to match the start date.")
title = _("Date set in the past")
message.ShowInfo(text=text, parentWindow=self.window, title=title)
else:
endDate = None
#buffer = self.txtNotes.get_buffer()
startiter, enditer = self.txtbuffer.get_bounds()
sbuffer = self.txtbuffer.get_text(startiter, enditer)
# Gets the payee
payee = self._get_payee()
# Gets the category
category = self._get_category()
# Gets the alarm date
alarm = self.alarmbutton.get_date() or None
# Validate form
if not payee.strip():
return None
if self.amount.get_text().strip():
amount = utils.currency_to_float(self.amount.get_text())
else:
amount = None
if self.currentrecord is None:
# Verify how many bills will be inserted
# this will only work for new bills
records = []
# Figures out how many times we're repeating this bill
days = scheduler.get_schedule_date(
frequency, selectedDate, endDate)
for day in days:
if alarm:
alarm = self.__get_alarm_date(day)
rec = Bill(payee=payee, amount=amount, dueDate=day, alarmDate=alarm, notes=sbuffer, repeats=False)
# Bill repeats...
if len(days) > 1:
rec.repeats = True
# ... and has a category.
if category:
rec.category = category
records.append(rec)
return records
else:
# Edit existing bill
self.currentrecord.payee = payee
self.currentrecord.dueDate = selectedDate
self.currentrecord.amount = amount
self.currentrecord.notes = sbuffer
self.currentrecord.alarmDate = alarm
if category:
self.currentrecord.category = category
#return the bill
return [self.currentrecord]
def on_frequency_changed(self, widget):
startDate = self.dueDate.get_date()
endDate = self.endDate.get_date()
frequency = widget.get_active_text()
if frequency == scheduler.SC_ONCE:
self.endDate.set_sensitive(False)
else:
self.endDate.set_sensitive(True)
if startDate > endDate:
self.endDate.set_date(self.dueDate.get_date())
def on_edit_categories_clicked(self, button, new = False):
category = None
# if new == True, a simpler categories dialog pops up
self.window.category = self.category
categories = CategoriesDialog(parent = self.window, new = new)
ret = categories.run()
if ret == gtk.RESPONSE_OK:
#TODO: We should handle the saving in the dialog itself.
# the category hasn't been saved yet... so save it.
if new:
categories._on_savebutton_clicked(None)
category = categories.currentrecord
categories.destroy()
# Always re-populate the categories dropdown widget, regardless if
# newer category was added. If something was returned, select it.
if category:
self._populate_category(category.name)
else:
self._populate_category()
return ret
def on_category_changed(self, combobox):
index = self.category.get_active()
model = self.category.get_model()
if index == len(model) - 1:
self.category.set_active(self.category_index_before)
self.on_edit_categories_clicked(combobox, True)
self.category_index_before = index
def on_amount_insert_text(self, entry, string, len, position):
for char in string:
if char not in self.allowed_digts:
print "Invalid Character: %s" % char
entry.emit_stop_by_name("insert-text")
gtk.gdk.beep()
return
    def on_save_clicked(self, widget):
        """Validate required fields and date order before accepting the dialog.

        Shows an error dialog (and keeps the window open) when payee or
        amount is missing, or when the end date precedes the start date.
        """
        message = utils.Message()
        startDate = self.dueDate.get_date()
        endDate = self.endDate.get_date()
        # Both required fields empty.
        if not self._get_payee().strip() and \
           not self.amount.get_text().strip():
            message.ShowError(_("\"%s\" and \"%s\" are required fields.") \
                % (_("Payee"), _("Amount")), self.window)
            self.payee.grab_focus()
        elif not self._get_payee().strip():
            message.ShowError(_("\"%s\" is required field.") % _("Payee"), self.window)
            self.payee.grab_focus()
        elif not self.amount.get_text().strip():
            message.ShowError(_("\"%s\" is required field.") % _("Amount"), self.window)
            self.amount.grab_focus()
        # The end date only matters while its picker is sensitive
        # (i.e. the bill repeats; see on_frequency_changed).
        elif self.endDate.get_sensitive() and startDate > endDate:
            message.ShowError(_("The end date is set to a date prior to the start date."), self.window)
        else:
            # All checks passed; close the dialog with ACCEPT.
            self.window.response(gtk.RESPONSE_ACCEPT)
    def _on_datepicker_date_changed(self, widget, args):
        """Keep end date and alarm consistent when either date picker changes."""
        startDate = self.dueDate.get_date()
        endDate = self.endDate.get_date()
        if widget == self.dueDate:
            if self.endDate.get_sensitive() and startDate > endDate:
                # Update endDate to be equal to dueDate
                self.endDate.set_date(self.dueDate.get_date())
        else:
            if self.endDate.get_sensitive():
                if startDate > endDate:
                    # Update endDate to be equal to dueDate
                    self.endDate.set_date(self.dueDate.get_date())
        # Recompute the alarm from the (possibly new) due date.
        # NOTE(review): the alarm is derived from dueDate even when the
        # change came from another picker — confirm that is intended.
        if self.alarmbutton.get_date():
            # Extracts the date off the datepicker widget
            alarmDate = self.__get_alarm_date(self.dueDate.get_date())
            self.alarmbutton.set_date(alarmDate)
def __get_alarm_date(self, date):
# Use alarm values from preferences
atime = self.gconf_client.get('show_alarm_at_time')
adays = self.gconf_client.get('show_alarm_before_days')
return scheduler.get_alarm_timestamp(adays, atime, date)
|
UTF-8
|
Python
| false | false | 2,011 |
6,828,998,050,429 |
3b8a0777d6f4bb3a954a728ba11525eccb79e425
|
473adc2e6b3f5dc20745aa17c2af35c351c80900
|
/hddosya/models.py
|
9b3e400c07f6445573e7f72e395c91c58e2323a3
|
[] |
no_license
|
halitalptekin/HDBlog
|
https://github.com/halitalptekin/HDBlog
|
29ffd2fb0375707a180a2c4948d63b59ba074e7b
|
24b997d095a6abcdcdf303e58d2b705e946e1d80
|
refs/heads/master
| 2016-09-06T12:32:35.525715 | 2014-04-29T13:53:31 | 2014-04-29T13:53:31 | 4,286,647 | 6 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from unidecode import unidecode
from django.contrib.sitemaps import ping_google
class Dosyalar(models.Model):
    """File entry for the blog's download section.

    Field labels/help texts are Turkish (baslik = title, aciklama =
    description, dosya = file); the ``*_sef`` fields hold ASCII
    (unidecoded) copies used for search-engine-friendly output.
    """
    # Icon/type choices shown for each file in the admin.
    DOSYA_RESIMLERI = (
        ('c','C'),
        ('cplus','C++'),
        ('eklenti','Eklenti'),
        ('exe','Exe'),
        ('gif','Gif'),
        ('header','Header'),
        ('html','Html'),
        ('java','Java'),
        ('jpeg','Jpeg'),
        ('js','Js'),
        ('mp3','Mp3'),
        ('pdf','Pdf'),
        ('php','Php'),
        ('plain','Plain'),
        ('png','Png'),
        ('python','Python'),
        ('script','Bash Script'),
        ('sifreli','Sifreli'),
        ('text','Text'),
        ('widget','Widget'),
        ('xml','Xml'),
        ('zip','Zip/Rar'),
    )
    # Title, its ASCII twin (filled automatically in save()), and URL slug.
    baslik = models.CharField(max_length=255, verbose_name="Baslik", help_text = "Dosya Basligi")
    sef_baslik = models.CharField(max_length=255, verbose_name="Baslik Sef", help_text = "Baslik Sef", blank=True)
    slug = models.SlugField(max_length=255, verbose_name="Slug")
    # Description and its ASCII twin (also filled automatically).
    aciklama = models.TextField(max_length=500, verbose_name="Aciklama", help_text = "Dosya Aciklamasi")
    aciklama_sef = models.TextField(max_length=500, verbose_name="Aciklama Sef", help_text = "Aciklama Sef", blank=True)
    # The uploaded file itself plus display metadata.
    dosya = models.FileField(upload_to="upload/", blank=True)
    dosya_turu = models.CharField(max_length=10, verbose_name="Dosya Turu", help_text = "Dosya Turu", blank=True)
    dosya_resim = models.CharField(max_length=255, verbose_name="Dosya Resmi", help_text = "Dosya Resmi", choices=DOSYA_RESIMLERI)
    dosya_kaynak = models.CharField(max_length=255, verbose_name="Dosya Kaynagi", help_text = "Dosya Kaynagi", blank=True)
    # Hidden entries are excluded from public listings.
    gizli = models.BooleanField(verbose_name="Dosya Gizli Mi?", default=False, help_text = "Dosya Gizli Mi?")
    # Creation/modification timestamps, maintained by Django.
    olusturulma = models.DateTimeField(auto_now_add=True, verbose_name="Olusturulma Tarihi")
    degistirilme = models.DateTimeField(auto_now=True, verbose_name="Degistirilme Tarihi")
    def __unicode__(self):
        return self.baslik
    def save(self, *args, **kwargs):
        # Keep the ASCII transliterations in sync with the Turkish originals.
        # NOTE(review): the record is saved twice per call; one save after
        # setting both fields would likely suffice — confirm nothing relies
        # on the intermediate write before simplifying.
        self.sef_baslik = unidecode(self.baslik)
        super(Dosyalar, self).save(*args, **kwargs)
        self.aciklama_sef = unidecode(self.aciklama)
        super(Dosyalar, self).save(*args, **kwargs)
        # Best-effort sitemap ping; failures (offline, DEBUG) are ignored.
        try:
            ping_google()
        except Exception:
            pass
    class Meta:
        verbose_name_plural = "Dosyalar"
    def get_absolute_url(self):
        return "/dosya/%s/" %self.slug
|
UTF-8
|
Python
| false | false | 2,014 |
14,534,169,372,651 |
3f632720a2b8c9167a3755b99aed306ce8e6f51c
|
d0e93ed3c691a686d70badf7fb03a6f6ffa32973
|
/pws/rr/arch/god/MyUtil/__init__.py
|
098a75c44916696547bd4dbbafc5e14471edbcf8
|
[] |
no_license
|
pedia/stuff
|
https://github.com/pedia/stuff
|
fbac9011518ca1608d72ddec1a0efd4d9514a338
|
c699b45214294db4332c536dc89f55a0b6e1c31f
|
refs/heads/master
| 2016-08-07T01:04:48.973983 | 2012-06-26T07:59:41 | 2012-06-26T07:59:41 | 3,031,329 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Generated by slice2py - DO NOT EDIT!
#
# Modules:
import Util_ice
import RFeed_ice
import FeedView_ice
import Passport_ice
# Submodules:
|
UTF-8
|
Python
| false | false | 2,012 |
5,153,960,758,788 |
eaa22440967b13232483044cab3227cf2da1e987
|
4dde24cb43576e2e124fa9e94270fa00ca04650c
|
/pysrc/get_stk_curl.py
|
1b41cd3c395eda9cb090edad1673a4d1a5e13d2b
|
[] |
no_license
|
gdmmiru/fin_utils
|
https://github.com/gdmmiru/fin_utils
|
ecef351a75cbf29e027b80ebf3732cc125a78246
|
9e975223c43b0a8b6b9579e64faf6108840eef94
|
refs/heads/master
| 2020-05-17T23:47:17.502264 | 2013-04-21T23:11:47 | 2013-04-21T23:11:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# filename: get_stk_curl.py
# purpose : retrieves stock data from finance.yahoo using curl, dumps them to a
# csv file in the current dir
# notes : requires curl installed
# changes : 01.03.2012 - initial commit
# define imports
import os
import getopt
import sys
from datetime import datetime
from subprocess import Popen, PIPE
import ftutil.MongoDBManager as MongoDBManager
import ftutil.FTLogger as FTLogger
#### define methods
def usage():
print ""
print "get_stk_curl.py "
print "-i, --ip ip of target mongodb"
print "-s, --stocks comma separated list for all interested stocks"
print "-f, --file file listing stocks to retrieve"
print "-d, --dump dump to csv file"
print "-h, --help displays this help menu"
print ""
def get_symbol_info_to_csv(symbol):
g_logger.Info('called get_symbol_info ' + symbol)
(byear, bmonth, bdate) = (2012, 0, 1)
(eyear, emonth, edate) = (2012, 11, 31)
query = """ "http://ichart.finance.yahoo.com/table.csv?a=%s&b=%s&c=%s&d=%s&e=%s&f=%s&g=d&ignore=.csv&s=%s" """ %\
(bmonth, bdate, byear, emonth, edate, eyear, sym)
print query
filename = "data.%s.csv" %(sym)
fullquery = "%s %s > %s" %(exename, query, filename)
print "fullquery:" + fullquery
os.system(fullquery)
def get_symbol_info(symbol):
    """Download daily history for *symbol* (Jan 2000 to today) via curl.

    Returns a list of dicts, one per trading day, with keys: date, open,
    high, low, close, volume, adj_close (all raw strings from Yahoo).
    """
    g_logger.Info('called get_symbol_info ' + symbol)
    # get data starting from jan 2000 until now.... mite be a bit too much, but okay for now
    (byear, bmonth, bdate) = (2000, 0, 1)
    today = datetime.now()
    eyear = today.year
    emonth = today.month - 1  # yahoo month starts at 0
    edate = today.day
    # BUG FIX: the original interpolated the loop global `sym` instead of
    # the `symbol` argument, so direct calls fetched the wrong ticker.
    query = """http://ichart.finance.yahoo.com/table.csv?a=%s&b=%s&c=%s&d=%s&e=%s&f=%s&g=d&ignore=.csv&s=%s""" %\
            (bmonth, bdate, byear, emonth, edate, eyear, symbol)
    fullquery = "%s %s" % (exename, query)
    g_logger.Info("get_symbol_info fullquery:" + fullquery)
    process = Popen([exename, query], stdout=PIPE)
    output = process.communicate()[0]
    lines = output.split("\n")
    datalist = list()
    # lines[0] is the CSV header; valid data rows have exactly 7 fields.
    for line in lines[1:]:
        vals = line.split(",")
        if(len(vals) != 7):
            continue
        d = dict()
        d['date'] = vals[0]
        d['open'] = vals[1]
        d['high'] = vals[2]
        d['low'] = vals[3]
        d['close'] = vals[4]
        d['volume'] = vals[5]
        d['adj_close'] = vals[6]
        datalist.append(d)
    return datalist
def get_coll_key(symbol):
    """Build the MongoDB collection key for a stock symbol."""
    return "%s:S" % symbol
def remove_and_insert_to_mongo_db(symbol,data):
    """Replace the symbol's history collection wholesale with *data*.

    Drops every existing document in the hist_data/hist_px collection for
    this symbol, then inserts the freshly downloaded rows.
    """
    mongodb = MongoDBManager.MongoDBManager(g_ip,27017)
    mongodb.sel_db("hist_data")
    mongodb.sel_coll('hist_px', get_coll_key(symbol))
    mongodb.remove_coll()
    mongodb.insert_doc(data)
    # NOTE(review): this read-back appears to be a leftover sanity check —
    # its result is discarded; confirm before removing.
    mongodb.retrieve_doc()
def blind_insert_to_mongo_db(symbol, data):
    """Insert *data* into the symbol's history collection without clearing it."""
    # BUG FIX: MongoDBManager is an imported *module*; instantiate the
    # class inside it (as remove_and_insert_to_mongo_db does) — the old
    # call tried to call the module and raised TypeError.
    mongodb = MongoDBManager.MongoDBManager(g_ip, 27017)
    mongodb.sel_db("hist_data")
    mongodb.sel_coll('hist_px', get_coll_key(symbol))
    mongodb.insert_doc(data)
#### define the default values
g_symbols = ['AAPL']
#symbols from washington post, historical
#g_symbols = ['COG', 'MA', 'BIIB', 'CMG', 'PRGO', 'ROST']
exename = "curl"
g_logger = FTLogger.FTLogger()
g_ip = "localhost"
g_file = None
#### parse out the arguments
options, remainder = getopt.getopt(sys.argv[1:], 'i:s:f:b:e:dvh',
['ip=='
'stocks=',
'file='
'begdate=',
'enddate=',
'dump',
'version',
'help'])
print 'OPTIONS :', options
begdate = ""
enddate = ""
dump = False
for opt, arg in options:
if(opt in ('-i', '--ip')):
g_ip = arg
elif(opt in ('-s', '--stocks')):
g_symbols = arg.split(',')
elif(opt in ('-f', '--file')):
g_file = arg
elif opt in ('-b', '--begdate'):
begdate = arg
elif opt in ('-e', '--enddate'):
enddate= arg
elif opt in ('-v', '--version'):
print "Version is 1.0.0"
sys.exit(0)
elif opt in ('-d', '--dump'):
dump = True
elif opt in ('-h', '--help'):
usage()
sys.exit(0)
byr = begdate[0:3]
bmn = begdate[4:5]
bdate = begdate[6:]
g_logger.Info( "preparsed date:" + begdate)
g_logger.Info( "parsed date:"+ byr + bmn + bdate)
#if a filename is set, read the stock names, assume a stock per line
if(g_file != None):
fileHandle = open(g_file)
g_symbols = []
for line in fileHandle.readlines():
g_symbols.append(line.rstrip())
fileHandle.close()
#run through each symbol name, curl yahoo and then push to mongodb
for sym in g_symbols:
if(dump == True):
get_symbol_info_to_csv(sym)
else:
datalist = get_symbol_info(sym)
remove_and_insert_to_mongo_db(sym, datalist)
g_logger.Info("exiting....")
|
UTF-8
|
Python
| false | false | 2,013 |
4,372,276,713,324 |
8dc450ae0e7451f56d513a44c37550130667020b
|
fc4c57b034b4951131d51052b17414c6e8ce7475
|
/dice/game/models.py
|
473aa0aa47ccbb2b8c04325653aa52659e5dcde2
|
[] |
no_license
|
bizangles/dice-game
|
https://github.com/bizangles/dice-game
|
89cc360a13770521fa51623e3332de9a81af79a1
|
d6edd4f299ff539f8ddfafd2d388ff1989fd5f78
|
refs/heads/master
| 2021-01-25T06:37:27.866441 | 2011-04-18T12:59:35 | 2011-04-18T12:59:35 | 1,526,437 | 2 | 0 | null | false | 2017-05-02T18:16:01 | 2011-03-25T17:20:51 | 2013-11-27T20:37:03 | 2017-05-02T18:05:51 | 300 | 3 | 0 | 0 |
Python
| null | null |
import random
import uuid
from django.db import models
from django.utils import simplejson as json
class Game(models.Model):
    """A game session keyed by a server-generated 32-character GUID."""
    guid = models.CharField(max_length=32, primary_key=True)

    def __init__(self, *args, **kwargs):
        super(Game, self).__init__(*args, **kwargs)
        # Fresh instances arrive without a key; mint one lazily.
        self.guid = self.guid or uuid.uuid1().hex

    def get_json(self):
        """Serialize the game identifier for the client."""
        payload = {'game': self.guid}
        return json.dumps(payload)
class Roll(models.Model):
    """The dice roll attached to a single game."""
    game = models.OneToOneField('Game')
    dice = models.CommaSeparatedIntegerField(max_length=100)
    guid = models.CharField(max_length=32)

    def __init__(self, *args, **kwargs):
        super(Roll, self).__init__(*args, **kwargs)
        # Fresh instances arrive without a GUID; mint one lazily.
        self.guid = self.guid or uuid.uuid1().hex

    def __unicode__(self):
        return "%s" % (self.dice,)

    def roll(self, num_dice):
        """Roll *num_dice* six-sided dice and persist the outcome."""
        self.dice = [random.randint(1, 6) for _ in range(num_dice)]
        self.save()

    def get_json(self):
        """Serialize the dice values and roll GUID for the client."""
        payload = {'dice': self.dice, 'guid': self.guid}
        return json.dumps(payload)
class Goal(models.Model):
    """A target the player can aim for, optionally tied to an action."""
    name = models.CharField(max_length=50)
    template = models.CharField(max_length=50)
    js_args = models.CharField(max_length=100, null=True, blank=True)
    action = models.OneToOneField('Action', null=True, blank=True)

    def __unicode__(self):
        return "%s" % (self.name,)
class Action(models.Model):
    """An in-game action, optionally presented through a dialog."""
    name = models.CharField(max_length=50)
    template = models.CharField(max_length=50)
    dialog = models.CharField(max_length=50, null=True, blank=True)

    def __unicode__(self):
        return "%s" % (self.name,)
|
UTF-8
|
Python
| false | false | 2,011 |
5,050,881,589,667 |
16c4544db3fcfc8e2e8e1fdc07a2fcf9ef8c9b67
|
734f1ca1ff0748bdfcb2fafedd659d5438a217b4
|
/settings.py
|
89762dc3576712aee23750a2e4b77d1836ba4a1a
|
[
"GPL-3.0-or-later"
] |
non_permissive
|
Matael/pyre-todo
|
https://github.com/Matael/pyre-todo
|
0e5d1cb9c3fd9b8024196e1396c432c95c24b3bb
|
b5cff1d86b49fbc764c6cc60573d0532cdeea14e
|
refs/heads/master
| 2020-06-01T01:06:59.834767 | 2012-06-15T20:18:38 | 2012-06-15T20:18:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Settings for pyre-todo

# Redis connection: host, port, and database index used by the app.
# Redis Host
REDIS_HOST ="localhost"
# Redis Port
REDIS_PORT = 6379
# Redis Database
REDIS_DB = 2
# Redis prefix for this app (namespaces every key the app writes)
REDIS_PREFIX = "pyre:"
# NOTE(review): the two paths below are absolute and machine-specific;
# consider deriving them from the project root.
# Path to static files
STATIC_ROOT = "/home/matael/workspace/projects/pyre-todo/static"
# Path to templates
TEMPLATE_PATH = "/home/matael/workspace/projects/pyre-todo/templates"
|
UTF-8
|
Python
| false | false | 2,012 |
4,346,506,905,402 |
1739c945af2c3a6ef176e49cf00fc208e36ad5e4
|
cc48e3f8070b152def8b7c2f8bc40a3ac67cdb1b
|
/bin/nfd2nfc.py
|
7ae6b57436f284fc439a748465dbae1c937cda4d
|
[
"BSD-3-Clause"
] |
permissive
|
lingpy/qlc
|
https://github.com/lingpy/qlc
|
fbaa0ffbfdea55e9d400b87ec185829f3a11f0b9
|
53f26ca8bb87ee7412133ebd631f754ca43b656c
|
refs/heads/master
| 2016-09-06T10:17:43.346087 | 2012-11-19T14:44:30 | 2012-11-19T14:44:30 | 5,875,404 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import codecs, unicodedata, sys
def main(argv):
if len(argv) < 3:
print "call: nfd2nfc.py in.txt out.txt"
exit(1)
IN = codecs.open(argv[1], "r", "utf-8")
OUT = codecs.open(argv[2], "w", "utf-8")
for line in IN:
o = unicodedata.normalize("NFC", line)
OUT.write(o)
if __name__ == "__main__":
main(sys.argv)
|
UTF-8
|
Python
| false | false | 2,012 |
7,086,696,049,775 |
ae35cd250a3f7276b7e5d0b18dac813c914c30e3
|
f8033cc2c73380ba76ea9afaa490cfae4a8956ca
|
/Data/unittester.py
|
9ffa9e7c2ebd0d37e05d5482ae22e28bc99e81c8
|
[
"MIT"
] |
permissive
|
Englebabz/CasPyTeX
|
https://github.com/Englebabz/CasPyTeX
|
5886e0b15b31d4f2e48271275882a7508b71dcf0
|
bc0fce21fba0d7d26ad387b85a85a4bd23a8c490
|
refs/heads/master
| 2018-12-29T14:04:19.999636 | 2014-02-15T19:51:16 | 2014-02-15T19:51:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import textparser
from debugger import *
"""
This is the script one can run to test all the tests in Data/Tests
It should be fairly straight forward to see what's going on
The syntax of a testing file should be simple; just look at the Mixed.test
"""
# Silence debugger output while the suites run.
debug.lvl=0
def unittest(filename, approx=False):
    """Run every problem in Tests/<filename> and report the error count.

    Lines starting with '#' are skipped (but still counted in the total).
    When *approx* is True the simplified result is also approximated
    numerically before comparison.  Returns [errors, num_problems].

    NOTE: the name shadows the stdlib `unittest` module; kept because the
    suite runner below calls it by this name.
    """
    print("Starting "+filename.replace(".test","")+"\n---")
    # `with` closes the file even if a problem raises (the original's
    # close() was skipped on any exception).
    with open('Tests/' + filename) as f:
        problems = f.readlines()
    errors = 0
    for problem in problems:
        if problem[0] == "#":
            continue
        errors += problemtest(problem, approx)
    numofproblems = len(problems)
    print(filename.replace(".test","")+"test results: "+str(errors)+" errors out of "+str(len(problems))+" problems"+ "\n")
    return [errors, numofproblems]
def problemtest(problem,approx=False):
    """Check one problem line; return 1 on mismatch, 0 on success.

    A problem line holds three double-quoted fields: the input
    expression, the expected simplified string, and the focus expression
    ("None" for no focus).
    """
    parts=problem.split("\"")
    # Quoted fields land at the odd indices after splitting on '"'.
    parts=[parts[1],parts[3],parts[5]]
    inputstr=parts[0]
    resultstr=parts[1]
    focus=parts[2]
    if focus=="None":focus=None
    else:focus=textparser.TextToCAS(focus)
    print(" Trying to simplify: "+inputstr+" = "+resultstr)
    returningstr=textparser.TextToCAS(inputstr).simplify(focus).tostring()
    # In approx mode, re-simplify and numerically approximate the result.
    if approx:
        returningstr=textparser.TextToCAS(inputstr).simplify(focus).approx().tostring()
    if returningstr!=resultstr:
        print("ERROR:Tried to simplify: "+inputstr+"\n CAS returned : "+returningstr+"\n But the answer was : "+resultstr)
        return 1
    return 0
# Running [error_count, problem_count] totals across all suites.
errors = [0, 0]

def vecadd(a, b):
    """Component-wise sum of two 2-element vectors."""
    first, second = a[0] + b[0], a[1] + b[1]
    return [first, second]
# Aggregate [errors, problems] over every simplification-rule suite.
errors=vecadd(errors,unittest("Antidistributive.test"))
errors=vecadd(errors,unittest("Samerootofexponent.test"))
errors=vecadd(errors,unittest("sameexponentfrac.test"))
errors=vecadd(errors,unittest("sameroot.test"))
errors=vecadd(errors,unittest("antisameroot.test"))
errors=vecadd(errors,unittest("antisameexponentfrac.test"))
errors=vecadd(errors,unittest("sameexponent.test"))
errors=vecadd(errors,unittest("distributive.test"))
errors=vecadd(errors,unittest("antisameexponent.test"))
errors=vecadd(errors,unittest("Mixed.test"))
# The approx suite compares numerically approximated results.
print("\nSTARTING APPROX TESTS\n")
errors=vecadd(errors,unittest("Approx.test",True))
print("Unittest finished: "+str(errors[0])+" errors out of "+str(errors[1])+" problems")
|
UTF-8
|
Python
| false | false | 2,014 |
7,335,804,176,590 |
cb184138ad289287c2373e8dc8c6ffdcae5ab3ab
|
f4930a7efe8cc97e4c79c8dbc15fca71fcab475d
|
/PythonThink/src/serapt/td/study/base.py
|
0eb19823b8dd47acae062d0c02354164afe93c25
|
[] |
no_license
|
atgsdjb/td_python_tools
|
https://github.com/atgsdjb/td_python_tools
|
64826e09d0fb351be736000282ee50e442a26d10
|
d0be971c81edcda3c6ef274ce9f4f9b72c4bb07c
|
refs/heads/master
| 2020-05-30T21:15:27.499990 | 2012-12-03T13:41:39 | 2012-12-03T13:41:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
'''
if __name__ == '__main__':
f =open('test.txt','r')
s = f.read()
print s,type(s)
|
UTF-8
|
Python
| false | false | 2,012 |
14,534,169,367,529 |
468cefacbb8f43703e90536abbc34d9b5a29bfe1
|
edb367cee66fd1aa061a8ce2a951cd7a0b298409
|
/rtb.py
|
09328baff21d1e087e200e110e4c1e4e57f9c822
|
[] |
no_license
|
zgcgreat/rtb
|
https://github.com/zgcgreat/rtb
|
8ced96dbcf32f6bb5f32cf3903ce81d69020adbe
|
37d8d74278ee08171323ed2505faec3a7eb24757
|
refs/heads/master
| 2021-05-29T05:03:34.867306 | 2014-12-01T17:05:41 | 2014-12-01T17:05:41 | 104,637,241 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import csv
import random
import numpy as np
import pandas as pd
from sklearn import linear_model, tree, lda, naive_bayes
from sklearn.metrics import confusion_matrix, classification_report
import matplotlib.pyplot as plt
import seaborn as sns
# Output directories for generated charts/tables.
CHARTS_DIR_PATH = './charts/'
TABLES_DIR_PATH = './tables/'
# Processed-data directory and the raw iPinYou log locations.
DATA_PATH = './data/'
IMP_TRAIN_PATH = './raw_data/imp_train.txt'
CLICK_TRAIN_PATH = './raw_data/click_train.txt'
IMP_TEST_PATH = './raw_data/imp_test.txt'
CLICK_TEST_PATH = './raw_data/click_test.txt'
################################################################################
# DATA PREPROCESSING
################################################################################
def feature_selection_imps(file_path):
    """Parse a tab-separated iPinYou impression log into a DataFrame.

    Each log row is reduced to the modelling features: identifiers, the
    impression hour, 0/1 browser/OS flags derived from the user agent,
    and the ad-slot attributes.  The returned DataFrame's index starts
    at 1 (row 0 of the working matrix is the header, then dropped).
    """
    header = ['bid_id',
              'ipinyou_id',
              'timestamp',
              'hour',
              'browser_chrome',
              'browser_ie',
              'browser_safari',
              'browser_firefox',
              'mobile',
              'iphone',
              'ipad',
              'android',
              'windows',
              'linux',
              'region_id',
              'ad_exchange',
              'domain',
              'ad_slot_id',
              'ad_slot_size',
              'ad_slot_visibility',
              'ad_slot',
              'ad_slot_floor_price',
              'paying_price']
    matrix = [header]
    # `with` guarantees the log file is closed (the original leaked it).
    with open(file_path) as f:
        csvreader = csv.reader(f, delimiter='\t')
        for row in csvreader:
            # Raw-log column layout; only the fields we keep are named.
            bid_id = row[0]
            timestamp = row[1]
            ipinyou_id = row[3]
            useragent = row[4]
            region_id = row[6]
            ad_exchange = row[8]
            domain = row[9]
            ad_slot_id = row[12]
            ad_slot_width = row[13]
            ad_slot_height = row[14]
            ad_slot_visibility = row[15]
            ad_slot = row[16]
            ad_slot_floor_price = row[17]
            paying_price = row[20]
            # Timestamp layout is yyyyMMddHHmm...; chars 8:10 are the hour.
            hour = timestamp[8:10]
            # One 0/1 flag per substring, in header order.
            # BUG FIX: the Chrome test was misspelled 'Chome', so
            # browser_chrome was always 0.
            flags = [1 if marker in useragent else 0
                     for marker in ('Chrome', 'MSIE', 'Safari', 'Firefox',
                                    'Mobile', 'iPhone', 'iPad', 'Android',
                                    'Windows NT', 'Linux')]
            ad_slot_size = str(ad_slot_width) + 'x' + str(ad_slot_height)
            vector = [bid_id, ipinyou_id, timestamp, hour] + flags + \
                     [region_id, ad_exchange, domain, ad_slot_id,
                      ad_slot_size, ad_slot_visibility, ad_slot,
                      ad_slot_floor_price, paying_price]
            matrix.append(vector)
    df = pd.DataFrame(matrix)
    df.columns = matrix[0]
    # BUG FIX: `.ix` was removed from pandas; `.iloc[1:]` keeps the same
    # "drop the header row, index starts at 1" behaviour.
    df = df.iloc[1:]
    return df
def feature_selection_clicks(file_path):
    """Parse a tab-separated iPinYou click log into a DataFrame.

    Keeps only the bid id, the iPinYou user id, and the click timestamp.
    The returned DataFrame's index starts at 1, mirroring
    feature_selection_imps.
    """
    matrix = [['bid_id', 'ipinyou_id', 'timestamp']]
    # `with` guarantees the log file is closed (the original leaked it).
    with open(file_path) as f:
        for row in csv.reader(f, delimiter='\t'):
            # Raw columns: 0 = bid id, 1 = timestamp, 3 = iPinYou user id.
            matrix.append([row[0], row[3], row[1]])
    df = pd.DataFrame(matrix)
    df.columns = matrix[0]
    # BUG FIX: `.ix` was removed from pandas; `.iloc[1:]` drops the header row.
    df = df.iloc[1:]
    return df
def merge_impression_click(imps, clicks):
    """Label each impression with a 0/1 `clicks` column.

    An impression is marked clicked when its user id appears anywhere in
    the click log.  NOTE(review): despite the timestamp comment in the
    original, no time-window matching happens — any click by the same
    user counts; confirm that is intended.
    """
    imps = imps.set_index(imps['bid_id'])
    imps = imps.drop(['bid_id'], axis=1)
    clicks = clicks.set_index(clicks['bid_id'])
    clicks = clicks.drop(['bid_id'], axis=1)
    # PERF FIX: hoisted out of the loop — the original rebuilt
    # list(clicks['ipinyou_id']) for every impression (O(n*m));
    # a set gives O(1) membership tests.
    clicking_users = set(clicks['ipinyou_id'])
    imps['clicks'] = [1 if row['ipinyou_id'] in clicking_users else 0
                      for _, row in imps.iterrows()]
    return imps
def transform_ssf(df):
    """Expand the categorical columns into prefixed dummy (one-hot) columns.

    Indexes the frame by timestamp, drops the raw categorical/identifier
    columns, and appends one dummy frame per categorical feature with the
    same `<prefix>_<value>` column names the original hand-rolled code
    produced.  'domain' is dropped without dummies (its one-hot block was
    commented out upstream, presumably for cardinality reasons).
    """
    df = df.set_index(df['timestamp'])
    # (source column, dummy-column prefix) pairs, in original concat order.
    dummy_specs = [('hour', 'hour'),
                   ('region_id', 'region'),
                   ('ad_exchange', 'exchange'),
                   ('ad_slot_size', 'adslotsize'),
                   ('ad_slot_visibility', 'adslotvis'),
                   ('ad_slot', 'adslot')]
    # get_dummies(prefix=...) emits '<prefix>_<value>' names, replacing six
    # copy-pasted rename blocks in the original.
    dummy_frames = [pd.get_dummies(df[column], prefix=prefix)
                    for column, prefix in dummy_specs]
    df = df.drop(['ipinyou_id',
                  'timestamp',
                  'hour',
                  'region_id',
                  'ad_exchange',
                  'domain',
                  'ad_slot_id',
                  'ad_slot_size',
                  'ad_slot_visibility',
                  'ad_slot',
                  'ad_slot_floor_price'], axis=1)
    return pd.concat([df] + dummy_frames, axis=1)
def sample_train_data(df, perc_no_clicks=500):
    """Down-sample non-clicks to perc_no_clicks% of the click count.

    Keeps every clicked row and a random sample of
    len(clicks) * perc_no_clicks/100 non-clicked rows, re-indexed by
    timestamp.  Sampling is NOT seeded, so results vary between runs.
    """
    df = df.reset_index()
    df_clicks = df[df['clicks'] == 1]
    df_noclicks = df[df['clicks'] == 0]
    # BUG FIX: `perc_no_clicks/100` truncated to an int under Python 2,
    # rounding e.g. 250% down to 2x; divide by 100.0 instead.
    sample_size = int(len(df_clicks) * (perc_no_clicks / 100.0))
    # list(...) because random.sample requires a real sequence on 3.11+.
    sample_rows = random.sample(list(df_noclicks.index), sample_size)
    # .loc replaces the removed .ix (labels here are ints from reset_index).
    df_noclicks_sample = df_noclicks.loc[sample_rows]
    df = pd.concat([df_clicks, df_noclicks_sample])
    df = df.set_index('timestamp')
    return df
def partition(df):
    """Chronological 80/20 split of *df* (sorted by index) into train/test.

    Returns (train, test) DataFrames covering every row exactly once.
    """
    df = df.sort_index()
    cut = int(0.8 * df.shape[0])
    train = df.iloc[0:cut]
    # BUG FIX: the original started the test slice at cut+1, silently
    # dropping one row from every split.
    test = df.iloc[cut:]
    return train, test
################################################################################
# DATA EXPLORATION
################################################################################
def data_exploration():
    """One-off exploratory analysis of the raw impression sample.

    Reads ./raw_data/impression_sample.txt, derives browser/OS/time
    features per impression, then writes summary charts under
    CHARTS_DIR_PATH and prints statistics.  Nothing is returned.
    """
    # create data frame from raw data
    # file_path = './data/impression_sample.txt'
    file_path = './raw_data/impression_sample.txt'
    matrix = []
    header = ['bid_id',
              'year',
              'day',
              'hour',
              'minute',
              'log_type',
              'ipinyou_id',
              'browser_chrome',
              'browser_ie',
              'browser_safari',
              'browser_firefox',
              'browser_other',
              'mobile',
              'iphone',
              'ipad',
              'android',
              'windows',
              'linux',
              'region_id',
              'city_id',
              'ad_exchange',
              'domain',
              'url',
              'ad_slot_id',
              'ad_slot_size',
              'ad_slot_visibility',
              'ad_slot',
              'ad_slot_floor_price',
              'creative_id',
              'key_page_url',
              'advertiser_id',
              'user_tags']
    matrix.append(header)
    with open(file_path) as f:
        csvreader = csv.reader(f, delimiter='\t')
        for row in csvreader:
            # label the attributes
            bid_id = row[0]
            timestamp = row[1]
            log_type = row[2]
            ipinyou_id = row[3]
            useragent = row[4]
            ip_address = row[5]
            region_id = row[6]
            city_id = row[7]
            ad_exchange = row[8]
            domain = row[9]
            url = row[10]
            anonymous_url_id = row[11]
            ad_slot_id = row[12]
            ad_slot_width = row[13]
            ad_slot_height = row[14]
            ad_slot_visibility = row[15]
            ad_slot = row[16]
            ad_slot_floor_price = row[17]
            creative_id = row[18]
            bidding_price = row[19]
            paying_price = row[20]
            key_page_url = row[21]
            advertiser_id = row[22]
            user_tags = row[23]
            # create derivative attributes
            # Timestamp layout: yyyy MM dd HH mm ...
            year = timestamp[0:4]
            month = timestamp[4:6]
            day = timestamp[6:8]
            hour = timestamp[8:10]
            minute = timestamp[10:12]
            # NOTE(review): 'Chome' is a misspelling of 'Chrome', so
            # browser_chrome is always 0 here (the browser_other test
            # below spells it correctly) — confirm and fix upstream.
            if 'Chome' in useragent:
                browser_chrome = 1
            else:
                browser_chrome = 0
            if 'MSIE' in useragent:
                browser_ie = 1
            else:
                browser_ie = 0
            if 'Safari' in useragent:
                browser_safari = 1
            else:
                browser_safari = 0
            if 'Firefox' in useragent:
                browser_firefox = 1
            else:
                browser_firefox = 0
            # "Other" means none of the four major browser markers matched.
            if 'Chrome' not in useragent \
                    and 'MSIE' not in useragent \
                    and 'Safari' not in useragent \
                    and 'Firefox' not in useragent:
                browser_other = 1
            else:
                browser_other = 0
            if 'Mobile' in useragent:
                mobile = 1
            else:
                mobile = 0
            if 'iPhone' in useragent:
                iphone = 1
            else:
                iphone = 0
            if 'iPad' in useragent:
                ipad = 1
            else:
                ipad = 0
            if 'Android' in useragent:
                android = 1
            else:
                android = 0
            if 'Windows NT' in useragent:
                windows = 1
            else:
                windows = 0
            if 'Linux' in useragent:
                linux = 1
            else:
                linux = 0
            ad_slot_size = str(ad_slot_width) + 'x' + str(ad_slot_height)
            vector = [bid_id,
                      year,
                      day,
                      hour,
                      minute,
                      log_type,
                      ipinyou_id,
                      browser_chrome,
                      browser_ie,
                      browser_safari,
                      browser_firefox,
                      browser_other,
                      mobile,
                      iphone,
                      ipad,
                      android,
                      windows,
                      linux,
                      region_id,
                      city_id,
                      ad_exchange,
                      domain,
                      url,
                      ad_slot_id,
                      ad_slot_size,
                      ad_slot_visibility,
                      ad_slot,
                      ad_slot_floor_price,
                      creative_id,
                      key_page_url,
                      advertiser_id,
                      user_tags]
            matrix.append(vector)
    # turn matrix into pandas dataframe
    df = pd.DataFrame(matrix)
    df.columns = matrix[0]
    # NOTE(review): DataFrame.ix was removed from pandas >= 1.0; this line
    # needs .iloc[1:] on modern installs.
    df = df.ix[1:]
    df.index = df['bid_id']
    df = df.drop(['bid_id'], axis=1)
    def histogram(feature):
        # Histogram of per-value frequencies for *feature*.
        vc = df[feature].value_counts()
        freq = vc.values
        plt.hist(freq)
        plt.savefig(CHARTS_DIR_PATH + ('de-%s-hist.png' % feature))
        plt.clf()
    def piechart(feature):
        # Pie chart of the value distribution for *feature*.
        vc = df[feature].value_counts()
        vc = vc.sort_index()
        labels = vc.index.values
        fracs = vc.values
        plt.pie(fracs, labels=labels, autopct='%1.1f%%')
        plt.savefig(CHARTS_DIR_PATH + ('de_%s-pie.png' % feature))
        plt.clf()
    # TIMESTAMP
    events_per_hour = df['hour'].value_counts()
    events_per_hour = events_per_hour.sort_index()
    events_per_hour.plot()
    plt.savefig(CHARTS_DIR_PATH + 'de_events-per-hour.png')
    plt.clf()
    # FREQUENCY
    histogram('ipinyou_id')
    # BROWSER
    num_firefox = df['browser_firefox'].sum()
    num_safari = df['browser_safari'].sum()
    num_ie = df['browser_ie'].sum()
    num_chrome = df['browser_chrome'].sum()
    num_other = df['browser_other'].sum()
    ind = np.array([0,1,2,3,4])
    width = 0.75
    fig, ax = plt.subplots()
    rects = ax.bar(left=ind,
                   height=[num_firefox, num_safari, num_ie, num_chrome, num_other],
                   width=width)
    ax.set_xticks(ind + (width/2))
    ax.set_xticklabels(['Firefox', 'Safari', 'IE', 'Chrome', 'Other'])
    plt.savefig(CHARTS_DIR_PATH + 'de_browser-distribution.png')
    plt.clf()
    # MOBILE VS NONMOBILE
    num_mobile = df['mobile'].sum()
    num_nonmobile = df['mobile'].count() - num_mobile
    labels = 'Mobile', 'Non-mobile'
    fracs = [num_mobile, num_nonmobile]
    plt.pie(fracs, labels=labels)
    plt.savefig(CHARTS_DIR_PATH + 'de_mobile-vs-nonmobile.png')
    plt.clf()
    # REGIONS AND CITIES
    num_regions = len(df['region_id'].unique())
    num_cities = len(df['city_id'].unique())
    print '### REGIONS AND CITIES ###'
    print 'Number of unique regions: %s' % str(num_regions)
    print 'Number of unique cities: %s' % str(num_cities)
    print
    print '--'
    # AD EXCHANGE ID
    piechart('ad_exchange')
    # DOMAIN
    histogram('domain')
    # URL
    histogram('url')
    # AD SLOT ID
    histogram('ad_slot_id')
    # AD SLOT SIZE
    piechart('ad_slot_size')
    # AD SLOT VISIBILITY
    piechart('ad_slot_visibility')
    # AD SLOT
    piechart('ad_slot')
    # AD SLOT FLOOR PRICE
    histogram('ad_slot_floor_price')
    # CREATIVE ID
    print df['creative_id'].value_counts()
    # KEY PAGE URL
    # ADVERTISER ID
    # USER TAGS
    print
################################################################################
# MODELS AND MODEL EVALUATION
################################################################################
def fit_model(df, model='logistic_regression', max_depth=5):
    """Fit a click classifier on *df* and return the fitted estimator.

    *df* must hold a 0/1 'clicks' target and a 'paying_price' column
    (both excluded from the features).  *model* selects the estimator:
    'logistic_regression' (also dumps coefficients to a CSV and stdout),
    'tree' (uses *max_depth*), 'lda', or 'naivebayes'.

    NOTE(review): an unrecognized *model* value leaves `m` unbound and
    raises NameError at `return m` — consider raising ValueError instead.
    """
    # split the target attribute from the explanatory variables
    y = df['clicks']
    x = df.drop(['clicks', 'paying_price'], axis=1)
    # fit the model
    if model == 'logistic_regression':
        m = linear_model.LogisticRegression()
        m.fit(x, y)
        # Persist and print the per-feature coefficients for inspection.
        df_coef = pd.DataFrame(columns=['attribute','logreg_coef'])
        df_coef['attribute'] = x.columns.values
        df_coef['logreg_coef'] = np.ravel(m.coef_)
        df_coef.to_csv(TABLES_DIR_PATH + 'logreg_coef.csv')
        print 'Logistic regression coefficients:'
        print df_coef
    elif model == 'tree':
        m = tree.DecisionTreeClassifier(max_depth=max_depth)
        m.fit(x, y)
    elif model == 'lda':
        m = lda.LDA()
        m.fit(x, y)
    elif model == 'naivebayes':
        m = naive_bayes.GaussianNB()
        m.fit(x, y)
    return m
def create_propensity_histogram(df_test, model):
    """Histogram the model's click propensities over *df_test*.

    Scores every row with model.predict_proba (class-1 probability),
    saves a histogram named after the model type under CHARTS_DIR_PATH,
    and — side effect — appends a 'propensity' column to *df_test*.
    """
    # keep a running total of the propensities
    propensities_list = []
    # for each impression in the log
    for index, value in df_test.iterrows():
        # grab the attributes of the impression (drop clicks)
        attributes = value
        attributes = attributes.drop(['clicks', 'paying_price'])
        # calculate the impression utility and append to propensities list
        imp_propensity = model.predict_proba(attributes)[0][1]
        propensities_list.append(imp_propensity)
    # append propensities to df_test (mutates the caller's frame)
    df_test['propensity'] = propensities_list
    # create histogram of propensities
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.hist(propensities_list)
    ax.set_xlabel('Propensity To Click')
    ax.set_ylabel('Frequency')
    # Pick title/filename from the estimator's class name.
    if 'LogisticRegression' in str(type(model)):
        ax.set_title('Histogram of Propensity To Click (Logistic Regression)')
        plt.savefig(CHARTS_DIR_PATH + 'hist_prop_to_click_logreg.png')
    elif 'DecisionTreeClassifier' in str(type(model)):
        ax.set_title('Histogram of Propensity To Click (Decision Tree)')
        plt.savefig(CHARTS_DIR_PATH + 'hist_prop_to_click_tree.png')
    elif 'LDA' in str(type(model)):
        ax.set_title('Histogram of Propensity To Click (LDA)')
        plt.savefig(CHARTS_DIR_PATH + 'hist_prop_to_click_lda.png')
    elif 'GaussianNB' in str(type(model)):
        ax.set_title('Histogram of Propensity To Click (Naive Bayes)')
        plt.savefig(CHARTS_DIR_PATH + 'hist_prop_to_click_naivebayes.png')
    plt.clf()
def evaluate_model_on_full_set(df_test, model):
# for each impression in the log
predictions = []
propensities = []
for index, value in df_test.iterrows():
attributes = value
attributes = attributes.drop(['clicks', 'paying_price'])
propensity = model.predict_proba(attributes)[0][1]
pred = model.predict(attributes)[0]
propensities.append(propensity)
predictions.append(pred)
df_analysis = df_test.copy()
df_analysis['propensity'] = propensities
df_analysis['pred'] = predictions
df_analysis = df_analysis.sort(columns=['propensity'], ascending=False)
df_analysis['cum_click'] = df_analysis['clicks'].cumsum()
df_analysis['cum_click_perc'] = \
100*df_analysis['cum_click'] / df_analysis['clicks'].sum()
ninety_perc_click_threshold = \
df_analysis[df_analysis['cum_click_perc'] < 0.9].shape[0] / float(df_analysis.shape[0])
cm = confusion_matrix(df_analysis['clicks'], df_analysis['pred'])
accuracy = (cm[0][0]+cm[1][1]) / float(cm.sum())
recall = cm[1][1] / float(cm[1][0] + cm[1][1])
precision = cm[1][1] / float(cm[0][1] + cm[1][1])
print cm
return accuracy, recall, precision, cm, ninety_perc_click_threshold*100.0
################################################################################
# SIMULATE PERFORMANCE
################################################################################
def simulate_flat_bid(bid=300, print_output=True):
    """Replay the impression log bidding a constant price on every auction.

    bid: flat bid price (same units as 'paying_price' in the log).
    print_output: when False, returns (ad_spend, impressions, clicks, cpm,
    ctr, cpc); when True, prints the metrics and implicitly returns None.
    """
    # create df with all impressions
    df = pd.DataFrame.from_csv('./data/test_ssf_100.csv')
    # keep running total of ad spend, impressions, clicks
    ad_spend = 0
    impressions = 0
    clicks = 0
    # for each impression in the log
    for index, value in df.iterrows():
        # grab the attributes of the impression (drop clicks)
        # NOTE(review): `attributes` is never used -- a flat bid ignores
        # the impression's features entirely.
        attributes = value
        # calculate the impression utility
        imp_utility = bid
        # calculate the minimum utility needed
        min_utility = 0
        adjusted_utility = imp_utility - min_utility
        # calculate the bid price
        bid_price = adjusted_utility
        # find the paying price
        paying_price = value['paying_price']
        # if bid_price > paying_price, update campaign metrics
        # (the auction is won on ties as well: >=)
        if bid_price >= paying_price:
            impressions += 1
            # paying_price is per-mille, hence the /1000 conversion
            ad_spend += paying_price/1000.0
            if value['clicks'] == 1:
                clicks += 1
    # derived metrics; zero impressions/clicks are guarded explicitly
    try:
        cpm = float(ad_spend) / (impressions) * 1000
        ctr = float(clicks) / impressions
    except ZeroDivisionError:
        cpm = 0.0
        ctr = 0.0
    try:
        cpc = float(ad_spend) / clicks
    except ZeroDivisionError:
        cpc = 0.0
    if print_output == False:
        return ad_spend, impressions, clicks, cpm, ctr, cpc
    else:
        print 'Total ad spend: %.2f Yen' % ad_spend
        print 'Number of impressions delivered: %d' % impressions
        print 'Number of clicks: %d' % clicks
        print 'eCPM: %.2f Yen' % cpm
        print 'CTR: %0.2f %%' % (ctr*100)
        print 'CPC: %.2f Yen' % cpc
def simulate_goal_bid(goal, model, print_output=True):
    """Replay the impression log bidding propensity * goal on each auction.

    goal: value assigned to one click; each bid is P(click) * goal.
    model: fitted classifier exposing predict_proba.
    Returns (ad_spend, impressions, clicks, cpm, ctr, cpc); when
    print_output is True it also saves a bid-distribution chart and prints
    the metrics.
    """
    # create df with all impressions
    df = pd.DataFrame.from_csv('./data/test_ssf_100.csv')
    # keep track of bids
    bids = []
    # for each impression in the log
    for index, value in df.iterrows():
        # grab the attributes of the impression (drop clicks)
        attributes = value
        attributes = attributes.drop(['clicks', 'paying_price'])
        # calculate the bid
        imp_utility = model.predict_proba(attributes)[0][1]
        bid = imp_utility*goal
        bids.append(bid)
    df['bid'] = bids
    # NOTE(review): win condition here is strict '>' whereas
    # simulate_flat_bid wins on '>=' -- ties are treated differently.
    df['impression'] = (df['bid'] > df['paying_price'])*1
    df_winning_bids = df[df['impression'] == 1]
    impressions = df_winning_bids.shape[0]
    clicks = df_winning_bids['clicks'].sum()
    ad_spend = df_winning_bids['paying_price'].sum()/1000.0
    # NOTE(review): unlike simulate_flat_bid, zero impressions/clicks are
    # not guarded here and would raise ZeroDivisionError.
    cpm = float(ad_spend) / (impressions) * 1000
    ctr = float(clicks) / impressions
    cpc = float(ad_spend) / clicks
    if print_output == False:
        return ad_spend, impressions, clicks, cpm, ctr, cpc
    # create bid and paying_price histogram
    plt.hist(list(df['bid']), alpha=0.8, label='bid')
    plt.hist(list(df['paying_price']), alpha=0.5, label='actual')
    plt.legend(loc='upper right')
    plt.xlabel('Price')
    plt.ylabel('Frequency')
    plt.title('Bid and Price Distribution')
    plt.savefig(CHARTS_DIR_PATH + 'bid-distribution_goal_%s.png' % str(goal))
    # print output
    print 'Goal: %s' % str(goal)
    print 'Total ad spend: %.2f Yen' % ad_spend
    print 'Number of impressions delivered: %d' % impressions
    print 'Number of clicks: %d' % clicks
    print 'eCPM: %.2f Yen' % cpm
    print 'CTR: %0.2f %%' % (ctr*100)
    print 'CPC: %.2f Yen' % cpc
    print '-'*50
    return ad_spend, impressions, clicks, cpm, ctr, cpc
def calculate_spend_urgency(budget, df_imps, timestamp):
    """Return how aggressively the remaining budget must be spent.

    The urgency is the remaining budget (converted to per-mille units)
    expressed as a percentage of the total market value of all impressions
    still to come after *timestamp*. Negative or zero urgency is clamped
    to 0.0.
    """
    # market value of every impression with an index strictly after the cutoff
    remaining_inventory_value = \
        df_imps.loc[df_imps.index > timestamp, 'paying_price'].sum()
    urgency = 100.0 * (budget * 1000.0) / remaining_inventory_value
    # clamp non-positive urgency to zero
    return urgency if urgency > 0 else 0.0
def calculate_cpe_threshold(historical_bids, spend_urgency, model):
    """Estimate the maximum cost-per-engagement worth paying right now.

    Samples 500 past impressions, prices each by CPE = paying_price /
    P(click), and returns the CPE at which the cumulative ad-spend share
    reaches *spend_urgency* percent -- i.e. the cheapest slice of the
    market the budget can afford.

    NOTE(review): random.sample raises ValueError when historical_bids has
    fewer than 500 rows, and the final iloc[-1] raises IndexError when no
    row falls under the urgency cutoff.
    """
    # randomly sample historical bids (without replacement)
    bids_sample_idx = random.sample(historical_bids.index, 500)
    bids_sample = historical_bids.ix[bids_sample_idx]
    # calculate propensity to convert for all impressions
    propensities = []
    for index, value in bids_sample.iterrows():
        attributes = value.drop(['clicks', 'paying_price'])
        propensity = model.predict_proba(attributes)[0][1]
        propensities.append(propensity)
    bids_sample['propensity'] = propensities
    # calculate cpe for each bid (infinite when propensity is 0)
    bids_sample['cpe'] = bids_sample['paying_price'] / bids_sample['propensity']
    # sort the impressions be cpe (cheapest engagements first)
    bids_sample = bids_sample.sort(columns=['cpe'], ascending=True)
    # calculate the total ad spend in df
    # NOTE(review): total_ad_spend is computed but never used below.
    total_ad_spend = bids_sample['paying_price'].sum()
    # create a column in df representing cumulative perc of ad spend
    bids_sample['cum_spend'] = bids_sample['paying_price'].cumsum()
    bids_sample['cum_spend_perc'] = \
        100*bids_sample['cum_spend'] / bids_sample['paying_price'].sum()
    # the CPE of the last (most expensive) bid still within the urgency budget
    cpe_threshold = \
        bids_sample[bids_sample['cum_spend_perc'] < spend_urgency].iloc[-1]['cpe']
    return cpe_threshold
def simulate_dynamic_bid(budget, model, print_output=True):
    """Replay the log with a budget-aware bidder.

    Periodically recomputes a max-CPE threshold from spend urgency (how much
    budget remains vs. the value of the remaining inventory) and bids
    P(click) * cpe_threshold on each impression, bidding 0 once the budget
    is exhausted. Returns (ad_spend, impressions, clicks, cpm, ctr, cpc).
    """
    budget0 = budget
    print 'Budget0: %s' % str(budget)
    # end date
    # NOTE(review): end_time is defined but never used.
    end_time = 20130609000110181
    # create df with all impressions
    df = pd.DataFrame.from_csv('./data/test_ssf_100.csv')
    # keep track of bids
    num_bids = 0
    impressions = 0
    clicks = 0
    bids = []
    # define starting cpe threshold
    cpe_threshold = 80
    # for each impression in the log
    for index, value in df.iterrows():
        # update max CPE
        num_bids += 1
        # NOTE(review): this chained comparison (0.02 > x > 0.95) can never
        # be true, so the every-500-bids refresh branch is dead code and the
        # threshold is only ever refreshed every 5000 bids below. The intent
        # was probably to refresh more often when the budget ratio is near
        # its extremes -- confirm and fix.
        if 0.02 > float(budget) / budget0 > 0.95:
            if num_bids % 500 == 0:
                # create data frame of historical bids
                historical_bids = df[df.index <= index]
                # compute the spend urgency
                spend_urgency = calculate_spend_urgency(budget, df, index)
                # calculate new max CPE threshold
                if spend_urgency == 0:
                    cpe_threshold = np.inf
                else:
                    cpe_threshold = \
                        calculate_cpe_threshold(historical_bids, spend_urgency, model)
        else:
            if num_bids % 5000 == 0:
                # create data frame of historical bids
                historical_bids = df[df.index <= index]
                # compute the spend urgency
                spend_urgency = calculate_spend_urgency(budget, df, index)
                # calculate new max CPE threshold
                if spend_urgency == 0:
                    cpe_threshold = np.inf
                else:
                    cpe_threshold = \
                        calculate_cpe_threshold(historical_bids, spend_urgency, model)
        # grab the attributes of the impression (drop clicks)
        attributes = value
        price = attributes['paying_price']
        attributes = attributes.drop(['clicks', 'paying_price'])
        # calculate CPE
        propensity = model.predict_proba(attributes)[0][1]
        # calculate bid
        bid = propensity * cpe_threshold
        # stop bidding once the budget has gone negative
        if budget < 0:
            bid = 0.0
        bids.append(bid)
        # update real time metrics
        # NOTE(review): the live loop wins on '>=' but the final metrics
        # below are recomputed with strict '>', so tie auctions are counted
        # against the budget here yet excluded from the reported results.
        if bid >= price:
            budget -= price/1000.0
            impressions += 1
    df['bid'] = bids
    df['impression'] = (df['bid'] > df['paying_price'])*1
    df_winning_bids = df[df['impression'] == 1]
    # impressions/clicks/spend are recomputed from the winning-bid frame,
    # overwriting the running totals kept in the loop
    impressions = df_winning_bids.shape[0]
    clicks = df_winning_bids['clicks'].sum()
    ad_spend = df_winning_bids['paying_price'].sum()/1000.0
    cpm = float(ad_spend) / (impressions) * 1000.0
    ctr = float(clicks) / impressions
    cpc = float(ad_spend) / clicks
    if print_output == False:
        return ad_spend, impressions, clicks, cpm, ctr, cpc
    else:
        print 'Total ad spend: %.2f Yen' % ad_spend
        print 'Number of impressions delivered: %d' % impressions
        print 'Number of clicks: %d' % clicks
        print 'eCPM: %.2f Yen' % cpm
        print 'CTR: %0.2f %%' % (ctr*100)
        print 'CPC: %.2f Yen' % cpc
        print '-'*50
        return ad_spend, impressions, clicks, cpm, ctr, cpc
################################################################################
# MAIN
################################################################################
def evaluate_models(df_train, df_test):
    """Fit five classifiers on df_train, evaluate each on df_test, and print
    the results.

    Models: logistic regression, decision trees (depth 2 and 5), LDA, and
    Gaussian naive Bayes. Returns a 20-tuple of
    (accuracy, recall, precision) for each model followed by the five
    90%-cumulative-click thresholds, in the order below.
    """
    # logistic regression
    model_logreg = fit_model(df_train, model='logistic_regression')
    accuracy_unseen_logreg, recall_unseen_logreg, precision_unseen_logreg, \
        cm_unseen_logreg, ninety_perc_logreg = \
        evaluate_model_on_full_set(df_test, model_logreg)
    print '### LOGISTIC REGRESSION ###'
    print accuracy_unseen_logreg*100
    print recall_unseen_logreg*100
    print precision_unseen_logreg*100
    print ninety_perc_logreg
    print '-'*100
    # decision tree (max depth = 2)
    model_tree2 = fit_model(df_train, model='tree', max_depth=2)
    accuracy_unseen_tree2, recall_unseen_tree2, precision_unseen_tree2, \
        cm_unseen_tree2, ninety_perc_tree2 = \
        evaluate_model_on_full_set(df_test, model_tree2)
    print '### DECISION TREE (max_depth=2) ###'
    print accuracy_unseen_tree2*100
    print recall_unseen_tree2*100
    print precision_unseen_tree2*100
    print ninety_perc_tree2
    print '-'*100
    # decision tree (max depth = 5)
    model_tree5 = fit_model(df_train, model='tree', max_depth=5)
    accuracy_unseen_tree5, recall_unseen_tree5, precision_unseen_tree5, \
        cm_unseen_tree5, ninety_perc_tree5 = \
        evaluate_model_on_full_set(df_test, model_tree5)
    print '### DECISION TREE (max_depth=5) ###'
    print accuracy_unseen_tree5*100
    print recall_unseen_tree5*100
    print precision_unseen_tree5*100
    print ninety_perc_tree5
    print '-'*100
    # lda
    model_lda = fit_model(df_train, model='lda')
    accuracy_unseen_lda, recall_unseen_lda, precision_unseen_lda, \
        cm_unseen_lda, ninety_perc_lda = \
        evaluate_model_on_full_set(df_test, model_lda)
    print '### LDA ###'
    print accuracy_unseen_lda*100
    print recall_unseen_lda*100
    print precision_unseen_lda*100
    print ninety_perc_lda
    print '-'*100
    # nb
    model_nb = fit_model(df_train, model='naivebayes')
    accuracy_unseen_nb, recall_unseen_nb, precision_unseen_nb, \
        cm_unseen_nb, ninety_perc_nb = \
        evaluate_model_on_full_set(df_test, model_nb)
    print '### NAIVE BAYES ###'
    print accuracy_unseen_nb*100
    print recall_unseen_nb*100
    print precision_unseen_nb*100
    print ninety_perc_nb
    print '-'*100
    # confusion matrices are computed above but intentionally not returned
    return accuracy_unseen_logreg, recall_unseen_logreg, precision_unseen_logreg, \
        accuracy_unseen_tree2, recall_unseen_tree2, precision_unseen_tree2, \
        accuracy_unseen_tree5, recall_unseen_tree5, precision_unseen_tree5, \
        accuracy_unseen_lda, recall_unseen_lda, precision_unseen_lda, \
        accuracy_unseen_nb, recall_unseen_nb, precision_unseen_nb, \
        ninety_perc_logreg, ninety_perc_tree2, ninety_perc_tree5, \
        ninety_perc_lda, ninety_perc_nb
def evaluate_flat_bid():
    """Sweep a range of flat bid prices through simulate_flat_bid.

    Returns (flat_bids, ad_spend_vector, imp_vector, click_vector), one
    entry per bid level, printing each simulation's metrics along the way.
    """
    print '### SIMULATING FLAT BID ###'
    print 'ad_spend, imp, click, cpm, ctr, cpc'
    ad_spend_vector = []
    imp_vector = []
    click_vector = []
    # bid levels to sweep (same units as paying_price)
    flat_bids = [5,10,20,30,40,50,75,100,150,200,250,300]
    for flat_bid in flat_bids:
        ad_spend, imp, click, cpm, ctr, cpc = \
            simulate_flat_bid(flat_bid, print_output=False)
        ad_spend_vector.append(ad_spend)
        imp_vector.append(imp)
        click_vector.append(click)
        print ad_spend, imp, click, cpm, ctr, cpc
    print
    print '-'*100
    return flat_bids, ad_spend_vector, imp_vector, click_vector
def evaluate_constant_goal_bid():
    """Sweep a range of per-click goal values through simulate_goal_bid
    using a logistic-regression propensity model.

    Returns (goals_vector, ad_spend_vector, impressions_vector,
    clicks_vector), one entry per goal level.
    """
    print '### SIMULATING GOAL BID ###'
    # load the data
    df_train = pd.DataFrame.from_csv(DATA_PATH + 'train_sample_100.csv')
    # train the model
    model_logreg = \
        fit_model(df_train, model='logistic_regression')
    # simulate for multiple goals
    ad_spend_vector = []
    impressions_vector = []
    clicks_vector = []
    # per-click valuations to sweep
    goals_vector = [10,25,50,75,100,150,200,500,1000]
    for g in goals_vector:
        print 'Goal: %s' % str(g)
        ad_spend, impressions, clicks, cpm, ctr, cpc = \
            simulate_goal_bid(goal=g,model=model_logreg)
        ad_spend_vector.append(ad_spend)
        impressions_vector.append(impressions)
        clicks_vector.append(clicks)
        print 'g, ad_spend, imp, click, cpm, ctr, cpc'
        print g, ad_spend, impressions, clicks, cpm, ctr,cpc
    return goals_vector, ad_spend_vector, impressions_vector, clicks_vector
def evaluate_min_cpe_bid(flat_goal_budgets):
    """Sweep a set of budgets through the dynamic (budget-aware) bidder.

    flat_goal_budgets: budgets to simulate -- the caller passes the ad-spend
    amounts observed in the flat-goal sweep so the strategies are compared
    at matching spend levels.
    Returns (flat_goal_budgets, ad_spend_vector, impressions_vector,
    clicks_vector).
    """
    print '### SIMULATING DYNAMIC BID ###'
    # load the data
    df_train = pd.DataFrame.from_csv(DATA_PATH + 'train_sample_100.csv')
    # fit the model
    model_logreg = \
        fit_model(df_train, model='logistic_regression')
    # simulate for different budgets
    ad_spend_vector = []
    impressions_vector = []
    clicks_vector = []
    budgets_vector = flat_goal_budgets
    for b in budgets_vector:
        ad_spend, impressions, clicks, cpm, ctr, cpc = \
            simulate_dynamic_bid(budget=b, model=model_logreg)
        ad_spend_vector.append(ad_spend)
        impressions_vector.append(impressions)
        clicks_vector.append(clicks)
    return flat_goal_budgets, ad_spend_vector, impressions_vector, clicks_vector
def preprocess_data():
    """Run the full preprocessing pipeline and write every stage to CSV.

    For both train and test logs: feature-select impressions and clicks,
    merge them, apply the single-sparse-feature (ssf) transform, then build
    stratified train samples / aligned test sets at several sampling ratios.
    Each intermediate frame is persisted under DATA_PATH.
    """
    df_imp_train_featureselection = feature_selection_imps(IMP_TRAIN_PATH)
    df_imp_train_featureselection.to_csv(DATA_PATH + 'train_imp_fs.csv')
    print 'train imps feature selection complete.'
    df_click_train_featureselection = feature_selection_clicks(CLICK_TRAIN_PATH)
    df_click_train_featureselection.to_csv(DATA_PATH + 'train_click_fs.csv')
    print 'train clicks feature selection complete.'
    df_train_merge = \
        merge_impression_click(df_imp_train_featureselection, df_click_train_featureselection)
    df_train_merge.to_csv(DATA_PATH + 'train_merge.csv')
    print 'train imp click merge complete.'
    df_train_ssf = transform_ssf(df_train_merge)
    df_train_ssf.to_csv(DATA_PATH + 'train_ssf.csv')
    print 'train ssf transform complete.'
    df_imp_test_featureselection = feature_selection_imps(IMP_TEST_PATH)
    df_imp_test_featureselection.to_csv(DATA_PATH + 'test_imp_fs.csv')
    print 'test imps feature selection complete.'
    df_click_test_featureselection = feature_selection_clicks(CLICK_TEST_PATH)
    df_click_test_featureselection.to_csv(DATA_PATH + 'test_click_fs.csv')
    print 'test clicks feature selection complete.'
    df_test_merge = merge_impression_click(df_imp_test_featureselection, df_click_test_featureselection)
    df_test_merge.to_csv(DATA_PATH + 'test_merge.csv')
    print 'test imp click merge complete.'
    df_test_ssf = transform_ssf(df_test_merge)
    df_test_ssf.to_csv(DATA_PATH + 'test_ssf.csv')
    print 'test ssf transform complete.'
    # build one stratified sample per sampling ratio
    for ss_ratio in [100,200,500,1000,2000,5000]:
        print ss_ratio
        df_sample = sample_train_data(df_train_ssf, perc_no_clicks=ss_ratio)
        df_test = df_test_ssf.copy()
        # rename hour attributes in df_sample
        # (zero-pads single-digit hours, e.g. 'hour_3' -> 'hour_03', so the
        # alphabetical column sort below aligns with the test set)
        for c in df_sample.columns:
            if c[0:5] == 'hour_' and len(c) == 6:
                new_name = c[0:5] + '0' + c[-1]
                df_sample = df_sample.rename(columns={c: new_name})
        # sort the columns alphabetically
        df_test = df_test.reindex_axis(sorted(df_test.columns), axis=1)
        df_sample = df_sample.reindex_axis(sorted(df_sample.columns), axis=1)
        # remove train attributes if they do not show up in the test data
        for c in df_sample.columns:
            if c not in df_test.columns:
                df_sample = df_sample.drop([c], axis=1)
        # remove test attributes if they do not show up in the train data
        for c in df_test.columns:
            if c not in df_sample.columns:
                df_test = df_test.drop([c], axis=1)
        # write the new sets to csv
        df_test.to_csv(DATA_PATH + 'test_ssf_%s.csv' % str(ss_ratio))
        df_sample.to_csv(DATA_PATH + 'train_sample_%s.csv' % str(ss_ratio))
def predicting_propensity_to_click():
    """Evaluate all five classifiers across several stratified-sampling
    ratios and chart accuracy, recall, precision, and the 90%-cumulative-
    click threshold against the sampling ratio.

    Writes four PNGs (ss_accuracy / ss_recall / ss_precision / ss_ninety)
    into CHARTS_DIR_PATH.
    """
    accuracies_logreg = []
    accuracies_tree2 = []
    accuracies_tree5 = []
    accuracies_lda = []
    accuracies_nb = []
    recalls_logreg = []
    recalls_tree2 = []
    recalls_tree5 = []
    recalls_lda = []
    recalls_nb = []
    precisions_logreg = []
    precisions_tree2 = []
    precisions_tree5 = []
    precisions_lda = []
    precisions_nb = []
    ninety_cums_logreg = []
    ninety_cums_tree2 = []
    ninety_cums_tree5 = []
    ninety_cums_lda = []
    ninety_cums_nb = []
    ss_ratios = [100, 200, 500, 1000, 5000]
    # ss_ratios = [5000, 10000, 20000]
    for ssr in ss_ratios:
        df_train = pd.DataFrame.from_csv(DATA_PATH + 'train_sample_%s.csv' % str(ssr))
        df_test = pd.DataFrame.from_csv(DATA_PATH + 'test_ssf_%s.csv' % str(ssr))
        # drop rows containing infinities or NaNs before fitting/scoring
        df_train = df_train.replace([np.inf, -np.inf], np.nan)
        df_train = df_train.dropna()
        df_test = df_test.replace([np.inf, -np.inf], np.nan)
        df_test = df_test.dropna()
        accuracy_unseen_logreg, recall_unseen_logreg, precision_unseen_logreg, \
            accuracy_unseen_tree2, recall_unseen_tree2, precision_unseen_tree2, \
            accuracy_unseen_tree5, recall_unseen_tree5, precision_unseen_tree5, \
            accuracy_unseen_lda, recall_unseen_lda, precision_unseen_lda, \
            accuracy_unseen_nb, recall_unseen_nb, precision_unseen_nb, \
            ninety_cum_logreg, ninety_cum_tree2, ninety_cum_tree5, \
            ninety_cum_lda, ninety_cum_nb = evaluate_models(df_train, df_test)
        accuracies_logreg.append(accuracy_unseen_logreg)
        accuracies_tree2.append(accuracy_unseen_tree2)
        accuracies_tree5.append(accuracy_unseen_tree5)
        accuracies_lda.append(accuracy_unseen_lda)
        accuracies_nb.append(accuracy_unseen_nb)
        recalls_logreg.append(recall_unseen_logreg)
        recalls_tree2.append(recall_unseen_tree2)
        recalls_tree5.append(recall_unseen_tree5)
        recalls_lda.append(recall_unseen_lda)
        recalls_nb.append(recall_unseen_nb)
        precisions_logreg.append(precision_unseen_logreg)
        precisions_tree2.append(precision_unseen_tree2)
        precisions_tree5.append(precision_unseen_tree5)
        precisions_lda.append(precision_unseen_lda)
        precisions_nb.append(precision_unseen_nb)
        ninety_cums_logreg.append(ninety_cum_logreg)
        ninety_cums_tree2.append(ninety_cum_tree2)
        ninety_cums_tree5.append(ninety_cum_tree5)
        ninety_cums_lda.append(ninety_cum_lda)
        ninety_cums_nb.append(ninety_cum_nb)
        print ssr
    # create charts
    plt.plot(ss_ratios, accuracies_logreg)
    plt.plot(ss_ratios, accuracies_tree2)
    plt.plot(ss_ratios, accuracies_tree5)
    plt.plot(ss_ratios, accuracies_lda)
    plt.plot(ss_ratios, accuracies_nb)
    plt.title('Test Accuracy At Different Strafied Sampling Thresholds')
    plt.ylabel('Accuracy')
    plt.xlabel('Strafied Sample %')
    plt.legend(['logreg', 'tree2', 'tree5', 'lda', 'nb'])
    plt.savefig(CHARTS_DIR_PATH + 'ss_accuracy.png')
    plt.clf()
    plt.plot(ss_ratios, recalls_logreg)
    plt.plot(ss_ratios, recalls_tree2)
    plt.plot(ss_ratios, recalls_tree5)
    plt.plot(ss_ratios, recalls_lda)
    plt.plot(ss_ratios, recalls_nb)
    plt.title('Test Recall At Different Strafied Sampling Thresholds')
    plt.ylabel('Recall')
    plt.xlabel('Strafied Sample %')
    plt.legend(['logreg', 'tree2', 'tree5', 'lda', 'nb'])
    plt.savefig(CHARTS_DIR_PATH + 'ss_recall.png')
    plt.clf()
    plt.plot(ss_ratios, precisions_logreg)
    plt.plot(ss_ratios, precisions_tree2)
    plt.plot(ss_ratios, precisions_tree5)
    plt.plot(ss_ratios, precisions_lda)
    plt.plot(ss_ratios, precisions_nb)
    plt.title('Test Precision At Different Strafied Sampling Thresholds')
    plt.ylabel('Precision')
    plt.xlabel('Strafied Sample %')
    plt.legend(['logreg', 'tree2', 'tree5', 'lda', 'nb'])
    plt.savefig(CHARTS_DIR_PATH + 'ss_precision.png')
    plt.clf()
    plt.plot(ss_ratios, ninety_cums_logreg)
    plt.plot(ss_ratios, ninety_cums_tree2)
    plt.plot(ss_ratios, ninety_cums_tree5)
    plt.plot(ss_ratios, ninety_cums_lda)
    plt.plot(ss_ratios, ninety_cums_nb)
    plt.title('Ninety Percent Cumulative Click At Different Strafied Sampling Thresholds')
    # NOTE(review): plt.ylabel does no %-formatting, so '%%' renders
    # literally as a double percent sign -- likely meant '90%'.
    plt.ylabel('90%% Cumulative Click')
    plt.xlabel('Strafied Sample %')
    plt.legend(['logreg', 'tree2', 'tree5', 'lda', 'nb'])
    plt.savefig(CHARTS_DIR_PATH + 'ss_ninety.png')
    plt.clf()
    # print type(ss_ratios), type(fscores_logreg)
    # plt.plot(ss_ratios, fscores_logreg)
    # plt.plot(ss_ratios, fscores_tree2)
    # plt.plot(ss_ratios, fscores_tree5)
    # plt.plot(ss_ratios, fscores_lda)
    # plt.plot(ss_ratios, fscores_nb)
    # plt.title('F Score At Different Strafied Sampling Thresholds')
    # plt.ylabel('F Score')
    # plt.xlabel('Strafied Sample %')
    # plt.legend(['logreg', 'tree2', 'tree5', 'lda', 'nb'])
    # plt.savefig(CHARTS_DIR_PATH + 'ss_fscore.png')
    # plt.clf()
# ---------------------------------------------------------------------------
# Script entry: run the full pipeline, then compare the three bidding
# strategies (flat bid, flat goal, dynamic goal) on CPM / CTR / CPC charts.
# ---------------------------------------------------------------------------
preprocess_data()
predicting_propensity_to_click()
flat_bids, ad_spend_fb, imps_fb, clicks_fb = evaluate_flat_bid()
goals, ad_spend_fg, imps_fg, clicks_fg = evaluate_constant_goal_bid()
# the flat-goal ad spends are reused as budgets for the dynamic bidder so
# both strategies are compared at matching spend levels
budgets, ad_spend_dg, imps_dg, clicks_dg = evaluate_min_cpe_bid(ad_spend_fg)
# summary frame for the flat-bid sweep
df_fb = pd.DataFrame(columns=['flat_bid', 'ad_spend', 'imps', 'clicks'])
df_fb['flat_bid'] = flat_bids
df_fb['ad_spend'] = ad_spend_fb
df_fb['imps'] = imps_fb
df_fb['clicks'] = clicks_fb
df_fb['cpm'] = df_fb['ad_spend'].divide(df_fb['imps'])*1000
df_fb['ctr'] = df_fb['clicks'].divide(df_fb['imps'])*100
df_fb['cpc'] = df_fb['ad_spend'].divide(df_fb['clicks'])
# summary frame for the flat-goal sweep
df_fg = pd.DataFrame(columns=['goal', 'ad_spend', 'imps', 'clicks'])
df_fg['goal'] = goals
df_fg['ad_spend'] = ad_spend_fg
df_fg['imps'] = imps_fg
df_fg['clicks'] = clicks_fg
df_fg['cpm'] = df_fg['ad_spend'].divide(df_fg['imps'])*1000
df_fg['ctr'] = df_fg['clicks'].divide(df_fg['imps'])*100
df_fg['cpc'] = df_fg['ad_spend'].divide(df_fg['clicks'])
# summary frame for the dynamic-goal sweep
df_dg = pd.DataFrame(columns=['budget', 'ad_spend', 'imps', 'clicks'])
df_dg['budget'] = budgets
df_dg['ad_spend'] = ad_spend_dg
df_dg['imps'] = imps_dg
df_dg['clicks'] = clicks_dg
df_dg['cpm'] = df_dg['ad_spend'].divide(df_dg['imps'])*1000
df_dg['ctr'] = df_dg['clicks'].divide(df_dg['imps'])*100
df_dg['cpc'] = df_dg['ad_spend'].divide(df_dg['clicks'])
# flat bid
# (dual-axis chart: CPC on the left axis, CTR on the right; the dummy
# (0,0) point exists only so the legend shows the CTR marker)
fig, ax1 = plt.subplots()
ax1.plot(df_fb['flat_bid'], df_fb['cpc'], marker='o')
ax1.plot(0,0,color='#2ca25f', marker='^')
ax1.set_xlabel('Flat Bid (Yen)')
ax1.set_ylabel('Cost Per Click (Yen)')
ax2 = ax1.twinx()
ax2.plot(df_fb['flat_bid'], df_fb['ctr'], color='#2ca25f', marker='^')
ax2.set_ylabel('Click Through Rate (%)')
ax1.legend(['cpc','ctr'])
plt.title('Performance At Varying Flat Bid Amounts')
plt.savefig(CHARTS_DIR_PATH + 'flatbid.png')
plt.clf()
# flat goal
fig, ax1 = plt.subplots()
ax1.plot(df_fg['goal'], df_fg['cpc'], marker='o')
ax1.plot(0,0,color='#2ca25f', marker='^')
ax1.set_xlabel('Goal (Yen)')
ax1.set_ylabel('Cost Per Click (Yen)')
ax2 = ax1.twinx()
ax2.plot(df_fg['goal'], df_fg['ctr'], color='#2ca25f', marker='^')
ax2.set_ylabel('Click Through Rate (%)')
ax1.legend(['cpc','ctr'])
plt.title('Performance At Varying Flat Goal Amounts')
plt.savefig(CHARTS_DIR_PATH + 'flatgoal.png')
plt.clf()
# dynamic goal
fig, ax1 = plt.subplots()
ax1.plot(df_dg['budget'], df_dg['cpc'], marker='o')
ax1.plot(0,0,color='#2ca25f', marker='^')
ax1.set_xlabel('Budget (Yen)')
ax1.set_ylabel('Cost Per Click (Yen)')
ax2 = ax1.twinx()
ax2.plot(df_dg['budget'], df_dg['ctr'], color='#2ca25f', marker='^')
ax2.set_ylabel('Click Through Rate (%)')
ax1.legend(['cpc','ctr'])
plt.title('Performance At Varying Budgets')
plt.savefig(CHARTS_DIR_PATH + 'dynamicgoal.png')
plt.clf()
# plot cpm vs ad spend
plt.plot(df_fb['ad_spend'], df_fb['cpm'], marker='o')
plt.plot(df_fg['ad_spend'], df_fg['cpm'], marker='^')
plt.plot(df_dg['ad_spend'], df_dg['cpm'], marker='s')
plt.title('CPM At Varying Budget Levels')
plt.xlabel('Ad Spend (Yen)')
plt.ylabel('Cost Per Thousand Impressions (Yen)')
plt.legend(['flat bid', 'flat goal', 'dynamic goal'])
plt.savefig(CHARTS_DIR_PATH + 'cpm_vs_adspend.png')
plt.clf()
# plot ctr vs ad spend
plt.plot(df_fb['ad_spend'], df_fb['ctr'], marker='o')
plt.plot(df_fg['ad_spend'], df_fg['ctr'], marker='^')
plt.plot(df_dg['ad_spend'], df_dg['ctr'], marker='s')
plt.title('Click Through Rate At Varying Budget Levels')
plt.xlabel('Ad Spend (Yen)')
plt.ylabel('Click Through Rate (%)')
plt.legend(['flat bid', 'flat goal', 'dynamic goal'])
plt.savefig(CHARTS_DIR_PATH + 'ctr_vs_adspend.png')
plt.clf()
# plot cpc vs ad spend
plt.plot(df_fb['ad_spend'], df_fb['cpc'], marker='o')
plt.plot(df_fg['ad_spend'], df_fg['cpc'], marker='^')
plt.plot(df_dg['ad_spend'], df_dg['cpc'], marker='s')
plt.title('Cost Per Click At Varying Budget Levels')
plt.xlabel('Ad Spend (Yen)')
plt.ylabel('Cost Per Click (Yen)')
plt.legend(['flat bid', 'flat goal', 'dynamic goal'])
plt.savefig(CHARTS_DIR_PATH + 'cpc_vs_adspend.png')
plt.clf()
|
UTF-8
|
Python
| false | false | 2,014 |
8,194,797,639,560 |
f2a6bafc3e2206fc9f3aa4ee7db6835a441d7548
|
d1b82119bc37ea1a679ab416f8ec66bec22a8c79
|
/jom/templatetags/jom_tags.py
|
bfd4cf140c0a0bde6273ab61b165cb83732fb435
|
[] |
no_license
|
msama/django-jom
|
https://github.com/msama/django-jom
|
a306c4c0ee441714165c039cac501e1aa74f72ae
|
eabd1ec79ac1190d1646aab1242727d9a0368852
|
refs/heads/master
| 2020-12-24T22:29:52.721481 | 2012-10-13T20:19:10 | 2012-10-13T20:19:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Jul 18, 2012
@author: rax
'''
from django import template
from jom.factory import JomFactory
register = template.Library()
@register.inclusion_tag('jom/templatetags/register_instance.js',
        takes_context = True)
def register_instance(context, instance):
    """Inclusion tag rendering the JS snippet that registers one model
    instance with the client-side JOM layer.

    instance: a Django model instance whose class was registered with
    JomFactory; raises AssertionError when the model is unknown.
    Returns the template context: the JomInstance wrapper, the descriptor
    class name ('clazz'), and the instance itself.
    """
    jomInstance = JomFactory.default().getJomInstance(instance)
    if not jomInstance:
        raise AssertionError(
                "Model not registered: %s" % instance.__class__)
    return {'jomInstance': jomInstance,
            'clazz': jomInstance.descriptor.__class__.__name__,
            'instance': instance
            }
@register.inclusion_tag('jom/templatetags/register_queryset.js',
        takes_context = True)
def register_queryset(context, queryset):
    """Inclusion tag rendering the JS snippet that registers a whole
    queryset with the client-side JOM layer.

    Simply passes the queryset through to the template context.
    """
    return {'queryset': queryset}
|
UTF-8
|
Python
| false | false | 2,012 |
18,047,452,592,435 |
36a8f8a8eef47357df263f05cc26a2b4dad45e8f
|
980e3b3a905f50015a840f671a3c050b86785538
|
/experiments/pymunk_test.py
|
28a1d309fb1ab4bff83315a91283e0b27b65fb83
|
[] |
no_license
|
jeremyosborne/ant_cities
|
https://github.com/jeremyosborne/ant_cities
|
2c4a9ad6cebb44bc2f718212bcab11fa4e0f4f4a
|
4147d4b700cfd9a9c3072f7532d0ad6c38ea1c3c
|
refs/heads/master
| 2019-01-02T09:10:57.961591 | 2014-02-26T04:07:35 | 2014-02-26T04:07:35 | 21,100,045 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Jul 4, 2013
@author: john
'''
import pygame, math
from pygame.locals import *
import pymunk
from pymunk.vec2d import Vec2d
class Ant(object):
    """Simple kinematic agent: position/velocity/acceleration vectors,
    steering limits, and a sprite loaded from the assets directory.
    max_speed / max_steering_force default to 0 and are expected to be
    set by the caller after construction.
    """
    def __init__(self):
        self.location = Vec2d(0.0, 0.0)       # current position (pixels)
        self.velocity = Vec2d(0.0, 0.0)       # current velocity vector
        self.acceleration = Vec2d(0.0 ,0.0)   # current acceleration vector
        self.max_steering_force = 0.0         # steering cap, set by caller
        self.max_speed = 0.0                  # speed cap, set by caller
        self.graphic = pygame.image.load('../assets/red-ant.png')  # sprite surface
#from gameobjects.vector2 import Vector2
# --- simulation state and tuning constants ---------------------------------
start_position = Vec2d(20, 300)    # where the ant starts (pixels)
end_position = Vec2d(20, 100)      # initial destination
destination = end_position
speed = 0. #Our starting speed.
speed_up_acceleration = 10.        # acceleration while speeding up (px/s^2)
acceleration = speed_up_acceleration
target_speed = 20.                 # cruising speed (px/s)
direction = 0.                     # sprite rotation angle (degrees)
slow_down_distance = 120.          # start braking inside this radius (px)
slow_down_acceleration = -30.      # acceleration while braking (px/s^2)
# BUG FIX: `location` was read below (and throughout the main loop) but was
# never assigned, raising a NameError on import. The ant begins at its
# starting position.
location = start_position
current_heading = (destination - location)
current_heading = current_heading.normalized()
desired_heading = current_heading
steering_force = 360. #Degrees per second
#Steering force = desired velocity - current velocity
if __name__ == '__main__':
pygame.init()
screen = pygame.display.set_mode((800, 600))
ant = Ant()
ant.location = start_position
ant.max_speed = 4
ant.max_steering_force = 0.1
clock = pygame.time.Clock()
background = pygame.surface.Surface((800, 600)).convert()
background.fill((0, 0, 0))
#Show initial position
screen.blit(background, (0,0))
screen.blit(ant, (location))
pygame.display.update()
time_passed = clock.tick(30)
#run loop
while True:
for event in pygame.event.get():
if event.type == QUIT:
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: #left click and new destination selected
mouse_x, mouse_y = pygame.mouse.get_pos()
destination = Vec2d(mouse_x, mouse_y)
acceleration = speed_up_acceleration
time_passed = float(clock.tick(30)/1000.)
if location != destination:
#Acceleration code
if acceleration > 0: #Then we're accelerating.
if speed < target_speed:
speed += acceleration * time_passed
elif acceleration < 0: #Then we're slowing down.
if (speed + acceleration) > 0:
speed += acceleration * time_passed
else:
speed = (slow_down_acceleration / 2.) * -1.
vec_to_destination = destination - location
distance_to_destination = vec_to_destination.get_length()
new_heading = vec_to_destination.normalized()
print "New Heading: ", new_heading
travel_distance = min(distance_to_destination, time_passed * speed)
#Change current_heading towards the new heading
#if current_heading != new_heading:
#current_heading += steering_force
print "Current_heading: ", current_heading
location += travel_distance * current_heading
print "Travel Distance: ", travel_distance, "Distance_to_destination: ", distance_to_destination
print "Speed: ", speed
#Direction used for rotating the image.
direction = (math.atan2(current_heading.y, current_heading.x)*(180/math.pi))
print "Direction: ", direction, "heading.y: ", current_heading.y, "heading.x:", current_heading.x
#Are we there yet?
if distance_to_destination <= slow_down_distance:
#replace with formula that takes into consideration current speed.
#also change test, such that the slow down distance is also based
#on the current speed.
acceleration = slow_down_acceleration
screen.blit(background, (0,0))
#screen.blit(ant, (location))
screen.blit((pygame.transform.rotate(ant, (direction*-1.))), location)
#Finalize
pygame.display.update()
|
UTF-8
|
Python
| false | false | 2,014 |
15,187,004,369,953 |
8cbe3d755ff29b357e0b094781cd3b950629737a
|
9b1823b73a03f7517c2039ccdb3889300bacf455
|
/dialogs/KUM3/fPesertaKUM3New_data.py
|
d66f1d0cecc6210020a90ea94bf1489b1eb56635
|
[] |
no_license
|
ihsansolusi/BMMProgram
|
https://github.com/ihsansolusi/BMMProgram
|
d50eb6807abad265544a6c855888e6d9cef3a864
|
97428d5cf3f61256a8bc1ced551da611a8884b50
|
refs/heads/master
| 2016-03-31T02:51:25.931005 | 2013-09-12T23:38:15 | 2013-09-12T23:38:15 | 2,272,273 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import com.ihsan.foundation.pobjecthelper as phelper
def formSetDataEx(UIDefList, Parameter):
    # Intentionally a no-op: this dialog needs no extra data preparation.
    pass
def SimpanData(config, parameter, returnpacket):
    """Persist a new KUM3 participant from the form data.

    Within a single transaction, creates a CustomerPersonal record from the
    first form record, then a Mustahiq linked to that customer, and finally
    the Mustahiq-to-product association. Any exception rolls the
    transaction back and is re-raised to the caller.
    """
    recs = parameter.uippeserta.GetRecord(0)
    # NOTE(review): cs_id is computed but never used below.
    cs_id = recs.key.split('=')
    config.BeginTransaction()
    # 'KU' -- presumably the KUM3 product-association type code; confirm.
    param = {'tipe':'KU'}
    try:
        helper = phelper.PObjectHelper(config)
        # customer master record, copied field-by-field from the form
        custData = helper.CreatePObject('CustomerPersonal')
        custData.CustomerName = recs.CustomerName
        custData.AddressStreet = recs.AddressStreet
        custData.AddressKelurahan = recs.AddressKelurahan
        custData.AddressKecamatan = recs.AddressKecamatan
        custData.AddressSubDistrict = recs.AddressSubDistrict
        custData.AddressCity = recs.AddressCity
        custData.AddressState = recs.AddressState
        custData.IdPropinsi = recs.GetFieldByName('LPropinsi.IdPropinsi')
        custData.IdNegara = recs.GetFieldByName('LNegara.IdNegara')
        custData.AddressPostalCode = recs.AddressPostalCode
        custData.PhoneNumber = recs.PhoneNumber
        custData.IdentityNumber = recs.IdentityNumber
        custData.IdentityType = recs.IdentityType
        custData.Gender = recs.Gender
        custData.BirthDate = recs.BirthDate
        custData.BirthPlace = recs.BirthPlace
        custData.Religion = recs.Religion
        custData.Marital = recs.Marital
        custData.FormalEducation = recs.FormalEducation
        custData.WorkSector = recs.WorkSector
        custData.TotalFamily = recs.TotalFamily
        # mustahiq record tied to the new customer
        iAppt = helper.CreatePObject('Mustahiq')
        iAppt.CustomerId = custData.CustomerId
        # link the mustahiq to the selected product
        assProd = helper.CreatePObject('MustahiqProduct', param)
        assProd.ProductId = recs.GetFieldByName('LProduct.ProductId')
        assProd.MustahiqId = iAppt.MustahiqId
        config.Commit()
    except:
        # roll back the whole transaction, then propagate the error
        config.Rollback()
        raise
|
UTF-8
|
Python
| false | false | 2,013 |
17,128,329,608,934 |
16f397a085caa3568a3ae17b1efba163fdd3fcd3
|
9921238a4bba03a98b62b32a982bf34db84884bd
|
/glue/qt/widgets/mpl_widget.py
|
ec138d8393b2fd1727f0729298e35e31ef8f4916
|
[] |
no_license
|
eteq/glue
|
https://github.com/eteq/glue
|
3a9ebc86f30e1ac276de1502ea1902e81ce8c681
|
683cb0fbdd1e67cdac7139e474b64517298002eb
|
refs/heads/master
| 2021-01-17T23:41:06.816926 | 2012-10-19T13:50:07 | 2012-10-19T13:50:07 | 6,298,204 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Python Qt4 bindings for GUI objects
from PyQt4 import QtGui
from PyQt4.QtCore import pyqtSignal
# import the Qt4Agg FigureCanvas object, that binds Figure to
# Qt4Agg backend. It also inherits from QWidget
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as \
FigureCanvas
# Matplotlib Figure object
from matplotlib.figure import Figure
class MplCanvas(FigureCanvas):
    """Qt widget wrapping a Matplotlib figure (Qt4Agg backend).

    Creates an empty white Figure, enables tight layout when the installed
    Matplotlib supports it, and marks the widget as expandable in both
    directions.
    """
    #signals
    # Declared here so drag coordinates can be emitted; nothing in this
    # class emits them -- presumably subclasses or event filters do.
    rightDrag = pyqtSignal(float, float)
    leftDrag = pyqtSignal(float, float)

    def __init__(self):
        # setup Matplotlib Figure and Axis
        self.fig = Figure(facecolor='#ffffff')
        try:
            self.fig.set_tight_layout(True)
        except AttributeError:  # matplotlib < 1.1 lacks set_tight_layout
            pass
        # initialization of the canvas
        FigureCanvas.__init__(self, self.fig)
        # we define the widget as expandable
        FigureCanvas.setSizePolicy(self,
                                   QtGui.QSizePolicy.Expanding,
                                   QtGui.QSizePolicy.Expanding)
        # notify the system of updated policy
        FigureCanvas.updateGeometry(self)
class MplWidget(QtGui.QWidget):
    """Plain Qt widget that hosts an MplCanvas in a zero-margin layout.

    Re-exposes the canvas' rightDrag/leftDrag signals so callers can
    connect to the widget without reaching into the canvas.
    """
    #signals
    rightDrag = pyqtSignal(float, float)
    leftDrag = pyqtSignal(float, float)

    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        # Embedded matplotlib canvas.
        self.canvas = MplCanvas()
        # Vertical layout with no chrome so the canvas fills the widget.
        layout = QtGui.QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(0)
        layout.addWidget(self.canvas)
        self.vbl = layout
        self.setLayout(layout)
        # Forward the canvas drag signals through this widget's signals.
        self.canvas.rightDrag.connect(self.rightDrag)
        self.canvas.leftDrag.connect(self.leftDrag)
|
UTF-8
|
Python
| false | false | 2,012 |
5,574,867,562,599 |
5c15411ebac48ca3dac5d5e0cc65b575922ed7d2
|
6108bac5de6ef9c79d1607e4c3b14be4848d3e00
|
/plugin.video.movie25/resources/libs/main.py
|
29ca07562bb0b2c9f617ffdc5b836a96b3c79832
|
[] |
no_license
|
nadav1110/mash2k3-repository
|
https://github.com/nadav1110/mash2k3-repository
|
abf2f702e56d3fd6fbb22be779f8e7d153b46cf9
|
ad87742e098460d94fac55c2623260f3eb020b05
|
refs/heads/master
| 2020-04-11T07:41:29.498166 | 2013-03-31T06:09:56 | 2013-03-31T06:09:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import urllib,urllib2,re,cookielib,string, urlparse,sys,os
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
import urlresolver
from t0mm0.common.addon import Addon
from t0mm0.common.net import Net as net
from metahandler import metahandlers
import datetime,time
#Mash Up - by Mash2k3 2012.
Mainurl ='http://www.movie25.com/movies/'
addon_id = 'plugin.video.movie25'
selfAddon = xbmcaddon.Addon(id=addon_id)
addon = Addon(addon_id)
grab = metahandlers.MetaData(preparezip = False)
################################################################################ Common Calls ##########################################################################################################
datapath = addon.get_profile()
if selfAddon.getSetting('visitor_ga')=='':
from random import randint
selfAddon.setSetting('visitor_ga',str(randint(0, 0x7fffffff)))
VERSION = "1.2.6"
PATH = "Movie25-"
UATRACK="UA-38312513-1"
def OPENURL(url):
    """Fetch *url* while spoofing a desktop Firefox User-Agent and return
    the page body with HTML entities / typographic junk normalised.

    NOTE(review): several source strings in the .replace() chain below look
    like entity-decoding / mojibake artifacts of this copy of the file;
    confirm against the canonical source before touching the chain -- the
    replacement order is significant ('&' handling must come after the
    longer entities).
    """
    print "openurl = " + url
    req = urllib2.Request(url)
    # Some hosts block the default urllib2 user agent; pretend to be a browser.
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
    response = urllib2.urlopen(req)
    link=response.read()
    response.close()
    link=link.replace(''',"'").replace('"','"').replace('&',"&").replace("'","'").replace('<i>','').replace("#8211;","-").replace('</i>','').replace("’","'").replace('&quot;','"').replace('×','').replace('&','').replace('‘','').replace('–','').replace('“','').replace('”','').replace('—','')
    return link
def REDIRECT(url):
    """Follow any HTTP redirects for *url* and return the final URL.

    Uses the same browser User-Agent spoof as OPENURL.  The response is
    closed in all cases (the original leaked the connection).
    """
    req = urllib2.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
    response = urllib2.urlopen(req)
    try:
        # geturl() reflects the URL after redirects were followed.
        return response.geturl()
    finally:
        response.close()  # fix: original never closed the response

def Clearhistory(url):
    """Delete the history file at *url* (a local filesystem path,
    despite the parameter name)."""
    os.remove(url)
################################################################################ AutoView ##########################################################################################################
def VIEWS():
    """Apply the user's preferred container view mode for content listings.

    When the 'auto-view' setting is enabled, switches the current XBMC
    container to the view id configured for the active skin.  Two id
    tables exist because the two supported skins number their view modes
    differently.  Unknown or unset values are a no-op, as before.

    Improvement over the original: the long if/elif chains (one getSetting
    round-trip per comparison) are replaced by a single lookup table.
    """
    if selfAddon.getSetting("auto-view") != "true":
        return
    skin = selfAddon.getSetting("choose-skin")
    if skin == "true":
        # Confluence-style skin: setting index -> skin view-mode id.
        view_ids = {"0": "50", "1": "51", "2": "500", "3": "501",
                    "4": "508", "5": "504", "6": "503", "7": "515"}
        choice = selfAddon.getSetting("con-view")
    elif skin == "false":
        # Alternate skin uses a different id numbering.
        view_ids = {"0": "50", "1": "52", "2": "501", "3": "55",
                    "4": "54", "5": "60", "6": "53"}
        choice = selfAddon.getSetting("xpr-view")
    else:
        # Neither "true" nor "false": preserve the original fall-through.
        return
    view = view_ids.get(choice)
    if view is not None:
        xbmc.executebuiltin("Container.SetViewMode(%s)" % view)
    return

def VIEWSB():
    """Apply the preferred view mode for the home/root listing."""
    if selfAddon.getSetting("auto-view") == "true":
        home = selfAddon.getSetting("home-view")
        if home == "0":
            xbmc.executebuiltin("Container.SetViewMode(50)")
        elif home == "1":
            xbmc.executebuiltin("Container.SetViewMode(500)")
    return
################################################################################ Movies Metahandler ##########################################################################################################
def GETMETA(mname,genre,year,thumb):
    """Look up movie metadata for *mname* via metahandlers.

    Returns an infoLabels dict suitable for xbmcgui.ListItem.setInfo().
    When the 'meta-view' setting is off, a minimal dict built from the
    arguments is returned instead.
    """
    if selfAddon.getSetting("meta-view") == "true":
        # Strip rip-quality tags and colour markup so the lookup title is clean.
        mname=mname.replace(' 720p BRRip','').replace(' 720p HDRip','').replace(' 720p WEBRip','').replace(' 720p BluRay','').replace('()','')
        mname=mname.replace('[DVD]','').replace('[TS]','').replace('[TC]','').replace('[CAM]','').replace('[SCREENER]','').replace('[COLOR blue]','').replace('[COLOR red]','').replace('[/COLOR]','')
        namelen=len(mname)
        print mname[-1:namelen]
        # A trailing ')' presumably means the title ends in '(year)';
        # drop that suffix before the lookup.
        if mname[-1:namelen]==')':
            nam= namelen- 5  # NOTE(review): 'nam' is never used afterwards
            name= mname[0:namelen-6]
        else:
            name = mname
        name=name.replace('-','').replace('&','').replace('acute;','').replace('C ','')
        name = name.decode("ascii", "ignore")
        if year =='':
            year=''  # NOTE(review): no-op; possibly meant to derive the year
        meta = grab.get_meta('movie',name,None,None,year,overlay=6)# first is Type/movie or tvshow, name of show,tvdb id,imdb id,string of year,unwatched = 6/watched = 7
        print "Movie mode: %s"%name
        infoLabels = {'rating': meta['rating'],'duration': meta['duration'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],
                      'plot': meta['plot'],'title': meta['title'],'writer': meta['writer'],'cover_url': meta['cover_url'],
                      'director': meta['director'],'cast': meta['cast'],'backdrop_url': meta['backdrop_url'],'tmdb_id': meta['tmdb_id'],'year': meta['year']}
        # Fall back to caller-supplied genre/artwork when the scraper
        # returned nothing.
        if infoLabels['genre']=='':
            infoLabels['genre']=genre
        if infoLabels['cover_url']=='':
            infoLabels['cover_url']=thumb
    else:
        infoLabels = {'genre': genre,'title': mname,'cover_url': thumb,'year': year,'backdrop_url': ''}
    return infoLabels

def GETMETAB(name,genre,year,thumb):
    """Like GETMETA but without the title clean-up pass: *name* is used
    verbatim for the metadata lookup."""
    if selfAddon.getSetting("meta-view") == "true":
        meta = grab.get_meta('movie',name,None,None,year,overlay=6)# first is Type/movie or tvshow, name of show,tvdb id,imdb id,string of year,unwatched = 6/watched = 7
        print "Movie mode: %s"%name
        infoLabels = {'rating': meta['rating'],'duration': meta['duration'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],
                      'plot': meta['plot'],'title': meta['title'],'writer': meta['writer'],'cover_url': meta['cover_url'],
                      'director': meta['director'],'cast': meta['cast'],'backdrop_url': meta['backdrop_url'],'tmdb_id': meta['tmdb_id'],'year': meta['year']}
        if infoLabels['genre']=='':
            infoLabels['genre']=genre
        if infoLabels['cover_url']=='':
            infoLabels['cover_url']=thumb
    else:
        infoLabels = {'genre': genre,'title': name,'cover_url': thumb,'year': year,'backdrop_url': ''}
    return infoLabels
################################################################################ TV Shows Metahandler ##########################################################################################################
def GETMETAShow(mname):
    """Look up TV-show metadata for *mname* via metahandlers.

    Returns an infoLabels dict for ListItem.setInfo(); falls back to a
    minimal dict (title + stock icon) when 'meta-view' is disabled.
    """
    if selfAddon.getSetting("meta-view") == "true":
        name=mname.replace(' [COLOR red]Recently Updated[/COLOR]','').replace('.','').replace('M.D.','').replace('<span class="updated">Updated!</span>','')
        year=''
        # NOTE(review): the next three identical stanzas strip at most
        # three trailing spaces; name.rstrip() would handle any number,
        # but would change behaviour for 4+ spaces -- confirm intent.
        namelen=len(name)
        if name[-1:namelen] == ' ':
            name= name[0:namelen-1]
        namelen=len(name)
        if name[-1:namelen] == ' ':
            name= name[0:namelen-1]
        namelen=len(name)
        if name[-1:namelen] == ' ':
            name= name[0:namelen-1]
        # Hard-coded disambiguations for shows whose TVDB entry carries a year.
        if name == 'Chase':
            name = 'Chase (2010)'
        elif name == 'Castle':
            name = 'Castle (2009)'
        name= name.replace('-','').replace('-2012','').replace('acute;','').replace('Vampire Diaries','The Vampire Diaries').replace('Comedy Central Roast','Comedy Central Roasts')
        name= name.replace('Doctor Who 2005','Doctor Who').replace(' (US)','(US)').replace(' (UK)','(UK)').replace(' (AU)','(AU)').replace('%','')
        meta = grab.get_meta('tvshow',name,None,None,year,overlay=6)# first is Type/movie or tvshow, name of show,tvdb id,imdb id,string of year,unwatched = 6/watched = 7
        print "Tv Mode: %s"%name
        infoLabels = {'rating': meta['rating'],'duration': meta['duration'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],
                      'plot': meta['plot'],'title': mname,'cover_url': meta['cover_url'],
                      'cast': meta['cast'],'studio': meta['studio'],'banner_url': meta['banner_url'],
                      'backdrop_url': meta['backdrop_url'],'status': meta['status']}
        if infoLabels['cover_url']=='':
            infoLabels['cover_url']="%s/art/vidicon.png"%selfAddon.getAddonInfo("path")
    else:
        infoLabels = {'title': mname,'cover_url': "%s/art/vidicon.png"%selfAddon.getAddonInfo("path"),'backdrop_url': ''}
    return infoLabels

def GETMETAEpi(mname,data):
    """Look up episode metadata.  *data* packs show/season/episode/title
    as 'show xoxc season xoxc episode xoxc title xoxc'.

    NOTE(review): if 'meta-view' is on but *data* does not match the
    pattern, the loop never runs and infoLabels is unbound -- the final
    return then raises NameError.  Confirm callers always pass well-formed
    data before relying on this.
    """
    if selfAddon.getSetting("meta-view") == "true":
        match=re.compile('(.+?)xoxc(.+?)xoxc(.+?)xoxc(.+?)xoxc').findall(data)
        for showname, sea, epi, epiname in match:
            # Same show-name normalisation as GETMETAShow.
            showname= showname.replace('-','').replace('-2012','').replace('acute;','').replace('Comedy Central Roast','Comedy Central Roasts')
            showname= showname.replace('Doctor Who 2005','Doctor Who').replace(' (US)','(US)').replace(' (UK)','(UK)').replace(' (AU)','(AU)').replace('%','').replace(' [COLOR red]Recently Updated[/COLOR]','').replace('.','').replace('M.D.','').replace('<span class="updated">Updated!</span>','')
            print showname+' '+sea+' '+epi+' '+epiname
            meta = grab.get_episode_meta(str(showname),None, int(sea), int(epi),episode_title=str(epiname), overlay='6')
            print "Episode Mode: Name %s Season %s - Episode %s"%(str(epiname),str(sea),str(epi))
            infoLabels = {'rating': meta['rating'],'duration': meta['duration'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],
                          'plot': meta['plot'],'title': meta['title'],'cover_url': meta['cover_url'],
                          'poster': meta['poster'],'season': meta['season'],'episode': meta['episode'],'backdrop_url': meta['backdrop_url']}
    else:
        infoLabels = {'title': mname,'cover_url': '','backdrop_url': ''}
    return infoLabels
################################################################################ Google Analytics ##########################################################################################################
def parseDate(dateString):
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a datetime.

    On any parse failure (empty setting, malformed value) returns
    yesterday, which forces checkGA() to consider an update due.
    """
    try:
        return datetime.datetime.fromtimestamp(time.mktime(time.strptime(dateString.encode('utf-8', 'replace'), "%Y-%m-%d %H:%M:%S")))
    except Exception:  # narrowed from bare except; still catches parse errors
        return datetime.datetime.today() - datetime.timedelta(days=1)  # force update

def checkGA():
    """Post an analytics ping at most once every two hours."""
    secsInHour = 60 * 60
    threshold = 2 * secsInHour
    now = datetime.datetime.today()
    prev = parseDate(selfAddon.getSetting('ga_time'))
    delta = now - prev
    # A ping is due when a full day, or more than `threshold` seconds,
    # has passed since the recorded timestamp.
    doUpdate = (delta.days > 0) or (delta.seconds > threshold)
    if not doUpdate:
        return
    # Record the new timestamp (seconds precision) before pinging.
    selfAddon.setSetting('ga_time', str(now).split('.')[0])
    APP_LAUNCH()

def send_request_to_google_analytics(utm_url):
    """GET the given Google Analytics tracking URL.

    Returns the response body, or None when the request failed (the
    original returned an unbound local here, raising NameError).
    """
    ua = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
    import urllib2
    try:
        req = urllib2.Request(utm_url, None,
                              {'User-Agent': ua}
                              )
        response = urllib2.urlopen(req).read()
    except Exception:
        print ("GA fail: %s" % utm_url)
        return None  # fix: 'response' was unbound on this path
    return response
def GA(group,name):
    """Report a page view (and optionally an event) to Google Analytics.

    *group*/*name* are path components; the literal string "None" (not the
    None object) suppresses the corresponding component.  All failures are
    swallowed so analytics can never break the add-on.
    """
    try:
        try:
            from hashlib import md5
        except:
            from md5 import md5  # pre-2.5 fallback
        from random import randint
        import time
        from urllib import unquote, quote
        from os import environ
        from hashlib import sha1
        VISITOR = selfAddon.getSetting('visitor_ga')
        utm_gif_location = "http://www.google-analytics.com/__utm.gif"
        # Event hit: only when a group was supplied.
        if not group=="None":
            utm_track = utm_gif_location + "?" + \
                "utmwv=" + VERSION + \
                "&utmn=" + str(randint(0, 0x7fffffff)) + \
                "&utmt=" + "event" + \
                "&utme="+ quote("5("+PATH+"*"+group+"*"+name+")")+\
                "&utmp=" + quote(PATH) + \
                "&utmac=" + UATRACK + \
                "&utmcc=__utma=%s" % ".".join(["1", VISITOR, VISITOR, VISITOR,VISITOR,"2"])
            try:
                print "============================ POSTING TRACK EVENT ============================"
                send_request_to_google_analytics(utm_track)
            except:
                print "============================ CANNOT POST TRACK EVENT ============================"
        # Page-view hit: path varies with which of group/name are present.
        if name=="None":
            utm_url = utm_gif_location + "?" + \
                "utmwv=" + VERSION + \
                "&utmn=" + str(randint(0, 0x7fffffff)) + \
                "&utmp=" + quote(PATH) + \
                "&utmac=" + UATRACK + \
                "&utmcc=__utma=%s" % ".".join(["1", VISITOR, VISITOR, VISITOR, VISITOR,"2"])
        else:
            if group=="None":
                utm_url = utm_gif_location + "?" + \
                    "utmwv=" + VERSION + \
                    "&utmn=" + str(randint(0, 0x7fffffff)) + \
                    "&utmp=" + quote(PATH+"/"+name) + \
                    "&utmac=" + UATRACK + \
                    "&utmcc=__utma=%s" % ".".join(["1", VISITOR, VISITOR, VISITOR, VISITOR,"2"])
            else:
                utm_url = utm_gif_location + "?" + \
                    "utmwv=" + VERSION + \
                    "&utmn=" + str(randint(0, 0x7fffffff)) + \
                    "&utmp=" + quote(PATH+"/"+group+"/"+name) + \
                    "&utmac=" + UATRACK + \
                    "&utmcc=__utma=%s" % ".".join(["1", VISITOR, VISITOR, VISITOR, VISITOR,"2"])
        print "============================ POSTING ANALYTICS ============================"
        send_request_to_google_analytics(utm_url)
    except:
        print "================ CANNOT POST TO ANALYTICS ================"
def APP_LAUNCH():
    """Report an 'APP LAUNCH' analytics event, with XBMC build + platform
    scraped from the XBMC log file.

    NOTE(review): indentation was reconstructed for this copy; the windows
    branch reads the log file once inside the branch and the shared code
    below reads it again -- confirm against the canonical source.
    """
    # First two characters of the build version, e.g. '11', '12'.
    versionNumber = int(xbmc.getInfoLabel("System.BuildVersion" )[0:2])
    if versionNumber < 12:
        # Pre-Frodo: special://logpath does not exist, locate the log per platform.
        if xbmc.getCondVisibility('system.platform.osx'):
            if xbmc.getCondVisibility('system.platform.atv2'):
                log_path = '/var/mobile/Library/Preferences'
            else:
                log_path = os.path.join(os.path.expanduser('~'), 'Library/Logs')
        elif xbmc.getCondVisibility('system.platform.ios'):
            log_path = '/var/mobile/Library/Preferences'
        elif xbmc.getCondVisibility('system.platform.windows'):
            log_path = xbmc.translatePath('special://home')
            log = os.path.join(log_path, 'xbmc.log')
            logfile = open(log, 'r').read()
        elif xbmc.getCondVisibility('system.platform.linux'):
            log_path = xbmc.translatePath('special://home/temp')
        else:
            log_path = xbmc.translatePath('special://logpath')
        log = os.path.join(log_path, 'xbmc.log')
        logfile = open(log, 'r').read()
        match=re.compile('Starting XBMC \((.+?) Git:.+?Platform: (.+?)\. Built.+?').findall(logfile)
    elif versionNumber > 11:
        print '======================= more than ===================='
        log_path = xbmc.translatePath('special://logpath')
        log = os.path.join(log_path, 'xbmc.log')
        logfile = open(log, 'r').read()
        match=re.compile('Starting XBMC \((.+?) Git:.+?Platform: (.+?)\. Built.+?').findall(logfile)
    else:
        # Unknown version: synthesise a line so the regex yields Unknown/Unknown.
        logfile='Starting XBMC (Unknown Git:.+?Platform: Unknown. Built.+?'
        match=re.compile('Starting XBMC \((.+?) Git:.+?Platform: (.+?)\. Built.+?').findall(logfile)
    print '========================== '+PATH+' '+VERSION+' =========================='
    try:
        from hashlib import md5
    except:
        from md5 import md5
    from random import randint
    import time
    from urllib import unquote, quote
    from os import environ
    from hashlib import sha1
    import platform
    VISITOR = selfAddon.getSetting('visitor_ga')
    for build, PLATFORM in match:
        # Map version strings to codenames for nicer analytics labels.
        if re.search('12.0',build,re.IGNORECASE):
            build="Frodo"
        if re.search('11.0',build,re.IGNORECASE):
            build="Eden"
        if re.search('13.0',build,re.IGNORECASE):
            build="Gotham"
        print build
        print PLATFORM
        utm_gif_location = "http://www.google-analytics.com/__utm.gif"
        utm_track = utm_gif_location + "?" + \
            "utmwv=" + VERSION + \
            "&utmn=" + str(randint(0, 0x7fffffff)) + \
            "&utmt=" + "event" + \
            "&utme="+ quote("5(APP LAUNCH*"+build+"*"+PLATFORM+")")+\
            "&utmp=" + quote(PATH) + \
            "&utmac=" + UATRACK + \
            "&utmcc=__utma=%s" % ".".join(["1", VISITOR, VISITOR, VISITOR,VISITOR,"2"])
        try:
            print "============================ POSTING APP LAUNCH TRACK EVENT ============================"
            send_request_to_google_analytics(utm_track)
        except:
            print "============================ CANNOT POST APP LAUNCH TRACK EVENT ============================"
# Module-level: throttled analytics ping runs once on import.
checkGA()
################################################################################ Types of Directories ##########################################################################################################
def addLink(name, url, iconimage):
    """Add a plain (non-folder) row that opens *url* directly, bypassing
    the plugin's mode routing."""
    item = xbmcgui.ListItem(name, iconImage="%s/art/link.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name})
    item.setProperty('fanart_image', "%s/fanart.jpg" % selfAddon.getAddonInfo("path"))
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=url, listitem=item)

def addPlay(name, url, mode, iconimage):
    """Add a playable row routed back through this plugin with *mode*."""
    target = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    item = xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name})
    item.setProperty('fanart_image', "%s/fanart.jpg" % selfAddon.getAddonInfo("path"))
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=target, listitem=item)

def addDir(name, url, mode, iconimage):
    """Add a folder row routed back through this plugin with *mode*."""
    target = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    item = xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name})
    item.setProperty('fanart_image', "%s/fanart.jpg" % selfAddon.getAddonInfo("path"))
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=target, listitem=item, isFolder=True)

def addDir2(name, url, mode, iconimage, desc):
    """Like addDir but with a plot/description shown in the info panel."""
    target = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    item = xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name, "Plot": desc})
    item.setProperty('fanart_image', "%s/fanart.jpg" % selfAddon.getAddonInfo("path"))
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=target, listitem=item, isFolder=True)
def addSport(name, url, mode, iconimage, desc, dur, gen):
    """Add a playable sport row carrying plot, year and genre labels."""
    target = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    item = xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name, "Plot": desc, "Year": dur, "Genre": gen})
    item.setProperty('fanart_image', "%s/fanart.jpg" % selfAddon.getAddonInfo("path"))
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=target, listitem=item)

def addDirb(name, url, mode, iconimage, fan):
    """Add a folder row with a caller-supplied fanart image."""
    target = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    item = xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name})
    item.setProperty('fanart_image', fan)
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=target, listitem=item, isFolder=True)

def addPlayb(name, url, mode, iconimage, fan):
    """Add a playable row with a caller-supplied fanart image."""
    target = sys.argv[0] + "?url=" + urllib.quote_plus(url) + "&mode=" + str(mode) + "&name=" + urllib.quote_plus(name)
    item = xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png" % selfAddon.getAddonInfo("path"), thumbnailImage=iconimage)
    item.setInfo(type="Video", infoLabels={"Title": name})
    item.setProperty('fanart_image', fan)
    return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=target, listitem=item)
def addInfo(name,url,mode,iconimage,gen,year):
    """Add a movie folder row with scraped metadata and a context menu
    (favourites add/remove, and a trailer entry when metadata is on)."""
    ok=True
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    name=name.replace('()','')
    infoLabels = GETMETA(name,gen,year,iconimage)
    # tmdbid only exists in the metadata dict when 'meta-view' is on; the
    # guard below mirrors the one that appends the trailer command.
    if selfAddon.getSetting("meta-view") == "true":
        tmdbid=infoLabels['tmdb_id']
    args=[(url,name)]
    script1="%s/resources/addFavs.py"%selfAddon.getAddonInfo('path')
    script2="%s/resources/delFavs.py"%selfAddon.getAddonInfo('path')
    script3="%s/resources/Trailers.py"%selfAddon.getAddonInfo('path')
    Commands=[("[B][COLOR blue]Add[/COLOR][/B] to My Fav's","XBMC.RunScript(" + script1 + ", " + str(args) + ")"),
              ("[B][COLOR red]Remove[/COLOR][/B] from My Fav's","XBMC.RunScript(" + script2 + ", " + str(args) + ")")]
    if selfAddon.getSetting("meta-view") == "true":
        Commands.append(("Play Trailer","XBMC.RunScript(" + script3 + ", " + str(tmdbid) + ")"))
    liz=xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png"%selfAddon.getAddonInfo("path"), thumbnailImage=infoLabels['cover_url'])
    liz.addContextMenuItems( Commands )
    liz.setInfo( type="Video", infoLabels = infoLabels)
    liz.setProperty('fanart_image', infoLabels['backdrop_url'])
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    return ok

def addInfo2(name,url,mode,iconimage,plot):
    """Add a TV-show folder row; metadata comes from GETMETAShow.
    NOTE(review): the 'plot' argument is never used -- confirm callers."""
    ok=True
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    infoLabels = GETMETAShow(name)
    liz=xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png"%selfAddon.getAddonInfo("path"), thumbnailImage=infoLabels['cover_url'])
    liz.setInfo( type="Video", infoLabels=infoLabels)
    liz.setProperty('fanart_image', infoLabels['backdrop_url'])
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    return ok

def addEpi(name,url,mode,iconimage,data):
    """Add an episode folder row; *data* is the packed show/season/episode
    string consumed by GETMETAEpi."""
    ok=True
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    infoLabels = GETMETAEpi(name,data)
    liz=xbmcgui.ListItem(name, iconImage="%s/art/vidicon.png"%selfAddon.getAddonInfo("path"), thumbnailImage=infoLabels['cover_url'])
    liz.setInfo( type="Video", infoLabels=infoLabels)
    liz.setProperty('fanart_image', infoLabels['backdrop_url'])
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    return ok

def addSpecial(name,url,mode,iconimage):
    """Add a bare non-folder row with no metadata or fanart."""
    liz=xbmcgui.ListItem(name,iconImage="",thumbnailImage = iconimage)
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
|
UTF-8
|
Python
| false | false | 2,013 |
8,306,466,774,342 |
35d6f8223447c56607574bdcfffda2c8a76460ae
|
090f4f71e0ca67c0b8702fe31f31b4a307d8c488
|
/plonetheme/globms/intranet_view.py
|
7071993fa13d3bf87b3e47618971df3e9d5409f7
|
[] |
no_license
|
toutpt/globms
|
https://github.com/toutpt/globms
|
b39b44d1c4e085d7954af499f9dd024eef9d84b6
|
77a1702b0c0401d4ee65bd6717676242f3be35fa
|
refs/heads/master
| 2021-01-23T13:29:18.636707 | 2013-08-14T12:43:18 | 2013-08-14T12:43:18 | 2,622,892 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from zope import component
from Products.Five import BrowserView
class Intranet(BrowserView):
    """Browser view aggregating recent news, folders and files for the
    intranet front page."""

    def __init__(self, context, request):
        self.context = context
        self.request = request

    def news(self):
        """Return the two most recent News Items as plain dicts with
        'title', 'description' and 'url' keys."""
        catalog = self.tools.catalog()
        brains = catalog(portal_type='News Item',
                         sort_order='Effective',
                         sort_limit=2)
        return [{'title': brain.Title,
                 'description': brain.Description,
                 'url': brain.getURL()} for brain in brains]

    def folders(self):
        """Return the immediate Folder children of the context."""
        return self.context.listFolderContents(
            contentFilter={'portal_type': 'Folder'})

    def files(self):
        """Return the five most recently modified File objects below the
        context, as dicts with 'title', 'description', 'url', 'icon'."""
        path = '/'.join(self.context.getPhysicalPath())
        catalog = self.tools.catalog()
        brains = catalog(portal_type='File',
                         sort_on='Date',
                         sort_order='reverse',
                         path={'query': path},
                         sort_limit=5)
        return [{'title': brain.Title,
                 'description': brain.Description,
                 'url': brain.getURL(),
                 'icon': brain.getIcon} for brain in brains]

    @property
    def tools(self):
        # Lazily resolved plone_tools multi-adapter for catalog access.
        return component.getMultiAdapter((self.context, self.request),
                                         name=u'plone_tools')
|
UTF-8
|
Python
| false | false | 2,013 |
15,625,091,067,809 |
665e8023a74ab737c9ab25825810564338281e47
|
2d55ab945bbeb9adf604c229de8d42f6489c3e33
|
/cms/db/SmartMappedCollection.py
|
03485b6c7e4ee66349490194e9eacf3c12540963
|
[
"AGPL-3.0-only",
"CC-BY-SA-3.0",
"MIT",
"GPL-2.0-only",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"AGPL-3.0-or-later"
] |
non_permissive
|
mloc/cms
|
https://github.com/mloc/cms
|
253cf0929a3372a590d0560bd1f67e4abfc19fa5
|
b445b192ed6d60c67181cbc2a5a677136997fe88
|
refs/heads/master
| 2022-03-20T08:48:50.863495 | 2013-06-24T07:53:45 | 2013-06-24T07:53:45 | 10,921,661 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Programming contest management system
# Copyright © 2013 Luca Wehrstedt <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import util
from sqlalchemy import event
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm.collections import \
collection, collection_adapter, MappedCollection, \
__set as sa_set, __del as sa_del
# XXX When dropping support for SQLAlchemy pre-0.7.6, remove these two
# lines.
from sqlalchemy.orm.collections import _instrument_class
_instrument_class(MappedCollection)
# XXX When SQLAlchemy will support removal of attribute events, remove
# the following class and global variable:
class _EventManager(object):
    """Workaround for SQLAlchemy's missing attribute-event removal.

    Registers a single real SQLAlchemy 'set' listener per (class, property)
    pair and fans events out to a mutable list of handlers, so handlers can
    be added and removed at runtime.
    """
    def __init__(self):
        # (cls, prop-name) -> list of handler collections.
        self.handlers = dict()

    def listen(self, cls, prp, handler):
        # Install the real SQLAlchemy listener only once per (cls, prp).
        if (cls, prp) not in self.handlers:
            self.handlers[(cls, prp)] = list()
            event.listen(class_mapper(cls)._props[prp],
                         'set', self.make_callback(cls, prp))
        self.handlers[(cls, prp)].append(handler)

    def remove(self, cls, prp, handler):
        assert (cls, prp) in self.handlers
        assert handler in self.handlers[(cls, prp)]
        # The following doesn't work due to the equality operator of
        # the collection, but it's exactly what we want to achieve.
        #self.handlers[(cls, prp)].remove(handler)
        # Identity-based removal instead (collections compare by contents).
        for i in xrange(len(self.handlers[(cls, prp)])):
            if self.handlers[(cls, prp)][i] is handler:
                del self.handlers[(cls, prp)][i]
                break

    def make_callback(self, cls, prp):
        # Closure dispatching the SQLAlchemy event to all live handlers.
        def callback(target, new_key, old_key, _sa_initiator):
            for handler in self.handlers[(cls, prp)]:
                handler._on_column_change(target, new_key, old_key,
                                          _sa_initiator)
        return callback

# Process-wide singleton used by SmartMappedCollection below.
_event_manager = _EventManager()
class SmartMappedCollection(MappedCollection):
def __init__(self, column):
self._column = column
self._linked = False
self._parent_rel = None
self._parent_obj = None
self._parent_cls = None
self._child_rel = None
self._child_cls = None
# XXX When dropping support for SQLAlchemy pre-0.8, rename this
# decorator to 'collection.linker'.
@collection.link
def _link(self, adapter):
assert adapter == collection_adapter(self)
if adapter is not None:
# LINK
assert not self._linked
self._linked = True
assert self is adapter.data
self._parent_rel = adapter.attr.key
self._parent_obj = adapter.owner_state.obj()
self._parent_cls = type(self._parent_obj)
parent_rel_prop = \
class_mapper(self._parent_cls)._props[self._parent_rel]
self._child_rel = parent_rel_prop.back_populates
self._child_cls = parent_rel_prop.mapper.class_
child_rel_prop = \
class_mapper(self._child_cls)._props[self._child_rel]
# XXX When SQLAlchemy will support removal of attribute
# events, use the following code:
#event.listen(class_mapper(self._child_cls)._props[self._column],
# 'set', self._on_column_change)
# In the meanwhile we have to use this:
_event_manager.listen(self._child_cls, self._column, self)
else:
# UNLINK
assert self._linked
self._linked = False
# XXX When SQLAlchemy will support removal of attribute
# events, use the following code:
#event.remove(class_mapper(self._child_cls)._props[self._column],
# 'set', self._on_column_change)
# In the meanwhile we have to use this:
_event_manager.remove(self._child_cls, self._column, self)
self._parent_rel = None
self._parent_obj = None
self._parent_cls = None
self._child_rel = None
self._child_cls = None
# XXX When dropping support for SQLAlchemy pre-0.8, remove this line.
_sa_on_link = _link
# The following two methods do all the hard work. Their mission is
# to keep everything consistent, that is to get from a (hopefully
# consistent) initial state to a consistent final state after
# having dome something useful (i.e. what they were written for).
# This is what we consider a consistent state:
# - self.values() is equal to all and only those objects of the
# self._child_cls class that have the self._child_rel attribute
# set to self._parent_obj;
# - all elements of self.values() are distinct (that is, this is an
# invertible mapping);
# - for each (key, value) in self.items(), key is equal to the
# self._column attribute of value.
# This method is called before the attribute is really changed, and
# trying to change it again from inside this method will cause an
# infinte recursion loop. Don't do that! This also means that the
# method does actually leave the collection in an inconsistent
# state, but SQLAlchemy will fix that immediately after.
def _on_column_change(self, value, new_key, old_key, _sa_initiator):
assert self._linked
if getattr(value, self._child_rel) is self._parent_obj:
# Get the old_key (the parameter may not be reliable) and
# do some consistency checks.
assert value in self.itervalues()
old_key = list(k for k, v in self.iteritems() if v is value)
assert len(old_key) == 1
old_key = old_key[0]
assert old_key == getattr(value, self._column)
# If necessary, move this object (and remove any old object
# with this key).
if new_key != old_key:
dict.__delitem__(self, old_key)
if new_key in self:
sa_del(self, dict.__getitem__(self, new_key), _sa_initiator)
dict.__delitem__(self, new_key)
dict.__setitem__(self, new_key, value)
# When this method gets called, the child object may think it's
# already bound to the collection (i.e. its self._child_rel is set
# to self._parent_obj) but it actually isn't (i.e. it's not in
# self.values()). This method has to fix that.
@collection.internally_instrumented
def __setitem__(self, new_key, value, _sa_initiator=None):
# TODO We could check if the object's type is correct.
assert self._linked
if value in self.itervalues():
# Just some consistency checks, for extra safety!
assert getattr(value, self._child_rel) is self._parent_obj
old_key = list(k for k, v in self.iteritems() if v is value)
assert len(old_key) == 1
old_key = old_key[0]
assert old_key == getattr(value, self._column)
# If needed, we make SQLAlchemy call _on_column_changed to
# do the rest of the job (and repeat the above checks).
if new_key != getattr(value, self._column):
setattr(value, self._column, new_key)
else:
# We change the attribute before adding it to the collection
# to prevent the (unavoidable) call to _on_column_change
# from doing any damage.
if new_key != getattr(value, self._column):
setattr(value, self._column, new_key)
# Remove any old object with this key and add this instead.
if new_key in self:
sa_del(self, dict.__getitem__(self, new_key), _sa_initiator)
dict.__delitem__(self, new_key)
value = sa_set(self, value, _sa_initiator)
dict.__setitem__(self, new_key, value)
def keyfunc(self, value):
    """Return the mapping key for *value*: its key-column attribute."""
    return getattr(value, self._column)
@collection.converter
def _convert(self, collection):
    """Yield the members of *collection* for bulk (re)assignment.

    Accepts a dict-like object (every key must agree with the value's key
    column) or a list/set; any other shape raises TypeError.
    """
    # TODO We could check if the objects' type is correct.
    type_ = util.duck_type_collection(collection)
    if type_ is dict:
        for key, value in util.dictlike_iteritems(collection):
            # A dict key that disagrees with the value's own key column
            # would silently corrupt the mapping, so reject it here.
            if key != self.keyfunc(value):
                raise TypeError(
                    "Found incompatible key '%r' for value '%r'" %
                    (key, value))
            yield value
    elif type_ in (list, set):
        for value in collection:
            yield value
    else:
        raise TypeError("Object '%r' is not dict-like nor iterable" %
                        collection)
# XXX When dropping support for SQLAlchemy pre-0.8, remove this.
_sa_converter = _convert
def __iadd__(self, collection):
    """In-place `+=`: merge every converted member of *collection* via set()."""
    for value in self._convert(collection):
        self.set(value)
    return self
def smart_mapped_collection(column):
    """Return a zero-argument factory building a SmartMappedCollection keyed on *column*."""
    def factory():
        return SmartMappedCollection(column)
    return factory
|
UTF-8
|
Python
| false | false | 2,013 |
8,435,315,797,022 |
7db7be4ffd86d652678c014e90c17a02f1d0e8af
|
910a71b040a33b2a235b23d2b24b391dbc07f660
|
/simple_history/tests/tests.py
|
ab4642732ef5d44ed23b7dfa4bb072becff621b8
|
[
"BSD-3-Clause"
] |
permissive
|
Kyruus/django-simple-history
|
https://github.com/Kyruus/django-simple-history
|
4862edc857ca15b6b4d4a76d579fa82d5d68a719
|
5ba8d2b4d72819f154a11f297796e6a2bb7172bf
|
refs/heads/master
| 2021-01-16T21:26:47.598524 | 2014-01-14T04:46:32 | 2014-01-14T04:46:32 | 15,886,388 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import unicode_literals
from datetime import datetime, timedelta
from django import VERSION
from django.test import TestCase
from django_webtest import WebTest
from django.core.files.base import ContentFile
from django.core.urlresolvers import reverse
from simple_history.tests.models import AdminProfile, Bookcase, MultiOneToOne
from simple_history.models import HistoricalRecords
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError: # django 1.4 compatibility
from django.contrib.auth.models import User
from .models import (Poll, Choice, Restaurant, Person, FileModel, Document,
Book, Library, State, SelfFK)
from .models import ExternalModel1, ExternalModel3
from simple_history import register
from simple_history.tests.external.models import ExternalModel2, ExternalModel4
# Fixed reference timestamps shared by the tests (deliberately in the
# future so they are recognizable in fixtures and admin forms).
today = datetime(2021, 1, 1, 10, 0)
tomorrow = today + timedelta(days=1)
def get_fake_file(filename):
    """Build an in-memory Django ContentFile carrying *filename* as its name."""
    contents = ContentFile('file data')
    contents.name = filename
    return contents
class HistoricalRecordsTest(TestCase):
    """End-to-end tests of history tracking on create/update/delete and
    across the various supported field types (FK, file, one-to-one, etc.)."""

    def assertDatetimesEqual(self, time1, time2):
        """Assert two datetimes are within 2 seconds of each other."""
        self.assertAlmostEqual(time1, time2, delta=timedelta(seconds=2))

    def assertRecordValues(self, record, klass, values_dict):
        """Assert a history record, and the live object it restores,
        carry the expected field values (history_type is record-only)."""
        for key, value in values_dict.items():
            self.assertEqual(getattr(record, key), value)
        self.assertEqual(record.history_object.__class__, klass)
        for key, value in values_dict.items():
            if key != 'history_type':
                self.assertEqual(getattr(record.history_object, key), value)

    def test_create(self):
        # Creating a model produces one "+" history record.
        p = Poll(question="what's up?", pub_date=today)
        p.save()
        record, = p.history.all()
        self.assertRecordValues(record, Poll, {
            'question': "what's up?",
            'pub_date': today,
            'id': p.id,
            'history_type': "+"
        })
        self.assertDatetimesEqual(record.history_date, datetime.now())

    def test_update(self):
        # Updating adds a "~" record on top of the original "+".
        Poll.objects.create(question="what's up?", pub_date=today)
        p = Poll.objects.get()
        p.pub_date = tomorrow
        p.save()
        update_record, create_record = p.history.all()
        self.assertRecordValues(create_record, Poll, {
            'question': "what's up?",
            'pub_date': today,
            'id': p.id,
            'history_type': "+"
        })
        self.assertRecordValues(update_record, Poll, {
            'question': "what's up?",
            'pub_date': tomorrow,
            'id': p.id,
            'history_type': "~"
        })

    def test_delete(self):
        # Deleting leaves a "-" record; history outlives the object.
        p = Poll.objects.create(question="what's up?", pub_date=today)
        poll_id = p.id
        p.delete()
        delete_record, create_record = Poll.history.all()
        self.assertRecordValues(create_record, Poll, {
            'question': "what's up?",
            'pub_date': today,
            'id': poll_id,
            'history_type': "+"
        })
        self.assertRecordValues(delete_record, Poll, {
            'question': "what's up?",
            'pub_date': today,
            'id': poll_id,
            'history_type': "-"
        })

    def test_save_without_historical_record(self):
        # save_without_historical_record() must not emit a record.
        pizza_place = Restaurant.objects.create(name='Pizza Place', rating=3)
        pizza_place.rating = 4
        pizza_place.save_without_historical_record()
        pizza_place.rating = 6
        pizza_place.save()
        update_record, create_record = Restaurant.updates.all()
        self.assertRecordValues(create_record, Restaurant, {
            'name': "Pizza Place",
            'rating': 3,
            'id': pizza_place.id,
            'history_type': "+",
        })
        self.assertRecordValues(update_record, Restaurant, {
            'name': "Pizza Place",
            'rating': 6,
            'id': pizza_place.id,
            'history_type': "~",
        })

    def test_save_without_historical_record_for_registered_model(self):
        registered_model = ExternalModel3.objects.create(name='registered model')
        self.assertTrue(hasattr(registered_model, 'save_without_historical_record'))

    def test_save_raises_exception(self):
        # The skip flag must be cleaned up even when save() raises.
        anthony = Person(name='Anthony Gillard')
        with self.assertRaises(RuntimeError):
            anthony.save_without_historical_record()
        self.assertFalse(hasattr(anthony, 'skip_history_when_saving'))
        self.assertEqual(Person.history.count(), 0)
        anthony.save()
        self.assertEqual(Person.history.count(), 1)

    def test_foreignkey_field(self):
        # FK changes are stored via the raw *_id column on the record.
        why_poll = Poll.objects.create(question="why?", pub_date=today)
        how_poll = Poll.objects.create(question="how?", pub_date=today)
        choice = Choice.objects.create(poll=why_poll, votes=0)
        choice.poll = how_poll
        choice.save()
        update_record, create_record = Choice.history.all()
        self.assertRecordValues(create_record, Choice, {
            'poll_id': why_poll.id,
            'votes': 0,
            'id': choice.id,
            'history_type': "+",
        })
        self.assertRecordValues(update_record, Choice, {
            'poll_id': how_poll.id,
            'votes': 0,
            'id': choice.id,
            'history_type': "~",
        })

    def test_foreignkey_still_allows_reverse_lookup_via_set_attribute(self):
        lib = Library.objects.create()
        state = State.objects.create(library=lib)
        self.assertTrue(hasattr(lib, 'state_set'))
        self.assertIsNone(state._meta.get_field('library').rel.related_name,
                          "the '+' shouldn't leak through to the original "
                          "model's field related_name")

    def test_file_field(self):
        # File fields are stored as their path string in history.
        model = FileModel.objects.create(file=get_fake_file('name'))
        self.assertEqual(model.file.name, 'files/name')
        model.file.delete()
        update_record, create_record = model.history.all()
        self.assertEqual(create_record.file, 'files/name')
        self.assertEqual(update_record.file, '')

    def test_inheritance(self):
        # History works on a multi-table-inherited model (Restaurant).
        pizza_place = Restaurant.objects.create(name='Pizza Place', rating=3)
        pizza_place.rating = 4
        pizza_place.save()
        update_record, create_record = Restaurant.updates.all()
        self.assertRecordValues(create_record, Restaurant, {
            'name': "Pizza Place",
            'rating': 3,
            'id': pizza_place.id,
            'history_type': "+",
        })
        self.assertRecordValues(update_record, Restaurant, {
            'name': "Pizza Place",
            'rating': 4,
            'id': pizza_place.id,
            'history_type': "~",
        })

    def test_specify_history_user(self):
        # history_user follows the model's changed_by attribute.
        user1 = User.objects.create_user('user1', '[email protected]')
        user2 = User.objects.create_user('user2', '[email protected]')
        document = Document.objects.create(changed_by=user1)
        document.changed_by = user2
        document.save()
        document.changed_by = None
        document.save()
        self.assertEqual([d.history_user for d in document.history.all()],
                         [None, user2, user1])

    def test_non_default_primary_key_save(self):
        # FK to a model with a custom PK (isbn) records that PK value.
        book1 = Book.objects.create(isbn='1-84356-028-1')
        book2 = Book.objects.create(isbn='1-84356-028-2')
        library = Library.objects.create(book=book1)
        library.book = book2
        library.save()
        library.book = None
        library.save()
        self.assertEqual([l.book_id for l in library.history.all()],
                         [None, book2.pk, book1.pk])

    def test_string_defined_foreign_key_save(self):
        # FKs declared by string ("app.Model") must also be tracked.
        library1 = Library.objects.create()
        library2 = Library.objects.create()
        state = State.objects.create(library=library1)
        state.library = library2
        state.save()
        state.library = None
        state.save()
        self.assertEqual([s.library_id for s in state.history.all()],
                         [None, library2.pk, library1.pk])

    def test_self_referential_foreign_key(self):
        model = SelfFK.objects.create()
        other = SelfFK.objects.create()
        model.fk = model
        model.save()
        model.fk = other
        model.save()
        self.assertEqual([m.fk_id for m in model.history.all()],
                         [other.id, model.id, None])

    def test_raw_save(self):
        # raw=True saves (e.g. fixture loading) must not create history.
        document = Document()
        document.save_base(raw=True)
        self.assertEqual(document.history.count(), 0)
        document.save()
        self.assertRecordValues(document.history.get(), Document, {
            'changed_by_id': None,
            'id': document.id,
            'history_type': "~",
        })

    def test_user_can_set_verbose_name(self):
        b = Book(isbn='54321')
        b.save()
        self.assertEqual('dead trees', b.history.all()[0]._meta.verbose_name)

    def test_historical_verbose_name_follows_model_verbose_name(self):
        l = Library()
        l.save()
        self.assertEqual('historical quiet please',
                         l.history.get()._meta.verbose_name)
class RegisterTest(TestCase):
    """Tests for simple_history.register() (history added without a model field)."""

    def test_register_no_args(self):
        # Choice gets history via register() with default options.
        self.assertEqual(len(Choice.history.all()), 0)
        poll = Poll.objects.create(pub_date=today)
        choice = Choice.objects.create(poll=poll, votes=0)
        self.assertEqual(len(choice.history.all()), 1)

    def test_register_separate_app(self):
        # User was registered from another app under the name `histories`,
        # so the default `history` manager must NOT exist.
        get_history = lambda model: model.history
        self.assertRaises(AttributeError, get_history, User)
        self.assertEqual(len(User.histories.all()), 0)
        user = User.objects.create(username='bob', password='pass')
        self.assertEqual(len(User.histories.all()), 1)
        self.assertEqual(len(user.histories.all()), 1)

    def test_reregister(self):
        # Re-registering an already tracked model must be a no-op:
        # the original manager name stays, the new one is never attached.
        register(Restaurant, manager_name='again')
        register(User, manager_name='again')
        self.assertTrue(hasattr(Restaurant, 'updates'))
        self.assertFalse(hasattr(Restaurant, 'again'))
        self.assertTrue(hasattr(User, 'histories'))
        self.assertFalse(hasattr(User, 'again'))
class CreateHistoryModelTests(TestCase):
    """create_history_model() must tolerate the OneToOneField variations
    (to integer PKs, char PKs, and multiple one-to-ones on one model)."""
    # Fixes vs. original: bare `except:` narrowed to `except Exception:`
    # (a bare except also swallows KeyboardInterrupt/SystemExit), and the
    # implicitly-concatenated failure messages were missing a space
    # ("one to onefields").

    def test_create_history_model_with_one_to_one_field_to_integer_field(self):
        records = HistoricalRecords()
        records.module = AdminProfile.__module__
        try:
            records.create_history_model(AdminProfile)
        except Exception:
            self.fail("SimpleHistory should handle foreign keys to one to one "
                      "fields to integer fields without throwing an exception")

    def test_create_history_model_with_one_to_one_field_to_char_field(self):
        records = HistoricalRecords()
        records.module = Bookcase.__module__
        try:
            records.create_history_model(Bookcase)
        except Exception:
            self.fail("SimpleHistory should handle foreign keys to one to one "
                      "fields to char fields without throwing an exception.")

    def test_create_history_model_with_multiple_one_to_ones(self):
        records = HistoricalRecords()
        records.module = MultiOneToOne.__module__
        try:
            records.create_history_model(MultiOneToOne)
        except Exception:
            self.fail("SimpleHistory should handle foreign keys to one to one "
                      "fields to one to one fields without throwing an "
                      "exception.")
class AppLabelTest(TestCase):
    """Historical tables must live under the expected app label / db table,
    whether the app label is explicit, defaulted, or set at register() time."""

    def get_table_name(self, manager):
        """Return the db table backing *manager*'s model."""
        return manager.model._meta.db_table

    def test_explicit_app_label(self):
        self.assertEqual(self.get_table_name(ExternalModel1.objects),
                         'external_externalmodel1')
        self.assertEqual(self.get_table_name(ExternalModel1.history),
                         'external_historicalexternalmodel1')

    def test_default_app_label(self):
        self.assertEqual(self.get_table_name(ExternalModel2.objects),
                         'external_externalmodel2')
        self.assertEqual(self.get_table_name(ExternalModel2.history),
                         'external_historicalexternalmodel2')

    def test_register_app_label(self):
        # Models registered cross-app keep their own table but the
        # historical table follows the registering app.
        self.assertEqual(self.get_table_name(ExternalModel3.objects),
                         'tests_externalmodel3')
        self.assertEqual(self.get_table_name(ExternalModel3.histories),
                         'external_historicalexternalmodel3')
        self.assertEqual(self.get_table_name(ExternalModel4.objects),
                         'external_externalmodel4')
        self.assertEqual(self.get_table_name(ExternalModel4.histories),
                         'tests_historicalexternalmodel4')
class HistoryManagerTest(TestCase):
    """Tests for the history manager helpers most_recent() and as_of()."""

    def test_most_recent(self):
        # most_recent() restores the latest SAVED state, ignoring the
        # unsaved in-memory change ("how?").
        poll = Poll.objects.create(question="what's up?", pub_date=today)
        poll.question = "how's it going?"
        poll.save()
        poll.question = "why?"
        poll.save()
        poll.question = "how?"
        most_recent = poll.history.most_recent()
        self.assertEqual(most_recent.__class__, Poll)
        self.assertEqual(most_recent.question, "why?")

    def test_most_recent_on_model_class(self):
        # most_recent() only makes sense on an instance manager.
        Poll.objects.create(question="what's up?", pub_date=today)
        self.assertRaises(TypeError, Poll.history.most_recent)

    def test_most_recent_nonexistant(self):
        # Unsaved poll
        poll = Poll(question="what's up?", pub_date=today)
        self.assertRaises(Poll.DoesNotExist, poll.history.most_recent)
        # Deleted poll
        poll.save()
        poll.delete()
        self.assertRaises(Poll.DoesNotExist, poll.history.most_recent)

    def test_as_of(self):
        # as_of(t) restores the state current at time t; history.all()
        # is newest-first, so times[0] is the latest record.
        poll = Poll.objects.create(question="what's up?", pub_date=today)
        poll.question = "how's it going?"
        poll.save()
        poll.question = "why?"
        poll.save()
        poll.question = "how?"
        most_recent = poll.history.most_recent()
        self.assertEqual(most_recent.question, "why?")
        times = [r.history_date for r in poll.history.all()]
        question_as_of = lambda time: poll.history.as_of(time).question
        self.assertEqual(question_as_of(times[0]), "why?")
        self.assertEqual(question_as_of(times[1]), "how's it going?")
        self.assertEqual(question_as_of(times[2]), "what's up?")

    def test_as_of_on_model_class(self):
        # as_of() only makes sense on an instance manager.
        Poll.objects.create(question="what's up?", pub_date=today)
        time = Poll.history.all()[0].history_date
        self.assertRaises(TypeError, Poll.history.as_of, time)

    def test_as_of_nonexistant(self):
        # Unsaved poll
        poll = Poll(question="what's up?", pub_date=today)
        time = datetime.now()
        self.assertRaises(Poll.DoesNotExist, poll.history.as_of, time)
        # Deleted poll
        poll.save()
        poll.delete()
        self.assertRaises(Poll.DoesNotExist, poll.history.as_of, time)

    def test_foreignkey_field(self):
        # Restored objects resolve their FK to the value held at the time.
        why_poll = Poll.objects.create(question="why?", pub_date=today)
        how_poll = Poll.objects.create(question="how?", pub_date=today)
        choice = Choice.objects.create(poll=why_poll, votes=0)
        choice.poll = how_poll
        choice.save()
        most_recent = choice.history.most_recent()
        self.assertEqual(most_recent.poll.pk, how_poll.pk)
        times = [r.history_date for r in choice.history.all()]
        poll_as_of = lambda time: choice.history.as_of(time).poll
        self.assertEqual(poll_as_of(times[0]).pk, how_poll.pk)
        self.assertEqual(poll_as_of(times[1]).pk, why_poll.pk)
def get_history_url(model, history_index=None):
    """Return the admin history-list URL for *model*, or — when
    *history_index* is given — the form URL of that history record
    (records ordered by history_id)."""
    info = model._meta.app_label, model._meta.module_name
    if history_index is None:
        return reverse('admin:%s_%s_history' % info, args=[model.pk])
    history = model.history.order_by('history_id')[history_index]
    return reverse('admin:%s_%s_simple_history' % info,
                   args=[model.pk, history.history_id])
class AdminSiteTest(WebTest):
    """Browser-level (django-webtest) tests of the admin history views."""

    def setUp(self):
        # Superuser used for most requests.
        self.user = User.objects.create_superuser('user_login',
                                                  '[email protected]', 'pass')

    def login(self, user=None):
        """Log *user* (default: the superuser) in through the admin form."""
        if user is None:
            user = self.user
        form = self.app.get(reverse('admin:index')).form
        form['username'] = user.username
        form['password'] = 'pass'
        return form.submit()

    def test_history_list(self):
        if VERSION >= (1, 5):
            # Custom user model is in play on Django >= 1.5.
            self.assertEqual(self.user._meta.module_name, 'customuser')
        self.login()
        poll = Poll(question="why?", pub_date=today)
        poll._history_user = self.user
        poll.save()
        response = self.app.get(get_history_url(poll))
        self.assertIn(get_history_url(poll, 0), response.unicode_normal_body)
        self.assertIn("Poll object", response.unicode_normal_body)
        self.assertIn("Created", response.unicode_normal_body)
        self.assertIn(self.user.username, response.unicode_normal_body)

    def test_history_form_permission(self):
        # A user without change permission gets 403 on the history form.
        self.login(self.user)
        person = Person.objects.create(name='Sandra Hale')
        self.app.get(get_history_url(person, 0), status=403)

    def test_invalid_history_form(self):
        self.login()
        poll = Poll.objects.create(question="why?", pub_date=today)
        response = self.app.get(get_history_url(poll, 0))
        response.form['question'] = ""
        response = response.form.submit()
        self.assertEqual(response.status_code, 200)
        self.assertIn("This field is required", response.unicode_normal_body)

    def test_history_form(self):
        # Submitting an old version's form creates a NEW version based on it.
        self.login()
        poll = Poll.objects.create(question="why?", pub_date=today)
        poll.question = "how?"
        poll.save()
        # Make sure form for initial version is correct
        response = self.app.get(get_history_url(poll, 0))
        self.assertEqual(response.form['question'].value, "why?")
        self.assertEqual(response.form['pub_date_0'].value, "2021-01-01")
        self.assertEqual(response.form['pub_date_1'].value, "10:00:00")
        # Create new version based on original version
        response.form['question'] = "what?"
        response.form['pub_date_0'] = "2021-01-02"
        response = response.form.submit()
        self.assertEqual(response.status_code, 302)
        if VERSION < (1, 4, 0):
            self.assertTrue(response.headers['location']
                            .endswith(get_history_url(poll)))
        else:
            self.assertTrue(response.headers['location']
                            .endswith(reverse('admin:tests_poll_changelist')))
        # Ensure form for second version is correct
        response = self.app.get(get_history_url(poll, 1))
        self.assertEqual(response.form['question'].value, "how?")
        self.assertEqual(response.form['pub_date_0'].value, "2021-01-01")
        self.assertEqual(response.form['pub_date_1'].value, "10:00:00")
        # Ensure form for new third version is correct
        response = self.app.get(get_history_url(poll, 2))
        self.assertEqual(response.form['question'].value, "what?")
        self.assertEqual(response.form['pub_date_0'].value, "2021-01-02")
        self.assertEqual(response.form['pub_date_1'].value, "10:00:00")
        # Ensure current version of poll is correct
        poll = Poll.objects.get()
        self.assertEqual(poll.question, "what?")
        self.assertEqual(poll.pub_date, tomorrow)
        self.assertEqual([p.history_user for p in Poll.history.all()],
                         [self.user, None, None])

    def test_history_user_on_save_in_admin(self):
        self.login()
        # Ensure polls created via admin interface save correct user
        add_page = self.app.get(reverse('admin:tests_poll_add'))
        add_page.form['question'] = "new poll?"
        add_page.form['pub_date_0'] = "2012-01-01"
        add_page.form['pub_date_1'] = "10:00:00"
        changelist_page = add_page.form.submit().follow()
        self.assertEqual(Poll.history.get().history_user, self.user)
        # Ensure polls saved on edit page in admin interface save correct user
        change_page = changelist_page.click("Poll object")
        change_page.form.submit()
        self.assertEqual([p.history_user for p in Poll.history.all()],
                         [self.user, self.user])
|
UTF-8
|
Python
| false | false | 2,014 |
8,186,207,691,701 |
3d00f51e9b9b5452d8e57d5dde72b4f050c1b6e9
|
f13344b0d6cf0d77e71cded713dcb211cc04a0cf
|
/correctDictionary3.py
|
31db702436939f145584a969b14050fc0fe65d0e
|
[] |
no_license
|
r-arun/word
|
https://github.com/r-arun/word
|
d02c4d41f87e70f4e09c56481fdcf0d84770fb44
|
4c513a07e6828adb6f74342a4f48df8c34451b9f
|
refs/heads/master
| 2021-01-13T02:08:11.369898 | 2011-08-13T13:18:16 | 2011-08-13T13:18:16 | 1,654,239 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from spaceStrip import strip
import pickle
def specialForm(sentence):
    """Return the text before the first '.' when it is a single word
    (no spaces); otherwise return False. Sentences without a '.' also
    yield False."""
    if '.' not in sentence:
        return False
    head = sentence.split('.')[0]
    return head if ' ' not in head else False
def selectDirect(sentence):
    """From the text after the first '.', return everything before the
    first ':' (a directly listed word). False when there is no '.' or
    no ':'."""
    if '.' not in sentence:
        return False
    remainder = ' '.join(sentence.split('.')[1:])
    if ':' in remainder:
        return remainder.split(':')[0]
    return False
def selectIndirect(word, sentence):
    """Find a derived form of *word* after the first '.' in *sentence*.

    The text after the '.' must contain no ':' and at least one space.
    Its first token is accepted when it shares a prefix with *word* of
    length min(len(word), len(token)) minus at most 2 characters.
    Returns (token, rest_of_text) on success, else False.
    """
    if '.' not in sentence:
        return False
    remainder = ' '.join(sentence.split('.')[1:])
    if ':' in remainder or ' ' not in remainder:
        return False
    tokens = remainder.split(' ')
    candidate = tokens[0]
    rest = ' '.join(tokens[1:]) if len(tokens) > 1 else ' '
    shared = min(len(word), len(candidate))
    for trim in range(3):
        if shared - trim < 0:
            break
        if word[:shared - trim] == candidate[:shared - trim]:
            return candidate, rest
    return False
# Rebuild the pickled dictionary into a richer structure: for each word,
# split its raw lines into meanings, examples, parts of speech, opposites,
# related phrases and derived words. (Python 2 script: print statements,
# dict.has_key.)
oppcount=0  # number of "OP." (opposite) lines seen
relcount=0  # number of unclassified (related) lines seen
# NOTE(review): file handle is never closed -- consider a with-block.
fd=open('dictionary','rb')
dic=pickle.load(fd)
newdic={}
for i in dic.keys():
    # NOTE(review): `i` is re-bound to its stripped form; if strip()
    # actually changed the key, dic[i] below would raise KeyError.
    i=strip(i)
    newdic[i]={}
    meaning=[]
    example=[]
    related=[]
    otherword={}   # part-of-speech tag -> list of derived words
    opposite=[]
    part=[]        # parts of speech seen for this entry
    for j in dic[i]:
        # specialForm() yields the one-word tag before the first '.',
        # e.g. 'Ex', 'V', 'OP' -- or False for a plain meaning line.
        spl=specialForm(j)
        if(not spl):
            meaning.append(j)
        elif(spl=='Ex'):
            example.append(' '.join(j.split('.')[1:]))
        elif(spl in ['V','N','v','ADJ','ADV']):
            if(spl=='v'): spl='V'  # normalize lowercase verb tag
            if(spl not in part):
                part.append(spl)
            # "TAG. word: explanation" gives a directly listed word.
            word=selectDirect(j)
            if(word):
                exp=j.split(':')[1]
                if(not otherword.has_key(spl)):
                    otherword[spl]=[]
                otherword[spl].append(word)
                if(not newdic.has_key(word)):
                    newdic[word]={}
                    newdic[word]['meaning']=[]
                newdic[word]['meaning'].append(exp)
            else:
                # Otherwise try a prefix-matched derived form of `i`.
                word=selectIndirect(i,j)
                if(word):
                    word,exp=word[0],word[1]
                    word=strip(word)
                    if(not newdic.has_key(word)):
                        newdic[word]={}
                        newdic[word]['meaning']=[]
                    newdic[word]['meaning'].append(exp)
        elif(spl=='OP'):
            opposite.append(' '.join(j.split('.')[1:]))
            oppcount+=1
        else:
            related.append(' '.join(j.split('.')[1:]))
            relcount+=1
    newdic[i]['meaning']=meaning
    newdic[i]['example']=example
    newdic[i]['otherword']=otherword
    newdic[i]['part']=part
    newdic[i]['opposite']=opposite
    newdic[i]['related']=related
    print i,newdic[i]
print oppcount
print "New Words"
# Words discovered via derivation that were not top-level entries.
newarr=[]
for i in newdic.keys():
    if i not in dic.keys():
        newarr.append(i)
print newarr
print len(newarr)
print relcount
print newdic.keys()
|
UTF-8
|
Python
| false | false | 2,011 |
5,686,536,742,082 |
f74d3625a1ebcb8aef30df12a8b4bb856a143886
|
ed454a2d052a550eec18b8395d075ace4bda4234
|
/taxitogether/taxitogether/views/duck.py
|
69f45b1118bdcc34932d5953c77288e59f5f7f1d
|
[] |
no_license
|
sparcs-kaist/taxitogether2.0
|
https://github.com/sparcs-kaist/taxitogether2.0
|
b31d2d7c30df4e56dcd6697d4612ab759e6bed26
|
0077e6127362b9ef84e580dd56524da3aeea1946
|
refs/heads/master
| 2021-01-23T08:04:39.151011 | 2013-12-09T12:04:10 | 2013-12-09T12:04:10 | 14,748,046 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from rest_framework import viewsets, mixins, permissions
from taxitogether.models import Duck, Device
from taxitogether import serializers
class DuckViewSet(mixins.CreateModelMixin,
                  mixins.RetrieveModelMixin,
                  viewsets.GenericViewSet):
    """Create/retrieve endpoint for Duck objects.

    POST uses the registration serializer; GET the public one.
    """
    model = Duck

    def get_serializer_class(self):
        """Pick the serializer for the current HTTP method."""
        method = self.request.method
        if method == 'POST':
            return serializers.DuckRegisterSerializer
        if method == 'GET':
            return serializers.DuckPublicSerializer
        raise Exception("Not allowed method %s" % str(method))
class DeviceViewSet(viewsets.ModelViewSet):
    """CRUD endpoint over the authenticated user's own devices."""
    model = Device
    serializer_class = serializers.DeviceSerializer
    permission_classes = [permissions.IsAuthenticated]

    def pre_save(self, obj):
        """Stamp the requesting user as the device owner before saving."""
        obj.owner = self.request.user

    def get_queryset(self):
        """Restrict visibility to devices owned by the requesting user."""
        owner = self.request.user
        return owner.devices.all()
|
UTF-8
|
Python
| false | false | 2,013 |
3,264,175,160,810 |
54be897501e0873309352bbdd40849b37bce22da
|
693bc4dc617b2f09758cf12a4471e5d15ced6c90
|
/teaman.py
|
0f4e9f02b48925cd6086fb952e0b0e0db2fa6802
|
[] |
no_license
|
patrickmn/teaman
|
https://github.com/patrickmn/teaman
|
1d1378903eb6493a3ec8a004124d41a644b9bffd
|
cf2f599d20879808cde60917211fcc71a91cdd56
|
refs/heads/master
| 2023-08-27T06:01:43.441690 | 2011-05-01T18:25:10 | 2011-05-01T18:25:10 | 1,655,063 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""
Teaman
Simple console app that alerts you (with sound) when your cup of tea is ready.
by Patrick Mylund Nielsen
https://github.com/pmylund/teaman
"""
__version__ = '1.0'
import sys
import time
import datetime
# Whether to play a notification sound when the tea is ready.
play_ready_sound = True
# WAV file played on completion (resolved relative to the working directory).
ready_sound_file = 'ready.wav'
def main():
    """Read the duration from argv (or prompt for it) and run the timer.

    A non-numeric or missing argument falls back to an interactive prompt;
    Ctrl-C exits quietly.
    """
    duration = None
    if len(sys.argv) == 2:
        try:
            duration = int(sys.argv[1])
        except:
            pass
    if not duration:
        duration = int(raw_input("How long (in seconds) until your cup of tea is ready? "))
    try:
        countdownToTea(duration)
    except KeyboardInterrupt:
        return
def countdownToTea(duration):
    """Sleep *duration* seconds, announce the tea, then offer another cup.

    Recurses for repeat cups; 'y' or empty reuses the same duration,
    'n' exits, and any number sets a new duration. (Python 2 module:
    print statements / raw_input.)
    """
    print "\r\nYour tea will be ready at %s." % (datetime.datetime.now() + datetime.timedelta(seconds=duration)).strftime('%X')
    time.sleep(duration)
    print "\r\nYour tea is ready!"
    if play_ready_sound:
        # Sound playback is best-effort only.
        try:
            playSound(ready_sound_file)
        except:
            pass
    answer = raw_input("\r\nHaving another cup? (Y/N/secs) ").lower()
    if answer == 'y' or not answer:
        countdownToTea(duration)
    elif answer == 'n':
        return
    else:
        try:
            duration = int(answer)
        except:
            pass
        else:
            countdownToTea(duration)
def isLinux():
    """Return True when the platform string identifies Linux."""
    return sys.platform.find('linux') != -1
def isWindows():
    """Return True when running on native Windows (CPython reports 'win32')."""
    return 'win32' == sys.platform
def playSound(sound_file):
    """Play *sound_file* (a WAV) with a platform-appropriate backend.

    Windows: winsound. Linux: spawn `aplay`; if the binary is missing,
    fall back to raw OSS output on /dev/dsp. Other platforms: no-op.
    (Python 2 module: `except OSError, e` syntax.)
    """
    if isWindows():
        import winsound
        winsound.PlaySound(sound_file, winsound.SND_FILENAME)
    elif isLinux():
        import subprocess
        import errno
        try:
            FNULL = open('/dev/null', 'w')
            # Fire-and-forget: the aplay process is not waited on.
            subprocess.Popen(['aplay', '-q', sound_file], stdin=FNULL, stdout=FNULL, stderr=FNULL)
        except OSError, e:
            if e.errno == errno.ENOENT:
                from wave import open as waveOpen
                from ossaudiodev import open as ossOpen
                s = waveOpen(sound_file, 'rb')
                (nc, sw, fr, nf, comptype, compname) = s.getparams()
                dsp = ossOpen('/dev/dsp', 'w')
                try:
                    from ossaudiodev import AFMT_S16_NE
                except ImportError:
                    # NOTE(review): `byteorder` and `ossaudiodev` are
                    # undefined names in this scope (should be
                    # sys.byteorder / `import ossaudiodev`) -- this
                    # fallback raises NameError if ever reached. Confirm
                    # and fix before relying on the OSS path.
                    if byteorder == "little":
                        AFMT_S16_NE = ossaudiodev.AFMT_S16_LE
                    else:
                        AFMT_S16_NE = ossaudiodev.AFMT_S16_BE
                dsp.setparameters(AFMT_S16_NE, nc, fr)
                data = s.readframes(nf)
                s.close()
                dsp.write(data)
                dsp.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,011 |
4,647,154,624,148 |
5f9977a1fce1594d6f2f9e61d0cdf74b37974d2f
|
3dd12506c2d968d37e2d9ff044622f3e3b3295ab
|
/code/nn_patchmodel.py
|
87d385fb8674f2d2d4d9ebee1a80b05e2f9ae2cf
|
[] |
no_license
|
rossfadely/nonnegative
|
https://github.com/rossfadely/nonnegative
|
ad8cb106996851105e40d0e909869ef50cd5555f
|
fab49ad4a218fc54afcc4a2e9ae93a4f9dd5f6eb
|
refs/heads/master
| 2021-01-19T12:56:29.599009 | 2012-10-30T21:17:25 | 2012-10-30T21:17:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
class PsfModels(object):
    """
    Generate synthetic PSF star patches.

    Each patch's 'score' = (flux)**2/(fwhm)**2/sigma_const**2 is drawn
    from a power law over `scorerange`; the score fixes the patch's
    background noise level, and a centred Gaussian PSF is added on top.
    An average PSF (each patch normalized to unit sum) is also built.
    """
    def __init__(self, psfhwhm=1.5,
                 patchshape=(15, 15),
                 npatches=10,
                 scorerange=(50, 1000),
                 flux=1000,
                 bkg=0):
        self.psfhwhm = psfhwhm
        self.patchshape = patchshape
        self.npatches = npatches
        self.scorerange = scorerange
        self.flux = flux
        self.bkg = bkg
        # Both dimensions must be odd so the PSF can sit on a centre pixel.
        assert ((np.mod(patchshape[0],2)) &
                (np.mod(patchshape[1],2))), \
                'Patch size must be odd by odd'
        # PSF stars in center of patch
        self.psfpos = ((patchshape[0]-1)/2,
                       (patchshape[1]-1)/2)
        self.make_patches()
        self.avg_patches()

    def gaussianpsf(self, flux, xgrid, ygrid, x0, y0, psfhwhm):
        """Evaluate a circular Gaussian PSF centred on (x0, y0) over a pixel grid."""
        r2 = (xgrid - x0) ** 2 + (ygrid - y0) ** 2
        return flux * np.exp(-0.5 * r2 / psfhwhm ** 2) / np.sqrt(2. * np.pi * psfhwhm ** 2)

    def make_patch(self, bkg_sigma):
        """Render one patch: Gaussian noise * bkg_sigma + constant level + PSF."""
        patch = np.random.normal(size=self.patchshape) * bkg_sigma
        patch += self.bkg
        rows, cols = self.patchshape
        xg, yg = np.meshgrid(range(rows), range(cols))
        patch += self.gaussianpsf(self.flux, xg, yg, self.psfpos[0],
                                  self.psfpos[1], self.psfhwhm)
        return patch

    def draw_score(self):
        """Rejection-sample a single score from the power law over `scorerange`."""
        lo, hi = self.scorerange
        span = hi - lo
        while True:
            # Propose uniformly, accept against the complementary CDF.
            score = np.random.rand() * span + lo
            cdf = hi * (score - lo) / score / span
            if 1 - cdf > np.random.rand():
                return score

    def make_patches(self):
        """Draw a score per patch, derive its noise sigma, and render it."""
        self.patches = np.zeros((self.npatches, self.patchshape[0],
                                 self.patchshape[1]))
        self.scores = np.zeros(self.npatches)
        self.bkg_sigmas = np.zeros(self.npatches)
        for idx in range(self.npatches):
            self.scores[idx] = self.draw_score()
            # Invert the score definition to get the background sigma.
            self.bkg_sigmas[idx] = np.sqrt(self.flux**2. / (2*self.psfhwhm)**2. / \
                                           self.scores[idx])
            self.patches[idx] = self.make_patch(self.bkg_sigmas[idx])

    def avg_patches(self):
        """Average all patches after normalizing each one to unit sum."""
        self.avgpsf = np.zeros(self.patchshape)
        for idx in range(self.npatches):
            self.avgpsf += self.patches[idx] / self.patches[idx].sum() / self.npatches
|
UTF-8
|
Python
| false | false | 2,012 |
7,808,250,585,529 |
258ce80733b50c6f42946b3ab0f6463cef359515
|
3fb7e63d1a88985eed96af80be9e1c2d1eabfe1c
|
/scripts/corrector_test.py
|
150f9a0c03265e19730ac8e877d55544856dc43a
|
[] |
no_license
|
mkrainin/ecto_corrector
|
https://github.com/mkrainin/ecto_corrector
|
afefac3531f5cd3a519b19eedd15159adf32e547
|
ffe1b2812d87bacb385faa9924268ef895e9ca9e
|
refs/heads/master
| 2020-05-29T14:45:29.762113 | 2011-09-20T20:29:08 | 2011-09-20T20:29:08 | 2,115,363 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# test of componentized corrector along with todservicecaller
import ecto, ecto_ros, ecto_pcl, ecto_pcl_ros, ecto_sensor_msgs, ecto_opencv.highgui, ecto_X
import ecto_corrector
from blackboxes import *
import sys
import math
visualize = True
debug_graphs = True
dir = "/wg/stor2a/mkrainin/object_data/perception challenge/object_meshes"
#noise
rotation = 10*math.pi/180
translation = 0.03
#optimization params
use_icp = False #uses sensor model instead if false
restarts = 10
if __name__ == "__main__":
node_name = "corrector_test"
ecto_ros.init(sys.argv, node_name,False)
"""
Pose refinement subplasm
"""
sub_plasm = ecto.Plasm()
#subscribers/converters
sub_cloud = ecto_sensor_msgs.Subscriber_PointCloud2("Cloud2 Subscriber",
topic_name="/camera/rgb/points")
sub_info = ecto_sensor_msgs.Subscriber_CameraInfo("Cam Info Subscriber",
topic_name="/camera/rgb/camera_info")
msg2cloud = ecto_pcl_ros.Message2PointCloud("Cloud2 To Type-Erased",
format=ecto_pcl.XYZRGB)
cloud2typed = ecto_pcl.PointCloud2PointCloudT("Type-Erased To XYZRGB",
format=ecto_pcl.XYZRGB)
#artificial noise
noise_adder = ecto_corrector.AddNoise("Noise Adder",
rotation=rotation,translation=translation)
#model loading
model_loader = ecto_corrector.ModelLoader("Model Loader")
#region of interest
roi = ecto_corrector.ModelROI("ROI",expansion=70,binning=2)
apply_roi = ecto_corrector.ApplyROI("Apply ROI")
#pose correction
corrector = ecto_corrector.Corrector("Corrector",
use_icp=use_icp,use_sensor_model=(not use_icp),
restarts=restarts,iterations=(8 if use_icp else 4),
inner_loops=8, window_half=1)
sub_graph = [
#conversion
sub_cloud[:] >> msg2cloud[:],
apply_roi[:] >> cloud2typed[:],
#artificial noise
model_loader[:] >> noise_adder["model"],
#region of interest
noise_adder["out_pose"] >> roi["pose"],
model_loader[:] >> roi["model"],
sub_info[:] >> roi["in_camera_info"],
roi[:] >> apply_roi["info"],
msg2cloud[:] >> apply_roi["input"],
#icp correction
noise_adder["out_pose"] >> corrector["input_pose"],
model_loader["model"] >> corrector["model"],
roi["out_camera_info"] >> corrector["camera_info"],
cloud2typed[:] >> corrector["input"],
]
if use_icp:
#edge detection for boundary correspondence removal
edge_detector = ecto_corrector.DepthEdgeDetector("Edge Detector",
depth_threshold=0.02, erode_size=3,open_size=3)
sub_graph += [
apply_roi[:] >> edge_detector[:],
edge_detector["depth_edges"]>> corrector["depth_edges"],
]
if restarts > 0:
#normals
normals = ecto_pcl.NormalEstimation("Normals", k_search=0, radius_search=0.006,
spatial_locator=ecto_pcl.KDTREE_ORGANIZED_INDEX)
sub_graph += [ apply_roi[:] >> normals[:] ]
#segmentation for SEGMENTATION_SENSOR_MODEL restart comparison
segmenter = ecto_corrector.Segmenter("Segmenter",pixel_step=2,
depth_threshold=0.0015,
normal_threshold=0.99,
curvature_threshold=10, #not using curvature threshold
max_depth = 1.5)
sub_graph += [ apply_roi[:] >> segmenter["input"],
normals[:] >> segmenter["normals"],
segmenter["valid_segments"] >> corrector["valid_segments"],
segmenter["invalid"] >> corrector["invalid"], ]
if visualize:
pre_correct_vertices = VerticesPubModule(sub_plasm,node_name+"/pre_correct")
post_correct_vertices = VerticesPubModule(sub_plasm,node_name+"/post_correct")
sub_graph += [
#pre-correct visualization
noise_adder["out_pose"] >> pre_correct_vertices["pose"],
model_loader["model"] >> pre_correct_vertices["model"],
#icp-correct visualization
corrector["output_pose"] >> post_correct_vertices["pose"],
model_loader["model"] >> post_correct_vertices["model"],
]
if use_icp:
depth_drawer = ecto_opencv.highgui.imshow("Drawer",name="depth edges", waitKey=10)
sub_graph += [edge_detector[:] >> depth_drawer[:]]
if restarts > 0:
seg2mat = ecto_corrector.SegmentsToMat("Seg2Mat",min_size=10)
seg_drawer = ecto_opencv.highgui.imshow("Drawer",name="segments", waitKey=10)
sub_graph += [ segmenter["valid_segments"] >> seg2mat["segments"],
apply_roi[:] >> seg2mat["input"],
seg2mat[:] >> seg_drawer[:],
]
sub_plasm.connect(sub_graph)
if(debug_graphs):
ecto.view_plasm(sub_plasm)
#conditional executer for correction subplams
executer = ecto_X.Executer(plasm=sub_plasm, niter=1, outputs={},
inputs={"in_pose":noise_adder,"ply_file":model_loader})
correction_subgraph_if = ecto.If('Correction if success',cell=executer)
"""
Main Plasm
"""
main_plasm = ecto.Plasm()
#triggering
sub_image = ecto_sensor_msgs.Subscriber_Image("Image Subscriber",
topic_name="/camera/rgb/image_color")
img2mat = ecto_ros.Image2Mat("Image to Mat", swap_rgb=True)
show_triggers = {'d_key':ord('d')}
show = ecto_opencv.highgui.imshow("imshow",waitKey=10,triggers=show_triggers)
trigger_and = ecto.And("Trigger And",ninput=2)
#object detection
tod_detector = ecto_corrector.TODServiceCaller("TOD",ply_dir=dir)
tod_detector_if = ecto.If('TOD if key',cell=tod_detector)
main_graph = [
#display
sub_image[:] >> img2mat[:],
img2mat[:] >> show[:],
#triggering
show["d_key"] >> (tod_detector_if["__test__"],trigger_and["in1"]),
tod_detector_if["success"] >> trigger_and["in2"],
trigger_and["out"] >> correction_subgraph_if["__test__"],
#correction subgraph
tod_detector_if["ply_file","pose"]>>correction_subgraph_if["ply_file","in_pose"],
]
main_plasm.connect(main_graph)
if(debug_graphs):
ecto.view_plasm(main_plasm)
#run the plasm
print "Using subscribers + TOD for inputs"
print "Ensure OpenNI node and TOD node are running"
print "Press 'd' in imshow window to detect. 'q' to quit"
sched = ecto.schedulers.Singlethreaded(main_plasm)
sched.execute()
|
UTF-8
|
Python
| false | false | 2,011 |
15,255,723,843,973 |
e0052e1792ed25af090dfd133e9941f8135b6b21
|
bc1207a867c73860c05f70610ec03ceead8a8818
|
/test/test_prime_factors.py
|
2d4318ec45662d4f85518235cf534eb3449d6b52
|
[
"LGPL-3.0-only"
] |
non_permissive
|
ajauhri/math_util
|
https://github.com/ajauhri/math_util
|
14f7407de4ae3ec778f5431bc0621d48d6ed2ef4
|
3a7692c5b59445f6078f48268166b8d3f44bb869
|
refs/heads/master
| 2016-09-06T03:02:53.260373 | 2013-04-19T05:08:09 | 2013-04-19T05:08:09 | 1,498,868 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import unittest
from prime_factors import PrimeFactors, PrimeFactorsError
class Test(unittest.TestCase):
    """Unit tests for PrimeFactors.compute_prime_factors."""

    def test_input_as_one(self):
        # 1 has no prime factors at all.
        self.assertEqual(PrimeFactors(1).compute_prime_factors(), [])

    def test_input_as_zero(self):
        # 0 cannot be factored; the implementation signals this with an error.
        self.assertRaises(PrimeFactorsError, PrimeFactors(0).compute_prime_factors)

    def test_input_as_square(self):
        # The square of a prime repeats that prime in the factor list.
        self.assertEqual(PrimeFactors(49).compute_prime_factors(), [7, 7])
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
523,986,053,776 |
c7bc1e1548c95155ad6dd08036c21836a0a80b63
|
dfe4cc8de905a9901db1fcd75926409a36aebae8
|
/tut3.2_mouse_pos_down_click.py
|
2c071d2e475e8f821120502ce64b580e07a5470c
|
[] |
no_license
|
angrek/pygame_tutorial_exercises
|
https://github.com/angrek/pygame_tutorial_exercises
|
cfa7c743badef8f6d1b7dea8477402078f353e6a
|
6c6ec6f119a19f12378a65181f4ce271c3b4df8d
|
refs/heads/master
| 2020-12-24T17:36:18.131249 | 2014-02-23T22:16:21 | 2014-02-23T22:16:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!c:/Python27/python.exe
import pygame
running = 1
width = 640
height = 400
bgcolor = 0, 0, 0
screen = pygame.display.set_mode((width, height))
while running:
#this activates the window close X button
event = pygame.event.poll()
if event.type == pygame.QUIT:
running = 0
elif event.type == pygame.MOUSEBUTTONDOWN:
print "mouse at (%d, %d)" % event.pos
screen.fill(bgcolor)
pygame.display.flip()
#note that having back to back flips makes it flicker
|
UTF-8
|
Python
| false | false | 2,014 |
15,659,450,767,351 |
48ed5293dfb706df7e81023784b8a7ab521ac088
|
e05a3e25f111403f9a3d4aeef07a7030b87881f3
|
/ping.py
|
9c27e9d5938824ef8d081835f0df91cdc322f72b
|
[] |
no_license
|
DTEGlobal/manejadorEventosG4
|
https://github.com/DTEGlobal/manejadorEventosG4
|
6295e9db53b841737039d7ba94aa5885d62d3fb7
|
61e643deb440e801b4ee5046223e68fa6e7ee9d4
|
refs/heads/master
| 2021-01-01T18:22:44.633578 | 2014-07-29T23:43:52 | 2014-07-29T23:43:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Emmy'
#-------------------------------------------------------------------------------
# Name: ping
# Purpose:
#
# Author: Emmanuel
#
# Created: 06/05/2014
# Copyright: (c) Petrolog 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
import config
import time
import os
import re
import mosquitto
import comunicacionG4
# Create Mosquitto Client for Watchdog broker
# MQTT client dedicated to the local watchdog broker (connected to
# localhost:1884 by pingDaemon below).
mqttcWC = mosquitto.Mosquitto("pingWC")

def on_connect_pingWC(mosq, obj, rc):
    """Connection callback: log the event and subscribe to every topic."""
    config.logging.info("ping: ping Watchdog Client connected")
    mqttcWC.subscribe("#", 0)

# 1 signals that a reboot/shutdown is pending; reset to 0 on recovery.
raspberrypiKiller = 0
#Killer Array
# Shutdown acknowledgements -- presumably each cooperating thread sets its
# slot to True before the reboot/shutdown proceeds (verify against callers).
killerArray = [False, False, False, False]
def pingDaemon():
    """Connectivity watchdog loop.

    Every ``config.delayPing`` seconds, pings the default gateway.  After
    ``config.rebootCount`` consecutive failures with the wifi adapter still
    present, the board is rebooted; if the adapter itself has disappeared,
    the board is powered off via the G4 modem instead.  Runs forever.
    """
    global raspberrypiKiller
    raspberrypiKiller = 0
    config.logging.info("ping: Thread Running ...")
    # Connect to mqtt watchdog server
    mqttcWC.on_connect = on_connect_pingWC
    mqttcWC.connect('localhost', 1884)
    c = 0  # consecutive ping failures
    while True:
        # Sleep in 1 s steps so the MQTT keep-alive keeps being serviced.
        t = 0
        while t < config.delayPing:
            # mqtt client loop for watchdog keep alive
            config.logging.debug("ping: Watchdog Keep Alive")
            mqttcWC.loop(0)
            time.sleep(1)
            t += 1
        config.logging.info("ping: Trying to ping default gateway")
        # NOTE(review): gateway address is hard-coded -- confirm it matches
        # the deployment network.
        pingResult = os.popen("ping -c 1 192.168.1.254").read()
        pingMatch = re.search(', 1 received', pingResult)
        if pingMatch != None:
            config.logging.info("ping: ping to default gateway successful")
            raspberrypiKiller = 0
            c = 0
        else:
            config.logging.critical("ping: No Network Connection")
            config.logging.info("ping: verifying usb port connection to wireless adapter")
            lsusbResult = os.popen("lsusb").read()
            lsusbMatch = re.search('Edimax.+Wireless', lsusbResult)
            if lsusbMatch != None:
                # Adapter present: network outage, count towards a reboot.
                config.logging.info("ping: wireless adapter is detected")
                raspberrypiKiller = 0
                c += 1
                config.logging.critical("ping: {} Times without network connection".format(c))
                if c >= config.rebootCount:
                    raspberrypiKiller = 1
                    # Wait until every cooperating thread has acknowledged
                    # (presumably by setting its killerArray slot -- verify).
                    while killerArray != [True, True, True, True]:
                        time.sleep(0.5)
                        raspberrypiKiller = 1
                    config.logging.critical("ping: ready to reboot... ")
                    time.sleep(1)
                    os.popen("reboot")
            else:
                # Adapter gone: hardware fault, power the board off.
                config.logging.critical("ping: wireless adapter is not detected")
                raspberrypiKiller = 1
                while killerArray != [True, True, True, True]:
                    time.sleep(0.5)
                    raspberrypiKiller = 1
                config.logging.critical("ping: ready to shutdown... powering off")
                time.sleep(1)
                # Tell the G4 modem to cut power before shutting down.
                comunicacionG4.SendCommand("01A60")
                os.popen("shutdown now")
|
UTF-8
|
Python
| false | false | 2,014 |
5,927,054,916,731 |
eb81500b11bd4e1614b6457c5fa9c0726461ed51
|
bf68058a50e7a87095615c8b985b796698ea11ec
|
/7. Crazy Kangaroo.py
|
9e6f2a12700ec84dea2e309f72f271b5231669f0
|
[] |
no_license
|
khannasarthak/HackerEarth
|
https://github.com/khannasarthak/HackerEarth
|
88fdc689cd2c63a5b5ba5e8ee84c4342f8ffe428
|
50ff5e01a19a9a14268b0059ba5d8c54ae11b5ae
|
refs/heads/master
| 2016-05-29T22:17:04.077224 | 2014-12-19T18:49:47 | 2014-12-19T18:49:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 03 17:05:01 2014
@author: Sarthak Khanna
"""
n = raw_input()
n = int(n)
x=[]
y=[]
z=[]
k=[]
l = []
d = []
i = 0
for i in range(n):
a,b,c = raw_input().split()
x.append(a)
y.append(b)
z.append(c)
k.append(int(0))
l.append(int(0))
d.append(int(0))
i =0
for i in range(n):
k[i] = range(int(x[i]),int(y[i])+1)
print k
print x
print y
print z
i = 0
for i in range(n):
l[i] = len(k[i])
print l
i =0
for i in range(n):
for j in range(l[i]):
if k[i][j]%int(z[i]) == 0:
d[i] = d[i] + 1
else:
continue
i = 0
for i in range(n):
print d[i]
|
UTF-8
|
Python
| false | false | 2,014 |
3,667,902,088,276 |
72a4d592de80e3ade54df42768d9a584918ad7e7
|
52229c1107b6fd4f7fb2e2f113bfc24b92c5d796
|
/music/migrations/0001_initial.py
|
ba7bea9701ee31cb2940cab9bbbb81abd0ea8440
|
[] |
no_license
|
bansalsamarth/music-streaming-app
|
https://github.com/bansalsamarth/music-streaming-app
|
9b7d05d6c6e424f21732d8760f7b8971a5b0719a
|
bc3dd3e56488029134a562254306af57f8ed6625
|
refs/heads/master
| 2021-01-01T20:01:12.224462 | 2014-12-04T04:59:49 | 2014-12-04T04:59:49 | 25,387,895 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the music app.

    Creates the Artist, Album and Track tables plus the implicit
    many-to-many join table linking tracks to artists.
    """

    def forwards(self, orm):
        """Apply the migration: create all music tables."""
        # Adding model 'Artist'
        db.create_table(u'music_artist', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('img', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal(u'music', ['Artist'])

        # Adding model 'Album'
        db.create_table(u'music_album', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.TextField')()),
            ('release_year', self.gf('django.db.models.fields.IntegerField')()),
            ('img', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('added', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal(u'music', ['Album'])

        # Adding model 'Track'
        db.create_table(u'music_track', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.TextField')()),
            ('album', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['music.Album'])),
            ('img', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('duration', self.gf('django.db.models.fields.CharField')(max_length=10)),
            ('bitrate', self.gf('django.db.models.fields.CharField')(max_length=10)),
            ('release_year', self.gf('django.db.models.fields.IntegerField')()),
            ('added', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal(u'music', ['Track'])

        # Adding M2M table for field artist on 'Track'
        m2m_table_name = db.shorten_name(u'music_track_artist')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('track', models.ForeignKey(orm[u'music.track'], null=False)),
            ('artist', models.ForeignKey(orm[u'music.artist'], null=False))
        ))
        db.create_unique(m2m_table_name, ['track_id', 'artist_id'])

    def backwards(self, orm):
        """Reverse the migration: drop all music tables."""
        # Deleting model 'Artist'
        db.delete_table(u'music_artist')

        # Deleting model 'Album'
        db.delete_table(u'music_album')

        # Deleting model 'Track'
        db.delete_table(u'music_track')

        # Removing M2M table for field artist on 'Track'
        db.delete_table(db.shorten_name(u'music_track_artist'))

    # Frozen ORM state used by South to build the orm argument above.
    models = {
        u'music.album': {
            'Meta': {'object_name': 'Album'},
            'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'img': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'release_year': ('django.db.models.fields.IntegerField', [], {}),
            'title': ('django.db.models.fields.TextField', [], {})
        },
        u'music.artist': {
            'Meta': {'object_name': 'Artist'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'img': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'music.track': {
            'Meta': {'object_name': 'Track'},
            'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'album': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['music.Album']"}),
            'artist': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['music.Artist']", 'symmetrical': 'False'}),
            'bitrate': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'duration': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'img': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.TextField', [], {}),
            'release_year': ('django.db.models.fields.IntegerField', [], {})
        }
    }

    complete_apps = ['music']
|
UTF-8
|
Python
| false | false | 2,014 |
13,846,974,579,302 |
23dade2971a6d3b0decd24fc2dfae780e0f5bd7f
|
781bdeeb974a46434a707d988ceb4f0265870689
|
/plugins/attacks.py
|
feb00b813ddb704094647647c0c086cf5336be38
|
[
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-only",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"GPL-3.0-or-later"
] |
non_permissive
|
CloudBotIRC/CloudBotLegacy
|
https://github.com/CloudBotIRC/CloudBotLegacy
|
bdb31205b21912830575661e88368854b6707cc8
|
ae126dbf4515b2b2ff43f2f20d5b2f78874b1ef9
|
refs/heads/develop
| 2021-01-13T01:40:56.877214 | 2014-12-08T16:17:44 | 2014-12-08T16:17:44 | 2,812,896 | 6 | 4 |
GPL-3.0
| false | 2023-08-31T00:47:01 | 2011-11-20T09:12:49 | 2023-03-10T15:46:00 | 2021-01-12T01:37:04 | 13,535 | 58 | 54 | 10 |
Python
| false | false |
import random
from util import hook
# Phrase pools, loaded once at import time; lines starting with "//" are
# comments in the data files and are skipped.
with open("plugins/data/larts.txt") as f:
    larts = [line.strip() for line in f if not line.startswith("//")]

with open("plugins/data/insults.txt") as f:
    insults = [line.strip() for line in f if not line.startswith("//")]

with open("plugins/data/flirts.txt") as f:
    flirts = [line.strip() for line in f if not line.startswith("//")]
@hook.command
def lart(inp, action=None, nick=None, conn=None, notice=None):
    """lart <user> -- LARTs <user>."""
    target = inp.strip()

    # Usernames cannot contain spaces.
    if " " in target:
        notice("Invalid username!")
        return

    # If the user is trying to make the bot slap itself, slap them instead.
    lowered = target.lower()
    if lowered in (conn.nick.lower(), "itself"):
        target = nick

    # Pick a random phrase and act it out with the victim's name filled in.
    action(random.choice(larts).format(user=target))
@hook.command
def insult(inp, nick=None, action=None, conn=None, notice=None):
    """insult <user> -- Makes the bot insult <user>."""
    target = inp.strip()

    # Usernames cannot contain spaces.
    if " " in target:
        notice("Invalid username!")
        return

    # Fixes vs. the original: compare case-insensitively like lart() does
    # (so "Itself" or a differently-cased bot nick is still redirected at
    # the requester), and keep the stripped target instead of reverting to
    # the raw, unstripped input in the non-self case.
    if target.lower() == conn.nick.lower() or target.lower() == "itself":
        target = nick

    out = 'insults {}... "{}"'.format(target, random.choice(insults))
    action(out)
@hook.command
def flirt(inp, action=None, conn=None, notice=None):
    """flirt <user> -- Make the bot flirt with <user>."""
    target = inp.strip()

    # Usernames cannot contain spaces.
    if " " in target:
        notice("Invalid username!")
        return

    # Fixes vs. the original: compare case-insensitively like lart() does,
    # and keep the stripped target instead of reverting to the raw input.
    # When targeting the bot, it flirts with "itself" (original behavior).
    if target.lower() == conn.nick.lower() or target.lower() == "itself":
        target = 'itself'

    out = 'flirts with {}... "{}"'.format(target, random.choice(flirts))
    action(out)
|
UTF-8
|
Python
| false | false | 2,014 |
4,071,629,005,813 |
573064fd883a00106b8336030b70c37f086e66b2
|
45f4a7db6f83f535a3ca458a5b0f7b74503946bd
|
/AddTruth2ExpressBins.py
|
b372c45b51d26bb9b95ef178db252c7a89c6754f
|
[] |
no_license
|
ivangrub/scripts
|
https://github.com/ivangrub/scripts
|
806c7daf2388d2fcbb19f38dbbed88d611a17fba
|
1c2ae5ca343f975e8f50ba61d4e6f97cf6824f55
|
refs/heads/master
| 2021-01-19T11:48:54.549130 | 2013-09-10T19:27:14 | 2013-09-10T19:27:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
results = open('Express_TBP.results.xprs')
out = open('Express_TBP.bins.truth.bed','w')
i = 0
for line in results:
if i == 0:
i += 1
continue
s = line.strip().split()
bin = s[1].split('!')
try:
out.write('%s\t%s\t%s\t%s\n' % (bin[1],bin[2],bin[3],s[13]))
except IndexError:
print line
out.close()
results.close()
|
UTF-8
|
Python
| false | false | 2,013 |
3,058,016,739,360 |
b4a31ff6d42175939506acba1f1e7490c071c300
|
fb16a2a07ab5ee75cf828655add4b918a93c4597
|
/algo-2/left_process_5.py
|
310630d19d0aa00afc5e93925aab596014b0f094
|
[] |
no_license
|
felixzhao/WordSegmentation
|
https://github.com/felixzhao/WordSegmentation
|
d04f4728caf79d996bce4ac60e8385ee87a88514
|
b16bbcbc07a9115e630126e0f25ffba1f3b92e6d
|
refs/heads/master
| 2021-01-01T15:59:52.544328 | 2014-01-23T06:32:29 | 2014-01-23T06:32:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Queue import *
log = open('log\left_process_5.txt','w')
def process_in_s(sentence, pos, W_dict, C_dict):
    """Score how many tokens of *sentence* (from index *pos* on) can be
    matched against the candidate word dictionary.

    sentence -- list of tokens; tokens starting with 'u' are unknowns.
    pos      -- index to start matching from.
    W_dict   -- list of candidate words; progressively filtered so every
                surviving word agrees with the sentence at each position.
    C_dict   -- maps an unknown token to its list of candidate characters.

    Returns the number of positions that could be matched.  For unknowns,
    every viable candidate is tried recursively and the best sub-score kept.
    NOTE(review): mutates ``sentence`` in place when resolving unknowns and
    writes a verbose trace to the module-level ``log`` file.
    """
    result = 0
    if pos == len(sentence):
        return 0
    unk_flag = 'u'  # prefix marking an unknown token
    #q = Queue()
    #q.put(sentence)
    #while q.empty() == False :
    #cur_s = q.get()
    cur_s = sentence
    print >> log, '='*50
    print >> log, '1> process sentence : ', cur_s
    for i in xrange(pos,len(cur_s)):
        print >> log, '2> Round ', i
        print >> log, '2> dict : ', W_dict
        print >> log, '2> process : ', cur_s[i]
        ## get chars which in all words in dict i position
        w_d = []
        for w in W_dict:
            w_d.append(w[i])
        print >> log, '2> chars at i positon in word dict : ', w_d
        ## if no word at i position in dict match current process char then next sentence
        if len(w_d) == 0:
            break
        cur_word = cur_s[i]
        ## chenck the word is unk
        if cur_word.startswith(unk_flag) == False :
            # Known token: keep only dictionary words that agree with it
            # at position i; a non-empty survivor set counts as a match.
            print >> log, '3> in known word brunch.'
            ## remove all word if i position not equal sentence_i(cur_word)
            print >> log, '3> before update dict :', W_dict
            print >> log, '3> process word : ', cur_word
            print >> log, '3> process position : ', i
            W_dict = [x for x in W_dict if len(x)> i and x[i] == cur_word]
            print >> log, '3> after update dict : ', W_dict
            if len(W_dict) > 0:
                result += 1
                print >> log, '==> add one, match with a known word. '
            print >> log, '3> end known word brunch.\n'
        else:
            # Unknown token: intersect its candidate characters with the
            # characters the surviving dictionary words allow here.
            print >> log, '3> in unk brunch.'
            w_s = C_dict[cur_s[i]]
            print >> log, '3> process word : ', cur_s[i]
            print >> log, '3> cands : ', w_s
            inter_list = list(set(w_d) & set(w_s))#get_intersection(words_in_dict, w_s)
            print >> log, '3> inters list : ', inter_list
            if len(inter_list) == 0:
                print >> log, '==> no match, end algo. '
                break
            else: ## | inter words | > 0
                # Try each viable candidate recursively and keep the best
                # score for the remainder of the sentence.
                print >> log, '3*> in mulit cands brunch.'
                max_sub_score = 0
                for w in inter_list:
                    print >> log, '4> process inter word : ', w
                    print >> log, '4> process sentence : ', cur_s
                    print >> log, '4> replace position : ', i
                    cur_s[i] = w
                    print >> log, '4> updated sentence : ', cur_s, '\n'
                    sub_score = 0
                    sub_score += process_in_s(cur_s, i+1, W_dict, C_dict)
                    if sub_score > max_sub_score: max_sub_score = sub_score
                result += max_sub_score + 1 ## because this unk get match, so add one
                break
    print >> log, '1> end of sentence process : ', cur_s, '\n'
    print >> log, '='*50
    return result
if __name__ == '__main__':
s = 'a u1 k'.split()
W_d = ['abk','cde']
C_d = {'u1':['b']}
Expected = 3
Actual = process_in_s(s, 0, W_d, C_d)
print >> log, Actual
print >> log, Actual == Expected
print Actual
print Actual == Expected
|
UTF-8
|
Python
| false | false | 2,014 |
7,773,890,845,906 |
e1aa9358449abd7805e81cdf2a2cf91d8ece3636
|
4416c81774b29f3d24826f644294881cd65d2cc1
|
/demo/demo/core/views.py
|
a7d135d5737abe78c0b4bca51f64052e6efb417e
|
[] |
no_license
|
linovia/django-jigsawview
|
https://github.com/linovia/django-jigsawview
|
b8d5b7ccf55b459b78078f9b65dcc11149c5d909
|
6a0acc418ca8dbbc65c192601673f0e8cd033a9f
|
refs/heads/master
| 2021-01-01T17:16:55.347411 | 2013-02-07T21:40:11 | 2013-02-07T21:40:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Bug tracker views.
"""
from jigsawview import JigsawView
from jigsawview.pieces import ObjectPiece
from demo.core.models import Project, Milestone, Bug
from django.core.urlresolvers import reverse
class ProjectMixin(ObjectPiece):
    """Piece handling a single Project, looked up via the project_id URL kwarg."""
    model = Project
    pk_url_kwarg = 'project_id'

    def get_success_url(self, obj=None):
        # After any project operation, go back to the project list.
        return reverse('projects')

    def get_queryset(self):
        # Only show projects the current user is a member of.
        return Project.objects.filter(members=self.request.user.id)
class MilestoneMixin(ObjectPiece):
    """Piece handling a single Milestone, looked up via the milestone_id URL kwarg."""
    model = Milestone
    pk_url_kwarg = 'milestone_id'

    def get_success_url(self, obj=None):
        # Accept the optional ``obj`` argument like the sibling mixins
        # (ProjectMixin, BugMixin) do, so a uniform get_success_url(obj)
        # call from the framework does not raise TypeError here.
        return reverse('milestones')

    def get_queryset(self):
        # Milestones are scoped to the project resolved earlier in the view.
        return Milestone.objects.filter(project=self.view.context['project'])
class BugMixin(ObjectPiece):
    """Piece handling a single Bug, looked up via the bug_id URL kwarg."""
    model = Bug
    pk_url_kwarg = 'bug_id'

    def get_success_url(self, obj=None):
        # Return to the bug list of the project this bug belongs to.
        return reverse('bugs', kwargs={'project_id': obj.project.id})

    def get_queryset(self):
        # Limits the bugs to the current project's ones
        # and possibly the milestone if we have one
        qs = Bug.objects.filter(project=self.view.context['project'])
        if 'milestone' in self.view.context:
            qs = qs.filter(milestone=self.view.context['milestone'])
        return qs
class ProjectView(JigsawView):
    """Detail view of a single project."""
    project = ProjectMixin(default_mode='detail')

class MilestoneView(ProjectView):
    """Detail view of a milestone within a project."""
    milestone = MilestoneMixin(default_mode='detail')

class BugView(ProjectView):
    """Detail view of a bug, with the project's milestones listed alongside."""
    milestones = MilestoneMixin(mode='list')
    bug = BugMixin(default_mode='detail')

class BugMilestoneView(MilestoneView):
    """Detail view of a bug scoped to a specific milestone."""
    bug = BugMixin(default_mode='detail')
|
UTF-8
|
Python
| false | false | 2,013 |
120,259,124,865 |
5199bb629bcd1c2cbf7aa1d3cf0db56fc33ac583
|
879e84259153c05d5d4ac3611ee4d06045b79566
|
/setup.py
|
44b214dc655e62fb3022b7c7ca82680b7dec4d1a
|
[] |
no_license
|
lfiaschi/pystruct
|
https://github.com/lfiaschi/pystruct
|
9cfdda890271714355a39c2a42fe9ce5bd8b8895
|
d2d160976a7e60a97475975669574405a62981a7
|
refs/heads/master
| 2021-01-17T08:42:39.767355 | 2013-02-18T10:22:34 | 2013-02-18T10:22:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy as np
import glob
import sys
# Location of the .pyx sources and extra include directories.
pyxfolder='problems/'
standard='include'
# NOTE(review): hard-coded user-specific include path -- this will break on
# any other machine; consider an environment variable or setup.cfg option.
others=["/home/lfiaschi/include"]

# Cython extension modules to build.  Only "utils" is currently enabled;
# the others are kept commented out for reference.
ext_modules = cythonize([
    Extension(
        "utils",
        [pyxfolder+"utils.pyx"],
        include_dirs=[np.get_include(),standard]+others,
        language="c++",
        extra_compile_args=[],#['-O3'],
        extra_link_args=[]#['-fopenmp']
    ),
#    Extension(
#        "_flow_graph",
#        [pyxfolder+"_flow_graph.pyx"],
#        include_dirs=[np.get_include(),standard]+others,
#        language="c++",
#        extra_compile_args=[],#['-O3'],
#        extra_link_args=[]#['-fopenmp']
#    ),
#
#    Extension(
#        "_superpixel_graph",
#        [pyxfolder+"_superpixel_graph.pyx"],
#        include_dirs=[np.get_include(),standard]+others,
#        language="c++",
#        extra_compile_args=[],#['-O3'],
#        extra_link_args=[]#['-fopenmp']
#    ),
])

setup(
    name="extmodule",
    ext_modules = ext_modules
)
|
UTF-8
|
Python
| false | false | 2,013 |
8,976,481,656,990 |
07e9fe943b9027eb0a8a99f061ef092830a17c56
|
ff1e80e3fc0e6b007f2e07ca0d09c596dae72821
|
/src/ha_gateway_service.py
|
0608f1af0425686e071f5de559b7367df54a3901
|
[
"GPL-1.0-or-later"
] |
non_permissive
|
mdingman/pyinsteon
|
https://github.com/mdingman/pyinsteon
|
6c994eecd863742d0b623607a14ca6fe4f7d33f4
|
c7848f72469c79a528d547886683a3d382b32b6c
|
refs/heads/master
| 2020-12-24T11:18:17.327327 | 2012-06-24T22:01:26 | 2012-06-24T22:01:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
File:
ha_gateway_service.py
Description:
Gateway service for Home Automation protocols
- Support xPL
- Support for XMPP? (http://xmpppy.sourceforge.net/ ?)
Author(s):
Jason Sharpee <[email protected]> http://www.sharpee.com
License:
This free software is licensed under the terms of the GNU public license, Version 1
Usage:
- Run it.
Example: (see bottom of file)
Notes:
- Prototype that will largely not function currently.
Created on Apr 3, 2011
@author: jason
'''
import select
from ha_common import *
from pyinsteon import InsteonPLM
from pyxpl import PyxPL
import time
if __name__ == '__main__':
    print "Start"

    def insteon_received(*params):
        # Callback for messages coming from the Insteon PLM; params[1] is
        # the command -- forwarding to xPL is not wired up yet.
        command = params[1]
        #xpl.send('lighting.basic',"command='%s'" % (command))

    def xpl_received(*params):
        # Callback for incoming xPL messages; currently just logs them.
        print "Here", params
#        jlight.set('on')
#        jlight.set('off')

    #Lets get this party started
    # Insteon PLM reached over TCP; the serial alternative is kept below.
    insteonPLM = InsteonPLM(TCP('192.168.13.146',9761))
#    insteonPLM = InsteonPLM(Serial('/dev/ttyMI0'))
    jlight = InsteonDevice('19.05.7b',insteonPLM)
    jRelay = X10Device('m1',insteonPLM)
    insteonPLM.start()

    # Exercise the devices once on startup.
    jlight.set('on')
    jlight.set('off')
    jRelay.set('on')
    jRelay.set('off')

# Need to get a callback implemented
#    insteon.onReceivedInsteon(insteon_received)

    # xPL gateway listening on UDP broadcast.
    xpl = PyxPL(UDP('0.0.0.0',9763,'255.255.255.255',3865))
    #xpl.onReceive(xpl_received)

    #sit and spin, let the magic happen
    select.select([],[],[])
|
UTF-8
|
Python
| false | false | 2,012 |
5,403,068,899,551 |
3294c6d39dc1a58f54ab13482a36834b632c443a
|
9ca6885d197aaf6869e2080901b361b034e4cc37
|
/RecoLocalMuon/DTSegment/test/DTLPPatternRecoTest_cfg.py
|
9c72fd5b41385022a8a4cc4f1d898c416910149c
|
[] |
no_license
|
ktf/cmssw-migration
|
https://github.com/ktf/cmssw-migration
|
153ff14346b20086f908a370029aa96575a2c51a
|
583340dd03481dff673a52a2075c8bb46fa22ac6
|
refs/heads/master
| 2020-07-25T15:37:45.528173 | 2013-07-11T04:54:56 | 2013-07-11T04:54:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import FWCore.ParameterSet.Config as cms
# CMSSW process configuration testing the DT LP pattern reconstruction on
# single-muon relval samples.
process = cms.Process("DTLPtest")

process.load("FWCore.MessageLogger.MessageLogger_cfi")

# the source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring(
#    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/EC608867-AE6B-DE11-9952-000423D94E1C.root',
#    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/EAC844CB-E16B-DE11-87C6-001D09F29533.root',
    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/9C372501-B06B-DE11-81BC-001D09F24448.root',
    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/9887DA27-B06B-DE11-B743-000423D94E1C.root',
    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/6AE5271F-AF6B-DE11-A616-001D09F232B9.root',
    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/38BCDD1E-AF6B-DE11-A968-001D09F2AD4D.root',
    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/2011B6F1-B26B-DE11-82CC-000423D6CAF2.root',
    '/store/relval/CMSSW_3_1_1/RelValSingleMuPt10/GEN-SIM-DIGI-RAW-HLTDEBUG/MC_31X_V2-v1/0002/10C7AFB7-AC6B-DE11-8C78-001D09F28E80.root'
    ),
    )

# Process all events in the input files.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)

# Standard unpacking, geometry and conditions.
process.load("Configuration.StandardSequences.RawToDigi_cff")
process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff" )
process.GlobalTag.globaltag = "MC_31X_V3::All"

# DT local reconstruction: 1D rec hits + LP 2D segment building.
process.load("RecoLocalMuon.DTRecHit.dt1DRecHits_ParamDrift_cfi")
process.load("RecoLocalMuon.DTSegment.dt2DSegments_LPPatternReco2D_ParamDrift_cfi")
#process.load("RecoLocalMuon.DTSegment.dt2DSegments_CombPatternReco2D_ParamDrift_cfi")

# Magnetic fiuld: force mag field to be 3.8 tesla
process.load("Configuration.StandardSequences.MagneticField_38T_cff")

#To do the validation
process.load("Validation.DTRecHits.DTRecHitQuality_cfi")

dtlocalreco = cms.Sequence(process.dt1DRecHits*process.dt2DSegments)
#process.jobPath = cms.Path(process.muonDTDigis + dtlocalreco)
#if instead we want also the validation
process.jobPath = cms.Path(process.muonDTDigis + dtlocalreco +process.seg2dvalidation)
|
UTF-8
|
Python
| false | false | 2,013 |
15,685,220,594,133 |
6dc429822fd19bf8973ff90098feb7f63e4e9bc2
|
efa84f8a83bcf62f1c3b8b183cfe7e9d18dd2274
|
/actividades/views.py
|
ff2e2710e4c2393b90b3db1840ff0161ba30abc3
|
[] |
no_license
|
comunidad/comunidad
|
https://github.com/comunidad/comunidad
|
25815210175af45386d12a60fa7acb6c71da7317
|
76dcaf1c9cd27669d9213f855f5c274895566ddb
|
refs/heads/master
| 2016-09-15T23:03:37.461476 | 2014-04-03T20:51:31 | 2014-04-03T20:51:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render_to_response
from django.template import RequestContext
from actividades.models import Actividad
# Create your views here.
#def blog(request):
# articulos = Articulo.objects.all()
# return render_to_response('blog/blog.html',
# {'articulos': articulos}, context_instance=RequestContext(request))
def cronograma(request):
    """Render the activity schedule page for the current user."""
    context = {
        'usuario': request.user,
        'actividades': Actividad.objects.all(),
    }
    return render_to_response('actividades/cronograma.html', context,
                              context_instance=RequestContext(request))
|
UTF-8
|
Python
| false | false | 2,014 |
2,911,987,858,533 |
d1c9bb109b147024f22a1c1c0bf53b3daae7aaf4
|
4b63224fd47bad8c5cdfe393d9b3beadadb130ab
|
/projects/TropixProteomics/resources/main/edu/umn/msi/tropix/proteomics/sequest/impl/fake_sequest.py
|
3b56486a0714f8d497efd04b2b469705e962e319
|
[
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"Apache-2.0",
"EPL-1.0"
] |
non_permissive
|
jmchilton/TINT
|
https://github.com/jmchilton/TINT
|
0e7469509c5d74bb85aea9ae8878fb7a075aca0c
|
ddfade25698c1f83a5a57730037d9c07865686bf
|
refs/heads/master
| 2021-01-22T12:08:34.180015 | 2013-10-14T15:31:43 | 2013-10-14T15:31:43 | 2,486,513 | 1 | 1 | null | false | 2015-09-02T14:53:01 | 2011-09-30T02:48:28 | 2013-10-14T15:32:52 | 2013-10-14T15:32:02 | 28,211 | 2 | 1 | 1 |
Java
| null | null |
#!/usr/bin/env python
import sys
import os
#http://stackoverflow.com/questions/1158076/implement-touch-using-python
def touch(fname, times = None):
    """Create *fname* if missing and update its access/modification times.

    ``times`` is passed straight to os.utime: None means "now", otherwise
    an (atime, mtime) tuple.
    """
    # open(..., 'a') creates the file without truncating existing content.
    # The Python-2-only builtin file() is replaced by open(), which behaves
    # identically here and also works on Python 3.
    with open(fname, 'a'):
        os.utime(fname, times)
def main():
    """Fake SEQUEST driver: touch an empty ``.out`` file for every input.

    The last CLI argument is ``-R<path>`` where <path> names a file listing
    one spectrum filename per line; an empty ``.out`` sibling is created for
    each so downstream tooling sees "results".
    """
    import re

    files_path_arg = sys.argv[-1]
    files_path = files_path_arg[2:]  # strip -R
    with open(files_path, "r") as handle:
        contents = handle.read()
    # Fixes vs. the original: the loop variable no longer shadows the
    # builtin ``file``, and empty strings produced by re.split for blank or
    # trailing lines are skipped (they used to create a stray ".out" file).
    for name in re.split("[\n\r]", contents):
        if not name:
            continue
        # Replace the 4-character extension (e.g. ".dta") with ".out".
        touch(name[0:len(name)-4] + ".out")
if __name__ == "__main__":
    # Entry point when invoked as a script (stands in for real SEQUEST).
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
8,409,546,005,181 |
29d9656797f16b17212d658a231b6e7f6dda8ec8
|
b17a356e274a16c39c76dab7312dd9a933a6502d
|
/sg/sg/urls.py
|
5a04c8505a86c6a4879ef4b7516dfffb9350dc79
|
[] |
no_license
|
ecama/movilnet
|
https://github.com/ecama/movilnet
|
479760211aabb6a5e001062c573e3e2d6165fed7
|
b3f9e4cb1d6dee6f8f340cf797ad1b9271a7476c
|
refs/heads/master
| 2016-09-06T07:38:28.148871 | 2014-08-11T20:51:55 | 2014-08-11T20:51:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
# Register models of all installed apps with the admin site.
admin.autodiscover()

urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Serve uploaded media through Django (development only).
    url(r'^media/(?P<path>.*)$','django.views.static.serve',
        {'document_root':settings.MEDIA_ROOT, } ),
    # Home page
    url(r'^', include('apps.inicio.urls')),
    # Main page
    url(r'^principal/', include('apps.principal.urls')),
    # Equipment
    url(r'^equipo/' , include('apps.equipo.urls')),
    # Location
    url(r'^ubicacion/' , include('apps.ubicacion.urls')),
    # Interface
    url(r'^interfaz/' , include('apps.interfaz.urls')),
    # Management (disabled)
#    url(r'^gerencia/' , include('apps.gerencia.urls')),
)
|
UTF-8
|
Python
| false | false | 2,014 |
8,529,805,078,147 |
b8d240e0aba38ff5a79ee12d23de0bb121760bb6
|
3b88e865a1e5f9e6ab65b85e30651fabb8f0ad49
|
/cad/src/commands/RotaryMotorProperties/RotaryMotor_EditCommand.py
|
fd0852ca5be022521def8e5ac2070aefadcd157f
|
[
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-or-later",
"GPL-1.0-or-later",
"GPL-2.0-only"
] |
non_permissive
|
elfion/nanoengineer
|
https://github.com/elfion/nanoengineer
|
2fe63b09e1ba03f6e98a42dd62d87ea808ac38fc
|
844802e54e72de1114a59fb6ff12d9b7be313129
|
refs/heads/master
| 2020-12-25T06:26:20.794034 | 2012-09-20T09:04:09 | 2012-09-20T09:04:09 | 5,874,926 | 4 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2007 Nanorex, Inc. See LICENSE file for details.
"""
RotaryMotor_EditCommand.py
@author: Ninad
@copyright: 2007 Nanorex, Inc. See LICENSE file for details.
@version: $Id$
History:
ninad 2007-10-09: Created. This deprecates 'RotoryMotorGenerator'
"""
import foundation.env as env
from utilities.Log import redmsg, greenmsg, orangemsg
from model.jigs_motors import RotaryMotor
from operations.jigmakers_Mixin import atom_limit_exceeded_and_confirmed
from command_support.Motor_EditCommand import Motor_EditCommand
from commands.RotaryMotorProperties.RotaryMotorPropertyManager import RotaryMotorPropertyManager
class RotaryMotor_EditCommand(Motor_EditCommand):
"""
The RotaryMotor_EditCommand class provides an editCommand Object.
The editCommand, depending on what client code needs it to do, may create
a new rotary motor or it may be used for an existing rotary motor.
"""
#Property Manager
PM_class = RotaryMotorPropertyManager
cmd = greenmsg("Rotary Motor: ")
commandName = 'ROTARY_MOTOR'
featurename = "Rotary Motor"
def _gatherParameters(self):
"""
Return all the parameters from the Rotary Motor Property Manager.
"""
torque = self.propMgr.torqueDblSpinBox.value()
initial_speed = self.propMgr.initialSpeedDblSpinBox.value()
final_speed = self.propMgr.finalSpeedDblSpinBox.value()
dampers_state = self.propMgr.dampersCheckBox.isChecked()
enable_minimize_state = self.propMgr.enableMinimizeCheckBox.isChecked()
color = self.struct.color
atoms = self.win.assy.selatoms_list()
return (torque,
initial_speed,
final_speed,
dampers_state,
enable_minimize_state,
color,
atoms)
def _getStructureType(self):
"""
Subclasses override this method to define their own structure type.
Returns the type of the structure this editCommand supports.
This is used in isinstance test.
@see: EditCommand._getStructureType() (overridden here)
"""
return RotaryMotor
def _createStructure(self):
"""
Create a Rotary Motor object. (The model object which this edit
controller creates)
"""
assert not self.struct
atoms = self.win.assy.selatoms_list()
atomNumberRequirementMet, logMessage = \
self._checkMotorAtomLimits(len(atoms))
if atomNumberRequirementMet:
self.win.assy.part.ensure_toplevel_group()
motor = RotaryMotor(self.win.assy)
motor.findCenterAndAxis(atoms, self.win.glpane)
self.win.assy.place_new_jig(motor)
else:
motor = None
env.history.message(redmsg(logMessage))
return motor
def _modifyStructure(self, params):
"""
Modifies the structure (Rotary Motor) using the provided params.
@param params: The parameters used as an input to modify the structure
(Rotary Motor created using this
RotaryMotor_EditCommand)
@type params: tuple
"""
assert self.struct
assert params
assert len(params) == 7
torque, initial_speed, final_speed, \
dampers_state, enable_minimize_state, \
color, atoms = params
numberOfAtoms = len(atoms)
atomNumberRequirementMet, logMessage = \
self._checkMotorAtomLimits(numberOfAtoms)
if not atomNumberRequirementMet:
atoms = self.struct.atoms[:]
logMessage = logMessage + " Motor will remain attached to the"\
" atoms listed in the 'Motor Atoms' list in this" \
" property manager"
logMessage = orangemsg(logMessage)
self.propMgr.updateMessage(logMessage)
assert len(atoms) > 0
self.struct.cancelled = False
self.struct.torque = torque
self.struct.initial_speed = initial_speed
self.struct.speed = final_speed
self.struct.dampers_enabled = dampers_state
self.struct.enable_minimize = enable_minimize_state
self.struct.color = color
#Not sure if it is safe to do self.struct.atoms = atoms
#Instead using 'setShaft method -- ninad 2007-10-09
self.struct.setShaft(atoms)
self.struct.findCenterAndAxis(atoms, self.win.glpane)
self.propMgr.updateAttachedAtomListWidget(atomList = atoms)
self.win.win_update() # Update model tree
self.win.assy.changed()
##=====================================##
def _checkMotorAtomLimits(self, numberOfAtoms):
"""
Check if the number of atoms selected by the user, to which the motor
is to be attached, is within acceptable limits.
@param numberOfAtoms: Number of atoms selected by the user, to which the
motor needs to be attached.
@type numberOfAtoms: int
"""
logMessage = ""
isAtomRequirementMet = False
if numberOfAtoms == 0:
logMessage = "No Atoms selected to create a Rotary Motor."
isAtomRequirementMet = False
return (isAtomRequirementMet, logMessage)
# wware 051216, bug 1114, need >= 2 atoms for rotary motor
if numberOfAtoms < 2:
msg = redmsg("You must select at least two atoms to create"\
" a Rotary Motor.")
logMessage = msg
isAtomRequirementMet = False
return (isAtomRequirementMet, logMessage)
if numberOfAtoms >= 2 and numberOfAtoms < 200:
isAtomRequirementMet = True
logMessage = ""
return (isAtomRequirementMet, logMessage)
# Print warning if over 200 atoms are selected.
# The warning should be displayed in a MessageGroupBox. Mark 2007-05-28
if numberOfAtoms > 200:
if not atom_limit_exceeded_and_confirmed(self.win,
numberOfAtoms,
limit = 200):
logMessage = "Warning: Motor is attached to more than 200 "\
"atoms. This may result in a performance degradation"
isAtomRequirementMet = True
else:
logMessage = "%s creation cancelled" % (self.cmdname)
isAtomRequirementMet = False
return (isAtomRequirementMet, logMessage)
|
UTF-8
|
Python
| false | false | 2,012 |
94,489,320,073 |
d7404378a14703ae3244c16c54de0de05d43317a
|
5b5e9788f82c298e27ca998c8c59250b64d9adc6
|
/vweb/utils/models/category.py
|
44163a8248dffab73998c012cef41a8edc1a4e88
|
[] |
no_license
|
vuuvv/vweb
|
https://github.com/vuuvv/vweb
|
c1eba699fda8f757b8b537ea34b487e7662bac39
|
2b15abfef7149f3f87f004f2da90426e2cd3cf19
|
refs/heads/master
| 2021-01-01T19:55:24.802898 | 2013-01-14T12:43:48 | 2013-01-14T12:43:48 | 3,486,289 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.db.transaction import commit_on_success
from mptt.models import MPTTModel, TreeForeignKey
class TreeCategory(MPTTModel):
parent = TreeForeignKey('self', null=True, blank=True, verbose_name=_('Parent'), related_name='children')
name = models.CharField(_('Name'), max_length=100)
slug = models.CharField(_('Slug'), max_length=100)
url = models.CharField(_('URL'), max_length=255, blank=True, editable=False, db_index=True)
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(TreeCategory, self).__init__(*args, **kwargs)
self._old_url = self.url
def __unicode__(self):
return u'%s' % (self.name)
@commit_on_success
def save(self, *args, **kwargs):
if self.slug is None:
self.slug = self.name.lower()
cached_urls = {}
if self.is_root_node():
self.url = u'/%s/' % self.slug if self.slug else u'/'
else:
self.url = u'%s%s/' % (self.parent.url, self.slug)
super(TreeCategory, self).save(*args, **kwargs)
if self.is_leaf_node() or self.url == self._old_url:
return
descendants = self.get_descendants()
stack = [self]
last_page = self
for node in descendants:
parent = stack[-1]
if node.rght < last_page.rght:
# child node
stack.append(last_page)
parent = last_page
else:
# tree up
while node.rght > parent.rght:
stack.pop()
parent = stack[-1]
node.url = u'%s%s/' % (parent.url, node.slug)
super(TreeCategory, node).save()
last_page = node
|
UTF-8
|
Python
| false | false | 2,013 |
12,584,254,196,284 |
e08eee59a78944a71a71e76e197f4269fd747352
|
535f972a89d1b7f31697edc7b982e714974288f8
|
/braille_alphabets.py
|
16bc84bad26d616f74bbbf0831ed7fcd9bebdbec
|
[] |
no_license
|
herlesupreeth/Braille-Alphabets-Activity
|
https://github.com/herlesupreeth/Braille-Alphabets-Activity
|
d7159082674e5b5b3aef38a816ec4998811501e6
|
e3c8b0e7b06d22c253e5bc48728789f7d0cf9e6d
|
refs/heads/master
| 2021-01-17T04:52:04.111528 | 2013-12-27T00:38:09 | 2013-12-27T00:38:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# gcompris - braille_alphabets.py
#
# Copyright (C) 2011 Bruno Coudoin | Srishti Sethi
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
# braille_alphabets activity.
import gtk
import gtk.gdk
import gcompris
import gcompris.utils
import gcompris.skin
import gcompris.bonus
import goocanvas
import pango
import gcompris.sound
import string
import random
from gcompris import gcompris_gettext as _
from BrailleChar import *
from BrailleMap import *
CELL_WIDTH = 30
COLOR_ON = 0xFF0000FFL
COLOR_OFF = 0X00000000L
CIRCLE_FILL = "white"
CIRCLE_STROKE = "black"
MAP_STROKE = "black"
MAP_FILL = "light blue"
#Array Declaration
letter_arr_one = ['A','B','C','D','E','F','G']
random.shuffle(letter_arr_one)
letter_arr_two = ['H','I','J','K','L','M','N']
random.shuffle(letter_arr_two)
letter_arr_three = ['O','P','Q','R','S','T','U']
random.shuffle(letter_arr_three)
letter_arr_four = ['V','W','V','X','Y','Z']
random.shuffle(letter_arr_four)
letter_arr_five = [0,1,2,3,4,5,6,7,8,9]
random.shuffle(letter_arr_five)
class Gcompris_braille_alphabets:
"""Empty gcompris python class"""
def __init__(self, gcomprisBoard):
# Save the gcomprisBoard, it defines everything we need
# to know from the core
#defining the number of levels in activity
self.counter = 0
self.gcomprisBoard = gcomprisBoard
self.gcomprisBoard.level = 1
self.gcomprisBoard.maxlevel=6
self.gcomprisBoard.sublevel=1
self.gcomprisBoard.number_of_sublevel=1
#Boolean variable decaration
self.mapActive = False
# Needed to get key_press
gcomprisBoard.disable_im_context = True
def start(self):
# Set the buttons we want in the bar
gcompris.bar_set(gcompris.BAR_LEVEL)
gcompris.bar_set_level(self.gcomprisBoard)
pixmap = gcompris.utils.load_svg("braille_alphabets/target.svg")
gcompris.bar_set_repeat_icon(pixmap)
gcompris.bar_set(gcompris.BAR_LEVEL|gcompris.BAR_REPEAT_ICON)
gcompris.bar_location(20, -1, 0.6)
# Create our rootitem. We put each canvas item in it so at the end we
# only have to kill it. The canvas deletes all the items it contains
# automaticaly.
self.rootitem = goocanvas.Group(parent=
self.gcomprisBoard.canvas.get_root_item())
self.board_upper(self.gcomprisBoard.level)
def end(self):
# Remove the root item removes all the others inside it
self.rootitem.remove()
def ok(self):
print("learnbraille ok.")
def repeat(self):
if(self.mapActive):
self.end()
self.start()
self.mapActive = False
else :
self.rootitem.props.visibility = goocanvas.ITEM_INVISIBLE
self.rootitem = goocanvas.Group(parent=
self.gcomprisBoard.canvas.get_root_item())
gcompris.set_default_background(self.gcomprisBoard.canvas.get_root_item())
map_obj = BrailleMap(self.rootitem, COLOR_ON, COLOR_OFF,MAP_FILL, MAP_STROKE)
# Move back item
self.backitem = goocanvas.Image(parent = self.rootitem,
pixbuf = gcompris.utils.load_pixmap("braille_alphabets/back.svg"),
x = 600,
y = 450,
tooltip = _("Back to the activity")
)
self.backitem.connect("button_press_event", self.move_back)
gcompris.utils.item_focus_init(self.backitem, None)
self.mapActive = True
def move_back(self,event,target,item):
self.end()
self.start()
def config(self):
print("learnbraille config.")
def key_press(self, keyval, commit_str, preedit_str):
utf8char = gtk.gdk.keyval_to_unicode(keyval)
strn = u'%c' % utf8char
print("Gcompris_learnbraille key press keyval=%i %s" % (keyval, strn))
def pause(self,pause):
if(pause == 0):
self.counter +=1
if (self.counter == self.sublevel):
self.increment_level()
self.end()
self.start()
def set_level(self,level):
gcompris.sound.play_ogg("sounds/receive.wav")
self.gcomprisBoard.level = level
self.gcomprisBoard.sublevel = 1
gcompris.bar_set_level(self.gcomprisBoard)
self.end()
self.start()
def increment_level(self):
self.counter = 0
gcompris.sound.play_ogg("sounds/bleep.wav")
self.gcomprisBoard.sublevel += 1
if(self.gcomprisBoard.sublevel>self.gcomprisBoard.number_of_sublevel):
self.gcomprisBoard.sublevel=1
self.gcomprisBoard.level += 1
if(self.gcomprisBoard.level > self.gcomprisBoard.maxlevel):
self.gcomprisBoard.level = 1
def board_upper(self,level):
if(level == 1):
gcompris.set_background(self.gcomprisBoard.canvas.get_root_item(),
"braille_alphabets/braille_tux.svgz")
goocanvas.Text(parent=self.rootitem,
x = 385,
y = 100,
fill_color = "black",
font = gcompris.skin.get_font("gcompris/title"),
anchor = gtk.ANCHOR_CENTER,
text = _("Braille : Unlocking the Code"))
# Braille Intro
text = _("A system of writing for the blinds that uses characters made "
"of raised dots.")
goocanvas.Text(parent=self.rootitem,
x = 520,
y = 200,
fill_color = "black",
font = gcompris.skin.get_font("gcompris/subtitle"),
width = 400,
anchor = gtk.ANCHOR_CENTER,
text = text)
# Braille Description
text = _("The braille cell is composed of 6 dot cells organized in form "
"of two vertical columns with 3 dots {1,2,3} side by side on "
"left and 3 dots side by side on right {4,5,6}")
goocanvas.Text(parent=self.rootitem,
x=520,
y=300,
fill_color = "black",
font = gcompris.skin.get_font("gcompris/subtitle"),
width = 400,
anchor = gtk.ANCHOR_CENTER,
text = text)
# TUX svghandle
svghandle = gcompris.utils.load_svg("braille_alphabets/braille_tux.svgz")
self.tuxitem = goocanvas.Svg(
parent = self.rootitem,
svg_handle = svghandle,
svg_id = "#TUX-5",
tooltip = _("I am braille TUX")
)
self.tuxitem.connect("button_press_event", self.next_level)
gcompris.utils.item_focus_init(self.tuxitem, None)
goocanvas.Text(parent = self.rootitem,
x = 435,
y = 475,
fill_color ="black",
font = "Sans 10",
anchor= gtk.ANCHOR_CENTER,
width = 355,
text = _("Finished reading braille ! Now click on "
"me and try reproducing braille characters"))
elif(level == 2):
range_lower= 0
range_upper= 7
self.sublevel = range_upper - range_lower
self.board_tile(range_lower,range_upper)
self.random_letter = letter_arr_one[self.counter]
self.braille_cell(level)
elif(level == 3) :
range_lower= 7
range_upper= 14
self.sublevel = range_upper - range_lower
self.board_tile(range_lower,range_upper)
self.random_letter = letter_arr_two[self.counter]
self.braille_cell(level)
elif(level == 4):
range_lower= 14
range_upper= 21
self.sublevel = range_upper - range_lower
self.board_tile(range_lower,range_upper)
self.random_letter = letter_arr_three[self.counter]
self.braille_cell(level)
elif(level == 5):
range_lower= 21
range_upper= 26
self.sublevel = range_upper - range_lower
self.board_tile(range_lower,range_upper)
self.random_letter = letter_arr_four[self.counter]
self.braille_cell(level)
elif(level == 6):
range_lower= 0
range_upper= 10
self.sublevel = range_upper - range_lower
self.board_number(range_lower,range_upper)
self.random_letter = letter_arr_five[self.counter]
self.braille_letter = "number"
self.braille_cell(level)
def next_level(self,event,target,item):
self.increment_level()
self.end()
self.start()
def board_tile(self,range_x,range_y):
for i, letter in enumerate(string.ascii_uppercase[range_x:range_y]):
tile = BrailleChar(self.rootitem, i*(CELL_WIDTH+60)+60,
80, 50, letter ,COLOR_ON ,COLOR_OFF ,CIRCLE_FILL,
CIRCLE_STROKE, True ,False ,True, None)
def board_number(self,num_1,num_2):
for letter in range(num_1,num_2):
tile = BrailleChar(self.rootitem,letter *(CELL_WIDTH+30)+60,
80, 50, letter ,COLOR_ON ,COLOR_OFF ,CIRCLE_FILL,
CIRCLE_STROKE, True,False ,True, None)
def display_letter(self,letter):
goocanvas.Text(parent=self.rootitem,
x=690,
y=330,
fill_color="black",
font="Sans 78",
anchor=gtk.ANCHOR_CENTER,
text=str(letter))
def braille_cell(self, level):
if (level == 6):
self.letter = "number"
else :
self.letter = "alphabet"
gcompris.set_background(self.gcomprisBoard.canvas.get_root_item(),
"braille_alphabets/mosaic.svgz")
#Translators : Do not translate the token {letter}
goocanvas.Text(parent = self.rootitem,
x = 100,
y = 200,
text=_("Click on the dots in braille cell area to produce letter {letter}").format(letter = self.random_letter),
fill_color="black",
font='SANS 15')
goocanvas.Text(parent=self.rootitem,
x=160.0, y=250.0,
text=_("Braille Cell"),
fill_color="black",
font='Sans BOLD')
BrailleChar(self.rootitem, 150, 270, 120, '',COLOR_ON ,COLOR_OFF,CIRCLE_FILL,CIRCLE_STROKE,
False,True,False,callback = self.letter_change , braille_letter = self.letter)
for i in range(2):
for j in range(3):
goocanvas.Text(parent=self.rootitem,
text=(str(j + 1 + i * 3)),
font='Sans 20',
fill_color="black",
x=i * 120 + 140,
y=j * 45 + 290)
# OK Button
ok = goocanvas.Svg(parent = self.rootitem,
svg_handle = gcompris.skin.svg_get(),
svg_id = "#OK",
tooltip = _("Click to confirm your selection of dots")
)
ok.translate( -165,-155)
ok.connect("button_press_event", self.ok_event)
gcompris.utils.item_focus_init(ok, None)
def ok_event(self,item,target,event):
if(self.random_letter == self.correct_letter):
self.display_letter(self.correct_letter)
gcompris.bonus.display(gcompris.bonus.WIN,gcompris.bonus.SMILEY)
else :
gcompris.bonus.display(gcompris.bonus.LOOSE,gcompris.bonus.SMILEY)
def letter_change(self,letter):
self.correct_letter = letter
|
UTF-8
|
Python
| false | false | 2,013 |
1,683,627,192,542 |
4e13d9256de4d0058b4eb268b602faae99087d67
|
f7e8eadcf498c755ce8447bd1ead37328c606cb0
|
/trail/nfamaps.py
|
7991355685776a0579cee8c473e8f2b9e3ac4a90
|
[] |
no_license
|
leomauro/langscape
|
https://github.com/leomauro/langscape
|
3eb5eecb856e9b04126119f40ace1f4f387935a3
|
d73c37c821443098f3de2c64db5f3afbef1a3384
|
refs/heads/master
| 2021-01-10T08:40:18.854339 | 2011-07-09T18:42:29 | 2011-07-09T18:42:29 | 49,111,749 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__all__ = ["create_nfa_skeleton", "create_sk_map", "nfa_comp"]
def create_nfa_skeleton(nfa):
'''
An nfa-skeleton abstracts away concrete state information but preserves the
relations between states ( follow sets ). Each state will be reduced to an integer
number normalized to the range [-1, 0, ... len(nfa) - 1 ]. The number -1 is
reserved for the single FINAL state.
NFA-skeletons are used to compare NFAs which usually requires an additional
Maptracker which is described below.
An nfa-skeleton will look like ::
sk_nfa = {0: set([1, 3, 6, -1, 7]),
1: set([2]),
2: set([1, 3, 6, -1, 7]),
3: set([6, 3, 4, 5, -1]),
4: set([4, 5]),
5: set([3, 6, -1]),
6: set([-1]),
7: set([-1])}
'''
trans = nfa[2]
skeleton = {}
states = sorted(trans.keys(), key = lambda state: state[1])
for s in trans:
L = []
follow = trans[s]
for f in follow:
if f[0] is None:
L.append(-1)
else:
L.append(states.index(f))
L.sort()
skeleton[states.index(s)] = set(L)
return skeleton
class Maptracker(object):
'''
A Maptracker is used to find a bijection between two NFA-skeletons.
Example: let the following two NFA-skeletons be defined::
sk1 = {0: set([1, 3, 6, -1, 7]),
1: set([2]),
2: set([1, 3, 6, -1, 7]),
3: set([6, 3, 4, 5, -1]),
4: set([4, 5]),
5: set([3, 6, -1]),
6: set([-1]),
7: set([-1])}
sk2 = {0: set([1, 3, 4, -1, 7]),
1: set([2]),
2: set([1, 3, 4, -1, 7]),
3: set([-1]),
4: set([6, 4, 5, -1, 7]),
5: set([5, 6]),
6: set([4, -1, 7]),
7: set([-1])}
When we want to figure out if they are equivalent we need to
find a bijection of the set {0 .. 7} onto itself ::
>>> mt = Maptracker(sk1, sk2)
>>> mt.run()
{0: 0, 1: 1, 2: 2, 3: 4, 4: 5, 5: 6, 6: 7, 7: 3}
The values 0 and -1 are fixpoints. If a Maptracker fails to find a bijection
it returns an empty dict.
'''
def __init__(self, sk1, sk2):
self.sk1 = sk1
self.sk2 = sk2
def accept(self, value, stack):
e1, e2 = value # e1 -> e2
V1 = self.sk1[e1]
V2 = self.sk2[e2]
#
# e1 -> e2 => v1 -> v2
#
# check consistency of the choice of the mapping
if len(V1)!=len(V2):
return False
m = dict(p for (p,q) in stack)
if e2 in m.values():
return False
for v1 in V1:
if v1 == e1:
if e2 not in V2:
return False
if v1 in m:
if m[v1] not in V2:
return False
for s in m:
if e1 in self.sk1[s]:
if e2 not in self.sk2[m[s]]:
return False
return True
def run(self):
'''
Creates the NFA-skeleton map as a dict. Returns an empty dict when it can't
be constructed.
'''
stack = []
if len(self.sk1) != len(self.sk2):
return {}
sig1 = sorted(len(v) for v in self.sk1.values())
sig2 = sorted(len(v) for v in self.sk2.values())
if sig1!=sig2:
return {}
L1 = self.sk1.keys()
L2 = self.sk2.keys()
i = j = 0
while i<len(L1):
e1 = L1[i]
while j<len(L2):
e2 = L2[j]
if self.accept((e1,e2),stack):
stack.append(((e1,e2),(i,j)))
j = 0
break
j+=1
else:
if stack:
_, (i,j) = stack.pop()
if j == -1:
return {}
j+=1
continue
else:
return {}
i+=1
return dict(elem[0] for elem in stack)
def nfa_comp(nfa1, nfa2):
'''
Compares two NFAs by means of their NFA-skeleton.
'''
s_nfa1 = create_nfa_skeleton(nfa1)
s_nfa2 = create_nfa_skeleton(nfa2)
map = Maptracker(s_nfa1, s_nfa2).run()
if map:
return True
return False
if __name__ == '__main__':
import pprint
import langscape
from langscape.ls_const import*
python = langscape.load_langlet("python")
nfa = python.parse_nfa.nfas[python.symbol.varargslist]
print "RULE", nfa[0]
#pprint.pprint(nfa)
#L = create_traces(python, python.symbol.file_input)
sk1 = {0: set([1, 3, 6, -1, 7]),
1: set([2]),
2: set([1, 3, 6, -1, 7]),
3: set([6, 3, 4, 5, -1]),
4: set([4, 5]),
5: set([3, 6, -1]),
6: set([-1]),
7: set([-1])}
sk2 = {0: set([1, 3, 4, -1, 7]),
1: set([2]),
2: set([1, 3, 4, -1, 7]),
3: set([-1]),
4: set([6, 4, 5, -1, 7]),
5: set([5, 6]),
6: set([4, -1, 7]),
7: set([-1])}
GR2 = {0: set([1, 3, 6, -1, 7]),
1: set([2]),
2: set([1, 3, 6, -1, 7]),
3: set([6, 3, 4, 5, -1]),
4: set([4, 5]),
5: set([3, 6, -1]),
6: set([-1]),
7: set([-1])}
GR1 = {0: set([1, 3, 4, -1, 7]),
1: set([2]),
2: set([1, 3, 4, -1, 7]),
3: set([-1]),
4: set([6, 4, 5, -1, 7]),
5: set([5, 6]),
6: set([4, -1, 7]),
7: set([-1])}
phi = {0: 0, 1: 1, 2: 2, 3: 4, 4: 5, 5: 6, 6: 7, 7: 1}
r = Maptracker(GR1, GR2).run()
print r
P = lambda k: (-1 if k == -1 else r[k] )
print GR2 == dict( (P(key), set(map(P, value))) for (key, value) in GR1.items() )
|
UTF-8
|
Python
| false | false | 2,011 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.