Dataset columns and per-column statistics (string length ranges or number of distinct classes). Each record below lists these fields in order, with `|` lines separating the fields.

| column | type | stats |
|---|---|---|
| commit | stringlengths | 40 to 40 |
| old_file | stringlengths | 4 to 234 |
| new_file | stringlengths | 4 to 234 |
| old_contents | stringlengths | 10 to 3.01k |
| new_contents | stringlengths | 19 to 3.38k |
| subject | stringlengths | 16 to 736 |
| message | stringlengths | 17 to 2.63k |
| lang | stringclasses | 4 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 to 82.6k |
| config | stringclasses | 4 values |
| content | stringlengths | 134 to 4.41k |
| fuzzy_diff | stringlengths | 29 to 3.44k |
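For readers who want to work with records of this shape programmatically, here is a minimal sketch of loading and inspecting one row with the Hugging Face `datasets` library. The dataset identifier and split name are placeholders, not the published name of this dataset; substitute the real name or a local path when loading.

```python
from datasets import load_dataset

# Placeholder identifier and split -- replace with the actual dataset name
# (or a local path to the data files) before running.
ds = load_dataset("your-org/your-commit-dataset", split="train")

row = ds[0]
print(row["commit"])                          # 40-character commit SHA
print(row["old_file"], "->", row["new_file"])  # file path before and after
print(row["subject"])                          # one-line commit summary
print(row["lang"], row["license"], row["config"])

# The "content" field packs each record into a single prompt-style string with
# "## Code Before:", "## Instruction:" and "## Code After:" markers.
before = row["content"].split("## Instruction:")[0]
print(before[:200])
```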
f798066d20116d2cfd35cae0bf0771799677f6c2
|
py509/bin/verify.py
|
py509/bin/verify.py
|
"""Verify a certificate."""
import argparse
import logging
import sys
import certifi
from OpenSSL import crypto
from py509.x509 import load_x509_certificates
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
def main():
trust_store = []
with open(certifi.where()) as fh:
#with open('/Users/sholsapp/workspace/py509/test.pem') as fh:
trust_store = list(load_x509_certificates(fh.read()))
x509store = crypto.X509Store()
for ca in trust_store:
print ca.get_subject()
x509store.add_cert(ca)
x509cert = crypto.load_certificate(crypto.FILETYPE_PEM, sys.stdin.read())
try:
crypto.X509StoreContext(x509store, x509cert).verify_certificate()
print 'Success'
except crypto.X509StoreContextError as e:
print 'Failed on {0}'.format(e.certificate.get_subject())
print 'Issuer {0}'.format(e.certificate.get_issuer())
print 'Message: {0}'.format(e)
|
"""Verify a certificate."""
import argparse
import logging
import sys
import certifi
from OpenSSL import crypto
from py509.x509 import load_x509_certificates
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--ca', required=False, default=certifi.where())
args = parser.parse_args()
trust_store = []
with open(args.ca) as fh:
trust_store = list(load_x509_certificates(fh.read()))
x509store = crypto.X509Store()
for ca in trust_store:
print ca.get_subject()
x509store.add_cert(ca)
x509cert = crypto.load_certificate(crypto.FILETYPE_PEM, sys.stdin.read())
try:
crypto.X509StoreContext(x509store, x509cert).verify_certificate()
print 'Success'
except crypto.X509StoreContextError as e:
print 'Failed on {0}'.format(e.certificate.get_subject())
print 'Issuer {0}'.format(e.certificate.get_issuer())
print 'Message: {0}'.format(e)
|
Allow --ca parameter to specify trust store
|
Allow --ca parameter to specify trust store
|
Python
|
apache-2.0
|
sholsapp/py509
|
python
|
## Code Before:
"""Verify a certificate."""
import argparse
import logging
import sys
import certifi
from OpenSSL import crypto
from py509.x509 import load_x509_certificates
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
def main():
trust_store = []
with open(certifi.where()) as fh:
#with open('/Users/sholsapp/workspace/py509/test.pem') as fh:
trust_store = list(load_x509_certificates(fh.read()))
x509store = crypto.X509Store()
for ca in trust_store:
print ca.get_subject()
x509store.add_cert(ca)
x509cert = crypto.load_certificate(crypto.FILETYPE_PEM, sys.stdin.read())
try:
crypto.X509StoreContext(x509store, x509cert).verify_certificate()
print 'Success'
except crypto.X509StoreContextError as e:
print 'Failed on {0}'.format(e.certificate.get_subject())
print 'Issuer {0}'.format(e.certificate.get_issuer())
print 'Message: {0}'.format(e)
## Instruction:
Allow --ca parameter to specify trust store
## Code After:
"""Verify a certificate."""
import argparse
import logging
import sys
import certifi
from OpenSSL import crypto
from py509.x509 import load_x509_certificates
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--ca', required=False, default=certifi.where())
args = parser.parse_args()
trust_store = []
with open(args.ca) as fh:
trust_store = list(load_x509_certificates(fh.read()))
x509store = crypto.X509Store()
for ca in trust_store:
print ca.get_subject()
x509store.add_cert(ca)
x509cert = crypto.load_certificate(crypto.FILETYPE_PEM, sys.stdin.read())
try:
crypto.X509StoreContext(x509store, x509cert).verify_certificate()
print 'Success'
except crypto.X509StoreContextError as e:
print 'Failed on {0}'.format(e.certificate.get_subject())
print 'Issuer {0}'.format(e.certificate.get_issuer())
print 'Message: {0}'.format(e)
|
...
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--ca', required=False, default=certifi.where())
args = parser.parse_args()
trust_store = []
with open(args.ca) as fh:
trust_store = list(load_x509_certificates(fh.read()))
x509store = crypto.X509Store()
...
|
7df596443ac992039d239aa34212ff83943f6d7f
|
test/time-test.c
|
test/time-test.c
|
void test_zero_milliseconds_to_timeval() {
size_t milliseconds = 0;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 0);
}
void test_one_millisecond_to_timeval() {
size_t milliseconds = 1;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 1000);
}
void test_one_thousand_milliseconds_to_timeval() {
size_t milliseconds = 1000;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 0);
}
int main() {
add_test(test_zero_milliseconds_to_timeval);
add_test(test_one_millisecond_to_timeval);
add_test(test_one_thousand_milliseconds_to_timeval);
return run_tests();
}
|
void test_zero_milliseconds_to_timeval() {
size_t milliseconds = 0;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 0);
}
void test_one_millisecond_to_timeval() {
size_t milliseconds = 1;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 1000);
}
void test_one_thousand_milliseconds_to_timeval() {
size_t milliseconds = 1000;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 0);
}
void test_one_thousand_and_one_milliseconds_to_timeval() {
size_t milliseconds = 1001;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 1000);
}
int main() {
add_test(test_zero_milliseconds_to_timeval);
add_test(test_one_millisecond_to_timeval);
add_test(test_one_thousand_milliseconds_to_timeval);
add_test(test_one_thousand_and_one_milliseconds_to_timeval);
return run_tests();
}
|
Add milliseconds to timeval test
|
Add milliseconds to timeval test
|
C
|
unlicense
|
mmurdoch/disco,mmurdoch/disco
|
c
|
## Code Before:
void test_zero_milliseconds_to_timeval() {
size_t milliseconds = 0;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 0);
}
void test_one_millisecond_to_timeval() {
size_t milliseconds = 1;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 1000);
}
void test_one_thousand_milliseconds_to_timeval() {
size_t milliseconds = 1000;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 0);
}
int main() {
add_test(test_zero_milliseconds_to_timeval);
add_test(test_one_millisecond_to_timeval);
add_test(test_one_thousand_milliseconds_to_timeval);
return run_tests();
}
## Instruction:
Add milliseconds to timeval test
## Code After:
void test_zero_milliseconds_to_timeval() {
size_t milliseconds = 0;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 0);
}
void test_one_millisecond_to_timeval() {
size_t milliseconds = 1;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 0 && t.tv_usec == 1000);
}
void test_one_thousand_milliseconds_to_timeval() {
size_t milliseconds = 1000;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 0);
}
void test_one_thousand_and_one_milliseconds_to_timeval() {
size_t milliseconds = 1001;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 1000);
}
int main() {
add_test(test_zero_milliseconds_to_timeval);
add_test(test_one_millisecond_to_timeval);
add_test(test_one_thousand_milliseconds_to_timeval);
add_test(test_one_thousand_and_one_milliseconds_to_timeval);
return run_tests();
}
|
# ... existing code ...
expect(t.tv_sec == 1 && t.tv_usec == 0);
}
void test_one_thousand_and_one_milliseconds_to_timeval() {
size_t milliseconds = 1001;
struct timeval t = milliseconds_to_timeval(milliseconds);
expect(t.tv_sec == 1 && t.tv_usec == 1000);
}
int main() {
add_test(test_zero_milliseconds_to_timeval);
add_test(test_one_millisecond_to_timeval);
add_test(test_one_thousand_milliseconds_to_timeval);
add_test(test_one_thousand_and_one_milliseconds_to_timeval);
return run_tests();
}
# ... rest of the code ...
|
727f221767c662e95585f54e06c0c8b4e4a77d88
|
smartfile/exceptions.py
|
smartfile/exceptions.py
|
from requests.exceptions import ConnectionError
class SmartFileException(Exception):
pass
class SmartFileConnException(SmartFileException):
""" Exception for issues regarding a request. """
def __init__(self, exc, *args, **kwargs):
self.exc = exc
if isinstance(exc, ConnectionError):
self.detail = exc.message.strerror
else:
self.detail = '{0}: {1}'.format(exc.__class__, exc)
super(SmartFileConnException, self).__init__(*args, **kwargs)
def __str__(self):
return self.detail
class SmartFileResponseException(SmartFileException):
""" Exception for issues regarding a response. """
def __init__(self, response, *args, **kwargs):
self.response = response
self.status_code = response.status_code
self.detail = response.json.get('detail', 'Check response for errors')
super(SmartFileResponseException, self).__init__(*args, **kwargs)
def __str__(self):
return 'Response {0}: {1}'.format(self.status_code, self.detail)
|
from requests.exceptions import ConnectionError
class SmartFileException(Exception):
pass
class SmartFileConnException(SmartFileException):
""" Exception for issues regarding a request. """
def __init__(self, exc, *args, **kwargs):
self.exc = exc
if isinstance(exc, ConnectionError):
self.detail = exc.message.strerror
else:
self.detail = u'{0}: {1}'.format(exc.__class__, exc)
super(SmartFileConnException, self).__init__(*args, **kwargs)
def __str__(self):
return self.detail
class SmartFileResponseException(SmartFileException):
""" Exception for issues regarding a response. """
def __init__(self, response, *args, **kwargs):
self.response = response
self.status_code = response.status_code
if not response.json or not 'detail' in response.json:
self.detail = u'Check response for errors'
else:
self.detail = response.json['detail']
super(SmartFileResponseException, self).__init__(*args, **kwargs)
def __str__(self):
return 'Response {0}: {1}'.format(self.status_code, self.detail)
|
Handle responses without JSON or detail field
|
Handle responses without JSON or detail field
Check the response for JSON and a detail field before trying to access
them within SmartFileResponseException. This could occur if the server
returns a 500.
|
Python
|
mit
|
smartfile/client-python
|
python
|
## Code Before:
from requests.exceptions import ConnectionError
class SmartFileException(Exception):
pass
class SmartFileConnException(SmartFileException):
""" Exception for issues regarding a request. """
def __init__(self, exc, *args, **kwargs):
self.exc = exc
if isinstance(exc, ConnectionError):
self.detail = exc.message.strerror
else:
self.detail = '{0}: {1}'.format(exc.__class__, exc)
super(SmartFileConnException, self).__init__(*args, **kwargs)
def __str__(self):
return self.detail
class SmartFileResponseException(SmartFileException):
""" Exception for issues regarding a response. """
def __init__(self, response, *args, **kwargs):
self.response = response
self.status_code = response.status_code
self.detail = response.json.get('detail', 'Check response for errors')
super(SmartFileResponseException, self).__init__(*args, **kwargs)
def __str__(self):
return 'Response {0}: {1}'.format(self.status_code, self.detail)
## Instruction:
Handle responses without JSON or detail field
Check the response for JSON and a detail field before trying to access
them within SmartFileResponseException. This could occur if the server
returns a 500.
## Code After:
from requests.exceptions import ConnectionError
class SmartFileException(Exception):
pass
class SmartFileConnException(SmartFileException):
""" Exception for issues regarding a request. """
def __init__(self, exc, *args, **kwargs):
self.exc = exc
if isinstance(exc, ConnectionError):
self.detail = exc.message.strerror
else:
self.detail = u'{0}: {1}'.format(exc.__class__, exc)
super(SmartFileConnException, self).__init__(*args, **kwargs)
def __str__(self):
return self.detail
class SmartFileResponseException(SmartFileException):
""" Exception for issues regarding a response. """
def __init__(self, response, *args, **kwargs):
self.response = response
self.status_code = response.status_code
if not response.json or not 'detail' in response.json:
self.detail = u'Check response for errors'
else:
self.detail = response.json['detail']
super(SmartFileResponseException, self).__init__(*args, **kwargs)
def __str__(self):
return 'Response {0}: {1}'.format(self.status_code, self.detail)
|
...
if isinstance(exc, ConnectionError):
self.detail = exc.message.strerror
else:
self.detail = u'{0}: {1}'.format(exc.__class__, exc)
super(SmartFileConnException, self).__init__(*args, **kwargs)
def __str__(self):
...
def __init__(self, response, *args, **kwargs):
self.response = response
self.status_code = response.status_code
if not response.json or not 'detail' in response.json:
self.detail = u'Check response for errors'
else:
self.detail = response.json['detail']
super(SmartFileResponseException, self).__init__(*args, **kwargs)
def __str__(self):
...
|
0200f506b06b8fce7ae6872910915f1e40ccf1b7
|
framework/include/base/ComputeInitialConditionThread.h
|
framework/include/base/ComputeInitialConditionThread.h
|
/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "ParallelUniqueId.h"
// libmesh
#include "libmesh/elem_range.h"
class FEProblemBase;
class ComputeInitialConditionThread
{
public:
ComputeInitialConditionThread(FEProblemBase & fe_problem);
// Splitting Constructor
ComputeInitialConditionThread(ComputeInitialConditionThread & x, Threads::split split);
void operator() (const ConstElemRange & range);
void join(const ComputeInitialConditionThread & /*y*/);
protected:
FEProblemBase & _fe_problem;
THREAD_ID _tid;
};
#endif //COMPUTEINITIALCONDITIONTHREAD_H
|
/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "MooseTypes.h"
// libmesh
#include "libmesh/elem_range.h"
#include "libmesh/threads.h"
class FEProblemBase;
class ComputeInitialConditionThread
{
public:
ComputeInitialConditionThread(FEProblemBase & fe_problem);
// Splitting Constructor
ComputeInitialConditionThread(ComputeInitialConditionThread & x, Threads::split split);
void operator() (const ConstElemRange & range);
void join(const ComputeInitialConditionThread & /*y*/);
protected:
FEProblemBase & _fe_problem;
THREAD_ID _tid;
};
#endif //COMPUTEINITIALCONDITIONTHREAD_H
|
Include headers we need directly
|
Include headers we need directly
|
C
|
lgpl-2.1
|
backmari/moose,bwspenc/moose,permcody/moose,idaholab/moose,nuclear-wizard/moose,backmari/moose,idaholab/moose,liuwenf/moose,permcody/moose,jessecarterMOOSE/moose,liuwenf/moose,SudiptaBiswas/moose,sapitts/moose,laagesen/moose,andrsd/moose,dschwen/moose,bwspenc/moose,harterj/moose,sapitts/moose,andrsd/moose,liuwenf/moose,lindsayad/moose,idaholab/moose,idaholab/moose,andrsd/moose,harterj/moose,YaqiWang/moose,milljm/moose,sapitts/moose,nuclear-wizard/moose,lindsayad/moose,lindsayad/moose,idaholab/moose,SudiptaBiswas/moose,bwspenc/moose,sapitts/moose,milljm/moose,milljm/moose,jessecarterMOOSE/moose,Chuban/moose,backmari/moose,friedmud/moose,lindsayad/moose,laagesen/moose,laagesen/moose,andrsd/moose,nuclear-wizard/moose,dschwen/moose,permcody/moose,SudiptaBiswas/moose,lindsayad/moose,Chuban/moose,YaqiWang/moose,liuwenf/moose,harterj/moose,dschwen/moose,harterj/moose,friedmud/moose,sapitts/moose,jessecarterMOOSE/moose,milljm/moose,liuwenf/moose,laagesen/moose,yipenggao/moose,bwspenc/moose,YaqiWang/moose,Chuban/moose,permcody/moose,jessecarterMOOSE/moose,harterj/moose,yipenggao/moose,friedmud/moose,SudiptaBiswas/moose,milljm/moose,liuwenf/moose,laagesen/moose,dschwen/moose,nuclear-wizard/moose,andrsd/moose,jessecarterMOOSE/moose,yipenggao/moose,bwspenc/moose,SudiptaBiswas/moose,backmari/moose,Chuban/moose,friedmud/moose,dschwen/moose,yipenggao/moose,YaqiWang/moose
|
c
|
## Code Before:
/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "ParallelUniqueId.h"
// libmesh
#include "libmesh/elem_range.h"
class FEProblemBase;
class ComputeInitialConditionThread
{
public:
ComputeInitialConditionThread(FEProblemBase & fe_problem);
// Splitting Constructor
ComputeInitialConditionThread(ComputeInitialConditionThread & x, Threads::split split);
void operator() (const ConstElemRange & range);
void join(const ComputeInitialConditionThread & /*y*/);
protected:
FEProblemBase & _fe_problem;
THREAD_ID _tid;
};
#endif //COMPUTEINITIALCONDITIONTHREAD_H
## Instruction:
Include headers we need directly
## Code After:
/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "MooseTypes.h"
// libmesh
#include "libmesh/elem_range.h"
#include "libmesh/threads.h"
class FEProblemBase;
class ComputeInitialConditionThread
{
public:
ComputeInitialConditionThread(FEProblemBase & fe_problem);
// Splitting Constructor
ComputeInitialConditionThread(ComputeInitialConditionThread & x, Threads::split split);
void operator() (const ConstElemRange & range);
void join(const ComputeInitialConditionThread & /*y*/);
protected:
FEProblemBase & _fe_problem;
THREAD_ID _tid;
};
#endif //COMPUTEINITIALCONDITIONTHREAD_H
|
# ... existing code ...
#ifndef COMPUTEREINITIALCONDITIONTHREAD_H
#define COMPUTEREINITIALCONDITIONTHREAD_H
#include "MooseTypes.h"
// libmesh
#include "libmesh/elem_range.h"
#include "libmesh/threads.h"
class FEProblemBase;
# ... rest of the code ...
|
f40da1b097d900c0c435d7550e891b0ece99bd91
|
lib/torque_accounting.py
|
lib/torque_accounting.py
|
def parse_line(line):
event = line.split(';')
job_name = event[2]
event_type = event[1]
event_time = event[0]
properties={}
prop_strings = event.split(" ")
for p in prop_strings:
prop=p.split("=")
if len(prop)=2:
properties[prop[0]] = prop[1]
return (job_name, event_type, event_time, properties)
def parse_records(text):
jobs = {}
lines=text.split("\n")
for line in lines:
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
jobs[job_name]['events'] = {}
jobs[job_name]['events'][event_type]=event_time
for p in properties:
jobs[job_name][p]=properties[p]
return jobs
|
def parse_line(line):
event = line.split(';')
job_name = event[2]
event_type = event[1]
event_time = event[0]
properties={}
prop_strings = event.split(" ")
for p in prop_strings:
prop=p.split("=")
if len(prop)=2:
properties[prop[0]] = prop[1]
return (job_name, event_type, event_time, properties)
def parse_records(text):
jobs = {}
lines=text.split("\n")
for line in lines:
if len(line)==0:
continue
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
jobs[job_name]['events'] = {}
jobs[job_name]['events'][event_type]=event_time
for p in properties:
jobs[job_name][p]=properties[p]
return jobs
def parse_files(filenames):
texts=[]
for fname in filenames:
f = open(fname,'r')
texts.append(f.read())
f.close
return parse_records("\n".join(texts))
|
Add parse_files method to loop through a bunch of files
|
Add parse_files method to loop through a bunch of files
|
Python
|
mit
|
ajdecon/torque_qhistory,ajdecon/torque_qhistory
|
python
|
## Code Before:
def parse_line(line):
event = line.split(';')
job_name = event[2]
event_type = event[1]
event_time = event[0]
properties={}
prop_strings = event.split(" ")
for p in prop_strings:
prop=p.split("=")
if len(prop)=2:
properties[prop[0]] = prop[1]
return (job_name, event_type, event_time, properties)
def parse_records(text):
jobs = {}
lines=text.split("\n")
for line in lines:
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
jobs[job_name]['events'] = {}
jobs[job_name]['events'][event_type]=event_time
for p in properties:
jobs[job_name][p]=properties[p]
return jobs
## Instruction:
Add parse_files method to loop through a bunch of files
## Code After:
def parse_line(line):
event = line.split(';')
job_name = event[2]
event_type = event[1]
event_time = event[0]
properties={}
prop_strings = event.split(" ")
for p in prop_strings:
prop=p.split("=")
if len(prop)=2:
properties[prop[0]] = prop[1]
return (job_name, event_type, event_time, properties)
def parse_records(text):
jobs = {}
lines=text.split("\n")
for line in lines:
if len(line)==0:
continue
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
jobs[job_name]['events'] = {}
jobs[job_name]['events'][event_type]=event_time
for p in properties:
jobs[job_name][p]=properties[p]
return jobs
def parse_files(filenames):
texts=[]
for fname in filenames:
f = open(fname,'r')
texts.append(f.read())
f.close
return parse_records("\n".join(texts))
|
# ... existing code ...
lines=text.split("\n")
for line in lines:
if len(line)==0:
continue
job_name, event_type, event_time, properties = parse_line(line)
if not job_name in jobs:
jobs[job_name] = {}
# ... modified code ...
jobs[job_name][p]=properties[p]
return jobs
def parse_files(filenames):
texts=[]
for fname in filenames:
f = open(fname,'r')
texts.append(f.read())
f.close
return parse_records("\n".join(texts))
# ... rest of the code ...
|
bd18f52c2ee41bbc9c33a3b98fdac1ce2ea18ea7
|
rest/urls.py
|
rest/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/[0-9a-fA-F\-]+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
Revert "Handle second service UUID better."
|
Revert "Handle second service UUID better."
Realized I actually made the url parsing worse, this isn't what we wanted.
|
Python
|
apache-2.0
|
CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project
|
python
|
## Code Before:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/[0-9a-fA-F\-]+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
## Instruction:
Revert "Handle second service UUID better."
Realized I actually made the url parsing worse, this isn't what we wanted.
## Code After:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
...
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
...
|
d9d0e8817074851d9113c67012908d9c40447615
|
web/src/main/java/com/eyekabob/Main.java
|
web/src/main/java/com/eyekabob/Main.java
|
package com.eyekabob;
import org.json.JSONException;
import org.json.JSONObject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
/**
* © Copyright 2012 Brien Coffield
* All rights reserved
* <p/>
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
@Path("/main")
public class Main {
@GET
@Produces("application/json")
public String helloWorld() throws JSONException {
JSONObject response = new JSONObject();
response.put("response", "Hello, world!");
return response.toString();
}
}
|
package com.eyekabob;
import org.json.JSONException;
import org.json.JSONObject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
/**
* © Copyright 2012 Brien Coffield
* All rights reserved
* <p/>
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
@Path("/main")
public class Main {
@GET
@Produces("application/json")
public String helloWorld(@QueryParam("param") String param) throws JSONException {
JSONObject response = new JSONObject();
response.put("response", "Hello, world! " + param);
return response.toString();
}
}
|
Add query param to the hello world web service
|
Add query param to the hello world web service
|
Java
|
apache-2.0
|
coffbr01/eyekabob
|
java
|
## Code Before:
package com.eyekabob;
import org.json.JSONException;
import org.json.JSONObject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
/**
* © Copyright 2012 Brien Coffield
* All rights reserved
* <p/>
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
@Path("/main")
public class Main {
@GET
@Produces("application/json")
public String helloWorld() throws JSONException {
JSONObject response = new JSONObject();
response.put("response", "Hello, world!");
return response.toString();
}
}
## Instruction:
Add query param to the hello world web service
## Code After:
package com.eyekabob;
import org.json.JSONException;
import org.json.JSONObject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
/**
* © Copyright 2012 Brien Coffield
* All rights reserved
* <p/>
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
@Path("/main")
public class Main {
@GET
@Produces("application/json")
public String helloWorld(@QueryParam("param") String param) throws JSONException {
JSONObject response = new JSONObject();
response.put("response", "Hello, world! " + param);
return response.toString();
}
}
|
# ... existing code ...
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
/**
* © Copyright 2012 Brien Coffield
# ... modified code ...
public class Main {
@GET
@Produces("application/json")
public String helloWorld(@QueryParam("param") String param) throws JSONException {
JSONObject response = new JSONObject();
response.put("response", "Hello, world! " + param);
return response.toString();
}
}
# ... rest of the code ...
|
ecbabd56f6afc4474402d3293bf11e3b6eb2e8f4
|
server/__init__.py
|
server/__init__.py
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import(
genRESTEndPointsForSlicerCLIsInSubDirs,
genRESTEndPointsForSlicerCLIsInDocker
)
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
genRESTEndPointsForSlicerCLIsInDocker(info,
'HistomicsTK',
'dsarchive/histomicstk')
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import(
genRESTEndPointsForSlicerCLIsInSubDirs,
genRESTEndPointsForSlicerCLIsInDocker
)
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
_ = genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
|
Switch to generating REST end points from docker image
|
Switch to generating REST end points from docker image
|
Python
|
apache-2.0
|
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
|
python
|
## Code Before:
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import(
genRESTEndPointsForSlicerCLIsInSubDirs,
genRESTEndPointsForSlicerCLIsInDocker
)
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
genRESTEndPointsForSlicerCLIsInDocker(info,
'HistomicsTK',
'dsarchive/histomicstk')
## Instruction:
Switch to generating REST end points from docker image
## Code After:
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import(
genRESTEndPointsForSlicerCLIsInSubDirs,
genRESTEndPointsForSlicerCLIsInDocker
)
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
_ = genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
|
# ... existing code ...
# cliRootDir = os.path.dirname(__file__)
# genRESTEndPointsForSlicerCLIsInSubDirs(info, 'HistomicsTK', cliRootDir)
_ = genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
# ... rest of the code ...
|
e45fff968f37f558a49cf82b582d1f514a97b5af
|
tests/test_pool.py
|
tests/test_pool.py
|
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
def test_select(self):
s = RoundRobinSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
r = s.select([1, 2, 3])
self.assertEqual(3, r)
r = s.select([1, 2, 3])
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
|
import asyncio
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector, ConnectionPool
from aioes.transport import Endpoint
from aioes.connection import Connection
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
def test_select(self):
s = RoundRobinSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
r = s.select([1, 2, 3])
self.assertEqual(3, r)
r = s.select([1, 2, 3])
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestConnectionPool(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def make_pool(self):
conn = Connection(Endpoint('localhost', 9200), loop=self.loop)
pool = ConnectionPool([conn], loop=self.loop)
self.addCleanup(pool.close)
return pool
def test_ctor(self):
pool = self.make_pool()
self.assertAlmostEqual(60, pool.dead_timeout)
self.assertAlmostEqual(5, pool.timeout_cutoff)
|
Add more tests for pool
|
Add more tests for pool
|
Python
|
apache-2.0
|
aio-libs/aioes
|
python
|
## Code Before:
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
def test_select(self):
s = RoundRobinSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
r = s.select([1, 2, 3])
self.assertEqual(3, r)
r = s.select([1, 2, 3])
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
## Instruction:
Add more tests for pool
## Code After:
import asyncio
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector, ConnectionPool
from aioes.transport import Endpoint
from aioes.connection import Connection
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
def test_select(self):
s = RoundRobinSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
r = s.select([1, 2, 3])
self.assertEqual(3, r)
r = s.select([1, 2, 3])
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestConnectionPool(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def make_pool(self):
conn = Connection(Endpoint('localhost', 9200), loop=self.loop)
pool = ConnectionPool([conn], loop=self.loop)
self.addCleanup(pool.close)
return pool
def test_ctor(self):
pool = self.make_pool()
self.assertAlmostEqual(60, pool.dead_timeout)
self.assertAlmostEqual(5, pool.timeout_cutoff)
|
// ... existing code ...
import asyncio
import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector, ConnectionPool
from aioes.transport import Endpoint
from aioes.connection import Connection
class TestRandomSelector(unittest.TestCase):
// ... modified code ...
self.assertEqual(1, r)
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestConnectionPool(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
def tearDown(self):
self.loop.close()
def make_pool(self):
conn = Connection(Endpoint('localhost', 9200), loop=self.loop)
pool = ConnectionPool([conn], loop=self.loop)
self.addCleanup(pool.close)
return pool
def test_ctor(self):
pool = self.make_pool()
self.assertAlmostEqual(60, pool.dead_timeout)
self.assertAlmostEqual(5, pool.timeout_cutoff)
// ... rest of the code ...
|
8c81f606499ebadddaf2a362bc8845eb69a21e8d
|
lds-gen.py
|
lds-gen.py
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('};')
|
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
|
Stop exporting internal symbols from the shared libraries.
|
Stop exporting internal symbols from the shared libraries.
|
Python
|
bsd-2-clause
|
orthrus/librdkafka,klonikar/librdkafka,klonikar/librdkafka,senior7515/librdkafka,janmejay/librdkafka,senior7515/librdkafka,orthrus/librdkafka,klonikar/librdkafka,janmejay/librdkafka,orthrus/librdkafka,janmejay/librdkafka,senior7515/librdkafka,senior7515/librdkafka,klonikar/librdkafka,orthrus/librdkafka,janmejay/librdkafka
|
python
|
## Code Before:
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('};')
## Instruction:
Stop exporting internal symbols from the shared libraries.
## Code After:
import sys
import re
if __name__ == '__main__':
funcs = list()
last_line = ''
for line in sys.stdin:
m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
if m:
sym = m.group(2)
m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
if not m2:
funcs.append(sym)
last_line = ''
else:
last_line = line
print('# Automatically generated by lds-gen.py - DO NOT EDIT')
print('{\n global:')
if len(funcs) == 0:
print(' *;')
else:
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
|
// ... existing code ...
for f in sorted(funcs):
print(' %s;' % f)
print('local:\n *;')
print('};')
// ... rest of the code ...
|
d66a958cd881ee4477e62ee36bc7febb48dcd70d
|
app/src/main/java/com/x1unix/avi/dashboard/FavoritesTabFragment.java
|
app/src/main/java/com/x1unix/avi/dashboard/FavoritesTabFragment.java
|
package com.x1unix.avi.dashboard;
import com.x1unix.avi.R;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.x1unix.avi.model.KPMovie;
import com.x1unix.avi.storage.MoviesRepository;
import java.util.ArrayList;
public class FavoritesTabFragment extends DashboardTabFragment {
@Override
protected ArrayList<KPMovie> getContentItems() {
return moviesRepository.getFavoritesMovies();
}
@Override
protected int getTabView() {
return R.layout.tab_favorites;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return super.onCreateView(inflater, container, savedInstanceState);
}
public static DashboardTabFragment getInstance(MoviesRepository m) {
return (new FavoritesTabFragment()).setMoviesRepository(m);
}
}
|
package com.x1unix.avi.dashboard;
import com.x1unix.avi.R;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.x1unix.avi.model.KPMovie;
import com.x1unix.avi.storage.MoviesRepository;
import java.util.ArrayList;
public class FavoritesTabFragment extends DashboardTabFragment {
@Override
protected ArrayList<KPMovie> getContentItems() {
return moviesRepository.getFavoritesMovies();
}
@Override
protected int getTabView() {
return R.layout.tab_favorites;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
protected String getPlaylistGenitivusName() {
return getResources().getString(R.string.playlist_genitivus_favorites);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return super.onCreateView(inflater, container, savedInstanceState);
}
public static DashboardTabFragment getInstance(MoviesRepository m) {
return (new FavoritesTabFragment()).setMoviesRepository(m);
}
}
|
Set name for favs tab
|
Set name for favs tab
|
Java
|
bsd-3-clause
|
odin3/Avi,odin3/Avi,odin3/Avi
|
java
|
## Code Before:
package com.x1unix.avi.dashboard;
import com.x1unix.avi.R;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.x1unix.avi.model.KPMovie;
import com.x1unix.avi.storage.MoviesRepository;
import java.util.ArrayList;
public class FavoritesTabFragment extends DashboardTabFragment {
@Override
protected ArrayList<KPMovie> getContentItems() {
return moviesRepository.getFavoritesMovies();
}
@Override
protected int getTabView() {
return R.layout.tab_favorites;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return super.onCreateView(inflater, container, savedInstanceState);
}
public static DashboardTabFragment getInstance(MoviesRepository m) {
return (new FavoritesTabFragment()).setMoviesRepository(m);
}
}
## Instruction:
Set name for favs tab
## Code After:
package com.x1unix.avi.dashboard;
import com.x1unix.avi.R;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.x1unix.avi.model.KPMovie;
import com.x1unix.avi.storage.MoviesRepository;
import java.util.ArrayList;
public class FavoritesTabFragment extends DashboardTabFragment {
@Override
protected ArrayList<KPMovie> getContentItems() {
return moviesRepository.getFavoritesMovies();
}
@Override
protected int getTabView() {
return R.layout.tab_favorites;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
protected String getPlaylistGenitivusName() {
return getResources().getString(R.string.playlist_genitivus_favorites);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return super.onCreateView(inflater, container, savedInstanceState);
}
public static DashboardTabFragment getInstance(MoviesRepository m) {
return (new FavoritesTabFragment()).setMoviesRepository(m);
}
}
|
// ... existing code ...
}
@Override
protected String getPlaylistGenitivusName() {
return getResources().getString(R.string.playlist_genitivus_favorites);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return super.onCreateView(inflater, container, savedInstanceState);
// ... rest of the code ...
|
3774f234f0d3eaf08bf3b6ed713a949346e69fea
|
setup.py
|
setup.py
|
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='[email protected]',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
|
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='[email protected]',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
Update license and add networkx dependency
|
Update license and add networkx dependency
|
Python
|
bsd-3-clause
|
mininet/mininet,mininet/mininet,mininet/mininet
|
python
|
## Code Before:
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='[email protected]',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
## Instruction:
Update license and add networkx dependency
## Code After:
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='[email protected]',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
// ... existing code ...
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
// ... rest of the code ...
|
4283aa4bc2c831dc99968929c24b11496078fd26
|
nightreads/emails/admin.py
|
nightreads/emails/admin.py
|
from django.contrib import admin
from .models import Email
class EmailAdmin(admin.ModelAdmin):
exclude = ('targetted_users', 'is_sent')
admin.site.register(Email, EmailAdmin)
|
from django.contrib import admin
from .models import Email
class EmailAdmin(admin.ModelAdmin):
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message', 'post'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
admin.site.register(Email, EmailAdmin)
|
Customize how fields on Email are displayed while adding & editing
|
Customize how fields on Email are displayed while adding & editing
- Hide fields `targetted_users`, `is_sent` while adding a new
Email object
- Display all fields but make `targetted_users`, `is_sent` fields
read only when editing an Email object
|
Python
|
mit
|
avinassh/nightreads,avinassh/nightreads
|
python
|
## Code Before:
from django.contrib import admin
from .models import Email
class EmailAdmin(admin.ModelAdmin):
exclude = ('targetted_users', 'is_sent')
admin.site.register(Email, EmailAdmin)
## Instruction:
Customize how fields on Email are displayed while adding & editing
- Hide fields `targetted_users`, `is_sent` while adding a new
Email object
- Display all fields but make `targetted_users`, `is_sent` fields
read only when editing an Email object
## Code After:
from django.contrib import admin
from .models import Email
class EmailAdmin(admin.ModelAdmin):
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message', 'post'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
admin.site.register(Email, EmailAdmin)
|
# ... existing code ...
class EmailAdmin(admin.ModelAdmin):
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message', 'post'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
admin.site.register(Email, EmailAdmin)
# ... rest of the code ...
|
57461a7ebd35544c506e6b5021ff11c3b6dd943e
|
normandy/studies/models.py
|
normandy/studies/models.py
|
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
class Meta:
ordering = ('-id',)
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
Add ordering to Extension model
|
Add ordering to Extension model
|
Python
|
mpl-2.0
|
mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy
|
python
|
## Code Before:
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
## Instruction:
Add ordering to Extension model
## Code After:
from django.db import models
from django.template.loader import render_to_string
from normandy.recipes.models import Recipe
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
class Meta:
ordering = ('-id',)
@property
def recipes_used_by(self):
"""Set of enabled recipes that are using this extension."""
return Recipe.objects.filter(
latest_revision__arguments_json__contains=self.xpi.url,
)
def recipes_used_by_html(self):
return render_to_string('admin/field_recipe_list.html', {
'recipes': self.recipes_used_by.order_by('latest_revision__name'),
})
recipes_used_by_html.short_description = 'Used in Recipes'
|
// ... existing code ...
class Extension(models.Model):
name = models.CharField(max_length=255)
xpi = models.FileField(upload_to='extensions')
class Meta:
ordering = ('-id',)
@property
def recipes_used_by(self):
// ... rest of the code ...
|
cdbb42aa9c62a05ff2f8897de513db987533187c
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
DESCR = ('This package provides a Deform autocomplete widget that '
'stores a value that may be different from the one shown '
'to the user.')
requires = ('deform',
)
setup(name='deform_ext_autocomplete',
version='0.1',
description=DESCR,
long_description=README + '\n\n' + CHANGES,
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2'),
author='Damien Baty',
author_email='[email protected]',
url='http://readthedocs.org/projects/deform_ext_autocomplete/',
keywords='deform form autocomplete',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
test_suite='deform_ext_autocomplete',
)
|
import os
from setuptools import find_packages
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
DESCR = ('This package provides a Deform autocomplete widget that '
'stores a value that may be different from the one shown '
'to the user.')
requires = ('deform',
)
setup(name='deform_ext_autocomplete',
version='0.1',
description=DESCR,
long_description=README + '\n\n' + CHANGES,
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2'),
author='Damien Baty',
author_email='[email protected]',
url='http://readthedocs.org/projects/deform_ext_autocomplete/',
keywords='deform form autocomplete',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
test_suite='deform_ext_autocomplete',
)
|
Mark as compatible with Python 3 with the proper classifier.
|
Mark as compatible with Python 3 with the proper classifier.
|
Python
|
bsd-3-clause
|
dbaty/deform_ext_autocomplete,dbaty/deform_ext_autocomplete
|
python
|
## Code Before:
import os
from setuptools import find_packages
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
DESCR = ('This package provides a Deform autocomplete widget that '
'stores a value that may be different from the one shown '
'to the user.')
requires = ('deform',
)
setup(name='deform_ext_autocomplete',
version='0.1',
description=DESCR,
long_description=README + '\n\n' + CHANGES,
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2'),
author='Damien Baty',
author_email='[email protected]',
url='http://readthedocs.org/projects/deform_ext_autocomplete/',
keywords='deform form autocomplete',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
test_suite='deform_ext_autocomplete',
)
## Instruction:
Mark as compatible with Python 3 with the proper classifier.
## Code After:
import os
from setuptools import find_packages
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
DESCR = ('This package provides a Deform autocomplete widget that '
'stores a value that may be different from the one shown '
'to the user.')
requires = ('deform',
)
setup(name='deform_ext_autocomplete',
version='0.1',
description=DESCR,
long_description=README + '\n\n' + CHANGES,
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2'),
author='Damien Baty',
author_email='[email protected]',
url='http://readthedocs.org/projects/deform_ext_autocomplete/',
keywords='deform form autocomplete',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
test_suite='deform_ext_autocomplete',
)
|
...
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2'),
author='Damien Baty',
author_email='[email protected]',
...
|
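The only change in the record above is the extra 'Programming Language :: Python :: 3' trove classifier, which advertises general Python 3 support on PyPI alongside the version-specific entries. A stripped-down sketch with a placeholder package name, not the package from the record:

from setuptools import setup

setup(
    name='example-package',  # placeholder name
    version='0.1',
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
    ],
)

Running `python setup.py --classifiers` prints the declared list, which is a quick way to check the metadata before uploading.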
553b2e10575dca72a1a273ca50a885a0e0191603
|
elixir/main.c
|
elixir/main.c
|
// Regular C libs
#include <stdio.h>
// Elixir libs -- clang doesn't know where the hell this is
#include "erl_nif.h"
// Needs to figure out what ERL_NIF_TERM means
static ERL_NIF_TERM hello(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[]) {
// We need some variables
char *s;
int i, num;
// Grab the arguments from Elixir
enif_get_string(env, argv[0], s, 1024, ERL_NIF_LATIN1);
enif_get_int(env, argv[1], &num);
for (i = 0; i < num; i++) {
printf("Hello, %s!\n", s);
}
// Fancy version of return 0
return enif_make_int(env, 0);
}
static ErlNifFunc funcs[] = {
{"hello", 2, hello}
};
ERL_NIF_INIT(Elixir.Hello, funcs, NULL, NULL, NULL, NULL)
|
// Regular C libs
#include <stdio.h>
// Elixir libs
#include "erl_nif.h"
#define MAXLEN 1024
// Needs to figure out what ERL_NIF_TERM means
static ERL_NIF_TERM hello(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[]) {
// We need some variables
char buf[MAXLEN];
int i, num;
// Grab the arguments from Elixir
enif_get_string(env, argv[0], buf, MAXLEN, ERL_NIF_LATIN1);
enif_get_int(env, argv[1], &num);
for (i = 0; i < num; i++) {
printf("Hello, %s!\n", buf);
}
// Fancy version of return 0
return enif_make_int(env, 0);
}
// Map Elixir functions to C functions
static ErlNifFunc funcs[] = {
// Function name in Elixir, number of arguments, C function
{"hello", 2, hello}
};
ERL_NIF_INIT(Elixir.Hello, funcs, NULL, NULL, NULL, NULL)
|
Use buf as var name
|
Use buf as var name
|
C
|
unlicense
|
bentranter/binding,bentranter/binding,bentranter/binding
|
c
|
## Code Before:
// Regular C libs
#include <stdio.h>
// Elixir libs -- clang doesn't know where the hell this is
#include "erl_nif.h"
// Needs to figure out what ERL_NIF_TERM means
static ERL_NIF_TERM hello(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[]) {
// We need some variables
char *s;
int i, num;
// Grab the arguments from Elixir
enif_get_string(env, argv[0], s, 1024, ERL_NIF_LATIN1);
enif_get_int(env, argv[1], &num);
for (i = 0; i < num; i++) {
printf("Hello, %s!\n", s);
}
// Fancy version of return 0
return enif_make_int(env, 0);
}
static ErlNifFunc funcs[] = {
{"hello", 2, hello}
};
ERL_NIF_INIT(Elixir.Hello, funcs, NULL, NULL, NULL, NULL)
## Instruction:
Use buf as var name
## Code After:
// Regular C libs
#include <stdio.h>
// Elixir libs
#include "erl_nif.h"
#define MAXLEN 1024
// Needs to figure out what ERL_NIF_TERM means
static ERL_NIF_TERM hello(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[]) {
// We need some variables
char buf[MAXLEN];
int i, num;
// Grab the arguments from Elixir
enif_get_string(env, argv[0], buf, MAXLEN, ERL_NIF_LATIN1);
enif_get_int(env, argv[1], &num);
for (i = 0; i < num; i++) {
printf("Hello, %s!\n", buf);
}
// Fancy version of return 0
return enif_make_int(env, 0);
}
// Map Elixir functions to C functions
static ErlNifFunc funcs[] = {
// Function name in Elixir, number of arguments, C function
{"hello", 2, hello}
};
ERL_NIF_INIT(Elixir.Hello, funcs, NULL, NULL, NULL, NULL)
|
...
// Regular C libs
#include <stdio.h>
// Elixir libs
#include "erl_nif.h"
#define MAXLEN 1024
// Needs to figure out what ERL_NIF_TERM means
static ERL_NIF_TERM hello(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[]) {
// We need some variables
char buf[MAXLEN];
int i, num;
// Grab the arguments from Elixir
enif_get_string(env, argv[0], buf, MAXLEN, ERL_NIF_LATIN1);
enif_get_int(env, argv[1], &num);
for (i = 0; i < num; i++) {
printf("Hello, %s!\n", buf);
}
// Fancy version of return 0
...
return enif_make_int(env, 0);
}
// Map Elixir functions to C functions
static ErlNifFunc funcs[] = {
// Function name in Elixir, number of arguments, C function
{"hello", 2, hello}
};
...
|
634ae735db61ebb211b9e3159ca4dac7861e5553
|
cluster/update_jobs.py
|
cluster/update_jobs.py
|
from django.contrib.auth.models import User
from models import Job
from interface import get_all_jobs
def run_all():
for user in User.objects.all():
creds = user.credentials.all()
for i, cluster in enumerate(get_all_jobs(user)):
cred = creds[i]
jobs = {}
for job in cluster["jobs"]:
status = job[-1]
job_id = job[0]
if status in jobs:
jobs[status].append(job_id)
else:
jobs[status] = [job_id]
Job.update_states(cred, jobs)
if __name__ == "__main__":
run_all()
|
from django.contrib.auth.models import User
from models import Job
from interface import get_all_jobs
def run_all():
for user in User.objects.all():
creds = user.credentials.all()
for i, cluster in enumerate(get_all_jobs(user)):
cred = creds[i]
jobs = {}
jobids = []
for job in cluster["jobs"]:
status = job[-1]
job_id = job[0]
jobids.append(job_id)
if status in jobs:
jobs[status].append(job_id)
else:
jobs[status] = [job_id]
running = Job.get_running_jobs(credential=cred)
unknown = running.exclude(jobid__in=set(jobids)).values_list('jobid', flat=True)
if unknown:
jobs[Job.UNKNOWN] = list(unknown)
Job.update_states(cred, jobs)
if __name__ == "__main__":
run_all()
|
Add updating of jobs if their state is now unknown
|
Add updating of jobs if their state is now unknown
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
python
|
## Code Before:
from django.contrib.auth.models import User
from models import Job
from interface import get_all_jobs
def run_all():
for user in User.objects.all():
creds = user.credentials.all()
for i, cluster in enumerate(get_all_jobs(user)):
cred = creds[i]
jobs = {}
for job in cluster["jobs"]:
status = job[-1]
job_id = job[0]
if status in jobs:
jobs[status].append(job_id)
else:
jobs[status] = [job_id]
Job.update_states(cred, jobs)
if __name__ == "__main__":
run_all()
## Instruction:
Add updating of jobs if their state is now unknown
## Code After:
from django.contrib.auth.models import User
from models import Job
from interface import get_all_jobs
def run_all():
for user in User.objects.all():
creds = user.credentials.all()
for i, cluster in enumerate(get_all_jobs(user)):
cred = creds[i]
jobs = {}
jobids = []
for job in cluster["jobs"]:
status = job[-1]
job_id = job[0]
jobids.append(job_id)
if status in jobs:
jobs[status].append(job_id)
else:
jobs[status] = [job_id]
running = Job.get_running_jobs(credential=cred)
unknown = running.exclude(jobid__in=set(jobids)).values_list('jobid', flat=True)
if unknown:
jobs[Job.UNKNOWN] = list(unknown)
Job.update_states(cred, jobs)
if __name__ == "__main__":
run_all()
|
// ... existing code ...
for i, cluster in enumerate(get_all_jobs(user)):
cred = creds[i]
jobs = {}
jobids = []
for job in cluster["jobs"]:
status = job[-1]
job_id = job[0]
jobids.append(job_id)
if status in jobs:
jobs[status].append(job_id)
else:
jobs[status] = [job_id]
running = Job.get_running_jobs(credential=cred)
unknown = running.exclude(jobid__in=set(jobids)).values_list('jobid', flat=True)
if unknown:
jobs[Job.UNKNOWN] = list(unknown)
Job.update_states(cred, jobs)
// ... rest of the code ...
|
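The new lines in the record above boil down to a set difference: job ids the database still considers running, minus the ids the cluster actually reported, get marked UNKNOWN. A framework-free sketch of that logic with illustrative names and data, no Django involved:

UNKNOWN = 'unknown'

def mark_unknown(running_in_db, reported_ids, jobs_by_status):
    # Anything we think is running but the cluster no longer reports is unknown.
    reported = set(reported_ids)
    missing = [job_id for job_id in running_in_db if job_id not in reported]
    if missing:
        jobs_by_status[UNKNOWN] = missing
    return jobs_by_status

print(mark_unknown(['11', '12', '13'], ['11', '13'], {}))  # {'unknown': ['12']}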
1421866ac3c4e4f1f09d17019d058aa903597df5
|
modules/menus_reader.py
|
modules/menus_reader.py
|
from json_reader import *
from config import *
def get_menus_data():
old_data = read_json_from_file(filenames["menus"])
if old_data == None or type(old_data) is not dict: # rewrite old_data and create new recipe dictionary
# initialize new dict
old_data = {}
old_data["menus"] = {}
elif "menus" not in old_data and type(old_data) is dict: # save other data (maybe worthless)
# add new row: recipes
old_data["menus"] = {}
return old_data
def get_menus():
data = get_menus_data()
return data["menus"]
def get_menu(index): #get recipe with spesific index
pass
#return get_menus()[index]
|
from json_reader import *
from config import *
def get_menus_data():
old_data = read_json_from_file(filenames["menus"])
if old_data == None or type(old_data) is not dict: # rewrite old_data and create new recipe dictionary
# initialize new dict
old_data = {}
old_data["menus"] = {}
elif "menus" not in old_data and type(old_data) is dict: # save other data (maybe worthless)
# add new row: recipes
old_data["menus"] = {}
return old_data
def get_menus():
data = get_menus_data()
return data["menus"]
def get_menu(index): #get recipe with spesific index
return get_menus()[index]
def is_week_menu_created(week):
return week in get_menus() # True/False
|
Add new feature: find out whether the current week's menu has already been created
Add new feature: find out whether the current week's menu has already been created
|
Add new feature: find out whether the current week's menu has already been created
|
Python
|
mit
|
Jntz/RuokalistaCommandLine
|
python
|
## Code Before:
from json_reader import *
from config import *
def get_menus_data():
old_data = read_json_from_file(filenames["menus"])
if old_data == None or type(old_data) is not dict: # rewrite old_data and create new recipe dictionary
# initialize new dict
old_data = {}
old_data["menus"] = {}
elif "menus" not in old_data and type(old_data) is dict: # save other data (maybe worthless)
# add new row: recipes
old_data["menus"] = {}
return old_data
def get_menus():
data = get_menus_data()
return data["menus"]
def get_menu(index): #get recipe with spesific index
pass
#return get_menus()[index]
## Instruction:
Add new feature: find out whether the current week's menu has already been created
## Code After:
from json_reader import *
from config import *
def get_menus_data():
old_data = read_json_from_file(filenames["menus"])
if old_data == None or type(old_data) is not dict: # rewrite old_data and create new recipe dictionary
# initialize new dict
old_data = {}
old_data["menus"] = {}
elif "menus" not in old_data and type(old_data) is dict: # save other data (maybe worthless)
# add new row: recipes
old_data["menus"] = {}
return old_data
def get_menus():
data = get_menus_data()
return data["menus"]
def get_menu(index): #get recipe with spesific index
return get_menus()[index]
def is_week_menu_created(week):
return week in get_menus() # True/False
|
# ... existing code ...
return data["menus"]
def get_menu(index): #get recipe with spesific index
return get_menus()[index]
def is_week_menu_created(week):
return week in get_menus() # True/False
# ... rest of the code ...
|
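is_week_menu_created in the record above is a plain membership test on the menus dictionary. A standalone sketch with made-up week keys and dishes:

menus = {"2017-W05": ["soup", "pasta"]}  # illustrative data, not from the app

def is_week_menu_created(week, menus):
    # True when a menu has already been stored under that week key.
    return week in menus

print(is_week_menu_created("2017-W05", menus))  # True
print(is_week_menu_created("2017-W06", menus))  # False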
f5d00ed283da255b8cd2c82b36e19ab9504a7dd4
|
webmanager/management/commands/create_default_super_user.py
|
webmanager/management/commands/create_default_super_user.py
|
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "[email protected]")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()
|
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "[email protected]")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()
|
Check before creating default super user.
|
Check before creating default super user.
|
Python
|
bsd-3-clause
|
weijia/webmanager,weijia/webmanager,weijia/webmanager
|
python
|
## Code Before:
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
create_admin(super_username,
super_password, "[email protected]")
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()
## Instruction:
Check before creating default super user.
## Code After:
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
from web_manage_tools.user_creator import create_admin
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "[email protected]")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
create_default_admin()
|
...
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from djangoautoconf.local_key_manager import get_local_key, ConfigurableAttributeGetter
...
def create_default_admin():
super_username = get_local_key("admin_account.admin_username", "webmanager.keys_default")
super_password = get_local_key("admin_account.admin_password", "webmanager.keys_default")
if not User.objects.filter(username=super_username).exists:
create_admin(super_username, super_password, "[email protected]")
print "default admin already created"
else:
print "default admin created"
class Command(BaseCommand):
...
|
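The intent of the record above is an idempotent creation step: only create the default superuser when it does not exist yet. Note that, as written, the guard tests the bound method QuerySet.exists itself (always truthy) rather than its result, so the creation branch never runs. A hedged sketch of the intended idiom, assuming Django's stock User model inside a configured project; the helper name is illustrative:

from django.contrib.auth.models import User

def ensure_admin(username, password, email):
    # Only create the account on first run; later runs are a no-op.
    if User.objects.filter(username=username).exists():
        print("default admin already created")
    else:
        User.objects.create_superuser(username=username, email=email, password=password)
        print("default admin created")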
90eb1118c69a1b9e9785145c59a98d7c48613650
|
nlppln/commands/ls_chunk.py
|
nlppln/commands/ls_chunk.py
|
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
Make name an argument instead of an option
|
Make name an argument instead of an option
It is required to specify a chunk name.
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
python
|
## Code Before:
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.option('--name', '-n')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
## Instruction:
Make name an argument instead of an option
It is required to specify a chunk name.
## Code After:
import click
import os
import json
from nlppln.utils import cwl_file
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
files_out = [cwl_file(os.path.abspath(os.path.join(in_dir, f)))
for f in files]
stdout_text = click.get_text_stream('stdout')
stdout_text.write(json.dumps({'out_files': files_out}))
if __name__ == '__main__':
ls_chunk()
|
...
@click.command()
@click.argument('in_dir', type=click.Path(exists=True))
@click.argument('chunks', type=click.File(encoding='utf-8'))
@click.argument('name')
def ls_chunk(in_dir, chunks, name):
div = json.load(chunks)
files = div.get(name, [])
...
|
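The record above swaps a click option for a positional argument, which makes the chunk name mandatory. A small standalone sketch of the difference (requires the click package; names are illustrative):

import click

@click.command()
@click.argument('name')                          # positional and required
@click.option('--greeting', '-g', default='Hi')  # named and optional
def greet(name, greeting):
    click.echo('{0}, {1}!'.format(greeting, name))

if __name__ == '__main__':
    greet()

Invoking the script without the name now fails with a usage error instead of silently passing None, which is the behaviour the change is after.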
6f83b42ae9aaf9cd23bc8d15b66157a75bbc3aed
|
util/createCollector.py
|
util/createCollector.py
|
import os
import sys
import subprocesses
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
sigCacheDir = os.path.join(subprocesses.normExpUserPath("~"), "fuzzsigcache")
if not os.path.exists(sigCacheDir):
os.mkdir(sigCacheDir)
collector = Collector(tool=tool, sigCacheDir=sigCacheDir)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
import os
import sys
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
collector = Collector(tool=tool)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
Use the signature (cache) directory specified in .fuzzmanagerconf
|
Use the signature (cache) directory specified in .fuzzmanagerconf
|
Python
|
mpl-2.0
|
nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,nth10sd/funfuzz
|
python
|
## Code Before:
import os
import sys
import subprocesses
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
sigCacheDir = os.path.join(subprocesses.normExpUserPath("~"), "fuzzsigcache")
if not os.path.exists(sigCacheDir):
os.mkdir(sigCacheDir)
collector = Collector(tool=tool, sigCacheDir=sigCacheDir)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
## Instruction:
Use the signature (cache) directory specified in .fuzzmanagerconf
## Code After:
import os
import sys
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
collector = Collector(tool=tool)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
// ... existing code ...
import os
import sys
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
// ... modified code ...
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
collector = Collector(tool=tool)
return collector
// ... rest of the code ...
|
6888f7293c815a4cd4c313c1a9bf57cc95f7d043
|
Pod/Classes/MapperMacros/VOKManagedObjectMapperMacros.h
|
Pod/Classes/MapperMacros/VOKManagedObjectMapperMacros.h
|
//
// VOKManagedObjectMapperMacros.h
// Vokoder
//
// Copyright © 2016 Vokal.
//
#ifndef VOKManagedObjectMapperMacros_h
#define VOKManagedObjectMapperMacros_h
#import "VOKManagedObjectMap.h"
#import <VOKKeyPathHelper.h>
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol.
* @param klass The class on which the local selector symbol is defined.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalClassProperty
# define VOKMapForeignToLocalClassProperty(inputKeyPath, klass, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForInstanceOf(klass, coreDataSelectorSymbol)]
#endif
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol on the class of self.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalForSelf
# define VOKMapForeignToLocalForSelf(inputKeyPath, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForSelf(coreDataSelectorSymbol)]
#endif
#endif /* VOKManagedObjectMapperMacros_h */
|
//
// VOKManagedObjectMapperMacros.h
// Vokoder
//
// Copyright © 2016 Vokal.
//
#ifndef VOKManagedObjectMapperMacros_h
#define VOKManagedObjectMapperMacros_h
#import "VOKManagedObjectMap.h"
#import <VOKUtilities/VOKKeyPathHelper.h>
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol.
* @param klass The class on which the local selector symbol is defined.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalClassProperty
# define VOKMapForeignToLocalClassProperty(inputKeyPath, klass, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForInstanceOf(klass, coreDataSelectorSymbol)]
#endif
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol on the class of self.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalForSelf
# define VOKMapForeignToLocalForSelf(inputKeyPath, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForSelf(coreDataSelectorSymbol)]
#endif
#endif /* VOKManagedObjectMapperMacros_h */
|
Include VOKKeyPathHelper.h as a module header
|
Include VOKKeyPathHelper.h as a module header
|
C
|
mit
|
vokal/Vokoder,vokal/Vokoder,designatednerd/Vokoder,vokal/Vokoder,designatednerd/Vokoder,brockboland/Vokoder,brockboland/Vokoder,brockboland/Vokoder,designatednerd/Vokoder,vokal/Vokoder,designatednerd/Vokoder,brockboland/Vokoder,brockboland/Vokoder
|
c
|
## Code Before:
//
// VOKManagedObjectMapperMacros.h
// Vokoder
//
// Copyright © 2016 Vokal.
//
#ifndef VOKManagedObjectMapperMacros_h
#define VOKManagedObjectMapperMacros_h
#import "VOKManagedObjectMap.h"
#import <VOKKeyPathHelper.h>
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol.
* @param klass The class on which the local selector symbol is defined.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalClassProperty
# define VOKMapForeignToLocalClassProperty(inputKeyPath, klass, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForInstanceOf(klass, coreDataSelectorSymbol)]
#endif
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol on the class of self.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalForSelf
# define VOKMapForeignToLocalForSelf(inputKeyPath, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForSelf(coreDataSelectorSymbol)]
#endif
#endif /* VOKManagedObjectMapperMacros_h */
## Instruction:
Include VOKKeyPathHelper.h as a module header
## Code After:
//
// VOKManagedObjectMapperMacros.h
// Vokoder
//
// Copyright © 2016 Vokal.
//
#ifndef VOKManagedObjectMapperMacros_h
#define VOKManagedObjectMapperMacros_h
#import "VOKManagedObjectMap.h"
#import <VOKUtilities/VOKKeyPathHelper.h>
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol.
* @param klass The class on which the local selector symbol is defined.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalClassProperty
# define VOKMapForeignToLocalClassProperty(inputKeyPath, klass, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForInstanceOf(klass, coreDataSelectorSymbol)]
#endif
/**
* Creates a map with the default date mapper.
*
* @param inputKeyPath The foreign key to match with the local key.
* @param coreDataSelectorSymbol The local selector symbol on the class of self.
*
* @return A VOKManagedObjectMap
*/
#ifndef VOKMapForeignToLocalForSelf
# define VOKMapForeignToLocalForSelf(inputKeyPath, coreDataSelectorSymbol) \
[VOKManagedObjectMap mapWithForeignKeyPath:inputKeyPath coreDataKey:VOKKeyForSelf(coreDataSelectorSymbol)]
#endif
#endif /* VOKManagedObjectMapperMacros_h */
|
...
#import "VOKManagedObjectMap.h"
#import <VOKUtilities/VOKKeyPathHelper.h>
/**
* Creates a map with the default date mapper.
...
|
f374ac8bb3789ed533a2371eae78a9f98e1def60
|
tests/integrations/current/test_read.py
|
tests/integrations/current/test_read.py
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
|
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
Test file reading for current view
|
Test file reading for current view
|
Python
|
apache-2.0
|
PressLabs/gitfs,ksmaheshkumar/gitfs,bussiere/gitfs,rowhit/gitfs,PressLabs/gitfs
|
python
|
## Code Before:
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
## Instruction:
Test file reading for current view
## Code After:
import os
from tests.integrations.base import BaseTest
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
|
...
class TestReadCurrentView(BaseTest):
def test_listdirs(self):
assert os.listdir("%s/current" % self.mount_path) == ['testing', 'me']
def test_read_from_a_file(self):
with open("%s/current/testing" % self.mount_path) as f:
assert f.read() == "just testing around here\n"
...
|
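The added test in the record above simply opens a file through the mounted view and compares its contents. The same read-and-assert pattern against a temporary file, independent of gitfs:

import os
import tempfile

tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False)
tmp.write("just testing around here\n")
tmp.close()

with open(tmp.name) as f:
    assert f.read() == "just testing around here\n"

os.unlink(tmp.name)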
d06d65cea4ae9efa547af43a551e24a459e0627e
|
tbmodels/_legacy_decode.py
|
tbmodels/_legacy_decode.py
|
from ._tb_model import Model
def _decode(hf):
if 'tb_model' in hf or 'hop' in hf:
return _decode_model(hf)
elif 'val' in hf:
return _decode_val(hf)
elif '0' in hf:
return _decode_iterable(hf)
else:
raise ValueError('File structure not understood.')
def _decode_iterable(hf):
return [_decode(hf[key]) for key in sorted(hf, key=int)]
def _decode_model(hf):
return Model.from_hdf5(hf)
def _decode_val(hf):
return hf['val'].value
|
from ._tb_model import Model
def _decode(hdf5_handle):
"""
Decode the object at the given HDF5 node.
"""
if 'tb_model' in hdf5_handle or 'hop' in hdf5_handle:
return _decode_model(hdf5_handle)
elif 'val' in hdf5_handle:
return _decode_val(hdf5_handle)
elif '0' in hdf5_handle:
return _decode_iterable(hdf5_handle)
else:
raise ValueError('File structure not understood.')
def _decode_iterable(hdf5_handle):
return [_decode(hdf5_handle[key]) for key in sorted(hdf5_handle, key=int)]
def _decode_model(hdf5_handle):
return Model.from_hdf5(hdf5_handle)
def _decode_val(hdf5_handle):
return hdf5_handle['val'].value
|
Fix pylint issues in legacy_decode.
|
Fix pylint issues in legacy_decode.
|
Python
|
apache-2.0
|
Z2PackDev/TBmodels,Z2PackDev/TBmodels
|
python
|
## Code Before:
from ._tb_model import Model
def _decode(hf):
if 'tb_model' in hf or 'hop' in hf:
return _decode_model(hf)
elif 'val' in hf:
return _decode_val(hf)
elif '0' in hf:
return _decode_iterable(hf)
else:
raise ValueError('File structure not understood.')
def _decode_iterable(hf):
return [_decode(hf[key]) for key in sorted(hf, key=int)]
def _decode_model(hf):
return Model.from_hdf5(hf)
def _decode_val(hf):
return hf['val'].value
## Instruction:
Fix pylint issues in legacy_decode.
## Code After:
from ._tb_model import Model
def _decode(hdf5_handle):
"""
Decode the object at the given HDF5 node.
"""
if 'tb_model' in hdf5_handle or 'hop' in hdf5_handle:
return _decode_model(hdf5_handle)
elif 'val' in hdf5_handle:
return _decode_val(hdf5_handle)
elif '0' in hdf5_handle:
return _decode_iterable(hdf5_handle)
else:
raise ValueError('File structure not understood.')
def _decode_iterable(hdf5_handle):
return [_decode(hdf5_handle[key]) for key in sorted(hdf5_handle, key=int)]
def _decode_model(hdf5_handle):
return Model.from_hdf5(hdf5_handle)
def _decode_val(hdf5_handle):
return hdf5_handle['val'].value
|
# ... existing code ...
from ._tb_model import Model
def _decode(hdf5_handle):
"""
Decode the object at the given HDF5 node.
"""
if 'tb_model' in hdf5_handle or 'hop' in hdf5_handle:
return _decode_model(hdf5_handle)
elif 'val' in hdf5_handle:
return _decode_val(hdf5_handle)
elif '0' in hdf5_handle:
return _decode_iterable(hdf5_handle)
else:
raise ValueError('File structure not understood.')
def _decode_iterable(hdf5_handle):
return [_decode(hdf5_handle[key]) for key in sorted(hdf5_handle, key=int)]
def _decode_model(hdf5_handle):
return Model.from_hdf5(hdf5_handle)
def _decode_val(hdf5_handle):
return hdf5_handle['val'].value
# ... rest of the code ...
|
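The decoder in the record above dispatches on which keys are present in the HDF5 group and recurses for list-like groups whose children are named '0', '1', and so on. The same control flow with a plain dict standing in for the h5py handle:

def decode(node):
    # Dispatch on the keys present, mirroring the HDF5 layout in the record.
    if 'tb_model' in node or 'hop' in node:
        return ('model', node)          # stand-in for Model.from_hdf5(node)
    elif 'val' in node:
        return node['val']
    elif '0' in node:
        return [decode(node[key]) for key in sorted(node, key=int)]
    raise ValueError('File structure not understood.')

print(decode({'val': 42}))                          # 42
print(decode({'0': {'val': 1}, '1': {'val': 2}}))   # [1, 2]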
b725eac62c72dd3674f35898ff6704c613e7272d
|
bears/julia/JuliaLintBear.py
|
bears/julia/JuliaLintBear.py
|
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class JuliaLintBear(LocalBear, Lint):
executable = 'julia'
arguments = '-e \'import Lint.lintfile; lintfile({filename})\''
output_regex = r'(^.*\.jl):(?P<line>\d+) (?P<severity>.)\d+ (?P<message>.*)'
use_stdout = True
severity_map = {
"E": RESULT_SEVERITY.MAJOR,
"W": RESULT_SEVERITY.NORMAL,
"I": RESULT_SEVERITY.INFO
}
def run(self, filename, file):
'''
Lints Julia code using ``Lint.jl``.
https://github.com/tonyhffong/Lint.jl
'''
return self.lint(filename, file)
|
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class JuliaLintBear(LocalBear, Lint):
executable = 'julia'
arguments = '-e \'import Lint.lintfile; lintfile({filename})\''
prerequisite_command = ['julia', '-e', 'import Lint.lintfile']
prerequisite_fail_msg = 'Run `Pkg.add("Lint")` from Julia to install Lint.'
output_regex = r'(^.*\.jl):(?P<line>\d+) (?P<severity>.)\d+ (?P<message>.*)'
use_stdout = True
severity_map = {
"E": RESULT_SEVERITY.MAJOR,
"W": RESULT_SEVERITY.NORMAL,
"I": RESULT_SEVERITY.INFO
}
def run(self, filename, file):
'''
Lints Julia code using ``Lint.jl``.
https://github.com/tonyhffong/Lint.jl
'''
return self.lint(filename, file)
|
Add Skip Condition for JuliaBear
|
bears/julia: Add Skip Condition for JuliaBear
Add prerequisite_command and prerequisite_fail_msg
to JuliaBear.
Fixes https://github.com/coala-analyzer/coala-bears/issues/222
|
Python
|
agpl-3.0
|
yash-nisar/coala-bears,naveentata/coala-bears,kaustubhhiware/coala-bears,mr-karan/coala-bears,shreyans800755/coala-bears,sims1253/coala-bears,coala-analyzer/coala-bears,coala/coala-bears,coala-analyzer/coala-bears,chriscoyfish/coala-bears,seblat/coala-bears,Vamshi99/coala-bears,seblat/coala-bears,sounak98/coala-bears,LWJensen/coala-bears,yashtrivedi96/coala-bears,LWJensen/coala-bears,shreyans800755/coala-bears,arjunsinghy96/coala-bears,vijeth-aradhya/coala-bears,mr-karan/coala-bears,incorrectusername/coala-bears,madhukar01/coala-bears,horczech/coala-bears,meetmangukiya/coala-bears,SanketDG/coala-bears,ku3o/coala-bears,srisankethu/coala-bears,horczech/coala-bears,Shade5/coala-bears,ankit01ojha/coala-bears,incorrectusername/coala-bears,sounak98/coala-bears,chriscoyfish/coala-bears,madhukar01/coala-bears,dosarudaniel/coala-bears,yash-nisar/coala-bears,Vamshi99/coala-bears,seblat/coala-bears,Vamshi99/coala-bears,naveentata/coala-bears,mr-karan/coala-bears,kaustubhhiware/coala-bears,meetmangukiya/coala-bears,refeed/coala-bears,horczech/coala-bears,naveentata/coala-bears,Shade5/coala-bears,yash-nisar/coala-bears,meetmangukiya/coala-bears,sals1275/coala-bears,coala/coala-bears,gs0510/coala-bears,vijeth-aradhya/coala-bears,madhukar01/coala-bears,chriscoyfish/coala-bears,Shade5/coala-bears,kaustubhhiware/coala-bears,sals1275/coala-bears,seblat/coala-bears,yash-nisar/coala-bears,sounak98/coala-bears,yashtrivedi96/coala-bears,aptrishu/coala-bears,Shade5/coala-bears,yash-nisar/coala-bears,srisankethu/coala-bears,ku3o/coala-bears,chriscoyfish/coala-bears,yashtrivedi96/coala-bears,sals1275/coala-bears,seblat/coala-bears,chriscoyfish/coala-bears,sounak98/coala-bears,dosarudaniel/coala-bears,kaustubhhiware/coala-bears,coala/coala-bears,srisankethu/coala-bears,horczech/coala-bears,shreyans800755/coala-bears,arjunsinghy96/coala-bears,coala/coala-bears,mr-karan/coala-bears,Vamshi99/coala-bears,dosarudaniel/coala-bears,coala/coala-bears,yashtrivedi96/coala-bears,SanketDG/coala-bears,coala-analyzer/coala-bears,meetmangukiya/coala-bears,arjunsinghy96/coala-bears,damngamerz/coala-bears,srisankethu/coala-bears,gs0510/coala-bears,Asnelchristian/coala-bears,damngamerz/coala-bears,madhukar01/coala-bears,horczech/coala-bears,srisankethu/coala-bears,mr-karan/coala-bears,Shade5/coala-bears,dosarudaniel/coala-bears,chriscoyfish/coala-bears,damngamerz/coala-bears,LWJensen/coala-bears,yashtrivedi96/coala-bears,coala/coala-bears,naveentata/coala-bears,Asnelchristian/coala-bears,SanketDG/coala-bears,madhukar01/coala-bears,Asnelchristian/coala-bears,damngamerz/coala-bears,shreyans800755/coala-bears,damngamerz/coala-bears,refeed/coala-bears,vijeth-aradhya/coala-bears,horczech/coala-bears,Vamshi99/coala-bears,sals1275/coala-bears,LWJensen/coala-bears,sims1253/coala-bears,sounak98/coala-bears,yashtrivedi96/coala-bears,arjunsinghy96/coala-bears,refeed/coala-bears,srisankethu/coala-bears,Asnelchristian/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,yash-nisar/coala-bears,ku3o/coala-bears,LWJensen/coala-bears,damngamerz/coala-bears,yash-nisar/coala-bears,coala/coala-bears,kaustubhhiware/coala-bears,Shade5/coala-bears,LWJensen/coala-bears,refeed/coala-bears,chriscoyfish/coala-bears,sims1253/coala-bears,meetmangukiya/coala-bears,ankit01ojha/coala-bears,mr-karan/coala-bears,incorrectusername/coala-bears,refeed/coala-bears,refeed/coala-bears,srisankethu/coala-bears,yashtrivedi96/coala-bears,ku3o/coala-bears,ku3o/coala-bears,mr-karan/coala-bears,seblat/coala-bears,SanketDG/coala-bears,ku3o/coala-bears,aptrishu/coala-b
ears,gs0510/coala-bears,sims1253/coala-bears,aptrishu/coala-bears,sounak98/coala-bears,dosarudaniel/coala-bears,SanketDG/coala-bears,coala-analyzer/coala-bears,arjunsinghy96/coala-bears,sals1275/coala-bears,coala-analyzer/coala-bears,naveentata/coala-bears,Shade5/coala-bears,arjunsinghy96/coala-bears,ankit01ojha/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,yash-nisar/coala-bears,coala/coala-bears,incorrectusername/coala-bears,aptrishu/coala-bears,sounak98/coala-bears,Shade5/coala-bears,ankit01ojha/coala-bears,kaustubhhiware/coala-bears,SanketDG/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,naveentata/coala-bears,SanketDG/coala-bears,damngamerz/coala-bears,ankit01ojha/coala-bears,horczech/coala-bears,sims1253/coala-bears,horczech/coala-bears,naveentata/coala-bears,Asnelchristian/coala-bears,Vamshi99/coala-bears,srisankethu/coala-bears,sims1253/coala-bears,sals1275/coala-bears,Vamshi99/coala-bears,dosarudaniel/coala-bears,yashtrivedi96/coala-bears,incorrectusername/coala-bears,dosarudaniel/coala-bears,coala-analyzer/coala-bears,gs0510/coala-bears,Vamshi99/coala-bears,refeed/coala-bears,kaustubhhiware/coala-bears,sals1275/coala-bears,LWJensen/coala-bears,sims1253/coala-bears,coala-analyzer/coala-bears,madhukar01/coala-bears,coala/coala-bears,coala/coala-bears,yash-nisar/coala-bears,vijeth-aradhya/coala-bears,srisankethu/coala-bears,aptrishu/coala-bears,srisankethu/coala-bears,coala-analyzer/coala-bears,coala/coala-bears,gs0510/coala-bears,aptrishu/coala-bears,Asnelchristian/coala-bears,gs0510/coala-bears,ankit01ojha/coala-bears,ankit01ojha/coala-bears,vijeth-aradhya/coala-bears,ankit01ojha/coala-bears,dosarudaniel/coala-bears,incorrectusername/coala-bears,refeed/coala-bears,shreyans800755/coala-bears,incorrectusername/coala-bears,ankit01ojha/coala-bears,damngamerz/coala-bears,coala-analyzer/coala-bears,shreyans800755/coala-bears,gs0510/coala-bears,kaustubhhiware/coala-bears,damngamerz/coala-bears,refeed/coala-bears,aptrishu/coala-bears,kaustubhhiware/coala-bears,meetmangukiya/coala-bears,gs0510/coala-bears,vijeth-aradhya/coala-bears,sals1275/coala-bears,LWJensen/coala-bears,sounak98/coala-bears,Vamshi99/coala-bears,SanketDG/coala-bears,vijeth-aradhya/coala-bears,damngamerz/coala-bears,yash-nisar/coala-bears,seblat/coala-bears,ku3o/coala-bears,refeed/coala-bears,dosarudaniel/coala-bears,madhukar01/coala-bears,SanketDG/coala-bears,arjunsinghy96/coala-bears,Vamshi99/coala-bears,ankit01ojha/coala-bears,incorrectusername/coala-bears,refeed/coala-bears,yashtrivedi96/coala-bears,srisankethu/coala-bears,damngamerz/coala-bears,meetmangukiya/coala-bears,horczech/coala-bears,madhukar01/coala-bears,aptrishu/coala-bears,horczech/coala-bears,gs0510/coala-bears,meetmangukiya/coala-bears,shreyans800755/coala-bears,chriscoyfish/coala-bears,aptrishu/coala-bears,LWJensen/coala-bears,ku3o/coala-bears,horczech/coala-bears,arjunsinghy96/coala-bears,incorrectusername/coala-bears,sims1253/coala-bears,vijeth-aradhya/coala-bears,coala/coala-bears,aptrishu/coala-bears,vijeth-aradhya/coala-bears,Vamshi99/coala-bears,meetmangukiya/coala-bears,naveentata/coala-bears,seblat/coala-bears,naveentata/coala-bears,ankit01ojha/coala-bears,arjunsinghy96/coala-bears,sounak98/coala-bears,yash-nisar/coala-bears,Shade5/coala-bears,aptrishu/coala-bears,ku3o/coala-bears,shreyans800755/coala-bears,madhukar01/coala-bears,mr-karan/coala-bears
|
python
|
## Code Before:
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class JuliaLintBear(LocalBear, Lint):
executable = 'julia'
arguments = '-e \'import Lint.lintfile; lintfile({filename})\''
output_regex = r'(^.*\.jl):(?P<line>\d+) (?P<severity>.)\d+ (?P<message>.*)'
use_stdout = True
severity_map = {
"E": RESULT_SEVERITY.MAJOR,
"W": RESULT_SEVERITY.NORMAL,
"I": RESULT_SEVERITY.INFO
}
def run(self, filename, file):
'''
Lints Julia code using ``Lint.jl``.
https://github.com/tonyhffong/Lint.jl
'''
return self.lint(filename, file)
## Instruction:
bears/julia: Add Skip Condition for JuliaBear
Add prerequisite_command and prerequisite_fail_msg
to JuliaBear.
Fixes https://github.com/coala-analyzer/coala-bears/issues/222
## Code After:
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class JuliaLintBear(LocalBear, Lint):
executable = 'julia'
arguments = '-e \'import Lint.lintfile; lintfile({filename})\''
prerequisite_command = ['julia', '-e', 'import Lint.lintfile']
prerequisite_fail_msg = 'Run `Pkg.add("Lint")` from Julia to install Lint.'
output_regex = r'(^.*\.jl):(?P<line>\d+) (?P<severity>.)\d+ (?P<message>.*)'
use_stdout = True
severity_map = {
"E": RESULT_SEVERITY.MAJOR,
"W": RESULT_SEVERITY.NORMAL,
"I": RESULT_SEVERITY.INFO
}
def run(self, filename, file):
'''
Lints Julia code using ``Lint.jl``.
https://github.com/tonyhffong/Lint.jl
'''
return self.lint(filename, file)
|
...
class JuliaLintBear(LocalBear, Lint):
executable = 'julia'
arguments = '-e \'import Lint.lintfile; lintfile({filename})\''
prerequisite_command = ['julia', '-e', 'import Lint.lintfile']
prerequisite_fail_msg = 'Run `Pkg.add("Lint")` from Julia to install Lint.'
output_regex = r'(^.*\.jl):(?P<line>\d+) (?P<severity>.)\d+ (?P<message>.*)'
use_stdout = True
severity_map = {
...
|
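prerequisite_command in the record above lets the bear probe for Lint.jl before running and surface an install hint when the probe fails. A generic sketch of that kind of check, not coala's actual implementation:

import os
import subprocess

def check_prerequisite(command, fail_msg):
    # Run a cheap probe command; if it cannot be executed or exits non-zero,
    # report the install hint instead of failing later with a cryptic error.
    with open(os.devnull, 'w') as devnull:
        try:
            subprocess.check_call(command, stdout=devnull, stderr=devnull)
            return True
        except (OSError, subprocess.CalledProcessError):
            print(fail_msg)
            return False

check_prerequisite(['julia', '-e', 'import Lint.lintfile'],
                   'Run `Pkg.add("Lint")` from Julia to install Lint.')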
a50cca78f400077d56b328a20661c1a9d1e2aff4
|
app/tests/test_generate_profiles.py
|
app/tests/test_generate_profiles.py
|
import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
|
import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
|
Add a requirement for serving the assets in all tests
|
Add a requirement for serving the assets in all tests
|
Python
|
mit
|
nyodas/enjoliver,kirek007/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,JulienBalestra/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver
|
python
|
## Code Before:
import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
## Instruction:
Add a requirement for serving the assets in all tests
## Code After:
import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
|
# ... existing code ...
import os
import subprocess
from unittest import TestCase
import re
# ... modified code ...
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
# ... rest of the code ...
|
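setUpClass in the record above runs make once for the whole test class so the assets exist before any test executes, instead of rebuilding before every test. A minimal sketch of that pattern with a harmless placeholder command:

import subprocess
from unittest import TestCase

class ExampleTest(TestCase):

    @classmethod
    def setUpClass(cls):
        # Stand-in for an expensive one-off step such as `make -C <project_path>`;
        # `echo` is only a placeholder and assumes a POSIX shell environment.
        cls.build_output = subprocess.check_output(["echo", "assets built"])

    def test_build_ran(self):
        self.assertIn(b"assets built", self.build_output)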
b3f7b677edb0a87abff2ef64dadb64547d757d6b
|
elasticsearch_django/migrations/0004_auto_20161129_1135.py
|
elasticsearch_django/migrations/0004_auto_20161129_1135.py
|
from django.db import migrations
from ..db.fields import JSONField
class Migration(migrations.Migration):
dependencies = [("elasticsearch_django", "0003_auto_20160926_2021")]
operations = [
migrations.AlterField(
model_name="searchquery",
name="hits",
field=JSONField(
help_text="The list of meta info for each of the query matches returned."
),
),
migrations.AlterField(
model_name="searchquery",
name="query",
field=JSONField(help_text="The raw ElasticSearch DSL query."),
),
]
|
from django.contrib.postgres.fields import JSONField
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("elasticsearch_django", "0003_auto_20160926_2021")]
operations = [
migrations.AlterField(
model_name="searchquery",
name="hits",
field=JSONField(
help_text="The list of meta info for each of the query matches returned."
),
),
migrations.AlterField(
model_name="searchquery",
name="query",
field=JSONField(help_text="The raw ElasticSearch DSL query."),
),
]
|
Update migration to use native JSONField
|
Update migration to use native JSONField
|
Python
|
mit
|
yunojuno/elasticsearch-django
|
python
|
## Code Before:
from django.db import migrations
from ..db.fields import JSONField
class Migration(migrations.Migration):
dependencies = [("elasticsearch_django", "0003_auto_20160926_2021")]
operations = [
migrations.AlterField(
model_name="searchquery",
name="hits",
field=JSONField(
help_text="The list of meta info for each of the query matches returned."
),
),
migrations.AlterField(
model_name="searchquery",
name="query",
field=JSONField(help_text="The raw ElasticSearch DSL query."),
),
]
## Instruction:
Update migration to use native JSONField
## Code After:
from django.contrib.postgres.fields import JSONField
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("elasticsearch_django", "0003_auto_20160926_2021")]
operations = [
migrations.AlterField(
model_name="searchquery",
name="hits",
field=JSONField(
help_text="The list of meta info for each of the query matches returned."
),
),
migrations.AlterField(
model_name="searchquery",
name="query",
field=JSONField(help_text="The raw ElasticSearch DSL query."),
),
]
|
// ... existing code ...
from django.contrib.postgres.fields import JSONField
from django.db import migrations
class Migration(migrations.Migration):
// ... rest of the code ...
|
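The migration in the record above moves from a custom JSONField to django.contrib.postgres.fields.JSONField. A sketch of the corresponding model declaration inside a configured Django project; the model body is illustrative, and note that from Django 3.1 onward the database-agnostic equivalent lives at django.db.models.JSONField, with the postgres-specific import later deprecated:

from django.contrib.postgres.fields import JSONField
from django.db import models

class SearchQuery(models.Model):
    # Field names follow the migration; everything else here is a sketch.
    query = JSONField(help_text="The raw ElasticSearch DSL query.")
    hits = JSONField(help_text="The list of meta info for each of the query matches returned.")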
0a6b43f2202cb63aad18c119d7d46916b4d54873
|
examples/site-api.py
|
examples/site-api.py
|
from flask import Flask, send_from_directory
from flask_sockets import Sockets
import json
app = Flask(__name__)
sockets = Sockets(app)
channels = {}
@sockets.route('/channel/<name>')
def channel_socket(ws, name):
if name in channels:
channels[name].append(ws)
else:
channels[name] = [ws]
while not ws.closed:
message = ws.receive()
print "Got msg:", message
if message is None:
continue
for other_ws in channels[name]:
if ws is not other_ws:
other_ws.send(message)
channels[name].remove(ws)
for other_ws in channels[name]:
other_ws.send(json.dumps({"type": "client_disconnected", "msg": {}}))
@app.route('/static/<path:path>')
def send_static(path):
return app.send_from_directory('static', path)
@app.route('/index.html')
def serve_site():
return app.send_static_file("index.html")
if __name__ == "__main__":
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
server.serve_forever()
|
from flask import Flask, send_from_directory
from flask_sockets import Sockets
import json
app = Flask(__name__)
sockets = Sockets(app)
channels = {}
@sockets.route('/channel/<name>')
def channel_socket(ws, name):
if name in channels:
channels[name].append(ws)
else:
channels[name] = [ws]
while not ws.closed:
message = ws.receive()
print "Got msg:", message
if message is None:
continue
for other_ws in channels[name]:
if ws is not other_ws:
other_ws.send(message)
channels[name].remove(ws)
for other_ws in channels[name]:
other_ws.send(json.dumps({"type": "client_disconnected", "msg": {}}))
@app.route('/static/<path:path>')
def send_static(path):
return app.send_from_directory('static', path)
@app.route('/')
def serve_site():
return app.send_static_file("index.html")
if __name__ == "__main__":
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
server.serve_forever()
|
Make it easier to load the initial page
|
Make it easier to load the initial page
|
Python
|
bsd-3-clause
|
chadnickbok/librtcdcpp,chadnickbok/librtcdcpp
|
python
|
## Code Before:
from flask import Flask, send_from_directory
from flask_sockets import Sockets
import json
app = Flask(__name__)
sockets = Sockets(app)
channels = {}
@sockets.route('/channel/<name>')
def channel_socket(ws, name):
if name in channels:
channels[name].append(ws)
else:
channels[name] = [ws]
while not ws.closed:
message = ws.receive()
print "Got msg:", message
if message is None:
continue
for other_ws in channels[name]:
if ws is not other_ws:
other_ws.send(message)
channels[name].remove(ws)
for other_ws in channels[name]:
other_ws.send(json.dumps({"type": "client_disconnected", "msg": {}}))
@app.route('/static/<path:path>')
def send_static(path):
return app.send_from_directory('static', path)
@app.route('/index.html')
def serve_site():
return app.send_static_file("index.html")
if __name__ == "__main__":
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
server.serve_forever()
## Instruction:
Make it easier to load the initial page
## Code After:
from flask import Flask, send_from_directory
from flask_sockets import Sockets
import json
app = Flask(__name__)
sockets = Sockets(app)
channels = {}
@sockets.route('/channel/<name>')
def channel_socket(ws, name):
if name in channels:
channels[name].append(ws)
else:
channels[name] = [ws]
while not ws.closed:
message = ws.receive()
print "Got msg:", message
if message is None:
continue
for other_ws in channels[name]:
if ws is not other_ws:
other_ws.send(message)
channels[name].remove(ws)
for other_ws in channels[name]:
other_ws.send(json.dumps({"type": "client_disconnected", "msg": {}}))
@app.route('/static/<path:path>')
def send_static(path):
return app.send_from_directory('static', path)
@app.route('/')
def serve_site():
return app.send_static_file("index.html")
if __name__ == "__main__":
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
server.serve_forever()
|
// ... existing code ...
return app.send_from_directory('static', path)
@app.route('/')
def serve_site():
return app.send_static_file("index.html")
// ... rest of the code ...
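One caveat worth illustrating from the snippet above: Flask exposes send_from_directory as a module-level function, not as a method on the app object, so the '/static/<path:path>' route as written would raise AttributeError when hit. A minimal sketch of a working version of just that route (directory and route names carried over from the snippet, run guard added only so the sketch is executable on its own):

from flask import Flask, send_from_directory

app = Flask(__name__)

@app.route('/static/<path:path>')
def send_static(path):
    # send_from_directory is imported from flask, not called on the app
    return send_from_directory('static', path)

if __name__ == "__main__":
    app.run()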
|
2cc759f4757dbd50b73c264d47fe08efa0d47f3a
|
app/src/main/java/com/veyndan/redditclient/ui/recyclerview/itemdecoration/TreeInsetItemDecoration.java
|
app/src/main/java/com/veyndan/redditclient/ui/recyclerview/itemdecoration/TreeInsetItemDecoration.java
|
package com.veyndan.redditclient.ui.recyclerview.itemdecoration;
import android.content.Context;
import android.graphics.Rect;
import android.support.annotation.DimenRes;
import android.support.annotation.NonNull;
import android.support.annotation.Px;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.veyndan.redditclient.post.DepthCalculatorDelegate;
public class TreeInsetItemDecoration extends RecyclerView.ItemDecoration {
@Px private final int childInsetMultiplier;
public TreeInsetItemDecoration(@NonNull final Context context,
@DimenRes final int childInsetMultiplierRes) {
childInsetMultiplier = context.getResources().getDimensionPixelOffset(childInsetMultiplierRes);
}
@Override
public void getItemOffsets(final Rect outRect, final View view, final RecyclerView parent,
final RecyclerView.State state) {
if (parent.getAdapter() instanceof DepthCalculatorDelegate) {
final DepthCalculatorDelegate depthCalculatorDelegate = (DepthCalculatorDelegate) parent.getAdapter();
final RecyclerView.LayoutParams layoutParams = (RecyclerView.LayoutParams) view.getLayoutParams();
final int position = layoutParams.getViewLayoutPosition();
final int inset = depthCalculatorDelegate.depthForPosition(position) * childInsetMultiplier;
outRect.set(inset, 0, 0, 0);
} else {
throw new IllegalStateException("RecyclerView's Adapter must implement " +
"DepthCalculatorDelegate in order for TreeInsetItemDecoration to be used as " +
"a decoration");
}
}
}
|
package com.veyndan.redditclient.ui.recyclerview.itemdecoration;
import android.content.Context;
import android.graphics.Rect;
import android.support.annotation.DimenRes;
import android.support.annotation.NonNull;
import android.support.annotation.Px;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.veyndan.redditclient.post.DepthCalculatorDelegate;
public class TreeInsetItemDecoration extends RecyclerView.ItemDecoration {
@Px private final int childInsetMultiplier;
public TreeInsetItemDecoration(@NonNull final Context context,
@DimenRes final int childInsetMultiplierRes) {
childInsetMultiplier = context.getResources().getDimensionPixelOffset(childInsetMultiplierRes);
}
@Override
public void getItemOffsets(final Rect outRect, final View view, final RecyclerView parent,
final RecyclerView.State state) {
if (parent.getAdapter() instanceof DepthCalculatorDelegate) {
final DepthCalculatorDelegate depthCalculatorDelegate = (DepthCalculatorDelegate) parent.getAdapter();
final int position = parent.getChildAdapterPosition(view);
final int inset = position == RecyclerView.NO_POSITION
? 0
: depthCalculatorDelegate.depthForPosition(position) * childInsetMultiplier;
outRect.set(inset, 0, 0, 0);
} else {
throw new IllegalStateException("RecyclerView's Adapter must implement " +
"DepthCalculatorDelegate in order for TreeInsetItemDecoration to be used as " +
"a decoration");
}
}
}
|
Use adapter position instead of layout params position, as the latter was causing an IndexOutOfBoundsException otherwise
|
Use adapter position instead of layout params position, as the latter was causing an IndexOutOfBoundsException otherwise
|
Java
|
mit
|
veyndan/reddit-client,veyndan/paper-for-reddit,veyndan/paper-for-reddit
|
java
|
## Code Before:
package com.veyndan.redditclient.ui.recyclerview.itemdecoration;
import android.content.Context;
import android.graphics.Rect;
import android.support.annotation.DimenRes;
import android.support.annotation.NonNull;
import android.support.annotation.Px;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.veyndan.redditclient.post.DepthCalculatorDelegate;
public class TreeInsetItemDecoration extends RecyclerView.ItemDecoration {
@Px private final int childInsetMultiplier;
public TreeInsetItemDecoration(@NonNull final Context context,
@DimenRes final int childInsetMultiplierRes) {
childInsetMultiplier = context.getResources().getDimensionPixelOffset(childInsetMultiplierRes);
}
@Override
public void getItemOffsets(final Rect outRect, final View view, final RecyclerView parent,
final RecyclerView.State state) {
if (parent.getAdapter() instanceof DepthCalculatorDelegate) {
final DepthCalculatorDelegate depthCalculatorDelegate = (DepthCalculatorDelegate) parent.getAdapter();
final RecyclerView.LayoutParams layoutParams = (RecyclerView.LayoutParams) view.getLayoutParams();
final int position = layoutParams.getViewLayoutPosition();
final int inset = depthCalculatorDelegate.depthForPosition(position) * childInsetMultiplier;
outRect.set(inset, 0, 0, 0);
} else {
throw new IllegalStateException("RecyclerView's Adapter must implement " +
"DepthCalculatorDelegate in order for TreeInsetItemDecoration to be used as " +
"a decoration");
}
}
}
## Instruction:
Use adapter position instead of layout params position, as the latter was causing an IndexOutOfBoundsException otherwise
## Code After:
package com.veyndan.redditclient.ui.recyclerview.itemdecoration;
import android.content.Context;
import android.graphics.Rect;
import android.support.annotation.DimenRes;
import android.support.annotation.NonNull;
import android.support.annotation.Px;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.veyndan.redditclient.post.DepthCalculatorDelegate;
public class TreeInsetItemDecoration extends RecyclerView.ItemDecoration {
@Px private final int childInsetMultiplier;
public TreeInsetItemDecoration(@NonNull final Context context,
@DimenRes final int childInsetMultiplierRes) {
childInsetMultiplier = context.getResources().getDimensionPixelOffset(childInsetMultiplierRes);
}
@Override
public void getItemOffsets(final Rect outRect, final View view, final RecyclerView parent,
final RecyclerView.State state) {
if (parent.getAdapter() instanceof DepthCalculatorDelegate) {
final DepthCalculatorDelegate depthCalculatorDelegate = (DepthCalculatorDelegate) parent.getAdapter();
final int position = parent.getChildAdapterPosition(view);
final int inset = position == RecyclerView.NO_POSITION
? 0
: depthCalculatorDelegate.depthForPosition(position) * childInsetMultiplier;
outRect.set(inset, 0, 0, 0);
} else {
throw new IllegalStateException("RecyclerView's Adapter must implement " +
"DepthCalculatorDelegate in order for TreeInsetItemDecoration to be used as " +
"a decoration");
}
}
}
|
// ... existing code ...
final RecyclerView.State state) {
if (parent.getAdapter() instanceof DepthCalculatorDelegate) {
final DepthCalculatorDelegate depthCalculatorDelegate = (DepthCalculatorDelegate) parent.getAdapter();
final int position = parent.getChildAdapterPosition(view);
final int inset = position == RecyclerView.NO_POSITION
? 0
: depthCalculatorDelegate.depthForPosition(position) * childInsetMultiplier;
outRect.set(inset, 0, 0, 0);
} else {
throw new IllegalStateException("RecyclerView's Adapter must implement " +
// ... rest of the code ...
|
88dc55b86d432b2fcc9e214acbd3f9064e4debdb
|
tests/test_datasets.py
|
tests/test_datasets.py
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
Add test for latest datasets
|
Add test for latest datasets
|
Python
|
mit
|
shamrt/LCBOAPI
|
python
|
## Code Before:
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
## Instruction:
Add test for latest datasets
## Code After:
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
// ... existing code ...
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
// ... rest of the code ...
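A standalone sketch of the parametrize pattern used in the updated test, showing that pytest collects one test per parameter value; the test name and assertion below are illustrative only:

import pytest

DATASET_ID = 800

@pytest.mark.parametrize("test_input", ["latest", DATASET_ID])
def test_dataset_id_accepts_both_forms(test_input):
    # Collected twice, once per value: ...[latest] and ...[800].
    assert test_input == "latest" or isinstance(test_input, int)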
|
b7dc11b7e630345db37adbb85d26f5ea43794a79
|
trianglify/src/main/java/com/sdsmdg/kd/trianglify/models/Triangle.java
|
trianglify/src/main/java/com/sdsmdg/kd/trianglify/models/Triangle.java
|
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
private boolean isClockwise (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x) < 0f;
}
public boolean contains (Point p) {
return isClockwise(p, a, b) == isClockwise(p, b, c) == isClockwise(p, c, a);
}
}
|
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
private boolean isClockwise (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x) < 0;
}
public boolean contains (Point p) {
return isClockwise(p, a, b) == isClockwise(p, b, c) == isClockwise(p, c, a);
}
public boolean isInCircumcircle (Point p) {
int d11 = a.x - p.x;
int d21 = b.x - p.x;
int d31 = c.x - p.x;
int d12 = a.y - p.y;
int d22 = b.y - p.y;
int d32 = c.y - p.y;
int d13 = d11 * d11 + d12 * d12;
int d23 = d21 * d21 + d22 * d22;
int d33 = d31 * d31 + d32 * d32;
int determinant = d11 * (d22 * d33 - d32 * d23)
- d12 * (d21 * d33 - d31 * d23)
+ d13 * (d21 * d32 - d31 * d22);
return isClockwise(a, b, c) ? determinant < 0 : determinant > 0;
}
}
|
Add a method to check if point is in circumcircle of triangle.
|
Add a method to check if point is in circumcircle of triangle.
|
Java
|
mit
|
sdsmdg/trianglify
|
java
|
## Code Before:
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
private boolean isClockwise (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x) < 0f;
}
public boolean contains (Point p) {
return isClockwise(p, a, b) == isClockwise(p, b, c) == isClockwise(p, c, a);
}
}
## Instruction:
Add a method to check if point is in circumcircle of triangle.
## Code After:
package com.sdsmdg.kd.trianglify.models;
import android.graphics.Point;
public class Triangle {
public Point a;
public Point b;
public Point c;
public Triangle (Point a, Point b, Point c) {
this.a = a;
this.b = b;
this.c = c;
}
private boolean isClockwise (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x) < 0;
}
public boolean contains (Point p) {
return isClockwise(p, a, b) == isClockwise(p, b, c) == isClockwise(p, c, a);
}
public boolean isInCircumcircle (Point p) {
int d11 = a.x - p.x;
int d21 = b.x - p.x;
int d31 = c.x - p.x;
int d12 = a.y - p.y;
int d22 = b.y - p.y;
int d32 = c.y - p.y;
int d13 = d11 * d11 + d12 * d12;
int d23 = d21 * d21 + d22 * d22;
int d33 = d31 * d31 + d32 * d32;
int determinant = d11 * (d22 * d33 - d32 * d23)
- d12 * (d21 * d33 - d31 * d23)
+ d13 * (d21 * d32 - d31 * d22);
return isClockwise(a, b, c) ? determinant < 0 : determinant > 0;
}
}
|
// ... existing code ...
}
private boolean isClockwise (Point p1, Point p2, Point p3) {
return (p1.x - p3.x) * (p2.y - p3.y) - (p1.y - p3.y) * (p2.x - p3.x) < 0;
}
public boolean contains (Point p) {
return isClockwise(p, a, b) == isClockwise(p, b, c) == isClockwise(p, c, a);
}
public boolean isInCircumcircle (Point p) {
int d11 = a.x - p.x;
int d21 = b.x - p.x;
int d31 = c.x - p.x;
int d12 = a.y - p.y;
int d22 = b.y - p.y;
int d32 = c.y - p.y;
int d13 = d11 * d11 + d12 * d12;
int d23 = d21 * d21 + d22 * d22;
int d33 = d31 * d31 + d32 * d32;
int determinant = d11 * (d22 * d33 - d32 * d23)
- d12 * (d21 * d33 - d31 * d23)
+ d13 * (d21 * d32 - d31 * d22);
return isClockwise(a, b, c) ? determinant < 0 : determinant > 0;
}
}
// ... rest of the code ...
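A quick numeric check of the same determinant logic, transcribed to Python as a sketch (not part of the library), using a right triangle whose circumcircle is easy to reason about: vertices (0,0), (4,0), (0,4) give a circumcentre of (2,2) and radius 2*sqrt(2).

def is_clockwise(p1, p2, p3):
    return (p1[0] - p3[0]) * (p2[1] - p3[1]) - (p1[1] - p3[1]) * (p2[0] - p3[0]) < 0

def in_circumcircle(a, b, c, p):
    # Same determinant as the Java method, on (x, y) tuples.
    d11, d21, d31 = a[0] - p[0], b[0] - p[0], c[0] - p[0]
    d12, d22, d32 = a[1] - p[1], b[1] - p[1], c[1] - p[1]
    d13 = d11 * d11 + d12 * d12
    d23 = d21 * d21 + d22 * d22
    d33 = d31 * d31 + d32 * d32
    det = (d11 * (d22 * d33 - d32 * d23)
           - d12 * (d21 * d33 - d31 * d23)
           + d13 * (d21 * d32 - d31 * d22))
    return det < 0 if is_clockwise(a, b, c) else det > 0

a, b, c = (0, 0), (4, 0), (0, 4)
assert in_circumcircle(a, b, c, (2, 2))      # circumcentre -> inside
assert not in_circumcircle(a, b, c, (5, 5))  # well outside the radius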
|
0c44232208b87c84a381d397ab9e9261063d500d
|
Bindings/Java/tests/TestDataAdapter.java
|
Bindings/Java/tests/TestDataAdapter.java
|
import java.io.File;
import org.opensim.modeling.*;
class TestDataAdapter {
public static void test_C3DFileAdapter() {
C3DFileAdapter c3dAdapter = new C3DFileAdapter();
StdMapStringTimeSeriesTableVec3 tables =
c3dAdapter.read("../../../../OpenSim/Common/Test/walking5.c3d");
TimeSeriesTableVec3 markerTable = tables.get("markers");
assert markerTable.getNumRows() == 1103;
assert markerTable.getNumColumns() == 40;
TimeSeriesTable markerTableFlat = markerTable.flatten();
assert markerTableFlat.getNumRows() == 1103;
assert markerTableFlat.getNumColumns() == 40 * 3;
String fileName = new String("testDataAdapter.mot");
STOFileAdapter stoAdapter = new STOFileAdapter();
stoAdapter.write(markerTableFlat, fileName);
TimeSeriesTable markerTableDouble = stoAdapter.read(fileName);
assert markerTableDouble.getNumRows() == 1103;
assert markerTableDouble.getNumColumns() == 40 * 3;
File file = new File(fileName);
file.delete();
}
public static void main(String[] args) {
test_C3DFileAdapter();
}
};
|
import java.io.File;
import org.opensim.modeling.*;
class TestDataAdapter {
public static void test_C3DFileAdapter() {
C3DFileAdapter c3dAdapter = new C3DFileAdapter();
StdMapStringTimeSeriesTableVec3 tables =
c3dAdapter.read("../../../../OpenSim/Common/Test/walking5.c3d");
TimeSeriesTableVec3 markerTable = tables.get("markers");
assert markerTable.getNumRows() == 1103;
assert markerTable.getNumColumns() == 40;
TimeSeriesTable markerTableFlat = markerTable.flatten();
assert markerTableFlat.getNumRows() == 1103;
assert markerTableFlat.getNumColumns() == 40 * 3;
String markerFileName = new String("markers.mot");
STOFileAdapter stoAdapter = new STOFileAdapter();
stoAdapter.write(markerTableFlat, markerFileName);
TimeSeriesTable markerTableDouble = stoAdapter.read(markerFileName);
assert markerTableDouble.getNumRows() == 1103;
assert markerTableDouble.getNumColumns() == 40 * 3;
TimeSeriesTableVec3 forceTable = tables.get("forces");
assert forceTable.getNumRows() == 8824;
assert forceTable.getNumColumns() == 6;
TimeSeriesTable forceTableFlat = forceTable.flatten();
assert forceTableFlat.getNumRows() == 8824;
assert forceTableFlat.getNumColumns() == 6 * 3;
File file = new File(markerFileName);
file.delete();
file = new File(forceFileName);
file.delete();
}
public static void main(String[] args) {
test_C3DFileAdapter();
}
};
|
Add Java test to read/write forces table from C3D along with markers table.
|
Add Java test to read/write forces table from C3D along with markers table.
|
Java
|
apache-2.0
|
opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core
|
java
|
## Code Before:
import java.io.File;
import org.opensim.modeling.*;
class TestDataAdapter {
public static void test_C3DFileAdapter() {
C3DFileAdapter c3dAdapter = new C3DFileAdapter();
StdMapStringTimeSeriesTableVec3 tables =
c3dAdapter.read("../../../../OpenSim/Common/Test/walking5.c3d");
TimeSeriesTableVec3 markerTable = tables.get("markers");
assert markerTable.getNumRows() == 1103;
assert markerTable.getNumColumns() == 40;
TimeSeriesTable markerTableFlat = markerTable.flatten();
assert markerTableFlat.getNumRows() == 1103;
assert markerTableFlat.getNumColumns() == 40 * 3;
String fileName = new String("testDataAdapter.mot");
STOFileAdapter stoAdapter = new STOFileAdapter();
stoAdapter.write(markerTableFlat, fileName);
TimeSeriesTable markerTableDouble = stoAdapter.read(fileName);
assert markerTableDouble.getNumRows() == 1103;
assert markerTableDouble.getNumColumns() == 40 * 3;
File file = new File(fileName);
file.delete();
}
public static void main(String[] args) {
test_C3DFileAdapter();
}
};
## Instruction:
Add Java test to read/write forces table from C3D along with markers table.
## Code After:
import java.io.File;
import org.opensim.modeling.*;
class TestDataAdapter {
public static void test_C3DFileAdapter() {
C3DFileAdapter c3dAdapter = new C3DFileAdapter();
StdMapStringTimeSeriesTableVec3 tables =
c3dAdapter.read("../../../../OpenSim/Common/Test/walking5.c3d");
TimeSeriesTableVec3 markerTable = tables.get("markers");
assert markerTable.getNumRows() == 1103;
assert markerTable.getNumColumns() == 40;
TimeSeriesTable markerTableFlat = markerTable.flatten();
assert markerTableFlat.getNumRows() == 1103;
assert markerTableFlat.getNumColumns() == 40 * 3;
String markerFileName = new String("markers.mot");
STOFileAdapter stoAdapter = new STOFileAdapter();
stoAdapter.write(markerTableFlat, markerFileName);
TimeSeriesTable markerTableDouble = stoAdapter.read(markerFileName);
assert markerTableDouble.getNumRows() == 1103;
assert markerTableDouble.getNumColumns() == 40 * 3;
TimeSeriesTableVec3 forceTable = tables.get("forces");
assert forceTable.getNumRows() == 8824;
assert forceTable.getNumColumns() == 6;
TimeSeriesTable forceTableFlat = forceTable.flatten();
assert forceTableFlat.getNumRows() == 8824;
assert forceTableFlat.getNumColumns() == 6 * 3;
File file = new File(markerFileName);
file.delete();
file = new File(forceFileName);
file.delete();
}
public static void main(String[] args) {
test_C3DFileAdapter();
}
};
|
# ... existing code ...
C3DFileAdapter c3dAdapter = new C3DFileAdapter();
StdMapStringTimeSeriesTableVec3 tables =
c3dAdapter.read("../../../../OpenSim/Common/Test/walking5.c3d");
TimeSeriesTableVec3 markerTable = tables.get("markers");
assert markerTable.getNumRows() == 1103;
assert markerTable.getNumColumns() == 40;
# ... modified code ...
assert markerTableFlat.getNumRows() == 1103;
assert markerTableFlat.getNumColumns() == 40 * 3;
String markerFileName = new String("markers.mot");
STOFileAdapter stoAdapter = new STOFileAdapter();
stoAdapter.write(markerTableFlat, markerFileName);
TimeSeriesTable markerTableDouble = stoAdapter.read(markerFileName);
assert markerTableDouble.getNumRows() == 1103;
assert markerTableDouble.getNumColumns() == 40 * 3;
TimeSeriesTableVec3 forceTable = tables.get("forces");
assert forceTable.getNumRows() == 8824;
assert forceTable.getNumColumns() == 6;
TimeSeriesTable forceTableFlat = forceTable.flatten();
assert forceTableFlat.getNumRows() == 8824;
assert forceTableFlat.getNumColumns() == 6 * 3;
File file = new File(markerFileName);
file.delete();
file = new File(forceFileName);
file.delete();
}
# ... rest of the code ...
|
dd52536149c2c9fb23b9a940b7d8fe9c3b2b5624
|
src/main/java/shouty/Shouty.java
|
src/main/java/shouty/Shouty.java
|
package shouty;
import java.util.*;
public class Shouty {
private final int MESSAGE_RANGE = 1000;
private Map<String, Coordinate> locations = new HashMap<String, Coordinate>();
private Map<String, String> messages = new HashMap<String, String>();
public void setLocation(String person, Coordinate location) {
locations.put(person, location);
}
public void shout(String person, String message) {
messages.put(person, message);
}
public List<String> getMessagesHeardBy(String listener) {
List<String> messagesHeard = new ArrayList<String>();
for (Map.Entry<String, String> entry : messages.entrySet()) {
messagesHeard.add(entry.getValue());
}
return messagesHeard;
}
}
|
package shouty;
import java.util.*;
public class Shouty {
private final int MESSAGE_RANGE = 1000;
private Map<String, Coordinate> locations = new HashMap<String, Coordinate>();
private Map<String, String> messages = new HashMap<String, String>();
public void setLocation(String person, Coordinate location) {
locations.put(person, location);
}
public void shout(String person, String message) {
messages.put(person, message);
}
public Map<String, String> getMessagesHeardBy(String listener) {
HashMap<String, String> result = new HashMap<String, String>();
for (Map.Entry<String, String> entry: messages.entrySet()) {
result.put(entry.getKey(), entry.getValue());
}
return result;
}
}
|
Change the signature of getMessagesHeardBy
|
Change the signature of getMessagesHeardBy
This makes the exercise "Add a new scenario" easier, as for those steps
to work, you need to know the identity of the shouter.
|
Java
|
mit
|
cucumber-ltd/shouty.java
|
java
|
## Code Before:
package shouty;
import java.util.*;
public class Shouty {
private final int MESSAGE_RANGE = 1000;
private Map<String, Coordinate> locations = new HashMap<String, Coordinate>();
private Map<String, String> messages = new HashMap<String, String>();
public void setLocation(String person, Coordinate location) {
locations.put(person, location);
}
public void shout(String person, String message) {
messages.put(person, message);
}
public List<String> getMessagesHeardBy(String listener) {
List<String> messagesHeard = new ArrayList<String>();
for (Map.Entry<String, String> entry : messages.entrySet()) {
messagesHeard.add(entry.getValue());
}
return messagesHeard;
}
}
## Instruction:
Change the signature of getMessagesHeardBy
This makes the exercise "Add a new scenario" easier, as for those steps
to work, you need to know the identity of the shouter.
## Code After:
package shouty;
import java.util.*;
public class Shouty {
private final int MESSAGE_RANGE = 1000;
private Map<String, Coordinate> locations = new HashMap<String, Coordinate>();
private Map<String, String> messages = new HashMap<String, String>();
public void setLocation(String person, Coordinate location) {
locations.put(person, location);
}
public void shout(String person, String message) {
messages.put(person, message);
}
public Map<String, String> getMessagesHeardBy(String listener) {
HashMap<String, String> result = new HashMap<String, String>();
for (Map.Entry<String, String> entry: messages.entrySet()) {
result.put(entry.getKey(), entry.getValue());
}
return result;
}
}
|
// ... existing code ...
messages.put(person, message);
}
public Map<String, String> getMessagesHeardBy(String listener) {
HashMap<String, String> result = new HashMap<String, String>();
for (Map.Entry<String, String> entry: messages.entrySet()) {
result.put(entry.getKey(), entry.getValue());
}
return result;
}
}
// ... rest of the code ...
|
016d955319b6971fec42ac6ada1052f88d867cee
|
freepacktbook/__init__.py
|
freepacktbook/__init__.py
|
import os
from bs4 import BeautifulSoup
import requests
class FreePacktBook(object):
base_url = 'https://www.packtpub.com'
url = base_url + '/packt/offers/free-learning/'
def __init__(self, email=None, password=None):
self.session = requests.Session()
self.email = email
self.password = password
def claim_free_ebook(self):
response = self.session.post(self.url, {
'email': self.email,
'password': self.password,
'form_id': 'packt_user_login_form'})
parser = BeautifulSoup(response.text, 'html.parser')
claim_url = self.base_url + parser.find('div', {
'class': 'free-ebook'}).a['href']
response = self.session.get(claim_url)
assert response.status_code == 200
def claim_free_ebook():
client = FreePacktBook(
os.environ.get('PACKTPUB_EMAIL'), os.environ.get('PACKTPUB_PASSWORD'))
client.claim_free_ebook()
|
import os
from bs4 import BeautifulSoup
import requests
class FreePacktBook(object):
base_url = 'https://www.packtpub.com'
url = base_url + '/packt/offers/free-learning/'
def __init__(self, email=None, password=None):
self.session = requests.Session()
self.email = email
self.password = password
def claim_free_ebook(self):
response = self.session.post(self.url, {
'email': self.email,
'password': self.password,
'form_id': 'packt_user_login_form'})
parser = BeautifulSoup(response.text, 'html.parser')
claim_url = self.base_url + parser.find('div', {
'class': 'free-ebook'}).a['href']
response = self.session.get(claim_url)
assert response.status_code == 200
def get_book_details(self):
response = self.session.get(self.url)
parser = BeautifulSoup(response.text, 'html.parser')
summary = parser.find('div', {'class': 'dotd-main-book-summary'})
title = summary.find('div', {'class': 'dotd-title'}).getText().strip()
description = summary.find('div', {'class': None}).getText().strip()
main_book_image = parser.find('div', {'class': 'dotd-main-book-image'})
image_url = 'https:%s' % main_book_image.img['src']
url = self.base_url + main_book_image.a['href']
return {'title': title, 'description': description,
'url': url, 'image_url': image_url}
def claim_free_ebook():
client = FreePacktBook(
os.environ.get('PACKTPUB_EMAIL'), os.environ.get('PACKTPUB_PASSWORD'))
client.claim_free_ebook()
|
Add ability to get book details
|
Add ability to get book details
|
Python
|
mit
|
bogdal/freepacktbook
|
python
|
## Code Before:
import os
from bs4 import BeautifulSoup
import requests
class FreePacktBook(object):
base_url = 'https://www.packtpub.com'
url = base_url + '/packt/offers/free-learning/'
def __init__(self, email=None, password=None):
self.session = requests.Session()
self.email = email
self.password = password
def claim_free_ebook(self):
response = self.session.post(self.url, {
'email': self.email,
'password': self.password,
'form_id': 'packt_user_login_form'})
parser = BeautifulSoup(response.text, 'html.parser')
claim_url = self.base_url + parser.find('div', {
'class': 'free-ebook'}).a['href']
response = self.session.get(claim_url)
assert response.status_code == 200
def claim_free_ebook():
client = FreePacktBook(
os.environ.get('PACKTPUB_EMAIL'), os.environ.get('PACKTPUB_PASSWORD'))
client.claim_free_ebook()
## Instruction:
Add ability to get book details
## Code After:
import os
from bs4 import BeautifulSoup
import requests
class FreePacktBook(object):
base_url = 'https://www.packtpub.com'
url = base_url + '/packt/offers/free-learning/'
def __init__(self, email=None, password=None):
self.session = requests.Session()
self.email = email
self.password = password
def claim_free_ebook(self):
response = self.session.post(self.url, {
'email': self.email,
'password': self.password,
'form_id': 'packt_user_login_form'})
parser = BeautifulSoup(response.text, 'html.parser')
claim_url = self.base_url + parser.find('div', {
'class': 'free-ebook'}).a['href']
response = self.session.get(claim_url)
assert response.status_code == 200
def get_book_details(self):
response = self.session.get(self.url)
parser = BeautifulSoup(response.text, 'html.parser')
summary = parser.find('div', {'class': 'dotd-main-book-summary'})
title = summary.find('div', {'class': 'dotd-title'}).getText().strip()
description = summary.find('div', {'class': None}).getText().strip()
main_book_image = parser.find('div', {'class': 'dotd-main-book-image'})
image_url = 'https:%s' % main_book_image.img['src']
url = self.base_url + main_book_image.a['href']
return {'title': title, 'description': description,
'url': url, 'image_url': image_url}
def claim_free_ebook():
client = FreePacktBook(
os.environ.get('PACKTPUB_EMAIL'), os.environ.get('PACKTPUB_PASSWORD'))
client.claim_free_ebook()
|
// ... existing code ...
response = self.session.get(claim_url)
assert response.status_code == 200
def get_book_details(self):
response = self.session.get(self.url)
parser = BeautifulSoup(response.text, 'html.parser')
summary = parser.find('div', {'class': 'dotd-main-book-summary'})
title = summary.find('div', {'class': 'dotd-title'}).getText().strip()
description = summary.find('div', {'class': None}).getText().strip()
main_book_image = parser.find('div', {'class': 'dotd-main-book-image'})
image_url = 'https:%s' % main_book_image.img['src']
url = self.base_url + main_book_image.a['href']
return {'title': title, 'description': description,
'url': url, 'image_url': image_url}
def claim_free_ebook():
client = FreePacktBook(
// ... rest of the code ...
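A minimal usage sketch for the new method, assuming the package is importable as freepacktbook (the file shown is freepacktbook/__init__.py) and the same environment variables are set; output handling is illustrative:

import os

from freepacktbook import FreePacktBook

client = FreePacktBook(os.environ.get('PACKTPUB_EMAIL'),
                       os.environ.get('PACKTPUB_PASSWORD'))
details = client.get_book_details()
print(details['title'])
print(details['url'])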
|
f3fcddaf7aa4e081322db6779ce2ad6d7e0db10a
|
pytac/device.py
|
pytac/device.py
|
class Device(object):
"""A representation of a property of an element associated with a field.
Typically a control system will be used to set and get values on a
device.
"""
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
raise NotImplementedError()
def set_value(self, value):
"""Set the value on the device.
Args:
value (float): the value to set.
"""
raise NotImplementedError()
def get_value(self):
"""Read the value from the device.
Returns:
float: the value of the PV.
"""
raise NotImplementedError()
|
class Device(object):
"""A representation of a property of an element associated with a field.
Typically a control system will be used to set and get values on a
device.
"""
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
raise NotImplementedError()
def set_value(self, value):
"""Set the value on the device.
Args:
value (float): the value to set.
"""
raise NotImplementedError()
def get_value(self):
"""Read the value from the device.
Returns:
float: the value of the PV.
"""
raise NotImplementedError()
class BasicDevice(Device):
"""A basic implementation of the device class. This device does not have a
pv associated with it, nor does it interact with a simulator. In short
this device acts as simple storage for data that rarely changes, as it is
not affected by changes to other aspects of the accelerator.
"""
def __init__(self, value, enabled=True):
"""Args:
value (?): can be a number, string or a list of strings or numbers.
enabled (bool-like): Whether the device is enabled. May be a
PvEnabler object.
"""
self.value = value
self._enabled = enabled
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
return bool(self._enabled)
def set_value(self, value):
"""Set the value on the device.
Args:
value (?): the value to set.
"""
self.value = value
def get_value(self):
"""Read the value from the device.
Returns:
?: the value of the PV.
"""
return self.value
|
Add code for a BasicDevice class.
|
Add code for a BasicDevice class.
|
Python
|
apache-2.0
|
willrogers/pytac,willrogers/pytac
|
python
|
## Code Before:
class Device(object):
"""A representation of a property of an element associated with a field.
Typically a control system will be used to set and get values on a
device.
"""
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
raise NotImplementedError()
def set_value(self, value):
"""Set the value on the device.
Args:
value (float): the value to set.
"""
raise NotImplementedError()
def get_value(self):
"""Read the value from the device.
Returns:
float: the value of the PV.
"""
raise NotImplementedError()
## Instruction:
Add code for a BasicDevice class.
## Code After:
class Device(object):
"""A representation of a property of an element associated with a field.
Typically a control system will be used to set and get values on a
device.
"""
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
raise NotImplementedError()
def set_value(self, value):
"""Set the value on the device.
Args:
value (float): the value to set.
"""
raise NotImplementedError()
def get_value(self):
"""Read the value from the device.
Returns:
float: the value of the PV.
"""
raise NotImplementedError()
class BasicDevice(Device):
"""A basic implementation of the device class. This device does not have a
pv associated with it, nor does it interact with a simulator. In short
this device acts as simple storage for data that rarely changes, as it is
not affected by changes to other aspects of the accelerator.
"""
def __init__(self, value, enabled=True):
"""Args:
value (?): can be a number, string or a list of strings or numbers.
enabled (bool-like): Whether the device is enabled. May be a
PvEnabler object.
"""
self.value = value
self._enabled = enabled
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
return bool(self._enabled)
def set_value(self, value):
"""Set the value on the device.
Args:
value (?): the value to set.
"""
self.value = value
def get_value(self):
"""Read the value from the device.
Returns:
?: the value of the PV.
"""
return self.value
|
# ... existing code ...
Typically a control system will be used to set and get values on a
device.
"""
def is_enabled(self):
"""Whether the device is enabled.
# ... modified code ...
float: the value of the PV.
"""
raise NotImplementedError()
class BasicDevice(Device):
"""A basic implementation of the device class. This device does not have a
pv associated with it, nor does it interact with a simulator. In short
this device acts as simple storage for data that rarely changes, as it is
not affected by changes to other aspects of the accelerator.
"""
def __init__(self, value, enabled=True):
"""Args:
value (?): can be a number, string or a list of strings or numbers.
enabled (bool-like): Whether the device is enabled. May be a
PvEnabler object.
"""
self.value = value
self._enabled = enabled
def is_enabled(self):
"""Whether the device is enabled.
Returns:
bool: whether the device is enabled.
"""
return bool(self._enabled)
def set_value(self, value):
"""Set the value on the device.
Args:
value (?): the value to set.
"""
self.value = value
def get_value(self):
"""Read the value from the device.
Returns:
?: the value of the PV.
"""
return self.value
# ... rest of the code ...
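A short usage sketch for BasicDevice, assuming the module is importable as pytac.device (matching the file path above); the values are arbitrary:

from pytac.device import BasicDevice

device = BasicDevice(3.14)
assert device.is_enabled()          # enabled defaults to True
assert device.get_value() == 3.14

device.set_value([1.0, 2.0, 3.0])   # numbers, strings or lists are accepted
assert device.get_value() == [1.0, 2.0, 3.0]

disabled = BasicDevice('label', enabled=False)
assert not disabled.is_enabled()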
|
f1254e6116b22923ab6f988c6cf5dca91623c678
|
setup.py
|
setup.py
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='[email protected]',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='[email protected]',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
Allow users to run Step Stool as either `step-stool` or `stepstool`.
|
Allow users to run Step Stool as either `step-stool` or `stepstool`.
|
Python
|
mit
|
chriskrycho/step-stool,chriskrycho/step-stool
|
python
|
## Code Before:
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='[email protected]',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
## Instruction:
Allow users to run Step Stool as either `step-stool` or `stepstool`.
## Code After:
from setuptools import setup
requires = ['Markdown', 'PyRSS2Gen', 'Pygments', 'PyYAML >= 3.10', 'typogrify']
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
classifiers = [
'Environment :: Console',
'Development Status :: 1 - Planning',
'Intended Audience :: End Users/Desktop'
'License :: OSI Approved :: MIT License',
'Natural Language :: English'
'Operating System :: OS Independent'
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Topic :: Communications'
]
try:
import argparse
except ImportError:
requires.append('argparse')
README = open('README.md').read()
setup(
name='step-stool',
version='0.1',
url='http://step-stool.io',
description='A(nother) static site generator in Python',
author='Chris Krycho',
author_email='[email protected]',
packages=packages,
install_requires=requires,
entry_points=entry_points,
classifiers=classifiers
)
|
# ... existing code ...
packages = ['step_stool']
entry_points = {
'console_scripts': [
'stepstool = step_stool:main',
'step-stool = step_stool:main'
]
}
# ... rest of the code ...
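One detail worth flagging in both versions above: several classifier strings lack trailing commas, so Python's implicit string concatenation silently merges adjacent entries (for example 'Intended Audience :: End Users/Desktop' and the license line become a single string). A comma-separated sketch of the list as presumably intended:

classifiers = [
    'Environment :: Console',
    'Development Status :: 1 - Planning',
    'Intended Audience :: End Users/Desktop',
    'License :: OSI Approved :: MIT License',
    'Natural Language :: English',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 3.3',
    'Topic :: Communications',
]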
|
67b2729c1c2a7027be7ad7a9d641609e94769671
|
quickstart/python/autopilot/create-hello-world-samples/create_hello_world_samples.6.x.py
|
quickstart/python/autopilot/create-hello-world-samples/create_hello_world_samples.6.x.py
|
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
phrases = [
'hello',
'hi',
'Hello',
'Hi there'
]
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
# Replace 'UDXXX...' with the SID for the task you just created.
for phrase in phrases:
sample = client.autopilot \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks('UDXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.samples \
.create(language='en-us', tagged_text=phrase)
print(sample.sid)
|
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
phrases = [
'hello',
'hi',
'Hello',
'Hi there'
]
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
for phrase in phrases:
sample = client.autopilot \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks('hello-world') \
.samples \
.create(language='en-us', tagged_text=phrase)
print(sample.sid)
|
Update to use unique_name for task update
|
Update to use unique_name for task update
|
Python
|
mit
|
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets
|
python
|
## Code Before:
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
phrases = [
'hello',
'hi',
'Hello',
'Hi there'
]
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
# Replace 'UDXXX...' with the SID for the task you just created.
for phrase in phrases:
sample = client.autopilot \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks('UDXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.samples \
.create(language='en-us', tagged_text=phrase)
print(sample.sid)
## Instruction:
Update to use unique_name for task update
## Code After:
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
phrases = [
'hello',
'hi',
'Hello',
'Hi there'
]
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
for phrase in phrases:
sample = client.autopilot \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks('hello-world') \
.samples \
.create(language='en-us', tagged_text=phrase)
print(sample.sid)
|
# ... existing code ...
]
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
for phrase in phrases:
sample = client.autopilot \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.tasks('hello-world') \
.samples \
.create(language='en-us', tagged_text=phrase)
# ... rest of the code ...
|
3225c14ed1c3d09a68d6cde8af6d83d54a6f5f76
|
simple_history/__init__.py
|
simple_history/__init__.py
|
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
records_class = records_class or models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
if records_class is None:
records_class = models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
Change style of setting records_class default
|
Change style of setting records_class default
|
Python
|
bsd-3-clause
|
emergence/django-simple-history,luzfcb/django-simple-history,treyhunner/django-simple-history,pombredanne/django-simple-history,pombredanne/django-simple-history,treyhunner/django-simple-history,emergence/django-simple-history,luzfcb/django-simple-history
|
python
|
## Code Before:
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
records_class = records_class or models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
## Instruction:
Change style of setting records_class default
## Code After:
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
if records_class is None:
records_class = models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
# ... existing code ...
"""
from . import models
if model._meta.db_table not in models.registered_models:
if records_class is None:
records_class = models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
# ... rest of the code ...
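A hedged usage sketch for register(), which is the pattern this keyword handling supports: attaching history to a model you don't control, such as Django's built-in User; manager name and module fall back to the defaults described in the docstring:

from django.contrib.auth.models import User
from simple_history import register

# Creates a historical model for User and attaches a "history" manager.
register(User)

# After running migrations, past versions are reachable via that manager:
#   some_user.history.all()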
|
19da6c14a5063d3d0361b9b887fd0e4ed8d7a83d
|
nflpool/data/seasoninfo.py
|
nflpool/data/seasoninfo.py
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
Update SeasonInfo database table info
|
Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the season
|
Python
|
mit
|
prcutler/nflpool,prcutler/nflpool
|
python
|
## Code Before:
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
## Instruction:
Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the season
## Code After:
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
# ... existing code ...
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
# ... rest of the code ...
|
74b9cd392009d75c2b9679e4135000b6d50c2b22
|
src/edu/oakland/OUSoft/Main.java
|
src/edu/oakland/OUSoft/Main.java
|
package edu.oakland.OUSoft;
import edu.oakland.OUSoft.database.OUPeople;
public class Main {
public static void main(String[] args) {
OUPeople people = new OUPeople();
}
}
|
package edu.oakland.OUSoft;
import edu.oakland.OUSoft.database.OUPeople;
public class Main {
public static void main(String[] args) {
OUPeople people = new OUPeople();
TextInterface ti = new TextInterface(people);
ti.startup();
}
}
|
Update main class to start text interface
|
Update main class to start text interface
|
Java
|
isc
|
Deliquescence/cse231
|
java
|
## Code Before:
package edu.oakland.OUSoft;
import edu.oakland.OUSoft.database.OUPeople;
public class Main {
public static void main(String[] args) {
OUPeople people = new OUPeople();
}
}
## Instruction:
Update main class to start text interface
## Code After:
package edu.oakland.OUSoft;
import edu.oakland.OUSoft.database.OUPeople;
public class Main {
public static void main(String[] args) {
OUPeople people = new OUPeople();
TextInterface ti = new TextInterface(people);
ti.startup();
}
}
|
# ... existing code ...
public static void main(String[] args) {
OUPeople people = new OUPeople();
TextInterface ti = new TextInterface(people);
ti.startup();
}
}
# ... rest of the code ...
|
ab505406a414bf76f1921e6ab8c998ae59339228
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='[email protected]',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='[email protected]',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
Add new author Bumping commit and mention additional contributor to Shavar
|
Add new author
Bumping commit and mention additional contributor to Shavar
|
Python
|
mpl-2.0
|
mozilla-services/shavar,mozilla-services/shavar
|
python
|
## Code Before:
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='[email protected]',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
## Instruction:
Add new author
Bumping commit and mention additional contributor to Shavar
## Code After:
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='[email protected]',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
# ... existing code ...
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='[email protected]',
url='',
keywords='web pyramid pylons',
# ... rest of the code ...
|
ea6c57de01f420bdd344194e5529a0e91036c634
|
greenfan/management/commands/create-job-from-testspec.py
|
greenfan/management/commands/create-job-from-testspec.py
|
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
job = ts.create_job()
return 'Created job %d' % job.pk
|
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
|
Allow us to create both virtual and physical jobs
|
Allow us to create both virtual and physical jobs
|
Python
|
apache-2.0
|
sorenh/python-django-greenfan,sorenh/python-django-greenfan
|
python
|
## Code Before:
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
job = ts.create_job()
return 'Created job %d' % job.pk
## Instruction:
Allow us to create both virtual and physical jobs
## Code After:
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
|
# ... existing code ...
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
# ... rest of the code ...
|
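For context on how a 'physical' key ends up in **options at all, here is a hedged sketch of a Django management command that registers such a flag explicitly. It uses the modern argparse-style add_arguments API rather than the optparse-era handle(ts_id, **options) signature shown in the record above, and the flag name and messages are illustrative only.

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = 'Create a job from a test specification, optionally on physical hardware'

    def add_arguments(self, parser):
        parser.add_argument('ts_id')
        parser.add_argument('--physical', action='store_true')

    def handle(self, *args, **options):
        # With argparse, the key is always present and defaults to False.
        physical = options.get('physical', False)
        kind = 'physical' if physical else 'virtual'
        return 'Would create a %s job for spec %s' % (kind, options['ts_id'])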
c7c44817240d437344a0a13ba13164cde36d9b15
|
src/main/java/com/mesc/service/config/ApplicationContext.java
|
src/main/java/com/mesc/service/config/ApplicationContext.java
|
package com.mesc.service.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = { "com.mesc.service.controller" })
@Import({PersistenceContext.class})
public class ApplicationContext {
@Configuration
@PropertySource("classpath:application.properties")
static class ApplicationProperties {}
@Bean
PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
}
|
package com.mesc.service.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = { "com.mesc.service.controller", "com.mesc.service.services" })
@Import({PersistenceContext.class})
public class ApplicationContext {
@Configuration
@PropertySource("classpath:application.properties")
static class ApplicationProperties {}
@Bean
PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
}
|
Change on component scan to support AutoWired
|
Change on component scan to support AutoWired
|
Java
|
apache-2.0
|
hm-yap/service-mesc
|
java
|
## Code Before:
package com.mesc.service.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = { "com.mesc.service.controller" })
@Import({PersistenceContext.class})
public class ApplicationContext {
@Configuration
@PropertySource("classpath:application.properties")
static class ApplicationProperties {}
@Bean
PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
}
## Instruction:
Change on component scan to support AutoWired
## Code After:
package com.mesc.service.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = { "com.mesc.service.controller", "com.mesc.service.services" })
@Import({PersistenceContext.class})
public class ApplicationContext {
@Configuration
@PropertySource("classpath:application.properties")
static class ApplicationProperties {}
@Bean
PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
}
|
// ... existing code ...
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = { "com.mesc.service.controller", "com.mesc.service.services" })
@Import({PersistenceContext.class})
public class ApplicationContext {
@Configuration
// ... rest of the code ...
|
cc4a17db1e4ba81019ed312cbe324874430b9814
|
billybot/tests/test_billybot.py
|
billybot/tests/test_billybot.py
|
import time
import datetime
import unittest
from unittest.mock import patch, call
from billybot.billybot import MessageTriage
class TestMessageTriage(unittest.TestCase):
def setUp(self):
self.thread1 = MessageTriage('USERID1', 'user1', 'Warren', 'testchanl')
self.thread1.daemon = True
self.thread2 = MessageTriage('USERID2', 'user2', 'Markey', 'testchanl')
self.thread2.daemon = True
self.thread3 = MessageTriage('USERID3', 'user3', 'Capuano', 'testchanl')
self.thread3.daemon = True
def test_time_alive(self):
time.sleep(3)
time_alive = self.thread1.time_alive
# Checking that time alive is around 3 but it won't be 3
# exactly, so we check that it's between 2 and 4
self.assertTrue(time_alive > 2)
self.assertTrue(time_alive < 4)
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
print(self.thread1.start())
self.assertTrue(1 == 2)
|
import time
import datetime
import unittest
from unittest.mock import patch, call
from billybot.billybot import MessageTriage
class TestMessageTriage(unittest.TestCase):
def setUp(self):
self.thread1 = MessageTriage('USERID1', 'user1', 'Warren', 'testchanl')
self.thread1.daemon = True
self.thread2 = MessageTriage('USERID2', 'user2', 'Markey', 'testchanl')
self.thread2.daemon = True
self.thread3 = MessageTriage('USERID3', 'user3', 'Capuano', 'testchanl')
self.thread3.daemon = True
def test_time_alive(self):
time.sleep(3)
time_alive = self.thread1.time_alive
# Checking that time alive is around 3 but it won't be 3
# exactly, so we check that it's between 2 and 4
self.assertTrue(time_alive > 2)
self.assertTrue(time_alive < 4)
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
mock_run.time_delay = lambda delay: time.sleep(delay)
mock_run.time_delay(5)
self.thread1.start()
self.assertTrue(1 == 2)
|
Use a lambda to add a time delay function onto the mocked run method
|
Use a lambda to add a time delay function onto the mocked run method
|
Python
|
mit
|
mosegontar/billybot
|
python
|
## Code Before:
import time
import datetime
import unittest
from unittest.mock import patch, call
from billybot.billybot import MessageTriage
class TestMessageTriage(unittest.TestCase):
def setUp(self):
self.thread1 = MessageTriage('USERID1', 'user1', 'Warren', 'testchanl')
self.thread1.daemon = True
self.thread2 = MessageTriage('USERID2', 'user2', 'Markey', 'testchanl')
self.thread2.daemon = True
self.thread3 = MessageTriage('USERID3', 'user3', 'Capuano', 'testchanl')
self.thread3.daemon = True
def test_time_alive(self):
time.sleep(3)
time_alive = self.thread1.time_alive
# Checking that time alive is around 3 but it won't be 3
# exactly, so we check that it's between 2 and 4
self.assertTrue(time_alive > 2)
self.assertTrue(time_alive < 4)
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
print(self.thread1.start())
self.assertTrue(1 == 2)
## Instruction:
Use a lambda to add a time delay function onto the mocked run method
## Code After:
import time
import datetime
import unittest
from unittest.mock import patch, call
from billybot.billybot import MessageTriage
class TestMessageTriage(unittest.TestCase):
def setUp(self):
self.thread1 = MessageTriage('USERID1', 'user1', 'Warren', 'testchanl')
self.thread1.daemon = True
self.thread2 = MessageTriage('USERID2', 'user2', 'Markey', 'testchanl')
self.thread2.daemon = True
self.thread3 = MessageTriage('USERID3', 'user3', 'Capuano', 'testchanl')
self.thread3.daemon = True
def test_time_alive(self):
time.sleep(3)
time_alive = self.thread1.time_alive
# Checking that time alive is around 3 but it won't be 3
# exactly, so we check that it's between 2 and 4
self.assertTrue(time_alive > 2)
self.assertTrue(time_alive < 4)
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
mock_run.time_delay = lambda delay: time.sleep(delay)
mock_run.time_delay(5)
self.thread1.start()
self.assertTrue(1 == 2)
|
# ... existing code ...
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
mock_run.time_delay = lambda delay: time.sleep(delay)
mock_run.time_delay(5)
self.thread1.start()
self.assertTrue(1 == 2)
# ... rest of the code ...
|
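The test above hangs a plain lambda off the patched run method so the test body can simulate a delay while the mock keeps recording calls. A standalone illustration of that pattern, with purely illustrative names and a short sleep:

import time
from unittest.mock import MagicMock

mock_run = MagicMock(name='run')
# Attach an extra helper attribute; assigning replaces the auto-created child mock.
mock_run.time_delay = lambda delay: time.sleep(delay)

mock_run.time_delay(0.1)    # really sleeps, unlike calls to the mock itself
mock_run('worker-1')        # the mock still records this call as usual
print(mock_run.call_args)   # -> call('worker-1')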
037796d721cd0eec3ea779c2901ec8c62aaa5fc7
|
cmt/utils/run_dir.py
|
cmt/utils/run_dir.py
|
import os
class RunDir(object):
def __init__(self, dir, create=False):
self._run_dir = dir
self._create = create
def __enter__(self):
self._starting_dir = os.path.abspath(os.getcwd())
if self._create and not os.path.isdir(self._run_dir):
os.makedirs(self._run_dir)
os.chdir(self._run_dir)
def __exit__(self, type, value, traceback):
os.chdir(self._starting_dir)
def open_run_dir(dir, **kwds):
return RunDir(dir, **kwds)
|
import os
class RunDir(object):
def __init__(self, directory, create=False):
self._run_dir = directory
self._create = create
def __enter__(self):
self._starting_dir = os.path.abspath(os.getcwd())
if self._create and not os.path.isdir(self._run_dir):
os.makedirs(self._run_dir)
os.chdir(self._run_dir)
def __exit__(self, exception_type, value, traceback):
os.chdir(self._starting_dir)
def open_run_dir(directory, **kwds):
return RunDir(directory, **kwds)
|
Rename dir variable to directory.
|
Rename dir variable to directory.
|
Python
|
mit
|
csdms/coupling,csdms/coupling,csdms/pymt
|
python
|
## Code Before:
import os
class RunDir(object):
def __init__(self, dir, create=False):
self._run_dir = dir
self._create = create
def __enter__(self):
self._starting_dir = os.path.abspath(os.getcwd())
if self._create and not os.path.isdir(self._run_dir):
os.makedirs(self._run_dir)
os.chdir(self._run_dir)
def __exit__(self, type, value, traceback):
os.chdir(self._starting_dir)
def open_run_dir(dir, **kwds):
return RunDir(dir, **kwds)
## Instruction:
Rename dir variable to directory.
## Code After:
import os
class RunDir(object):
def __init__(self, directory, create=False):
self._run_dir = directory
self._create = create
def __enter__(self):
self._starting_dir = os.path.abspath(os.getcwd())
if self._create and not os.path.isdir(self._run_dir):
os.makedirs(self._run_dir)
os.chdir(self._run_dir)
def __exit__(self, exception_type, value, traceback):
os.chdir(self._starting_dir)
def open_run_dir(directory, **kwds):
return RunDir(directory, **kwds)
|
# ... existing code ...
import os
class RunDir(object):
def __init__(self, directory, create=False):
self._run_dir = directory
self._create = create
def __enter__(self):
# ... modified code ...
os.makedirs(self._run_dir)
os.chdir(self._run_dir)
def __exit__(self, exception_type, value, traceback):
os.chdir(self._starting_dir)
def open_run_dir(directory, **kwds):
return RunDir(directory, **kwds)
# ... rest of the code ...
|
a2e3f0590d5bd25993be5291c058c722896aa773
|
tests/test_utils.py
|
tests/test_utils.py
|
import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main()
|
import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main()
|
Fix broken utils test with seed
|
Fix broken utils test with seed
|
Python
|
apache-2.0
|
HazyResearch/metal,HazyResearch/metal
|
python
|
## Code Before:
import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix broken utils test with seed
## Code After:
import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
// ... rest of the code ...
|
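The flakiness being fixed comes from random tie-breaking: an argmax over [2, 1, 2] can legitimately return index 0 or 2, so the set of results only stabilises once the NumPy RNG is seeded. A simplified stand-in follows (metal's actual rargmax may be implemented differently):

import numpy as np

def rargmax(x):
    """Argmax with ties broken at random (simplified stand-in)."""
    return np.random.choice(np.flatnonzero(x == x.max()))

x = np.array([2, 1, 2])
np.random.seed(1)
picks = sorted(set(rargmax(x) for _ in range(10)))
# With a fixed seed the draw sequence is repeatable, so over ten tie-breaks
# the collected indices (typically [0, 2]) no longer vary between test runs.
print(picks)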
7bb8da851d07f1628593eb9fdac905b28dff973b
|
appctx/src/main/java/splitties/init/DirectBootCtx.kt
|
appctx/src/main/java/splitties/init/DirectBootCtx.kt
|
/*
* Copyright (c) 2017. Louis Cognault Ayeva Derman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package splitties.init
import android.content.Context
import android.os.Build.VERSION.SDK_INT
import android.support.annotation.RequiresApi
/**
* Lazily creates a device protected storage Context on Android N+ devices,
* or initializes itself to [appCtx] if the device runs Android M or an older version.
* See [Direct Boot documentation](https://developer.android.com/training/articles/direct-boot.html)
* to learn more.
*/
inline val directBootCtx: Context get() = if (SDK_INT < 24) appCtx else deviceProtectedStorageCtx.value
@PublishedApi
@RequiresApi(24)
internal val deviceProtectedStorageCtx = lazy { appCtx.createDeviceProtectedStorageContext() }
|
/*
* Copyright (c) 2017. Louis Cognault Ayeva Derman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package splitties.init
import android.content.Context
import android.os.Build.VERSION.SDK_INT
import android.os.Build.VERSION_CODES.N
import android.support.annotation.RequiresApi
/**
* Lazily creates a device protected storage Context on Android N+ devices,
* or initializes itself to [appCtx] if the device runs Android M or an older version.
* See [Direct Boot documentation](https://developer.android.com/training/articles/direct-boot.html)
* to learn more.
*/
inline val directBootCtx: Context get() = if (SDK_INT < N) appCtx else deviceProtectedStorageCtx.value
@PublishedApi
@RequiresApi(N)
internal val deviceProtectedStorageCtx = lazy { appCtx.createDeviceProtectedStorageContext() }
|
Replace 24 with N, because Nougat tastes better than digits
|
Replace 24 with N, because Nougat tastes better than digits
|
Kotlin
|
apache-2.0
|
LouisCAD/Splitties,LouisCAD/Splitties,LouisCAD/Splitties
|
kotlin
|
## Code Before:
/*
* Copyright (c) 2017. Louis Cognault Ayeva Derman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package splitties.init
import android.content.Context
import android.os.Build.VERSION.SDK_INT
import android.support.annotation.RequiresApi
/**
* Lazily creates a device protected storage Context on Android N+ devices,
* or initializes itself to [appCtx] if the device runs Android M or an older version.
* See [Direct Boot documentation](https://developer.android.com/training/articles/direct-boot.html)
* to learn more.
*/
inline val directBootCtx: Context get() = if (SDK_INT < 24) appCtx else deviceProtectedStorageCtx.value
@PublishedApi
@RequiresApi(24)
internal val deviceProtectedStorageCtx = lazy { appCtx.createDeviceProtectedStorageContext() }
## Instruction:
Replace 24 with N, because Nougat tastes better than digits
## Code After:
/*
* Copyright (c) 2017. Louis Cognault Ayeva Derman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package splitties.init
import android.content.Context
import android.os.Build.VERSION.SDK_INT
import android.os.Build.VERSION_CODES.N
import android.support.annotation.RequiresApi
/**
* Lazily creates a device protected storage Context on Android N+ devices,
* or initializes itself to [appCtx] if the device runs Android M or an older version.
* See [Direct Boot documentation](https://developer.android.com/training/articles/direct-boot.html)
* to learn more.
*/
inline val directBootCtx: Context get() = if (SDK_INT < N) appCtx else deviceProtectedStorageCtx.value
@PublishedApi
@RequiresApi(N)
internal val deviceProtectedStorageCtx = lazy { appCtx.createDeviceProtectedStorageContext() }
|
// ... existing code ...
import android.content.Context
import android.os.Build.VERSION.SDK_INT
import android.os.Build.VERSION_CODES.N
import android.support.annotation.RequiresApi
/**
// ... modified code ...
* See [Direct Boot documentation](https://developer.android.com/training/articles/direct-boot.html)
* to learn more.
*/
inline val directBootCtx: Context get() = if (SDK_INT < N) appCtx else deviceProtectedStorageCtx.value
@PublishedApi
@RequiresApi(N)
internal val deviceProtectedStorageCtx = lazy { appCtx.createDeviceProtectedStorageContext() }
// ... rest of the code ...
|
c9da64ac1c90abdee8fc72488a4bef58a95aa7c6
|
biwako/bin/fields/compounds.py
|
biwako/bin/fields/compounds.py
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
return values
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
Fix List to use the new decoding system
|
Fix List to use the new decoding system
|
Python
|
bsd-3-clause
|
gulopine/steel
|
python
|
## Code Before:
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
return values
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
## Instruction:
Fix List to use the new decoding system
## Code After:
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
# ... existing code ...
value_bytes = b''
values = []
if self.instance:
instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
# ... rest of the code ...
|
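The fix above routes List.read through the same exception-based decoding channel as SubStructure: instead of returning values, read() raises FullyDecoded carrying both the raw bytes and the parsed result. The generic sketch below mirrors that control flow only; it is not the real biwako implementation and the field logic is deliberately trivial.

import io

class FullyDecoded(Exception):
    def __init__(self, raw_bytes, value):
        self.bytes = raw_bytes
        self.value = value

def read_list(source, size):
    raw = b''
    values = []
    for _ in range(size):
        chunk = source.read(1)
        raw += chunk
        values.append(chunk[0])
    # Signal completion out-of-band, handing back bytes and decoded values together.
    raise FullyDecoded(raw, values)

try:
    read_list(io.BytesIO(b'\x01\x02\x03'), 3)
except FullyDecoded as decoded:
    print(decoded.bytes, decoded.value)   # b'\x01\x02\x03' [1, 2, 3]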
a2b9777cc7ec4d606d3a33400c4f242bc9177fab
|
awx/main/migrations/0004_rbac_migrations.py
|
awx/main/migrations/0004_rbac_migrations.py
|
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
|
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
Add migrate_users and migrate_projects to our migration plan
|
Add migrate_users and migrate_projects to our migration plan
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx
|
python
|
## Code Before:
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
]
## Instruction:
Add migrate_users and migrate_projects to our migration plan
## Code After:
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0003_rbac_changes'),
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
|
...
]
operations = [
migrations.RunPython(rbac.migrate_users),
migrations.RunPython(rbac.migrate_organization),
migrations.RunPython(rbac.migrate_credential),
migrations.RunPython(rbac.migrate_team),
migrations.RunPython(rbac.migrate_inventory),
migrations.RunPython(rbac.migrate_projects),
]
...
|
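The rbac.migrate_users / migrate_projects callables referenced above are pre-existing helpers in awx; the sketch below is not that code, just a generic reminder of the shape a RunPython step takes — each callable receives (apps, schema_editor) and should look models up through the historical app registry. The model lookup and loop body here are placeholders.

from django.db import migrations

def migrate_users(apps, schema_editor):
    User = apps.get_model('auth', 'User')   # historical model, not the live class
    for user in User.objects.all():
        pass   # e.g. assign the new role objects here

class Migration(migrations.Migration):
    dependencies = [('main', '0003_rbac_changes')]
    operations = [
        # The second argument supplies a no-op reverse so the migration stays reversible.
        migrations.RunPython(migrate_users, migrations.RunPython.noop),
    ]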
208d421c4f32fd8fd7dfccedfb18f36a0444168f
|
hw/mcu/stm/stm32f4xx/include/mcu/cmsis_nvic.h
|
hw/mcu/stm/stm32f4xx/include/mcu/cmsis_nvic.h
|
/* mbed Microcontroller Library - cmsis_nvic
* Copyright (c) 2009-2011 ARM Limited. All rights reserved.
*
* CMSIS-style functionality to support dynamic vectors
*/
#ifndef MBED_CMSIS_NVIC_H
#define MBED_CMSIS_NVIC_H
#include <stdint.h>
#if defined(STM32F407xx)
#define MCU_NUM_PERIPH_VECTORS 82
#elif defined(STM32F401xE)
#define MCU_NUM_PERIPH_VECTORS 85
#elif defined(STM32F427xx) || defined(STM32F429xx)
#define MCU_NUM_PERIPH_VECTORS 91
#elif defined(STM32F413xx)
#define MCU_NUM_PERIPH_VECTORS 102
#else
#error "Number of peripheral vectors not defined for this MCU."
#endif
#define NVIC_USER_IRQ_OFFSET 16
#define NVIC_NUM_VECTORS (16 + MCU_NUM_PERIPH_VECTORS)
#include "stm32f4xx.h"
#ifdef __cplusplus
extern "C" {
#endif
void NVIC_Relocate(void);
void NVIC_SetVector(IRQn_Type IRQn, uint32_t vector);
uint32_t NVIC_GetVector(IRQn_Type IRQn);
#ifdef __cplusplus
}
#endif
#endif
|
/* mbed Microcontroller Library - cmsis_nvic
* Copyright (c) 2009-2011 ARM Limited. All rights reserved.
*
* CMSIS-style functionality to support dynamic vectors
*/
#ifndef MBED_CMSIS_NVIC_H
#define MBED_CMSIS_NVIC_H
#include <stdint.h>
#if defined(STM32F401xE) || defined(STM32F407xx)
#define MCU_NUM_PERIPH_VECTORS 82
#elif defined(STM32F427xx) || defined(STM32F429xx)
#define MCU_NUM_PERIPH_VECTORS 91
#elif defined(STM32F413xx)
#define MCU_NUM_PERIPH_VECTORS 102
#else
#error "Number of peripheral vectors not defined for this MCU."
#endif
#define NVIC_USER_IRQ_OFFSET 16
#define NVIC_NUM_VECTORS (16 + MCU_NUM_PERIPH_VECTORS)
#include "stm32f4xx.h"
#ifdef __cplusplus
extern "C" {
#endif
void NVIC_Relocate(void);
void NVIC_SetVector(IRQn_Type IRQn, uint32_t vector);
uint32_t NVIC_GetVector(IRQn_Type IRQn);
#ifdef __cplusplus
}
#endif
#endif
|
Fix number of peripheral vectors for stm32f401xE
|
Fix number of peripheral vectors for stm32f401xE
|
C
|
apache-2.0
|
mlaz/mynewt-core,andrzej-kaczmarek/apache-mynewt-core,andrzej-kaczmarek/apache-mynewt-core,andrzej-kaczmarek/incubator-mynewt-core,mlaz/mynewt-core,andrzej-kaczmarek/incubator-mynewt-core,andrzej-kaczmarek/incubator-mynewt-core,mlaz/mynewt-core,andrzej-kaczmarek/apache-mynewt-core,mlaz/mynewt-core,andrzej-kaczmarek/incubator-mynewt-core,andrzej-kaczmarek/apache-mynewt-core,andrzej-kaczmarek/incubator-mynewt-core,mlaz/mynewt-core
|
c
|
## Code Before:
/* mbed Microcontroller Library - cmsis_nvic
* Copyright (c) 2009-2011 ARM Limited. All rights reserved.
*
* CMSIS-style functionality to support dynamic vectors
*/
#ifndef MBED_CMSIS_NVIC_H
#define MBED_CMSIS_NVIC_H
#include <stdint.h>
#if defined(STM32F407xx)
#define MCU_NUM_PERIPH_VECTORS 82
#elif defined(STM32F401xE)
#define MCU_NUM_PERIPH_VECTORS 85
#elif defined(STM32F427xx) || defined(STM32F429xx)
#define MCU_NUM_PERIPH_VECTORS 91
#elif defined(STM32F413xx)
#define MCU_NUM_PERIPH_VECTORS 102
#else
#error "Number of peripheral vectors not defined for this MCU."
#endif
#define NVIC_USER_IRQ_OFFSET 16
#define NVIC_NUM_VECTORS (16 + MCU_NUM_PERIPH_VECTORS)
#include "stm32f4xx.h"
#ifdef __cplusplus
extern "C" {
#endif
void NVIC_Relocate(void);
void NVIC_SetVector(IRQn_Type IRQn, uint32_t vector);
uint32_t NVIC_GetVector(IRQn_Type IRQn);
#ifdef __cplusplus
}
#endif
#endif
## Instruction:
Fix number of peripheral vectors for stm32f401xE
## Code After:
/* mbed Microcontroller Library - cmsis_nvic
* Copyright (c) 2009-2011 ARM Limited. All rights reserved.
*
* CMSIS-style functionality to support dynamic vectors
*/
#ifndef MBED_CMSIS_NVIC_H
#define MBED_CMSIS_NVIC_H
#include <stdint.h>
#if defined(STM32F401xE) || defined(STM32F407xx)
#define MCU_NUM_PERIPH_VECTORS 82
#elif defined(STM32F427xx) || defined(STM32F429xx)
#define MCU_NUM_PERIPH_VECTORS 91
#elif defined(STM32F413xx)
#define MCU_NUM_PERIPH_VECTORS 102
#else
#error "Number of peripheral vectors not defined for this MCU."
#endif
#define NVIC_USER_IRQ_OFFSET 16
#define NVIC_NUM_VECTORS (16 + MCU_NUM_PERIPH_VECTORS)
#include "stm32f4xx.h"
#ifdef __cplusplus
extern "C" {
#endif
void NVIC_Relocate(void);
void NVIC_SetVector(IRQn_Type IRQn, uint32_t vector);
uint32_t NVIC_GetVector(IRQn_Type IRQn);
#ifdef __cplusplus
}
#endif
#endif
|
// ... existing code ...
#include <stdint.h>
#if defined(STM32F401xE) || defined(STM32F407xx)
#define MCU_NUM_PERIPH_VECTORS 82
#elif defined(STM32F427xx) || defined(STM32F429xx)
#define MCU_NUM_PERIPH_VECTORS 91
#elif defined(STM32F413xx)
// ... rest of the code ...
|
bbb4496a99a5c65218b12c56de01c12ab83a1056
|
demo/recent_questions.py
|
demo/recent_questions.py
|
from __future__ import print_function
# Same directory hack
import sys
sys.path.append('.')
sys.path.append('..')
try:
get_input = raw_input
except NameError:
get_input = input
user_api_key = get_input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
sys.stdout.write('Loading...')
sys.stdout.flush()
questions = so.recent_questions(pagesize=10, filter='_b')
print('\r # vote ans view')
cur = 1
for question in questions:
print('%2d %3d %3d %3d \t%s' % (cur, question.score, len(question.answers), question.view_count, question.title))
cur += 1
num = int(get_input('Question no.: '))
qu = questions[num - 1]
print('--- %s' % qu.title)
print('%d votes, %d answers, %d views.' % (qu.score, len(qu.answers), qu.view_count))
print('Tagged: ' + ', '.join(qu.tags))
print()
print(qu.body[:250] + ('...' if len(qu.body) > 250 else ''))
|
from __future__ import print_function
from six.moves import input
# Same directory hack
import sys
sys.path.append('.')
sys.path.append('..')
user_api_key = input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
sys.stdout.write('Loading...')
sys.stdout.flush()
questions = so.recent_questions(pagesize=10, filter='_b')
print('\r # vote ans view')
cur = 1
for question in questions:
print('%2d %3d %3d %3d \t%s' % (cur, question.score, len(question.answers), question.view_count, question.title))
cur += 1
num = int(get_input('Question no.: '))
qu = questions[num - 1]
print('--- %s' % qu.title)
print('%d votes, %d answers, %d views.' % (qu.score, len(qu.answers), qu.view_count))
print('Tagged: ' + ', '.join(qu.tags))
print()
print(qu.body[:250] + ('...' if len(qu.body) > 250 else ''))
|
Use six function for input() in recent questions demo
|
Use six function for input() in recent questions demo
|
Python
|
bsd-3-clause
|
Khilo84/Py-StackExchange,lucjon/Py-StackExchange,damanjitsingh/StackExchange-python-
|
python
|
## Code Before:
from __future__ import print_function
# Same directory hack
import sys
sys.path.append('.')
sys.path.append('..')
try:
get_input = raw_input
except NameError:
get_input = input
user_api_key = get_input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
sys.stdout.write('Loading...')
sys.stdout.flush()
questions = so.recent_questions(pagesize=10, filter='_b')
print('\r # vote ans view')
cur = 1
for question in questions:
print('%2d %3d %3d %3d \t%s' % (cur, question.score, len(question.answers), question.view_count, question.title))
cur += 1
num = int(get_input('Question no.: '))
qu = questions[num - 1]
print('--- %s' % qu.title)
print('%d votes, %d answers, %d views.' % (qu.score, len(qu.answers), qu.view_count))
print('Tagged: ' + ', '.join(qu.tags))
print()
print(qu.body[:250] + ('...' if len(qu.body) > 250 else ''))
## Instruction:
Use six function for input() in recent questions demo
## Code After:
from __future__ import print_function
from six.moves import input
# Same directory hack
import sys
sys.path.append('.')
sys.path.append('..')
user_api_key = input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
sys.stdout.write('Loading...')
sys.stdout.flush()
questions = so.recent_questions(pagesize=10, filter='_b')
print('\r # vote ans view')
cur = 1
for question in questions:
print('%2d %3d %3d %3d \t%s' % (cur, question.score, len(question.answers), question.view_count, question.title))
cur += 1
num = int(get_input('Question no.: '))
qu = questions[num - 1]
print('--- %s' % qu.title)
print('%d votes, %d answers, %d views.' % (qu.score, len(qu.answers), qu.view_count))
print('Tagged: ' + ', '.join(qu.tags))
print()
print(qu.body[:250] + ('...' if len(qu.body) > 250 else ''))
|
...
from __future__ import print_function
from six.moves import input
# Same directory hack
import sys
...
sys.path.append('.')
sys.path.append('..')
user_api_key = input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
...
|
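The six.moves import used above is the usual way to paper over the raw_input/input split: on Python 2 six.moves.input maps to raw_input, on Python 3 to the builtin input, so the explicit try/except shim can go. A minimal illustration (prompt text is arbitrary):

from six.moves import input   # raw_input on Python 2, builtin input on Python 3

site = input('Site to query (Return for stackoverflow): ') or 'stackoverflow'
print('Using site:', site)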
d411e2058053c5607b2bb49002cab0e110683624
|
setup.py
|
setup.py
|
import os
from github_backup import __version__
try:
from setuptools import setup
setup # workaround for pyflakes issue #13
except ImportError:
from distutils.core import setup
# Hack to prevent stupid TypeError: 'NoneType' object is not callable error on
# exit of python setup.py test # in multiprocessing/util.py _exit_function when
# running python setup.py test (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
multiprocessing
except ImportError:
pass
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='[email protected]',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='[email protected]',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
Remove python 2 specific import logic
|
Remove python 2 specific import logic
|
Python
|
mit
|
josegonzalez/python-github-backup,josegonzalez/python-github-backup
|
python
|
## Code Before:
import os
from github_backup import __version__
try:
from setuptools import setup
setup # workaround for pyflakes issue #13
except ImportError:
from distutils.core import setup
# Hack to prevent stupid TypeError: 'NoneType' object is not callable error on
# exit of python setup.py test # in multiprocessing/util.py _exit_function when
# running python setup.py test (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
multiprocessing
except ImportError:
pass
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='[email protected]',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
## Instruction:
Remove python 2 specific import logic
## Code After:
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='[email protected]',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
# ... existing code ...
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
# ... rest of the code ...
|
3313382ec12352d9c7f2458cd293ed9f901aa38f
|
webkit/glue/form_data.h
|
webkit/glue/form_data.h
|
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WEBKIT_GLUE_FORM_DATA_H__
#define WEBKIT_GLUE_FORM_DATA_H__
#include <vector>
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "webkit/glue/form_field.h"
namespace webkit_glue {
// Holds information about a form to be filled and/or submitted.
struct FormData {
// The name of the form.
string16 name;
// GET or POST.
string16 method;
// The URL (minus query parameters) containing the form.
GURL origin;
// The action target of the form.
GURL action;
// true if this form was submitted by a user gesture and not javascript.
bool user_submitted;
// A vector of all the input fields in the form.
std::vector<FormField> fields;
// Used by FormStructureTest.
inline bool operator==(const FormData& form) const {
return (name == form.name &&
StringToLowerASCII(method) == StringToLowerASCII(form.method) &&
origin == form.origin &&
action == form.action &&
user_submitted == form.user_submitted &&
fields == form.fields);
}
};
} // namespace webkit_glue
#endif // WEBKIT_GLUE_FORM_DATA_H__
|
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WEBKIT_GLUE_FORM_DATA_H__
#define WEBKIT_GLUE_FORM_DATA_H__
#include <vector>
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "webkit/glue/form_field.h"
namespace webkit_glue {
// Holds information about a form to be filled and/or submitted.
struct FormData {
// The name of the form.
string16 name;
// GET or POST.
string16 method;
// The URL (minus query parameters) containing the form.
GURL origin;
// The action target of the form.
GURL action;
// true if this form was submitted by a user gesture and not javascript.
bool user_submitted;
// A vector of all the input fields in the form.
std::vector<FormField> fields;
FormData() : user_submitted(false) {}
// Used by FormStructureTest.
inline bool operator==(const FormData& form) const {
return (name == form.name &&
StringToLowerASCII(method) == StringToLowerASCII(form.method) &&
origin == form.origin &&
action == form.action &&
user_submitted == form.user_submitted &&
fields == form.fields);
}
};
} // namespace webkit_glue
#endif // WEBKIT_GLUE_FORM_DATA_H__
|
Add a default constructor for FormData. There are too many places that create FormDatas, and we shouldn't need to initialize user_submitted for each call site.
|
AutoFill: Add a default constructor for FormData. There are too many places
that create FormDatas, and we shouldn't need to initialize user_submitted for
each call site.
BUG=50423
TEST=none
Review URL: http://codereview.chromium.org/3074023
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@54641 0039d316-1c4b-4281-b951-d872f2087c98
|
C
|
bsd-3-clause
|
rogerwang/chromium,anirudhSK/chromium,anirudhSK/chromium,rogerwang/chromium,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,mogoweb/chromium-crosswalk,M4sse/chromium.src,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,rogerwang/chromium,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,rogerwang/chromium,timopulkkinen/BubbleFish,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,anirudhSK/chromium,ltilve/chromium,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,nacl-webkit/chrome_deps,hujiajie/pa-chromium,patrickm/chromium.src,Just-D/chromium-1,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,Jonekee/chromium.src,robclark/chromium,chuan9/chromium-crosswalk,robclark/chromium,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,anirudhSK/chromium,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,markYoungH/chromium.src,dednal/chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,ltilve/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,dushu1203/chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,keishi/chromium,ChromiumWebApps/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,Just-D/chromium-1,jaruba/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,rogerwang/chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,keishi/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,keishi/chromium,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,keishi/chromium,junmin-zhu/chromium-rivertrail,dushu1203/chromium.src,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,Jonekee/chromium.src,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,robclark/chromium,hg
l888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,robclark/chromium,nacl-webkit/chrome_deps,littlstar/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,keishi/chromium,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,ltilve/chromium,M4sse/chromium.src,keishi/chromium,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,dednal/chromium.src,ltilve/chromium,hujiajie/pa-chromium,hgl888/chromium-crosswalk,patrickm/chromium.src,dednal/chromium.src,jaruba/chromium.src,zcbenz/cefode-chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,ltilve/chromium,dushu1203/chromium.src,keishi/chromium,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Just-D/chromium-1,nacl-webkit/chrome_deps,jaruba/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,Chilledheart/chromium,M4sse/chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,hgl888/chromium-crosswalk-efl,keishi/chromium,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,ltilve/chromium,jaruba/chromium.src,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,robclark/chromium,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,anirudhSK/chromium,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk-efl,ltilve/chromium,hujiajie/pa-chromium,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,rogerwang/chromium,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,jaruba/chromium.src,littlstar/chromium.src,zcbenz/cefode-chromium,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,ChromiumWebApps/ch
romium,anirudhSK/chromium,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,robclark/chromium,Just-D/chromium-1,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,ondra-novak/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,dednal/chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,patrickm/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,rogerwang/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,keishi/chromium,fujunwei/chromium-crosswalk,robclark/chromium,patrickm/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,hujiajie/pa-chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,dednal/chromium.src,junmin-zhu/chromium-rivertrail,rogerwang/chromium,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,robclark/chromium,rogerwang/chromium,ltilve/chromium,patrickm/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,rogerwang/chromium,patrickm/chromium.src,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,robclark/chromium,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,Jonekee/chromium.src,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,robclark/chromium,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,keishi/chromium,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,dushu1203/chromium.src,keishi/chromium,axinging/chromium-crosswalk,Chilledheart/chromium,mogoweb/chromium-crosswalk,Just-D/chromium-1
|
c
|
## Code Before:
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WEBKIT_GLUE_FORM_DATA_H__
#define WEBKIT_GLUE_FORM_DATA_H__
#include <vector>
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "webkit/glue/form_field.h"
namespace webkit_glue {
// Holds information about a form to be filled and/or submitted.
struct FormData {
// The name of the form.
string16 name;
// GET or POST.
string16 method;
// The URL (minus query parameters) containing the form.
GURL origin;
// The action target of the form.
GURL action;
// true if this form was submitted by a user gesture and not javascript.
bool user_submitted;
// A vector of all the input fields in the form.
std::vector<FormField> fields;
// Used by FormStructureTest.
inline bool operator==(const FormData& form) const {
return (name == form.name &&
StringToLowerASCII(method) == StringToLowerASCII(form.method) &&
origin == form.origin &&
action == form.action &&
user_submitted == form.user_submitted &&
fields == form.fields);
}
};
} // namespace webkit_glue
#endif // WEBKIT_GLUE_FORM_DATA_H__
## Instruction:
AutoFill: Add a default constructor for FormData. There are too many places
that create FormDatas, and we shouldn't need to initialize user_submitted for
each call site.
BUG=50423
TEST=none
Review URL: http://codereview.chromium.org/3074023
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@54641 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WEBKIT_GLUE_FORM_DATA_H__
#define WEBKIT_GLUE_FORM_DATA_H__
#include <vector>
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "webkit/glue/form_field.h"
namespace webkit_glue {
// Holds information about a form to be filled and/or submitted.
struct FormData {
// The name of the form.
string16 name;
// GET or POST.
string16 method;
// The URL (minus query parameters) containing the form.
GURL origin;
// The action target of the form.
GURL action;
// true if this form was submitted by a user gesture and not javascript.
bool user_submitted;
// A vector of all the input fields in the form.
std::vector<FormField> fields;
FormData() : user_submitted(false) {}
// Used by FormStructureTest.
inline bool operator==(const FormData& form) const {
return (name == form.name &&
StringToLowerASCII(method) == StringToLowerASCII(form.method) &&
origin == form.origin &&
action == form.action &&
user_submitted == form.user_submitted &&
fields == form.fields);
}
};
} // namespace webkit_glue
#endif // WEBKIT_GLUE_FORM_DATA_H__
|
// ... existing code ...
// A vector of all the input fields in the form.
std::vector<FormField> fields;
FormData() : user_submitted(false) {}
// Used by FormStructureTest.
inline bool operator==(const FormData& form) const {
return (name == form.name &&
// ... rest of the code ...
|
4d1444e2f2a455e691342a82f0e116e210593411
|
s01/c01.py
|
s01/c01.py
|
"""Set 01 - Challenge 01."""
import base64
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = b'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded byte string."""
hex_data = bytearray.fromhex(hex_string)
# Strip trailing newline
return base64.encodebytes(hex_data)[:-1]
assert hex2b64(hex_string) == b64_string
|
"""Set 01 - Challenge 01."""
import binascii
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded string."""
return binascii.b2a_base64(binascii.a2b_hex(hex_string)).strip()
assert hex2b64(hex_string) == b64_string
|
Revert "Updated function to work on bytes rather than binascii functions."
|
Revert "Updated function to work on bytes rather than binascii functions."
This reverts commit 25176b64aed599059e4b552fbd76c5f4bc28434e.
|
Python
|
mit
|
sornars/matasano-challenges-py
|
python
|
## Code Before:
"""Set 01 - Challenge 01."""
import base64
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = b'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded byte string."""
hex_data = bytearray.fromhex(hex_string)
# Strip trailing newline
return base64.encodebytes(hex_data)[:-1]
assert hex2b64(hex_string) == b64_string
## Instruction:
Revert "Updated function to work on bytes rather than binascii functions."
This reverts commit 25176b64aed599059e4b552fbd76c5f4bc28434e.
## Code After:
"""Set 01 - Challenge 01."""
import binascii
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded string."""
return binascii.b2a_base64(binascii.a2b_hex(hex_string)).strip()
assert hex2b64(hex_string) == b64_string
|
# ... existing code ...
"""Set 01 - Challenge 01."""
import binascii
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded string."""
return binascii.b2a_base64(binascii.a2b_hex(hex_string)).strip()
assert hex2b64(hex_string) == b64_string
# ... rest of the code ...
|
38bd32fdfa345799e510ee75021293c124a4d21c
|
api/base/settings/__init__.py
|
api/base/settings/__init__.py
|
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
import os
import warnings
import itertools
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
'copy local-dist.py to local.py?', ImportWarning)
if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
from . import local
from . import defaults
for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)
def load_origins_whitelist():
global ORIGINS_WHITELIST
from osf.models import Institution, PreprintProvider
institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
institution.domains
for institution in Institution.find()
]))
preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))
ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
|
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
import os
from urlparse import urlparse
import warnings
import itertools
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
'copy local-dist.py to local.py?', ImportWarning)
if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
from . import local
from . import defaults
for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)
def load_origins_whitelist():
global ORIGINS_WHITELIST
from osf.models import Institution, PreprintProvider
institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
institution.domains
for institution in Institution.find()
]))
preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))
ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
|
Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domai ns are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.
|
Fix whitelist construction
-Note: institutions are schemeless, causing furl and urlparse
to parse the domain as `path`. PrePriPro domai ns are validated,
so they necessarily have a scheme, causing the domain to end up
in `netloc`.
-Do some ugly magic to get only the domain.
|
Python
|
apache-2.0
|
caneruguz/osf.io,chennan47/osf.io,aaxelb/osf.io,crcresearch/osf.io,mattclark/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,felliott/osf.io,adlius/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,adlius/osf.io,mattclark/osf.io,brianjgeiger/osf.io,erinspace/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,sloria/osf.io,laurenrevere/osf.io,leb2dg/osf.io,aaxelb/osf.io,pattisdr/osf.io,binoculars/osf.io,caneruguz/osf.io,leb2dg/osf.io,baylee-d/osf.io,cslzchen/osf.io,saradbowman/osf.io,adlius/osf.io,binoculars/osf.io,chrisseto/osf.io,chrisseto/osf.io,leb2dg/osf.io,TomBaxter/osf.io,pattisdr/osf.io,icereval/osf.io,chennan47/osf.io,icereval/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,sloria/osf.io,adlius/osf.io,caneruguz/osf.io,TomBaxter/osf.io,pattisdr/osf.io,laurenrevere/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,erinspace/osf.io,icereval/osf.io,laurenrevere/osf.io,felliott/osf.io,mfraezz/osf.io,felliott/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,sloria/osf.io,caneruguz/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,crcresearch/osf.io,chrisseto/osf.io,mattclark/osf.io,leb2dg/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,baylee-d/osf.io,saradbowman/osf.io,baylee-d/osf.io,binoculars/osf.io,brianjgeiger/osf.io,aaxelb/osf.io
|
python
|
## Code Before:
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
import os
import warnings
import itertools
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
'copy local-dist.py to local.py?', ImportWarning)
if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
from . import local
from . import defaults
for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)
def load_origins_whitelist():
global ORIGINS_WHITELIST
from osf.models import Institution, PreprintProvider
institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
institution.domains
for institution in Institution.find()
]))
preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))
ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
## Instruction:
Fix whitelist construction
-Note: institutions are schemeless, causing furl and urlparse
to parse the domain as `path`. PrePriPro domai ns are validated,
so they necessarily have a scheme, causing the domain to end up
in `netloc`.
-Do some ugly magic to get only the domain.
## Code After:
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
import os
from urlparse import urlparse
import warnings
import itertools
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
'copy local-dist.py to local.py?', ImportWarning)
if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
from . import local
from . import defaults
for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)
def load_origins_whitelist():
global ORIGINS_WHITELIST
from osf.models import Institution, PreprintProvider
institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
institution.domains
for institution in Institution.find()
]))
preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))
ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
|
// ... existing code ...
'v2/'
'''
import os
from urlparse import urlparse
import warnings
import itertools
// ... modified code ...
preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))
ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
// ... rest of the code ...
|
9bb312c505c2749862372c0ff56ba47e087a9edc
|
searx/engines/semantic_scholar.py
|
searx/engines/semantic_scholar.py
|
from json import dumps, loads
search_url = 'https://www.semanticscholar.org/api/1/search'
def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['content-type'] = 'application/json'
params['data'] = dumps({
"queryString": query,
"page": params['pageno'],
"pageSize": 10,
"sort": "relevance",
"useFallbackRankerService": False,
"useFallbackSearchCluster": False,
"performTitleMatch": True,
"getQuerySuggestions": False,
"authors": [],
"coAuthors": [],
"venues": [],
"performTitleMatch": True,
})
return params
def response(resp):
res = loads(resp.text)
results = []
for result in res['results']:
results.append({
'url': result['primaryPaperLink']['url'],
'title': result['title']['text'],
'content': result['paperAbstractTruncated']
})
return results
|
from json import dumps, loads
search_url = 'https://www.semanticscholar.org/api/1/search'
def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['content-type'] = 'application/json'
params['data'] = dumps({
"queryString": query,
"page": params['pageno'],
"pageSize": 10,
"sort": "relevance",
"useFallbackRankerService": False,
"useFallbackSearchCluster": False,
"getQuerySuggestions": False,
"authors": [],
"coAuthors": [],
"venues": [],
"performTitleMatch": True,
})
return params
def response(resp):
res = loads(resp.text)
results = []
for result in res['results']:
results.append({
'url': result['primaryPaperLink']['url'],
'title': result['title']['text'],
'content': result['paperAbstractTruncated']
})
return results
|
Remove duplicated key from dict in Semantic Scholar
|
Remove duplicated key from dict in Semantic Scholar
|
Python
|
agpl-3.0
|
dalf/searx,dalf/searx,dalf/searx,dalf/searx
|
python
|
## Code Before:
from json import dumps, loads
search_url = 'https://www.semanticscholar.org/api/1/search'
def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['content-type'] = 'application/json'
params['data'] = dumps({
"queryString": query,
"page": params['pageno'],
"pageSize": 10,
"sort": "relevance",
"useFallbackRankerService": False,
"useFallbackSearchCluster": False,
"performTitleMatch": True,
"getQuerySuggestions": False,
"authors": [],
"coAuthors": [],
"venues": [],
"performTitleMatch": True,
})
return params
def response(resp):
res = loads(resp.text)
results = []
for result in res['results']:
results.append({
'url': result['primaryPaperLink']['url'],
'title': result['title']['text'],
'content': result['paperAbstractTruncated']
})
return results
## Instruction:
Remove duplicated key from dict in Semantic Scholar
## Code After:
from json import dumps, loads
search_url = 'https://www.semanticscholar.org/api/1/search'
def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['content-type'] = 'application/json'
params['data'] = dumps({
"queryString": query,
"page": params['pageno'],
"pageSize": 10,
"sort": "relevance",
"useFallbackRankerService": False,
"useFallbackSearchCluster": False,
"getQuerySuggestions": False,
"authors": [],
"coAuthors": [],
"venues": [],
"performTitleMatch": True,
})
return params
def response(resp):
res = loads(resp.text)
results = []
for result in res['results']:
results.append({
'url': result['primaryPaperLink']['url'],
'title': result['title']['text'],
'content': result['paperAbstractTruncated']
})
return results
|
# ... existing code ...
"sort": "relevance",
"useFallbackRankerService": False,
"useFallbackSearchCluster": False,
"getQuerySuggestions": False,
"authors": [],
"coAuthors": [],
# ... rest of the code ...
|
0ce55ae521b7f1c751a8141bfa0a8eaa3b8dd4dd
|
src/common/me/nallar/tickthreading/minecraft/commands/TPSCommand.java
|
src/common/me/nallar/tickthreading/minecraft/commands/TPSCommand.java
|
package me.nallar.tickthreading.minecraft.commands;
import java.util.List;
import me.nallar.tickthreading.minecraft.TickManager;
import me.nallar.tickthreading.minecraft.TickThreading;
import net.minecraft.command.ICommandSender;
public class TPSCommand extends Command {
public static String name = "tps";
@Override
public String getCommandName() {
return name;
}
@Override
public boolean canCommandSenderUseCommand(ICommandSender par1ICommandSender) {
return true;
}
@Override
public void processCommand(ICommandSender commandSender, List<String> arguments) {
StringBuilder tpsReport = new StringBuilder();
tpsReport.append("---- TPS Report ----\n");
long usedTime = 0;
for (TickManager tickManager : TickThreading.instance().getManagers()) {
tpsReport.append(tickManager.getBasicStats());
usedTime += tickManager.lastTickLength;
}
tpsReport.append("\nOverall TPS: ").append(Math.min(20, 1000 / usedTime))
.append("\nOverall load: ").append(usedTime / 0.5).append('%');
sendChat(commandSender, tpsReport.toString());
}
}
|
package me.nallar.tickthreading.minecraft.commands;
import java.util.List;
import me.nallar.tickthreading.minecraft.TickManager;
import me.nallar.tickthreading.minecraft.TickThreading;
import net.minecraft.command.ICommandSender;
public class TPSCommand extends Command {
public static String name = "tps";
@Override
public String getCommandName() {
return name;
}
@Override
public boolean canCommandSenderUseCommand(ICommandSender par1ICommandSender) {
return true;
}
@Override
public void processCommand(ICommandSender commandSender, List<String> arguments) {
StringBuilder tpsReport = new StringBuilder();
tpsReport.append("---- TPS Report ----\n");
long usedTime = 0;
for (TickManager tickManager : TickThreading.instance().getManagers()) {
tpsReport.append(tickManager.getBasicStats());
usedTime += tickManager.lastTickLength;
}
tpsReport.append("\nOverall TPS: ").append(Math.min(20, 1000 / (usedTime == 0 ? 1 : usedTime)))
.append("\nOverall load: ").append(usedTime * 2).append('%');
sendChat(commandSender, tpsReport.toString());
}
}
|
Fix for / by 0 in /tps
|
Fix for / by 0 in /tps
Signed-off-by: Ross Allan <[email protected]>
|
Java
|
mit
|
nallar/TickThreading
|
java
|
## Code Before:
package me.nallar.tickthreading.minecraft.commands;
import java.util.List;
import me.nallar.tickthreading.minecraft.TickManager;
import me.nallar.tickthreading.minecraft.TickThreading;
import net.minecraft.command.ICommandSender;
public class TPSCommand extends Command {
public static String name = "tps";
@Override
public String getCommandName() {
return name;
}
@Override
public boolean canCommandSenderUseCommand(ICommandSender par1ICommandSender) {
return true;
}
@Override
public void processCommand(ICommandSender commandSender, List<String> arguments) {
StringBuilder tpsReport = new StringBuilder();
tpsReport.append("---- TPS Report ----\n");
long usedTime = 0;
for (TickManager tickManager : TickThreading.instance().getManagers()) {
tpsReport.append(tickManager.getBasicStats());
usedTime += tickManager.lastTickLength;
}
tpsReport.append("\nOverall TPS: ").append(Math.min(20, 1000 / usedTime))
.append("\nOverall load: ").append(usedTime / 0.5).append('%');
sendChat(commandSender, tpsReport.toString());
}
}
## Instruction:
Fix for / by 0 in /tps
Signed-off-by: Ross Allan <[email protected]>
## Code After:
package me.nallar.tickthreading.minecraft.commands;
import java.util.List;
import me.nallar.tickthreading.minecraft.TickManager;
import me.nallar.tickthreading.minecraft.TickThreading;
import net.minecraft.command.ICommandSender;
public class TPSCommand extends Command {
public static String name = "tps";
@Override
public String getCommandName() {
return name;
}
@Override
public boolean canCommandSenderUseCommand(ICommandSender par1ICommandSender) {
return true;
}
@Override
public void processCommand(ICommandSender commandSender, List<String> arguments) {
StringBuilder tpsReport = new StringBuilder();
tpsReport.append("---- TPS Report ----\n");
long usedTime = 0;
for (TickManager tickManager : TickThreading.instance().getManagers()) {
tpsReport.append(tickManager.getBasicStats());
usedTime += tickManager.lastTickLength;
}
tpsReport.append("\nOverall TPS: ").append(Math.min(20, 1000 / (usedTime == 0 ? 1 : usedTime)))
.append("\nOverall load: ").append(usedTime * 2).append('%');
sendChat(commandSender, tpsReport.toString());
}
}
|
...
tpsReport.append(tickManager.getBasicStats());
usedTime += tickManager.lastTickLength;
}
tpsReport.append("\nOverall TPS: ").append(Math.min(20, 1000 / (usedTime == 0 ? 1 : usedTime)))
.append("\nOverall load: ").append(usedTime * 2).append('%');
sendChat(commandSender, tpsReport.toString());
}
}
...
|
b23c843fda57e0ffa56aaf430d9a590e2ed0ec9a
|
ch06/extract_airlines.py
|
ch06/extract_airlines.py
|
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
Check variable for None value before null string when filtering tail numbers
|
Check variable for None value before null string when filtering tail numbers
|
Python
|
mit
|
rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2
|
python
|
## Code Before:
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
## Instruction:
Check variable for None value before null string when filtering tail numbers
## Code After:
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
# ... existing code ...
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
# ... rest of the code ...
|
662869c4245ff2447ad1a91fdb22e696e840dea0
|
indexer/src/main/java/org/pdxfinder/dataloaders/updog/Updog.java
|
indexer/src/main/java/org/pdxfinder/dataloaders/updog/Updog.java
|
package org.pdxfinder.dataloaders.updog;
import com.ibm.icu.impl.LocaleDisplayNamesImpl;
import org.hibernate.sql.Template;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
public class Updog {
private String provider;
public Updog(String provider) {
this.provider = provider;
}
private static final Logger log = LoggerFactory.getLogger(Updog.class);
private Map<String, PdxDataTable> pdxDataTables;
private Map<String, Object> domainObjects;
private void readPdxDataTable() {
PdxDataTable pdxDataTables = new PdxDataTable(provider);
pdxDataTables.readData();
}
public boolean validateTemplate(){
//instantiate a validator class
TemplateValidator templateValidator = new TemplateValidator();
return true;
}
public void load(){
//create domain objects database nodes
DomainObjectCreator doc = new DomainObjectCreator();
//save db
}
}
|
package org.pdxfinder.dataloaders.updog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Set;
public class Updog {
private String provider;
public Updog(String provider) {
this.provider = provider;
}
private static final Logger log = LoggerFactory.getLogger(Updog.class);
private Map<String, PdxDataTable> pdxDataTables;
private Map<String, Set<Object>> domainObjects;
private void readPdxDataTable() {
PdxDataTable pdxDataTables = new PdxDataTable(provider);
pdxDataTables.readData();
}
public boolean validateTemplate(){
//instantiate a validator class
TemplateValidator templateValidator = new TemplateValidator();
return true;
}
public void load(){
//create domain objects database nodes
DomainObjectCreator doc = new DomainObjectCreator(pdxDataTables);
//save db
}
}
|
Update skeleton for new updog
|
Update skeleton for new updog
|
Java
|
apache-2.0
|
PDXFinder/pdxfinder,PDXFinder/pdxfinder,PDXFinder/pdxfinder,PDXFinder/pdxfinder
|
java
|
## Code Before:
package org.pdxfinder.dataloaders.updog;
import com.ibm.icu.impl.LocaleDisplayNamesImpl;
import org.hibernate.sql.Template;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
public class Updog {
private String provider;
public Updog(String provider) {
this.provider = provider;
}
private static final Logger log = LoggerFactory.getLogger(Updog.class);
private Map<String, PdxDataTable> pdxDataTables;
private Map<String, Object> domainObjects;
private void readPdxDataTable() {
PdxDataTable pdxDataTables = new PdxDataTable(provider);
pdxDataTables.readData();
}
public boolean validateTemplate(){
//instantiate a validator class
TemplateValidator templateValidator = new TemplateValidator();
return true;
}
public void load(){
//create domain objects database nodes
DomainObjectCreator doc = new DomainObjectCreator();
//save db
}
}
## Instruction:
Update skeleton for new updog
## Code After:
package org.pdxfinder.dataloaders.updog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Set;
public class Updog {
private String provider;
public Updog(String provider) {
this.provider = provider;
}
private static final Logger log = LoggerFactory.getLogger(Updog.class);
private Map<String, PdxDataTable> pdxDataTables;
private Map<String, Set<Object>> domainObjects;
private void readPdxDataTable() {
PdxDataTable pdxDataTables = new PdxDataTable(provider);
pdxDataTables.readData();
}
public boolean validateTemplate(){
//instantiate a validator class
TemplateValidator templateValidator = new TemplateValidator();
return true;
}
public void load(){
//create domain objects database nodes
DomainObjectCreator doc = new DomainObjectCreator(pdxDataTables);
//save db
}
}
|
// ... existing code ...
package org.pdxfinder.dataloaders.updog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Set;
public class Updog {
// ... modified code ...
private static final Logger log = LoggerFactory.getLogger(Updog.class);
private Map<String, PdxDataTable> pdxDataTables;
private Map<String, Set<Object>> domainObjects;
private void readPdxDataTable() {
...
public void load(){
//create domain objects database nodes
DomainObjectCreator doc = new DomainObjectCreator(pdxDataTables);
//save db
// ... rest of the code ...
|
d187a8434c9d64171f76efa3055bdc06afbc8981
|
scripts/pystart.py
|
scripts/pystart.py
|
import os,sys,re
from time import sleep
from pprint import pprint
home = os.path.expanduser('~')
from math import log,ceil
def clog2(num):
return int(ceil(log(num,2)))
if (sys.version_info > (3, 0)):
# Python 3 code in this block
exec(open(home+'/homedir/scripts/hexecho.py').read())
else:
# Python 2 code in this block
execfile(home+'/homedir/scripts/hexecho.py')
hexoff
print ("Imported os,sys,re,sleep,pprint. Defined clog2,hexon/hexoff")
|
import os,sys,re
from time import sleep
from pprint import pprint
home = os.path.expanduser('~')
from math import log,ceil
sys.ps1 = '\001\033[96m\002>>> \001\033[0m\002'
sys.ps2 = '\001\033[96m\002... \001\033[0m\002'
def clog2(num):
return int(ceil(log(num,2)))
if (sys.version_info > (3, 0)):
# Python 3 code in this block
exec(open(home+'/homedir/scripts/hexecho.py').read())
else:
# Python 2 code in this block
execfile(home+'/homedir/scripts/hexecho.py')
hexoff
print ("Imported os,sys,re,sleep,pprint. Defined clog2,hexon/hexoff")
|
Add color to python prompt
|
Add color to python prompt
|
Python
|
mit
|
jdanders/homedir,jdanders/homedir,jdanders/homedir,jdanders/homedir
|
python
|
## Code Before:
import os,sys,re
from time import sleep
from pprint import pprint
home = os.path.expanduser('~')
from math import log,ceil
def clog2(num):
return int(ceil(log(num,2)))
if (sys.version_info > (3, 0)):
# Python 3 code in this block
exec(open(home+'/homedir/scripts/hexecho.py').read())
else:
# Python 2 code in this block
execfile(home+'/homedir/scripts/hexecho.py')
hexoff
print ("Imported os,sys,re,sleep,pprint. Defined clog2,hexon/hexoff")
## Instruction:
Add color to python prompt
## Code After:
import os,sys,re
from time import sleep
from pprint import pprint
home = os.path.expanduser('~')
from math import log,ceil
sys.ps1 = '\001\033[96m\002>>> \001\033[0m\002'
sys.ps2 = '\001\033[96m\002... \001\033[0m\002'
def clog2(num):
return int(ceil(log(num,2)))
if (sys.version_info > (3, 0)):
# Python 3 code in this block
exec(open(home+'/homedir/scripts/hexecho.py').read())
else:
# Python 2 code in this block
execfile(home+'/homedir/scripts/hexecho.py')
hexoff
print ("Imported os,sys,re,sleep,pprint. Defined clog2,hexon/hexoff")
|
# ... existing code ...
from pprint import pprint
home = os.path.expanduser('~')
from math import log,ceil
sys.ps1 = '\001\033[96m\002>>> \001\033[0m\002'
sys.ps2 = '\001\033[96m\002... \001\033[0m\002'
def clog2(num):
return int(ceil(log(num,2)))
if (sys.version_info > (3, 0)):
# ... rest of the code ...
|
0a628ed81ca11fc4175b480aad9a136b8a4fe1c2
|
constantsgen/pythonwriter.py
|
constantsgen/pythonwriter.py
|
class PythonWriter:
def __init__(self, constants):
self.constants = constants
def write(self, out):
out.write("# This file was generated by generate_constants.\n\n")
out.write("from enum import Enum, unique\n\n")
for name, enum in self.constants.enum_values.items():
out.write("""
@unique
class {}(Enum):\n""".format(name))
for base_name, value in enum.items():
# For the enum value names remove everything up through the
# first underscore and convert the remainder to lowercase. For
# example the value NV_BOOL is assigned to bool. If there is no
# underscore, find() returns -1 and the entire string is used.
first_underscore = base_name.find("_")
name = base_name[first_underscore + 1:].lower()
out.write(" {} = {}\n".format(name, value))
for name, value in self.constants.constant_values.items():
out.write("{} = {}\n".format(name, value))
|
class PythonWriter:
def __init__(self, constants):
self.constants = constants
def write(self, out):
out.write("# This file was generated by generate_constants.\n\n")
out.write("from enum import Enum, unique\n\n")
for name, enum in self.constants.enum_values.items():
out.write("""
@unique
class {}(Enum):\n""".format(name))
for base_name, value in enum.items():
# For the enum value names remove everything up through the
# first underscore and convert the remainder to lowercase. For
# example the value NV_BOOL is assigned to bool. If there is no
# underscore, find() returns -1 and the entire string is used.
first_underscore = base_name.find("_")
name = base_name[first_underscore + 1:].lower()
out.write(" {} = {}\n".format(name, value))
out.write("\n\n")
for name, value in self.constants.constant_values.items():
out.write("{} = {}\n".format(name, value))
|
Add PEP8 whitespace around Enums
|
Add PEP8 whitespace around Enums
|
Python
|
bsd-3-clause
|
barracudanetworks/constantsgen,barracudanetworks/constantsgen,barracudanetworks/constantsgen
|
python
|
## Code Before:
class PythonWriter:
def __init__(self, constants):
self.constants = constants
def write(self, out):
out.write("# This file was generated by generate_constants.\n\n")
out.write("from enum import Enum, unique\n\n")
for name, enum in self.constants.enum_values.items():
out.write("""
@unique
class {}(Enum):\n""".format(name))
for base_name, value in enum.items():
# For the enum value names remove everything up through the
# first underscore and convert the remainder to lowercase. For
# example the value NV_BOOL is assigned to bool. If there is no
# underscore, find() returns -1 and the entire string is used.
first_underscore = base_name.find("_")
name = base_name[first_underscore + 1:].lower()
out.write(" {} = {}\n".format(name, value))
for name, value in self.constants.constant_values.items():
out.write("{} = {}\n".format(name, value))
## Instruction:
Add PEP8 whitespace around Enums
## Code After:
class PythonWriter:
def __init__(self, constants):
self.constants = constants
def write(self, out):
out.write("# This file was generated by generate_constants.\n\n")
out.write("from enum import Enum, unique\n\n")
for name, enum in self.constants.enum_values.items():
out.write("""
@unique
class {}(Enum):\n""".format(name))
for base_name, value in enum.items():
# For the enum value names remove everything up through the
# first underscore and convert the remainder to lowercase. For
# example the value NV_BOOL is assigned to bool. If there is no
# underscore, find() returns -1 and the entire string is used.
first_underscore = base_name.find("_")
name = base_name[first_underscore + 1:].lower()
out.write(" {} = {}\n".format(name, value))
out.write("\n\n")
for name, value in self.constants.constant_values.items():
out.write("{} = {}\n".format(name, value))
|
# ... existing code ...
for name, enum in self.constants.enum_values.items():
out.write("""
@unique
class {}(Enum):\n""".format(name))
for base_name, value in enum.items():
# ... modified code ...
out.write(" {} = {}\n".format(name, value))
out.write("\n\n")
for name, value in self.constants.constant_values.items():
out.write("{} = {}\n".format(name, value))
# ... rest of the code ...
|
ca1f8cba176ba3dbbbed7ebee2e0399f94c548a1
|
src/user.c
|
src/user.c
|
task usercontrol(){
int DY, DT;
bool armsLocked = false;
while(true){
//Driving
DY = threshold(PAIRED_CH2, 15);
DT = threshold(PAIRED_CH1, 15);
drive(DY, DT);
//Pistons (toggle)
if(PAIRED_BTN7R){
pistons(!PISTON_POS);
waitUntil(!PAIRED_BTN7R);
}
//Arms
if(PAIRED_BTN7L){
armsLocked = !armsLocked;
waitUntil(!PAIRED_BTN7L);
}
if(armsLocked){
arms(ARM_LOCK);
} else {
arms((PAIRED_BTN6U - PAIRED_BTN6D) * MAX_POWER);
}
//Lift
lift((PAIRED_BTN7U - PAIRED_BTN7D) * MAX_POWER);
//Intake
intake((PAIRED_BTN5U - PAIRED_BTN5D) * MAX_POWER);
}
}
|
task usercontrol(){
int DY, DT;
bool armsLocked = false;
while(true){
//Driving
DY = threshold(PAIRED_CH2, 15) + (PAIRED_BTN8U * MAX_POWER) - (PAIRED_BTN8D * MAX_POWER);
DT = threshold(PAIRED_CH1, 15) + (PAIRED_BTN8R * MAX_POWER) - (PAIRED_BTN8L * MAX_POWER);
drive(DY, DT);
//Pistons (toggle)
if(PAIRED_BTN7R){
pistons(!PISTON_POS);
waitUntil(!PAIRED_BTN7R);
}
//Arms
if(PAIRED_BTN7L){
armsLocked = !armsLocked;
waitUntil(!PAIRED_BTN7L);
}
if(armsLocked){
arms(ARM_LOCK);
} else {
arms(threshold(PAIRED_CH3, 15) + ((PAIRED_BTN5U - PAIRED_BTN5D) * MAX_POWER));
}
//Lift
lift((PAIRED_BTN7U - PAIRED_BTN7D) * MAX_POWER);
//Intake
intake((PAIRED_BTN6U - PAIRED_BTN6D) * MAX_POWER);
}
}
|
Add buttons for driving, and swap intake and arm buttons
|
Add buttons for driving, and swap intake and arm buttons
|
C
|
mit
|
18moorei/code-red-in-the-zone
|
c
|
## Code Before:
task usercontrol(){
int DY, DT;
bool armsLocked = false;
while(true){
//Driving
DY = threshold(PAIRED_CH2, 15);
DT = threshold(PAIRED_CH1, 15);
drive(DY, DT);
//Pistons (toggle)
if(PAIRED_BTN7R){
pistons(!PISTON_POS);
waitUntil(!PAIRED_BTN7R);
}
//Arms
if(PAIRED_BTN7L){
armsLocked = !armsLocked;
waitUntil(!PAIRED_BTN7L);
}
if(armsLocked){
arms(ARM_LOCK);
} else {
arms((PAIRED_BTN6U - PAIRED_BTN6D) * MAX_POWER);
}
//Lift
lift((PAIRED_BTN7U - PAIRED_BTN7D) * MAX_POWER);
//Intake
intake((PAIRED_BTN5U - PAIRED_BTN5D) * MAX_POWER);
}
}
## Instruction:
Add buttons for driving, and swap intake and arm buttons
## Code After:
task usercontrol(){
int DY, DT;
bool armsLocked = false;
while(true){
//Driving
DY = threshold(PAIRED_CH2, 15) + (PAIRED_BTN8U * MAX_POWER) - (PAIRED_BTN8D * MAX_POWER);
DT = threshold(PAIRED_CH1, 15) + (PAIRED_BTN8R * MAX_POWER) - (PAIRED_BTN8L * MAX_POWER);
drive(DY, DT);
//Pistons (toggle)
if(PAIRED_BTN7R){
pistons(!PISTON_POS);
waitUntil(!PAIRED_BTN7R);
}
//Arms
if(PAIRED_BTN7L){
armsLocked = !armsLocked;
waitUntil(!PAIRED_BTN7L);
}
if(armsLocked){
arms(ARM_LOCK);
} else {
arms(threshold(PAIRED_CH3, 15) + ((PAIRED_BTN5U - PAIRED_BTN5D) * MAX_POWER));
}
//Lift
lift((PAIRED_BTN7U - PAIRED_BTN7D) * MAX_POWER);
//Intake
intake((PAIRED_BTN6U - PAIRED_BTN6D) * MAX_POWER);
}
}
|
// ... existing code ...
while(true){
//Driving
DY = threshold(PAIRED_CH2, 15) + (PAIRED_BTN8U * MAX_POWER) - (PAIRED_BTN8D * MAX_POWER);
DT = threshold(PAIRED_CH1, 15) + (PAIRED_BTN8R * MAX_POWER) - (PAIRED_BTN8L * MAX_POWER);
drive(DY, DT);
//Pistons (toggle)
// ... modified code ...
if(armsLocked){
arms(ARM_LOCK);
} else {
arms(threshold(PAIRED_CH3, 15) + ((PAIRED_BTN5U - PAIRED_BTN5D) * MAX_POWER));
}
//Lift
...
lift((PAIRED_BTN7U - PAIRED_BTN7D) * MAX_POWER);
//Intake
intake((PAIRED_BTN6U - PAIRED_BTN6D) * MAX_POWER);
}
}
// ... rest of the code ...
|
0ad05c76d2e40c71599aa46957affb9c03748c9a
|
src/test/java/guitests/ViewCommandTest.java
|
src/test/java/guitests/ViewCommandTest.java
|
package guitests;
import static org.junit.Assert.assertEquals;
import java.util.Set;
import org.junit.Test;
import javafx.scene.Node;
import javafx.scene.control.TitledPane;
public class ViewCommandTest extends TaskManagerGuiTest {
protected final String TASK_LIST_FXML_ID = "#taskListView";
@Test
public void viewDefault() {
assertGroupsDisplay("All");
}
@Test
public void viewCalendar() {
commandBox.runCommand("view calendar");
assertGroupsDisplay("Floating", "Overdue", "Today", "Tomorrow", "Future");
}
@Test
public void viewGroups() {
commandBox.runCommand("view done today tomorrow");
assertGroupsDisplay("Done", "Today", "Tomorrow");
}
protected void assertGroupsDisplay(String... groupTitles) {
Node taskListView = mainGui.getNodeWithID(TASK_LIST_FXML_ID);
Set<Node> taskGroupNodes = taskListView.lookupAll("#titledPane");
int index = 0;
for (Node node : taskGroupNodes) {
TitledPane titledPane = (TitledPane) node;
// Title consists of title + no. of entries
// e.g. Tomorrow (4)
String title = titledPane.getText().split(" ")[0];
assertEquals(title, groupTitles[index]);
index++;
}
}
}
|
package guitests;
import static org.junit.Assert.assertEquals;
import java.util.Set;
import org.junit.Test;
import javafx.scene.Node;
import javafx.scene.control.TitledPane;
import seedu.address.logic.commands.ViewCommand;
public class ViewCommandTest extends TaskManagerGuiTest {
protected final String TASK_LIST_FXML_ID = "#taskListView";
@Test
public void viewDefault() {
assertGroupsDisplay("All");
}
@Test
public void viewCalendar() {
commandBox.runCommand("view calendar");
assertGroupsDisplay("Floating", "Overdue", "Today", "Tomorrow", "Future");
assertResultMessage(String.format(ViewCommand.MESSAGE_SUCCESS, "Calendar"));
}
@Test
public void viewGroups() {
commandBox.runCommand("view done today tomorrow");
assertGroupsDisplay("Done", "Today", "Tomorrow");
assertResultMessage(String.format(ViewCommand.MESSAGE_SUCCESS, "Done|Today|Tomorrow"));
}
@Test
public void viewWrongInput() {
commandBox.runCommand("view randomstring");
assertGroupsDisplay("All");
assertResultMessage(ViewCommand.MESSAGE_ERROR);
}
protected void assertGroupsDisplay(String... groupTitles) {
Node taskListView = mainGui.getNodeWithID(TASK_LIST_FXML_ID);
Set<Node> taskGroupNodes = taskListView.lookupAll("#titledPane");
int index = 0;
for (Node node : taskGroupNodes) {
TitledPane titledPane = (TitledPane) node;
// Title consists of title + no. of entries
// e.g. Tomorrow (4)
String title = titledPane.getText().split(" ")[0];
assertEquals(title, groupTitles[index]);
index++;
}
}
}
|
Add tests for View Command
|
Add tests for View Command
|
Java
|
mit
|
CS2103JAN2017-W13-B2/main,CS2103JAN2017-W13-B2/main
|
java
|
## Code Before:
package guitests;
import static org.junit.Assert.assertEquals;
import java.util.Set;
import org.junit.Test;
import javafx.scene.Node;
import javafx.scene.control.TitledPane;
public class ViewCommandTest extends TaskManagerGuiTest {
protected final String TASK_LIST_FXML_ID = "#taskListView";
@Test
public void viewDefault() {
assertGroupsDisplay("All");
}
@Test
public void viewCalendar() {
commandBox.runCommand("view calendar");
assertGroupsDisplay("Floating", "Overdue", "Today", "Tomorrow", "Future");
}
@Test
public void viewGroups() {
commandBox.runCommand("view done today tomorrow");
assertGroupsDisplay("Done", "Today", "Tomorrow");
}
protected void assertGroupsDisplay(String... groupTitles) {
Node taskListView = mainGui.getNodeWithID(TASK_LIST_FXML_ID);
Set<Node> taskGroupNodes = taskListView.lookupAll("#titledPane");
int index = 0;
for (Node node : taskGroupNodes) {
TitledPane titledPane = (TitledPane) node;
// Title consists of title + no. of entries
// e.g. Tomorrow (4)
String title = titledPane.getText().split(" ")[0];
assertEquals(title, groupTitles[index]);
index++;
}
}
}
## Instruction:
Add tests for View Command
## Code After:
package guitests;
import static org.junit.Assert.assertEquals;
import java.util.Set;
import org.junit.Test;
import javafx.scene.Node;
import javafx.scene.control.TitledPane;
import seedu.address.logic.commands.ViewCommand;
public class ViewCommandTest extends TaskManagerGuiTest {
protected final String TASK_LIST_FXML_ID = "#taskListView";
@Test
public void viewDefault() {
assertGroupsDisplay("All");
}
@Test
public void viewCalendar() {
commandBox.runCommand("view calendar");
assertGroupsDisplay("Floating", "Overdue", "Today", "Tomorrow", "Future");
assertResultMessage(String.format(ViewCommand.MESSAGE_SUCCESS, "Calendar"));
}
@Test
public void viewGroups() {
commandBox.runCommand("view done today tomorrow");
assertGroupsDisplay("Done", "Today", "Tomorrow");
assertResultMessage(String.format(ViewCommand.MESSAGE_SUCCESS, "Done|Today|Tomorrow"));
}
@Test
public void viewWrongInput() {
commandBox.runCommand("view randomstring");
assertGroupsDisplay("All");
assertResultMessage(ViewCommand.MESSAGE_ERROR);
}
protected void assertGroupsDisplay(String... groupTitles) {
Node taskListView = mainGui.getNodeWithID(TASK_LIST_FXML_ID);
Set<Node> taskGroupNodes = taskListView.lookupAll("#titledPane");
int index = 0;
for (Node node : taskGroupNodes) {
TitledPane titledPane = (TitledPane) node;
// Title consists of title + no. of entries
// e.g. Tomorrow (4)
String title = titledPane.getText().split(" ")[0];
assertEquals(title, groupTitles[index]);
index++;
}
}
}
|
# ... existing code ...
import javafx.scene.Node;
import javafx.scene.control.TitledPane;
import seedu.address.logic.commands.ViewCommand;
public class ViewCommandTest extends TaskManagerGuiTest {
# ... modified code ...
public void viewCalendar() {
commandBox.runCommand("view calendar");
assertGroupsDisplay("Floating", "Overdue", "Today", "Tomorrow", "Future");
assertResultMessage(String.format(ViewCommand.MESSAGE_SUCCESS, "Calendar"));
}
@Test
...
public void viewGroups() {
commandBox.runCommand("view done today tomorrow");
assertGroupsDisplay("Done", "Today", "Tomorrow");
assertResultMessage(String.format(ViewCommand.MESSAGE_SUCCESS, "Done|Today|Tomorrow"));
}
@Test
public void viewWrongInput() {
commandBox.runCommand("view randomstring");
assertGroupsDisplay("All");
assertResultMessage(ViewCommand.MESSAGE_ERROR);
}
protected void assertGroupsDisplay(String... groupTitles) {
# ... rest of the code ...
|
0c956e5bc79431bace5b6f8f2c06c8a7d7d756b3
|
cps-global-lib/src/main/java/org/jenkinsci/plugins/workflow/cps/global/GroovyShellDecoratorImpl.java
|
cps-global-lib/src/main/java/org/jenkinsci/plugins/workflow/cps/global/GroovyShellDecoratorImpl.java
|
package org.jenkinsci.plugins.workflow.cps.global;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyShell;
import hudson.Extension;
import org.jenkinsci.plugins.workflow.cps.CpsFlowExecution;
import org.jenkinsci.plugins.workflow.cps.GroovyShellDecorator;
import javax.inject.Inject;
import java.net.MalformedURLException;
/**
* Adds the global shared library space into {@link GroovyClassLoader} classpath.
*
* @author Kohsuke Kawaguchi
*/
@Extension
public class GroovyShellDecoratorImpl extends GroovyShellDecorator {
@Inject
WorkflowLibRepository repo;
@Override
public void configureShell(CpsFlowExecution context, GroovyShell shell) {
try {
shell.getClassLoader().addURL(repo.workspace.toURL());
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
}
}
|
package org.jenkinsci.plugins.workflow.cps.global;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyShell;
import hudson.Extension;
import org.jenkinsci.plugins.workflow.cps.CpsFlowExecution;
import org.jenkinsci.plugins.workflow.cps.GroovyShellDecorator;
import javax.inject.Inject;
import java.io.File;
import java.net.MalformedURLException;
/**
* Adds the global shared library space into {@link GroovyClassLoader} classpath.
*
* @author Kohsuke Kawaguchi
*/
@Extension
public class GroovyShellDecoratorImpl extends GroovyShellDecorator {
@Inject
WorkflowLibRepository repo;
@Override
public void configureShell(CpsFlowExecution context, GroovyShell shell) {
try {
shell.getClassLoader().addURL(new File(repo.workspace,"src").toURL());
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
}
}
|
Use a sub-directory so that in the future we can have other directories for jar files, custom functions, etc.
|
Use a sub-directory so that in the future we can have other directories for jar files, custom functions, etc.
|
Java
|
mit
|
ganthore/docker-workflow-plugin,ganthore/docker-workflow-plugin,ganthore/docker-workflow-plugin
|
java
|
## Code Before:
package org.jenkinsci.plugins.workflow.cps.global;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyShell;
import hudson.Extension;
import org.jenkinsci.plugins.workflow.cps.CpsFlowExecution;
import org.jenkinsci.plugins.workflow.cps.GroovyShellDecorator;
import javax.inject.Inject;
import java.net.MalformedURLException;
/**
* Adds the global shared library space into {@link GroovyClassLoader} classpath.
*
* @author Kohsuke Kawaguchi
*/
@Extension
public class GroovyShellDecoratorImpl extends GroovyShellDecorator {
@Inject
WorkflowLibRepository repo;
@Override
public void configureShell(CpsFlowExecution context, GroovyShell shell) {
try {
shell.getClassLoader().addURL(repo.workspace.toURL());
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
}
}
## Instruction:
Use a sub-directory so that in the future we can have other directories for jar files, custom functions, etc.
## Code After:
package org.jenkinsci.plugins.workflow.cps.global;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyShell;
import hudson.Extension;
import org.jenkinsci.plugins.workflow.cps.CpsFlowExecution;
import org.jenkinsci.plugins.workflow.cps.GroovyShellDecorator;
import javax.inject.Inject;
import java.io.File;
import java.net.MalformedURLException;
/**
* Adds the global shared library space into {@link GroovyClassLoader} classpath.
*
* @author Kohsuke Kawaguchi
*/
@Extension
public class GroovyShellDecoratorImpl extends GroovyShellDecorator {
@Inject
WorkflowLibRepository repo;
@Override
public void configureShell(CpsFlowExecution context, GroovyShell shell) {
try {
shell.getClassLoader().addURL(new File(repo.workspace,"src").toURL());
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
}
}
|
// ... existing code ...
import org.jenkinsci.plugins.workflow.cps.GroovyShellDecorator;
import javax.inject.Inject;
import java.io.File;
import java.net.MalformedURLException;
/**
// ... modified code ...
@Override
public void configureShell(CpsFlowExecution context, GroovyShell shell) {
try {
shell.getClassLoader().addURL(new File(repo.workspace,"src").toURL());
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
// ... rest of the code ...
|
4650c0da62324b958162ea9ab58df41f2b8f27e5
|
src/Functions/Cascades.kt
|
src/Functions/Cascades.kt
|
package Kotlin101.Functions.Cascades
fun main (Args : Array<String>){
/* This call utilizes extension function and infix call */
var superman = Superman() with {
name = "Lux Luthor"
punch()
kick()
}
}
public fun <T>T.with(operations : T.() -> Unit) : T {
this.operations()
return this
}
public class Superman() {
var name : String = "Clark Kent"
public fun punch() : Unit = println("$name punches")
public fun fly() : Unit = println("$name flies")
public fun kick() : Unit = println("$name kicks")
}
|
package Kotlin101.Functions.Cascades
fun main (Args : Array<String>){
/* This call utilizes extension function and infix call. It is handy to deal with pesky Java object initializations */
var superman = Superman() with {
name = "Lux Luthor"
punch()
kick()
sidekick = Spiderman() with {
special()
}
}
}
public fun <T>T.with(operations : T.() -> Unit) : T {
this.operations()
return this
}
public class Superman() {
var name : String = "Clark Kent"
var sidekick : Sidekick = Robin()
public fun punch() : Unit = println("$name punches")
public fun fly() : Unit = println("$name flies")
public fun kick() : Unit = println("$name kicks")
}
trait Sidekick {
public abstract fun special()
}
public class Spiderman() : Sidekick {
var name : String = "Peter Parker"
override fun special() = println("$name webs")
}
public class Robin() : Sidekick {
var name : String = "Robin"
override fun special() = println("$name is useless")
}
|
Add sub object creation in with example
|
Add sub object creation in with example
|
Kotlin
|
bsd-3-clause
|
dodyg/Kotlin101,mewebstudio/Kotlin101
|
kotlin
|
## Code Before:
package Kotlin101.Functions.Cascades
fun main (Args : Array<String>){
/* This call utilizes extension function and infix call */
var superman = Superman() with {
name = "Lux Luthor"
punch()
kick()
}
}
public fun <T>T.with(operations : T.() -> Unit) : T {
this.operations()
return this
}
public class Superman() {
var name : String = "Clark Kent"
public fun punch() : Unit = println("$name punches")
public fun fly() : Unit = println("$name flies")
public fun kick() : Unit = println("$name kicks")
}
## Instruction:
Add sub object creation in with example
## Code After:
package Kotlin101.Functions.Cascades
fun main (Args : Array<String>){
/* This call utilizes extension function and infix call. It is handy to deal with pesky Java object initializations */
var superman = Superman() with {
name = "Lux Luthor"
punch()
kick()
sidekick = Spiderman() with {
special()
}
}
}
public fun <T>T.with(operations : T.() -> Unit) : T {
this.operations()
return this
}
public class Superman() {
var name : String = "Clark Kent"
var sidekick : Sidekick = Robin()
public fun punch() : Unit = println("$name punches")
public fun fly() : Unit = println("$name flies")
public fun kick() : Unit = println("$name kicks")
}
trait Sidekick {
public abstract fun special()
}
public class Spiderman() : Sidekick {
var name : String = "Peter Parker"
override fun special() = println("$name webs")
}
public class Robin() : Sidekick {
var name : String = "Robin"
override fun special() = println("$name is useless")
}
|
...
package Kotlin101.Functions.Cascades
fun main (Args : Array<String>){
/* This call utilizes extension function and infix call. It is handy to deal with pesky Java object initializations */
var superman = Superman() with {
name = "Lux Luthor"
punch()
kick()
sidekick = Spiderman() with {
special()
}
}
}
...
public class Superman() {
var name : String = "Clark Kent"
var sidekick : Sidekick = Robin()
public fun punch() : Unit = println("$name punches")
public fun fly() : Unit = println("$name flies")
public fun kick() : Unit = println("$name kicks")
}
trait Sidekick {
public abstract fun special()
}
public class Spiderman() : Sidekick {
var name : String = "Peter Parker"
override fun special() = println("$name webs")
}
public class Robin() : Sidekick {
var name : String = "Robin"
override fun special() = println("$name is useless")
}
...
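
A rough Python analogue of the cascade shown above, for readers who want the pattern rather than the Kotlin syntax. The `configure` helper, `setup`, and the classes below are illustrative stand-ins, not part of the repository.

# Sketch of the same cascade/builder idea in Python: build an object,
# run a configuration callable against it, and hand the object back.
def configure(obj, operations):
    operations(obj)
    return obj

class Spiderman:
    name = "Peter Parker"
    def special(self):
        print(f"{self.name} webs")

class Superman:
    def __init__(self):
        self.name = "Clark Kent"
        self.sidekick = None
    def punch(self):
        print(f"{self.name} punches")
    def kick(self):
        print(f"{self.name} kicks")

def setup(s):
    s.name = "Lux Luthor"
    s.punch()
    s.kick()
    s.sidekick = configure(Spiderman(), lambda sk: sk.special())

superman = configure(Superman(), setup)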
|
cda3fe6e14280a025c894c1279af291a8327475b
|
src/main/java/com/hp/autonomy/hod/client/api/textindex/IndexFlavor.java
|
src/main/java/com/hp/autonomy/hod/client/api/textindex/IndexFlavor.java
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.hod.client.api.textindex;
/**
* Enum type representing the possible options for the index flavor parameter
*/
public enum IndexFlavor {
standard,
explorer,
categorization,
custom_fields,
web_cloud,
querymanipulation
}
|
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.hod.client.api.textindex;
/**
* Enum type representing the possible options for the index flavor parameter
*/
public enum IndexFlavor {
standard,
explorer,
categorization,
custom_fields,
querymanipulation
}
|
Remove web_cloud as it can't be used to create a text index
|
Remove web_cloud as it can't be used to create a text index
|
Java
|
mit
|
hpautonomy/java-hod-client,hpautonomy/java-hod-client,hpe-idol/java-hod-client,hpe-idol/java-hod-client,hpautonomy/java-hod-client,hpe-idol/java-hod-client
|
java
|
## Code Before:
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.hod.client.api.textindex;
/**
* Enum type representing the possible options for the index flavor parameter
*/
public enum IndexFlavor {
standard,
explorer,
categorization,
custom_fields,
web_cloud,
querymanipulation
}
## Instruction:
Remove web_cloud as it can't be used to create a text index
## Code After:
/*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.hod.client.api.textindex;
/**
* Enum type representing the possible options for the index flavor parameter
*/
public enum IndexFlavor {
standard,
explorer,
categorization,
custom_fields,
querymanipulation
}
|
...
explorer,
categorization,
custom_fields,
querymanipulation
}
...
|
92ec16603de2c5ce4d32546c25c2a5a0f286397f
|
src/java/org/apache/commons/jexl/parser/ASTJexlScript.java
|
src/java/org/apache/commons/jexl/parser/ASTJexlScript.java
|
/* Generated By:JJTree: Do not edit this line. ASTJexlScript.java */
package org.apache.commons.jexl.parser;
/**
* Top of the syntax tree - parsed Jexl code.
*/
public class ASTJexlScript extends SimpleNode {
public ASTJexlScript(int id) {
super(id);
}
public ASTJexlScript(Parser p, int id) {
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
}
|
/* Generated By:JJTree: Do not edit this line. ASTJexlScript.java */
package org.apache.commons.jexl.parser;
import org.apache.commons.jexl.JexlContext;
/**
* Top of the syntax tree - parsed Jexl code.
* @since 1.1
*/
public class ASTJexlScript extends SimpleNode {
public ASTJexlScript(int id) {
super(id);
}
public ASTJexlScript(Parser p, int id) {
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
public Object value(JexlContext jc) throws Exception
{
SimpleNode child = (SimpleNode)jjtGetChild(0);
return child.value(jc);
}
}
|
Add value(context) method to support scripts
|
Add value(context) method to support scripts
git-svn-id: de0229a90a04588cc4459530912ec55932f3d65c@391008 13f79535-47bb-0310-9956-ffa450edef68
|
Java
|
apache-2.0
|
apache/commons-jexl,apache/commons-jexl,apache/commons-jexl
|
java
|
## Code Before:
/* Generated By:JJTree: Do not edit this line. ASTJexlScript.java */
package org.apache.commons.jexl.parser;
/**
* Top of the syntax tree - parsed Jexl code.
*/
public class ASTJexlScript extends SimpleNode {
public ASTJexlScript(int id) {
super(id);
}
public ASTJexlScript(Parser p, int id) {
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
}
## Instruction:
Add value(context) method to support scripts
git-svn-id: de0229a90a04588cc4459530912ec55932f3d65c@391008 13f79535-47bb-0310-9956-ffa450edef68
## Code After:
/* Generated By:JJTree: Do not edit this line. ASTJexlScript.java */
package org.apache.commons.jexl.parser;
import org.apache.commons.jexl.JexlContext;
/**
* Top of the syntax tree - parsed Jexl code.
* @since 1.1
*/
public class ASTJexlScript extends SimpleNode {
public ASTJexlScript(int id) {
super(id);
}
public ASTJexlScript(Parser p, int id) {
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
public Object value(JexlContext jc) throws Exception
{
SimpleNode child = (SimpleNode)jjtGetChild(0);
return child.value(jc);
}
}
|
// ... existing code ...
package org.apache.commons.jexl.parser;
import org.apache.commons.jexl.JexlContext;
/**
* Top of the syntax tree - parsed Jexl code.
* @since 1.1
*/
public class ASTJexlScript extends SimpleNode {
public ASTJexlScript(int id) {
// ... modified code ...
public Object jjtAccept(ParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
public Object value(JexlContext jc) throws Exception
{
SimpleNode child = (SimpleNode)jjtGetChild(0);
return child.value(jc);
}
}
// ... rest of the code ...
|
0a42f5273895da8b40388e00cae4a4a7fa41b572
|
src/org/asaph/happynumber/HappyNumber.java
|
src/org/asaph/happynumber/HappyNumber.java
|
package org.asaph.happynumber;
import java.util.HashSet;
import java.util.Set;
public class HappyNumber {
public static int nextNumber(int number) {
String numberString = Integer.toString(number);
int sum=0;
for (int i=0, length=numberString.length(); i<length; i++) {
char ch = numberString.charAt(i);
int digit = Integer.parseInt("" + ch);
int square = digit * digit;
sum += square;
}
return sum;
}
public static boolean isHappy(int number) {
return isHappy(number, new HashSet<Integer>());
}
public static boolean isHappy(int number, Set<Integer> numbersSoFar) {
int next = nextNumber(number);
if (next == 1) {
return true;
}
if (numbersSoFar.contains(next)) {
return false;
}
numbersSoFar.add(next);
return isHappy(next, numbersSoFar);
}
public static void main(String[] args) {
for (int i=1; i<=1000; i++) {
if (isHappy(i)) {
System.out.println(i);
}
}
}
}
|
package org.asaph.happynumber;
import java.util.HashSet;
import java.util.Set;
public class HappyNumber {
public static int nextNumber(int number) {
int sum = 0;
while (number != 0) {
int digit = number % 10;
sum += digit * digit;
number = (number - digit) / 10;
}
return sum;
}
public static boolean isHappy(int number) {
return isHappy(number, new HashSet<Integer>());
}
public static boolean isHappy(int number, Set<Integer> numbersSoFar) {
int next = nextNumber(number);
if (next == 1) {
return true;
}
if (numbersSoFar.contains(next)) {
return false;
}
numbersSoFar.add(next);
return isHappy(next, numbersSoFar);
}
public static void main(String[] args) {
for (int i=1; i<=1000; i++) {
if (isHappy(i)) {
System.out.println(i);
}
}
}
}
|
Break number apart into digits using math instead of string operations.
|
Refactor: Break number apart into digits using math instead of string operations.
|
Java
|
mit
|
asaph/happynumber
|
java
|
## Code Before:
package org.asaph.happynumber;
import java.util.HashSet;
import java.util.Set;
public class HappyNumber {
public static int nextNumber(int number) {
String numberString = Integer.toString(number);
int sum=0;
for (int i=0, length=numberString.length(); i<length; i++) {
char ch = numberString.charAt(i);
int digit = Integer.parseInt("" + ch);
int square = digit * digit;
sum += square;
}
return sum;
}
public static boolean isHappy(int number) {
return isHappy(number, new HashSet<Integer>());
}
public static boolean isHappy(int number, Set<Integer> numbersSoFar) {
int next = nextNumber(number);
if (next == 1) {
return true;
}
if (numbersSoFar.contains(next)) {
return false;
}
numbersSoFar.add(next);
return isHappy(next, numbersSoFar);
}
public static void main(String[] args) {
for (int i=1; i<=1000; i++) {
if (isHappy(i)) {
System.out.println(i);
}
}
}
}
## Instruction:
Refactor: Break number apart into digits using math instead of string operations.
## Code After:
package org.asaph.happynumber;
import java.util.HashSet;
import java.util.Set;
public class HappyNumber {
public static int nextNumber(int number) {
int sum = 0;
while (number != 0) {
int digit = number % 10;
sum += digit * digit;
number = (number - digit) / 10;
}
return sum;
}
public static boolean isHappy(int number) {
return isHappy(number, new HashSet<Integer>());
}
public static boolean isHappy(int number, Set<Integer> numbersSoFar) {
int next = nextNumber(number);
if (next == 1) {
return true;
}
if (numbersSoFar.contains(next)) {
return false;
}
numbersSoFar.add(next);
return isHappy(next, numbersSoFar);
}
public static void main(String[] args) {
for (int i=1; i<=1000; i++) {
if (isHappy(i)) {
System.out.println(i);
}
}
}
}
|
// ... existing code ...
public class HappyNumber {
public static int nextNumber(int number) {
int sum = 0;
while (number != 0) {
int digit = number % 10;
sum += digit * digit;
number = (number - digit) / 10;
}
return sum;
}
// ... rest of the code ...
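
The refactor above is language-agnostic: peel the last digit off with the remainder operator, then drop it with integer division. The same arithmetic, restated as a small self-contained Python sketch (function names are illustrative):

# Digit extraction by arithmetic instead of string slicing.
def next_number(number):
    total = 0
    while number != 0:
        digit = number % 10
        total += digit * digit
        number //= 10
    return total

def is_happy(number, seen=None):
    seen = set() if seen is None else seen
    nxt = next_number(number)
    if nxt == 1:
        return True
    if nxt in seen:
        return False
    seen.add(nxt)
    return is_happy(nxt, seen)

assert next_number(19) == 1 + 81   # 1*1 + 9*9
assert is_happy(19)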
|
5b616f5b3d605b1831d4ca8ca0a9be561f399a89
|
falmer/events/admin.py
|
falmer/events/admin.py
|
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
pass
|
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event, MSLEvent
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
list_display = ('title', 'start_time', 'end_time', )
@register(MSLEvent)
class MSLEventModelAdmin(admin.ModelAdmin):
pass
|
Improve list display of events
|
Improve list display of events
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
python
|
## Code Before:
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
pass
## Instruction:
Improve list display of events
## Code After:
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event, MSLEvent
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
list_display = ('title', 'start_time', 'end_time', )
@register(MSLEvent)
class MSLEventModelAdmin(admin.ModelAdmin):
pass
|
# ... existing code ...
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event, MSLEvent
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
list_display = ('title', 'start_time', 'end_time', )
@register(MSLEvent)
class MSLEventModelAdmin(admin.ModelAdmin):
pass
# ... rest of the code ...
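
For context, `list_display` is the standard ModelAdmin hook behind this change: each named field or callable becomes a column in the admin changelist. A minimal sketch, assuming a configured Django project; the `ConcertAdmin` name and its fields are hypothetical, not from the repository:

from django.contrib import admin

class ConcertAdmin(admin.ModelAdmin):  # "Concert" is a hypothetical model
    # Each entry becomes a sortable column in the admin changelist view.
    list_display = ('title', 'start_time', 'end_time')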
|
f1fedff9247b78120df7335b64cdf46c8f60ef03
|
test/test_fixtures.py
|
test/test_fixtures.py
|
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
|
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
class TestClass:
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(self, io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
|
Add some tests for method signature inspection
|
Add some tests for method signature inspection
|
Python
|
apache-2.0
|
eugeniy/pytest-tornado
|
python
|
## Code Before:
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
## Instruction:
Add some tests for method signature inspection
## Code After:
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
class TestClass:
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(self, io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
|
// ... existing code ...
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
class TestClass:
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(self, io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
// ... rest of the code ...
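
The new class-based test exercises `gen_test` on a bound method, where `self` comes before any fixture arguments. One hedged way such signature inspection can be done with the standard library (this is not the plugin's actual implementation):

import inspect

def fixture_names(test_callable):
    # Parameters of the test, minus a leading "self" for methods.
    params = list(inspect.signature(test_callable).parameters)
    if params and params[0] == 'self':
        params = params[1:]
    return params

class TestClass:
    def test_uses_io_loop(self, io_loop):
        pass

assert fixture_names(TestClass.test_uses_io_loop) == ['io_loop']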
|
f0270de636bb84e89cbbb54896c6ed5037a48323
|
spiralgalaxygame/precondition.py
|
spiralgalaxygame/precondition.py
|
class PreconditionError (TypeError):
def __init__(self, callee, *args):
TypeError.__init__(self, '{0.__name__}{1!r}'.format(callee, args))
|
from types import FunctionType, MethodType
class PreconditionError (TypeError):
def __init__(self, callee, *args):
if isinstance(callee, MethodType):
name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
elif isinstance(callee, type) or isinstance(callee, FunctionType):
name = callee.__name__
TypeError.__init__(self, '{}{!r}'.format(name, args))
|
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
|
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
|
Python
|
agpl-3.0
|
nejucomo/sgg,nejucomo/sgg,nejucomo/sgg
|
python
|
## Code Before:
class PreconditionError (TypeError):
def __init__(self, callee, *args):
TypeError.__init__(self, '{0.__name__}{1!r}'.format(callee, args))
## Instruction:
Implement prettier method names in PreconditionErrors as per spec; not yet full branch coverage.
## Code After:
from types import FunctionType, MethodType
class PreconditionError (TypeError):
def __init__(self, callee, *args):
if isinstance(callee, MethodType):
name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
elif isinstance(callee, type) or isinstance(callee, FunctionType):
name = callee.__name__
TypeError.__init__(self, '{}{!r}'.format(name, args))
|
...
from types import FunctionType, MethodType
class PreconditionError (TypeError):
def __init__(self, callee, *args):
if isinstance(callee, MethodType):
name = '{0.im_class.__name__}.{0.im_func.__name__}'.format(callee)
elif isinstance(callee, type) or isinstance(callee, FunctionType):
name = callee.__name__
TypeError.__init__(self, '{}{!r}'.format(name, args))
...
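
The change dispatches on `MethodType` versus `FunctionType` so bound methods render as `Class.method`. The sketch below restates the idea for Python 3, where `im_class`/`im_func` became `__self__`/`__func__`; it is an illustration, not the project's code, and it adds a fallback branch the original omits:

from types import FunctionType, MethodType

class PreconditionError(TypeError):
    def __init__(self, callee, *args):
        if isinstance(callee, MethodType):
            name = '{0}.{1}'.format(type(callee.__self__).__name__,
                                    callee.__func__.__name__)
        elif isinstance(callee, (type, FunctionType)):
            name = callee.__name__
        else:
            name = repr(callee)
        TypeError.__init__(self, '{0}{1!r}'.format(name, args))

def launch(speed):
    pass

class Ship:
    def dock(self, port):
        pass

print(PreconditionError(launch, 1))         # launch(1,)
print(PreconditionError(Ship().dock, 'a'))  # Ship.dock('a',)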
|
3c1245b31011d25f7c660592d456cf9109766195
|
libyaul/math/color.h
|
libyaul/math/color.h
|
typedef union {
struct {
unsigned int r:5;
unsigned int g:5;
unsigned int b:5;
unsigned int :1;
} __packed;
uint8_t comp[3];
uint16_t raw;
} __aligned (4) color_rgb_t;
typedef union {
struct {
fix16_t h;
fix16_t s;
fix16_t v;
};
fix16_t comp[3];
} __aligned (4) color_fix16_hsv_t;
typedef union {
struct {
uint8_t h;
uint8_t s;
uint8_t v;
};
uint8_t comp[3];
} __aligned (4) color_uint8_hsv_t;
extern void color_rgb_hsv_convert(const color_rgb_t *,
color_fix16_hsv_t *);
extern void color_hsv_lerp(const color_fix16_hsv_t *, const color_fix16_hsv_t *,
fix16_t, color_fix16_hsv_t *);
#endif /* !__libfixmath_color_h__ */
|
typedef union {
struct {
unsigned int :1;
unsigned int b:5;
unsigned int g:5;
unsigned int r:5;
} __packed;
uint8_t comp[3];
uint16_t raw;
} __aligned (4) color_rgb_t;
typedef union {
struct {
fix16_t v;
fix16_t s;
fix16_t h;
};
fix16_t comp[3];
} __aligned (4) color_fix16_hsv_t;
typedef union {
struct {
uint8_t v;
uint8_t s;
uint8_t h;
};
uint8_t comp[3];
} __aligned (4) color_uint8_hsv_t;
extern void color_rgb_hsv_convert(const color_rgb_t *,
color_fix16_hsv_t *);
extern void color_hsv_lerp(const color_fix16_hsv_t *, const color_fix16_hsv_t *,
fix16_t, color_fix16_hsv_t *);
#endif /* !__libfixmath_color_h__ */
|
Change R and B components
|
Change R and B components
|
C
|
mit
|
ijacquez/libyaul,ijacquez/libyaul,ijacquez/libyaul,ijacquez/libyaul
|
c
|
## Code Before:
typedef union {
struct {
unsigned int r:5;
unsigned int g:5;
unsigned int b:5;
unsigned int :1;
} __packed;
uint8_t comp[3];
uint16_t raw;
} __aligned (4) color_rgb_t;
typedef union {
struct {
fix16_t h;
fix16_t s;
fix16_t v;
};
fix16_t comp[3];
} __aligned (4) color_fix16_hsv_t;
typedef union {
struct {
uint8_t h;
uint8_t s;
uint8_t v;
};
uint8_t comp[3];
} __aligned (4) color_uint8_hsv_t;
extern void color_rgb_hsv_convert(const color_rgb_t *,
color_fix16_hsv_t *);
extern void color_hsv_lerp(const color_fix16_hsv_t *, const color_fix16_hsv_t *,
fix16_t, color_fix16_hsv_t *);
#endif /* !__libfixmath_color_h__ */
## Instruction:
Change R and B components
## Code After:
typedef union {
struct {
unsigned int :1;
unsigned int b:5;
unsigned int g:5;
unsigned int r:5;
} __packed;
uint8_t comp[3];
uint16_t raw;
} __aligned (4) color_rgb_t;
typedef union {
struct {
fix16_t v;
fix16_t s;
fix16_t h;
};
fix16_t comp[3];
} __aligned (4) color_fix16_hsv_t;
typedef union {
struct {
uint8_t v;
uint8_t s;
uint8_t h;
};
uint8_t comp[3];
} __aligned (4) color_uint8_hsv_t;
extern void color_rgb_hsv_convert(const color_rgb_t *,
color_fix16_hsv_t *);
extern void color_hsv_lerp(const color_fix16_hsv_t *, const color_fix16_hsv_t *,
fix16_t, color_fix16_hsv_t *);
#endif /* !__libfixmath_color_h__ */
|
# ... existing code ...
typedef union {
struct {
unsigned int :1;
unsigned int b:5;
unsigned int g:5;
unsigned int r:5;
} __packed;
uint8_t comp[3];
uint16_t raw;
# ... modified code ...
typedef union {
struct {
fix16_t v;
fix16_t s;
fix16_t h;
};
fix16_t comp[3];
} __aligned (4) color_fix16_hsv_t;
...
typedef union {
struct {
uint8_t v;
uint8_t s;
uint8_t h;
};
uint8_t comp[3];
} __aligned (4) color_uint8_hsv_t;
# ... rest of the code ...
|
aae5c02f8642eec08e87c102b7d255fb74b86c94
|
pyluos/modules/led.py
|
pyluos/modules/led.py
|
from .module import Module, interact
class Led(Module):
def __init__(self, id, alias, robot):
Module.__init__(self, 'LED', id, alias, robot)
self.color = (0, 0, 0)
@property
def color(self):
return self._value
@color.setter
def color(self, new_color):
if new_color != self._value:
self._value = new_color
self._push_value('rgb', new_color)
def control(self):
def change_color(red, green, blue):
self.color = (red, green, blue)
return interact(change_color,
red=(0, 255, 1),
green=(0, 255, 1),
blue=(0, 255, 1))
|
from .module import Module, interact
class Led(Module):
def __init__(self, id, alias, robot):
Module.__init__(self, 'LED', id, alias, robot)
self.color = (0, 0, 0)
@property
def color(self):
return self._value
@color.setter
def color(self, new_color):
new_color = [int(min(max(c, 0), 255)) for c in new_color]
if new_color != self._value:
self._value = new_color
self._push_value('rgb', new_color)
def control(self):
def change_color(red, green, blue):
self.color = (red, green, blue)
return interact(change_color,
red=(0, 255, 1),
green=(0, 255, 1),
blue=(0, 255, 1))
|
Make sure the rgb values are within the [0, 255] range.
Make sure the rgb values are within the [0, 255] range.
|
Make sure the rgb values are within the [0, 255] range.
|
Python
|
mit
|
pollen/pyrobus
|
python
|
## Code Before:
from .module import Module, interact
class Led(Module):
def __init__(self, id, alias, robot):
Module.__init__(self, 'LED', id, alias, robot)
self.color = (0, 0, 0)
@property
def color(self):
return self._value
@color.setter
def color(self, new_color):
if new_color != self._value:
self._value = new_color
self._push_value('rgb', new_color)
def control(self):
def change_color(red, green, blue):
self.color = (red, green, blue)
return interact(change_color,
red=(0, 255, 1),
green=(0, 255, 1),
blue=(0, 255, 1))
## Instruction:
Make sure the rgb values are within the [0, 255] range.
## Code After:
from .module import Module, interact
class Led(Module):
def __init__(self, id, alias, robot):
Module.__init__(self, 'LED', id, alias, robot)
self.color = (0, 0, 0)
@property
def color(self):
return self._value
@color.setter
def color(self, new_color):
new_color = [int(min(max(c, 0), 255)) for c in new_color]
if new_color != self._value:
self._value = new_color
self._push_value('rgb', new_color)
def control(self):
def change_color(red, green, blue):
self.color = (red, green, blue)
return interact(change_color,
red=(0, 255, 1),
green=(0, 255, 1),
blue=(0, 255, 1))
|
// ... existing code ...
@color.setter
def color(self, new_color):
new_color = [int(min(max(c, 0), 255)) for c in new_color]
if new_color != self._value:
self._value = new_color
self._push_value('rgb', new_color)
// ... rest of the code ...
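
The fix is the single clamp line; isolated below as a standalone sketch so the behaviour is easy to check (the function name is illustrative):

def clamp_rgb(color):
    # Force each channel into [0, 255] and truncate to an int.
    return [int(min(max(c, 0), 255)) for c in color]

assert clamp_rgb((300, -5, 127.9)) == [255, 0, 127]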
|
f9bf31e7cfdcbe8d9195b0f2ca9e159788193c50
|
unjabberlib/cmdui.py
|
unjabberlib/cmdui.py
|
import cmd
from itertools import zip_longest
INDENT = 5 * ' '
class UnjabberCmd(cmd.Cmd):
def __init__(self, queries, **cmdargs):
super().__init__(**cmdargs)
self.queries = queries
def do_who(self, arg):
"""Show list of people. Add part of a name to narrow down."""
for name in self.queries.who(arg):
print(name)
def do_show(self, arg):
"""Show conversations with people matching name (or part of)."""
previous = None
for message in self.queries.messages_for_whom(arg):
print_message(previous, message)
previous = message
def do_grep(self, arg):
"""Show messages containing text."""
for message in self.queries.grep(arg):
print(message)
def do_quit(self, arg):
"""Exit the program."""
return True
def print_message(previous, message):
day, hour, shortname = message.after(previous)
if day:
if previous:
print()
print('==', day, '==')
if shortname:
if not day:
print()
print(hour, ' -- ', shortname)
sub_print_message(INDENT, message.what)
else:
sub_print_message(hour if hour else INDENT, message.what)
def sub_print_message(hour, what):
for h, line in zip_longest([hour], what.split('\n'), fillvalue=INDENT):
print(h, line)
|
import cmd
from functools import partial
from unjabberlib import formatters
trim_print = partial(print, sep='', end='')
class StdoutFormatter(formatters.Formatter):
def append(self, text, format=None):
if format is None or format == formatters.HOUR:
trim_print(text)
elif format == formatters.NAME:
trim_print(' -- ', text)
elif format == formatters.DAY:
trim_print('== ', text, ' ==')
class UnjabberCmd(cmd.Cmd):
def __init__(self, queries, **cmdargs):
super().__init__(**cmdargs)
self.formatter = StdoutFormatter()
self.queries = queries
def do_who(self, arg):
"""Show list of people. Add part of a name to narrow down."""
for name in self.queries.who(arg):
print(name)
def do_show(self, arg):
"""Show conversations with people matching name (or part of)."""
previous = None
for message in self.queries.messages_for_whom(arg):
day, hour, shortname = message.after(previous)
self.formatter.show(previous, day, hour, shortname, message.what)
previous = message
def do_grep(self, arg):
"""Show messages containing text."""
for message in self.queries.grep(arg):
print(message)
def do_quit(self, arg):
"""Exit the program."""
return True
|
Use new formatter in cmd
|
Use new formatter in cmd
|
Python
|
mit
|
adsr303/unjabber
|
python
|
## Code Before:
import cmd
from itertools import zip_longest
INDENT = 5 * ' '
class UnjabberCmd(cmd.Cmd):
def __init__(self, queries, **cmdargs):
super().__init__(**cmdargs)
self.queries = queries
def do_who(self, arg):
"""Show list of people. Add part of a name to narrow down."""
for name in self.queries.who(arg):
print(name)
def do_show(self, arg):
"""Show conversations with people matching name (or part of)."""
previous = None
for message in self.queries.messages_for_whom(arg):
print_message(previous, message)
previous = message
def do_grep(self, arg):
"""Show messages containing text."""
for message in self.queries.grep(arg):
print(message)
def do_quit(self, arg):
"""Exit the program."""
return True
def print_message(previous, message):
day, hour, shortname = message.after(previous)
if day:
if previous:
print()
print('==', day, '==')
if shortname:
if not day:
print()
print(hour, ' -- ', shortname)
sub_print_message(INDENT, message.what)
else:
sub_print_message(hour if hour else INDENT, message.what)
def sub_print_message(hour, what):
for h, line in zip_longest([hour], what.split('\n'), fillvalue=INDENT):
print(h, line)
## Instruction:
Use new formatter in cmd
## Code After:
import cmd
from functools import partial
from unjabberlib import formatters
trim_print = partial(print, sep='', end='')
class StdoutFormatter(formatters.Formatter):
def append(self, text, format=None):
if format is None or format == formatters.HOUR:
trim_print(text)
elif format == formatters.NAME:
trim_print(' -- ', text)
elif format == formatters.DAY:
trim_print('== ', text, ' ==')
class UnjabberCmd(cmd.Cmd):
def __init__(self, queries, **cmdargs):
super().__init__(**cmdargs)
self.formatter = StdoutFormatter()
self.queries = queries
def do_who(self, arg):
"""Show list of people. Add part of a name to narrow down."""
for name in self.queries.who(arg):
print(name)
def do_show(self, arg):
"""Show conversations with people matching name (or part of)."""
previous = None
for message in self.queries.messages_for_whom(arg):
day, hour, shortname = message.after(previous)
self.formatter.show(previous, day, hour, shortname, message.what)
previous = message
def do_grep(self, arg):
"""Show messages containing text."""
for message in self.queries.grep(arg):
print(message)
def do_quit(self, arg):
"""Exit the program."""
return True
|
...
import cmd
from functools import partial
from unjabberlib import formatters
trim_print = partial(print, sep='', end='')
class StdoutFormatter(formatters.Formatter):
def append(self, text, format=None):
if format is None or format == formatters.HOUR:
trim_print(text)
elif format == formatters.NAME:
trim_print(' -- ', text)
elif format == formatters.DAY:
trim_print('== ', text, ' ==')
class UnjabberCmd(cmd.Cmd):
def __init__(self, queries, **cmdargs):
super().__init__(**cmdargs)
self.formatter = StdoutFormatter()
self.queries = queries
def do_who(self, arg):
...
"""Show conversations with people matching name (or part of)."""
previous = None
for message in self.queries.messages_for_whom(arg):
day, hour, shortname = message.after(previous)
self.formatter.show(previous, day, hour, shortname, message.what)
previous = message
def do_grep(self, arg):
...
def do_quit(self, arg):
"""Exit the program."""
return True
...
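
The `trim_print = partial(print, sep='', end='')` line is plain `functools.partial`: it bakes fixed keyword arguments into `print` so each `append` call stays terse. A standalone illustration, independent of the `formatters` module (which this record does not show):

from functools import partial

trim_print = partial(print, sep='', end='')
trim_print('12:30')          # no trailing newline because end=''
trim_print(' -- ', 'alice')  # pieces run together because sep=''
print()                      # finish the line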
|
ffab98b03588cef69ab11a10a440d02952661edf
|
cyder/cydns/soa/forms.py
|
cyder/cydns/soa/forms.py
|
from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
SOAAVForm = get_eav_form(SOAAV, SOA)
|
from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
def clean(self, *args, **kwargs):
contact = self.cleaned_data['contact']
self.cleaned_data['contact'] = contact.replace('@', '.')
return super(SOAForm, self).clean(*args, **kwargs)
SOAAVForm = get_eav_form(SOAAV, SOA)
|
Replace @ with . in soa form clean
|
Replace @ with . in soa form clean
|
Python
|
bsd-3-clause
|
OSU-Net/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder
|
python
|
## Code Before:
from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
SOAAVForm = get_eav_form(SOAAV, SOA)
## Instruction:
Replace @ with . in soa form clean
## Code After:
from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
def clean(self, *args, **kwargs):
contact = self.cleaned_data['contact']
self.cleaned_data['contact'] = contact.replace('@', '.')
return super(SOAForm, self).clean(*args, **kwargs)
SOAAVForm = get_eav_form(SOAAV, SOA)
|
# ... existing code ...
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
def clean(self, *args, **kwargs):
contact = self.cleaned_data['contact']
self.cleaned_data['contact'] = contact.replace('@', '.')
return super(SOAForm, self).clean(*args, **kwargs)
SOAAVForm = get_eav_form(SOAAV, SOA)
# ... rest of the code ...
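
The `clean()` override mirrors the DNS SOA RNAME convention, where the `@` of a contact email is written as a `.` in zone data. The transformation in isolation (the function name is illustrative):

def soa_contact(email):
    # '[email protected]' becomes 'hostmaster.example.com'
    return email.replace('@', '.')

assert soa_contact('[email protected]') == 'hostmaster.example.com'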
|
c94e3cb0f82430811f7e8cc53d29433448395f70
|
favicon/urls.py
|
favicon/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView, RedirectView
import conf
urlpatterns = patterns('',
url(r'^favicon\.ico$', 'django.views.generic.simple.redirect_to', {'url': conf.FAVICON_PATH}, name='favicon'),
)
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView, RedirectView
import conf
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url=conf.FAVICON_PATH), name='favicon'),
)
|
Use RedirectView in urlpatterns (needed for Django 1.5)
|
Use RedirectView in urlpatterns (needed for Django 1.5)
'django.views.generic.simple.redirect_to' is not supported in Django 1.5
Use RedirectView.as_view instead.
The existing code was already importing RedirectView but still using the old 'django.views.generic.simple.redirect_to' in the url patterns.
|
Python
|
bsd-3-clause
|
littlepea/django-favicon
|
python
|
## Code Before:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView, RedirectView
import conf
urlpatterns = patterns('',
url(r'^favicon\.ico$', 'django.views.generic.simple.redirect_to', {'url': conf.FAVICON_PATH}, name='favicon'),
)
## Instruction:
Use RedirectView in urlpatterns (needed for Django 1.5)
'django.views.generic.simple.redirect_to' is not supported in Django 1.5
Use RedirectView.as_view instead.
The existing code was already importing RedirectView but still using the old 'django.views.generic.simple.redirect_to' in the url patterns.
## Code After:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView, RedirectView
import conf
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url=conf.FAVICON_PATH), name='favicon'),
)
|
# ... existing code ...
import conf
urlpatterns = patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url=conf.FAVICON_PATH), name='favicon'),
)
# ... rest of the code ...
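
For reference, the class-based redirect looks like this in current Django, assuming a configured project; the literal path below stands in for `conf.FAVICON_PATH` and is not from the repository:

from django.urls import path
from django.views.generic import RedirectView

urlpatterns = [
    path('favicon.ico',
         RedirectView.as_view(url='/static/favicon.ico'),
         name='favicon'),
]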
|
b75153ad49280ce793a995fca4a34d0688d63cb4
|
tests/unit/checkout/mixins_tests.py
|
tests/unit/checkout/mixins_tests.py
|
import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
|
import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
|
Add tests for CheckoutSessionMixin.check_basket_is_valid method.
|
Add tests for CheckoutSessionMixin.check_basket_is_valid method.
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar
|
python
|
## Code Before:
import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import OrderPlacementMixin
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
## Instruction:
Add tests for CheckoutSessionMixin.check_basket_is_valid method.
## Code After:
import mock
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
def test_returns_none_when_no_shipping_address_passed_to_creation_method(self):
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
|
...
from django.test import TestCase
from oscar.apps.checkout.mixins import CheckoutSessionMixin, OrderPlacementMixin
from oscar.apps.checkout.exceptions import FailedPreCondition
from oscar.test import factories
from oscar.test.utils import RequestFactory
class TestOrderPlacementMixin(TestCase):
...
address = OrderPlacementMixin().create_shipping_address(
user=mock.Mock(), shipping_address=None)
self.assertEqual(address, None)
class TestCheckoutSessionMixin(TestCase):
def setUp(self):
self.request = RequestFactory().get('/')
self.product = factories.create_product(num_in_stock=10)
self.stock_record = self.product.stockrecords.first()
def add_product_to_basket(self, product, quantity=1):
self.request.basket.add_product(product, quantity=quantity)
self.assertEquals(len(self.request.basket.all_lines()), 1)
self.assertEquals(self.request.basket.all_lines()[0].product, product)
def test_check_basket_is_valid_no_stock_available(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.stock_record.allocate(10)
self.stock_record.save()
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
def test_check_basket_is_valid_stock_exceeded(self):
self.add_product_to_basket(self.product)
CheckoutSessionMixin().check_basket_is_valid(self.request)
self.request.basket.add_product(self.product, quantity=11)
with self.assertRaises(FailedPreCondition):
CheckoutSessionMixin().check_basket_is_valid(self.request)
...
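
Both new tests follow the same shape: arrange stock so validation should fail, then assert the expected exception with `assertRaises` as a context manager. A framework-free sketch of that pattern (the exception and function here are stand-ins, not Oscar's API):

import unittest

class FailedPreCondition(Exception):
    pass

def check_quantity(requested, in_stock):
    if requested > in_stock:
        raise FailedPreCondition('not enough stock')

class TestCheck(unittest.TestCase):
    def test_stock_exceeded(self):
        check_quantity(1, in_stock=10)           # enough stock: no error
        with self.assertRaises(FailedPreCondition):
            check_quantity(11, in_stock=10)      # exceeds stock: must raise

if __name__ == '__main__':
    unittest.main()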
|
3106babe5d0dd9d994d0bb6357126428f5d05feb
|
setup.py
|
setup.py
|
from setuptools import setup
from setuptools.command.install import install
import os
import sys
VERSION = '0.1.3'
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our version"""
description = 'Verify that the git tag matches our version'
def run(self):
tag = os.getenv('CIRCLE_TAG')
if tag != VERSION:
info = f"Git tag: {tag} does not match phial version: {VERSION}"
sys.exit(info)
setup(
name='phial-slack',
version=VERSION,
url='https://github.com/sedders123/phial/',
license='MIT',
author='James Seden Smith',
author_email='[email protected]',
description='A Slack bot framework',
long_description=open('README.rst').read(),
packages=['phial'],
include_package_data=True,
zip_safe=False,
platforms='any',
python_requires='>=3.6',
keywords=['Slack', 'bot', 'Slackbot'],
install_requires=[
'slackclient==1.0.6',
'Werkzeug==0.12.2',
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules'
],
cmdclass={
'verify': VerifyVersionCommand,
}
)
|
from setuptools import setup
from setuptools.command.install import install
import os
import sys
VERSION = '0.1.3'
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our version"""
description = 'Verify that the git tag matches our version'
def run(self):
tag = os.getenv('CIRCLE_TAG')
if tag != VERSION:
info = "Git tag: {0} != phial version: {1}".format(tag,
VERSION)
sys.exit(info)
setup(
name='phial-slack',
version=VERSION,
url='https://github.com/sedders123/phial/',
license='MIT',
author='James Seden Smith',
author_email='[email protected]',
description='A Slack bot framework',
long_description=open('README.rst').read(),
packages=['phial'],
include_package_data=True,
zip_safe=False,
platforms='any',
python_requires='>=3.6',
keywords=['Slack', 'bot', 'Slackbot'],
install_requires=[
'slackclient==1.0.6',
'Werkzeug==0.12.2',
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules'
],
cmdclass={
'verify': VerifyVersionCommand,
}
)
|
Remove f-string to allow python 3.4 support
|
Remove f-string to allow python 3.4 support
|
Python
|
mit
|
sedders123/phial
|
python
|
## Code Before:
from setuptools import setup
from setuptools.command.install import install
import os
import sys
VERSION = '0.1.3'
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our version"""
description = 'Verify that the git tag matches our version'
def run(self):
tag = os.getenv('CIRCLE_TAG')
if tag != VERSION:
info = f"Git tag: {tag} does not match phial version: {VERSION}"
sys.exit(info)
setup(
name='phial-slack',
version=VERSION,
url='https://github.com/sedders123/phial/',
license='MIT',
author='James Seden Smith',
author_email='[email protected]',
description='A Slack bot framework',
long_description=open('README.rst').read(),
packages=['phial'],
include_package_data=True,
zip_safe=False,
platforms='any',
python_requires='>=3.6',
keywords=['Slack', 'bot', 'Slackbot'],
install_requires=[
'slackclient==1.0.6',
'Werkzeug==0.12.2',
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules'
],
cmdclass={
'verify': VerifyVersionCommand,
}
)
## Instruction:
Remove f-string to allow python 3.4 support
## Code After:
from setuptools import setup
from setuptools.command.install import install
import os
import sys
VERSION = '0.1.3'
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our version"""
description = 'Verify that the git tag matches our version'
def run(self):
tag = os.getenv('CIRCLE_TAG')
if tag != VERSION:
info = "Git tag: {0} != phial version: {1}".format(tag,
VERSION)
sys.exit(info)
setup(
name='phial-slack',
version=VERSION,
url='https://github.com/sedders123/phial/',
license='MIT',
author='James Seden Smith',
author_email='[email protected]',
description='A Slack bot framework',
long_description=open('README.rst').read(),
packages=['phial'],
include_package_data=True,
zip_safe=False,
platforms='any',
python_requires='>=3.6',
keywords=['Slack', 'bot', 'Slackbot'],
install_requires=[
'slackclient==1.0.6',
'Werkzeug==0.12.2',
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules'
],
cmdclass={
'verify': VerifyVersionCommand,
}
)
|
...
tag = os.getenv('CIRCLE_TAG')
if tag != VERSION:
info = "Git tag: {0} != phial version: {1}".format(tag,
VERSION)
sys.exit(info)
...
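
The swap matters because f-strings are a syntax error before Python 3.6, while `str.format` parses on every Python 3 release. Side by side:

tag, version = 'v0.1.2', '0.1.3'

# Works on Python 3.4+ (and 2.7):
msg = "Git tag: {0} != phial version: {1}".format(tag, version)
print(msg)

# Python >= 3.6 only; a SyntaxError on 3.4, so the module would not even import:
# msg = f"Git tag: {tag} does not match phial version: {version}"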
|
1f914a04adb4ad7d39ca7104e2ea36acc76b18bd
|
pvextractor/tests/test_gui.py
|
pvextractor/tests/test_gui.py
|
import numpy as np
from numpy.testing import assert_allclose
import pytest
from astropy.io import fits
from ..pvextractor import extract_pv_slice
from ..geometry.path import Path
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
import matplotlib as mpl
if mpl.__version__[0] == '2':
MPLOK = False
else:
MPLOK = True
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100,200,220,330,340]
y = [100,200,300,420,430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i],y[i])
pv.fig.canvas.button_press_event(x[i],y[i],1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310,420)
pv.fig.canvas.button_press_event(410,420,1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5,2)
|
import pytest
from distutils.version import LooseVersion
import matplotlib as mpl
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
if LooseVersion(mpl.__version__) < LooseVersion('2'):
MPLOK = True
else:
MPLOK = False
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100, 200, 220, 330, 340]
y = [100, 200, 300, 420, 430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i], y[i])
pv.fig.canvas.button_press_event(x[i], y[i], 1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310, 420)
pv.fig.canvas.button_press_event(410, 420, 1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5, 2)
|
Use LooseVersion to compare version numbers
|
Use LooseVersion to compare version numbers
|
Python
|
bsd-3-clause
|
radio-astro-tools/pvextractor,keflavich/pvextractor
|
python
|
## Code Before:
import numpy as np
from numpy.testing import assert_allclose
import pytest
from astropy.io import fits
from ..pvextractor import extract_pv_slice
from ..geometry.path import Path
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
import matplotlib as mpl
if mpl.__version__[0] == '2':
MPLOK = False
else:
MPLOK = True
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100,200,220,330,340]
y = [100,200,300,420,430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i],y[i])
pv.fig.canvas.button_press_event(x[i],y[i],1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310,420)
pv.fig.canvas.button_press_event(410,420,1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5,2)
## Instruction:
Use LooseVersion to compare version numbers
## Code After:
import pytest
from distutils.version import LooseVersion
import matplotlib as mpl
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
if LooseVersion(mpl.__version__) < LooseVersion('2'):
MPLOK = True
else:
MPLOK = False
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100, 200, 220, 330, 340]
y = [100, 200, 300, 420, 430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i], y[i])
pv.fig.canvas.button_press_event(x[i], y[i], 1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310, 420)
pv.fig.canvas.button_press_event(410, 420, 1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5, 2)
|
// ... existing code ...
import pytest
from distutils.version import LooseVersion
import matplotlib as mpl
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
// ... modified code ...
except ImportError:
PYQT5OK = False
if LooseVersion(mpl.__version__) < LooseVersion('2'):
MPLOK = True
else:
MPLOK = False
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
...
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100, 200, 220, 330, 340]
y = [100, 200, 300, 420, 430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i], y[i])
pv.fig.canvas.button_press_event(x[i], y[i], 1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310, 420)
pv.fig.canvas.button_press_event(410, 420, 1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5, 2)
// ... rest of the code ...
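
The point of `LooseVersion` is that plain string comparison of version numbers is lexicographic, so `'10.0' < '2'`. A quick check (note `distutils` is deprecated and removed in Python 3.12; `packaging.version` is the usual replacement there):

from distutils.version import LooseVersion

assert '10.0' < '2'                              # string compare: wrong order
assert LooseVersion('10.0') > LooseVersion('2')  # numeric compare: right order
assert LooseVersion('1.5.3') < LooseVersion('2')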
|
0866695a2f60538d59277f45a69771664d6dee27
|
setup.py
|
setup.py
|
import sys
import platform
from setuptools import setup, Extension
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
libc_ok = is_glibc and platform.libc_ver()[1] >= '2.9'
windows = sys.platform.startswith('win')
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)
if cpython and ((min_unix_version and libc_ok) or min_win_version):
_cbor2 = Extension(
'_cbor2',
# math.h routines are built-in to MSVCRT
libraries=['m'] if not windows else [],
extra_compile_args=['-std=c99'],
sources=[
'source/module.c',
'source/encoder.c',
'source/decoder.c',
'source/tags.c',
'source/halffloat.c',
]
)
kwargs = {'ext_modules': [_cbor2]}
else:
kwargs = {}
setup(
use_scm_version={
'version_scheme': 'post-release',
'local_scheme': 'dirty-tag'
},
setup_requires=[
'setuptools >= 36.2.7',
'setuptools_scm >= 1.7.0'
],
**kwargs
)
|
import sys
import platform
from setuptools import setup, Extension
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
if is_glibc:
glibc_ver = platform.libc_ver()[1]
libc_numeric = tuple(int(x) for x in glibc_ver.split('.') if x.isdigit())
libc_ok = libc_numeric >= (2, 9)
else:
libc_ok = False
windows = sys.platform.startswith('win')
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)
if cpython and ((min_unix_version and libc_ok) or min_win_version):
_cbor2 = Extension(
'_cbor2',
# math.h routines are built-in to MSVCRT
libraries=['m'] if not windows else [],
extra_compile_args=['-std=c99'],
sources=[
'source/module.c',
'source/encoder.c',
'source/decoder.c',
'source/tags.c',
'source/halffloat.c',
]
)
kwargs = {'ext_modules': [_cbor2]}
else:
kwargs = {}
setup(
use_scm_version={
'version_scheme': 'post-release',
'local_scheme': 'dirty-tag'
},
setup_requires=[
'setuptools >= 36.2.7',
'setuptools_scm >= 1.7.0'
],
**kwargs
)
|
Fix glibc version detect string
|
Fix glibc version detect string
|
Python
|
mit
|
agronholm/cbor2,agronholm/cbor2,agronholm/cbor2
|
python
|
## Code Before:
import sys
import platform
from setuptools import setup, Extension
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
libc_ok = is_glibc and platform.libc_ver()[1] >= '2.9'
windows = sys.platform.startswith('win')
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)
if cpython and ((min_unix_version and libc_ok) or min_win_version):
_cbor2 = Extension(
'_cbor2',
# math.h routines are built-in to MSVCRT
libraries=['m'] if not windows else [],
extra_compile_args=['-std=c99'],
sources=[
'source/module.c',
'source/encoder.c',
'source/decoder.c',
'source/tags.c',
'source/halffloat.c',
]
)
kwargs = {'ext_modules': [_cbor2]}
else:
kwargs = {}
setup(
use_scm_version={
'version_scheme': 'post-release',
'local_scheme': 'dirty-tag'
},
setup_requires=[
'setuptools >= 36.2.7',
'setuptools_scm >= 1.7.0'
],
**kwargs
)
## Instruction:
Fix glibc version detect string
## Code After:
import sys
import platform
from setuptools import setup, Extension
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
if is_glibc:
glibc_ver = platform.libc_ver()[1]
libc_numeric = tuple(int(x) for x in glibc_ver.split('.') if x.isdigit())
libc_ok = libc_numeric >= (2, 9)
else:
libc_ok = False
windows = sys.platform.startswith('win')
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)
if cpython and ((min_unix_version and libc_ok) or min_win_version):
_cbor2 = Extension(
'_cbor2',
# math.h routines are built-in to MSVCRT
libraries=['m'] if not windows else [],
extra_compile_args=['-std=c99'],
sources=[
'source/module.c',
'source/encoder.c',
'source/decoder.c',
'source/tags.c',
'source/halffloat.c',
]
)
kwargs = {'ext_modules': [_cbor2]}
else:
kwargs = {}
setup(
use_scm_version={
'version_scheme': 'post-release',
'local_scheme': 'dirty-tag'
},
setup_requires=[
'setuptools >= 36.2.7',
'setuptools_scm >= 1.7.0'
],
**kwargs
)
|
// ... existing code ...
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
if is_glibc:
glibc_ver = platform.libc_ver()[1]
libc_numeric = tuple(int(x) for x in glibc_ver.split('.') if x.isdigit())
libc_ok = libc_numeric >= (2, 9)
else:
libc_ok = False
windows = sys.platform.startswith('win')
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)
// ... rest of the code ...
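
The bug being fixed is the same string-versus-number trap: `'2.27' >= '2.9'` is False when compared as text. The tuple-building expression from the diff, wrapped in a function here only for illustration:

def numeric(ver):
    # '2.27' -> (2, 27); non-numeric fragments are skipped.
    return tuple(int(x) for x in ver.split('.') if x.isdigit())

assert ('2.27' >= '2.9') is False   # lexicographic comparison misleads
assert numeric('2.27') >= (2, 9)    # tuple comparison gets it right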
|
3f1aeba98cd4bc2f326f9c18c34e66c396be99cf
|
scikits/statsmodels/tools/tests/test_data.py
|
scikits/statsmodels/tools/tests/test_data.py
|
import pandas
import numpy as np
from scikits.statsmodels.tools import data
def test_missing_data_pandas():
"""
Fixes GH: #144
"""
X = np.random.random((10,5))
X[1,2] = np.nan
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(rnames, [0,2,3,4,5,6,7,8,9])
|
import pandas
import numpy as np
from scikits.statsmodels.tools import data
def test_missing_data_pandas():
"""
Fixes GH: #144
"""
X = np.random.random((10,5))
X[1,2] = np.nan
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(rnames, [0,2,3,4,5,6,7,8,9])
def test_structarray():
X = np.random.random((10,)).astype([('var1', 'f8'),
('var2', 'f8'),
('var3', 'f8')])
vals, cnames, rnames = data.interpret_data(X)
np.testing.assert_equal(cnames, X.dtype.names)
np.testing.assert_equal(vals, X.view((float,3)))
np.testing.assert_equal(rnames, None)
def test_recarray():
X = np.random.random((10,)).astype([('var1', 'f8'),
('var2', 'f8'),
('var3', 'f8')])
vals, cnames, rnames = data.interpret_data(X.view(np.recarray))
np.testing.assert_equal(cnames, X.dtype.names)
np.testing.assert_equal(vals, X.view((float,3)))
np.testing.assert_equal(rnames, None)
def test_dataframe():
X = np.random.random((10,5))
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(vals, df.values)
np.testing.assert_equal(rnames, df.index)
np.testing.assert_equal(cnames, df.columns)
|
Add some tests for unused function
|
TST: Add some tests for unused function
|
Python
|
bsd-3-clause
|
Averroes/statsmodels,saketkc/statsmodels,wwf5067/statsmodels,phobson/statsmodels,musically-ut/statsmodels,hlin117/statsmodels,cbmoore/statsmodels,ChadFulton/statsmodels,pprett/statsmodels,statsmodels/statsmodels,gef756/statsmodels,rgommers/statsmodels,alekz112/statsmodels,jstoxrocky/statsmodels,kiyoto/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,jstoxrocky/statsmodels,gef756/statsmodels,phobson/statsmodels,edhuckle/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,Averroes/statsmodels,yl565/statsmodels,bert9bert/statsmodels,huongttlan/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,alekz112/statsmodels,Averroes/statsmodels,wwf5067/statsmodels,yl565/statsmodels,rgommers/statsmodels,bavardage/statsmodels,hlin117/statsmodels,cbmoore/statsmodels,adammenges/statsmodels,ChadFulton/statsmodels,adammenges/statsmodels,statsmodels/statsmodels,phobson/statsmodels,yarikoptic/pystatsmodels,bsipocz/statsmodels,edhuckle/statsmodels,astocko/statsmodels,yarikoptic/pystatsmodels,DonBeo/statsmodels,alekz112/statsmodels,YihaoLu/statsmodels,nvoron23/statsmodels,cbmoore/statsmodels,bsipocz/statsmodels,DonBeo/statsmodels,waynenilsen/statsmodels,jseabold/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,astocko/statsmodels,bzero/statsmodels,jseabold/statsmodels,bavardage/statsmodels,pprett/statsmodels,bzero/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,wdurhamh/statsmodels,kiyoto/statsmodels,rgommers/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,wzbozon/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,pprett/statsmodels,hainm/statsmodels,bashtage/statsmodels,wdurhamh/statsmodels,saketkc/statsmodels,bert9bert/statsmodels,wdurhamh/statsmodels,statsmodels/statsmodels,gef756/statsmodels,hainm/statsmodels,rgommers/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,cbmoore/statsmodels,wdurhamh/statsmodels,musically-ut/statsmodels,phobson/statsmodels,kiyoto/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,YihaoLu/statsmodels,huongttlan/statsmodels,bert9bert/statsmodels,josef-pkt/statsmodels,detrout/debian-statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,hainm/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,bavardage/statsmodels,DonBeo/statsmodels,bzero/statsmodels,jseabold/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,gef756/statsmodels,hlin117/statsmodels,waynenilsen/statsmodels,jseabold/statsmodels,astocko/statsmodels,bavardage/statsmodels,jseabold/statsmodels,wkfwkf/statsmodels,pprett/statsmodels,yarikoptic/pystatsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,ChadFulton/statsmodels,saketkc/statsmodels,nguyentu1602/statsmodels,musically-ut/statsmodels,cbmoore/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,gef756/statsmodels,nguyentu1602/statsmodels,astocko/statsmodels,bzero/statsmodels,bzero/statsmodels,nvoron23/statsmodels,nvoron23/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,jstoxrocky/statsmodels,phobson/statsmodels,adammenges/statsmodels,wwf5067/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,huongttlan/statsmodels,bert9bert/statsmodels,hainm/statsmodels,bavardage/statsmodels,YihaoLu/statsmodels,DonBeo/statsmodels,detrout/debian-statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,huongttlan/statsmodels,josef-pkt/statsmodels,wzbozon/statsmodels,hlin117/statsmodels,yl565/statsmodels,wzbozon/statsmodels,saketkc/statsmodels,nguyentu1602/statsmodels,edhuckle/statsmodels,yl565/statsmodels
|
python
|
## Code Before:
import pandas
import numpy as np
from scikits.statsmodels.tools import data
def test_missing_data_pandas():
"""
Fixes GH: #144
"""
X = np.random.random((10,5))
X[1,2] = np.nan
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(rnames, [0,2,3,4,5,6,7,8,9])
## Instruction:
TST: Add some tests for unused function
## Code After:
import pandas
import numpy as np
from scikits.statsmodels.tools import data
def test_missing_data_pandas():
"""
Fixes GH: #144
"""
X = np.random.random((10,5))
X[1,2] = np.nan
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(rnames, [0,2,3,4,5,6,7,8,9])
def test_structarray():
X = np.random.random((10,)).astype([('var1', 'f8'),
('var2', 'f8'),
('var3', 'f8')])
vals, cnames, rnames = data.interpret_data(X)
np.testing.assert_equal(cnames, X.dtype.names)
np.testing.assert_equal(vals, X.view((float,3)))
np.testing.assert_equal(rnames, None)
def test_recarray():
X = np.random.random((10,)).astype([('var1', 'f8'),
('var2', 'f8'),
('var3', 'f8')])
vals, cnames, rnames = data.interpret_data(X.view(np.recarray))
np.testing.assert_equal(cnames, X.dtype.names)
np.testing.assert_equal(vals, X.view((float,3)))
np.testing.assert_equal(rnames, None)
def test_dataframe():
X = np.random.random((10,5))
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(vals, df.values)
np.testing.assert_equal(rnames, df.index)
np.testing.assert_equal(cnames, df.columns)
|
// ... existing code ...
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(rnames, [0,2,3,4,5,6,7,8,9])
def test_structarray():
X = np.random.random((10,)).astype([('var1', 'f8'),
('var2', 'f8'),
('var3', 'f8')])
vals, cnames, rnames = data.interpret_data(X)
np.testing.assert_equal(cnames, X.dtype.names)
np.testing.assert_equal(vals, X.view((float,3)))
np.testing.assert_equal(rnames, None)
def test_recarray():
X = np.random.random((10,)).astype([('var1', 'f8'),
('var2', 'f8'),
('var3', 'f8')])
vals, cnames, rnames = data.interpret_data(X.view(np.recarray))
np.testing.assert_equal(cnames, X.dtype.names)
np.testing.assert_equal(vals, X.view((float,3)))
np.testing.assert_equal(rnames, None)
def test_dataframe():
X = np.random.random((10,5))
df = pandas.DataFrame(X)
vals, cnames, rnames = data.interpret_data(df)
np.testing.assert_equal(vals, df.values)
np.testing.assert_equal(rnames, df.index)
np.testing.assert_equal(cnames, df.columns)
// ... rest of the code ...
|
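A note on the assertion style in the new statsmodels tests above: a structured array whose fields are all float64 occupies the same memory as a plain (N, 3) float block, so `view((float, 3))` is a zero-copy reinterpretation. A small standalone sketch (not part of the test file):

```python
import numpy as np

# Structured array with three float64 fields, as in the tests above.
dt = np.dtype([('var1', 'f8'), ('var2', 'f8'), ('var3', 'f8')])
X = np.zeros(4, dtype=dt)
X['var1'], X['var2'], X['var3'] = 1.0, 2.0, 3.0

# Reinterpret the same buffer as an (N, 3) float array -- this is the value
# the tests expect interpret_data() to hand back for struct/rec arrays.
flat = X.view((float, 3))
print(flat.shape)       # (4, 3)
print(X.dtype.names)    # ('var1', 'var2', 'var3')
```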
b0154580bda2b9cfd09e1d817aef5e08cf360238
|
feature-reminder/src/main/java/fr/o80/featurereminder/receiver/OnStartupReceiver.kt
|
feature-reminder/src/main/java/fr/o80/featurereminder/receiver/OnStartupReceiver.kt
|
package fr.o80.featurereminder.receiver
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import fr.o80.featurereminder.ScheduleRemind
import fr.o80.featurereminder.dagger.DaggerReminderComponent
import timber.log.Timber
import javax.inject.Inject
class OnStartupReceiver : BroadcastReceiver() {
@Inject
lateinit var scheduler: ScheduleRemind
override fun onReceive(context: Context, intent: Intent?) {
Timber.i("Schedule the reminder at phone boot")
DaggerReminderComponent.builder()
.build()
.inject(this)
scheduler.scheduleReminder()
}
}
|
package fr.o80.featurereminder.receiver
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import fr.o80.featurereminder.ScheduleRemind
import fr.o80.featurereminder.dagger.DaggerReminderComponent
import fr.o80.sample.lib.core.LibApplication
import timber.log.Timber
import javax.inject.Inject
class OnStartupReceiver : BroadcastReceiver() {
@Inject
lateinit var scheduler: ScheduleRemind
override fun onReceive(context: Context, intent: Intent?) {
Timber.i("Schedule the reminder at phone boot")
DaggerReminderComponent.builder()
.libComponent((context as LibApplication).component)
.build()
.inject(this)
scheduler.scheduleReminder()
}
}
|
Fix the build of Dagger in the BOOT_COMPLETE receiver
|
Fix the build of Dagger in the BOOT_COMPLETE receiver
|
Kotlin
|
apache-2.0
|
olivierperez/crapp
|
kotlin
|
## Code Before:
package fr.o80.featurereminder.receiver
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import fr.o80.featurereminder.ScheduleRemind
import fr.o80.featurereminder.dagger.DaggerReminderComponent
import timber.log.Timber
import javax.inject.Inject
class OnStartupReceiver : BroadcastReceiver() {
@Inject
lateinit var scheduler: ScheduleRemind
override fun onReceive(context: Context, intent: Intent?) {
Timber.i("Schedule the reminder at phone boot")
DaggerReminderComponent.builder()
.build()
.inject(this)
scheduler.scheduleReminder()
}
}
## Instruction:
Fix the build of Dagger in the BOOT_COMPLETE receiver
## Code After:
package fr.o80.featurereminder.receiver
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import fr.o80.featurereminder.ScheduleRemind
import fr.o80.featurereminder.dagger.DaggerReminderComponent
import fr.o80.sample.lib.core.LibApplication
import timber.log.Timber
import javax.inject.Inject
class OnStartupReceiver : BroadcastReceiver() {
@Inject
lateinit var scheduler: ScheduleRemind
override fun onReceive(context: Context, intent: Intent?) {
Timber.i("Schedule the reminder at phone boot")
DaggerReminderComponent.builder()
.libComponent((context as LibApplication).component)
.build()
.inject(this)
scheduler.scheduleReminder()
}
}
|
# ... existing code ...
import android.content.Intent
import fr.o80.featurereminder.ScheduleRemind
import fr.o80.featurereminder.dagger.DaggerReminderComponent
import fr.o80.sample.lib.core.LibApplication
import timber.log.Timber
import javax.inject.Inject
# ... modified code ...
override fun onReceive(context: Context, intent: Intent?) {
Timber.i("Schedule the reminder at phone boot")
DaggerReminderComponent.builder()
.libComponent((context as LibApplication).component)
.build()
.inject(this)
scheduler.scheduleReminder()
# ... rest of the code ...
|
748959483d936f0b19ec69bd0832c33998573c25
|
src/main/java/in/twizmwaz/cardinal/filter/type/ObjectiveFilter.java
|
src/main/java/in/twizmwaz/cardinal/filter/type/ObjectiveFilter.java
|
package in.twizmwaz.cardinal.filter.type;
import in.twizmwaz.cardinal.filter.Filter;
import in.twizmwaz.cardinal.filter.FilterState;
import org.bukkit.Bukkit;
import java.util.logging.Level;
import static in.twizmwaz.cardinal.filter.FilterState.*;
public class ObjectiveFilter extends Filter {
@Override
public FilterState getState(final Object o) {
Bukkit.getLogger().log(Level.INFO, "Objective Filters are not yet supported in CardinalPGM.");
return ABSTAIN;
}
}
|
package in.twizmwaz.cardinal.filter.type;
import in.twizmwaz.cardinal.filter.Filter;
import in.twizmwaz.cardinal.filter.FilterState;
import in.twizmwaz.cardinal.module.GameObjective;
import org.bukkit.Bukkit;
import java.util.logging.Level;
import static in.twizmwaz.cardinal.filter.FilterState.*;
public class ObjectiveFilter extends Filter {
private final GameObjective objective;
public ObjectiveFilter(final GameObjective objective) {
this.objective = objective;
}
@Override
public FilterState getState(final Object o) {
if (objective.isComplete()) return ALLOW;
else return DENY;
}
}
|
Add support for Objective filters
|
Add support for Objective filters
|
Java
|
mit
|
Alan736/NotCardinalPGM,twizmwazin/CardinalPGM,Alan736/NotCardinalPGM,angelitorb99/CardinalPGM,dentmaged/Cardinal-Dev,TheMolkaPL/CardinalPGM,CaptainElliott/CardinalPGM,iPGz/CardinalPGM,dentmaged/Cardinal-Dev,dentmaged/Cardinal-Plus,dentmaged/CardinalPGM,dentmaged/CardinalPGM,TheMolkaPL/CardinalPGM,Pablete1234/CardinalPGM,Electroid/ExperimentalPGM,dentmaged/Cardinal-Plus,Aaron1011/CardinalPGM,Electroid/ExperimentalPGM,SungMatt/CardinalPGM
|
java
|
## Code Before:
package in.twizmwaz.cardinal.filter.type;
import in.twizmwaz.cardinal.filter.Filter;
import in.twizmwaz.cardinal.filter.FilterState;
import org.bukkit.Bukkit;
import java.util.logging.Level;
import static in.twizmwaz.cardinal.filter.FilterState.*;
public class ObjectiveFilter extends Filter {
@Override
public FilterState getState(final Object o) {
Bukkit.getLogger().log(Level.INFO, "Objective Filters are not yet supported in CardinalPGM.");
return ABSTAIN;
}
}
## Instruction:
Add support for Objective filters
## Code After:
package in.twizmwaz.cardinal.filter.type;
import in.twizmwaz.cardinal.filter.Filter;
import in.twizmwaz.cardinal.filter.FilterState;
import in.twizmwaz.cardinal.module.GameObjective;
import org.bukkit.Bukkit;
import java.util.logging.Level;
import static in.twizmwaz.cardinal.filter.FilterState.*;
public class ObjectiveFilter extends Filter {
private final GameObjective objective;
public ObjectiveFilter(final GameObjective objective) {
this.objective = objective;
}
@Override
public FilterState getState(final Object o) {
if (objective.isComplete()) return ALLOW;
else return DENY;
}
}
|
...
import in.twizmwaz.cardinal.filter.Filter;
import in.twizmwaz.cardinal.filter.FilterState;
import in.twizmwaz.cardinal.module.GameObjective;
import org.bukkit.Bukkit;
import java.util.logging.Level;
...
public class ObjectiveFilter extends Filter {
private final GameObjective objective;
public ObjectiveFilter(final GameObjective objective) {
this.objective = objective;
}
@Override
public FilterState getState(final Object o) {
if (objective.isComplete()) return ALLOW;
else return DENY;
}
}
...
|
0a6078f5d0537cea9f36894b736fa274c3fa3e47
|
molo/core/cookiecutter/scaffold/{{cookiecutter.directory}}/{{cookiecutter.app_name}}/forms.py
|
molo/core/cookiecutter/scaffold/{{cookiecutter.directory}}/{{cookiecutter.app_name}}/forms.py
|
from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
|
from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
|
Fix upload form to only accept .zip files
|
Fix upload form to only accept .zip files
|
Python
|
bsd-2-clause
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
python
|
## Code Before:
from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
## Instruction:
Fix upload form to only accept .zip files
## Code After:
from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
|
// ... existing code ...
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
// ... modified code ...
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
// ... rest of the code ...
|
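Why the rename in the record above matters: Django only calls a per-field hook named clean_<fieldname> for fields that actually exist on the form, so clean_data_file was silently ignored while clean_zip_file now runs during validation. A hedged usage sketch (assumes a configured Django settings module; the file names and contents are made up):

```python
from django.core.files.uploadedfile import SimpleUploadedFile

bad = MediaForm(files={'zip_file': SimpleUploadedFile('media.tar', b'not a zip')})
print(bad.is_valid())   # False -- clean_zip_file rejects the .tar extension
print(bad.errors)       # reports "File Type Is Not .zip" for zip_file

good = MediaForm(files={'zip_file': SimpleUploadedFile('media.zip', b'dummy bytes')})
print(good.is_valid())  # True
```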
39e00164541535db2de8c8143d8728e5624f98f9
|
configuration/development.py
|
configuration/development.py
|
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'app.db')
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
del os
|
import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
del pathlib
|
Move the db back to the correct location
|
Move the db back to the correct location
|
Python
|
agpl-3.0
|
interactomix/iis,interactomix/iis
|
python
|
## Code Before:
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'app.db')
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
del os
## Instruction:
Move the db back to the correct location
## Code After:
import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
del pathlib
|
# ... existing code ...
import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SECRET_KEY = 'INSECURE'
# ... modified code ...
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
del pathlib
# ... rest of the code ...
|
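For the record above, the key detail is `parents[1]`: it walks two directory levels up from the settings file, so app.db lands in the project root instead of inside configuration/. A quick illustration with a pure path (the /srv/iis prefix is hypothetical):

```python
import pathlib

p = pathlib.PurePosixPath('/srv/iis/configuration/development.py')
print(p.parents[0])              # /srv/iis/configuration   (the old os.path.dirname location)
print(p.parents[1])              # /srv/iis                  (the project root)
print(p.parents[1] / 'app.db')   # /srv/iis/app.db           (where the db now goes)
```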
fa33394bb70481412493fcf40d53ebdb2e738058
|
test/CodeGen/ffp-contract-option.c
|
test/CodeGen/ffp-contract-option.c
|
// RUN: %clang_cc1 -O3 -ffp-contract=fast -triple=powerpc-apple-darwin10 -S -o - %s | FileCheck %s
// REQUIRES: powerpc-registered-target
float fma_test1(float a, float b, float c) {
// CHECK: fmadds
float x = a * b;
float y = x + c;
return y;
}
|
// RUN: %clang_cc1 -O3 -ffp-contract=fast -triple=aarch64-apple-darwin -S -o - %s | FileCheck %s
// REQUIRES: aarch64-registered-target
float fma_test1(float a, float b, float c) {
// CHECK: fmadd
float x = a * b;
float y = x + c;
return y;
}
|
Change -ffp-contract=fast test to run on Aarch64
|
Change -ffp-contract=fast test to run on Aarch64
(I don't have powerpc enabled in my build and I am changing
how -ffp-contract=fast works.)
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@298468 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -O3 -ffp-contract=fast -triple=powerpc-apple-darwin10 -S -o - %s | FileCheck %s
// REQUIRES: powerpc-registered-target
float fma_test1(float a, float b, float c) {
// CHECK: fmadds
float x = a * b;
float y = x + c;
return y;
}
## Instruction:
Change -ffp-contract=fast test to run on Aarch64
(I don't have powerpc enabled in my build and I am changing
how -ffp-contract=fast works.)
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@298468 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -O3 -ffp-contract=fast -triple=aarch64-apple-darwin -S -o - %s | FileCheck %s
// REQUIRES: aarch64-registered-target
float fma_test1(float a, float b, float c) {
// CHECK: fmadd
float x = a * b;
float y = x + c;
return y;
}
|
# ... existing code ...
// RUN: %clang_cc1 -O3 -ffp-contract=fast -triple=aarch64-apple-darwin -S -o - %s | FileCheck %s
// REQUIRES: aarch64-registered-target
float fma_test1(float a, float b, float c) {
// CHECK: fmadd
float x = a * b;
float y = x + c;
return y;
# ... rest of the code ...
|
ddf2075228a8c250cf75ec85914801262cb73177
|
zerver/migrations/0032_verify_all_medium_avatar_images.py
|
zerver/migrations/0032_verify_all_medium_avatar_images.py
|
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import upload_backend
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from mock import patch
from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
from zerver.models import UserProfile
from typing import Text
import hashlib
# We hackishly patch this function in order to revert it to the state
# it had when this migration was first written. This is a balance
# between copying in a historical version of hundreds of lines of code
# from zerver.lib.upload (which would pretty annoying, but would be a
# pain) and just using the current version, which doesn't work
# since we rearranged the avatars in Zulip 1.6.
def patched_user_avatar_path(user_profile):
# type: (UserProfile) -> Text
email = user_profile.email
user_key = email.lower() + settings.AVATAR_SALT
return make_safe_digest(user_key, hashlib.sha1)
@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
Make migration 0032 use an old version of user_avatar_path.
|
Make migration 0032 use an old version of user_avatar_path.
This fixes upgrading from very old Zulip servers (e.g. 1.4.3) all the
way to current.
Fixes: #6516.
|
Python
|
apache-2.0
|
hackerkid/zulip,kou/zulip,amanharitsh123/zulip,brockwhittaker/zulip,showell/zulip,hackerkid/zulip,rishig/zulip,verma-varsha/zulip,synicalsyntax/zulip,zulip/zulip,amanharitsh123/zulip,showell/zulip,punchagan/zulip,amanharitsh123/zulip,punchagan/zulip,timabbott/zulip,rht/zulip,tommyip/zulip,eeshangarg/zulip,Galexrt/zulip,eeshangarg/zulip,rishig/zulip,rishig/zulip,Galexrt/zulip,dhcrzf/zulip,rht/zulip,shubhamdhama/zulip,rht/zulip,tommyip/zulip,mahim97/zulip,kou/zulip,kou/zulip,timabbott/zulip,zulip/zulip,brainwane/zulip,verma-varsha/zulip,verma-varsha/zulip,punchagan/zulip,brainwane/zulip,zulip/zulip,eeshangarg/zulip,timabbott/zulip,synicalsyntax/zulip,brockwhittaker/zulip,brockwhittaker/zulip,brainwane/zulip,showell/zulip,brainwane/zulip,brainwane/zulip,rishig/zulip,tommyip/zulip,Galexrt/zulip,tommyip/zulip,synicalsyntax/zulip,zulip/zulip,jackrzhang/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,punchagan/zulip,rht/zulip,kou/zulip,andersk/zulip,kou/zulip,timabbott/zulip,jackrzhang/zulip,synicalsyntax/zulip,eeshangarg/zulip,brockwhittaker/zulip,tommyip/zulip,brainwane/zulip,dhcrzf/zulip,dhcrzf/zulip,rishig/zulip,showell/zulip,showell/zulip,andersk/zulip,hackerkid/zulip,brockwhittaker/zulip,andersk/zulip,showell/zulip,timabbott/zulip,dhcrzf/zulip,mahim97/zulip,jackrzhang/zulip,andersk/zulip,hackerkid/zulip,jackrzhang/zulip,jackrzhang/zulip,amanharitsh123/zulip,shubhamdhama/zulip,amanharitsh123/zulip,mahim97/zulip,kou/zulip,tommyip/zulip,dhcrzf/zulip,shubhamdhama/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,shubhamdhama/zulip,jackrzhang/zulip,shubhamdhama/zulip,zulip/zulip,punchagan/zulip,rishig/zulip,shubhamdhama/zulip,hackerkid/zulip,shubhamdhama/zulip,dhcrzf/zulip,timabbott/zulip,rishig/zulip,punchagan/zulip,zulip/zulip,verma-varsha/zulip,showell/zulip,Galexrt/zulip,mahim97/zulip,Galexrt/zulip,verma-varsha/zulip,punchagan/zulip,hackerkid/zulip,jackrzhang/zulip,Galexrt/zulip,brainwane/zulip,hackerkid/zulip,synicalsyntax/zulip,eeshangarg/zulip,zulip/zulip,dhcrzf/zulip,andersk/zulip,brockwhittaker/zulip,mahim97/zulip,amanharitsh123/zulip,timabbott/zulip,synicalsyntax/zulip,rht/zulip,tommyip/zulip,synicalsyntax/zulip,verma-varsha/zulip,mahim97/zulip,Galexrt/zulip,andersk/zulip
|
python
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import upload_backend
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
## Instruction:
Make migration 0032 use an old version of user_avatar_path.
This fixes upgrading from very old Zulip servers (e.g. 1.4.3) all the
way to current.
Fixes: #6516.
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from mock import patch
from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
from zerver.models import UserProfile
from typing import Text
import hashlib
# We hackishly patch this function in order to revert it to the state
# it had when this migration was first written. This is a balance
# between copying in a historical version of hundreds of lines of code
# from zerver.lib.upload (which would pretty annoying, but would be a
# pain) and just using the current version, which doesn't work
# since we rearranged the avatars in Zulip 1.6.
def patched_user_avatar_path(user_profile):
# type: (UserProfile) -> Text
email = user_profile.email
user_key = email.lower() + settings.AVATAR_SALT
return make_safe_digest(user_key, hashlib.sha1)
@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
// ... existing code ...
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from mock import patch
from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
from zerver.models import UserProfile
from typing import Text
import hashlib
# We hackishly patch this function in order to revert it to the state
# it had when this migration was first written. This is a balance
# between copying in a historical version of hundreds of lines of code
# from zerver.lib.upload (which would pretty annoying, but would be a
# pain) and just using the current version, which doesn't work
# since we rearranged the avatars in Zulip 1.6.
def patched_user_avatar_path(user_profile):
# type: (UserProfile) -> Text
email = user_profile.email
user_key = email.lower() + settings.AVATAR_SALT
return make_safe_digest(user_key, hashlib.sha1)
@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
// ... rest of the code ...
|
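The migration above leans on @patch with an explicit replacement object: when a second positional argument is given, mock swaps the target attribute for the duration of the decorated call, restores it afterwards, and does not inject a mock argument into the function. A self-contained sketch using a stdlib target instead of the Zulip helper:

```python
import os
from unittest.mock import patch   # the `mock` backport package used above behaves the same

def fake_getcwd():
    return '/pretend/cwd'

@patch('os.getcwd', fake_getcwd)
def inside():
    # os.getcwd is looked up at call time, so the patched version is seen here.
    return os.getcwd()

print(inside())      # /pretend/cwd
print(os.getcwd())   # back to the real working directory
```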
f2e0fdb2a0ab17ce80e5eed3e553235e186b02cd
|
jasypt-maven-plugin/src/main/java/com/ulisesbocchio/jasyptmavenplugin/mojo/DecryptValueMojo.java
|
jasypt-maven-plugin/src/main/java/com/ulisesbocchio/jasyptmavenplugin/mojo/DecryptValueMojo.java
|
package com.ulisesbocchio.jasyptmavenplugin.mojo;
import com.ulisesbocchio.jasyptmavenplugin.encrypt.EncryptionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts demarcated values in properties files.
*
* @author ubocchio
*/
@Mojo(name = "decrypt-value", defaultPhase = LifecyclePhase.PROCESS_RESOURCES)
@Slf4j
public class DecryptValueMojo extends AbstractValueJasyptMojo {
@Override
protected void run(final EncryptionService service, final String value, String encryptPrefix, String encryptSuffix, String decryptPrefix, String decryptSuffix) throws
MojoExecutionException {
try {
String actualValue = value.startsWith(encryptPrefix) ? value.substring(encryptPrefix.length(), value.length() - encryptSuffix.length()) : value;
log.info("Decrypting value " + actualValue);
String decryptedValue = service.decryptValue(actualValue);
log.info("\n" + decryptedValue);
} catch (Exception e) {
throw new MojoExecutionException("Error Decrypting: " + e.getMessage(), e);
}
}
}
|
package com.ulisesbocchio.jasyptmavenplugin.mojo;
import com.ulisesbocchio.jasyptmavenplugin.encrypt.EncryptionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts values.
*
* @author ubocchio
*/
@Mojo(name = "decrypt-value", defaultPhase = LifecyclePhase.PROCESS_RESOURCES)
@Slf4j
public class DecryptValueMojo extends AbstractValueJasyptMojo {
@Override
protected void run(final EncryptionService service, final String value, String encryptPrefix, String encryptSuffix, String decryptPrefix, String decryptSuffix) throws
MojoExecutionException {
try {
String actualValue = value.startsWith(encryptPrefix) ? value.substring(encryptPrefix.length(), value.length() - encryptSuffix.length()) : value;
log.info("Decrypting value " + actualValue);
String decryptedValue = service.decryptValue(actualValue);
log.info("\n" + decryptedValue);
} catch (Exception e) {
throw new MojoExecutionException("Error Decrypting: " + e.getMessage(), e);
}
}
}
|
Fix Copy&Paste Error and documentation
|
Fix Copy&Paste Error and documentation
|
Java
|
mit
|
ulisesbocchio/jasypt-spring-boot
|
java
|
## Code Before:
package com.ulisesbocchio.jasyptmavenplugin.mojo;
import com.ulisesbocchio.jasyptmavenplugin.encrypt.EncryptionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts demarcated values in properties files.
*
* @author ubocchio
*/
@Mojo(name = "decrypt-value", defaultPhase = LifecyclePhase.PROCESS_RESOURCES)
@Slf4j
public class DecryptValueMojo extends AbstractValueJasyptMojo {
@Override
protected void run(final EncryptionService service, final String value, String encryptPrefix, String encryptSuffix, String decryptPrefix, String decryptSuffix) throws
MojoExecutionException {
try {
String actualValue = value.startsWith(encryptPrefix) ? value.substring(encryptPrefix.length(), value.length() - encryptSuffix.length()) : value;
log.info("Decrypting value " + actualValue);
String decryptedValue = service.decryptValue(actualValue);
log.info("\n" + decryptedValue);
} catch (Exception e) {
throw new MojoExecutionException("Error Decrypting: " + e.getMessage(), e);
}
}
}
## Instruction:
Fix Copy&Paste Error and documentation
## Code After:
package com.ulisesbocchio.jasyptmavenplugin.mojo;
import com.ulisesbocchio.jasyptmavenplugin.encrypt.EncryptionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts values.
*
* @author ubocchio
*/
@Mojo(name = "decrypt-value", defaultPhase = LifecyclePhase.PROCESS_RESOURCES)
@Slf4j
public class DecryptValueMojo extends AbstractValueJasyptMojo {
@Override
protected void run(final EncryptionService service, final String value, String encryptPrefix, String encryptSuffix, String decryptPrefix, String decryptSuffix) throws
MojoExecutionException {
try {
String actualValue = value.startsWith(encryptPrefix) ? value.substring(encryptPrefix.length(), value.length() - encryptSuffix.length()) : value;
log.info("Decrypting value " + actualValue);
String decryptedValue = service.decryptValue(actualValue);
log.info("\n" + decryptedValue);
} catch (Exception e) {
throw new MojoExecutionException("Error Decrypting: " + e.getMessage(), e);
}
}
}
|
// ... existing code ...
import org.apache.maven.plugins.annotations.Mojo;
/**
* Goal which decrypts values.
*
* @author ubocchio
*/
// ... rest of the code ...
|
1c0969525f2500603fbb9f2360fbda3439831003
|
thingshub/CDZThingsHubErrorDomain.h
|
thingshub/CDZThingsHubErrorDomain.h
|
//
// CDZThingsHubErrorDomain.h
// thingshub
//
// Created by Chris Dzombak on 1/14/14.
// Copyright (c) 2014 Chris Dzombak. All rights reserved.
//
#import "CDZThingsHubApplication.h"
extern NSString * const kThingsHubErrorDomain;
typedef NS_ENUM(NSInteger, CDZErrorCode) {
CDZErrorCodeConfigurationValidationError = CDZThingsHubApplicationReturnCodeConfigError,
CDZErrorCodeTestError = -1,
};
|
//
// CDZThingsHubErrorDomain.h
// thingshub
//
// Created by Chris Dzombak on 1/14/14.
// Copyright (c) 2014 Chris Dzombak. All rights reserved.
//
#import "CDZThingsHubApplication.h"
extern NSString * const kThingsHubErrorDomain;
typedef NS_ENUM(NSInteger, CDZErrorCode) {
CDZErrorCodeTestError = 0,
CDZErrorCodeAuthError = CDZThingsHubApplicationReturnCodeAuthError,
CDZErrorCodeConfigurationValidationError = CDZThingsHubApplicationReturnCodeConfigError,
CDZErrorCodeSyncFailure = CDZThingsHubApplicationReturnCodeSyncFailed,
};
|
Make error domain constants mirror app return codes
|
Make error domain constants mirror app return codes
|
C
|
mit
|
cdzombak/thingshub,cdzombak/thingshub,cdzombak/thingshub
|
c
|
## Code Before:
//
// CDZThingsHubErrorDomain.h
// thingshub
//
// Created by Chris Dzombak on 1/14/14.
// Copyright (c) 2014 Chris Dzombak. All rights reserved.
//
#import "CDZThingsHubApplication.h"
extern NSString * const kThingsHubErrorDomain;
typedef NS_ENUM(NSInteger, CDZErrorCode) {
CDZErrorCodeConfigurationValidationError = CDZThingsHubApplicationReturnCodeConfigError,
CDZErrorCodeTestError = -1,
};
## Instruction:
Make error domain constants mirror app return codes
## Code After:
//
// CDZThingsHubErrorDomain.h
// thingshub
//
// Created by Chris Dzombak on 1/14/14.
// Copyright (c) 2014 Chris Dzombak. All rights reserved.
//
#import "CDZThingsHubApplication.h"
extern NSString * const kThingsHubErrorDomain;
typedef NS_ENUM(NSInteger, CDZErrorCode) {
CDZErrorCodeTestError = 0,
CDZErrorCodeAuthError = CDZThingsHubApplicationReturnCodeAuthError,
CDZErrorCodeConfigurationValidationError = CDZThingsHubApplicationReturnCodeConfigError,
CDZErrorCodeSyncFailure = CDZThingsHubApplicationReturnCodeSyncFailed,
};
|
// ... existing code ...
extern NSString * const kThingsHubErrorDomain;
typedef NS_ENUM(NSInteger, CDZErrorCode) {
CDZErrorCodeTestError = 0,
CDZErrorCodeAuthError = CDZThingsHubApplicationReturnCodeAuthError,
CDZErrorCodeConfigurationValidationError = CDZThingsHubApplicationReturnCodeConfigError,
CDZErrorCodeSyncFailure = CDZThingsHubApplicationReturnCodeSyncFailed,
};
// ... rest of the code ...
|
a1f90ee075cda4288a335063d42284118cce1d43
|
src/PairingListCellRenderer.java
|
src/PairingListCellRenderer.java
|
import java.awt.Component;
import java.awt.image.BufferedImage;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
@SuppressWarnings("unused")
public class PairingListCellRenderer implements ListCellRenderer<Pairing> {
@Override
public Component getListCellRendererComponent(JList<? extends Pairing> list, Pairing value, int index, boolean isSelected, boolean cellHasFocus) {
String latex = "?";
if (value != null) {
String type = value.getVariableExpression().getType().toString();
if (value.isPaired()) {
//make a copy of the expression so that any selections can be removed,
//otherwise the selected subexpression will show as highlighted in the pairings list.
Expression copy = value.getPairedExpression().duplicate();
copy.deselectRecursive();
latex = type + " " + value.getVariableExpression().toLatex() + " \\leftarrow " + copy.toLatex();
//latex = value.getVariableExpression().toLatex() + " \\leftarrow " + value.getPairedExpression().toLatex();
}
else {
latex = type + " " + value.getVariableExpression().toLatex() + " \\textrm{ is unpaired}";
}
if (isSelected) {
latex = "\\bgcolor{" + LookAndFeel.SELECTED_LATEX_COLOR + "}{" + latex + "}";
}
}
return new JLabel(new ImageIcon(LatexHandler.latexToImage(latex)));
}
}
|
import java.awt.Component;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
public class PairingListCellRenderer implements ListCellRenderer<Pairing> {
@Override
public Component getListCellRendererComponent(JList<? extends Pairing> list, Pairing value, int index, boolean isSelected, boolean cellHasFocus) {
String latex = "?";
if (value != null) {
String type = value.getVariableExpression().getType().toString();
if (value.isPaired()) {
//make a copy of the expression so that any selections can be removed,
//otherwise the selected subexpression will show as highlighted in the pairings list.
Expression copy = value.getPairedExpression().duplicate();
copy.deselectRecursive();
latex = String.format("\\textrm{%s } %s \\leftarrow %s",
type,
value.getVariableExpression().toLatex(),
copy.toLatex());
}
else {
latex = String.format("\\textrm{%s } %s \\textrm{ is unpaired}",
type,
value.getVariableExpression().toLatex());
}
if (isSelected)
latex = String.format("\\bgcolor{%s}{%s}",
LookAndFeel.SELECTED_LATEX_COLOR,
latex);
}
return new JLabel(new ImageIcon(LatexHandler.latexToImage(latex)));
}
}
|
Improve output for Pairing lists
|
Improve output for Pairing lists
|
Java
|
mit
|
poser3/Prove-It
|
java
|
## Code Before:
import java.awt.Component;
import java.awt.image.BufferedImage;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
@SuppressWarnings("unused")
public class PairingListCellRenderer implements ListCellRenderer<Pairing> {
@Override
public Component getListCellRendererComponent(JList<? extends Pairing> list, Pairing value, int index, boolean isSelected, boolean cellHasFocus) {
String latex = "?";
if (value != null) {
String type = value.getVariableExpression().getType().toString();
if (value.isPaired()) {
//make a copy of the expression so that any selections can be removed,
//otherwise the selected subexpression will show as highlighted in the pairings list.
Expression copy = value.getPairedExpression().duplicate();
copy.deselectRecursive();
latex = type + " " + value.getVariableExpression().toLatex() + " \\leftarrow " + copy.toLatex();
//latex = value.getVariableExpression().toLatex() + " \\leftarrow " + value.getPairedExpression().toLatex();
}
else {
latex = type + " " + value.getVariableExpression().toLatex() + " \\textrm{ is unpaired}";
}
if (isSelected) {
latex = "\\bgcolor{" + LookAndFeel.SELECTED_LATEX_COLOR + "}{" + latex + "}";
}
}
return new JLabel(new ImageIcon(LatexHandler.latexToImage(latex)));
}
}
## Instruction:
Improve output for Pairing lists
## Code After:
import java.awt.Component;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
public class PairingListCellRenderer implements ListCellRenderer<Pairing> {
@Override
public Component getListCellRendererComponent(JList<? extends Pairing> list, Pairing value, int index, boolean isSelected, boolean cellHasFocus) {
String latex = "?";
if (value != null) {
String type = value.getVariableExpression().getType().toString();
if (value.isPaired()) {
//make a copy of the expression so that any selections can be removed,
//otherwise the selected subexpression will show as highlighted in the pairings list.
Expression copy = value.getPairedExpression().duplicate();
copy.deselectRecursive();
latex = String.format("\\textrm{%s } %s \\leftarrow %s",
type,
value.getVariableExpression().toLatex(),
copy.toLatex());
}
else {
latex = String.format("\\textrm{%s } %s \\textrm{ is unpaired}",
type,
value.getVariableExpression().toLatex());
}
if (isSelected)
latex = String.format("\\bgcolor{%s}{%s}",
LookAndFeel.SELECTED_LATEX_COLOR,
latex);
}
return new JLabel(new ImageIcon(LatexHandler.latexToImage(latex)));
}
}
|
# ... existing code ...
import java.awt.Component;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.ListCellRenderer;
public class PairingListCellRenderer implements ListCellRenderer<Pairing> {
@Override
# ... modified code ...
//otherwise the selected subexpression will show as highlighted in the pairings list.
Expression copy = value.getPairedExpression().duplicate();
copy.deselectRecursive();
latex = String.format("\\textrm{%s } %s \\leftarrow %s",
type,
value.getVariableExpression().toLatex(),
copy.toLatex());
}
else {
latex = String.format("\\textrm{%s } %s \\textrm{ is unpaired}",
type,
value.getVariableExpression().toLatex());
}
if (isSelected)
latex = String.format("\\bgcolor{%s}{%s}",
LookAndFeel.SELECTED_LATEX_COLOR,
latex);
}
return new JLabel(new ImageIcon(LatexHandler.latexToImage(latex)));
# ... rest of the code ...
|
297ab1ed8f0df41dc1d0d52cdaec8f709e2f58fe
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
setup(
name='caminae',
version='1.0.dev0',
author='Makina Corpus',
author_email='[email protected]',
url='http://makina-corpus.com',
description="Caminae",
long_description=open(os.path.join(here, 'README.rst')).read(),
install_requires = [
'django == 1.4',
'South == 0.7.5',
'psycopg2 == 2.4.1',
'GDAL == 1.9.1',
'django-modeltranslation == 0.3.3',
'django-leaflet == 0.0.2',
'django-geojson',
],
tests_requires = [
'factory_boy == 1.1.5',
],
packages=find_packages(),
classifiers = ['Natural Language :: English',
'Environment :: Web Environment',
'Framework :: Django',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.5'],
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
test_requirements = [
'factory_boy == 1.1.5',
]
setup(
name='caminae',
version='1.0.dev0',
author='Makina Corpus',
author_email='[email protected]',
url='http://makina-corpus.com',
description="Caminae",
long_description=open(os.path.join(here, 'README.rst')).read(),
install_requires = [
'django == 1.4',
'South == 0.7.5',
'psycopg2 == 2.4.1',
'GDAL == 1.9.1',
'django-modeltranslation == 0.3.3',
'django-leaflet == 0.0.2',
'django-geojson',
] + test_requirements,
tests_requires = test_requirements,
packages=find_packages(),
classifiers = ['Natural Language :: English',
'Environment :: Web Environment',
'Framework :: Django',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.5'],
)
|
Add tests requirements to install requirements
|
Add tests requirements to install requirements
|
Python
|
bsd-2-clause
|
mabhub/Geotrek,camillemonchicourt/Geotrek,johan--/Geotrek,camillemonchicourt/Geotrek,mabhub/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,mabhub/Geotrek,johan--/Geotrek,mabhub/Geotrek
|
python
|
## Code Before:
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
setup(
name='caminae',
version='1.0.dev0',
author='Makina Corpus',
author_email='[email protected]',
url='http://makina-corpus.com',
description="Caminae",
long_description=open(os.path.join(here, 'README.rst')).read(),
install_requires = [
'django == 1.4',
'South == 0.7.5',
'psycopg2 == 2.4.1',
'GDAL == 1.9.1',
'django-modeltranslation == 0.3.3',
'django-leaflet == 0.0.2',
'django-geojson',
],
tests_requires = [
'factory_boy == 1.1.5',
],
packages=find_packages(),
classifiers = ['Natural Language :: English',
'Environment :: Web Environment',
'Framework :: Django',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.5'],
)
## Instruction:
Add tests requirements to install requirements
## Code After:
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
test_requirements = [
'factory_boy == 1.1.5',
]
setup(
name='caminae',
version='1.0.dev0',
author='Makina Corpus',
author_email='[email protected]',
url='http://makina-corpus.com',
description="Caminae",
long_description=open(os.path.join(here, 'README.rst')).read(),
install_requires = [
'django == 1.4',
'South == 0.7.5',
'psycopg2 == 2.4.1',
'GDAL == 1.9.1',
'django-modeltranslation == 0.3.3',
'django-leaflet == 0.0.2',
'django-geojson',
] + test_requirements,
tests_requires = test_requirements,
packages=find_packages(),
classifiers = ['Natural Language :: English',
'Environment :: Web Environment',
'Framework :: Django',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.5'],
)
|
// ... existing code ...
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
test_requirements = [
'factory_boy == 1.1.5',
]
setup(
name='caminae',
// ... modified code ...
'django-modeltranslation == 0.3.3',
'django-leaflet == 0.0.2',
'django-geojson',
] + test_requirements,
tests_requires = test_requirements,
packages=find_packages(),
classifiers = ['Natural Language :: English',
'Environment :: Web Environment',
// ... rest of the code ...
|
8a4819daa627f06e1a0eac87ab44176b7e2a0115
|
openerp/addons/openupgrade_records/lib/apriori.py
|
openerp/addons/openupgrade_records/lib/apriori.py
|
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
|
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
Correct renamed module names for bank-statement-import repository.
|
[FIX] Correct renamed module names for bank-statement-import repository.
|
Python
|
agpl-3.0
|
OpenUpgrade/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade
|
python
|
## Code Before:
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
## Instruction:
[FIX] Correct renamed module names for bank-statement-import repository.
## Code After:
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
# ... existing code ...
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
# ... rest of the code ...
|
cf3c5b7a44762f666ad515b7774348e782b42503
|
extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/graal/ScannerSubstitutions.java
|
extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/graal/ScannerSubstitutions.java
|
package io.quarkus.flyway.runtime.graal;
import java.nio.charset.Charset;
import java.util.Collection;
import org.flywaydb.core.api.Location;
import org.flywaydb.core.internal.scanner.LocationScannerCache;
import org.flywaydb.core.internal.scanner.ResourceNameCache;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
/**
* Needed to get rid of some Android related classes
*/
@TargetClass(className = "org.flywaydb.core.internal.scanner.Scanner")
public final class ScannerSubstitutions {
@Substitute
public ScannerSubstitutions(Class<?> implementedInterface, Collection<Location> locations, ClassLoader classLoader,
Charset encoding,
boolean stream,
ResourceNameCache resourceNameCache, LocationScannerCache locationScannerCache) {
throw new IllegalStateException("'org.flywaydb.core.internal.scanner.Scanner' is never used in Quarkus");
}
}
|
package io.quarkus.flyway.runtime.graal;
import java.nio.charset.Charset;
import java.util.Collection;
import org.flywaydb.core.api.Location;
import org.flywaydb.core.internal.scanner.LocationScannerCache;
import org.flywaydb.core.internal.scanner.ResourceNameCache;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
/**
* Needed to get rid of some Android related classes
*/
@TargetClass(className = "org.flywaydb.core.internal.scanner.Scanner")
public final class ScannerSubstitutions {
@Substitute
public ScannerSubstitutions(Class<?> implementedInterface, Collection<Location> locations, ClassLoader classLoader,
Charset encoding,
boolean detectEncoding,
boolean stream,
ResourceNameCache resourceNameCache, LocationScannerCache locationScannerCache,
boolean throwOnMissingLocations) {
throw new IllegalStateException("'org.flywaydb.core.internal.scanner.Scanner' is never used in Quarkus");
}
}
|
Fix Scanner constructor substitution for Flyway 7.9.0
|
Flyway: Fix Scanner constructor substitution for Flyway 7.9.0
|
Java
|
apache-2.0
|
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
|
java
|
## Code Before:
package io.quarkus.flyway.runtime.graal;
import java.nio.charset.Charset;
import java.util.Collection;
import org.flywaydb.core.api.Location;
import org.flywaydb.core.internal.scanner.LocationScannerCache;
import org.flywaydb.core.internal.scanner.ResourceNameCache;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
/**
* Needed to get rid of some Android related classes
*/
@TargetClass(className = "org.flywaydb.core.internal.scanner.Scanner")
public final class ScannerSubstitutions {
@Substitute
public ScannerSubstitutions(Class<?> implementedInterface, Collection<Location> locations, ClassLoader classLoader,
Charset encoding,
boolean stream,
ResourceNameCache resourceNameCache, LocationScannerCache locationScannerCache) {
throw new IllegalStateException("'org.flywaydb.core.internal.scanner.Scanner' is never used in Quarkus");
}
}
## Instruction:
Flyway: Fix Scanner constructor substitution for Flyway 7.9.0
## Code After:
package io.quarkus.flyway.runtime.graal;
import java.nio.charset.Charset;
import java.util.Collection;
import org.flywaydb.core.api.Location;
import org.flywaydb.core.internal.scanner.LocationScannerCache;
import org.flywaydb.core.internal.scanner.ResourceNameCache;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
/**
* Needed to get rid of some Android related classes
*/
@TargetClass(className = "org.flywaydb.core.internal.scanner.Scanner")
public final class ScannerSubstitutions {
@Substitute
public ScannerSubstitutions(Class<?> implementedInterface, Collection<Location> locations, ClassLoader classLoader,
Charset encoding,
boolean detectEncoding,
boolean stream,
ResourceNameCache resourceNameCache, LocationScannerCache locationScannerCache,
boolean throwOnMissingLocations) {
throw new IllegalStateException("'org.flywaydb.core.internal.scanner.Scanner' is never used in Quarkus");
}
}
|
// ... existing code ...
@Substitute
public ScannerSubstitutions(Class<?> implementedInterface, Collection<Location> locations, ClassLoader classLoader,
Charset encoding,
boolean detectEncoding,
boolean stream,
ResourceNameCache resourceNameCache, LocationScannerCache locationScannerCache,
boolean throwOnMissingLocations) {
throw new IllegalStateException("'org.flywaydb.core.internal.scanner.Scanner' is never used in Quarkus");
}
}
// ... rest of the code ...
|
5834127e59b1da93bd814575cd7cbba391e253c8
|
run_borealis.py
|
run_borealis.py
|
from borealis import BotBorealis
try:
print("Welcome to BOREALIS.")
print("Initializing BOREALIS and its subcomponents.")
bot = BotBorealis("config.yml")
print("Initialization completed. Readying subcomponents.")
bot.setup()
print("Subcomponents ready. All systems functional.")
print("Starting BOREALIS.")
bot.start_borealis()
except Exception as e:
print("Danger! Exception caught!")
print(e)
print("BOREALIS has been shut down.")
print("Check the log for further details.")
input("Press Enter to exit.")
|
from borealis import BotBorealis
import time
while True:
bot = None
try:
print("Welcome to BOREALIS.")
print("Initializing BOREALIS and its subcomponents.")
bot = BotBorealis("config.yml")
print("Initialization completed. Readying subcomponents.")
bot.setup()
print("Subcomponents ready. All systems functional.")
print("Starting BOREALIS.")
bot.start_borealis()
except Exception as e:
print("Danger! Exception caught!")
print(e)
print("Deleting bot!")
# Delete the bot, run it again.
del bot
# Sleep for a bit before restarting!
time.sleep(60)
print("Restarting loop.\n\n\n")
# Should never get here, but just in case.
print("We somehow exited the main loop. :ree:")
input("Press Enter to exit.")
|
Implement recovery: Bot will now automatically restart after an exception is caught.
|
Implement recovery
Bot will now automatically restart after an exception is caught.
|
Python
|
agpl-3.0
|
Aurorastation/BOREALISbot2
|
python
|
## Code Before:
from borealis import BotBorealis
try:
print("Welcome to BOREALIS.")
print("Initializing BOREALIS and its subcomponents.")
bot = BotBorealis("config.yml")
print("Initialization completed. Readying subcomponents.")
bot.setup()
print("Subcomponents ready. All systems functional.")
print("Starting BOREALIS.")
bot.start_borealis()
except Exception as e:
print("Danger! Exception caught!")
print(e)
print("BOREALIS has been shut down.")
print("Check the log for further details.")
input("Press Enter to exit.")
## Instruction:
Implement recovery
Bot will now automatically restart after an exception is caught.
## Code After:
from borealis import BotBorealis
import time
while True:
bot = None
try:
print("Welcome to BOREALIS.")
print("Initializing BOREALIS and its subcomponents.")
bot = BotBorealis("config.yml")
print("Initialization completed. Readying subcomponents.")
bot.setup()
print("Subcomponents ready. All systems functional.")
print("Starting BOREALIS.")
bot.start_borealis()
except Exception as e:
print("Danger! Exception caught!")
print(e)
print("Deleting bot!")
# Delete the bot, run it again.
del bot
# Sleep for a bit before restarting!
time.sleep(60)
print("Restarting loop.\n\n\n")
# Should never get here, but just in case.
print("We somehow exited the main loop. :ree:")
input("Press Enter to exit.")
|
// ... existing code ...
from borealis import BotBorealis
import time
while True:
bot = None
try:
print("Welcome to BOREALIS.")
print("Initializing BOREALIS and its subcomponents.")
bot = BotBorealis("config.yml")
print("Initialization completed. Readying subcomponents.")
bot.setup()
print("Subcomponents ready. All systems functional.")
print("Starting BOREALIS.")
bot.start_borealis()
except Exception as e:
print("Danger! Exception caught!")
print(e)
print("Deleting bot!")
# Delete the bot, run it again.
del bot
# Sleep for a bit before restarting!
time.sleep(60)
print("Restarting loop.\n\n\n")
# Should never get here, but just in case.
print("We somehow exited the main loop. :ree:")
input("Press Enter to exit.")
// ... rest of the code ...
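A common refinement of this kind of supervisor loop is an exponential backoff instead of a flat 60-second sleep, so a tight crash loop backs off progressively. A rough sketch of that variation (not what this commit does; the BotBorealis calls are the same as above):

from borealis import BotBorealis
import time
import traceback

delay = 5  # seconds; doubled after every crash, capped at ten minutes
while True:
    try:
        bot = BotBorealis("config.yml")
        bot.setup()
        delay = 5  # setup succeeded, so reset the backoff
        bot.start_borealis()
    except Exception:
        traceback.print_exc()
        print("Restarting in {} seconds.".format(delay))
        time.sleep(delay)
        delay = min(delay * 2, 600)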
|
d36ac9a113608aadbda79c724f6aa6f6da5ec0bd
|
cellcounter/mixins.py
|
cellcounter/mixins.py
|
import simplejson as json
from django.http import HttpResponse
class JSONResponseMixin(object):
"""
A Mixin that renders context as a JSON response
"""
def render_to_response(self, context):
"""
Returns a JSON response containing 'context' as payload
"""
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"""
Construct an `HttpResponse` object.
"""
response = HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
return response
def convert_context_to_json(self, context):
"""
Convert the context dictionary into a JSON object
"""
return json.dumps(context, indent=4)
|
import json
from django.http import HttpResponse
class JSONResponseMixin(object):
"""
A Mixin that renders context as a JSON response
"""
def render_to_response(self, context):
"""
Returns a JSON response containing 'context' as payload
"""
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"""
Construct an `HttpResponse` object.
"""
response = HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
return response
def convert_context_to_json(self, context):
"""
Convert the context dictionary into a JSON object
"""
return json.dumps(context, indent=4)
|
Use json rather than simplejson
|
Use json rather than simplejson
|
Python
|
mit
|
haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter,haematologic/cellcounter
|
python
|
## Code Before:
import simplejson as json
from django.http import HttpResponse
class JSONResponseMixin(object):
"""
A Mixin that renders context as a JSON response
"""
def render_to_response(self, context):
"""
Returns a JSON response containing 'context' as payload
"""
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"""
Construct an `HttpResponse` object.
"""
response = HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
return response
def convert_context_to_json(self, context):
"""
Convert the context dictionary into a JSON object
"""
return json.dumps(context, indent=4)
## Instruction:
Use json rather than simplejson
## Code After:
import json
from django.http import HttpResponse
class JSONResponseMixin(object):
"""
A Mixin that renders context as a JSON response
"""
def render_to_response(self, context):
"""
Returns a JSON response containing 'context' as payload
"""
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"""
Construct an `HttpResponse` object.
"""
response = HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
return response
def convert_context_to_json(self, context):
"""
Convert the context dictionary into a JSON object
"""
return json.dumps(context, indent=4)
|
# ... existing code ...
import json
from django.http import HttpResponse
# ... rest of the code ...
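For context, a hypothetical Django class-based view using this mixin (the view name and payload are made up; only JSONResponseMixin comes from this file):

from django.views.generic import View

from cellcounter.mixins import JSONResponseMixin


class CountSummaryView(JSONResponseMixin, View):
    def get(self, request, *args, **kwargs):
        # The stdlib json module serialises plain dicts the same way
        # simplejson did, so callers see an identical payload.
        return self.render_to_response({'neutrophils': 5, 'lymphocytes': 3})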
|
34a2b3a93bd96643d74fcb3c8d2f8db52d18253f
|
desubot.py
|
desubot.py
|
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
thread = threading.Thread(target=worker)
thread.start()
while True:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
if __name__ == '__main__':
main()
else:
bot = IRCBot('desutest', 'irc.rizon.net', command_prefix='!')
|
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
import traceback
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
desubot.bot.join('#animu')
desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
try:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
except:
traceback.print_exc()
if __name__ == '__main__':
main()
else:
bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
|
Make exception on reload not crash input
|
Make exception on reload not crash input
|
Python
|
mit
|
Motoko11/MotoBot
|
python
|
## Code Before:
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
thread = threading.Thread(target=worker)
thread.start()
while True:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
if __name__ == '__main__':
main()
else:
bot = IRCBot('desutest', 'irc.rizon.net', command_prefix='!')
## Instruction:
Make exception on reload not crash input
## Code After:
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
import traceback
def worker():
desubot.bot.run()
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
desubot.bot.join('#animu')
desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
try:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
except:
traceback.print_exc()
if __name__ == '__main__':
main()
else:
bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
|
# ... existing code ...
from motobot.irc_bot import IRCBot, IRCLevel
import desubot
import threading
import traceback
def worker():
desubot.bot.run()
# ... modified code ...
def main():
desubot.bot.load_plugins('plugins')
desubot.bot.join('#Moto-chan')
desubot.bot.join('#animu')
desubot.bot.join('#anime-planet.com')
thread = threading.Thread(target=worker)
thread.start()
while True:
try:
msg = input()
if msg.startswith(':'):
desubot.bot.load_plugins('plugins')
else:
desubot.bot.send(msg)
except:
traceback.print_exc()
if __name__ == '__main__':
main()
else:
bot = IRCBot('desubot', 'irc.rizon.net', command_prefix='!')
# ... rest of the code ...
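One caveat with the bare except above is that it also catches KeyboardInterrupt and EOFError, so Ctrl-C at the console no longer stops the loop. A possible variation, shown here only as a sketch (not part of this commit):

import traceback

import desubot

while True:
    try:
        msg = input()
        if msg.startswith(':'):
            desubot.bot.load_plugins('plugins')
        else:
            desubot.bot.send(msg)
    except (KeyboardInterrupt, EOFError):
        raise  # still let Ctrl-C / end-of-input stop the console loop
    except Exception:
        traceback.print_exc()  # log the error and keep reading input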
|
56ecad6907dea785ebffba414dfe3ff586e5f2e0
|
src/shutdown/hpr_wall.c
|
src/shutdown/hpr_wall.c
|
/* ISC license. */
#include <string.h>
#include <utmpx.h>
#include <skalibs/posixishard.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include "hpr.h"
#ifndef UT_LINESIZE
#define UT_LINESIZE 32
#endif
void hpr_wall (char const *s)
{
size_t n = strlen(s) ;
char tty[10 + UT_LINESIZE] = "/dev/" ;
char msg[n+1] ;
memcpy(msg, s, n) ;
msg[n++] = '\n' ;
setutxent() ;
for (;;)
{
size_t linelen ;
int fd ;
struct utmpx *utx = getutxent() ;
if (!utx) break ;
if (utx->ut_type != USER_PROCESS) continue ;
linelen = strnlen(utx->ut_line, UT_LINESIZE) ;
memcpy(tty + 5, utx->ut_line, linelen) ;
tty[5 + linelen] = 0 ;
fd = open_append(tty) ;
if (fd == -1) continue ;
allwrite(fd, msg, n) ;
fd_close(fd) ;
}
endutxent() ;
}
|
/* ISC license. */
#include <string.h>
#include <utmpx.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include <skalibs/posixishard.h>
#include "hpr.h"
#ifndef UT_LINESIZE
#define UT_LINESIZE 32
#endif
void hpr_wall (char const *s)
{
size_t n = strlen(s) ;
char tty[10 + UT_LINESIZE] = "/dev/" ;
char msg[n+1] ;
memcpy(msg, s, n) ;
msg[n++] = '\n' ;
setutxent() ;
for (;;)
{
size_t linelen ;
int fd ;
struct utmpx *utx = getutxent() ;
if (!utx) break ;
if (utx->ut_type != USER_PROCESS) continue ;
linelen = strnlen(utx->ut_line, UT_LINESIZE) ;
memcpy(tty + 5, utx->ut_line, linelen) ;
tty[5 + linelen] = 0 ;
fd = open_append(tty) ;
if (fd == -1) continue ;
allwrite(fd, msg, n) ;
fd_close(fd) ;
}
endutxent() ;
}
|
Include posixishard as late as possible
|
Include posixishard as late as possible
|
C
|
isc
|
skarnet/s6-linux-init,skarnet/s6-linux-init
|
c
|
## Code Before:
/* ISC license. */
#include <string.h>
#include <utmpx.h>
#include <skalibs/posixishard.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include "hpr.h"
#ifndef UT_LINESIZE
#define UT_LINESIZE 32
#endif
void hpr_wall (char const *s)
{
size_t n = strlen(s) ;
char tty[10 + UT_LINESIZE] = "/dev/" ;
char msg[n+1] ;
memcpy(msg, s, n) ;
msg[n++] = '\n' ;
setutxent() ;
for (;;)
{
size_t linelen ;
int fd ;
struct utmpx *utx = getutxent() ;
if (!utx) break ;
if (utx->ut_type != USER_PROCESS) continue ;
linelen = strnlen(utx->ut_line, UT_LINESIZE) ;
memcpy(tty + 5, utx->ut_line, linelen) ;
tty[5 + linelen] = 0 ;
fd = open_append(tty) ;
if (fd == -1) continue ;
allwrite(fd, msg, n) ;
fd_close(fd) ;
}
endutxent() ;
}
## Instruction:
Include posixishard as late as possible
## Code After:
/* ISC license. */
#include <string.h>
#include <utmpx.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include <skalibs/posixishard.h>
#include "hpr.h"
#ifndef UT_LINESIZE
#define UT_LINESIZE 32
#endif
void hpr_wall (char const *s)
{
size_t n = strlen(s) ;
char tty[10 + UT_LINESIZE] = "/dev/" ;
char msg[n+1] ;
memcpy(msg, s, n) ;
msg[n++] = '\n' ;
setutxent() ;
for (;;)
{
size_t linelen ;
int fd ;
struct utmpx *utx = getutxent() ;
if (!utx) break ;
if (utx->ut_type != USER_PROCESS) continue ;
linelen = strnlen(utx->ut_line, UT_LINESIZE) ;
memcpy(tty + 5, utx->ut_line, linelen) ;
tty[5 + linelen] = 0 ;
fd = open_append(tty) ;
if (fd == -1) continue ;
allwrite(fd, msg, n) ;
fd_close(fd) ;
}
endutxent() ;
}
|
// ... existing code ...
#include <string.h>
#include <utmpx.h>
#include <skalibs/allreadwrite.h>
#include <skalibs/strerr2.h>
#include <skalibs/djbunix.h>
#include <skalibs/posixishard.h>
#include "hpr.h"
// ... rest of the code ...
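The contents of skalibs' posixishard.h are not shown in this change, so the rationale can only be sketched: compatibility headers of this kind typically provide guarded fallbacks for functions some libcs lack (strnlen() above is a likely candidate), and including them after the system and library headers keeps those fallbacks from influencing how the earlier headers are parsed. A purely hypothetical shim header, for illustration only:

/* compat.h - hypothetical shim, not the real skalibs/posixishard.h */
#ifndef COMPAT_H
#define COMPAT_H

#include <stddef.h>

#ifndef HAVE_STRNLEN
/* Fallback declaration for libcs without strnlen(); the system
   <string.h> has already been included by the time this is seen. */
extern size_t strnlen (const char *, size_t) ;
#endif

#endif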
|
e9b9e850dc43acb35bf5220c2e8c1b831e53acb5
|
src/main/java/org/bitsofinfo/hazelcast/discovery/docker/swarm/DockerSwarmDiscoveryStrategyFactory.java
|
src/main/java/org/bitsofinfo/hazelcast/discovery/docker/swarm/DockerSwarmDiscoveryStrategyFactory.java
|
package org.bitsofinfo.hazelcast.discovery.docker.swarm;
import com.hazelcast.config.properties.PropertyDefinition;
import com.hazelcast.logging.ILogger;
import com.hazelcast.spi.discovery.DiscoveryNode;
import com.hazelcast.spi.discovery.DiscoveryStrategy;
import com.hazelcast.spi.discovery.DiscoveryStrategyFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
public class DockerSwarmDiscoveryStrategyFactory implements DiscoveryStrategyFactory {
private static final Collection<PropertyDefinition> PROPERTIES =
Arrays.asList(DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_LABELS,
DockerSwarmDiscoveryConfiguration.DOCKER_NETWORK_NAMES,
DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_NAMES,
DockerSwarmDiscoveryConfiguration.HAZELCAST_PEER_PORT,
DockerSwarmDiscoveryConfiguration.SWARM_MGR_URI,
DockerSwarmDiscoveryConfiguration.SKIP_VERIFY_SSL,
DockerSwarmDiscoveryConfiguration.LOG_ALL_SERVICE_NAMES_ON_FAILED_DISCOVERY);
public Class<? extends DiscoveryStrategy> getDiscoveryStrategyType() {
// Returns the actual class type of the DiscoveryStrategy
// implementation, to match it against the configuration
return DockerSwarmDiscoveryStrategy.class;
}
public Collection<PropertyDefinition> getConfigurationProperties() {
return PROPERTIES;
}
public DiscoveryStrategy newDiscoveryStrategy(DiscoveryNode discoveryNode,
ILogger logger,
Map<String, Comparable> properties) {
return new DockerSwarmDiscoveryStrategy(discoveryNode, logger, properties);
}
}
|
package org.bitsofinfo.hazelcast.discovery.docker.swarm;
import com.hazelcast.config.properties.PropertyDefinition;
import com.hazelcast.logging.ILogger;
import com.hazelcast.spi.discovery.DiscoveryNode;
import com.hazelcast.spi.discovery.DiscoveryStrategy;
import com.hazelcast.spi.discovery.DiscoveryStrategyFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
public class DockerSwarmDiscoveryStrategyFactory implements DiscoveryStrategyFactory {
private static final Collection<PropertyDefinition> PROPERTIES =
Arrays.asList(DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_LABELS,
DockerSwarmDiscoveryConfiguration.DOCKER_NETWORK_NAMES,
DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_NAMES,
DockerSwarmDiscoveryConfiguration.HAZELCAST_PEER_PORT,
DockerSwarmDiscoveryConfiguration.SWARM_MGR_URI,
DockerSwarmDiscoveryConfiguration.SKIP_VERIFY_SSL,
DockerSwarmDiscoveryConfiguration.LOG_ALL_SERVICE_NAMES_ON_FAILED_DISCOVERY,
DockerSwarmDiscoveryConfiguration.STRICT_DOCKER_SERVICE_NAME_COMPARISON);
public Class<? extends DiscoveryStrategy> getDiscoveryStrategyType() {
// Returns the actual class type of the DiscoveryStrategy
// implementation, to match it against the configuration
return DockerSwarmDiscoveryStrategy.class;
}
public Collection<PropertyDefinition> getConfigurationProperties() {
return PROPERTIES;
}
public DiscoveryStrategy newDiscoveryStrategy(DiscoveryNode discoveryNode,
ILogger logger,
Map<String, Comparable> properties) {
return new DockerSwarmDiscoveryStrategy(discoveryNode, logger, properties);
}
}
|
Add STRICT_DOCKER_SERVICE_NAME_COMPARISON to Collection of PropertyDefinition
|
Add STRICT_DOCKER_SERVICE_NAME_COMPARISON to Collection of PropertyDefinition
|
Java
|
apache-2.0
|
bitsofinfo/hazelcast-docker-swarm-discovery-spi
|
java
|
## Code Before:
package org.bitsofinfo.hazelcast.discovery.docker.swarm;
import com.hazelcast.config.properties.PropertyDefinition;
import com.hazelcast.logging.ILogger;
import com.hazelcast.spi.discovery.DiscoveryNode;
import com.hazelcast.spi.discovery.DiscoveryStrategy;
import com.hazelcast.spi.discovery.DiscoveryStrategyFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
public class DockerSwarmDiscoveryStrategyFactory implements DiscoveryStrategyFactory {
private static final Collection<PropertyDefinition> PROPERTIES =
Arrays.asList(DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_LABELS,
DockerSwarmDiscoveryConfiguration.DOCKER_NETWORK_NAMES,
DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_NAMES,
DockerSwarmDiscoveryConfiguration.HAZELCAST_PEER_PORT,
DockerSwarmDiscoveryConfiguration.SWARM_MGR_URI,
DockerSwarmDiscoveryConfiguration.SKIP_VERIFY_SSL,
DockerSwarmDiscoveryConfiguration.LOG_ALL_SERVICE_NAMES_ON_FAILED_DISCOVERY);
public Class<? extends DiscoveryStrategy> getDiscoveryStrategyType() {
// Returns the actual class type of the DiscoveryStrategy
// implementation, to match it against the configuration
return DockerSwarmDiscoveryStrategy.class;
}
public Collection<PropertyDefinition> getConfigurationProperties() {
return PROPERTIES;
}
public DiscoveryStrategy newDiscoveryStrategy(DiscoveryNode discoveryNode,
ILogger logger,
Map<String, Comparable> properties) {
return new DockerSwarmDiscoveryStrategy(discoveryNode, logger, properties);
}
}
## Instruction:
Add STRICT_DOCKER_SERVICE_NAME_COMPARISON to Collection of PropertyDefinition
## Code After:
package org.bitsofinfo.hazelcast.discovery.docker.swarm;
import com.hazelcast.config.properties.PropertyDefinition;
import com.hazelcast.logging.ILogger;
import com.hazelcast.spi.discovery.DiscoveryNode;
import com.hazelcast.spi.discovery.DiscoveryStrategy;
import com.hazelcast.spi.discovery.DiscoveryStrategyFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
public class DockerSwarmDiscoveryStrategyFactory implements DiscoveryStrategyFactory {
private static final Collection<PropertyDefinition> PROPERTIES =
Arrays.asList(DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_LABELS,
DockerSwarmDiscoveryConfiguration.DOCKER_NETWORK_NAMES,
DockerSwarmDiscoveryConfiguration.DOCKER_SERVICE_NAMES,
DockerSwarmDiscoveryConfiguration.HAZELCAST_PEER_PORT,
DockerSwarmDiscoveryConfiguration.SWARM_MGR_URI,
DockerSwarmDiscoveryConfiguration.SKIP_VERIFY_SSL,
DockerSwarmDiscoveryConfiguration.LOG_ALL_SERVICE_NAMES_ON_FAILED_DISCOVERY,
DockerSwarmDiscoveryConfiguration.STRICT_DOCKER_SERVICE_NAME_COMPARISON);
public Class<? extends DiscoveryStrategy> getDiscoveryStrategyType() {
// Returns the actual class type of the DiscoveryStrategy
// implementation, to match it against the configuration
return DockerSwarmDiscoveryStrategy.class;
}
public Collection<PropertyDefinition> getConfigurationProperties() {
return PROPERTIES;
}
public DiscoveryStrategy newDiscoveryStrategy(DiscoveryNode discoveryNode,
ILogger logger,
Map<String, Comparable> properties) {
return new DockerSwarmDiscoveryStrategy(discoveryNode, logger, properties);
}
}
|
# ... existing code ...
DockerSwarmDiscoveryConfiguration.HAZELCAST_PEER_PORT,
DockerSwarmDiscoveryConfiguration.SWARM_MGR_URI,
DockerSwarmDiscoveryConfiguration.SKIP_VERIFY_SSL,
DockerSwarmDiscoveryConfiguration.LOG_ALL_SERVICE_NAMES_ON_FAILED_DISCOVERY,
DockerSwarmDiscoveryConfiguration.STRICT_DOCKER_SERVICE_NAME_COMPARISON);
public Class<? extends DiscoveryStrategy> getDiscoveryStrategyType() {
// Returns the actual class type of the DiscoveryStrategy
# ... rest of the code ...
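The DockerSwarmDiscoveryConfiguration class itself is not part of this diff; assuming it follows Hazelcast's usual property pattern, the new constant would be declared roughly as below (the key string and optionality are guesses, not taken from the real source):

import com.hazelcast.config.properties.PropertyDefinition;
import com.hazelcast.config.properties.PropertyTypeConverter;
import com.hazelcast.config.properties.SimplePropertyDefinition;

public class DockerSwarmDiscoveryConfiguration {

    // Hypothetical declaration; the real key name lives in the actual class.
    public static final PropertyDefinition STRICT_DOCKER_SERVICE_NAME_COMPARISON =
            new SimplePropertyDefinition("strict-docker-service-name-comparison", true,
                    PropertyTypeConverter.BOOLEAN);
}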
|
760ce74fca8fa9a640167eabb4af83e31e902500
|
openedx/core/djangoapps/api_admin/utils.py
|
openedx/core/djangoapps/api_admin/utils.py
|
""" Course Discovery API Service. """
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.theming import helpers
from openedx.core.lib.token_utils import get_id_token
from provider.oauth2.models import Client
CLIENT_NAME = 'course-discovery'
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return EdxRestApiClient(
course_discovery_client.url,
jwt=get_id_token(user, CLIENT_NAME, secret_key=secret_key)
)
|
""" Course Discovery API Service. """
import datetime
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
import jwt
from openedx.core.djangoapps.theming import helpers
from provider.oauth2.models import Client
from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
def get_id_token(user):
"""
Return a JWT for `user`, suitable for use with the course discovery service.
Arguments:
user (User): User for whom to generate the JWT.
Returns:
str: The JWT.
"""
try:
# Service users may not have user profiles.
full_name = UserProfile.objects.get(user=user).name
except UserProfile.DoesNotExist:
full_name = None
now = datetime.datetime.utcnow()
expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
payload = {
'preferred_username': user.username,
'name': full_name,
'email': user.email,
'administrator': user.is_staff,
'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
'exp': now + datetime.timedelta(seconds=expires_in),
'iat': now,
'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
'sub': anonymous_id_for_user(user, None),
}
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
|
Use correct JWT audience when connecting to course discovery.
|
Use correct JWT audience when connecting to course discovery.
|
Python
|
agpl-3.0
|
cecep-edu/edx-platform,ahmedaljazzar/edx-platform,fintech-circle/edx-platform,waheedahmed/edx-platform,proversity-org/edx-platform,pabloborrego93/edx-platform,mbareta/edx-platform-ft,ESOedX/edx-platform,longmen21/edx-platform,pepeportela/edx-platform,chrisndodge/edx-platform,procangroup/edx-platform,ampax/edx-platform,fintech-circle/edx-platform,Stanford-Online/edx-platform,proversity-org/edx-platform,EDUlib/edx-platform,Edraak/edraak-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,JioEducation/edx-platform,stvstnfrd/edx-platform,gsehub/edx-platform,mitocw/edx-platform,proversity-org/edx-platform,chrisndodge/edx-platform,mitocw/edx-platform,lduarte1991/edx-platform,jzoldak/edx-platform,mitocw/edx-platform,waheedahmed/edx-platform,prarthitm/edxplatform,Livit/Livit.Learn.EdX,amir-qayyum-khan/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,jjmiranda/edx-platform,amir-qayyum-khan/edx-platform,msegado/edx-platform,gymnasium/edx-platform,mbareta/edx-platform-ft,pepeportela/edx-platform,kmoocdev2/edx-platform,kmoocdev2/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,ahmedaljazzar/edx-platform,Lektorium-LLC/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,edx-solutions/edx-platform,jolyonb/edx-platform,caesar2164/edx-platform,deepsrijit1105/edx-platform,ampax/edx-platform,arbrandes/edx-platform,philanthropy-u/edx-platform,ESOedX/edx-platform,TeachAtTUM/edx-platform,cecep-edu/edx-platform,longmen21/edx-platform,romain-li/edx-platform,raccoongang/edx-platform,a-parhom/edx-platform,procangroup/edx-platform,kmoocdev2/edx-platform,shabab12/edx-platform,pepeportela/edx-platform,TeachAtTUM/edx-platform,jzoldak/edx-platform,proversity-org/edx-platform,jolyonb/edx-platform,appsembler/edx-platform,teltek/edx-platform,caesar2164/edx-platform,ahmedaljazzar/edx-platform,JioEducation/edx-platform,pabloborrego93/edx-platform,Stanford-Online/edx-platform,chrisndodge/edx-platform,mbareta/edx-platform-ft,Edraak/edraak-platform,lduarte1991/edx-platform,cpennington/edx-platform,lduarte1991/edx-platform,deepsrijit1105/edx-platform,deepsrijit1105/edx-platform,Edraak/edraak-platform,procangroup/edx-platform,shabab12/edx-platform,Edraak/edraak-platform,CredoReference/edx-platform,stvstnfrd/edx-platform,longmen21/edx-platform,itsjeyd/edx-platform,naresh21/synergetics-edx-platform,eduNEXT/edunext-platform,gymnasium/edx-platform,louyihua/edx-platform,msegado/edx-platform,louyihua/edx-platform,prarthitm/edxplatform,jjmiranda/edx-platform,msegado/edx-platform,TeachAtTUM/edx-platform,Lektorium-LLC/edx-platform,pabloborrego93/edx-platform,synergeticsedx/deployment-wipro,arbrandes/edx-platform,marcore/edx-platform,naresh21/synergetics-edx-platform,chrisndodge/edx-platform,marcore/edx-platform,louyihua/edx-platform,gymnasium/edx-platform,procangroup/edx-platform,gsehub/edx-platform,amir-qayyum-khan/edx-platform,kmoocdev2/edx-platform,marcore/edx-platform,marcore/edx-platform,eduNEXT/edunext-platform,a-parhom/edx-platform,ESOedX/edx-platform,stvstnfrd/edx-platform,Stanford-Online/edx-platform,BehavioralInsightsTeam/edx-platform,JioEducation/edx-platform,fintech-circle/edx-platform,synergeticsedx/deployment-wipro,msegado/edx-platform,edx/edx-platform,cpennington/edx-platform,TeachAtTUM/edx-platform,longmen21/edx-platform,Stanford-Online/edx-platform,Livit/Livit.Learn.EdX,philanthropy-u/edx-platform,miptliot/edx-platform,raccoongang/edx-platform,EDUlib/edx-platform,synergeticsedx/d
eployment-wipro,a-parhom/edx-platform,BehavioralInsightsTeam/edx-platform,mbareta/edx-platform-ft,hastexo/edx-platform,jolyonb/edx-platform,Livit/Livit.Learn.EdX,Livit/Livit.Learn.EdX,teltek/edx-platform,kmoocdev2/edx-platform,eduNEXT/edunext-platform,jjmiranda/edx-platform,edx/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,teltek/edx-platform,waheedahmed/edx-platform,ampax/edx-platform,EDUlib/edx-platform,deepsrijit1105/edx-platform,tanmaykm/edx-platform,Lektorium-LLC/edx-platform,tanmaykm/edx-platform,eduNEXT/edx-platform,philanthropy-u/edx-platform,cpennington/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,cecep-edu/edx-platform,cpennington/edx-platform,CredoReference/edx-platform,romain-li/edx-platform,romain-li/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,lduarte1991/edx-platform,waheedahmed/edx-platform,appsembler/edx-platform,miptliot/edx-platform,appsembler/edx-platform,itsjeyd/edx-platform,jzoldak/edx-platform,jjmiranda/edx-platform,pepeportela/edx-platform,ampax/edx-platform,gsehub/edx-platform,pabloborrego93/edx-platform,hastexo/edx-platform,tanmaykm/edx-platform,angelapper/edx-platform,gsehub/edx-platform,teltek/edx-platform,EDUlib/edx-platform,CredoReference/edx-platform,naresh21/synergetics-edx-platform,tanmaykm/edx-platform,arbrandes/edx-platform,hastexo/edx-platform,raccoongang/edx-platform,itsjeyd/edx-platform,raccoongang/edx-platform,Lektorium-LLC/edx-platform,synergeticsedx/deployment-wipro,cecep-edu/edx-platform,shabab12/edx-platform,fintech-circle/edx-platform,edx/edx-platform,naresh21/synergetics-edx-platform,waheedahmed/edx-platform,mitocw/edx-platform,louyihua/edx-platform,prarthitm/edxplatform,romain-li/edx-platform,caesar2164/edx-platform,msegado/edx-platform,caesar2164/edx-platform,miptliot/edx-platform,JioEducation/edx-platform,miptliot/edx-platform,shabab12/edx-platform,hastexo/edx-platform,itsjeyd/edx-platform,stvstnfrd/edx-platform,a-parhom/edx-platform,BehavioralInsightsTeam/edx-platform,ESOedX/edx-platform,longmen21/edx-platform,prarthitm/edxplatform,edx-solutions/edx-platform,jzoldak/edx-platform
|
python
|
## Code Before:
""" Course Discovery API Service. """
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.theming import helpers
from openedx.core.lib.token_utils import get_id_token
from provider.oauth2.models import Client
CLIENT_NAME = 'course-discovery'
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return EdxRestApiClient(
course_discovery_client.url,
jwt=get_id_token(user, CLIENT_NAME, secret_key=secret_key)
)
## Instruction:
Use correct JWT audience when connecting to course discovery.
## Code After:
""" Course Discovery API Service. """
import datetime
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
import jwt
from openedx.core.djangoapps.theming import helpers
from provider.oauth2.models import Client
from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
def get_id_token(user):
"""
Return a JWT for `user`, suitable for use with the course discovery service.
Arguments:
user (User): User for whom to generate the JWT.
Returns:
str: The JWT.
"""
try:
# Service users may not have user profiles.
full_name = UserProfile.objects.get(user=user).name
except UserProfile.DoesNotExist:
full_name = None
now = datetime.datetime.utcnow()
expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
payload = {
'preferred_username': user.username,
'name': full_name,
'email': user.email,
'administrator': user.is_staff,
'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
'exp': now + datetime.timedelta(seconds=expires_in),
'iat': now,
'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
'sub': anonymous_id_for_user(user, None),
}
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
|
# ... existing code ...
""" Course Discovery API Service. """
import datetime
from django.conf import settings
from edx_rest_api_client.client import EdxRestApiClient
import jwt
from openedx.core.djangoapps.theming import helpers
from provider.oauth2.models import Client
from student.models import UserProfile, anonymous_id_for_user
CLIENT_NAME = 'course-discovery'
def get_id_token(user):
"""
Return a JWT for `user`, suitable for use with the course discovery service.
Arguments:
user (User): User for whom to generate the JWT.
Returns:
str: The JWT.
"""
try:
# Service users may not have user profiles.
full_name = UserProfile.objects.get(user=user).name
except UserProfile.DoesNotExist:
full_name = None
now = datetime.datetime.utcnow()
expires_in = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
payload = {
'preferred_username': user.username,
'name': full_name,
'email': user.email,
'administrator': user.is_staff,
'iss': helpers.get_value('OAUTH_OIDC_ISSUER', settings.OAUTH_OIDC_ISSUER),
'exp': now + datetime.timedelta(seconds=expires_in),
'iat': now,
'aud': helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_AUDIENCE'],
'sub': anonymous_id_for_user(user, None),
}
secret_key = helpers.get_value('JWT_AUTH', settings.JWT_AUTH)['JWT_SECRET_KEY']
return jwt.encode(payload, secret_key)
def course_discovery_api_client(user):
""" Returns a Course Discovery API client setup with authentication for the specified user. """
course_discovery_client = Client.objects.get(name=CLIENT_NAME)
return EdxRestApiClient(course_discovery_client.url, jwt=get_id_token(user))
# ... rest of the code ...
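The point of the 'aud' claim is that the course discovery service presumably rejects tokens issued for a different audience. A hedged sketch of how the receiving side would typically check it with PyJWT (the expected audience value is a placeholder):

import jwt


def verify_discovery_token(token, secret_key, expected_audience):
    """Decode a token like the one produced by get_id_token above."""
    try:
        return jwt.decode(token, secret_key, algorithms=['HS256'],
                          audience=expected_audience)
    except jwt.InvalidAudienceError:
        # The token's 'aud' claim does not match expected_audience --
        # exactly the mismatch that using JWT_AUDIENCE here avoids.
        raise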
|
7a7b6351f21c95b3620059984470b0b7619c1e9d
|
docopt_dispatch.py
|
docopt_dispatch.py
|
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
from docopt import docopt
__all__ = ('dispatch', 'DispatchError')
__author__ = 'Vladimir Keleshev <[email protected]>'
__version__ = '0.0.0'
__license__ = 'MIT'
__keywords__ = 'docopt dispatch function adapter kwargs'
__url__ = 'https://github.com/halst/docopt-dispatch'
class DispatchError(Exception):
pass
class Dispatch(object):
def __init__(self):
self._functions = OrderedDict()
def on(self, argument):
def decorator(function):
self._functions[argument] = function
return function
return decorator
def __call__(self, *args, **kwargs):
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
function(**self._kwargify(arguments))
return
raise DispatchError('None of dispatch conditions (%s) is triggered'
% ', '.join(self._functions.keys()))
@staticmethod
def _kwargify(arguments):
kwargify = lambda string: re.sub('\W', '_', string).strip('_')
return dict((kwargify(key), value) for key, value in arguments.items())
dispatch = Dispatch()
|
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
__all__ = ('dispatch', 'DispatchError')
__author__ = 'Vladimir Keleshev <[email protected]>'
__version__ = '0.0.0'
__license__ = 'MIT'
__keywords__ = 'docopt dispatch function adapter kwargs'
__url__ = 'https://github.com/halst/docopt-dispatch'
class DispatchError(Exception):
pass
class Dispatch(object):
def __init__(self):
self._functions = OrderedDict()
def on(self, argument):
def decorator(function):
self._functions[argument] = function
return function
return decorator
def __call__(self, *args, **kwargs):
from docopt import docopt
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
function(**self._kwargify(arguments))
return
raise DispatchError('None of dispatch conditions (%s) is triggered'
% ', '.join(self._functions.keys()))
@staticmethod
def _kwargify(arguments):
kwargify = lambda string: re.sub('\W', '_', string).strip('_')
return dict((kwargify(key), value) for key, value in arguments.items())
dispatch = Dispatch()
|
Load docopt lazily (so that setup.py works)
|
Load docopt lazily (so that setup.py works)
|
Python
|
mit
|
keleshev/docopt-dispatch
|
python
|
## Code Before:
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
from docopt import docopt
__all__ = ('dispatch', 'DispatchError')
__author__ = 'Vladimir Keleshev <[email protected]>'
__version__ = '0.0.0'
__license__ = 'MIT'
__keywords__ = 'docopt dispatch function adapter kwargs'
__url__ = 'https://github.com/halst/docopt-dispatch'
class DispatchError(Exception):
pass
class Dispatch(object):
def __init__(self):
self._functions = OrderedDict()
def on(self, argument):
def decorator(function):
self._functions[argument] = function
return function
return decorator
def __call__(self, *args, **kwargs):
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
function(**self._kwargify(arguments))
return
raise DispatchError('None of dispatch conditions (%s) is triggered'
% ', '.join(self._functions.keys()))
@staticmethod
def _kwargify(arguments):
kwargify = lambda string: re.sub('\W', '_', string).strip('_')
return dict((kwargify(key), value) for key, value in arguments.items())
dispatch = Dispatch()
## Instruction:
Load docopt lazily (so that setup.py works)
## Code After:
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
__all__ = ('dispatch', 'DispatchError')
__author__ = 'Vladimir Keleshev <[email protected]>'
__version__ = '0.0.0'
__license__ = 'MIT'
__keywords__ = 'docopt dispatch function adapter kwargs'
__url__ = 'https://github.com/halst/docopt-dispatch'
class DispatchError(Exception):
pass
class Dispatch(object):
def __init__(self):
self._functions = OrderedDict()
def on(self, argument):
def decorator(function):
self._functions[argument] = function
return function
return decorator
def __call__(self, *args, **kwargs):
from docopt import docopt
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
function(**self._kwargify(arguments))
return
raise DispatchError('None of dispatch conditions (%s) is triggered'
% ', '.join(self._functions.keys()))
@staticmethod
def _kwargify(arguments):
kwargify = lambda string: re.sub('\W', '_', string).strip('_')
return dict((kwargify(key), value) for key, value in arguments.items())
dispatch = Dispatch()
|
// ... existing code ...
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
__all__ = ('dispatch', 'DispatchError')
// ... modified code ...
return decorator
def __call__(self, *args, **kwargs):
from docopt import docopt
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
// ... rest of the code ...
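A hypothetical script showing why the lazy import matters: importing docopt_dispatch (which the project's setup.py presumably does to read version metadata) no longer requires docopt to be installed, because docopt is only pulled in when dispatch() actually runs:

"""Usage:
  greeter.py hello <name>
  greeter.py --version
"""
from docopt_dispatch import dispatch  # importing this no longer needs docopt

@dispatch.on('--version')
def version(**kwargs):
    print('greeter 1.0')

@dispatch.on('hello')
def hello(name, **kwargs):
    print('Hello, %s!' % name)

if __name__ == '__main__':
    dispatch(__doc__)  # docopt is imported only here, at dispatch time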
|