commit stringlengths 40-40 | old_file stringlengths 4-234 | new_file stringlengths 4-234 | old_contents stringlengths 10-3.01k | new_contents stringlengths 19-3.38k | subject stringlengths 16-736 | message stringlengths 17-2.63k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5-82.6k | config stringclasses 4 values | content stringlengths 134-4.41k | fuzzy_diff stringlengths 29-3.44k |
---|---|---|---|---|---|---|---|---|---|---|---|---|
b7a8711afdbd4eaf7dfbf4ae4daab9d340c192b3
|
numdifftools/testing.py
|
numdifftools/testing.py
|
'''
Created on Apr 4, 2016
@author: pab
'''
import inspect
import numpy as np
def rosen(x):
"""Rosenbrock function
This is a non-convex function used as a performance test problem for
optimization algorithms introduced by Howard H. Rosenbrock in 1960.[1]
"""
x = np.atleast_1d(x)
return (1 - x[0])**2 + 105. * (x[1] - x[0]**2)**2
def test_docstrings():
# np.set_printoptions(precision=6)
import doctest
print('Testing docstrings in %s' % inspect.stack()[1][1])
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)
|
'''
Created on Apr 4, 2016
@author: pab
'''
import inspect
import numpy as np
def rosen(x):
"""Rosenbrock function
This is a non-convex function used as a performance test problem for
optimization algorithms introduced by Howard H. Rosenbrock in 1960.[1]
"""
x = np.atleast_1d(x)
return (1 - x[0])**2 + 105. * (x[1] - x[0]**2)**2
def test_docstrings():
# np.set_printoptions(precision=6)
import doctest
print('Testing docstrings in {}'.format(inspect.stack()[1][1]))
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)
|
Replace string interpolation with format()
|
Replace string interpolation with format()
|
Python
|
bsd-3-clause
|
pbrod/numdifftools,pbrod/numdifftools
|
python
|
## Code Before:
'''
Created on Apr 4, 2016
@author: pab
'''
import inspect
import numpy as np
def rosen(x):
"""Rosenbrock function
This is a non-convex function used as a performance test problem for
optimization algorithms introduced by Howard H. Rosenbrock in 1960.[1]
"""
x = np.atleast_1d(x)
return (1 - x[0])**2 + 105. * (x[1] - x[0]**2)**2
def test_docstrings():
# np.set_printoptions(precision=6)
import doctest
print('Testing docstrings in %s' % inspect.stack()[1][1])
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)
## Instruction:
Replace string interpolation with format()
## Code After:
'''
Created on Apr 4, 2016
@author: pab
'''
import inspect
import numpy as np
def rosen(x):
"""Rosenbrock function
This is a non-convex function used as a performance test problem for
optimization algorithms introduced by Howard H. Rosenbrock in 1960.[1]
"""
x = np.atleast_1d(x)
return (1 - x[0])**2 + 105. * (x[1] - x[0]**2)**2
def test_docstrings():
# np.set_printoptions(precision=6)
import doctest
print('Testing docstrings in {}'.format(inspect.stack()[1][1]))
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)
|
# ... existing code ...
def test_docstrings():
# np.set_printoptions(precision=6)
import doctest
print('Testing docstrings in {}'.format(inspect.stack()[1][1]))
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)
# ... rest of the code ...
|
b8ad378a796ee867acfa3198e04d47a500dd90d3
|
mla/neuralnet/activations.py
|
mla/neuralnet/activations.py
|
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def leakyrelu(z, a=0.01):
return np.maximum(z * a, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
Add Leaky ReLU activation. Differentiation with autograd package confirmed to work correctly.
|
Add Leaky ReLU activation.
Differentiation with autograd package confirmed to work correctly.
|
Python
|
mit
|
rushter/MLAlgorithms
|
python
|
## Code Before:
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
## Instruction:
Add Leaky ReLU activation.
Differentiation with autograd package confirmed to work correctly.
## Code After:
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def leakyrelu(z, a=0.01):
return np.maximum(z * a, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
# ... existing code ...
return np.maximum(0, z)
def leakyrelu(z, a=0.01):
return np.maximum(z * a, z)
def get_activation(name):
"""Return activation function by name"""
try:
# ... rest of the code ...
|
290a1f7a2c6860ec57bdb74b9c97207e93e611f0
|
visualize_data.py
|
visualize_data.py
|
from __future__ import division
import argparse
import cv2
import h5py
import util
def main():
parser = argparse.ArgumentParser()
parser.add_argument('hdf5_fname', type=str)
parser.add_argument('--vis_scale', '-r', type=int, default=10, metavar='R', help='rescale image by R for visualization')
args = parser.parse_args()
with h5py.File(args.hdf5_fname, 'r') as hdf5_file:
for image_curr, vel, image_diff in zip(hdf5_file['image_curr'], hdf5_file['vel'], hdf5_file['image_diff']):
image_next = image_curr + image_diff
vis_image, done = util.visualize_images_callback(image_curr, image_next, vis_scale=args.vis_scale, delay=0)
if done:
break
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
|
from __future__ import division
import argparse
import cv2
import h5py
import util
def main():
parser = argparse.ArgumentParser()
parser.add_argument('hdf5_fname', type=str)
parser.add_argument('--vis_scale', '-r', type=int, default=10, metavar='R', help='rescale image by R for visualization')
parser.add_argument('--reverse', action='store_true')
args = parser.parse_args()
with h5py.File(args.hdf5_fname, 'r') as hdf5_file:
dsets = (hdf5_file['image_curr'], hdf5_file['vel'], hdf5_file['image_diff'])
if args.reverse:
dsets = tuple(dset[()][::-1] for dset in dsets)
for image_curr, vel, image_diff in zip(*dsets):
image_next = image_curr + image_diff
vis_image, done = util.visualize_images_callback(image_curr, image_next, vis_scale=args.vis_scale, delay=0)
if done:
break
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
|
Add option to visualize data in reverse
|
Add option to visualize data in reverse
|
Python
|
mit
|
alexlee-gk/visual_dynamics
|
python
|
## Code Before:
from __future__ import division
import argparse
import cv2
import h5py
import util
def main():
parser = argparse.ArgumentParser()
parser.add_argument('hdf5_fname', type=str)
parser.add_argument('--vis_scale', '-r', type=int, default=10, metavar='R', help='rescale image by R for visualization')
args = parser.parse_args()
with h5py.File(args.hdf5_fname, 'r') as hdf5_file:
for image_curr, vel, image_diff in zip(hdf5_file['image_curr'], hdf5_file['vel'], hdf5_file['image_diff']):
image_next = image_curr + image_diff
vis_image, done = util.visualize_images_callback(image_curr, image_next, vis_scale=args.vis_scale, delay=0)
if done:
break
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
## Instruction:
Add option to visualize data in reverse
## Code After:
from __future__ import division
import argparse
import cv2
import h5py
import util
def main():
parser = argparse.ArgumentParser()
parser.add_argument('hdf5_fname', type=str)
parser.add_argument('--vis_scale', '-r', type=int, default=10, metavar='R', help='rescale image by R for visualization')
parser.add_argument('--reverse', action='store_true')
args = parser.parse_args()
with h5py.File(args.hdf5_fname, 'r') as hdf5_file:
dsets = (hdf5_file['image_curr'], hdf5_file['vel'], hdf5_file['image_diff'])
if args.reverse:
dsets = tuple(dset[()][::-1] for dset in dsets)
for image_curr, vel, image_diff in zip(*dsets):
image_next = image_curr + image_diff
vis_image, done = util.visualize_images_callback(image_curr, image_next, vis_scale=args.vis_scale, delay=0)
if done:
break
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
|
# ... existing code ...
parser = argparse.ArgumentParser()
parser.add_argument('hdf5_fname', type=str)
parser.add_argument('--vis_scale', '-r', type=int, default=10, metavar='R', help='rescale image by R for visualization')
parser.add_argument('--reverse', action='store_true')
args = parser.parse_args()
with h5py.File(args.hdf5_fname, 'r') as hdf5_file:
dsets = (hdf5_file['image_curr'], hdf5_file['vel'], hdf5_file['image_diff'])
if args.reverse:
dsets = tuple(dset[()][::-1] for dset in dsets)
for image_curr, vel, image_diff in zip(*dsets):
image_next = image_curr + image_diff
vis_image, done = util.visualize_images_callback(image_curr, image_next, vis_scale=args.vis_scale, delay=0)
if done:
# ... rest of the code ...
|
b5be2d7c2edd88a08a15c2b524bfd6408339f380
|
src/main/java/au/gov/dto/dibp/appointments/config/AppConfig.java
|
src/main/java/au/gov/dto/dibp/appointments/config/AppConfig.java
|
package au.gov.dto.dibp.appointments.config;
import au.gov.dto.dibp.appointments.initializer.HttpsOnlyFilter;
import au.gov.dto.dibp.appointments.initializer.LogClientIdFilter;
import au.gov.dto.dibp.appointments.initializer.NoHttpSessionFilter;
import com.oakfusion.security.SecurityCookieService;
import org.springframework.boot.context.embedded.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan(basePackages = "au.gov.dto.dibp.appointments")
public class AppConfig {
@Bean
public FilterRegistrationBean httpsFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new HttpsOnlyFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean noHttpSessionFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new NoHttpSessionFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean logClientIdFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new LogClientIdFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public SecurityCookieService securityCookieService() {
return new SecurityCookieService("session", "secretkey");
}
}
|
package au.gov.dto.dibp.appointments.config;
import au.gov.dto.dibp.appointments.initializer.HttpsOnlyFilter;
import au.gov.dto.dibp.appointments.initializer.LogClientIdFilter;
import au.gov.dto.dibp.appointments.initializer.NoHttpSessionFilter;
import com.oakfusion.security.SecurityCookieService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.embedded.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan(basePackages = "au.gov.dto.dibp.appointments")
public class AppConfig {
@Bean
public FilterRegistrationBean httpsFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new HttpsOnlyFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean noHttpSessionFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new NoHttpSessionFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean logClientIdFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new LogClientIdFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public SecurityCookieService securityCookieService(@Value("${session.encryption.key}") String sessionEncryptionKey) {
return new SecurityCookieService("session", sessionEncryptionKey);
}
}
|
Use session cookie encryption key from environment variable
|
Use session cookie encryption key from environment variable
|
Java
|
mit
|
AusDTO/citizenship-appointment-server,AusDTO/citizenship-appointment-server,AusDTO/citizenship-appointment-server
|
java
|
## Code Before:
package au.gov.dto.dibp.appointments.config;
import au.gov.dto.dibp.appointments.initializer.HttpsOnlyFilter;
import au.gov.dto.dibp.appointments.initializer.LogClientIdFilter;
import au.gov.dto.dibp.appointments.initializer.NoHttpSessionFilter;
import com.oakfusion.security.SecurityCookieService;
import org.springframework.boot.context.embedded.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan(basePackages = "au.gov.dto.dibp.appointments")
public class AppConfig {
@Bean
public FilterRegistrationBean httpsFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new HttpsOnlyFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean noHttpSessionFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new NoHttpSessionFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean logClientIdFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new LogClientIdFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public SecurityCookieService securityCookieService() {
return new SecurityCookieService("session", "secretkey");
}
}
## Instruction:
Use session cookie encryption key from environment variable
## Code After:
package au.gov.dto.dibp.appointments.config;
import au.gov.dto.dibp.appointments.initializer.HttpsOnlyFilter;
import au.gov.dto.dibp.appointments.initializer.LogClientIdFilter;
import au.gov.dto.dibp.appointments.initializer.NoHttpSessionFilter;
import com.oakfusion.security.SecurityCookieService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.embedded.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan(basePackages = "au.gov.dto.dibp.appointments")
public class AppConfig {
@Bean
public FilterRegistrationBean httpsFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new HttpsOnlyFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean noHttpSessionFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new NoHttpSessionFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public FilterRegistrationBean logClientIdFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
registration.setFilter(new LogClientIdFilter());
registration.addUrlPatterns("/*");
return registration;
}
@Bean
public SecurityCookieService securityCookieService(@Value("${session.encryption.key}") String sessionEncryptionKey) {
return new SecurityCookieService("session", sessionEncryptionKey);
}
}
|
...
import au.gov.dto.dibp.appointments.initializer.LogClientIdFilter;
import au.gov.dto.dibp.appointments.initializer.NoHttpSessionFilter;
import com.oakfusion.security.SecurityCookieService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.embedded.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
...
@Configuration
@ComponentScan(basePackages = "au.gov.dto.dibp.appointments")
public class AppConfig {
@Bean
public FilterRegistrationBean httpsFilter() {
FilterRegistrationBean registration = new FilterRegistrationBean();
...
}
@Bean
public SecurityCookieService securityCookieService(@Value("${session.encryption.key}") String sessionEncryptionKey) {
return new SecurityCookieService("session", sessionEncryptionKey);
}
}
...
|
d0791ccd79dea2ec30d890ad9060f58d1e8b1c7c
|
run_tests.py
|
run_tests.py
|
import pytest
from bs4 import BeautifulSoup as BS
pytest.main(['--durations', '10', '--cov-report', 'html'])
url = r'htmlcov/index.html'
page = open(url)
soup = BS(page.read(), features='html5lib')
aggregate_total = soup.find_all('tr', {'class': 'total'})
final = None
for x in aggregate_total:
pct = x.text.replace(' ', '').replace('\n', ' ').split(' ')
final = pct[6]
with open('test_report.txt', 'w') as report:
report.write(final.strip().replace('%', ''))
|
import pytest
from bs4 import BeautifulSoup as BS
pytest.main(['--durations', '10', '--cov-report', 'html', '--junit-xml', 'test-reports/results.xml', '--verbose'])
url = r'htmlcov/index.html'
page = open(url)
soup = BS(page.read(), features='html5lib')
aggregate_total = soup.find_all('tr', {'class': 'total'})
final = None
for x in aggregate_total:
pct = x.text.replace(' ', '').replace('\n', ' ').split(' ')
final = pct[6]
with open('test_report.txt', 'w') as report:
report.write(final.strip().replace('%', ''))
|
Update test file - add flag for reports
|
Update test file - add flag for reports
|
Python
|
mit
|
misachi/job_match,misachi/job_match,misachi/job_match
|
python
|
## Code Before:
import pytest
from bs4 import BeautifulSoup as BS
pytest.main(['--durations', '10', '--cov-report', 'html'])
url = r'htmlcov/index.html'
page = open(url)
soup = BS(page.read(), features='html5lib')
aggregate_total = soup.find_all('tr', {'class': 'total'})
final = None
for x in aggregate_total:
pct = x.text.replace(' ', '').replace('\n', ' ').split(' ')
final = pct[6]
with open('test_report.txt', 'w') as report:
report.write(final.strip().replace('%', ''))
## Instruction:
Update test file - add flag for reports
## Code After:
import pytest
from bs4 import BeautifulSoup as BS
pytest.main(['--durations', '10', '--cov-report', 'html', '--junit-xml', 'test-reports/results.xml', '--verbose'])
url = r'htmlcov/index.html'
page = open(url)
soup = BS(page.read(), features='html5lib')
aggregate_total = soup.find_all('tr', {'class': 'total'})
final = None
for x in aggregate_total:
pct = x.text.replace(' ', '').replace('\n', ' ').split(' ')
final = pct[6]
with open('test_report.txt', 'w') as report:
report.write(final.strip().replace('%', ''))
|
// ... existing code ...
import pytest
from bs4 import BeautifulSoup as BS
pytest.main(['--durations', '10', '--cov-report', 'html', '--junit-xml', 'test-reports/results.xml', '--verbose'])
url = r'htmlcov/index.html'
page = open(url)
soup = BS(page.read(), features='html5lib')
// ... rest of the code ...
|
6a58c7f0eb1b92ec12d0e48d7fd3f2586de20755
|
sal/management/commands/update_admin_user.py
|
sal/management/commands/update_admin_user.py
|
'''
Creates an admin user if there aren't any existing superusers
'''
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from optparse import make_option
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise StandardError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print('{0} updated'.format(username))
else:
print('There are already {0} superusers'.format(su_count))
|
"""Creates an admin user if there aren't any existing superusers."""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print(f'{username} updated')
else:
print(f'There are already {su_count} superusers')
|
Fix exception handling in management command. Clean up.
|
Fix exception handling in management command. Clean up.
|
Python
|
apache-2.0
|
salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal
|
python
|
## Code Before:
'''
Creates an admin user if there aren't any existing superusers
'''
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from optparse import make_option
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise StandardError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print('{0} updated'.format(username))
else:
print('There are already {0} superusers'.format(su_count))
## Instruction:
Fix exception handling in management command. Clean up.
## Code After:
"""Creates an admin user if there aren't any existing superusers."""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print(f'{username} updated')
else:
print(f'There are already {su_count} superusers')
|
# ... existing code ...
"""Creates an admin user if there aren't any existing superusers."""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
# ... modified code ...
username = options.get('username')
password = options.get('password')
if not username or not password:
raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
...
user.is_staff = True
user.is_superuser = True
user.save()
print(f'{username} updated')
else:
print(f'There are already {su_count} superusers')
# ... rest of the code ...
|
b900fc04b666933709a66e6164cdc5ba29f5ae11
|
generators/server/templates/src/test/java/package/cucumber/stepdefs/_StepDefs.java
|
generators/server/templates/src/test/java/package/cucumber/stepdefs/_StepDefs.java
|
package <%=packageName%>.cucumber.stepdefs;
import <%=packageName%>.<%= mainClass %>;
import org.springframework.boot.test.SpringApplicationContextLoader;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.ResultActions;
@WebAppConfiguration
@ContextConfiguration(classes = <%= mainClass %>.class, loader = SpringApplicationContextLoader.class)
public abstract class StepDefs {
protected ResultActions actions;
}
|
package <%=packageName%>.cucumber.stepdefs;
import <%=packageName%>.<%= mainClass %>;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.boot.test.context.SpringBootTest;
@WebAppConfiguration
@SpringBootTest
@ContextConfiguration(classes = <%= mainClass %>.class)
public abstract class StepDefs {
protected ResultActions actions;
}
|
Fix cucumber tests after SB1.5.1 upgrade
|
Fix cucumber tests after SB1.5.1 upgrade
|
Java
|
apache-2.0
|
ctamisier/generator-jhipster,rifatdover/generator-jhipster,stevehouel/generator-jhipster,JulienMrgrd/generator-jhipster,deepu105/generator-jhipster,danielpetisme/generator-jhipster,sohibegit/generator-jhipster,duderoot/generator-jhipster,JulienMrgrd/generator-jhipster,robertmilowski/generator-jhipster,dimeros/generator-jhipster,JulienMrgrd/generator-jhipster,ctamisier/generator-jhipster,Tcharl/generator-jhipster,yongli82/generator-jhipster,jkutner/generator-jhipster,PierreBesson/generator-jhipster,dalbelap/generator-jhipster,mraible/generator-jhipster,mraible/generator-jhipster,robertmilowski/generator-jhipster,ziogiugno/generator-jhipster,sendilkumarn/generator-jhipster,jkutner/generator-jhipster,cbornet/generator-jhipster,PierreBesson/generator-jhipster,gzsombor/generator-jhipster,duderoot/generator-jhipster,danielpetisme/generator-jhipster,ramzimaalej/generator-jhipster,nkolosnjaji/generator-jhipster,dynamicguy/generator-jhipster,sendilkumarn/generator-jhipster,stevehouel/generator-jhipster,eosimosu/generator-jhipster,ruddell/generator-jhipster,gmarziou/generator-jhipster,liseri/generator-jhipster,wmarques/generator-jhipster,erikkemperman/generator-jhipster,robertmilowski/generator-jhipster,jhipster/generator-jhipster,siliconharborlabs/generator-jhipster,ziogiugno/generator-jhipster,dimeros/generator-jhipster,gzsombor/generator-jhipster,yongli82/generator-jhipster,PierreBesson/generator-jhipster,erikkemperman/generator-jhipster,pascalgrimaud/generator-jhipster,ziogiugno/generator-jhipster,nkolosnjaji/generator-jhipster,jkutner/generator-jhipster,liseri/generator-jhipster,ramzimaalej/generator-jhipster,eosimosu/generator-jhipster,vivekmore/generator-jhipster,yongli82/generator-jhipster,ziogiugno/generator-jhipster,vivekmore/generator-jhipster,rifatdover/generator-jhipster,jhipster/generator-jhipster,cbornet/generator-jhipster,ruddell/generator-jhipster,vivekmore/generator-jhipster,stevehouel/generator-jhipster,pascalgrimaud/generator-jhipster,dynamicguy/generator-jhipster,siliconharborlabs/generator-jhipster,jhipster/generator-jhipster,dynamicguy/generator-jhipster,rkohel/generator-jhipster,PierreBesson/generator-jhipster,rkohel/generator-jhipster,vivekmore/generator-jhipster,sendilkumarn/generator-jhipster,atomfrede/generator-jhipster,hdurix/generator-jhipster,dimeros/generator-jhipster,deepu105/generator-jhipster,gmarziou/generator-jhipster,eosimosu/generator-jhipster,mosoft521/generator-jhipster,mosoft521/generator-jhipster,dimeros/generator-jhipster,JulienMrgrd/generator-jhipster,gzsombor/generator-jhipster,cbornet/generator-jhipster,stevehouel/generator-jhipster,wmarques/generator-jhipster,liseri/generator-jhipster,vivekmore/generator-jhipster,sendilkumarn/generator-jhipster,sendilkumarn/generator-jhipster,duderoot/generator-jhipster,deepu105/generator-jhipster,siliconharborlabs/generator-jhipster,mraible/generator-jhipster,mosoft521/generator-jhipster,mraible/generator-jhipster,nkolosnjaji/generator-jhipster,pascalgrimaud/generator-jhipster,atomfrede/generator-jhipster,rkohel/generator-jhipster,jkutner/generator-jhipster,robertmilowski/generator-jhipster,sohibegit/generator-jhipster,erikkemperman/generator-jhipster,ctamisier/generator-jhipster,dalbelap/generator-jhipster,atomfrede/generator-jhipster,gmarziou/generator-jhipster,dynamicguy/generator-jhipster,hdurix/generator-jhipster,ctamisier/generator-jhipster,wmarques/generator-jhipster,danielpetisme/generator-jhipster,siliconharborlabs/generator-jhipster,cbornet/generator-jhipster,nkolosnjaji/generator-jhipster,JulienMrgrd/generat
or-jhipster,siliconharborlabs/generator-jhipster,PierreBesson/generator-jhipster,sohibegit/generator-jhipster,ziogiugno/generator-jhipster,dalbelap/generator-jhipster,ruddell/generator-jhipster,sohibegit/generator-jhipster,gmarziou/generator-jhipster,atomfrede/generator-jhipster,eosimosu/generator-jhipster,wmarques/generator-jhipster,stevehouel/generator-jhipster,pascalgrimaud/generator-jhipster,liseri/generator-jhipster,Tcharl/generator-jhipster,pascalgrimaud/generator-jhipster,duderoot/generator-jhipster,duderoot/generator-jhipster,wmarques/generator-jhipster,danielpetisme/generator-jhipster,erikkemperman/generator-jhipster,yongli82/generator-jhipster,dalbelap/generator-jhipster,cbornet/generator-jhipster,rkohel/generator-jhipster,ruddell/generator-jhipster,erikkemperman/generator-jhipster,ramzimaalej/generator-jhipster,hdurix/generator-jhipster,jhipster/generator-jhipster,Tcharl/generator-jhipster,rkohel/generator-jhipster,jhipster/generator-jhipster,hdurix/generator-jhipster,nkolosnjaji/generator-jhipster,rifatdover/generator-jhipster,deepu105/generator-jhipster,mraible/generator-jhipster,mosoft521/generator-jhipster,gzsombor/generator-jhipster,dimeros/generator-jhipster,ctamisier/generator-jhipster,robertmilowski/generator-jhipster,gmarziou/generator-jhipster,mosoft521/generator-jhipster,liseri/generator-jhipster,sohibegit/generator-jhipster,jkutner/generator-jhipster,yongli82/generator-jhipster,dalbelap/generator-jhipster,hdurix/generator-jhipster,Tcharl/generator-jhipster,deepu105/generator-jhipster,gzsombor/generator-jhipster,Tcharl/generator-jhipster,ruddell/generator-jhipster,atomfrede/generator-jhipster,danielpetisme/generator-jhipster,eosimosu/generator-jhipster
|
java
|
## Code Before:
package <%=packageName%>.cucumber.stepdefs;
import <%=packageName%>.<%= mainClass %>;
import org.springframework.boot.test.SpringApplicationContextLoader;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.ResultActions;
@WebAppConfiguration
@ContextConfiguration(classes = <%= mainClass %>.class, loader = SpringApplicationContextLoader.class)
public abstract class StepDefs {
protected ResultActions actions;
}
## Instruction:
Fix cucumber tests after SB1.5.1 upgrade
## Code After:
package <%=packageName%>.cucumber.stepdefs;
import <%=packageName%>.<%= mainClass %>;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.boot.test.context.SpringBootTest;
@WebAppConfiguration
@SpringBootTest
@ContextConfiguration(classes = <%= mainClass %>.class)
public abstract class StepDefs {
protected ResultActions actions;
}
|
...
import <%=packageName%>.<%= mainClass %>;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.boot.test.context.SpringBootTest;
@WebAppConfiguration
@SpringBootTest
@ContextConfiguration(classes = <%= mainClass %>.class)
public abstract class StepDefs {
protected ResultActions actions;
...
|
402f71cc65f714cf880c9c9569e83f5bcd47ec72
|
paintstore/widgets.py
|
paintstore/widgets.py
|
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
class ColorPickerWidget(forms.TextInput):
class Media:
css = {
"all": ("%s/%s" % (settings.STATIC_URL, "paintstore/css/colorpicker.css"),)
}
js = (
("%s/%s" % (settings.STATIC_URL, "paintstore/jquery_1.7.2.js")),
("%s/%s" % (settings.STATIC_URL, "paintstore/colorpicker.js"))
)
input_type = 'colorpicker'
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){
$('#%s').ColorPicker({
onSubmit: function(hsb, hex, rgb, el, parent) {
$(el).val('#' + hex);
$(el).ColorPickerHide();
},
onBeforeShow: function () {
$(this).ColorPickerSetColor(this.value);
}
}).bind('keyup', function(){
$(this).ColorPickerSetColor(this.value.replace('#', ''));
});
});
</script>
""" % ("id_%s" % name,)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
|
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
class ColorPickerWidget(forms.TextInput):
class Media:
css = {
"all": ("%s/%s" % (settings.STATIC_URL, "paintstore/css/colorpicker.css"),)
}
js = (
("%s/%s" % (settings.STATIC_URL, "paintstore/jquery_1.7.2.js")),
("%s/%s" % (settings.STATIC_URL, "paintstore/colorpicker.js"))
)
input_type = 'colorpicker'
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){{
$('#{0}').ColorPicker({{
onSubmit: function(hsb, hex, rgb, el, parent) {{
$(el).val('#' + hex);
$(el).ColorPickerHide();
$('#{0}').css('background-color', '#' + hex);
}},
onBeforeShow: function () {{
$(this).ColorPickerSetColor(this.value);
}}
}}).bind('keyup', function(){{
$(this).ColorPickerSetColor(this.value.replace('#', ''));
}});
$('#{0}').css('background-color', $('#{0}').val());
}});
</script>
""".format(u'id_'+name)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
|
Change the background to reflect the color chosen
|
Change the background to reflect the color chosen
|
Python
|
mit
|
jamescw/django-paintstore,jamescw/django-paintstore
|
python
|
## Code Before:
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
class ColorPickerWidget(forms.TextInput):
class Media:
css = {
"all": ("%s/%s" % (settings.STATIC_URL, "paintstore/css/colorpicker.css"),)
}
js = (
("%s/%s" % (settings.STATIC_URL, "paintstore/jquery_1.7.2.js")),
("%s/%s" % (settings.STATIC_URL, "paintstore/colorpicker.js"))
)
input_type = 'colorpicker'
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){
$('#%s').ColorPicker({
onSubmit: function(hsb, hex, rgb, el, parent) {
$(el).val('#' + hex);
$(el).ColorPickerHide();
},
onBeforeShow: function () {
$(this).ColorPickerSetColor(this.value);
}
}).bind('keyup', function(){
$(this).ColorPickerSetColor(this.value.replace('#', ''));
});
});
</script>
""" % ("id_%s" % name,)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
## Instruction:
Change the background to reflect the color chosen
## Code After:
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
class ColorPickerWidget(forms.TextInput):
class Media:
css = {
"all": ("%s/%s" % (settings.STATIC_URL, "paintstore/css/colorpicker.css"),)
}
js = (
("%s/%s" % (settings.STATIC_URL, "paintstore/jquery_1.7.2.js")),
("%s/%s" % (settings.STATIC_URL, "paintstore/colorpicker.js"))
)
input_type = 'colorpicker'
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){{
$('#{0}').ColorPicker({{
onSubmit: function(hsb, hex, rgb, el, parent) {{
$(el).val('#' + hex);
$(el).ColorPickerHide();
$('#{0}').css('background-color', '#' + hex);
}},
onBeforeShow: function () {{
$(this).ColorPickerSetColor(this.value);
}}
}}).bind('keyup', function(){{
$(this).ColorPickerSetColor(this.value.replace('#', ''));
}});
$('#{0}').css('background-color', $('#{0}').val());
}});
</script>
""".format(u'id_'+name)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
|
// ... existing code ...
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){{
$('#{0}').ColorPicker({{
onSubmit: function(hsb, hex, rgb, el, parent) {{
$(el).val('#' + hex);
$(el).ColorPickerHide();
$('#{0}').css('background-color', '#' + hex);
}},
onBeforeShow: function () {{
$(this).ColorPickerSetColor(this.value);
}}
}}).bind('keyup', function(){{
$(this).ColorPickerSetColor(this.value.replace('#', ''));
}});
$('#{0}').css('background-color', $('#{0}').val());
}});
</script>
""".format(u'id_'+name)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
// ... rest of the code ...
|
df4bf37f48e8d47d3c3876e9b6342033851bb4b5
|
bridgesample/src/main/java/com/livefront/bridgesample/base/BridgeBaseFragment.kt
|
bridgesample/src/main/java/com/livefront/bridgesample/base/BridgeBaseFragment.kt
|
package com.livefront.bridgesample.base
import android.os.Bundle
import android.support.v4.app.Fragment
import com.livefront.bridge.Bridge
abstract class BridgeBaseFragment : Fragment() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Bridge.restoreInstanceState(this, savedInstanceState)
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
Bridge.saveInstanceState(this, outState)
}
override fun onDestroy() {
super.onDestroy()
Bridge.clear(this)
}
}
|
package com.livefront.bridgesample.base
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v4.app.FragmentStatePagerAdapter
import com.livefront.bridge.Bridge
abstract class BridgeBaseFragment : Fragment() {
/**
* Determines whether or not [Bridge.clear] will be called in [onDestroy]. This is enabled by
* default but may be disabled in scenarios where Fragments are "recycled" (such as when using
* a [FragmentStatePagerAdapter].
*/
open val shouldClearOnDestroy: Boolean = true
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Bridge.restoreInstanceState(this, savedInstanceState)
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
Bridge.saveInstanceState(this, outState)
}
override fun onDestroy() {
super.onDestroy()
if (shouldClearOnDestroy) Bridge.clear(this)
}
}
|
Add ability to disable Bridge clearing in onDestroy
|
Add ability to disable Bridge clearing in onDestroy
|
Kotlin
|
apache-2.0
|
livefront/bridge,livefront/bridge
|
kotlin
|
## Code Before:
package com.livefront.bridgesample.base
import android.os.Bundle
import android.support.v4.app.Fragment
import com.livefront.bridge.Bridge
abstract class BridgeBaseFragment : Fragment() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Bridge.restoreInstanceState(this, savedInstanceState)
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
Bridge.saveInstanceState(this, outState)
}
override fun onDestroy() {
super.onDestroy()
Bridge.clear(this)
}
}
## Instruction:
Add ability to disable Bridge clearing in onDestroy
## Code After:
package com.livefront.bridgesample.base
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v4.app.FragmentStatePagerAdapter
import com.livefront.bridge.Bridge
abstract class BridgeBaseFragment : Fragment() {
/**
* Determines whether or not [Bridge.clear] will be called in [onDestroy]. This is enabled by
* default but may be disabled in scenarios where Fragments are "recycled" (such as when using
* a [FragmentStatePagerAdapter].
*/
open val shouldClearOnDestroy: Boolean = true
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Bridge.restoreInstanceState(this, savedInstanceState)
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
Bridge.saveInstanceState(this, outState)
}
override fun onDestroy() {
super.onDestroy()
if (shouldClearOnDestroy) Bridge.clear(this)
}
}
|
# ... existing code ...
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v4.app.FragmentStatePagerAdapter
import com.livefront.bridge.Bridge
abstract class BridgeBaseFragment : Fragment() {
/**
* Determines whether or not [Bridge.clear] will be called in [onDestroy]. This is enabled by
* default but may be disabled in scenarios where Fragments are "recycled" (such as when using
* a [FragmentStatePagerAdapter].
*/
open val shouldClearOnDestroy: Boolean = true
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Bridge.restoreInstanceState(this, savedInstanceState)
# ... modified code ...
override fun onDestroy() {
super.onDestroy()
if (shouldClearOnDestroy) Bridge.clear(this)
}
}
# ... rest of the code ...
|
0f7853c3568791f0e93ece57d2fc750dbc93b963
|
starlette/concurrency.py
|
starlette/concurrency.py
|
import asyncio
import functools
import typing
from typing import Any, AsyncGenerator, Iterator
try:
import contextvars # Python 3.7+ only.
except ImportError: # pragma: no cover
contextvars = None # type: ignore
async def run_in_threadpool(
func: typing.Callable, *args: typing.Any, **kwargs: typing.Any
) -> typing.Any:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
child = functools.partial(func, *args, **kwargs)
context = contextvars.copy_context()
func = context.run
args = (child,)
elif kwargs: # pragma: no cover
# loop.run_in_executor doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await loop.run_in_executor(None, func, *args)
class _StopIteration(Exception):
pass
def _next(iterator: Iterator) -> Any:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(iterator: Iterator) -> AsyncGenerator:
while True:
try:
yield await run_in_threadpool(_next, iterator)
except _StopIteration:
break
|
import asyncio
import functools
import typing
from typing import Any, AsyncGenerator, Iterator
try:
import contextvars # Python 3.7+ only.
except ImportError: # pragma: no cover
contextvars = None # type: ignore
T = typing.TypeVar("T")
async def run_in_threadpool(
func: typing.Callable[..., T], *args: typing.Any, **kwargs: typing.Any
) -> T:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
child = functools.partial(func, *args, **kwargs)
context = contextvars.copy_context()
func = context.run
args = (child,)
elif kwargs: # pragma: no cover
# loop.run_in_executor doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await loop.run_in_executor(None, func, *args)
class _StopIteration(Exception):
pass
def _next(iterator: Iterator) -> Any:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(iterator: Iterator) -> AsyncGenerator:
while True:
try:
yield await run_in_threadpool(_next, iterator)
except _StopIteration:
break
|
Add type hint for run_in_threadpool return type
|
Add type hint for run_in_threadpool return type
|
Python
|
bsd-3-clause
|
encode/starlette,encode/starlette
|
python
|
## Code Before:
import asyncio
import functools
import typing
from typing import Any, AsyncGenerator, Iterator
try:
import contextvars # Python 3.7+ only.
except ImportError: # pragma: no cover
contextvars = None # type: ignore
async def run_in_threadpool(
func: typing.Callable, *args: typing.Any, **kwargs: typing.Any
) -> typing.Any:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
child = functools.partial(func, *args, **kwargs)
context = contextvars.copy_context()
func = context.run
args = (child,)
elif kwargs: # pragma: no cover
# loop.run_in_executor doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await loop.run_in_executor(None, func, *args)
class _StopIteration(Exception):
pass
def _next(iterator: Iterator) -> Any:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(iterator: Iterator) -> AsyncGenerator:
while True:
try:
yield await run_in_threadpool(_next, iterator)
except _StopIteration:
break
## Instruction:
Add type hint for run_in_threadpool return type
## Code After:
import asyncio
import functools
import typing
from typing import Any, AsyncGenerator, Iterator
try:
import contextvars # Python 3.7+ only.
except ImportError: # pragma: no cover
contextvars = None # type: ignore
T = typing.TypeVar("T")
async def run_in_threadpool(
func: typing.Callable[..., T], *args: typing.Any, **kwargs: typing.Any
) -> T:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
child = functools.partial(func, *args, **kwargs)
context = contextvars.copy_context()
func = context.run
args = (child,)
elif kwargs: # pragma: no cover
# loop.run_in_executor doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await loop.run_in_executor(None, func, *args)
class _StopIteration(Exception):
pass
def _next(iterator: Iterator) -> Any:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(iterator: Iterator) -> AsyncGenerator:
while True:
try:
yield await run_in_threadpool(_next, iterator)
except _StopIteration:
break
|
...
except ImportError: # pragma: no cover
contextvars = None # type: ignore
T = typing.TypeVar("T")
async def run_in_threadpool(
func: typing.Callable[..., T], *args: typing.Any, **kwargs: typing.Any
) -> T:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
...
|
2c502a77ad18d34470e2be89ed1c7a38e6f3799d
|
tests/test_drogher.py
|
tests/test_drogher.py
|
import pytest
import drogher
from drogher.exceptions import InvalidBarcode
class TestDrogher:
def test_barcode(self):
shipper = drogher.barcode('1Z999AA10123456784')
assert shipper.shipper == 'UPS'
def test_invalid_barcode(self):
with pytest.raises(InvalidBarcode):
drogher.barcode('1234')
|
import pytest
import drogher
from drogher.exceptions import InvalidBarcode
class TestDrogher:
def test_dhl_barcode(self):
shipper = drogher.barcode('1656740256')
assert shipper.shipper == 'DHL'
def test_fedex_express_barcode(self):
shipper = drogher.barcode('9632001960000000000400152152152158')
assert shipper.shipper == 'FedEx'
def test_fedex_ground_barcode(self):
shipper = drogher.barcode('9611019012345612345671')
assert shipper.shipper == 'FedEx'
def test_ups_barcode(self):
shipper = drogher.barcode('1Z999AA10123456784')
assert shipper.shipper == 'UPS'
def test_usps_barcode(self):
shipper = drogher.barcode('420221539101026837331000039521')
assert shipper.shipper == 'USPS'
def test_invalid_barcode(self):
with pytest.raises(InvalidBarcode):
drogher.barcode('1234')
|
Test barcode function with all shippers
|
Test barcode function with all shippers
|
Python
|
bsd-3-clause
|
jbittel/drogher
|
python
|
## Code Before:
import pytest
import drogher
from drogher.exceptions import InvalidBarcode
class TestDrogher:
def test_barcode(self):
shipper = drogher.barcode('1Z999AA10123456784')
assert shipper.shipper == 'UPS'
def test_invalid_barcode(self):
with pytest.raises(InvalidBarcode):
drogher.barcode('1234')
## Instruction:
Test barcode function with all shippers
## Code After:
import pytest
import drogher
from drogher.exceptions import InvalidBarcode
class TestDrogher:
def test_dhl_barcode(self):
shipper = drogher.barcode('1656740256')
assert shipper.shipper == 'DHL'
def test_fedex_express_barcode(self):
shipper = drogher.barcode('9632001960000000000400152152152158')
assert shipper.shipper == 'FedEx'
def test_fedex_ground_barcode(self):
shipper = drogher.barcode('9611019012345612345671')
assert shipper.shipper == 'FedEx'
def test_ups_barcode(self):
shipper = drogher.barcode('1Z999AA10123456784')
assert shipper.shipper == 'UPS'
def test_usps_barcode(self):
shipper = drogher.barcode('420221539101026837331000039521')
assert shipper.shipper == 'USPS'
def test_invalid_barcode(self):
with pytest.raises(InvalidBarcode):
drogher.barcode('1234')
|
# ... existing code ...
class TestDrogher:
def test_dhl_barcode(self):
shipper = drogher.barcode('1656740256')
assert shipper.shipper == 'DHL'
def test_fedex_express_barcode(self):
shipper = drogher.barcode('9632001960000000000400152152152158')
assert shipper.shipper == 'FedEx'
def test_fedex_ground_barcode(self):
shipper = drogher.barcode('9611019012345612345671')
assert shipper.shipper == 'FedEx'
def test_ups_barcode(self):
shipper = drogher.barcode('1Z999AA10123456784')
assert shipper.shipper == 'UPS'
def test_usps_barcode(self):
shipper = drogher.barcode('420221539101026837331000039521')
assert shipper.shipper == 'USPS'
def test_invalid_barcode(self):
with pytest.raises(InvalidBarcode):
# ... rest of the code ...
|
7bf477f2ce728ed4af4163a0a96f9ec1b3b76d8d
|
tests/cyclus_tools.py
|
tests/cyclus_tools.py
|
import os
from tools import check_cmd
def run_cyclus(cyclus, cwd, sim_files):
"""Runs cyclus with various inputs and creates output databases
"""
for sim_input, sim_output in sim_files:
holdsrtn = [1] # needed because nose does not send() to test generator
# make sure the output target directory exists
if not os.path.exists(sim_output):
os.makedirs(sim_output)
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don"t execute further commands
|
import os
from tools import check_cmd
def run_cyclus(cyclus, cwd, sim_files):
"""Runs cyclus with various inputs and creates output databases
"""
for sim_input, sim_output in sim_files:
holdsrtn = [1] # needed because nose does not send() to test generator
# make sure the output target directory exists
if not os.path.exists(os.path.dirname(sim_output)):
os.makedirs(os.path.dirname(sim_output))
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don"t execute further commands
|
Correct a bug in creating directories as needed for output.
|
Correct a bug in creating directories as needed for output.
|
Python
|
bsd-3-clause
|
Baaaaam/cyBaM,Baaaaam/cyBaM,Baaaaam/cyBaM,rwcarlsen/cycamore,rwcarlsen/cycamore,gonuke/cycamore,Baaaaam/cycamore,Baaaaam/cyBaM,jlittell/cycamore,rwcarlsen/cycamore,jlittell/cycamore,cyclus/cycaless,gonuke/cycamore,Baaaaam/cyCLASS,jlittell/cycamore,gonuke/cycamore,jlittell/cycamore,Baaaaam/cycamore,gonuke/cycamore,cyclus/cycaless,Baaaaam/cyCLASS,rwcarlsen/cycamore,Baaaaam/cycamore
|
python
|
## Code Before:
import os
from tools import check_cmd
def run_cyclus(cyclus, cwd, sim_files):
"""Runs cyclus with various inputs and creates output databases
"""
for sim_input, sim_output in sim_files:
holdsrtn = [1] # needed because nose does not send() to test generator
# make sure the output target directory exists
if not os.path.exists(sim_output):
os.makedirs(sim_output)
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don"t execute further commands
## Instruction:
Correct a bug in creating directories as needed for output.
## Code After:
import os
from tools import check_cmd
def run_cyclus(cyclus, cwd, sim_files):
"""Runs cyclus with various inputs and creates output databases
"""
for sim_input, sim_output in sim_files:
holdsrtn = [1] # needed because nose does not send() to test generator
# make sure the output target directory exists
if not os.path.exists(os.path.dirname(sim_output)):
os.makedirs(os.path.dirname(sim_output))
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don"t execute further commands
|
...
for sim_input, sim_output in sim_files:
holdsrtn = [1] # needed because nose does not send() to test generator
# make sure the output target directory exists
if not os.path.exists(os.path.dirname(sim_output)):
os.makedirs(os.path.dirname(sim_output))
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
...
|
f96989d067f6fd073d04f96bdf2ae314c9b02d49
|
uoftscrapers/scrapers/utils/layers.py
|
uoftscrapers/scrapers/utils/layers.py
|
import requests
import json
from . import Scraper
class LayersScraper:
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
host = 'http://map.utoronto.ca/'
s = requests.Session()
@staticmethod
def get_layers_json(campus):
"""Retrieve the JSON structure from host."""
Scraper.logger.info('Retrieving map layers for %s.' % campus.upper())
headers = {
'Referer': LayersScraper.host
}
html = LayersScraper.s.get('%s%s%s' % (
LayersScraper.host,
'data/map/',
campus
), headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
import requests
import json
from . import Scraper
class LayersScraper:
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
host = 'http://map.utoronto.ca/'
@staticmethod
def get_layers_json(campus):
"""Retrieve the JSON structure from host."""
Scraper.logger.info('Retrieving map layers for %s.' % campus.upper())
headers = {'Referer': LayersScraper.host}
data = Scraper.get('%s%s%s' % (
LayersScraper.host,
'data/map/',
campus
), headers=headers, json=True)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
Use request helper function in LayersScraper
|
Use request helper function in LayersScraper
|
Python
|
mit
|
kshvmdn/uoft-scrapers,cobalt-uoft/uoft-scrapers,arkon/uoft-scrapers,g3wanghc/uoft-scrapers
|
python
|
## Code Before:
import requests
import json
from . import Scraper
class LayersScraper:
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
host = 'http://map.utoronto.ca/'
s = requests.Session()
@staticmethod
def get_layers_json(campus):
"""Retrieve the JSON structure from host."""
Scraper.logger.info('Retrieving map layers for %s.' % campus.upper())
headers = {
'Referer': LayersScraper.host
}
html = LayersScraper.s.get('%s%s%s' % (
LayersScraper.host,
'data/map/',
campus
), headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
## Instruction:
Use request helper function in LayersScraper
## Code After:
import requests
import json
from . import Scraper
class LayersScraper:
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
host = 'http://map.utoronto.ca/'
@staticmethod
def get_layers_json(campus):
"""Retrieve the JSON structure from host."""
Scraper.logger.info('Retrieving map layers for %s.' % campus.upper())
headers = {'Referer': LayersScraper.host}
data = Scraper.get('%s%s%s' % (
LayersScraper.host,
'data/map/',
campus
), headers=headers, json=True)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
...
"""
host = 'http://map.utoronto.ca/'
@staticmethod
def get_layers_json(campus):
...
Scraper.logger.info('Retrieving map layers for %s.' % campus.upper())
headers = {'Referer': LayersScraper.host}
data = Scraper.get('%s%s%s' % (
LayersScraper.host,
'data/map/',
campus
), headers=headers, json=True)
return data['layers']
@staticmethod
...
|
62e40ee27413b170d40791912d8509e26b981398
|
examples/tools/print_devices.py
|
examples/tools/print_devices.py
|
import pyopencl as cl
def main():
dev_type_str = {}
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'ID: %s' % platform_index
print platform.name
print platform.profile
print platform.vendor
print platform.version
for device in platform.get_devices():
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
try:
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print '\t',
if param == 'TYPE':
value = '%s (%s)' % (
value,
dev_type_str.get(value, 'UNDEF')
)
print '%s:\t%s' % (
param,
value
)
print ''
if __name__ == '__main__':
main()
|
import pyopencl as cl
def main():
dev_type_str = {}
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'platform: %s' % platform_index
print '%s' % platform.name
print '%s' % platform.profile
print '%s' % platform.vendor
print '%s' % platform.version
for device in platform.get_devices():
print ' device: %s' % platform_index
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
try:
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print ' ',
if param == 'TYPE':
value = '%s (%s)' % (
value,
dev_type_str.get(value, 'UNDEF')
)
print '%s:\t%s' % (
param,
value
)
print ''
if __name__ == '__main__':
main()
|
Print devices example - change output format
|
Print devices example - change output format
|
Python
|
mit
|
openre/openre,openre/openre
|
python
|
## Code Before:
import pyopencl as cl
def main():
dev_type_str = {}
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'ID: %s' % platform_index
print platform.name
print platform.profile
print platform.vendor
print platform.version
for device in platform.get_devices():
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
try:
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print '\t',
if param == 'TYPE':
value = '%s (%s)' % (
value,
dev_type_str.get(value, 'UNDEF')
)
print '%s:\t%s' % (
param,
value
)
print ''
if __name__ == '__main__':
main()
## Instruction:
Print devices example - change output format
## Code After:
import pyopencl as cl
def main():
dev_type_str = {}
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'platform: %s' % platform_index
print '%s' % platform.name
print '%s' % platform.profile
print '%s' % platform.vendor
print '%s' % platform.version
for device in platform.get_devices():
print ' device: %s' % platform_index
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
try:
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print ' ',
if param == 'TYPE':
value = '%s (%s)' % (
value,
dev_type_str.get(value, 'UNDEF')
)
print '%s:\t%s' % (
param,
value
)
print ''
if __name__ == '__main__':
main()
|
# ... existing code ...
for dev_type in ['ACCELERATOR', 'ALL', 'CPU', 'CUSTOM', 'DEFAULT', 'GPU']:
dev_type_str[getattr(cl.device_type, dev_type)] = dev_type
for platform_index, platform in enumerate(cl.get_platforms()):
print 'platform: %s' % platform_index
print '%s' % platform.name
print '%s' % platform.profile
print '%s' % platform.vendor
print '%s' % platform.version
for device in platform.get_devices():
print ' device: %s' % platform_index
for param in ['NAME', 'BUILT_IN_KERNELS', 'MAX_COMPUTE_UNITS',
'GLOBAL_MEM_SIZE', 'MAX_MEM_ALLOC_SIZE', 'TYPE',
'MAX_WORK_GROUP_SIZE']:
# ... modified code ...
value = device.get_info(getattr(cl.device_info, param))
except (cl.LogicError, AttributeError):
continue
print ' ',
if param == 'TYPE':
value = '%s (%s)' % (
value,
# ... rest of the code ...
|
a02739cc7b1384e51f44d86a05af5a9845469fca
|
pygame/__init__.py
|
pygame/__init__.py
|
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
# map our exceptions on pygame's default
error = SDLError
|
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
from pygame.mask import Mask
# map our exceptions on pygame's default
error = SDLError
|
Add Mask to toplevel pygame namespace
|
Add Mask to toplevel pygame namespace
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi
|
python
|
## Code Before:
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
# map our exceptions on pygame's default
error = SDLError
## Instruction:
Add Mask to toplevel pygame namespace
## Code After:
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
from pygame.mask import Mask
# map our exceptions on pygame's default
error = SDLError
|
...
register_quit
)
from pygame._error import get_error, set_error, SDLError
from pygame.mask import Mask
# map our exceptions on pygame's default
error = SDLError
...
|
a1853cdd10905ca85c07cb25822b5fc8f68cdee8
|
test/sqlancer/TestMain.java
|
test/sqlancer/TestMain.java
|
package sqlancer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
public class TestMain {
private static final String NUM_QUERIES = "1000";
private static final String SECONDS = "300";
@Test
public void testDuckDB() {
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-queries", NUM_QUERIES,
"duckdb", "--oracle", "NoREC" }));
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-queries", NUM_QUERIES,
"duckdb", "--oracle", "QUERY_PARTITIONING" }));
}
}
|
package sqlancer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
public class TestMain {
private static final String NUM_QUERIES = "1000";
private static final String SECONDS = "300";
@Test
public void testDuckDB() {
// run with one thread due to multithreading issues, see https://github.com/sqlancer/sqlancer/pull/45
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-threads", "1",
"--num-queries", NUM_QUERIES, "duckdb", "--oracle", "NoREC" }));
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-threads", "1",
"--num-queries", NUM_QUERIES, "duckdb", "--oracle", "QUERY_PARTITIONING" }));
}
}
|
Test DuckDB only with one thread due to multithreading issues
|
[DuckDB] Test DuckDB only with one thread due to multithreading issues
|
Java
|
mit
|
sqlancer/sqlancer,sqlancer/sqlancer
|
java
|
## Code Before:
package sqlancer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
public class TestMain {
private static final String NUM_QUERIES = "1000";
private static final String SECONDS = "300";
@Test
public void testDuckDB() {
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-queries", NUM_QUERIES,
"duckdb", "--oracle", "NoREC" }));
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-queries", NUM_QUERIES,
"duckdb", "--oracle", "QUERY_PARTITIONING" }));
}
}
## Instruction:
[DuckDB] Test DuckDB only with one thread due to multithreading issues
## Code After:
package sqlancer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
public class TestMain {
private static final String NUM_QUERIES = "1000";
private static final String SECONDS = "300";
@Test
public void testDuckDB() {
// run with one thread due to multithreading issues, see https://github.com/sqlancer/sqlancer/pull/45
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-threads", "1",
"--num-queries", NUM_QUERIES, "duckdb", "--oracle", "NoREC" }));
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-threads", "1",
"--num-queries", NUM_QUERIES, "duckdb", "--oracle", "QUERY_PARTITIONING" }));
}
}
|
# ... existing code ...
@Test
public void testDuckDB() {
// run with one thread due to multithreading issues, see https://github.com/sqlancer/sqlancer/pull/45
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-threads", "1",
"--num-queries", NUM_QUERIES, "duckdb", "--oracle", "NoREC" }));
assertEquals(0, Main.executeMain(new String[] { "--timeout-seconds", SECONDS, "--num-threads", "1",
"--num-queries", NUM_QUERIES, "duckdb", "--oracle", "QUERY_PARTITIONING" }));
}
}
# ... rest of the code ...
|
a3f01fd10a6c2a1c1f73aec36c4f75ebd0e1a108
|
src/main/java/com/easternedgerobotics/rov/fx/MainView.java
|
src/main/java/com/easternedgerobotics/rov/fx/MainView.java
|
package com.easternedgerobotics.rov.fx;
import javafx.geometry.Insets;
import javafx.scene.Parent;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.BorderPane;
import javax.inject.Inject;
public class MainView implements View {
static final int SPACING = 10;
final BorderPane box = new BorderPane();
final ToggleButton button = new ToggleButton("Start");
@Inject
public MainView() {
button.setMaxWidth(Double.MAX_VALUE);
button.setMaxHeight(Double.MAX_VALUE);
box.setPadding(new Insets(SPACING));
box.setCenter(button);
}
@Override
public final Parent getParent() {
return box;
}
}
|
package com.easternedgerobotics.rov.fx;
import javafx.geometry.Insets;
import javafx.scene.Parent;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.BorderPane;
import javax.inject.Inject;
public class MainView implements View {
static final int SPACING = 10;
static final int TOGGLE_BOX_W = 256;
static final int TOGGLE_BOX_H = 64;
final BorderPane box = new BorderPane();
final ToggleButton button = new ToggleButton("Start");
@Inject
public MainView() {
button.setMaxWidth(Double.MAX_VALUE);
button.setMaxHeight(Double.MAX_VALUE);
button.setPrefSize(TOGGLE_BOX_W, TOGGLE_BOX_H);
box.setPadding(new Insets(SPACING));
box.setCenter(button);
}
@Override
public final Parent getParent() {
return box;
}
}
|
Set initial size for main toggle box
|
Set initial size for main toggle box
|
Java
|
mit
|
EasternEdgeRobotics/2016,EasternEdgeRobotics/2016,EasternEdgeRobotics/2016,EasternEdgeRobotics/2016
|
java
|
## Code Before:
package com.easternedgerobotics.rov.fx;
import javafx.geometry.Insets;
import javafx.scene.Parent;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.BorderPane;
import javax.inject.Inject;
public class MainView implements View {
static final int SPACING = 10;
final BorderPane box = new BorderPane();
final ToggleButton button = new ToggleButton("Start");
@Inject
public MainView() {
button.setMaxWidth(Double.MAX_VALUE);
button.setMaxHeight(Double.MAX_VALUE);
box.setPadding(new Insets(SPACING));
box.setCenter(button);
}
@Override
public final Parent getParent() {
return box;
}
}
## Instruction:
Set initial size for main toggle box
## Code After:
package com.easternedgerobotics.rov.fx;
import javafx.geometry.Insets;
import javafx.scene.Parent;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.BorderPane;
import javax.inject.Inject;
public class MainView implements View {
static final int SPACING = 10;
static final int TOGGLE_BOX_W = 256;
static final int TOGGLE_BOX_H = 64;
final BorderPane box = new BorderPane();
final ToggleButton button = new ToggleButton("Start");
@Inject
public MainView() {
button.setMaxWidth(Double.MAX_VALUE);
button.setMaxHeight(Double.MAX_VALUE);
button.setPrefSize(TOGGLE_BOX_W, TOGGLE_BOX_H);
box.setPadding(new Insets(SPACING));
box.setCenter(button);
}
@Override
public final Parent getParent() {
return box;
}
}
|
// ... existing code ...
public class MainView implements View {
static final int SPACING = 10;
static final int TOGGLE_BOX_W = 256;
static final int TOGGLE_BOX_H = 64;
final BorderPane box = new BorderPane();
final ToggleButton button = new ToggleButton("Start");
// ... modified code ...
public MainView() {
button.setMaxWidth(Double.MAX_VALUE);
button.setMaxHeight(Double.MAX_VALUE);
button.setPrefSize(TOGGLE_BOX_W, TOGGLE_BOX_H);
box.setPadding(new Insets(SPACING));
box.setCenter(button);
}
// ... rest of the code ...
|
6f745ae05a22031a36cb5cedc6b627cbf7ba6512
|
import_goodline_iptv.py
|
import_goodline_iptv.py
|
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
|
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
|
Add environment variable to allow setting the output directory
|
Add environment variable to allow setting the output directory
|
Python
|
mit
|
nsadovskiy/goodline_tv
|
python
|
## Code Before:
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
## Instruction:
Add environment variable to allow setting the output directory
## Code After:
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
|
// ... existing code ...
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
// ... rest of the code ...
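For illustration, a minimal sketch of how the new env_var binding behaves, assuming configargparse's documented environment-variable handling; the OUTDIR value is hypothetical:
import os
import configargparse
os.environ['OUTDIR'] = '/tmp/iptv'                       # hypothetical value set by the caller
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, env_var='OUTDIR')
args = parser.parse_args([])                             # no -o/--out-dir flag on the command line
print(args.out_dir)                                      # expected to print /tmp/iptv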
|
f55af10f1767d39fdba65fb4c17beee526f96748
|
lib/__init__.py
|
lib/__init__.py
|
"""retriever.lib contains the core EcoData Retriever modules."""
|
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
|
Check for and use system proxies for downloading files
|
Check for and use system proxies for downloading files
In some cases when the user is using a proxy, urllib.urlopen() will fail to open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them.
|
Python
|
mit
|
embaldridge/retriever,davharris/retriever,davharris/retriever,davharris/retriever,embaldridge/retriever,goelakash/retriever,henrykironde/deletedret,goelakash/retriever,henrykironde/deletedret,embaldridge/retriever
|
python
|
## Code Before:
"""retriever.lib contains the core EcoData Retriever modules."""
## Instruction:
Check for and use system proxies for downloading files
In some cases when the user is using a proxy, urllib.urlopen() will fail to open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them.
## Code After:
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
|
# ... existing code ...
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
# ... rest of the code ...
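A quick sketch of the behaviour the set_proxy() helper above adds, using a hypothetical proxy URL; only one variable is set by the user, and the helper mirrors it to every spelling urllib might look for:
import os
os.environ['http_proxy'] = 'http://proxy.example.com:8080'   # hypothetical proxy, the only one defined
set_proxy()                                                  # the function defined above
assert os.environ['HTTPS_PROXY'] == 'http://proxy.example.com:8080'
assert os.environ['ftp_proxy'] == 'http://proxy.example.com:8080'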
|
2c1673930a40fc94c3d7c7d4f764ea423b638d26
|
mccurse/cli.py
|
mccurse/cli.py
|
"""Package command line interface."""
import click
from .curse import Game, Mod
# Static data
MINECRAFT = {'id': 432, 'name': 'Minecraft'}
@click.group()
def cli():
"""Minecraft Curse CLI client."""
@cli.command()
@click.option(
'--refresh', is_flag=True, default=False,
help='Force refreshing of search data.'
)
@click.argument('text', nargs=-1, type=str)
def search(refresh, text):
"""Search for TEXT in mods on CurseForge."""
mc = Game(**MINECRAFT)
text = ' '.join(text)
refresh = refresh or not mc.have_fresh_data()
if refresh:
click.echo('Refreshing search data, please wait…', err=True)
mc.refresh_data()
mod_fmt = '{0.name}: {0.summary}'
for mod in Mod.search(mc.database.session(), text):
click.echo(mod_fmt.format(mod))
# If run as a package, run whole cli
cli()
|
"""Package command line interface."""
import click
from .curse import Game, Mod
# Static data
MINECRAFT = {'id': 432, 'name': 'Minecraft'}
@click.group()
def cli():
"""Minecraft Curse CLI client."""
@cli.command()
@click.option(
'--refresh', is_flag=True, default=False,
help='Force refreshing of search data.'
)
@click.argument('text', nargs=-1, type=str)
def search(refresh, text):
"""Search for TEXT in mods on CurseForge."""
if not text:
raise SystemExit('No text to search for!')
mc = Game(**MINECRAFT)
text = ' '.join(text)
refresh = refresh or not mc.have_fresh_data()
if refresh:
click.echo('Refreshing search data, please wait…', err=True)
mc.refresh_data()
mod_fmt = '{0.name}: {0.summary}'
for mod in Mod.search(mc.database.session(), text):
click.echo(mod_fmt.format(mod))
# If run as a package, run whole cli
cli()
|
Raise error when there is no term to search for
|
Raise error when there is no term to search for
|
Python
|
agpl-3.0
|
khardix/mccurse
|
python
|
## Code Before:
"""Package command line interface."""
import click
from .curse import Game, Mod
# Static data
MINECRAFT = {'id': 432, 'name': 'Minecraft'}
@click.group()
def cli():
"""Minecraft Curse CLI client."""
@cli.command()
@click.option(
'--refresh', is_flag=True, default=False,
help='Force refreshing of search data.'
)
@click.argument('text', nargs=-1, type=str)
def search(refresh, text):
"""Search for TEXT in mods on CurseForge."""
mc = Game(**MINECRAFT)
text = ' '.join(text)
refresh = refresh or not mc.have_fresh_data()
if refresh:
click.echo('Refreshing search data, please wait…', err=True)
mc.refresh_data()
mod_fmt = '{0.name}: {0.summary}'
for mod in Mod.search(mc.database.session(), text):
click.echo(mod_fmt.format(mod))
# If run as a package, run whole cli
cli()
## Instruction:
Raise error when there is no term to search for
## Code After:
"""Package command line interface."""
import click
from .curse import Game, Mod
# Static data
MINECRAFT = {'id': 432, 'name': 'Minecraft'}
@click.group()
def cli():
"""Minecraft Curse CLI client."""
@cli.command()
@click.option(
'--refresh', is_flag=True, default=False,
help='Force refreshing of search data.'
)
@click.argument('text', nargs=-1, type=str)
def search(refresh, text):
"""Search for TEXT in mods on CurseForge."""
if not text:
raise SystemExit('No text to search for!')
mc = Game(**MINECRAFT)
text = ' '.join(text)
refresh = refresh or not mc.have_fresh_data()
if refresh:
click.echo('Refreshing search data, please wait…', err=True)
mc.refresh_data()
mod_fmt = '{0.name}: {0.summary}'
for mod in Mod.search(mc.database.session(), text):
click.echo(mod_fmt.format(mod))
# If run as a package, run whole cli
cli()
|
// ... existing code ...
def search(refresh, text):
"""Search for TEXT in mods on CurseForge."""
if not text:
raise SystemExit('No text to search for!')
mc = Game(**MINECRAFT)
text = ' '.join(text)
// ... rest of the code ...
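A rough pytest-style check of the new guard, assuming click's CliRunner turns a string-valued SystemExit into a non-zero exit code; the import path is inferred from the package layout above:
from click.testing import CliRunner
from mccurse.cli import search            # assumed import path
runner = CliRunner()
result = runner.invoke(search, [])        # invoked with no search terms
assert result.exit_code != 0              # the SystemExit raised above aborts the command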
|
7ec5786efbdb20b9cbcdf0b4f1b583a7e07e0644
|
comrade/core/tests.py
|
comrade/core/tests.py
|
from nose.tools import ok_, eq_
import unittest
import models
class SimpleModel(models.ComradeBaseModel):
def __unicode__(self):
return u'This is a unicode string'
class TestBaseModel(unittest.TestCase):
def setUp(self):
super(TestBaseModel, self).setUp()
self.obj = SimpleModel()
def test_repr(self):
ok_(isinstance(self.obj.__repr__(), str))
def test_str(self):
ok_(isinstance(self.obj.__str__(), str))
def test_unicode(self):
ok_(isinstance(self.obj.__unicode__(), unicode))
|
from nose.tools import ok_, eq_
import unittest
import models
def check_direct_to_template(prefix, pattern):
from django import test
from django.core.urlresolvers import reverse
client = test.Client()
response = client.get(reverse(prefix + ':' + pattern.name))
template_name = pattern.default_args['template']
template_names = [t.name for t in test.testcases.to_list(response.template)]
ok_(template_names)
ok_(template_name in template_names,
"Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s" %
(template_name, u', '.join(template_names)))
class SimpleModel(models.ComradeBaseModel):
def __unicode__(self):
return u'This is a unicode string'
class TestBaseModel(unittest.TestCase):
def setUp(self):
super(TestBaseModel, self).setUp()
self.obj = SimpleModel()
def test_repr(self):
ok_(isinstance(self.obj.__repr__(), str))
def test_str(self):
ok_(isinstance(self.obj.__str__(), str))
def test_unicode(self):
ok_(isinstance(self.obj.__unicode__(), unicode))
|
Add test helper method for checking direct_to_template views.
|
Add test helper method for checking direct_to_template views.
|
Python
|
mit
|
bueda/django-comrade
|
python
|
## Code Before:
from nose.tools import ok_, eq_
import unittest
import models
class SimpleModel(models.ComradeBaseModel):
def __unicode__(self):
return u'This is a unicode string'
class TestBaseModel(unittest.TestCase):
def setUp(self):
super(TestBaseModel, self).setUp()
self.obj = SimpleModel()
def test_repr(self):
ok_(isinstance(self.obj.__repr__(), str))
def test_str(self):
ok_(isinstance(self.obj.__str__(), str))
def test_unicode(self):
ok_(isinstance(self.obj.__unicode__(), unicode))
## Instruction:
Add test helper method for checking direct_to_template views.
## Code After:
from nose.tools import ok_, eq_
import unittest
import models
def check_direct_to_template(prefix, pattern):
from django import test
from django.core.urlresolvers import reverse
client = test.Client()
response = client.get(reverse(prefix + ':' + pattern.name))
template_name = pattern.default_args['template']
template_names = [t.name for t in test.testcases.to_list(response.template)]
ok_(template_names)
ok_(template_name in template_names,
"Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s" %
(template_name, u', '.join(template_names)))
class SimpleModel(models.ComradeBaseModel):
def __unicode__(self):
return u'This is a unicode string'
class TestBaseModel(unittest.TestCase):
def setUp(self):
super(TestBaseModel, self).setUp()
self.obj = SimpleModel()
def test_repr(self):
ok_(isinstance(self.obj.__repr__(), str))
def test_str(self):
ok_(isinstance(self.obj.__str__(), str))
def test_unicode(self):
ok_(isinstance(self.obj.__unicode__(), unicode))
|
// ... existing code ...
import unittest
import models
def check_direct_to_template(prefix, pattern):
from django import test
from django.core.urlresolvers import reverse
client = test.Client()
response = client.get(reverse(prefix + ':' + pattern.name))
template_name = pattern.default_args['template']
template_names = [t.name for t in test.testcases.to_list(response.template)]
ok_(template_names)
ok_(template_name in template_names,
"Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s" %
(template_name, u', '.join(template_names)))
class SimpleModel(models.ComradeBaseModel):
def __unicode__(self):
// ... modified code ...
def test_unicode(self):
ok_(isinstance(self.obj.__unicode__(), unicode))
// ... rest of the code ...
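A hypothetical way the new helper might be driven from a nose test generator; the urlconf module name and namespace prefix are assumptions, not part of the change above:
from myapp import urls                                    # assumed urlconf module
def test_direct_to_template_views():
    for pattern in urls.urlpatterns:
        if 'template' in getattr(pattern, 'default_args', {}):
            yield check_direct_to_template, 'myapp', pattern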
|
9be04ea1030b423b7414dbd386ae2db2f4761f07
|
third_party/bunch/bunch/python3_compat.py
|
third_party/bunch/bunch/python3_compat.py
|
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
Fix Python 3 version detection in bunch
|
Fix Python 3 version detection in bunch
|
Python
|
apache-2.0
|
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
|
python
|
## Code Before:
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
## Instruction:
Fix Python 3 version detection in bunch
## Code After:
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
...
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
...
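The reason the old check never fired: platform.version() describes the operating system build, not the interpreter. A tiny comparison sketch (printed values are illustrative):
import platform, sys
print(platform.version())   # an OS/kernel build string, varies by platform
print(sys.version[0])       # first character of the Python version string, e.g. '3'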
|
d96b07d529ea7ced5cbe5f5accaa84485e14395a
|
Lib/test/test_tk.py
|
Lib/test/test_tk.py
|
from test import support
# Skip test if _tkinter wasn't built.
support.import_module('_tkinter')
import tkinter
from tkinter.test import runtktests
import unittest
import tkinter
try:
tkinter.Button()
except tkinter.TclError as msg:
# assuming tk is not available
raise unittest.SkipTest("tk not available: %s" % msg)
def test_main(enable_gui=False):
if enable_gui:
if support.use_resources is None:
support.use_resources = ['gui']
elif 'gui' not in support.use_resources:
support.use_resources.append('gui')
support.run_unittest(
*runtktests.get_tests(text=False, packages=['test_tkinter']))
if __name__ == '__main__':
test_main(enable_gui=True)
|
from test import support
# Skip test if _tkinter wasn't built.
support.import_module('_tkinter')
import tkinter
from tkinter.test import runtktests
import unittest
try:
tkinter.Button()
except tkinter.TclError as msg:
# assuming tk is not available
raise unittest.SkipTest("tk not available: %s" % msg)
def test_main(enable_gui=False):
if enable_gui:
if support.use_resources is None:
support.use_resources = ['gui']
elif 'gui' not in support.use_resources:
support.use_resources.append('gui')
support.run_unittest(
*runtktests.get_tests(text=False, packages=['test_tkinter']))
if __name__ == '__main__':
test_main(enable_gui=True)
|
Remove redundant import of tkinter.
|
Remove redundant import of tkinter.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
python
|
## Code Before:
from test import support
# Skip test if _tkinter wasn't built.
support.import_module('_tkinter')
import tkinter
from tkinter.test import runtktests
import unittest
import tkinter
try:
tkinter.Button()
except tkinter.TclError as msg:
# assuming tk is not available
raise unittest.SkipTest("tk not available: %s" % msg)
def test_main(enable_gui=False):
if enable_gui:
if support.use_resources is None:
support.use_resources = ['gui']
elif 'gui' not in support.use_resources:
support.use_resources.append('gui')
support.run_unittest(
*runtktests.get_tests(text=False, packages=['test_tkinter']))
if __name__ == '__main__':
test_main(enable_gui=True)
## Instruction:
Remove redundant import of tkinter.
## Code After:
from test import support
# Skip test if _tkinter wasn't built.
support.import_module('_tkinter')
import tkinter
from tkinter.test import runtktests
import unittest
try:
tkinter.Button()
except tkinter.TclError as msg:
# assuming tk is not available
raise unittest.SkipTest("tk not available: %s" % msg)
def test_main(enable_gui=False):
if enable_gui:
if support.use_resources is None:
support.use_resources = ['gui']
elif 'gui' not in support.use_resources:
support.use_resources.append('gui')
support.run_unittest(
*runtktests.get_tests(text=False, packages=['test_tkinter']))
if __name__ == '__main__':
test_main(enable_gui=True)
|
// ... existing code ...
import tkinter
from tkinter.test import runtktests
import unittest
try:
tkinter.Button()
// ... rest of the code ...
|
d0172776eac403c0f4df9da1bc99f00321449266
|
src/timestamps.h
|
src/timestamps.h
|
static const unsigned int ENTROPY_SWITCH_TIME = 1362791041; // Sat, 09 Mar 2013 01:04:01 GMT
static const unsigned int STAKE_SWITCH_TIME = 1371686400; // Thu, 20 Jun 2013 00:00:00 GMT
static const unsigned int TARGETS_SWITCH_TIME = 1374278400; // Sat, 20 Jul 2013 00:00:00 GMT
static const unsigned int CHAINCHECKS_SWITCH_TIME = 1379635200; // Fri, 20 Sep 2013 00:00:00 GMT
static const unsigned int STAKECURVE_SWITCH_TIME = 1382227200; // Sun, 20 Oct 2013 00:00:00 GMT
static const unsigned int FEE_SWITCH_TIME = 1405814400; // Sun, 20 Jul 2014 00:00:00 GMT
static const unsigned int VALIDATION_SWITCH_TIME = 1408492800; // Wed, 20 Aug 2014 00:00:00 GMT
#endif
|
static const unsigned int ENTROPY_SWITCH_TIME = 1362791041; // Sat, 09 Mar 2013 01:04:01 GMT
static const unsigned int STAKE_SWITCH_TIME = 1371686400; // Thu, 20 Jun 2013 00:00:00 GMT
static const unsigned int TARGETS_SWITCH_TIME = 1374278400; // Sat, 20 Jul 2013 00:00:00 GMT
static const unsigned int CHAINCHECKS_SWITCH_TIME = 1379635200; // Fri, 20 Sep 2013 00:00:00 GMT
static const unsigned int STAKECURVE_SWITCH_TIME = 1382227200; // Sun, 20 Oct 2013 00:00:00 GMT
static const unsigned int FEE_SWITCH_TIME = 1405814400; // Sun, 20 Jul 2014 00:00:00 GMT
static const unsigned int VALIDATION_SWITCH_TIME = 1408492800; // Wed, 20 Aug 2014 00:00:00 GMT
static const unsigned int SIG_SWITCH_TIME = 1411171200; // Sat, 20 Sep 2014 00:00:00 GMT
#endif
|
Add SIG_SWITCH_TIME at Sat, 20 Sep 2014
|
Add SIG_SWITCH_TIME at Sat, 20 Sep 2014
|
C
|
mit
|
novacoin-project/novacoin,stamhe/novacoin,stamhe/novacoin,stamhe/novacoin,penek/novacoin,gades/novacoin,novacoin-project/novacoin,elambert2014/novacoin,fsb4000/novacoin,penek/novacoin,fsb4000/novacoin,fsb4000/novacoin,stamhe/novacoin,byncoin-project/byncoin,byncoin-project/byncoin,elambert2014/novacoin,byncoin-project/byncoin,elambert2014/novacoin,elambert2014/novacoin,byncoin-project/byncoin,fsb4000/novacoin,penek/novacoin,gades/novacoin,elambert2014/novacoin,fsb4000/novacoin,gades/novacoin,gades/novacoin,penek/novacoin,novacoin-project/novacoin,byncoin-project/byncoin,novacoin-project/novacoin,stamhe/novacoin,penek/novacoin,gades/novacoin,gades/novacoin,novacoin-project/novacoin,novacoin-project/novacoin,fsb4000/novacoin,elambert2014/novacoin,byncoin-project/byncoin,penek/novacoin
|
c
|
## Code Before:
static const unsigned int ENTROPY_SWITCH_TIME = 1362791041; // Sat, 09 Mar 2013 01:04:01 GMT
static const unsigned int STAKE_SWITCH_TIME = 1371686400; // Thu, 20 Jun 2013 00:00:00 GMT
static const unsigned int TARGETS_SWITCH_TIME = 1374278400; // Sat, 20 Jul 2013 00:00:00 GMT
static const unsigned int CHAINCHECKS_SWITCH_TIME = 1379635200; // Fri, 20 Sep 2013 00:00:00 GMT
static const unsigned int STAKECURVE_SWITCH_TIME = 1382227200; // Sun, 20 Oct 2013 00:00:00 GMT
static const unsigned int FEE_SWITCH_TIME = 1405814400; // Sun, 20 Jul 2014 00:00:00 GMT
static const unsigned int VALIDATION_SWITCH_TIME = 1408492800; // Wed, 20 Aug 2014 00:00:00 GMT
#endif
## Instruction:
Add SIG_SWITCH_TIME at Sat, 20 Sep 2014
## Code After:
static const unsigned int ENTROPY_SWITCH_TIME = 1362791041; // Sat, 09 Mar 2013 01:04:01 GMT
static const unsigned int STAKE_SWITCH_TIME = 1371686400; // Thu, 20 Jun 2013 00:00:00 GMT
static const unsigned int TARGETS_SWITCH_TIME = 1374278400; // Sat, 20 Jul 2013 00:00:00 GMT
static const unsigned int CHAINCHECKS_SWITCH_TIME = 1379635200; // Fri, 20 Sep 2013 00:00:00 GMT
static const unsigned int STAKECURVE_SWITCH_TIME = 1382227200; // Sun, 20 Oct 2013 00:00:00 GMT
static const unsigned int FEE_SWITCH_TIME = 1405814400; // Sun, 20 Jul 2014 00:00:00 GMT
static const unsigned int VALIDATION_SWITCH_TIME = 1408492800; // Wed, 20 Aug 2014 00:00:00 GMT
static const unsigned int SIG_SWITCH_TIME = 1411171200; // Sat, 20 Sep 2014 00:00:00 GMT
#endif
|
...
static const unsigned int FEE_SWITCH_TIME = 1405814400; // Sun, 20 Jul 2014 00:00:00 GMT
static const unsigned int VALIDATION_SWITCH_TIME = 1408492800; // Wed, 20 Aug 2014 00:00:00 GMT
static const unsigned int SIG_SWITCH_TIME = 1411171200; // Sat, 20 Sep 2014 00:00:00 GMT
#endif
...
|
73669e716e1125e23e7b21c6058baf811dc213c9
|
CriminalIntent/app/src/main/java/com/bignerdranch/android/criminalintent/Crime.java
|
CriminalIntent/app/src/main/java/com/bignerdranch/android/criminalintent/Crime.java
|
package com.bignerdranch.android.criminalintent;
import java.util.Date;
import java.util.UUID;
public class Crime {
private UUID mId;
private String mTitle;
private Date mDate;
private boolean mSolved;
public Crime() {
this(UUID.randomUUID());
}
public Crime(UUID id) {
mId = id;
mDate = new Date();
}
// Accesors
public UUID getId() {
return mId;
}
public String getTitle() {
return mTitle;
}
public void setTitle(String title) {
mTitle = title;
}
public boolean isSolved() {
return mSolved;
}
public void setSolved(boolean solved) {
mSolved = solved;
}
public Date getDate() {
return mDate;
}
public void setDate(Date date) {
mDate = date;
}
}
|
package com.bignerdranch.android.criminalintent;
import java.util.Date;
import java.util.UUID;
public class Crime {
private UUID mId;
private String mTitle;
private Date mDate;
private boolean mSolved;
private String mSuspect;
public Crime() {
this(UUID.randomUUID());
}
public Crime(UUID id) {
mId = id;
mDate = new Date();
}
// Accesors
public UUID getId() {
return mId;
}
public String getTitle() {
return mTitle;
}
public void setTitle(String title) {
mTitle = title;
}
public boolean isSolved() {
return mSolved;
}
public void setSolved(boolean solved) {
mSolved = solved;
}
public Date getDate() {
return mDate;
}
public void setDate(Date date) {
mDate = date;
}
public String getSuspect() {
return mSuspect;
}
public void setSuspect(String suspect) {
mSuspect = suspect;
}
}
|
Update the model to include the suspect
|
Update the model to include the suspect
|
Java
|
apache-2.0
|
dcordero/BigNerdRanch-Android
|
java
|
## Code Before:
package com.bignerdranch.android.criminalintent;
import java.util.Date;
import java.util.UUID;
public class Crime {
private UUID mId;
private String mTitle;
private Date mDate;
private boolean mSolved;
public Crime() {
this(UUID.randomUUID());
}
public Crime(UUID id) {
mId = id;
mDate = new Date();
}
// Accesors
public UUID getId() {
return mId;
}
public String getTitle() {
return mTitle;
}
public void setTitle(String title) {
mTitle = title;
}
public boolean isSolved() {
return mSolved;
}
public void setSolved(boolean solved) {
mSolved = solved;
}
public Date getDate() {
return mDate;
}
public void setDate(Date date) {
mDate = date;
}
}
## Instruction:
Update the model to include the suspect
## Code After:
package com.bignerdranch.android.criminalintent;
import java.util.Date;
import java.util.UUID;
public class Crime {
private UUID mId;
private String mTitle;
private Date mDate;
private boolean mSolved;
private String mSuspect;
public Crime() {
this(UUID.randomUUID());
}
public Crime(UUID id) {
mId = id;
mDate = new Date();
}
// Accesors
public UUID getId() {
return mId;
}
public String getTitle() {
return mTitle;
}
public void setTitle(String title) {
mTitle = title;
}
public boolean isSolved() {
return mSolved;
}
public void setSolved(boolean solved) {
mSolved = solved;
}
public Date getDate() {
return mDate;
}
public void setDate(Date date) {
mDate = date;
}
public String getSuspect() {
return mSuspect;
}
public void setSuspect(String suspect) {
mSuspect = suspect;
}
}
|
// ... existing code ...
private String mTitle;
private Date mDate;
private boolean mSolved;
private String mSuspect;
public Crime() {
this(UUID.randomUUID());
// ... modified code ...
public void setDate(Date date) {
mDate = date;
}
public String getSuspect() {
return mSuspect;
}
public void setSuspect(String suspect) {
mSuspect = suspect;
}
}
// ... rest of the code ...
|
febb5d9890b074985ca99f05c5b8ffc2572d2652
|
apps/posters/forms.py
|
apps/posters/forms.py
|
from django import forms
from apps.posters.models import Poster
class AddPosterForm(forms.ModelForm):
when = forms.CharField(label=u"Event start", widget=forms.TextInput(attrs={'type': 'datetime-local'}))
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_from', 'display_to', 'comments']
class EditPosterForm(forms.ModelForm):
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_to', 'display_from', 'comments', 'finished']
|
from django import forms
from apps.posters.models import Poster
class AddPosterForm(forms.ModelForm):
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_from', 'display_to', 'comments']
class EditPosterForm(forms.ModelForm):
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_to', 'display_from', 'comments', 'finished']
|
Remove event start field from form
|
Remove event start field from form
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
python
|
## Code Before:
from django import forms
from apps.posters.models import Poster
class AddPosterForm(forms.ModelForm):
when = forms.CharField(label=u"Event start", widget=forms.TextInput(attrs={'type': 'datetime-local'}))
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_from', 'display_to', 'comments']
class EditPosterForm(forms.ModelForm):
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_to', 'display_from', 'comments', 'finished']
## Instruction:
Remove event start field from form
## Code After:
from django import forms
from apps.posters.models import Poster
class AddPosterForm(forms.ModelForm):
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_from', 'display_to', 'comments']
class EditPosterForm(forms.ModelForm):
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_to', 'display_from', 'comments', 'finished']
|
...
class AddPosterForm(forms.ModelForm):
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
...
|
b140af3ee2917bff3aa427c4e686f465f230aa50
|
platform_test/src/test/java/org/jimmutable/platform_test/AppTest.java
|
platform_test/src/test/java/org/jimmutable/platform_test/AppTest.java
|
package org.jimmutable.platform_test;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
public class AppTest
extends TestCase
{
/**
* Create the test case
*
* @param testName name of the test case
*/
public AppTest( String testName )
{
super( testName );
}
/**
* @return the suite of tests being tested
*/
public static Test suite()
{
return new TestSuite( AppTest.class );
}
/**
* Rigourous Test :-)
*/
public void testApp()
{
try{
App.main(null);
assertTrue( true );
} catch (Exception e) {
assert(false);
}
}
}
|
package org.jimmutable.platform_test;
import static org.junit.Assert.*;
import org.junit.Test;
public class AppTest extends App
{
// @Test
public void testMain()
{
try{
App.main(null);
assertTrue( true );
} catch (Exception e) {
e.printStackTrace();
assert(false);
}
}
}
|
Update to platform_test test class
|
Update to platform_test test class
|
Java
|
bsd-3-clause
|
jimmutable/core,jimmutable/core
|
java
|
## Code Before:
package org.jimmutable.platform_test;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
public class AppTest
extends TestCase
{
/**
* Create the test case
*
* @param testName name of the test case
*/
public AppTest( String testName )
{
super( testName );
}
/**
* @return the suite of tests being tested
*/
public static Test suite()
{
return new TestSuite( AppTest.class );
}
/**
* Rigourous Test :-)
*/
public void testApp()
{
try{
App.main(null);
assertTrue( true );
} catch (Exception e) {
assert(false);
}
}
}
## Instruction:
Update to platform_test test class
## Code After:
package org.jimmutable.platform_test;
import static org.junit.Assert.*;
import org.junit.Test;
public class AppTest extends App
{
// @Test
public void testMain()
{
try{
App.main(null);
assertTrue( true );
} catch (Exception e) {
e.printStackTrace();
assert(false);
}
}
}
|
// ... existing code ...
package org.jimmutable.platform_test;
import static org.junit.Assert.*;
import org.junit.Test;
public class AppTest extends App
{
// @Test
public void testMain()
{
try{
App.main(null);
assertTrue( true );
} catch (Exception e) {
e.printStackTrace();
assert(false);
}
}
}
// ... rest of the code ...
|
3055fa16010a1b855142c2e5b866d76daee17c8f
|
markdown_gen/test/attributes_test.py
|
markdown_gen/test/attributes_test.py
|
import unittest
import markdown_gen.MardownGen as md
class AttributesTests(unittest.TestCase):
def test_italic(self):
expected = "*italic text*"
self.assertEqual(expected, md.gen_italic("italic text"))
expected = "_italic text alternative_"
self.assertEqual(expected, md.gen_italic("italic text alternative", True))
def test_bold(self):
expected = "**bold text**"
self.assertEqual(expected, md.gen_bold("bold text"))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_monspace(self):
expected = "`monospace`"
self.assertEqual(expected, md.gen_monospace("monospace"))
def test_strikethrough(self):
expected = "~~strikethrough~~"
self.assertEqual(expected, md.gen_strikethrough("strikethrough"))
if __name__ == '__main__':
unittest.main()
|
import unittest
import markdown_gen.MardownGen as md
class AttributesTests(unittest.TestCase):
def test_italic(self):
expected = "*italic text*"
self.assertEqual(expected, md.gen_italic("italic text"))
expected = "_italic text alternative_"
self.assertEqual(expected, md.gen_italic("italic text alternative", True))
def test_bold(self):
expected = "**bold text**"
self.assertEqual(expected, md.gen_bold("bold text"))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_bold_and_italic(self):
expected = "***bold and italic text***"
self.assertEqual(expected, md.gen_italic(md.gen_bold("bold text")))
self.assertEqual(expected, md.gen_bold(md.gen_italic("bold text")))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_monspace(self):
expected = "`monospace`"
self.assertEqual(expected, md.gen_monospace("monospace"))
def test_strikethrough(self):
expected = "~~strikethrough~~"
self.assertEqual(expected, md.gen_strikethrough("strikethrough"))
if __name__ == '__main__':
unittest.main()
|
Add test for bold and italic text
|
Add test for bold and italic text
|
Python
|
epl-1.0
|
LukasWoodtli/PyMarkdownGen
|
python
|
## Code Before:
import unittest
import markdown_gen.MardownGen as md
class AttributesTests(unittest.TestCase):
def test_italic(self):
expected = "*italic text*"
self.assertEqual(expected, md.gen_italic("italic text"))
expected = "_italic text alternative_"
self.assertEqual(expected, md.gen_italic("italic text alternative", True))
def test_bold(self):
expected = "**bold text**"
self.assertEqual(expected, md.gen_bold("bold text"))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_monspace(self):
expected = "`monospace`"
self.assertEqual(expected, md.gen_monospace("monospace"))
def test_strikethrough(self):
expected = "~~strikethrough~~"
self.assertEqual(expected, md.gen_strikethrough("strikethrough"))
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for bold and italic text
## Code After:
import unittest
import markdown_gen.MardownGen as md
class AttributesTests(unittest.TestCase):
def test_italic(self):
expected = "*italic text*"
self.assertEqual(expected, md.gen_italic("italic text"))
expected = "_italic text alternative_"
self.assertEqual(expected, md.gen_italic("italic text alternative", True))
def test_bold(self):
expected = "**bold text**"
self.assertEqual(expected, md.gen_bold("bold text"))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_bold_and_italic(self):
expected = "***bold and italic text***"
self.assertEqual(expected, md.gen_italic(md.gen_bold("bold text")))
self.assertEqual(expected, md.gen_bold(md.gen_italic("bold text")))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_monspace(self):
expected = "`monospace`"
self.assertEqual(expected, md.gen_monospace("monospace"))
def test_strikethrough(self):
expected = "~~strikethrough~~"
self.assertEqual(expected, md.gen_strikethrough("strikethrough"))
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_bold_and_italic(self):
expected = "***bold and italic text***"
self.assertEqual(expected, md.gen_italic(md.gen_bold("bold text")))
self.assertEqual(expected, md.gen_bold(md.gen_italic("bold text")))
expected = "__bold text alternative__"
self.assertEqual(expected, md.gen_bold("bold text alternative", True))
def test_monspace(self):
expected = "`monospace`"
# ... rest of the code ...
|
aefa8a3d6d4c809c7e470b22a0c9fb2c0875ba8b
|
project/project/urls.py
|
project/project/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk', app_name='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app', app_name='example_app')
),
url(r'^admin/', include(admin.site.urls)),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app')
),
url(
r'^admin/',
admin.site.urls
),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Remove unneeded app_name from test project to be django 2 compatible
|
Remove unneeded app_name from test project to be django 2 compatible
|
Python
|
mit
|
crunchr/silk,mtford90/silk,jazzband/silk,crunchr/silk,mtford90/silk,jazzband/silk,crunchr/silk,django-silk/silk,django-silk/silk,jazzband/silk,django-silk/silk,crunchr/silk,mtford90/silk,jazzband/silk,mtford90/silk,django-silk/silk
|
python
|
## Code Before:
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk', app_name='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app', app_name='example_app')
),
url(r'^admin/', include(admin.site.urls)),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
## Instruction:
Remove unneeded app_name from test project to be django 2 compatible
## Code After:
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app')
),
url(
r'^admin/',
admin.site.urls
),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
...
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app')
),
url(
r'^admin/',
admin.site.urls
),
]
...
|
535b07758a16dec2ce79781f19b34a96044b99d3
|
fluent_contents/conf/plugin_template/models.py
|
fluent_contents/conf/plugin_template/models.py
|
from django.db import models
from django.utils.six import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
@python_2_unicode_compatible
class {{ model }}(ContentItem):
"""
CMS plugin data model to ...
"""
title = models.CharField(_("Title"), max_length=200)
class Meta:
verbose_name = _("{{ model|title }}")
verbose_name_plural = _("{{ model|title }}s")
def __str__(self):
return self.title
|
from django.db import models
from django.utils.translation import gettext_lazy as _
from fluent_contents.models import ContentItem
class {{ model }}(ContentItem):
"""
CMS plugin data model to ...
"""
title = models.CharField(_("Title"), max_length=200)
class Meta:
verbose_name = _("{{ model|title }}")
verbose_name_plural = _("{{ model|title }}s")
def __str__(self):
return self.title
|
Update plugin_template to Python 3-only standards
|
Update plugin_template to Python 3-only standards
|
Python
|
apache-2.0
|
edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents
|
python
|
## Code Before:
from django.db import models
from django.utils.six import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
@python_2_unicode_compatible
class {{ model }}(ContentItem):
"""
CMS plugin data model to ...
"""
title = models.CharField(_("Title"), max_length=200)
class Meta:
verbose_name = _("{{ model|title }}")
verbose_name_plural = _("{{ model|title }}s")
def __str__(self):
return self.title
## Instruction:
Update plugin_template to Python 3-only standards
## Code After:
from django.db import models
from django.utils.translation import gettext_lazy as _
from fluent_contents.models import ContentItem
class {{ model }}(ContentItem):
"""
CMS plugin data model to ...
"""
title = models.CharField(_("Title"), max_length=200)
class Meta:
verbose_name = _("{{ model|title }}")
verbose_name_plural = _("{{ model|title }}s")
def __str__(self):
return self.title
|
...
from django.db import models
from django.utils.translation import gettext_lazy as _
from fluent_contents.models import ContentItem
class {{ model }}(ContentItem):
"""
CMS plugin data model to ...
...
|
ca4e752c35e8a6254d85fae156cc256f2658de2a
|
src/ip.c
|
src/ip.c
|
/*****************************************************************************/
/* */
/* Telize 2.0.0 */
/* Copyright (c) 2013-2018, Frederic Cambus */
/* https://www.telize.com */
/* */
/* Created: 2013-08-15 */
/* Last Updated: 2018-10-04 */
/* */
/* Telize is released under the BSD 2-Clause license. */
/* See LICENSE file for details. */
/* */
/*****************************************************************************/
#include <sys/socket.h>
#include <kore/kore.h>
#include <kore/http.h>
int ip(struct http_request *);
int
ip(struct http_request *req)
{
char addr[INET6_ADDRSTRLEN];
if (req->owner->addrtype == AF_INET) {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv4.sin_addr), addr, sizeof(addr));
} else {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv6.sin6_addr), addr, sizeof(addr));
}
http_response(req, 200, addr, strlen(addr));
return (KORE_RESULT_OK);
}
|
/*****************************************************************************/
/* */
/* Telize 2.0.0 */
/* Copyright (c) 2013-2018, Frederic Cambus */
/* https://www.telize.com */
/* */
/* Created: 2013-08-15 */
/* Last Updated: 2018-10-04 */
/* */
/* Telize is released under the BSD 2-Clause license. */
/* See LICENSE file for details. */
/* */
/*****************************************************************************/
#include <sys/socket.h>
#include <kore/kore.h>
#include <kore/http.h>
int ip(struct http_request *);
int
ip(struct http_request *req)
{
const char *visitor_ip;
char *ip, addr[INET6_ADDRSTRLEN];
if (req->owner->addrtype == AF_INET) {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv4.sin_addr), addr, sizeof(addr));
} else {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv6.sin6_addr), addr, sizeof(addr));
}
if (http_request_header(req, "X-Forwarded-For", &visitor_ip)) {
ip = kore_strdup(visitor_ip);
} else {
ip = addr;
}
http_response(req, 200, ip, strlen(ip));
return (KORE_RESULT_OK);
}
|
Handle the 'X-Forwarded-For' HTTP header if present
|
Handle the 'X-Forwarded-For' HTTP header if present
|
C
|
bsd-2-clause
|
fcambus/telize
|
c
|
## Code Before:
/*****************************************************************************/
/* */
/* Telize 2.0.0 */
/* Copyright (c) 2013-2018, Frederic Cambus */
/* https://www.telize.com */
/* */
/* Created: 2013-08-15 */
/* Last Updated: 2018-10-04 */
/* */
/* Telize is released under the BSD 2-Clause license. */
/* See LICENSE file for details. */
/* */
/*****************************************************************************/
#include <sys/socket.h>
#include <kore/kore.h>
#include <kore/http.h>
int ip(struct http_request *);
int
ip(struct http_request *req)
{
char addr[INET6_ADDRSTRLEN];
if (req->owner->addrtype == AF_INET) {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv4.sin_addr), addr, sizeof(addr));
} else {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv6.sin6_addr), addr, sizeof(addr));
}
http_response(req, 200, addr, strlen(addr));
return (KORE_RESULT_OK);
}
## Instruction:
Handle the 'X-Forwarded-For' HTTP header if present
## Code After:
/*****************************************************************************/
/* */
/* Telize 2.0.0 */
/* Copyright (c) 2013-2018, Frederic Cambus */
/* https://www.telize.com */
/* */
/* Created: 2013-08-15 */
/* Last Updated: 2018-10-04 */
/* */
/* Telize is released under the BSD 2-Clause license. */
/* See LICENSE file for details. */
/* */
/*****************************************************************************/
#include <sys/socket.h>
#include <kore/kore.h>
#include <kore/http.h>
int ip(struct http_request *);
int
ip(struct http_request *req)
{
const char *visitor_ip;
char *ip, addr[INET6_ADDRSTRLEN];
if (req->owner->addrtype == AF_INET) {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv4.sin_addr), addr, sizeof(addr));
} else {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv6.sin6_addr), addr, sizeof(addr));
}
if (http_request_header(req, "X-Forwarded-For", &visitor_ip)) {
ip = kore_strdup(visitor_ip);
} else {
ip = addr;
}
http_response(req, 200, ip, strlen(ip));
return (KORE_RESULT_OK);
}
|
// ... existing code ...
int
ip(struct http_request *req)
{
const char *visitor_ip;
char *ip, addr[INET6_ADDRSTRLEN];
if (req->owner->addrtype == AF_INET) {
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv4.sin_addr), addr, sizeof(addr));
// ... modified code ...
inet_ntop(req->owner->addrtype, &(req->owner->addr.ipv6.sin6_addr), addr, sizeof(addr));
}
if (http_request_header(req, "X-Forwarded-For", &visitor_ip)) {
ip = kore_strdup(visitor_ip);
} else {
ip = addr;
}
http_response(req, 200, ip, strlen(ip));
return (KORE_RESULT_OK);
}
// ... rest of the code ...
|
6fee21a630a9ba3b54f58152cb4549b4170b833f
|
docdata/urls.py
|
docdata/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns('docdata.views',
# Status change notifications
url(r'^status_change/$', 'status_change', name='status_change'),
)
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('docdata.views',
# Status change notifications
url(r'^status_change/$', 'status_change', name='status_change'),
)
|
Fix URLs to work with Django 1.5
|
Fix URLs to work with Django 1.5
|
Python
|
agpl-3.0
|
dokterbob/django-docdata
|
python
|
## Code Before:
from django.conf.urls.defaults import *
urlpatterns = patterns('docdata.views',
# Status change notifications
url(r'^status_change/$', 'status_change', name='status_change'),
)
## Instruction:
Fix URLs to work with Django 1.5
## Code After:
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('docdata.views',
# Status change notifications
url(r'^status_change/$', 'status_change', name='status_change'),
)
|
// ... existing code ...
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('docdata.views',
// ... rest of the code ...
|
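For readers applying this record on newer Django versions: patterns() was itself deprecated in Django 1.8 and removed in 1.10, so the longer-lived shape of this URLconf is a plain list. A hedged sketch, assuming the same docdata.views.status_change view (not part of the original change):

from django.conf.urls import url

from docdata import views

urlpatterns = [
    # Status change notifications
    url(r'^status_change/$', views.status_change, name='status_change'),
]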
f4b50b12ae8ad4da6e04ddc186c077c31af00611
|
SimpleHTTP404Server.py
|
SimpleHTTP404Server.py
|
import os
import SimpleHTTPServer
class GitHubHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""
Overrides the default request handler to handle GitHub custom 404 pages.
(Pretty much a 404.html page in your root.)
See https://help.github.com/articles/custom-404-pages
This currently only works for erroneous pages in the root directory, but
that's enough to test what the 404 page looks like.
"""
def do_GET(self):
path = self.translate_path(self.path)
print(self.path)
print(path)
# If the path doesn't exist, fake it to be the 404 page.
if not os.path.exists(path):
self.path = '404.html'
# Call the superclass methods to actually serve the page.
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
print(self.path)
print(self.translate_path(self.path))
SimpleHTTPServer.test(GitHubHandler)
|
import os
import SimpleHTTPServer
class GitHubHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""
Overrides the default request handler to handle GitHub custom 404 pages.
(Pretty much a 404.html page in your root.)
See https://help.github.com/articles/custom-404-pages
This currently only works for erroneous pages in the root directory, but
that's enough to test what the 404 page looks like.
"""
def do_GET(self):
path = self.translate_path(self.path)
# If the path doesn't exist, fake it to be the 404 page.
if not os.path.exists(path):
self.path = '404.html'
# Call the superclass methods to actually serve the page.
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
SimpleHTTPServer.test(GitHubHandler)
|
Remove some print lines from the fake server.
|
Remove some print lines from the fake server.
|
Python
|
mit
|
clokep/SimpleHTTP404Server,clokep/SimpleHTTP404Server
|
python
|
## Code Before:
import os
import SimpleHTTPServer
class GitHubHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""
Overrides the default request handler to handle GitHub custom 404 pages.
(Pretty much a 404.html page in your root.)
See https://help.github.com/articles/custom-404-pages
This currently only works for erroneous pages in the root directory, but
that's enough to test what the 404 page looks like.
"""
def do_GET(self):
path = self.translate_path(self.path)
print(self.path)
print(path)
# If the path doesn't exist, fake it to be the 404 page.
if not os.path.exists(path):
self.path = '404.html'
# Call the superclass methods to actually serve the page.
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
print(self.path)
print(self.translate_path(self.path))
SimpleHTTPServer.test(GitHubHandler)
## Instruction:
Remove some print lines from the fake server.
## Code After:
import os
import SimpleHTTPServer
class GitHubHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""
Overrides the default request handler to handle GitHub custom 404 pages.
(Pretty much a 404.html page in your root.)
See https://help.github.com/articles/custom-404-pages
This currently only works for erroneous pages in the root directory, but
that's enough to test what the 404 page looks like.
"""
def do_GET(self):
path = self.translate_path(self.path)
# If the path doesn't exist, fake it to be the 404 page.
if not os.path.exists(path):
self.path = '404.html'
# Call the superclass methods to actually serve the page.
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
SimpleHTTPServer.test(GitHubHandler)
|
# ... existing code ...
"""
def do_GET(self):
path = self.translate_path(self.path)
# If the path doesn't exist, fake it to be the 404 page.
if not os.path.exists(path):
self.path = '404.html'
# ... modified code ...
# Call the superclass methods to actually serve the page.
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
SimpleHTTPServer.test(GitHubHandler)
# ... rest of the code ...
|
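The handler in this record targets the Python 2 SimpleHTTPServer module. Purely as an untested sketch, not part of the project, the same 404-fallback idea maps onto Python 3's http.server roughly as follows; http.server.test is the same helper the module's own CLI uses rather than a documented public API.

import os
from http.server import SimpleHTTPRequestHandler, test

class GitHubHandler(SimpleHTTPRequestHandler):
    """Serve 404.html for any path that does not exist, as GitHub Pages does."""

    def do_GET(self):
        path = self.translate_path(self.path)
        # If the path doesn't exist, fake it to be the 404 page.
        if not os.path.exists(path):
            self.path = '404.html'
        super().do_GET()

if __name__ == '__main__':
    test(GitHubHandler)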
6c157525bc32f1e6005be69bd6fde61d0d002ad3
|
wizard/post_function.py
|
wizard/post_function.py
|
from openerp import pooler
def call_post_function(cr, uid, context):
"""This functionality allows users of module account.move.reversal
to call a function of the desired openerp model, after the
reversal of the move.
The call automatically sends at least the database cursor (cr) and
the user id (uid) for security reasons.
Two key parameters are required in the context to do so:
- 'post_function_obj': the osv model where the function is defined,
- 'post_function_name': the name of the function to call,
And two optional key parameters:
- 'post_function_args': an iterable object listing the required
arguments to pass after 'cr, uid',
- 'post_function_kwargs': a dictionary object listing the
optionnal keyword args to pass.
"""
if 'post_function_obj' in context:
# We get the function addr by its name,
# and call it with (cr, uid, *args, **kwargs)
getattr(
pooler.get_pool(cr.dbname)[context['post_function_obj']],
context['post_function_name']
)(
cr, uid,
*context['post_function_args'],
**context['post_function_kwargs']
)
# We clean the context to avoid multiple calls of the function.
context.pop('post_function_obj')
context.pop('post_function_name')
context.pop('post_function_args')
context.pop('post_function_kwargs')
|
from openerp import pooler
def call_post_function(cr, uid, context):
"""This functionality allows users of module account.move.reversal
to call a function of the desired openerp model, after the
reversal of the move.
The call automatically sends at least the database cursor (cr) and
the user id (uid) for security reasons.
Two key parameters are required in the context to do so:
- 'post_function_obj': the osv model where the function is defined,
- 'post_function_name': the name of the function to call,
And two optional key parameters:
- 'post_function_args': an iterable object listing the required
arguments to pass after 'cr, uid',
- 'post_function_kwargs': a dictionary object listing the
optionnal keyword args to pass.
"""
if 'post_function_obj' in context:
# We get the function addr by its name,
# and call it with (cr, uid, *args, **kwargs)
getattr(
pooler.get_pool(cr.dbname)[context['post_function_obj']],
context['post_function_name']
)(
cr, uid,
*context.get('post_function_args', []),
**context.get('post_function_kwargs', {})
)
# We clean the context to avoid multiple calls of the function.
context.pop('post_function_obj')
context.pop('post_function_name')
context.pop('post_function_args')
context.pop('post_function_kwargs')
|
Remove some required arguments in post function context
|
Remove some required arguments in post function context
|
Python
|
agpl-3.0
|
xcgd/account_move_reversal
|
python
|
## Code Before:
from openerp import pooler
def call_post_function(cr, uid, context):
"""This functionality allows users of module account.move.reversal
to call a function of the desired openerp model, after the
reversal of the move.
The call automatically sends at least the database cursor (cr) and
the user id (uid) for security reasons.
Two key parameters are required in the context to do so:
- 'post_function_obj': the osv model where the function is defined,
- 'post_function_name': the name of the function to call,
And two optional key parameters:
- 'post_function_args': an iterable object listing the required
arguments to pass after 'cr, uid',
- 'post_function_kwargs': a dictionary object listing the
optionnal keyword args to pass.
"""
if 'post_function_obj' in context:
# We get the function addr by its name,
# and call it with (cr, uid, *args, **kwargs)
getattr(
pooler.get_pool(cr.dbname)[context['post_function_obj']],
context['post_function_name']
)(
cr, uid,
*context['post_function_args'],
**context['post_function_kwargs']
)
# We clean the context to avoid multiple calls of the function.
context.pop('post_function_obj')
context.pop('post_function_name')
context.pop('post_function_args')
context.pop('post_function_kwargs')
## Instruction:
Remove some required arguments in post function context
## Code After:
from openerp import pooler
def call_post_function(cr, uid, context):
"""This functionality allows users of module account.move.reversal
to call a function of the desired openerp model, after the
reversal of the move.
The call automatically sends at least the database cursor (cr) and
the user id (uid) for security reasons.
Two key parameters are required in the context to do so:
- 'post_function_obj': the osv model where the function is defined,
- 'post_function_name': the name of the function to call,
And two optional key parameters:
- 'post_function_args': an iterable object listing the required
arguments to pass after 'cr, uid',
- 'post_function_kwargs': a dictionary object listing the
optionnal keyword args to pass.
"""
if 'post_function_obj' in context:
# We get the function addr by its name,
# and call it with (cr, uid, *args, **kwargs)
getattr(
pooler.get_pool(cr.dbname)[context['post_function_obj']],
context['post_function_name']
)(
cr, uid,
*context.get('post_function_args', []),
**context.get('post_function_kwargs', {})
)
# We clean the context to avoid multiple calls of the function.
context.pop('post_function_obj')
context.pop('post_function_name')
context.pop('post_function_args')
context.pop('post_function_kwargs')
|
...
context['post_function_name']
)(
cr, uid,
*context.get('post_function_args', []),
**context.get('post_function_kwargs', {})
)
# We clean the context to avoid multiple calls of the function.
context.pop('post_function_obj')
...
|
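No caller is shown in this record, so the following is a purely hypothetical sketch of how the relaxed context.get() defaults are meant to be exercised; the model and method names are placeholders, not names from the module.

# Hypothetical caller-side setup for call_post_function(cr, uid, context).
context = {
    'post_function_obj': 'some.model',   # osv model that defines the method
    'post_function_name': 'recompute',   # invoked as (cr, uid, *args, **kwargs)
    # 'post_function_args' / 'post_function_kwargs' may now be omitted:
    # call_post_function falls back to [] and {} via context.get().
}
call_post_function(cr, uid, context)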
ae689c9de698daeaf8ab5275c384183cb665c903
|
neutron_classifier/common/constants.py
|
neutron_classifier/common/constants.py
|
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols`
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
Remove CLASSIFIER_TYPES constant - it was never used
|
Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882
|
Python
|
apache-2.0
|
openstack/neutron-classifier,openstack/neutron-classifier
|
python
|
## Code Before:
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols`
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
## Instruction:
Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882
## Code After:
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
...
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
...
|
0538d4bcaba5da966ea3590aa8b2faf5395404f0
|
OpenEdXMobile/src/main/java/org/edx/mobile/view/custom/EdxWebView.java
|
OpenEdXMobile/src/main/java/org/edx/mobile/view/custom/EdxWebView.java
|
package org.edx.mobile.view.custom;
import android.annotation.SuppressLint;
import android.content.Context;
import android.util.AttributeSet;
import android.webkit.WebSettings;
import android.webkit.WebView;
import org.edx.mobile.BuildConfig;
import org.edx.mobile.R;
public class EdxWebView extends WebView {
@SuppressLint("SetJavaScriptEnabled")
public EdxWebView(Context context, AttributeSet attrs) {
super(context, attrs);
final WebSettings settings = getSettings();
settings.setJavaScriptEnabled(true);
settings.setLoadWithOverviewMode(true);
settings.setBuiltInZoomControls(false);
settings.setSupportZoom(true);
settings.setLoadsImagesAutomatically(true);
settings.setDomStorageEnabled(true);
settings.setUserAgentString(
settings.getUserAgentString() + " " +
context.getString(R.string.app_name) + "/" +
BuildConfig.APPLICATION_ID + "/" +
BuildConfig.VERSION_NAME
);
}
}
|
package org.edx.mobile.view.custom;
import android.annotation.SuppressLint;
import android.content.Context;
import android.util.AttributeSet;
import android.webkit.WebSettings;
import android.webkit.WebView;
import org.edx.mobile.BuildConfig;
import org.edx.mobile.R;
public class EdxWebView extends WebView {
@SuppressLint("SetJavaScriptEnabled")
public EdxWebView(Context context, AttributeSet attrs) {
super(context, attrs);
final WebSettings settings = getSettings();
settings.setJavaScriptEnabled(true);
settings.setLoadWithOverviewMode(true);
settings.setBuiltInZoomControls(false);
settings.setSupportZoom(true);
settings.setLoadsImagesAutomatically(true);
settings.setDomStorageEnabled(true);
settings.setUserAgentString(
settings.getUserAgentString() + " " +
context.getString(R.string.app_name) + "/" +
BuildConfig.APPLICATION_ID + "/" +
BuildConfig.VERSION_NAME
);
setLayerType(LAYER_TYPE_HARDWARE, null);
}
}
|
Enable hardware acceleration for all WebView
|
Enable hardware acceleration for all WebView
- LEARNER-7224
|
Java
|
apache-2.0
|
edx/edx-app-android,edx/edx-app-android,edx/edx-app-android,edx/edx-app-android,edx/edx-app-android,edx/edx-app-android
|
java
|
## Code Before:
package org.edx.mobile.view.custom;
import android.annotation.SuppressLint;
import android.content.Context;
import android.util.AttributeSet;
import android.webkit.WebSettings;
import android.webkit.WebView;
import org.edx.mobile.BuildConfig;
import org.edx.mobile.R;
public class EdxWebView extends WebView {
@SuppressLint("SetJavaScriptEnabled")
public EdxWebView(Context context, AttributeSet attrs) {
super(context, attrs);
final WebSettings settings = getSettings();
settings.setJavaScriptEnabled(true);
settings.setLoadWithOverviewMode(true);
settings.setBuiltInZoomControls(false);
settings.setSupportZoom(true);
settings.setLoadsImagesAutomatically(true);
settings.setDomStorageEnabled(true);
settings.setUserAgentString(
settings.getUserAgentString() + " " +
context.getString(R.string.app_name) + "/" +
BuildConfig.APPLICATION_ID + "/" +
BuildConfig.VERSION_NAME
);
}
}
## Instruction:
Enable hardware acceleration for all WebView
- LEARNER-7224
## Code After:
package org.edx.mobile.view.custom;
import android.annotation.SuppressLint;
import android.content.Context;
import android.util.AttributeSet;
import android.webkit.WebSettings;
import android.webkit.WebView;
import org.edx.mobile.BuildConfig;
import org.edx.mobile.R;
public class EdxWebView extends WebView {
@SuppressLint("SetJavaScriptEnabled")
public EdxWebView(Context context, AttributeSet attrs) {
super(context, attrs);
final WebSettings settings = getSettings();
settings.setJavaScriptEnabled(true);
settings.setLoadWithOverviewMode(true);
settings.setBuiltInZoomControls(false);
settings.setSupportZoom(true);
settings.setLoadsImagesAutomatically(true);
settings.setDomStorageEnabled(true);
settings.setUserAgentString(
settings.getUserAgentString() + " " +
context.getString(R.string.app_name) + "/" +
BuildConfig.APPLICATION_ID + "/" +
BuildConfig.VERSION_NAME
);
setLayerType(LAYER_TYPE_HARDWARE, null);
}
}
|
// ... existing code ...
BuildConfig.APPLICATION_ID + "/" +
BuildConfig.VERSION_NAME
);
setLayerType(LAYER_TYPE_HARDWARE, null);
}
}
// ... rest of the code ...
|
20df724e1d905d1834e54958115b80565ae618c1
|
code-sample-angular-kotlin/code-sample-jwt-token-example/kotlin-backend/src/main/kotlin/codesample/kotlin/jwtexample/security/config/SecurityConfig.kt
|
code-sample-angular-kotlin/code-sample-jwt-token-example/kotlin-backend/src/main/kotlin/codesample/kotlin/jwtexample/security/config/SecurityConfig.kt
|
package codesample.kotlin.jwtexample.security.config
import codesample.kotlin.jwtexample.security.AuthExceptionsEntry
import org.springframework.context.annotation.Configuration
import org.springframework.security.config.annotation.web.builders.HttpSecurity
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter
import org.springframework.security.config.http.SessionCreationPolicy
@Configuration
@EnableWebSecurity
class SecurityConfig (val authExceptionsEntry: AuthExceptionsEntry)
: WebSecurityConfigurerAdapter() {
override fun configure(http: HttpSecurity) {
http
// Route all auth exceptions to this class. See it's comment for more info
.exceptionHandling().authenticationEntryPoint(authExceptionsEntry)
.and()
// Do not store any session info. We are going to authenticate each request with JWT token
.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
.authorizeRequests()
.antMatchers("/h2-console/**/**").permitAll()
.antMatchers("/auth/**").permitAll()
.anyRequest().authenticated()
}
}
|
package codesample.kotlin.jwtexample.security.config
import codesample.kotlin.jwtexample.security.AuthExceptionsEntry
import org.springframework.context.annotation.Configuration
import org.springframework.security.config.annotation.web.builders.HttpSecurity
import org.springframework.security.config.annotation.web.builders.WebSecurity
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter
import org.springframework.security.config.http.SessionCreationPolicy
@Configuration
@EnableWebSecurity
class SecurityConfig (val authExceptionsEntry: AuthExceptionsEntry)
: WebSecurityConfigurerAdapter() {
override fun configure(http: HttpSecurity) {
http
// Route all auth exceptions to this class. See it's comment for more info
.exceptionHandling().authenticationEntryPoint(authExceptionsEntry)
.and()
// Do not store any session info. We are going to authenticate each request with JWT token
.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
.authorizeRequests()
.antMatchers("/auth/**").permitAll()
.anyRequest().authenticated()
}
override fun configure(web: WebSecurity) {
web
.ignoring().antMatchers("/h2-console/**/**")
}
}
|
Fix h2-console being blocked by spring-security
|
Fix h2-console being blocked by spring-security
|
Kotlin
|
mit
|
aquatir/remember_java_api,aquatir/remember_java_api,aquatir/remember_java_api,aquatir/remember_java_api
|
kotlin
|
## Code Before:
package codesample.kotlin.jwtexample.security.config
import codesample.kotlin.jwtexample.security.AuthExceptionsEntry
import org.springframework.context.annotation.Configuration
import org.springframework.security.config.annotation.web.builders.HttpSecurity
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter
import org.springframework.security.config.http.SessionCreationPolicy
@Configuration
@EnableWebSecurity
class SecurityConfig (val authExceptionsEntry: AuthExceptionsEntry)
: WebSecurityConfigurerAdapter() {
override fun configure(http: HttpSecurity) {
http
// Route all auth exceptions to this class. See it's comment for more info
.exceptionHandling().authenticationEntryPoint(authExceptionsEntry)
.and()
// Do not store any session info. We are going to authenticate each request with JWT token
.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
.authorizeRequests()
.antMatchers("/h2-console/**/**").permitAll()
.antMatchers("/auth/**").permitAll()
.anyRequest().authenticated()
}
}
## Instruction:
Fix h2-console being blocked by spring-security
## Code After:
package codesample.kotlin.jwtexample.security.config
import codesample.kotlin.jwtexample.security.AuthExceptionsEntry
import org.springframework.context.annotation.Configuration
import org.springframework.security.config.annotation.web.builders.HttpSecurity
import org.springframework.security.config.annotation.web.builders.WebSecurity
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter
import org.springframework.security.config.http.SessionCreationPolicy
@Configuration
@EnableWebSecurity
class SecurityConfig (val authExceptionsEntry: AuthExceptionsEntry)
: WebSecurityConfigurerAdapter() {
override fun configure(http: HttpSecurity) {
http
// Route all auth exceptions to this class. See it's comment for more info
.exceptionHandling().authenticationEntryPoint(authExceptionsEntry)
.and()
// Do not store any session info. We are going to authenticate each request with JWT token
.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
.authorizeRequests()
.antMatchers("/auth/**").permitAll()
.anyRequest().authenticated()
}
override fun configure(web: WebSecurity) {
web
.ignoring().antMatchers("/h2-console/**/**")
}
}
|
...
import codesample.kotlin.jwtexample.security.AuthExceptionsEntry
import org.springframework.context.annotation.Configuration
import org.springframework.security.config.annotation.web.builders.HttpSecurity
import org.springframework.security.config.annotation.web.builders.WebSecurity
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter
import org.springframework.security.config.http.SessionCreationPolicy
...
.authorizeRequests()
.antMatchers("/auth/**").permitAll()
.anyRequest().authenticated()
}
override fun configure(web: WebSecurity) {
web
.ignoring().antMatchers("/h2-console/**/**")
}
}
...
|
cb08d632fac453403bc8b91391b14669dbe932cc
|
circonus/__init__.py
|
circonus/__init__.py
|
from __future__ import absolute_import
__title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
|
__title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
|
Remove unnecessary absolute import statement.
|
Remove unnecessary absolute import statement.
|
Python
|
mit
|
monetate/circonus,monetate/circonus
|
python
|
## Code Before:
from __future__ import absolute_import
__title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
## Instruction:
Remove unnecessary absolute import statement.
## Code After:
__title__ = "circonus"
__version__ = "0.0.0"
from logging import NullHandler
import logging
from circonus.client import CirconusClient
logging.getLogger(__name__).addHandler(NullHandler())
|
// ... existing code ...
__title__ = "circonus"
__version__ = "0.0.0"
// ... rest of the code ...
|
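The NullHandler line kept in this record is the usual way for a library to stay silent unless the application opts in. As a small usage sketch (handler and level choices are arbitrary), an application that wants the library's log output would configure the 'circonus' logger itself:

import logging

# Application-side opt-in: route the library's records somewhere visible.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("circonus").setLevel(logging.DEBUG)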
bc680b2d0d88196c81d362c6c9c8a3f8af53e0a7
|
src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt
|
src/nl/rubensten/texifyidea/run/evince/EvinceForwardSearch.kt
|
package nl.rubensten.texifyidea.run.evince
import com.intellij.execution.runners.ExecutionEnvironment
import nl.rubensten.texifyidea.TeXception
import nl.rubensten.texifyidea.psi.LatexEnvironment
import nl.rubensten.texifyidea.run.LatexRunConfiguration
import nl.rubensten.texifyidea.util.*
import org.jetbrains.concurrency.runAsync
/**
* Provides forward search for Evince.
*/
class EvinceForwardSearch {
/**
* Execute forward search based on the given environment.
*/
fun execute(runConfig: LatexRunConfiguration, environment: ExecutionEnvironment) {
run {
val psiFile = runConfig.mainFile.psiFile(environment.project) ?: return@run
val document = psiFile.document() ?: return@run
val editor = psiFile.openedEditor() ?: return@run
if (document != editor.document) {
return@run
}
// Do not do forward search when editing the preamble.
if (psiFile.isRoot()) {
val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run
val environment = element.parentOfType(LatexEnvironment::class) ?: return@run
if (environment.name()?.text != "document") {
return@run
}
}
val line = document.getLineNumber(editor.caretOffset()) + 1
runAsync {
try {
// This will start Evince if it is not running yet
EvinceConversation.forwardSearch(pdfFilePath = runConfig.outputFilePath, sourceFilePath = psiFile.virtualFile.path, line = line)
} catch (ignored: TeXception) {
}
}
}
}
}
|
package nl.rubensten.texifyidea.run.evince
import com.intellij.execution.runners.ExecutionEnvironment
import nl.rubensten.texifyidea.TeXception
import nl.rubensten.texifyidea.psi.LatexEnvironment
import nl.rubensten.texifyidea.run.LatexRunConfiguration
import nl.rubensten.texifyidea.util.*
import org.jetbrains.concurrency.runAsync
/**
* Provides forward search for Evince.
*/
class EvinceForwardSearch {
/**
* Execute forward search based on the given environment.
*/
fun execute(runConfig: LatexRunConfiguration, environment: ExecutionEnvironment) {
run {
val psiFile = runConfig.mainFile.psiFile(environment.project) ?: return@run
val document = psiFile.document() ?: return@run
val editor = psiFile.openedEditor() ?: return@run
if (document != editor.document) {
return@run
}
val line = document.getLineNumber(editor.caretOffset()) + 1
runAsync {
try {
// This will start Evince if it is not running yet
EvinceConversation.forwardSearch(pdfFilePath = runConfig.outputFilePath, sourceFilePath = psiFile.virtualFile.path, line = line)
} catch (ignored: TeXception) {
}
}
}
}
}
|
Remove check which does not do forward search in preamble because this is unnecessary (forward search works fine) and behaves incorrectly (at least in one case).
|
Remove check which does not do forward search in preamble because this is unnecessary (forward search works fine) and behaves incorrectly (at least in one case).
|
Kotlin
|
mit
|
Ruben-Sten/TeXiFy-IDEA,Ruben-Sten/TeXiFy-IDEA,Ruben-Sten/TeXiFy-IDEA,Ruben-Sten/TeXiFy-IDEA
|
kotlin
|
## Code Before:
package nl.rubensten.texifyidea.run.evince
import com.intellij.execution.runners.ExecutionEnvironment
import nl.rubensten.texifyidea.TeXception
import nl.rubensten.texifyidea.psi.LatexEnvironment
import nl.rubensten.texifyidea.run.LatexRunConfiguration
import nl.rubensten.texifyidea.util.*
import org.jetbrains.concurrency.runAsync
/**
* Provides forward search for Evince.
*/
class EvinceForwardSearch {
/**
* Execute forward search based on the given environment.
*/
fun execute(runConfig: LatexRunConfiguration, environment: ExecutionEnvironment) {
run {
val psiFile = runConfig.mainFile.psiFile(environment.project) ?: return@run
val document = psiFile.document() ?: return@run
val editor = psiFile.openedEditor() ?: return@run
if (document != editor.document) {
return@run
}
// Do not do forward search when editing the preamble.
if (psiFile.isRoot()) {
val element = psiFile.findElementAt(editor.caretOffset()) ?: return@run
val environment = element.parentOfType(LatexEnvironment::class) ?: return@run
if (environment.name()?.text != "document") {
return@run
}
}
val line = document.getLineNumber(editor.caretOffset()) + 1
runAsync {
try {
// This will start Evince if it is not running yet
EvinceConversation.forwardSearch(pdfFilePath = runConfig.outputFilePath, sourceFilePath = psiFile.virtualFile.path, line = line)
} catch (ignored: TeXception) {
}
}
}
}
}
## Instruction:
Remove check which does not do forward search in preamble because this is unnecessary (forward search works fine) and behaves incorrectly (at least in one case).
## Code After:
package nl.rubensten.texifyidea.run.evince
import com.intellij.execution.runners.ExecutionEnvironment
import nl.rubensten.texifyidea.TeXception
import nl.rubensten.texifyidea.psi.LatexEnvironment
import nl.rubensten.texifyidea.run.LatexRunConfiguration
import nl.rubensten.texifyidea.util.*
import org.jetbrains.concurrency.runAsync
/**
* Provides forward search for Evince.
*/
class EvinceForwardSearch {
/**
* Execute forward search based on the given environment.
*/
fun execute(runConfig: LatexRunConfiguration, environment: ExecutionEnvironment) {
run {
val psiFile = runConfig.mainFile.psiFile(environment.project) ?: return@run
val document = psiFile.document() ?: return@run
val editor = psiFile.openedEditor() ?: return@run
if (document != editor.document) {
return@run
}
val line = document.getLineNumber(editor.caretOffset()) + 1
runAsync {
try {
// This will start Evince if it is not running yet
EvinceConversation.forwardSearch(pdfFilePath = runConfig.outputFilePath, sourceFilePath = psiFile.virtualFile.path, line = line)
} catch (ignored: TeXception) {
}
}
}
}
}
|
# ... existing code ...
return@run
}
val line = document.getLineNumber(editor.caretOffset()) + 1
runAsync {
# ... rest of the code ...
|
bb4c88c29ab312e2be118ac857daa7c93399d6e1
|
src/include/postmaster/fork_process.h
|
src/include/postmaster/fork_process.h
|
extern pid_t fork_process(void);
#endif /* ! FORK_PROCESS_H */
|
/*-------------------------------------------------------------------------
*
* fork_process.h
* Exports from postmaster/fork_process.c.
*
* Copyright (c) 1996-2005, PostgreSQL Global Development Group
*
* $PostgreSQL: pgsql/src/include/postmaster/fork_process.h,v 1.2 2005/03/13 23:32:26 tgl Exp $
*
*-------------------------------------------------------------------------
*/
#ifndef FORK_PROCESS_H
#define FORK_PROCESS_H
extern pid_t fork_process(void);
#endif /* FORK_PROCESS_H */
|
Add missing identification comment, remove entirely inappropriate include of postgres.h.
|
Add missing identification comment, remove entirely inappropriate include
of postgres.h.
|
C
|
apache-2.0
|
50wu/gpdb,lisakowen/gpdb,lpetrov-pivotal/gpdb,greenplum-db/gpdb,zeroae/postgres-xl,adam8157/gpdb,rubikloud/gpdb,zeroae/postgres-xl,cjcjameson/gpdb,xinzweb/gpdb,edespino/gpdb,oberstet/postgres-xl,ahachete/gpdb,lpetrov-pivotal/gpdb,adam8157/gpdb,rubikloud/gpdb,jmcatamney/gpdb,ahachete/gpdb,oberstet/postgres-xl,ovr/postgres-xl,xuegang/gpdb,yuanzhao/gpdb,royc1/gpdb,CraigHarris/gpdb,pavanvd/postgres-xl,ashwinstar/gpdb,snaga/postgres-xl,cjcjameson/gpdb,Quikling/gpdb,janebeckman/gpdb,kmjungersen/PostgresXL,rubikloud/gpdb,randomtask1155/gpdb,oberstet/postgres-xl,chrishajas/gpdb,pavanvd/postgres-xl,kmjungersen/PostgresXL,royc1/gpdb,50wu/gpdb,lintzc/gpdb,50wu/gpdb,greenplum-db/gpdb,yazun/postgres-xl,CraigHarris/gpdb,CraigHarris/gpdb,snaga/postgres-xl,kaknikhil/gpdb,lpetrov-pivotal/gpdb,rvs/gpdb,jmcatamney/gpdb,greenplum-db/gpdb,royc1/gpdb,yuanzhao/gpdb,edespino/gpdb,Quikling/gpdb,foyzur/gpdb,atris/gpdb,techdragon/Postgres-XL,adam8157/gpdb,lintzc/gpdb,zaksoup/gpdb,tangp3/gpdb,tangp3/gpdb,ovr/postgres-xl,jmcatamney/gpdb,Postgres-XL/Postgres-XL,greenplum-db/gpdb,cjcjameson/gpdb,kaknikhil/gpdb,Chibin/gpdb,rvs/gpdb,CraigHarris/gpdb,Chibin/gpdb,tangp3/gpdb,xinzweb/gpdb,tangp3/gpdb,lintzc/gpdb,Quikling/gpdb,kaknikhil/gpdb,zaksoup/gpdb,yuanzhao/gpdb,snaga/postgres-xl,yazun/postgres-xl,yuanzhao/gpdb,arcivanov/postgres-xl,zeroae/postgres-xl,Chibin/gpdb,ashwinstar/gpdb,0x0FFF/gpdb,postmind-net/postgres-xl,xinzweb/gpdb,adam8157/gpdb,lisakowen/gpdb,randomtask1155/gpdb,rubikloud/gpdb,tangp3/gpdb,cjcjameson/gpdb,Postgres-XL/Postgres-XL,lpetrov-pivotal/gpdb,ashwinstar/gpdb,0x0FFF/gpdb,ahachete/gpdb,tangp3/gpdb,lpetrov-pivotal/gpdb,ahachete/gpdb,foyzur/gpdb,edespino/gpdb,ahachete/gpdb,ahachete/gpdb,ovr/postgres-xl,janebeckman/gpdb,pavanvd/postgres-xl,janebeckman/gpdb,50wu/gpdb,randomtask1155/gpdb,rvs/gpdb,tangp3/gpdb,tpostgres-projects/tPostgres,tpostgres-projects/tPostgres,rvs/gpdb,jmcatamney/gpdb,lintzc/gpdb,kaknikhil/gpdb,randomtask1155/gpdb,lpetrov-pivotal/gpdb,atris/gpdb,edespino/gpdb,greenplum-db/gpdb,postmind-net/postgres-xl,lisakowen/gpdb,lintzc/gpdb,pavanvd/postgres-xl,atris/gpdb,0x0FFF/gpdb,Chibin/gpdb,Quikling/gpdb,oberstet/postgres-xl,janebeckman/gpdb,edespino/gpdb,xinzweb/gpdb,0x0FFF/gpdb,atris/gpdb,chrishajas/gpdb,oberstet/postgres-xl,lisakowen/gpdb,CraigHarris/gpdb,snaga/postgres-xl,lisakowen/gpdb,royc1/gpdb,Postgres-XL/Postgres-XL,zaksoup/gpdb,yazun/postgres-xl,yuanzhao/gpdb,cjcjameson/gpdb,chrishajas/gpdb,xuegang/gpdb,ashwinstar/gpdb,lisakowen/gpdb,chrishajas/gpdb,ashwinstar/gpdb,foyzur/gpdb,Chibin/gpdb,kaknikhil/gpdb,yuanzhao/gpdb,zeroae/postgres-xl,xinzweb/gpdb,lintzc/gpdb,arcivanov/postgres-xl,Quikling/gpdb,Chibin/gpdb,chrishajas/gpdb,Postgres-XL/Postgres-XL,janebeckman/gpdb,cjcjameson/gpdb,techdragon/Postgres-XL,atris/gpdb,adam8157/gpdb,jmcatamney/gpdb,yazun/postgres-xl,cjcjameson/gpdb,edespino/gpdb,foyzur/gpdb,foyzur/gpdb,arcivanov/postgres-xl,kmjungersen/PostgresXL,jmcatamney/gpdb,CraigHarris/gpdb,lintzc/gpdb,randomtask1155/gpdb,xinzweb/gpdb,rvs/gpdb,tpostgres-projects/tPostgres,rubikloud/gpdb,rvs/gpdb,lpetrov-pivotal/gpdb,techdragon/Postgres-XL,CraigHarris/gpdb,50wu/gpdb,xuegang/gpdb,kmjungersen/PostgresXL,zaksoup/gpdb,janebeckman/gpdb,rubikloud/gpdb,arcivanov/postgres-xl,kmjungersen/PostgresXL,jmcatamney/gpdb,postmind-net/postgres-xl,royc1/gpdb,50wu/gpdb,pavanvd/postgres-xl,yuanzhao/gpdb,chrishajas/gpdb,CraigHarris/gpdb,janebeckman/gpdb,arcivanov/postgres-xl,postmind-net/postgres-xl,randomtask1155/gpdb,xuegang/gpdb,janebeckman/gpdb,xuegang/gpdb,edespino/gpdb,adam8157/gpdb,50wu/gpdb,xuegang/gp
db,ashwinstar/gpdb,foyzur/gpdb,kaknikhil/gpdb,rvs/gpdb,yuanzhao/gpdb,Quikling/gpdb,cjcjameson/gpdb,tpostgres-projects/tPostgres,lintzc/gpdb,0x0FFF/gpdb,zeroae/postgres-xl,xuegang/gpdb,tpostgres-projects/tPostgres,xuegang/gpdb,ovr/postgres-xl,lpetrov-pivotal/gpdb,ashwinstar/gpdb,0x0FFF/gpdb,cjcjameson/gpdb,adam8157/gpdb,rvs/gpdb,Quikling/gpdb,lisakowen/gpdb,kaknikhil/gpdb,zaksoup/gpdb,edespino/gpdb,edespino/gpdb,CraigHarris/gpdb,edespino/gpdb,royc1/gpdb,kaknikhil/gpdb,Quikling/gpdb,janebeckman/gpdb,Chibin/gpdb,yazun/postgres-xl,randomtask1155/gpdb,zaksoup/gpdb,foyzur/gpdb,Chibin/gpdb,ashwinstar/gpdb,xinzweb/gpdb,atris/gpdb,0x0FFF/gpdb,chrishajas/gpdb,yuanzhao/gpdb,jmcatamney/gpdb,postmind-net/postgres-xl,techdragon/Postgres-XL,0x0FFF/gpdb,greenplum-db/gpdb,arcivanov/postgres-xl,lisakowen/gpdb,Quikling/gpdb,Quikling/gpdb,xuegang/gpdb,zaksoup/gpdb,atris/gpdb,chrishajas/gpdb,zaksoup/gpdb,kaknikhil/gpdb,Chibin/gpdb,adam8157/gpdb,ovr/postgres-xl,snaga/postgres-xl,tangp3/gpdb,rvs/gpdb,royc1/gpdb,greenplum-db/gpdb,ahachete/gpdb,kaknikhil/gpdb,Chibin/gpdb,xinzweb/gpdb,cjcjameson/gpdb,yuanzhao/gpdb,atris/gpdb,rubikloud/gpdb,rubikloud/gpdb,randomtask1155/gpdb,Postgres-XL/Postgres-XL,janebeckman/gpdb,royc1/gpdb,lintzc/gpdb,foyzur/gpdb,50wu/gpdb,ahachete/gpdb,greenplum-db/gpdb,techdragon/Postgres-XL,rvs/gpdb
|
c
|
## Code Before:
extern pid_t fork_process(void);
#endif /* ! FORK_PROCESS_H */
## Instruction:
Add missing identification comment, remove entirely inappropriate include
of postgres.h.
## Code After:
/*-------------------------------------------------------------------------
*
* fork_process.h
* Exports from postmaster/fork_process.c.
*
* Copyright (c) 1996-2005, PostgreSQL Global Development Group
*
* $PostgreSQL: pgsql/src/include/postmaster/fork_process.h,v 1.2 2005/03/13 23:32:26 tgl Exp $
*
*-------------------------------------------------------------------------
*/
#ifndef FORK_PROCESS_H
#define FORK_PROCESS_H
extern pid_t fork_process(void);
#endif /* FORK_PROCESS_H */
|
...
/*-------------------------------------------------------------------------
*
* fork_process.h
* Exports from postmaster/fork_process.c.
*
* Copyright (c) 1996-2005, PostgreSQL Global Development Group
*
* $PostgreSQL: pgsql/src/include/postmaster/fork_process.h,v 1.2 2005/03/13 23:32:26 tgl Exp $
*
*-------------------------------------------------------------------------
*/
#ifndef FORK_PROCESS_H
#define FORK_PROCESS_H
extern pid_t fork_process(void);
#endif /* FORK_PROCESS_H */
...
|
b2570afb469266c6cc4e0aa31124c5fbf128f9ab
|
SeriesGuide/src/com/battlelancer/seriesguide/ui/ConnectTraktFragment.java
|
SeriesGuide/src/com/battlelancer/seriesguide/ui/ConnectTraktFragment.java
|
package com.battlelancer.seriesguide.ui;
import android.os.Bundle;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import com.actionbarsherlock.app.SherlockFragment;
import com.uwetrottmann.seriesguide.R;
/**
* Tells about trakt and how it integrates with SeriesGuide, allows to proceed
* to entering credentials step.
*/
public class ConnectTraktFragment extends SherlockFragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return inflater.inflate(R.layout.connect_trakt_fragment, container, false);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// connect button
getView().findViewById(R.id.buttonConnectTrakt).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
ConnectTraktCredentialsFragment f = ConnectTraktCredentialsFragment.newInstance();
FragmentTransaction ft = getFragmentManager().beginTransaction();
ft.replace(R.id.root_container, f);
ft.commit();
}
});
// discard button
getView().findViewById(R.id.buttonDiscard).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
getActivity().finish();
}
});
}
}
|
package com.battlelancer.seriesguide.ui;
import android.os.Bundle;
import android.support.v4.app.FragmentTransaction;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockFragment;
import com.uwetrottmann.seriesguide.R;
/**
* Tells about trakt and how it integrates with SeriesGuide, allows to proceed
* to entering credentials step.
*/
public class ConnectTraktFragment extends SherlockFragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return inflater.inflate(R.layout.connect_trakt_fragment, container, false);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// connect button
getView().findViewById(R.id.buttonConnectTrakt).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
ConnectTraktCredentialsFragment f = ConnectTraktCredentialsFragment.newInstance();
FragmentTransaction ft = getFragmentManager().beginTransaction();
ft.replace(R.id.root_container, f);
ft.commit();
}
});
// discard button
getView().findViewById(R.id.buttonDiscard).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
getActivity().finish();
}
});
// make learn more link clickable
((TextView) getView().findViewById(R.id.textViewAbout))
.setMovementMethod(LinkMovementMethod.getInstance());
}
}
|
Make learn more link clickable.
|
Make learn more link clickable.
|
Java
|
apache-2.0
|
UweTrottmann/SeriesGuide,epiphany27/SeriesGuide,0359xiaodong/SeriesGuide,r00t-user/SeriesGuide,artemnikitin/SeriesGuide,UweTrottmann/SeriesGuide,hoanganhx86/SeriesGuide
|
java
|
## Code Before:
package com.battlelancer.seriesguide.ui;
import android.os.Bundle;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import com.actionbarsherlock.app.SherlockFragment;
import com.uwetrottmann.seriesguide.R;
/**
* Tells about trakt and how it integrates with SeriesGuide, allows to proceed
* to entering credentials step.
*/
public class ConnectTraktFragment extends SherlockFragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return inflater.inflate(R.layout.connect_trakt_fragment, container, false);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// connect button
getView().findViewById(R.id.buttonConnectTrakt).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
ConnectTraktCredentialsFragment f = ConnectTraktCredentialsFragment.newInstance();
FragmentTransaction ft = getFragmentManager().beginTransaction();
ft.replace(R.id.root_container, f);
ft.commit();
}
});
// discard button
getView().findViewById(R.id.buttonDiscard).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
getActivity().finish();
}
});
}
}
## Instruction:
Make learn more link clickable.
## Code After:
package com.battlelancer.seriesguide.ui;
import android.os.Bundle;
import android.support.v4.app.FragmentTransaction;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockFragment;
import com.uwetrottmann.seriesguide.R;
/**
* Tells about trakt and how it integrates with SeriesGuide, allows to proceed
* to entering credentials step.
*/
public class ConnectTraktFragment extends SherlockFragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return inflater.inflate(R.layout.connect_trakt_fragment, container, false);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// connect button
getView().findViewById(R.id.buttonConnectTrakt).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
ConnectTraktCredentialsFragment f = ConnectTraktCredentialsFragment.newInstance();
FragmentTransaction ft = getFragmentManager().beginTransaction();
ft.replace(R.id.root_container, f);
ft.commit();
}
});
// discard button
getView().findViewById(R.id.buttonDiscard).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
getActivity().finish();
}
});
// make learn more link clickable
((TextView) getView().findViewById(R.id.textViewAbout))
.setMovementMethod(LinkMovementMethod.getInstance());
}
}
|
// ... existing code ...
import android.os.Bundle;
import android.support.v4.app.FragmentTransaction;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockFragment;
import com.uwetrottmann.seriesguide.R;
// ... modified code ...
getActivity().finish();
}
});
// make learn more link clickable
((TextView) getView().findViewById(R.id.textViewAbout))
.setMovementMethod(LinkMovementMethod.getInstance());
}
}
// ... rest of the code ...
|
591a40b6e1f4ac8b1d21050ccfa10779dc9dbf7c
|
analytic_code.py
|
analytic_code.py
|
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
Add a string to display the name of the Dimension field during import
|
Add a string to display the name of the Dimension field during import
|
Python
|
agpl-3.0
|
xcgd/analytic_structure
|
python
|
## Code Before:
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
## Instruction:
Add a string to display the name of the Dimension field during import
## Code After:
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
...
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
...
|
45cc1c817e4c6707650368e5e33f532b46483f36
|
src/main/java/no/cantara/ratpack/freemarker/FreemarkerModel.java
|
src/main/java/no/cantara/ratpack/freemarker/FreemarkerModel.java
|
package no.cantara.ratpack.freemarker;
import java.util.HashMap;
public class FreemarkerModel extends HashMap<String, Object> {
private final String template;
public FreemarkerModel(String template) {
this.template = template;
}
public FreemarkerModel(String template, HashMap<String, Object> map) {
this(template);
putAll(map);
}
public String getTemplate() {
return template;
}
}
|
package no.cantara.ratpack.freemarker;
import java.util.HashMap;
import java.util.Map;
public class FreemarkerModel extends HashMap<String, Object> {
private final String template;
public FreemarkerModel(String template) {
this.template = template;
}
public FreemarkerModel(String template, Map<String, Object> map) {
this(template);
putAll(map);
}
public String getTemplate() {
return template;
}
}
|
Use Map parameter type rather than Hashmap.
|
Use Map parameter type rather than Hashmap.
|
Java
|
apache-2.0
|
Cantara/ratpack-freemarker
|
java
|
## Code Before:
package no.cantara.ratpack.freemarker;
import java.util.HashMap;
public class FreemarkerModel extends HashMap<String, Object> {
private final String template;
public FreemarkerModel(String template) {
this.template = template;
}
public FreemarkerModel(String template, HashMap<String, Object> map) {
this(template);
putAll(map);
}
public String getTemplate() {
return template;
}
}
## Instruction:
Use Map parameter type rather than Hashmap.
## Code After:
package no.cantara.ratpack.freemarker;
import java.util.HashMap;
import java.util.Map;
public class FreemarkerModel extends HashMap<String, Object> {
private final String template;
public FreemarkerModel(String template) {
this.template = template;
}
public FreemarkerModel(String template, Map<String, Object> map) {
this(template);
putAll(map);
}
public String getTemplate() {
return template;
}
}
|
...
package no.cantara.ratpack.freemarker;
import java.util.HashMap;
import java.util.Map;
public class FreemarkerModel extends HashMap<String, Object> {
private final String template;
...
this.template = template;
}
public FreemarkerModel(String template, Map<String, Object> map) {
this(template);
putAll(map);
}
...
|
89984eb5e9e8cdb8420ff1da07c54ce0dd265629
|
tests/test_git_pre_commit_hook_utils.py
|
tests/test_git_pre_commit_hook_utils.py
|
import git_pre_commit_hook_utils as utils
def test_is_python_code_by_path():
file_at_index = utils.FileAtIndex(
contents='',
size=0,
mode='',
sha1='',
status='',
path='some/path/main.py',
)
assert file_at_index.is_python_code()
def test_is_python_code_by_contents():
file_at_index = utils.FileAtIndex(
contents='#!/usr/bin/env/python\nprint "hello"\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/python_script',
)
assert file_at_index.is_python_code()
def test_is_not_python_code():
file_at_index = utils.FileAtIndex(
contents='some text with python\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/not_python_script.cpp',
)
assert not file_at_index.is_python_code()
|
import git_pre_commit_hook_utils as utils
import scripttest
import os
import copy
def test_with_empty_repo(tmpdir):
os_environ = copy.deepcopy(os.environ)
os_environ['GIT_DIR'] = str(tmpdir)
os_environ['GIT_WORK_TREE'] = str(tmpdir)
env = scripttest.TestFileEnvironment(
str(tmpdir),
start_clear=False,
template_path='data',
environ=os_environ,
)
env.writefile('empty_file', content='')
env.run('git', 'init')
env.run('git', 'add', 'empty_file')
files_staged_for_commit = list(utils.files_staged_for_commit())
assert len(files_staged_for_commit) == 1
file_at_index = files_staged_for_commit[0]
assert file_at_index.path == 'empty_file'
assert file_at_index.contents == ''
assert file_at_index.size == 0
assert file_at_index.status == 'A'
def test_is_python_code_by_path():
file_at_index = utils.FileAtIndex(
contents='',
size=0,
mode='',
sha1='',
status='',
path='some/path/main.py',
)
assert file_at_index.is_python_code()
def test_is_python_code_by_contents():
file_at_index = utils.FileAtIndex(
contents='#!/usr/bin/env/python\nprint "hello"\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/python_script',
)
assert file_at_index.is_python_code()
def test_is_not_python_code():
file_at_index = utils.FileAtIndex(
contents='some text with python\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/not_python_script.cpp',
)
assert not file_at_index.is_python_code()
|
Add test for commit to empty repo case
|
Add test for commit to empty repo case
|
Python
|
mit
|
evvers/git-pre-commit-hook-utils
|
python
|
## Code Before:
import git_pre_commit_hook_utils as utils
def test_is_python_code_by_path():
file_at_index = utils.FileAtIndex(
contents='',
size=0,
mode='',
sha1='',
status='',
path='some/path/main.py',
)
assert file_at_index.is_python_code()
def test_is_python_code_by_contents():
file_at_index = utils.FileAtIndex(
contents='#!/usr/bin/env/python\nprint "hello"\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/python_script',
)
assert file_at_index.is_python_code()
def test_is_not_python_code():
file_at_index = utils.FileAtIndex(
contents='some text with python\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/not_python_script.cpp',
)
assert not file_at_index.is_python_code()
## Instruction:
Add test for commit to empty repo case
## Code After:
import git_pre_commit_hook_utils as utils
import scripttest
import os
import copy
def test_with_empty_repo(tmpdir):
os_environ = copy.deepcopy(os.environ)
os_environ['GIT_DIR'] = str(tmpdir)
os_environ['GIT_WORK_TREE'] = str(tmpdir)
env = scripttest.TestFileEnvironment(
str(tmpdir),
start_clear=False,
template_path='data',
environ=os_environ,
)
env.writefile('empty_file', content='')
env.run('git', 'init')
env.run('git', 'add', 'empty_file')
files_staged_for_commit = list(utils.files_staged_for_commit())
assert len(files_staged_for_commit) == 1
file_at_index = files_staged_for_commit[0]
assert file_at_index.path == 'empty_file'
assert file_at_index.contents == ''
assert file_at_index.size == 0
assert file_at_index.status == 'A'
def test_is_python_code_by_path():
file_at_index = utils.FileAtIndex(
contents='',
size=0,
mode='',
sha1='',
status='',
path='some/path/main.py',
)
assert file_at_index.is_python_code()
def test_is_python_code_by_contents():
file_at_index = utils.FileAtIndex(
contents='#!/usr/bin/env/python\nprint "hello"\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/python_script',
)
assert file_at_index.is_python_code()
def test_is_not_python_code():
file_at_index = utils.FileAtIndex(
contents='some text with python\n',
size=0,
mode='',
sha1='',
status='',
path='some/path/not_python_script.cpp',
)
assert not file_at_index.is_python_code()
|
...
import git_pre_commit_hook_utils as utils
import scripttest
import os
import copy
def test_with_empty_repo(tmpdir):
os_environ = copy.deepcopy(os.environ)
os_environ['GIT_DIR'] = str(tmpdir)
os_environ['GIT_WORK_TREE'] = str(tmpdir)
env = scripttest.TestFileEnvironment(
str(tmpdir),
start_clear=False,
template_path='data',
environ=os_environ,
)
env.writefile('empty_file', content='')
env.run('git', 'init')
env.run('git', 'add', 'empty_file')
files_staged_for_commit = list(utils.files_staged_for_commit())
assert len(files_staged_for_commit) == 1
file_at_index = files_staged_for_commit[0]
assert file_at_index.path == 'empty_file'
assert file_at_index.contents == ''
assert file_at_index.size == 0
assert file_at_index.status == 'A'
def test_is_python_code_by_path():
...
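The new test above exercises a real git index by pointing GIT_DIR and GIT_WORK_TREE at a temporary directory through scripttest. A minimal sketch of the same setup using only the standard library and the git CLI (paths and file names here are illustrative, and git is assumed to be on PATH):
import os
import subprocess
import tempfile

def staged_paths(repo_dir):
    # List the paths currently in the index, passing the repository via
    # GIT_DIR/GIT_WORK_TREE instead of changing the working directory.
    env = dict(os.environ,
               GIT_DIR=os.path.join(repo_dir, '.git'),
               GIT_WORK_TREE=repo_dir)
    out = subprocess.check_output(['git', 'ls-files', '--cached'], env=env)
    return out.decode().split()

repo = tempfile.mkdtemp()
subprocess.check_call(['git', 'init', repo])
open(os.path.join(repo, 'empty_file'), 'w').close()
subprocess.check_call(['git', '-C', repo, 'add', 'empty_file'])
print(staged_paths(repo))  # expected: ['empty_file']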
|
305849d57cc6897c65b4e0996f70a21f1d873d25
|
awp/main.py
|
awp/main.py
|
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
|
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
Clarify where packager.json validation error originates
|
Clarify where packager.json validation error originates
|
Python
|
mit
|
caleb531/alfred-workflow-packager
|
python
|
## Code Before:
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
## Instruction:
Clarify where packager.json validation error originates
## Code After:
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
...
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
...
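The one-line change above only prefixes the printed message with where the bad configuration came from. A dependency-free sketch of that pattern (the file name and prefix are made up for illustration):
import json

def load_config(path):
    try:
        with open(path, 'r') as config_file:
            return json.load(config_file)
    except ValueError as error:
        # Re-raise with the originating file named, so whoever prints the
        # message can tell which input produced it.
        raise ValueError('awp (from {0}): {1}'.format(path, error))

# With a malformed packager.json, the reported error now reads
# "awp (from packager.json): Expecting value: ..." instead of a bare message.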
|
4735804f4951835e4e3c7d116628344bddf45aa3
|
atomicpress/admin.py
|
atomicpress/admin.py
|
from flask import current_app
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin import AdminIndexView, expose, Admin
from flask_admin.contrib.sqla import ModelView
from atomicpress import models
from atomicpress.app import db
class HomeView(AdminIndexView):
@expose("/")
def index(self):
return self.render('admin/home.html')
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(ModelView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
admin.add_view(FileAdmin(app.config["UPLOADS_PATH"],
app.config["UPLOADS_URL"],
name='Upload files'))
|
from flask import current_app
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin import AdminIndexView, expose, Admin
from flask_admin.contrib.sqla import ModelView
from atomicpress import models
from atomicpress.app import db
class HomeView(AdminIndexView):
@expose("/")
def index(self):
return self.render('admin/home.html')
class PostView(ModelView):
column_default_sort = ('date', True)
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(PostView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
admin.add_view(FileAdmin(app.config["UPLOADS_PATH"],
app.config["UPLOADS_URL"],
name='Upload files'))
|
Update post view sorting (so latest comes first)
|
Update post view sorting (so latest comes first)
|
Python
|
mit
|
marteinn/AtomicPress,marteinn/AtomicPress,marteinn/AtomicPress,marteinn/AtomicPress
|
python
|
## Code Before:
from flask import current_app
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin import AdminIndexView, expose, Admin
from flask_admin.contrib.sqla import ModelView
from atomicpress import models
from atomicpress.app import db
class HomeView(AdminIndexView):
@expose("/")
def index(self):
return self.render('admin/home.html')
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(ModelView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
admin.add_view(FileAdmin(app.config["UPLOADS_PATH"],
app.config["UPLOADS_URL"],
name='Upload files'))
## Instruction:
Update post view sorting (so latest comes first)
## Code After:
from flask import current_app
from flask_admin.contrib.fileadmin import FileAdmin
from flask_admin import AdminIndexView, expose, Admin
from flask_admin.contrib.sqla import ModelView
from atomicpress import models
from atomicpress.app import db
class HomeView(AdminIndexView):
@expose("/")
def index(self):
return self.render('admin/home.html')
class PostView(ModelView):
column_default_sort = ('date', True)
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(PostView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
admin.add_view(FileAdmin(app.config["UPLOADS_PATH"],
app.config["UPLOADS_URL"],
name='Upload files'))
|
# ... existing code ...
return self.render('admin/home.html')
class PostView(ModelView):
column_default_sort = ('date', True)
def create_admin():
app = current_app._get_current_object()
admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home'))
# ... modified code ...
admin.add_view(ModelView(models.Blog, db.session, category="Blog"))
admin.add_view(ModelView(models.Author, db.session, category="Blog"))
admin.add_view(PostView(models.Post, db.session, category="Post"))
admin.add_view(ModelView(models.Tag, db.session, category="Post"))
admin.add_view(ModelView(models.Category, db.session, category="Post"))
# ... rest of the code ...
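column_default_sort is the Flask-Admin hook for the initial ordering of a model list view; the tuple form is (column_name, sort_descending). A minimal sketch of the subclass, assuming Flask-Admin with its SQLAlchemy backend is installed (the view and column names are illustrative):
from flask_admin.contrib.sqla import ModelView

class NewestFirstView(ModelView):
    # Sort the list view by the "date" column, descending (True reverses
    # the order), so the most recent rows are shown first.
    column_default_sort = ('date', True)
Registering it then works exactly like the plain ModelView, e.g. admin.add_view(NewestFirstView(Post, db.session)).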
|
f531a092e9c87f29d15388da0d0088e379bd567e
|
jaq-benchmarks/src/main/java/io/jaq/spsc/SPSCQueueFactory.java
|
jaq-benchmarks/src/main/java/io/jaq/spsc/SPSCQueueFactory.java
|
package io.jaq.spsc;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedTransferQueue;
public class SPSCQueueFactory {
public static final int QUEUE_CAPACITY = 1 << Integer.getInteger("pow2.capacity", 15);
public static Queue<Integer> createQueue() {
int type = Integer.getInteger("q.type", 0);
switch (type) {
case -2:
return new LinkedTransferQueue<Integer>();
case -1:
return new ConcurrentLinkedQueue<Integer>();
case 0:
return new InlinedCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
case 1:
return new BQueue<Integer>(QUEUE_CAPACITY);
case 2:
return new FFBuffer<Integer>(QUEUE_CAPACITY);
case 3:
return new FFBufferWithOfferBatch<Integer>(QUEUE_CAPACITY);
case 4:
return new FloatingCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
}
throw new IllegalArgumentException("Type: " + type);
}
}
|
package io.jaq.spsc;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedTransferQueue;
public class SPSCQueueFactory {
public static final int QUEUE_CAPACITY = 1 << Integer.getInteger("pow2.capacity", 15);
public static final int QUEUE_TYPE = Integer.getInteger("q.type", 0);
public static Queue<Integer> createQueue() {
switch (QUEUE_TYPE) {
case -2:
return new LinkedTransferQueue<Integer>();
case -1:
return new ConcurrentLinkedQueue<Integer>();
case 0:
return new InlinedCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
case 1:
return new BQueue<Integer>(QUEUE_CAPACITY);
case 2:
return new FFBuffer<Integer>(QUEUE_CAPACITY);
case 3:
return new FFBufferWithOfferBatch<Integer>(QUEUE_CAPACITY);
case 4:
return new FloatingCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
}
throw new IllegalArgumentException("Type: " + QUEUE_TYPE);
}
}
|
Make queue type a constant
|
Make queue type a constant
|
Java
|
apache-2.0
|
thomasdarimont/JCTools,fengjiachun/JCTools,mackstone/JCTools,franz1981/JCTools,JCTools/JCTools,akarnokd/JCTools
|
java
|
## Code Before:
package io.jaq.spsc;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedTransferQueue;
public class SPSCQueueFactory {
public static final int QUEUE_CAPACITY = 1 << Integer.getInteger("pow2.capacity", 15);
public static Queue<Integer> createQueue() {
int type = Integer.getInteger("q.type", 0);
switch (type) {
case -2:
return new LinkedTransferQueue<Integer>();
case -1:
return new ConcurrentLinkedQueue<Integer>();
case 0:
return new InlinedCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
case 1:
return new BQueue<Integer>(QUEUE_CAPACITY);
case 2:
return new FFBuffer<Integer>(QUEUE_CAPACITY);
case 3:
return new FFBufferWithOfferBatch<Integer>(QUEUE_CAPACITY);
case 4:
return new FloatingCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
}
throw new IllegalArgumentException("Type: " + type);
}
}
## Instruction:
Make queue type a constant
## Code After:
package io.jaq.spsc;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedTransferQueue;
public class SPSCQueueFactory {
public static final int QUEUE_CAPACITY = 1 << Integer.getInteger("pow2.capacity", 15);
public static final int QUEUE_TYPE = Integer.getInteger("q.type", 0);
public static Queue<Integer> createQueue() {
switch (QUEUE_TYPE) {
case -2:
return new LinkedTransferQueue<Integer>();
case -1:
return new ConcurrentLinkedQueue<Integer>();
case 0:
return new InlinedCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
case 1:
return new BQueue<Integer>(QUEUE_CAPACITY);
case 2:
return new FFBuffer<Integer>(QUEUE_CAPACITY);
case 3:
return new FFBufferWithOfferBatch<Integer>(QUEUE_CAPACITY);
case 4:
return new FloatingCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
}
throw new IllegalArgumentException("Type: " + QUEUE_TYPE);
}
}
|
// ... existing code ...
public class SPSCQueueFactory {
public static final int QUEUE_CAPACITY = 1 << Integer.getInteger("pow2.capacity", 15);
public static final int QUEUE_TYPE = Integer.getInteger("q.type", 0);
public static Queue<Integer> createQueue() {
switch (QUEUE_TYPE) {
case -2:
return new LinkedTransferQueue<Integer>();
case -1:
// ... modified code ...
case 4:
return new FloatingCountersSpscConcurrentArrayQueue<Integer>(QUEUE_CAPACITY);
}
throw new IllegalArgumentException("Type: " + QUEUE_TYPE);
}
}
// ... rest of the code ...
|
79e23159c308a69896c464eda13c043dbbc8086e
|
thezombies/management/commands/validate_all_data_catalogs.py
|
thezombies/management/commands/validate_all_data_catalogs.py
|
from django.core.management.base import NoArgsCommand
from thezombies.tasks.main import validate_data_catalogs
class Command(NoArgsCommand):
"""Validate all of the agency data catalogs"""
def handle_noargs(self):
validator_group = validate_data_catalogs.delay()
self.stdout.write(u"\nSpawned data catalog task group: {0}\n".format(validator_group.id))
|
from django.core.management.base import NoArgsCommand
from thezombies.tasks.main import validate_data_catalogs
class Command(NoArgsCommand):
"""Validate all of the agency data catalogs"""
def handle_noargs(self, **options):
validator_group = validate_data_catalogs.delay()
self.stdout.write(u"\nSpawned data catalog task group: {0}\n".format(validator_group.id))
|
Fix options on NoArgsCommand. Huh.
|
Fix options on NoArgsCommand. Huh.
|
Python
|
bsd-3-clause
|
sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies
|
python
|
## Code Before:
from django.core.management.base import NoArgsCommand
from thezombies.tasks.main import validate_data_catalogs
class Command(NoArgsCommand):
"""Validate all of the agency data catalogs"""
def handle_noargs(self):
validator_group = validate_data_catalogs.delay()
self.stdout.write(u"\nSpawned data catalog task group: {0}\n".format(validator_group.id))
## Instruction:
Fix options on NoArgsCommand. Huh.
## Code After:
from django.core.management.base import NoArgsCommand
from thezombies.tasks.main import validate_data_catalogs
class Command(NoArgsCommand):
"""Validate all of the agency data catalogs"""
def handle_noargs(self, **options):
validator_group = validate_data_catalogs.delay()
self.stdout.write(u"\nSpawned data catalog task group: {0}\n".format(validator_group.id))
|
// ... existing code ...
class Command(NoArgsCommand):
"""Validate all of the agency data catalogs"""
def handle_noargs(self, **options):
validator_group = validate_data_catalogs.delay()
self.stdout.write(u"\nSpawned data catalog task group: {0}\n".format(validator_group.id))
// ... rest of the code ...
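The bug being fixed is a plain signature mismatch: the base command forwards parsed options as keyword arguments, so an override that accepts none raises TypeError. A dependency-free sketch of the mechanism (class and option names are invented):
class BaseCommand(object):
    def execute(self, **options):
        # Mirrors how Django's NoArgsCommand forwards parsed options.
        return self.handle_noargs(**options)

class BrokenCommand(BaseCommand):
    def handle_noargs(self):  # no **options: fails once options are passed
        return 'ran'

class FixedCommand(BaseCommand):
    def handle_noargs(self, **options):
        return 'ran with {0}'.format(sorted(options))

print(FixedCommand().execute(verbosity=1))   # works
try:
    BrokenCommand().execute(verbosity=1)
except TypeError as error:
    print('broken override: {0}'.format(error))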
|
033c16034074d4fd6eab054a9c97888d23668316
|
tests/test_empty_polygons.py
|
tests/test_empty_polygons.py
|
from shapely.geometry import MultiPolygon, Polygon
def test_empty_polygon():
"""No constructor arg makes an empty polygon geometry."""
assert Polygon().is_empty
def test_empty_multipolygon():
"""No constructor arg makes an empty multipolygon geometry."""
assert MultiPolygon().is_empty
def test_multipolygon_empty_polygon():
"""An empty polygon passed to MultiPolygon() makes an empty
multipolygon geometry."""
assert MultiPolygon([Polygon()]).is_empty
|
from shapely.geometry import MultiPolygon, Point, Polygon
def test_empty_polygon():
"""No constructor arg makes an empty polygon geometry."""
assert Polygon().is_empty
def test_empty_multipolygon():
"""No constructor arg makes an empty multipolygon geometry."""
assert MultiPolygon().is_empty
def test_multipolygon_empty_polygon():
"""An empty polygon passed to MultiPolygon() makes an empty
multipolygon geometry."""
assert MultiPolygon([Polygon()]).is_empty
def test_multipolygon_empty_among_polygon():
"""An empty polygon passed to MultiPolygon() is ignored."""
assert len(MultiPolygon([Point(0,0).buffer(1.0), Polygon()])) == 1
|
Add test of an empty and non empty polygon
|
Add test of an empty and non empty polygon
|
Python
|
bsd-3-clause
|
jdmcbr/Shapely,jdmcbr/Shapely
|
python
|
## Code Before:
from shapely.geometry import MultiPolygon, Polygon
def test_empty_polygon():
"""No constructor arg makes an empty polygon geometry."""
assert Polygon().is_empty
def test_empty_multipolygon():
"""No constructor arg makes an empty multipolygon geometry."""
assert MultiPolygon().is_empty
def test_multipolygon_empty_polygon():
"""An empty polygon passed to MultiPolygon() makes an empty
multipolygon geometry."""
assert MultiPolygon([Polygon()]).is_empty
## Instruction:
Add test of an empty and non empty polygon
## Code After:
from shapely.geometry import MultiPolygon, Point, Polygon
def test_empty_polygon():
"""No constructor arg makes an empty polygon geometry."""
assert Polygon().is_empty
def test_empty_multipolygon():
"""No constructor arg makes an empty multipolygon geometry."""
assert MultiPolygon().is_empty
def test_multipolygon_empty_polygon():
"""An empty polygon passed to MultiPolygon() makes an empty
multipolygon geometry."""
assert MultiPolygon([Polygon()]).is_empty
def test_multipolygon_empty_among_polygon():
"""An empty polygon passed to MultiPolygon() is ignored."""
assert len(MultiPolygon([Point(0,0).buffer(1.0), Polygon()])) == 1
|
...
from shapely.geometry import MultiPolygon, Point, Polygon
def test_empty_polygon():
...
"""An empty polygon passed to MultiPolygon() makes an empty
multipolygon geometry."""
assert MultiPolygon([Polygon()]).is_empty
def test_multipolygon_empty_among_polygon():
"""An empty polygon passed to MultiPolygon() is ignored."""
assert len(MultiPolygon([Point(0,0).buffer(1.0), Polygon()])) == 1
...
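The new assertion relies on MultiPolygon() silently dropping empty members. When that behaviour cannot be assumed, filtering explicitly gives the same result; a short sketch, assuming Shapely is installed:
from shapely.geometry import MultiPolygon, Point, Polygon

parts = [Point(0, 0).buffer(1.0), Polygon()]
# Keep only non-empty geometries before building the collection.
multi = MultiPolygon([p for p in parts if not p.is_empty])
print(len(multi.geoms))  # 1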
|
6ac45b405fd7f929d73eb03395f713d8667b2941
|
include/nstl.h
|
include/nstl.h
|
/*!
* Master header of the nstl library.
*
* @author Louis Dionne
*/
#ifndef NSTL_H
#define NSTL_H
#include <nstl/operator.h>
#include <nstl/type.h>
#include <nstl/primitive.h>
#include <nstl/pair.h>
#include <nstl/algorithm.h>
#include <nstl/vector.h>
#endif /* !NSTL_H */
|
/*!
* Master header of the nstl library.
*
* @author Louis Dionne
*/
#ifndef NSTL_H
#define NSTL_H
#include <nstl/algorithm.h>
#include <nstl/operator.h>
#include <nstl/pair.h>
#include <nstl/primitive.h>
#include <nstl/type.h>
#include <nstl/vector.h>
#endif /* !NSTL_H */
|
Reorder the includes of the master include in alphabetical order.
|
Reorder the includes of the master include in alphabetical order.
|
C
|
mit
|
ldionne/nstl,ldionne/nstl
|
c
|
## Code Before:
/*!
* Master header of the nstl library.
*
* @author Louis Dionne
*/
#ifndef NSTL_H
#define NSTL_H
#include <nstl/operator.h>
#include <nstl/type.h>
#include <nstl/primitive.h>
#include <nstl/pair.h>
#include <nstl/algorithm.h>
#include <nstl/vector.h>
#endif /* !NSTL_H */
## Instruction:
Reorder the includes of the master include in alphabetical order.
## Code After:
/*!
* Master header of the nstl library.
*
* @author Louis Dionne
*/
#ifndef NSTL_H
#define NSTL_H
#include <nstl/algorithm.h>
#include <nstl/operator.h>
#include <nstl/pair.h>
#include <nstl/primitive.h>
#include <nstl/type.h>
#include <nstl/vector.h>
#endif /* !NSTL_H */
|
// ... existing code ...
#ifndef NSTL_H
#define NSTL_H
#include <nstl/algorithm.h>
#include <nstl/operator.h>
#include <nstl/pair.h>
#include <nstl/primitive.h>
#include <nstl/type.h>
#include <nstl/vector.h>
#endif /* !NSTL_H */
// ... rest of the code ...
|
c43ddf1f36535604167e496508d242a15c813496
|
roamer/main.py
|
roamer/main.py
|
import os
from roamer.python_edit import file_editor
from roamer.directory import Directory
from roamer.edit_directory import EditDirectory
from roamer.engine import Engine
from roamer.record import Record
from roamer.constant import TRASH_DIR
def main():
"""
argh
"""
if not os.path.exists(TRASH_DIR):
os.makedirs(TRASH_DIR)
cwd = os.getcwd()
raw_entries = os.listdir(cwd)
directory = Directory(cwd, raw_entries)
output = file_editor(directory.text())
edit_directory = EditDirectory(cwd, output)
engine = Engine(directory, edit_directory)
print engine.print_commands()
engine.run_commands()
Record().add_dir(Directory(cwd, os.listdir(cwd)))
if __name__ == "__main__":
main()
|
import os
from roamer.python_edit import file_editor
from roamer.directory import Directory
from roamer.edit_directory import EditDirectory
from roamer.engine import Engine
from roamer.record import Record
from roamer.constant import TRASH_DIR
def main():
"""
argh
"""
if not os.path.exists(TRASH_DIR):
os.makedirs(TRASH_DIR)
cwd = os.getcwd()
raw_entries = os.listdir(cwd)
directory = Directory(cwd, raw_entries)
Record().add_dir(directory)
output = file_editor(directory.text())
edit_directory = EditDirectory(cwd, output)
engine = Engine(directory, edit_directory)
print engine.print_commands()
engine.run_commands()
Record().add_dir(Directory(cwd, os.listdir(cwd)))
if __name__ == "__main__":
main()
|
Fix references not available after pulling up two instances of roamer
|
Fix references not available after pulling up two instances of roamer
|
Python
|
mit
|
abaldwin88/roamer
|
python
|
## Code Before:
import os
from roamer.python_edit import file_editor
from roamer.directory import Directory
from roamer.edit_directory import EditDirectory
from roamer.engine import Engine
from roamer.record import Record
from roamer.constant import TRASH_DIR
def main():
"""
argh
"""
if not os.path.exists(TRASH_DIR):
os.makedirs(TRASH_DIR)
cwd = os.getcwd()
raw_entries = os.listdir(cwd)
directory = Directory(cwd, raw_entries)
output = file_editor(directory.text())
edit_directory = EditDirectory(cwd, output)
engine = Engine(directory, edit_directory)
print engine.print_commands()
engine.run_commands()
Record().add_dir(Directory(cwd, os.listdir(cwd)))
if __name__ == "__main__":
main()
## Instruction:
Fix references not available after pulling up two instances of roamer
## Code After:
import os
from roamer.python_edit import file_editor
from roamer.directory import Directory
from roamer.edit_directory import EditDirectory
from roamer.engine import Engine
from roamer.record import Record
from roamer.constant import TRASH_DIR
def main():
"""
argh
"""
if not os.path.exists(TRASH_DIR):
os.makedirs(TRASH_DIR)
cwd = os.getcwd()
raw_entries = os.listdir(cwd)
directory = Directory(cwd, raw_entries)
Record().add_dir(directory)
output = file_editor(directory.text())
edit_directory = EditDirectory(cwd, output)
engine = Engine(directory, edit_directory)
print engine.print_commands()
engine.run_commands()
Record().add_dir(Directory(cwd, os.listdir(cwd)))
if __name__ == "__main__":
main()
|
// ... existing code ...
raw_entries = os.listdir(cwd)
directory = Directory(cwd, raw_entries)
Record().add_dir(directory)
output = file_editor(directory.text())
edit_directory = EditDirectory(cwd, output)
engine = Engine(directory, edit_directory)
// ... rest of the code ...
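The fix snapshots the directory into the Record before the editor opens, so entries seen at edit time stay resolvable even if another roamer instance changes the directory meanwhile. A stripped-down, dependency-free sketch of that snapshot-before-and-after idea (names are illustrative):
import os
import tempfile

history = []  # stands in for roamer's Record

def snapshot(path):
    # Freeze the entries visible right now; later code works from this
    # list instead of re-reading the directory.
    entries = sorted(os.listdir(path))
    history.append(entries)
    return entries

workdir = tempfile.mkdtemp()
open(os.path.join(workdir, 'a.txt'), 'w').close()
before = snapshot(workdir)                   # taken before any editing
os.remove(os.path.join(workdir, 'a.txt'))    # something else changes the dir
after = snapshot(workdir)                    # taken after
print(before)  # ['a.txt'] - still available from the first snapshot
print(after)   # []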
|
a33ce18a57b3e0889c9b9fd1003e847cd590ed4c
|
app/src/main/java/net/squanchy/support/time/DateTimeFormatter.kt
|
app/src/main/java/net/squanchy/support/time/DateTimeFormatter.kt
|
package net.squanchy.support.time
import org.threeten.bp.format.DateTimeFormatter
fun createShortTimeFormatter(): DateTimeFormatter = DateTimeFormatter.ofPattern("h:mm a")
fun createShortDateFormatter(): DateTimeFormatter = DateTimeFormatter.ofPattern("MM/dd/yyyy")
fun createWeekDayAndDayFormatter(): DateTimeFormatter = DateTimeFormatter.ofPattern("EEE d")
|
package net.squanchy.support.time
import org.threeten.bp.format.DateTimeFormatter
import java.util.Locale
fun createShortTimeFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("h:mm a").withLocale(locale)
fun createShortDateFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("MM/dd/yyyy").withLocale(locale)
fun createWeekDayAndDayFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("EEE d").withLocale(locale)
|
Add locale to formatter creator functions
|
Add locale to formatter creator functions
|
Kotlin
|
apache-2.0
|
squanchy-dev/squanchy-android,squanchy-dev/squanchy-android,squanchy-dev/squanchy-android
|
kotlin
|
## Code Before:
package net.squanchy.support.time
import org.threeten.bp.format.DateTimeFormatter
fun createShortTimeFormatter(): DateTimeFormatter = DateTimeFormatter.ofPattern("h:mm a")
fun createShortDateFormatter(): DateTimeFormatter = DateTimeFormatter.ofPattern("MM/dd/yyyy")
fun createWeekDayAndDayFormatter(): DateTimeFormatter = DateTimeFormatter.ofPattern("EEE d")
## Instruction:
Add locale to formatter creator functions
## Code After:
package net.squanchy.support.time
import org.threeten.bp.format.DateTimeFormatter
import java.util.Locale
fun createShortTimeFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("h:mm a").withLocale(locale)
fun createShortDateFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("MM/dd/yyyy").withLocale(locale)
fun createWeekDayAndDayFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("EEE d").withLocale(locale)
|
...
package net.squanchy.support.time
import org.threeten.bp.format.DateTimeFormatter
import java.util.Locale
fun createShortTimeFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("h:mm a").withLocale(locale)
fun createShortDateFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("MM/dd/yyyy").withLocale(locale)
fun createWeekDayAndDayFormatter(locale: Locale = Locale.getDefault()): DateTimeFormatter =
DateTimeFormatter.ofPattern("EEE d").withLocale(locale)
...
|
4e0c34ec4807465e36aee075a8626a80ec0d9492
|
plugin/src/main/groovy/com/github/gfx/ribbonizer/plugin/Ribbonizer.java
|
plugin/src/main/groovy/com/github/gfx/ribbonizer/plugin/Ribbonizer.java
|
package com.github.gfx.ribbonizer.plugin;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.function.Consumer;
import java.util.stream.Stream;
import javax.imageio.ImageIO;
public class Ribbonizer {
final File inputFile;
final File outputFile;
final BufferedImage image;
public Ribbonizer(File inputFile, File outputFile) throws IOException {
this.inputFile = inputFile;
this.outputFile = outputFile;
image = ImageIO.read(inputFile);
}
public void save() throws IOException {
outputFile.getParentFile().mkdirs();
ImageIO.write(image, "png", outputFile);
}
public void process(Stream<Consumer<BufferedImage>> filters) {
filters.forEach(new Consumer<Consumer<BufferedImage>>() {
@Override
public void accept(Consumer<BufferedImage> filter) {
filter.accept(image);
}
});
}
}
|
package com.github.gfx.ribbonizer.plugin;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.function.Consumer;
import java.util.stream.Stream;
import javax.imageio.ImageIO;
public class Ribbonizer {
final File inputFile;
final File outputFile;
final BufferedImage image;
public Ribbonizer(File inputFile, File outputFile) throws IOException {
this.inputFile = inputFile;
this.outputFile = outputFile;
image = ImageIO.read(inputFile);
}
public void save() throws IOException {
outputFile.getParentFile().mkdirs();
ImageIO.write(image, "png", outputFile);
}
public void process(Stream<Consumer<BufferedImage>> filters) {
filters.forEach(new Consumer<Consumer<BufferedImage>>() {
@Override
public void accept(Consumer<BufferedImage> filter) {
if (filter != null) {
filter.accept(image);
}
}
});
}
}
|
Allow a null filter to skip ribbonizing
|
Allow a null filter to skip ribbonizing
|
Java
|
mit
|
gfx/gradle-android-ribbonizer-plugin
|
java
|
## Code Before:
package com.github.gfx.ribbonizer.plugin;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.function.Consumer;
import java.util.stream.Stream;
import javax.imageio.ImageIO;
public class Ribbonizer {
final File inputFile;
final File outputFile;
final BufferedImage image;
public Ribbonizer(File inputFile, File outputFile) throws IOException {
this.inputFile = inputFile;
this.outputFile = outputFile;
image = ImageIO.read(inputFile);
}
public void save() throws IOException {
outputFile.getParentFile().mkdirs();
ImageIO.write(image, "png", outputFile);
}
public void process(Stream<Consumer<BufferedImage>> filters) {
filters.forEach(new Consumer<Consumer<BufferedImage>>() {
@Override
public void accept(Consumer<BufferedImage> filter) {
filter.accept(image);
}
});
}
}
## Instruction:
Allow a null filter to skip ribbonizing
## Code After:
package com.github.gfx.ribbonizer.plugin;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.function.Consumer;
import java.util.stream.Stream;
import javax.imageio.ImageIO;
public class Ribbonizer {
final File inputFile;
final File outputFile;
final BufferedImage image;
public Ribbonizer(File inputFile, File outputFile) throws IOException {
this.inputFile = inputFile;
this.outputFile = outputFile;
image = ImageIO.read(inputFile);
}
public void save() throws IOException {
outputFile.getParentFile().mkdirs();
ImageIO.write(image, "png", outputFile);
}
public void process(Stream<Consumer<BufferedImage>> filters) {
filters.forEach(new Consumer<Consumer<BufferedImage>>() {
@Override
public void accept(Consumer<BufferedImage> filter) {
if (filter != null) {
filter.accept(image);
}
}
});
}
}
|
# ... existing code ...
filters.forEach(new Consumer<Consumer<BufferedImage>>() {
@Override
public void accept(Consumer<BufferedImage> filter) {
if (filter != null) {
filter.accept(image);
}
}
});
}
# ... rest of the code ...
|
36307802a45f94cb218ce9bbe4a4abc7704a973a
|
graphics/savefig.py
|
graphics/savefig.py
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
|
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
Save figures with white space cropped out
|
Save figures with white space cropped out
|
Python
|
mit
|
joelfrederico/SciSalt
|
python
|
## Code Before:
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,**kwargs)
else:
plt.savefig(final_filename,**kwargs)
## Instruction:
Save figures with white space cropped out
## Code After:
import os
import matplotlib.pyplot as plt
def savefig(filename,path="figs",fig=None,ext='eps',**kwargs):
# try:
# os.remove(path)
# except OSError as e:
# try:
# os.mkdir(path)
# except:
# pass
if not os.path.exists(path):
os.makedirs(path)
filename = ''.join([path,'/',filename])
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
|
...
final_filename = '{}.{}'.format(filename,ext).replace(" ","").replace("\n","")
if fig != None:
fig.savefig(final_filename,bbox_inches='tight',**kwargs)
else:
plt.savefig(final_filename,bbox_inches='tight',**kwargs)
...
|
592a2c778bf7c87b7aad6f9ba14c1ba83da033e8
|
scoring_engine/web/views/services.py
|
scoring_engine/web/views/services.py
|
from flask import Blueprint, render_template, flash
from flask_login import login_required, current_user
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
flash('Only blue teams can access services', 'error')
return render_template('overview.html')
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
return render_template('service.html', service=id)
|
from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
service = Service.query.get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', service=service)
|
Add unauthorize to service template
|
Add unauthorize to service template
Signed-off-by: Brandon Myers <[email protected]>
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
python
|
## Code Before:
from flask import Blueprint, render_template, flash
from flask_login import login_required, current_user
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
flash('Only blue teams can access services', 'error')
return render_template('overview.html')
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
return render_template('service.html', service=id)
## Instruction:
Add unauthorize to service template
Signed-off-by: Brandon Myers <[email protected]>
## Code After:
from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
service = Service.query.get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', service=service)
|
// ... existing code ...
from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
// ... modified code ...
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
...
@mod.route('/service/<id>')
@login_required
def service(id):
service = Service.query.get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', service=service)
// ... rest of the code ...
|
df02d190fd8b6e6a483f95d376df562a99bea4f5
|
h2o-core/src/test/java/water/runner/CleanNewKeysTask.java
|
h2o-core/src/test/java/water/runner/CleanNewKeysTask.java
|
package water.runner;
import org.junit.Ignore;
import water.*;
import java.util.Set;
@Ignore
public class CleanNewKeysTask extends KeysMRTask<CleanNewKeysTask> {
@Override
protected void setupLocal() {
final Set<Key> initKeys = LocalTestRuntime.initKeys;
final Set<Key> actualKeys = H2O.localKeySet();
for (Key actualKey : actualKeys){
final Value value = Value.STORE_get(actualKey);
if (initKeys.contains(actualKey) || isIgnorableKeyLeak(actualKey, value)) continue;
if (!(value.get() instanceof Keyed)) {
            // Keyed objects might override remove_impl to exercise their own removal strategy
// Non-keyed objects should just be removed from the DKV
DKV.remove(actualKey);
} else {
actualKey.remove();
}
}
}
}
|
package water.runner;
import org.junit.Ignore;
import water.*;
import java.util.Set;
@Ignore
public class CleanNewKeysTask extends KeysMRTask<CleanNewKeysTask> {
@Override
protected void setupLocal() {
DKVManager.retain(LocalTestRuntime.initKeys.toArray(new Key[0]));
}
}
|
Use DKVManager to clean newly added keys.
|
Use DKVManager to clean newly added keys.
|
Java
|
apache-2.0
|
h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3
|
java
|
## Code Before:
package water.runner;
import org.junit.Ignore;
import water.*;
import java.util.Set;
@Ignore
public class CleanNewKeysTask extends KeysMRTask<CleanNewKeysTask> {
@Override
protected void setupLocal() {
final Set<Key> initKeys = LocalTestRuntime.initKeys;
final Set<Key> actualKeys = H2O.localKeySet();
for (Key actualKey : actualKeys){
final Value value = Value.STORE_get(actualKey);
if (initKeys.contains(actualKey) || isIgnorableKeyLeak(actualKey, value)) continue;
if (!(value.get() instanceof Keyed)) {
// Keyed objects might override remove_impl to exercise their own removal strategy
// Non-keyed objects should just be removed from the DKV
DKV.remove(actualKey);
} else {
actualKey.remove();
}
}
}
}
## Instruction:
Use DKVManager to clean newly added keys.
## Code After:
package water.runner;
import org.junit.Ignore;
import water.*;
import java.util.Set;
@Ignore
public class CleanNewKeysTask extends KeysMRTask<CleanNewKeysTask> {
@Override
protected void setupLocal() {
DKVManager.retain(LocalTestRuntime.initKeys.toArray(new Key[0]));
}
}
|
# ... existing code ...
@Override
protected void setupLocal() {
DKVManager.retain(LocalTestRuntime.initKeys.toArray(new Key[0]));
}
}
# ... rest of the code ...
|
a46b7fabdb79de949d0d795cf2ec0accc2a34a4b
|
Ndapi/Ndapi.h
|
Ndapi/Ndapi.h
|
using namespace System;
using namespace System::Runtime::InteropServices;
namespace Ndapi
{
[Serializable]
public ref class NdapiException : public Exception
{
private:
long _status;
public:
property long Status
{
long get()
{
return _status;
}
};
public protected:
NdapiException() : Exception() {}
NdapiException(String^ message) : Exception(message) {}
NdapiException(String^ message, Exception^ inner) : Exception(message, inner) {}
NdapiException(String^ message, long status) : Exception(message)
{
_status = status;
}
};
template<class T>
class NativeString
{
private:
T* value;
NativeString(const NativeString&);
NativeString& operator = (const NativeString&);
public:
NativeString(String^ s);
~NativeString() { Marshal::FreeHGlobal(IntPtr(value)); }
operator T* () { return value; }
};
}
|
using namespace System;
using namespace System::Runtime::InteropServices;
namespace Ndapi
{
[Serializable]
public ref class NdapiException : public Exception
{
private:
long _status;
public:
property long Status
{
long get()
{
return _status;
}
};
public protected:
NdapiException() : Exception() {}
NdapiException(String^ message) : Exception(message) {}
NdapiException(String^ message, Exception^ inner) : Exception(message, inner) {}
NdapiException(String^ message, long status) : Exception(message)
{
_status = status;
}
};
template<class T>
class NativeString
{
private:
T* value;
public:
NativeString(String^ s);
NativeString(const NativeString&) = delete;
NativeString& operator = (const NativeString&) = delete;
~NativeString() { Marshal::FreeHGlobal(IntPtr(value)); }
operator T* () { return value; }
};
}
|
Use =delete to disallow copy
|
Use =delete to disallow copy
|
C
|
mit
|
felipebz/ndapi
|
c
|
## Code Before:
using namespace System;
using namespace System::Runtime::InteropServices;
namespace Ndapi
{
[Serializable]
public ref class NdapiException : public Exception
{
private:
long _status;
public:
property long Status
{
long get()
{
return _status;
}
};
public protected:
NdapiException() : Exception() {}
NdapiException(String^ message) : Exception(message) {}
NdapiException(String^ message, Exception^ inner) : Exception(message, inner) {}
NdapiException(String^ message, long status) : Exception(message)
{
_status = status;
}
};
template<class T>
class NativeString
{
private:
T* value;
NativeString(const NativeString&);
NativeString& operator = (const NativeString&);
public:
NativeString(String^ s);
~NativeString() { Marshal::FreeHGlobal(IntPtr(value)); }
operator T* () { return value; }
};
}
## Instruction:
Use =delete to disallow copy
## Code After:
using namespace System;
using namespace System::Runtime::InteropServices;
namespace Ndapi
{
[Serializable]
public ref class NdapiException : public Exception
{
private:
long _status;
public:
property long Status
{
long get()
{
return _status;
}
};
public protected:
NdapiException() : Exception() {}
NdapiException(String^ message) : Exception(message) {}
NdapiException(String^ message, Exception^ inner) : Exception(message, inner) {}
NdapiException(String^ message, long status) : Exception(message)
{
_status = status;
}
};
template<class T>
class NativeString
{
private:
T* value;
public:
NativeString(String^ s);
NativeString(const NativeString&) = delete;
NativeString& operator = (const NativeString&) = delete;
~NativeString() { Marshal::FreeHGlobal(IntPtr(value)); }
operator T* () { return value; }
};
}
|
...
private:
long _status;
public:
property long Status
{
long get()
...
{
private:
T* value;
public:
NativeString(String^ s);
NativeString(const NativeString&) = delete;
NativeString& operator = (const NativeString&) = delete;
~NativeString() { Marshal::FreeHGlobal(IntPtr(value)); }
operator T* () { return value; }
};
...
|
d68935dfb34f7c5fc463f94e49f0c060717b17b8
|
cmsplugin_contact_plus/checks.py
|
cmsplugin_contact_plus/checks.py
|
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
|
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
|
Comment out warning for renamed field
|
Comment out warning for renamed field
|
Python
|
bsd-3-clause
|
arteria/cmsplugin-contact-plus,arteria/cmsplugin-contact-plus,worthwhile/cmsplugin-remote-form,worthwhile/cmsplugin-remote-form
|
python
|
## Code Before:
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
## Instruction:
Comment out warning for renamed field
## Code After:
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
|
...
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
...
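register() from django.core.checks is what hooks each callable into the system-check framework, so commenting the entry out of the list disables the warning without deleting the code. The same registration is often written as a decorator; a sketch assuming Django is installed (check id and message are placeholders):
from django.core.checks import Warning, register

@register()
def warn_renamed_field(app_configs, **kwargs):
    # An empty list means "no problems"; returned Warning objects show up
    # in `manage.py check` and at server startup.
    return [
        Warning(
            'example: the "input" field was renamed',
            hint='Run migrations and update templates.',
            id='myapp.W001',
        )
    ]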
|
e70292db060e0cb2b690ff413608a7ba4bf5c7fd
|
src/main/java/interpres/ast/StringLiteral.java
|
src/main/java/interpres/ast/StringLiteral.java
|
package interpres.ast;
import java.util.List;
import java.util.ArrayList;
import interpres.language.DefinitionTable;
import interpres.language.values.Value;
public class StringLiteral extends AST {
private String literal;
public StringLiteral(String literal) {
this.literal = literal;
}
public Value evaluate(DefinitionTable definitionTable) {
List<String> instructions = new ArrayList<String>();
for (int i = literal.length() - 1; i >= 0; i--) {
instructions.add("LOADL " + (int) literal.charAt(i));
}
return new interpres.language.values.List(instructions);
}
public String quote() {
return this.literal;
}
}
|
package interpres.ast;
import java.util.List;
import java.util.ArrayList;
import interpres.language.DefinitionTable;
import interpres.language.values.Value;
public class StringLiteral extends AST {
private String literal;
public StringLiteral(String literal) {
this.literal = literal;
}
public Value evaluate(DefinitionTable definitionTable) {
return ListExpression.buildFunctionCall("asm.loads", new QuotedExpression(this)).evaluate(definitionTable);
}
public String quote() {
return this.literal;
}
}
|
Call asm.loads when a string is encountered
|
Call asm.loads when a string is encountered
|
Java
|
mit
|
thomasbrus/interpres
|
java
|
## Code Before:
package interpres.ast;
import java.util.List;
import java.util.ArrayList;
import interpres.language.DefinitionTable;
import interpres.language.values.Value;
public class StringLiteral extends AST {
private String literal;
public StringLiteral(String literal) {
this.literal = literal;
}
public Value evaluate(DefinitionTable definitionTable) {
List<String> instructions = new ArrayList<String>();
for (int i = literal.length() - 1; i >= 0; i--) {
instructions.add("LOADL " + (int) literal.charAt(i));
}
return new interpres.language.values.List(instructions);
}
public String quote() {
return this.literal;
}
}
## Instruction:
Call asm.loads when a string is encountered
## Code After:
package interpres.ast;
import java.util.List;
import java.util.ArrayList;
import interpres.language.DefinitionTable;
import interpres.language.values.Value;
public class StringLiteral extends AST {
private String literal;
public StringLiteral(String literal) {
this.literal = literal;
}
public Value evaluate(DefinitionTable definitionTable) {
return ListExpression.buildFunctionCall("asm.loads", new QuotedExpression(this)).evaluate(definitionTable);
}
public String quote() {
return this.literal;
}
}
|
// ... existing code ...
}
public Value evaluate(DefinitionTable definitionTable) {
return ListExpression.buildFunctionCall("asm.loads", new QuotedExpression(this)).evaluate(definitionTable);
}
public String quote() {
// ... rest of the code ...
|
6fcf03532dcc549a3a95390b7c999482a64fc6c6
|
tests/unit/utils/test_pycrypto.py
|
tests/unit/utils/test_pycrypto.py
|
from __future__ import absolute_import, print_function, unicode_literals
import logging
import re
# Import Salt Libs
import salt.utils.pycrypto
import salt.utils.platform
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
log = logging.getLogger(__name__)
class PycryptoTestCase(TestCase):
'''
TestCase for salt.utils.pycrypto module
'''
@skipIf(salt.utils.platform.is_windows(), 'No crypto module for Windows')
def test_gen_hash(self):
'''
Test gen_hash
'''
passwd = 'test_password'
id = '$'
if salt.utils.platform.is_darwin():
id = ''
ret = salt.utils.pycrypto.gen_hash(password=passwd)
self.assertTrue(ret.startswith('$6{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='md5')
self.assertTrue(ret.startswith('$1{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='sha256')
self.assertTrue(ret.startswith('$5{0}'.format(id)))
def test_secure_password(self):
'''
test secure_password
'''
ret = salt.utils.pycrypto.secure_password()
check = re.compile(r'[!@#$%^&*()_=+]')
assert check.search(ret) is None
assert ret
|
from __future__ import absolute_import, print_function, unicode_literals
import logging
import re
# Import Salt Libs
import salt.utils.pycrypto
import salt.utils.platform
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
log = logging.getLogger(__name__)
class PycryptoTestCase(TestCase):
'''
TestCase for salt.utils.pycrypto module
'''
# The crypt module is only available on Unix systems
# https://docs.python.org/dev/library/crypt.html
@skipIf(not salt.utils.pycrypto.HAS_CRYPT, 'crypt module not available')
def test_gen_hash(self):
'''
Test gen_hash
'''
passwd = 'test_password'
id = '$'
if salt.utils.platform.is_darwin():
id = ''
ret = salt.utils.pycrypto.gen_hash(password=passwd)
self.assertTrue(ret.startswith('$6{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='md5')
self.assertTrue(ret.startswith('$1{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='sha256')
self.assertTrue(ret.startswith('$5{0}'.format(id)))
def test_secure_password(self):
'''
test secure_password
'''
ret = salt.utils.pycrypto.secure_password()
check = re.compile(r'[!@#$%^&*()_=+]')
assert check.search(ret) is None
assert ret
|
Make the skip apply to any system missing crypt
|
Make the skip apply to any system missing crypt
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
python
|
## Code Before:
from __future__ import absolute_import, print_function, unicode_literals
import logging
import re
# Import Salt Libs
import salt.utils.pycrypto
import salt.utils.platform
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
log = logging.getLogger(__name__)
class PycryptoTestCase(TestCase):
'''
TestCase for salt.utils.pycrypto module
'''
@skipIf(salt.utils.platform.is_windows(), 'No crypto module for Windows')
def test_gen_hash(self):
'''
Test gen_hash
'''
passwd = 'test_password'
id = '$'
if salt.utils.platform.is_darwin():
id = ''
ret = salt.utils.pycrypto.gen_hash(password=passwd)
self.assertTrue(ret.startswith('$6{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='md5')
self.assertTrue(ret.startswith('$1{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='sha256')
self.assertTrue(ret.startswith('$5{0}'.format(id)))
def test_secure_password(self):
'''
test secure_password
'''
ret = salt.utils.pycrypto.secure_password()
check = re.compile(r'[!@#$%^&*()_=+]')
assert check.search(ret) is None
assert ret
## Instruction:
Make the skip apply to any system missing crypt
## Code After:
from __future__ import absolute_import, print_function, unicode_literals
import logging
import re
# Import Salt Libs
import salt.utils.pycrypto
import salt.utils.platform
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
log = logging.getLogger(__name__)
class PycryptoTestCase(TestCase):
'''
TestCase for salt.utils.pycrypto module
'''
# The crypt module is only available on Unix systems
# https://docs.python.org/dev/library/crypt.html
@skipIf(not salt.utils.pycrypto.HAS_CRYPT, 'crypt module not available')
def test_gen_hash(self):
'''
Test gen_hash
'''
passwd = 'test_password'
id = '$'
if salt.utils.platform.is_darwin():
id = ''
ret = salt.utils.pycrypto.gen_hash(password=passwd)
self.assertTrue(ret.startswith('$6{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='md5')
self.assertTrue(ret.startswith('$1{0}'.format(id)))
ret = salt.utils.pycrypto.gen_hash(password=passwd, algorithm='sha256')
self.assertTrue(ret.startswith('$5{0}'.format(id)))
def test_secure_password(self):
'''
test secure_password
'''
ret = salt.utils.pycrypto.secure_password()
check = re.compile(r'[!@#$%^&*()_=+]')
assert check.search(ret) is None
assert ret
|
// ... existing code ...
TestCase for salt.utils.pycrypto module
'''
# The crypt module is only available on Unix systems
# https://docs.python.org/dev/library/crypt.html
@skipIf(not salt.utils.pycrypto.HAS_CRYPT, 'crypt module not available')
def test_gen_hash(self):
'''
Test gen_hash
// ... rest of the code ...
|
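The updated skip keys off a HAS_CRYPT flag exported by salt.utils.pycrypto instead of the platform name. That module is not shown in this record, so the snippet below is only a sketch of the usual import-guard convention such a flag implies (Python); the flag name comes from the test above, everything else is assumed.

# Typical shape of the guard the skipIf decorator relies on:
# crypt is a Unix-only stdlib module, so importing it fails on Windows
# (and on any other platform without it), leaving HAS_CRYPT False.
try:
    import crypt
    HAS_CRYPT = True
except ImportError:
    HAS_CRYPT = False

Skipping on the flag rather than on is_windows() lets the test run wherever crypt actually imports, which is exactly what the commit message asks for.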
67cd74ec17e6061d70d38d48b7c56ce448063f35
|
test/Driver/output-file-is-dir.c
|
test/Driver/output-file-is-dir.c
|
// XFAIL: hexagon
// RUN: rm -rf %t.dir
// RUN: mkdir -p %t.dir/a.out
// RUN: cd %t.dir && not %clang %s
// RUN: test -d %t.dir/a.out
// REQUIRES: shell
int main() { return 0; }
|
// RUN: rm -rf %t.dir
// RUN: mkdir -p %t.dir
// RUN: not %clang %s -c -emit-llvm -o %t.dir
// RUN: test -d %t.dir
int main() { return 0; }
|
Simplify test so that it is more portable.
|
Simplify test so that it is more portable.
I have checked that the test still fails when the "|| !P.isRegularFile()" from
the original patch is removed.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@179464 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang
|
c
|
## Code Before:
// XFAIL: hexagon
// RUN: rm -rf %t.dir
// RUN: mkdir -p %t.dir/a.out
// RUN: cd %t.dir && not %clang %s
// RUN: test -d %t.dir/a.out
// REQUIRES: shell
int main() { return 0; }
## Instruction:
Simplify test so that it is more portable.
I have checked that the test still fails when the "|| !P.isRegularFile()" from
the original patch is removed.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@179464 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: rm -rf %t.dir
// RUN: mkdir -p %t.dir
// RUN: not %clang %s -c -emit-llvm -o %t.dir
// RUN: test -d %t.dir
int main() { return 0; }
|
# ... existing code ...
// RUN: rm -rf %t.dir
// RUN: mkdir -p %t.dir
// RUN: not %clang %s -c -emit-llvm -o %t.dir
// RUN: test -d %t.dir
int main() { return 0; }
# ... rest of the code ...
|
69a94a60d04991ba5f8c25276455dedc3a0b898c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='[email protected]',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
|
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='[email protected]',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
Install assets when installing the package.
|
Install assets when installing the package.
|
Python
|
bsd-2-clause
|
micktwomey/pypicache
|
python
|
## Code Before:
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='[email protected]',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
)
## Instruction:
Install assets when installing the package.
## Code After:
from distutils.core import setup
setup(
name='pypicache',
version='0.1',
description='PyPI caching and proxying server',
author='Michael Twomey',
author_email='[email protected]',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
|
...
author_email='[email protected]',
url='http://readthedocs.org/projects/pypicache/',
packages=['pypicache'],
package_data={
'pypicache': [
'static/*/*',
'templates/*.html',
]
}
)
...
|
be915a11ebd0d9c4e8a0a52b1bdcc7ca2abfbfb1
|
sms_sender.py
|
sms_sender.py
|
from kafka import KafkaConsumer
import os
import nexmo
import json
client = nexmo.Client( key=os.environ["API_KEY"],
secret=os.environ["API_SECRET"])
consumer = KafkaConsumer(bootstrap_servers=os.environ["KAFKA"],
value_deserializer=lambda m: json.loads(m.decode('ascii')))
consumer.subscribe(['kafka_test1'])
while True:
for message in consumer:
client.send_message({ 'from' : message.value['from'],
'to' : message.value['to'],
'text' : message.value['text']})
|
from kafka import KafkaConsumer
import os
import nexmo
import json
client = nexmo.Client( key=os.environ["API_KEY"],
secret=os.environ["API_SECRET"])
consumer = KafkaConsumer(bootstrap_servers=os.environ["KAFKA"],
value_deserializer=lambda m: json.loads(m.decode('ascii')))
consumer.subscribe(['sms_response'])
while True:
for message in consumer:
try:
client.send_message({ 'from' : message.value['from'],
'to' : message.value['to'],
'text' : message.value['text']})
except:
print 'Unexpected error'
|
Change topic + add exception handling
|
Change topic + add exception handling
|
Python
|
apache-2.0
|
antongorshkov/kafkasms
|
python
|
## Code Before:
from kafka import KafkaConsumer
import os
import nexmo
import json
client = nexmo.Client( key=os.environ["API_KEY"],
secret=os.environ["API_SECRET"])
consumer = KafkaConsumer(bootstrap_servers=os.environ["KAFKA"],
value_deserializer=lambda m: json.loads(m.decode('ascii')))
consumer.subscribe(['kafka_test1'])
while True:
for message in consumer:
client.send_message({ 'from' : message.value['from'],
'to' : message.value['to'],
'text' : message.value['text']})
## Instruction:
Change topic + add exception handling
## Code After:
from kafka import KafkaConsumer
import os
import nexmo
import json
client = nexmo.Client( key=os.environ["API_KEY"],
secret=os.environ["API_SECRET"])
consumer = KafkaConsumer(bootstrap_servers=os.environ["KAFKA"],
value_deserializer=lambda m: json.loads(m.decode('ascii')))
consumer.subscribe(['sms_response'])
while True:
for message in consumer:
try:
client.send_message({ 'from' : message.value['from'],
'to' : message.value['to'],
'text' : message.value['text']})
except:
print 'Unexpected error'
|
# ... existing code ...
secret=os.environ["API_SECRET"])
consumer = KafkaConsumer(bootstrap_servers=os.environ["KAFKA"],
value_deserializer=lambda m: json.loads(m.decode('ascii')))
consumer.subscribe(['sms_response'])
while True:
for message in consumer:
try:
client.send_message({ 'from' : message.value['from'],
'to' : message.value['to'],
'text' : message.value['text']})
except:
print 'Unexpected error'
# ... rest of the code ...
|
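The consumer loop assumes every record on the sms_response topic is an ASCII-encoded JSON object carrying 'from', 'to' and 'text' keys, but no producer appears in this record. A minimal matching producer sketch (Python, kafka-python), with the sender, number and broker address obviously made up:

from kafka import KafkaProducer
import json
import os

producer = KafkaProducer(
    bootstrap_servers=os.environ["KAFKA"],
    value_serializer=lambda m: json.dumps(m).encode('ascii'))

# Payload shape the consumer above expects before handing it to Nexmo.
producer.send('sms_response', {
    'from': 'Acme',
    'to': '14155550100',
    'text': 'Your code is 1234',
})
producer.flush()

The bare except around client.send_message keeps the loop alive when a single payload is malformed or the Nexmo call fails, at the cost of hiding the actual error; logging the exception would keep that behaviour without losing the cause.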
c5103eea181455afded264528bb97ac8a9982db0
|
enable/__init__.py
|
enable/__init__.py
|
from __future__ import absolute_import
from ._version import full_version as __version__
__requires__ = [
'traitsui',
'PIL',
'kiwisolver',
]
|
from enable._version import full_version as __version__
__requires__ = [
'traitsui',
'PIL',
'kiwisolver',
]
|
Use an absolute import to avoid breaking the docs build.
|
Use an absolute import to avoid breaking the docs build.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
python
|
## Code Before:
from __future__ import absolute_import
from ._version import full_version as __version__
__requires__ = [
'traitsui',
'PIL',
'kiwisolver',
]
## Instruction:
Use an absolute import to avoid breaking the docs build.
## Code After:
from enable._version import full_version as __version__
__requires__ = [
'traitsui',
'PIL',
'kiwisolver',
]
|
...
from enable._version import full_version as __version__
__requires__ = [
'traitsui',
...
|
cb52e7b1a507ca7b6065c6994d11d3c07a41e6f1
|
uniqueids/tasks.py
|
uniqueids/tasks.py
|
from celery.task import Task
from celery.utils.log import get_task_logger
from hellomama_registration import utils
logger = get_task_logger(__name__)
class AddUniqueIDToIdentity(Task):
def run(self, identity, unique_id, write_to, **kwargs):
"""
identity: the identity to receive the payload.
unique_id: the unique_id to add to the identity
write_to: the key to write the unique_id to
"""
full_identity = utils.get_identity(identity)
if "details" in full_identity:
# not a 404
partial_identity = {
"details": full_identity["details"]
}
partial_identity["details"][write_to] = unique_id
utils.patch_identity(identity, partial_identity)
return "Identity <%s> now has <%s> of <%s>" % (
identity, write_to, str(unique_id))
else:
return "Identity <%s> not found" % (identity,)
add_unique_id_to_identity = AddUniqueIDToIdentity()
|
from celery.task import Task
from celery.utils.log import get_task_logger
from hellomama_registration import utils
logger = get_task_logger(__name__)
class AddUniqueIDToIdentity(Task):
def run(self, identity, unique_id, write_to, **kwargs):
"""
identity: the identity to receive the payload.
unique_id: the unique_id to add to the identity
write_to: the key to write the unique_id to
"""
full_identity = utils.get_identity(identity)
if "details" in full_identity:
# not a 404
partial_identity = {
"details": full_identity["details"]
}
# convert to string to enable Django filter lookups
partial_identity["details"][write_to] = str(unique_id)
utils.patch_identity(identity, partial_identity)
return "Identity <%s> now has <%s> of <%s>" % (
identity, write_to, str(unique_id))
else:
return "Identity <%s> not found" % (identity,)
add_unique_id_to_identity = AddUniqueIDToIdentity()
|
Make auto gen ID's strings on save to Identity
|
Make auto gen ID's strings on save to Identity
|
Python
|
bsd-3-clause
|
praekelt/hellomama-registration,praekelt/hellomama-registration
|
python
|
## Code Before:
from celery.task import Task
from celery.utils.log import get_task_logger
from hellomama_registration import utils
logger = get_task_logger(__name__)
class AddUniqueIDToIdentity(Task):
def run(self, identity, unique_id, write_to, **kwargs):
"""
identity: the identity to receive the payload.
unique_id: the unique_id to add to the identity
write_to: the key to write the unique_id to
"""
full_identity = utils.get_identity(identity)
if "details" in full_identity:
# not a 404
partial_identity = {
"details": full_identity["details"]
}
partial_identity["details"][write_to] = unique_id
utils.patch_identity(identity, partial_identity)
return "Identity <%s> now has <%s> of <%s>" % (
identity, write_to, str(unique_id))
else:
return "Identity <%s> not found" % (identity,)
add_unique_id_to_identity = AddUniqueIDToIdentity()
## Instruction:
Make auto gen ID's strings on save to Identity
## Code After:
from celery.task import Task
from celery.utils.log import get_task_logger
from hellomama_registration import utils
logger = get_task_logger(__name__)
class AddUniqueIDToIdentity(Task):
def run(self, identity, unique_id, write_to, **kwargs):
"""
identity: the identity to receive the payload.
unique_id: the unique_id to add to the identity
write_to: the key to write the unique_id to
"""
full_identity = utils.get_identity(identity)
if "details" in full_identity:
# not a 404
partial_identity = {
"details": full_identity["details"]
}
# convert to string to enable Django filter lookups
partial_identity["details"][write_to] = str(unique_id)
utils.patch_identity(identity, partial_identity)
return "Identity <%s> now has <%s> of <%s>" % (
identity, write_to, str(unique_id))
else:
return "Identity <%s> not found" % (identity,)
add_unique_id_to_identity = AddUniqueIDToIdentity()
|
// ... existing code ...
partial_identity = {
"details": full_identity["details"]
}
# convert to string to enable Django filter lookups
partial_identity["details"][write_to] = str(unique_id)
utils.patch_identity(identity, partial_identity)
return "Identity <%s> now has <%s> of <%s>" % (
identity, write_to, str(unique_id))
// ... rest of the code ...
|
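The new comment says the value is stringified "to enable Django filter lookups". The identity store sits behind utils.patch_identity rather than in this codebase, so the exact lookup mechanics are an assumption here, but the underlying point is simply that query parameters arrive as text and an exact-match filter only hits when the stored value is text too. A tiny sketch (Python, invented id):

# e.g. a request like ?details__unique_id=1234567890 hands the filter the string below
incoming = "1234567890"

stored_as_int = {"unique_id": 1234567890}
stored_as_str = {"unique_id": str(1234567890)}

print(stored_as_int["unique_id"] == incoming)   # False - an int never equals a str
print(stored_as_str["unique_id"] == incoming)   # True  - what this commit guarantees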
b09bd3d3896c448f8817ee7515e3af0314605ab1
|
plrCommon/plrCompare.c
|
plrCommon/plrCompare.c
|
int plrC_compareArgs(const syscallArgs_t *args1, const syscallArgs_t *args2) {
int foundDiff = 0;
if (args1->addr != args2->addr) { foundDiff = 1; }
else if (args1->arg[0] != args2->arg[0]) { foundDiff = 2; }
else if (args1->arg[1] != args2->arg[1]) { foundDiff = 3; }
else if (args1->arg[2] != args2->arg[2]) { foundDiff = 4; }
else if (args1->arg[3] != args2->arg[3]) { foundDiff = 5; }
else if (args1->arg[4] != args2->arg[4]) { foundDiff = 6; }
else if (args1->arg[5] != args2->arg[5]) { foundDiff = 7; }
return foundDiff;
}
|
int plrC_compareArgs(const syscallArgs_t *args1, const syscallArgs_t *args2) {
int faultVal = 0;
#define CompareElement(elem, faultBit) \
if (args1->elem != args2->elem) { \
faultVal |= 1 << faultBit; \
printf("Argument miscompare in " #elem ", 0x%lX != 0x%lX\n", \
(unsigned long)args1->elem, (unsigned long)args2->elem); \
}
CompareElement(addr, 0);
CompareElement(arg[0], 1);
CompareElement(arg[1], 2);
CompareElement(arg[2], 3);
CompareElement(arg[3], 4);
CompareElement(arg[4], 5);
CompareElement(arg[5], 6);
return faultVal;
}
|
Add logging to syscall arg compare for debugging purposes
|
Add logging to syscall arg compare for debugging purposes
|
C
|
mit
|
apogeedev/plr,apogeedev/plr
|
c
|
## Code Before:
int plrC_compareArgs(const syscallArgs_t *args1, const syscallArgs_t *args2) {
int foundDiff = 0;
if (args1->addr != args2->addr) { foundDiff = 1; }
else if (args1->arg[0] != args2->arg[0]) { foundDiff = 2; }
else if (args1->arg[1] != args2->arg[1]) { foundDiff = 3; }
else if (args1->arg[2] != args2->arg[2]) { foundDiff = 4; }
else if (args1->arg[3] != args2->arg[3]) { foundDiff = 5; }
else if (args1->arg[4] != args2->arg[4]) { foundDiff = 6; }
else if (args1->arg[5] != args2->arg[5]) { foundDiff = 7; }
return foundDiff;
}
## Instruction:
Add logging to syscall arg compare for debugging purposes
## Code After:
int plrC_compareArgs(const syscallArgs_t *args1, const syscallArgs_t *args2) {
int faultVal = 0;
#define CompareElement(elem, faultBit) \
if (args1->elem != args2->elem) { \
faultVal |= 1 << faultBit; \
printf("Argument miscompare in " #elem ", 0x%lX != 0x%lX\n", \
(unsigned long)args1->elem, (unsigned long)args2->elem); \
}
CompareElement(addr, 0);
CompareElement(arg[0], 1);
CompareElement(arg[1], 2);
CompareElement(arg[2], 3);
CompareElement(arg[3], 4);
CompareElement(arg[4], 5);
CompareElement(arg[5], 6);
return faultVal;
}
|
...
int plrC_compareArgs(const syscallArgs_t *args1, const syscallArgs_t *args2) {
int faultVal = 0;
#define CompareElement(elem, faultBit) \
if (args1->elem != args2->elem) { \
faultVal |= 1 << faultBit; \
printf("Argument miscompare in " #elem ", 0x%lX != 0x%lX\n", \
(unsigned long)args1->elem, (unsigned long)args2->elem); \
}
CompareElement(addr, 0);
CompareElement(arg[0], 1);
CompareElement(arg[1], 2);
CompareElement(arg[2], 3);
CompareElement(arg[3], 4);
CompareElement(arg[4], 5);
CompareElement(arg[5], 6);
return faultVal;
}
...
|
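Where the old version returned the index of the first mismatching field, the rewritten compare sets one bit per mismatch, so a caller can report every differing argument in one pass. A short decoding sketch (Python; the field order mirrors the CompareElement calls above):

FIELDS = ["addr", "arg[0]", "arg[1]", "arg[2]", "arg[3]", "arg[4]", "arg[5]"]

def decode_fault(fault_val):
    # Bit i set  <=>  FIELDS[i] differed between the two syscall argument sets.
    return [name for i, name in enumerate(FIELDS) if fault_val & (1 << i)]

print(decode_fault(0b0000101))   # ['addr', 'arg[1]']

A return value of 0 still means "no miscompare", so callers that only test the result for truthiness keep working unchanged.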
d8fc931822bbbf3fe5dbd27657925a6c9cf2b5f4
|
src/com/team254/frc2013/commands/ShootCommand.java
|
src/com/team254/frc2013/commands/ShootCommand.java
|
package com.team254.frc2013.commands;
import edu.wpi.first.wpilibj.Timer;
/**
* Shoots a disc that is already loaded into the shooter.
*
* @author [email protected] (Tom Bottiglieri)
* @author [email protected] (Patrick Fairbank)
*/
public class ShootCommand extends CommandBase {
private Timer shooterTimer;
public ShootCommand() {
requires(shooter);
shooterTimer = new Timer();
}
protected void initialize() {
// Don't fire the piston if the shooter is not turned on.
if (shooter.isOn()) {
shooter.extend();
shooterTimer.reset();
shooterTimer.start();
}
}
protected void execute() {
}
protected boolean isFinished() {
return shooterTimer.get() > 0.2 || !shooter.isOn();
}
protected void end() {
System.out.println("RPM of shot: " + shooter.getRpm());
shooter.retract();
}
protected void interrupted() {
}
}
|
package com.team254.frc2013.commands;
import edu.wpi.first.wpilibj.Timer;
/**
* Shoots a disc that is already loaded into the shooter.
*
* @author [email protected] (Tom Bottiglieri)
* @author [email protected] (Patrick Fairbank)
*/
public class ShootCommand extends CommandBase {
private Timer shooterTimer;
public ShootCommand() {
requires(shooter);
shooterTimer = new Timer();
}
protected void initialize() {
// Don't fire the piston if the shooter is not turned on.
if (shooter.isOn()) {
shooter.extend();
shooterTimer.reset();
shooterTimer.start();
}
conveyor.setMotor(-.175);
intake.setIntakePower(-.1);
}
protected void execute() {
}
protected boolean isFinished() {
return shooterTimer.get() > 0.2 || !shooter.isOn();
}
protected void end() {
System.out.println("RPM of shot: " + shooter.getRpm());
shooter.retract();
}
protected void interrupted() {
}
}
|
Make conveyor run backwards as disc is shooting to prevent jams
|
Make conveyor run backwards as disc is shooting to prevent jams
|
Java
|
bsd-2-clause
|
Team254/FRC-2013,Team254/FRC-2013
|
java
|
## Code Before:
package com.team254.frc2013.commands;
import edu.wpi.first.wpilibj.Timer;
/**
* Shoots a disc that is already loaded into the shooter.
*
* @author [email protected] (Tom Bottiglieri)
* @author [email protected] (Patrick Fairbank)
*/
public class ShootCommand extends CommandBase {
private Timer shooterTimer;
public ShootCommand() {
requires(shooter);
shooterTimer = new Timer();
}
protected void initialize() {
// Don't fire the piston if the shooter is not turned on.
if (shooter.isOn()) {
shooter.extend();
shooterTimer.reset();
shooterTimer.start();
}
}
protected void execute() {
}
protected boolean isFinished() {
return shooterTimer.get() > 0.2 || !shooter.isOn();
}
protected void end() {
System.out.println("RPM of shot: " + shooter.getRpm());
shooter.retract();
}
protected void interrupted() {
}
}
## Instruction:
Make conveyor run backwards as disc is shooting to prevent jams
## Code After:
package com.team254.frc2013.commands;
import edu.wpi.first.wpilibj.Timer;
/**
* Shoots a disc that is already loaded into the shooter.
*
* @author [email protected] (Tom Bottiglieri)
* @author [email protected] (Patrick Fairbank)
*/
public class ShootCommand extends CommandBase {
private Timer shooterTimer;
public ShootCommand() {
requires(shooter);
shooterTimer = new Timer();
}
protected void initialize() {
// Don't fire the piston if the shooter is not turned on.
if (shooter.isOn()) {
shooter.extend();
shooterTimer.reset();
shooterTimer.start();
}
conveyor.setMotor(-.175);
intake.setIntakePower(-.1);
}
protected void execute() {
}
protected boolean isFinished() {
return shooterTimer.get() > 0.2 || !shooter.isOn();
}
protected void end() {
System.out.println("RPM of shot: " + shooter.getRpm());
shooter.retract();
}
protected void interrupted() {
}
}
|
# ... existing code ...
shooterTimer.reset();
shooterTimer.start();
}
conveyor.setMotor(-.175);
intake.setIntakePower(-.1);
}
protected void execute() {
# ... rest of the code ...
|
8cbd2830a7eb261743153626859d36437675fd18
|
io7m-jcanephora-jogl/src/test/java/com/io7m/jcanephora/JOGL30RenderbuffersTest.java
|
io7m-jcanephora-jogl/src/test/java/com/io7m/jcanephora/JOGL30RenderbuffersTest.java
|
package com.io7m.jcanephora;
import javax.media.opengl.GLContext;
import org.junit.After;
import org.junit.Before;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jcanephora.contracts.FramebuffersContract;
import com.io7m.jlog.Log;
public final class JOGL30RenderbuffersTest extends FramebuffersContract
{
private GLContext context;
@Override public GLInterface getGL()
throws GLException,
ConstraintError
{
return new GLInterfaceJOGL30(this.context, JOGL30TestLog.getLog());
}
@Override public Log getLog()
{
return JOGL30TestLog.getLog();
}
@Before public void setUp()
throws Exception
{
this.context = JOGL30.createOffscreenDisplay(640, 480);
}
@After public void tearDown()
throws Exception
{
JOGL30.destroyDisplay(this.context);
}
}
|
package com.io7m.jcanephora;
import javax.media.opengl.GLContext;
import org.junit.After;
import org.junit.Before;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jcanephora.contracts.RenderbuffersContract;
import com.io7m.jlog.Log;
public final class JOGL30RenderbuffersTest extends RenderbuffersContract
{
private GLContext context;
@Override public GLInterface getGL()
throws GLException,
ConstraintError
{
return new GLInterfaceJOGL30(this.context, JOGL30TestLog.getLog());
}
@Override public Log getLog()
{
return JOGL30TestLog.getLog();
}
@Before public void setUp()
throws Exception
{
this.context = JOGL30.createOffscreenDisplay(640, 480);
}
@After public void tearDown()
throws Exception
{
JOGL30.destroyDisplay(this.context);
}
}
|
Correct extends statement; Renderbuffers, not Framebuffers
|
Correct extends statement; Renderbuffers, not Framebuffers
|
Java
|
isc
|
io7m/jcanephora,io7m/jcanephora
|
java
|
## Code Before:
package com.io7m.jcanephora;
import javax.media.opengl.GLContext;
import org.junit.After;
import org.junit.Before;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jcanephora.contracts.FramebuffersContract;
import com.io7m.jlog.Log;
public final class JOGL30RenderbuffersTest extends FramebuffersContract
{
private GLContext context;
@Override public GLInterface getGL()
throws GLException,
ConstraintError
{
return new GLInterfaceJOGL30(this.context, JOGL30TestLog.getLog());
}
@Override public Log getLog()
{
return JOGL30TestLog.getLog();
}
@Before public void setUp()
throws Exception
{
this.context = JOGL30.createOffscreenDisplay(640, 480);
}
@After public void tearDown()
throws Exception
{
JOGL30.destroyDisplay(this.context);
}
}
## Instruction:
Correct extends statement; Renderbuffers, not Framebuffers
## Code After:
package com.io7m.jcanephora;
import javax.media.opengl.GLContext;
import org.junit.After;
import org.junit.Before;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jcanephora.contracts.RenderbuffersContract;
import com.io7m.jlog.Log;
public final class JOGL30RenderbuffersTest extends RenderbuffersContract
{
private GLContext context;
@Override public GLInterface getGL()
throws GLException,
ConstraintError
{
return new GLInterfaceJOGL30(this.context, JOGL30TestLog.getLog());
}
@Override public Log getLog()
{
return JOGL30TestLog.getLog();
}
@Before public void setUp()
throws Exception
{
this.context = JOGL30.createOffscreenDisplay(640, 480);
}
@After public void tearDown()
throws Exception
{
JOGL30.destroyDisplay(this.context);
}
}
|
# ... existing code ...
import org.junit.Before;
import com.io7m.jaux.Constraints.ConstraintError;
import com.io7m.jcanephora.contracts.RenderbuffersContract;
import com.io7m.jlog.Log;
public final class JOGL30RenderbuffersTest extends RenderbuffersContract
{
private GLContext context;
# ... rest of the code ...
|
36a0e32f27de076ff29d4dd0778df3603f5cf1ba
|
app/src/main/java/org/stepik/android/remote/stories/service/StoryService.kt
|
app/src/main/java/org/stepik/android/remote/stories/service/StoryService.kt
|
package org.stepik.android.remote.stories.service
import io.reactivex.Single
import org.stepik.android.remote.stories.model.StoryTemplatesResponse
import retrofit2.http.GET
import retrofit2.http.Query
interface StoryService {
@GET("api/story-templates")
fun getStoryTemplate(
@Query("page") page: Int,
@Query("is_published") isPublished: Boolean,
@Query("language") language: String
): Single<StoryTemplatesResponse>
}
|
package org.stepik.android.remote.stories.service
import io.reactivex.Single
import org.stepik.android.remote.stories.model.StoryTemplatesResponse
import retrofit2.http.GET
import retrofit2.http.Query
interface StoryService {
@GET("api/story-templates?platform=mobile,android")
fun getStoryTemplate(
@Query("page") page: Int,
@Query("is_published") isPublished: Boolean,
@Query("language") language: String
): Single<StoryTemplatesResponse>
}
|
Add platform query parameter to stories
|
Add platform query parameter to stories
|
Kotlin
|
apache-2.0
|
StepicOrg/stepic-android,StepicOrg/stepic-android,StepicOrg/stepik-android,StepicOrg/stepic-android,StepicOrg/stepik-android,StepicOrg/stepik-android
|
kotlin
|
## Code Before:
package org.stepik.android.remote.stories.service
import io.reactivex.Single
import org.stepik.android.remote.stories.model.StoryTemplatesResponse
import retrofit2.http.GET
import retrofit2.http.Query
interface StoryService {
@GET("api/story-templates")
fun getStoryTemplate(
@Query("page") page: Int,
@Query("is_published") isPublished: Boolean,
@Query("language") language: String
): Single<StoryTemplatesResponse>
}
## Instruction:
Add platform query parameter to stories
## Code After:
package org.stepik.android.remote.stories.service
import io.reactivex.Single
import org.stepik.android.remote.stories.model.StoryTemplatesResponse
import retrofit2.http.GET
import retrofit2.http.Query
interface StoryService {
@GET("api/story-templates?platform=mobile,android")
fun getStoryTemplate(
@Query("page") page: Int,
@Query("is_published") isPublished: Boolean,
@Query("language") language: String
): Single<StoryTemplatesResponse>
}
|
# ... existing code ...
import retrofit2.http.Query
interface StoryService {
@GET("api/story-templates?platform=mobile,android")
fun getStoryTemplate(
@Query("page") page: Int,
@Query("is_published") isPublished: Boolean,
# ... rest of the code ...
|
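The platform filter is baked into the @GET path while page, is_published and language remain @Query parameters; assuming Retrofit's usual behaviour of appending annotated parameters to a relative URL that already carries a query string, the final request merges both. A rough sketch of that merge (Python, invented values):

from urllib.parse import urlencode

static_part = "api/story-templates?platform=mobile,android"       # from the @GET annotation
annotated = {"page": 1, "is_published": "true", "language": "en"}  # from the @Query parameters

print(static_part + "&" + urlencode(annotated))
# api/story-templates?platform=mobile,android&page=1&is_published=true&language=en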
9db378f028cf84d81165e7c50f62db794eb8fee2
|
tests/query_test/test_chars.py
|
tests/query_test/test_chars.py
|
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
Fix char test to only run on text/none.
Fix char test to only run on text/none.
|
Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <[email protected]>
|
Python
|
apache-2.0
|
michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,cloudera/Impala
|
python
|
## Code Before:
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
## Instruction:
Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <[email protected]>
## Code After:
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
# ... existing code ...
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
# ... rest of the code ...
|
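The added clause narrows the generated test matrix from every compression codec of the text format down to uncompressed text only. Applied to a plain list of (file_format, compression_codec) pairs with made-up values, the before/after predicates behave like this (Python):

combos = [("text", "none"), ("text", "gzip"), ("parquet", "none"), ("avro", "snap")]

before = [c for c in combos if c[0] in ["text"]]
after = [c for c in combos if c[0] in ["text"] and c[1] in ["none"]]

print(before)   # [('text', 'none'), ('text', 'gzip')]
print(after)    # [('text', 'none')]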
0654f5efd0d9058f1976e77b48d1915e75a4cf4e
|
src/main/java/me/jjm_223/pt/PetTransportation.java
|
src/main/java/me/jjm_223/pt/PetTransportation.java
|
package me.jjm_223.pt;
import me.jjm_223.pt.listeners.EggClick;
import me.jjm_223.pt.listeners.EggHit;
import me.jjm_223.pt.listeners.ItemDespawn;
import me.jjm_223.pt.utils.DataStorage;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import java.io.File;
import java.io.IOException;
/**
* Main class for PetTransportation.
*/
public class PetTransportation extends JavaPlugin {
private DataStorage storage;
@Override
public void onEnable() {
//Register relevant events.
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(new EggHit(this), this);
pm.registerEvents(new EggClick(this), this);
pm.registerEvents(new ItemDespawn(this), this);
storage = new DataStorage(this);
}
@Override
public void onDisable() {
try {
storage.save();
} catch (IOException e) {
e.printStackTrace();
}
}
public DataStorage getStorage()
{
return this.storage;
}
}
|
package me.jjm_223.pt;
import me.jjm_223.pt.listeners.EggClick;
import me.jjm_223.pt.listeners.EggHit;
import me.jjm_223.pt.listeners.ItemDespawn;
import me.jjm_223.pt.utils.DataStorage;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import java.io.File;
import java.io.IOException;
/**
* Main class for PetTransportation.
*/
public class PetTransportation extends JavaPlugin {
private DataStorage storage;
@Override
public void onEnable() {
storage = new DataStorage(this);
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(new EggHit(this), this);
pm.registerEvents(new EggClick(this), this);
pm.registerEvents(new ItemDespawn(this), this);
}
@Override
public void onDisable() {
try {
storage.save();
} catch (IOException e) {
e.printStackTrace();
}
}
public DataStorage getStorage()
{
return this.storage;
}
}
|
Fix NPE on item despawn or destroy.
|
Fix NPE on item despawn or destroy.
|
Java
|
mit
|
CweepahCraft/PetTransportation
|
java
|
## Code Before:
package me.jjm_223.pt;
import me.jjm_223.pt.listeners.EggClick;
import me.jjm_223.pt.listeners.EggHit;
import me.jjm_223.pt.listeners.ItemDespawn;
import me.jjm_223.pt.utils.DataStorage;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import java.io.File;
import java.io.IOException;
/**
* Main class for PetTransportation.
*/
public class PetTransportation extends JavaPlugin {
private DataStorage storage;
@Override
public void onEnable() {
//Register relevant events.
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(new EggHit(this), this);
pm.registerEvents(new EggClick(this), this);
pm.registerEvents(new ItemDespawn(this), this);
storage = new DataStorage(this);
}
@Override
public void onDisable() {
try {
storage.save();
} catch (IOException e) {
e.printStackTrace();
}
}
public DataStorage getStorage()
{
return this.storage;
}
}
## Instruction:
Fix NPE on item despawn or destroy.
## Code After:
package me.jjm_223.pt;
import me.jjm_223.pt.listeners.EggClick;
import me.jjm_223.pt.listeners.EggHit;
import me.jjm_223.pt.listeners.ItemDespawn;
import me.jjm_223.pt.utils.DataStorage;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import java.io.File;
import java.io.IOException;
/**
* Main class for PetTransportation.
*/
public class PetTransportation extends JavaPlugin {
private DataStorage storage;
@Override
public void onEnable() {
storage = new DataStorage(this);
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(new EggHit(this), this);
pm.registerEvents(new EggClick(this), this);
pm.registerEvents(new ItemDespawn(this), this);
}
@Override
public void onDisable() {
try {
storage.save();
} catch (IOException e) {
e.printStackTrace();
}
}
public DataStorage getStorage()
{
return this.storage;
}
}
|
# ... existing code ...
@Override
public void onEnable() {
storage = new DataStorage(this);
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(new EggHit(this), this);
pm.registerEvents(new EggClick(this), this);
pm.registerEvents(new ItemDespawn(this), this);
}
@Override
# ... rest of the code ...
|
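The fix is purely an ordering change: storage must exist before any listener that dereferences it can be registered, otherwise an event arriving in that window hits a null field. The same hazard, reduced to a runnable Python sketch with invented names (AttributeError standing in for the NPE):

class Storage:
    def save(self, event):
        print("saved", event)

class Plugin:
    def __init__(self):
        self.handlers = []

    def register(self, handler):
        self.handlers.append(handler)
        handler("startup")                 # events may fire as soon as we register

    def on_enable_fixed(self):
        self.storage = Storage()                          # create the dependency first...
        self.register(lambda e: self.storage.save(e))     # ...then start listening

    def on_enable_buggy(self):
        self.register(lambda e: self.storage.save(e))     # fires before storage exists
        self.storage = Storage()

Plugin().on_enable_fixed()       # prints: saved startup
# Plugin().on_enable_buggy()     # AttributeError: 'Plugin' object has no attribute 'storage'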
e55c5b80d67edcde6c6f31665f39ebfb70660bc1
|
scripts/update_lookup_stats.py
|
scripts/update_lookup_stats.py
|
import re
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.stats import update_lookup_stats
def main(script, opts, args):
db = script.engine.connect()
redis = script.redis
for key, count in redis.hgetall('lookups').iteritems():
count = int(count)
date, hour, application_id, type = key.split(':')
if not count:
# the only way this could be 0 is if we already processed it and
# nothing touched it since then, so it's safe to delete
redis.hdel('lookups', key)
else:
update_lookup_stats(db, application_id, date, hour, type, count)
redis.hincrby('lookups', key, -count)
run_script(main)
|
import re
import urllib
import urllib2
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.stats import update_lookup_stats
def call_internal_api(func, **kwargs):
url = script.config.cluster.base_master_url.rstrip('/') + '/v2/internal/' + func
data = dict(kwargs)
data['secret'] = script.config.cluster.secret
urllib2.urlopen(url, urllib.urlencode(data))
def main(script, opts, args):
db = script.engine.connect()
redis = script.redis
for key, count in redis.hgetall('lookups').iteritems():
count = int(count)
date, hour, application_id, type = key.split(':')
if not count:
# the only way this could be 0 is if we already processed it and
# nothing touched it since then, so it's safe to delete
redis.hdel('lookups', key)
else:
if script.config.cluster.role == 'master':
update_lookup_stats(db, application_id, date, hour, type, count)
else:
call_internal_api('update_lookup_stats', date=date, hour=hour,
application_id=application_id, type=type, count=count)
redis.hincrby('lookups', key, -count)
run_script(main)
|
Handle lookup stats update on a slave server
|
Handle lookup stats update on a slave server
|
Python
|
mit
|
lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server
|
python
|
## Code Before:
import re
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.stats import update_lookup_stats
def main(script, opts, args):
db = script.engine.connect()
redis = script.redis
for key, count in redis.hgetall('lookups').iteritems():
count = int(count)
date, hour, application_id, type = key.split(':')
if not count:
# the only way this could be 0 is if we already processed it and
# nothing touched it since then, so it's safe to delete
redis.hdel('lookups', key)
else:
update_lookup_stats(db, application_id, date, hour, type, count)
redis.hincrby('lookups', key, -count)
run_script(main)
## Instruction:
Handle lookup stats update on a slave server
## Code After:
import re
import urllib
import urllib2
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.stats import update_lookup_stats
def call_internal_api(func, **kwargs):
url = script.config.cluster.base_master_url.rstrip('/') + '/v2/internal/' + func
data = dict(kwargs)
data['secret'] = script.config.cluster.secret
urllib2.urlopen(url, urllib.urlencode(data))
def main(script, opts, args):
db = script.engine.connect()
redis = script.redis
for key, count in redis.hgetall('lookups').iteritems():
count = int(count)
date, hour, application_id, type = key.split(':')
if not count:
# the only way this could be 0 is if we already processed it and
# nothing touched it since then, so it's safe to delete
redis.hdel('lookups', key)
else:
if script.config.cluster.role == 'master':
update_lookup_stats(db, application_id, date, hour, type, count)
else:
call_internal_api('update_lookup_stats', date=date, hour=hour,
application_id=application_id, type=type, count=count)
redis.hincrby('lookups', key, -count)
run_script(main)
|
// ... existing code ...
import re
import urllib
import urllib2
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.stats import update_lookup_stats
def call_internal_api(func, **kwargs):
url = script.config.cluster.base_master_url.rstrip('/') + '/v2/internal/' + func
data = dict(kwargs)
data['secret'] = script.config.cluster.secret
urllib2.urlopen(url, urllib.urlencode(data))
def main(script, opts, args):
// ... modified code ...
# nothing touched it since then, so it's safe to delete
redis.hdel('lookups', key)
else:
if script.config.cluster.role == 'master':
update_lookup_stats(db, application_id, date, hour, type, count)
else:
call_internal_api('update_lookup_stats', date=date, hour=hour,
application_id=application_id, type=type, count=count)
redis.hincrby('lookups', key, -count)
// ... rest of the code ...
|
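Both branches end with hincrby(..., -count) rather than hdel. Because web processes keep incrementing the same hash fields while this script runs, subtracting exactly the value that was read preserves any lookups recorded in between; a field is only dropped once it has been observed at zero. The race, reduced to plain dicts (Python, invented key and counts):

counts = {"2016-01-01:12:42:nometa": 5}        # date:hour:application_id:type -> lookups

seen = counts["2016-01-01:12:42:nometa"]       # stats script reads 5
counts["2016-01-01:12:42:nometa"] += 2         # two more lookups land meanwhile

# hdel-style cleanup here would silently drop those 2 lookups;
# the hincrby-style decrement keeps them for the next run:
counts["2016-01-01:12:42:nometa"] -= seen
print(counts)                                  # {'2016-01-01:12:42:nometa': 2}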
215746e2fd7e5f78b6dae031aae6a935ab164dd1
|
pyjokes/pyjokes.py
|
pyjokes/pyjokes.py
|
from __future__ import absolute_import
import random
import importlib
def get_joke(category='neutral', language='en'):
"""
Parameters
----------
category: str
Choices: 'neutral', 'explicit', 'chuck', 'all'
lang: str
Choices: 'en', 'de', 'es'
Returns
-------
joke: str
"""
if language == 'en':
from .jokes_en import jokes
elif language == 'de':
from .jokes_de import jokes
elif language == 'es':
from .jokes_es import jokes
try:
jokes = jokes[category]
except:
return 'Could not get the joke. Choose another category.'
else:
return random.choice(jokes)
|
from __future__ import absolute_import
import random
from .jokes_en import jokes as jokes_en
from .jokes_de import jokes as jokes_de
from .jokes_es import jokes as jokes_es
all_jokes = {
'en': jokes_en,
'de': jokes_de,
'es': jokes_es,
}
class LanguageNotFoundError(Exception):
pass
class CategoryNotFoundError(Exception):
pass
def get_joke(category='neutral', language='en'):
"""
Parameters
----------
category: str
Choices: 'neutral', 'explicit', 'chuck', 'all'
lang: str
Choices: 'en', 'de', 'es'
Returns
-------
joke: str
"""
if language in all_jokes:
jokes = all_jokes[language]
else:
raise LanguageNotFoundError('No such language %s' % language)
if category in jokes:
jokes = jokes[category]
return random.choice(jokes)
else:
raise CategoryNotFoundError('No such category %s' % category)
|
Use dict not if/else, add exceptions
|
Use dict not if/else, add exceptions
|
Python
|
bsd-3-clause
|
borjaayerdi/pyjokes,birdsarah/pyjokes,pyjokes/pyjokes,bennuttall/pyjokes,trojjer/pyjokes,gmarkall/pyjokes,martinohanlon/pyjokes,ElectronicsGeek/pyjokes
|
python
|
## Code Before:
from __future__ import absolute_import
import random
import importlib
def get_joke(category='neutral', language='en'):
"""
Parameters
----------
category: str
Choices: 'neutral', 'explicit', 'chuck', 'all'
lang: str
Choices: 'en', 'de', 'es'
Returns
-------
joke: str
"""
if language == 'en':
from .jokes_en import jokes
elif language == 'de':
from .jokes_de import jokes
elif language == 'es':
from .jokes_es import jokes
try:
jokes = jokes[category]
except:
return 'Could not get the joke. Choose another category.'
else:
return random.choice(jokes)
## Instruction:
Use dict not if/else, add exceptions
## Code After:
from __future__ import absolute_import
import random
from .jokes_en import jokes as jokes_en
from .jokes_de import jokes as jokes_de
from .jokes_es import jokes as jokes_es
all_jokes = {
'en': jokes_en,
'de': jokes_de,
'es': jokes_es,
}
class LanguageNotFoundError(Exception):
pass
class CategoryNotFoundError(Exception):
pass
def get_joke(category='neutral', language='en'):
"""
Parameters
----------
category: str
Choices: 'neutral', 'explicit', 'chuck', 'all'
lang: str
Choices: 'en', 'de', 'es'
Returns
-------
joke: str
"""
if language in all_jokes:
jokes = all_jokes[language]
else:
raise LanguageNotFoundError('No such language %s' % language)
if category in jokes:
jokes = jokes[category]
return random.choice(jokes)
else:
raise CategoryNotFoundError('No such category %s' % category)
|
// ... existing code ...
from __future__ import absolute_import
import random
from .jokes_en import jokes as jokes_en
from .jokes_de import jokes as jokes_de
from .jokes_es import jokes as jokes_es
all_jokes = {
'en': jokes_en,
'de': jokes_de,
'es': jokes_es,
}
class LanguageNotFoundError(Exception):
pass
class CategoryNotFoundError(Exception):
pass
def get_joke(category='neutral', language='en'):
"""
// ... modified code ...
joke: str
"""
if language in all_jokes:
jokes = all_jokes[language]
else:
raise LanguageNotFoundError('No such language %s' % language)
if category in jokes:
jokes = jokes[category]
return random.choice(jokes)
else:
raise CategoryNotFoundError('No such category %s' % category)
// ... rest of the code ...
|
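With the rewrite, bad input surfaces as an exception instead of the old "Could not get the joke" string, so callers are expected to catch it. A short usage sketch (Python; the import path follows the file shown above, the category names follow its docstring, and the joke text itself will vary):

from pyjokes.pyjokes import get_joke, LanguageNotFoundError

print(get_joke(category='chuck', language='en'))   # random joke from that pool

try:
    get_joke(language='fr')                         # not among en / de / es above
except LanguageNotFoundError as err:
    print(err)                                      # No such language fr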
68682a04435447f37b4d867e20d819bcda1a0409
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="[email protected]",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="[email protected]",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
Fix package_data to include templates
|
Fix package_data to include templates
|
Python
|
mit
|
justin8/portinus,justin8/portinus
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="[email protected]",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
## Instruction:
Fix package_data to include templates
## Code After:
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="[email protected]",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
...
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="[email protected]",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
...
|
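package_data globs are resolved relative to the package directory, so 'templates/*' only matches if the files live under portinus/templates/ in the source tree; without the entry a built distribution ships just the .py modules and the templates go missing after installation. A sketch of the assumed layout plus one way to read a bundled template at runtime (Python 3.9+; the template file name is hypothetical):

# Source layout assumed by the glob above:
#   portinus/
#       __init__.py
#       templates/
#           portinus.service      <- matched by 'templates/*'

from importlib import resources

text = (resources.files("portinus") / "templates" / "portinus.service").read_text()
print(text[:80])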
0d73cc1b38703653c3302d8f9ff4efbeaaa2b406
|
credentials/apps/records/models.py
|
credentials/apps/records/models.py
|
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
Revert early removal of certificate field
|
Revert early removal of certificate field
|
Python
|
agpl-3.0
|
edx/credentials,edx/credentials,edx/credentials,edx/credentials
|
python
|
## Code Before:
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
## Instruction:
Revert early removal of certificate field
## Code After:
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
# ... existing code ...
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
# ... modified code ...
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
# ... rest of the code ...
|
d90edf3b4d8fa714e7e24acbc22fb35bc828911d
|
services/controllers/interpolator.py
|
services/controllers/interpolator.py
|
class Interpolator:
def __init__(self):
self.data = []
def addIndexValue(self, index, value):
self.data.append((index, value))
def valueAtIndex(self, target_index):
if target_index < self.data[0][0]:
return None
elif self.data[-1][0] < target_index:
return None
else:
start = None
end = None
for (index, value) in self.data:
if index == target_index:
return value
else:
if index <= target_index:
start = (index, value)
elif target_index < index:
end = (index, value)
break
index_delta = end[0] - start[0]
percent = (target_index - start[0]) / index_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
if __name__ == "__main__":
pass
|
class Interpolator:
def __init__(self):
self.data = []
def addIndexValue(self, index, value):
self.data.append((index, value))
def valueAtIndex(self, target_index):
if target_index < self.data[0][0]:
return None
elif self.data[-1][0] < target_index:
return None
else:
start = None
end = None
for (index, value) in self.data:
if index == target_index:
return value
else:
if index <= target_index:
start = (index, value)
elif target_index < index:
end = (index, value)
break
index_delta = end[0] - start[0]
percent = (target_index - start[0]) / index_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
def to_array(self):
result = []
for (index, value) in self.data:
result.append(index)
result.append(value)
return result
def from_array(self, array):
self.data = []
for i in range(0, len(array), 2):
self.addIndexValue(array[i], array[i + 1])
if __name__ == "__main__":
pass
|
Add ability to convert to/from an array
|
Add ability to convert to/from an array
This is needed as an easy way to serialize an interpolator for sending/receiving over HTTP
|
Python
|
bsd-3-clause
|
gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2
|
python
|
## Code Before:
class Interpolator:
def __init__(self):
self.data = []
def addIndexValue(self, index, value):
self.data.append((index, value))
def valueAtIndex(self, target_index):
if target_index < self.data[0][0]:
return None
elif self.data[-1][0] < target_index:
return None
else:
start = None
end = None
for (index, value) in self.data:
if index == target_index:
return value
else:
if index <= target_index:
start = (index, value)
elif target_index < index:
end = (index, value)
break
index_delta = end[0] - start[0]
percent = (target_index - start[0]) / index_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
if __name__ == "__main__":
pass
## Instruction:
Add ability to convert to/from an array
This is needed as an easy way to serialize an interpolator for sending/receiving over HTTP
## Code After:
class Interpolator:
def __init__(self):
self.data = []
def addIndexValue(self, index, value):
self.data.append((index, value))
def valueAtIndex(self, target_index):
if target_index < self.data[0][0]:
return None
elif self.data[-1][0] < target_index:
return None
else:
start = None
end = None
for (index, value) in self.data:
if index == target_index:
return value
else:
if index <= target_index:
start = (index, value)
elif target_index < index:
end = (index, value)
break
index_delta = end[0] - start[0]
percent = (target_index - start[0]) / index_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
def to_array(self):
result = []
for (index, value) in self.data:
result.append(index)
result.append(value)
return result
def from_array(self, array):
self.data = []
for i in range(0, len(array), 2):
self.addIndexValue(array[i], array[i + 1])
if __name__ == "__main__":
pass
|
# ... existing code ...
class Interpolator:
def __init__(self):
self.data = []
# ... modified code ...
return start[1] + value_delta * percent
def to_array(self):
result = []
for (index, value) in self.data:
result.append(index)
result.append(value)
return result
def from_array(self, array):
self.data = []
for i in range(0, len(array), 2):
self.addIndexValue(array[i], array[i + 1])
if __name__ == "__main__":
pass
# ... rest of the code ...
|
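The commit message above pitches to_array/from_array as an easy way to serialize an interpolator for HTTP transport. A minimal round-trip sketch, assuming only the Interpolator class as defined in this change (the web layer itself is out of scope and not shown in the original):
# Build an interpolator, flatten it for transport, and rebuild it on the other side.
source = Interpolator()
source.addIndexValue(0, 0.0)
source.addIndexValue(10, 5.0)
flat = source.to_array()           # [0, 0.0, 10, 5.0] - plain list, JSON-friendly
restored = Interpolator()
restored.from_array(flat)          # re-creates the (index, value) pairs in order
assert restored.valueAtIndex(5) == source.valueAtIndex(5) == 2.5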
1261777b6aaaea6947a32477e340ef1597045866
|
nested_admin/urls.py
|
nested_admin/urls.py
|
try:
from django.conf.urls.defaults import patterns, url
except ImportError:
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^server-data\.js$', 'nested_admin.views.server_data_js',
name="nesting_server_data"),
)
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^server-data\.js$', 'nested_admin.views.server_data_js',
name="nesting_server_data"),
)
|
Fix DeprecationWarning in Django 1.5
|
Fix DeprecationWarning in Django 1.5
|
Python
|
bsd-2-clause
|
sbussetti/django-nested-admin,sbussetti/django-nested-admin,olivierdalang/django-nested-admin,sbussetti/django-nested-admin,olivierdalang/django-nested-admin,olivierdalang/django-nested-admin
|
python
|
## Code Before:
try:
from django.conf.urls.defaults import patterns, url
except ImportError:
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^server-data\.js$', 'nested_admin.views.server_data_js',
name="nesting_server_data"),
)
## Instruction:
Fix DeprecationWarning in Django 1.5
## Code After:
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^server-data\.js$', 'nested_admin.views.server_data_js',
name="nesting_server_data"),
)
|
...
from django.conf.urls import patterns, url
urlpatterns = patterns('',
...
|
3ee56764f7f3a629a76b7a2ea742dbc632fcc123
|
core/xal/tools/coding/Coder.java
|
core/xal/tools/coding/Coder.java
|
//
// ConversionAdaptor.java
//
//
// Created by Tom Pelaia on 12/27/11
// Copyright 2011 Oak Ridge National Lab. All rights reserved.
//
package xal.tools.coding;
import java.util.List;
/** Declares methods a coder should implement. */
public interface Coder {
/** Encode an object */
public String encode( final Object value );
/** Decode the archive */
public Object decode( final String archive );
/** Register the custom type by class and its associated adaptor */
public <CustomType,RepresentationType> void registerType( final Class<CustomType> type, final ConversionAdaptor<CustomType,RepresentationType> adaptor );
/** Get a list of all types which are supported for coding and decoding */
public List<String> getSupportedTypes();
}
|
//
// Coder.java
//
//
// Created by Tom Pelaia on 1/12/12
// Copyright 2012 Oak Ridge National Lab. All rights reserved.
//
package xal.tools.coding;
import java.util.List;
/** Declares methods a coder should implement. */
public interface Coder {
/** Encode an object */
public String encode( final Object value );
/** Decode the archive */
public Object decode( final String archive );
/** Register the custom type by class and its associated adaptor */
public <CustomType,RepresentationType> void registerType( final Class<CustomType> type, final ConversionAdaptor<CustomType,RepresentationType> adaptor );
/** Get a list of all types which are supported for coding and decoding */
public List<String> getSupportedTypes();
}
|
Fix the header comment to correct the file name and date.
|
Fix the header comment to correct the file name and date.
|
Java
|
bsd-3-clause
|
openxal/openxal,openxal/openxal,openxal/openxal,luxiaohan/openxal-csns-luxh,openxal/openxal,openxal/openxal,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal
|
java
|
## Code Before:
//
// ConversionAdaptor.java
//
//
// Created by Tom Pelaia on 12/27/11
// Copyright 2011 Oak Ridge National Lab. All rights reserved.
//
package xal.tools.coding;
import java.util.List;
/** Declares methods a coder should implement. */
public interface Coder {
/** Encode an object */
public String encode( final Object value );
/** Decode the archive */
public Object decode( final String archive );
/** Register the custom type by class and its associated adaptor */
public <CustomType,RepresentationType> void registerType( final Class<CustomType> type, final ConversionAdaptor<CustomType,RepresentationType> adaptor );
/** Get a list of all types which are supported for coding and decoding */
public List<String> getSupportedTypes();
}
## Instruction:
Fix the header comment to correct the file name and date.
## Code After:
//
// Coder.java
//
//
// Created by Tom Pelaia on 1/12/12
// Copyright 2012 Oak Ridge National Lab. All rights reserved.
//
package xal.tools.coding;
import java.util.List;
/** Declares methods a coder should implement. */
public interface Coder {
/** Encode an object */
public String encode( final Object value );
/** Decode the archive */
public Object decode( final String archive );
/** Register the custom type by class and its associated adaptor */
public <CustomType,RepresentationType> void registerType( final Class<CustomType> type, final ConversionAdaptor<CustomType,RepresentationType> adaptor );
/** Get a list of all types which are supported for coding and decoding */
public List<String> getSupportedTypes();
}
|
// ... existing code ...
//
// Coder.java
//
//
// Created by Tom Pelaia on 1/12/12
// Copyright 2012 Oak Ridge National Lab. All rights reserved.
//
package xal.tools.coding;
// ... rest of the code ...
|
1b104b747d3467eabb423846a161cf1a03cdf8cc
|
src/main/java/participants/Defender.java
|
src/main/java/participants/Defender.java
|
package participants;
import dices.DiceCollection;
public class Defender extends BattleParticipant {
private static final int MIN_ARMY_SIZE = 1;
private static final int MAX_DICES_NUM = 2;
public Defender(int armySize) {
super(armySize);
}
@Override
public int getMinArmySize() {
return 1;
}
@Override
protected void setDiceCollection(final int armySize) {
int effectiveArmySize = armySize;
diceCollection = new DiceCollection((armySize < MAX_DICES_NUM) ?
armySize : MAX_DICES_NUM, DICE_FACES_NUM);
}
}
|
package participants;
import dices.DiceCollection;
public class Defender extends BattleParticipant {
private static final int MIN_ARMY_SIZE = 1;
private static final int MAX_DICES_NUM = 2;
public Defender(int armySize) {
super(armySize);
}
@Override
public int getMinArmySize() {
return 1;
}
@Override
protected void setDiceCollection(final int armySize) {
int effectiveArmySize = armySize;
diceCollection = new DiceCollection((effectiveArmySize < MAX_DICES_NUM) ?
effectiveArmySize : MAX_DICES_NUM, DICE_FACES_NUM);
}
}
|
Use effective army size when creating dice collection
|
Use effective army size when creating dice collection
|
Java
|
apache-2.0
|
grzegorz-swirski/risk-battle
|
java
|
## Code Before:
package participants;
import dices.DiceCollection;
public class Defender extends BattleParticipant {
private static final int MIN_ARMY_SIZE = 1;
private static final int MAX_DICES_NUM = 2;
public Defender(int armySize) {
super(armySize);
}
@Override
public int getMinArmySize() {
return 1;
}
@Override
protected void setDiceCollection(final int armySize) {
int effectiveArmySize = armySize;
diceCollection = new DiceCollection((armySize < MAX_DICES_NUM) ?
armySize : MAX_DICES_NUM, DICE_FACES_NUM);
}
}
## Instruction:
Use effective army size when creating dice collection
## Code After:
package participants;
import dices.DiceCollection;
public class Defender extends BattleParticipant {
private static final int MIN_ARMY_SIZE = 1;
private static final int MAX_DICES_NUM = 2;
public Defender(int armySize) {
super(armySize);
}
@Override
public int getMinArmySize() {
return 1;
}
@Override
protected void setDiceCollection(final int armySize) {
int effectiveArmySize = armySize;
diceCollection = new DiceCollection((effectiveArmySize < MAX_DICES_NUM) ?
effectiveArmySize : MAX_DICES_NUM, DICE_FACES_NUM);
}
}
|
...
@Override
protected void setDiceCollection(final int armySize) {
int effectiveArmySize = armySize;
diceCollection = new DiceCollection((effectiveArmySize < MAX_DICES_NUM) ?
effectiveArmySize : MAX_DICES_NUM, DICE_FACES_NUM);
}
}
...
|
801d4dbebe122d94aab34550049ce698a8599b18
|
src/main/java/ch/squix/esp8266/fontconverter/rest/time/TimeResource.java
|
src/main/java/ch/squix/esp8266/fontconverter/rest/time/TimeResource.java
|
package ch.squix.esp8266.fontconverter.rest.time;
import java.awt.FontFormatException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.restlet.resource.Post;
import org.restlet.resource.ServerResource;
public class TimeResource extends ServerResource {
@Post(value = "json")
public TimeOutDto execute(TimeInDto inDto) throws FontFormatException, IOException {
TimeOutDto outDto = new TimeOutDto();
DateTime dateTime = new DateTime();
Locale locale = new Locale(inDto.getLanguage(), inDto.getCountry());
outDto.setMillisOfDayUtc(dateTime.getMillisOfDay());
List<TimeZoneDto> timeZones = new ArrayList<>();
outDto.setTimeZoneDto(timeZones);
for (String timeZoneId : inDto.getTimeZoneIds()) {
DateTimeZone zone = DateTimeZone.forID(timeZoneId);
DateTime localDateTime = new DateTime(zone);
TimeZoneDto zoneDto = new TimeZoneDto();
zoneDto.setIndex(timeZones.size());
zoneDto.setTimeZoneId(timeZoneId);
zoneDto.setTimeZoneOffsetToUtcMillis(zone.getOffset(dateTime.getMillis()));
zoneDto.setFormattedDate(localDateTime.toString(inDto.getDateFormat(), locale));
timeZones.add(zoneDto);
}
return outDto;
}
}
|
package ch.squix.esp8266.fontconverter.rest.time;
import java.awt.FontFormatException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.restlet.resource.Post;
import org.restlet.resource.ServerResource;
public class TimeResource extends ServerResource {
@Post(value = "json")
public TimeOutDto execute(TimeInDto inDto) throws FontFormatException, IOException {
TimeOutDto outDto = new TimeOutDto();
DateTime dateTime = new DateTime(DateTimeZone.UTC);
Locale locale = new Locale(inDto.getLanguage(), inDto.getCountry());
outDto.setMillisOfDayUtc(dateTime.getMillisOfDay());
List<TimeZoneDto> timeZones = new ArrayList<>();
outDto.setTimeZoneDto(timeZones);
for (String timeZoneId : inDto.getTimeZoneIds()) {
DateTimeZone zone = DateTimeZone.forID(timeZoneId);
DateTime localDateTime = new DateTime(zone);
TimeZoneDto zoneDto = new TimeZoneDto();
zoneDto.setIndex(timeZones.size());
zoneDto.setTimeZoneId(timeZoneId);
zoneDto.setTimeZoneOffsetToUtcMillis(zone.getOffset(dateTime.getMillis()));
zoneDto.setFormattedDate(localDateTime.toString(inDto.getDateFormat(), locale));
timeZones.add(zoneDto);
}
return outDto;
}
}
|
Set time to UTC
|
Set time to UTC
|
Java
|
mit
|
squix78/esp8266-oled-ssd1306-font-converter,squix78/esp8266-oled-ssd1306-font-converter,squix78/esp8266-oled-ssd1306-font-converter,squix78/esp8266-oled-ssd1306-font-converter,squix78/esp8266-oled-ssd1306-font-converter
|
java
|
## Code Before:
package ch.squix.esp8266.fontconverter.rest.time;
import java.awt.FontFormatException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.restlet.resource.Post;
import org.restlet.resource.ServerResource;
public class TimeResource extends ServerResource {
@Post(value = "json")
public TimeOutDto execute(TimeInDto inDto) throws FontFormatException, IOException {
TimeOutDto outDto = new TimeOutDto();
DateTime dateTime = new DateTime();
Locale locale = new Locale(inDto.getLanguage(), inDto.getCountry());
outDto.setMillisOfDayUtc(dateTime.getMillisOfDay());
List<TimeZoneDto> timeZones = new ArrayList<>();
outDto.setTimeZoneDto(timeZones);
for (String timeZoneId : inDto.getTimeZoneIds()) {
DateTimeZone zone = DateTimeZone.forID(timeZoneId);
DateTime localDateTime = new DateTime(zone);
TimeZoneDto zoneDto = new TimeZoneDto();
zoneDto.setIndex(timeZones.size());
zoneDto.setTimeZoneId(timeZoneId);
zoneDto.setTimeZoneOffsetToUtcMillis(zone.getOffset(dateTime.getMillis()));
zoneDto.setFormattedDate(localDateTime.toString(inDto.getDateFormat(), locale));
timeZones.add(zoneDto);
}
return outDto;
}
}
## Instruction:
Set time to UTC
## Code After:
package ch.squix.esp8266.fontconverter.rest.time;
import java.awt.FontFormatException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.restlet.resource.Post;
import org.restlet.resource.ServerResource;
public class TimeResource extends ServerResource {
@Post(value = "json")
public TimeOutDto execute(TimeInDto inDto) throws FontFormatException, IOException {
TimeOutDto outDto = new TimeOutDto();
DateTime dateTime = new DateTime(DateTimeZone.UTC);
Locale locale = new Locale(inDto.getLanguage(), inDto.getCountry());
outDto.setMillisOfDayUtc(dateTime.getMillisOfDay());
List<TimeZoneDto> timeZones = new ArrayList<>();
outDto.setTimeZoneDto(timeZones);
for (String timeZoneId : inDto.getTimeZoneIds()) {
DateTimeZone zone = DateTimeZone.forID(timeZoneId);
DateTime localDateTime = new DateTime(zone);
TimeZoneDto zoneDto = new TimeZoneDto();
zoneDto.setIndex(timeZones.size());
zoneDto.setTimeZoneId(timeZoneId);
zoneDto.setTimeZoneOffsetToUtcMillis(zone.getOffset(dateTime.getMillis()));
zoneDto.setFormattedDate(localDateTime.toString(inDto.getDateFormat(), locale));
timeZones.add(zoneDto);
}
return outDto;
}
}
|
// ... existing code ...
@Post(value = "json")
public TimeOutDto execute(TimeInDto inDto) throws FontFormatException, IOException {
TimeOutDto outDto = new TimeOutDto();
DateTime dateTime = new DateTime(DateTimeZone.UTC);
Locale locale = new Locale(inDto.getLanguage(), inDto.getCountry());
outDto.setMillisOfDayUtc(dateTime.getMillisOfDay());
// ... rest of the code ...
|
c6926dda0a9e6e1515721e54788c29d0ef8b58a4
|
tests/test_sqlcompletion.py
|
tests/test_sqlcompletion.py
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggest_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggest_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggest_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggest_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_multiple_cols_suggest_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_multiple_tables_suggest_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
|
Add a test for the WHERE clause and rename all test functions.
|
Add a test for the WHERE clause and rename all test functions.
|
Python
|
bsd-3-clause
|
thedrow/pgcli,d33tah/pgcli,n-someya/pgcli,bitmonk/pgcli,joewalnes/pgcli,yx91490/pgcli,TamasNo1/pgcli,MattOates/pgcli,TamasNo1/pgcli,j-bennet/pgcli,lk1ngaa7/pgcli,zhiyuanshi/pgcli,koljonen/pgcli,dbcli/vcli,dbcli/pgcli,lk1ngaa7/pgcli,dbcli/pgcli,j-bennet/pgcli,suzukaze/pgcli,janusnic/pgcli,darikg/pgcli,johshoff/pgcli,nosun/pgcli,w4ngyi/pgcli,darikg/pgcli,stuartquin/pgcli,dbcli/vcli,bitemyapp/pgcli,d33tah/pgcli,suzukaze/pgcli,bitemyapp/pgcli,koljonen/pgcli,thedrow/pgcli,joewalnes/pgcli,nosun/pgcli,yx91490/pgcli,johshoff/pgcli,janusnic/pgcli,n-someya/pgcli,bitmonk/pgcli,zhiyuanshi/pgcli,w4ngyi/pgcli,MattOates/pgcli
|
python
|
## Code Before:
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggest_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggest_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggest_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggest_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_multiple_cols_suggest_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_multiple_tables_suggest_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
## Instruction:
Add a test for the WHERE clause and rename all test functions.
## Code After:
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
|
...
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
...
|
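As the tests above show, suggest_type takes the full statement and the text typed so far (up to the cursor) and returns a (suggestion-kind, tables) pair. A small sketch of the newly covered INSERT INTO case, assuming this vintage of pgcli is importable:
from pgcli.packages.sqlcompletion import suggest_type
# Cursor sits right after "INTO ", so table names should be suggested.
kind, tables = suggest_type('INSERT INTO ', 'INSERT INTO ')
print(kind, tables)   # expected per the test above: 'tables' []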
b551a45cf1f620bd8bffda850e623c03837611f8
|
sal-jira-plugin/src/main/java/com/atlassian/sal/jira/JiraApplicationProperties.java
|
sal-jira-plugin/src/main/java/com/atlassian/sal/jira/JiraApplicationProperties.java
|
package com.atlassian.sal.jira;
import com.atlassian.jira.util.BuildUtils;
import com.atlassian.jira.util.velocity.VelocityRequestContextFactory;
import com.atlassian.sal.api.ApplicationProperties;
import java.util.Date;
import java.io.File;
/**
* JIRA implementation of WebProperties
*/
public class JiraApplicationProperties implements ApplicationProperties
{
private final VelocityRequestContextFactory velocityRequestContextFactory;
public JiraApplicationProperties(VelocityRequestContextFactory velocityRequestContextFactory)
{
this.velocityRequestContextFactory = velocityRequestContextFactory;
}
public String getBaseUrl()
{
return velocityRequestContextFactory.getJiraVelocityRequestContext().getCanonicalBaseUrl();
}
public String getDisplayName()
{
return "JIRA";
}
public String getVersion()
{
return BuildUtils.getVersion();
}
public Date getBuildDate()
{
return BuildUtils.getCurrentBuildDate();
}
public String getBuildNumber()
{
return BuildUtils.getCurrentBuildNumber();
}
public File getHomeDirectory()
{
throw new UnsupportedOperationException();
}
}
|
package com.atlassian.sal.jira;
import com.atlassian.jira.util.BuildUtils;
import com.atlassian.jira.util.velocity.VelocityRequestContextFactory;
import com.atlassian.jira.config.util.JiraHome;
import com.atlassian.sal.api.ApplicationProperties;
import java.util.Date;
import java.io.File;
/**
* JIRA implementation of WebProperties
*/
public class JiraApplicationProperties implements ApplicationProperties
{
private final VelocityRequestContextFactory velocityRequestContextFactory;
private final JiraHome jiraHome;
public JiraApplicationProperties(VelocityRequestContextFactory velocityRequestContextFactory, JiraHome jiraHome)
{
this.velocityRequestContextFactory = velocityRequestContextFactory;
this.jiraHome = jiraHome;
}
public String getBaseUrl()
{
return velocityRequestContextFactory.getJiraVelocityRequestContext().getCanonicalBaseUrl();
}
public String getDisplayName()
{
return "JIRA";
}
public String getVersion()
{
return BuildUtils.getVersion();
}
public Date getBuildDate()
{
return BuildUtils.getCurrentBuildDate();
}
public String getBuildNumber()
{
return BuildUtils.getCurrentBuildNumber();
}
public File getHomeDirectory()
{
return jiraHome.getHome();
}
}
|
Return JIRA home directory SAL-59
|
Return JIRA home directory
SAL-59
git-svn-id: 2433085265c0232ec46d186f8a8240da49417e22@36264 2c54a935-e501-0410-bc05-97a93f6bca70
|
Java
|
bsd-3-clause
|
mrdon/SAL
|
java
|
## Code Before:
package com.atlassian.sal.jira;
import com.atlassian.jira.util.BuildUtils;
import com.atlassian.jira.util.velocity.VelocityRequestContextFactory;
import com.atlassian.sal.api.ApplicationProperties;
import java.util.Date;
import java.io.File;
/**
* JIRA implementation of WebProperties
*/
public class JiraApplicationProperties implements ApplicationProperties
{
private final VelocityRequestContextFactory velocityRequestContextFactory;
public JiraApplicationProperties(VelocityRequestContextFactory velocityRequestContextFactory)
{
this.velocityRequestContextFactory = velocityRequestContextFactory;
}
public String getBaseUrl()
{
return velocityRequestContextFactory.getJiraVelocityRequestContext().getCanonicalBaseUrl();
}
public String getDisplayName()
{
return "JIRA";
}
public String getVersion()
{
return BuildUtils.getVersion();
}
public Date getBuildDate()
{
return BuildUtils.getCurrentBuildDate();
}
public String getBuildNumber()
{
return BuildUtils.getCurrentBuildNumber();
}
public File getHomeDirectory()
{
throw new UnsupportedOperationException();
}
}
## Instruction:
Return JIRA home directory
SAL-59
git-svn-id: 2433085265c0232ec46d186f8a8240da49417e22@36264 2c54a935-e501-0410-bc05-97a93f6bca70
## Code After:
package com.atlassian.sal.jira;
import com.atlassian.jira.util.BuildUtils;
import com.atlassian.jira.util.velocity.VelocityRequestContextFactory;
import com.atlassian.jira.config.util.JiraHome;
import com.atlassian.sal.api.ApplicationProperties;
import java.util.Date;
import java.io.File;
/**
* JIRA implementation of WebProperties
*/
public class JiraApplicationProperties implements ApplicationProperties
{
private final VelocityRequestContextFactory velocityRequestContextFactory;
private final JiraHome jiraHome;
public JiraApplicationProperties(VelocityRequestContextFactory velocityRequestContextFactory, JiraHome jiraHome)
{
this.velocityRequestContextFactory = velocityRequestContextFactory;
this.jiraHome = jiraHome;
}
public String getBaseUrl()
{
return velocityRequestContextFactory.getJiraVelocityRequestContext().getCanonicalBaseUrl();
}
public String getDisplayName()
{
return "JIRA";
}
public String getVersion()
{
return BuildUtils.getVersion();
}
public Date getBuildDate()
{
return BuildUtils.getCurrentBuildDate();
}
public String getBuildNumber()
{
return BuildUtils.getCurrentBuildNumber();
}
public File getHomeDirectory()
{
return jiraHome.getHome();
}
}
|
// ... existing code ...
import com.atlassian.jira.util.BuildUtils;
import com.atlassian.jira.util.velocity.VelocityRequestContextFactory;
import com.atlassian.jira.config.util.JiraHome;
import com.atlassian.sal.api.ApplicationProperties;
import java.util.Date;
// ... modified code ...
public class JiraApplicationProperties implements ApplicationProperties
{
private final VelocityRequestContextFactory velocityRequestContextFactory;
private final JiraHome jiraHome;
public JiraApplicationProperties(VelocityRequestContextFactory velocityRequestContextFactory, JiraHome jiraHome)
{
this.velocityRequestContextFactory = velocityRequestContextFactory;
this.jiraHome = jiraHome;
}
public String getBaseUrl()
...
public File getHomeDirectory()
{
return jiraHome.getHome();
}
}
// ... rest of the code ...
|
aa6c638f6aac2f452049f6314e5885c8e02fd874
|
quotations/apps/api/v1.py
|
quotations/apps/api/v1.py
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
Allow filtering by author name
|
Allow filtering by author name
|
Python
|
mit
|
jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted,jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted
|
python
|
## Code Before:
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
## Instruction:
Allow filtering by author name
## Code After:
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
# ... existing code ...
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
# ... rest of the code ...
|
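With 'name' added to AuthorResource's filtering and 'author': ALL_WITH_RELATIONS already on QuotationResource, tastypie should accept related-field lookups in the query string. A hedged client-side sketch; the host, API prefix, and sample data are hypothetical:
import requests
# Double-underscore lookups follow tastypie's filter syntax.
resp = requests.get(
    'http://localhost:8000/api/v1/quotations/',
    params={'author__name__contains': 'Austen', 'format': 'json'},
)
for quotation in resp.json()['objects']:
    print(quotation['author']['name'], '-', quotation['text'][:60])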
6aa0defed8b1c8be23d4eeff737ed12c45e31fff
|
src/java/org/apache/velocity/runtime/parser/node/ASTText.java
|
src/java/org/apache/velocity/runtime/parser/node/ASTText.java
|
/* Generated By:JJTree: Do not edit this line. ASTText.java */
package org.apache.velocity.runtime.parser.node;
import java.io.Writer;
import java.io.IOException;
import org.apache.velocity.Context;
import org.apache.velocity.runtime.parser.*;
public class ASTText extends SimpleNode
{
private String text;
public ASTText(int id)
{
super(id);
}
public ASTText(Parser p, int id)
{
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data)
{
return visitor.visit(this, data);
}
public Object init(Context context, Object data) throws Exception
{
// text = NodeUtils.specialText(getFirstToken()) +
// getFirstToken().image;
/*
* there is only one special case we care about now : if the specialToken leads with a $
* Everything else seems to be working right now
*/
text = getFirstToken().image;
if (NodeUtils.specialText(getFirstToken()).startsWith("$") )
text = "$" + text;
return data;
}
public boolean render(Context context, Writer writer)
throws IOException
{
writer.write(text);
return true;
}
}
|
/* Generated By:JJTree: Do not edit this line. ASTText.java */
package org.apache.velocity.runtime.parser.node;
import java.io.Writer;
import java.io.IOException;
import org.apache.velocity.Context;
import org.apache.velocity.runtime.parser.*;
public class ASTText extends SimpleNode
{
private String text;
public ASTText(int id)
{
super(id);
}
public ASTText(Parser p, int id)
{
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data)
{
return visitor.visit(this, data);
}
public Object init(Context context, Object data) throws Exception
{
// text = NodeUtils.specialText(getFirstToken()) +
// getFirstToken().image;
/*
* there is only one special case we care about now : if the specialToken leads with a $
* Everything else seems to be working right now
*/
text = getFirstToken().image;
if (NodeUtils.specialText(getFirstToken()).startsWith("$"))
text = "$" + text;
else if ( NodeUtils.specialText(getFirstToken()).startsWith("#") )
text = "#" + text;
return data;
}
public boolean render(Context context, Writer writer)
throws IOException
{
writer.write(text);
return true;
}
}
|
Support for symmetrization of DIRECTIVE and REFERENCE. See the 1.20 Parser.jjt
|
Support for symmetrization of DIRECTIVE and REFERENCE. See the 1.20 Parser.jjt
PR:
Obtained from:
Submitted by:
Reviewed by:
git-svn-id: 7267684f36935cb3df12efc1f4c0216d758271d4@73570 13f79535-47bb-0310-9956-ffa450edef68
|
Java
|
apache-2.0
|
diydyq/velocity-engine,pcollaog/velocity-engine,diydyq/velocity-engine,pcollaog/velocity-engine
|
java
|
## Code Before:
/* Generated By:JJTree: Do not edit this line. ASTText.java */
package org.apache.velocity.runtime.parser.node;
import java.io.Writer;
import java.io.IOException;
import org.apache.velocity.Context;
import org.apache.velocity.runtime.parser.*;
public class ASTText extends SimpleNode
{
private String text;
public ASTText(int id)
{
super(id);
}
public ASTText(Parser p, int id)
{
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data)
{
return visitor.visit(this, data);
}
public Object init(Context context, Object data) throws Exception
{
// text = NodeUtils.specialText(getFirstToken()) +
// getFirstToken().image;
/*
* there is only one special case we care about now : if the specialToken leads with a $
* Everything else seems to be working right now
*/
text = getFirstToken().image;
if (NodeUtils.specialText(getFirstToken()).startsWith("$") )
text = "$" + text;
return data;
}
public boolean render(Context context, Writer writer)
throws IOException
{
writer.write(text);
return true;
}
}
## Instruction:
Support for symmetrization of DIRECTIVE and REFERENCE. See the 1.20 Parser.jjt
PR:
Obtained from:
Submitted by:
Reviewed by:
git-svn-id: 7267684f36935cb3df12efc1f4c0216d758271d4@73570 13f79535-47bb-0310-9956-ffa450edef68
## Code After:
/* Generated By:JJTree: Do not edit this line. ASTText.java */
package org.apache.velocity.runtime.parser.node;
import java.io.Writer;
import java.io.IOException;
import org.apache.velocity.Context;
import org.apache.velocity.runtime.parser.*;
public class ASTText extends SimpleNode
{
private String text;
public ASTText(int id)
{
super(id);
}
public ASTText(Parser p, int id)
{
super(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(ParserVisitor visitor, Object data)
{
return visitor.visit(this, data);
}
public Object init(Context context, Object data) throws Exception
{
// text = NodeUtils.specialText(getFirstToken()) +
// getFirstToken().image;
/*
* there is only one special case we care about now : if the specialToken leads with a $
* Everything else seems to be working right now
*/
text = getFirstToken().image;
if (NodeUtils.specialText(getFirstToken()).startsWith("$"))
text = "$" + text;
else if ( NodeUtils.specialText(getFirstToken()).startsWith("#") )
text = "#" + text;
return data;
}
public boolean render(Context context, Writer writer)
throws IOException
{
writer.write(text);
return true;
}
}
|
// ... existing code ...
text = getFirstToken().image;
if (NodeUtils.specialText(getFirstToken()).startsWith("$"))
text = "$" + text;
else if ( NodeUtils.specialText(getFirstToken()).startsWith("#") )
text = "#" + text;
return data;
}
// ... rest of the code ...
|
2b3c0b50e5f67ca673f5305ccf0219a4bca6bb7b
|
luigi/tasks/rgd/__init__.py
|
luigi/tasks/rgd/__init__.py
|
import luigi
from tasks.config import rgd
from databases.rgd import helpers
from .organism import RgdOrganism
class Rgd(luigi.WrapperTask):
def requires(self):
for organism in helpers.gene_files(rgd().host):
yield RgdOrganism(organism=organism)
|
import luigi
from databases.rgd import helpers
from .organism import RgdOrganism
class Rgd(luigi.WrapperTask):
def requires(self):
for organism in helpers.known_organisms():
yield RgdOrganism(organism=organism)
|
Correct method to detect known organisms
|
Correct method to detect known organisms
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
python
|
## Code Before:
import luigi
from tasks.config import rgd
from databases.rgd import helpers
from .organism import RgdOrganism
class Rgd(luigi.WrapperTask):
def requires(self):
for organism in helpers.gene_files(rgd().host):
yield RgdOrganism(organism=organism)
## Instruction:
Correct method to detect known organisms
## Code After:
import luigi
from databases.rgd import helpers
from .organism import RgdOrganism
class Rgd(luigi.WrapperTask):
def requires(self):
for organism in helpers.known_organisms():
yield RgdOrganism(organism=organism)
|
# ... existing code ...
import luigi
from databases.rgd import helpers
from .organism import RgdOrganism
# ... modified code ...
class Rgd(luigi.WrapperTask):
def requires(self):
for organism in helpers.known_organisms():
yield RgdOrganism(organism=organism)
# ... rest of the code ...
|
742569a4781132d11de6d41811ee11ad55560294
|
django_slack/exceptions.py
|
django_slack/exceptions.py
|
import six
class SlackException(ValueError):
def __init__(self, message, message_data):
super(SlackException, self).__init__(message)
self.message_data = message_data
@six.python_2_unicode_compatible
class ChannelNotFound(SlackException):
def __str__(self):
# Override base __str__ to ensure we include the channel name in the
# error message
return u"{}: channel '{}' could not be found".format(
self.__class__.__name__,
self.message_data['channel'],
)
class IsArchived(SlackException):
pass
class MsgTooLong(SlackException):
pass
class NoText(SlackException):
pass
class RateLimited(SlackException):
pass
class NotAuthed(SlackException):
pass
class InvalidAuth(SlackException):
pass
class TokenRevoked(SlackException):
pass
class AccountInactive(SlackException):
pass
class UserIsBot(SlackException):
pass
LABEL_TO_EXCEPTION = {
'channel_not_found': ChannelNotFound,
'is_archived': IsArchived,
'msg_too_long': MsgTooLong,
'no_text': NoText,
'rate_limited': RateLimited,
'not_authed': NotAuthed,
'invalid_auth': InvalidAuth,
'token_revoked': TokenRevoked,
'account_inactive': AccountInactive,
'user_is_bot': UserIsBot,
}
|
import six
class SlackException(ValueError):
def __init__(self, message, message_data):
super(SlackException, self).__init__(message)
self.message_data = message_data
@six.python_2_unicode_compatible
class ChannelNotFound(SlackException):
def __str__(self):
# Override base __str__ to ensure we include the channel name in the
# error message
return u"{}: channel '{}' could not be found".format(
self.__class__.__name__,
self.message_data['channel'],
)
class IsArchived(SlackException):
pass
class FatalError(SlackException):
pass
class MsgTooLong(SlackException):
pass
class NoText(SlackException):
pass
class RateLimited(SlackException):
pass
class NotAuthed(SlackException):
pass
class InvalidAuth(SlackException):
pass
class TokenRevoked(SlackException):
pass
class AccountInactive(SlackException):
pass
class UserIsBot(SlackException):
pass
LABEL_TO_EXCEPTION = {
'channel_not_found': ChannelNotFound,
'is_archived': IsArchived,
'fatal_error': FatalError,
'msg_too_long': MsgTooLong,
'no_text': NoText,
'rate_limited': RateLimited,
'not_authed': NotAuthed,
'invalid_auth': InvalidAuth,
'token_revoked': TokenRevoked,
'account_inactive': AccountInactive,
'user_is_bot': UserIsBot,
}
|
Add another specific error class
|
Add another specific error class
|
Python
|
bsd-3-clause
|
lamby/django-slack
|
python
|
## Code Before:
import six
class SlackException(ValueError):
def __init__(self, message, message_data):
super(SlackException, self).__init__(message)
self.message_data = message_data
@six.python_2_unicode_compatible
class ChannelNotFound(SlackException):
def __str__(self):
# Override base __str__ to ensure we include the channel name in the
# error message
return u"{}: channel '{}' could not be found".format(
self.__class__.__name__,
self.message_data['channel'],
)
class IsArchived(SlackException):
pass
class MsgTooLong(SlackException):
pass
class NoText(SlackException):
pass
class RateLimited(SlackException):
pass
class NotAuthed(SlackException):
pass
class InvalidAuth(SlackException):
pass
class TokenRevoked(SlackException):
pass
class AccountInactive(SlackException):
pass
class UserIsBot(SlackException):
pass
LABEL_TO_EXCEPTION = {
'channel_not_found': ChannelNotFound,
'is_archived': IsArchived,
'msg_too_long': MsgTooLong,
'no_text': NoText,
'rate_limited': RateLimited,
'not_authed': NotAuthed,
'invalid_auth': InvalidAuth,
'token_revoked': TokenRevoked,
'account_inactive': AccountInactive,
'user_is_bot': UserIsBot,
}
## Instruction:
Add another specific error class
## Code After:
import six
class SlackException(ValueError):
def __init__(self, message, message_data):
super(SlackException, self).__init__(message)
self.message_data = message_data
@six.python_2_unicode_compatible
class ChannelNotFound(SlackException):
def __str__(self):
# Override base __str__ to ensure we include the channel name in the
# error message
return u"{}: channel '{}' could not be found".format(
self.__class__.__name__,
self.message_data['channel'],
)
class IsArchived(SlackException):
pass
class FatalError(SlackException):
pass
class MsgTooLong(SlackException):
pass
class NoText(SlackException):
pass
class RateLimited(SlackException):
pass
class NotAuthed(SlackException):
pass
class InvalidAuth(SlackException):
pass
class TokenRevoked(SlackException):
pass
class AccountInactive(SlackException):
pass
class UserIsBot(SlackException):
pass
LABEL_TO_EXCEPTION = {
'channel_not_found': ChannelNotFound,
'is_archived': IsArchived,
'fatal_error': FatalError,
'msg_too_long': MsgTooLong,
'no_text': NoText,
'rate_limited': RateLimited,
'not_authed': NotAuthed,
'invalid_auth': InvalidAuth,
'token_revoked': TokenRevoked,
'account_inactive': AccountInactive,
'user_is_bot': UserIsBot,
}
|
...
class IsArchived(SlackException):
pass
class FatalError(SlackException):
pass
...
LABEL_TO_EXCEPTION = {
'channel_not_found': ChannelNotFound,
'is_archived': IsArchived,
'fatal_error': FatalError,
'msg_too_long': MsgTooLong,
'no_text': NoText,
'rate_limited': RateLimited,
...
|
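A sketch of how a caller might use LABEL_TO_EXCEPTION to surface the right class for a Slack error label; the lookup-and-raise helper below is an assumption for illustration, not part of this module:
def raise_for_error(error_label, message_data):
    # Fall back to the generic SlackException for labels that are not mapped.
    exc_class = LABEL_TO_EXCEPTION.get(error_label, SlackException)
    raise exc_class(error_label, message_data)
# A 'fatal_error' response would now surface as the new FatalError class:
# raise_for_error('fatal_error', {'text': 'hello'})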
5caa758b5638e0244da6818aa27092ad41801cc1
|
kazoo/tests/test_interrupt.py
|
kazoo/tests/test_interrupt.py
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
Add a sanity check per @bbangert
|
Add a sanity check per @bbangert
|
Python
|
apache-2.0
|
AlexanderplUs/kazoo,bsanders/kazoo,tempbottle/kazoo,rockerbox/kazoo,python-zk/kazoo,AlexanderplUs/kazoo,pombredanne/kazoo,Asana/kazoo,kormat/kazoo,harlowja/kazoo,rockerbox/kazoo,rgs1/kazoo,jacksontj/kazoo,max0d41/kazoo,bsanders/kazoo,rgs1/kazoo,tempbottle/kazoo,pombredanne/kazoo,harlowja/kazoo,max0d41/kazoo,kormat/kazoo,jacksontj/kazoo,python-zk/kazoo
|
python
|
## Code Before:
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
## Instruction:
Add a sanity check per @bbangert
## Code After:
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
...
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
...
|
54ab41cb8c30ddd46154f23e89947286222616e1
|
raven/__init__.py
|
raven/__init__.py
|
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
from raven.base import * # NOQA
from raven.conf import * # NOQA
|
import os
import os.path
from raven.base import * # NOQA
from raven.conf import * # NOQA
__all__ = ('VERSION', 'Client', 'load', 'get_version')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read().strip()[:7]
finally:
fh.close()
def get_revision():
"""
:returns: Revision number of this branch/checkout, if available. None if
no revision number can be determined.
"""
package_dir = os.path.dirname(__file__)
checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir, os.pardir))
path = os.path.join(checkout_dir, '.git')
if os.path.exists(path):
return _get_git_revision(path)
return None
def get_version():
base = VERSION
if __build__:
base = '%s (%s)' % (base, __build__)
return base
__build__ = get_revision()
__docformat__ = 'restructuredtext en'
|
Add git sha to version if available
|
Add git sha to version if available
|
Python
|
bsd-3-clause
|
getsentry/raven-python,icereval/raven-python,inspirehep/raven-python,inspirehep/raven-python,dbravender/raven-python,hzy/raven-python,getsentry/raven-python,jmagnusson/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,icereval/raven-python,ewdurbin/raven-python,smarkets/raven-python,johansteffner/raven-python,jmp0xf/raven-python,jmagnusson/raven-python,inspirehep/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jmagnusson/raven-python,ewdurbin/raven-python,ronaldevers/raven-python,jmp0xf/raven-python,someonehan/raven-python,smarkets/raven-python,icereval/raven-python,jbarbuto/raven-python,danriti/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,arthurlogilab/raven-python,smarkets/raven-python,percipient/raven-python,hzy/raven-python,danriti/raven-python,dbravender/raven-python,ewdurbin/raven-python,recht/raven-python,akheron/raven-python,Photonomie/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,dbravender/raven-python,recht/raven-python,lepture/raven-python,danriti/raven-python,lepture/raven-python,ronaldevers/raven-python,arthurlogilab/raven-python,Photonomie/raven-python,nikolas/raven-python,akheron/raven-python,icereval/raven-python,johansteffner/raven-python,inspirehep/raven-python,Photonomie/raven-python,jbarbuto/raven-python,nikolas/raven-python,akalipetis/raven-python,akheron/raven-python,percipient/raven-python,someonehan/raven-python,jbarbuto/raven-python,getsentry/raven-python,someonehan/raven-python,ronaldevers/raven-python,lepture/raven-python,arthurlogilab/raven-python,akalipetis/raven-python,hzy/raven-python,jmp0xf/raven-python,smarkets/raven-python,nikolas/raven-python,akalipetis/raven-python,recht/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jbarbuto/raven-python,nikolas/raven-python,percipient/raven-python
|
python
|
## Code Before:
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
from raven.base import * # NOQA
from raven.conf import * # NOQA
## Instruction:
Add git sha to version if available
## Code After:
import os
import os.path
from raven.base import * # NOQA
from raven.conf import * # NOQA
__all__ = ('VERSION', 'Client', 'load', 'get_version')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read().strip()[:7]
finally:
fh.close()
def get_revision():
"""
:returns: Revision number of this branch/checkout, if available. None if
no revision number can be determined.
"""
package_dir = os.path.dirname(__file__)
checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir, os.pardir))
path = os.path.join(checkout_dir, '.git')
if os.path.exists(path):
return _get_git_revision(path)
return None
def get_version():
base = VERSION
if __build__:
base = '%s (%s)' % (base, __build__)
return base
__build__ = get_revision()
__docformat__ = 'restructuredtext en'
|
...
import os
import os.path
from raven.base import * # NOQA
from raven.conf import * # NOQA
__all__ = ('VERSION', 'Client', 'load', 'get_version')
try:
VERSION = __import__('pkg_resources') \
...
except Exception as e:
VERSION = 'unknown'
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read().strip()[:7]
finally:
fh.close()
def get_revision():
"""
:returns: Revision number of this branch/checkout, if available. None if
no revision number can be determined.
"""
package_dir = os.path.dirname(__file__)
checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir, os.pardir))
path = os.path.join(checkout_dir, '.git')
if os.path.exists(path):
return _get_git_revision(path)
return None
def get_version():
base = VERSION
if __build__:
base = '%s (%s)' % (base, __build__)
return base
__build__ = get_revision()
__docformat__ = 'restructuredtext en'
...
|
acdd1c290f5c2dee627f5dc9f9f2a415f0a4a6d4
|
tests/de/gurkenlabs/litiengine/environment/tilemap/xml/CustomPropertyProviderTests.java
|
tests/de/gurkenlabs/litiengine/environment/tilemap/xml/CustomPropertyProviderTests.java
|
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import org.junit.Test;
import junit.framework.Assert;
public class CustomPropertyProviderTests {
@Test
public void testSetCustomProperty() {
CustomPropertyProvider propProvider = new CustomPropertyProvider();
propProvider.setCustomProperty("test", "testvalue");
Assert.assertEquals("testvalue", propProvider.getCustomProperty("test"));
Assert.assertNull(propProvider.getCustomProperty("test2"));
Assert.assertEquals(1, propProvider.getAllCustomProperties().size());
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
}
}
|
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import java.util.ArrayList;
import org.junit.Test;
import junit.framework.Assert;
public class CustomPropertyProviderTests {
@Test
public void testSetCustomProperty() {
CustomPropertyProvider propProvider = new CustomPropertyProvider();
propProvider.setCustomProperty("test", "testvalue");
Assert.assertEquals("testvalue", propProvider.getCustomProperty("test"));
Assert.assertNull(propProvider.getCustomProperty("test2"));
Assert.assertEquals(1, propProvider.getAllCustomProperties().size());
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
ArrayList<Property> props = new ArrayList<>();
props.add(new Property("test2", "testvalue3"));
props.add(new Property("test3", "testvalue4"));
propProvider.setCustomProperties(props);
Assert.assertEquals(2, propProvider.getAllCustomProperties().size());
Assert.assertEquals("testvalue3", propProvider.getCustomProperty("test2"));
Assert.assertEquals("testvalue4", propProvider.getCustomProperty("test3"));
}
}
|
Add additional test case for setting custom properties by a collection.
|
Add additional test case for setting custom properties by a collection.
|
Java
|
mit
|
gurkenlabs/litiengine,gurkenlabs/litiengine
|
java
|
## Code Before:
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import org.junit.Test;
import junit.framework.Assert;
public class CustomPropertyProviderTests {
@Test
public void testSetCustomProperty() {
CustomPropertyProvider propProvider = new CustomPropertyProvider();
propProvider.setCustomProperty("test", "testvalue");
Assert.assertEquals("testvalue", propProvider.getCustomProperty("test"));
Assert.assertNull(propProvider.getCustomProperty("test2"));
Assert.assertEquals(1, propProvider.getAllCustomProperties().size());
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
}
}
## Instruction:
Add additional test case for setting custom properties by a collection.
## Code After:
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import java.util.ArrayList;
import org.junit.Test;
import junit.framework.Assert;
public class CustomPropertyProviderTests {
@Test
public void testSetCustomProperty() {
CustomPropertyProvider propProvider = new CustomPropertyProvider();
propProvider.setCustomProperty("test", "testvalue");
Assert.assertEquals("testvalue", propProvider.getCustomProperty("test"));
Assert.assertNull(propProvider.getCustomProperty("test2"));
Assert.assertEquals(1, propProvider.getAllCustomProperties().size());
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
ArrayList<Property> props = new ArrayList<>();
props.add(new Property("test2", "testvalue3"));
props.add(new Property("test3", "testvalue4"));
propProvider.setCustomProperties(props);
Assert.assertEquals(2, propProvider.getAllCustomProperties().size());
Assert.assertEquals("testvalue3", propProvider.getCustomProperty("test2"));
Assert.assertEquals("testvalue4", propProvider.getCustomProperty("test3"));
}
}
|
...
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import java.util.ArrayList;
import org.junit.Test;
...
propProvider.setCustomProperty("test", "testvalue2");
Assert.assertEquals("testvalue2", propProvider.getCustomProperty("test"));
ArrayList<Property> props = new ArrayList<>();
props.add(new Property("test2", "testvalue3"));
props.add(new Property("test3", "testvalue4"));
propProvider.setCustomProperties(props);
Assert.assertEquals(2, propProvider.getAllCustomProperties().size());
Assert.assertEquals("testvalue3", propProvider.getCustomProperty("test2"));
Assert.assertEquals("testvalue4", propProvider.getCustomProperty("test3"));
}
}
...
|
fa609681c2732e655cde9075182af918983ccc1f
|
photutils/utils/_misc.py
|
photutils/utils/_misc.py
|
from datetime import datetime, timezone
def _get_version_info():
"""
Return a dictionary of the installed version numbers for photutils
and its dependencies.
Returns
-------
result : dict
A dictionary containing the version numbers for photutils and
its dependencies.
"""
versions = {}
packages = ('photutils', 'astropy', 'numpy', 'scipy', 'skimage')
for package in packages:
try:
pkg = __import__(package)
version = pkg.__version__
except ImportError:
version = None
versions[package] = version
return versions
def _get_date(utc=False):
"""
Return a string of the current date/time.
Parameters
----------
utz : bool, optional
Whether to use the UTZ timezone instead of the local timezone.
Returns
-------
result : str
The current date/time.
"""
if not utc:
now = datetime.now().astimezone()
else:
now = datetime.now(timezone.utc)
return now.strftime('%Y-%m-%d %H:%M:%S %Z')
def _get_meta(utc=False):
"""
Return a metadata dictionary with the package versions and current
date/time.
"""
return {'date': _get_date(utc=utc),
'version': _get_version_info()}
|
from datetime import datetime, timezone
import sys
def _get_version_info():
"""
Return a dictionary of the installed version numbers for photutils
and its dependencies.
Returns
-------
result : dict
A dictionary containing the version numbers for photutils and
its dependencies.
"""
versions = {'Python': sys.version.split()[0]}
packages = ('photutils', 'astropy', 'numpy', 'scipy', 'skimage',
'sklearn', 'matplotlib', 'gwcs', 'bottleneck')
for package in packages:
try:
pkg = __import__(package)
version = pkg.__version__
except ImportError:
version = None
versions[package] = version
return versions
def _get_date(utc=False):
"""
Return a string of the current date/time.
Parameters
----------
utz : bool, optional
Whether to use the UTZ timezone instead of the local timezone.
Returns
-------
result : str
The current date/time.
"""
if not utc:
now = datetime.now().astimezone()
else:
now = datetime.now(timezone.utc)
return now.strftime('%Y-%m-%d %H:%M:%S %Z')
def _get_meta(utc=False):
"""
Return a metadata dictionary with the package versions and current
date/time.
"""
return {'date': _get_date(utc=utc),
'version': _get_version_info()}
|
Add all optional dependencies to version info dict
|
Add all optional dependencies to version info dict
|
Python
|
bsd-3-clause
|
larrybradley/photutils,astropy/photutils
|
python
|
## Code Before:
from datetime import datetime, timezone
def _get_version_info():
"""
Return a dictionary of the installed version numbers for photutils
and its dependencies.
Returns
-------
result : dict
A dictionary containing the version numbers for photutils and
its dependencies.
"""
versions = {}
packages = ('photutils', 'astropy', 'numpy', 'scipy', 'skimage')
for package in packages:
try:
pkg = __import__(package)
version = pkg.__version__
except ImportError:
version = None
versions[package] = version
return versions
def _get_date(utc=False):
"""
Return a string of the current date/time.
Parameters
----------
utz : bool, optional
Whether to use the UTZ timezone instead of the local timezone.
Returns
-------
result : str
The current date/time.
"""
if not utc:
now = datetime.now().astimezone()
else:
now = datetime.now(timezone.utc)
return now.strftime('%Y-%m-%d %H:%M:%S %Z')
def _get_meta(utc=False):
"""
Return a metadata dictionary with the package versions and current
date/time.
"""
return {'date': _get_date(utc=utc),
'version': _get_version_info()}
## Instruction:
Add all optional dependencies to version info dict
## Code After:
from datetime import datetime, timezone
import sys
def _get_version_info():
"""
Return a dictionary of the installed version numbers for photutils
and its dependencies.
Returns
-------
result : dict
A dictionary containing the version numbers for photutils and
its dependencies.
"""
versions = {'Python': sys.version.split()[0]}
packages = ('photutils', 'astropy', 'numpy', 'scipy', 'skimage',
'sklearn', 'matplotlib', 'gwcs', 'bottleneck')
for package in packages:
try:
pkg = __import__(package)
version = pkg.__version__
except ImportError:
version = None
versions[package] = version
return versions
def _get_date(utc=False):
"""
Return a string of the current date/time.
Parameters
----------
utz : bool, optional
Whether to use the UTZ timezone instead of the local timezone.
Returns
-------
result : str
The current date/time.
"""
if not utc:
now = datetime.now().astimezone()
else:
now = datetime.now(timezone.utc)
return now.strftime('%Y-%m-%d %H:%M:%S %Z')
def _get_meta(utc=False):
"""
Return a metadata dictionary with the package versions and current
date/time.
"""
return {'date': _get_date(utc=utc),
'version': _get_version_info()}
|
...
from datetime import datetime, timezone
import sys
def _get_version_info():
...
A dictionary containing the version numbers for photutils and
its dependencies.
"""
versions = {'Python': sys.version.split()[0]}
packages = ('photutils', 'astropy', 'numpy', 'scipy', 'skimage',
'sklearn', 'matplotlib', 'gwcs', 'bottleneck')
for package in packages:
try:
pkg = __import__(package)
...
|
d5e1f7d690d9f663e12cd4ee85979d10e2df04ea
|
test/test_get_rest.py
|
test/test_get_rest.py
|
import os
import unittest
import subprocess
import utils
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
test_dir = utils.set_search_paths(TOPDIR)
utils.set_search_paths(TOPDIR)
from allosmod.util import check_output
class Tests(unittest.TestCase):
def test_bad(self):
"""Test wrong arguments to get_rest"""
for args in ([], [''] * 2):
out = check_output(['allosmod', 'get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
out = check_output(['python', '-m',
'allosmod.get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
def test_simple(self):
"""Simple complete run of get_rest"""
with open('get_rest.in', 'w') as fh:
pass
out = check_output(['allosmod', 'get_rest',
os.path.join(test_dir, 'input',
'asite_pdb1.pdb')])
os.unlink('get_rest.in')
# PDB file contains no sugars, so no restraints should be output
self.assertEqual(out, '')
if __name__ == '__main__':
unittest.main()
|
import os
import unittest
import subprocess
import utils
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
test_dir = utils.set_search_paths(TOPDIR)
utils.set_search_paths(TOPDIR)
from allosmod.util import check_output
class Tests(unittest.TestCase):
def test_bad(self):
"""Test wrong arguments to get_rest"""
for args in ([], [''] * 2):
out = check_output(['allosmod', 'get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
out = check_output(['python', '-m',
'allosmod.get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
def test_simple(self):
"""Simple complete run of get_rest"""
with open('get_rest.in', 'w') as fh:
pass
out = check_output(['allosmod', 'get_rest',
os.path.join(test_dir, 'input',
'asite_pdb1.pdb')],
universal_newlines=True)
os.unlink('get_rest.in')
# PDB file contains no sugars, so no restraints should be output
self.assertEqual(out, '')
if __name__ == '__main__':
unittest.main()
|
Make sure that output is text in Python 2 & 3.
|
Make sure that output is text in Python 2 & 3.
|
Python
|
lgpl-2.1
|
salilab/allosmod-lib,salilab/allosmod-lib,salilab/allosmod-lib,salilab/allosmod-lib,salilab/allosmod-lib
|
python
|
## Code Before:
import os
import unittest
import subprocess
import utils
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
test_dir = utils.set_search_paths(TOPDIR)
utils.set_search_paths(TOPDIR)
from allosmod.util import check_output
class Tests(unittest.TestCase):
def test_bad(self):
"""Test wrong arguments to get_rest"""
for args in ([], [''] * 2):
out = check_output(['allosmod', 'get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
out = check_output(['python', '-m',
'allosmod.get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
def test_simple(self):
"""Simple complete run of get_rest"""
with open('get_rest.in', 'w') as fh:
pass
out = check_output(['allosmod', 'get_rest',
os.path.join(test_dir, 'input',
'asite_pdb1.pdb')])
os.unlink('get_rest.in')
# PDB file contains no sugars, so no restraints should be output
self.assertEqual(out, '')
if __name__ == '__main__':
unittest.main()
## Instruction:
Make sure that output is text in Python 2 & 3.
## Code After:
import os
import unittest
import subprocess
import utils
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
test_dir = utils.set_search_paths(TOPDIR)
utils.set_search_paths(TOPDIR)
from allosmod.util import check_output
class Tests(unittest.TestCase):
def test_bad(self):
"""Test wrong arguments to get_rest"""
for args in ([], [''] * 2):
out = check_output(['allosmod', 'get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
out = check_output(['python', '-m',
'allosmod.get_rest'] + args,
stderr=subprocess.STDOUT, retcode=2)
def test_simple(self):
"""Simple complete run of get_rest"""
with open('get_rest.in', 'w') as fh:
pass
out = check_output(['allosmod', 'get_rest',
os.path.join(test_dir, 'input',
'asite_pdb1.pdb')],
universal_newlines=True)
os.unlink('get_rest.in')
# PDB file contains no sugars, so no restraints should be output
self.assertEqual(out, '')
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
pass
out = check_output(['allosmod', 'get_rest',
os.path.join(test_dir, 'input',
'asite_pdb1.pdb')],
universal_newlines=True)
os.unlink('get_rest.in')
# PDB file contains no sugars, so no restraints should be output
self.assertEqual(out, '')
# ... rest of the code ...
|
be8acb2453985ad3de52a13fdcdd70783626aabe
|
src/main/java/info/u_team/u_team_core/item/armor/UArmorItem.java
|
src/main/java/info/u_team/u_team_core/item/armor/UArmorItem.java
|
package info.u_team.u_team_core.item.armor;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.EquipmentSlotType;
import net.minecraft.item.*;
public class UArmorItem extends ArmorItem {
protected final String name;
public UArmorItem(String name, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
this(name, null, properties, material, slot);
}
public UArmorItem(String name, ItemGroup group, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
super(material, slot, group == null ? properties : properties.group(group));
this.name = name;
}
@Override
public String getArmorTexture(ItemStack stack, Entity entity, EquipmentSlotType slot, String type) {
if (!material.getName().equals("invalid")) {
return null;
}
return String.format("%s:textures/models/armor/%s_layer_%d%s.png", getRegistryName().getNamespace(), name, (slot == EquipmentSlotType.LEGS ? 2 : 1), type == null ? "" : String.format("_%s", type));
}
protected String getTypeString(EquipmentSlotType slot) {
switch (slot) {
case HEAD:
return "helmet";
case CHEST:
return "chestplate";
case LEGS:
return "leggings";
case FEET:
return "boots";
default:
return "invalid";
}
}
}
|
package info.u_team.u_team_core.item.armor;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.EquipmentSlotType;
import net.minecraft.item.*;
public class UArmorItem extends ArmorItem {
protected final String textureName;
public UArmorItem(String textureName, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
this(textureName, null, properties, material, slot);
}
public UArmorItem(String textureName, ItemGroup group, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
super(material, slot, group == null ? properties : properties.group(group));
this.textureName = textureName;
}
@Override
public String getArmorTexture(ItemStack stack, Entity entity, EquipmentSlotType slot, String type) {
if (!material.getName().equals("invalid")) {
return null;
}
return String.format("%s:textures/models/armor/%s_layer_%d%s.png", getRegistryName().getNamespace(), textureName, (slot == EquipmentSlotType.LEGS ? 2 : 1), type == null ? "" : String.format("_%s", type));
}
protected String getTypeString(EquipmentSlotType slot) {
switch (slot) {
case HEAD:
return "helmet";
case CHEST:
return "chestplate";
case LEGS:
return "leggings";
case FEET:
return "boots";
default:
return "invalid";
}
}
}
|
Change name to texture name
|
Change name to texture name
|
Java
|
apache-2.0
|
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
|
java
|
## Code Before:
package info.u_team.u_team_core.item.armor;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.EquipmentSlotType;
import net.minecraft.item.*;
public class UArmorItem extends ArmorItem {
protected final String name;
public UArmorItem(String name, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
this(name, null, properties, material, slot);
}
public UArmorItem(String name, ItemGroup group, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
super(material, slot, group == null ? properties : properties.group(group));
this.name = name;
}
@Override
public String getArmorTexture(ItemStack stack, Entity entity, EquipmentSlotType slot, String type) {
if (!material.getName().equals("invalid")) {
return null;
}
return String.format("%s:textures/models/armor/%s_layer_%d%s.png", getRegistryName().getNamespace(), name, (slot == EquipmentSlotType.LEGS ? 2 : 1), type == null ? "" : String.format("_%s", type));
}
protected String getTypeString(EquipmentSlotType slot) {
switch (slot) {
case HEAD:
return "helmet";
case CHEST:
return "chestplate";
case LEGS:
return "leggings";
case FEET:
return "boots";
default:
return "invalid";
}
}
}
## Instruction:
Change name to texture name
## Code After:
package info.u_team.u_team_core.item.armor;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.EquipmentSlotType;
import net.minecraft.item.*;
public class UArmorItem extends ArmorItem {
protected final String textureName;
public UArmorItem(String textureName, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
this(textureName, null, properties, material, slot);
}
public UArmorItem(String textureName, ItemGroup group, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
super(material, slot, group == null ? properties : properties.group(group));
this.textureName = textureName;
}
@Override
public String getArmorTexture(ItemStack stack, Entity entity, EquipmentSlotType slot, String type) {
if (!material.getName().equals("invalid")) {
return null;
}
return String.format("%s:textures/models/armor/%s_layer_%d%s.png", getRegistryName().getNamespace(), textureName, (slot == EquipmentSlotType.LEGS ? 2 : 1), type == null ? "" : String.format("_%s", type));
}
protected String getTypeString(EquipmentSlotType slot) {
switch (slot) {
case HEAD:
return "helmet";
case CHEST:
return "chestplate";
case LEGS:
return "leggings";
case FEET:
return "boots";
default:
return "invalid";
}
}
}
|
// ... existing code ...
public class UArmorItem extends ArmorItem {
protected final String textureName;
public UArmorItem(String textureName, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
this(textureName, null, properties, material, slot);
}
public UArmorItem(String textureName, ItemGroup group, Properties properties, IArmorMaterial material, EquipmentSlotType slot) {
super(material, slot, group == null ? properties : properties.group(group));
this.textureName = textureName;
}
@Override
// ... modified code ...
if (!material.getName().equals("invalid")) {
return null;
}
return String.format("%s:textures/models/armor/%s_layer_%d%s.png", getRegistryName().getNamespace(), textureName, (slot == EquipmentSlotType.LEGS ? 2 : 1), type == null ? "" : String.format("_%s", type));
}
protected String getTypeString(EquipmentSlotType slot) {
// ... rest of the code ...
|
4848dfc9e965f7f82eb1f7aa4d90e8b39489a6a0
|
recipes/pyglet/display_import_tests.py
|
recipes/pyglet/display_import_tests.py
|
test_imports = [
'pyglet.font',
'pyglet.gl',
'pyglet.graphics',
'pyglet.image',
'pyglet.image.codecs',
'pyglet.input',
'pyglet.media',
'pyglet.media.drivers',
'pyglet.media.drivers.directsound',
'pyglet.window',
'pyglet.text',
'pyglet.text.formats',
]
def expected_fail(module):
try:
__import__(module)
except Exception as e:
# Yes, make the exception general, because we can't import the specific
# exception on linux without an actual display. Look at the source
# code if you want to see why.
assert 'No standard config is available.' in str(e)
# Handle an import that should only happen on linux and requires
# a display.
for module in test_imports:
expected_fail(module)
import sys
if sys.platform.startswith('linux'):
expected_fail('pyglet.window.xlib')
# And another import that is expected to fail in...
if sys.platform == 'darwin':
expected_fail('pyglet.window.cocoa')
|
test_imports = [
'pyglet.font',
'pyglet.gl',
'pyglet.graphics',
'pyglet.image',
'pyglet.image.codecs',
'pyglet.input',
'pyglet.media',
'pyglet.media.drivers',
'pyglet.media.drivers.directsound',
'pyglet.window',
'pyglet.text',
'pyglet.text.formats',
]
def expected_fail(module):
try:
print('Importing {}'.format(module))
__import__(module)
except Exception as e:
# Yes, make the exception general, because we can't import the specific
# exception on linux without an actual display. Look at the source
# code if you want to see why.
assert 'No standard config is available.' in str(e)
# Handle an import that should only happen on linux and requires
# a display.
for module in test_imports:
expected_fail(module)
import sys
if sys.platform.startswith('linux'):
expected_fail('pyglet.window.xlib')
# And another import that is expected to fail in...
if sys.platform == 'darwin':
expected_fail('pyglet.window.cocoa')
|
Add a tiny bit of output
|
Add a tiny bit of output
|
Python
|
bsd-3-clause
|
data-exp-lab/staged-recipes,Savvysherpa/staged-recipes,hbredin/staged-recipes,tylere/staged-recipes,johannesring/staged-recipes,shadowwalkersb/staged-recipes,kwilcox/staged-recipes,mcernak/staged-recipes,rvalieris/staged-recipes,barkls/staged-recipes,johanneskoester/staged-recipes,birdsarah/staged-recipes,rmcgibbo/staged-recipes,bmabey/staged-recipes,hadim/staged-recipes,caspervdw/staged-recipes,caspervdw/staged-recipes,dschreij/staged-recipes,guillochon/staged-recipes,atedstone/staged-recipes,koverholt/staged-recipes,larray-project/staged-recipes,hajapy/staged-recipes,isuruf/staged-recipes,gqmelo/staged-recipes,chrisburr/staged-recipes,NOAA-ORR-ERD/staged-recipes,mcs07/staged-recipes,conda-forge/staged-recipes,pstjohn/staged-recipes,grlee77/staged-recipes,igortg/staged-recipes,jochym/staged-recipes,jakirkham/staged-recipes,rmcgibbo/staged-recipes,asmeurer/staged-recipes,jcb91/staged-recipes,OpenPIV/staged-recipes,data-exp-lab/staged-recipes,benvandyke/staged-recipes,goanpeca/staged-recipes,tylere/staged-recipes,khallock/staged-recipes,stuertz/staged-recipes,pmlandwehr/staged-recipes,cpaulik/staged-recipes,nicoddemus/staged-recipes,planetarypy/staged-recipes,mcernak/staged-recipes,ceholden/staged-recipes,pstjohn/staged-recipes,sodre/staged-recipes,hadim/staged-recipes,basnijholt/staged-recipes,patricksnape/staged-recipes,scopatz/staged-recipes,glemaitre/staged-recipes,mariusvniekerk/staged-recipes,rolando-contrib/staged-recipes,planetarypy/staged-recipes,bmabey/staged-recipes,mcs07/staged-recipes,NOAA-ORR-ERD/staged-recipes,basnijholt/staged-recipes,goanpeca/staged-recipes,sodre/staged-recipes,hajapy/staged-recipes,SylvainCorlay/staged-recipes,shadowwalkersb/staged-recipes,dfroger/staged-recipes,ReimarBauer/staged-recipes,gqmelo/staged-recipes,jcb91/staged-recipes,stuertz/staged-recipes,chohner/staged-recipes,asmeurer/staged-recipes,scopatz/staged-recipes,patricksnape/staged-recipes,richardotis/staged-recipes,valgur/staged-recipes,JohnGreeley/staged-recipes,richardotis/staged-recipes,blowekamp/staged-recipes,sannykr/staged-recipes,dschreij/staged-recipes,kwilcox/staged-recipes,petrushy/staged-recipes,petrushy/staged-recipes,koverholt/staged-recipes,jerowe/staged-recipes,chrisburr/staged-recipes,benvandyke/staged-recipes,dharhas/staged-recipes,ocefpaf/staged-recipes,guillochon/staged-recipes,Cashalow/staged-recipes,jjhelmus/staged-recipes,sodre/staged-recipes,rvalieris/staged-recipes,blowekamp/staged-recipes,Cashalow/staged-recipes,jochym/staged-recipes,johannesring/staged-recipes,grlee77/staged-recipes,atedstone/staged-recipes,vamega/staged-recipes,SylvainCorlay/staged-recipes,vamega/staged-recipes,cpaulik/staged-recipes,glemaitre/staged-recipes,barkls/staged-recipes,conda-forge/staged-recipes,larray-project/staged-recipes,dfroger/staged-recipes,mariusvniekerk/staged-recipes,Juanlu001/staged-recipes,chohner/staged-recipes,rolando-contrib/staged-recipes,ceholden/staged-recipes,jerowe/staged-recipes,JohnGreeley/staged-recipes,OpenPIV/staged-recipes,khallock/staged-recipes,johanneskoester/staged-recipes,ocefpaf/staged-recipes,ReimarBauer/staged-recipes,isuruf/staged-recipes,Juanlu001/staged-recipes,nicoddemus/staged-recipes,synapticarbors/staged-recipes,jjhelmus/staged-recipes,Savvysherpa/staged-recipes,pmlandwehr/staged-recipes,hbredin/staged-recipes,sannykr/staged-recipes,synapticarbors/staged-recipes,dharhas/staged-recipes,jakirkham/staged-recipes,igortg/staged-recipes,birdsarah/staged-recipes,valgur/staged-recipes
|
python
|
## Code Before:
test_imports = [
'pyglet.font',
'pyglet.gl',
'pyglet.graphics',
'pyglet.image',
'pyglet.image.codecs',
'pyglet.input',
'pyglet.media',
'pyglet.media.drivers',
'pyglet.media.drivers.directsound',
'pyglet.window',
'pyglet.text',
'pyglet.text.formats',
]
def expected_fail(module):
try:
__import__(module)
except Exception as e:
# Yes, make the exception general, because we can't import the specific
# exception on linux without an actual display. Look at the source
# code if you want to see why.
assert 'No standard config is available.' in str(e)
# Handle an import that should only happen on linux and requires
# a display.
for module in test_imports:
expected_fail(module)
import sys
if sys.platform.startswith('linux'):
expected_fail('pyglet.window.xlib')
# And another import that is expected to fail in...
if sys.platform == 'darwin':
expected_fail('pyglet.window.cocoa')
## Instruction:
Add a tiny bit of output
## Code After:
test_imports = [
'pyglet.font',
'pyglet.gl',
'pyglet.graphics',
'pyglet.image',
'pyglet.image.codecs',
'pyglet.input',
'pyglet.media',
'pyglet.media.drivers',
'pyglet.media.drivers.directsound',
'pyglet.window',
'pyglet.text',
'pyglet.text.formats',
]
def expected_fail(module):
try:
print('Importing {}'.format(module))
__import__(module)
except Exception as e:
# Yes, make the exception general, because we can't import the specific
# exception on linux without an actual display. Look at the source
# code if you want to see why.
assert 'No standard config is available.' in str(e)
# Handle an import that should only happen on linux and requires
# a display.
for module in test_imports:
expected_fail(module)
import sys
if sys.platform.startswith('linux'):
expected_fail('pyglet.window.xlib')
# And another import that is expected to fail in...
if sys.platform == 'darwin':
expected_fail('pyglet.window.cocoa')
|
// ... existing code ...
def expected_fail(module):
try:
print('Importing {}'.format(module))
__import__(module)
except Exception as e:
# Yes, make the exception general, because we can't import the specific
// ... rest of the code ...
|
22ae3a2e9a236de61c078d234d920a3e6bc62d7b
|
pylisp/application/lispd/address_tree/ddt_container_node.py
|
pylisp/application/lispd/address_tree/ddt_container_node.py
|
'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
|
'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
|
Add a bit of docs
|
Add a bit of docs
|
Python
|
bsd-3-clause
|
steffann/pylisp
|
python
|
## Code Before:
'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
## Instruction:
Add a bit of docs
## Code After:
'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
|
# ... existing code ...
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
# ... rest of the code ...
|
803987497eab9c188f4abfcd0dc49b4d311d3dad
|
src/main/java/simplemods/cheesemod/blocks/BlockHotPepperCheeseOre.java
|
src/main/java/simplemods/cheesemod/blocks/BlockHotPepperCheeseOre.java
|
package simplemods.cheesemod.blocks;
import java.util.Random;
import simplemods.cheesemod.BaseMod;
import simplemods.cheesemod.CommonProxy;
import simplemods.cheesemod.inventory.CCreativeTabs;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.item.Item;
/**
* No, this is not a hopper...
*/
public class BlockHotPepperCheeseOre extends Block{
public BlockHotPepperCheeseOre(Material p_i45394_1_) {
super(Material.rock);
setCreativeTab(CCreativeTabs.tabBlock);
setBlockName("pepper_cheese_ore");
setBlockTextureName("cheesemod:pepper_cheese_ore");
setLightLevel(0.325F);
}
@Override
public Item getItemDropped(int p_149650_1_, Random p_149650_2_,
int p_149650_3_) {
return CommonProxy.hot_pepper_cheese;
}
}
|
package simplemods.cheesemod.blocks;
import java.util.Random;
import simplemods.cheesemod.BaseMod;
import simplemods.cheesemod.CommonProxy;
import simplemods.cheesemod.inventory.CCreativeTabs;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.item.Item;
import net.minecraft.item.ItemPickaxe;
/**
* No, this is not a hopper...
*/
public class BlockHotPepperCheeseOre extends Block{
public BlockHotPepperCheeseOre(Material p_i45394_1_) {
super(Material.rock);
setCreativeTab(CCreativeTabs.tabBlock);
setBlockName("pepper_cheese_ore");
setBlockTextureName("cheesemod:pepper_cheese_ore");
setLightLevel(0.325F);
setHardness(0.4F);
setHarvestLevel("pickaxe", 0);
}
@Override
public Item getItemDropped(int p_149650_1_, Random p_149650_2_,
int p_149650_3_) {
return CommonProxy.hot_pepper_cheese;
}
}
|
Fix hot pepper cheese hardness bug
|
Fix hot pepper cheese hardness bug
|
Java
|
lgpl-2.1
|
cdbbnnyCode/cheese-mod
|
java
|
## Code Before:
package simplemods.cheesemod.blocks;
import java.util.Random;
import simplemods.cheesemod.BaseMod;
import simplemods.cheesemod.CommonProxy;
import simplemods.cheesemod.inventory.CCreativeTabs;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.item.Item;
/**
* No, this is not a hopper...
*/
public class BlockHotPepperCheeseOre extends Block{
public BlockHotPepperCheeseOre(Material p_i45394_1_) {
super(Material.rock);
setCreativeTab(CCreativeTabs.tabBlock);
setBlockName("pepper_cheese_ore");
setBlockTextureName("cheesemod:pepper_cheese_ore");
setLightLevel(0.325F);
}
@Override
public Item getItemDropped(int p_149650_1_, Random p_149650_2_,
int p_149650_3_) {
return CommonProxy.hot_pepper_cheese;
}
}
## Instruction:
Fix hot pepper cheese hardness bug
## Code After:
package simplemods.cheesemod.blocks;
import java.util.Random;
import simplemods.cheesemod.BaseMod;
import simplemods.cheesemod.CommonProxy;
import simplemods.cheesemod.inventory.CCreativeTabs;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.item.Item;
import net.minecraft.item.ItemPickaxe;
/**
* No, this is not a hopper...
*/
public class BlockHotPepperCheeseOre extends Block{
public BlockHotPepperCheeseOre(Material p_i45394_1_) {
super(Material.rock);
setCreativeTab(CCreativeTabs.tabBlock);
setBlockName("pepper_cheese_ore");
setBlockTextureName("cheesemod:pepper_cheese_ore");
setLightLevel(0.325F);
setHardness(0.4F);
setHarvestLevel("pickaxe", 0);
}
@Override
public Item getItemDropped(int p_149650_1_, Random p_149650_2_,
int p_149650_3_) {
return CommonProxy.hot_pepper_cheese;
}
}
|
// ... existing code ...
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.item.Item;
import net.minecraft.item.ItemPickaxe;
/**
* No, this is not a hopper...
*/
// ... modified code ...
setBlockName("pepper_cheese_ore");
setBlockTextureName("cheesemod:pepper_cheese_ore");
setLightLevel(0.325F);
setHardness(0.4F);
setHarvestLevel("pickaxe", 0);
}
@Override
// ... rest of the code ...
|
eff7f0bf52507013859788eec29eea819af6ce63
|
grow/preprocessors/routes_cache.py
|
grow/preprocessors/routes_cache.py
|
from . import base
class RoutesCachePreprocessor(base.BasePreprocessor):
KIND = '_routes_cache'
def __init__(self, pod):
self.pod = pod
def run(self, build=True):
self.pod.routes.reset_cache(rebuild=True)
def list_watched_dirs(self):
return ['/content/', '/static/']
|
import datetime
from . import base
class RoutesCachePreprocessor(base.BasePreprocessor):
KIND = '_routes_cache'
LIMIT = datetime.timedelta(seconds=1)
def __init__(self, pod):
self.pod = pod
self._last_run = None
def run(self, build=True):
# Avoid rebuilding routes cache more than once per second.
now = datetime.datetime.now()
limit = RoutesCachePreprocessor.LIMIT
if not self._last_run or (now - self._last_run) > limit:
self.pod.routes.reset_cache(rebuild=True, inject=False)
self._last_run = now
def list_watched_dirs(self):
return ['/content/']
|
Implement ratelimit on routes cache.
|
Implement ratelimit on routes cache.
|
Python
|
mit
|
denmojo/pygrow,grow/pygrow,grow/grow,denmojo/pygrow,denmojo/pygrow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/pygrow,denmojo/pygrow
|
python
|
## Code Before:
from . import base
class RoutesCachePreprocessor(base.BasePreprocessor):
KIND = '_routes_cache'
def __init__(self, pod):
self.pod = pod
def run(self, build=True):
self.pod.routes.reset_cache(rebuild=True)
def list_watched_dirs(self):
return ['/content/', '/static/']
## Instruction:
Implement ratelimit on routes cache.
## Code After:
import datetime
from . import base
class RoutesCachePreprocessor(base.BasePreprocessor):
KIND = '_routes_cache'
LIMIT = datetime.timedelta(seconds=1)
def __init__(self, pod):
self.pod = pod
self._last_run = None
def run(self, build=True):
# Avoid rebuilding routes cache more than once per second.
now = datetime.datetime.now()
limit = RoutesCachePreprocessor.LIMIT
if not self._last_run or (now - self._last_run) > limit:
self.pod.routes.reset_cache(rebuild=True, inject=False)
self._last_run = now
def list_watched_dirs(self):
return ['/content/']
|
...
import datetime
from . import base
class RoutesCachePreprocessor(base.BasePreprocessor):
KIND = '_routes_cache'
LIMIT = datetime.timedelta(seconds=1)
def __init__(self, pod):
self.pod = pod
self._last_run = None
def run(self, build=True):
# Avoid rebuilding routes cache more than once per second.
now = datetime.datetime.now()
limit = RoutesCachePreprocessor.LIMIT
if not self._last_run or (now - self._last_run) > limit:
self.pod.routes.reset_cache(rebuild=True, inject=False)
self._last_run = now
def list_watched_dirs(self):
return ['/content/']
...
|
00222bb47818ea2fdf60847e6ad42ba96c39f16b
|
whacked4/whacked4/dehacked/filters.py
|
whacked4/whacked4/dehacked/filters.py
|
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
items = re.split(r"[,+| \t\f\r]+", value)
if len(items) <= 1 and value.isalpha() == True:
return value
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)
|
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)
|
Fix flag delimiter support from 30ec188.
|
Fix flag delimiter support from 30ec188.
|
Python
|
bsd-2-clause
|
GitExl/WhackEd4,GitExl/WhackEd4
|
python
|
## Code Before:
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
items = re.split(r"[,+| \t\f\r]+", value)
if len(items) <= 1 and value.isalpha() == True:
return value
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)
## Instruction:
Fix flag delimiter support from 30ec188.
## Code After:
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)
|
...
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
...
|
1f3164f95f0ce40bac38ac384bf5fdd181ab5fa1
|
importlib_metadata/__init__.py
|
importlib_metadata/__init__.py
|
from .api import (
Distribution, PackageNotFoundError, distribution, distributions,
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
from . import _hooks # noqa: F401
__all__ = [
'Distribution',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
__version__ = version(__name__)
|
from .api import (
Distribution, PackageNotFoundError, distribution, distributions,
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
__import__('importlib_metadata._hooks')
__all__ = [
'Distribution',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
__version__ = version(__name__)
|
Use imperative import to avoid lint (import order) and as a good convention when side-effects is the intention.
|
Use imperative import to avoid lint (import order) and as a good convention when side-effects is the intention.
|
Python
|
apache-2.0
|
python/importlib_metadata
|
python
|
## Code Before:
from .api import (
Distribution, PackageNotFoundError, distribution, distributions,
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
from . import _hooks # noqa: F401
__all__ = [
'Distribution',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
__version__ = version(__name__)
## Instruction:
Use imperative import to avoid lint (import order) and as a good convention when side-effects is the intention.
## Code After:
from .api import (
Distribution, PackageNotFoundError, distribution, distributions,
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
__import__('importlib_metadata._hooks')
__all__ = [
'Distribution',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
__version__ = version(__name__)
|
# ... existing code ...
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
__import__('importlib_metadata._hooks')
__all__ = [
# ... rest of the code ...
|
c308ddec90d37777896a9275738fac8e5764dec7
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='Eve-Mongoengine',
version='0.0.1',
url='https://github.com/hellerstanislav/eve-mongoengine',
author='Stanislav Heller',
author_email='[email protected]',
description='An Eve extension for Mongoengine ODM support',
packages=['eve_mongoengine'],
zip_safe=False,
test_suite="tests",
include_package_data=True,
platforms='any',
install_requires=[
'Eve>=0.1',
'Mongoengine>=0.8.4',
]
)
|
from setuptools import setup, find_packages
setup(
name='Eve-Mongoengine',
version='0.0.1',
url='https://github.com/hellerstanislav/eve-mongoengine',
author='Stanislav Heller',
author_email='[email protected]',
description='An Eve extension for Mongoengine ODM support',
packages=['eve_mongoengine'],
zip_safe=False,
test_suite="tests",
include_package_data=True,
platforms='any',
install_requires=[
'Eve>=0.1',
'pymongo==2.6.2',
'Mongoengine>=0.8.4',
]
)
|
Update dependency to pymongo 2.6.2
|
Update dependency to pymongo 2.6.2
|
Python
|
mit
|
bumbeelabs2/eve-mongoengine,MongoEngine/eve-mongoengine,rudaoshi/eve-mongoengine,kcaylor/eve-mongoengine,rudaoshi/eve-mongoengine
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name='Eve-Mongoengine',
version='0.0.1',
url='https://github.com/hellerstanislav/eve-mongoengine',
author='Stanislav Heller',
author_email='[email protected]',
description='An Eve extension for Mongoengine ODM support',
packages=['eve_mongoengine'],
zip_safe=False,
test_suite="tests",
include_package_data=True,
platforms='any',
install_requires=[
'Eve>=0.1',
'Mongoengine>=0.8.4',
]
)
## Instruction:
Update dependency to pymongo 2.6.2
## Code After:
from setuptools import setup, find_packages
setup(
name='Eve-Mongoengine',
version='0.0.1',
url='https://github.com/hellerstanislav/eve-mongoengine',
author='Stanislav Heller',
author_email='[email protected]',
description='An Eve extension for Mongoengine ODM support',
packages=['eve_mongoengine'],
zip_safe=False,
test_suite="tests",
include_package_data=True,
platforms='any',
install_requires=[
'Eve>=0.1',
'pymongo==2.6.2',
'Mongoengine>=0.8.4',
]
)
|
...
platforms='any',
install_requires=[
'Eve>=0.1',
'pymongo==2.6.2',
'Mongoengine>=0.8.4',
]
)
...
|
0cd084550fc5c1315fe33fcb00e57c1c332be6ab
|
indra/tests/test_mesh.py
|
indra/tests/test_mesh.py
|
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
|
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
def test_invalid_id():
mesh_name = mesh_client.get_mesh_name('34jkgfh')
assert mesh_name is None
|
Add test for invalid MESH ID
|
Add test for invalid MESH ID
|
Python
|
bsd-2-clause
|
pvtodorov/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy
|
python
|
## Code Before:
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
## Instruction:
Add test for invalid MESH ID
## Code After:
from indra.databases import mesh_client
def test_mesh_id_lookup():
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
def test_invalid_id():
mesh_name = mesh_client.get_mesh_name('34jkgfh')
assert mesh_name is None
|
// ... existing code ...
mesh_id = 'D003094'
mesh_name = mesh_client.get_mesh_name(mesh_id)
assert mesh_name == 'Collagen'
def test_invalid_id():
mesh_name = mesh_client.get_mesh_name('34jkgfh')
assert mesh_name is None
// ... rest of the code ...
|